Saving all output to "!!{outputDirectory}!!/smb_mapjoin_5.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/smb_mapjoin_5.q
>>>
>>>
>>>
>>>
>>> create table smb_bucket_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
No rows affected
>>> create table smb_bucket_2(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
No rows affected
>>> create table smb_bucket_3(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 1 BUCKETS STORED AS RCFILE;
No rows affected
>>>
>>> load data local inpath '../data/files/smbbucket_1.rc' overwrite into table smb_bucket_1;
No rows affected
>>> load data local inpath '../data/files/smbbucket_2.rc' overwrite into table smb_bucket_2;
No rows affected
>>> load data local inpath '../data/files/smbbucket_3.rc' overwrite into table smb_bucket_3;
No rows affected
>>>
>>> set hive.optimize.bucketmapjoin = true;
No rows affected
>>> set hive.optimize.bucketmapjoin.sortedmerge = true;
No rows affected
>>> set hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
No rows affected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Inner Join 0 to 1'
' Inner Join 1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
No rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Left Outer Join0 to 1'
' Inner Join 1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
No rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Left Outer Join0 to 1'
' Left Outer Join1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'1','val_1','','','',''
'3','val_3','','','',''
'4','val_4','','','',''
'5','val_5','','','',''
'10','val_10','','','',''
5 rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Left Outer Join0 to 1'
' Right Outer Join1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'','','','','4','val_4'
'','','','','10','val_10'
'','','','','17','val_17'
'','','','','19','val_19'
'','','','','20','val_20'
'','','','','23','val_23'
6 rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Left Outer Join0 to 1'
' Outer Join 1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a left outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'1','val_1','','','',''
'3','val_3','','','',''
'4','val_4','','','',''
'','','','','4','val_4'
'5','val_5','','','',''
'10','val_10','','','',''
'','','','','10','val_10'
'','','','','17','val_17'
'','','','','19','val_19'
'','','','','20','val_20'
'','','','','23','val_23'
11 rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_RIGHTOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Right Outer Join0 to 1'
' Inner Join 1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'','','20','val_20','20','val_20'
'','','23','val_23','23','val_23'
2 rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_RIGHTOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Right Outer Join0 to 1'
' Left Outer Join1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'','','20','val_20','20','val_20'
'','','23','val_23','23','val_23'
'','','25','val_25','',''
'','','30','val_30','',''
4 rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_RIGHTOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Right Outer Join0 to 1'
' Right Outer Join1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'','','','','4','val_4'
'','','','','10','val_10'
'','','','','17','val_17'
'','','','','19','val_19'
'','','20','val_20','20','val_20'
'','','23','val_23','23','val_23'
6 rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_RIGHTOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Right Outer Join0 to 1'
' Outer Join 1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a right outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'','','','','4','val_4'
'','','','','10','val_10'
'','','','','17','val_17'
'','','','','19','val_19'
'','','20','val_20','20','val_20'
'','','23','val_23','23','val_23'
'','','25','val_25','',''
'','','30','val_30','',''
8 rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_FULLOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Outer Join 0 to 1'
' Inner Join 1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'','','20','val_20','20','val_20'
'','','23','val_23','23','val_23'
2 rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_FULLOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Outer Join 0 to 1'
' Left Outer Join1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key left outer join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'1','val_1','','','',''
'3','val_3','','','',''
'4','val_4','','','',''
'5','val_5','','','',''
'10','val_10','','','',''
'','','20','val_20','20','val_20'
'','','23','val_23','23','val_23'
'','','25','val_25','',''
'','','30','val_30','',''
9 rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_FULLOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Outer Join 0 to 1'
' Right Outer Join1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key right outer join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'','','','','4','val_4'
'','','','','10','val_10'
'','','','','17','val_17'
'','','','','19','val_19'
'','','20','val_20','20','val_20'
'','','23','val_23','23','val_23'
6 rows selected
>>>
>>> explain select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_FULLOUTERJOIN (TOK_TABREF (TOK_TABNAME smb_bucket_1) a) (TOK_TABREF (TOK_TABNAME smb_bucket_2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME smb_bucket_3) c) (= (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a c))) (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' b '
' TableScan'
' alias: b'
' Sorted Merge Bucket Map Join Operator'
' condition map:'
' Outer Join 0 to 1'
' Outer Join 1 to 2'
' condition expressions:'
' 0 {key} {value}'
' 1 {key} {value}'
' 2 {key} {value}'
' handleSkewJoin: false'
' keys:'
' 0 [Column[key]]'
' 1 [Column[key]]'
' 2 [Column[key]]'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Position of Big Table: 1'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: string'
' expr: _col4'
' type: int'
' expr: _col5'
' type: string'
' expr: _col8'
' type: int'
' expr: _col9'
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
71 rows selected
>>> select /*+mapjoin(a,c)*/ * from smb_bucket_1 a full outer join smb_bucket_2 b on a.key = b.key full outer join smb_bucket_3 c on b.key=c.key;
'key','value','key','value','key','value'
'1','val_1','','','',''
'3','val_3','','','',''
'4','val_4','','','',''
'','','','','4','val_4'
'5','val_5','','','',''
'10','val_10','','','',''
'','','','','10','val_10'
'','','','','17','val_17'
'','','','','19','val_19'
'','','20','val_20','20','val_20'
'','','23','val_23','23','val_23'
'','','25','val_25','',''
'','','30','val_30','',''
13 rows selected
>>>
>>>
>>>
>>>
>>>
>>> !record