Saving all output to "!!{outputDirectory}!!/auto_join9.q.raw". Enter "record" with no arguments to stop it.
>>>  !run !!{qFileDirectory}!!/auto_join9.q
>>>  set hive.auto.convert.join = true;
No rows affected 
>>>  
>>>  CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
No rows affected 
>>>  
>>>  explain FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12';
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME srcpart) src1) (TOK_TABREF (TOK_TABNAME src) src2) (= (. (TOK_TABLE_OR_COL src1) key) (. (TOK_TABLE_OR_COL src2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) value))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL src1) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL src1) hr) '12')))))'
''
'STAGE DEPENDENCIES:'
'  Stage-6 is a root stage , consists of Stage-7, Stage-8, Stage-1'
'  Stage-7 has a backup stage: Stage-1'
'  Stage-4 depends on stages: Stage-7'
'  Stage-0 depends on stages: Stage-1, Stage-4, Stage-5'
'  Stage-2 depends on stages: Stage-0'
'  Stage-8 has a backup stage: Stage-1'
'  Stage-5 depends on stages: Stage-8'
'  Stage-1'
''
'STAGE PLANS:'
'  Stage: Stage-6'
'    Conditional Operator'
''
'  Stage: Stage-7'
'    Map Reduce Local Work'
'      Alias -> Map Local Tables:'
'        src2 '
'          Fetch Operator'
'            limit: -1'
'      Alias -> Map Local Operator Tree:'
'        src2 '
'          TableScan'
'            alias: src2'
'            HashTable Sink Operator'
'              condition expressions:'
'                0 {key} {ds} {hr}'
'                1 {value}'
'              handleSkewJoin: false'
'              keys:'
'                0 [Column[key]]'
'                1 [Column[key]]'
'              Position of Big Table: 0'
''
'  Stage: Stage-4'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src1 '
'          TableScan'
'            alias: src1'
'            Map Join Operator'
'              condition map:'
'                   Inner Join 0 to 1'
'              condition expressions:'
'                0 {key} {ds} {hr}'
'                1 {value}'
'              handleSkewJoin: false'
'              keys:'
'                0 [Column[key]]'
'                1 [Column[key]]'
'              outputColumnNames: _col0, _col2, _col3, _col7'
'              Position of Big Table: 0'
'              Select Operator'
'                expressions:'
'                      expr: _col0'
'                      type: string'
'                      expr: _col7'
'                      type: string'
'                outputColumnNames: _col0, _col1'
'                Select Operator'
'                  expressions:'
'                        expr: UDFToInteger(_col0)'
'                        type: int'
'                        expr: _col1'
'                        type: string'
'                  outputColumnNames: _col0, _col1'
'                  File Output Operator'
'                    compressed: false'
'                    GlobalTableId: 1'
'                    table:'
'                        input format: org.apache.hadoop.mapred.TextInputFormat'
'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                        name: auto_join9.dest1'
'      Local Work:'
'        Map Reduce Local Work'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: auto_join9.dest1'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
'  Stage: Stage-8'
'    Map Reduce Local Work'
'      Alias -> Map Local Tables:'
'        src1 '
'          Fetch Operator'
'            limit: -1'
'      Alias -> Map Local Operator Tree:'
'        src1 '
'          TableScan'
'            alias: src1'
'            HashTable Sink Operator'
'              condition expressions:'
'                0 {key} {ds} {hr}'
'                1 {value}'
'              handleSkewJoin: false'
'              keys:'
'                0 [Column[key]]'
'                1 [Column[key]]'
'              Position of Big Table: 1'
''
'  Stage: Stage-5'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src2 '
'          TableScan'
'            alias: src2'
'            Map Join Operator'
'              condition map:'
'                   Inner Join 0 to 1'
'              condition expressions:'
'                0 {key} {ds} {hr}'
'                1 {value}'
'              handleSkewJoin: false'
'              keys:'
'                0 [Column[key]]'
'                1 [Column[key]]'
'              outputColumnNames: _col0, _col2, _col3, _col7'
'              Position of Big Table: 1'
'              Select Operator'
'                expressions:'
'                      expr: _col0'
'                      type: string'
'                      expr: _col7'
'                      type: string'
'                outputColumnNames: _col0, _col1'
'                Select Operator'
'                  expressions:'
'                        expr: UDFToInteger(_col0)'
'                        type: int'
'                        expr: _col1'
'                        type: string'
'                  outputColumnNames: _col0, _col1'
'                  File Output Operator'
'                    compressed: false'
'                    GlobalTableId: 1'
'                    table:'
'                        input format: org.apache.hadoop.mapred.TextInputFormat'
'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                        name: auto_join9.dest1'
'      Local Work:'
'        Map Reduce Local Work'
''
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src1 '
'          TableScan'
'            alias: src1'
'            Reduce Output Operator'
'              key expressions:'
'                    expr: key'
'                    type: string'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: key'
'                    type: string'
'              tag: 0'
'              value expressions:'
'                    expr: key'
'                    type: string'
'                    expr: ds'
'                    type: string'
'                    expr: hr'
'                    type: string'
'        src2 '
'          TableScan'
'            alias: src2'
'            Reduce Output Operator'
'              key expressions:'
'                    expr: key'
'                    type: string'
'              sort order: +'
'              Map-reduce partition columns:'
'                    expr: key'
'                    type: string'
'              tag: 1'
'              value expressions:'
'                    expr: value'
'                    type: string'
'      Reduce Operator Tree:'
'        Join Operator'
'          condition map:'
'               Inner Join 0 to 1'
'          condition expressions:'
'            0 {VALUE._col0} {VALUE._col2} {VALUE._col3}'
'            1 {VALUE._col1}'
'          handleSkewJoin: false'
'          outputColumnNames: _col0, _col2, _col3, _col7'
'          Select Operator'
'            expressions:'
'                  expr: _col0'
'                  type: string'
'                  expr: _col7'
'                  type: string'
'            outputColumnNames: _col0, _col1'
'            Select Operator'
'              expressions:'
'                    expr: UDFToInteger(_col0)'
'                    type: int'
'                    expr: _col1'
'                    type: string'
'              outputColumnNames: _col0, _col1'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: auto_join9.dest1'
''
''
226 rows selected 
>>>  
>>>  FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where src1.ds = '2008-04-08' and src1.hr = '12';
'_col0','_col1'
No rows selected 
>>>  
>>>  
>>>  
>>>  SELECT sum(hash(dest1.key,dest1.value)) FROM dest1;
'_c0'
'101861029915'
1 row selected 
>>>  !record