Saving all output to "!!{outputDirectory}!!/auto_join14.q.raw". Enter "record" with no arguments to stop it.
>>>  !run !!{qFileDirectory}!!/auto_join14.q
>>>  
>>>  set hive.auto.convert.join = true;
No rows affected 
>>>  
>>>  CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE;
No rows affected 
>>>  
>>>  set mapred.job.tracker=does.notexist.com:666;
No rows affected 
>>>  set hive.exec.mode.local.auto=true;
No rows affected 
>>>  
>>>  explain FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100 INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src)) (TOK_TABREF (TOK_TABNAME srcpart)) (and (AND (= (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL srcpart) key)) (= (. (TOK_TABLE_OR_COL srcpart) ds) '2008-04-08')) (> (. (TOK_TABLE_OR_COL src) key) 100)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) value)))))'
''
'STAGE DEPENDENCIES:'
'  Stage-6 is a root stage , consists of Stage-7, Stage-8, Stage-1'
'  Stage-7 has a backup stage: Stage-1'
'  Stage-4 depends on stages: Stage-7'
'  Stage-0 depends on stages: Stage-1, Stage-4, Stage-5'
'  Stage-2 depends on stages: Stage-0'
'  Stage-8 has a backup stage: Stage-1'
'  Stage-5 depends on stages: Stage-8'
'  Stage-1'
''
'STAGE PLANS:'
'  Stage: Stage-6'
'    Conditional Operator'
''
'  Stage: Stage-7'
'    Map Reduce Local Work'
'      Alias -> Map Local Tables:'
'        srcpart '
'          Fetch Operator'
'            limit: -1'
'      Alias -> Map Local Operator Tree:'
'        srcpart '
'          TableScan'
'            alias: srcpart'
'            Filter Operator'
'              predicate:'
'                  expr: (key > 100.0)'
'                  type: boolean'
'              HashTable Sink Operator'
'                condition expressions:'
'                  0 {key}'
'                  1 {value}'
'                handleSkewJoin: false'
'                keys:'
'                  0 [Column[key]]'
'                  1 [Column[key]]'
'                Position of Big Table: 0'
''
'  Stage: Stage-4'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: (key > 100.0)'
'                  type: boolean'
'              Map Join Operator'
'                condition map:'
'                     Inner Join 0 to 1'
'                condition expressions:'
'                  0 {key}'
'                  1 {value}'
'                handleSkewJoin: false'
'                keys:'
'                  0 [Column[key]]'
'                  1 [Column[key]]'
'                outputColumnNames: _col0, _col5'
'                Position of Big Table: 0'
'                Select Operator'
'                  expressions:'
'                        expr: _col0'
'                        type: string'
'                        expr: _col5'
'                        type: string'
'                  outputColumnNames: _col0, _col1'
'                  Select Operator'
'                    expressions:'
'                          expr: UDFToInteger(_col0)'
'                          type: int'
'                          expr: _col1'
'                          type: string'
'                    outputColumnNames: _col0, _col1'
'                    File Output Operator'
'                      compressed: false'
'                      GlobalTableId: 1'
'                      table:'
'                          input format: org.apache.hadoop.mapred.TextInputFormat'
'                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                          name: auto_join14.dest1'
'      Local Work:'
'        Map Reduce Local Work'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: auto_join14.dest1'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
'  Stage: Stage-8'
'    Map Reduce Local Work'
'      Alias -> Map Local Tables:'
'        src '
'          Fetch Operator'
'            limit: -1'
'      Alias -> Map Local Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: (key > 100.0)'
'                  type: boolean'
'              HashTable Sink Operator'
'                condition expressions:'
'                  0 {key}'
'                  1 {value}'
'                handleSkewJoin: false'
'                keys:'
'                  0 [Column[key]]'
'                  1 [Column[key]]'
'                Position of Big Table: 1'
''
'  Stage: Stage-5'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        srcpart '
'          TableScan'
'            alias: srcpart'
'            Filter Operator'
'              predicate:'
'                  expr: (key > 100.0)'
'                  type: boolean'
'              Map Join Operator'
'                condition map:'
'                     Inner Join 0 to 1'
'                condition expressions:'
'                  0 {key}'
'                  1 {value}'
'                handleSkewJoin: false'
'                keys:'
'                  0 [Column[key]]'
'                  1 [Column[key]]'
'                outputColumnNames: _col0, _col5'
'                Position of Big Table: 1'
'                Select Operator'
'                  expressions:'
'                        expr: _col0'
'                        type: string'
'                        expr: _col5'
'                        type: string'
'                  outputColumnNames: _col0, _col1'
'                  Select Operator'
'                    expressions:'
'                          expr: UDFToInteger(_col0)'
'                          type: int'
'                          expr: _col1'
'                          type: string'
'                    outputColumnNames: _col0, _col1'
'                    File Output Operator'
'                      compressed: false'
'                      GlobalTableId: 1'
'                      table:'
'                          input format: org.apache.hadoop.mapred.TextInputFormat'
'                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                          name: auto_join14.dest1'
'      Local Work:'
'        Map Reduce Local Work'
''
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: (key > 100.0)'
'                  type: boolean'
'              Reduce Output Operator'
'                key expressions:'
'                      expr: key'
'                      type: string'
'                sort order: +'
'                Map-reduce partition columns:'
'                      expr: key'
'                      type: string'
'                tag: 0'
'                value expressions:'
'                      expr: key'
'                      type: string'
'        srcpart '
'          TableScan'
'            alias: srcpart'
'            Filter Operator'
'              predicate:'
'                  expr: (key > 100.0)'
'                  type: boolean'
'              Reduce Output Operator'
'                key expressions:'
'                      expr: key'
'                      type: string'
'                sort order: +'
'                Map-reduce partition columns:'
'                      expr: key'
'                      type: string'
'                tag: 1'
'                value expressions:'
'                      expr: value'
'                      type: string'
'      Reduce Operator Tree:'
'        Join Operator'
'          condition map:'
'               Inner Join 0 to 1'
'          condition expressions:'
'            0 {VALUE._col0}'
'            1 {VALUE._col1}'
'          handleSkewJoin: false'
'          outputColumnNames: _col0, _col5'
'          Select Operator'
'            expressions:'
'                  expr: _col0'
'                  type: string'
'                  expr: _col5'
'                  type: string'
'            outputColumnNames: _col0, _col1'
'            Select Operator'
'              expressions:'
'                    expr: UDFToInteger(_col0)'
'                    type: int'
'                    expr: _col1'
'                    type: string'
'              outputColumnNames: _col0, _col1'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: auto_join14.dest1'
''
''
246 rows selected 
>>>  
>>>  FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100 INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value;
'_col0','_col1'
No rows selected 
>>>  
>>>  SELECT sum(hash(dest1.c1,dest1.c2)) FROM dest1;
'_c0'
'404554174174'
1 row selected 
>>>  !record