Saving all output to "!!{outputDirectory}!!/insert_into4.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/insert_into4.q
>>> set hive.merge.smallfiles.avgsize=16000000;
No rows affected
>>> 
>>> DROP TABLE insert_into4a;
No rows affected
>>> DROP TABLE insert_into4b;
No rows affected
>>> 
>>> CREATE TABLE insert_into4a (key int, value string);
No rows affected
>>> CREATE TABLE insert_into4b (key int, value string);
No rows affected
>>> 
>>> EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into4a))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 10)))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 depends on stages: Stage-1'
'  Stage-2 depends on stages: Stage-0'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Select Operator'
'              expressions:'
'                    expr: key'
'                    type: string'
'                    expr: value'
'                    type: string'
'              outputColumnNames: _col0, _col1'
'              Limit'
'                Reduce Output Operator'
'                  sort order: '
'                  tag: -1'
'                  value expressions:'
'                        expr: _col0'
'                        type: string'
'                        expr: _col1'
'                        type: string'
'      Reduce Operator Tree:'
'        Extract'
'          Limit'
'            Select Operator'
'              expressions:'
'                    expr: UDFToInteger(_col0)'
'                    type: int'
'                    expr: _col1'
'                    type: string'
'              outputColumnNames: _col0, _col1'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: insert_into4.insert_into4a'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: insert_into4.insert_into4a'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
''
64 rows selected
>>> INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10;
'_col0','_col1'
No rows selected
>>> SELECT SUM(HASH(c)) FROM ( SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a ) t;
'_c0'
'-826625916'
1 row selected
>>> 
>>> EXPLAIN INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into4a))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 10)))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 depends on stages: Stage-1'
'  Stage-2 depends on stages: Stage-0'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Select Operator'
'              expressions:'
'                    expr: key'
'                    type: string'
'                    expr: value'
'                    type: string'
'              outputColumnNames: _col0, _col1'
'              Limit'
'                Reduce Output Operator'
'                  sort order: '
'                  tag: -1'
'                  value expressions:'
'                        expr: _col0'
'                        type: string'
'                        expr: _col1'
'                        type: string'
'      Reduce Operator Tree:'
'        Extract'
'          Limit'
'            Select Operator'
'              expressions:'
'                    expr: UDFToInteger(_col0)'
'                    type: int'
'                    expr: _col1'
'                    type: string'
'              outputColumnNames: _col0, _col1'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: insert_into4.insert_into4a'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: insert_into4.insert_into4a'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
''
64 rows selected
>>> INSERT INTO TABLE insert_into4a SELECT * FROM src LIMIT 10;
'_col0','_col1'
No rows selected
>>> SELECT SUM(HASH(c)) FROM ( SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4a ) t;
'_c0'
'-826625916'
1 row selected
>>> 
>>> --At this point insert_into4a has 2 files (if INSERT INTO merges isn't fixed)
>>> 
>>> EXPLAIN INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME insert_into4a))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into4b))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5'
'  Stage-4'
'  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6'
'  Stage-2 depends on stages: Stage-0'
'  Stage-3'
'  Stage-5'
'  Stage-6 depends on stages: Stage-5'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        insert_into4a '
'          TableScan'
'            alias: insert_into4a'
'            Select Operator'
'              expressions:'
'                    expr: key'
'                    type: int'
'                    expr: value'
'                    type: string'
'              outputColumnNames: _col0, _col1'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: insert_into4.insert_into4b'
''
'  Stage: Stage-7'
'    Conditional Operator'
''
'  Stage: Stage-4'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: insert_into4.insert_into4b'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
'  Stage: Stage-3'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: insert_into4.insert_into4b'
''
'  Stage: Stage-5'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: insert_into4.insert_into4b'
''
'  Stage: Stage-6'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
''
91 rows selected
>>> INSERT INTO TABLE insert_into4b SELECT * FROM insert_into4a;
'key','value'
No rows selected
>>> SELECT SUM(HASH(c)) FROM ( SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into4b ) t;
'_c0'
'-826625916'
1 row selected
>>> 
>>> DROP TABLE insert_into4a;
No rows affected
>>> DROP TABLE insert_into4b;
No rows affected
>>> !record