Saving all output to "!!{outputDirectory}!!/insert_into6.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/insert_into6.q
>>> set hive.exec.dynamic.partition.mode=nonstrict;
No rows affected
>>> set hive.exec.dynamic.partition=true;
No rows affected
>>> 
>>> DROP TABLE insert_into6a;
No rows affected
>>> DROP TABLE insert_into6b;
No rows affected
>>> CREATE TABLE insert_into6a (key int, value string) PARTITIONED BY (ds string);
No rows affected
>>> CREATE TABLE insert_into6b (key int, value string) PARTITIONED BY (ds string);
No rows affected
>>> 
>>> EXPLAIN INSERT INTO TABLE insert_into6a PARTITION (ds='1') SELECT * FROM src LIMIT 150;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into6a) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 150)))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 depends on stages: Stage-1'
'  Stage-2 depends on stages: Stage-0'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Select Operator'
'              expressions:'
'                    expr: key'
'                    type: string'
'                    expr: value'
'                    type: string'
'              outputColumnNames: _col0, _col1'
'              Limit'
'                Reduce Output Operator'
'                  sort order: '
'                  tag: -1'
'                  value expressions:'
'                        expr: _col0'
'                        type: string'
'                        expr: _col1'
'                        type: string'
'      Reduce Operator Tree:'
'        Extract'
'          Limit'
'            Select Operator'
'              expressions:'
'                    expr: UDFToInteger(_col0)'
'                    type: int'
'                    expr: _col1'
'                    type: string'
'              outputColumnNames: _col0, _col1'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: insert_into6.insert_into6a'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          partition:'
'            ds 1'
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: insert_into6.insert_into6a'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
''
66 rows selected
>>> INSERT INTO TABLE insert_into6a PARTITION (ds='1') SELECT * FROM src LIMIT 150;
'_col0','_col1'
No rows selected
>>> INSERT INTO TABLE insert_into6a PARTITION (ds='2') SELECT * FROM src LIMIT 100;
'_col0','_col1'
No rows selected
>>> SELECT SUM(HASH(c)) FROM ( SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6a ) t;
'_c0'
'-35226404960'
1 row selected
>>> 
>>> EXPLAIN INSERT INTO TABLE insert_into6b PARTITION (ds) SELECT * FROM insert_into6a;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME insert_into6a))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert_into6b) (TOK_PARTSPEC (TOK_PARTVAL ds)))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5'
'  Stage-4'
'  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6'
'  Stage-2 depends on stages: Stage-0'
'  Stage-3'
'  Stage-5'
'  Stage-6 depends on stages: Stage-5'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        insert_into6a '
'          TableScan'
'            alias: insert_into6a'
'            Select Operator'
'              expressions:'
'                    expr: key'
'                    type: int'
'                    expr: value'
'                    type: string'
'                    expr: ds'
'                    type: string'
'              outputColumnNames: _col0, _col1, _col2'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: insert_into6.insert_into6b'
''
'  Stage: Stage-7'
'    Conditional Operator'
''
'  Stage: Stage-4'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          partition:'
'            ds '
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: insert_into6.insert_into6b'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
'  Stage: Stage-3'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: insert_into6.insert_into6b'
''
'  Stage: Stage-5'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: insert_into6.insert_into6b'
''
'  Stage: Stage-6'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
''
95 rows selected
>>> INSERT INTO TABLE insert_into6b PARTITION (ds) SELECT * FROM insert_into6a;
'key','value','ds'
No rows selected
>>> SELECT SUM(HASH(c)) FROM ( SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into6b ) t;
'_c0'
'-35226404960'
1 row selected
>>> 
>>> SHOW PARTITIONS insert_into6b;
'partition'
'ds=1'
'ds=2'
2 rows selected
>>> 
>>> DROP TABLE insert_into6a;
No rows affected
>>> DROP TABLE insert_into6b;
No rows affected
>>> 
>>> !record