Saving all output to "!!{outputDirectory}!!/merge2.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/merge2.q
>>> set hive.merge.mapfiles=true;
No rows affected
>>> set hive.merge.mapredfiles=true;
No rows affected
>>> set mapred.min.split.size=256;
No rows affected
>>> set mapred.min.split.size.per.node=256;
No rows affected
>>> set mapred.min.split.size.per.rack=256;
No rows affected
>>> set mapred.max.split.size=256;
No rows affected
>>> 
>>> create table test1(key int, val int);
No rows affected
>>> 
>>> explain insert overwrite table test1 select key, count(1) from src group by key;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (TOK_TABLE_OR_COL key))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5'
'  Stage-4'
'  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6'
'  Stage-2 depends on stages: Stage-0'
'  Stage-3'
'  Stage-5'
'  Stage-6 depends on stages: Stage-5'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Select Operator'
'              expressions:'
'                    expr: key'
'                    type: string'
'              outputColumnNames: key'
'              Group By Operator'
'                aggregations:'
'                      expr: count(1)'
'                bucketGroup: false'
'                keys:'
'                      expr: key'
'                      type: string'
'                mode: hash'
'                outputColumnNames: _col0, _col1'
'                Reduce Output Operator'
'                  key expressions:'
'                        expr: _col0'
'                        type: string'
'                  sort order: +'
'                  Map-reduce partition columns:'
'                        expr: _col0'
'                        type: string'
'                  tag: -1'
'                  value expressions:'
'                        expr: _col1'
'                        type: bigint'
'      Reduce Operator Tree:'
'        Group By Operator'
'          aggregations:'
'                expr: count(VALUE._col0)'
'          bucketGroup: false'
'          keys:'
'                expr: KEY._col0'
'                type: string'
'          mode: mergepartial'
'          outputColumnNames: _col0, _col1'
'          Select Operator'
'            expressions:'
'                  expr: _col0'
'                  type: string'
'                  expr: _col1'
'                  type: bigint'
'            outputColumnNames: _col0, _col1'
'            Select Operator'
'              expressions:'
'                    expr: UDFToInteger(_col0)'
'                    type: int'
'                    expr: UDFToInteger(_col1)'
'                    type: int'
'              outputColumnNames: _col0, _col1'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: merge2.test1'
''
'  Stage: Stage-7'
'    Conditional Operator'
''
'  Stage: Stage-4'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: merge2.test1'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
'  Stage: Stage-3'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: merge2.test1'
''
'  Stage: Stage-5'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: merge2.test1'
''
'  Stage: Stage-6'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
''
134 rows selected
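Stage-7 above is the point of this test: it is a Conditional Operator that decides at run time, based on the average size of the files Stage-1 wrote, whether to simply move the output into place (Stage-4) or to first run one of the map-only merge jobs (Stage-3, or Stage-5 followed by the Stage-6 move) that concatenate the small files before Stage-0 loads the result into test1. One way to see whether the merge actually consolidated files is Hive's SHOW TABLE EXTENDED statement; a minimal sketch, noting that the exact statistics fields printed vary by Hive version:

    -- assuming test1 is in the current database:
    -- totalNumberFiles and totalFileSize report how many files back the table
    show table extended like 'test1';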
>>> 
>>> insert overwrite table test1 select key, count(1) from src group by key;
'_col0','_col1'
No rows selected
>>> 
>>> select * from test1;
'key','val'
'0','3'
'10','1'
'100','2'
'103','2'
'104','2'
'105','1'
'11','1'
'111','1'
'113','2'
'114','1'
'116','1'
'118','2'
'119','3'
'12','2'
'120','2'
'125','2'
'126','1'
'128','3'
'129','2'
'131','1'
'133','1'
'134','2'
'136','1'
'137','2'
'138','4'
'143','1'
'145','1'
'146','2'
'149','2'
'15','2'
'150','1'
'152','2'
'153','1'
'155','1'
'156','1'
'157','1'
'158','1'
'160','1'
'162','1'
'163','1'
'164','2'
'165','2'
'166','1'
'167','3'
'168','1'
'169','4'
'17','1'
'170','1'
'172','2'
'174','2'
'175','2'
'176','2'
'177','1'
'178','1'
'179','2'
'18','2'
'180','1'
'181','1'
'183','1'
'186','1'
'187','3'
'189','1'
'19','1'
'190','1'
'191','2'
'192','1'
'193','3'
'194','1'
'195','2'
'196','1'
'197','2'
'199','3'
'2','1'
'20','1'
'200','2'
'201','1'
'202','1'
'203','2'
'205','2'
'207','2'
'208','3'
'209','2'
'213','2'
'214','1'
'216','2'
'217','2'
'218','1'
'219','2'
'221','2'
'222','1'
'223','2'
'224','2'
'226','1'
'228','1'
'229','2'
'230','5'
'233','2'
'235','1'
'237','2'
'238','2'
'239','2'
'24','2'
'241','1'
'242','2'
'244','1'
'247','1'
'248','1'
'249','1'
'252','1'
'255','2'
'256','2'
'257','1'
'258','1'
'26','2'
'260','1'
'262','1'
'263','1'
'265','2'
'266','1'
'27','1'
'272','2'
'273','3'
'274','1'
'275','1'
'277','4'
'278','2'
'28','1'
'280','2'
'281','2'
'282','2'
'283','1'
'284','1'
'285','1'
'286','1'
'287','1'
'288','2'
'289','1'
'291','1'
'292','1'
'296','1'
'298','3'
'30','1'
'302','1'
'305','1'
'306','1'
'307','2'
'308','1'
'309','2'
'310','1'
'311','3'
'315','1'
'316','3'
'317','2'
'318','3'
'321','2'
'322','2'
'323','1'
'325','2'
'327','3'
'33','1'
'331','2'
'332','1'
'333','2'
'335','1'
'336','1'
'338','1'
'339','1'
'34','1'
'341','1'
'342','2'
'344','2'
'345','1'
'348','5'
'35','3'
'351','1'
'353','2'
'356','1'
'360','1'
'362','1'
'364','1'
'365','1'
'366','1'
'367','2'
'368','1'
'369','3'
'37','2'
'373','1'
'374','1'
'375','1'
'377','1'
'378','1'
'379','1'
'382','2'
'384','3'
'386','1'
'389','1'
'392','1'
'393','1'
'394','1'
'395','2'
'396','3'
'397','2'
'399','2'
'4','1'
'400','1'
'401','5'
'402','1'
'403','3'
'404','2'
'406','4'
'407','1'
'409','3'
'41','1'
'411','1'
'413','2'
'414','2'
'417','3'
'418','1'
'419','1'
'42','2'
'421','1'
'424','2'
'427','1'
'429','2'
'43','1'
'430','3'
'431','3'
'432','1'
'435','1'
'436','1'
'437','1'
'438','3'
'439','2'
'44','1'
'443','1'
'444','1'
'446','1'
'448','1'
'449','1'
'452','1'
'453','1'
'454','3'
'455','1'
'457','1'
'458','2'
'459','2'
'460','1'
'462','2'
'463','2'
'466','3'
'467','1'
'468','4'
'469','5'
'47','1'
'470','1'
'472','1'
'475','1'
'477','1'
'478','2'
'479','1'
'480','3'
'481','1'
'482','1'
'483','1'
'484','1'
'485','1'
'487','1'
'489','4'
'490','1'
'491','1'
'492','2'
'493','1'
'494','1'
'495','1'
'496','1'
'497','1'
'498','3'
'5','3'
'51','2'
'53','1'
'54','1'
'57','1'
'58','2'
'64','1'
'65','1'
'66','1'
'67','2'
'69','1'
'70','3'
'72','2'
'74','1'
'76','2'
'77','1'
'78','1'
'8','1'
'80','1'
'82','1'
'83','2'
'84','2'
'85','1'
'86','1'
'87','1'
'9','1'
'90','3'
'92','1'
'95','2'
'96','1'
'97','2'
'98','2'
309 rows selected
>>> 
>>> drop table test1;
No rows affected
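The split-size settings at the top of the script (256 bytes) force Hadoop to create many tiny splits, so even a query over the small src table fans out to multiple tasks and would normally leave multiple small output files behind; hive.merge.mapfiles and hive.merge.mapredfiles are what let the conditional merge stage coalesce them. A hypothetical way to see the difference, using a comparison table name of your choosing:

    -- sketch: rerun the same insert with merging disabled and compare file counts
    set hive.merge.mapfiles=false;
    set hive.merge.mapredfiles=false;
    create table test1_nomerge(key int, val int);  -- hypothetical comparison table
    insert overwrite table test1_nomerge select key, count(1) from src group by key;
    show table extended like 'test1_nomerge';      -- expect a larger totalNumberFiles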
>>> 
>>> 
>>> create table test_src(key string, value string) partitioned by (ds string);
No rows affected
>>> create table test1(key string);
No rows affected
>>> 
>>> insert overwrite table test_src partition(ds='101') select * from src;
'key','value'
No rows selected
>>> insert overwrite table test_src partition(ds='102') select * from src;
'key','value'
No rows selected
>>> 
>>> explain insert overwrite table test1 select key from test_src;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5'
'  Stage-4'
'  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6'
'  Stage-2 depends on stages: Stage-0'
'  Stage-3'
'  Stage-5'
'  Stage-6 depends on stages: Stage-5'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        test_src '
'          TableScan'
'            alias: test_src'
'            Select Operator'
'              expressions:'
'                    expr: key'
'                    type: string'
'              outputColumnNames: _col0'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: merge2.test1'
''
'  Stage: Stage-7'
'    Conditional Operator'
''
'  Stage: Stage-4'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: merge2.test1'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
'  Stage: Stage-3'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: merge2.test1'
''
'  Stage: Stage-5'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: merge2.test1'
''
'  Stage: Stage-6'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
''
89 rows selected
>>> insert overwrite table test1 select key from test_src;
'key'
No rows selected
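Note that the plan compiles the same way whether or not a merge will actually happen: the Conditional Operator and both merge branches are always present, and the choice is deferred to run time, when the average output file size is compared against hive.merge.smallfiles.avgsize. The next statement lowers that threshold to 16 bytes, so the merge branch should fire only for pathologically small outputs. A sketch of the two knobs involved; the 256000000-byte value is the documented default for the merged file size and may differ across versions:

    -- merge only when the average output file size falls below this (bytes)
    set hive.merge.smallfiles.avgsize=16;
    -- target size of each file the merge job writes (bytes)
    set hive.merge.size.per.task=256000000;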
>>> 
>>> set hive.merge.smallfiles.avgsize=16;
No rows affected
>>> explain insert overwrite table test1 select key from test_src;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5'
'  Stage-4'
'  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6'
'  Stage-2 depends on stages: Stage-0'
'  Stage-3'
'  Stage-5'
'  Stage-6 depends on stages: Stage-5'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        test_src '
'          TableScan'
'            alias: test_src'
'            Select Operator'
'              expressions:'
'                    expr: key'
'                    type: string'
'              outputColumnNames: _col0'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: merge2.test1'
''
'  Stage: Stage-7'
'    Conditional Operator'
''
'  Stage: Stage-4'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: merge2.test1'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
'  Stage: Stage-3'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: merge2.test1'
''
'  Stage: Stage-5'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        pfile:!!{hive.exec.scratchdir}!! '
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                  name: merge2.test1'
''
'  Stage: Stage-6'
'    Move Operator'
'      files:'
'          hdfs directory: true'
'          destination: pfile:!!{hive.exec.scratchdir}!!'
''
''
89 rows selected
>>> insert overwrite table test1 select key from test_src;
'key'
No rows selected
>>> !record
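Because the two runs differ only in the merge threshold, the table contents should be identical either way: merging changes the file layout under the table, never the rows. A final sanity check, assuming src is the standard 500-row test dataset (so each partition of test_src holds 500 rows):

    -- expect 1000 rows: 500 from each of test_src's two partitions
    select count(*) from test1;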