Saving all output to "!!{outputDirectory}!!/load_dyn_part13.q.raw". Enter "record" with no arguments to stop it.
>>>  !run !!{qFileDirectory}!!/load_dyn_part13.q
>>>  show partitions srcpart;
'partition'
'ds=2008-04-08/hr=11'
'ds=2008-04-08/hr=12'
'ds=2008-04-09/hr=11'
'ds=2008-04-09/hr=12'
4 rows selected
>>>  
>>>  
>>>  
>>>  create table if not exists nzhang_part13 like srcpart;
No rows affected
>>>  describe extended nzhang_part13;
'col_name','data_type','comment'
'key','string',''
'value','string',''
'ds','string',''
'hr','string',''
'','',''
'Detailed Table Information','Table(tableName:nzhang_part13, dbName:load_dyn_part13, owner:!!{user.name}!!, createTime:!!UNIXTIME!!, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:string, comment:null), FieldSchema(name:value, type:string, comment:null), FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], location:!!{hive.metastore.warehouse.dir}!!/load_dyn_part13.db/nzhang_part13, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}, skewedInfo:SkewedInfo(skewedColNames:[], skewedColValues:[], skewedColValueLocationMaps:{})), partitionKeys:[FieldSchema(name:ds, type:string, comment:null), FieldSchema(name:hr, type:string, comment:null)], parameters:{transient_lastDdlTime=!!UNIXTIME!!}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)',''
6 rows selected
>>>  
>>>  set hive.merge.mapfiles=false;
No rows affected
>>>  set hive.merge.mapredfiles=false;
No rows affected
>>>  set hive.exec.dynamic.partition=true;
No rows affected
>>>  
>>>  explain insert overwrite table nzhang_part13 partition (ds="2010-03-03", hr) select * from ( select key, value, '22' from src where key < 20 union all select key, value, '33' from src where key > 20 and key < 40) s;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR '22')) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 20)))) (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR '33')) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) 20) (< (TOK_TABLE_OR_COL key) 40)))))) s)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME nzhang_part13) (TOK_PARTSPEC (TOK_PARTVAL ds "2010-03-03") (TOK_PARTVAL hr)))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 depends on stages: Stage-1'
'  Stage-2 depends on stages: Stage-0'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        null-subquery1:s-subquery1:src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: (key < 20.0)'
'                  type: boolean'
'              Select Operator'
'                expressions:'
'                      expr: key'
'                      type: string'
'                      expr: value'
'                      type: string'
'                      expr: '22''
'                      type: string'
'                outputColumnNames: _col0, _col1, _col2'
'                Union'
'                  Select Operator'
'                    expressions:'
'                          expr: _col0'
'                          type: string'
'                          expr: _col1'
'                          type: string'
'                          expr: _col2'
'                          type: string'
'                    outputColumnNames: _col0, _col1, _col2'
'                    File Output Operator'
'                      compressed: false'
'                      GlobalTableId: 1'
'                      table:'
'                          input format: org.apache.hadoop.mapred.TextInputFormat'
'                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                          name: load_dyn_part13.nzhang_part13'
'        null-subquery2:s-subquery2:src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: ((key > 20.0) and (key < 40.0))'
'                  type: boolean'
'              Select Operator'
'                expressions:'
'                      expr: key'
'                      type: string'
'                      expr: value'
'                      type: string'
'                      expr: '33''
'                      type: string'
'                outputColumnNames: _col0, _col1, _col2'
'                Union'
'                  Select Operator'
'                    expressions:'
'                          expr: _col0'
'                          type: string'
'                          expr: _col1'
'                          type: string'
'                          expr: _col2'
'                          type: string'
'                    outputColumnNames: _col0, _col1, _col2'
'                    File Output Operator'
'                      compressed: false'
'                      GlobalTableId: 1'
'                      table:'
'                          input format: org.apache.hadoop.mapred.TextInputFormat'
'                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                          name: load_dyn_part13.nzhang_part13'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          partition:'
'            ds 2010-03-03'
'            hr '
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: load_dyn_part13.nzhang_part13'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
''
98 rows selected
>>>  
>>>  insert overwrite table nzhang_part13 partition (ds="2010-03-03", hr) select * from ( select key, value, '22' from src where key < 20 union all select key, value, '33' from src where key > 20 and key < 40) s;
'key','value','_c2'
No rows selected
>>>  
>>>  show partitions nzhang_part13;
'partition'
'ds=2010-03-03/hr=22'
'ds=2010-03-03/hr=33'
2 rows selected
>>>  
>>>  select * from nzhang_part13 where ds is not null and hr is not null;
'key','value','ds','hr'
'15','val_15','2010-03-03','22'
'17','val_17','2010-03-03','22'
'0','val_0','2010-03-03','22'
'4','val_4','2010-03-03','22'
'12','val_12','2010-03-03','22'
'8','val_8','2010-03-03','22'
'0','val_0','2010-03-03','22'
'0','val_0','2010-03-03','22'
'15','val_15','2010-03-03','22'
'19','val_19','2010-03-03','22'
'10','val_10','2010-03-03','22'
'5','val_5','2010-03-03','22'
'11','val_11','2010-03-03','22'
'5','val_5','2010-03-03','22'
'2','val_2','2010-03-03','22'
'12','val_12','2010-03-03','22'
'5','val_5','2010-03-03','22'
'18','val_18','2010-03-03','22'
'9','val_9','2010-03-03','22'
'18','val_18','2010-03-03','22'
'27','val_27','2010-03-03','33'
'37','val_37','2010-03-03','33'
'35','val_35','2010-03-03','33'
'24','val_24','2010-03-03','33'
'26','val_26','2010-03-03','33'
'35','val_35','2010-03-03','33'
'34','val_34','2010-03-03','33'
'30','val_30','2010-03-03','33'
'33','val_33','2010-03-03','33'
'35','val_35','2010-03-03','33'
'24','val_24','2010-03-03','33'
'26','val_26','2010-03-03','33'
'28','val_28','2010-03-03','33'
'37','val_37','2010-03-03','33'
34 rows selected
>>>  
>>>  
>>>  !record
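Note: the dynamic-partition pattern this test exercises reduces to the sketch below. It reuses the test's own table names (srcpart, src, nzhang_part13); the commented-out nonstrict setting is noted only as context for the fully dynamic case and is not needed here, since ds is supplied statically and only hr is resolved at runtime.

-- Minimal HiveQL sketch of the mixed static/dynamic partition insert above.
set hive.exec.dynamic.partition=true;
-- Needed only if every partition column were dynamic (not the case in this test):
-- set hive.exec.dynamic.partition.mode=nonstrict;

create table if not exists nzhang_part13 like srcpart;

-- ds is fixed; hr is taken per-row from the last select column ('22' or '33'),
-- so one insert creates both partitions ds=2010-03-03/hr=22 and ds=2010-03-03/hr=33.
insert overwrite table nzhang_part13 partition (ds="2010-03-03", hr)
select * from (
  select key, value, '22' from src where key < 20
  union all
  select key, value, '33' from src where key > 20 and key < 40
) s;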