Saving all output to "!!{outputDirectory}!!/notable_alias1.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/notable_alias1.q
>>> CREATE TABLE dest1(dummy STRING, key INT, value DOUBLE) STORED AS TEXTFILE;
No rows affected
>>>
>>> EXPLAIN FROM src INSERT OVERWRITE TABLE dest1 SELECT '1234', key, count(1) WHERE src.key < 100 group by key;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR '1234') (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) 100)) (TOK_GROUPBY (TOK_TABLE_OR_COL key))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 depends on stages: Stage-1'
'  Stage-2 depends on stages: Stage-0'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: (key < 100.0)'
'                  type: boolean'
'              Select Operator'
'                expressions:'
'                      expr: key'
'                      type: string'
'                outputColumnNames: key'
'                Group By Operator'
'                  aggregations:'
'                        expr: count(1)'
'                  bucketGroup: false'
'                  keys:'
'                        expr: key'
'                        type: string'
'                  mode: hash'
'                  outputColumnNames: _col0, _col1'
'                  Reduce Output Operator'
'                    key expressions:'
'                          expr: _col0'
'                          type: string'
'                    sort order: +'
'                    Map-reduce partition columns:'
'                          expr: _col0'
'                          type: string'
'                    tag: -1'
'                    value expressions:'
'                          expr: _col1'
'                          type: bigint'
'      Reduce Operator Tree:'
'        Group By Operator'
'          aggregations:'
'                expr: count(VALUE._col0)'
'          bucketGroup: false'
'          keys:'
'                expr: KEY._col0'
'                type: string'
'          mode: mergepartial'
'          outputColumnNames: _col0, _col1'
'          Select Operator'
'            expressions:'
'                  expr: '1234''
'                  type: string'
'                  expr: _col0'
'                  type: string'
'                  expr: _col1'
'                  type: bigint'
'            outputColumnNames: _col0, _col1, _col2'
'            Select Operator'
'              expressions:'
'                    expr: _col0'
'                    type: string'
'                    expr: UDFToInteger(_col1)'
'                    type: int'
'                    expr: UDFToDouble(_col2)'
'                    type: double'
'              outputColumnNames: _col0, _col1, _col2'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 1'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'                    name: notable_alias1.dest1'
''
'  Stage: Stage-0'
'    Move Operator'
'      tables:'
'          replace: true'
'          table:'
'              input format: org.apache.hadoop.mapred.TextInputFormat'
'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
'              name: notable_alias1.dest1'
''
'  Stage: Stage-2'
'    Stats-Aggr Operator'
''
''
96 rows selected
>>>
>>> FROM src INSERT OVERWRITE TABLE dest1 SELECT '1234', key, count(1) WHERE src.key < 100 group by key;
'_col0','_col1','_col2'
No rows selected
>>>
>>> SELECT dest1.* FROM dest1;
'dummy','key','value'
'1234','0','3.0'
'1234','10','1.0'
'1234','11','1.0'
'1234','12','2.0'
'1234','15','2.0'
'1234','17','1.0'
'1234','18','2.0'
'1234','19','1.0'
'1234','2','1.0'
'1234','20','1.0'
'1234','24','2.0'
'1234','26','2.0'
'1234','27','1.0'
'1234','28','1.0'
'1234','30','1.0'
'1234','33','1.0'
'1234','34','1.0'
'1234','35','3.0'
'1234','37','2.0'
'1234','4','1.0'
'1234','41','1.0'
'1234','42','2.0'
'1234','43','1.0'
'1234','44','1.0'
'1234','47','1.0'
'1234','5','3.0'
'1234','51','2.0'
'1234','53','1.0'
'1234','54','1.0'
'1234','57','1.0'
'1234','58','2.0'
'1234','64','1.0'
'1234','65','1.0'
'1234','66','1.0'
'1234','67','2.0'
'1234','69','1.0'
'1234','70','3.0'
'1234','72','2.0'
'1234','74','1.0'
'1234','76','2.0'
'1234','77','1.0'
'1234','78','1.0'
'1234','8','1.0'
'1234','80','1.0'
'1234','82','1.0'
'1234','83','2.0'
'1234','84','2.0'
'1234','85','1.0'
'1234','86','1.0'
'1234','87','1.0'
'1234','9','1.0'
'1234','90','3.0'
'1234','92','1.0'
'1234','95','2.0'
'1234','96','1.0'
'1234','97','2.0'
'1234','98','2.0'
57 rows selected
>>> !record