Saving all output to "!!{outputDirectory}!!/input22.q.raw". Enter "record" with no arguments to stop it.
>>>  !run !!{qFileDirectory}!!/input22.q
>>>  CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE;
No rows affected 
>>>  LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4;
No rows affected 
>>>  
>>>  EXPLAIN SELECT a.KEY2 FROM (SELECT INPUT4.*, INPUT4.KEY as KEY2 FROM INPUT4) a ORDER BY KEY2 LIMIT 10;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME INPUT4))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME INPUT4))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL INPUT4) KEY) KEY2)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) KEY2))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL KEY2))) (TOK_LIMIT 10)))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        a:input4 '
'          TableScan'
'            alias: input4'
'            Select Operator'
'              expressions:'
'                    expr: key'
'                    type: string'
'              outputColumnNames: _col2'
'              Select Operator'
'                expressions:'
'                      expr: _col2'
'                      type: string'
'                outputColumnNames: _col0'
'                Reduce Output Operator'
'                  key expressions:'
'                        expr: _col0'
'                        type: string'
'                  sort order: +'
'                  tag: -1'
'                  value expressions:'
'                        expr: _col0'
'                        type: string'
'      Reduce Operator Tree:'
'        Extract'
'          Limit'
'            File Output Operator'
'              compressed: false'
'              GlobalTableId: 0'
'              table:'
'                  input format: org.apache.hadoop.mapred.TextInputFormat'
'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: 10'
''
''
48 rows selected 
>>>  
>>>  SELECT a.KEY2 FROM (SELECT INPUT4.*, INPUT4.KEY as KEY2 FROM INPUT4) a ORDER BY KEY2 LIMIT 10;
'key2'
'0'
'0'
'0'
'10'
'100'
'100'
'103'
'103'
'104'
'104'
10 rows selected 
>>>  
>>>  
>>>  !record