Saving all output to "!!{outputDirectory}!!/quote2.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/quote2.q
>>> EXPLAIN SELECT 'abc', "abc", 'abc\'', "abc\"", 'abc\\', "abc\\", 'abc\\\'', "abc\\\"", 'abc\\\\', "abc\\\\", 'abc\\\\\'', "abc\\\\\"", 'abc\\\\\\', "abc\\\\\\", 'abc""""\\', "abc''''\\", "awk '{print NR\"\\t\"$0}'", 'tab\ttab', "tab\ttab" FROM src LIMIT 1;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'abc') (TOK_SELEXPR "abc") (TOK_SELEXPR 'abc\'') (TOK_SELEXPR "abc\"") (TOK_SELEXPR 'abc\\') (TOK_SELEXPR "abc\\") (TOK_SELEXPR 'abc\\\'') (TOK_SELEXPR "abc\\\"") (TOK_SELEXPR 'abc\\\\') (TOK_SELEXPR "abc\\\\") (TOK_SELEXPR 'abc\\\\\'') (TOK_SELEXPR "abc\\\\\"") (TOK_SELEXPR 'abc\\\\\\') (TOK_SELEXPR "abc\\\\\\") (TOK_SELEXPR 'abc""""\\') (TOK_SELEXPR "abc''''\\") (TOK_SELEXPR "awk '{print NR\"\\t\"$0}'") (TOK_SELEXPR 'tab\ttab') (TOK_SELEXPR "tab\ttab")) (TOK_LIMIT 1)))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' src '
' TableScan'
' alias: src'
' Select Operator'
' expressions:'
' expr: 'abc''
' type: string'
' expr: 'abc''
' type: string'
' expr: 'abc'''
' type: string'
' expr: 'abc"''
' type: string'
' expr: 'abc\''
' type: string'
' expr: 'abc\''
' type: string'
' expr: 'abc\'''
' type: string'
' expr: 'abc\"''
' type: string'
' expr: 'abc\\''
' type: string'
' expr: 'abc\\''
' type: string'
' expr: 'abc\\'''
' type: string'
' expr: 'abc\\"''
' type: string'
' expr: 'abc\\\''
' type: string'
' expr: 'abc\\\''
' type: string'
' expr: 'abc""""\''
' type: string'
' expr: 'abc''''\''
' type: string'
' expr: 'awk '{print NR"\t"$0}'''
' type: string'
' expr: 'tab tab''
' type: string'
' expr: 'tab tab''
' type: string'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18'
' Limit'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: 1'
''
''
68 rows selected
>>> 
>>> SELECT 'abc', "abc", 'abc\'', "abc\"", 'abc\\', "abc\\", 'abc\\\'', "abc\\\"", 'abc\\\\', "abc\\\\", 'abc\\\\\'', "abc\\\\\"", 'abc\\\\\\', "abc\\\\\\", 'abc""""\\', "abc''''\\", "awk '{print NR\"\\t\"$0}'", 'tab\ttab', "tab\ttab" FROM src LIMIT 1;
'_c0','_c1','_c2','_c3','_c4','_c5','_c6','_c7','_c8','_c9','_c10','_c11','_c12','_c13','_c14','_c15','_c16','_c17','_c18'
'abc','abc','abc'','abc"','abc\','abc\','abc\'','abc\"','abc\\','abc\\','abc\\'','abc\\"','abc\\\','abc\\\','abc""""\','abc''''\','awk '{print NR"\t"$0}'','tab tab','tab tab'
1 row selected
>>> !record