Saving all output to "!!{outputDirectory}!!/udf_between.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/udf_between.q
>>> describe function between;
'tab_name'
'between a [NOT] BETWEEN b AND c - evaluate if a is [not] in between b and c'
1 row selected
>>> describe function extended between;
'tab_name'
'between a [NOT] BETWEEN b AND c - evaluate if a is [not] in between b and c'
1 row selected
>>>
>>> explain SELECT * FROM src where key + 100 between (150 + -50) AND (150 + 50) LIMIT 20;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (TOK_FUNCTION between KW_FALSE (+ (TOK_TABLE_OR_COL key) 100) (+ 150 (- 50)) (+ 150 50))) (TOK_LIMIT 20)))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: (key + 100) BETWEEN (150 + (- 50)) AND (150 + 50)'
'                  type: boolean'
'              Select Operator'
'                expressions:'
'                      expr: key'
'                      type: string'
'                      expr: value'
'                      type: string'
'                outputColumnNames: _col0, _col1'
'                Limit'
'                  File Output Operator'
'                    compressed: false'
'                    GlobalTableId: 0'
'                    table:'
'                        input format: org.apache.hadoop.mapred.TextInputFormat'
'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: 20'
''
''
38 rows selected
>>> SELECT * FROM src where key + 100 between (150 + -50) AND (150 + 50) LIMIT 20;
'key','value'
'86','val_86'
'27','val_27'
'98','val_98'
'66','val_66'
'37','val_37'
'15','val_15'
'82','val_82'
'17','val_17'
'0','val_0'
'57','val_57'
'20','val_20'
'92','val_92'
'47','val_47'
'72','val_72'
'4','val_4'
'35','val_35'
'54','val_54'
'51','val_51'
'65','val_65'
'83','val_83'
20 rows selected
>>>
>>> explain SELECT * FROM src where key + 100 not between (150 + -50) AND (150 + 50) LIMIT 20;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (TOK_FUNCTION between KW_TRUE (+ (TOK_TABLE_OR_COL key) 100) (+ 150 (- 50)) (+ 150 50))) (TOK_LIMIT 20)))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: (key + 100) NOT BETWEEN (150 + (- 50)) AND (150 + 50)'
'                  type: boolean'
'              Select Operator'
'                expressions:'
'                      expr: key'
'                      type: string'
'                      expr: value'
'                      type: string'
'                outputColumnNames: _col0, _col1'
'                Limit'
'                  File Output Operator'
'                    compressed: false'
'                    GlobalTableId: 0'
'                    table:'
'                        input format: org.apache.hadoop.mapred.TextInputFormat'
'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: 20'
''
''
38 rows selected
>>> SELECT * FROM src where key + 100 not between (150 + -50) AND (150 + 50) LIMIT 20;
'key','value'
'238','val_238'
'311','val_311'
'165','val_165'
'409','val_409'
'255','val_255'
'278','val_278'
'484','val_484'
'265','val_265'
'193','val_193'
'401','val_401'
'150','val_150'
'273','val_273'
'224','val_224'
'369','val_369'
'128','val_128'
'213','val_213'
'146','val_146'
'406','val_406'
'429','val_429'
'374','val_374'
20 rows selected
>>>
>>> explain SELECT * FROM src 
where 'b' between 'a' AND 'c' LIMIT 1;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (TOK_FUNCTION between KW_FALSE 'b' 'a' 'c')) (TOK_LIMIT 1)))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: 'b' BETWEEN 'a' AND 'c''
'                  type: boolean'
'              Select Operator'
'                expressions:'
'                      expr: key'
'                      type: string'
'                      expr: value'
'                      type: string'
'                outputColumnNames: _col0, _col1'
'                Limit'
'                  File Output Operator'
'                    compressed: false'
'                    GlobalTableId: 0'
'                    table:'
'                        input format: org.apache.hadoop.mapred.TextInputFormat'
'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: 1'
''
''
38 rows selected
>>> SELECT * FROM src where 'b' between 'a' AND 'c' LIMIT 1;
'key','value'
'238','val_238'
1 row selected
>>>
>>> explain SELECT * FROM src where 2 between 2 AND '3' LIMIT 1;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (TOK_FUNCTION between KW_FALSE 2 2 '3')) (TOK_LIMIT 1)))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: 2 BETWEEN 2 AND '3''
'                  type: boolean'
'              Select Operator'
'                expressions:'
'                      expr: key'
'                      type: string'
'                      expr: value'
'                      type: string'
'                outputColumnNames: _col0, _col1'
'                Limit'
'                  File Output Operator'
'                    compressed: false'
'                    GlobalTableId: 0'
'                    table:'
'                        input format: org.apache.hadoop.mapred.TextInputFormat'
'                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: 1'
''
''
38 rows selected
>>> SELECT * FROM src where 2 between 2 AND '3' LIMIT 1;
'key','value'
'238','val_238'
1 row selected
>>> !record