Saving all output to "!!{outputDirectory}!!/udf_coalesce.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/udf_coalesce.q
>>> DESCRIBE FUNCTION coalesce;
'tab_name'
'coalesce(a1, a2, ...) - Returns the first non-null argument'
1 row selected
>>> DESCRIBE FUNCTION EXTENDED coalesce;
'tab_name'
'coalesce(a1, a2, ...) - Returns the first non-null argument'
'Example:'
'  > SELECT coalesce(NULL, 1, NULL) FROM src LIMIT 1;'
'  1'
4 rows selected
>>> 
>>> EXPLAIN SELECT COALESCE(1), COALESCE(1, 2), COALESCE(NULL, 2), COALESCE(1, NULL), COALESCE(NULL, NULL, 3), COALESCE(4, NULL, NULL, NULL), COALESCE('1'), COALESCE('1', '2'), COALESCE(NULL, '2'), COALESCE('1', NULL), COALESCE(NULL, NULL, '3'), COALESCE('4', NULL, NULL, NULL), COALESCE(1.0), COALESCE(1.0, 2.0), COALESCE(NULL, 2.0), COALESCE(NULL, 2.0, 3.0), COALESCE(2.0, NULL, 3.0), COALESCE(IF(TRUE, NULL, 0), NULL) FROM src LIMIT 1;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COALESCE 1)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1 2)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1 TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL TOK_NULL 3)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 4 TOK_NULL TOK_NULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1' '2')) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL '2')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1' TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL TOK_NULL '3')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '4' TOK_NULL TOK_NULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1.0 2.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2.0 3.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 2.0 TOK_NULL 3.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE (TOK_FUNCTION IF TRUE TOK_NULL 0) TOK_NULL))) (TOK_LIMIT 1)))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src '
'          TableScan'
'            alias: src'
'            Select Operator'
'              expressions:'
'                    expr: COALESCE(1)'
'                    type: int'
'                    expr: COALESCE(1,2)'
'                    type: int'
'                    expr: COALESCE(null,2)'
'                    type: int'
'                    expr: COALESCE(1,null)'
'                    type: int'
'                    expr: COALESCE(null,null,3)'
'                    type: int'
'                    expr: COALESCE(4,null,null,null)'
'                    type: int'
'                    expr: COALESCE('1')'
'                    type: string'
'                    expr: COALESCE('1','2')'
'                    type: string'
'                    expr: COALESCE(null,'2')'
'                    type: string'
'                    expr: COALESCE('1',null)'
'                    type: string'
'                    expr: COALESCE(null,null,'3')'
'                    type: string'
'                    expr: COALESCE('4',null,null,null)'
'                    type: string'
'                    expr: COALESCE(1.0)'
'                    type: double'
'                    expr: COALESCE(1.0,2.0)'
'                    type: double'
'                    expr: COALESCE(null,2.0)'
'                    type: double'
'                    expr: COALESCE(null,2.0,3.0)'
'                    type: double'
'                    expr: COALESCE(2.0,null,3.0)'
'                    type: double'
'                    expr: COALESCE(if(true, null, 0),null)'
'                    type: int'
'              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17'
'              Limit'
'                File Output Operator'
'                  compressed: false'
'                  GlobalTableId: 0'
'                  table:'
'                      input format: org.apache.hadoop.mapred.TextInputFormat'
'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: 1'
''
''
66 rows selected
>>> 
>>> SELECT COALESCE(1), COALESCE(1, 2), COALESCE(NULL, 2), COALESCE(1, NULL), COALESCE(NULL, NULL, 3), COALESCE(4, NULL, NULL, NULL), COALESCE('1'), COALESCE('1', '2'), COALESCE(NULL, '2'), COALESCE('1', NULL), COALESCE(NULL, NULL, '3'), COALESCE('4', NULL, NULL, NULL), COALESCE(1.0), COALESCE(1.0, 2.0), COALESCE(NULL, 2.0), COALESCE(NULL, 2.0, 3.0), COALESCE(2.0, NULL, 3.0), COALESCE(IF(TRUE, NULL, 0), NULL) FROM src LIMIT 1;
'_c0','_c1','_c2','_c3','_c4','_c5','_c6','_c7','_c8','_c9','_c10','_c11','_c12','_c13','_c14','_c15','_c16','_c17'
'1','1','2','1','3','4','1','1','2','1','3','4','1.0','1.0','2.0','2.0','2.0',''
1 row selected
>>> 
>>> EXPLAIN SELECT COALESCE(src_thrift.lint[1], 999), COALESCE(src_thrift.lintstring[0].mystring, '999'), COALESCE(src_thrift.mstringstring['key_2'], '999') FROM src_thrift;
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_thrift))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COALESCE ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 1) 999)) (TOK_SELEXPR (TOK_FUNCTION COALESCE (. ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0) mystring) '999')) (TOK_SELEXPR (TOK_FUNCTION COALESCE ([ (. (TOK_TABLE_OR_COL src_thrift) mstringstring) 'key_2') '999')))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src_thrift '
'          TableScan'
'            alias: src_thrift'
'            Select Operator'
'              expressions:'
'                    expr: COALESCE(lint[1],999)'
'                    type: int'
'                    expr: COALESCE(lintstring[0].mystring,'999')'
'                    type: string'
'                    expr: COALESCE(mstringstring['key_2'],'999')'
'                    type: string'
'              outputColumnNames: _col0, _col1, _col2'
'              File Output Operator'
'                compressed: false'
'                GlobalTableId: 0'
'                table:'
'                    input format: org.apache.hadoop.mapred.TextInputFormat'
'                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: -1'
''
''
35 rows selected
>>> 
>>> SELECT COALESCE(src_thrift.lint[1], 999), COALESCE(src_thrift.lintstring[0].mystring, '999'), COALESCE(src_thrift.mstringstring['key_2'], '999') FROM src_thrift;
'_c0','_c1','_c2'
'0','0','999'
'2','1','999'
'4','8','value_2'
'6','27','999'
'8','64','999'
'10','125','999'
'12','216','999'
'14','343','999'
'16','512','999'
'18','729','999'
'999','999','999'
11 rows selected
>>> !record