Saving all output to "!!{outputDirectory}!!/count.q.raw". Enter "record" with no arguments to stop it.
>>>  !run !!{qFileDirectory}!!/count.q
>>>  create table abcd (a int, b int, c int, d int);
No rows affected
>>>  LOAD DATA LOCAL INPATH '../data/files/in4.txt' INTO TABLE abcd;
No rows affected
>>>
>>>  select * from abcd;
'a','b','c','d'
'','35','23','6'
'10','1000','50','1'
'100','100','10','3'
'12','','80','2'
'10','100','','5'
'10','100','45','4'
'12','100','75','7'
7 rows selected
>>>  set hive.map.aggr=true;
No rows affected
>>>  explain select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME abcd))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL a)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_TABLE_OR_COL d)))) (TOK_GROUPBY (TOK_TABLE_OR_COL a))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' abcd '
' TableScan'
' alias: abcd'
' Select Operator'
' expressions:'
' expr: a'
' type: int'
' expr: b'
' type: int'
' expr: c'
' type: int'
' expr: d'
' type: int'
' outputColumnNames: a, b, c, d'
' Group By Operator'
' aggregations:'
' expr: count(DISTINCT b)'
' expr: count(DISTINCT c)'
' expr: sum(d)'
' bucketGroup: false'
' keys:'
' expr: a'
' type: int'
' expr: b'
' type: int'
' expr: c'
' type: int'
' mode: hash'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5'
' Reduce Output Operator'
' key expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: int'
' expr: _col2'
' type: int'
' sort order: +++'
' Map-reduce partition columns:'
' expr: _col0'
' type: int'
' tag: -1'
' value expressions:'
' expr: _col3'
' type: bigint'
' expr: _col4'
' type: bigint'
' expr: _col5'
' type: bigint'
' Reduce Operator Tree:'
' Group By Operator'
' aggregations:'
' expr: count(DISTINCT KEY._col1:0._col0)'
' expr: count(DISTINCT KEY._col1:1._col0)'
' expr: sum(VALUE._col2)'
' bucketGroup: false'
' keys:'
' expr: KEY._col0'
' type: int'
' mode: mergepartial'
' outputColumnNames: _col0, _col1, _col2, _col3'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: bigint'
' expr: _col2'
' type: bigint'
' expr: _col3'
' type: bigint'
' outputColumnNames: _col0, _col1, _col2, _col3'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
95 rows selected
>>>  select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
'a','_c1','_c2','_c3'
'','1','1','6'
'10','2','2','10'
'12','1','2','9'
'100','1','1','3'
4 rows selected
>>>
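A hedged editorial reading of the plan above (not part of the recorded output): with hive.map.aggr=true the aggregation runs in two phases, and the DISTINCT columns travel in the shuffle sort key while only the grouping key drives partitioning.

-- How the plan above executes the grouped query:
--   map side:    Group By (mode: hash) pre-aggregates on keys (a, b, c)
--   shuffle:     sort key = (a, b, c) [sort order: +++] but partition key = a only,
--                so each reducer receives whole a-groups with b and c already sorted
--   reduce side: Group By (mode: mergepartial) counts the distinct b and c values
--                off the sorted key stream and merges the partial sum(d)
set hive.map.aggr=true;
select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;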
>>>  explain select count(1), count(*), count(a), count(b), count(c), count(d), count(distinct a), count(distinct b), count(distinct c), count(distinct d), count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), count(distinct a,b,c,d) from abcd;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME abcd))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)) (TOK_SELEXPR (TOK_FUNCTIONSTAR count)) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL a))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' abcd '
' TableScan'
' alias: abcd'
' Select Operator'
' expressions:'
' expr: a'
' type: int'
' expr: b'
' type: int'
' expr: c'
' type: int'
' expr: d'
' type: int'
' outputColumnNames: a, b, c, d'
' Group By Operator'
' aggregations:'
' expr: count(1)'
' expr: count()'
' expr: count(a)'
' expr: count(b)'
' expr: count(c)'
' expr: count(d)'
' expr: count(DISTINCT a)'
' expr: count(DISTINCT b)'
' expr: count(DISTINCT c)'
' expr: count(DISTINCT d)'
' expr: count(DISTINCT a, b)'
' expr: count(DISTINCT b, c)'
' expr: count(DISTINCT c, d)'
' expr: count(DISTINCT a, d)'
' expr: count(DISTINCT a, c)'
' expr: count(DISTINCT b, d)'
' expr: count(DISTINCT a, b, c)'
' expr: count(DISTINCT b, c, d)'
' expr: count(DISTINCT a, c, d)'
' expr: count(DISTINCT a, b, d)'
' expr: count(DISTINCT a, b, c, d)'
' bucketGroup: false'
' keys:'
' expr: a'
' type: int'
' expr: b'
' type: int'
' expr: c'
' type: int'
' expr: d'
' type: int'
' mode: hash'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24'
' Reduce Output Operator'
' key expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: int'
' expr: _col2'
' type: int'
' expr: _col3'
' type: int'
' sort order: ++++'
' tag: -1'
' value expressions:'
' expr: _col4'
' type: bigint'
' expr: _col5'
' type: bigint'
' expr: _col6'
' type: bigint'
' expr: _col7'
' type: bigint'
' expr: _col8'
' type: bigint'
' expr: _col9'
' type: bigint'
' expr: _col10'
' type: bigint'
' expr: _col11'
' type: bigint'
' expr: _col12'
' type: bigint'
' expr: _col13'
' type: bigint'
' expr: _col14'
' type: bigint'
' expr: _col15'
' type: bigint'
' expr: _col16'
' type: bigint'
' expr: _col17'
' type: bigint'
' expr: _col18'
' type: bigint'
' expr: _col19'
' type: bigint'
' expr: _col20'
' type: bigint'
' expr: _col21'
' type: bigint'
' expr: _col22'
' type: bigint'
' expr: _col23'
' type: bigint'
' expr: _col24'
' type: bigint'
' Reduce Operator Tree:'
' Group By Operator'
' aggregations:'
' expr: count(VALUE._col0)'
' expr: count(VALUE._col1)'
' expr: count(VALUE._col2)'
' expr: count(VALUE._col3)'
' expr: count(VALUE._col4)'
' expr: count(VALUE._col5)'
' expr: count(DISTINCT KEY._col0:0._col0)'
' expr: count(DISTINCT KEY._col0:1._col0)'
' expr: count(DISTINCT KEY._col0:2._col0)'
' expr: count(DISTINCT KEY._col0:3._col0)'
' expr: count(DISTINCT KEY._col0:4._col0, KEY._col0:4._col1)'
' expr: count(DISTINCT KEY._col0:5._col0, KEY._col0:5._col1)'
' expr: count(DISTINCT KEY._col0:6._col0, KEY._col0:6._col1)'
' expr: count(DISTINCT KEY._col0:7._col0, KEY._col0:7._col1)'
' expr: count(DISTINCT KEY._col0:8._col0, KEY._col0:8._col1)'
' expr: count(DISTINCT KEY._col0:9._col0, KEY._col0:9._col1)'
' expr: count(DISTINCT KEY._col0:10._col0, KEY._col0:10._col1, KEY._col0:10._col2)'
' expr: count(DISTINCT KEY._col0:11._col0, KEY._col0:11._col1, KEY._col0:11._col2)'
' expr: count(DISTINCT KEY._col0:12._col0, KEY._col0:12._col1, KEY._col0:12._col2)'
' expr: count(DISTINCT KEY._col0:13._col0, KEY._col0:13._col1, KEY._col0:13._col2)'
' expr: count(DISTINCT KEY._col0:14._col0, KEY._col0:14._col1, KEY._col0:14._col2, KEY._col0:14._col3)'
' bucketGroup: false'
' mode: mergepartial'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20'
' Select Operator'
' expressions:'
' expr: _col0'
' type: bigint'
' expr: _col1'
' type: bigint'
' expr: _col2'
' type: bigint'
' expr: _col3'
' type: bigint'
' expr: _col4'
' type: bigint'
' expr: _col5'
' type: bigint'
' expr: _col6'
' type: bigint'
' expr: _col7'
' type: bigint'
' expr: _col8'
' type: bigint'
' expr: _col9'
' type: bigint'
' expr: _col10'
' type: bigint'
' expr: _col11'
' type: bigint'
' expr: _col12'
' type: bigint'
' expr: _col13'
' type: bigint'
' expr: _col14'
' type: bigint'
' expr: _col15'
' type: bigint'
' expr: _col16'
' type: bigint'
' expr: _col17'
' type: bigint'
' expr: _col18'
' type: bigint'
' expr: _col19'
' type: bigint'
' expr: _col20'
' type: bigint'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
199 rows selected
>>>  select count(1), count(*), count(a), count(b), count(c), count(d), count(distinct a), count(distinct b), count(distinct c), count(distinct d), count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), count(distinct a,b,c,d) from abcd;
'_c0','_c1','_c2','_c3','_c4','_c5','_c6','_c7','_c8','_c9','_c10','_c11','_c12','_c13','_c14','_c15','_c16','_c17','_c18','_c19','_c20'
'7','7','6','6','6','7','3','3','6','7','4','5','6','6','5','6','4','5','5','5','4'
1 row selected
>>>
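An editorial recap of what that result row demonstrates (not part of the recorded output): count(*) and count(1) count every row, count(col) skips NULLs, and a multi-argument count(DISTINCT ...) counts distinct tuples in which no argument is NULL. The subset below reruns four of the aggregates; the expected values in the comments are taken from the result row above.

-- Against the abcd data loaded above (a, b, and c each contain one NULL):
--   count(*)             -> 7  all rows
--   count(a)             -> 6  the NULL a is skipped
--   count(distinct a)    -> 3  distinct non-NULL values: 10, 12, 100
--   count(distinct a, b) -> 4  tuples with any NULL component are skipped
select count(*), count(a), count(distinct a), count(distinct a, b) from abcd;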
>>>  set hive.map.aggr=false;
No rows affected
>>>  explain select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME abcd))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL a)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTION sum (TOK_TABLE_OR_COL d)))) (TOK_GROUPBY (TOK_TABLE_OR_COL a))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' abcd '
' TableScan'
' alias: abcd'
' Select Operator'
' expressions:'
' expr: a'
' type: int'
' expr: b'
' type: int'
' expr: c'
' type: int'
' expr: d'
' type: int'
' outputColumnNames: a, b, c, d'
' Reduce Output Operator'
' key expressions:'
' expr: a'
' type: int'
' expr: b'
' type: int'
' expr: c'
' type: int'
' sort order: +++'
' Map-reduce partition columns:'
' expr: a'
' type: int'
' tag: -1'
' value expressions:'
' expr: d'
' type: int'
' Reduce Operator Tree:'
' Group By Operator'
' aggregations:'
' expr: count(DISTINCT KEY._col1:0._col0)'
' expr: count(DISTINCT KEY._col1:1._col0)'
' expr: sum(VALUE._col0)'
' bucketGroup: false'
' keys:'
' expr: KEY._col0'
' type: int'
' mode: complete'
' outputColumnNames: _col0, _col1, _col2, _col3'
' Select Operator'
' expressions:'
' expr: _col0'
' type: int'
' expr: _col1'
' type: bigint'
' expr: _col2'
' type: bigint'
' expr: _col3'
' type: bigint'
' outputColumnNames: _col0, _col1, _col2, _col3'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
76 rows selected
>>>  select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;
'a','_c1','_c2','_c3'
'','1','1','6'
'10','2','2','10'
'12','1','2','9'
'100','1','1','3'
4 rows selected
>>>
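A brief editorial contrast with the hive.map.aggr=true plan earlier in this file (again, not part of the recorded output): disabling map-side aggregation removes the map-side Group By entirely, so raw rows are shuffled and a single reduce-side Group By runs in complete mode instead of the hash/mergepartial pair.

-- Same query, two plan shapes; the selected results are identical either way:
--   set hive.map.aggr=true;   -- map-side Group By (mode: hash), reducer merges (mode: mergepartial)
--   set hive.map.aggr=false;  -- no map-side pre-aggregation; one reducer Group By (mode: complete)
explain select a, count(distinct b), count(distinct c), sum(d) from abcd group by a;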
>>>  explain select count(1), count(*), count(a), count(b), count(c), count(d), count(distinct a), count(distinct b), count(distinct c), count(distinct d), count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), count(distinct a,b,c,d) from abcd;
'Explain'
'ABSTRACT SYNTAX TREE:'
' (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME abcd))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1)) (TOK_SELEXPR (TOK_FUNCTIONSTAR count)) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL a))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL d))) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b) (TOK_TABLE_OR_COL c) (TOK_TABLE_OR_COL d))))))'
''
'STAGE DEPENDENCIES:'
' Stage-1 is a root stage'
' Stage-0 is a root stage'
''
'STAGE PLANS:'
' Stage: Stage-1'
' Map Reduce'
' Alias -> Map Operator Tree:'
' abcd '
' TableScan'
' alias: abcd'
' Select Operator'
' expressions:'
' expr: a'
' type: int'
' expr: b'
' type: int'
' expr: c'
' type: int'
' expr: d'
' type: int'
' outputColumnNames: a, b, c, d'
' Reduce Output Operator'
' key expressions:'
' expr: a'
' type: int'
' expr: b'
' type: int'
' expr: c'
' type: int'
' expr: d'
' type: int'
' sort order: ++++'
' tag: -1'
' value expressions:'
' expr: 1'
' type: int'
' Reduce Operator Tree:'
' Group By Operator'
' aggregations:'
' expr: count(VALUE._col0)'
' expr: count()'
' expr: count(KEY._col0:14._col0)'
' expr: count(KEY._col0:14._col1)'
' expr: count(KEY._col0:14._col2)'
' expr: count(KEY._col0:14._col3)'
' expr: count(DISTINCT KEY._col0:0._col0)'
' expr: count(DISTINCT KEY._col0:1._col0)'
' expr: count(DISTINCT KEY._col0:2._col0)'
' expr: count(DISTINCT KEY._col0:3._col0)'
' expr: count(DISTINCT KEY._col0:4._col0, KEY._col0:4._col1)'
' expr: count(DISTINCT KEY._col0:5._col0, KEY._col0:5._col1)'
' expr: count(DISTINCT KEY._col0:6._col0, KEY._col0:6._col1)'
' expr: count(DISTINCT KEY._col0:7._col0, KEY._col0:7._col1)'
' expr: count(DISTINCT KEY._col0:8._col0, KEY._col0:8._col1)'
' expr: count(DISTINCT KEY._col0:9._col0, KEY._col0:9._col1)'
' expr: count(DISTINCT KEY._col0:10._col0, KEY._col0:10._col1, KEY._col0:10._col2)'
' expr: count(DISTINCT KEY._col0:11._col0, KEY._col0:11._col1, KEY._col0:11._col2)'
' expr: count(DISTINCT KEY._col0:12._col0, KEY._col0:12._col1, KEY._col0:12._col2)'
' expr: count(DISTINCT KEY._col0:13._col0, KEY._col0:13._col1, KEY._col0:13._col2)'
' expr: count(DISTINCT KEY._col0:14._col0, KEY._col0:14._col1, KEY._col0:14._col2, KEY._col0:14._col3)'
' bucketGroup: false'
' mode: complete'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20'
' Select Operator'
' expressions:'
' expr: _col0'
' type: bigint'
' expr: _col1'
' type: bigint'
' expr: _col2'
' type: bigint'
' expr: _col3'
' type: bigint'
' expr: _col4'
' type: bigint'
' expr: _col5'
' type: bigint'
' expr: _col6'
' type: bigint'
' expr: _col7'
' type: bigint'
' expr: _col8'
' type: bigint'
' expr: _col9'
' type: bigint'
' expr: _col10'
' type: bigint'
' expr: _col11'
' type: bigint'
' expr: _col12'
' type: bigint'
' expr: _col13'
' type: bigint'
' expr: _col14'
' type: bigint'
' expr: _col15'
' type: bigint'
' expr: _col16'
' type: bigint'
' expr: _col17'
' type: bigint'
' expr: _col18'
' type: bigint'
' expr: _col19'
' type: bigint'
' expr: _col20'
' type: bigint'
' outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20'
' File Output Operator'
' compressed: false'
' GlobalTableId: 0'
' table:'
' input format: org.apache.hadoop.mapred.TextInputFormat'
' output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
' Stage: Stage-0'
' Fetch Operator'
' limit: -1'
''
''
124 rows selected
>>>  select count(1), count(*), count(a), count(b), count(c), count(d), count(distinct a), count(distinct b), count(distinct c), count(distinct d), count(distinct a,b), count(distinct b,c), count(distinct c,d), count(distinct a,d), count(distinct a,c), count(distinct b,d), count(distinct a,b,c), count(distinct b,c,d), count(distinct a,c,d), count(distinct a,b,d), count(distinct a,b,c,d) from abcd;
'_c0','_c1','_c2','_c3','_c4','_c5','_c6','_c7','_c8','_c9','_c10','_c11','_c12','_c13','_c14','_c15','_c16','_c17','_c18','_c19','_c20'
'7','7','6','6','6','7','3','3','6','7','4','5','6','6','5','6','4','5','5','5','4'
1 row selected
>>>  !record