Saving all output to "!!{outputDirectory}!!/ppd_gby.q.raw". Enter "record" with no arguments to stop it.
>>> !run !!{qFileDirectory}!!/ppd_gby.q
>>> set hive.optimize.ppd=true;
No rows affected
>>> set hive.ppd.remove.duplicatefilters=false;
No rows affected
>>> 
>>> EXPLAIN SELECT src1.c1 FROM (SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400');
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c1) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) key)) c2)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) value) 'val_10')) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) value)))) src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1))) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) c1) 'val_200') (or (> (. (TOK_TABLE_OR_COL src1) c2) 30) (< (. (TOK_TABLE_OR_COL src1) c1) 'val_400'))))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src1:src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: ((value > 'val_10') and (value > 'val_200'))'
'                  type: boolean'
'              Filter Operator'
'                predicate:'
'                    expr: (value > 'val_10')'
'                    type: boolean'
'                Select Operator'
'                  expressions:'
'                        expr: value'
'                        type: string'
'                        expr: key'
'                        type: string'
'                  outputColumnNames: value, key'
'                  Group By Operator'
'                    aggregations:'
'                          expr: count(key)'
'                    bucketGroup: false'
'                    keys:'
'                          expr: value'
'                          type: string'
'                    mode: hash'
'                    outputColumnNames: _col0, _col1'
'                    Reduce Output Operator'
'                      key expressions:'
'                            expr: _col0'
'                            type: string'
'                      sort order: +'
'                      Map-reduce partition columns:'
'                            expr: _col0'
'                            type: string'
'                      tag: -1'
'                      value expressions:'
'                            expr: _col1'
'                            type: bigint'
'      Reduce Operator Tree:'
'        Group By Operator'
'          aggregations:'
'                expr: count(VALUE._col0)'
'          bucketGroup: false'
'          keys:'
'                expr: KEY._col0'
'                type: string'
'          mode: mergepartial'
'          outputColumnNames: _col0, _col1'
'          Select Operator'
'            expressions:'
'                  expr: _col0'
'                  type: string'
'                  expr: _col1'
'                  type: bigint'
'            outputColumnNames: _col0, _col1'
'            Filter Operator'
'              predicate:'
'                  expr: ((_col0 > 'val_200') and ((_col1 > 30) or (_col0 < 'val_400')))'
'                  type: boolean'
'              Select Operator'
'                expressions:'
'                      expr: _col0'
'                      type: string'
'                outputColumnNames: _col0'
'                File Output Operator'
'                  compressed: false'
'                  GlobalTableId: 0'
'                  table:'
'                      input format: org.apache.hadoop.mapred.TextInputFormat'
'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: -1'
''
''
88 rows selected
>>> 
>>> SELECT src1.c1 FROM (SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400');
'c1'
'val_201'
'val_202'
'val_203'
'val_205'
'val_207'
'val_208'
'val_209'
'val_213'
'val_214'
'val_216'
'val_217'
'val_218'
'val_219'
'val_221'
'val_222'
'val_223'
'val_224'
'val_226'
'val_228'
'val_229'
'val_230'
'val_233'
'val_235'
'val_237'
'val_238'
'val_239'
'val_24'
'val_241'
'val_242'
'val_244'
'val_247'
'val_248'
'val_249'
'val_252'
'val_255'
'val_256'
'val_257'
'val_258'
'val_26'
'val_260'
'val_262'
'val_263'
'val_265'
'val_266'
'val_27'
'val_272'
'val_273'
'val_274'
'val_275'
'val_277'
'val_278'
'val_28'
'val_280'
'val_281'
'val_282'
'val_283'
'val_284'
'val_285'
'val_286'
'val_287'
'val_288'
'val_289'
'val_291'
'val_292'
'val_296'
'val_298'
'val_30'
'val_302'
'val_305'
'val_306'
'val_307'
'val_308'
'val_309'
'val_310'
'val_311'
'val_315'
'val_316'
'val_317'
'val_318'
'val_321'
'val_322'
'val_323'
'val_325'
'val_327'
'val_33'
'val_331'
'val_332'
'val_333'
'val_335'
'val_336'
'val_338'
'val_339'
'val_34'
'val_341'
'val_342'
'val_344'
'val_345'
'val_348'
'val_35'
'val_351'
'val_353'
'val_356'
'val_360'
'val_362'
'val_364'
'val_365'
'val_366'
'val_367'
'val_368'
'val_369'
'val_37'
'val_373'
'val_374'
'val_375'
'val_377'
'val_378'
'val_379'
'val_382'
'val_384'
'val_386'
'val_389'
'val_392'
'val_393'
'val_394'
'val_395'
'val_396'
'val_397'
'val_399'
'val_4'
129 rows selected
>>> 
>>> set hive.ppd.remove.duplicatefilters=true;
No rows affected
>>> 
>>> EXPLAIN SELECT src1.c1 FROM (SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400');
'Explain'
'ABSTRACT SYNTAX TREE:'
'  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c1) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) key)) c2)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) value) 'val_10')) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) value)))) src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1))) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) c1) 'val_200') (or (> (. (TOK_TABLE_OR_COL src1) c2) 30) (< (. (TOK_TABLE_OR_COL src1) c1) 'val_400'))))))'
''
'STAGE DEPENDENCIES:'
'  Stage-1 is a root stage'
'  Stage-0 is a root stage'
''
'STAGE PLANS:'
'  Stage: Stage-1'
'    Map Reduce'
'      Alias -> Map Operator Tree:'
'        src1:src '
'          TableScan'
'            alias: src'
'            Filter Operator'
'              predicate:'
'                  expr: ((value > 'val_10') and (value > 'val_200'))'
'                  type: boolean'
'              Select Operator'
'                expressions:'
'                      expr: value'
'                      type: string'
'                      expr: key'
'                      type: string'
'                outputColumnNames: value, key'
'                Group By Operator'
'                  aggregations:'
'                        expr: count(key)'
'                  bucketGroup: false'
'                  keys:'
'                        expr: value'
'                        type: string'
'                  mode: hash'
'                  outputColumnNames: _col0, _col1'
'                  Reduce Output Operator'
'                    key expressions:'
'                          expr: _col0'
'                          type: string'
'                    sort order: +'
'                    Map-reduce partition columns:'
'                          expr: _col0'
'                          type: string'
'                    tag: -1'
'                    value expressions:'
'                          expr: _col1'
'                          type: bigint'
'      Reduce Operator Tree:'
'        Group By Operator'
'          aggregations:'
'                expr: count(VALUE._col0)'
'          bucketGroup: false'
'          keys:'
'                expr: KEY._col0'
'                type: string'
'          mode: mergepartial'
'          outputColumnNames: _col0, _col1'
'          Filter Operator'
'            predicate:'
'                expr: ((_col0 > 'val_200') and ((_col1 > 30) or (_col0 < 'val_400')))'
'                type: boolean'
'            Select Operator'
'              expressions:'
'                    expr: _col0'
'                    type: string'
'                    expr: _col1'
'                    type: bigint'
'              outputColumnNames: _col0, _col1'
'              Select Operator'
'                expressions:'
'                      expr: _col0'
'                      type: string'
'                outputColumnNames: _col0'
'                File Output Operator'
'                  compressed: false'
'                  GlobalTableId: 0'
'                  table:'
'                      input format: org.apache.hadoop.mapred.TextInputFormat'
'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
''
'  Stage: Stage-0'
'    Fetch Operator'
'      limit: -1'
''
''
84 rows selected
>>> 
>>> SELECT src1.c1 FROM (SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400');
'c1'
'val_201'
'val_202'
'val_203'
'val_205'
'val_207'
'val_208'
'val_209'
'val_213'
'val_214'
'val_216'
'val_217'
'val_218'
'val_219'
'val_221'
'val_222'
'val_223'
'val_224'
'val_226'
'val_228'
'val_229'
'val_230'
'val_233'
'val_235'
'val_237'
'val_238'
'val_239'
'val_24'
'val_241'
'val_242'
'val_244'
'val_247'
'val_248'
'val_249'
'val_252'
'val_255'
'val_256'
'val_257'
'val_258'
'val_26'
'val_260'
'val_262'
'val_263'
'val_265'
'val_266'
'val_27'
'val_272'
'val_273'
'val_274'
'val_275'
'val_277'
'val_278'
'val_28'
'val_280'
'val_281'
'val_282'
'val_283'
'val_284'
'val_285'
'val_286'
'val_287'
'val_288'
'val_289'
'val_291'
'val_292'
'val_296'
'val_298'
'val_30'
'val_302'
'val_305'
'val_306'
'val_307'
'val_308'
'val_309'
'val_310'
'val_311'
'val_315'
'val_316'
'val_317'
'val_318'
'val_321'
'val_322'
'val_323'
'val_325'
'val_327'
'val_33'
'val_331'
'val_332'
'val_333'
'val_335'
'val_336'
'val_338'
'val_339'
'val_34'
'val_341'
'val_342'
'val_344'
'val_345'
'val_348'
'val_35'
'val_351'
'val_353'
'val_356'
'val_360'
'val_362'
'val_364'
'val_365'
'val_366'
'val_367'
'val_368'
'val_369'
'val_37'
'val_373'
'val_374'
'val_375'
'val_377'
'val_378'
'val_379'
'val_382'
'val_384'
'val_386'
'val_389'
'val_392'
'val_393'
'val_394'
'val_395'
'val_396'
'val_397'
'val_399'
'val_4'
129 rows selected
>>> !record