PREHOOK: query: -- Create two bucketed and sorted tables
CREATE TABLE test_table1 (key INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@test_table1
POSTHOOK: query: -- Create two bucketed and sorted tables
CREATE TABLE test_table1 (key INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@test_table1
PREHOOK: query: CREATE TABLE test_table2 (key INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@test_table2
POSTHOOK: query: CREATE TABLE test_table2 (key INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@test_table2
PREHOOK: query: CREATE TABLE test_table3 (key INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@test_table3
POSTHOOK: query: CREATE TABLE test_table3 (key INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@test_table3
PREHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table1 PARTITION (ds = '1') SELECT * where key < 10
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@test_table1@ds=1
POSTHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table1 PARTITION (ds = '1') SELECT * where key < 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@test_table1@ds=1
POSTHOOK: Lineage: test_table1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '1') SELECT * where key < 100
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@test_table2@ds=1
POSTHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '1') SELECT * where key < 100
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@test_table2@ds=1
POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table1 PARTITION (ds = '2') SELECT * where key < 10
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@test_table1@ds=2
POSTHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table1 PARTITION (ds = '2') SELECT * where key < 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@test_table1@ds=2
POSTHOOK: Lineage: test_table1 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table1 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '2') SELECT * where key < 100
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@test_table2@ds=2
POSTHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '2') SELECT * where key < 100
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@test_table2@ds=2
POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-only operation
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds = '1' and b.ds = '1'
PREHOOK: type: QUERY
POSTHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-only operation
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds = '1' and b.ds = '1'
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-3 is a root stage
  Stage-1 depends on stages: Stage-3
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-3
    Spark
#### A masked pattern was here ####
      Vertices:
        Map 1
            Map Operator Tree:
                TableScan
                  alias: a
                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
                    Spark HashTable Sink Operator
                      keys:
                        0 key (type: int)
                        1 key (type: int)
            Local Work:
              Map Reduce Local Work

  Stage: Stage-1
    Spark
      Edges:
        Reducer 3 <- Map 2 (PARTITION-LEVEL SORT, 1)
#### A masked pattern was here ####
      Vertices:
        Map 2
            Map Operator Tree:
                TableScan
                  alias: b
                  Statistics: Num rows: 84 Data size: 736 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 42 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Inner Join 0 to 1
                      keys:
                        0 key (type: int)
                        1 key (type: int)
                      outputColumnNames: _col0, _col1, _col7
                      input vertices:
                        0 Map 1
                      Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: int), concat(_col1, _col7) (type: string)
                        outputColumnNames: _col0, _col1
                        Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
                          Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                          value expressions: _col0 (type: int), _col1 (type: string)
            Local Work:
              Map Reduce Local Work
        Reducer 3
            Reduce Operator Tree:
              Extract
                Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                      name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator

PREHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds = '1' and b.ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table1
PREHOOK: Input: default@test_table1@ds=1
PREHOOK: Input: default@test_table2
PREHOOK: Input: default@test_table2@ds=1
PREHOOK: Output: default@test_table3@ds=1
POSTHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds = '1' and b.ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table1
POSTHOOK: Input: default@test_table1@ds=1
POSTHOOK: Input: default@test_table2
POSTHOOK: Input: default@test_table2@ds=1
POSTHOOK: Output: default@test_table3@ds=1
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
2	val_2val_2	1
4	val_4val_4	1
8	val_8val_8	1
PREHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
9	val_9val_9	1
PREHOOK: query: -- Since more than one partition of 'a' (the big table) is being selected,
-- it should be a map-reduce job
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds is not null and b.ds = '1'
PREHOOK: type: QUERY
POSTHOOK: query: -- Since more than one partition of 'a' (the big table) is being selected,
-- it should be a map-reduce job
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds is not null and b.ds = '1'
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-3 is a root stage
  Stage-1 depends on stages: Stage-3
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-3
    Spark
#### A masked pattern was here ####
      Vertices:
        Map 1
            Map Operator Tree:
                TableScan
                  alias: a
                  Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                    Spark HashTable Sink Operator
                      keys:
                        0 key (type: int)
                        1 key (type: int)
            Local Work:
              Map Reduce Local Work

  Stage: Stage-1
    Spark
      Edges:
        Reducer 3 <- Map 2 (PARTITION-LEVEL SORT, 1)
#### A masked pattern was here ####
      Vertices:
        Map 2
            Map Operator Tree:
                TableScan
                  alias: b
                  Statistics: Num rows: 84 Data size: 736 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 42 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Inner Join 0 to 1
                      keys:
                        0 key (type: int)
                        1 key (type: int)
                      outputColumnNames: _col0, _col1, _col7
                      input vertices:
                        0 Map 1
                      Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: int), concat(_col1, _col7) (type: string)
                        outputColumnNames: _col0, _col1
                        Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
                          Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                          value expressions: _col0 (type: int), _col1 (type: string)
            Local Work:
              Map Reduce Local Work
        Reducer 3
            Reduce Operator Tree:
              Extract
                Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                      name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator

PREHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds is not null and b.ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table1
PREHOOK: Input: default@test_table1@ds=1
PREHOOK: Input: default@test_table1@ds=2
PREHOOK: Input: default@test_table2
PREHOOK: Input: default@test_table2@ds=1
PREHOOK: Output: default@test_table3@ds=1
POSTHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds is not null and b.ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table1
POSTHOOK: Input: default@test_table1@ds=1
POSTHOOK: Input: default@test_table1@ds=2
POSTHOOK: Input: default@test_table2
POSTHOOK: Input: default@test_table2@ds=1
POSTHOOK: Output: default@test_table3@ds=1
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
2	val_2val_2	1
2	val_2val_2	1
4	val_4val_4	1
4	val_4val_4	1
8	val_8val_8	1
8	val_8val_8	1
PREHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
9	val_9val_9	1
9	val_9val_9	1
PREHOOK: query: -- Since a single partition of the big table ('a') is being selected, it should be a map-only
-- job even though multiple partitions of 'b' are being selected
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds = '1' and b.ds is not null
PREHOOK: type: QUERY
POSTHOOK: query: -- Since a single partition of the big table ('a') is being selected, it should be a map-only
-- job even though multiple partitions of 'b' are being selected
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds = '1' and b.ds is not null
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-3 is a root stage
  Stage-1 depends on stages: Stage-3
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-3
    Spark
#### A masked pattern was here ####
      Vertices:
        Map 1
            Map Operator Tree:
                TableScan
                  alias: a
                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
                    Spark HashTable Sink Operator
                      keys:
                        0 key (type: int)
                        1 key (type: int)
            Local Work:
              Map Reduce Local Work

  Stage: Stage-1
    Spark
      Edges:
        Reducer 3 <- Map 2 (PARTITION-LEVEL SORT, 1)
#### A masked pattern was here ####
      Vertices:
        Map 2
            Map Operator Tree:
                TableScan
                  alias: b
                  Statistics: Num rows: 168 Data size: 1472 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 84 Data size: 736 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Inner Join 0 to 1
                      keys:
                        0 key (type: int)
                        1 key (type: int)
                      outputColumnNames: _col0, _col1, _col7
                      input vertices:
                        0 Map 1
                      Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: int), concat(_col1, _col7) (type: string)
                        outputColumnNames: _col0, _col1
                        Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
                          Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                          value expressions: _col0 (type: int), _col1 (type: string)
            Local Work:
              Map Reduce Local Work
        Reducer 3
            Reduce Operator Tree:
              Extract
                Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  Statistics: Num rows: 92 Data size: 809 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                      name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator

PREHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds = '1' and b.ds is not null
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table1
PREHOOK: Input: default@test_table1@ds=1
PREHOOK: Input: default@test_table2
PREHOOK: Input: default@test_table2@ds=1
PREHOOK: Input: default@test_table2@ds=2
PREHOOK: Output: default@test_table3@ds=1
POSTHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key WHERE a.ds = '1' and b.ds is not null
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table1
POSTHOOK: Input: default@test_table1@ds=1
POSTHOOK: Input: default@test_table2
POSTHOOK: Input: default@test_table2@ds=1
POSTHOOK: Input: default@test_table2@ds=2
POSTHOOK: Output: default@test_table3@ds=1
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
2	val_2val_2	1
2	val_2val_2	1
4	val_4val_4	1
4	val_4val_4	1
8	val_8val_8	1
8	val_8val_8	1
PREHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
9	val_9val_9	1
9	val_9val_9	1
PREHOOK: query: -- This should be a map-only job
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM (select key, value from test_table1 where ds = '1') a
JOIN (select key, value from test_table2 where ds = '1') b
ON a.key = b.key
PREHOOK: type: QUERY
POSTHOOK: query: -- This should be a map-only job
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM (select key, value from test_table1 where ds = '1') a
JOIN (select key, value from test_table2 where ds = '1') b
ON a.key = b.key
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-3 is a root stage
  Stage-1 depends on stages: Stage-3
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-3
    Spark
#### A masked pattern was here ####
      Vertices:
        Map 1
            Map Operator Tree:
                TableScan
                  alias: test_table1
                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), value (type: string)
                      outputColumnNames: _col0, _col1
                      Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
                      Spark HashTable Sink Operator
                        keys:
                          0 _col0 (type: int)
                          1 _col0 (type: int)
            Local Work:
              Map Reduce Local Work

  Stage: Stage-1
    Spark
      Edges:
        Reducer 3 <- Map 2 (PARTITION-LEVEL SORT, 1)
#### A masked pattern was here ####
      Vertices:
        Map 2
            Map Operator Tree:
                TableScan
                  alias: test_table2
                  Statistics: Num rows: 84 Data size: 736 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 42 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), value (type: string)
                      outputColumnNames: _col0, _col1
                      Statistics: Num rows: 42 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                      Map Join Operator
                        condition map:
                             Inner Join 0 to 1
                        keys:
                          0 _col0 (type: int)
                          1 _col0 (type: int)
                        outputColumnNames: _col0, _col1, _col3
                        input vertices:
                          0 Map 1
                        Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                        Select Operator
                          expressions: _col0 (type: int), concat(_col1, _col3) (type: string)
                          outputColumnNames: _col0, _col1
                          Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                          Reduce Output Operator
                            key expressions: _col0 (type: int)
                            sort order: +
                            Map-reduce partition columns: _col0 (type: int)
                            Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                            value expressions: _col0 (type: int), _col1 (type: string)
            Local Work:
              Map Reduce Local Work
        Reducer 3
            Reduce Operator Tree:
              Extract
                Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                      name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator

PREHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM (select key, value from test_table1 where ds = '1') a
JOIN (select key, value from test_table2 where ds = '1') b
ON a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table1
PREHOOK: Input: default@test_table1@ds=1
PREHOOK: Input: default@test_table2
PREHOOK: Input: default@test_table2@ds=1
PREHOOK: Output: default@test_table3@ds=1
POSTHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.value, b.value)
FROM (select key, value from test_table1 where ds = '1') a
JOIN (select key, value from test_table2 where ds = '1') b
ON a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table1
POSTHOOK: Input: default@test_table1@ds=1
POSTHOOK: Input: default@test_table2
POSTHOOK: Input: default@test_table2@ds=1
POSTHOOK: Output: default@test_table3@ds=1
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
2	val_2val_2	1
4	val_4val_4	1
8	val_8val_8	1
PREHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
5	val_5val_5	1
9	val_9val_9	1
PREHOOK: query: -- This should be a map-only job
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.v1, b.v2)
FROM (select key, concat(value, value) as v1 from test_table1 where ds = '1') a
JOIN (select key, concat(value, value) as v2 from test_table2 where ds = '1') b
ON a.key = b.key
PREHOOK: type: QUERY
POSTHOOK: query: -- This should be a map-only job
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.v1, b.v2)
FROM (select key, concat(value, value) as v1 from test_table1 where ds = '1') a
JOIN (select key, concat(value, value) as v2 from test_table2 where ds = '1') b
ON a.key = b.key
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-3 is a root stage
  Stage-1 depends on stages: Stage-3
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-3
    Spark
#### A masked pattern was here ####
      Vertices:
        Map 1
            Map Operator Tree:
                TableScan
                  alias: test_table1
                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), concat(value, value) (type: string)
                      outputColumnNames: _col0, _col1
                      Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
                      Spark HashTable Sink Operator
                        keys:
                          0 _col0 (type: int)
                          1 _col0 (type: int)
            Local Work:
              Map Reduce Local Work

  Stage: Stage-1
    Spark
      Edges:
        Reducer 3 <- Map 2 (PARTITION-LEVEL SORT, 1)
#### A masked pattern was here ####
      Vertices:
        Map 2
            Map Operator Tree:
                TableScan
                  alias: test_table2
                  Statistics: Num rows: 84 Data size: 736 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 42 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), concat(value, value) (type: string)
                      outputColumnNames: _col0, _col1
                      Statistics: Num rows: 42 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                      Map Join Operator
                        condition map:
                             Inner Join 0 to 1
                        keys:
                          0 _col0 (type: int)
                          1 _col0 (type: int)
                        outputColumnNames: _col0, _col1, _col3
                        input vertices:
                          0 Map 1
                        Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                        Select Operator
                          expressions: _col0 (type: int), concat(_col1, _col3) (type: string)
                          outputColumnNames: _col0, _col1
                          Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                          Reduce Output Operator
                            key expressions: _col0 (type: int)
                            sort order: +
                            Map-reduce partition columns: _col0 (type: int)
                            Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                            value expressions: _col0 (type: int), _col1 (type: string)
            Local Work:
              Map Reduce Local Work
        Reducer 3
            Reduce Operator Tree:
              Extract
                Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                      name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator

PREHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.v1, b.v2)
FROM (select key, concat(value, value) as v1 from test_table1 where ds = '1') a
JOIN (select key, concat(value, value) as v2 from test_table2 where ds = '1') b
ON a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table1
PREHOOK: Input: default@test_table1@ds=1
PREHOOK: Input: default@test_table2
PREHOOK: Input: default@test_table2@ds=1
PREHOOK: Output: default@test_table3@ds=1
POSTHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, concat(a.v1, b.v2)
FROM (select key, concat(value, value) as v1 from test_table1 where ds = '1') a
JOIN (select key, concat(value, value) as v2 from test_table2 where ds = '1') b
ON a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table1
POSTHOOK: Input: default@test_table1@ds=1
POSTHOOK: Input: default@test_table2
POSTHOOK: Input: default@test_table2@ds=1
POSTHOOK: Output: default@test_table3@ds=1
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
0	val_0val_0val_0val_0	1
0	val_0val_0val_0val_0	1
0	val_0val_0val_0val_0	1
0	val_0val_0val_0val_0	1
0	val_0val_0val_0val_0	1
0	val_0val_0val_0val_0	1
0	val_0val_0val_0val_0	1
0	val_0val_0val_0val_0	1
0	val_0val_0val_0val_0	1
2	val_2val_2val_2val_2	1
4	val_4val_4val_4val_4	1
8	val_8val_8val_8val_8	1
PREHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
5	val_5val_5val_5val_5	1
5	val_5val_5val_5val_5	1
5	val_5val_5val_5val_5	1
5	val_5val_5val_5val_5	1
5	val_5val_5val_5val_5	1
5	val_5val_5val_5val_5	1
5	val_5val_5val_5val_5	1
5	val_5val_5val_5val_5	1
5	val_5val_5val_5val_5	1
9	val_9val_9val_9val_9	1
PREHOOK: query: -- This should be a map-reduce job
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key+a.key, concat(a.value, b.value)
FROM (select key, value from test_table1 where ds = '1') a
JOIN (select key, value from test_table2 where ds = '1') b
ON a.key = b.key
PREHOOK: type: QUERY
POSTHOOK: query: -- This should be a map-reduce job
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key+a.key, concat(a.value, b.value)
FROM (select key, value from test_table1 where ds = '1') a
JOIN (select key, value from test_table2 where ds = '1') b
ON a.key = b.key
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-3 is a root stage
  Stage-1 depends on stages: Stage-3
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-3
    Spark
#### A masked pattern was here ####
      Vertices:
        Map 1
            Map Operator Tree:
                TableScan
                  alias: test_table1
                  Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), value (type: string)
                      outputColumnNames: _col0, _col1
                      Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
                      Spark HashTable Sink Operator
                        keys:
                          0 _col0 (type: int)
                          1 _col0 (type: int)
            Local Work:
              Map Reduce Local Work

  Stage: Stage-1
    Spark
      Edges:
        Reducer 3 <- Map 2 (PARTITION-LEVEL SORT, 1)
#### A masked pattern was here ####
      Vertices:
        Map 2
            Map Operator Tree:
                TableScan
                  alias: test_table2
                  Statistics: Num rows: 84 Data size: 736 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 42 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), value (type: string)
                      outputColumnNames: _col0, _col1
                      Statistics: Num rows: 42 Data size: 368 Basic stats: COMPLETE Column stats: NONE
                      Map Join Operator
                        condition map:
                             Inner Join 0 to 1
                        keys:
                          0 _col0 (type: int)
                          1 _col0 (type: int)
                        outputColumnNames: _col0, _col1, _col3
                        input vertices:
                          0 Map 1
                        Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                        Select Operator
                          expressions: (_col0 + _col0) (type: int), concat(_col1, _col3) (type: string)
                          outputColumnNames: _col0, _col1
                          Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                          Reduce Output Operator
                            key expressions: _col0 (type: int)
                            sort order: +
                            Map-reduce partition columns: _col0 (type: int)
                            Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                            value expressions: _col0 (type: int), _col1 (type: string)
            Local Work:
              Map Reduce Local Work
        Reducer 3
            Reduce Operator Tree:
              Extract
                Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  Statistics: Num rows: 46 Data size: 404 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                      name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator

PREHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key+a.key, concat(a.value, b.value)
FROM (select key, value from test_table1 where ds = '1') a
JOIN (select key, value from test_table2 where ds = '1') b
ON a.key = b.key
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table1
PREHOOK: Input: default@test_table1@ds=1
PREHOOK: Input: default@test_table2
PREHOOK: Input: default@test_table2@ds=1
PREHOOK: Output: default@test_table3@ds=1
POSTHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key+a.key, concat(a.value, b.value)
FROM (select key, value from test_table1 where ds = '1') a
JOIN (select key, value from test_table2 where ds = '1') b
ON a.key = b.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table1
POSTHOOK: Input: default@test_table1@ds=1
POSTHOOK: Input: default@test_table2
POSTHOOK: Input: default@test_table2@ds=1
POSTHOOK: Output: default@test_table3@ds=1
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key EXPRESSION [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
0	val_0val_0	1
4	val_2val_2	1
8	val_4val_4	1
10	val_5val_5	1
10	val_5val_5	1
10	val_5val_5	1
10	val_5val_5	1
10	val_5val_5	1
10	val_5val_5	1
10	val_5val_5	1
10	val_5val_5	1
10	val_5val_5	1
16	val_8val_8	1
18	val_9val_9	1
PREHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
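
The stage plans above are Hive-on-Spark plans, and the map-join conversion plus the enforced bucketing and sorting of test_table3 on insert depend on session settings that this output does not record. The following is a hypothetical sketch of the kind of configuration such a run assumes; the property names are real Hive settings, but the values shown are an assumption, not taken from this output:

set hive.execution.engine=spark;                  -- assumed: matches the Spark stage plans above
set hive.auto.convert.join=true;                  -- assumed: lets each join compile to a Map Join Operator fed by a Spark HashTable Sink
set hive.optimize.bucketmapjoin=true;             -- assumed: consider bucket map joins on the bucketed inputs
set hive.optimize.bucketmapjoin.sortedmerge=true; -- assumed: consider sort-merge bucket map joins
set hive.enforce.bucketing=true;                  -- assumed: bucket the rows written to test_table3, consistent with the single-reducer sort stage above
set hive.enforce.sorting=true;                    -- assumed: keep each output bucket sorted by key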