PREHOOK: query: -- Create two bucketed and sorted tables
CREATE TABLE test_table1 (key INT, key2 INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key, key2) SORTED BY (key ASC, key2 DESC) INTO 2 BUCKETS
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@test_table1
POSTHOOK: query: -- Create two bucketed and sorted tables
CREATE TABLE test_table1 (key INT, key2 INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key, key2) SORTED BY (key ASC, key2 DESC) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@test_table1
PREHOOK: query: CREATE TABLE test_table2 (key INT, key2 INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key, key2) SORTED BY (key ASC, key2 DESC) INTO 2 BUCKETS
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@test_table2
POSTHOOK: query: CREATE TABLE test_table2 (key INT, key2 INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key, key2) SORTED BY (key ASC, key2 DESC) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@test_table2
PREHOOK: query: CREATE TABLE test_table3 (key INT, key2 INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key, key2) SORTED BY (key ASC, key2 DESC) INTO 2 BUCKETS
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@test_table3
POSTHOOK: query: CREATE TABLE test_table3 (key INT, key2 INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key, key2) SORTED BY (key ASC, key2 DESC) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@test_table3
PREHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table1 PARTITION (ds = '1') SELECT key, key+1, value where key < 10
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@test_table1@ds=1
POSTHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table1 PARTITION (ds = '1') SELECT key, key+1, value where key < 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@test_table1@ds=1
POSTHOOK: Lineage: test_table1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table1 PARTITION(ds=1).key2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '1') SELECT key, key+1, value where key < 100
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@test_table2@ds=1
POSTHOOK: query: FROM src
INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '1') SELECT key, key+1, value where key < 100
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@test_table2@ds=1
POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
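Editor's note: the Sorted Merge Bucket Map Join plans below only appear when the relevant optimizer flags are on. The `set` commands are not part of this captured output, so the following is a minimal sketch of the session settings a qtest like this typically enables (the property names are standard Hive settings, but the exact values used for this run are an assumption):

  -- Sketch only: settings that let Hive produce the Sorted Merge Bucket Map
  -- Join plans shown below. Assumed, since the harness setup is not captured here.
  set hive.auto.convert.join = true;
  set hive.optimize.bucketmapjoin = true;
  set hive.optimize.bucketmapjoin.sortedmerge = true;
  set hive.auto.convert.sortmerge.join = true;
  set hive.auto.convert.sortmerge.join.to.mapjoin = true;
  set hive.enforce.bucketing = true;
  set hive.enforce.sorting = true;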
PREHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-only operation, since the sort-order matches
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, a.key2, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
PREHOOK: type: QUERY
POSTHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-only operation, since the sort-order matches
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, a.key2, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: a
            Statistics: Num rows: 10 Data size: 91 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Statistics: Num rows: 3 Data size: 27 Basic stats: COMPLETE Column stats: NONE
              Sorted Merge Bucket Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  File Output Operator
                    compressed: false
                    table:
                        input format: org.apache.hadoop.mapred.TextInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                        name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator

PREHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, a.key2, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table1
PREHOOK: Input: default@test_table1@ds=1
PREHOOK: Input: default@test_table2
PREHOOK: Input: default@test_table2@ds=1
PREHOOK: Output: default@test_table3@ds=1
POSTHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key, a.key2, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table1
POSTHOOK: Input: default@test_table1@ds=1
POSTHOOK: Input: default@test_table2
POSTHOOK: Input: default@test_table2@ds=1
POSTHOOK: Output: default@test_table3@ds=1
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table1)a.FieldSchema(name:key2, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ]
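Editor's note: the tablesample queries that follow read one physical bucket at a time; with 2 buckets, each row lands in the file selected by hash(key, key2) mod 2, which is how the test verifies bucket placement (here every joined row happens to hash into bucket 2, so bucket 1 comes back empty). A minimal stand-alone version of the same check; the count query is an illustrative addition, not part of the test:

  -- Read a single bucket of the bucketed, sorted output table.
  SELECT * FROM test_table3 TABLESAMPLE (BUCKET 1 OUT OF 2) s WHERE ds = '1';
  -- Illustrative: per-bucket row counts make the placement easy to eyeball.
  SELECT count(*) FROM test_table3 TABLESAMPLE (BUCKET 2 OUT OF 2) s WHERE ds = '1';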
PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
PREHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
2	3	val_2val_2	1
4	5	val_4val_4	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
8	9	val_8val_8	1
9	10	val_9val_9	1
PREHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-only operation, since the sort-order matches
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq1.key, subq1.key2, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
PREHOOK: type: QUERY
POSTHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-only operation, since the sort-order matches
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq1.key, subq1.key2, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: a
            Statistics: Num rows: 10 Data size: 91 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Statistics: Num rows: 3 Data size: 27 Basic stats: COMPLETE Column stats: NONE
              Sorted Merge Bucket Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  File Output Operator
                    compressed: false
                    table:
                        input format: org.apache.hadoop.mapred.TextInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                        name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator
PREHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq1.key, subq1.key2, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table1
PREHOOK: Input: default@test_table1@ds=1
PREHOOK: Input: default@test_table2
PREHOOK: Input: default@test_table2@ds=1
PREHOOK: Output: default@test_table3@ds=1
POSTHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq1.key, subq1.key2, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table1
POSTHOOK: Input: default@test_table1@ds=1
POSTHOOK: Input: default@test_table2
POSTHOOK: Input: default@test_table2@ds=1
POSTHOOK: Output: default@test_table3@ds=1
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table1)a.FieldSchema(name:key2, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
PREHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
2	3	val_2val_2	1
4	5	val_4val_4	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
8	9	val_8val_8	1
9	10	val_9val_9	1
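Editor's note: the next two EXPLAINs swap the key columns in the SELECT list. The join output is then ordered on (key2, key), which no longer lines up with test_table3's CLUSTERED BY (key, key2) SORTED BY (key ASC, key2 DESC), so Hive adds a reduce phase (a Reduce Output Operator with sort order +- feeding an Extract) to re-partition and re-sort before writing. The contrast, using the same queries as this file:

  -- Map-only: select order (key, key2) matches the target's bucket/sort spec.
  EXPLAIN INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
  SELECT a.key, a.key2, concat(a.value, b.value)
  FROM test_table1 a JOIN test_table2 b
  ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1';

  -- Map-reduce: swapping to (key2, key) breaks the match and forces a re-sort.
  EXPLAIN INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
  SELECT a.key2, a.key, concat(a.value, b.value)
  FROM test_table1 a JOIN test_table2 b
  ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1';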
PREHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-reduce operation
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key2, a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
PREHOOK: type: QUERY
POSTHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-reduce operation
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT a.key2, a.key, concat(a.value, b.value)
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-6 is a root stage , consists of Stage-7, Stage-8, Stage-1
  Stage-7 has a backup stage: Stage-1
  Stage-4 depends on stages: Stage-7
  Stage-0 depends on stages: Stage-1, Stage-4, Stage-5
  Stage-2 depends on stages: Stage-0
  Stage-8 has a backup stage: Stage-1
  Stage-5 depends on stages: Stage-8
  Stage-1

STAGE PLANS:
  Stage: Stage-6
    Conditional Operator

  Stage: Stage-7
    Map Reduce Local Work
      Alias -> Map Local Tables:
        b
          Fetch Operator
            limit: -1
      Alias -> Map Local Operator Tree:
        b
          TableScan
            alias: b
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              HashTable Sink Operator
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)

  Stage: Stage-4
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: a
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col1 (type: int), _col0 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  Reduce Output Operator
                    key expressions: _col0 (type: int), _col1 (type: int)
                    sort order: +-
                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                    value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator

  Stage: Stage-8
    Map Reduce Local Work
      Alias -> Map Local Tables:
        a
          Fetch Operator
            limit: -1
      Alias -> Map Local Operator Tree:
        a
          TableScan
            alias: a
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              HashTable Sink Operator
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)

  Stage: Stage-5
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: b
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col1 (type: int), _col0 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  Reduce Output Operator
                    key expressions: _col0 (type: int), _col1 (type: int)
                    sort order: +-
                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                    value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.test_table3

  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: a
            Statistics: Num rows: 10 Data size: 91 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Statistics: Num rows: 3 Data size: 27 Basic stats: COMPLETE Column stats: NONE
              Sorted Merge Bucket Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col1 (type: int), _col0 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  Reduce Output Operator
                    key expressions: _col0 (type: int), _col1 (type: int)
                    sort order: +-
                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                    value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.test_table3

PREHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-reduce operation
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq1.key2, subq1.key, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
PREHOOK: type: QUERY
POSTHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-reduce operation
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq1.key2, subq1.key, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-6 is a root stage , consists of Stage-7, Stage-8, Stage-1
  Stage-7 has a backup stage: Stage-1
  Stage-4 depends on stages: Stage-7
  Stage-0 depends on stages: Stage-1, Stage-4, Stage-5
  Stage-2 depends on stages: Stage-0
  Stage-8 has a backup stage: Stage-1
  Stage-5 depends on stages: Stage-8
  Stage-1

STAGE PLANS:
  Stage: Stage-6
    Conditional Operator

  Stage: Stage-7
    Map Reduce Local Work
      Alias -> Map Local Tables:
        subq1:b
          Fetch Operator
            limit: -1
      Alias -> Map Local Operator Tree:
        subq1:b
          TableScan
            alias: b
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              HashTable Sink Operator
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)

  Stage: Stage-4
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: a
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col1 (type: int), _col0 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  Reduce Output Operator
                    key expressions: _col0 (type: int), _col1 (type: int)
                    sort order: +-
                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                    value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator

  Stage: Stage-8
    Map Reduce Local Work
      Alias -> Map Local Tables:
        subq1:a
          Fetch Operator
            limit: -1
      Alias -> Map Local Operator Tree:
        subq1:a
          TableScan
            alias: a
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              HashTable Sink Operator
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)

  Stage: Stage-5
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: b
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col1 (type: int), _col0 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  Reduce Output Operator
                    key expressions: _col0 (type: int), _col1 (type: int)
                    sort order: +-
                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                    value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.test_table3

  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: a
            Statistics: Num rows: 10 Data size: 91 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Statistics: Num rows: 3 Data size: 27 Basic stats: COMPLETE Column stats: NONE
              Sorted Merge Bucket Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col1 (type: int), _col0 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  Reduce Output Operator
                    key expressions: _col0 (type: int), _col1 (type: int)
                    sort order: +-
                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                    value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.test_table3
PREHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-only operation
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq2.key, subq2.key2, subq2.value from
(
SELECT subq1.key2, subq1.key, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
)subq2
PREHOOK: type: QUERY
POSTHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-only operation
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq2.key, subq2.key2, subq2.value from
(
SELECT subq1.key2, subq1.key, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
)subq2
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: a
            Statistics: Num rows: 10 Data size: 91 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Statistics: Num rows: 3 Data size: 27 Basic stats: COMPLETE Column stats: NONE
              Sorted Merge Bucket Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  File Output Operator
                    compressed: false
                    table:
                        input format: org.apache.hadoop.mapred.TextInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                        name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator

PREHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq2.key, subq2.key2, subq2.value from
(
SELECT subq1.key2, subq1.key, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
)subq2
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table1
PREHOOK: Input: default@test_table1@ds=1
PREHOOK: Input: default@test_table2
PREHOOK: Input: default@test_table2@ds=1
PREHOOK: Output: default@test_table3@ds=1
POSTHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq2.key, subq2.key2, subq2.value from
(
SELECT subq1.key2, subq1.key, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
)subq2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table1
POSTHOOK: Input: default@test_table1@ds=1
POSTHOOK: Input: default@test_table2
POSTHOOK: Input: default@test_table2@ds=1
POSTHOOK: Output: default@test_table3@ds=1
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table1)a.FieldSchema(name:key2, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
PREHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
2	3	val_2val_2	1
4	5	val_4val_4	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
8	9	val_8val_8	1
9	10	val_9val_9	1
PREHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-only operation
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq2.k2, subq2.k1, subq2.value from
(
SELECT subq1.key2 as k1, subq1.key as k2, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
)subq2
PREHOOK: type: QUERY
POSTHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-only operation
EXPLAIN
INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq2.k2, subq2.k1, subq2.value from
(
SELECT subq1.key2 as k1, subq1.key as k2, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
)subq2
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: a
            Statistics: Num rows: 10 Data size: 91 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Statistics: Num rows: 3 Data size: 27 Basic stats: COMPLETE Column stats: NONE
              Sorted Merge Bucket Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  File Output Operator
                    compressed: false
                    table:
                        input format: org.apache.hadoop.mapred.TextInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                        name: default.test_table3

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table3

  Stage: Stage-2
    Stats-Aggr Operator
PREHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq2.k2, subq2.k1, subq2.value from
(
SELECT subq1.key2 as k1, subq1.key as k2, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
)subq2
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table1
PREHOOK: Input: default@test_table1@ds=1
PREHOOK: Input: default@test_table2
PREHOOK: Input: default@test_table2@ds=1
PREHOOK: Output: default@test_table3@ds=1
POSTHOOK: query: INSERT OVERWRITE TABLE test_table3 PARTITION (ds = '1')
SELECT subq2.k2, subq2.k1, subq2.value from
(
SELECT subq1.key2 as k1, subq1.key as k2, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
)subq2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table1
POSTHOOK: Input: default@test_table1@ds=1
POSTHOOK: Input: default@test_table2
POSTHOOK: Input: default@test_table2@ds=1
POSTHOOK: Output: default@test_table3@ds=1
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table1)a.FieldSchema(name:key2, type:int, comment:null), ]
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ]
PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
PREHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
PREHOOK: type: QUERY
PREHOOK: Input: default@test_table3
PREHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
POSTHOOK: query: select * from test_table3 tablesample (bucket 2 out of 2) s where ds = '1'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_table3
POSTHOOK: Input: default@test_table3@ds=1
#### A masked pattern was here ####
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
0	1	val_0val_0	1
2	3	val_2val_2	1
4	5	val_4val_4	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
5	6	val_5val_5	1
8	9	val_8val_8	1
9	10	val_9val_9	1
PREHOOK: query: CREATE TABLE test_table4 (key INT, key2 INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key, key2) SORTED BY (key DESC, key2 DESC) INTO 2 BUCKETS
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@test_table4
POSTHOOK: query: CREATE TABLE test_table4 (key INT, key2 INT, value STRING) PARTITIONED BY (ds STRING)
CLUSTERED BY (key, key2) SORTED BY (key DESC, key2 DESC) INTO 2 BUCKETS
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@test_table4
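Editor's note: test_table4 buckets on the same columns but sorts both keys DESC, while the SMB join emits rows in the source tables' order (key ASC, key2 DESC). Restoring the column order therefore no longer helps: the sort directions themselves disagree, so the final EXPLAIN below keeps a reduce phase (sort order --). A quick way to compare the two specs; the bracketed comments sketch the kind of metadata DESCRIBE FORMATTED reports, not its verbatim output:

  DESCRIBE FORMATTED test_table3;  -- Sort Columns: key ASC, key2 DESC
  DESCRIBE FORMATTED test_table4;  -- Sort Columns: key DESC, key2 DESC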
PREHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-reduce operation
EXPLAIN
INSERT OVERWRITE TABLE test_table4 PARTITION (ds = '1')
SELECT subq2.k2, subq2.k1, subq2.value from
(
SELECT subq1.key2 as k1, subq1.key as k2, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
)subq2
PREHOOK: type: QUERY
POSTHOOK: query: -- Insert data into the bucketed table by selecting from another bucketed table
-- This should be a map-reduce operation
EXPLAIN
INSERT OVERWRITE TABLE test_table4 PARTITION (ds = '1')
SELECT subq2.k2, subq2.k1, subq2.value from
(
SELECT subq1.key2 as k1, subq1.key as k2, subq1.value from
(
SELECT a.key, a.key2, concat(a.value, b.value) as value
FROM test_table1 a JOIN test_table2 b
ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
)subq1
)subq2
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-6 is a root stage , consists of Stage-7, Stage-8, Stage-1
  Stage-7 has a backup stage: Stage-1
  Stage-4 depends on stages: Stage-7
  Stage-0 depends on stages: Stage-1, Stage-4, Stage-5
  Stage-2 depends on stages: Stage-0
  Stage-8 has a backup stage: Stage-1
  Stage-5 depends on stages: Stage-8
  Stage-1

STAGE PLANS:
  Stage: Stage-6
    Conditional Operator

  Stage: Stage-7
    Map Reduce Local Work
      Alias -> Map Local Tables:
        subq2:subq1:b
          Fetch Operator
            limit: -1
      Alias -> Map Local Operator Tree:
        subq2:subq1:b
          TableScan
            alias: b
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              HashTable Sink Operator
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)

  Stage: Stage-4
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: a
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  Reduce Output Operator
                    key expressions: _col0 (type: int), _col1 (type: int)
                    sort order: --
                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                    value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.test_table4

  Stage: Stage-0
    Move Operator
      tables:
          partition:
            ds 1
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.test_table4

  Stage: Stage-2
    Stats-Aggr Operator

  Stage: Stage-8
    Map Reduce Local Work
      Alias -> Map Local Tables:
        subq2:subq1:a
          Fetch Operator
            limit: -1
      Alias -> Map Local Operator Tree:
        subq2:subq1:a
          TableScan
            alias: a
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              HashTable Sink Operator
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)

  Stage: Stage-5
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: b
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  Reduce Output Operator
                    key expressions: _col0 (type: int), _col1 (type: int)
                    sort order: --
                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                    value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.test_table4

  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: a
            Statistics: Num rows: 10 Data size: 91 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (key is not null and key2 is not null) (type: boolean)
              Statistics: Num rows: 3 Data size: 27 Basic stats: COMPLETE Column stats: NONE
              Sorted Merge Bucket Map Join Operator
                condition map:
                     Inner Join 0 to 1
                keys:
                  0 key (type: int), key2 (type: int)
                  1 key (type: int), key2 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col9
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), concat(_col2, _col9) (type: string)
                  outputColumnNames: _col0, _col1, _col2
                  Reduce Output Operator
                    key expressions: _col0 (type: int), _col1 (type: int)
                    sort order: --
                    Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
                    value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.test_table4
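Editor's note: the captured output ends with the EXPLAIN; the insert into test_table4 is not executed here. A natural follow-up check, not part of the golden output above, would be to run the insert and then sample test_table4's buckets the same way the earlier queries sample test_table3:

  -- Hypothetical follow-up (not in the captured output): run the insert, then
  -- read each bucket to confirm placement under the DESC, DESC sort spec.
  INSERT OVERWRITE TABLE test_table4 PARTITION (ds = '1')
  SELECT subq2.k2, subq2.k1, subq2.value from
  (
  SELECT subq1.key2 as k1, subq1.key as k2, subq1.value from
  (
  SELECT a.key, a.key2, concat(a.value, b.value) as value
  FROM test_table1 a JOIN test_table2 b
  ON a.key = b.key and a.key2 = b.key2 WHERE a.ds = '1' and b.ds = '1'
  )subq1
  )subq2;
  SELECT * FROM test_table4 TABLESAMPLE (BUCKET 1 OUT OF 2) s WHERE ds = '1';
  SELECT * FROM test_table4 TABLESAMPLE (BUCKET 2 OUT OF 2) s WHERE ds = '1';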