PREHOOK: query: drop table hbsort
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table hbsort
POSTHOOK: type: DROPTABLE
PREHOOK: query: drop table hbpartition
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table hbpartition
POSTHOOK: type: DROPTABLE
PREHOOK: query: -- this is a dummy table used for controlling how the HFiles are
-- created
create table hbsort(key string, val string, val2 string)
stored as
INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.hbase.HiveHFileOutputFormat'
#### A masked pattern was here ####
PREHOOK: type: CREATETABLE
POSTHOOK: query: -- this is a dummy table used for controlling how the HFiles are
-- created
create table hbsort(key string, val string, val2 string)
stored as
INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.hbase.HiveHFileOutputFormat'
#### A masked pattern was here ####
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@hbsort
PREHOOK: query: -- this is a dummy table used for controlling how the input file
-- for TotalOrderPartitioner is created
create table hbpartition(part_break string)
row format serde
'org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe'
stored as
inputformat
'org.apache.hadoop.mapred.TextInputFormat'
outputformat
'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat'
PREHOOK: type: CREATETABLE
POSTHOOK: query: -- this is a dummy table used for controlling how the input file
-- for TotalOrderPartitioner is created
create table hbpartition(part_break string)
row format serde
'org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe'
stored as
inputformat
'org.apache.hadoop.mapred.TextInputFormat'
outputformat
'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@hbpartition
PREHOOK: query: -- this should produce one file, but we do not
-- know what it will be called, so we will copy it to a well known
#### A masked pattern was here ####
insert overwrite table hbpartition
select distinct value
from src
where value='val_100' or value='val_200'
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@hbpartition
POSTHOOK: query: -- this should produce one file, but we do not
-- know what it will be called, so we will copy it to a well known
#### A masked pattern was here ####
insert overwrite table hbpartition
select distinct value
from src
where value='val_100' or value='val_200'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@hbpartition
POSTHOOK: Lineage: hbpartition.part_break SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
#### A masked pattern was here ####
-- include some trailing blanks and nulls to make sure we handle them correctly
insert overwrite table hbsort
select distinct value,
  case when key=103 then cast(null as string) else key end,
  case when key=103 then '' else cast(key+1 as string) end
from src
cluster by value
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@hbsort
#### A masked pattern was here ####
-- include some trailing blanks and nulls to make sure we handle them correctly
insert overwrite table hbsort
select distinct value,
  case when key=103 then cast(null as string) else key end,
  case when key=103 then '' else cast(key+1 as string) end
from src
cluster by value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@hbsort
POSTHOOK: Lineage: hbpartition.part_break SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
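The masked pattern lines around the two insert statements hide environment-specific session settings. For context, the configuration that drives this HFile-producing sort typically looks like the sketch below, drawn from the Hive HBaseBulkLoad documentation; the reducer count and paths are illustrative assumptions, not values from this run:

set mapred.reduce.tasks=3;                             -- one reducer per target region (assumed count)
set hive.mapred.partitioner=org.apache.hadoop.hive.ql.io.HiveTotalOrderPartitioner;
set total.order.partitioner.path=/tmp/hbpartition.lst; -- range file built from hbpartition (assumed path)
set hfile.family.path=/tmp/hbsort/cf;                  -- column family directory for the HFiles (assumed path)

The hbpartition table supplies the split keys for HiveTotalOrderPartitioner, so each reducer writes HFiles covering one key range of the target table.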
#### A masked pattern was here ####
PREHOOK: query: -- To get the files out to your local filesystem for loading into
#### A masked pattern was here ####
-- semicolon-terminate the line below before running this test:
#### A masked pattern was here ####
drop table hbsort
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@hbsort
PREHOOK: Output: default@hbsort
POSTHOOK: query: -- To get the files out to your local filesystem for loading into
#### A masked pattern was here ####
-- semicolon-terminate the line below before running this test:
#### A masked pattern was here ####
drop table hbsort
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@hbsort
POSTHOOK: Output: default@hbsort
POSTHOOK: Lineage: hbpartition.part_break SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
PREHOOK: query: drop table hbpartition
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@hbpartition
PREHOOK: Output: default@hbpartition
POSTHOOK: query: drop table hbpartition
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@hbpartition
POSTHOOK: Output: default@hbpartition
POSTHOOK: Lineage: hbpartition.part_break SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
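As the "To get the files out" comment above indicates, a real run would hand the generated HFiles to HBase's bulk loader before dropping the staging tables. A minimal follow-up sketch from the Hive CLI, using its `!` shell escape; the jar path and target table name are assumptions, not part of this test output:

-- hand the HFile directory to HBase's completebulkload tool
-- (illustrative jar path and table name):
!hadoop jar /usr/lib/hbase/hbase.jar completebulkload /tmp/hbsort hbsort_table;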