PREHOOK: query: EXPLAIN CREATE TABLE dest1(key INT, value STRING) STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
PREHOOK: type: CREATETABLE
POSTHOOK: query: EXPLAIN CREATE TABLE dest1(key INT, value STRING) STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
POSTHOOK: type: CREATETABLE
ABSTRACT SYNTAX TREE:
  (TOK_CREATETABLE (TOK_TABNAME dest1) TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEFILEFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' 'org.apache.hadoop.mapred.SequenceFileOutputFormat'))

STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
      Create Table Operator:
        Create Table
          columns: key int, value string
          if not exists: false
          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
          # buckets: -1
          output format: org.apache.hadoop.mapred.SequenceFileOutputFormat
          name: dest1
          isExternal: false

PREHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING) STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@dest1
PREHOOK: query: DESCRIBE EXTENDED dest1
PREHOOK: type: DESCTABLE
POSTHOOK: query: DESCRIBE EXTENDED dest1
POSTHOOK: type: DESCTABLE
key	int
value	string

Detailed Table Information	Table(tableName:dest1, dbName:default, owner:sdong, createTime:1297331254, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:key, type:int, comment:null), FieldSchema(name:value, type:string, comment:null)], location:pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1, inputFormat:org.apache.hadoop.mapred.SequenceFileInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{transient_lastDdlTime=1297331254}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
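Note (editor's sketch, not part of the recorded golden output): the DESCRIBE EXTENDED output above shows that Hive resolves the explicit OUTPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileOutputFormat' to org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat. The STORED AS SEQUENCEFILE shorthand should yield an equivalent storage descriptor; the table name below is hypothetical and only illustrates the shorthand form of the DDL exercised by this test.

-- Sketch only: shorthand equivalent of the explicit INPUTFORMAT/OUTPUTFORMAT clause above
CREATE TABLE dest1_shorthand(key INT, value STRING) STORED AS SEQUENCEFILE;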
PREHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@dest1
POSTHOOK: query: FROM src INSERT OVERWRITE TABLE dest1 SELECT src.key, src.value WHERE src.key < 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-47-38_871_2911208910534048188/-mr-10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_01-47-38_871_2911208910534048188/-mr-10000
POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
0	val_0
4	val_4
8	val_8
0	val_0
0	val_0
5	val_5
5	val_5
2	val_2
5	val_5
9	val_9
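Note (editor's sketch, not part of the recorded golden output): the ten rows above are the src rows with key < 10, emitted in scan order. Assuming the standard 500-row src sample table, a deterministic re-check of the load could be:

-- Sketch only: order the output and confirm the row count
SELECT key, value FROM dest1 ORDER BY key, value;
SELECT COUNT(*) FROM dest1;  -- the golden output above shows 10 rows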