Added: hive/trunk/ql/src/test/results/clientpositive/quotedid_partition.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/quotedid_partition.q.out?rev=1552451&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/quotedid_partition.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/quotedid_partition.q.out Thu Dec 19 22:55:10 2013
@@ -0,0 +1,440 @@
+PREHOOK: query: create table src_p(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_p(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_p
+PREHOOK: query: insert overwrite table src_p partition(`!@#$%^&*()_q`='a') select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_p@!@%23$%25%5E&%2A()_q=a
+POSTHOOK: query: insert overwrite table src_p partition(`!@#$%^&*()_q`='a') select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_p@!@%23$%25%5E&%2A()_q=a
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show partitions src_p
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions src_p
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+!@%23$%25%5E&%2A()_q=a
+PREHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q`
+from src_p where `!@#$%^&*()_q` = 'a' and `x+1`='10'
+group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = 'a'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q`
+from src_p where `!@#$%^&*()_q` = 'a' and `x+1`='10'
+group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = 'a'
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_p))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_q))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL !@#$%^&*()_q) 'a') (= (TOK_TABLE_OR_COL x+1) '10'))) (TOK_GROUPBY (TOK_TABLE_OR_COL x+1) (TOK_TABLE_OR_COL y&y) (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_HAVING (= (TOK_TABLE_OR_COL !@#$%^&*()_q) 'a'))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src_p
+ TableScan
+ alias: src_p
+ Filter Operator
+ predicate:
+ expr: (x+1 = '10')
+ type: boolean
+ Select Operator
+ expressions:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ outputColumnNames: x+1, y&y, !@#$%^&*()_q
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: x+1
+ type: string
+ expr: y&y
+ type: string
+ expr: !@#$%^&*()_q
+ type: string
+ mode: hash
+ outputColumnNames: _col0, _col1, _col2
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ sort order: +++
+ Map-reduce partition columns:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ tag: -1
+ Reduce Operator Tree:
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ expr: KEY._col1
+ type: string
+ expr: KEY._col2
+ type: string
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col2
+ type: string
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: create table src_p2(`x+1` string) partitioned by (`!@#$%^&*()_q` string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_p2(`x+1` string) partitioned by (`!@#$%^&*()_q` string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_p2
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table src_p2 partition(`!@#$%^&*()_q`)
+select key, value as `!@#$%^&*()_q` from src where key < '200'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_p2
+POSTHOOK: query: insert overwrite table src_p2 partition(`!@#$%^&*()_q`)
+select key, value as `!@#$%^&*()_q` from src where key < '200'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_0
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_10
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_100
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_103
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_104
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_105
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_11
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_111
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_113
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_114
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_116
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_118
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_119
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_12
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_120
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_125
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_126
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_128
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_129
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_131
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_133
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_134
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_136
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_137
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_138
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_143
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_145
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_146
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_149
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_15
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_150
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_152
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_153
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_155
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_156
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_157
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_158
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_160
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_162
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_163
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_164
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_165
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_166
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_167
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_168
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_169
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_17
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_170
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_172
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_174
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_175
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_176
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_177
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_178
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_179
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_18
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_180
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_181
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_183
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_186
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_187
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_189
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_19
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_190
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_191
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_192
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_193
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_194
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_195
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_196
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_197
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_199
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_2
+POSTHOOK: Output: default@src_p2@!@%23$%25%5E&%2A()_q=val_20
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_0).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_100).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_103).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_104).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_105).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_10).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_111).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_113).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_114).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_116).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_118).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_119).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_11).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_120).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_125).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_126).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_128).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_129).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_12).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_131).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_133).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_134).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_136).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_137).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_138).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_143).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_145).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_146).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_149).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_150).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_152).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_153).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_155).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_156).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_157).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_158).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_15).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_160).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_162).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_163).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_164).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_165).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_166).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_167).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_168).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_169).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_170).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_172).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_174).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_175).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_176).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_177).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_178).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_179).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_17).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_180).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_181).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_183).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_186).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_187).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_189).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_18).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_190).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_191).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_192).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_193).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_194).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_195).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_196).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_197).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_199).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_19).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_20).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_2).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: show partitions src_p2
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions src_p2
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_0).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_100).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_103).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_104).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_105).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_10).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_111).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_113).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_114).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_116).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_118).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_119).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_11).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_120).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_125).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_126).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_128).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_129).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_12).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_131).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_133).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_134).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_136).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_137).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_138).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_143).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_145).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_146).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_149).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_150).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_152).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_153).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_155).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_156).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_157).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_158).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_15).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_160).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_162).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_163).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_164).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_165).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_166).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_167).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_168).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_169).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_170).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_172).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_174).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_175).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_176).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_177).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_178).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_179).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_17).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_180).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_181).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_183).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_186).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_187).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_189).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_18).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_190).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_191).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_192).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_193).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_194).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_195).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_196).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_197).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_199).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_19).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_20).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p2 PARTITION(!@#$%^&*()_q=val_2).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+!@%23$%25%5E&%2A()_q=val_0
+!@%23$%25%5E&%2A()_q=val_10
+!@%23$%25%5E&%2A()_q=val_100
+!@%23$%25%5E&%2A()_q=val_103
+!@%23$%25%5E&%2A()_q=val_104
+!@%23$%25%5E&%2A()_q=val_105
+!@%23$%25%5E&%2A()_q=val_11
+!@%23$%25%5E&%2A()_q=val_111
+!@%23$%25%5E&%2A()_q=val_113
+!@%23$%25%5E&%2A()_q=val_114
+!@%23$%25%5E&%2A()_q=val_116
+!@%23$%25%5E&%2A()_q=val_118
+!@%23$%25%5E&%2A()_q=val_119
+!@%23$%25%5E&%2A()_q=val_12
+!@%23$%25%5E&%2A()_q=val_120
+!@%23$%25%5E&%2A()_q=val_125
+!@%23$%25%5E&%2A()_q=val_126
+!@%23$%25%5E&%2A()_q=val_128
+!@%23$%25%5E&%2A()_q=val_129
+!@%23$%25%5E&%2A()_q=val_131
+!@%23$%25%5E&%2A()_q=val_133
+!@%23$%25%5E&%2A()_q=val_134
+!@%23$%25%5E&%2A()_q=val_136
+!@%23$%25%5E&%2A()_q=val_137
+!@%23$%25%5E&%2A()_q=val_138
+!@%23$%25%5E&%2A()_q=val_143
+!@%23$%25%5E&%2A()_q=val_145
+!@%23$%25%5E&%2A()_q=val_146
+!@%23$%25%5E&%2A()_q=val_149
+!@%23$%25%5E&%2A()_q=val_15
+!@%23$%25%5E&%2A()_q=val_150
+!@%23$%25%5E&%2A()_q=val_152
+!@%23$%25%5E&%2A()_q=val_153
+!@%23$%25%5E&%2A()_q=val_155
+!@%23$%25%5E&%2A()_q=val_156
+!@%23$%25%5E&%2A()_q=val_157
+!@%23$%25%5E&%2A()_q=val_158
+!@%23$%25%5E&%2A()_q=val_160
+!@%23$%25%5E&%2A()_q=val_162
+!@%23$%25%5E&%2A()_q=val_163
+!@%23$%25%5E&%2A()_q=val_164
+!@%23$%25%5E&%2A()_q=val_165
+!@%23$%25%5E&%2A()_q=val_166
+!@%23$%25%5E&%2A()_q=val_167
+!@%23$%25%5E&%2A()_q=val_168
+!@%23$%25%5E&%2A()_q=val_169
+!@%23$%25%5E&%2A()_q=val_17
+!@%23$%25%5E&%2A()_q=val_170
+!@%23$%25%5E&%2A()_q=val_172
+!@%23$%25%5E&%2A()_q=val_174
+!@%23$%25%5E&%2A()_q=val_175
+!@%23$%25%5E&%2A()_q=val_176
+!@%23$%25%5E&%2A()_q=val_177
+!@%23$%25%5E&%2A()_q=val_178
+!@%23$%25%5E&%2A()_q=val_179
+!@%23$%25%5E&%2A()_q=val_18
+!@%23$%25%5E&%2A()_q=val_180
+!@%23$%25%5E&%2A()_q=val_181
+!@%23$%25%5E&%2A()_q=val_183
+!@%23$%25%5E&%2A()_q=val_186
+!@%23$%25%5E&%2A()_q=val_187
+!@%23$%25%5E&%2A()_q=val_189
+!@%23$%25%5E&%2A()_q=val_19
+!@%23$%25%5E&%2A()_q=val_190
+!@%23$%25%5E&%2A()_q=val_191
+!@%23$%25%5E&%2A()_q=val_192
+!@%23$%25%5E&%2A()_q=val_193
+!@%23$%25%5E&%2A()_q=val_194
+!@%23$%25%5E&%2A()_q=val_195
+!@%23$%25%5E&%2A()_q=val_196
+!@%23$%25%5E&%2A()_q=val_197
+!@%23$%25%5E&%2A()_q=val_199
+!@%23$%25%5E&%2A()_q=val_2
+!@%23$%25%5E&%2A()_q=val_20
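
Note how the partition names above are encoded: when a partition spec becomes an HDFS directory name, Hive percent-encodes the characters that are unsafe in a path, so `!@#$%^&*()_q`='a' is stored as !@%23$%25%5E&%2A()_q=a (# -> %23, % -> %25, ^ -> %5E, * -> %2A, while ! @ $ & ( ) _ pass through unchanged). Below is a minimal Java sketch of that encoding, inferred purely from the output above; the escape set and class are illustrative, not Hive's actual path-escaping code.

    // Sketch: percent-encode the characters that the output above shows
    // being escaped in partition directory names. The escape set here is
    // inferred from this q.out; Hive's real implementation may differ.
    public class PartitionNameEscapeDemo {
      private static final String ESCAPED = "#%^*";

      static String escape(String s) {
        StringBuilder sb = new StringBuilder();
        for (char c : s.toCharArray()) {
          if (ESCAPED.indexOf(c) >= 0) {
            sb.append('%').append(String.format("%02X", (int) c));
          } else {
            sb.append(c);
          }
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        // Prints: !@%23$%25%5E&%2A()_q=a
        System.out.println(escape("!@#$%^&*()_q") + "=a");
      }
    }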

Added: hive/trunk/ql/src/test/results/clientpositive/quotedid_skew.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/quotedid_skew.q.out?rev=1552451&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/quotedid_skew.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/quotedid_skew.q.out Thu Dec 19 22:55:10 2013
@@ -0,0 +1,226 @@
+PREHOOK: query: CREATE TABLE T1(`!@#$%^&*()_q` string, `y&y` string)
+SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE T1(`!@#$%^&*()_q` string, `y&y` string)
+SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@T1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
+PREHOOK: type: LOAD
+PREHOOK: Output: default@t1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@t1
+PREHOOK: query: CREATE TABLE T2(`!@#$%^&*()_q` string, `y&y` string)
+SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE T2(`!@#$%^&*()_q` string, `y&y` string)
+SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@T2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T2
+PREHOOK: type: LOAD
+PREHOOK: Output: default@t2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T2
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@t2
+PREHOOK: query: -- a simple join query with skew on both the tables on the join key
+-- adding a order by at the end to make the results deterministic
+
+EXPLAIN
+SELECT a.*, b.* FROM T1 a JOIN T2 b ON a. `!@#$%^&*()_q` = b. `!@#$%^&*()_q`
+PREHOOK: type: QUERY
+POSTHOOK: query: -- a simple join query with skew on both the tables on the join key
+-- adding a order by at the end to make the results deterministic
+
+EXPLAIN
+SELECT a.*, b.* FROM T1 a JOIN T2 b ON a. `!@#$%^&*()_q` = b. `!@#$%^&*()_q`
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME T1) a) (TOK_TABREF (TOK_TABNAME T2) b) (= (. (TOK_TABLE_OR_COL a) !@#$%^&*()_q) (. (TOK_TABLE_OR_COL b) !@#$%^&*()_q)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME a))) (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME b))))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-2 depends on stages: Stage-1, Stage-4
+ Stage-4 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ subquery1:a
+ TableScan
+ alias: a
+ Filter Operator
+ predicate:
+ expr: (not (!@#$%^&*()_q = '2'))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: !@#$%^&*()_q
+ type: string
+ tag: 0
+ value expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ expr: y&y
+ type: string
+ subquery1:b
+ TableScan
+ alias: b
+ Filter Operator
+ predicate:
+ expr: (not (!@#$%^&*()_q = '2'))
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: !@#$%^&*()_q
+ type: string
+ tag: 1
+ value expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ expr: y&y
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col4, _col5
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col4
+ type: string
+ expr: _col5
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ TableScan
+ Union
+ Select Operator
+ SELECT * : (no compute)
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+ TableScan
+ Union
+ Select Operator
+ SELECT * : (no compute)
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-4
+ Map Reduce
+ Alias -> Map Operator Tree:
+ a
+ TableScan
+ alias: a
+ Filter Operator
+ predicate:
+ expr: (!@#$%^&*()_q = '2')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: !@#$%^&*()_q
+ type: string
+ tag: 0
+ value expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ expr: y&y
+ type: string
+ b
+ TableScan
+ alias: b
+ Filter Operator
+ predicate:
+ expr: (!@#$%^&*()_q = '2')
+ type: boolean
+ Reduce Output Operator
+ key expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: !@#$%^&*()_q
+ type: string
+ tag: 1
+ value expressions:
+ expr: !@#$%^&*()_q
+ type: string
+ expr: y&y
+ type: string
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
+ handleSkewJoin: false
+ outputColumnNames: _col0, _col1, _col4, _col5
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ expr: _col4
+ type: string
+ expr: _col5
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+

Added: hive/trunk/ql/src/test/results/clientpositive/quotedid_smb.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/quotedid_smb.q.out?rev=1552451&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/quotedid_smb.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/quotedid_smb.q.out Thu Dec 19 22:55:10 2013
@@ -0,0 +1,83 @@
+PREHOOK: query: create table src_b(`x+1` string, `!@#$%^&*()_q` string)
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_b(`x+1` string, `!@#$%^&*()_q` string)
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_b
+PREHOOK: query: insert overwrite table src_b
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_b
+POSTHOOK: query: insert overwrite table src_b
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_b
+POSTHOOK: Lineage: src_b.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: create table src_b2(`x+1` string, `!@#$%^&*()_q` string)
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_b2(`x+1` string, `!@#$%^&*()_q` string)
+clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_b2
+POSTHOOK: Lineage: src_b.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table src_b2
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_b2
+POSTHOOK: query: insert overwrite table src_b2
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_b2
+POSTHOOK: Lineage: src_b.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b2.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b2.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: select a.`x+1`, a.`!@#$%^&*()_q`, b.`x+1`, b.`!@#$%^&*()_q`
+from src_b a join src_b2 b on a.`!@#$%^&*()_q` = b.`!@#$%^&*()_q`
+where a.`x+1` < '11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_b
+PREHOOK: Input: default@src_b2
+#### A masked pattern was here ####
+POSTHOOK: query: select a.`x+1`, a.`!@#$%^&*()_q`, b.`x+1`, b.`!@#$%^&*()_q`
+from src_b a join src_b2 b on a.`!@#$%^&*()_q` = b.`!@#$%^&*()_q`
+where a.`x+1` < '11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_b
+POSTHOOK: Input: default@src_b2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: src_b.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b2.!@#$%^&*()_q SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_b2.x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+103 val_103 103 val_103
+103 val_103 103 val_103
+103 val_103 103 val_103
+103 val_103 103 val_103
+105 val_105 105 val_105
+10 val_10 10 val_10
+100 val_100 100 val_100
+100 val_100 100 val_100
+100 val_100 100 val_100
+100 val_100 100 val_100
+104 val_104 104 val_104
+104 val_104 104 val_104
+104 val_104 104 val_104
+104 val_104 104 val_104

Added: hive/trunk/ql/src/test/results/clientpositive/quotedid_tblproperty.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/quotedid_tblproperty.q.out?rev=1552451&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/quotedid_tblproperty.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/quotedid_tblproperty.q.out Thu Dec 19 22:55:10 2013
@@ -0,0 +1,15 @@
+PREHOOK: query: CREATE TABLE xyz(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
+STORED AS TEXTFILE
+TBLPROPERTIES('columns'='valid_colname,invalid.colname')
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE xyz(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
+STORED AS TEXTFILE
+TBLPROPERTIES('columns'='valid_colname,invalid.colname')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@xyz
+PREHOOK: query: describe xyz
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe xyz
+POSTHOOK: type: DESCTABLE
+valid_colname string from deserializer
+invalid.colname string from deserializer

  • Hashutosh at Dec 19, 2013 at 10:55 pm
    Author: hashutosh
    Date: Thu Dec 19 22:55:10 2013
    New Revision: 1552451

    URL: http://svn.apache.org/r1552451
    Log:
    HIVE-6013 : Supporting Quoted Identifiers in Column Names (Harish Butani via Ashutosh Chauhan)

    Added:
         hive/trunk/ql/src/test/queries/clientpositive/quotedid_alter.q
         hive/trunk/ql/src/test/queries/clientpositive/quotedid_basic.q
         hive/trunk/ql/src/test/queries/clientpositive/quotedid_partition.q
         hive/trunk/ql/src/test/queries/clientpositive/quotedid_skew.q
         hive/trunk/ql/src/test/queries/clientpositive/quotedid_smb.q
         hive/trunk/ql/src/test/queries/clientpositive/quotedid_tblproperty.q
         hive/trunk/ql/src/test/results/clientpositive/quotedid_alter.q.out
         hive/trunk/ql/src/test/results/clientpositive/quotedid_basic.q.out
         hive/trunk/ql/src/test/results/clientpositive/quotedid_partition.q.out
         hive/trunk/ql/src/test/results/clientpositive/quotedid_skew.q.out
         hive/trunk/ql/src/test/results/clientpositive/quotedid_smb.q.out
         hive/trunk/ql/src/test/results/clientpositive/quotedid_tblproperty.q.out
    Removed:
         hive/trunk/ql/src/test/queries/clientnegative/invalid_columns.q
         hive/trunk/ql/src/test/results/clientnegative/invalid_columns.q.out
    Modified:
         hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
         hive/trunk/itests/qtest/pom.xml
         hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
         hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
         hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
         hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
         hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
         hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
         hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
         hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
         hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q
         hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q
         hive/trunk/ql/src/test/queries/clientnegative/regex_col_1.q
         hive/trunk/ql/src/test/queries/clientnegative/regex_col_2.q
         hive/trunk/ql/src/test/queries/clientnegative/regex_col_groupby.q
         hive/trunk/ql/src/test/queries/clientpositive/ambiguous_col.q
         hive/trunk/ql/src/test/queries/clientpositive/regex_col.q
         hive/trunk/ql/src/test/queries/clientpositive/show_tablestatus.q
         hive/trunk/ql/src/test/queries/clientpositive/udf_index.q
         hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out
         hive/trunk/ql/src/test/results/clientnegative/regex_col_1.q.out
         hive/trunk/ql/src/test/results/clientnegative/regex_col_2.q.out
         hive/trunk/ql/src/test/results/clientnegative/regex_col_groupby.q.out
         hive/trunk/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
         hive/trunk/ql/src/test/results/clientpositive/escape_clusterby1.q.out
         hive/trunk/ql/src/test/results/clientpositive/escape_distributeby1.q.out
         hive/trunk/ql/src/test/results/clientpositive/escape_orderby1.q.out
         hive/trunk/ql/src/test/results/clientpositive/escape_sortby1.q.out
         hive/trunk/ql/src/test/results/clientpositive/index_bitmap3.q.out
         hive/trunk/ql/src/test/results/clientpositive/index_bitmap_auto.q.out
         hive/trunk/ql/src/test/results/clientpositive/quote1.q.out

    Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
    +++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu Dec 19 22:55:10 2013
    @@ -867,7 +867,13 @@ public class HiveConf extends Configurat
          HIVESTAGEIDREARRANGE("hive.stageid.rearrange", "none"),
          HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES("hive.explain.dependency.append.tasktype", false),

    - HIVECOUNTERGROUP("hive.counters.group.name", "HIVE")
    + HIVECOUNTERGROUP("hive.counters.group.name", "HIVE"),
    +
    + // none, column
    + // none is the default(past) behavior. Implies only alphaNumeric and underscore are valid characters in identifiers.
    + // column: implies column names can contain any character.
    + HIVE_QUOTEDID_SUPPORT("hive.support.quoted.identifiers", "column",
    + new PatternValidator("none", "column"))
          ;

          public final String varname;
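
     The new HIVE_QUOTEDID_SUPPORT entry is the switch the rest of the patch keys off: "none" keeps the legacy rule that identifiers are alphanumeric plus underscore, while "column" (the new default) lets back-quoted column names contain any character. A minimal sketch of reading the setting, using only the getter that appears verbatim in the HiveUtils hunk below; the wrapper method and class name are illustrative.

         import org.apache.hadoop.conf.Configuration;
         import org.apache.hadoop.hive.conf.HiveConf;

         public class QuotedIdMode {
           // Sketch: true when quoted-identifier support is in effect
           // ("column", the new default); false for legacy "none".
           static boolean quotedIdsEnabled(Configuration conf) {
             String mode = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
             return !"none".equals(mode);
           }
         }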

    Modified: hive/trunk/itests/qtest/pom.xml
    URL: http://svn.apache.org/viewvc/hive/trunk/itests/qtest/pom.xml?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/itests/qtest/pom.xml (original)
    +++ hive/trunk/itests/qtest/pom.xml Thu Dec 19 22:55:10 2013
    @@ -36,7 +36,7 @@
          <run_disabled>false</run_disabled>
          <clustermode></clustermode>
          <execute.beeline.tests>false</execute.beeline.tests>
    - <minimr.query.files>stats_counter_partitioned.q,list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q,stats_counter.q,auto_sortmerge_join_16.q</minimr.query.files>
    + <minimr.query.files>stats_counter_partitioned.q,list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,scriptfile1_win.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,schemeAuthority2.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q,import_exported_table.q,stats_counter.q,auto_sortmerge_join_16.q,quotedid_smb.q</minimr.query.files>
          <minimr.query.negative.files>cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q</minimr.query.negative.files>
          <beeline.positive.exclude>add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rena
      me.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_o
      verwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_column.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q</beeline.positive.exclude>
        </properties>

    Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
    +++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Thu Dec 19 22:55:10 2013
    @@ -471,10 +471,17 @@ public class MetaStoreUtils {
          }
          return false;
        }
    -
    +
    + /*
    + * At the Metadata level there are no restrictions on Column Names.
    + */
    + public static final boolean validateColumnName(String name) {
    + return true;
    + }
    +
        static public String validateTblColumns(List<FieldSchema> cols) {
          for (FieldSchema fieldSchema : cols) {
    - if (!validateName(fieldSchema.getName())) {
    + if (!validateColumnName(fieldSchema.getName())) {
              return "name: " + fieldSchema.getName();
            }
            if (!validateColumnType(fieldSchema.getType())) {
    @@ -559,7 +566,7 @@ public class MetaStoreUtils {
            return null;
          }
          for (String col : cols) {
    - if (!validateName(col)) {
    + if (!validateColumnName(col)) {
              return col;
            }
          }
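
     Since validateColumnName now unconditionally accepts, a FieldSchema whose name the old validateName rules rejected passes validateTblColumns; only the column type is still checked. A small sketch, assuming (from the error-return style of this method) that validateTblColumns returns null when every column is valid:

         import java.util.Arrays;
         import org.apache.hadoop.hive.metastore.MetaStoreUtils;
         import org.apache.hadoop.hive.metastore.api.FieldSchema;

         public class ColumnNameValidationDemo {
           public static void main(String[] args) {
             // A column name the old validateName() check would reject:
             FieldSchema col = new FieldSchema("!@#$%^&*()_q", "string", null);
             // Expected to print null (no error) after this patch, assuming
             // a null return signals success.
             System.out.println(MetaStoreUtils.validateTblColumns(Arrays.asList(col)));
           }
         }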

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java Thu Dec 19 22:55:10 2013
    @@ -271,9 +271,20 @@ public final class HiveUtils {
         * Regenerate an identifier as part of unparsing it back to SQL text.
         */
        public static String unparseIdentifier(String identifier) {
    +    return unparseIdentifier(identifier, null);
    +  }
    +
    +  public static String unparseIdentifier(String identifier, Configuration conf) {
          // In the future, if we support arbitrary characters in
          // identifiers, then we'll need to escape any backticks
          // in identifier by doubling them up.
    +
    +    // the time has come
    +    String qIdSupport = conf == null ? null :
    +        HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
    +    if ( qIdSupport != null && !"none".equals(qIdSupport) ) {
    +      identifier = identifier.replaceAll("`", "``");
    +    }
          return "`" + identifier + "`";
        }
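    A small sketch of the escaping rule the new overload applies (the class
    name is hypothetical; HiveUtils and HiveConf are the classes touched
    above). With a null conf, or the setting at "none", the identifier is
    quoted without doubling, matching the old behavior:

        import org.apache.hadoop.hive.conf.HiveConf;
        import org.apache.hadoop.hive.ql.metadata.HiveUtils;

        public class UnparseSketch {
          public static void main(String[] args) {
            HiveConf conf = new HiveConf();
            conf.setVar(HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT, "column");
            // Embedded backticks are doubled before re-quoting, so the
            // unparsed text survives a round trip through the new lexer rule.
            System.out.println(HiveUtils.unparseIdentifier("x+1`", conf)); // prints `x+1```
          }
        }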


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Thu Dec 19 22:55:10 2013
    @@ -36,6 +36,7 @@ import org.apache.hadoop.fs.FileStatus;
      import org.apache.hadoop.fs.FileSystem;
      import org.apache.hadoop.fs.Path;
      import org.apache.hadoop.hive.common.JavaUtils;
    +import org.apache.hadoop.hive.conf.HiveConf;
      import org.apache.hadoop.hive.metastore.MetaStoreUtils;
      import org.apache.hadoop.hive.metastore.ProtectMode;
      import org.apache.hadoop.hive.metastore.TableType;
    @@ -195,7 +196,7 @@ public class Table implements Serializab
          List<String> colNames = new ArrayList<String>();
          while (iterCols.hasNext()) {
            String colName = iterCols.next().getName();
    -      if (!MetaStoreUtils.validateName(colName)) {
    +      if (!MetaStoreUtils.validateColumnName(colName)) {
              throw new HiveException("Invalid column name '" + colName
                  + "' in the table definition");
            }

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Thu Dec 19 22:55:10 2013
    @@ -2760,7 +2760,7 @@ public class DDLSemanticAnalyzer extends
                } else {
                  cmd.append(" AND ");
                }
    -          cmd.append(HiveUtils.unparseIdentifier(entry.getKey()));
    +          cmd.append(HiveUtils.unparseIdentifier(entry.getKey(), conf));
                cmd.append(" = '");
                cmd.append(HiveUtils.escapeString(entry.getValue()));
                cmd.append("'");

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g Thu Dec 19 22:55:10 2013
    @@ -16,7 +16,25 @@
      */
      lexer grammar HiveLexer;

    -@lexer::header {package org.apache.hadoop.hive.ql.parse;}
    +@lexer::header {
    +package org.apache.hadoop.hive.ql.parse;
    +
    +import org.apache.hadoop.conf.Configuration;
    +import org.apache.hadoop.hive.conf.HiveConf;
    +}
    +
    +@lexer::members {
    +  private Configuration hiveConf;
    +
    +  public void setHiveConf(Configuration hiveConf) {
    +    this.hiveConf = hiveConf;
    +  }
    +
    +  protected boolean allowQuotedId() {
    +    String supportedQIds = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
    +    return !"none".equals(supportedQIds);
    +  }
    +}

      // Keywords

    @@ -375,13 +393,42 @@ Number
          :
          (Digit)+ ( DOT (Digit)* (Exponent)? | Exponent)?
          ;
    -
    +
    +/*
    +An Identifier can be:
    +- tableName
    +- columnName
    +- select expr alias
    +- lateral view aliases
    +- database name
    +- view name
    +- subquery alias
    +- function name
    +- ptf argument identifier
    +- index name
    +- property name for: db,tbl,partition...
    +- fileFormat
    +- role name
    +- privilege name
    +- principal name
    +- macro name
    +- hint name
    +- window name
    +*/
      Identifier
          :
          (Letter | Digit) (Letter | Digit | '_')*
    +    | {allowQuotedId()}? QuotedIdentifier /* though at the language level we allow all Identifiers to be QuotedIdentifiers;
    +                                             at the API level only columns are allowed to be of this form */
         | '`' RegexComponent+ '`'
          ;

    +fragment
    +QuotedIdentifier
    +    :
    +    '`' ( '``' | ~('`') )* '`' { setText(getText().substring(1, getText().length() -1 ).replaceAll("``", "`")); }
    +    ;
    +
      CharSetName
          :
          '_' (Letter | Digit | '_' | '-' | '.' | ':' )+
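    The setText action in the QuotedIdentifier fragment amounts to the
    following plain-Java transformation (a sketch with a hypothetical class
    name, not the generated lexer code):

        public class QuotedIdSketch {
          // Drop the surrounding backticks, then collapse each doubled
          // backtick, mirroring the setText(...) action above.
          static String unquote(String token) {
            return token.substring(1, token.length() - 1).replaceAll("``", "`");
          }

          public static void main(String[] args) {
            System.out.println(unquote("`x+1```"));        // x+1`
            System.out.println(unquote("`!@#$%^&*()_q`")); // !@#$%^&*()_q
          }
        }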

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java Thu Dec 19 22:55:10 2013
    @@ -160,6 +160,11 @@ public class ParseDriver {
        public ASTNode parse(String command) throws ParseException {
          return parse(command, null);
        }
    +
    +  public ASTNode parse(String command, Context ctx)
    +      throws ParseException {
    +    return parse(command, ctx, true);
    +  }

        /**
         * Parses a command, optionally assigning the parser's token stream to the
    @@ -175,13 +180,17 @@ public class ParseDriver {
         *
         * @return parsed AST
         */
    -  public ASTNode parse(String command, Context ctx) throws ParseException {
    +  public ASTNode parse(String command, Context ctx, boolean setTokenRewriteStream)
    +      throws ParseException {
          LOG.info("Parsing command: " + command);

          HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
          TokenRewriteStream tokens = new TokenRewriteStream(lexer);
          if (ctx != null) {
    -      ctx.setTokenRewriteStream(tokens);
    +      if ( setTokenRewriteStream) {
    +        ctx.setTokenRewriteStream(tokens);
    +      }
    +      lexer.setHiveConf(ctx.getConf());
          }
          HiveParser parser = new HiveParser(tokens);
          parser.setTreeAdaptor(adaptor);
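    Usage sketch for the new three-argument parse(): passing false leaves the
    outer query's TokenRewriteStream on the Context untouched while the lexer
    still receives the session conf, so identifiers inside reparsed view text
    are lexed under the same hive.support.quoted.identifiers setting. The
    Context(Configuration) constructor and ASTNode.dump() call are assumptions
    about the surrounding API; illustrative only:

        import org.apache.hadoop.hive.conf.HiveConf;
        import org.apache.hadoop.hive.ql.Context;
        import org.apache.hadoop.hive.ql.parse.ASTNode;
        import org.apache.hadoop.hive.ql.parse.ParseDriver;

        public class ViewReparseSketch {
          public static void main(String[] args) throws Exception {
            HiveConf conf = new HiveConf();
            conf.setVar(HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT, "column");
            Context ctx = new Context(conf);
            ParseDriver pd = new ParseDriver();
            // false: do not install this token stream on ctx, so the
            // top-level query's stream stays in place for unparse translations.
            ASTNode tree = pd.parse("select `y&y` from t4", ctx, false);
            System.out.println(tree.dump());
          }
        }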

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Dec 19 22:55:10 2013
    @@ -266,7 +266,7 @@ public class SemanticAnalyzer extends Ba
          listMapJoinOpsNoReducer = new ArrayList<AbstractMapJoinOperator<? extends MapJoinDesc>>();
          groupOpToInputTables = new HashMap<GroupByOperator, Set<String>>();
          prunedPartitions = new HashMap<String, PrunedPartitionList>();
    -    unparseTranslator = new UnparseTranslator();
    +    unparseTranslator = new UnparseTranslator(conf);
          autogenColAliasPrfxLbl = HiveConf.getVar(conf,
              HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL);
          autogenColAliasPrfxIncludeFuncName = HiveConf.getBoolVar(conf,
    @@ -1350,7 +1350,7 @@ public class SemanticAnalyzer extends Ba
            String viewText = tab.getViewExpandedText();
            // Reparse text, passing null for context to avoid clobbering
            // the top-level token stream.
    -      ASTNode tree = pd.parse(viewText, null);
    +      ASTNode tree = pd.parse(viewText, ctx, false);
            tree = ParseUtils.findRootNonNullToken(tree);
            viewTree = tree;
            Dispatcher nodeOriginDispatcher = new Dispatcher() {
    @@ -2190,9 +2190,9 @@ public class SemanticAnalyzer extends Ba
                if (replacementText.length() > 0) {
                  replacementText.append(", ");
                }
    -          replacementText.append(HiveUtils.unparseIdentifier(tmp[0]));
    +          replacementText.append(HiveUtils.unparseIdentifier(tmp[0], conf));
              replacementText.append(".");
    -          replacementText.append(HiveUtils.unparseIdentifier(tmp[1]));
    +          replacementText.append(HiveUtils.unparseIdentifier(tmp[1], conf));
              }
            }
          }
    @@ -2748,7 +2748,11 @@ public class SemanticAnalyzer extends Ba
         * Returns whether the pattern is a regex expression (instead of a normal
         * string). Normal string is a string with all alphabets/digits and "_".
         */
    -  private static boolean isRegex(String pattern) {
    +  private static boolean isRegex(String pattern, HiveConf conf) {
    +    String qIdSupport = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
    +    if ( "column".equals(qIdSupport)) {
    +      return false;
    +    }
          for (int i = 0; i < pattern.length(); i++) {
            if (!Character.isLetterOrDigit(pattern.charAt(i))
                && pattern.charAt(i) != '_') {
    @@ -2942,7 +2946,7 @@ public class SemanticAnalyzer extends Ba
              selectStar = true;
            } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL && !hasAsClause
                && !inputRR.getIsExprResolver()
    -          && isRegex(unescapeIdentifier(expr.getChild(0).getText()))) {
    +          && isRegex(unescapeIdentifier(expr.getChild(0).getText()), conf)) {
              // In case the expression is a regex COL.
              // This can only happen without AS clause
              // We don't allow this for ExprResolver - the Group By case
    @@ -2953,7 +2957,7 @@ public class SemanticAnalyzer extends Ba
                && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0)
                    .getChild(0).getText().toLowerCase())) && !hasAsClause
                && !inputRR.getIsExprResolver()
    -          && isRegex(unescapeIdentifier(expr.getChild(1).getText()))) {
    +          && isRegex(unescapeIdentifier(expr.getChild(1).getText()), conf)) {
              // In case the expression is TABLE.COL (col can be regex).
              // This can only happen without AS clause
              // We don't allow this for ExprResolver - the Group By case
    @@ -9017,10 +9021,10 @@ public class SemanticAnalyzer extends Ba
              // Modify a copy, not the original
              fieldSchema = new FieldSchema(fieldSchema);
              derivedSchema.set(i, fieldSchema);
    -        sb.append(HiveUtils.unparseIdentifier(fieldSchema.getName()));
    +        sb.append(HiveUtils.unparseIdentifier(fieldSchema.getName(), conf));
              sb.append(" AS ");
              String imposedName = imposedSchema.get(i).getName();
    -        sb.append(HiveUtils.unparseIdentifier(imposedName));
    +        sb.append(HiveUtils.unparseIdentifier(imposedName, conf));
              fieldSchema.setName(imposedName);
              // We don't currently allow imposition of a type
              fieldSchema.setComment(imposedSchema.get(i).getComment());
    @@ -9028,7 +9032,7 @@ public class SemanticAnalyzer extends Ba
            sb.append(" FROM (");
            sb.append(expandedText);
            sb.append(") ");
    -      sb.append(HiveUtils.unparseIdentifier(createVwDesc.getViewName()));
    +      sb.append(HiveUtils.unparseIdentifier(createVwDesc.getViewName(), conf));
            expandedText = sb.toString();
          }

    @@ -9209,9 +9213,9 @@ public class SemanticAnalyzer extends Ba
            }
            String[] tmp = input.reverseLookup(columnDesc.getColumn());
            StringBuilder replacementText = new StringBuilder();
    -      replacementText.append(HiveUtils.unparseIdentifier(tmp[0]));
    +      replacementText.append(HiveUtils.unparseIdentifier(tmp[0], conf));
          replacementText.append(".");
    -      replacementText.append(HiveUtils.unparseIdentifier(tmp[1]));
    +      replacementText.append(HiveUtils.unparseIdentifier(tmp[1], conf));
            unparseTranslator.addTranslation(node, replacementText.toString());
          }
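    The isRegex change gates the legacy regex column expansion on the new
    setting: in "column" mode a backticked token is always a literal column
    name. A standalone sketch of the resulting behavior (hypothetical class
    name; the loop's return value is inferred from the truncated hunk above):

        public class RegexColSketch {
          static boolean isRegex(String pattern, String quotedIdSupport) {
            // Under "column" mode quoted identifiers are never regexes.
            if ("column".equals(quotedIdSupport)) {
              return false;
            }
            // Legacy rule: anything outside letters, digits and '_' makes
            // the pattern a regex over column names.
            for (int i = 0; i < pattern.length(); i++) {
              char c = pattern.charAt(i);
              if (!Character.isLetterOrDigit(c) && c != '_') {
                return true;
              }
            }
            return false;
          }

          public static void main(String[] args) {
            System.out.println(isRegex("[kv].*", "none"));   // true: expands matching columns
            System.out.println(isRegex("[kv].*", "column")); // false: literal column name
          }
        }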


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java Thu Dec 19 22:55:10 2013
    @@ -25,6 +25,7 @@ import java.util.NavigableMap;
      import java.util.TreeMap;

      import org.antlr.runtime.TokenRewriteStream;
    +import org.apache.hadoop.conf.Configuration;
      import org.apache.hadoop.hive.ql.metadata.HiveUtils;

      /**
    @@ -42,8 +43,10 @@ class UnparseTranslator {
        private final NavigableMap<Integer, Translation> translations;
        private final List<CopyTranslation> copyTranslations;
        private boolean enabled;
    +  private Configuration conf;

    -  public UnparseTranslator() {
    +  public UnparseTranslator(Configuration conf) {
    +    this.conf = conf;
          translations = new TreeMap<Integer, Translation>();
          copyTranslations = new ArrayList<CopyTranslation>();
        }
    @@ -152,12 +155,12 @@ class UnparseTranslator {
          else {
            // transform the table reference to an absolute reference (i.e., "db.table")
            StringBuilder replacementText = new StringBuilder();
    -      replacementText.append(HiveUtils.unparseIdentifier(currentDatabaseName));
    +      replacementText.append(HiveUtils.unparseIdentifier(currentDatabaseName, conf));
            replacementText.append('.');

            ASTNode identifier = (ASTNode)tableName.getChild(0);
            String identifierText = BaseSemanticAnalyzer.unescapeIdentifier(identifier.getText());
    -      replacementText.append(HiveUtils.unparseIdentifier(identifierText));
    +      replacementText.append(HiveUtils.unparseIdentifier(identifierText, conf));

            addTranslation(identifier, replacementText.toString());
          }
    @@ -176,7 +179,7 @@ class UnparseTranslator {
          assert (identifier.getToken().getType() == HiveParser.Identifier);
          String replacementText = identifier.getText();
          replacementText = BaseSemanticAnalyzer.unescapeIdentifier(replacementText);
    -    replacementText = HiveUtils.unparseIdentifier(replacementText);
    +    replacementText = HiveUtils.unparseIdentifier(replacementText, conf);
          addTranslation(identifier, replacementText);
        }


    Modified: hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q (original)
    +++ hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col1.q Thu Dec 19 22:55:10 2013
    @@ -1,2 +1,3 @@
    +set hive.support.quoted.identifiers=none;
      -- TOK_TABLE_OR_COL
      explain select * from (select `.*` from (select * from src) a join (select * from src1) b on (a.key = b.key)) t;

    Modified: hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q (original)
    +++ hive/trunk/ql/src/test/queries/clientnegative/ambiguous_col2.q Thu Dec 19 22:55:10 2013
    @@ -1,2 +1,3 @@
    +set hive.support.quoted.identifiers=none;
      -- DOT
      explain select * from (select a.`[kv].*`, b.`[kv].*` from (select * from src) a join (select * from src1) b on (a.key = b.key)) t;

    Modified: hive/trunk/ql/src/test/queries/clientnegative/regex_col_1.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/regex_col_1.q?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientnegative/regex_col_1.q (original)
    +++ hive/trunk/ql/src/test/queries/clientnegative/regex_col_1.q Thu Dec 19 22:55:10 2013
    @@ -1,2 +1,3 @@
    +set hive.support.quoted.identifiers=none;
      EXPLAIN
      SELECT `+++` FROM srcpart;

    Modified: hive/trunk/ql/src/test/queries/clientnegative/regex_col_2.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/regex_col_2.q?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientnegative/regex_col_2.q (original)
    +++ hive/trunk/ql/src/test/queries/clientnegative/regex_col_2.q Thu Dec 19 22:55:10 2013
    @@ -1,2 +1,3 @@
    +set hive.support.quoted.identifiers=none;
      EXPLAIN
      SELECT `.a.` FROM srcpart;

    Modified: hive/trunk/ql/src/test/queries/clientnegative/regex_col_groupby.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/regex_col_groupby.q?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientnegative/regex_col_groupby.q (original)
    +++ hive/trunk/ql/src/test/queries/clientnegative/regex_col_groupby.q Thu Dec 19 22:55:10 2013
    @@ -1,2 +1,3 @@
    +set hive.support.quoted.identifiers=none;
      EXPLAIN
      SELECT `..`, count(1) FROM srcpart GROUP BY `..`;

    Modified: hive/trunk/ql/src/test/queries/clientpositive/ambiguous_col.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/ambiguous_col.q?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/ambiguous_col.q (original)
    +++ hive/trunk/ql/src/test/queries/clientpositive/ambiguous_col.q Thu Dec 19 22:55:10 2013
    @@ -1,3 +1,4 @@
    +set hive.support.quoted.identifiers=none;
      -- TOK_ALLCOLREF
      explain select * from (select a.key, a.* from (select * from src) a join (select * from src1) b on (a.key = b.key)) t;
      -- DOT

    Added: hive/trunk/ql/src/test/queries/clientpositive/quotedid_alter.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/quotedid_alter.q?rev=1552451&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/quotedid_alter.q (added)
    +++ hive/trunk/ql/src/test/queries/clientpositive/quotedid_alter.q Thu Dec 19 22:55:10 2013
    @@ -0,0 +1,21 @@
    +
    +set hive.support.quoted.identifiers=column;
    +
    +create table src_b3(`x+1` string, `!@#$%^&*()_q` string) ;
    +
    +alter table src_b3
    +clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
    +;
    +
    +
    +-- alter partition
    +create table src_p3(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string);
    +
    +insert overwrite table src_p3 partition(`!@#$%^&*()_q`='a') select * from src;
    +show partitions src_p3;
    +
    +alter table src_p3 add if not exists partition(`!@#$%^&*()_q`='b');
    +show partitions src_p3;
    +
    +alter table src_p3 partition(`!@#$%^&*()_q`='b') rename to partition(`!@#$%^&*()_q`='c');
    +show partitions src_p3;
    \ No newline at end of file

    Added: hive/trunk/ql/src/test/queries/clientpositive/quotedid_basic.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/quotedid_basic.q?rev=1552451&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/quotedid_basic.q (added)
    +++ hive/trunk/ql/src/test/queries/clientpositive/quotedid_basic.q Thu Dec 19 22:55:10 2013
    @@ -0,0 +1,34 @@
    +
    +set hive.support.quoted.identifiers=column;
    +
    +-- basic
    +create table t1(`x+1` string, `y&y` string, `!@#$%^&*()_q` string);
    +describe t1;
    +select `x+1`, `y&y`, `!@#$%^&*()_q` from t1;
    +explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1;
    +explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1';
    +explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1';
    +explain select `x+1`, `y&y`, `!@#$%^&*()_q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
    +from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1';
    +
    +-- case insensitive
    +explain select `X+1`, `Y&y`, `!@#$%^&*()_Q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
    +from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&Y`, `!@#$%^&*()_q` having `!@#$%^&*()_Q` = '1';
    +
    +
    +-- escaped back ticks
    +create table t4(`x+1``` string, `y&y` string);
    +describe t4;
    +insert into table t4 select * from src;
    +select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
    +from t4 where `x+1``` = '10' group by `x+1```, `y&y` having `x+1``` = '10';
    +
    +-- view
    +create view v1 as
    +select `x+1```, `y&y`
    +from t4 where `x+1``` < '200';
    +
    +select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
    +from v1
    +group by `x+1```, `y&y`
    +;
    \ No newline at end of file

    Added: hive/trunk/ql/src/test/queries/clientpositive/quotedid_partition.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/quotedid_partition.q?rev=1552451&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/quotedid_partition.q (added)
    +++ hive/trunk/ql/src/test/queries/clientpositive/quotedid_partition.q Thu Dec 19 22:55:10 2013
    @@ -0,0 +1,24 @@
    +
    +set hive.support.quoted.identifiers=column;
    +
    +
    +create table src_p(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string);
    +insert overwrite table src_p partition(`!@#$%^&*()_q`='a') select * from src;
    +
    +show partitions src_p;
    +
    +explain select `x+1`, `y&y`, `!@#$%^&*()_q`
    +from src_p where `!@#$%^&*()_q` = 'a' and `x+1`='10'
    +group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = 'a'
    +;
    +
    +set hive.exec.dynamic.partition.mode=nonstrict
    +;
    +
    +create table src_p2(`x+1` string) partitioned by (`!@#$%^&*()_q` string);
    +
    +insert overwrite table src_p2 partition(`!@#$%^&*()_q`)
    +select key, value as `!@#$%^&*()_q` from src where key < '200'
    +;
    +
    +show partitions src_p2;
    \ No newline at end of file

    Added: hive/trunk/ql/src/test/queries/clientpositive/quotedid_skew.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/quotedid_skew.q?rev=1552451&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/quotedid_skew.q (added)
    +++ hive/trunk/ql/src/test/queries/clientpositive/quotedid_skew.q Thu Dec 19 22:55:10 2013
    @@ -0,0 +1,26 @@
    +
    +set hive.support.quoted.identifiers=column;
    +
    +set hive.mapred.supports.subdirectories=true;
    +set hive.internal.ddl.list.bucketing.enable=true;
    +set hive.optimize.skewjoin.compiletime = true;
    +
    +CREATE TABLE T1(`!@#$%^&*()_q` string, `y&y` string)
    +SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
    +;
    +
    +LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
    +
    +CREATE TABLE T2(`!@#$%^&*()_q` string, `y&y` string)
    +SKEWED BY (`!@#$%^&*()_q`) ON ((2)) STORED AS TEXTFILE
    +;
    +
    +LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T2;
    +
    +-- a simple join query with skew on both the tables on the join key
    +-- adding a order by at the end to make the results deterministic
    +
    +EXPLAIN
    +SELECT a.*, b.* FROM T1 a JOIN T2 b ON a. `!@#$%^&*()_q` = b. `!@#$%^&*()_q`
    +;
    +

    Added: hive/trunk/ql/src/test/queries/clientpositive/quotedid_smb.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/quotedid_smb.q?rev=1552451&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/quotedid_smb.q (added)
    +++ hive/trunk/ql/src/test/queries/clientpositive/quotedid_smb.q Thu Dec 19 22:55:10 2013
    @@ -0,0 +1,34 @@
    +
    +set hive.support.quoted.identifiers=column;
    +
    +
    +set hive.enforce.bucketing = true;
    +set hive.enforce.sorting = true;
    +create table src_b(`x+1` string, `!@#$%^&*()_q` string)
    +clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
    +;
    +
    +insert overwrite table src_b
    +select * from src
    +;
    +
    +create table src_b2(`x+1` string, `!@#$%^&*()_q` string)
    +clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
    +;
    +
    +insert overwrite table src_b2
    +select * from src
    +;
    +
    +set hive.auto.convert.join=true;
    +set hive.auto.convert.sortmerge.join=true;
    +set hive.optimize.bucketmapjoin = true;
    +set hive.optimize.bucketmapjoin.sortedmerge = true;
    +
    +set hive.auto.convert.sortmerge.join.to.mapjoin=false;
    +set hive.auto.convert.sortmerge.join.bigtable.selection.policy = org.apache.hadoop.hive.ql.optimizer.TableSizeBasedBigTableSelectorForAutoSMJ;
    +
    +select a.`x+1`, a.`!@#$%^&*()_q`, b.`x+1`, b.`!@#$%^&*()_q`
    +from src_b a join src_b2 b on a.`!@#$%^&*()_q` = b.`!@#$%^&*()_q`
    +where a.`x+1` < '11'
    +;
    \ No newline at end of file

    Added: hive/trunk/ql/src/test/queries/clientpositive/quotedid_tblproperty.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/quotedid_tblproperty.q?rev=1552451&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/quotedid_tblproperty.q (added)
    +++ hive/trunk/ql/src/test/queries/clientpositive/quotedid_tblproperty.q Thu Dec 19 22:55:10 2013
    @@ -0,0 +1,8 @@
    +ADD JAR ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
    +
    +CREATE TABLE xyz(KEY STRING, VALUE STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
    +STORED AS TEXTFILE
    +TBLPROPERTIES('columns'='valid_colname,invalid.colname')
    +;
    +
    +describe xyz;
    \ No newline at end of file

    Modified: hive/trunk/ql/src/test/queries/clientpositive/regex_col.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/regex_col.q?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/regex_col.q (original)
    +++ hive/trunk/ql/src/test/queries/clientpositive/regex_col.q Thu Dec 19 22:55:10 2013
    @@ -1,3 +1,5 @@
    +set hive.support.quoted.identifiers=none;
    +
      EXPLAIN
      SELECT * FROM srcpart;


    Modified: hive/trunk/ql/src/test/queries/clientpositive/show_tablestatus.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/show_tablestatus.q?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/show_tablestatus.q (original)
    +++ hive/trunk/ql/src/test/queries/clientpositive/show_tablestatus.q Thu Dec 19 22:55:10 2013
    @@ -1,3 +1,4 @@
    +set hive.support.quoted.identifiers=none;
      EXPLAIN
      SHOW TABLE EXTENDED IN default LIKE `src`;


    Modified: hive/trunk/ql/src/test/queries/clientpositive/udf_index.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/udf_index.q?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/udf_index.q (original)
    +++ hive/trunk/ql/src/test/queries/clientpositive/udf_index.q Thu Dec 19 22:55:10 2013
    @@ -1,2 +1,3 @@
    +set hive.support.quoted.identifiers=none;
      DESCRIBE FUNCTION `index`;
      DESCRIBE FUNCTION EXTENDED `index`;

    Modified: hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out Thu Dec 19 22:55:10 2013
    @@ -37,7 +37,7 @@ POSTHOOK: query: ALTER TABLE xxx10 REPLA
      POSTHOOK: type: ALTERTABLE_REPLACECOLS
      POSTHOOK: Input: default@xxx10
      POSTHOOK: Output: default@xxx10
    -FAILED: SemanticException Line 1:30 Invalid column reference '`value`' in definition of VIEW xxx9 [
    +FAILED: SemanticException Line 1:30 Invalid column reference 'value' in definition of VIEW xxx9 [
      SELECT `xxx10`.`key`, `xxx10`.`value` FROM `default`.`xxx10`
      ] used as xxx at Line 1:39 in definition of VIEW xxx8 [
      SELECT `xxx`.`key`, `xxx`.`value` FROM `default`.`xxx9` `xxx`

    Modified: hive/trunk/ql/src/test/results/clientnegative/regex_col_1.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/regex_col_1.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientnegative/regex_col_1.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientnegative/regex_col_1.q.out Thu Dec 19 22:55:10 2013
    @@ -1,3 +1,3 @@
    -FAILED: SemanticException Line 2:7 Invalid column reference '`+++`': Dangling meta character '+' near index 0
    +FAILED: SemanticException Line 3:7 Invalid column reference '`+++`': Dangling meta character '+' near index 0
      +++
      ^

    Modified: hive/trunk/ql/src/test/results/clientnegative/regex_col_2.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/regex_col_2.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientnegative/regex_col_2.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientnegative/regex_col_2.q.out Thu Dec 19 22:55:10 2013
    @@ -1 +1 @@
    -FAILED: SemanticException [Error 10002]: Line 2:7 Invalid column reference '`.a.`'
    +FAILED: SemanticException [Error 10002]: Line 3:7 Invalid column reference '`.a.`'

    Modified: hive/trunk/ql/src/test/results/clientnegative/regex_col_groupby.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/regex_col_groupby.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientnegative/regex_col_groupby.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientnegative/regex_col_groupby.q.out Thu Dec 19 22:55:10 2013
    @@ -1 +1 @@
    -FAILED: SemanticException [Error 10004]: Line 2:44 Invalid table alias or column reference '`..`': (possible column names are: key, value, ds, hr)
    +FAILED: SemanticException [Error 10004]: Line 3:44 Invalid table alias or column reference '`..`': (possible column names are: key, value, ds, hr)

    Modified: hive/trunk/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out Thu Dec 19 22:55:10 2013
    @@ -95,7 +95,7 @@ POSTHOOK: Lineage: orderpayment_small.ti
      POSTHOOK: Lineage: orderpayment_small.userid SIMPLE []
      POSTHOOK: Lineage: user_small.userid SIMPLE [(testsrc)testsrc.FieldSchema(name:key, type:int, comment:null), ]
      ABSTRACT SYNTAX TREE:
    - (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME `orderpayment_small`) `orderpayment`) (TOK_TABREF (TOK_TABNAME `orderpayment_small`) `dim_pay_date`) (= (. (TOK_TABLE_OR_COL `dim_pay_date`) `date`) (. (TOK_TABLE_OR_COL `orderpayment`) `date`))) (TOK_TABREF (TOK_TABNAME `orderpayment_small`) `deal`) (= (. (TOK_TABLE_OR_COL `deal`) `dealid`) (. (TOK_TABLE_OR_COL `orderpayment`) `dealid`))) (TOK_TABREF (TOK_TABNAME `orderpayment_small`) `order_city`) (= (. (TOK_TABLE_OR_COL `order_city`) `cityid`) (. (TOK_TABLE_OR_COL `orderpayment`) `cityid`))) (TOK_TABREF (TOK_TABNAME `user_small`) `user`) (= (. (TOK_TABLE_OR_COL `user`) `userid`) (. (TOK_TABLE_OR_COL `orderpayment`) `userid`)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL `dim_pay_date`) `date`)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL `deal`) `dealid`))) (TOK_LIMIT 5)))
    + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME orderpayment_small) orderpayment) (TOK_TABREF (TOK_TABNAME orderpayment_small) dim_pay_date) (= (. (TOK_TABLE_OR_COL dim_pay_date) date) (. (TOK_TABLE_OR_COL orderpayment) date))) (TOK_TABREF (TOK_TABNAME orderpayment_small) deal) (= (. (TOK_TABLE_OR_COL deal) dealid) (. (TOK_TABLE_OR_COL orderpayment) dealid))) (TOK_TABREF (TOK_TABNAME orderpayment_small) order_city) (= (. (TOK_TABLE_OR_COL order_city) cityid) (. (TOK_TABLE_OR_COL orderpayment) cityid))) (TOK_TABREF (TOK_TABNAME user_small) user) (= (. (TOK_TABLE_OR_COL user) userid) (. (TOK_TABLE_OR_COL orderpayment) userid)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL dim_pay_date) date)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL deal) dealid))) (TOK_LIMIT 5)))

      STAGE DEPENDENCIES:
        Stage-1 is a root stage

    Modified: hive/trunk/ql/src/test/results/clientpositive/escape_clusterby1.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/escape_clusterby1.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/escape_clusterby1.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientpositive/escape_clusterby1.q.out Thu Dec 19 22:55:10 2013
    @@ -66,7 +66,7 @@ POSTHOOK: query: explain
      select `key`, value from src cluster by `key`, value
      POSTHOOK: type: QUERY
      ABSTRACT SYNTAX TREE:
    - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `key`)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL `key`) (TOK_TABLE_OR_COL value))))
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value))))

      STAGE DEPENDENCIES:
        Stage-1 is a root stage

    Modified: hive/trunk/ql/src/test/results/clientpositive/escape_distributeby1.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/escape_distributeby1.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/escape_distributeby1.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientpositive/escape_distributeby1.q.out Thu Dec 19 22:55:10 2013
    @@ -61,7 +61,7 @@ POSTHOOK: query: explain
      select `key`, value from src distribute by `key`, value
      POSTHOOK: type: QUERY
      ABSTRACT SYNTAX TREE:
    - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `key`)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL `key`) (TOK_TABLE_OR_COL value))))
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value))))

      STAGE DEPENDENCIES:
        Stage-1 is a root stage

    Modified: hive/trunk/ql/src/test/results/clientpositive/escape_orderby1.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/escape_orderby1.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/escape_orderby1.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientpositive/escape_orderby1.q.out Thu Dec 19 22:55:10 2013
    @@ -61,7 +61,7 @@ POSTHOOK: query: explain
      select `key`, value from src order by `key`, value
      POSTHOOK: type: QUERY
      ABSTRACT SYNTAX TREE:
    - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `key`)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL `key`)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)))))
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)))))

      STAGE DEPENDENCIES:
        Stage-1 is a root stage

    Modified: hive/trunk/ql/src/test/results/clientpositive/escape_sortby1.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/escape_sortby1.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/escape_sortby1.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientpositive/escape_sortby1.q.out Thu Dec 19 22:55:10 2013
    @@ -61,7 +61,7 @@ POSTHOOK: query: explain
      select `key`, value from src sort by `key`, value
      POSTHOOK: type: QUERY
      ABSTRACT SYNTAX TREE:
    - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `key`)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL `key`)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)))))
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value)))))

      STAGE DEPENDENCIES:
        Stage-1 is a root stage

    Modified: hive/trunk/ql/src/test/results/clientpositive/index_bitmap3.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/index_bitmap3.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/index_bitmap3.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientpositive/index_bitmap3.q.out Thu Dec 19 22:55:10 2013
    @@ -131,7 +131,7 @@ POSTHOOK: Lineage: default__src_src2_ind
      POSTHOOK: Lineage: default__src_src2_index__._offset SIMPLE [(src)src.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
      POSTHOOK: Lineage: default__src_src2_index__.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
      ABSTRACT SYNTAX TREE:
    - (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME default__src_src1_index__))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `_bucketname`) bucketname) (TOK_SELEXPR (TOK_TABLE_OR_COL `_offset`) offset) (TOK_SELEXPR (TOK_TABLE_OR_COL `_bitmaps`) bitmaps)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 0)))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME default__src_src2_index__))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `_bucketname`) bucketname) (TOK_SELEXPR (TOK_TABLE_OR_COL `_offset`) offset) (TOK_SELEXPR (TOK_TABLE_OR_COL `_bitmaps`) bitmaps)) (TOK_WHERE (= (TOK_TABLE_OR_COL value) "val_0")))) b) (AND (= (. (TOK_TABLE_OR_COL a) bucketname) (. (TOK_TABLE_OR_COL b) bucketname)) (= (. (TOK_TABLE_OR_COL a) offset) (. (TOK_TABLE_OR_COL b) offset))))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (
      TOK_TABLE_OR_COL a) bucketname) `_bucketname`) (TOK_SELEXPR (TOK_FUNCTION COLLECT_SET (. (TOK_TABLE_OR_COL a) offset)) `_offsets`)) (TOK_WHERE (NOT (TOK_FUNCTION EWAH_BITMAP_EMPTY (TOK_FUNCTION EWAH_BITMAP_AND (. (TOK_TABLE_OR_COL a) bitmaps) (. (TOK_TABLE_OR_COL b) bitmaps))))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) bucketname))))
    + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME default__src_src1_index__))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL _bucketname) bucketname) (TOK_SELEXPR (TOK_TABLE_OR_COL _offset) offset) (TOK_SELEXPR (TOK_TABLE_OR_COL _bitmaps) bitmaps)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 0)))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME default__src_src2_index__))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL _bucketname) bucketname) (TOK_SELEXPR (TOK_TABLE_OR_COL _offset) offset) (TOK_SELEXPR (TOK_TABLE_OR_COL _bitmaps) bitmaps)) (TOK_WHERE (= (TOK_TABLE_OR_COL value) "val_0")))) b) (AND (= (. (TOK_TABLE_OR_COL a) bucketname) (. (TOK_TABLE_OR_COL b) bucketname)) (= (. (TOK_TABLE_OR_COL a) offset) (. (TOK_TABLE_OR_COL b) offset))))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR
      _COL a) bucketname) _bucketname) (TOK_SELEXPR (TOK_FUNCTION COLLECT_SET (. (TOK_TABLE_OR_COL a) offset)) _offsets)) (TOK_WHERE (NOT (TOK_FUNCTION EWAH_BITMAP_EMPTY (TOK_FUNCTION EWAH_BITMAP_AND (. (TOK_TABLE_OR_COL a) bitmaps) (. (TOK_TABLE_OR_COL b) bitmaps))))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) bucketname))))

      STAGE DEPENDENCIES:
        Stage-1 is a root stage

    Modified: hive/trunk/ql/src/test/results/clientpositive/index_bitmap_auto.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/index_bitmap_auto.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/index_bitmap_auto.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientpositive/index_bitmap_auto.q.out Thu Dec 19 22:55:10 2013
    @@ -150,7 +150,7 @@ POSTHOOK: Lineage: default__src_src2_ind
      POSTHOOK: Lineage: default__src_src2_index__._offset SIMPLE [(src)src.FieldSchema(name:BLOCK__OFFSET__INSIDE__FILE, type:bigint, comment:), ]
      POSTHOOK: Lineage: default__src_src2_index__.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
      ABSTRACT SYNTAX TREE:
    - (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME default__src_src1_index__))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `_bucketname`) bucketname) (TOK_SELEXPR (TOK_TABLE_OR_COL `_offset`) offset) (TOK_SELEXPR (TOK_TABLE_OR_COL `_bitmaps`) bitmaps)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 0)))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME default__src_src2_index__))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `_bucketname`) bucketname) (TOK_SELEXPR (TOK_TABLE_OR_COL `_offset`) offset) (TOK_SELEXPR (TOK_TABLE_OR_COL `_bitmaps`) bitmaps)) (TOK_WHERE (= (TOK_TABLE_OR_COL value) "val_0")))) b) (AND (= (. (TOK_TABLE_OR_COL a) bucketname) (. (TOK_TABLE_OR_COL b) bucketname)) (= (. (TOK_TABLE_OR_COL a) offset) (. (TOK_TABLE_OR_COL b) offset))))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (
      TOK_TABLE_OR_COL a) bucketname) `_bucketname`) (TOK_SELEXPR (TOK_FUNCTION COLLECT_SET (. (TOK_TABLE_OR_COL a) offset)) `_offsets`)) (TOK_WHERE (NOT (TOK_FUNCTION EWAH_BITMAP_EMPTY (TOK_FUNCTION EWAH_BITMAP_AND (. (TOK_TABLE_OR_COL a) bitmaps) (. (TOK_TABLE_OR_COL b) bitmaps))))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) bucketname))))
    + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME default__src_src1_index__))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL _bucketname) bucketname) (TOK_SELEXPR (TOK_TABLE_OR_COL _offset) offset) (TOK_SELEXPR (TOK_TABLE_OR_COL _bitmaps) bitmaps)) (TOK_WHERE (= (TOK_TABLE_OR_COL key) 0)))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME default__src_src2_index__))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL _bucketname) bucketname) (TOK_SELEXPR (TOK_TABLE_OR_COL _offset) offset) (TOK_SELEXPR (TOK_TABLE_OR_COL _bitmaps) bitmaps)) (TOK_WHERE (= (TOK_TABLE_OR_COL value) "val_0")))) b) (AND (= (. (TOK_TABLE_OR_COL a) bucketname) (. (TOK_TABLE_OR_COL b) bucketname)) (= (. (TOK_TABLE_OR_COL a) offset) (. (TOK_TABLE_OR_COL b) offset))))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR
      _COL a) bucketname) _bucketname) (TOK_SELEXPR (TOK_FUNCTION COLLECT_SET (. (TOK_TABLE_OR_COL a) offset)) _offsets)) (TOK_WHERE (NOT (TOK_FUNCTION EWAH_BITMAP_EMPTY (TOK_FUNCTION EWAH_BITMAP_AND (. (TOK_TABLE_OR_COL a) bitmaps) (. (TOK_TABLE_OR_COL b) bitmaps))))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) bucketname))))

      STAGE DEPENDENCIES:
        Stage-1 is a root stage

    Modified: hive/trunk/ql/src/test/results/clientpositive/quote1.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/quote1.q.out?rev=1552451&r1=1552450&r2=1552451&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/quote1.q.out (original)
    +++ hive/trunk/ql/src/test/results/clientpositive/quote1.q.out Thu Dec 19 22:55:10 2013
    @@ -12,7 +12,7 @@ FROM src
      INSERT OVERWRITE TABLE dest1 PARTITION(`table`='2008-04-08') SELECT src.key as `partition`, src.value as `from` WHERE src.key >= 200 and src.key < 300
      POSTHOOK: type: QUERY
      ABSTRACT SYNTAX TREE:
    - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1) (TOK_PARTSPEC (TOK_PARTVAL `table` '2008-04-08')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) `partition`) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) `from`)) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 200) (< (. (TOK_TABLE_OR_COL src) key) 300)))))
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1) (TOK_PARTSPEC (TOK_PARTVAL table '2008-04-08')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) partition) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) from)) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL src) key) 200) (< (. (TOK_TABLE_OR_COL src) key) 300)))))

      STAGE DEPENDENCIES:
        Stage-1 is a root stage
    @@ -116,7 +116,7 @@ POSTHOOK: query: EXPLAIN
      SELECT `int`.`location`, `int`.`type`, `int`.`table` FROM dest1 `int` WHERE `int`.`table` = '2008-04-08'
      POSTHOOK: type: QUERY
      ABSTRACT SYNTAX TREE:
    - (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dest1) `int`)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL `int`) `location`)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL `int`) `type`)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL `int`) `table`))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL `int`) `table`) '2008-04-08'))))
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dest1) int)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL int) location)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL int) type)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL int) table))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL int) table) '2008-04-08'))))

      STAGE DEPENDENCIES:
        Stage-1 is a root stage

    Added: hive/trunk/ql/src/test/results/clientpositive/quotedid_alter.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/quotedid_alter.q.out?rev=1552451&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/quotedid_alter.q.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/quotedid_alter.q.out Thu Dec 19 22:55:10 2013
    @@ -0,0 +1,76 @@
    +PREHOOK: query: create table src_b3(`x+1` string, `!@#$%^&*()_q` string)
    +PREHOOK: type: CREATETABLE
    +POSTHOOK: query: create table src_b3(`x+1` string, `!@#$%^&*()_q` string)
    +POSTHOOK: type: CREATETABLE
    +POSTHOOK: Output: default@src_b3
    +PREHOOK: query: alter table src_b3
    +clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
    +PREHOOK: type: ALTERTABLE_CLUSTER_SORT
    +PREHOOK: Input: default@src_b3
    +PREHOOK: Output: default@src_b3
    +POSTHOOK: query: alter table src_b3
    +clustered by (`!@#$%^&*()_q`) sorted by (`!@#$%^&*()_q`) into 2 buckets
    +POSTHOOK: type: ALTERTABLE_CLUSTER_SORT
    +POSTHOOK: Input: default@src_b3
    +POSTHOOK: Output: default@src_b3
    +PREHOOK: query: -- alter partition
    +create table src_p3(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string)
    +PREHOOK: type: CREATETABLE
    +POSTHOOK: query: -- alter partition
    +create table src_p3(`x+1` string, `y&y` string) partitioned by (`!@#$%^&*()_q` string)
    +POSTHOOK: type: CREATETABLE
    +POSTHOOK: Output: default@src_p3
    +PREHOOK: query: insert overwrite table src_p3 partition(`!@#$%^&*()_q`='a') select * from src
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@src
    +PREHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=a
    +POSTHOOK: query: insert overwrite table src_p3 partition(`!@#$%^&*()_q`='a') select * from src
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@src
    +POSTHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=a
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
    +PREHOOK: query: show partitions src_p3
    +PREHOOK: type: SHOWPARTITIONS
    +POSTHOOK: query: show partitions src_p3
    +POSTHOOK: type: SHOWPARTITIONS
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
    +!@%23$%25%5E&%2A()_q=a
    +PREHOOK: query: alter table src_p3 add if not exists partition(`!@#$%^&*()_q`='b')
    +PREHOOK: type: ALTERTABLE_ADDPARTS
    +PREHOOK: Input: default@src_p3
    +POSTHOOK: query: alter table src_p3 add if not exists partition(`!@#$%^&*()_q`='b')
    +POSTHOOK: type: ALTERTABLE_ADDPARTS
    +POSTHOOK: Input: default@src_p3
    +POSTHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=b
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
    +PREHOOK: query: show partitions src_p3
    +PREHOOK: type: SHOWPARTITIONS
    +POSTHOOK: query: show partitions src_p3
    +POSTHOOK: type: SHOWPARTITIONS
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
    +!@%23$%25%5E&%2A()_q=a
    +!@%23$%25%5E&%2A()_q=b
    +PREHOOK: query: alter table src_p3 partition(`!@#$%^&*()_q`='b') rename to partition(`!@#$%^&*()_q`='c')
    +PREHOOK: type: ALTERTABLE_RENAMEPART
    +PREHOOK: Input: default@src_p3
    +PREHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=b
    +POSTHOOK: query: alter table src_p3 partition(`!@#$%^&*()_q`='b') rename to partition(`!@#$%^&*()_q`='c')
    +POSTHOOK: type: ALTERTABLE_RENAMEPART
    +POSTHOOK: Input: default@src_p3
    +POSTHOOK: Input: default@src_p3@!@%23$%25%5E&%2A()_q=b
    +POSTHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=b
    +POSTHOOK: Output: default@src_p3@!@%23$%25%5E&%2A()_q=c
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
    +PREHOOK: query: show partitions src_p3
    +PREHOOK: type: SHOWPARTITIONS
    +POSTHOOK: query: show partitions src_p3
    +POSTHOOK: type: SHOWPARTITIONS
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: src_p3 PARTITION(!@#$%^&*()_q=a).y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
    +!@%23$%25%5E&%2A()_q=a
    +!@%23$%25%5E&%2A()_q=c
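    Note how the partition spec is stored: characters such as #, %, ^ and *
    are percent-encoded in the partition name while the rest of the
    punctuation survives verbatim. A sketch reproducing the encoding seen
    above (illustrative only; Hive's real path escaping lives in its
    FileUtils and covers more characters than listed here):

        public class PartitionNameSketch {
          static String escape(String s) {
            StringBuilder sb = new StringBuilder();
            for (char c : s.toCharArray()) {
              // Encode only the characters observed encoded in the output above.
              if (c == '#' || c == '%' || c == '^' || c == '*') {
                sb.append('%').append(String.format("%02X", (int) c));
              } else {
                sb.append(c);
              }
            }
            return sb.toString();
          }

          public static void main(String[] args) {
            System.out.println(escape("!@#$%^&*()_q") + "=a"); // !@%23$%25%5E&%2A()_q=a
          }
        }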

    Added: hive/trunk/ql/src/test/results/clientpositive/quotedid_basic.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/quotedid_basic.q.out?rev=1552451&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/quotedid_basic.q.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/quotedid_basic.q.out Thu Dec 19 22:55:10 2013
    @@ -0,0 +1,599 @@
    +PREHOOK: query: -- basic
    +create table t1(`x+1` string, `y&y` string, `!@#$%^&*()_q` string)
    +PREHOOK: type: CREATETABLE
    +POSTHOOK: query: -- basic
    +create table t1(`x+1` string, `y&y` string, `!@#$%^&*()_q` string)
    +POSTHOOK: type: CREATETABLE
    +POSTHOOK: Output: default@t1
    +PREHOOK: query: describe t1
    +PREHOOK: type: DESCTABLE
    +POSTHOOK: query: describe t1
    +POSTHOOK: type: DESCTABLE
    +x+1 string None
    +y&y string None
    +!@#$%^&*()_q string None
    +PREHOOK: query: select `x+1`, `y&y`, `!@#$%^&*()_q` from t1
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@t1
    +#### A masked pattern was here ####
    +POSTHOOK: query: select `x+1`, `y&y`, `!@#$%^&*()_q` from t1
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@t1
    +#### A masked pattern was here ####
    +PREHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1
    +PREHOOK: type: QUERY
    +POSTHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_q)))))
    +
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-0 is a root stage
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Alias -> Map Operator Tree:
    + t1
    + TableScan
    + alias: t1
    + Select Operator
    + expressions:
    + expr: x+1
    + type: string
    + expr: y&y
    + type: string
    + expr: !@#$%^&*()_q
    + type: string
    + outputColumnNames: _col0, _col1, _col2
    + File Output Operator
    + compressed: false
    + GlobalTableId: 0
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    +
    +PREHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1'
    +PREHOOK: type: QUERY
    +POSTHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1'
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_q))) (TOK_WHERE (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1'))))
    +
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-0 is a root stage
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Alias -> Map Operator Tree:
    + t1
    + TableScan
    + alias: t1
    + Filter Operator
    + predicate:
    + expr: (!@#$%^&*()_q = '1')
    + type: boolean
    + Select Operator
    + expressions:
    + expr: x+1
    + type: string
    + expr: y&y
    + type: string
    + expr: !@#$%^&*()_q
    + type: string
    + outputColumnNames: _col0, _col1, _col2
    + File Output Operator
    + compressed: false
    + GlobalTableId: 0
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    +
    +PREHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1'
    +PREHOOK: type: QUERY
    +POSTHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q` from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1'
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_q))) (TOK_WHERE (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1')) (TOK_GROUPBY (TOK_TABLE_OR_COL x+1) (TOK_TABLE_OR_COL y&y) (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_HAVING (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1'))))
    +
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-0 is a root stage
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Alias -> Map Operator Tree:
    + t1
    + TableScan
    + alias: t1
    + Filter Operator
    + predicate:
    + expr: (!@#$%^&*()_q = '1')
    + type: boolean
    + Select Operator
    + expressions:
    + expr: x+1
    + type: string
    + expr: y&y
    + type: string
    + expr: !@#$%^&*()_q
    + type: string
    + outputColumnNames: x+1, y&y, !@#$%^&*()_q
    + Group By Operator
    + bucketGroup: false
    + keys:
    + expr: x+1
    + type: string
    + expr: y&y
    + type: string
    + expr: !@#$%^&*()_q
    + type: string
    + mode: hash
    + outputColumnNames: _col0, _col1, _col2
    + Reduce Output Operator
    + key expressions:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + sort order: +++
    + Map-reduce partition columns:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + tag: -1
    + Reduce Operator Tree:
    + Group By Operator
    + bucketGroup: false
    + keys:
    + expr: KEY._col0
    + type: string
    + expr: KEY._col1
    + type: string
    + expr: KEY._col2
    + type: string
    + mode: mergepartial
    + outputColumnNames: _col0, _col1, _col2
    + Select Operator
    + expressions:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + outputColumnNames: _col0, _col1, _col2
    + File Output Operator
    + compressed: false
    + GlobalTableId: 0
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    +
    +PREHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
    +from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1'
    +PREHOOK: type: QUERY
    +POSTHOOK: query: explain select `x+1`, `y&y`, `!@#$%^&*()_q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
    +from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&y`, `!@#$%^&*()_q` having `!@#$%^&*()_q` = '1'
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_SELEXPR (TOK_FUNCTION rank (TOK_WINDOWSPEC (TOK_PARTITIONINGSPEC (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL y&y)))))))) (TOK_WHERE (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1')) (TOK_GROUPBY (TOK_TABLE_OR_COL x+1) (TOK_TABLE_OR_COL y&y) (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_HAVING (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1'))))
    +
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-2 depends on stages: Stage-1
    + Stage-0 is a root stage
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Alias -> Map Operator Tree:
    + t1
    + TableScan
    + alias: t1
    + Filter Operator
    + predicate:
    + expr: (!@#$%^&*()_q = '1')
    + type: boolean
    + Select Operator
    + expressions:
    + expr: x+1
    + type: string
    + expr: y&y
    + type: string
    + expr: !@#$%^&*()_q
    + type: string
    + outputColumnNames: x+1, y&y, !@#$%^&*()_q
    + Group By Operator
    + bucketGroup: false
    + keys:
    + expr: x+1
    + type: string
    + expr: y&y
    + type: string
    + expr: !@#$%^&*()_q
    + type: string
    + mode: hash
    + outputColumnNames: _col0, _col1, _col2
    + Reduce Output Operator
    + key expressions:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + sort order: +++
    + Map-reduce partition columns:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + tag: -1
    + Reduce Operator Tree:
    + Group By Operator
    + bucketGroup: false
    + keys:
    + expr: KEY._col0
    + type: string
    + expr: KEY._col1
    + type: string
    + expr: KEY._col2
    + type: string
    + mode: mergepartial
    + outputColumnNames: _col0, _col1, _col2
    + File Output Operator
    + compressed: false
    + GlobalTableId: 0
    + table:
    + input format: org.apache.hadoop.mapred.SequenceFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
    +
    + Stage: Stage-2
    + Map Reduce
    + Alias -> Map Operator Tree:
    +#### A masked pattern was here ####
    + TableScan
    + Reduce Output Operator
    + key expressions:
    + expr: _col2
    + type: string
    + expr: _col1
    + type: string
    + sort order: ++
    + Map-reduce partition columns:
    + expr: _col2
    + type: string
    + tag: -1
    + value expressions:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + Reduce Operator Tree:
    + Extract
    + PTF Operator
    + Select Operator
    + expressions:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + expr: _wcol0
    + type: int
    + outputColumnNames: _col0, _col1, _col2, _col3
    + File Output Operator
    + compressed: false
    + GlobalTableId: 0
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    +
    +PREHOOK: query: -- case insensitive
    +explain select `X+1`, `Y&y`, `!@#$%^&*()_Q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
    +from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&Y`, `!@#$%^&*()_q` having `!@#$%^&*()_Q` = '1'
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- case insensitive
    +explain select `X+1`, `Y&y`, `!@#$%^&*()_Q`, rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
    +from t1 where `!@#$%^&*()_q` = '1' group by `x+1`, `y&Y`, `!@#$%^&*()_q` having `!@#$%^&*()_Q` = '1'
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL X+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL Y&y)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^&*()_Q)) (TOK_SELEXPR (TOK_FUNCTION rank (TOK_WINDOWSPEC (TOK_PARTITIONINGSPEC (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL y&y)))))))) (TOK_WHERE (= (TOK_TABLE_OR_COL !@#$%^&*()_q) '1')) (TOK_GROUPBY (TOK_TABLE_OR_COL x+1) (TOK_TABLE_OR_COL y&Y) (TOK_TABLE_OR_COL !@#$%^&*()_q)) (TOK_HAVING (= (TOK_TABLE_OR_COL !@#$%^&*()_Q) '1'))))
    +
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-2 depends on stages: Stage-1
    + Stage-0 is a root stage
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Alias -> Map Operator Tree:
    + t1
    + TableScan
    + alias: t1
    + Filter Operator
    + predicate:
    + expr: (!@#$%^&*()_q = '1')
    + type: boolean
    + Select Operator
    + expressions:
    + expr: x+1
    + type: string
    + expr: y&y
    + type: string
    + expr: !@#$%^&*()_q
    + type: string
    + outputColumnNames: x+1, y&y, !@#$%^&*()_q
    + Group By Operator
    + bucketGroup: false
    + keys:
    + expr: x+1
    + type: string
    + expr: y&y
    + type: string
    + expr: !@#$%^&*()_q
    + type: string
    + mode: hash
    + outputColumnNames: _col0, _col1, _col2
    + Reduce Output Operator
    + key expressions:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + sort order: +++
    + Map-reduce partition columns:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + tag: -1
    + Reduce Operator Tree:
    + Group By Operator
    + bucketGroup: false
    + keys:
    + expr: KEY._col0
    + type: string
    + expr: KEY._col1
    + type: string
    + expr: KEY._col2
    + type: string
    + mode: mergepartial
    + outputColumnNames: _col0, _col1, _col2
    + File Output Operator
    + compressed: false
    + GlobalTableId: 0
    + table:
    + input format: org.apache.hadoop.mapred.SequenceFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
    +
    + Stage: Stage-2
    + Map Reduce
    + Alias -> Map Operator Tree:
    +#### A masked pattern was here ####
    + TableScan
    + Reduce Output Operator
    + key expressions:
    + expr: _col2
    + type: string
    + expr: _col1
    + type: string
    + sort order: ++
    + Map-reduce partition columns:
    + expr: _col2
    + type: string
    + tag: -1
    + value expressions:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + Reduce Operator Tree:
    + Extract
    + PTF Operator
    + Select Operator
    + expressions:
    + expr: _col0
    + type: string
    + expr: _col1
    + type: string
    + expr: _col2
    + type: string
    + expr: _wcol0
    + type: int
    + outputColumnNames: _col0, _col1, _col2, _col3
    + File Output Operator
    + compressed: false
    + GlobalTableId: 0
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    +
    +PREHOOK: query: -- escaped back ticks
    +create table t4(`x+1``` string, `y&y` string)
    +PREHOOK: type: CREATETABLE
    +POSTHOOK: query: -- escaped back ticks
    +create table t4(`x+1``` string, `y&y` string)
    +POSTHOOK: type: CREATETABLE
    +POSTHOOK: Output: default@t4
    +PREHOOK: query: describe t4
    +PREHOOK: type: DESCTABLE
    +POSTHOOK: query: describe t4
    +POSTHOOK: type: DESCTABLE
    +x+1` string None
    +y&y string None
    +PREHOOK: query: insert into table t4 select * from src
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@src
    +PREHOOK: Output: default@t4
    +POSTHOOK: query: insert into table t4 select * from src
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@src
    +POSTHOOK: Output: default@t4
    +POSTHOOK: Lineage: t4.x+1` SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: t4.y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
    +PREHOOK: query: select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
    +from t4 where `x+1``` = '10' group by `x+1```, `y&y` having `x+1``` = '10'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@t4
    +#### A masked pattern was here ####
    +POSTHOOK: query: select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
    +from t4 where `x+1``` = '10' group by `x+1```, `y&y` having `x+1``` = '10'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@t4
    +#### A masked pattern was here ####
    +POSTHOOK: Lineage: t4.x+1` SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: t4.y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
    +10 val_10 1
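The t4 results above also pin down the escaping rule for back ticks themselves: inside a quoted identifier, a doubled back tick stands for one literal back tick, so `x+1``` declares a column actually named x+1` — exactly what the DESCRIBE output shows. A minimal sketch of the rule (same assumed quoted-identifier mode as above; comments editorial):

    -- `` inside back ticks denotes a single literal back tick
    create table t4(`x+1``` string, `y&y` string);
    describe t4;               -- prints: x+1` string
    select `x+1``` from t4;    -- references the column named x+1`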
    +PREHOOK: query: -- view
    +create view v1 as
    +select `x+1```, `y&y`
    +from t4 where `x+1``` < '200'
    +PREHOOK: type: CREATEVIEW
    +POSTHOOK: query: -- view
    +create view v1 as
    +select `x+1```, `y&y`
    +from t4 where `x+1``` < '200'
    +POSTHOOK: type: CREATEVIEW
    +POSTHOOK: Output: default@v1
    +POSTHOOK: Lineage: t4.x+1` SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: t4.y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
    +PREHOOK: query: select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
    +from v1
    +group by `x+1```, `y&y`
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@t4
    +PREHOOK: Input: default@v1
    +#### A masked pattern was here ####
    +POSTHOOK: query: select `x+1```, `y&y`, rank() over(partition by `x+1``` order by `y&y`)
    +from v1
    +group by `x+1```, `y&y`
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@t4
    +POSTHOOK: Input: default@v1
    +#### A masked pattern was here ####
    +POSTHOOK: Lineage: t4.x+1` SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: t4.y&y SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
    +0 val_0 1
    +10 val_10 1
    +100 val_100 1
    +103 val_103 1
    +104 val_104 1
    +105 val_105 1
    +11 val_11 1
    +111 val_111 1
    +113 val_113 1
    +114 val_114 1
    +116 val_116 1
    +118 val_118 1
    +119 val_119 1
    +12 val_12 1
    +120 val_120 1
    +125 val_125 1
    +126 val_126 1
    +128 val_128 1
    +129 val_129 1
    +131 val_131 1
    +133 val_133 1
    +134 val_134 1
    +136 val_136 1
    +137 val_137 1
    +138 val_138 1
    +143 val_143 1
    +145 val_145 1
    +146 val_146 1
    +149 val_149 1
    +15 val_15 1
    +150 val_150 1
    +152 val_152 1
    +153 val_153 1
    +155 val_155 1
    +156 val_156 1
    +157 val_157 1
    +158 val_158 1
    +160 val_160 1
    +162 val_162 1
    +163 val_163 1
    +164 val_164 1
    +165 val_165 1
    +166 val_166 1
    +167 val_167 1
    +168 val_168 1
    +169 val_169 1
    +17 val_17 1
    +170 val_170 1
    +172 val_172 1
    +174 val_174 1
    +175 val_175 1
    +176 val_176 1
    +177 val_177 1
    +178 val_178 1
    +179 val_179 1
    +18 val_18 1
    +180 val_180 1
    +181 val_181 1
    +183 val_183 1
    +186 val_186 1
    +187 val_187 1
    +189 val_189 1
    +19 val_19 1
    +190 val_190 1
    +191 val_191 1
    +192 val_192 1
    +193 val_193 1
    +194 val_194 1
    +195 val_195 1
    +196 val_196 1
    +197 val_197 1
    +199 val_199 1
    +2 val_2 1
    +20 val_20 1

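Taken together, the two new .q.out files pin down what the patch guarantees: quoted identifiers work in SELECT, WHERE, GROUP BY, HAVING, windowing clauses, partition DDL, and view definitions, and they resolve case-insensitively. A condensed sketch of the case-insensitivity and view coverage from quotedid_basic (distilled from the output above, not the verbatim .q file):

    -- `X+1`, `Y&y`, `!@#$%^&*()_Q` resolve to the same columns
    -- as their differently-cased spellings
    select `X+1`, `Y&y`, `!@#$%^&*()_Q`,
           rank() over(partition by `!@#$%^&*()_q` order by `y&y`)
    from t1
    where `!@#$%^&*()_q` = '1'
    group by `x+1`, `y&Y`, `!@#$%^&*()_q`
    having `!@#$%^&*()_Q` = '1';

    -- quoted (and escaped) identifiers pass through views unchanged
    create view v1 as
      select `x+1```, `y&y` from t4 where `x+1``` < '200';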