FAQ
Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g?rev=1448762&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g Thu Feb 21 18:28:29 2013
@@ -0,0 +1,504 @@
+/**
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*/
+parser grammar IdentifiersParser;
+
+options
+{
+output=AST;
+ASTLabelType=CommonTree;
+backtrack=false;
+k=3;
+}
+
+@members {
+ @Override
+ public Object recoverFromMismatchedSet(IntStream input,
+ RecognitionException re, BitSet follow) throws RecognitionException {
+ throw re;
+ }
+ @Override
+ public void displayRecognitionError(String[] tokenNames,
+ RecognitionException e) {
+ gParent.errors.add(new ParseError(gParent, e, tokenNames));
+ }
+}
+
+@rulecatch {
+catch (RecognitionException e) {
+ throw e;
+}
+}
+
+//-----------------------------------------------------------------------------------
+
+// group by a,b
+groupByClause
+@init { gParent.msgs.push("group by clause"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_GROUP KW_BY
+ groupByExpression
+ ( COMMA groupByExpression )*
+ ((rollup=KW_WITH KW_ROLLUP) | (cube=KW_WITH KW_CUBE)) ?
+ (sets=KW_GROUPING KW_SETS
+ LPAREN groupingSetExpression ( COMMA groupingSetExpression)* RPAREN ) ?
+ -> {rollup != null}? ^(TOK_ROLLUP_GROUPBY groupByExpression+)
+ -> {cube != null}? ^(TOK_CUBE_GROUPBY groupByExpression+)
+ -> {sets != null}? ^(TOK_GROUPING_SETS groupByExpression+ groupingSetExpression+)
+ -> ^(TOK_GROUPBY groupByExpression+)
+ ;
+
+groupingSetExpression
+@init {gParent.msgs.push("grouping set expression"); }
+@after {gParent.msgs.pop(); }
+ :
+ groupByExpression
+ -> ^(TOK_GROUPING_SETS_EXPRESSION groupByExpression)
+ |
+ LPAREN
+ groupByExpression (COMMA groupByExpression)*
+ RPAREN
+ -> ^(TOK_GROUPING_SETS_EXPRESSION groupByExpression+)
+ |
+ LPAREN
+ RPAREN
+ -> ^(TOK_GROUPING_SETS_EXPRESSION)
+ ;
+
+
+groupByExpression
+@init { gParent.msgs.push("group by expression"); }
+@after { gParent.msgs.pop(); }
+ :
+ expression
+ ;
+
+havingClause
+@init { gParent.msgs.push("having clause"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_HAVING havingCondition -> ^(TOK_HAVING havingCondition)
+ ;
+
+havingCondition
+@init { gParent.msgs.push("having condition"); }
+@after { gParent.msgs.pop(); }
+ :
+ expression
+ ;
+
+// order by a,b
+orderByClause
+@init { gParent.msgs.push("order by clause"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_ORDER KW_BY
+ LPAREN columnRefOrder
+ ( COMMA columnRefOrder)* RPAREN -> ^(TOK_ORDERBY columnRefOrder+)
+ |
+ KW_ORDER KW_BY
+ columnRefOrder
+ ( COMMA columnRefOrder)* -> ^(TOK_ORDERBY columnRefOrder+)
+ ;
+
+clusterByClause
+@init { gParent.msgs.push("cluster by clause"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_CLUSTER KW_BY
+ LPAREN expression (COMMA expression)* RPAREN -> ^(TOK_CLUSTERBY expression+)
+ |
+ KW_CLUSTER KW_BY
+ expression
+ ( COMMA expression )* -> ^(TOK_CLUSTERBY expression+)
+ ;
+
+distributeByClause
+@init { gParent.msgs.push("distribute by clause"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_DISTRIBUTE KW_BY
+ LPAREN expression (COMMA expression)* RPAREN -> ^(TOK_DISTRIBUTEBY expression+)
+ |
+ KW_DISTRIBUTE KW_BY
+ expression (COMMA expression)* -> ^(TOK_DISTRIBUTEBY expression+)
+ ;
+
+sortByClause
+@init { gParent.msgs.push("sort by clause"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_SORT KW_BY
+ LPAREN columnRefOrder
+ ( COMMA columnRefOrder)* RPAREN -> ^(TOK_SORTBY columnRefOrder+)
+ |
+ KW_SORT KW_BY
+ columnRefOrder
+ ( COMMA columnRefOrder)* -> ^(TOK_SORTBY columnRefOrder+)
+ ;
+
+// fun(par1, par2, par3)
+function
+@init { gParent.msgs.push("function specification"); }
+@after { gParent.msgs.pop(); }
+ :
+ functionName
+ LPAREN
+ (
+ (star=STAR)
+ | (dist=KW_DISTINCT)? (expression (COMMA expression)*)?
+ )
+ RPAREN -> {$star != null}? ^(TOK_FUNCTIONSTAR functionName)
+ -> {$dist == null}? ^(TOK_FUNCTION functionName (expression+)?)
+ -> ^(TOK_FUNCTIONDI functionName (expression+)?)
+ ;
+
+functionName
+@init { gParent.msgs.push("function name"); }
+@after { gParent.msgs.pop(); }
+ : // Keyword IF is also a function name
+ KW_IF | KW_ARRAY | KW_MAP | KW_STRUCT | KW_UNIONTYPE | identifier
+ ;
+
+castExpression
+@init { gParent.msgs.push("cast expression"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_CAST
+ LPAREN
+ expression
+ KW_AS
+ primitiveType
+ RPAREN -> ^(TOK_FUNCTION primitiveType expression)
+ ;
+
+caseExpression
+@init { gParent.msgs.push("case expression"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_CASE expression
+ (KW_WHEN expression KW_THEN expression)+
+ (KW_ELSE expression)?
+ KW_END -> ^(TOK_FUNCTION KW_CASE expression*)
+ ;
+
+whenExpression
+@init { gParent.msgs.push("case expression"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_CASE
+ ( KW_WHEN expression KW_THEN expression)+
+ (KW_ELSE expression)?
+ KW_END -> ^(TOK_FUNCTION KW_WHEN expression*)
+ ;
+
+constant
+@init { gParent.msgs.push("constant"); }
+@after { gParent.msgs.pop(); }
+ :
+ Number
+ | StringLiteral
+ | stringLiteralSequence
+ | BigintLiteral
+ | SmallintLiteral
+ | TinyintLiteral
+ | charSetStringLiteral
+ | booleanValue
+ ;
+
+stringLiteralSequence
+ :
+ StringLiteral StringLiteral+ -> ^(TOK_STRINGLITERALSEQUENCE StringLiteral StringLiteral+)
+ ;
+
+charSetStringLiteral
+@init { gParent.msgs.push("character string literal"); }
+@after { gParent.msgs.pop(); }
+ :
+ csName=CharSetName csLiteral=CharSetLiteral -> ^(TOK_CHARSETLITERAL $csName $csLiteral)
+ ;
+
+expression
+@init { gParent.msgs.push("expression specification"); }
+@after { gParent.msgs.pop(); }
+ :
+ precedenceOrExpression
+ ;
+
+atomExpression
+ :
+ KW_NULL -> TOK_NULL
+ | constant
+ | function
+ | castExpression
+ | caseExpression
+ | whenExpression
+ | tableOrColumn
+ | LPAREN! expression RPAREN!
+ ;
+
+
+precedenceFieldExpression
+ :
+ atomExpression ((LSQUARE^ expression RSQUARE!) | (DOT^ identifier))*
+ ;
+
+precedenceUnaryOperator
+ :
+ PLUS | MINUS | TILDE
+ ;
+
+nullCondition
+ :
+ KW_NULL -> ^(TOK_ISNULL)
+ | KW_NOT KW_NULL -> ^(TOK_ISNOTNULL)
+ ;
+
+precedenceUnaryPrefixExpression
+ :
+ (precedenceUnaryOperator^)* precedenceFieldExpression
+ ;
+
+precedenceUnarySuffixExpression
+ : precedenceUnaryPrefixExpression (a=KW_IS nullCondition)?
+ -> {$a != null}? ^(TOK_FUNCTION nullCondition precedenceUnaryPrefixExpression)
+ -> precedenceUnaryPrefixExpression
+ ;
+
+
+precedenceBitwiseXorOperator
+ :
+ BITWISEXOR
+ ;
+
+precedenceBitwiseXorExpression
+ :
+ precedenceUnarySuffixExpression (precedenceBitwiseXorOperator^ precedenceUnarySuffixExpression)*
+ ;
+
+
+precedenceStarOperator
+ :
+ STAR | DIVIDE | MOD | DIV
+ ;
+
+precedenceStarExpression
+ :
+ precedenceBitwiseXorExpression (precedenceStarOperator^ precedenceBitwiseXorExpression)*
+ ;
+
+
+precedencePlusOperator
+ :
+ PLUS | MINUS
+ ;
+
+precedencePlusExpression
+ :
+ precedenceStarExpression (precedencePlusOperator^ precedenceStarExpression)*
+ ;
+
+
+precedenceAmpersandOperator
+ :
+ AMPERSAND
+ ;
+
+precedenceAmpersandExpression
+ :
+ precedencePlusExpression (precedenceAmpersandOperator^ precedencePlusExpression)*
+ ;
+
+
+precedenceBitwiseOrOperator
+ :
+ BITWISEOR
+ ;
+
+precedenceBitwiseOrExpression
+ :
+ precedenceAmpersandExpression (precedenceBitwiseOrOperator^ precedenceAmpersandExpression)*
+ ;
+
+
+// Equal operators supporting NOT prefix
+precedenceEqualNegatableOperator
+ :
+ KW_LIKE | KW_RLIKE | KW_REGEXP
+ ;
+
+precedenceEqualOperator
+ :
+ precedenceEqualNegatableOperator | EQUAL | EQUAL_NS | NOTEQUAL | LESSTHANOREQUALTO | LESSTHAN | GREATERTHANOREQUALTO | GREATERTHAN
+ ;
+
+precedenceEqualExpression
+ :
+ (left=precedenceBitwiseOrExpression -> $left)
+ (
+ (KW_NOT precedenceEqualNegatableOperator notExpr=precedenceBitwiseOrExpression)
+ -> ^(KW_NOT ^(precedenceEqualNegatableOperator $precedenceEqualExpression $notExpr))
+ | (precedenceEqualOperator equalExpr=precedenceBitwiseOrExpression)
+ -> ^(precedenceEqualOperator $precedenceEqualExpression $equalExpr)
+ | (KW_NOT KW_IN expressions)
+ -> ^(KW_NOT ^(TOK_FUNCTION KW_IN $precedenceEqualExpression expressions))
+ | (KW_IN expressions)
+ -> ^(TOK_FUNCTION KW_IN $precedenceEqualExpression expressions)
+ | ( KW_NOT KW_BETWEEN (min=precedenceBitwiseOrExpression) KW_AND (max=precedenceBitwiseOrExpression) )
+ -> ^(TOK_FUNCTION Identifier["between"] KW_TRUE $left $min $max)
+ | ( KW_BETWEEN (min=precedenceBitwiseOrExpression) KW_AND (max=precedenceBitwiseOrExpression) )
+ -> ^(TOK_FUNCTION Identifier["between"] KW_FALSE $left $min $max)
+ )*
+ ;
+
+expressions
+ :
+ LPAREN expression (COMMA expression)* RPAREN -> expression*
+ ;
+
+precedenceNotOperator
+ :
+ KW_NOT
+ ;
+
+precedenceNotExpression
+ :
+ (precedenceNotOperator^)* precedenceEqualExpression
+ ;
+
+
+precedenceAndOperator
+ :
+ KW_AND
+ ;
+
+precedenceAndExpression
+ :
+ precedenceNotExpression (precedenceAndOperator^ precedenceNotExpression)*
+ ;
+
+
+precedenceOrOperator
+ :
+ KW_OR
+ ;
+
+precedenceOrExpression
+ :
+ precedenceAndExpression (precedenceOrOperator^ precedenceAndExpression)*
+ ;
+
+
+booleanValue
+ :
+ KW_TRUE^ | KW_FALSE^
+ ;
+
+tableOrPartition
+ :
+ tableName partitionSpec? -> ^(TOK_TAB tableName partitionSpec?)
+ ;
+
+partitionSpec
+ :
+ KW_PARTITION
+ LPAREN partitionVal (COMMA partitionVal )* RPAREN -> ^(TOK_PARTSPEC partitionVal +)
+ ;
+
+partitionVal
+ :
+ identifier (EQUAL constant)? -> ^(TOK_PARTVAL identifier constant?)
+ ;
+
+dropPartitionSpec
+ :
+ KW_PARTITION
+ LPAREN dropPartitionVal (COMMA dropPartitionVal )* RPAREN -> ^(TOK_PARTSPEC dropPartitionVal +)
+ ;
+
+dropPartitionVal
+ :
+ identifier dropPartitionOperator constant -> ^(TOK_PARTVAL identifier dropPartitionOperator constant)
+ ;
+
+dropPartitionOperator
+ :
+ EQUAL | NOTEQUAL | LESSTHANOREQUALTO | LESSTHAN | GREATERTHANOREQUALTO | GREATERTHAN
+ ;
+
+sysFuncNames
+ :
+ KW_AND
+ | KW_OR
+ | KW_NOT
+ | KW_LIKE
+ | KW_IF
+ | KW_CASE
+ | KW_WHEN
+ | KW_TINYINT
+ | KW_SMALLINT
+ | KW_INT
+ | KW_BIGINT
+ | KW_FLOAT
+ | KW_DOUBLE
+ | KW_BOOLEAN
+ | KW_STRING
+ | KW_BINARY
+ | KW_ARRAY
+ | KW_MAP
+ | KW_STRUCT
+ | KW_UNIONTYPE
+ | EQUAL
+ | EQUAL_NS
+ | NOTEQUAL
+ | LESSTHANOREQUALTO
+ | LESSTHAN
+ | GREATERTHANOREQUALTO
+ | GREATERTHAN
+ | DIVIDE
+ | PLUS
+ | MINUS
+ | STAR
+ | MOD
+ | DIV
+ | AMPERSAND
+ | TILDE
+ | BITWISEOR
+ | BITWISEXOR
+ | KW_RLIKE
+ | KW_REGEXP
+ | KW_IN
+ | KW_BETWEEN
+ ;
+
+descFuncNames
+ :
+ sysFuncNames
+ | StringLiteral
+ | identifier
+ ;
+
+identifier
+ :
+ Identifier
+ | nonReserved -> Identifier[$nonReserved.text]
+ ;
+
+nonReserved
+ :
+    KW_TRUE | KW_FALSE | KW_ALL | KW_AND | KW_OR | KW_NOT | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_FROM | KW_AS | KW_DISTINCT | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_PRESERVE | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_COLUMN | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_INNER
+ ;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java Thu Feb 21 18:28:29 2013
@@ -19,15 +19,8 @@
  package org.apache.hadoop.hive.ql.parse;

  import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-
  import org.antlr.runtime.ANTLRStringStream;
-import org.antlr.runtime.BitSet;
  import org.antlr.runtime.CharStream;
-import org.antlr.runtime.FailedPredicateException;
-import org.antlr.runtime.IntStream;
-import org.antlr.runtime.MismatchedTokenException;
  import org.antlr.runtime.NoViableAltException;
  import org.antlr.runtime.RecognitionException;
  import org.antlr.runtime.Token;
@@ -48,170 +41,6 @@ public class ParseDriver {

    private static final Log LOG = LogFactory.getLog("hive.ql.parse.ParseDriver");

- private static HashMap<String, String> xlateMap;
- static {
- xlateMap = new HashMap<String, String>();
-
- // Keywords
- xlateMap.put("KW_TRUE", "TRUE");
- xlateMap.put("KW_FALSE", "FALSE");
- xlateMap.put("KW_ALL", "ALL");
- xlateMap.put("KW_AND", "AND");
- xlateMap.put("KW_OR", "OR");
- xlateMap.put("KW_NOT", "NOT");
- xlateMap.put("KW_LIKE", "LIKE");
-
- xlateMap.put("KW_ASC", "ASC");
- xlateMap.put("KW_DESC", "DESC");
- xlateMap.put("KW_ORDER", "ORDER");
- xlateMap.put("KW_BY", "BY");
- xlateMap.put("KW_GROUP", "GROUP");
- xlateMap.put("KW_WHERE", "WHERE");
- xlateMap.put("KW_FROM", "FROM");
- xlateMap.put("KW_AS", "AS");
- xlateMap.put("KW_SELECT", "SELECT");
- xlateMap.put("KW_DISTINCT", "DISTINCT");
- xlateMap.put("KW_INSERT", "INSERT");
- xlateMap.put("KW_OVERWRITE", "OVERWRITE");
- xlateMap.put("KW_OUTER", "OUTER");
- xlateMap.put("KW_JOIN", "JOIN");
- xlateMap.put("KW_LEFT", "LEFT");
- xlateMap.put("KW_RIGHT", "RIGHT");
- xlateMap.put("KW_FULL", "FULL");
- xlateMap.put("KW_ON", "ON");
- xlateMap.put("KW_PARTITION", "PARTITION");
- xlateMap.put("KW_PARTITIONS", "PARTITIONS");
- xlateMap.put("KW_TABLE", "TABLE");
- xlateMap.put("KW_TABLES", "TABLES");
- xlateMap.put("KW_TBLPROPERTIES", "TBLPROPERTIES");
- xlateMap.put("KW_SHOW", "SHOW");
- xlateMap.put("KW_MSCK", "MSCK");
- xlateMap.put("KW_DIRECTORY", "DIRECTORY");
- xlateMap.put("KW_LOCAL", "LOCAL");
- xlateMap.put("KW_TRANSFORM", "TRANSFORM");
- xlateMap.put("KW_USING", "USING");
- xlateMap.put("KW_CLUSTER", "CLUSTER");
- xlateMap.put("KW_DISTRIBUTE", "DISTRIBUTE");
- xlateMap.put("KW_SORT", "SORT");
- xlateMap.put("KW_UNION", "UNION");
- xlateMap.put("KW_LOAD", "LOAD");
- xlateMap.put("KW_DATA", "DATA");
- xlateMap.put("KW_INPATH", "INPATH");
- xlateMap.put("KW_IS", "IS");
- xlateMap.put("KW_NULL", "NULL");
- xlateMap.put("KW_CREATE", "CREATE");
- xlateMap.put("KW_EXTERNAL", "EXTERNAL");
- xlateMap.put("KW_ALTER", "ALTER");
- xlateMap.put("KW_DESCRIBE", "DESCRIBE");
- xlateMap.put("KW_DROP", "DROP");
- xlateMap.put("KW_REANME", "REANME");
- xlateMap.put("KW_TO", "TO");
- xlateMap.put("KW_COMMENT", "COMMENT");
- xlateMap.put("KW_BOOLEAN", "BOOLEAN");
- xlateMap.put("KW_TINYINT", "TINYINT");
- xlateMap.put("KW_SMALLINT", "SMALLINT");
- xlateMap.put("KW_INT", "INT");
- xlateMap.put("KW_BIGINT", "BIGINT");
- xlateMap.put("KW_FLOAT", "FLOAT");
- xlateMap.put("KW_DOUBLE", "DOUBLE");
- xlateMap.put("KW_DATE", "DATE");
- xlateMap.put("KW_DATETIME", "DATETIME");
- xlateMap.put("KW_TIMESTAMP", "TIMESTAMP");
- xlateMap.put("KW_STRING", "STRING");
- xlateMap.put("KW_BINARY", "BINARY");
- xlateMap.put("KW_ARRAY", "ARRAY");
- xlateMap.put("KW_MAP", "MAP");
- xlateMap.put("KW_REDUCE", "REDUCE");
- xlateMap.put("KW_PARTITIONED", "PARTITIONED");
- xlateMap.put("KW_CLUSTERED", "CLUSTERED");
- xlateMap.put("KW_SORTED", "SORTED");
- xlateMap.put("KW_INTO", "INTO");
- xlateMap.put("KW_BUCKETS", "BUCKETS");
- xlateMap.put("KW_ROW", "ROW");
- xlateMap.put("KW_FORMAT", "FORMAT");
- xlateMap.put("KW_DELIMITED", "DELIMITED");
- xlateMap.put("KW_FIELDS", "FIELDS");
- xlateMap.put("KW_TERMINATED", "TERMINATED");
- xlateMap.put("KW_COLLECTION", "COLLECTION");
- xlateMap.put("KW_ITEMS", "ITEMS");
- xlateMap.put("KW_KEYS", "KEYS");
- xlateMap.put("KW_KEY_TYPE", "$KEY$");
- xlateMap.put("KW_LINES", "LINES");
- xlateMap.put("KW_STORED", "STORED");
- xlateMap.put("KW_SEQUENCEFILE", "SEQUENCEFILE");
- xlateMap.put("KW_TEXTFILE", "TEXTFILE");
- xlateMap.put("KW_INPUTFORMAT", "INPUTFORMAT");
- xlateMap.put("KW_OUTPUTFORMAT", "OUTPUTFORMAT");
- xlateMap.put("KW_LOCATION", "LOCATION");
- xlateMap.put("KW_TABLESAMPLE", "TABLESAMPLE");
- xlateMap.put("KW_BUCKET", "BUCKET");
- xlateMap.put("KW_OUT", "OUT");
- xlateMap.put("KW_OF", "OF");
- xlateMap.put("KW_CAST", "CAST");
- xlateMap.put("KW_ADD", "ADD");
- xlateMap.put("KW_REPLACE", "REPLACE");
- xlateMap.put("KW_COLUMNS", "COLUMNS");
- xlateMap.put("KW_RLIKE", "RLIKE");
- xlateMap.put("KW_REGEXP", "REGEXP");
- xlateMap.put("KW_TEMPORARY", "TEMPORARY");
- xlateMap.put("KW_FUNCTION", "FUNCTION");
- xlateMap.put("KW_EXPLAIN", "EXPLAIN");
- xlateMap.put("KW_EXTENDED", "EXTENDED");
- xlateMap.put("KW_SERDE", "SERDE");
- xlateMap.put("KW_WITH", "WITH");
- xlateMap.put("KW_SERDEPROPERTIES", "SERDEPROPERTIES");
- xlateMap.put("KW_LIMIT", "LIMIT");
- xlateMap.put("KW_SET", "SET");
- xlateMap.put("KW_PROPERTIES", "TBLPROPERTIES");
- xlateMap.put("KW_VALUE_TYPE", "$VALUE$");
- xlateMap.put("KW_ELEM_TYPE", "$ELEM$");
-
- // Operators
- xlateMap.put("DOT", ".");
- xlateMap.put("COLON", ":");
- xlateMap.put("COMMA", ",");
- xlateMap.put("SEMICOLON", ");");
-
- xlateMap.put("LPAREN", "(");
- xlateMap.put("RPAREN", ")");
- xlateMap.put("LSQUARE", "[");
- xlateMap.put("RSQUARE", "]");
-
- xlateMap.put("EQUAL", "=");
- xlateMap.put("NOTEQUAL", "<>");
- xlateMap.put("EQUAL_NS", "<=>");
- xlateMap.put("LESSTHANOREQUALTO", "<=");
- xlateMap.put("LESSTHAN", "<");
- xlateMap.put("GREATERTHANOREQUALTO", ">=");
- xlateMap.put("GREATERTHAN", ">");
-
- xlateMap.put("DIVIDE", "/");
- xlateMap.put("PLUS", "+");
- xlateMap.put("MINUS", "-");
- xlateMap.put("STAR", "*");
- xlateMap.put("MOD", "%");
-
- xlateMap.put("AMPERSAND", "&");
- xlateMap.put("TILDE", "~");
- xlateMap.put("BITWISEOR", "|");
- xlateMap.put("BITWISEXOR", "^");
- xlateMap.put("CharSetLiteral", "\\'");
- }
-
- public static Collection<String> getKeywords() {
- return xlateMap.values();
- }
-
- private static String xlate(String name) {
-
- String ret = xlateMap.get(name);
- if (ret == null) {
- ret = name;
- }
-
- return ret;
- }
-
    /**
     * ANTLRNoCaseStringStream.
     *
@@ -298,89 +127,6 @@ public class ParseDriver {
    }

    /**
- * HiveParserX.
- *
- */
- public class HiveParserX extends HiveParser {
-
- private final ArrayList<ParseError> errors;
-
- public HiveParserX(TokenStream input) {
- super(input);
- errors = new ArrayList<ParseError>();
- }
-
- @Override
- public Object recoverFromMismatchedSet(IntStream input,
- RecognitionException re, BitSet follow) throws RecognitionException {
- throw re;
- }
-
- @Override
- public void displayRecognitionError(String[] tokenNames,
- RecognitionException e) {
-
- errors.add(new ParseError(this, e, tokenNames));
- }
-
- @Override
- public String getErrorHeader(RecognitionException e) {
- String header = null;
- if (e.charPositionInLine < 0 && input.LT(-1) != null) {
- Token t = input.LT(-1);
- header = "line " + t.getLine() + ":" + t.getCharPositionInLine();
- } else {
- header = super.getErrorHeader(e);
- }
-
- return header;
- }
-
-
- @Override
- public String getErrorMessage(RecognitionException e, String[] tokenNames) {
- String msg = null;
-
- // Translate the token names to something that the user can understand
- String[] xlateNames = new String[tokenNames.length];
- for (int i = 0; i < tokenNames.length; ++i) {
- xlateNames[i] = ParseDriver.xlate(tokenNames[i]);
- }
-
- if (e instanceof NoViableAltException) {
- @SuppressWarnings("unused")
- NoViableAltException nvae = (NoViableAltException) e;
- // for development, can add
- // "decision=<<"+nvae.grammarDecisionDescription+">>"
- // and "(decision="+nvae.decisionNumber+") and
- // "state "+nvae.stateNumber
- msg = "cannot recognize input near "
- + getTokenErrorDisplay(e.token)
- + (input.LT(2) != null ? " " + getTokenErrorDisplay(input.LT(2)) : "")
- + (input.LT(3) != null ? " " + getTokenErrorDisplay(input.LT(3)) : "");
- } else if (e instanceof MismatchedTokenException) {
- MismatchedTokenException mte = (MismatchedTokenException) e;
- msg = super.getErrorMessage(e, xlateNames) + (input.LT(-1) == null ? "":" near '" + input.LT(-1).getText()) + "'";
- } else if (e instanceof FailedPredicateException) {
- FailedPredicateException fpe = (FailedPredicateException) e;
- msg = "Failed to recognize predicate '" + fpe.token.getText() + "'. Failed rule: '" + fpe.ruleName + "'";
- } else {
- msg = super.getErrorMessage(e, xlateNames);
- }
-
- if (msgs.size() > 0) {
- msg = msg + " in " + msgs.peek();
- }
- return msg;
- }
-
- public ArrayList<ParseError> getErrors() {
- return errors;
- }
-
- }
-
- /**
     * Tree adaptor for making antlr return ASTNodes instead of CommonTree nodes
     * so that the graph walking algorithms and the rules framework defined in
     * ql.lib can be used with the AST Nodes.
@@ -437,22 +183,22 @@ public class ParseDriver {
      if (ctx != null) {
        ctx.setTokenRewriteStream(tokens);
      }
- HiveParserX parser = new HiveParserX(tokens);
+ HiveParser parser = new HiveParser(tokens);
      parser.setTreeAdaptor(adaptor);
      HiveParser.statement_return r = null;
      try {
        r = parser.statement();
      } catch (RecognitionException e) {
        e.printStackTrace();
- throw new ParseException(parser.getErrors());
+ throw new ParseException(parser.errors);
      }

- if (lexer.getErrors().size() == 0 && parser.getErrors().size() == 0) {
+ if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) {
        LOG.info("Parse Completed");
      } else if (lexer.getErrors().size() != 0) {
        throw new ParseException(lexer.getErrors());
      } else {
- throw new ParseException(parser.getErrors());
+ throw new ParseException(parser.errors);
      }

      return (ASTNode) r.getTree();

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SelectClauseParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SelectClauseParser.g?rev=1448762&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SelectClauseParser.g (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SelectClauseParser.g Thu Feb 21 18:28:29 2013
@@ -0,0 +1,157 @@
+/**
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*/
+parser grammar SelectClauseParser;
+
+options
+{
+output=AST;
+ASTLabelType=CommonTree;
+backtrack=false;
+k=3;
+}
+
+@members {
+ @Override
+ public Object recoverFromMismatchedSet(IntStream input,
+ RecognitionException re, BitSet follow) throws RecognitionException {
+ throw re;
+ }
+ @Override
+ public void displayRecognitionError(String[] tokenNames,
+ RecognitionException e) {
+ gParent.errors.add(new ParseError(gParent, e, tokenNames));
+ }
+}
+
+@rulecatch {
+catch (RecognitionException e) {
+ throw e;
+}
+}
+
+//----------------------- Rules for parsing selectClause -----------------------------
+// select a,b,c ...
+selectClause
+@init { gParent.msgs.push("select clause"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_SELECT hintClause? (((KW_ALL | dist=KW_DISTINCT)? selectList)
+ | (transform=KW_TRANSFORM selectTrfmClause))
+ -> {$transform == null && $dist == null}? ^(TOK_SELECT hintClause? selectList)
+ -> {$transform == null && $dist != null}? ^(TOK_SELECTDI hintClause? selectList)
+ -> ^(TOK_SELECT hintClause? ^(TOK_SELEXPR selectTrfmClause) )
+ |
+ trfmClause ->^(TOK_SELECT ^(TOK_SELEXPR trfmClause))
+ ;
+
+selectList
+@init { gParent.msgs.push("select list"); }
+@after { gParent.msgs.pop(); }
+ :
+ selectItem ( COMMA selectItem )* -> selectItem+
+ ;
+
+selectTrfmClause
+@init { gParent.msgs.push("transform clause"); }
+@after { gParent.msgs.pop(); }
+ :
+ LPAREN selectExpressionList RPAREN
+ inSerde=rowFormat inRec=recordWriter
+ KW_USING StringLiteral
+ ( KW_AS ((LPAREN (aliasList | columnNameTypeList) RPAREN) | (aliasList | columnNameTypeList)))?
+ outSerde=rowFormat outRec=recordReader
+ -> ^(TOK_TRANSFORM selectExpressionList $inSerde $inRec StringLiteral $outSerde $outRec aliasList? columnNameTypeList?)
+ ;
+
+hintClause
+@init { gParent.msgs.push("hint clause"); }
+@after { gParent.msgs.pop(); }
+ :
+ DIVIDE STAR PLUS hintList STAR DIVIDE -> ^(TOK_HINTLIST hintList)
+ ;
+
+hintList
+@init { gParent.msgs.push("hint list"); }
+@after { gParent.msgs.pop(); }
+ :
+ hintItem (COMMA hintItem)* -> hintItem+
+ ;
+
+hintItem
+@init { gParent.msgs.push("hint item"); }
+@after { gParent.msgs.pop(); }
+ :
+ hintName (LPAREN hintArgs RPAREN)? -> ^(TOK_HINT hintName hintArgs?)
+ ;
+
+hintName
+@init { gParent.msgs.push("hint name"); }
+@after { gParent.msgs.pop(); }
+ :
+ KW_MAPJOIN -> TOK_MAPJOIN
+ | KW_STREAMTABLE -> TOK_STREAMTABLE
+ | KW_HOLD_DDLTIME -> TOK_HOLD_DDLTIME
+ ;
+
+hintArgs
+@init { gParent.msgs.push("hint arguments"); }
+@after { gParent.msgs.pop(); }
+ :
+ hintArgName (COMMA hintArgName)* -> ^(TOK_HINTARGLIST hintArgName+)
+ ;
+
+hintArgName
+@init { gParent.msgs.push("hint argument name"); }
+@after { gParent.msgs.pop(); }
+ :
+ identifier
+ ;
+
+selectItem
+@init { gParent.msgs.push("selection target"); }
+@after { gParent.msgs.pop(); }
+ :
+ ( selectExpression ((KW_AS? identifier) | (KW_AS LPAREN identifier (COMMA identifier)* RPAREN))?) -> ^(TOK_SELEXPR selectExpression identifier*)
+ ;
+
+trfmClause
+@init { gParent.msgs.push("transform clause"); }
+@after { gParent.msgs.pop(); }
+ :
+ ( KW_MAP selectExpressionList
+ | KW_REDUCE selectExpressionList )
+ inSerde=rowFormat inRec=recordWriter
+ KW_USING StringLiteral
+ ( KW_AS ((LPAREN (aliasList | columnNameTypeList) RPAREN) | (aliasList | columnNameTypeList)))?
+ outSerde=rowFormat outRec=recordReader
+ -> ^(TOK_TRANSFORM selectExpressionList $inSerde $inRec StringLiteral $outSerde $outRec aliasList? columnNameTypeList?)
+ ;
+
+selectExpression
+@init { gParent.msgs.push("select expression"); }
+@after { gParent.msgs.pop(); }
+ :
+ expression | tableAllColumns
+ ;
+
+selectExpressionList
+@init { gParent.msgs.push("select expression list"); }
+@after { gParent.msgs.pop(); }
+ :
+ selectExpression (COMMA selectExpression)* -> ^(TOK_EXPLIST selectExpression+)
+ ;
+

Modified: hive/trunk/ql/src/test/queries/clientnegative/show_tables_bad1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/show_tables_bad1.q?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/show_tables_bad1.q (original)
+++ hive/trunk/ql/src/test/queries/clientnegative/show_tables_bad1.q Thu Feb 21 18:28:29 2013
@@ -1 +1 @@
-SHOW TABLES LIKE;
+SHOW TABLES JOIN;

Modified: hive/trunk/ql/src/test/queries/clientnegative/show_tables_bad2.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/show_tables_bad2.q?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/show_tables_bad2.q (original)
+++ hive/trunk/ql/src/test/queries/clientnegative/show_tables_bad2.q Thu Feb 21 18:28:29 2013
@@ -1 +1 @@
-SHOW TABLES FROM default LIKE;
+SHOW TABLES FROM default LIKE a b;

Added: hive/trunk/ql/src/test/queries/clientpositive/nonreserved_keywords_input37.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/nonreserved_keywords_input37.q?rev=1448762&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/nonreserved_keywords_input37.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/nonreserved_keywords_input37.q Thu Feb 21 18:28:29 2013
@@ -0,0 +1,12 @@
+CREATE TABLE table(string string) STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../data/files/docurl.txt' INTO TABLE table;
+
+SELECT table, count(1)
+FROM
+(
+ FROM table
+ SELECT TRANSFORM (table.string)
+ USING 'java -cp ../build/ql/test/classes org.apache.hadoop.hive.scripts.extracturl' AS (table, count)
+) subq
+GROUP BY table;

Added: hive/trunk/ql/src/test/queries/clientpositive/nonreserved_keywords_insert_into1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/nonreserved_keywords_insert_into1.q?rev=1448762&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/nonreserved_keywords_insert_into1.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/nonreserved_keywords_insert_into1.q Thu Feb 21 18:28:29 2013
@@ -0,0 +1,26 @@
+DROP TABLE insert;
+
+CREATE TABLE insert (from INT, as STRING);
+
+EXPLAIN INSERT INTO TABLE insert SELECT * FROM src LIMIT 100;
+INSERT INTO TABLE insert SELECT * FROM src LIMIT 100;
+SELECT SUM(HASH(hash)) FROM (
+ SELECT TRANSFORM(*) USING 'tr \t _' AS (hash) FROM insert
+) t;
+
+EXPLAIN INSERT INTO TABLE insert SELECT * FROM src LIMIT 100;
+INSERT INTO TABLE insert SELECT * FROM src LIMIT 100;
+SELECT SUM(HASH(sum)) FROM (
+ SELECT TRANSFORM(*) USING 'tr \t _' AS (sum) FROM insert
+) t;
+
+SELECT COUNT(*) FROM insert;
+
+EXPLAIN INSERT OVERWRITE TABLE insert SELECT * FROM src LIMIT 10;
+INSERT OVERWRITE TABLE insert SELECT * FROM src LIMIT 10;
+SELECT SUM(HASH(add)) FROM (
+ SELECT TRANSFORM(*) USING 'tr \t _' AS (add) FROM insert
+) t;
+
+
+DROP TABLE insert;

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_partspec3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_partspec3.q.out?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_partspec3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_partspec3.q.out Thu Feb 21 18:28:29 2013
@@ -23,5 +23,5 @@ POSTHOOK: Input: default@srcpart@ds=2008
  POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
  POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-FAILED: ParseException line 3:48 missing Identifier at ')' near '<EOF>'
+FAILED: ParseException line 3:48 cannot recognize input near ')' '<EOF>' '<EOF>' in archive statement


Modified: hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl2.q.out?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl2.q.out Thu Feb 21 18:28:29 2013
@@ -1,2 +1,2 @@
-FAILED: ParseException line 1:7 Failed to recognize predicate 'tabl'. Failed rule: 'kwRole' in create role
+FAILED: ParseException line 1:7 cannot recognize input near 'create' 'tabl' 'tmp_zshao_22' in ddl statement


Modified: hive/trunk/ql/src/test/results/clientnegative/invalid_tbl_name.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/invalid_tbl_name.q.out?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/invalid_tbl_name.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/invalid_tbl_name.q.out Thu Feb 21 18:28:29 2013
@@ -1,2 +1,2 @@
-FAILED: ParseException line 1:13 cannot recognize input near 'invalid' '-' 'name' in table name
+FAILED: ParseException line 1:20 cannot recognize input near 'invalid' '-' 'name' in table name


Modified: hive/trunk/ql/src/test/results/clientnegative/lateral_view_join.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/lateral_view_join.q.out?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/lateral_view_join.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/lateral_view_join.q.out Thu Feb 21 18:28:29 2013
@@ -1,2 +1,2 @@
-FAILED: ParseException line 1:59 extraneous input 'AS' expecting Identifier near 'myTable' in lateral view
+FAILED: ParseException line 1:62 missing AS at 'myTable' near '<EOF>'


Modified: hive/trunk/ql/src/test/results/clientnegative/select_udtf_alias.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/select_udtf_alias.q.out?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/select_udtf_alias.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/select_udtf_alias.q.out Thu Feb 21 18:28:29 2013
@@ -1,2 +1,2 @@
-FAILED: ParseException line 3:49 mismatched input 'LIMIT' expecting FROM near ')' in from clause
+FAILED: ParseException line 3:49 missing FROM at 'LIMIT' near ')' in table name


Modified: hive/trunk/ql/src/test/results/clientnegative/show_tables_bad1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/show_tables_bad1.q.out?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/show_tables_bad1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/show_tables_bad1.q.out Thu Feb 21 18:28:29 2013
@@ -1,2 +1,2 @@
-FAILED: ParseException line 1:16 mismatched input '<EOF>' expecting set null in Identifier for show statement
+FAILED: ParseException line 1:12 extraneous input 'JOIN' expecting EOF near '<EOF>'


Modified: hive/trunk/ql/src/test/results/clientnegative/show_tables_bad2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/show_tables_bad2.q.out?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/show_tables_bad2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/show_tables_bad2.q.out Thu Feb 21 18:28:29 2013
@@ -1,2 +1,2 @@
-FAILED: ParseException line 1:29 mismatched input '<EOF>' expecting set null in Identifier for show statement
+FAILED: ParseException line 1:32 extraneous input 'b' expecting EOF near '<EOF>'


Added: hive/trunk/ql/src/test/results/clientpositive/nonreserved_keywords_input37.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/nonreserved_keywords_input37.q.out?rev=1448762&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/nonreserved_keywords_input37.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/nonreserved_keywords_input37.q.out Thu Feb 21 18:28:29 2013
@@ -0,0 +1,35 @@
+PREHOOK: query: CREATE TABLE table(string string) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE table(string string) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@table
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/docurl.txt' INTO TABLE table
+PREHOOK: type: LOAD
+PREHOOK: Output: default@table
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/docurl.txt' INTO TABLE table
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@table
+PREHOOK: query: SELECT table, count(1)
+FROM
+(
+ FROM table
+ SELECT TRANSFORM (table.string)
+#### A masked pattern was here ####
+) subq
+GROUP BY table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT table, count(1)
+FROM
+(
+ FROM table
+ SELECT TRANSFORM (table.string)
+#### A masked pattern was here ####
+) subq
+GROUP BY table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table
+#### A masked pattern was here ####
+1uauniajqtunlsvadmxhlxvngxpqjuzbpzvdiwmzphmbaicduzkgxgtdeiunduosu.html 4
+4uzsbtwvdypfitqfqdjosynqp.html 4

Added: hive/trunk/ql/src/test/results/clientpositive/nonreserved_keywords_insert_into1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/nonreserved_keywords_insert_into1.q.out?rev=1448762&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/nonreserved_keywords_insert_into1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/nonreserved_keywords_insert_into1.q.out Thu Feb 21 18:28:29 2013
@@ -0,0 +1,333 @@
+PREHOOK: query: DROP TABLE insert
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE insert
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE insert (from INT, as STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE insert (from INT, as STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@insert
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert SELECT * FROM src LIMIT 100
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert SELECT * FROM src LIMIT 100
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 100)))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+ Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ Limit
+ Reduce Output Operator
+ sort order:
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ Reduce Operator Tree:
+ Extract
+ Limit
+ Select Operator
+ expressions:
+ expr: UDFToInteger(_col0)
+ type: int
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.insert
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.insert
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
+PREHOOK: query: INSERT INTO TABLE insert SELECT * FROM src LIMIT 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert
+POSTHOOK: query: INSERT INTO TABLE insert SELECT * FROM src LIMIT 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(hash)) FROM (
+ SELECT TRANSFORM(*) USING 'tr \t _' AS (hash) FROM insert
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(hash)) FROM (
+ SELECT TRANSFORM(*) USING 'tr \t _' AS (hash) FROM insert
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert
+#### A masked pattern was here ####
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+10226524244
+PREHOOK: query: EXPLAIN INSERT INTO TABLE insert SELECT * FROM src LIMIT 100
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert SELECT * FROM src LIMIT 100
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME insert))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 100)))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+ Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ Limit
+ Reduce Output Operator
+ sort order:
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ Reduce Operator Tree:
+ Extract
+ Limit
+ Select Operator
+ expressions:
+ expr: UDFToInteger(_col0)
+ type: int
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.insert
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.insert
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
+PREHOOK: query: INSERT INTO TABLE insert SELECT * FROM src LIMIT 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert
+POSTHOOK: query: INSERT INTO TABLE insert SELECT * FROM src LIMIT 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(sum)) FROM (
+ SELECT TRANSFORM(*) USING 'tr \t _' AS (sum) FROM insert
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(sum)) FROM (
+ SELECT TRANSFORM(*) USING 'tr \t _' AS (sum) FROM insert
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert
+#### A masked pattern was here ####
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+20453048488
+PREHOOK: query: SELECT COUNT(*) FROM insert
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT COUNT(*) FROM insert
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert
+#### A masked pattern was here ####
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+200
+PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE insert SELECT * FROM src LIMIT 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN INSERT OVERWRITE TABLE insert SELECT * FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME insert))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_LIMIT 10)))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+ Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ Limit
+ Reduce Output Operator
+ sort order:
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ Reduce Operator Tree:
+ Extract
+ Limit
+ Select Operator
+ expressions:
+ expr: UDFToInteger(_col0)
+ type: int
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.insert
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.insert
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+
+PREHOOK: query: INSERT OVERWRITE TABLE insert SELECT * FROM src LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@insert
+POSTHOOK: query: INSERT OVERWRITE TABLE insert SELECT * FROM src LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@insert
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+PREHOOK: query: SELECT SUM(HASH(add)) FROM (
+ SELECT TRANSFORM(*) USING 'tr \t _' AS (add) FROM insert
+) t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@insert
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SUM(HASH(add)) FROM (
+ SELECT TRANSFORM(*) USING 'tr \t _' AS (add) FROM insert
+) t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@insert
+#### A masked pattern was here ####
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+-826625916
+PREHOOK: query: DROP TABLE insert
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@insert
+PREHOOK: Output: default@insert
+POSTHOOK: query: DROP TABLE insert
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@insert
+POSTHOOK: Output: default@insert
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.as SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: insert.from EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/compiler/errors/missing_overwrite.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/errors/missing_overwrite.q.out?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/errors/missing_overwrite.q.out (original)
+++ hive/trunk/ql/src/test/results/compiler/errors/missing_overwrite.q.out Thu Feb 21 18:28:29 2013
@@ -1 +1 @@
-Parse Error: line 2:0 cannot recognize input near 'INSERT' 'TABLE' 'dest1' in insert clause
+Parse Error: line 2:7 cannot recognize input near 'INSERT' 'TABLE' 'dest1' in table source

Modified: hive/trunk/ql/src/test/results/compiler/errors/wrong_distinct2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/errors/wrong_distinct2.q.out?rev=1448762&r1=1448761&r2=1448762&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/errors/wrong_distinct2.q.out (original)
+++ hive/trunk/ql/src/test/results/compiler/errors/wrong_distinct2.q.out Thu Feb 21 18:28:29 2013
@@ -1 +1 @@
-Parse Error: line 2:45 cannot recognize input near 'DISTINCT' 'substr' '(' in select expression
+Parse Error: line 2:60 missing EOF at '(' near 'substr'

Search Discussions

Discussion Posts

Follow ups

Related Discussions

Discussion Navigation
viewthread | post
posts ‹ prev | 1 of 2 | next ›
Discussion Overview
group: commits @
categories: hive, hadoop
posted: Feb 21, '13 at 6:28p
active: Feb 21, '13 at 6:28p
posts: 2
users: 1
website: hive.apache.org

1 user in discussion

Kevinwilfong: 2 posts

People

Translate

site design / logo © 2021 Grokbase