Author: zshao
Date: Fri May 29 00:26:47 2009
New Revision: 779804

URL: http://svn.apache.org/viewvc?rev=779804&view=rev
Log:
HIVE-514. Partition key names should be case insensitive in alter table add partition statement. (Prasad Chakka via zshao)

Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/conf/hive-default.xml
hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart1.q
hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart1.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=779804&r1=779803&r2=779804&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Fri May 29 00:26:47 2009
@@ -180,6 +180,9 @@
HIVE-467. Scratch data location should be on different filesystems for
different types of intermediate data. (Joydeep Sen Sarma via rmurthy)

+ HIVE-514. Partition key names should be case insensitive in alter table add
+ partition statement. (Prasad Chakka via zshao)
+
Release 0.3.1 - Unreleased

INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/conf/hive-default.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/conf/hive-default.xml?rev=779804&r1=779803&r2=779804&view=diff
==============================================================================
--- hadoop/hive/trunk/conf/hive-default.xml (original)
+++ hadoop/hive/trunk/conf/hive-default.xml Fri May 29 00:26:47 2009
@@ -11,6 +11,16 @@

<!-- Hive Execution Parameters -->
<property>
+ <name>mapred.reduce.tasks</name>
+ <value>-1</value>
+ <description>The default number of reduce tasks per job. Typically set
+ to a prime close to the number of available hosts. Ignored when
+ mapred.job.tracker is "local". Hadoop set this to 1 by default, whereas hive uses -1 as its default value.
+ By setting this property to -1, Hive will automatically figure out what should be the number of reducers.
+ </description>
+</property>
+
+<property>
<name>hive.exec.scratchdir</name>
<value>/tmp/hive-${user.name}</value>
<description>Scratch space for Hive jobs</description>
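A note on the new property added above: with mapred.reduce.tasks left at -1, Hive picks the reducer count itself instead of taking Hadoop's default of 1. Below is a minimal sketch of that idea, not Hive's actual code; the property names hive.exec.reducers.bytes.per.reducer and hive.exec.reducers.max are real Hive settings, but the helper and constants here are illustrative only.

// Illustrative only: estimate a reducer count from the total input size when
// mapred.reduce.tasks is -1, bounded by a per-reducer byte target and a hard cap.
public class ReducerEstimateSketch {
  static int estimateReducers(long totalInputBytes, long bytesPerReducer, int maxReducers) {
    int reducers = (int) ((totalInputBytes + bytesPerReducer - 1) / bytesPerReducer); // ceiling division
    return Math.max(1, Math.min(reducers, maxReducers));
  }

  public static void main(String[] args) {
    // 10 GB of input at 1 GB per reducer, capped at 999 -> 10 reducers
    System.out.println(estimateReducers(10L << 30, 1L << 30, 999));
  }
}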

Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java?rev=779804&r1=779803&r2=779804&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java Fri May 29 00:26:47 2009
@@ -203,7 +203,7 @@
if(i > 0) {
name.append(Path.SEPARATOR);
}
- name.append((partCols.get(i)).getName());
+ name.append((partCols.get(i)).getName().toLowerCase());
name.append('=');
name.append(vals.get(i));
}
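For reference, a self-contained sketch of the path-building loop patched above, simplified from Warehouse's partition-name construction: plain strings stand in for FieldSchema, and '/' stands in for Path.SEPARATOR. Only the partition column names are lowercased; the values are kept as given, which matches the loadpart1.q.out output further down.

import java.util.Arrays;
import java.util.List;

public class PartNameSketch {
  static String makePartName(List<String> partColNames, List<String> vals) {
    StringBuilder name = new StringBuilder();
    for (int i = 0; i < partColNames.size(); i++) {
      if (i > 0) {
        name.append('/');                              // Path.SEPARATOR in the real code
      }
      name.append(partColNames.get(i).toLowerCase());  // the fix: normalize the key's case
      name.append('=');
      name.append(vals.get(i));
    }
    return name.toString();
  }

  public static void main(String[] args) {
    // "pCol2" collapses to "pcol2", so differently-cased specs land in the same directory.
    System.out.println(makePartName(Arrays.asList("pcol1", "pCol2"),
                                    Arrays.asList("test_part", "test_Part")));
    // prints: pcol1=test_part/pcol2=test_Part
  }
}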

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java?rev=779804&r1=779803&r2=779804&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java Fri May 29 00:26:47 2009
@@ -87,7 +87,11 @@

List<String> pvals = new ArrayList<String>();
for (FieldSchema field : tbl.getPartCols()) {
- pvals.add(partSpec.get(field.getName()));
+ String val = partSpec.get(field.getName());
+ if (val == null) {
+ throw new HiveException("partition spec is invalid. field.getName() does not exist in input.");
+ }
+ pvals.add(val);
}

org.apache.hadoop.hive.metastore.api.Partition tpart =
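The null check added above turns a silently-null partition value into an explicit error. A standalone sketch of the same guard follows, with plain strings and a generic Exception standing in for FieldSchema and HiveException: once the analyzer lowercases the keys of partSpec, a failed lookup by the table's partition column name means the spec is genuinely incomplete or misspelled, so failing fast beats adding null to pvals.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class PartSpecGuardSketch {
  static List<String> buildPartVals(List<String> partColNames,
                                    Map<String, String> partSpec) throws Exception {
    List<String> pvals = new ArrayList<String>();
    for (String colName : partColNames) {
      String val = partSpec.get(colName);
      if (val == null) {
        // stands in for the HiveException thrown in Partition.java
        throw new Exception("partition spec is invalid: no value for " + colName);
      }
      pvals.add(val);
    }
    return pvals;
  }

  public static void main(String[] args) throws Exception {
    Map<String, String> spec = new HashMap<String, String>();
    spec.put("pcol1", "test_part");   // keys already lowercased by the analyzer
    spec.put("pcol2", "test_Part");
    System.out.println(buildPartVals(Arrays.asList("pcol1", "pcol2"), spec));
  }
}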

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=779804&r1=779803&r2=779804&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Fri May 29 00:26:47 2009
@@ -274,7 +274,7 @@
for (int i = 0; i < partspec.getChildCount(); ++i) {
ASTNode partspec_val = (ASTNode) partspec.getChild(i);
String val = stripQuotes(partspec_val.getChild(1).getText());
- partSpec.put(unescapeIdentifier(partspec_val.getChild(0).getText()), val);
+ partSpec.put(unescapeIdentifier(partspec_val.getChild(0).getText().toLowerCase()), val);
}
try {
// this doesn't create partition. partition is created in MoveTask
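The one-line change above is where the case-insensitivity comes from: partition key names pulled out of the parse tree are lowercased before they are stored in partSpec, while the values are untouched. A trimmed sketch of the idea, with plain strings standing in for the ASTNode children used in the real code:

import java.util.LinkedHashMap;
import java.util.Map;

public class PartSpecNormalizeSketch {
  public static void main(String[] args) {
    // key/value pairs as they might come out of the parser, with user-typed casing
    String[][] parsed = { { "pCol1", "test_part" }, { "pcol2", "test_Part" } };

    Map<String, String> partSpec = new LinkedHashMap<String, String>();
    for (String[] kv : parsed) {
      partSpec.put(kv[0].toLowerCase(), kv[1]);  // normalize the key, keep the value
    }
    System.out.println(partSpec);  // {pcol1=test_part, pcol2=test_Part}
  }
}

DDLSemanticAnalyzer.java below applies the same normalization when the partition spec comes from an ALTER TABLE statement.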

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=779804&r1=779803&r2=779804&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri May 29 00:26:47 2009
@@ -658,7 +658,7 @@
for (int i = 0; i < partspec.getChildCount(); ++i) {
CommonTree partspec_val = (CommonTree) partspec.getChild(i);
String val = stripQuotes(partspec_val.getChild(1).getText());
- partSpec.put(partspec_val.getChild(0).getText(), val);
+ partSpec.put(partspec_val.getChild(0).getText().toLowerCase(), val);
}
partSpecs.add(partSpec);
}

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart1.q?rev=779804&r1=779803&r2=779804&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/loadpart1.q Fri May 29 00:26:47 2009
@@ -1,11 +1,19 @@
drop table hive_test_src;
drop table hive_test_dst;
+
create table hive_test_src ( col1 string ) stored as textfile ;
load data local inpath '../data/files/test.dat' overwrite into table hive_test_src ;
+
create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile;
-insert overwrite table hive_test_dst partition ( pcol1='test_part', pcol2='test_part') select col1 from hive_test_src ;
+insert overwrite table hive_test_dst partition ( pcol1='test_part', pCol2='test_Part') select col1 from hive_test_src ;
+select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part';
+
+insert overwrite table hive_test_dst partition ( pCol1='test_part', pcol2='test_Part') select col1 from hive_test_src ;
select * from hive_test_dst where pcol1='test_part' and pcol2='test_part';
-insert overwrite table hive_test_dst partition ( pcol1='test_part', pcol2='test_part') select col1 from hive_test_src ;
+
+select * from hive_test_dst where pcol1='test_part';
select * from hive_test_dst where pcol1='test_part' and pcol2='test_part';
+select * from hive_test_dst where pcol1='test_Part';
+
drop table hive_test_src;
drop table hive_test_dst;

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart1.q.out?rev=779804&r1=779803&r2=779804&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/loadpart1.q.out Fri May 29 00:26:47 2009
@@ -3,29 +3,35 @@
query: create table hive_test_src ( col1 string ) stored as textfile
query: load data local inpath '../data/files/test.dat' overwrite into table hive_test_src
query: create table hive_test_dst ( col1 string ) partitioned by ( pcol1 string , pcol2 string) stored as sequencefile
-query: insert overwrite table hive_test_dst partition ( pcol1='test_part', pcol2='test_part') select col1 from hive_test_src
+query: insert overwrite table hive_test_dst partition ( pcol1='test_part', pCol2='test_Part') select col1 from hive_test_src
Input: default/hive_test_src
-Output: default/hive_test_dst/pcol1=test_part/pcol2=test_part
-query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
-Input: default/hive_test_dst/pcol1=test_part/pcol2=test_part
-Output: /data/users/pchakka/workspace/oshive/ql/../build/ql/tmp/608894653/647768977.10000
-1 test_part test_part
-2 test_part test_part
-3 test_part test_part
-4 test_part test_part
-5 test_part test_part
-6 test_part test_part
-query: insert overwrite table hive_test_dst partition ( pcol1='test_part', pcol2='test_part') select col1 from hive_test_src
+Output: default/hive_test_dst/pcol1=test_part/pcol2=test_Part
+query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_Part'
+Input: default/hive_test_dst/pcol1=test_part/pcol2=test_Part
+Output: /Users/pchakka/workspace/oshive/ql/../build/ql/tmp/20877141/262522507.10000
+1 test_part test_Part
+2 test_part test_Part
+3 test_part test_Part
+4 test_part test_Part
+5 test_part test_Part
+6 test_part test_Part
+query: insert overwrite table hive_test_dst partition ( pCol1='test_part', pcol2='test_Part') select col1 from hive_test_src
Input: default/hive_test_src
-Output: default/hive_test_dst/pcol1=test_part/pcol2=test_part
+Output: default/hive_test_dst/pcol1=test_part/pcol2=test_Part
+query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
+Output: /Users/pchakka/workspace/oshive/ql/../build/ql/tmp/1586437457/438136405.10000
+query: select * from hive_test_dst where pcol1='test_part'
+Input: default/hive_test_dst/pcol1=test_part/pcol2=test_Part
+Output: /Users/pchakka/workspace/oshive/ql/../build/ql/tmp/991232921/143300248.10000
+1 test_part test_Part
+2 test_part test_Part
+3 test_part test_Part
+4 test_part test_Part
+5 test_part test_Part
+6 test_part test_Part
query: select * from hive_test_dst where pcol1='test_part' and pcol2='test_part'
-Input: default/hive_test_dst/pcol1=test_part/pcol2=test_part
-Output: /data/users/pchakka/workspace/oshive/ql/../build/ql/tmp/255138744/205167628.10000
-1 test_part test_part
-2 test_part test_part
-3 test_part test_part
-4 test_part test_part
-5 test_part test_part
-6 test_part test_part
+Output: /Users/pchakka/workspace/oshive/ql/../build/ql/tmp/118754797/315488459.10000
+query: select * from hive_test_dst where pcol1='test_Part'
+Output: /Users/pchakka/workspace/oshive/ql/../build/ql/tmp/27905267/502654394.10000
query: drop table hive_test_src
query: drop table hive_test_dst
