svn commit: r1647794
Author: brock
Date: Wed Dec 24 14:33:00 2014
New Revision: 1647794

URL: http://svn.apache.org/r1647794
Log:
HIVE-9193 - Fix ordering differences due to Java 8 (Part 3) (Mohit Sabharwal via Brock)

Added:
     hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.java1.7.out
     hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.java1.8.out
     hive/trunk/ql/src/test/results/clientpositive/avro_date.q.java1.7.out
     hive/trunk/ql/src/test/results/clientpositive/avro_date.q.java1.8.out
     hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.7.out
     hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.8.out
     hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.7.out
     hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.8.out
     hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.java1.7.out
     hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.java1.8.out
     hive/trunk/ql/src/test/results/clientpositive/input4.q.java1.7.out
     hive/trunk/ql/src/test/results/clientpositive/input4.q.java1.8.out
     hive/trunk/ql/src/test/results/clientpositive/join0.q.java1.7.out
     hive/trunk/ql/src/test/results/clientpositive/join0.q.java1.8.out
     hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out
     hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out
     hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.7.out
     hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.8.out
     hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out
     hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out
Removed:
     hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.out
     hive/trunk/ql/src/test/results/clientpositive/avro_date.q.out
     hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.out
     hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.out
     hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.out
     hive/trunk/ql/src/test/results/clientpositive/input4.q.out
     hive/trunk/ql/src/test/results/clientpositive/join0.q.out
     hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.out
     hive/trunk/ql/src/test/results/clientpositive/plan_json.q.out
     hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.out
Modified:
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
     hive/trunk/ql/src/test/queries/clientpositive/authorization_explain.q
     hive/trunk/ql/src/test/queries/clientpositive/avro_date.q
     hive/trunk/ql/src/test/queries/clientpositive/avro_deserialize_map_null.q
     hive/trunk/ql/src/test/queries/clientpositive/avro_nullable_fields.q
     hive/trunk/ql/src/test/queries/clientpositive/avro_timestamp.q
     hive/trunk/ql/src/test/queries/clientpositive/input4.q
     hive/trunk/ql/src/test/queries/clientpositive/join0.q
     hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_6.q
     hive/trunk/ql/src/test/queries/clientpositive/plan_json.q

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java?rev=1647794&r1=1647793&r2=1647794&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java Wed Dec 24 14:33:00 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.hooks;
  import java.io.Serializable;
  import java.util.ArrayList;
  import java.util.HashSet;
+import java.util.LinkedHashSet;
  import java.util.List;
  import java.util.Set;

@@ -54,7 +55,8 @@ public class ReadEntity extends Entity i
    private boolean isUpdateOrDelete = false;

    // For views, the entities can be nested - by default, entities are at the top level
- private final Set<ReadEntity> parents = new HashSet<ReadEntity>();
+ // Must be deterministic order set for consistent q-test output across Java versions
+ private final Set<ReadEntity> parents = new LinkedHashSet<ReadEntity>();

    // The accessed columns of query
    private final List<String> accessedColumns = new ArrayList<String>();

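The Java 8 ordering differences fixed here stem from HashSet (and HashMap) iteration order, which is unspecified and changed between JDK 7 and JDK 8; any q-test output produced by walking the parents set could therefore differ across JVMs. LinkedHashSet additionally threads entries on a linked list and always iterates in insertion order. A standalone sketch of the difference (the element values are illustrative, not taken from a real query):

import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;

public class IterationOrderDemo {
    public static void main(String[] args) {
        // HashSet: iteration order follows the hash table layout, which the
        // JDK is free to change between releases (and did in JDK 8).
        Set<String> hashed = new HashSet<String>();
        // LinkedHashSet: entries are also threaded on a linked list, so
        // iteration follows insertion order on any JVM.
        Set<String> linked = new LinkedHashSet<String>();
        for (String entity : new String[] {"default@src", "default@srcpart", "default@srcview"}) {
            hashed.add(entity);
            linked.add(entity);
        }
        System.out.println("HashSet:       " + hashed); // order may vary by JDK
        System.out.println("LinkedHashSet: " + linked); // always insertion order
    }
}
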
Modified: hive/trunk/ql/src/test/queries/clientpositive/authorization_explain.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/authorization_explain.q?rev=1647794&r1=1647793&r2=1647794&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/authorization_explain.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/authorization_explain.q Wed Dec 24 14:33:00 2014
@@ -1,5 +1,7 @@
  set hive.security.authorization.enabled=true;

+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
  explain authorization select * from src join srcpart;
  explain formatted authorization select * from src join srcpart;


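The new -- JAVA_VERSION_SPECIFIC_OUTPUT marker tells the q-test driver to compare against a golden file suffixed with the runtime Java version (the .q.java1.7.out and .q.java1.8.out files added above) instead of a single .q.out. A hypothetical sketch of such a lookup, with the helper name and directory layout assumed for illustration; this is not Hive's actual QTestUtil code:

import java.io.File;

public class GoldenFileResolver {
    // Hypothetical helper: picks the golden file for a query. The method
    // name and layout are assumptions made for this sketch.
    static File resolve(File resultsDir, String query, boolean versionSpecific) {
        if (!versionSpecific) {
            return new File(resultsDir, query + ".q.out");
        }
        // "java.specification.version" is "1.7" on JDK 7 and "1.8" on JDK 8.
        String v = System.getProperty("java.specification.version");
        return new File(resultsDir, query + ".q.java" + v + ".out");
    }

    public static void main(String[] args) {
        File dir = new File("ql/src/test/results/clientpositive");
        // On JDK 8 this prints .../join0.q.java1.8.out
        System.out.println(resolve(dir, "join0", true));
    }
}
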
Modified: hive/trunk/ql/src/test/queries/clientpositive/avro_date.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/avro_date.q?rev=1647794&r1=1647793&r2=1647794&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/avro_date.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/avro_date.q Wed Dec 24 14:33:00 2014
@@ -1,3 +1,5 @@
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
  DROP TABLE avro_date_staging;
  DROP TABLE avro_date;
  DROP TABLE avro_date_casts;

Modified: hive/trunk/ql/src/test/queries/clientpositive/avro_deserialize_map_null.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/avro_deserialize_map_null.q?rev=1647794&r1=1647793&r2=1647794&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/avro_deserialize_map_null.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/avro_deserialize_map_null.q Wed Dec 24 14:33:00 2014
@@ -4,6 +4,7 @@
  -- fileSchema = [{ "type" : "map", "values" : ["string","null"]}, "null"]
  -- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]

+-- JAVA_VERSION_SPECIFIC_OUTPUT

  DROP TABLE IF EXISTS avro_table;

@@ -11,4 +12,4 @@ CREATE TABLE avro_table (avreau_col_1 ma
  LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table;
  SELECT * FROM avro_table;

-DROP TABLE avro_table;
\ No newline at end of file
+DROP TABLE avro_table;

Modified: hive/trunk/ql/src/test/queries/clientpositive/avro_nullable_fields.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/avro_nullable_fields.q?rev=1647794&r1=1647793&r2=1647794&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/avro_nullable_fields.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/avro_nullable_fields.q Wed Dec 24 14:33:00 2014
@@ -1,4 +1,7 @@
  -- Verify that nullable fields properly work
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
  CREATE TABLE test_serializer(string1 STRING,
                               int1 INT,
                               tinyint1 TINYINT,

Modified: hive/trunk/ql/src/test/queries/clientpositive/avro_timestamp.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/avro_timestamp.q?rev=1647794&r1=1647793&r2=1647794&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/avro_timestamp.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/avro_timestamp.q Wed Dec 24 14:33:00 2014
@@ -1,3 +1,5 @@
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
  DROP TABLE avro_timestamp_staging;
  DROP TABLE avro_timestamp;
  DROP TABLE avro_timestamp_casts;

Modified: hive/trunk/ql/src/test/queries/clientpositive/input4.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/input4.q?rev=1647794&r1=1647793&r2=1647794&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/input4.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/input4.q Wed Dec 24 14:33:00 2014
@@ -1,3 +1,5 @@
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
  CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE;
  EXPLAIN
  LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4;

Modified: hive/trunk/ql/src/test/queries/clientpositive/join0.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/join0.q?rev=1647794&r1=1647793&r2=1647794&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/join0.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/join0.q Wed Dec 24 14:33:00 2014
@@ -1,3 +1,5 @@
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
  EXPLAIN
  SELECT src1.key as k1, src1.value as v1,
         src2.key as k2, src2.value as v2 FROM

Modified: hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_6.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_6.q?rev=1647794&r1=1647793&r2=1647794&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_6.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/list_bucket_dml_6.q Wed Dec 24 14:33:00 2014
@@ -47,6 +47,7 @@ set hive.merge.mapredfiles=false;

  -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
  -- SORT_QUERY_RESULTS
+-- JAVA_VERSION_SPECIFIC_OUTPUT

  -- create a skewed table
  create table list_bucketing_dynamic_part (key String, value String)

Modified: hive/trunk/ql/src/test/queries/clientpositive/plan_json.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/plan_json.q?rev=1647794&r1=1647793&r2=1647794&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/plan_json.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/plan_json.q Wed Dec 24 14:33:00 2014
@@ -1,3 +1,5 @@
  -- explain plan json: the query gets the formatted json output of the query plan of the hive query

+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
  EXPLAIN FORMATTED SELECT count(1) FROM src;

Added: hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.java1.7.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.java1.7.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.java1.7.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.java1.7.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,47 @@
+Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+explain authorization select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+explain authorization select * from src join srcpart
+POSTHOOK: type: QUERY
+INPUTS:
+ default@src
+ default@srcpart
+ default@srcpart@ds=2008-04-08/hr=11
+ default@srcpart@ds=2008-04-08/hr=12
+ default@srcpart@ds=2008-04-09/hr=11
+ default@srcpart@ds=2008-04-09/hr=12
+OUTPUTS:
+#### A masked pattern was here ####
+CURRENT_USER:
+ hive_test_user
+OPERATION:
+ QUERY
+AUTHORIZATION_FAILURES:
+ No privilege 'Select' found for inputs { database:default, table:src, columnName:key}
+ No privilege 'Select' found for inputs { database:default, table:srcpart, columnName:key}
+Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain formatted authorization select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain formatted authorization select * from src join srcpart
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+PREHOOK: query: explain authorization use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain authorization use default
+POSTHOOK: type: SWITCHDATABASE
+INPUTS:
+ database:default
+OUTPUTS:
+CURRENT_USER:
+ hive_test_user
+OPERATION:
+ SWITCHDATABASE
+PREHOOK: query: explain formatted authorization use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain formatted authorization use default
+POSTHOOK: type: SWITCHDATABASE
+{"OUTPUTS":[],"INPUTS":["database:default"],"OPERATION":"SWITCHDATABASE","CURRENT_USER":"hive_test_user"}

Added: hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.java1.8.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.java1.8.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.java1.8.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.java1.8.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,47 @@
+Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+explain authorization select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+explain authorization select * from src join srcpart
+POSTHOOK: type: QUERY
+INPUTS:
+ default@src
+ default@srcpart
+ default@srcpart@ds=2008-04-08/hr=11
+ default@srcpart@ds=2008-04-08/hr=12
+ default@srcpart@ds=2008-04-09/hr=11
+ default@srcpart@ds=2008-04-09/hr=12
+OUTPUTS:
+#### A masked pattern was here ####
+CURRENT_USER:
+ hive_test_user
+OPERATION:
+ QUERY
+AUTHORIZATION_FAILURES:
+ No privilege 'Select' found for inputs { database:default, table:src, columnName:key}
+ No privilege 'Select' found for inputs { database:default, table:srcpart, columnName:key}
+Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain formatted authorization select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain formatted authorization select * from src join srcpart
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+PREHOOK: query: explain authorization use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain authorization use default
+POSTHOOK: type: SWITCHDATABASE
+INPUTS:
+ database:default
+OUTPUTS:
+CURRENT_USER:
+ hive_test_user
+OPERATION:
+ SWITCHDATABASE
+PREHOOK: query: explain formatted authorization use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain formatted authorization use default
+POSTHOOK: type: SWITCHDATABASE
+{"CURRENT_USER":"hive_test_user","OPERATION":"SWITCHDATABASE","INPUTS":["database:default"],"OUTPUTS":[]}

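The java1.7.out and java1.8.out files above are identical except for the key order of the trailing JSON object ({"OUTPUTS":...,"INPUTS":...} versus {"CURRENT_USER":...,"OPERATION":...}): the formatted authorization output is built from a hash-based map, so its serialization order tracks the JDK's HashMap iteration order. A minimal sketch of how the same puts can serialize differently across JDKs (the hand-rolled serialization is illustrative, not Hive's formatting code):

import java.util.HashMap;
import java.util.Map;

public class JsonKeyOrderDemo {
    public static void main(String[] args) {
        // The same four puts; HashMap iteration order can differ between
        // JDK 7 and JDK 8, so JSON built by walking the map differs too.
        Map<String, String> sections = new HashMap<String, String>();
        sections.put("INPUTS", "[\"database:default\"]");
        sections.put("OUTPUTS", "[]");
        sections.put("CURRENT_USER", "\"hive_test_user\"");
        sections.put("OPERATION", "\"SWITCHDATABASE\"");

        StringBuilder json = new StringBuilder("{");
        for (Map.Entry<String, String> e : sections.entrySet()) {
            if (json.length() > 1) {
                json.append(",");
            }
            json.append("\"").append(e.getKey()).append("\":").append(e.getValue());
        }
        System.out.println(json.append("}"));
    }
}
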
Added: hive/trunk/ql/src/test/results/clientpositive/avro_date.q.java1.7.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/avro_date.q.java1.7.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/avro_date.q.java1.7.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/avro_date.q.java1.7.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,130 @@
+PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE avro_date_staging
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE avro_date_staging
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_date
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_date
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_date_casts
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_date_casts
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string, date>, l1 array<date>)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_date_staging
+POSTHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string, date>, l1 array<date>)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_date_staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_date_staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_date_staging
+PREHOOK: query: CREATE TABLE avro_date (d date, m1 map<string, date>, l1 array<date>)
+ PARTITIONED BY (p1 int, p2 date)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_date
+POSTHOOK: query: CREATE TABLE avro_date (d date, m1 map<string, date>, l1 array<date>)
+ PARTITIONED BY (p1 int, p2 date)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_date
+PREHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date_staging
+PREHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
+POSTHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date_staging
+POSTHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).d SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:d, type:date, comment:null), ]
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).l1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:l1, type:array<date>, comment:null), ]
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).m1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:m1, type:map<string,date>, comment:null), ]
+PREHOOK: query: SELECT * FROM avro_date
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21 {"foo":"1980-12-16","bar":"1998-05-07"} ["2011-09-04","2011-09-05"] 2 2014-09-26
+2014-02-11 {"baz":"1981-12-16"} ["2011-09-05"] 2 2014-09-26
+1947-02-11 {"baz":"1921-12-16"} ["2011-09-05"] 2 2014-09-26
+8200-02-11 {"baz":"6981-12-16"} ["1039-09-05"] 2 2014-09-26
+PREHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+1947-02-11 1
+2012-02-21 1
+2014-02-11 1
+8200-02-11 1
+PREHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21 {"foo":"1980-12-16","bar":"1998-05-07"} ["2011-09-04","2011-09-05"] 2 2014-09-26
+2014-02-11 {"baz":"1981-12-16"} ["2011-09-05"] 2 2014-09-26
+8200-02-11 {"baz":"6981-12-16"} ["1039-09-05"] 2 2014-09-26
+PREHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21 {"foo":"1980-12-16","bar":"1998-05-07"} ["2011-09-04","2011-09-05"] 2 2014-09-26
+2014-02-11 {"baz":"1981-12-16"} ["2011-09-05"] 2 2014-09-26
+1947-02-11 {"baz":"1921-12-16"} ["2011-09-05"] 2 2014-09-26
+PREHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+8200-02-11 {"baz":"6981-12-16"} ["1039-09-05"] 2 2014-09-26

Added: hive/trunk/ql/src/test/results/clientpositive/avro_date.q.java1.8.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/avro_date.q.java1.8.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/avro_date.q.java1.8.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/avro_date.q.java1.8.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,130 @@
+PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE avro_date_staging
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE avro_date_staging
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_date
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_date
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_date_casts
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_date_casts
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string, date>, l1 array<date>)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_date_staging
+POSTHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string, date>, l1 array<date>)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_date_staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_date_staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_date_staging
+PREHOOK: query: CREATE TABLE avro_date (d date, m1 map<string, date>, l1 array<date>)
+ PARTITIONED BY (p1 int, p2 date)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_date
+POSTHOOK: query: CREATE TABLE avro_date (d date, m1 map<string, date>, l1 array<date>)
+ PARTITIONED BY (p1 int, p2 date)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_date
+PREHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date_staging
+PREHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
+POSTHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date_staging
+POSTHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).d SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:d, type:date, comment:null), ]
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).l1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:l1, type:array<date>, comment:null), ]
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).m1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:m1, type:map<string,date>, comment:null), ]
+PREHOOK: query: SELECT * FROM avro_date
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21 {"bar":"1998-05-07","foo":"1980-12-16"} ["2011-09-04","2011-09-05"] 2 2014-09-26
+2014-02-11 {"baz":"1981-12-16"} ["2011-09-05"] 2 2014-09-26
+1947-02-11 {"baz":"1921-12-16"} ["2011-09-05"] 2 2014-09-26
+8200-02-11 {"baz":"6981-12-16"} ["1039-09-05"] 2 2014-09-26
+PREHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+1947-02-11 1
+2012-02-21 1
+2014-02-11 1
+8200-02-11 1
+PREHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21 {"bar":"1998-05-07","foo":"1980-12-16"} ["2011-09-04","2011-09-05"] 2 2014-09-26
+2014-02-11 {"baz":"1981-12-16"} ["2011-09-05"] 2 2014-09-26
+8200-02-11 {"baz":"6981-12-16"} ["1039-09-05"] 2 2014-09-26
+PREHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21 {"bar":"1998-05-07","foo":"1980-12-16"} ["2011-09-04","2011-09-05"] 2 2014-09-26
+2014-02-11 {"baz":"1981-12-16"} ["2011-09-05"] 2 2014-09-26
+1947-02-11 {"baz":"1921-12-16"} ["2011-09-05"] 2 2014-09-26
+PREHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+8200-02-11 {"baz":"6981-12-16"} ["1039-09-05"] 2 2014-09-26

Added: hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.7.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.7.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.7.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.7.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,57 @@
+PREHOOK: query: -- These test attempts to deserialize an Avro file that contains map null values, and the file schema
+-- vs record schema have the null values in different positions
+-- i.e.
+-- fileSchema = [{ "type" : "map", "values" : ["string","null"]}, "null"]
+-- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE IF EXISTS avro_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- These test attempts to deserialize an Avro file that contains map null values, and the file schema
+-- vs record schema have the null values in different positions
+-- i.e.
+-- fileSchema = [{ "type" : "map", "values" : ["string","null"]}, "null"]
+-- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE IF EXISTS avro_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_table
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_table
+PREHOOK: query: SELECT * FROM avro_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_table
+#### A masked pattern was here ####
+{"key4":null,"key3":"val3"}
+{"key4":null,"key3":"val3"}
+{"key2":"val2","key1":null}
+{"key4":null,"key3":"val3"}
+{"key4":null,"key3":"val3"}
+PREHOOK: query: DROP TABLE avro_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@avro_table
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: DROP TABLE avro_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@avro_table
+POSTHOOK: Output: default@avro_table

Added: hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.8.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.8.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.8.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.8.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,57 @@
+PREHOOK: query: -- These test attempts to deserialize an Avro file that contains map null values, and the file schema
+-- vs record schema have the null values in different positions
+-- i.e.
+-- fileSchema = [{ "type" : "map", "values" : ["string","null"]}, "null"]
+-- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE IF EXISTS avro_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- These test attempts to deserialize an Avro file that contains map null values, and the file schema
+-- vs record schema have the null values in different positions
+-- i.e.
+-- fileSchema = [{ "type" : "map", "values" : ["string","null"]}, "null"]
+-- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE IF EXISTS avro_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_table
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_table
+PREHOOK: query: SELECT * FROM avro_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_table
+#### A masked pattern was here ####
+{"key3":"val3","key4":null}
+{"key3":"val3","key4":null}
+{"key1":null,"key2":"val2"}
+{"key3":"val3","key4":null}
+{"key3":"val3","key4":null}
+PREHOOK: query: DROP TABLE avro_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@avro_table
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: DROP TABLE avro_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@avro_table
+POSTHOOK: Output: default@avro_table

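Here the two golden files differ only in map key order within each row ({"key4":null,"key3":"val3"} on JDK 7 versus {"key3":"val3","key4":null} on JDK 8): the Avro deserializer hands Hive a hash-based map, and the row is printed in its iteration order. Because the map contents are equal either way, the commit keeps one golden file per JDK rather than changing Hive's output; a sorted rendering would also have been version-independent, as this illustrative sketch shows (TreeMap is not what Hive does):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class MapRenderDemo {
    public static void main(String[] args) {
        Map<String, String> fromAvro = new HashMap<String, String>();
        fromAvro.put("key3", "val3");
        fromAvro.put("key4", null);
        // Hash-based map: printed order follows the hash table layout and
        // can differ between JDK 7 and JDK 8, mirroring the two golden files.
        System.out.println(fromAvro);
        // Sorted copy: identical rendering on every JDK (illustrative only).
        System.out.println(new TreeMap<String, String>(fromAvro));
    }
}
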
Added: hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.7.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.7.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.7.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.7.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,179 @@
+PREHOOK: query: -- Verify that nullable fields properly work
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+CREATE TABLE test_serializer(string1 STRING,
+ int1 INT,
+ tinyint1 TINYINT,
+ smallint1 SMALLINT,
+ bigint1 BIGINT,
+ boolean1 BOOLEAN,
+ float1 FLOAT,
+ double1 DOUBLE,
+ list1 ARRAY<STRING>,
+ map1 MAP<STRING,INT>,
+ struct1 STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>,
+ enum1 STRING,
+ nullableint INT,
+ bytes1 BINARY,
+ fixed1 BINARY)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
+ STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_serializer
+POSTHOOK: query: -- Verify that nullable fields properly work
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+CREATE TABLE test_serializer(string1 STRING,
+ int1 INT,
+ tinyint1 TINYINT,
+ smallint1 SMALLINT,
+ bigint1 BIGINT,
+ boolean1 BOOLEAN,
+ float1 FLOAT,
+ double1 DOUBLE,
+ list1 ARRAY<STRING>,
+ map1 MAP<STRING,INT>,
+ struct1 STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>,
+ enum1 STRING,
+ nullableint INT,
+ bytes1 BINARY,
+ fixed1 BINARY)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
+ STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_serializer
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@test_serializer
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@test_serializer
+PREHOOK: query: CREATE TABLE as_avro
+ ROW FORMAT
+ SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
+ STORED AS
+ INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
+ TBLPROPERTIES (
+ 'avro.schema.literal'='{
+ "namespace": "com.howdy",
+ "name": "some_schema",
+ "type": "record",
+ "fields": [
+ { "name": "string1", "type": ["null", "string"] },
+ { "name": "int1", "type": ["null", "int"] },
+ { "name": "tinyint1", "type": ["null", "int"] },
+ { "name": "smallint1", "type": ["null", "int"] },
+ { "name": "bigint1", "type": ["null", "long"] },
+ { "name": "boolean1", "type": ["null", "boolean"] },
+ { "name": "float1", "type": ["null", "float"] },
+ { "name": "double1", "type": ["null", "double"] },
+ { "name": "list1", "type": ["null", {"type": "array", "items": "string"}] },
+ { "name": "map1", "type": ["null", {"type": "map", "values": "int"}] },
+ { "name": "struct1", "type": ["null", {"type": "record", "name": "struct1_name", "fields": [
+ { "name": "sInt", "type": "int" },
+ { "name": "sBoolean", "type": "boolean" },
+ { "name": "sString", "type": "string" }
+ ]}] },
+ { "name": "enum1", "type": ["null", {"type": "enum", "name": "enum1_values", "symbols": ["BLUE", "RED", "GREEN"]}] },
+ { "name": "nullableint", "type": ["null", "int"] },
+ { "name": "bytes1", "type": ["null", "bytes"] },
+ { "name": "fixed1", "type": ["null", {"type": "fixed", "name": "threebytes", "size": 3}] }
+ ]
+ }'
+ )
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@as_avro
+POSTHOOK: query: CREATE TABLE as_avro
+ ROW FORMAT
+ SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
+ STORED AS
+ INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
+ TBLPROPERTIES (
+ 'avro.schema.literal'='{
+ "namespace": "com.howdy",
+ "name": "some_schema",
+ "type": "record",
+ "fields": [
+ { "name": "string1", "type": ["null", "string"] },
+ { "name": "int1", "type": ["null", "int"] },
+ { "name": "tinyint1", "type": ["null", "int"] },
+ { "name": "smallint1", "type": ["null", "int"] },
+ { "name": "bigint1", "type": ["null", "long"] },
+ { "name": "boolean1", "type": ["null", "boolean"] },
+ { "name": "float1", "type": ["null", "float"] },
+ { "name": "double1", "type": ["null", "double"] },
+ { "name": "list1", "type": ["null", {"type": "array", "items": "string"}] },
+ { "name": "map1", "type": ["null", {"type": "map", "values": "int"}] },
+ { "name": "struct1", "type": ["null", {"type": "record", "name": "struct1_name", "fields": [
+ { "name": "sInt", "type": "int" },
+ { "name": "sBoolean", "type": "boolean" },
+ { "name": "sString", "type": "string" }
+ ]}] },
+ { "name": "enum1", "type": ["null", {"type": "enum", "name": "enum1_values", "symbols": ["BLUE", "RED", "GREEN"]}] },
+ { "name": "nullableint", "type": ["null", "int"] },
+ { "name": "bytes1", "type": ["null", "bytes"] },
+ { "name": "fixed1", "type": ["null", {"type": "fixed", "name": "threebytes", "size": 3}] }
+ ]
+ }'
+ )
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@as_avro
+PREHOOK: query: INSERT OVERWRITE TABLE as_avro SELECT * FROM test_serializer
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_serializer
+PREHOOK: Output: default@as_avro
+POSTHOOK: query: INSERT OVERWRITE TABLE as_avro SELECT * FROM test_serializer
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_serializer
+POSTHOOK: Output: default@as_avro
+POSTHOOK: Lineage: as_avro.bigint1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: as_avro.boolean1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:boolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: as_avro.bytes1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:bytes1, type:binary, comment:null), ]
+POSTHOOK: Lineage: as_avro.double1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: as_avro.enum1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:enum1, type:string, comment:null), ]
+POSTHOOK: Lineage: as_avro.fixed1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:fixed1, type:binary, comment:null), ]
+POSTHOOK: Lineage: as_avro.float1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: as_avro.int1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: as_avro.list1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:list1, type:array<string>, comment:null), ]
+POSTHOOK: Lineage: as_avro.map1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:map1, type:map<string,int>, comment:null), ]
+POSTHOOK: Lineage: as_avro.nullableint SIMPLE [(test_serializer)test_serializer.FieldSchema(name:nullableint, type:int, comment:null), ]
+POSTHOOK: Lineage: as_avro.smallint1 EXPRESSION [(test_serializer)test_serializer.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: as_avro.string1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: as_avro.struct1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:struct1, type:struct<sint:int,sboolean:boolean,sstring:string>, comment:null), ]
+POSTHOOK: Lineage: as_avro.tinyint1 EXPRESSION [(test_serializer)test_serializer.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+PREHOOK: query: SELECT * FROM as_avro
+PREHOOK: type: QUERY
+PREHOOK: Input: default@as_avro
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM as_avro
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@as_avro
+#### A masked pattern was here ####
+why hello there 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+another record 98 4 101 9999999 false 99.89 9.0E-8 ["beta"] {"Earth":101} {"sint":1134,"sboolean":false,"sstring":"wazzup"} RED NULL  ef
+third record 45 5 102 999999999 true 89.99 9.0E-14 ["alpha","gamma"] {"Earth":237,"Bob":723} {"sint":102,"sboolean":false,"sstring":"BNL"} GREEN NULL  hi
+NULL 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string NULL 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 NULL 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 NULL 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 NULL true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 NULL 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 true NULL 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 true 42.43 NULL ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 true 42.43 85.23423424 NULL {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] NULL {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} NULL BLUE 72  bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} NULL 72  bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE NULL  bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72 NULL bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Bob":31,"Control":86} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  NULL

Added: hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.8.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.8.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.8.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.8.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,179 @@
+PREHOOK: query: -- Verify that nullable fields properly work
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+CREATE TABLE test_serializer(string1 STRING,
+ int1 INT,
+ tinyint1 TINYINT,
+ smallint1 SMALLINT,
+ bigint1 BIGINT,
+ boolean1 BOOLEAN,
+ float1 FLOAT,
+ double1 DOUBLE,
+ list1 ARRAY<STRING>,
+ map1 MAP<STRING,INT>,
+ struct1 STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>,
+ enum1 STRING,
+ nullableint INT,
+ bytes1 BINARY,
+ fixed1 BINARY)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
+ STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_serializer
+POSTHOOK: query: -- Verify that nullable fields properly work
+
+-- JAVA_VERSION_SPECIFIC_OUTPUT
+
+CREATE TABLE test_serializer(string1 STRING,
+ int1 INT,
+ tinyint1 TINYINT,
+ smallint1 SMALLINT,
+ bigint1 BIGINT,
+ boolean1 BOOLEAN,
+ float1 FLOAT,
+ double1 DOUBLE,
+ list1 ARRAY<STRING>,
+ map1 MAP<STRING,INT>,
+ struct1 STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>,
+ enum1 STRING,
+ nullableint INT,
+ bytes1 BINARY,
+ fixed1 BINARY)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
+ STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_serializer
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@test_serializer
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@test_serializer
+PREHOOK: query: CREATE TABLE as_avro
+ ROW FORMAT
+ SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
+ STORED AS
+ INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
+ TBLPROPERTIES (
+ 'avro.schema.literal'='{
+ "namespace": "com.howdy",
+ "name": "some_schema",
+ "type": "record",
+ "fields": [
+ { "name": "string1", "type": ["null", "string"] },
+ { "name": "int1", "type": ["null", "int"] },
+ { "name": "tinyint1", "type": ["null", "int"] },
+ { "name": "smallint1", "type": ["null", "int"] },
+ { "name": "bigint1", "type": ["null", "long"] },
+ { "name": "boolean1", "type": ["null", "boolean"] },
+ { "name": "float1", "type": ["null", "float"] },
+ { "name": "double1", "type": ["null", "double"] },
+ { "name": "list1", "type": ["null", {"type": "array", "items": "string"}] },
+ { "name": "map1", "type": ["null", {"type": "map", "values": "int"}] },
+ { "name": "struct1", "type": ["null", {"type": "record", "name": "struct1_name", "fields": [
+ { "name": "sInt", "type": "int" },
+ { "name": "sBoolean", "type": "boolean" },
+ { "name": "sString", "type": "string" }
+ ]}] },
+ { "name": "enum1", "type": ["null", {"type": "enum", "name": "enum1_values", "symbols": ["BLUE", "RED", "GREEN"]}] },
+ { "name": "nullableint", "type": ["null", "int"] },
+ { "name": "bytes1", "type": ["null", "bytes"] },
+ { "name": "fixed1", "type": ["null", {"type": "fixed", "name": "threebytes", "size": 3}] }
+ ]
+ }'
+ )
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@as_avro
+POSTHOOK: query: CREATE TABLE as_avro
+ ROW FORMAT
+ SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
+ STORED AS
+ INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
+ TBLPROPERTIES (
+ 'avro.schema.literal'='{
+ "namespace": "com.howdy",
+ "name": "some_schema",
+ "type": "record",
+ "fields": [
+ { "name": "string1", "type": ["null", "string"] },
+ { "name": "int1", "type": ["null", "int"] },
+ { "name": "tinyint1", "type": ["null", "int"] },
+ { "name": "smallint1", "type": ["null", "int"] },
+ { "name": "bigint1", "type": ["null", "long"] },
+ { "name": "boolean1", "type": ["null", "boolean"] },
+ { "name": "float1", "type": ["null", "float"] },
+ { "name": "double1", "type": ["null", "double"] },
+ { "name": "list1", "type": ["null", {"type": "array", "items": "string"}] },
+ { "name": "map1", "type": ["null", {"type": "map", "values": "int"}] },
+ { "name": "struct1", "type": ["null", {"type": "record", "name": "struct1_name", "fields": [
+ { "name": "sInt", "type": "int" },
+ { "name": "sBoolean", "type": "boolean" },
+ { "name": "sString", "type": "string" }
+ ]}] },
+ { "name": "enum1", "type": ["null", {"type": "enum", "name": "enum1_values", "symbols": ["BLUE", "RED", "GREEN"]}] },
+ { "name": "nullableint", "type": ["null", "int"] },
+ { "name": "bytes1", "type": ["null", "bytes"] },
+ { "name": "fixed1", "type": ["null", {"type": "fixed", "name": "threebytes", "size": 3}] }
+ ]
+ }'
+ )
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@as_avro
+PREHOOK: query: INSERT OVERWRITE TABLE as_avro SELECT * FROM test_serializer
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_serializer
+PREHOOK: Output: default@as_avro
+POSTHOOK: query: INSERT OVERWRITE TABLE as_avro SELECT * FROM test_serializer
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_serializer
+POSTHOOK: Output: default@as_avro
+POSTHOOK: Lineage: as_avro.bigint1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: as_avro.boolean1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:boolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: as_avro.bytes1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:bytes1, type:binary, comment:null), ]
+POSTHOOK: Lineage: as_avro.double1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: as_avro.enum1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:enum1, type:string, comment:null), ]
+POSTHOOK: Lineage: as_avro.fixed1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:fixed1, type:binary, comment:null), ]
+POSTHOOK: Lineage: as_avro.float1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: as_avro.int1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: as_avro.list1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:list1, type:array<string>, comment:null), ]
+POSTHOOK: Lineage: as_avro.map1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:map1, type:map<string,int>, comment:null), ]
+POSTHOOK: Lineage: as_avro.nullableint SIMPLE [(test_serializer)test_serializer.FieldSchema(name:nullableint, type:int, comment:null), ]
+POSTHOOK: Lineage: as_avro.smallint1 EXPRESSION [(test_serializer)test_serializer.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: as_avro.string1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: as_avro.struct1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:struct1, type:struct<sint:int,sboolean:boolean,sstring:string>, comment:null), ]
+POSTHOOK: Lineage: as_avro.tinyint1 EXPRESSION [(test_serializer)test_serializer.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+PREHOOK: query: SELECT * FROM as_avro
+PREHOOK: type: QUERY
+PREHOOK: Input: default@as_avro
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM as_avro
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@as_avro
+#### A masked pattern was here ####
+why hello there 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+another record 98 4 101 9999999 false 99.89 9.0E-8 ["beta"] {"Earth":101} {"sint":1134,"sboolean":false,"sstring":"wazzup"} RED NULL  ef
+third record 45 5 102 999999999 true 89.99 9.0E-14 ["alpha","gamma"] {"Earth":237,"Bob":723} {"sint":102,"sboolean":false,"sstring":"BNL"} GREEN NULL  hi
+NULL 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string NULL 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 NULL 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 NULL 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 NULL true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 NULL 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 true NULL 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 true 42.43 NULL ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 true 42.43 85.23423424 NULL {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] NULL {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} NULL BLUE 72  bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} NULL 72  bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE NULL  bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72 NULL bc
+string 42 3 100 1412341 true 42.43 85.23423424 ["alpha","beta","gamma"] {"Earth":42,"Control":86,"Bob":31} {"sint":17,"sboolean":true,"sstring":"Abe Linkedin"} BLUE 72  NULL

Added: hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.java1.7.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.java1.7.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.java1.7.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.java1.7.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,130 @@
+PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE avro_timestamp_staging
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE avro_timestamp_staging
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_timestamp
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_timestamp
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_timestamp_casts
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_timestamp_casts
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_timestamp_staging (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_timestamp_staging
+POSTHOOK: query: CREATE TABLE avro_timestamp_staging (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_timestamp_staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_timestamp.txt' OVERWRITE INTO TABLE avro_timestamp_staging
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_timestamp_staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_timestamp.txt' OVERWRITE INTO TABLE avro_timestamp_staging
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_timestamp_staging
+PREHOOK: query: CREATE TABLE avro_timestamp (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+ PARTITIONED BY (p1 int, p2 timestamp)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_timestamp
+POSTHOOK: query: CREATE TABLE avro_timestamp (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+ PARTITIONED BY (p1 int, p2 timestamp)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_timestamp
+PREHOOK: query: INSERT OVERWRITE TABLE avro_timestamp PARTITION(p1=2, p2='2014-09-26 07:08:09.123') SELECT * FROM avro_timestamp_staging
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp_staging
+PREHOOK: Output: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+POSTHOOK: query: INSERT OVERWRITE TABLE avro_timestamp PARTITION(p1=2, p2='2014-09-26 07:08:09.123') SELECT * FROM avro_timestamp_staging
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp_staging
+POSTHOOK: Output: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).d SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:d, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).l1 SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:l1, type:array<timestamp>, comment:null), ]
+POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).m1 SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:m1, type:map<string,timestamp>, comment:null), ]
+PREHOOK: query: SELECT * FROM avro_timestamp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123 {"foo":"1980-12-16 07:08:09.123","bar":"1998-05-07 07:08:09.123"} ["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+2014-02-11 07:08:09.123 {"baz":"1981-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+1947-02-11 07:08:09.123 {"baz":"1921-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+8200-02-11 07:08:09.123 {"baz":"6981-12-16 07:08:09.123"} ["1039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+PREHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+1947-02-11 07:08:09.123 1
+2012-02-21 07:08:09.123 1
+2014-02-11 07:08:09.123 1
+8200-02-11 07:08:09.123 1
+PREHOOK: query: SELECT * FROM avro_timestamp WHERE d!='1947-02-11 07:08:09.123'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d!='1947-02-11 07:08:09.123'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123 {"foo":"1980-12-16 07:08:09.123","bar":"1998-05-07 07:08:09.123"} ["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+2014-02-11 07:08:09.123 {"baz":"1981-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+8200-02-11 07:08:09.123 {"baz":"6981-12-16 07:08:09.123"} ["1039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+PREHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123 {"foo":"1980-12-16 07:08:09.123","bar":"1998-05-07 07:08:09.123"} ["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+2014-02-11 07:08:09.123 {"baz":"1981-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+1947-02-11 07:08:09.123 {"baz":"1921-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+PREHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+8200-02-11 07:08:09.123 {"baz":"6981-12-16 07:08:09.123"} ["1039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123

Added: hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.java1.8.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.java1.8.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.java1.8.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/avro_timestamp.q.java1.8.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,130 @@
+PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE avro_timestamp_staging
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+DROP TABLE avro_timestamp_staging
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_timestamp
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_timestamp
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_timestamp_casts
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_timestamp_casts
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_timestamp_staging (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_timestamp_staging
+POSTHOOK: query: CREATE TABLE avro_timestamp_staging (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_timestamp_staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_timestamp.txt' OVERWRITE INTO TABLE avro_timestamp_staging
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_timestamp_staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_timestamp.txt' OVERWRITE INTO TABLE avro_timestamp_staging
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_timestamp_staging
+PREHOOK: query: CREATE TABLE avro_timestamp (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+ PARTITIONED BY (p1 int, p2 timestamp)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_timestamp
+POSTHOOK: query: CREATE TABLE avro_timestamp (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+ PARTITIONED BY (p1 int, p2 timestamp)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+ COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+ STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_timestamp
+PREHOOK: query: INSERT OVERWRITE TABLE avro_timestamp PARTITION(p1=2, p2='2014-09-26 07:08:09.123') SELECT * FROM avro_timestamp_staging
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp_staging
+PREHOOK: Output: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+POSTHOOK: query: INSERT OVERWRITE TABLE avro_timestamp PARTITION(p1=2, p2='2014-09-26 07:08:09.123') SELECT * FROM avro_timestamp_staging
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp_staging
+POSTHOOK: Output: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).d SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:d, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).l1 SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:l1, type:array<timestamp>, comment:null), ]
+POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).m1 SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:m1, type:map<string,timestamp>, comment:null), ]
+PREHOOK: query: SELECT * FROM avro_timestamp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123 {"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"} ["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+2014-02-11 07:08:09.123 {"baz":"1981-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+1947-02-11 07:08:09.123 {"baz":"1921-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+8200-02-11 07:08:09.123 {"baz":"6981-12-16 07:08:09.123"} ["1039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+PREHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+1947-02-11 07:08:09.123 1
+2012-02-21 07:08:09.123 1
+2014-02-11 07:08:09.123 1
+8200-02-11 07:08:09.123 1
+PREHOOK: query: SELECT * FROM avro_timestamp WHERE d!='1947-02-11 07:08:09.123'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d!='1947-02-11 07:08:09.123'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123 {"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"} ["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+2014-02-11 07:08:09.123 {"baz":"1981-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+8200-02-11 07:08:09.123 {"baz":"6981-12-16 07:08:09.123"} ["1039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+PREHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123 {"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"} ["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+2014-02-11 07:08:09.123 {"baz":"1981-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+1947-02-11 07:08:09.123 {"baz":"1921-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+PREHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+8200-02-11 07:08:09.123 {"baz":"6981-12-16 07:08:09.123"} ["1039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
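
Note: the java1.7 and java1.8 golden files above differ only in how maps are rendered — the 1.7 output prints {"foo":...,"bar":...} where the 1.8 output prints {"bar":...,"foo":...}. java.util.HashMap makes no ordering guarantee, and its internal table layout changed between JDK 7 and JDK 8, so any test output derived from HashMap iteration can flip between JDKs. A minimal sketch of the effect and the usual remedy (this demo class is illustrative only, not part of the patch):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class MapOrderDemo {
        public static void main(String[] args) {
            // HashMap iteration order is unspecified, and the internals
            // changed between JDK 7 and JDK 8, so the same puts can print
            // in a different order on each JDK.
            Map<String, String> m = new HashMap<String, String>();
            m.put("foo", "1980-12-16 07:08:09.123");
            m.put("bar", "1998-05-07 07:08:09.123");
            System.out.println(m); // order depends on the JDK

            // Wrapping in a TreeMap (or building a LinkedHashMap in a
            // fixed insertion order) makes the rendering deterministic.
            System.out.println(new TreeMap<String, String>(m)); // always {bar=..., foo=...}
        }
    }

Keeping one golden file per JDK, selected via the JAVA_VERSION_SPECIFIC_OUTPUT marker seen in these tests, sidesteps the problem for results that cannot easily be canonicalized.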

Added: hive/trunk/ql/src/test/results/clientpositive/input4.q.java1.7.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/input4.q.java1.7.out?rev=1647794&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/input4.q.java1.7.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/input4.q.java1.7.out Wed Dec 24 14:33:00 2014
@@ -0,0 +1,559 @@
+PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@INPUT4
+POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
+
+CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@INPUT4
+PREHOOK: query: EXPLAIN
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
+PREHOOK: type: LOAD
+POSTHOOK: query: EXPLAIN
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
+POSTHOOK: type: LOAD
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+ Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.input4
+
+ Stage: Stage-1
+ Stats-Aggr Operator
+
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@input4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@input4
+PREHOOK: query: EXPLAIN FORMATTED
+SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN FORMATTED
+SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
+POSTHOOK: type: QUERY
+{"STAGE PLANS":{"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"TableScan":{"alias:":"input4alias","children":{"Select Operator":{"expressions:":"value (type: string), key (type: string)","outputColumnNames:":["_col0","_col1"],"children":{"ListSink":{}},"Statistics:":"Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE"}},"Statistics:":"Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE"}}}}},"STAGE DEPENDENCIES":{"Stage-0":{"ROOT STAGE":"TRUE"}}}
+PREHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
+PREHOOK: type: QUERY
+PREHOOK: Input: default@input4
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@input4
+#### A masked pattern was here ####
+val_238 238
+val_86 86
+val_311 311
+val_27 27
+val_165 165
+val_409 409
+val_255 255
+val_278 278
+val_98 98
+val_484 484
+val_265 265
+val_193 193
+val_401 401
+val_150 150
+val_273 273
+val_224 224
+val_369 369
+val_66 66
+val_128 128
+val_213 213
+val_146 146
+val_406 406
+val_429 429
+val_374 374
+val_152 152
+val_469 469
+val_145 145
+val_495 495
+val_37 37
+val_327 327
+val_281 281
+val_277 277
+val_209 209
+val_15 15
+val_82 82
+val_403 403
+val_166 166
+val_417 417
+val_430 430
+val_252 252
+val_292 292
+val_219 219
+val_287 287
+val_153 153
+val_193 193
+val_338 338
+val_446 446
+val_459 459
+val_394 394
+val_237 237
+val_482 482
+val_174 174
+val_413 413
+val_494 494
+val_207 207
+val_199 199
+val_466 466
+val_208 208
+val_174 174
+val_399 399
+val_396 396
+val_247 247
+val_417 417
+val_489 489
+val_162 162
+val_377 377
+val_397 397
+val_309 309
+val_365 365
+val_266 266
+val_439 439
+val_342 342
+val_367 367
+val_325 325
+val_167 167
+val_195 195
+val_475 475
+val_17 17
+val_113 113
+val_155 155
+val_203 203
+val_339 339
+val_0 0
+val_455 455
+val_128 128
+val_311 311
+val_316 316
+val_57 57
+val_302 302
+val_205 205
+val_149 149
+val_438 438
+val_345 345
+val_129 129
+val_170 170
+val_20 20
+val_489 489
+val_157 157
+val_378 378
+val_221 221
+val_92 92
+val_111 111
+val_47 47
+val_72 72
+val_4 4
+val_280 280
+val_35 35
+val_427 427
+val_277 277
+val_208 208
+val_356 356
+val_399 399
+val_169 169
+val_382 382
+val_498 498
+val_125 125
+val_386 386
+val_437 437
+val_469 469
+val_192 192
+val_286 286
+val_187 187
+val_176 176
+val_54 54
+val_459 459
+val_51 51
+val_138 138
+val_103 103
+val_239 239
+val_213 213
+val_216 216
+val_430 430
+val_278 278
+val_176 176
+val_289 289
+val_221 221
+val_65 65
+val_318 318
+val_332 332
+val_311 311
+val_275 275
+val_137 137
+val_241 241
+val_83 83
+val_333 333
+val_180 180
+val_284 284
+val_12 12
+val_230 230
+val_181 181
+val_67 67
+val_260 260
+val_404 404
+val_384 384
+val_489 489
+val_353 353
+val_373 373
+val_272 272
+val_138 138
+val_217 217
+val_84 84
+val_348 348
+val_466 466
+val_58 58
+val_8 8
+val_411 411
+val_230 230
+val_208 208
+val_348 348
+val_24 24
+val_463 463
+val_431 431
+val_179 179
+val_172 172
+val_42 42
+val_129 129
+val_158 158
+val_119 119
+val_496 496
+val_0 0
+val_322 322
+val_197 197
+val_468 468
+val_393 393
+val_454 454
+val_100 100
+val_298 298
+val_199 199
+val_191 191
+val_418 418
+val_96 96
+val_26 26
+val_165 165
+val_327 327
+val_230 230
+val_205 205
+val_120 120
+val_131 131
+val_51 51
+val_404 404
+val_43 43
+val_436 436
+val_156 156
+val_469 469
+val_468 468
+val_308 308
+val_95 95
+val_196 196
+val_288 288
+val_481 481
+val_457 457
+val_98 98
+val_282 282
+val_197 197
+val_187 187
+val_318 318
+val_318 318
+val_409 409
+val_470 470
+val_137 137
+val_369 369
+val_316 316
+val_169 169
+val_413 413
+val_85 85
+val_77 77
+val_0 0
+val_490 490
+val_87 87
+val_364 364
+val_179 179
+val_118 118
+val_134 134
+val_395 395
+val_282 282
+val_138 138
+val_238 238
+val_419 419
+val_15 15
+val_118 118
+val_72 72
+val_90 90
+val_307 307
+val_19 19
+val_435 435
+val_10 10
+val_277 277
+val_273 273
+val_306 306
+val_224 224
+val_309 309
+val_389 389
+val_327 327
+val_242 242
+val_369 369
+val_392 392
+val_272 272
+val_331 331
+val_401 401
+val_242 242
+val_452 452
+val_177 177
+val_226 226
+val_5 5
+val_497 497
+val_402 402
+val_396 396
+val_317 317
+val_395 395
+val_58 58
+val_35 35
+val_336 336
+val_95 95
+val_11 11
+val_168 168
+val_34 34
+val_229 229
+val_233 233
+val_143 143
+val_472 472
+val_322 322
+val_498 498
+val_160 160
+val_195 195
+val_42 42
+val_321 321
+val_430 430
+val_119 119
+val_489 489
+val_458 458
+val_78 78
+val_76 76
+val_41 41
+val_223 223
+val_492 492
+val_149 149
+val_449 449
+val_218 218
+val_228 228
+val_138 138
+val_453 453
+val_30 30
+val_209 209
+val_64 64
+val_468 468
+val_76 76
+val_74 74
+val_342 342
+val_69 69
+val_230 230
+val_33 33
+val_368 368
+val_103 103
+val_296 296
+val_113 113
+val_216 216
+val_367 367
+val_344 344
+val_167 167
+val_274 274
+val_219 219
+val_239 239
+val_485 485
+val_116 116
+val_223 223
+val_256 256
+val_263 263
+val_70 70
+val_487 487
+val_480 480
+val_401 401
+val_288 288
+val_191 191
+val_5 5
+val_244 244
+val_438 438
+val_128 128
+val_467 467
+val_432 432
+val_202 202
+val_316 316
+val_229 229
+val_469 469
+val_463 463
+val_280 280
+val_2 2
+val_35 35
+val_283 283
+val_331 331
+val_235 235
+val_80 80
+val_44 44
+val_193 193
+val_321 321
+val_335 335
+val_104 104
+val_466 466
+val_366 366
+val_175 175
+val_403 403
+val_483 483
+val_53 53
+val_105 105
+val_257 257
+val_406 406
+val_409 409
+val_190 190
+val_406 406
+val_401 401
+val_114 114
+val_258 258
+val_90 90
+val_203 203
+val_262 262
+val_348 348
+val_424 424
+val_12 12
+val_396 396
+val_201 201
+val_217 217
+val_164 164
+val_431 431
+val_454 454
+val_478 478
+val_298 298
+val_125 125
+val_431 431
+val_164 164
+val_424 424
+val_187 187
+val_382 382
+val_5 5
+val_70 70
+val_397 397
+val_480 480
+val_291 291
+val_24 24
+val_351 351
+val_255 255
+val_104 104
+val_70 70
+val_163 163
+val_438 438
+val_119 119
+val_414 414
+val_200 200
+val_491 491
+val_237 237
+val_439 439
+val_360 360
+val_248 248
+val_479 479
+val_305 305
+val_417 417
+val_199 199
+val_444 444
+val_120 120
+val_429 429
+val_169 169
+val_443 443
+val_323 323
+val_325 325
+val_277 277
+val_230 230
+val_478 478
+val_178 178
+val_468 468
+val_310 310
+val_317 317
+val_333 333
+val_493 493
+val_460 460
+val_207 207
+val_249 249
+val_265 265
+val_480 480
+val_83 83
+val_136 136
+val_353 353
+val_172 172
+val_214 214
+val_462 462
+val_233 233
+val_406 406
+val_133 133
+val_175 175
+val_189 189
+val_454 454
+val_375 375
+val_401 401
+val_421 421
+val_407 407
+val_384 384
+val_256 256
+val_26 26
+val_134 134
+val_67 67
+val_384 384
+val_379 379
+val_18 18
+val_462 462
+val_492 492
+val_100 100
+val_298 298
+val_9 9
+val_341 341
+val_498 498
+val_146 146
+val_458 458
+val_362 362
+val_186 186
+val_285 285
+val_348 348
+val_167 167
+val_18 18
+val_273 273
+val_183 183
+val_281 281
+val_344 344
+val_97 97
+val_469 469
+val_315 315
+val_84 84
+val_28 28
+val_37 37
+val_448 448
+val_152 152
+val_348 348
+val_307 307
+val_194 194
+val_414 414
+val_477 477
+val_222 222
+val_126 126
+val_90 90
+val_169 169
+val_403 403
+val_400 400
+val_200 200
+val_97 97
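
Note: the EXPLAIN FORMATTED result above is a single JSON object, and JSON key order is another place where HashMap-backed rendering diverges between JDKs, which is why input4.q now carries version-specific golden files as well. A hedged sketch of one way to make such output JDK-independent by sorting keys before rendering (the toJson helper below is hypothetical and does no escaping; it is not the mechanism the patch uses):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class StableJson {
        // Render a flat string map as JSON with keys in sorted order, so
        // the text is byte-identical on any JDK. Sketch only: values are
        // not escaped.
        static String toJson(Map<String, String> m) {
            StringBuilder sb = new StringBuilder("{");
            boolean first = true;
            for (Map.Entry<String, String> e : new TreeMap<String, String>(m).entrySet()) {
                if (!first) sb.append(',');
                sb.append('"').append(e.getKey()).append("\":\"")
                  .append(e.getValue()).append('"');
                first = false;
            }
            return sb.append('}').toString();
        }

        public static void main(String[] args) {
            Map<String, String> stage = new HashMap<String, String>();
            stage.put("ROOT STAGE", "TRUE");
            System.out.println(toJson(stage)); // {"ROOT STAGE":"TRUE"}
        }
    }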


  • Brock at Dec 24, 2014 at 2:33 pm
    Added: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out?rev=1647794&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out Wed Dec 24 14:33:00 2014
    @@ -0,0 +1,1127 @@
    +PREHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
    +-- The following explains merge example used in this test case
    +-- DML will generate 2 partitions
    +-- ds=2008-04-08/hr=a1
    +-- ds=2008-04-08/hr=b1
    +-- without merge, each partition has more files
    +-- ds=2008-04-08/hr=a1 has 2 files
    +-- ds=2008-04-08/hr=b1 has 6 files
    +-- with merge each partition has fewer files
    +-- ds=2008-04-08/hr=a1 has 1 files
    +-- ds=2008-04-08/hr=b1 has 4 files
    +-- The following shows file size and name in each directory
    +-- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
    +-- without merge
    +-- 155 000000_0
    +-- 155 000001_0
    +-- with merge
    +-- 254 000000_0
    +-- hr=b1/key=103/value=val_103:
    +-- without merge
    +-- 99 000000_0
    +-- 99 000001_0
    +-- with merge
    +-- 142 000001_0
    +-- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
    +-- without merge
    +-- 5181 000000_0
    +-- 5181 000001_0
    +-- with merge
    +-- 5181 000000_0
    +-- 5181 000001_0
    +-- hr=b1/key=484/value=val_484
    +-- without merge
    +-- 87 000000_0
    +-- 87 000001_0
    +-- with merge
    +-- 118 000002_0
    +
    +-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
    +-- SORT_QUERY_RESULTS
    +-- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +-- create a skewed table
    +create table list_bucketing_dynamic_part (key String, value String)
    + partitioned by (ds String, hr String)
    + skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
    + stored as DIRECTORIES
    + STORED AS RCFILE
    +PREHOOK: type: CREATETABLE
    +PREHOOK: Output: database:default
    +PREHOOK: Output: default@list_bucketing_dynamic_part
    +POSTHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
    +-- The following explains merge example used in this test case
    +-- DML will generate 2 partitions
    +-- ds=2008-04-08/hr=a1
    +-- ds=2008-04-08/hr=b1
    +-- without merge, each partition has more files
    +-- ds=2008-04-08/hr=a1 has 2 files
    +-- ds=2008-04-08/hr=b1 has 6 files
    +-- with merge each partition has fewer files
    +-- ds=2008-04-08/hr=a1 has 1 files
    +-- ds=2008-04-08/hr=b1 has 4 files
    +-- The following shows file size and name in each directory
    +-- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
    +-- without merge
    +-- 155 000000_0
    +-- 155 000001_0
    +-- with merge
    +-- 254 000000_0
    +-- hr=b1/key=103/value=val_103:
    +-- without merge
    +-- 99 000000_0
    +-- 99 000001_0
    +-- with merge
    +-- 142 000001_0
    +-- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
    +-- without merge
    +-- 5181 000000_0
    +-- 5181 000001_0
    +-- with merge
    +-- 5181 000000_0
    +-- 5181 000001_0
    +-- hr=b1/key=484/value=val_484
    +-- without merge
    +-- 87 000000_0
    +-- 87 000001_0
    +-- with merge
    +-- 118 000002_0
    +
    +-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
    +-- SORT_QUERY_RESULTS
    +-- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +-- create a skewed table
    +create table list_bucketing_dynamic_part (key String, value String)
    + partitioned by (ds String, hr String)
    + skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
    + stored as DIRECTORIES
    + STORED AS RCFILE
    +POSTHOOK: type: CREATETABLE
    +POSTHOOK: Output: database:default
    +POSTHOOK: Output: default@list_bucketing_dynamic_part
    +PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
    +explain extended
    +insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
    +explain extended
    +insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    +
    +TOK_QUERY
    + TOK_FROM
    + TOK_TABREF
    + TOK_TABNAME
    + srcpart
    + TOK_INSERT
    + TOK_DESTINATION
    + TOK_TAB
    + TOK_TABNAME
    + list_bucketing_dynamic_part
    + TOK_PARTSPEC
    + TOK_PARTVAL
    + ds
    + '2008-04-08'
    + TOK_PARTVAL
    + hr
    + TOK_SELECT
    + TOK_SELEXPR
    + TOK_TABLE_OR_COL
    + key
    + TOK_SELEXPR
    + TOK_TABLE_OR_COL
    + value
    + TOK_SELEXPR
    + TOK_FUNCTION
    + if
    + ==
    + %
    + TOK_TABLE_OR_COL
    + key
    + 100
    + 0
    + 'a1'
    + 'b1'
    + TOK_WHERE
    + =
    + TOK_TABLE_OR_COL
    + ds
    + '2008-04-08'
    +
    +
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-0 depends on stages: Stage-1
    + Stage-2 depends on stages: Stage-0
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Map Operator Tree:
    + TableScan
    + alias: srcpart
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    + GatherStats: false
    + Select Operator
    + expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
    + outputColumnNames: _col0, _col1, _col2
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    + File Output Operator
    + compressed: false
    + GlobalTableId: 1
    +#### A masked pattern was here ####
    + NumFilesPerFileSink: 1
    + Static Partition Specification: ds=2008-04-08/
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    +#### A masked pattern was here ####
    + table:
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + TotalFiles: 1
    + GatherStats: true
    + MultiFileSpray: false
    + Path -> Alias:
    +#### A masked pattern was here ####
    + Path -> Partition:
    +#### A masked pattern was here ####
    + Partition
    + base file name: hr=11
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr 11
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + numFiles 1
    + numRows 500
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 5312
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + totalSize 5812
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + name: default.srcpart
    + name: default.srcpart
    +#### A masked pattern was here ####
    + Partition
    + base file name: hr=12
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr 12
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + numFiles 1
    + numRows 500
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 5312
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + totalSize 5812
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + name: default.srcpart
    + name: default.srcpart
    + Truncated Path -> Alias:
    + /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:srcpart]
    + /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:srcpart]
    +
    + Stage: Stage-0
    + Move Operator
    + tables:
    + partition:
    + ds 2008-04-08
    + hr
    + replace: true
    +#### A masked pattern was here ####
    + table:
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    +
    + Stage: Stage-2
    + Stats-Aggr Operator
    +#### A masked pattern was here ####
    +
    +PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@srcpart
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
    +POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@srcpart
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
    +PREHOOK: query: -- check DML result
    +show partitions list_bucketing_dynamic_part
    +PREHOOK: type: SHOWPARTITIONS
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: -- check DML result
    +show partitions list_bucketing_dynamic_part
    +POSTHOOK: type: SHOWPARTITIONS
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +ds=2008-04-08/hr=a1
    +ds=2008-04-08/hr=b1
    +PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
    +PREHOOK: type: DESCTABLE
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
    +POSTHOOK: type: DESCTABLE
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +# col_name data_type comment
    +
    +key string
    +value string
    +
    +# Partition Information
    +# col_name data_type comment
    +
    +ds string
    +hr string
    +
    +# Detailed Partition Information
    +Partition Value: [2008-04-08, a1]
    +Database: default
    +Table: list_bucketing_dynamic_part
    +#### A masked pattern was here ####
    +Protect Mode: None
    +#### A masked pattern was here ####
    +Partition Parameters:
    + COLUMN_STATS_ACCURATE true
    + numFiles 2
    + numRows 16
    + rawDataSize 136
    + totalSize 310
    +#### A masked pattern was here ####
    +
    +# Storage Information
    +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    +Compressed: No
    +Num Buckets: -1
    +Bucket Columns: []
    +Sort Columns: []
    +Stored As SubDirectories: Yes
    +Skewed Columns: [key, value]
    +Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
    +Storage Desc Params:
    + serialization.format 1
    +PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
    +PREHOOK: type: DESCTABLE
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
    +POSTHOOK: type: DESCTABLE
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +# col_name data_type comment
    +
    +key string
    +value string
    +
    +# Partition Information
    +# col_name data_type comment
    +
    +ds string
    +hr string
    +
    +# Detailed Partition Information
    +Partition Value: [2008-04-08, b1]
    +Database: default
    +Table: list_bucketing_dynamic_part
    +#### A masked pattern was here ####
    +Protect Mode: None
    +#### A masked pattern was here ####
    +Partition Parameters:
    + COLUMN_STATS_ACCURATE true
    + numFiles 6
    + numRows 984
    + rawDataSize 9488
    + totalSize 10734
    +#### A masked pattern was here ####
    +
    +# Storage Information
    +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    +Compressed: No
    +Num Buckets: -1
    +Bucket Columns: []
    +Sort Columns: []
    +Stored As SubDirectories: Yes
    +Skewed Columns: [key, value]
    +Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
    +#### A masked pattern was here ####
    +Skewed Value to Truncated Path: {[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}
    +Storage Desc Params:
    + serialization.format 1
    +PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
    +explain extended
    +insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
    +explain extended
    +insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    +
    +TOK_QUERY
    + TOK_FROM
    + TOK_TABREF
    + TOK_TABNAME
    + srcpart
    + TOK_INSERT
    + TOK_DESTINATION
    + TOK_TAB
    + TOK_TABNAME
    + list_bucketing_dynamic_part
    + TOK_PARTSPEC
    + TOK_PARTVAL
    + ds
    + '2008-04-08'
    + TOK_PARTVAL
    + hr
    + TOK_SELECT
    + TOK_SELEXPR
    + TOK_TABLE_OR_COL
    + key
    + TOK_SELEXPR
    + TOK_TABLE_OR_COL
    + value
    + TOK_SELEXPR
    + TOK_FUNCTION
    + if
    + ==
    + %
    + TOK_TABLE_OR_COL
    + key
    + 100
    + 0
    + 'a1'
    + 'b1'
    + TOK_WHERE
    + =
    + TOK_TABLE_OR_COL
    + ds
    + '2008-04-08'
    +
    +
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
    + Stage-4
    + Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
    + Stage-2 depends on stages: Stage-0
    + Stage-3
    + Stage-5
    + Stage-6 depends on stages: Stage-5
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Map Operator Tree:
    + TableScan
    + alias: srcpart
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    + GatherStats: false
    + Select Operator
    + expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
    + outputColumnNames: _col0, _col1, _col2
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    + File Output Operator
    + compressed: false
    + GlobalTableId: 1
    +#### A masked pattern was here ####
    + NumFilesPerFileSink: 1
    + Static Partition Specification: ds=2008-04-08/
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    +#### A masked pattern was here ####
    + table:
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + TotalFiles: 1
    + GatherStats: true
    + MultiFileSpray: false
    + Path -> Alias:
    +#### A masked pattern was here ####
    + Path -> Partition:
    +#### A masked pattern was here ####
    + Partition
    + base file name: hr=11
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr 11
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + numFiles 1
    + numRows 500
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 5312
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + totalSize 5812
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + name: default.srcpart
    + name: default.srcpart
    +#### A masked pattern was here ####
    + Partition
    + base file name: hr=12
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr 12
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + numFiles 1
    + numRows 500
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 5312
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + totalSize 5812
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + name: default.srcpart
    + name: default.srcpart
    + Truncated Path -> Alias:
    + /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:srcpart]
    + /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:srcpart]
    +
    + Stage: Stage-7
    + Conditional Operator
    +
    + Stage: Stage-4
    + Move Operator
    + files:
    + hdfs directory: true
    +#### A masked pattern was here ####
    +
    + Stage: Stage-0
    + Move Operator
    + tables:
    + partition:
    + ds 2008-04-08
    + hr
    + replace: true
    +#### A masked pattern was here ####
    + table:
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    +
    + Stage: Stage-2
    + Stats-Aggr Operator
    +#### A masked pattern was here ####
    +
    + Stage: Stage-3
    + Merge File Operator
    + Map Operator Tree:
    + RCFile Merge Operator
    + merge level: block
    + Path -> Alias:
    +#### A masked pattern was here ####
    + Path -> Partition:
    +#### A masked pattern was here ####
    + Partition
    + input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + name: default.list_bucketing_dynamic_part
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + Truncated Path -> Alias:
    +#### A masked pattern was here ####
    +
    + Stage: Stage-5
    + Merge File Operator
    + Map Operator Tree:
    + RCFile Merge Operator
    + merge level: block
    + Path -> Alias:
    +#### A masked pattern was here ####
    + Path -> Partition:
    +#### A masked pattern was here ####
    + Partition
    + input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + name: default.list_bucketing_dynamic_part
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + Truncated Path -> Alias:
    +#### A masked pattern was here ####
    +
    + Stage: Stage-6
    + Move Operator
    + files:
    + hdfs directory: true
    +#### A masked pattern was here ####
    +
    +PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@srcpart
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
    +POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@srcpart
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
    +PREHOOK: query: -- check DML result
    +show partitions list_bucketing_dynamic_part
    +PREHOOK: type: SHOWPARTITIONS
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: -- check DML result
    +show partitions list_bucketing_dynamic_part
    +POSTHOOK: type: SHOWPARTITIONS
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +ds=2008-04-08/hr=a1
    +ds=2008-04-08/hr=b1
    +PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
    +PREHOOK: type: DESCTABLE
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
    +POSTHOOK: type: DESCTABLE
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +# col_name data_type comment
    +
    +key string
    +value string
    +
    +# Partition Information
    +# col_name data_type comment
    +
    +ds string
    +hr string
    +
    +# Detailed Partition Information
    +Partition Value: [2008-04-08, a1]
    +Database: default
    +Table: list_bucketing_dynamic_part
    +#### A masked pattern was here ####
    +Protect Mode: None
    +#### A masked pattern was here ####
    +Partition Parameters:
    + COLUMN_STATS_ACCURATE true
    + numFiles 1
    + numRows 16
    + rawDataSize 136
    + totalSize 254
    +#### A masked pattern was here ####
    +
    +# Storage Information
    +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    +Compressed: No
    +Num Buckets: -1
    +Bucket Columns: []
    +Sort Columns: []
    +Stored As SubDirectories: Yes
    +Skewed Columns: [key, value]
    +Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
    +Storage Desc Params:
    + serialization.format 1
    +PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
    +PREHOOK: type: DESCTABLE
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
    +POSTHOOK: type: DESCTABLE
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +# col_name data_type comment
    +
    +key string
    +value string
    +
    +# Partition Information
    +# col_name data_type comment
    +
    +ds string
    +hr string
    +
    +# Detailed Partition Information
    +Partition Value: [2008-04-08, b1]
    +Database: default
    +Table: list_bucketing_dynamic_part
    +#### A masked pattern was here ####
    +Protect Mode: None
    +#### A masked pattern was here ####
    +Partition Parameters:
    + COLUMN_STATS_ACCURATE true
    + numFiles 4
    + numRows 984
    + rawDataSize 9488
    + totalSize 10622
    +#### A masked pattern was here ####
    +
    +# Storage Information
    +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    +Compressed: No
    +Num Buckets: -1
    +Bucket Columns: []
    +Sort Columns: []
    +Stored As SubDirectories: Yes
    +Skewed Columns: [key, value]
    +Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
    +#### A masked pattern was here ####
    +Skewed Value to Truncated Path: {[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}
    +Storage Desc Params:
    + serialization.format 1
    +PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@srcpart
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +#### A masked pattern was here ####
    +POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@srcpart
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +#### A masked pattern was here ####
    +1000
    +PREHOOK: query: select count(*) from list_bucketing_dynamic_part
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +#### A masked pattern was here ####
    +POSTHOOK: query: select count(*) from list_bucketing_dynamic_part
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +#### A masked pattern was here ####
    +1000
    +PREHOOK: query: explain extended
    +select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
    +PREHOOK: type: QUERY
    +POSTHOOK: query: explain extended
    +select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    +
    +TOK_QUERY
    + TOK_FROM
    + TOK_TABREF
    + TOK_TABNAME
    + list_bucketing_dynamic_part
    + TOK_INSERT
    + TOK_DESTINATION
    + TOK_DIR
    + TOK_TMP_FILE
    + TOK_SELECT
    + TOK_SELEXPR
    + TOK_ALLCOLREF
    + TOK_WHERE
    + and
    + =
    + TOK_TABLE_OR_COL
    + key
    + '484'
    + =
    + TOK_TABLE_OR_COL
    + value
    + 'val_484'
    +
    +
    +STAGE DEPENDENCIES:
    + Stage-0 is a root stage
    +
    +STAGE PLANS:
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    + Partition Description:
    + Partition
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr a1
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + numFiles 1
    + numRows 16
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 136
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + totalSize 254
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + name: default.list_bucketing_dynamic_part
    + Partition
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr b1
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + numFiles 4
    + numRows 984
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 9488
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + totalSize 10622
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + name: default.list_bucketing_dynamic_part
    + Processor Tree:
    + TableScan
    + alias: list_bucketing_dynamic_part
    + Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
    + GatherStats: false
    + Filter Operator
    + isSamplingPred: false
    + predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
    + Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
    + Select Operator
    + expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
    + outputColumnNames: _col0, _col1, _col2, _col3
    + Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
    + ListSink
    +
    +PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +#### A masked pattern was here ####
    +POSTHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +#### A masked pattern was here ####
    +484 val_484 2008-04-08 b1
    +484 val_484 2008-04-08 b1
    +PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@srcpart
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +#### A masked pattern was here ####
    +POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@srcpart
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +#### A masked pattern was here ####
    +484 val_484 2008-04-08 11
    +484 val_484 2008-04-08 12
    +PREHOOK: query: -- clean up
    +drop table list_bucketing_dynamic_part
    +PREHOOK: type: DROPTABLE
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +PREHOOK: Output: default@list_bucketing_dynamic_part
    +POSTHOOK: query: -- clean up
    +drop table list_bucketing_dynamic_part
    +POSTHOOK: type: DROPTABLE
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: Output: default@list_bucketing_dynamic_part

    Added: hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.7.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.7.out?rev=1647794&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.7.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.7.out Wed Dec 24 14:33:00 2014
    @@ -0,0 +1,13 @@
    +PREHOOK: query: -- explain plan json: the query gets the formatted json output of the query plan of the hive query
    +
    +-- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN FORMATTED SELECT count(1) FROM src
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- explain plan json: the query gets the formatted json output of the query plan of the hive query
    +
    +-- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN FORMATTED SELECT count(1) FROM src
    +POSTHOOK: type: QUERY
    +{"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Reduce Operator Tree:":{"Group By Operator":{"mode:":"mergepartial","aggregations:":["count(VALUE._col0)"],"outputColumnNames:":["_col0"],"children":{"Select Operator":{"expressions:":"_col0 (type: bigint)","outputColumnNames:":["_col0"],"children":{"File Output Operator":{"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","compressed:":"false","table:":{"serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe","input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"}}},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE"}},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE"}},"Map Operator Tree:":[{"TableScan":{"alias:":"src","children":{"Select Operator":{"expressions:":"1 (type: int)","outputColumnNames:":["_col0"],"children":{"Group By Operator":{"mode:":
      "hash","aggregations:":["count(_col0)"],"outputColumnNames:":["_col0"],"children":{"Reduce Output Operator":{"sort order:":"","value expressions:":"_col0 (type: bigint)","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE"}},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE"}},"Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE"}},"Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE"}}]}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{}}}}},"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"DEPENDENT STAGES":"Stage-1"}}}

    Added: hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.8.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.8.out?rev=1647794&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.8.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/plan_json.q.java1.8.out Wed Dec 24 14:33:00 2014
    @@ -0,0 +1,13 @@
    +PREHOOK: query: -- explain plan json: the query gets the formatted json output of the query plan of the hive query
    +
    +-- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN FORMATTED SELECT count(1) FROM src
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- explain plan json: the query gets the formatted json output of the query plan of the hive query
    +
    +-- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN FORMATTED SELECT count(1) FROM src
    +POSTHOOK: type: QUERY
    +{"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"DEPENDENT STAGES":"Stage-1"}},"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Select Operator":{"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","table:":{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}}},"outputColumnNames:":["_col0"],"expressions:":"_col0 (type: bigint)"}},"outputColumnNames:":["_col0"]}},"Map Operator Tree:":[{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size
      : 5312 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Select Operator":{"Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Group By Operator":{"aggregations:":["count(_col0)"],"mode:":"hash","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Reduce Output Operator":{"value expressions:":"_col0 (type: bigint)","sort order:":"","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE"}},"outputColumnNames:":["_col0"]}},"outputColumnNames:":["_col0"],"expressions:":"1 (type: int)"}}}}]}},"Stage-0":{"Fetch Operator":{"Processor Tree:":{"ListSink":{}},"limit:":"-1"}}}}

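    The two plan_json goldens above encode the same query plan; only the JSON object key order differs. EXPLAIN FORMATTED builds its output from map-backed structures, and HashMap iteration order is unspecified and changed between JDK 7 and JDK 8, which is why the expected output is now split per Java version. A minimal sketch of the underlying behavior (the class and keys here are assumptions for illustration, not code from this patch):

        import java.util.HashMap;
        import java.util.Map;

        public class PlanKeyOrderDemo {
            public static void main(String[] args) {
                // The same insertions on every JDK...
                Map<String, String> plan = new HashMap<>();
                plan.put("STAGE PLANS", "...");
                plan.put("STAGE DEPENDENCIES", "...");
                // ...but iteration order is an implementation detail that
                // changed between JDK 7 and JDK 8, so the printed key order
                // can differ from one JDK to the next.
                for (String key : plan.keySet()) {
                    System.out.println(key);
                }
            }
        }

    Run under JDK 7 and JDK 8, the same insertions can be iterated in different orders; that reordering is the entire delta between the .java1.7.out and .java1.8.out files.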
    Added: hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out?rev=1647794&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out Wed Dec 24 14:33:00 2014
    @@ -0,0 +1,236 @@
    +Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
    +PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-0 depends on stages: Stage-1
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Tez
    + Edges:
    + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
    + Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
    +#### A masked pattern was here ####
    + Vertices:
    + Map 1
    + Map Operator Tree:
    + TableScan
    + alias: src
    + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
    + Filter Operator
    + predicate: (key < 10) (type: boolean)
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Select Operator
    + expressions: key (type: string), value (type: string)
    + outputColumnNames: _col0, _col1
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Reduce Output Operator
    + sort order:
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + value expressions: _col0 (type: string), _col1 (type: string)
    + Map 4
    + Map Operator Tree:
    + TableScan
    + alias: src
    + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
    + Filter Operator
    + predicate: (key < 10) (type: boolean)
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Select Operator
    + expressions: key (type: string), value (type: string)
    + outputColumnNames: _col0, _col1
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Reduce Output Operator
    + sort order:
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + value expressions: _col0 (type: string), _col1 (type: string)
    + Reducer 2
    + Reduce Operator Tree:
    + Merge Join Operator
    + condition map:
    + Inner Join 0 to 1
    + keys:
    + 0
    + 1
    + outputColumnNames: _col0, _col1, _col2, _col3
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + Reduce Output Operator
    + key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
    + sort order: ++++
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + Reducer 3
    + Reduce Operator Tree:
    + Select Operator
    + expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
    + outputColumnNames: _col0, _col1, _col2, _col3
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + File Output Operator
    + compressed: false
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    + Processor Tree:
    + ListSink
    +
    +Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
    +PREHOOK: query: EXPLAIN FORMATTED
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +POSTHOOK: query: EXPLAIN FORMATTED
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +#### A masked pattern was here ####
    +Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
    +PREHOOK: query: SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@src
    +#### A masked pattern was here ####
    +POSTHOOK: query: SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@src
    +#### A masked pattern was here ####
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 2 val_2
    +0 val_0 2 val_2
    +0 val_0 2 val_2
    +0 val_0 4 val_4
    +0 val_0 4 val_4
    +0 val_0 4 val_4
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 8 val_8
    +0 val_0 8 val_8
    +0 val_0 8 val_8
    +0 val_0 9 val_9
    +0 val_0 9 val_9
    +0 val_0 9 val_9
    +2 val_2 0 val_0
    +2 val_2 0 val_0
    +2 val_2 0 val_0
    +2 val_2 2 val_2
    +2 val_2 4 val_4
    +2 val_2 5 val_5
    +2 val_2 5 val_5
    +2 val_2 5 val_5
    +2 val_2 8 val_8
    +2 val_2 9 val_9
    +4 val_4 0 val_0
    +4 val_4 0 val_0
    +4 val_4 0 val_0
    +4 val_4 2 val_2
    +4 val_4 4 val_4
    +4 val_4 5 val_5
    +4 val_4 5 val_5
    +4 val_4 5 val_5
    +4 val_4 8 val_8
    +4 val_4 9 val_9
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 2 val_2
    +5 val_5 2 val_2
    +5 val_5 2 val_2
    +5 val_5 4 val_4
    +5 val_5 4 val_4
    +5 val_5 4 val_4
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 8 val_8
    +5 val_5 8 val_8
    +5 val_5 8 val_8
    +5 val_5 9 val_9
    +5 val_5 9 val_9
    +5 val_5 9 val_9
    +8 val_8 0 val_0
    +8 val_8 0 val_0
    +8 val_8 0 val_0
    +8 val_8 2 val_2
    +8 val_8 4 val_4
    +8 val_8 5 val_5
    +8 val_8 5 val_5
    +8 val_8 5 val_5
    +8 val_8 8 val_8
    +8 val_8 9 val_9
    +9 val_9 0 val_0
    +9 val_9 0 val_0
    +9 val_9 0 val_0
    +9 val_9 2 val_2
    +9 val_9 4 val_4
    +9 val_9 5 val_5
    +9 val_9 5 val_5
    +9 val_9 5 val_5
    +9 val_9 8 val_8
    +9 val_9 9 val_9

    Added: hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out?rev=1647794&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out Wed Dec 24 14:33:00 2014
    @@ -0,0 +1,236 @@
    +Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
    +PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-0 depends on stages: Stage-1
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Tez
    + Edges:
    + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
    + Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
    +#### A masked pattern was here ####
    + Vertices:
    + Map 1
    + Map Operator Tree:
    + TableScan
    + alias: src
    + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
    + Filter Operator
    + predicate: (key < 10) (type: boolean)
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Select Operator
    + expressions: key (type: string), value (type: string)
    + outputColumnNames: _col0, _col1
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Reduce Output Operator
    + sort order:
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + value expressions: _col0 (type: string), _col1 (type: string)
    + Map 4
    + Map Operator Tree:
    + TableScan
    + alias: src
    + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
    + Filter Operator
    + predicate: (key < 10) (type: boolean)
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Select Operator
    + expressions: key (type: string), value (type: string)
    + outputColumnNames: _col0, _col1
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Reduce Output Operator
    + sort order:
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + value expressions: _col0 (type: string), _col1 (type: string)
    + Reducer 2
    + Reduce Operator Tree:
    + Merge Join Operator
    + condition map:
    + Inner Join 0 to 1
    + keys:
    + 0
    + 1
    + outputColumnNames: _col0, _col1, _col2, _col3
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + Reduce Output Operator
    + key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
    + sort order: ++++
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + Reducer 3
    + Reduce Operator Tree:
    + Select Operator
    + expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
    + outputColumnNames: _col0, _col1, _col2, _col3
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + File Output Operator
    + compressed: false
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    + Processor Tree:
    + ListSink
    +
    +Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
    +PREHOOK: query: EXPLAIN FORMATTED
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +POSTHOOK: query: EXPLAIN FORMATTED
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +#### A masked pattern was here ####
    +Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
    +PREHOOK: query: SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@src
    +#### A masked pattern was here ####
    +POSTHOOK: query: SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@src
    +#### A masked pattern was here ####
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 2 val_2
    +0 val_0 2 val_2
    +0 val_0 2 val_2
    +0 val_0 4 val_4
    +0 val_0 4 val_4
    +0 val_0 4 val_4
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 8 val_8
    +0 val_0 8 val_8
    +0 val_0 8 val_8
    +0 val_0 9 val_9
    +0 val_0 9 val_9
    +0 val_0 9 val_9
    +2 val_2 0 val_0
    +2 val_2 0 val_0
    +2 val_2 0 val_0
    +2 val_2 2 val_2
    +2 val_2 4 val_4
    +2 val_2 5 val_5
    +2 val_2 5 val_5
    +2 val_2 5 val_5
    +2 val_2 8 val_8
    +2 val_2 9 val_9
    +4 val_4 0 val_0
    +4 val_4 0 val_0
    +4 val_4 0 val_0
    +4 val_4 2 val_2
    +4 val_4 4 val_4
    +4 val_4 5 val_5
    +4 val_4 5 val_5
    +4 val_4 5 val_5
    +4 val_4 8 val_8
    +4 val_4 9 val_9
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 2 val_2
    +5 val_5 2 val_2
    +5 val_5 2 val_2
    +5 val_5 4 val_4
    +5 val_5 4 val_4
    +5 val_5 4 val_4
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 8 val_8
    +5 val_5 8 val_8
    +5 val_5 8 val_8
    +5 val_5 9 val_9
    +5 val_5 9 val_9
    +5 val_5 9 val_9
    +8 val_8 0 val_0
    +8 val_8 0 val_0
    +8 val_8 0 val_0
    +8 val_8 2 val_2
    +8 val_8 4 val_4
    +8 val_8 5 val_5
    +8 val_8 5 val_5
    +8 val_8 5 val_5
    +8 val_8 8 val_8
    +8 val_8 9 val_9
    +9 val_9 0 val_0
    +9 val_9 0 val_0
    +9 val_9 0 val_0
    +9 val_9 2 val_2
    +9 val_9 4 val_4
    +9 val_9 5 val_5
    +9 val_9 5 val_5
    +9 val_9 5 val_5
    +9 val_9 8 val_8
    +9 val_9 9 val_9
    Added: hive/trunk/ql/src/test/results/clientpositive/input4.q.java1.8.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/input4.q.java1.8.out?rev=1647794&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/input4.q.java1.8.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/input4.q.java1.8.out Wed Dec 24 14:33:00 2014
    @@ -0,0 +1,559 @@
    +PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
    +PREHOOK: type: CREATETABLE
    +PREHOOK: Output: database:default
    +PREHOOK: Output: default@INPUT4
    +POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
    +POSTHOOK: type: CREATETABLE
    +POSTHOOK: Output: database:default
    +POSTHOOK: Output: default@INPUT4
    +PREHOOK: query: EXPLAIN
    +LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
    +PREHOOK: type: LOAD
    +POSTHOOK: query: EXPLAIN
    +LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
    +POSTHOOK: type: LOAD
    +STAGE DEPENDENCIES:
    + Stage-0 is a root stage
    + Stage-1 depends on stages: Stage-0
    +
    +STAGE PLANS:
    + Stage: Stage-0
    + Move Operator
    + tables:
    + replace: false
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + name: default.input4
    +
    + Stage: Stage-1
    + Stats-Aggr Operator
    +
    +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
    +PREHOOK: type: LOAD
    +#### A masked pattern was here ####
    +PREHOOK: Output: default@input4
    +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
    +POSTHOOK: type: LOAD
    +#### A masked pattern was here ####
    +POSTHOOK: Output: default@input4
    +PREHOOK: query: EXPLAIN FORMATTED
    +SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
    +PREHOOK: type: QUERY
    +POSTHOOK: query: EXPLAIN FORMATTED
    +SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
    +POSTHOOK: type: QUERY
    +{"STAGE DEPENDENCIES":{"Stage-0":{"ROOT STAGE":"TRUE"}},"STAGE PLANS":{"Stage-0":{"Fetch Operator":{"Processor Tree:":{"TableScan":{"alias:":"input4alias","Statistics:":"Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"Statistics:":"Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE","children":{"ListSink":{}},"outputColumnNames:":["_col0","_col1"],"expressions:":"value (type: string), key (type: string)"}}}},"limit:":"-1"}}}}
    +PREHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@input4
    +#### A masked pattern was here ####
    +POSTHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@input4
    +#### A masked pattern was here ####
    +val_238 238
    +val_86 86
    +val_311 311
    +val_27 27
    +val_165 165
    +val_409 409
    +val_255 255
    +val_278 278
    +val_98 98
    +val_484 484
    +val_265 265
    +val_193 193
    +val_401 401
    +val_150 150
    +val_273 273
    +val_224 224
    +val_369 369
    +val_66 66
    +val_128 128
    +val_213 213
    +val_146 146
    +val_406 406
    +val_429 429
    +val_374 374
    +val_152 152
    +val_469 469
    +val_145 145
    +val_495 495
    +val_37 37
    +val_327 327
    +val_281 281
    +val_277 277
    +val_209 209
    +val_15 15
    +val_82 82
    +val_403 403
    +val_166 166
    +val_417 417
    +val_430 430
    +val_252 252
    +val_292 292
    +val_219 219
    +val_287 287
    +val_153 153
    +val_193 193
    +val_338 338
    +val_446 446
    +val_459 459
    +val_394 394
    +val_237 237
    +val_482 482
    +val_174 174
    +val_413 413
    +val_494 494
    +val_207 207
    +val_199 199
    +val_466 466
    +val_208 208
    +val_174 174
    +val_399 399
    +val_396 396
    +val_247 247
    +val_417 417
    +val_489 489
    +val_162 162
    +val_377 377
    +val_397 397
    +val_309 309
    +val_365 365
    +val_266 266
    +val_439 439
    +val_342 342
    +val_367 367
    +val_325 325
    +val_167 167
    +val_195 195
    +val_475 475
    +val_17 17
    +val_113 113
    +val_155 155
    +val_203 203
    +val_339 339
    +val_0 0
    +val_455 455
    +val_128 128
    +val_311 311
    +val_316 316
    +val_57 57
    +val_302 302
    +val_205 205
    +val_149 149
    +val_438 438
    +val_345 345
    +val_129 129
    +val_170 170
    +val_20 20
    +val_489 489
    +val_157 157
    +val_378 378
    +val_221 221
    +val_92 92
    +val_111 111
    +val_47 47
    +val_72 72
    +val_4 4
    +val_280 280
    +val_35 35
    +val_427 427
    +val_277 277
    +val_208 208
    +val_356 356
    +val_399 399
    +val_169 169
    +val_382 382
    +val_498 498
    +val_125 125
    +val_386 386
    +val_437 437
    +val_469 469
    +val_192 192
    +val_286 286
    +val_187 187
    +val_176 176
    +val_54 54
    +val_459 459
    +val_51 51
    +val_138 138
    +val_103 103
    +val_239 239
    +val_213 213
    +val_216 216
    +val_430 430
    +val_278 278
    +val_176 176
    +val_289 289
    +val_221 221
    +val_65 65
    +val_318 318
    +val_332 332
    +val_311 311
    +val_275 275
    +val_137 137
    +val_241 241
    +val_83 83
    +val_333 333
    +val_180 180
    +val_284 284
    +val_12 12
    +val_230 230
    +val_181 181
    +val_67 67
    +val_260 260
    +val_404 404
    +val_384 384
    +val_489 489
    +val_353 353
    +val_373 373
    +val_272 272
    +val_138 138
    +val_217 217
    +val_84 84
    +val_348 348
    +val_466 466
    +val_58 58
    +val_8 8
    +val_411 411
    +val_230 230
    +val_208 208
    +val_348 348
    +val_24 24
    +val_463 463
    +val_431 431
    +val_179 179
    +val_172 172
    +val_42 42
    +val_129 129
    +val_158 158
    +val_119 119
    +val_496 496
    +val_0 0
    +val_322 322
    +val_197 197
    +val_468 468
    +val_393 393
    +val_454 454
    +val_100 100
    +val_298 298
    +val_199 199
    +val_191 191
    +val_418 418
    +val_96 96
    +val_26 26
    +val_165 165
    +val_327 327
    +val_230 230
    +val_205 205
    +val_120 120
    +val_131 131
    +val_51 51
    +val_404 404
    +val_43 43
    +val_436 436
    +val_156 156
    +val_469 469
    +val_468 468
    +val_308 308
    +val_95 95
    +val_196 196
    +val_288 288
    +val_481 481
    +val_457 457
    +val_98 98
    +val_282 282
    +val_197 197
    +val_187 187
    +val_318 318
    +val_318 318
    +val_409 409
    +val_470 470
    +val_137 137
    +val_369 369
    +val_316 316
    +val_169 169
    +val_413 413
    +val_85 85
    +val_77 77
    +val_0 0
    +val_490 490
    +val_87 87
    +val_364 364
    +val_179 179
    +val_118 118
    +val_134 134
    +val_395 395
    +val_282 282
    +val_138 138
    +val_238 238
    +val_419 419
    +val_15 15
    +val_118 118
    +val_72 72
    +val_90 90
    +val_307 307
    +val_19 19
    +val_435 435
    +val_10 10
    +val_277 277
    +val_273 273
    +val_306 306
    +val_224 224
    +val_309 309
    +val_389 389
    +val_327 327
    +val_242 242
    +val_369 369
    +val_392 392
    +val_272 272
    +val_331 331
    +val_401 401
    +val_242 242
    +val_452 452
    +val_177 177
    +val_226 226
    +val_5 5
    +val_497 497
    +val_402 402
    +val_396 396
    +val_317 317
    +val_395 395
    +val_58 58
    +val_35 35
    +val_336 336
    +val_95 95
    +val_11 11
    +val_168 168
    +val_34 34
    +val_229 229
    +val_233 233
    +val_143 143
    +val_472 472
    +val_322 322
    +val_498 498
    +val_160 160
    +val_195 195
    +val_42 42
    +val_321 321
    +val_430 430
    +val_119 119
    +val_489 489
    +val_458 458
    +val_78 78
    +val_76 76
    +val_41 41
    +val_223 223
    +val_492 492
    +val_149 149
    +val_449 449
    +val_218 218
    +val_228 228
    +val_138 138
    +val_453 453
    +val_30 30
    +val_209 209
    +val_64 64
    +val_468 468
    +val_76 76
    +val_74 74
    +val_342 342
    +val_69 69
    +val_230 230
    +val_33 33
    +val_368 368
    +val_103 103
    +val_296 296
    +val_113 113
    +val_216 216
    +val_367 367
    +val_344 344
    +val_167 167
    +val_274 274
    +val_219 219
    +val_239 239
    +val_485 485
    +val_116 116
    +val_223 223
    +val_256 256
    +val_263 263
    +val_70 70
    +val_487 487
    +val_480 480
    +val_401 401
    +val_288 288
    +val_191 191
    +val_5 5
    +val_244 244
    +val_438 438
    +val_128 128
    +val_467 467
    +val_432 432
    +val_202 202
    +val_316 316
    +val_229 229
    +val_469 469
    +val_463 463
    +val_280 280
    +val_2 2
    +val_35 35
    +val_283 283
    +val_331 331
    +val_235 235
    +val_80 80
    +val_44 44
    +val_193 193
    +val_321 321
    +val_335 335
    +val_104 104
    +val_466 466
    +val_366 366
    +val_175 175
    +val_403 403
    +val_483 483
    +val_53 53
    +val_105 105
    +val_257 257
    +val_406 406
    +val_409 409
    +val_190 190
    +val_406 406
    +val_401 401
    +val_114 114
    +val_258 258
    +val_90 90
    +val_203 203
    +val_262 262
    +val_348 348
    +val_424 424
    +val_12 12
    +val_396 396
    +val_201 201
    +val_217 217
    +val_164 164
    +val_431 431
    +val_454 454
    +val_478 478
    +val_298 298
    +val_125 125
    +val_431 431
    +val_164 164
    +val_424 424
    +val_187 187
    +val_382 382
    +val_5 5
    +val_70 70
    +val_397 397
    +val_480 480
    +val_291 291
    +val_24 24
    +val_351 351
    +val_255 255
    +val_104 104
    +val_70 70
    +val_163 163
    +val_438 438
    +val_119 119
    +val_414 414
    +val_200 200
    +val_491 491
    +val_237 237
    +val_439 439
    +val_360 360
    +val_248 248
    +val_479 479
    +val_305 305
    +val_417 417
    +val_199 199
    +val_444 444
    +val_120 120
    +val_429 429
    +val_169 169
    +val_443 443
    +val_323 323
    +val_325 325
    +val_277 277
    +val_230 230
    +val_478 478
    +val_178 178
    +val_468 468
    +val_310 310
    +val_317 317
    +val_333 333
    +val_493 493
    +val_460 460
    +val_207 207
    +val_249 249
    +val_265 265
    +val_480 480
    +val_83 83
    +val_136 136
    +val_353 353
    +val_172 172
    +val_214 214
    +val_462 462
    +val_233 233
    +val_406 406
    +val_133 133
    +val_175 175
    +val_189 189
    +val_454 454
    +val_375 375
    +val_401 401
    +val_421 421
    +val_407 407
    +val_384 384
    +val_256 256
    +val_26 26
    +val_134 134
    +val_67 67
    +val_384 384
    +val_379 379
    +val_18 18
    +val_462 462
    +val_492 492
    +val_100 100
    +val_298 298
    +val_9 9
    +val_341 341
    +val_498 498
    +val_146 146
    +val_458 458
    +val_362 362
    +val_186 186
    +val_285 285
    +val_348 348
    +val_167 167
    +val_18 18
    +val_273 273
    +val_183 183
    +val_281 281
    +val_344 344
    +val_97 97
    +val_469 469
    +val_315 315
    +val_84 84
    +val_28 28
    +val_37 37
    +val_448 448
    +val_152 152
    +val_348 348
    +val_307 307
    +val_194 194
    +val_414 414
    +val_477 477
    +val_222 222
    +val_126 126
    +val_90 90
    +val_169 169
    +val_403 403
    +val_400 400
    +val_200 200
    +val_97 97

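    As in plan_json.q, the only Java-version-sensitive line in the input4 goldens is the EXPLAIN FORMATTED JSON; the surrounding rows are identical. A hedged sketch of an order-insensitive comparison, assuming jackson-databind on the classpath (Hive's qtest harness diffs golden files textually, so this is illustration only): parsed as trees, the java1.7 and java1.8 strings compare equal because JSON object fields are unordered.

        import com.fasterxml.jackson.databind.JsonNode;
        import com.fasterxml.jackson.databind.ObjectMapper;

        public class JsonPlanEquality {
            // Returns true when two EXPLAIN FORMATTED strings encode the same
            // plan regardless of object key order (array order still matters).
            public static boolean samePlan(String java7Json, String java8Json) throws Exception {
                ObjectMapper mapper = new ObjectMapper();
                JsonNode a = mapper.readTree(java7Json);
                JsonNode b = mapper.readTree(java8Json);
                return a.equals(b);
            }
        }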
    Added: hive/trunk/ql/src/test/results/clientpositive/join0.q.java1.7.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/join0.q.java1.7.out?rev=1647794&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/join0.q.java1.7.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/join0.q.java1.7.out Wed Dec 24 14:33:00 2014
    @@ -0,0 +1,238 @@
    +Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
    +PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-2 depends on stages: Stage-1
    + Stage-0 depends on stages: Stage-2
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Map Operator Tree:
    + TableScan
    + alias: src
    + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
    + Filter Operator
    + predicate: (key < 10) (type: boolean)
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Select Operator
    + expressions: key (type: string), value (type: string)
    + outputColumnNames: _col0, _col1
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Reduce Output Operator
    + sort order:
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + value expressions: _col0 (type: string), _col1 (type: string)
    + TableScan
    + alias: src
    + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
    + Filter Operator
    + predicate: (key < 10) (type: boolean)
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Select Operator
    + expressions: key (type: string), value (type: string)
    + outputColumnNames: _col0, _col1
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Reduce Output Operator
    + sort order:
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + value expressions: _col0 (type: string), _col1 (type: string)
    + Reduce Operator Tree:
    + Join Operator
    + condition map:
    + Inner Join 0 to 1
    + keys:
    + 0
    + 1
    + outputColumnNames: _col0, _col1, _col2, _col3
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + File Output Operator
    + compressed: false
    + table:
    + input format: org.apache.hadoop.mapred.SequenceFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
    +
    + Stage: Stage-2
    + Map Reduce
    + Map Operator Tree:
    + TableScan
    + Reduce Output Operator
    + key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
    + sort order: ++++
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + Reduce Operator Tree:
    + Select Operator
    + expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
    + outputColumnNames: _col0, _col1, _col2, _col3
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + File Output Operator
    + compressed: false
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    + Processor Tree:
    + ListSink
    +
    +Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
    +PREHOOK: query: EXPLAIN FORMATTED
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +POSTHOOK: query: EXPLAIN FORMATTED
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +{"STAGE PLANS":{"Stage-2":{"Map Reduce":{"Reduce Operator Tree:":{"Select Operator":{"expressions:":"KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)","outputColumnNames:":["_col0","_col1","_col2","_col3"],"children":{"File Output Operator":{"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","compressed:":"false","table:":{"serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe","input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"}}},"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE"}},"Map Operator Tree:":[{"TableScan":{"children":{"Reduce Output Operator":{"sort order:":"++++","Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","key expressions:":"_col0 (type: string), _col1 (type: string), _col2 (ty
      pe: string), _col3 (type: string)"}}}}]}},"Stage-1":{"Map Reduce":{"Reduce Operator Tree:":{"Join Operator":{"keys:":{},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"children":{"File Output Operator":{"compressed:":"false","table:":{"serde:":"org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe","input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"}}},"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","condition map:":[{"":"Inner Join 0 to 1"}]}},"Map Operator Tree:":[{"TableScan":{"alias:":"src","children":{"Filter Operator":{"predicate:":"(key < 10) (type: boolean)","children":{"Select Operator":{"expressions:":"key (type: string), value (type: string)","outputColumnNames:":["_col0","_col1"],"children":{"Reduce Output Operator":{"sort order:":"","value expressions:":"_col0 (type: string), _col1 (type: string)","Statistics:":"Num rows: 166 Data
       size: 1763 Basic stats: COMPLETE Column stats: NONE"}},"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE"}},"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE"}},"Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE"}},{"TableScan":{"alias:":"src","children":{"Filter Operator":{"predicate:":"(key < 10) (type: boolean)","children":{"Select Operator":{"expressions:":"key (type: string), value (type: string)","outputColumnNames:":["_col0","_col1"],"children":{"Reduce Output Operator":{"sort order:":"","value expressions:":"_col0 (type: string), _col1 (type: string)","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE"}},"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE"}},"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE"}},"Statistics:":"Num rows: 500 Data size: 5312 Basic stats
      : COMPLETE Column stats: NONE"}}]}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{}}}}},"STAGE DEPENDENCIES":{"Stage-2":{"DEPENDENT STAGES":"Stage-1"},"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"DEPENDENT STAGES":"Stage-2"}}}
    +Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
    +PREHOOK: query: SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@src
    +#### A masked pattern was here ####
    +POSTHOOK: query: SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@src
    +#### A masked pattern was here ####
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 2 val_2
    +0 val_0 2 val_2
    +0 val_0 2 val_2
    +0 val_0 4 val_4
    +0 val_0 4 val_4
    +0 val_0 4 val_4
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 8 val_8
    +0 val_0 8 val_8
    +0 val_0 8 val_8
    +0 val_0 9 val_9
    +0 val_0 9 val_9
    +0 val_0 9 val_9
    +2 val_2 0 val_0
    +2 val_2 0 val_0
    +2 val_2 0 val_0
    +2 val_2 2 val_2
    +2 val_2 4 val_4
    +2 val_2 5 val_5
    +2 val_2 5 val_5
    +2 val_2 5 val_5
    +2 val_2 8 val_8
    +2 val_2 9 val_9
    +4 val_4 0 val_0
    +4 val_4 0 val_0
    +4 val_4 0 val_0
    +4 val_4 2 val_2
    +4 val_4 4 val_4
    +4 val_4 5 val_5
    +4 val_4 5 val_5
    +4 val_4 5 val_5
    +4 val_4 8 val_8
    +4 val_4 9 val_9
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 2 val_2
    +5 val_5 2 val_2
    +5 val_5 2 val_2
    +5 val_5 4 val_4
    +5 val_5 4 val_4
    +5 val_5 4 val_4
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 8 val_8
    +5 val_5 8 val_8
    +5 val_5 8 val_8
    +5 val_5 9 val_9
    +5 val_5 9 val_9
    +5 val_5 9 val_9
    +8 val_8 0 val_0
    +8 val_8 0 val_0
    +8 val_8 0 val_0
    +8 val_8 2 val_2
    +8 val_8 4 val_4
    +8 val_8 5 val_5
    +8 val_8 5 val_5
    +8 val_8 5 val_5
    +8 val_8 8 val_8
    +8 val_8 9 val_9
    +9 val_9 0 val_0
    +9 val_9 0 val_0
    +9 val_9 0 val_0
    +9 val_9 2 val_2
    +9 val_9 4 val_4
    +9 val_9 5 val_5
    +9 val_9 5 val_5
    +9 val_9 5 val_5
    +9 val_9 8 val_8
    +9 val_9 9 val_9

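    The join0 goldens repeat the same pattern: the MapReduce plans match and only the JSON key order moves. The generic alternative to keeping per-JDK golden pairs is to serialize map entries in sorted order, so the output is byte-identical on every JDK. A sketch of that idea, using Jackson purely for illustration (this is not how Hive emits EXPLAIN output, and it is not the approach this commit takes):

        import java.util.HashMap;
        import java.util.Map;

        import com.fasterxml.jackson.databind.ObjectMapper;
        import com.fasterxml.jackson.databind.SerializationFeature;

        public class DeterministicPlanJson {
            public static String write(Map<String, Object> plan) throws Exception {
                ObjectMapper mapper = new ObjectMapper();
                // Sort map entries at serialization time: the emitted JSON no
                // longer depends on HashMap iteration order, i.e. on the JDK.
                mapper.enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS);
                return mapper.writeValueAsString(plan);
            }

            public static void main(String[] args) throws Exception {
                Map<String, Object> plan = new HashMap<>();
                plan.put("STAGE PLANS", "...");
                plan.put("STAGE DEPENDENCIES", "...");
                System.out.println(write(plan)); // identical on JDK 7 and JDK 8
            }
        }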
    Added: hive/trunk/ql/src/test/results/clientpositive/join0.q.java1.8.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/join0.q.java1.8.out?rev=1647794&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/join0.q.java1.8.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/join0.q.java1.8.out Wed Dec 24 14:33:00 2014
    @@ -0,0 +1,238 @@
    +Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
    +PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +EXPLAIN
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-2 depends on stages: Stage-1
    + Stage-0 depends on stages: Stage-2
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Map Operator Tree:
    + TableScan
    + alias: src
    + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
    + Filter Operator
    + predicate: (key < 10) (type: boolean)
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Select Operator
    + expressions: key (type: string), value (type: string)
    + outputColumnNames: _col0, _col1
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Reduce Output Operator
    + sort order:
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + value expressions: _col0 (type: string), _col1 (type: string)
    + TableScan
    + alias: src
    + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
    + Filter Operator
    + predicate: (key < 10) (type: boolean)
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Select Operator
    + expressions: key (type: string), value (type: string)
    + outputColumnNames: _col0, _col1
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + Reduce Output Operator
    + sort order:
    + Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
    + value expressions: _col0 (type: string), _col1 (type: string)
    + Reduce Operator Tree:
    + Join Operator
    + condition map:
    + Inner Join 0 to 1
    + keys:
    + 0
    + 1
    + outputColumnNames: _col0, _col1, _col2, _col3
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + File Output Operator
    + compressed: false
    + table:
    + input format: org.apache.hadoop.mapred.SequenceFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
    +
    + Stage: Stage-2
    + Map Reduce
    + Map Operator Tree:
    + TableScan
    + Reduce Output Operator
    + key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
    + sort order: ++++
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + Reduce Operator Tree:
    + Select Operator
    + expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
    + outputColumnNames: _col0, _col1, _col2, _col3
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + File Output Operator
    + compressed: false
    + Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    + Processor Tree:
    + ListSink
    +
    +Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
    +PREHOOK: query: EXPLAIN FORMATTED
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +POSTHOOK: query: EXPLAIN FORMATTED
    +SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +{"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-2":{"DEPENDENT STAGES":"Stage-1"},"Stage-0":{"DEPENDENT STAGES":"Stage-2"}},"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Reduce Operator Tree:":{"Join Operator":{"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","keys:":{},"children":{"File Output Operator":{"compressed:":"false","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe"}}},"condition map:":[{"":"Inner Join 0 to 1"}],"outputColumnNames:":["_col0","_col1","_col2","_col3"]}},"Map Operator Tree:":[{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE","children":{"Filter Operator":{"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"Statist
      ics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Reduce Output Operator":{"value expressions:":"_col0 (type: string), _col1 (type: string)","sort order:":"","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE"}},"outputColumnNames:":["_col0","_col1"],"expressions:":"key (type: string), value (type: string)"}},"predicate:":"(key < 10) (type: boolean)"}}}},{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE","children":{"Filter Operator":{"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Reduce Output Operator":{"value expressions:":"_col0 (type: string), _col1 (type: string)","sort order:":"","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE"}},"
      outputColumnNames:":["_col0","_col1"],"expressions:":"key (type: string), value (type: string)"}},"predicate:":"(key < 10) (type: boolean)"}}}}]}},"Stage-2":{"Map Reduce":{"Reduce Operator Tree:":{"Select Operator":{"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.TextInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}}},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"expressions:":"KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)"}},"Map Operator Tree:":[{"TableScan":{"children":{"Reduce Output Operator":{"sort order:":"++++","Statistics:":"Num rows: 182
      Data size: 1939 Basic stats: COMPLETE Column stats: NONE","key expressions:":"_col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)"}}}}]}},"Stage-0":{"Fetch Operator":{"Processor Tree:":{"ListSink":{}},"limit:":"-1"}}}}
    +Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
    +PREHOOK: query: SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@src
    +#### A masked pattern was here ####
    +POSTHOOK: query: SELECT src1.key as k1, src1.value as v1,
    + src2.key as k2, src2.value as v2 FROM
    + (SELECT * FROM src WHERE src.key < 10) src1
    + JOIN
    + (SELECT * FROM src WHERE src.key < 10) src2
    + SORT BY k1, v1, k2, v2
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@src
    +#### A masked pattern was here ####
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 0 val_0
    +0 val_0 2 val_2
    +0 val_0 2 val_2
    +0 val_0 2 val_2
    +0 val_0 4 val_4
    +0 val_0 4 val_4
    +0 val_0 4 val_4
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 5 val_5
    +0 val_0 8 val_8
    +0 val_0 8 val_8
    +0 val_0 8 val_8
    +0 val_0 9 val_9
    +0 val_0 9 val_9
    +0 val_0 9 val_9
    +2 val_2 0 val_0
    +2 val_2 0 val_0
    +2 val_2 0 val_0
    +2 val_2 2 val_2
    +2 val_2 4 val_4
    +2 val_2 5 val_5
    +2 val_2 5 val_5
    +2 val_2 5 val_5
    +2 val_2 8 val_8
    +2 val_2 9 val_9
    +4 val_4 0 val_0
    +4 val_4 0 val_0
    +4 val_4 0 val_0
    +4 val_4 2 val_2
    +4 val_4 4 val_4
    +4 val_4 5 val_5
    +4 val_4 5 val_5
    +4 val_4 5 val_5
    +4 val_4 8 val_8
    +4 val_4 9 val_9
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 0 val_0
    +5 val_5 2 val_2
    +5 val_5 2 val_2
    +5 val_5 2 val_2
    +5 val_5 4 val_4
    +5 val_5 4 val_4
    +5 val_5 4 val_4
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 5 val_5
    +5 val_5 8 val_8
    +5 val_5 8 val_8
    +5 val_5 8 val_8
    +5 val_5 9 val_9
    +5 val_5 9 val_9
    +5 val_5 9 val_9
    +8 val_8 0 val_0
    +8 val_8 0 val_0
    +8 val_8 0 val_0
    +8 val_8 2 val_2
    +8 val_8 4 val_4
    +8 val_8 5 val_5
    +8 val_8 5 val_5
    +8 val_8 5 val_5
    +8 val_8 8 val_8
    +8 val_8 9 val_9
    +9 val_9 0 val_0
    +9 val_9 0 val_0
    +9 val_9 0 val_0
    +9 val_9 2 val_2
    +9 val_9 4 val_4
    +9 val_9 5 val_5
    +9 val_9 5 val_5
    +9 val_9 5 val_5
    +9 val_9 8 val_8
    +9 val_9 9 val_9

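Both join0 golden files carry the -- JAVA_VERSION_SPECIFIC_OUTPUT marker, and the commit stores one expected output per JVM line. A minimal sketch of how a test driver could select the matching file, assuming only the naming convention visible in this commit (GoldenFileSelector and expectedOutput are hypothetical names, not Hive's actual QTestUtil code):

    import java.io.File;

    public class GoldenFileSelector {
        // For tests tagged -- JAVA_VERSION_SPECIFIC_OUTPUT, pick the golden
        // file that matches the running JVM: <query>.q.java1.7.out on JDK 7,
        // <query>.q.java1.8.out on JDK 8; plain tests keep <query>.q.out.
        static File expectedOutput(File resultsDir, String query, boolean versionSpecific) {
            if (!versionSpecific) {
                return new File(resultsDir, query + ".q.out");
            }
            // "java.specification.version" is "1.7" on JDK 7 and "1.8" on JDK 8.
            String spec = System.getProperty("java.specification.version");
            return new File(resultsDir, query + ".q.java" + spec + ".out");
        }

        public static void main(String[] args) {
            File dir = new File("ql/src/test/results/clientpositive");
            System.out.println(expectedOutput(dir, "join0", true));
        }
    }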
    Added: hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out?rev=1647794&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out Wed Dec 24 14:33:00 2014
    @@ -0,0 +1,1127 @@
    +PREHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
    +-- The following explains merge example used in this test case
     +-- DML will generate 2 partitions
    +-- ds=2008-04-08/hr=a1
    +-- ds=2008-04-08/hr=b1
    +-- without merge, each partition has more files
    +-- ds=2008-04-08/hr=a1 has 2 files
    +-- ds=2008-04-08/hr=b1 has 6 files
     +-- with merge, each partition has fewer files
    +-- ds=2008-04-08/hr=a1 has 1 files
    +-- ds=2008-04-08/hr=b1 has 4 files
    +-- The following shows file size and name in each directory
    +-- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
    +-- without merge
    +-- 155 000000_0
    +-- 155 000001_0
    +-- with merge
    +-- 254 000000_0
    +-- hr=b1/key=103/value=val_103:
    +-- without merge
    +-- 99 000000_0
    +-- 99 000001_0
    +-- with merge
    +-- 142 000001_0
    +-- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
    +-- without merge
    +-- 5181 000000_0
    +-- 5181 000001_0
    +-- with merge
    +-- 5181 000000_0
    +-- 5181 000001_0
    +-- hr=b1/key=484/value=val_484
    +-- without merge
    +-- 87 000000_0
    +-- 87 000001_0
    +-- with merge
    +-- 118 000002_0
    +
    +-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
    +-- SORT_QUERY_RESULTS
    +-- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +-- create a skewed table
    +create table list_bucketing_dynamic_part (key String, value String)
    + partitioned by (ds String, hr String)
    + skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
    + stored as DIRECTORIES
    + STORED AS RCFILE
    +PREHOOK: type: CREATETABLE
    +PREHOOK: Output: database:default
    +PREHOOK: Output: default@list_bucketing_dynamic_part
    +POSTHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
    +-- The following explains merge example used in this test case
     +-- DML will generate 2 partitions
    +-- ds=2008-04-08/hr=a1
    +-- ds=2008-04-08/hr=b1
    +-- without merge, each partition has more files
    +-- ds=2008-04-08/hr=a1 has 2 files
    +-- ds=2008-04-08/hr=b1 has 6 files
     +-- with merge, each partition has fewer files
    +-- ds=2008-04-08/hr=a1 has 1 files
    +-- ds=2008-04-08/hr=b1 has 4 files
    +-- The following shows file size and name in each directory
    +-- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
    +-- without merge
    +-- 155 000000_0
    +-- 155 000001_0
    +-- with merge
    +-- 254 000000_0
    +-- hr=b1/key=103/value=val_103:
    +-- without merge
    +-- 99 000000_0
    +-- 99 000001_0
    +-- with merge
    +-- 142 000001_0
    +-- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
    +-- without merge
    +-- 5181 000000_0
    +-- 5181 000001_0
    +-- with merge
    +-- 5181 000000_0
    +-- 5181 000001_0
    +-- hr=b1/key=484/value=val_484
    +-- without merge
    +-- 87 000000_0
    +-- 87 000001_0
    +-- with merge
    +-- 118 000002_0
    +
    +-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
    +-- SORT_QUERY_RESULTS
    +-- JAVA_VERSION_SPECIFIC_OUTPUT
    +
    +-- create a skewed table
    +create table list_bucketing_dynamic_part (key String, value String)
    + partitioned by (ds String, hr String)
    + skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
    + stored as DIRECTORIES
    + STORED AS RCFILE
    +POSTHOOK: type: CREATETABLE
    +POSTHOOK: Output: database:default
    +POSTHOOK: Output: default@list_bucketing_dynamic_part
    +PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
    +explain extended
    +insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
    +explain extended
    +insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    +
    +TOK_QUERY
    + TOK_FROM
    + TOK_TABREF
    + TOK_TABNAME
    + srcpart
    + TOK_INSERT
    + TOK_DESTINATION
    + TOK_TAB
    + TOK_TABNAME
    + list_bucketing_dynamic_part
    + TOK_PARTSPEC
    + TOK_PARTVAL
    + ds
    + '2008-04-08'
    + TOK_PARTVAL
    + hr
    + TOK_SELECT
    + TOK_SELEXPR
    + TOK_TABLE_OR_COL
    + key
    + TOK_SELEXPR
    + TOK_TABLE_OR_COL
    + value
    + TOK_SELEXPR
    + TOK_FUNCTION
    + if
    + ==
    + %
    + TOK_TABLE_OR_COL
    + key
    + 100
    + 0
    + 'a1'
    + 'b1'
    + TOK_WHERE
    + =
    + TOK_TABLE_OR_COL
    + ds
    + '2008-04-08'
    +
    +
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-0 depends on stages: Stage-1
    + Stage-2 depends on stages: Stage-0
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Map Operator Tree:
    + TableScan
    + alias: srcpart
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    + GatherStats: false
    + Select Operator
    + expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
    + outputColumnNames: _col0, _col1, _col2
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    + File Output Operator
    + compressed: false
    + GlobalTableId: 1
    +#### A masked pattern was here ####
    + NumFilesPerFileSink: 1
    + Static Partition Specification: ds=2008-04-08/
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    +#### A masked pattern was here ####
    + table:
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + TotalFiles: 1
    + GatherStats: true
    + MultiFileSpray: false
    + Path -> Alias:
    +#### A masked pattern was here ####
    + Path -> Partition:
    +#### A masked pattern was here ####
    + Partition
    + base file name: hr=11
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr 11
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + numFiles 1
    + numRows 500
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 5312
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + totalSize 5812
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + name: default.srcpart
    + name: default.srcpart
    +#### A masked pattern was here ####
    + Partition
    + base file name: hr=12
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr 12
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + numFiles 1
    + numRows 500
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 5312
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + totalSize 5812
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + name: default.srcpart
    + name: default.srcpart
    + Truncated Path -> Alias:
    + /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:srcpart]
    + /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:srcpart]
    +
    + Stage: Stage-0
    + Move Operator
    + tables:
    + partition:
    + ds 2008-04-08
    + hr
    + replace: true
    +#### A masked pattern was here ####
    + table:
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    +
    + Stage: Stage-2
    + Stats-Aggr Operator
    +#### A masked pattern was here ####
    +
    +PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@srcpart
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
    +POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@srcpart
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
    +PREHOOK: query: -- check DML result
    +show partitions list_bucketing_dynamic_part
    +PREHOOK: type: SHOWPARTITIONS
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: -- check DML result
    +show partitions list_bucketing_dynamic_part
    +POSTHOOK: type: SHOWPARTITIONS
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +ds=2008-04-08/hr=a1
    +ds=2008-04-08/hr=b1
    +PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
    +PREHOOK: type: DESCTABLE
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
    +POSTHOOK: type: DESCTABLE
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +# col_name data_type comment
    +
    +key string
    +value string
    +
    +# Partition Information
    +# col_name data_type comment
    +
    +ds string
    +hr string
    +
    +# Detailed Partition Information
    +Partition Value: [2008-04-08, a1]
    +Database: default
    +Table: list_bucketing_dynamic_part
    +#### A masked pattern was here ####
    +Protect Mode: None
    +#### A masked pattern was here ####
    +Partition Parameters:
    + COLUMN_STATS_ACCURATE true
    + numFiles 2
    + numRows 16
    + rawDataSize 136
    + totalSize 310
    +#### A masked pattern was here ####
    +
    +# Storage Information
    +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    +Compressed: No
    +Num Buckets: -1
    +Bucket Columns: []
    +Sort Columns: []
    +Stored As SubDirectories: Yes
    +Skewed Columns: [key, value]
    +Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
    +Storage Desc Params:
    + serialization.format 1
    +PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
    +PREHOOK: type: DESCTABLE
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
    +POSTHOOK: type: DESCTABLE
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +# col_name data_type comment
    +
    +key string
    +value string
    +
    +# Partition Information
    +# col_name data_type comment
    +
    +ds string
    +hr string
    +
    +# Detailed Partition Information
    +Partition Value: [2008-04-08, b1]
    +Database: default
    +Table: list_bucketing_dynamic_part
    +#### A masked pattern was here ####
    +Protect Mode: None
    +#### A masked pattern was here ####
    +Partition Parameters:
    + COLUMN_STATS_ACCURATE true
    + numFiles 6
    + numRows 984
    + rawDataSize 9488
    + totalSize 10734
    +#### A masked pattern was here ####
    +
    +# Storage Information
    +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    +Compressed: No
    +Num Buckets: -1
    +Bucket Columns: []
    +Sort Columns: []
    +Stored As SubDirectories: Yes
    +Skewed Columns: [key, value]
    +Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
    +#### A masked pattern was here ####
    +Skewed Value to Truncated Path: {[103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103, [484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484}
    +Storage Desc Params:
    + serialization.format 1
    +PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
    +explain extended
    +insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +PREHOOK: type: QUERY
    +POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
    +explain extended
    +insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    +
    +TOK_QUERY
    + TOK_FROM
    + TOK_TABREF
    + TOK_TABNAME
    + srcpart
    + TOK_INSERT
    + TOK_DESTINATION
    + TOK_TAB
    + TOK_TABNAME
    + list_bucketing_dynamic_part
    + TOK_PARTSPEC
    + TOK_PARTVAL
    + ds
    + '2008-04-08'
    + TOK_PARTVAL
    + hr
    + TOK_SELECT
    + TOK_SELEXPR
    + TOK_TABLE_OR_COL
    + key
    + TOK_SELEXPR
    + TOK_TABLE_OR_COL
    + value
    + TOK_SELEXPR
    + TOK_FUNCTION
    + if
    + ==
    + %
    + TOK_TABLE_OR_COL
    + key
    + 100
    + 0
    + 'a1'
    + 'b1'
    + TOK_WHERE
    + =
    + TOK_TABLE_OR_COL
    + ds
    + '2008-04-08'
    +
    +
    +STAGE DEPENDENCIES:
    + Stage-1 is a root stage
    + Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
    + Stage-4
    + Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
    + Stage-2 depends on stages: Stage-0
    + Stage-3
    + Stage-5
    + Stage-6 depends on stages: Stage-5
    +
    +STAGE PLANS:
    + Stage: Stage-1
    + Map Reduce
    + Map Operator Tree:
    + TableScan
    + alias: srcpart
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    + GatherStats: false
    + Select Operator
    + expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
    + outputColumnNames: _col0, _col1, _col2
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    + File Output Operator
    + compressed: false
    + GlobalTableId: 1
    +#### A masked pattern was here ####
    + NumFilesPerFileSink: 1
    + Static Partition Specification: ds=2008-04-08/
    + Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
    +#### A masked pattern was here ####
    + table:
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + TotalFiles: 1
    + GatherStats: true
    + MultiFileSpray: false
    + Path -> Alias:
    +#### A masked pattern was here ####
    + Path -> Partition:
    +#### A masked pattern was here ####
    + Partition
    + base file name: hr=11
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr 11
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + numFiles 1
    + numRows 500
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 5312
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + totalSize 5812
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + name: default.srcpart
    + name: default.srcpart
    +#### A masked pattern was here ####
    + Partition
    + base file name: hr=12
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr 12
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + numFiles 1
    + numRows 500
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 5312
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + totalSize 5812
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments 'default','default'
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.srcpart
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct srcpart { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    + name: default.srcpart
    + name: default.srcpart
    + Truncated Path -> Alias:
    + /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:srcpart]
    + /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:srcpart]
    +
    + Stage: Stage-7
    + Conditional Operator
    +
    + Stage: Stage-4
    + Move Operator
    + files:
    + hdfs directory: true
    +#### A masked pattern was here ####
    +
    + Stage: Stage-0
    + Move Operator
    + tables:
    + partition:
    + ds 2008-04-08
    + hr
    + replace: true
    +#### A masked pattern was here ####
    + table:
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    +
    + Stage: Stage-2
    + Stats-Aggr Operator
    +#### A masked pattern was here ####
    +
    + Stage: Stage-3
    + Merge File Operator
    + Map Operator Tree:
    + RCFile Merge Operator
    + merge level: block
    + Path -> Alias:
    +#### A masked pattern was here ####
    + Path -> Partition:
    +#### A masked pattern was here ####
    + Partition
    + input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + name: default.list_bucketing_dynamic_part
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + Truncated Path -> Alias:
    +#### A masked pattern was here ####
    +
    + Stage: Stage-5
    + Merge File Operator
    + Map Operator Tree:
    + RCFile Merge Operator
    + merge level: block
    + Path -> Alias:
    +#### A masked pattern was here ####
    + Path -> Partition:
    +#### A masked pattern was here ####
    + Partition
    + input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + name: default.list_bucketing_dynamic_part
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + Truncated Path -> Alias:
    +#### A masked pattern was here ####
    +
    + Stage: Stage-6
    + Move Operator
    + files:
    + hdfs directory: true
    +#### A masked pattern was here ####
    +
    +PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@srcpart
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
    +POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    +select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@srcpart
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
    +POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
    +PREHOOK: query: -- check DML result
    +show partitions list_bucketing_dynamic_part
    +PREHOOK: type: SHOWPARTITIONS
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: -- check DML result
    +show partitions list_bucketing_dynamic_part
    +POSTHOOK: type: SHOWPARTITIONS
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +ds=2008-04-08/hr=a1
    +ds=2008-04-08/hr=b1
    +PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
    +PREHOOK: type: DESCTABLE
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
    +POSTHOOK: type: DESCTABLE
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +# col_name data_type comment
    +
    +key string
    +value string
    +
    +# Partition Information
    +# col_name data_type comment
    +
    +ds string
    +hr string
    +
    +# Detailed Partition Information
    +Partition Value: [2008-04-08, a1]
    +Database: default
    +Table: list_bucketing_dynamic_part
    +#### A masked pattern was here ####
    +Protect Mode: None
    +#### A masked pattern was here ####
    +Partition Parameters:
    + COLUMN_STATS_ACCURATE true
    + numFiles 1
    + numRows 16
    + rawDataSize 136
    + totalSize 254
    +#### A masked pattern was here ####
    +
    +# Storage Information
    +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    +Compressed: No
    +Num Buckets: -1
    +Bucket Columns: []
    +Sort Columns: []
    +Stored As SubDirectories: Yes
    +Skewed Columns: [key, value]
    +Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
    +Storage Desc Params:
    + serialization.format 1
    +PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
    +PREHOOK: type: DESCTABLE
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
    +POSTHOOK: type: DESCTABLE
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +# col_name data_type comment
    +
    +key string
    +value string
    +
    +# Partition Information
    +# col_name data_type comment
    +
    +ds string
    +hr string
    +
    +# Detailed Partition Information
    +Partition Value: [2008-04-08, b1]
    +Database: default
    +Table: list_bucketing_dynamic_part
    +#### A masked pattern was here ####
    +Protect Mode: None
    +#### A masked pattern was here ####
    +Partition Parameters:
    + COLUMN_STATS_ACCURATE true
    + numFiles 4
    + numRows 984
    + rawDataSize 9488
    + totalSize 10622
    +#### A masked pattern was here ####
    +
    +# Storage Information
    +SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    +OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    +Compressed: No
    +Num Buckets: -1
    +Bucket Columns: []
    +Sort Columns: []
    +Stored As SubDirectories: Yes
    +Skewed Columns: [key, value]
    +Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
    +#### A masked pattern was here ####
    +Skewed Value to Truncated Path: {[103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103, [484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484}
    +Storage Desc Params:
    + serialization.format 1
    +PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@srcpart
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +#### A masked pattern was here ####
    +POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@srcpart
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +#### A masked pattern was here ####
    +1000
    +PREHOOK: query: select count(*) from list_bucketing_dynamic_part
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +#### A masked pattern was here ####
    +POSTHOOK: query: select count(*) from list_bucketing_dynamic_part
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +#### A masked pattern was here ####
    +1000
    +PREHOOK: query: explain extended
    +select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
    +PREHOOK: type: QUERY
    +POSTHOOK: query: explain extended
    +select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
    +POSTHOOK: type: QUERY
    +ABSTRACT SYNTAX TREE:
    +
    +TOK_QUERY
    + TOK_FROM
    + TOK_TABREF
    + TOK_TABNAME
    + list_bucketing_dynamic_part
    + TOK_INSERT
    + TOK_DESTINATION
    + TOK_DIR
    + TOK_TMP_FILE
    + TOK_SELECT
    + TOK_SELEXPR
    + TOK_ALLCOLREF
    + TOK_WHERE
    + and
    + =
    + TOK_TABLE_OR_COL
    + key
    + '484'
    + =
    + TOK_TABLE_OR_COL
    + value
    + 'val_484'
    +
    +
    +STAGE DEPENDENCIES:
    + Stage-0 is a root stage
    +
    +STAGE PLANS:
    + Stage: Stage-0
    + Fetch Operator
    + limit: -1
    + Partition Description:
    + Partition
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr a1
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + numFiles 1
    + numRows 16
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 136
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + totalSize 254
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + name: default.list_bucketing_dynamic_part
    + Partition
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + partition values:
    + ds 2008-04-08
    + hr b1
    + properties:
    + COLUMN_STATS_ACCURATE true
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + numFiles 4
    + numRows 984
    + partition_columns ds/hr
    + partition_columns.types string:string
    + rawDataSize 9488
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + totalSize 10622
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +
    + input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
    + output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
    + properties:
    + bucket_count -1
    + columns key,value
    + columns.comments
    + columns.types string:string
    +#### A masked pattern was here ####
    + name default.list_bucketing_dynamic_part
    + partition_columns ds/hr
    + partition_columns.types string:string
    + serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
    + serialization.format 1
    + serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    +#### A masked pattern was here ####
    + serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
    + name: default.list_bucketing_dynamic_part
    + name: default.list_bucketing_dynamic_part
    + Processor Tree:
    + TableScan
    + alias: list_bucketing_dynamic_part
    + Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
    + GatherStats: false
    + Filter Operator
    + isSamplingPred: false
    + predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
    + Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
    + Select Operator
    + expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
    + outputColumnNames: _col0, _col1, _col2, _col3
    + Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
    + ListSink
    +
    +PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +#### A masked pattern was here ####
    +POSTHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
    +POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
    +#### A masked pattern was here ####
    +484 val_484 2008-04-08 b1
    +484 val_484 2008-04-08 b1
    +PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@srcpart
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +#### A masked pattern was here ####
    +POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@srcpart
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
    +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
    +#### A masked pattern was here ####
    +484 val_484 2008-04-08 11
    +484 val_484 2008-04-08 12
    +PREHOOK: query: -- clean up
    +drop table list_bucketing_dynamic_part
    +PREHOOK: type: DROPTABLE
    +PREHOOK: Input: default@list_bucketing_dynamic_part
    +PREHOOK: Output: default@list_bucketing_dynamic_part
    +POSTHOOK: query: -- clean up
    +drop table list_bucketing_dynamic_part
    +POSTHOOK: type: DROPTABLE
    +POSTHOOK: Input: default@list_bucketing_dynamic_part
    +POSTHOOK: Output: default@list_bucketing_dynamic_part

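The list_bucket_dml_6 output above is tagged -- SORT_QUERY_RESULTS in addition to -- JAVA_VERSION_SPECIFIC_OUTPUT. Sorting result rows before the golden-file diff removes ordering differences that are not structural, so per-JDK files are only needed where the shape of the output itself changes, as in the JSON plans. A minimal sketch of such a normalization pass (SortQueryResults is a hypothetical name, not Hive's harness code):

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.Collections;
    import java.util.List;

    public class SortQueryResults {
        public static void main(String[] args) throws IOException {
            // Read the raw result lines, sort them lexicographically, and
            // write them back, so row order emitted by the engine no longer
            // affects the comparison against the golden file.
            Path results = Paths.get(args[0]);
            List<String> lines = Files.readAllLines(results, StandardCharsets.UTF_8);
            Collections.sort(lines);
            Files.write(results, lines, StandardCharsets.UTF_8);
        }
    }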