FAQ
Author: namit
Date: Sat Jan 19 03:54:17 2013
New Revision: 1435484

URL: http://svn.apache.org/viewvc?rev=1435484&view=rev
Log:
HIVE-3699 Multiple insert overwrite into multiple tables query stores same results
in all tables (Navis via namit)



Added:
     hive/trunk/ql/src/test/queries/clientpositive/multi_insert_gby.q
     hive/trunk/ql/src/test/results/clientpositive/multi_insert_gby.q.out
Modified:
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
     hive/trunk/ql/src/test/results/clientpositive/groupby_multi_single_reducer2.q.out
     hive/trunk/ql/src/test/results/clientpositive/multi_insert.q.out
     hive/trunk/ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=1435484&r1=1435483&r2=1435484&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Sat Jan 19 03:54:17 2013
@@ -32,7 +32,6 @@ import org.apache.commons.logging.LogFac
  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.ql.exec.ColumnInfo;
  import org.apache.hadoop.hive.ql.exec.FilterOperator;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
  import org.apache.hadoop.hive.ql.exec.JoinOperator;
  import org.apache.hadoop.hive.ql.exec.Operator;
  import org.apache.hadoop.hive.ql.exec.OperatorFactory;
@@ -54,14 +53,12 @@ import org.apache.hadoop.hive.ql.parse.S
  import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
  import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
  import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
  import org.apache.hadoop.hive.ql.plan.FilterDesc;
  import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
  import org.apache.hadoop.hive.ql.plan.JoinDesc;
  import org.apache.hadoop.hive.ql.plan.OperatorDesc;
  import org.apache.hadoop.hive.ql.plan.TableScanDesc;
  import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
  import org.apache.hadoop.mapred.JobConf;

  /**
@@ -585,16 +582,22 @@ public final class OpProcFactory {
          ExprWalkerInfo ewi, Set<String> aliases, boolean ignoreAliases)
          throws SemanticException {
        boolean hasUnpushedPredicates = false;
- if (nd.getChildren() == null || nd.getChildren().size() > 1) {
+ Operator<?> current = (Operator<?>) nd;
+ List<Operator<?>> children = current.getChildOperators();
+ if (children == null || children.isEmpty()) {
+ return hasUnpushedPredicates;
+ }
+ if (children.size() > 1) {
          // ppd for multi-insert query is not yet implemented
          // no-op for leafs
+ for (Operator<?> child : children) {
+ removeCandidates(child, owi); // remove candidated filters on this branch
+ }
          return hasUnpushedPredicates;
        }
        Operator<? extends OperatorDesc> op =
          (Operator<? extends OperatorDesc>) nd;
- ExprWalkerInfo childPreds = owi
- .getPrunedPreds((Operator<? extends OperatorDesc>) nd.getChildren()
- .get(0));
+ ExprWalkerInfo childPreds = owi.getPrunedPreds(children.get(0));
        if (childPreds == null) {
          return hasUnpushedPredicates;
        }
@@ -622,6 +625,17 @@ public final class OpProcFactory {
        return hasUnpushedPredicates;
      }

+ private void removeCandidates(Operator<?> operator, OpWalkerInfo owi) {
+ if (operator instanceof FilterOperator) {
+ owi.getCandidateFilterOps().remove(operator);
+ }
+ if (operator.getChildOperators() != null) {
+ for (Operator<?> child : operator.getChildOperators()) {
+ removeCandidates(child, owi);
+ }
+ }
+ }
+
      protected ExprWalkerInfo mergeChildrenPred(Node nd, OpWalkerInfo owi,
          Set<String> excludedAliases, boolean ignoreAliases)
          throws SemanticException {

Added: hive/trunk/ql/src/test/queries/clientpositive/multi_insert_gby.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/multi_insert_gby.q?rev=1435484&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/multi_insert_gby.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/multi_insert_gby.q Sat Jan 19 03:54:17 2013
@@ -0,0 +1,33 @@
+--HIVE-3699 Multiple insert overwrite into multiple tables query stores same results in all tables
+create table e1 (key string, count int);
+create table e2 (key string, count int);
+
+explain FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) WHERE key>500 GROUP BY key ORDER BY key;
+
+FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) WHERE key>500 GROUP BY key ORDER BY key;
+
+select * from e1;
+select * from e2;
+
+explain FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) GROUP BY key ORDER BY key;
+
+FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) GROUP BY key ORDER BY key;
+
+select * from e1;
+select * from e2;

Modified: hive/trunk/ql/src/test/results/clientpositive/groupby_multi_single_reducer2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/groupby_multi_single_reducer2.q.out?rev=1435484&r1=1435483&r2=1435484&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/groupby_multi_single_reducer2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/groupby_multi_single_reducer2.q.out Sat Jan 19 03:54:17 2013
@@ -62,73 +62,81 @@ STAGE PLANS:
                          type: string
        Reduce Operator Tree:
          Forward
- Group By Operator
- aggregations:
- expr: count(DISTINCT KEY._col1:0._col0)
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 >= 5.0)
+ type: boolean
+ Group By Operator
+ aggregations:
+ expr: count(DISTINCT KEY._col1:0._col0)
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
- type: bigint
+ mode: complete
                outputColumnNames: _col0, _col1
                Select Operator
                  expressions:
                        expr: _col0
                        type: string
- expr: UDFToInteger(_col1)
- type: int
+ expr: _col1
+ type: bigint
                  outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.dest_g2
- Group By Operator
- aggregations:
- expr: count(DISTINCT KEY._col1:0._col0)
- expr: count(VALUE._col0)
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- mode: complete
- outputColumnNames: _col0, _col1, _col2
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: UDFToInteger(_col1)
+ type: int
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest_g2
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 5.0)
+ type: boolean
+ Group By Operator
+ aggregations:
+ expr: count(DISTINCT KEY._col1:0._col0)
+ expr: count(VALUE._col0)
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
- type: bigint
- expr: _col2
- type: bigint
+ mode: complete
                outputColumnNames: _col0, _col1, _col2
                Select Operator
                  expressions:
                        expr: _col0
                        type: string
- expr: UDFToInteger(_col1)
- type: int
- expr: UDFToInteger(_col2)
- type: int
+ expr: _col1
+ type: bigint
+ expr: _col2
+ type: bigint
                  outputColumnNames: _col0, _col1, _col2
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.dest_g3
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: UDFToInteger(_col1)
+ type: int
+ expr: UDFToInteger(_col2)
+ type: int
+ outputColumnNames: _col0, _col1, _col2
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest_g3

    Stage: Stage-0
      Move Operator
@@ -189,11 +197,6 @@ POSTHOOK: Lineage: dest_g2.key SIMPLE [(
  POSTHOOK: Lineage: dest_g3.c1 EXPRESSION [(src)src.null, ]
  POSTHOOK: Lineage: dest_g3.c2 EXPRESSION [(src)src.null, ]
  POSTHOOK: Lineage: dest_g3.key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-0 1
-1 71
-2 69
-3 62
-4 74
  5 6
  6 5
  7 6
@@ -217,11 +220,6 @@ POSTHOOK: Lineage: dest_g3.key SIMPLE [(
  2 69 111
  3 62 99
  4 74 124
-5 6 10
-6 5 6
-7 6 10
-8 8 10
-9 7 12
  PREHOOK: query: DROP TABLE dest_g2
  PREHOOK: type: DROPTABLE
  PREHOOK: Input: default@dest_g2

Modified: hive/trunk/ql/src/test/results/clientpositive/multi_insert.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/multi_insert.q.out?rev=1435484&r1=1435483&r2=1435484&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/multi_insert.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/multi_insert.q.out Sat Jan 19 03:54:17 2013
@@ -973,54 +973,62 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2

    Stage: Stage-0
      Move Operator
@@ -1104,12 +1112,6 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  0 val_0
-11 val_11
-12 val_12
-15 val_15
-17 val_17
-18 val_18
-19 val_19
  2 val_2
  4 val_4
  5 val_5
@@ -1139,18 +1141,12 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-0 val_0
  11 val_11
  12 val_12
  15 val_15
  17 val_17
  18 val_18
  19 val_19
-2 val_2
-4 val_4
-5 val_5
-8 val_8
-9 val_9
  PREHOOK: query: explain
  from src
  insert overwrite table src_multi1 select * where key < 10 group by key, value
@@ -1230,54 +1226,62 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2

    Stage: Stage-8
      Conditional Operator
@@ -1443,12 +1447,6 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  0 val_0
-11 val_11
-12 val_12
-15 val_15
-17 val_17
-18 val_18
-19 val_19
  2 val_2
  4 val_4
  5 val_5
@@ -1478,18 +1476,12 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-0 val_0
  11 val_11
  12 val_12
  15 val_15
  17 val_17
  18 val_18
  19 val_19
-2 val_2
-4 val_4
-5 val_5
-8 val_8
-9 val_9
  PREHOOK: query: explain
  from src
  insert overwrite table src_multi1 select * where key < 10 group by key, value
@@ -1559,54 +1551,62 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2

    Stage: Stage-0
      Move Operator
@@ -1690,12 +1690,6 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  0 val_0
-11 val_11
-12 val_12
-15 val_15
-17 val_17
-18 val_18
-19 val_19
  2 val_2
  4 val_4
  5 val_5
@@ -1725,18 +1719,12 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-0 val_0
  11 val_11
  12 val_12
  15 val_15
  17 val_17
  18 val_18
  19 val_19
-2 val_2
-4 val_4
-5 val_5
-8 val_8
-9 val_9
  PREHOOK: query: explain
  from src
  insert overwrite table src_multi1 select * where key < 10 group by key, value
@@ -1816,54 +1804,62 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2

    Stage: Stage-8
      Conditional Operator
@@ -2029,12 +2025,6 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  0 val_0
-11 val_11
-12 val_12
-15 val_15
-17 val_17
-18 val_18
-19 val_19
  2 val_2
  4 val_4
  5 val_5
@@ -2064,18 +2054,12 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-0 val_0
  11 val_11
  12 val_12
  15 val_15
  17 val_17
  18 val_18
  19 val_19
-2 val_2
-4 val_4
-5 val_5
-8 val_8
-9 val_9
  PREHOOK: query: explain
  from (select * from src union all select * from src) s
  insert overwrite table src_multi1 select * where key < 10

Added: hive/trunk/ql/src/test/results/clientpositive/multi_insert_gby.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/multi_insert_gby.q.out?rev=1435484&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/multi_insert_gby.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/multi_insert_gby.q.out Sat Jan 19 03:54:17 2013
@@ -0,0 +1,869 @@
+PREHOOK: query: --HIVE-3699 Multiple insert overwrite into multiple tables query stores same results in all tables
+create table e1 (key string, count int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: --HIVE-3699 Multiple insert overwrite into multiple tables query stores same results in all tables
+create table e1 (key string, count int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@e1
+PREHOOK: query: create table e2 (key string, count int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table e2 (key string, count int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@e2
+PREHOOK: query: explain FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) WHERE key>500 GROUP BY key ORDER BY key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) WHERE key>500 GROUP BY key ORDER BY key
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME e1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTIONSTAR COUNT))) (TOK_WHERE (> (TOK_TABLE_OR_COL key) 450)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME e2))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTIONSTAR COUNT))) (TOK_WHERE (> (TOK_TABLE_OR_COL key) 500)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))
+
+STAGE DEPENDENCIES:
+ Stage-2 is a root stage
+ Stage-3 depends on stages: Stage-2
+ Stage-0 depends on stages: Stage-3
+ Stage-4 depends on stages: Stage-0
+ Stage-5 depends on stages: Stage-2
+ Stage-1 depends on stages: Stage-5
+ Stage-6 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
+ Filter Operator
+ predicate:
+ expr: ((key > 450.0) or (key > 500.0))
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ outputColumnNames: key
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: -1
+ Reduce Operator Tree:
+ Forward
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 > 450.0)
+ type: boolean
+ Group By Operator
+ aggregations:
+ expr: count()
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ mode: complete
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: bigint
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 > 500.0)
+ type: boolean
+ Group By Operator
+ aggregations:
+ expr: count()
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ mode: complete
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: bigint
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+ Stage: Stage-3
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: bigint
+ Reduce Operator Tree:
+ Extract
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: UDFToInteger(_col1)
+ type: int
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.e1
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.e1
+
+ Stage: Stage-4
+ Stats-Aggr Operator
+
+ Stage: Stage-5
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: bigint
+ Reduce Operator Tree:
+ Extract
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: UDFToInteger(_col1)
+ type: int
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.e2
+
+ Stage: Stage-1
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.e2
+
+ Stage: Stage-6
+ Stats-Aggr Operator
+
+
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) WHERE key>500 GROUP BY key ORDER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@e1
+PREHOOK: Output: default@e2
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) WHERE key>500 GROUP BY key ORDER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@e1
+POSTHOOK: Output: default@e2
+POSTHOOK: Lineage: e1.count EXPRESSION []
+POSTHOOK: Lineage: e2.count EXPRESSION []
+PREHOOK: query: select * from e1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: e1.count EXPRESSION []
+POSTHOOK: Lineage: e2.count EXPRESSION []
+452 1
+453 1
+454 3
+455 1
+457 1
+458 2
+459 2
+460 1
+462 2
+463 2
+466 3
+467 1
+468 4
+469 5
+470 1
+472 1
+475 1
+477 1
+478 2
+479 1
+480 3
+481 1
+482 1
+483 1
+484 1
+485 1
+487 1
+489 4
+490 1
+491 1
+492 2
+493 1
+494 1
+495 1
+496 1
+497 1
+498 3
+PREHOOK: query: select * from e2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: e1.count EXPRESSION []
+POSTHOOK: Lineage: e2.count EXPRESSION []
+PREHOOK: query: explain FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) GROUP BY key ORDER BY key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) GROUP BY key ORDER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: e1.count EXPRESSION []
+POSTHOOK: Lineage: e2.count EXPRESSION []
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME e1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTIONSTAR COUNT))) (TOK_WHERE (> (TOK_TABLE_OR_COL key) 450)) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME e2))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTIONSTAR COUNT))) (TOK_GROUPBY (TOK_TABLE_OR_COL key)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))
+
+STAGE DEPENDENCIES:
+ Stage-2 is a root stage
+ Stage-3 depends on stages: Stage-2
+ Stage-0 depends on stages: Stage-3
+ Stage-4 depends on stages: Stage-0
+ Stage-5 depends on stages: Stage-2
+ Stage-1 depends on stages: Stage-5
+ Stage-6 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-2
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ outputColumnNames: key
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ sort order: +
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: -1
+ Reduce Operator Tree:
+ Forward
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 > 450.0)
+ type: boolean
+ Group By Operator
+ aggregations:
+ expr: count()
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ mode: complete
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: bigint
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ Group By Operator
+ aggregations:
+ expr: count()
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
+ type: string
+ mode: complete
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: bigint
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+ Stage: Stage-3
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: bigint
+ Reduce Operator Tree:
+ Extract
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: UDFToInteger(_col1)
+ type: int
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.e1
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.e1
+
+ Stage: Stage-4
+ Stats-Aggr Operator
+
+ Stage: Stage-5
+ Map Reduce
+ Alias -> Map Operator Tree:
+#### A masked pattern was here ####
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: string
+ sort order: +
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: bigint
+ Reduce Operator Tree:
+ Extract
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: UDFToInteger(_col1)
+ type: int
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.e2
+
+ Stage: Stage-1
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.e2
+
+ Stage: Stage-6
+ Stats-Aggr Operator
+
+
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) GROUP BY key ORDER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@e1
+PREHOOK: Output: default@e2
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE e1
+ SELECT key, COUNT(*) WHERE key>450 GROUP BY key ORDER BY key
+INSERT OVERWRITE TABLE e2
+ SELECT key, COUNT(*) GROUP BY key ORDER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@e1
+POSTHOOK: Output: default@e2
+POSTHOOK: Lineage: e1.count EXPRESSION []
+POSTHOOK: Lineage: e1.count EXPRESSION []
+POSTHOOK: Lineage: e2.count EXPRESSION []
+POSTHOOK: Lineage: e2.count EXPRESSION []
+PREHOOK: query: select * from e1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: e1.count EXPRESSION []
+POSTHOOK: Lineage: e1.count EXPRESSION []
+POSTHOOK: Lineage: e2.count EXPRESSION []
+POSTHOOK: Lineage: e2.count EXPRESSION []
+452 1
+453 1
+454 3
+455 1
+457 1
+458 2
+459 2
+460 1
+462 2
+463 2
+466 3
+467 1
+468 4
+469 5
+470 1
+472 1
+475 1
+477 1
+478 2
+479 1
+480 3
+481 1
+482 1
+483 1
+484 1
+485 1
+487 1
+489 4
+490 1
+491 1
+492 2
+493 1
+494 1
+495 1
+496 1
+497 1
+498 3
+PREHOOK: query: select * from e2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@e2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from e2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@e2
+#### A masked pattern was here ####
+POSTHOOK: Lineage: e1.count EXPRESSION []
+POSTHOOK: Lineage: e1.count EXPRESSION []
+POSTHOOK: Lineage: e2.count EXPRESSION []
+POSTHOOK: Lineage: e2.count EXPRESSION []
+0 3
+10 1
+100 2
+103 2
+104 2
+105 1
+11 1
+111 1
+113 2
+114 1
+116 1
+118 2
+119 3
+12 2
+120 2
+125 2
+126 1
+128 3
+129 2
+131 1
+133 1
+134 2
+136 1
+137 2
+138 4
+143 1
+145 1
+146 2
+149 2
+15 2
+150 1
+152 2
+153 1
+155 1
+156 1
+157 1
+158 1
+160 1
+162 1
+163 1
+164 2
+165 2
+166 1
+167 3
+168 1
+169 4
+17 1
+170 1
+172 2
+174 2
+175 2
+176 2
+177 1
+178 1
+179 2
+18 2
+180 1
+181 1
+183 1
+186 1
+187 3
+189 1
+19 1
+190 1
+191 2
+192 1
+193 3
+194 1
+195 2
+196 1
+197 2
+199 3
+2 1
+20 1
+200 2
+201 1
+202 1
+203 2
+205 2
+207 2
+208 3
+209 2
+213 2
+214 1
+216 2
+217 2
+218 1
+219 2
+221 2
+222 1
+223 2
+224 2
+226 1
+228 1
+229 2
+230 5
+233 2
+235 1
+237 2
+238 2
+239 2
+24 2
+241 1
+242 2
+244 1
+247 1
+248 1
+249 1
+252 1
+255 2
+256 2
+257 1
+258 1
+26 2
+260 1
+262 1
+263 1
+265 2
+266 1
+27 1
+272 2
+273 3
+274 1
+275 1
+277 4
+278 2
+28 1
+280 2
+281 2
+282 2
+283 1
+284 1
+285 1
+286 1
+287 1
+288 2
+289 1
+291 1
+292 1
+296 1
+298 3
+30 1
+302 1
+305 1
+306 1
+307 2
+308 1
+309 2
+310 1
+311 3
+315 1
+316 3
+317 2
+318 3
+321 2
+322 2
+323 1
+325 2
+327 3
+33 1
+331 2
+332 1
+333 2
+335 1
+336 1
+338 1
+339 1
+34 1
+341 1
+342 2
+344 2
+345 1
+348 5
+35 3
+351 1
+353 2
+356 1
+360 1
+362 1
+364 1
+365 1
+366 1
+367 2
+368 1
+369 3
+37 2
+373 1
+374 1
+375 1
+377 1
+378 1
+379 1
+382 2
+384 3
+386 1
+389 1
+392 1
+393 1
+394 1
+395 2
+396 3
+397 2
+399 2
+4 1
+400 1
+401 5
+402 1
+403 3
+404 2
+406 4
+407 1
+409 3
+41 1
+411 1
+413 2
+414 2
+417 3
+418 1
+419 1
+42 2
+421 1
+424 2
+427 1
+429 2
+43 1
+430 3
+431 3
+432 1
+435 1
+436 1
+437 1
+438 3
+439 2
+44 1
+443 1
+444 1
+446 1
+448 1
+449 1
+452 1
+453 1
+454 3
+455 1
+457 1
+458 2
+459 2
+460 1
+462 2
+463 2
+466 3
+467 1
+468 4
+469 5
+47 1
+470 1
+472 1
+475 1
+477 1
+478 2
+479 1
+480 3
+481 1
+482 1
+483 1
+484 1
+485 1
+487 1
+489 4
+490 1
+491 1
+492 2
+493 1
+494 1
+495 1
+496 1
+497 1
+498 3
+5 3
+51 2
+53 1
+54 1
+57 1
+58 2
+64 1
+65 1
+66 1
+67 2
+69 1
+70 3
+72 2
+74 1
+76 2
+77 1
+78 1
+8 1
+80 1
+82 1
+83 2
+84 2
+85 1
+86 1
+87 1
+9 1
+90 3
+92 1
+95 2
+96 1
+97 2
+98 2

Modified: hive/trunk/ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out?rev=1435484&r1=1435483&r2=1435484&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out Sat Jan 19 03:54:17 2013
@@ -990,54 +990,62 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2

    Stage: Stage-4
      Dependency Collection
@@ -1124,12 +1132,6 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  0 val_0
-11 val_11
-12 val_12
-15 val_15
-17 val_17
-18 val_18
-19 val_19
  2 val_2
  4 val_4
  5 val_5
@@ -1159,18 +1161,12 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-0 val_0
  11 val_11
  12 val_12
  15 val_15
  17 val_17
  18 val_18
  19 val_19
-2 val_2
-4 val_4
-5 val_5
-8 val_8
-9 val_9
  PREHOOK: query: explain
  from src
  insert overwrite table src_multi1 select * where key < 10 group by key, value
@@ -1251,54 +1247,62 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2

    Stage: Stage-8
      Conditional Operator
@@ -1467,12 +1471,6 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  0 val_0
-11 val_11
-12 val_12
-15 val_15
-17 val_17
-18 val_18
-19 val_19
  2 val_2
  4 val_4
  5 val_5
@@ -1502,18 +1500,12 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-0 val_0
  11 val_11
  12 val_12
  15 val_15
  17 val_17
  18 val_18
  19 val_19
-2 val_2
-4 val_4
-5 val_5
-8 val_8
-9 val_9
  PREHOOK: query: explain
  from src
  insert overwrite table src_multi1 select * where key < 10 group by key, value
@@ -1584,54 +1576,62 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2

    Stage: Stage-4
      Dependency Collection
@@ -1718,12 +1718,6 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  0 val_0
-11 val_11
-12 val_12
-15 val_15
-17 val_17
-18 val_18
-19 val_19
  2 val_2
  4 val_4
  5 val_5
@@ -1753,18 +1747,12 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-0 val_0
  11 val_11
  12 val_12
  15 val_15
  17 val_17
  18 val_18
  19 val_19
-2 val_2
-4 val_4
-5 val_5
-8 val_8
-9 val_9
  PREHOOK: query: explain
  from src
  insert overwrite table src_multi1 select * where key < 10 group by key, value
@@ -1845,54 +1833,62 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 1
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi1
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi1
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 2
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.src_multi2
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 2
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.src_multi2

    Stage: Stage-8
      Conditional Operator
@@ -2061,12 +2057,6 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  0 val_0
-11 val_11
-12 val_12
-15 val_15
-17 val_17
-18 val_18
-19 val_19
  2 val_2
  4 val_4
  5 val_5
@@ -2096,18 +2086,12 @@ POSTHOOK: Lineage: src_multi2.value SIMP
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  POSTHOOK: Lineage: src_multi2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-0 val_0
  11 val_11
  12 val_12
  15 val_15
  17 val_17
  18 val_18
  19 val_19
-2 val_2
-4 val_4
-5 val_5
-8 val_8
-9 val_9
  PREHOOK: query: explain
  from (select * from src union all select * from src) s
  insert overwrite table src_multi1 select * where key < 10
@@ -4365,50 +4349,58 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

    Stage: Stage-3
      Map Reduce
@@ -4608,50 +4600,58 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

    Stage: Stage-3
      Map Reduce
@@ -4851,50 +4851,58 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

    Stage: Stage-3
      Map Reduce
@@ -5094,50 +5102,58 @@ STAGE PLANS:
                    tag: -1
        Reduce Operator Tree:
          Forward
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Filter Operator
+ predicate:
+ expr: (KEY._col0 < 10.0)
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- Group By Operator
- bucketGroup: false
- keys:
- expr: KEY._col0
- type: string
- expr: KEY._col1
- type: string
- mode: complete
- outputColumnNames: _col0, _col1
- Select Operator
- expressions:
- expr: _col0
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ Filter Operator
+ predicate:
+ expr: ((KEY._col0 > 10.0) and (KEY._col0 < 20.0))
+ type: boolean
+ Group By Operator
+ bucketGroup: false
+ keys:
+ expr: KEY._col0
                      type: string
- expr: _col1
+ expr: KEY._col1
                      type: string
+ mode: complete
                outputColumnNames: _col0, _col1
- File Output Operator
- compressed: false
- GlobalTableId: 0
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

    Stage: Stage-3
      Map Reduce

Search Discussions

Related Discussions

Discussion Navigation
viewthread | post
Discussion Overview
group: commits @
categories: hive, hadoop
posted: Jan 19, '13 at 3:54a
active: Jan 19, '13 at 3:54a
posts: 1
users: 1
website: hive.apache.org

1 user in discussion

Namit: 1 post

People

Translate

site design / logo © 2021 Grokbase