Author: namit
Date: Wed Aug 29 17:43:59 2012
New Revision: 1378659

URL: http://svn.apache.org/viewvc?rev=1378659&view=rev
Log:
HIVE-3410 All operators' conf should inherit from a common class
(Namit via Carl)


Added:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java
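
The contents of the two files added under ql/src/java/.../plan are not
reproduced in this mail. A minimal sketch of what they plausibly contain,
inferred only from how the rest of this diff uses them (the new
Operator<T extends OperatorDesc> bound, and the conf.clone() call in
Operator.clone() further down), is given below; the exact bodies are an
assumption:

    package org.apache.hadoop.hive.ql.plan;

    import java.io.Serializable;

    // Assumed shape of the common descriptor interface. Descriptors must
    // stay serializable (plans are still serialized into the job conf)
    // and must be cloneable, since Operator.clone() calls conf.clone().
    public interface OperatorDesc extends Serializable, Cloneable {
      Object clone() throws CloneNotSupportedException;
    }

    // Assumed shape of the shared base class (in its own file, as listed
    // above), which concrete *Desc classes can extend.
    public abstract class AbstractOperatorDesc implements OperatorDesc {
      @Override
      public Object clone() throws CloneNotSupportedException {
        throw new CloneNotSupportedException("clone not supported");
      }
    }
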
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TerminalOperator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink4.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/JoinReorder.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkDeDuplication.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcCtx.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/LineageCtx.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpWalkerCtx.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinResolver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ListSinkDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Wed Aug 29 17:43:59 2012
@@ -94,6 +94,7 @@ import org.apache.hadoop.hive.ql.parse.S
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -509,7 +510,7 @@ public class Driver implements CommandPr
}

private void doAuthorization(BaseSemanticAnalyzer sem)
- throws HiveException, AuthorizationException {
+ throws HiveException, AuthorizationException {
HashSet<ReadEntity> inputs = sem.getInputs();
HashSet<WriteEntity> outputs = sem.getOutputs();
SessionState ss = SessionState.get();
@@ -583,9 +584,9 @@ public class Driver implements CommandPr
ParseContext parseCtx = querySem.getParseContext();
Map<TableScanOperator, Table> tsoTopMap = parseCtx.getTopToTable();

- for (Map.Entry<String, Operator<? extends Serializable>> topOpMap : querySem
+ for (Map.Entry<String, Operator<? extends OperatorDesc>> topOpMap : querySem
.getParseContext().getTopOps().entrySet()) {
- Operator<? extends Serializable> topOp = topOpMap.getValue();
+ Operator<? extends OperatorDesc> topOp = topOpMap.getValue();
if (topOp instanceof TableScanOperator
&& tsoTopMap.containsKey(topOp)) {
TableScanOperator tableScanOp = (TableScanOperator) topOp;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java Wed Aug 29 17:43:59 2012
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.ql.hooks.L
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.AdjacencyType;
import org.apache.hadoop.hive.ql.plan.api.NodeType;
import org.apache.hadoop.hive.ql.plan.api.TaskType;
@@ -152,18 +153,18 @@ public class QueryPlan implements Serial
*/
private void populateOperatorGraph(
org.apache.hadoop.hive.ql.plan.api.Task task,
- Collection<Operator<? extends Serializable>> topOps) {
+ Collection<Operator<? extends OperatorDesc>> topOps) {

task.setOperatorGraph(new org.apache.hadoop.hive.ql.plan.api.Graph());
task.getOperatorGraph().setNodeType(NodeType.OPERATOR);

- Queue<Operator<? extends Serializable>> opsToVisit =
- new LinkedList<Operator<? extends Serializable>>();
- Set<Operator<? extends Serializable>> opsVisited =
- new HashSet<Operator<? extends Serializable>>();
+ Queue<Operator<? extends OperatorDesc>> opsToVisit =
+ new LinkedList<Operator<? extends OperatorDesc>>();
+ Set<Operator<? extends OperatorDesc>> opsVisited =
+ new HashSet<Operator<? extends OperatorDesc>>();
opsToVisit.addAll(topOps);
while (opsToVisit.peek() != null) {
- Operator<? extends Serializable> op = opsToVisit.remove();
+ Operator<? extends OperatorDesc> op = opsToVisit.remove();
opsVisited.add(op);
// populate the operator
org.apache.hadoop.hive.ql.plan.api.Operator operator =
@@ -177,7 +178,7 @@ public class QueryPlan implements Serial
new org.apache.hadoop.hive.ql.plan.api.Adjacency();
entry.setAdjacencyType(AdjacencyType.CONJUNCTIVE);
entry.setNode(op.getOperatorId());
- for (Operator<? extends Serializable> childOp : op.getChildOperators()) {
+ for (Operator<? extends OperatorDesc> childOp : op.getChildOperators()) {
entry.addToChildren(childOp.getOperatorId());
if (!opsVisited.contains(childOp)) {
opsToVisit.add(childOp);
@@ -230,8 +231,8 @@ public class QueryPlan implements Serial
reduceTask.setTaskId(stage.getStageId() + "_REDUCE");
reduceTask.setTaskType(TaskType.REDUCE);
stage.addToTaskList(reduceTask);
- Collection<Operator<? extends Serializable>> reducerTopOps =
- new ArrayList<Operator<? extends Serializable>>();
+ Collection<Operator<? extends OperatorDesc>> reducerTopOps =
+ new ArrayList<Operator<? extends OperatorDesc>>();
reducerTopOps.add(mrTask.getWork().getReducer());
populateOperatorGraph(reduceTask, reducerTopOps);
}
@@ -309,8 +310,11 @@ public class QueryPlan implements Serial
} else {
task.setStarted(started.contains(task.getTaskId()));
task.setDone(done.contains(task.getTaskId()));
- for (org.apache.hadoop.hive.ql.plan.api.Operator op : task
- .getOperatorList()) {
+ if (task.getOperatorList() == null) {
+ return;
+ }
+ for (org.apache.hadoop.hive.ql.plan.api.Operator op :
+ task.getOperatorList()) {
// if the task has started, all operators within the task have
// started
op.setStarted(started.contains(task.getTaskId()));
@@ -370,8 +374,8 @@ public class QueryPlan implements Serial
done.add(task.getId() + "_MAP");
}
if (mrTask.hasReduce()) {
- Collection<Operator<? extends Serializable>> reducerTopOps =
- new ArrayList<Operator<? extends Serializable>>();
+ Collection<Operator<? extends OperatorDesc>> reducerTopOps =
+ new ArrayList<Operator<? extends OperatorDesc>>();
reducerTopOps.add(mrTask.getWork().getReducer());
extractOperatorCounters(reducerTopOps, task.getId() + "_REDUCE");
if (mrTask.reduceStarted()) {
@@ -393,21 +397,21 @@ public class QueryPlan implements Serial
}

private void extractOperatorCounters(
- Collection<Operator<? extends Serializable>> topOps, String taskId) {
- Queue<Operator<? extends Serializable>> opsToVisit =
- new LinkedList<Operator<? extends Serializable>>();
- Set<Operator<? extends Serializable>> opsVisited =
- new HashSet<Operator<? extends Serializable>>();
+ Collection<Operator<? extends OperatorDesc>> topOps, String taskId) {
+ Queue<Operator<? extends OperatorDesc>> opsToVisit =
+ new LinkedList<Operator<? extends OperatorDesc>>();
+ Set<Operator<? extends OperatorDesc>> opsVisited =
+ new HashSet<Operator<? extends OperatorDesc>>();
opsToVisit.addAll(topOps);
while (opsToVisit.size() != 0) {
- Operator<? extends Serializable> op = opsToVisit.remove();
+ Operator<? extends OperatorDesc> op = opsToVisit.remove();
opsVisited.add(op);
counters.put(op.getOperatorId(), op.getCounters());
if (op.getDone()) {
done.add(op.getOperatorId());
}
if (op.getChildOperators() != null) {
- for (Operator<? extends Serializable> childOp : op.getChildOperators()) {
+ for (Operator<? extends OperatorDesc> childOp : op.getChildOperators()) {
if (!opsVisited.contains(childOp)) {
opsToVisit.add(childOp);
}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Wed Aug 29 17:43:59 2012
@@ -50,9 +50,9 @@ import org.apache.hadoop.hive.common.Com
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
@@ -66,6 +66,7 @@ import org.apache.hadoop.hive.ql.plan.Fe
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.api.StageType;
@@ -178,7 +179,7 @@ public class ExecDriver extends Task<Map
* @return true if fatal errors happened during job execution, false otherwise.
*/
public boolean checkFatalErrors(Counters ctrs, StringBuilder errMsg) {
- for (Operator<? extends Serializable> op : work.getAliasToWork().values()) {
+ for (Operator<? extends OperatorDesc> op : work.getAliasToWork().values()) {
if (op.checkFatalErrors(ctrs, errMsg)) {
return true;
}
@@ -195,7 +196,8 @@ public class ExecDriver extends Task<Map
// fix up outputs
Map<String, ArrayList<String>> pa = work.getPathToAliases();
if (pa != null) {
- ArrayList<Operator<? extends Serializable>> opList = new ArrayList<Operator<? extends Serializable>>();
+ List<Operator<? extends OperatorDesc>> opList =
+ new ArrayList<Operator<? extends OperatorDesc>>();

if (work.getReducer() != null) {
opList.add(work.getReducer());
@@ -206,7 +208,7 @@ public class ExecDriver extends Task<Map
opList.add(work.getAliasToWork().get(a));

while (!opList.isEmpty()) {
- Operator<? extends Serializable> op = opList.remove(0);
+ Operator<? extends OperatorDesc> op = opList.remove(0);

if (op instanceof FileSinkOperator) {
FileSinkDesc fdesc = ((FileSinkOperator) op).getConf();
@@ -489,7 +491,7 @@ public class ExecDriver extends Task<Map
if (rj != null) {
JobCloseFeedBack feedBack = new JobCloseFeedBack();
if (work.getAliasToWork() != null) {
- for (Operator<? extends Serializable> op : work.getAliasToWork().values()) {
+ for (Operator<? extends OperatorDesc> op : work.getAliasToWork().values()) {
op.jobClose(job, success, feedBack);
}
}
@@ -743,7 +745,7 @@ public class ExecDriver extends Task<Map
}

@Override
- public Collection<Operator<? extends Serializable>> getTopOperators() {
+ public Collection<Operator<? extends OperatorDesc>> getTopOperators() {
return getWork().getAliasToWork().values();
}

@@ -947,11 +949,12 @@ public class ExecDriver extends Task<Map
if (pa != null) {
for (List<String> ls : pa.values()) {
for (String a : ls) {
- ArrayList<Operator<? extends Serializable>> opList = new ArrayList<Operator<? extends Serializable>>();
+ ArrayList<Operator<? extends OperatorDesc>> opList =
+ new ArrayList<Operator<? extends OperatorDesc>>();
opList.add(work.getAliasToWork().get(a));

while (!opList.isEmpty()) {
- Operator<? extends Serializable> op = opList.remove(0);
+ Operator<? extends OperatorDesc> op = opList.remove(0);

if (op instanceof FileSinkOperator) {
FileSinkDesc fdesc = ((FileSinkOperator) op).getConf();
@@ -973,7 +976,7 @@ public class ExecDriver extends Task<Map

@Override
public void updateCounters(Counters ctrs, RunningJob rj) throws IOException {
- for (Operator<? extends Serializable> op : work.getAliasToWork().values()) {
+ for (Operator<? extends OperatorDesc> op : work.getAliasToWork().values()) {
op.updateCounters(ctrs);
}
if (work.getReducer() != null) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java Wed Aug 29 17:43:59 2012
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.exec;

import java.io.IOException;
-import java.io.Serializable;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.net.URLClassLoader;
@@ -31,6 +30,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
@@ -104,8 +104,8 @@ public class ExecMapper extends MapReduc
//The following code is for mapjoin
//initialize all the dummy ops
l4j.info("Initializing dummy operator");
- List<Operator<? extends Serializable>> dummyOps = localWork.getDummyParentOp();
- for(Operator<? extends Serializable> dummyOp : dummyOps){
+ List<Operator<? extends OperatorDesc>> dummyOps = localWork.getDummyParentOp();
+ for (Operator<? extends OperatorDesc> dummyOp : dummyOps){
dummyOp.setExecContext(execContext);
dummyOp.initialize(jc,null);
}
@@ -194,9 +194,9 @@ public class ExecMapper extends MapReduc

//for close the local work
if(localWork != null){
- List<Operator<? extends Serializable>> dummyOps = localWork.getDummyParentOp();
+ List<Operator<? extends OperatorDesc>> dummyOps = localWork.getDummyParentOp();

- for(Operator<? extends Serializable> dummyOp : dummyOps){
+ for (Operator<? extends OperatorDesc> dummyOp : dummyOps){
dummyOp.close(abort);
}
}
@@ -204,7 +204,7 @@ public class ExecMapper extends MapReduc
if (fetchOperators != null) {
MapredLocalWork localWork = mo.getConf().getMapLocalWork();
for (Map.Entry<String, FetchOperator> entry : fetchOperators.entrySet()) {
- Operator<? extends Serializable> forwardOp = localWork
+ Operator<? extends OperatorDesc> forwardOp = localWork
.getAliasToWork().get(entry.getKey());
forwardOp.close(abort);
}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Wed Aug 29 17:43:59 2012
@@ -24,6 +24,7 @@ import java.io.OutputStream;
import java.io.PrintStream;
import java.io.Serializable;
import java.lang.annotation.Annotation;
+import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
@@ -41,12 +42,12 @@ import org.apache.hadoop.hive.ql.Context
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.StringUtils;
import org.json.JSONException;
import org.json.JSONObject;
-import java.lang.reflect.InvocationTargetException;

/**
* ExplainTask implementation.
@@ -281,7 +282,8 @@ public class ExplainTask extends Task<Ex
// If this is an operator then we need to call the plan generation on the
// conf and then the children
if (work instanceof Operator) {
- Operator<? extends Serializable> operator = (Operator<? extends Serializable>) work;
+ Operator<? extends OperatorDesc> operator =
+ (Operator<? extends OperatorDesc>) work;
if (operator.getConf() != null) {
JSONObject jsonOut = outputPlan(operator.getConf(), out, extended,
jsonOutput, jsonOutput ? 0 : indent);
@@ -291,7 +293,7 @@ public class ExplainTask extends Task<Ex
}

if (operator.getChildOperators() != null) {
- for (Operator<? extends Serializable> op : operator.getChildOperators()) {
+ for (Operator<? extends OperatorDesc> op : operator.getChildOperators()) {
JSONObject jsonOut = outputPlan(op, out, extended, jsonOutput, jsonOutput ? 0 : indent + 2);
if (jsonOutput) {
json.put(operator.getOperatorId(), jsonOut);
@@ -651,6 +653,7 @@ public class ExplainTask extends Task<Ex
throw new RuntimeException("Unexpected call");
}

+ @Override
public List<FieldSchema> getResultSchema() {
FieldSchema tmpFieldSchema = new FieldSchema();
List<FieldSchema> colList = new ArrayList<FieldSchema>();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java Wed Aug 29 17:43:59 2012
@@ -42,6 +42,7 @@ import org.apache.hadoop.hive.ql.plan.Ag
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
@@ -1057,7 +1058,7 @@ public class GroupByOperator extends Ope

// Group by contains the columns needed - no need to aggregate from children
public List<String> genColLists(
- HashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx) {
+ HashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx) {
List<String> colLists = new ArrayList<String>();
ArrayList<ExprNodeDesc> keys = conf.getKeys();
for (ExprNodeDesc key : keys) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Wed Aug 29 17:43:59 2012
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.conf.HiveC
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
@@ -58,7 +59,7 @@ import org.apache.hadoop.util.StringUtil
* different from regular operators in that it starts off by processing a
* Writable data structure from a Table (instead of a Hive Object).
**/
-public class MapOperator extends Operator<MapredWork> implements Serializable {
+public class MapOperator extends Operator<MapredWork> implements Serializable, Cloneable {

private static final long serialVersionUID = 1L;

@@ -83,17 +84,17 @@ public class MapOperator extends Operato
private Map<MapInputPath, MapOpCtx> opCtxMap;
private final Set<MapInputPath> listInputPaths = new HashSet<MapInputPath>();

- private Map<Operator<? extends Serializable>, java.util.ArrayList<String>> operatorToPaths;
+ private Map<Operator<? extends OperatorDesc>, ArrayList<String>> operatorToPaths;

- private final Map<Operator<? extends Serializable>, MapOpCtx> childrenOpToOpCtxMap =
- new HashMap<Operator<? extends Serializable>, MapOpCtx>();
+ private final Map<Operator<? extends OperatorDesc>, MapOpCtx> childrenOpToOpCtxMap =
+ new HashMap<Operator<? extends OperatorDesc>, MapOpCtx>();

- private ArrayList<Operator<? extends Serializable>> extraChildrenToClose = null;
+ private ArrayList<Operator<? extends OperatorDesc>> extraChildrenToClose = null;

private static class MapInputPath {
String path;
String alias;
- Operator<? extends Serializable> op;
+ Operator<? extends OperatorDesc> op;

/**
* @param path
@@ -101,7 +102,7 @@ public class MapOperator extends Operato
* @param op
*/
public MapInputPath(String path, String alias,
- Operator<? extends Serializable> op) {
+ Operator<? extends OperatorDesc> op) {
this.path = path;
this.alias = alias;
this.op = op;
@@ -129,11 +130,11 @@ public class MapOperator extends Operato
return ret;
}

- public Operator<? extends Serializable> getOp() {
+ public Operator<? extends OperatorDesc> getOp() {
return op;
}

- public void setOp(Operator<? extends Serializable> op) {
+ public void setOp(Operator<? extends OperatorDesc> op) {
this.op = op;
}

@@ -304,7 +305,7 @@ public class MapOperator extends Operato
* need to be changed if the input changes
**/
private void setInspectorInput(MapInputPath inp) {
- Operator<? extends Serializable> op = inp.getOp();
+ Operator<? extends OperatorDesc> op = inp.getOp();

deserializer = opCtxMap.get(inp).getDeserializer();
isPartitioned = opCtxMap.get(inp).isPartitioned();
@@ -367,9 +368,10 @@ public class MapOperator extends Operato
Path fpath = new Path((new Path(HiveConf.getVar(hconf,
HiveConf.ConfVars.HADOOPMAPFILENAME))).toUri().getPath());

- ArrayList<Operator<? extends Serializable>> children = new ArrayList<Operator<? extends Serializable>>();
+ ArrayList<Operator<? extends OperatorDesc>> children =
+ new ArrayList<Operator<? extends OperatorDesc>>();
opCtxMap = new HashMap<MapInputPath, MapOpCtx>();
- operatorToPaths = new HashMap<Operator<? extends Serializable>, java.util.ArrayList<String>>();
+ operatorToPaths = new HashMap<Operator<? extends OperatorDesc>, ArrayList<String>>();

statsMap.put(Counter.DESERIALIZE_ERRORS, deserialize_error_count);

@@ -380,17 +382,17 @@ public class MapOperator extends Operato
List<String> aliases = conf.getPathToAliases().get(onefile);

for (String onealias : aliases) {
- Operator<? extends Serializable> op = conf.getAliasToWork().get(
+ Operator<? extends OperatorDesc> op = conf.getAliasToWork().get(
onealias);
LOG.info("Adding alias " + onealias + " to work list for file "
+ onefile);
MapInputPath inp = new MapInputPath(onefile, onealias, op);
opCtxMap.put(inp, opCtx);
if (operatorToPaths.get(op) == null) {
- operatorToPaths.put(op, new java.util.ArrayList<String>());
+ operatorToPaths.put(op, new ArrayList<String>());
}
operatorToPaths.get(op).add(onefile);
- op.setParentOperators(new ArrayList<Operator<? extends Serializable>>());
+ op.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
op.getParentOperators().add(this);
// check for the operators who will process rows coming to this Map
// Operator
@@ -423,11 +425,11 @@ public class MapOperator extends Operato
public void initializeOp(Configuration hconf) throws HiveException {
// set that parent initialization is done and call initialize on children
state = State.INIT;
- List<Operator<? extends Serializable>> children = getChildOperators();
+ List<Operator<? extends OperatorDesc>> children = getChildOperators();

- for (Entry<Operator<? extends Serializable>, MapOpCtx> entry : childrenOpToOpCtxMap
+ for (Entry<Operator<? extends OperatorDesc>, MapOpCtx> entry : childrenOpToOpCtxMap
.entrySet()) {
- Operator<? extends Serializable> child = entry.getKey();
+ Operator<? extends OperatorDesc> child = entry.getKey();
MapOpCtx mapOpCtx = entry.getValue();
// Add alias, table name, and partitions to hadoop conf so that their
// children will
@@ -448,12 +450,12 @@ public class MapOperator extends Operato
HiveConf.setVar(hconf, HiveConf.ConfVars.HIVEPARTITIONNAME, entry
.getValue().partName);
MapInputPath input = entry.getKey();
- Operator<? extends Serializable> op = input.op;
+ Operator<? extends OperatorDesc> op = input.op;
// op is not in the children list, so need to remember it and close it
// afterwards
if (children.indexOf(op) == -1) {
if (extraChildrenToClose == null) {
- extraChildrenToClose = new ArrayList<Operator<? extends Serializable>>();
+ extraChildrenToClose = new ArrayList<Operator<? extends OperatorDesc>>();
}
extraChildrenToClose.add(op);
op.initialize(hconf, new ObjectInspector[] {entry.getValue().getRowObjectInspector()});
@@ -467,7 +469,7 @@ public class MapOperator extends Operato
@Override
public void closeOp(boolean abort) throws HiveException {
if (extraChildrenToClose != null) {
- for (Operator<? extends Serializable> op : extraChildrenToClose) {
+ for (Operator<? extends OperatorDesc> op : extraChildrenToClose) {
op.close(abort);
}
}
@@ -486,7 +488,7 @@ public class MapOperator extends Operato
// Operator
if (!onepath.toUri().relativize(fpath.toUri()).equals(fpath.toUri())) {
String onealias = conf.getPathToAliases().get(onefile).get(0);
- Operator<? extends Serializable> op =
+ Operator<? extends OperatorDesc> op =
conf.getAliasToWork().get(onealias);

LOG.info("Processing alias " + onealias + " for file " + onefile);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java Wed Aug 29 17:43:59 2012
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.DriverC
import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.JobConf;
@@ -548,7 +549,7 @@ public class MapRedTask extends ExecDriv
}

@Override
- public Operator<? extends Serializable> getReducer() {
+ public Operator<? extends OperatorDesc> getReducer() {
return getWork().getReducer();
}
}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java Wed Aug 29 17:43:59 2012
@@ -52,6 +52,7 @@ import org.apache.hadoop.hive.ql.metadat
import org.apache.hadoop.hive.ql.plan.BucketMapJoinContext;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -312,7 +313,7 @@ public class MapredLocalTask extends Tas
}

// get the root operator
- Operator<? extends Serializable> forwardOp = work.getAliasToWork().get(alias);
+ Operator<? extends OperatorDesc> forwardOp = work.getAliasToWork().get(alias);
// walk through the operator tree
while (true) {
InspectableObject row = fetchOp.getNextRow();
@@ -342,7 +343,8 @@ public class MapredLocalTask extends Tas
for (Map.Entry<String, FetchWork> entry : work.getAliasToFetchWork().entrySet()) {
JobConf jobClone = new JobConf(job);

- Operator<? extends Serializable> tableScan = work.getAliasToWork().get(entry.getKey());
+ Operator<? extends OperatorDesc> tableScan =
+ work.getAliasToWork().get(entry.getKey());
boolean setColumnsNeeded = false;
if (tableScan instanceof TableScanOperator) {
ArrayList<Integer> list = ((TableScanOperator) tableScan).getNeededColumnIDs();
@@ -366,7 +368,7 @@ public class MapredLocalTask extends Tas
for (Map.Entry<String, FetchOperator> entry : fetchOperators.entrySet()) {
// get the forward op
String alias = entry.getKey();
- Operator<? extends Serializable> forwardOp = work.getAliasToWork().get(alias);
+ Operator<? extends OperatorDesc> forwardOp = work.getAliasToWork().get(alias);

// put the exe context into all the operators
forwardOp.setExecContext(execContext);
@@ -386,8 +388,8 @@ public class MapredLocalTask extends Tas

private void generateDummyHashTable(String alias, String bigBucketFileName) throws HiveException,IOException {
// find the (byte)tag for the map join(HashTableSinkOperator)
- Operator<? extends Serializable> parentOp = work.getAliasToWork().get(alias);
- Operator<? extends Serializable> childOp = parentOp.getChildOperators().get(0);
+ Operator<? extends OperatorDesc> parentOp = work.getAliasToWork().get(alias);
+ Operator<? extends OperatorDesc> childOp = parentOp.getChildOperators().get(0);
while ((childOp != null) && (!(childOp instanceof HashTableSinkOperator))) {
parentOp = childOp;
assert parentOp.getChildOperators().size() == 1;
@@ -447,7 +449,7 @@ public class MapredLocalTask extends Tas
}

@Override
- public Collection<Operator<? extends Serializable>> getTopOperators() {
+ public Collection<Operator<? extends OperatorDesc>> getTopOperators() {
return getWork().getAliasToWork().values();
}


Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java Wed Aug 29 17:43:59 2012
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.metadat
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -46,15 +47,15 @@ import org.apache.hadoop.mapred.Reporter
/**
* Base operator implementation.
**/
-public abstract class Operator<T extends Serializable> implements Serializable,
- Node {
+public abstract class Operator<T extends OperatorDesc> implements Serializable,Cloneable,
+ Node {

// Bean methods

private static final long serialVersionUID = 1L;

- protected List<Operator<? extends Serializable>> childOperators;
- protected List<Operator<? extends Serializable>> parentOperators;
+ protected List<Operator<? extends OperatorDesc>> childOperators;
+ protected List<Operator<? extends OperatorDesc>> parentOperators;
protected String operatorId;
/**
* List of counter names associated with the operator. It contains the
@@ -122,11 +123,11 @@ public abstract class Operator<T extends
}

public void setChildOperators(
- List<Operator<? extends Serializable>> childOperators) {
+ List<Operator<? extends OperatorDesc>> childOperators) {
this.childOperators = childOperators;
}

- public List<Operator<? extends Serializable>> getChildOperators() {
+ public List<Operator<? extends OperatorDesc>> getChildOperators() {
return childOperators;
}

@@ -140,7 +141,7 @@ public abstract class Operator<T extends
}

ArrayList<Node> ret_vec = new ArrayList<Node>();
- for (Operator<? extends Serializable> op : getChildOperators()) {
+ for (Operator<? extends OperatorDesc> op : getChildOperators()) {
ret_vec.add(op);
}

@@ -148,11 +149,11 @@ public abstract class Operator<T extends
}

public void setParentOperators(
- List<Operator<? extends Serializable>> parentOperators) {
+ List<Operator<? extends OperatorDesc>> parentOperators) {
this.parentOperators = parentOperators;
}

- public List<Operator<? extends Serializable>> getParentOperators() {
+ public List<Operator<? extends OperatorDesc>> getParentOperators() {
return parentOperators;
}

@@ -231,7 +232,7 @@ public abstract class Operator<T extends
return;
}

- for (Operator<? extends Serializable> op : childOperators) {
+ for (Operator<? extends OperatorDesc> op : childOperators) {
op.setReporter(rep);
}
}
@@ -244,7 +245,7 @@ public abstract class Operator<T extends
return;
}

- for (Operator<? extends Serializable> op : childOperators) {
+ for (Operator<? extends OperatorDesc> op : childOperators) {
op.setOutputCollector(out);
}
}
@@ -259,7 +260,7 @@ public abstract class Operator<T extends
return;
}

- for (Operator<? extends Serializable> op : childOperators) {
+ for (Operator<? extends OperatorDesc> op : childOperators) {
op.setAlias(alias);
}
}
@@ -282,7 +283,7 @@ public abstract class Operator<T extends
if (parentOperators == null) {
return true;
}
- for (Operator<? extends Serializable> parent : parentOperators) {
+ for (Operator<? extends OperatorDesc> parent : parentOperators) {
if (parent == null) {
//return true;
continue;
@@ -331,7 +332,7 @@ public abstract class Operator<T extends
}
childOperatorsTag = new int[childOperatorsArray.length];
for (int i = 0; i < childOperatorsArray.length; i++) {
- List<Operator<? extends Serializable>> parentOperators = childOperatorsArray[i]
+ List<Operator<? extends OperatorDesc>> parentOperators = childOperatorsArray[i]
.getParentOperators();
if (parentOperators == null) {
throw new HiveException("Hive internal error: parent is null in "
@@ -361,7 +362,7 @@ public abstract class Operator<T extends
public void initializeLocalWork(Configuration hconf) throws HiveException {
if (childOperators != null) {
for (int i =0; i<childOperators.size();i++) {
- Operator<? extends Serializable> childOp = this.childOperators.get(i);
+ Operator<? extends OperatorDesc> childOp = this.childOperators.get(i);
childOp.initializeLocalWork(hconf);
}
}
@@ -485,7 +486,7 @@ public abstract class Operator<T extends
}

LOG.debug("Starting group for children:");
- for (Operator<? extends Serializable> op : childOperators) {
+ for (Operator<? extends OperatorDesc> op : childOperators) {
op.startGroup();
}

@@ -505,7 +506,7 @@ public abstract class Operator<T extends
}

LOG.debug("Ending group for children:");
- for (Operator<? extends Serializable> op : childOperators) {
+ for (Operator<? extends OperatorDesc> op : childOperators) {
op.endGroup();
}

@@ -514,7 +515,7 @@ public abstract class Operator<T extends

protected boolean allInitializedParentsAreClosed() {
if (parentOperators != null) {
- for (Operator<? extends Serializable> parent : parentOperators) {
+ for (Operator<? extends OperatorDesc> parent : parentOperators) {
if(parent==null){
continue;
}
@@ -562,7 +563,7 @@ public abstract class Operator<T extends
return;
}

- for (Operator<? extends Serializable> op : childOperators) {
+ for (Operator<? extends OperatorDesc> op : childOperators) {
op.close(abort);
}

@@ -595,7 +596,7 @@ public abstract class Operator<T extends
return;
}

- for (Operator<? extends Serializable> op : childOperators) {
+ for (Operator<? extends OperatorDesc> op : childOperators) {
op.jobClose(conf, success, feedBack);
}
}
@@ -604,7 +605,7 @@ public abstract class Operator<T extends
* Cache childOperators in an array for faster access. childOperatorsArray is
* accessed per row, so it's important to make the access efficient.
*/
- protected transient Operator<? extends Serializable>[] childOperatorsArray = null;
+ protected transient Operator<? extends OperatorDesc>[] childOperatorsArray = null;
protected transient int[] childOperatorsTag;

// counters for debugging
@@ -620,14 +621,14 @@ public abstract class Operator<T extends
* @param newChild
* the new child
*/
- public void replaceChild(Operator<? extends Serializable> child,
- Operator<? extends Serializable> newChild) {
+ public void replaceChild(Operator<? extends OperatorDesc> child,
+ Operator<? extends OperatorDesc> newChild) {
int childIndex = childOperators.indexOf(child);
assert childIndex != -1;
childOperators.set(childIndex, newChild);
}

- public void removeChild(Operator<? extends Serializable> child) {
+ public void removeChild(Operator<? extends OperatorDesc> child) {
int childIndex = childOperators.indexOf(child);
assert childIndex != -1;
if (childOperators.size() == 1) {
@@ -651,7 +652,8 @@ public abstract class Operator<T extends
* @param child If this operator is not the only parent of the child. There can be unpredictable result.
* @throws SemanticException
*/
- public void removeChildAndAdoptItsChildren(Operator<? extends Serializable> child) throws SemanticException {
+ public void removeChildAndAdoptItsChildren(
+ Operator<? extends OperatorDesc> child) throws SemanticException {
int childIndex = childOperators.indexOf(child);
if (childIndex == -1) {
throw new SemanticException(
@@ -664,18 +666,18 @@ public abstract class Operator<T extends
childOperators.addAll(childIndex, child.getChildOperators());
}

- for (Operator<? extends Serializable> gc : child.getChildOperators()) {
- List<Operator<? extends Serializable>> parents = gc.getParentOperators();
+ for (Operator<? extends OperatorDesc> gc : child.getChildOperators()) {
+ List<Operator<? extends OperatorDesc>> parents = gc.getParentOperators();
int index = parents.indexOf(child);
if (index == -1) {
throw new SemanticException(
- "Exception when trying to remove partition predicates: fail to find parent from child");
+ "Exception when trying to remove partition predicates: fail to find parent from child");
}
parents.set(index, this);
}
}

- public void removeParent(Operator<? extends Serializable> parent) {
+ public void removeParent(Operator<? extends OperatorDesc> parent) {
int parentIndex = parentOperators.indexOf(parent);
assert parentIndex != -1;
if (parentOperators.size() == 1) {
@@ -702,8 +704,8 @@ public abstract class Operator<T extends
* @param newParent
* the new parent
*/
- public void replaceParent(Operator<? extends Serializable> parent,
- Operator<? extends Serializable> newParent) {
+ public void replaceParent(Operator<? extends OperatorDesc> parent,
+ Operator<? extends OperatorDesc> newParent) {
int parentIndex = parentOperators.indexOf(parent);
assert parentIndex != -1;
parentOperators.set(parentIndex, newParent);
@@ -755,7 +757,7 @@ public abstract class Operator<T extends

int childrenDone = 0;
for (int i = 0; i < childOperatorsArray.length; i++) {
- Operator<? extends Serializable> o = childOperatorsArray[i];
+ Operator<? extends OperatorDesc> o = childOperatorsArray[i];
if (o.getDone()) {
childrenDone++;
} else {
@@ -778,7 +780,7 @@ public abstract class Operator<T extends
public void reset(){
this.state=State.INIT;
if (childOperators != null) {
- for (Operator<? extends Serializable> o : childOperators) {
+ for (Operator<? extends OperatorDesc> o : childOperators) {
o.reset();
}
}
@@ -790,13 +792,13 @@ public abstract class Operator<T extends
*
*/
public static interface OperatorFunc {
- void func(Operator<? extends Serializable> op);
+ void func(Operator<? extends OperatorDesc> op);
}

public void preorderMap(OperatorFunc opFunc) {
opFunc.func(this);
if (childOperators != null) {
- for (Operator<? extends Serializable> o : childOperators) {
+ for (Operator<? extends OperatorDesc> o : childOperators) {
o.preorderMap(opFunc);
}
}
@@ -863,7 +865,7 @@ public abstract class Operator<T extends
if (childOperators != null) {
s.append(ls);
s.append(" <Children>");
- for (Operator<? extends Serializable> o : childOperators) {
+ for (Operator<? extends OperatorDesc> o : childOperators) {
s.append(o.dump(level + 2, seenOpts));
}
s.append(ls);
@@ -873,7 +875,7 @@ public abstract class Operator<T extends
if (parentOperators != null) {
s.append(ls);
s.append(" <Parent>");
- for (Operator<? extends Serializable> o : parentOperators) {
+ for (Operator<? extends OperatorDesc> o : parentOperators) {
s.append("Id = " + o.id + " ");
s.append(o.dump(level, seenOpts));
}
@@ -1154,7 +1156,7 @@ public abstract class Operator<T extends
// but, some operators may be updated more than once and that's ok
if (getChildren() != null) {
for (Node op : getChildren()) {
- ((Operator<? extends Serializable>) op).updateCounters(ctrs);
+ ((Operator<? extends OperatorDesc>) op).updateCounters(ctrs);
}
}
}
@@ -1189,7 +1191,7 @@ public abstract class Operator<T extends

if (getChildren() != null) {
for (Node op : getChildren()) {
- if (((Operator<? extends Serializable>) op).checkFatalErrors(ctrs,
+ if (((Operator<? extends OperatorDesc>) op).checkFatalErrors(ctrs,
errMsg)) {
return true;
}
@@ -1309,7 +1311,7 @@ public abstract class Operator<T extends
this.execContext = execContext;
if(this.childOperators != null) {
for (int i = 0; i<this.childOperators.size();i++) {
- Operator<? extends Serializable> op = this.childOperators.get(i);
+ Operator<? extends OperatorDesc> op = this.childOperators.get(i);
op.setExecContext(execContext);
}
}
@@ -1321,7 +1323,7 @@ public abstract class Operator<T extends
this.cleanUpInputFileChangedOp();
if(this.childOperators != null) {
for (int i = 0; i<this.childOperators.size();i++) {
- Operator<? extends Serializable> op = this.childOperators.get(i);
+ Operator<? extends OperatorDesc> op = this.childOperators.get(i);
op.cleanUpInputFileChanged();
}
}
@@ -1332,4 +1334,25 @@ public abstract class Operator<T extends
public void cleanUpInputFileChangedOp() throws HiveException {
}

+ @Override
+ public Operator<? extends OperatorDesc> clone()
+ throws CloneNotSupportedException {
+
+ List<Operator<? extends OperatorDesc>> parents = getParentOperators();
+ List<Operator<? extends OperatorDesc>> parentClones =
+ new ArrayList<Operator<? extends OperatorDesc>>();
+
+ if (parents != null) {
+ for (Operator<? extends OperatorDesc> parent : parents) {
+ parentClones.add((Operator<? extends OperatorDesc>)(parent.clone()));
+ }
+ }
+
+ T descClone = (T)conf.clone();
+ Operator<? extends OperatorDesc> ret =
+ (Operator<? extends OperatorDesc>) OperatorFactory.getAndMakeChild(
+ descClone, getSchema(), parentClones);
+
+ return ret;
+ }
}
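
The clone() method added above copies an operator by cloning its parent
chain first, then cloning its descriptor, and finally rebuilding itself
under the cloned parents through OperatorFactory.getAndMakeChild. A
self-contained toy illustration of that recursive pattern (none of these
names are Hive's; Node stands in for Operator and the String field for
its conf):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    class Node implements Cloneable {
      final String desc;                  // stands in for the operator's conf
      final List<Node> parents = new ArrayList<Node>();

      Node(String desc, List<Node> parents) {
        this.desc = desc;
        if (parents != null) {
          this.parents.addAll(parents);
        }
      }

      @Override
      public Node clone() {
        // Clone toward the roots first, as Operator.clone() does.
        List<Node> parentClones = new ArrayList<Node>();
        for (Node parent : parents) {
          parentClones.add(parent.clone());
        }
        // Rebuild this node under the cloned parents.
        return new Node(desc, parentClones);
      }

      public static void main(String[] args) {
        Node scan = new Node("scan", null);
        Node filter = new Node("filter", Arrays.asList(scan));
        Node copy = filter.clone();
        System.out.println(copy.parents.get(0).desc); // prints "scan"
      }
    }

One property worth noting, visible in the diff itself: each parent is
cloned independently, so a parent shared along two paths of a DAG would
be duplicated once per path; the toy version above behaves the same way.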

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java Wed Aug 29 17:43:59 2012
@@ -18,7 +18,6 @@

package org.apache.hadoop.hive.ql.exec;

-import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

@@ -35,6 +34,7 @@ import org.apache.hadoop.hive.ql.plan.La
import org.apache.hadoop.hive.ql.plan.LateralViewJoinDesc;
import org.apache.hadoop.hive.ql.plan.LimitDesc;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.SMBJoinDesc;
import org.apache.hadoop.hive.ql.plan.ScriptDesc;
@@ -54,7 +54,7 @@ public final class OperatorFactory {
*
* @param <T>
*/
- public static final class OpTuple<T extends Serializable> {
+ public static final class OpTuple<T extends OperatorDesc> {
public Class<T> descClass;
public Class<? extends Operator<T>> opClass;

@@ -93,7 +93,7 @@ public final class OperatorFactory {
HashTableSinkOperator.class));
}

- public static <T extends Serializable> Operator<T> get(Class<T> opClass) {
+ public static <T extends OperatorDesc> Operator<T> get(Class<T> opClass) {

for (OpTuple o : opvec) {
if (o.descClass == opClass) {
@@ -111,7 +111,7 @@ public final class OperatorFactory {
+ opClass.getName());
}

- public static <T extends Serializable> Operator<T> get(Class<T> opClass,
+ public static <T extends OperatorDesc> Operator<T> get(Class<T> opClass,
RowSchema rwsch) {

Operator<T> ret = get(opClass);
@@ -122,36 +122,46 @@ public final class OperatorFactory {
/**
* Returns an operator given the conf and a list of children operators.
*/
- public static <T extends Serializable> Operator<T> get(T conf,
- Operator<? extends Serializable>... oplist) {
+ public static <T extends OperatorDesc> Operator<T> get(T conf,
+ Operator<? extends OperatorDesc>... oplist) {
Operator<T> ret = get((Class<T>) conf.getClass());
ret.setConf(conf);
+ makeChild(ret, oplist);
+ return (ret);
+ }
+
+ /**
+ * Returns an operator given the conf and a list of children operators.
+ */
+ public static void makeChild(
+ Operator<? extends OperatorDesc> ret,
+ Operator<? extends OperatorDesc>... oplist) {
if (oplist.length == 0) {
- return (ret);
+ return;
}

- ArrayList<Operator<? extends Serializable>> clist = new ArrayList<Operator<? extends Serializable>>();
- for (Operator op : oplist) {
+ ArrayList<Operator<? extends OperatorDesc>> clist =
+ new ArrayList<Operator<? extends OperatorDesc>>();
+ for (Operator<? extends OperatorDesc> op : oplist) {
clist.add(op);
}
ret.setChildOperators(clist);

// Add this parent to the children
- for (Operator op : oplist) {
- List<Operator<? extends Serializable>> parents = op.getParentOperators();
+ for (Operator<? extends OperatorDesc> op : oplist) {
+ List<Operator<? extends OperatorDesc>> parents = op.getParentOperators();
if (parents == null) {
- parents = new ArrayList<Operator<? extends Serializable>>();
+ parents = new ArrayList<Operator<? extends OperatorDesc>>();
}
parents.add(ret);
op.setParentOperators(parents);
}
- return (ret);
}

/**
* Returns an operator given the conf and a list of children operators.
*/
- public static <T extends Serializable> Operator<T> get(T conf,
+ public static <T extends OperatorDesc> Operator<T> get(T conf,
RowSchema rwsch, Operator... oplist) {
Operator<T> ret = get(conf, oplist);
ret.setSchema(rwsch);
@@ -161,7 +171,7 @@ public final class OperatorFactory {
/**
* Returns an operator given the conf and a list of parent operators.
*/
- public static <T extends Serializable> Operator<T> getAndMakeChild(T conf,
+ public static <T extends OperatorDesc> Operator<T> getAndMakeChild(T conf,
Operator... oplist) {
Operator<T> ret = get((Class<T>) conf.getClass());
ret.setConf(conf);
@@ -180,7 +190,8 @@ public final class OperatorFactory {
}

// add parents for the newly created operator
- List<Operator<? extends Serializable>> parent = new ArrayList<Operator<? extends Serializable>>();
+ List<Operator<? extends OperatorDesc>> parent =
+ new ArrayList<Operator<? extends OperatorDesc>>();
for (Operator op : oplist) {
parent.add(op);
}
@@ -193,8 +204,8 @@ public final class OperatorFactory {
/**
* Returns an operator given the conf and a list of parent operators.
*/
- public static <T extends Serializable> Operator<T> getAndMakeChild(T conf,
- List<Operator<? extends Serializable>> oplist) {
+ public static <T extends OperatorDesc> Operator<T> getAndMakeChild(T conf,
+ List<Operator<? extends OperatorDesc>> oplist) {
Operator<T> ret = get((Class<T>) conf.getClass());
ret.setConf(conf);
if (oplist.size() == 0) {
@@ -212,7 +223,8 @@ public final class OperatorFactory {
}

// add parents for the newly created operator
- List<Operator<? extends Serializable>> parent = new ArrayList<Operator<? extends Serializable>>();
+ List<Operator<? extends OperatorDesc>> parent =
+ new ArrayList<Operator<? extends OperatorDesc>>();
for (Operator op : oplist) {
parent.add(op);
}
@@ -225,7 +237,7 @@ public final class OperatorFactory {
/**
* Returns an operator given the conf and a list of parent operators.
*/
- public static <T extends Serializable> Operator<T> getAndMakeChild(T conf,
+ public static <T extends OperatorDesc> Operator<T> getAndMakeChild(T conf,
RowSchema rwsch, Operator... oplist) {
Operator<T> ret = getAndMakeChild(conf, oplist);
ret.setSchema(rwsch);
@@ -235,8 +247,8 @@ public final class OperatorFactory {
/**
* Returns an operator given the conf and a list of parent operators.
*/
- public static <T extends Serializable> Operator<T> getAndMakeChild(T conf,
- RowSchema rwsch, List<Operator<? extends Serializable>> oplist) {
+ public static <T extends OperatorDesc> Operator<T> getAndMakeChild(T conf,
+ RowSchema rwsch, List<Operator<? extends OperatorDesc>> oplist) {
Operator<T> ret = getAndMakeChild(conf, oplist);
ret.setSchema(rwsch);
return (ret);
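
The change above factors the child/parent wiring out of get(conf, oplist)
into the new public makeChild(), presumably so that an already-constructed
operator can be wired to children without building a fresh instance. A
hedged usage sketch follows, using only signatures visible in this diff;
that FilterDesc and SelectDesc are registered in opvec is an assumption:

    import org.apache.hadoop.hive.ql.exec.Operator;
    import org.apache.hadoop.hive.ql.exec.OperatorFactory;
    import org.apache.hadoop.hive.ql.plan.FilterDesc;
    import org.apache.hadoop.hive.ql.plan.SelectDesc;

    public class WiringSketch {
      public static void main(String[] args) {
        // Build two unwired operators from their descriptor classes.
        Operator<FilterDesc> filter = OperatorFactory.get(FilterDesc.class);
        Operator<SelectDesc> select = OperatorFactory.get(SelectDesc.class);
        // Wire select as filter's child and filter as select's parent.
        OperatorFactory.makeChild(filter, select);
      }
    }
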

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java Wed Aug 29 17:43:59 2012
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.ql.plan.Bu
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.SMBJoinDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -148,9 +149,9 @@ public class SMBMapJoinOperator extends
for (Map.Entry<String, FetchWork> entry : localWork.getAliasToFetchWork()
.entrySet()) {
JobConf jobClone = new JobConf(hconf);
- Operator<? extends Serializable> tableScan = localWork.getAliasToWork()
- .get(entry.getKey());
- if(tableScan instanceof TableScanOperator) {
+ Operator<? extends OperatorDesc> tableScan = localWork.getAliasToWork()
+ .get(entry.getKey());
+ if (tableScan instanceof TableScanOperator) {
ArrayList<Integer> list = ((TableScanOperator)tableScan).getNeededColumnIDs();
if (list != null) {
ColumnProjectionUtils.appendReadColumnIDs(jobClone, list);
@@ -165,8 +166,8 @@ public class SMBMapJoinOperator extends
}

for (Map.Entry<String, FetchOperator> entry : fetchOperators.entrySet()) {
- Operator<? extends Serializable> forwardOp = localWork.getAliasToWork()
- .get(entry.getKey());
+ Operator<? extends OperatorDesc> forwardOp = localWork.getAliasToWork()
+ .get(entry.getKey());
// All the operators need to be initialized before process
forwardOp.setExecContext(this.getExecContext());
FetchOperator fetchOp = entry.getValue();
@@ -500,7 +501,7 @@ public class SMBMapJoinOperator extends
String tble = this.tagToAlias.get(tag);
FetchOperator fetchOp = fetchOperators.get(tble);

- Operator<? extends Serializable> forwardOp = localWork.getAliasToWork()
+ Operator<? extends OperatorDesc> forwardOp = localWork.getAliasToWork()
.get(tble);
try {
InspectableObject row = fetchOp.getNextRow();
@@ -565,7 +566,7 @@ public class SMBMapJoinOperator extends
super.closeOp(abort);
if (fetchOperators != null) {
for (Map.Entry<String, FetchOperator> entry : fetchOperators.entrySet()) {
- Operator<? extends Serializable> forwardOp = localWork
+ Operator<? extends OperatorDesc> forwardOp = localWork
.getAliasToWork().get(entry.getKey());
forwardOp.close(abort);
}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java Wed Aug 29 17:43:59 2012
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec;

import java.io.FileNotFoundException;
import java.io.IOException;
-import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -35,6 +34,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
@@ -94,11 +94,11 @@ public class SkewJoinHandler {
List<Object> dummyKey = null;
String taskId;

- private final CommonJoinOperator<? extends Serializable> joinOp;
+ private final CommonJoinOperator<? extends OperatorDesc> joinOp;
private final int numAliases;
private final JoinDesc conf;

- public SkewJoinHandler(CommonJoinOperator<? extends Serializable> joinOp) {
+ public SkewJoinHandler(CommonJoinOperator<? extends OperatorDesc> joinOp) {
this.joinOp = joinOp;
numAliases = joinOp.numAliases;
conf = joinOp.getConf();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java Wed Aug 29 17:43:59 2012
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.ql.QueryPl
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -350,18 +351,18 @@ public abstract class Task<T extends Ser
return false;
}

- public Collection<Operator<? extends Serializable>> getTopOperators() {
- return new LinkedList<Operator<? extends Serializable>>();
+ public Collection<Operator<? extends OperatorDesc>> getTopOperators() {
+ return new LinkedList<Operator<? extends OperatorDesc>>();
}
-
+
public boolean hasReduce() {
return false;
}

- public Operator<? extends Serializable> getReducer() {
+ public Operator<? extends OperatorDesc> getReducer() {
return null;
}
-
+
public HashMap<String, Long> getCounters() {
return taskCounters;
}
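
The Task defaults above are unchanged in behavior (an empty operator list, a null reducer); only the element type moves to OperatorDesc. A hedged sketch of an override under the new signatures, assuming a task whose work object is a MapredWork:

    @Override
    public Collection<Operator<? extends OperatorDesc>> getTopOperators() {
      // the map-side plan roots, keyed by alias in the work object
      return getWork().getAliasToWork().values();
    }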

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TerminalOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TerminalOperator.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TerminalOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TerminalOperator.java Wed Aug 29 17:43:59 2012
@@ -20,10 +20,12 @@ package org.apache.hadoop.hive.ql.exec;

import java.io.Serializable;

+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+
/**
* Terminal Operator Base Class.
**/
-public abstract class TerminalOperator<T extends Serializable> extends
+public abstract class TerminalOperator<T extends OperatorDesc> extends
Operator<T> implements Serializable {
private static final long serialVersionUID = 1L;
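
Concrete subclasses must now bind T to an OperatorDesc subtype. A minimal, purely illustrative terminal operator (FileSinkDesc is one such descriptor; the no-op body is not from this patch):

    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
    import org.apache.hadoop.hive.ql.plan.api.OperatorType;

    public class NullSinkOperator extends TerminalOperator<FileSinkDesc> {
      private static final long serialVersionUID = 1L;

      @Override
      public void processOp(Object row, int tag) throws HiveException {
        // illustrative no-op: discard rows
      }

      @Override
      public OperatorType getType() {
        return null; // sketch only; a real operator reports its OperatorType
      }
    }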


Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Wed Aug 29 17:43:59 2012
@@ -118,8 +118,8 @@ import org.apache.hadoop.hive.ql.plan.Ma
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.plan.PlanUtils.ExpressionTypes;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.stats.StatsFactory;
import org.apache.hadoop.hive.ql.stats.StatsPublisher;
@@ -135,8 +135,8 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapred.FileOutputFormat;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java Wed Aug 29 17:43:59 2012
@@ -18,7 +18,6 @@

package org.apache.hadoop.hive.ql.index.compact;

-import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
@@ -57,14 +56,13 @@ import org.apache.hadoop.hive.ql.plan.Ex
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;

public class CompactIndexHandler extends TableBasedIndexHandler {

@@ -252,9 +250,11 @@ public class CompactIndexHandler extends
* @param operators
* @return whether or not it has found its target
*/
- private boolean findIndexColumnFilter(Collection<Operator<? extends Serializable>> operators) {
- for (Operator<? extends Serializable> op : operators) {
- if (op instanceof FilterOperator && ((FilterOperator)op).getConf().getPredicate().getChildren() != null) {
+ private boolean findIndexColumnFilter(
+ Collection<Operator<? extends OperatorDesc>> operators) {
+ for (Operator<? extends OperatorDesc> op : operators) {
+ if (op instanceof FilterOperator &&
+ ((FilterOperator)op).getConf().getPredicate().getChildren() != null) {
// Is this the target
if (findIndexColumnExprNodeDesc(((FilterOperator)op).getConf().getPredicate())) {
((FilterOperator)op).getConf().setSortedFilter(true);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java Wed Aug 29 17:43:59 2012
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.io;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -41,6 +40,7 @@ import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.parse.SplitSample;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.shims.HadoopShims.CombineFileInputFormatShim;
@@ -224,10 +224,10 @@ public class CombineHiveInputFormat<K ex
// Splits are not shared across different partitions with different input formats.
// For example, 2 partitions (1 sequencefile and 1 rcfile) will have 2 different splits
private static class CombinePathInputFormat {
- private final List<Operator<? extends Serializable>> opList;
+ private final List<Operator<? extends OperatorDesc>> opList;
private final String inputFormatClassName;

- public CombinePathInputFormat(List<Operator<? extends Serializable>> opList,
+ public CombinePathInputFormat(List<Operator<? extends OperatorDesc>> opList,
String inputFormatClassName) {
this.opList = opList;
this.inputFormatClassName = inputFormatClassName;
@@ -259,7 +259,7 @@ public class CombineHiveInputFormat<K ex
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
init(job);
Map<String, ArrayList<String>> pathToAliases = mrwork.getPathToAliases();
- Map<String, Operator<? extends Serializable>> aliasToWork =
+ Map<String, Operator<? extends OperatorDesc>> aliasToWork =
mrwork.getAliasToWork();
CombineFileInputFormatShim combine = ShimLoader.getHadoopShims()
.getCombineFileInputFormat();
@@ -341,7 +341,7 @@ public class CombineHiveInputFormat<K ex

// Does a pool exist for this path already
CombineFilter f = null;
- List<Operator<? extends Serializable>> opList = null;
+ List<Operator<? extends OperatorDesc>> opList = null;
boolean done = false;

if (!mrwork.isMapperCannotSpanPartns()) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java Wed Aug 29 17:43:59 2012
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.io;

import java.io.IOException;
-import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -31,15 +30,16 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.InputFormat;
@@ -385,11 +385,11 @@ public final class HiveFileFormatUtils {
* @param aliasToWork The operator tree to be invoked for a given alias
* @param dir The path to look for
**/
- public static List<Operator<? extends Serializable>> doGetWorksFromPath(
+ public static List<Operator<? extends OperatorDesc>> doGetWorksFromPath(
Map<String, ArrayList<String>> pathToAliases,
- Map<String, Operator<? extends Serializable>> aliasToWork, Path dir) {
- List<Operator<? extends Serializable>> opList =
- new ArrayList<Operator<? extends Serializable>>();
+ Map<String, Operator<? extends OperatorDesc>> aliasToWork, Path dir) {
+ List<Operator<? extends OperatorDesc>> opList =
+ new ArrayList<Operator<? extends OperatorDesc>>();

List<String> aliases = doGetAliasesFromPath(pathToAliases, dir);
for (String alias : aliases) {
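
A usage sketch for the retyped helper (mrwork and dir are assumed in scope, as in the input-format callers elsewhere in this patch):

    Map<String, ArrayList<String>> pathToAliases = mrwork.getPathToAliases();
    Map<String, Operator<? extends OperatorDesc>> aliasToWork = mrwork.getAliasToWork();
    List<Operator<? extends OperatorDesc>> ops =
        HiveFileFormatUtils.doGetWorksFromPath(pathToAliases, aliasToWork, dir);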

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java Wed Aug 29 17:43:59 2012
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.io;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
@@ -39,6 +38,7 @@ import org.apache.hadoop.hive.ql.exec.Ta
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -416,8 +416,8 @@ public class HiveInputFormat<K extends W
}

for (String alias : aliases) {
- Operator<? extends Serializable> op = this.mrwork.getAliasToWork().get(
- alias);
+ Operator<? extends OperatorDesc> op = this.mrwork.getAliasToWork().get(
+ alias);
if (op != null && op instanceof TableScanOperator) {
TableScanOperator tableScan = (TableScanOperator) op;


Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java Wed Aug 29 17:43:59 2012
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.lib;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Stack;
@@ -43,7 +42,7 @@ public class DefaultGraphWalker implemen

/**
* Constructor.
- *
+ *
* @param disp
* dispatcher to call for each op encountered
*/
@@ -68,7 +67,7 @@ public class DefaultGraphWalker implemen

/**
* Dispatch the current operator.
- *
+ *
* @param nd
* node being walked
* @param ndStack
@@ -91,7 +90,7 @@ public class DefaultGraphWalker implemen

/**
* starting point for walking.
- *
+ *
* @throws SemanticException
*/
public void startWalking(Collection<Node> startNodes,
@@ -108,7 +107,7 @@ public class DefaultGraphWalker implemen

/**
* walk the current operator and its descendants.
- *
+ *
* @param nd
* current operator in the graph
* @throws SemanticException
@@ -122,7 +121,7 @@ public class DefaultGraphWalker implemen
getDispatchedList().containsAll(nd.getChildren())) {
// all children are done or no need to walk the children
if (!getDispatchedList().contains(nd)) {
- dispatch(nd, opStack);
+ dispatch(nd, opStack);
}
opStack.pop();
return;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Wed Aug 29 17:43:59 2012
@@ -543,7 +543,7 @@ public class Table implements Serializab
tTable.getSd().getSkewedInfo().setSkewedColNames(skewedColNames);
}

- public List<String> getSkewedColName() {
+ public List<String> getSkewedColNames() {
return tTable.getSd().getSkewedInfo().getSkewedColNames();
}
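
Note this hunk is a rename, not a retype: callers of getSkewedColName() must move to the plural form, e.g.

    List<String> skewedCols = tbl.getSkewedColNames();  // was tbl.getSkewedColName()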


Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketMapJoinOptimizer.java Wed Aug 29 17:43:59 2012
@@ -18,7 +18,6 @@
package org.apache.hadoop.hive.ql.optimizer;

import java.io.IOException;
-import java.io.Serializable;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
@@ -65,6 +64,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

/**
@@ -188,7 +188,8 @@ public class BucketMapJoinOptimizer impl
LinkedHashMap<String, List<List<String>>> aliasToPartitionBucketFileNamesMapping =
new LinkedHashMap<String, List<List<String>>>();

- Map<String, Operator<? extends Serializable>> topOps = this.pGraphContext.getTopOps();
+ Map<String, Operator<? extends OperatorDesc>> topOps =
+ this.pGraphContext.getTopOps();
Map<TableScanOperator, Table> topToTable = this.pGraphContext.getTopToTable();

// (partition to bucket file names) and (partition to bucket number) for

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java Wed Aug 29 17:43:59 2012
@@ -18,7 +18,6 @@

package org.apache.hadoop.hive.ql.optimizer;

-import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
@@ -39,6 +38,7 @@ import org.apache.hadoop.hive.ql.lib.Rul
import org.apache.hadoop.hive.ql.parse.OpParseContext;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;

/**
* Implementation of one of the rule-based optimization steps. ColumnPruner gets
@@ -50,7 +50,7 @@ import org.apache.hadoop.hive.ql.parse.S
*/
public class ColumnPruner implements Transform {
protected ParseContext pGraphContext;
- private HashMap<Operator<? extends Serializable>, OpParseContext> opToParseCtxMap;
+ private HashMap<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap;

/**
* empty constructor.


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    @@ -33,6 +32,7 @@ import org.apache.hadoop.hive.ql.parse.O
    import org.apache.hadoop.hive.ql.parse.RowResolver;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.SelectDesc;

    /**
    @@ -40,15 +40,15 @@ import org.apache.hadoop.hive.ql.plan.Se
    */
    public class ColumnPrunerProcCtx implements NodeProcessorCtx {

    - private final Map<Operator<? extends Serializable>, List<String>> prunedColLists;
    + private final Map<Operator<? extends OperatorDesc>, List<String>> prunedColLists;

    - private final HashMap<Operator<? extends Serializable>, OpParseContext> opToParseCtxMap;
    + private final HashMap<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap;

    private final Map<CommonJoinOperator, Map<Byte, List<String>>> joinPrunedColLists;

    public ColumnPrunerProcCtx(
    - HashMap<Operator<? extends Serializable>, OpParseContext> opToParseContextMap) {
    - prunedColLists = new HashMap<Operator<? extends Serializable>, List<String>>();
    + HashMap<Operator<? extends OperatorDesc>, OpParseContext> opToParseContextMap) {
    + prunedColLists = new HashMap<Operator<? extends OperatorDesc>, List<String>>();
    opToParseCtxMap = opToParseContextMap;
    joinPrunedColLists = new HashMap<CommonJoinOperator, Map<Byte, List<String>>>();
    }
    @@ -60,15 +60,15 @@ public class ColumnPrunerProcCtx impleme
    /**
    * @return the prunedColLists
    */
    - public List<String> getPrunedColList(Operator<? extends Serializable> op) {
    + public List<String> getPrunedColList(Operator<? extends OperatorDesc> op) {
    return prunedColLists.get(op);
    }

    - public HashMap<Operator<? extends Serializable>, OpParseContext> getOpToParseCtxMap() {
    + public HashMap<Operator<? extends OperatorDesc>, OpParseContext> getOpToParseCtxMap() {
    return opToParseCtxMap;
    }

    - public Map<Operator<? extends Serializable>, List<String>> getPrunedColLists() {
    + public Map<Operator<? extends OperatorDesc>, List<String>> getPrunedColLists() {
    return prunedColLists;
    }

    @@ -77,17 +77,17 @@ public class ColumnPrunerProcCtx impleme
    * RowResolver and are different from the external column names) that are
    * needed in the subtree. These columns eventually have to be selected from
    * the table scan.
    - *
    + *
    * @param curOp
    * The root of the operator subtree.
    * @return List<String> of the internal column names.
    * @throws SemanticException
    */
    - public List<String> genColLists(Operator<? extends Serializable> curOp)
    + public List<String> genColLists(Operator<? extends OperatorDesc> curOp)
    throws SemanticException {
    List<String> colList = new ArrayList<String>();
    if (curOp.getChildOperators() != null) {
    - for (Operator<? extends Serializable> child : curOp.getChildOperators()) {
    + for (Operator<? extends OperatorDesc> child : curOp.getChildOperators()) {
    if (child instanceof CommonJoinOperator) {
    int tag = child.getParentOperators().indexOf(curOp);
    List<String> prunList = joinPrunedColLists.get(child).get((byte) tag);
    @@ -105,7 +105,7 @@ public class ColumnPrunerProcCtx impleme
    * Creates the list of internal column names from select expressions in a
    * select operator. This function is used for the select operator instead of
    * the genColLists function (which is used by the rest of the operators).
    - *
    + *
    * @param op
    * The select operator.
    * @return List<String> of the internal column names.
    @@ -122,7 +122,7 @@ public class ColumnPrunerProcCtx impleme

    /**
    * Creates the list of internal column names for select * expressions.
    - *
    + *
    * @param op
    * The select operator.
    * @param colList

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.HashMap;
    @@ -62,6 +61,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
    import org.apache.hadoop.hive.ql.plan.GroupByDesc;
    import org.apache.hadoop.hive.ql.plan.JoinDesc;
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
    import org.apache.hadoop.hive.ql.plan.SelectDesc;
    @@ -154,8 +154,8 @@ public final class ColumnPrunerProcFacto
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
    Object... nodeOutputs) throws SemanticException {
    ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
    - cppCtx.getPrunedColLists().put((Operator<? extends Serializable>) nd,
    - cppCtx.genColLists((Operator<? extends Serializable>) nd));
    + cppCtx.getPrunedColLists().put((Operator<? extends OperatorDesc>) nd,
    + cppCtx.genColLists((Operator<? extends OperatorDesc>) nd));

    return null;
    }
    @@ -180,8 +180,8 @@ public final class ColumnPrunerProcFacto
    TableScanOperator scanOp = (TableScanOperator) nd;
    ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
    List<String> cols = cppCtx
    - .genColLists((Operator<? extends Serializable>) nd);
    - cppCtx.getPrunedColLists().put((Operator<? extends Serializable>) nd,
    + .genColLists((Operator<? extends OperatorDesc>) nd);
    + cppCtx.getPrunedColLists().put((Operator<? extends OperatorDesc>) nd,
    cols);
    ArrayList<Integer> needed_columns = new ArrayList<Integer>();
    RowResolver inputRR = cppCtx.getOpToParseCtxMap().get(scanOp).getRowResolver();
    @@ -241,13 +241,13 @@ public final class ColumnPrunerProcFacto
    Object... nodeOutputs) throws SemanticException {
    ReduceSinkOperator op = (ReduceSinkOperator) nd;
    ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
    - HashMap<Operator<? extends Serializable>, OpParseContext> opToParseCtxMap = cppCtx
    + HashMap<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap = cppCtx
    .getOpToParseCtxMap();
    RowResolver redSinkRR = opToParseCtxMap.get(op).getRowResolver();
    ReduceSinkDesc conf = op.getConf();
    - List<Operator<? extends Serializable>> childOperators = op
    + List<Operator<? extends OperatorDesc>> childOperators = op
    .getChildOperators();
    - List<Operator<? extends Serializable>> parentOperators = op
    + List<Operator<? extends OperatorDesc>> parentOperators = op
    .getParentOperators();

    List<String> colLists = new ArrayList<String>();
    @@ -259,7 +259,7 @@ public final class ColumnPrunerProcFacto
    if ((childOperators.size() == 1)
    && (childOperators.get(0) instanceof JoinOperator)) {
    assert parentOperators.size() == 1;
    - Operator<? extends Serializable> par = parentOperators.get(0);
    + Operator<? extends OperatorDesc> par = parentOperators.get(0);
    JoinOperator childJoin = (JoinOperator) childOperators.get(0);
    RowResolver parRR = opToParseCtxMap.get(par).getRowResolver();
    List<String> childJoinCols = cppCtx.getJoinPrunedColLists().get(
    @@ -405,7 +405,7 @@ public final class ColumnPrunerProcFacto

    LateralViewJoinOperator lvJoin = null;
    if (op.getChildOperators() != null) {
    - for (Operator<? extends Serializable> child : op.getChildOperators()) {
    + for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
    // If one of my children is a FileSink or Script, return all columns.
    // Without this break, a bug in ReduceSink to Extract edge column
    // pruning will manifest
    @@ -490,14 +490,14 @@ public final class ColumnPrunerProcFacto
    */
    private void handleChildren(SelectOperator op,
    List<String> retainedSelOutputCols, ColumnPrunerProcCtx cppCtx) throws SemanticException {
    - for (Operator<? extends Serializable> child : op.getChildOperators()) {
    + for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
    if (child instanceof ReduceSinkOperator) {
    boolean[] flags = getPruneReduceSinkOpRetainFlags(
    retainedSelOutputCols, (ReduceSinkOperator) child);
    pruneReduceSinkOperator(flags, (ReduceSinkOperator) child, cppCtx);
    } else if (child instanceof FilterOperator) {
    // filter operator has the same output columns as its parent
    - for (Operator<? extends Serializable> filterChild : child
    + for (Operator<? extends OperatorDesc> filterChild : child
    .getChildOperators()) {
    if (filterChild instanceof ReduceSinkOperator) {
    boolean[] flags = getPruneReduceSinkOpRetainFlags(
    @@ -647,7 +647,7 @@ public final class ColumnPrunerProcFacto
    }

    private static void pruneOperator(NodeProcessorCtx ctx,
    - Operator<? extends Serializable> op,
    + Operator<? extends OperatorDesc> op,
    List<String> cols)
    throws SemanticException {
    // the pruning needs to preserve the order of columns in the input schema
    @@ -671,7 +671,7 @@ public final class ColumnPrunerProcFacto
    * @return
    * @throws SemanticException
    */
    - private static List<String> preserveColumnOrder(Operator<? extends Serializable> op,
    + private static List<String> preserveColumnOrder(Operator<? extends OperatorDesc> op,
    List<String> cols)
    throws SemanticException {
    RowSchema inputSchema = op.getSchema();
    @@ -696,10 +696,10 @@ public final class ColumnPrunerProcFacto
    Map<Byte, List<Integer>> retainMap, boolean mapJoin) throws SemanticException {
    ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
    Map<Byte, List<String>> prunedColLists = new HashMap<Byte, List<String>>();
    - List<Operator<? extends Serializable>> childOperators = op
    + List<Operator<? extends OperatorDesc>> childOperators = op
    .getChildOperators();

    - for (Operator<? extends Serializable> child : childOperators) {
    + for (Operator<? extends OperatorDesc> child : childOperators) {
    if (child instanceof FileSinkOperator) {
    return;
    }
    @@ -787,7 +787,7 @@ public final class ColumnPrunerProcFacto

    }

    - for (Operator<? extends Serializable> child : childOperators) {
    + for (Operator<? extends OperatorDesc> child : childOperators) {
    if (child instanceof ReduceSinkOperator) {
    boolean[] flags = getPruneReduceSinkOpRetainFlags(childColLists,
    (ReduceSinkOperator) child);

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java Wed Aug 29 17:43:59 2012
    @@ -69,6 +69,7 @@ import org.apache.hadoop.hive.ql.plan.Lo
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    import org.apache.hadoop.hive.ql.plan.MoveWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
    @@ -213,7 +214,7 @@ public class GenMRFileSink1 implements N
    }

    // create a dummy tableScan operator
    - Operator<? extends Serializable> tsMerge = OperatorFactory.get(
    + Operator<? extends OperatorDesc> tsMerge = OperatorFactory.get(
    TableScanDesc.class, inputRS);

    ArrayList<String> outputColumns = new ArrayList<String>();
    @@ -335,7 +336,8 @@ public class GenMRFileSink1 implements N

    // Create a TableScan operator
    RowSchema inputRS = fsInput.getSchema();
    - Operator<? extends Serializable> tsMerge = OperatorFactory.get(TableScanDesc.class, inputRS);
    + Operator<? extends OperatorDesc> tsMerge =
    + OperatorFactory.get(TableScanDesc.class, inputRS);

    // Create a FileSink operator
    TableDesc ts = (TableDesc) fsInputDesc.getTableInfo().clone();
    @@ -510,7 +512,7 @@ public class GenMRFileSink1 implements N
    * @param parentFS the last FileSinkOperator in the parent MapReduce work
    * @return the MapredWork
    */
    - private MapredWork createMergeTask(HiveConf conf, Operator<? extends Serializable> topOp,
    + private MapredWork createMergeTask(HiveConf conf, Operator<? extends OperatorDesc> topOp,
    FileSinkDesc fsDesc) {

    ArrayList<String> aliases = new ArrayList<String>();
    @@ -556,7 +558,7 @@ public class GenMRFileSink1 implements N
    work.setMapperCannotSpanPartns(true);
    work.setPathToAliases(pathToAliases);
    work.setAliasToWork(
    - new LinkedHashMap<String, Operator<? extends Serializable>>());
    + new LinkedHashMap<String, Operator<? extends OperatorDesc>>());
    if (hasDynamicPartitions) {
    work.getPathToPartitionInfo().put(inputDir,
    new PartitionDesc(tblDesc, null));
    @@ -696,11 +698,11 @@ public class GenMRFileSink1 implements N
    mvTask = findMoveTask(ctx.getMvTask(), fsOp);
    }

    - Operator<? extends Serializable> currTopOp = ctx.getCurrTopOp();
    + Operator<? extends OperatorDesc> currTopOp = ctx.getCurrTopOp();
    String currAliasId = ctx.getCurrAliasId();
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap =
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
    ctx.getOpTaskMap();
    - List<Operator<? extends Serializable>> seenOps = ctx.getSeenOps();
    + List<Operator<? extends OperatorDesc>> seenOps = ctx.getSeenOps();
    List<Task<? extends Serializable>> rootTasks = ctx.getRootTasks();

    // Set the move task to be dependent on the current task

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMROperator.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer;

    -import java.io.Serializable;
    import java.util.Map;
    import java.util.Stack;

    @@ -28,6 +27,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
    import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
    import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Processor for the rule - no specific rule fired.
    @@ -39,7 +39,7 @@ public class GenMROperator implements No

    /**
    * Reduce Scan encountered.
    - *
    + *
    * @param nd
    * the reduce sink operator encountered
    * @param procCtx
    @@ -49,10 +49,10 @@ public class GenMROperator implements No
    Object... nodeOutputs) throws SemanticException {
    GenMRProcContext ctx = (GenMRProcContext) procCtx;

    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
    .getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(stack.get(stack.size() - 2));
    - mapCurrCtx.put((Operator<? extends Serializable>) nd, new GenMapRedCtx(
    + mapCurrCtx.put((Operator<? extends OperatorDesc>) nd, new GenMapRedCtx(
    mapredCtx.getCurrTask(), mapredCtx.getCurrTopOp(), mapredCtx
    .getCurrAliasId()));
    return null;

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java Wed Aug 29 17:43:59 2012
    @@ -40,6 +40,7 @@ import org.apache.hadoop.hive.ql.parse.P
    import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    import org.apache.hadoop.hive.ql.plan.MoveWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    /**
    @@ -54,7 +55,7 @@ public class GenMRProcContext implements
    */
    public static class GenMapRedCtx {
    Task<? extends Serializable> currTask;
    - Operator<? extends Serializable> currTopOp;
    + Operator<? extends OperatorDesc> currTopOp;
    String currAliasId;

    public GenMapRedCtx() {
    @@ -69,7 +70,7 @@ public class GenMRProcContext implements
    * the current alias for the to operator
    */
    public GenMapRedCtx(Task<? extends Serializable> currTask,
    - Operator<? extends Serializable> currTopOp, String currAliasId) {
    + Operator<? extends OperatorDesc> currTopOp, String currAliasId) {
    this.currTask = currTask;
    this.currTopOp = currTopOp;
    this.currAliasId = currAliasId;
    @@ -85,7 +86,7 @@ public class GenMRProcContext implements
    /**
    * @return current top operator
    */
    - public Operator<? extends Serializable> getCurrTopOp() {
    + public Operator<? extends OperatorDesc> getCurrTopOp() {
    return currTopOp;
    }

    @@ -105,13 +106,13 @@ public class GenMRProcContext implements
    Task<? extends Serializable> uTask;
    List<String> taskTmpDir;
    List<TableDesc> tt_desc;
    - List<Operator<? extends Serializable>> listTopOperators;
    + List<Operator<? extends OperatorDesc>> listTopOperators;

    public GenMRUnionCtx() {
    uTask = null;
    taskTmpDir = new ArrayList<String>();
    tt_desc = new ArrayList<TableDesc>();
    - listTopOperators = new ArrayList<Operator<? extends Serializable>>();
    + listTopOperators = new ArrayList<Operator<? extends OperatorDesc>>();
    }

    public Task<? extends Serializable> getUTask() {
    @@ -138,16 +139,16 @@ public class GenMRProcContext implements
    return tt_desc;
    }

    - public List<Operator<? extends Serializable>> getListTopOperators() {
    + public List<Operator<? extends OperatorDesc>> getListTopOperators() {
    return listTopOperators;
    }

    public void setListTopOperators(
    - List<Operator<? extends Serializable>> listTopOperators) {
    + List<Operator<? extends OperatorDesc>> listTopOperators) {
    this.listTopOperators = listTopOperators;
    }

    - public void addListTopOperators(Operator<? extends Serializable> topOperator) {
    + public void addListTopOperators(Operator<? extends OperatorDesc> topOperator) {
    listTopOperators.add(topOperator);
    }
    }
    @@ -159,7 +160,7 @@ public class GenMRProcContext implements
    public static class GenMRMapJoinCtx {
    String taskTmpDir;
    TableDesc tt_desc;
    - Operator<? extends Serializable> rootMapJoinOp;
    + Operator<? extends OperatorDesc> rootMapJoinOp;
    AbstractMapJoinOperator<? extends MapJoinDesc> oldMapJoin;

    public GenMRMapJoinCtx() {
    @@ -176,7 +177,7 @@ public class GenMRProcContext implements
    * @param oldMapJoin
    */
    public GenMRMapJoinCtx(String taskTmpDir, TableDesc tt_desc,
    - Operator<? extends Serializable> rootMapJoinOp,
    + Operator<? extends OperatorDesc> rootMapJoinOp,
    AbstractMapJoinOperator<? extends MapJoinDesc> oldMapJoin) {
    this.taskTmpDir = taskTmpDir;
    this.tt_desc = tt_desc;
    @@ -203,7 +204,7 @@ public class GenMRProcContext implements
    /**
    * @return the childSelect
    */
    - public Operator<? extends Serializable> getRootMapJoinOp() {
    + public Operator<? extends OperatorDesc> getRootMapJoinOp() {
    return rootMapJoinOp;
    }

    @@ -211,7 +212,7 @@ public class GenMRProcContext implements
    * @param rootMapJoinOp
    * the rootMapJoinOp to set
    */
    - public void setRootMapJoinOp(Operator<? extends Serializable> rootMapJoinOp) {
    + public void setRootMapJoinOp(Operator<? extends OperatorDesc> rootMapJoinOp) {
    this.rootMapJoinOp = rootMapJoinOp;
    }

    @@ -232,23 +233,24 @@ public class GenMRProcContext implements
    }

    private HiveConf conf;
    - private HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap;
    + private
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap;
    private HashMap<UnionOperator, GenMRUnionCtx> unionTaskMap;
    private HashMap<AbstractMapJoinOperator<? extends MapJoinDesc>, GenMRMapJoinCtx> mapJoinTaskMap;
    - private List<Operator<? extends Serializable>> seenOps;
    + private List<Operator<? extends OperatorDesc>> seenOps;
    private List<FileSinkOperator> seenFileSinkOps;

    private ParseContext parseCtx;
    private List<Task<MoveWork>> mvTask;
    private List<Task<? extends Serializable>> rootTasks;

    - private LinkedHashMap<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx;
    + private LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx;
    private Task<? extends Serializable> currTask;
    - private Operator<? extends Serializable> currTopOp;
    + private Operator<? extends OperatorDesc> currTopOp;
    private UnionOperator currUnionOp;
    private AbstractMapJoinOperator<? extends MapJoinDesc> currMapJoinOp;
    private String currAliasId;
    - private List<Operator<? extends Serializable>> rootOps;
    + private List<Operator<? extends OperatorDesc>> rootOps;
    private DependencyCollectionTask dependencyTaskForMultiInsert;

    /**
    @@ -287,11 +289,11 @@ public class GenMRProcContext implements
    */
    public GenMRProcContext(
    HiveConf conf,
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap,
    - List<Operator<? extends Serializable>> seenOps, ParseContext parseCtx,
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap,
    + List<Operator<? extends OperatorDesc>> seenOps, ParseContext parseCtx,
    List<Task<MoveWork>> mvTask,
    List<Task<? extends Serializable>> rootTasks,
    - LinkedHashMap<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx,
    + LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx,
    Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
    this.conf = conf;
    this.opTaskMap = opTaskMap;
    @@ -307,7 +309,7 @@ public class GenMRProcContext implements
    currUnionOp = null;
    currMapJoinOp = null;
    currAliasId = null;
    - rootOps = new ArrayList<Operator<? extends Serializable>>();
    + rootOps = new ArrayList<Operator<? extends OperatorDesc>>();
    rootOps.addAll(parseCtx.getTopOps().values());
    unionTaskMap = new HashMap<UnionOperator, GenMRUnionCtx>();
    mapJoinTaskMap = new HashMap<AbstractMapJoinOperator<? extends MapJoinDesc>, GenMRMapJoinCtx>();
    @@ -317,7 +319,8 @@ public class GenMRProcContext implements
    /**
    * @return reducer to task mapping
    */
    - public HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> getOpTaskMap() {
    + public HashMap<Operator<? extends OperatorDesc>,
    + Task<? extends Serializable>> getOpTaskMap() {
    return opTaskMap;
    }

    @@ -326,14 +329,14 @@ public class GenMRProcContext implements
    * reducer to task mapping
    */
    public void setOpTaskMap(
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap) {
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap) {
    this.opTaskMap = opTaskMap;
    }

    /**
    * @return operators already visited
    */
    - public List<Operator<? extends Serializable>> getSeenOps() {
    + public List<Operator<? extends OperatorDesc>> getSeenOps() {
    return seenOps;
    }

    @@ -348,7 +351,7 @@ public class GenMRProcContext implements
    * @param seenOps
    * operators already visited
    */
    - public void setSeenOps(List<Operator<? extends Serializable>> seenOps) {
    + public void setSeenOps(List<Operator<? extends OperatorDesc>> seenOps) {
    this.seenOps = seenOps;
    }

    @@ -363,7 +366,7 @@ public class GenMRProcContext implements
    /**
    * @return top operators for tasks
    */
    - public List<Operator<? extends Serializable>> getRootOps() {
    + public List<Operator<? extends OperatorDesc>> getRootOps() {
    return rootOps;
    }

    @@ -371,7 +374,7 @@ public class GenMRProcContext implements
    * @param rootOps
    * top operators for tasks
    */
    - public void setRootOps(List<Operator<? extends Serializable>> rootOps) {
    + public void setRootOps(List<Operator<? extends OperatorDesc>> rootOps) {
    this.rootOps = rootOps;
    }

    @@ -423,7 +426,7 @@ public class GenMRProcContext implements
    /**
    * @return operator to task mappings
    */
    - public LinkedHashMap<Operator<? extends Serializable>, GenMapRedCtx> getMapCurrCtx() {
    + public LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx> getMapCurrCtx() {
    return mapCurrCtx;
    }

    @@ -432,7 +435,7 @@ public class GenMRProcContext implements
    * operator to task mappings
    */
    public void setMapCurrCtx(
    - LinkedHashMap<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx) {
    + LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx) {
    this.mapCurrCtx = mapCurrCtx;
    }

    @@ -454,7 +457,7 @@ public class GenMRProcContext implements
    /**
    * @return current top operator
    */
    - public Operator<? extends Serializable> getCurrTopOp() {
    + public Operator<? extends OperatorDesc> getCurrTopOp() {
    return currTopOp;
    }

    @@ -462,7 +465,7 @@ public class GenMRProcContext implements
    * @param currTopOp
    * current top operator
    */
    - public void setCurrTopOp(Operator<? extends Serializable> currTopOp) {
    + public void setCurrTopOp(Operator<? extends OperatorDesc> currTopOp) {
    this.currTopOp = currTopOp;
    }
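
     Taken together, the GenMRProcContext retypes mean the per-operator bookkeeping is wired up as in this hedged sketch (names mirror the constructor and maps shown above):

         GenMapRedCtx mrCtx = new GenMapRedCtx(currTask, currTopOp, currAliasId);
         mapCurrCtx.put(reduceSink, mrCtx);  // reduceSink is an Operator<? extends OperatorDesc>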


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java Wed Aug 29 17:43:59 2012
    @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
    import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Processor for the rule - table scan followed by reduce sink.
    @@ -43,7 +44,7 @@ public class GenMRRedSink1 implements No

    /**
    * Reduce Scan encountered.
    - *
    + *
    * @param nd
    * the reduce sink operator encountered
    * @param opProcCtx
    @@ -54,15 +55,15 @@ public class GenMRRedSink1 implements No
    ReduceSinkOperator op = (ReduceSinkOperator) nd;
    GenMRProcContext ctx = (GenMRProcContext) opProcCtx;

    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
    .getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(stack.get(stack.size() - 2));
    Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
    MapredWork currPlan = (MapredWork) currTask.getWork();
    - Operator<? extends Serializable> currTopOp = mapredCtx.getCurrTopOp();
    + Operator<? extends OperatorDesc> currTopOp = mapredCtx.getCurrTopOp();
    String currAliasId = mapredCtx.getCurrAliasId();
    - Operator<? extends Serializable> reducer = op.getChildOperators().get(0);
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = ctx
    + Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap = ctx
    .getOpTaskMap();
    Task<? extends Serializable> opMapTask = opTaskMap.get(reducer);


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java Wed Aug 29 17:43:59 2012
    @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
    import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
    import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Processor for the rule - reduce sink followed by reduce sink.
    @@ -41,7 +42,7 @@ public class GenMRRedSink2 implements No

    /**
    * Reduce Scan encountered.
    - *
    + *
    * @param nd
    * the reduce sink operator encountered
    * @param opProcCtx
    @@ -52,14 +53,14 @@ public class GenMRRedSink2 implements No
    ReduceSinkOperator op = (ReduceSinkOperator) nd;
    GenMRProcContext ctx = (GenMRProcContext) opProcCtx;

    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
    .getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(0));
    Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
    - Operator<? extends Serializable> currTopOp = mapredCtx.getCurrTopOp();
    + Operator<? extends OperatorDesc> currTopOp = mapredCtx.getCurrTopOp();
    String currAliasId = mapredCtx.getCurrAliasId();
    - Operator<? extends Serializable> reducer = op.getChildOperators().get(0);
    - Map<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = ctx
    + Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
    + Map<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap = ctx
    .getOpTaskMap();
    Task<? extends Serializable> opMapTask = opTaskMap.get(reducer);


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java Wed Aug 29 17:43:59 2012
    @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
    import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Processor for the rule - union followed by reduce sink.
    @@ -56,8 +57,8 @@ public class GenMRRedSink3 implements No

    // union consisted on a bunch of map-reduce jobs, and it has been split at
    // the union
    - Operator<? extends Serializable> reducer = op.getChildOperators().get(0);
    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
    + Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
    .getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(ctx.getCurrUnionOp());

    @@ -70,7 +71,7 @@ public class GenMRRedSink3 implements No


    MapredWork plan = (MapredWork) unionTask.getWork();
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = ctx
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap = ctx
    .getOpTaskMap();
    Task<? extends Serializable> reducerTask = opTaskMap.get(reducer);


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink4.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink4.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink4.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink4.java Wed Aug 29 17:43:59 2012
    @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
    import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Processor for the rule - map join followed by reduce sink.
    @@ -43,7 +44,7 @@ public class GenMRRedSink4 implements No

    /**
    * Reduce Scan encountered.
    - *
    + *
    * @param nd
    * the reduce sink operator encountered
    * @param opProcCtx
    @@ -58,13 +59,13 @@ public class GenMRRedSink4 implements No

    // map-join consisted on a bunch of map-only jobs, and it has been split
    // after the mapjoin
    - Operator<? extends Serializable> reducer = op.getChildOperators().get(0);
    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
    + Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
    .getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(0));
    Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
    MapredWork plan = (MapredWork) currTask.getWork();
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = ctx
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap = ctx
    .getOpTaskMap();
    Task<? extends Serializable> opMapTask = opTaskMap.get(reducer);


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java Wed Aug 29 17:43:59 2012
    @@ -41,6 +41,7 @@ import org.apache.hadoop.hive.ql.parse.P
    import org.apache.hadoop.hive.ql.parse.QBParseInfo;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.StatsWork;
    /**
    * Processor for the rule - table scan.
    @@ -62,17 +63,17 @@ public class GenMRTableScan1 implements
    TableScanOperator op = (TableScanOperator) nd;
    GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
    ParseContext parseCtx = ctx.getParseCtx();
    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx();
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx();

    // create a dummy MapReduce task
    MapredWork currWork = GenMapRedUtils.getMapRedWork(parseCtx);
    Task<? extends Serializable> currTask = TaskFactory.get(currWork, parseCtx.getConf());
    - Operator<? extends Serializable> currTopOp = op;
    + Operator<? extends OperatorDesc> currTopOp = op;
    ctx.setCurrTask(currTask);
    ctx.setCurrTopOp(currTopOp);

    for (String alias : parseCtx.getTopOps().keySet()) {
    - Operator<? extends Serializable> currOp = parseCtx.getTopOps().get(alias);
    + Operator<? extends OperatorDesc> currOp = parseCtx.getTopOps().get(alias);
    if (currOp == op) {
    String currAliasId = alias;
    ctx.setCurrAliasId(currAliasId);

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java Wed Aug 29 17:43:59 2012
    @@ -39,13 +39,14 @@ import org.apache.hadoop.hive.ql.optimiz
    import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRUnionCtx;
    import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
    import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
    -import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcFactory;
    import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext.UnionParseContext;
    +import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcFactory;
    import org.apache.hadoop.hive.ql.parse.ParseContext;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.TableDesc;
    @@ -89,7 +90,7 @@ public class GenMRUnion1 implements Node
    }
    else {
    ctx.getMapCurrCtx().put(
    - (Operator<? extends Serializable>) union,
    + (Operator<? extends OperatorDesc>) union,
    new GenMapRedCtx(ctx.getCurrTask(), ctx.getCurrTopOp(),
    ctx.getCurrAliasId()));
    }
    @@ -127,8 +128,8 @@ public class GenMRUnion1 implements Node
    * @param uCtxTask
    */
    private void processSubQueryUnionCreateIntermediate(
    - Operator<? extends Serializable> parent,
    - Operator<? extends Serializable> child,
    + Operator<? extends OperatorDesc> parent,
    + Operator<? extends OperatorDesc> child,
    Task<? extends Serializable> uTask, GenMRProcContext ctx,
    GenMRUnionCtx uCtxTask) {
    ParseContext parseCtx = ctx.getParseCtx();
    @@ -141,21 +142,23 @@ public class GenMRUnion1 implements Node
    String taskTmpDir = baseCtx.getMRTmpFileURI();

    // Create a file sink operator for this file name
    - Operator<? extends Serializable> fs_op = OperatorFactory.get(
    + Operator<? extends OperatorDesc> fs_op = OperatorFactory.get(
    new FileSinkDesc(taskTmpDir, tt_desc, parseCtx.getConf().getBoolVar(
    HiveConf.ConfVars.COMPRESSINTERMEDIATE)), parent.getSchema());

    assert parent.getChildOperators().size() == 1;
    parent.getChildOperators().set(0, fs_op);

    - List<Operator<? extends Serializable>> parentOpList = new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> parentOpList =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    parentOpList.add(parent);
    fs_op.setParentOperators(parentOpList);

    // Create a dummy table scan operator
    - Operator<? extends Serializable> ts_op = OperatorFactory.get(
    + Operator<? extends OperatorDesc> ts_op = OperatorFactory.get(
    new TableScanDesc(), parent.getSchema());
    - List<Operator<? extends Serializable>> childOpList = new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> childOpList =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    childOpList.add(child);
    ts_op.setChildOperators(childOpList);
    child.replaceParent(parent, ts_op);
    @@ -199,8 +202,8 @@ public class GenMRUnion1 implements Node
    Task<? extends Serializable> uTask = uCtxTask.getUTask();
    MapredWork plan = (MapredWork) uTask.getWork();
    ctx.setCurrTask(uTask);
    - List<Operator<? extends Serializable>> seenOps = ctx.getSeenOps();
    - Operator<? extends Serializable> topOp = ctx.getCurrTopOp();
    + List<Operator<? extends OperatorDesc>> seenOps = ctx.getSeenOps();
    + Operator<? extends OperatorDesc> topOp = ctx.getCurrTopOp();
    if (!seenOps.contains(topOp) && topOp != null) {
    seenOps.add(topOp);
    GenMapRedUtils.setTaskPlan(ctx.getCurrAliasId(), ctx
    @@ -247,7 +250,7 @@ public class GenMRUnion1 implements Node

    // Map-only subqueries can be optimized in future to not write to a file in
    // future
    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx();
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx.getMapCurrCtx();

    UnionParseContext uPrsCtx = uCtx.getUnionParseContext(union);

    @@ -305,7 +308,7 @@ public class GenMRUnion1 implements Node

    ctx.setCurrTask(uTask);

    - mapCurrCtx.put((Operator<? extends Serializable>) nd,
    + mapCurrCtx.put((Operator<? extends OperatorDesc>) nd,
    new GenMapRedCtx(ctx.getCurrTask(), null, null));

    return null;

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Wed Aug 29 17:43:59 2012
    @@ -61,15 +61,16 @@ import org.apache.hadoop.hive.ql.parse.S
    import org.apache.hadoop.hive.ql.plan.BucketMapJoinContext;
    import org.apache.hadoop.hive.ql.plan.FetchWork;
    import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
    +import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
    import org.apache.hadoop.hive.ql.plan.TableDesc;
    import org.apache.hadoop.hive.ql.plan.TableScanDesc;
    -import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;

    /**
    * General utility common functions for the Processor to convert operator into
    @@ -92,14 +93,15 @@ public final class GenMapRedUtils {
    */
    public static void initPlan(ReduceSinkOperator op, GenMRProcContext opProcCtx)
    throws SemanticException {
    - Operator<? extends Serializable> reducer = op.getChildOperators().get(0);
    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = opProcCtx.getMapCurrCtx();
    + Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx =
    + opProcCtx.getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(0));
    Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
    MapredWork plan = (MapredWork) currTask.getWork();
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap =
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
    opProcCtx.getOpTaskMap();
    - Operator<? extends Serializable> currTopOp = opProcCtx.getCurrTopOp();
    + Operator<? extends OperatorDesc> currTopOp = opProcCtx.getCurrTopOp();

    opTaskMap.put(reducer, currTask);
    plan.setReducer(reducer);
    @@ -117,7 +119,7 @@ public final class GenMapRedUtils {
    }

    assert currTopOp != null;
    - List<Operator<? extends Serializable>> seenOps = opProcCtx.getSeenOps();
    + List<Operator<? extends OperatorDesc>> seenOps = opProcCtx.getSeenOps();
    String currAliasId = opProcCtx.getCurrAliasId();

    if (!seenOps.contains(currTopOp)) {
    @@ -134,8 +136,9 @@ public final class GenMapRedUtils {
    }

    public static void initMapJoinPlan(
    - Operator<? extends Serializable> op, GenMRProcContext ctx,
    - boolean readInputMapJoin, boolean readInputUnion, boolean setReducer, int pos) throws SemanticException {
    + Operator<? extends OperatorDesc> op, GenMRProcContext ctx,
    + boolean readInputMapJoin, boolean readInputUnion, boolean setReducer, int pos)
    + throws SemanticException {
    initMapJoinPlan(op, ctx, readInputMapJoin, readInputUnion, setReducer, pos, false);
    }

    @@ -149,20 +152,21 @@ public final class GenMapRedUtils {
    * @param pos
    * position of the parent
    */
    - public static void initMapJoinPlan(Operator<? extends Serializable> op,
    + public static void initMapJoinPlan(Operator<? extends OperatorDesc> op,
    GenMRProcContext opProcCtx, boolean readInputMapJoin,
    boolean readInputUnion, boolean setReducer, int pos, boolean createLocalPlan)
    throws SemanticException {
    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = opProcCtx.getMapCurrCtx();
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx =
    + opProcCtx.getMapCurrCtx();
    assert (((pos == -1) && (readInputMapJoin)) || (pos != -1));
    int parentPos = (pos == -1) ? 0 : pos;
    GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(
    parentPos));
    Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
    MapredWork plan = (MapredWork) currTask.getWork();
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap =
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
    opProcCtx.getOpTaskMap();
    - Operator<? extends Serializable> currTopOp = opProcCtx.getCurrTopOp();
    + Operator<? extends OperatorDesc> currTopOp = opProcCtx.getCurrTopOp();

    // The mapjoin has already been encountered. Some context must be stored
    // about that
    @@ -173,7 +177,7 @@ public final class GenMapRedUtils {
    false : true;

    if (setReducer) {
    - Operator<? extends Serializable> reducer = op.getChildOperators().get(0);
    + Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
    plan.setReducer(reducer);
    opTaskMap.put(reducer, currTask);
    if (reducer.getClass() == JoinOperator.class) {
    @@ -189,7 +193,7 @@ public final class GenMapRedUtils {
    GenMRMapJoinCtx mjCtx = opProcCtx.getMapJoinCtx(currMapJoinOp);
    String taskTmpDir;
    TableDesc tt_desc;
    - Operator<? extends Serializable> rootOp;
    + Operator<? extends OperatorDesc> rootOp;

    if (mjCtx.getOldMapJoin() == null || setReducer) {
    taskTmpDir = mjCtx.getTaskTmpDir();
    @@ -222,7 +226,7 @@ public final class GenMapRedUtils {
    }

    assert currTopOp != null;
    - List<Operator<? extends Serializable>> seenOps = opProcCtx.getSeenOps();
    + List<Operator<? extends OperatorDesc>> seenOps = opProcCtx.getSeenOps();
    String currAliasId = opProcCtx.getCurrAliasId();

    seenOps.add(currTopOp);
    @@ -249,7 +253,7 @@ public final class GenMapRedUtils {
    }
    if (localPlan == null && createLocalPlan) {
    localPlan = new MapredLocalWork(
    - new LinkedHashMap<String, Operator<? extends Serializable>>(),
    + new LinkedHashMap<String, Operator<? extends OperatorDesc>>(),
    new LinkedHashMap<String, FetchWork>());
    }
    } else {
    @@ -298,10 +302,10 @@ public final class GenMapRedUtils {
    public static void initUnionPlan(ReduceSinkOperator op,
    GenMRProcContext opProcCtx,
    Task<? extends Serializable> unionTask) throws SemanticException {
    - Operator<? extends Serializable> reducer = op.getChildOperators().get(0);
    + Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);

    MapredWork plan = (MapredWork) unionTask.getWork();
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap =
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
    opProcCtx.getOpTaskMap();

    opTaskMap.put(reducer, unionTask);
    @@ -320,10 +324,10 @@ public final class GenMapRedUtils {
    private static void setUnionPlan(GenMRProcContext opProcCtx,
    boolean local, MapredWork plan, GenMRUnionCtx uCtx,
    boolean mergeTask) throws SemanticException {
    - Operator<? extends Serializable> currTopOp = opProcCtx.getCurrTopOp();
    + Operator<? extends OperatorDesc> currTopOp = opProcCtx.getCurrTopOp();

    if (currTopOp != null) {
    - List<Operator<? extends Serializable>> seenOps = opProcCtx.getSeenOps();
    + List<Operator<? extends OperatorDesc>> seenOps = opProcCtx.getSeenOps();
    String currAliasId = opProcCtx.getCurrAliasId();
    if (!seenOps.contains(currTopOp) || mergeTask) {
    seenOps.add(currTopOp);
    @@ -340,7 +344,7 @@ public final class GenMapRedUtils {
    int size = taskTmpDirLst.size();
    assert local == false;

    - List<Operator<? extends Serializable>> topOperators =
    + List<Operator<? extends OperatorDesc>> topOperators =
    uCtx.getListTopOperators();

    for (int pos = 0; pos < size; pos++) {
    @@ -422,7 +426,7 @@ public final class GenMapRedUtils {
    opProcCtx.setCurrTask(existingTask);
    }

    - public static void joinPlan(Operator<? extends Serializable> op,
    + public static void joinPlan(Operator<? extends OperatorDesc> op,
    Task<? extends Serializable> oldTask, Task<? extends Serializable> task,
    GenMRProcContext opProcCtx, int pos, boolean split,
    boolean readMapJoinData, boolean readUnionData) throws SemanticException {
    @@ -443,14 +447,14 @@ public final class GenMapRedUtils {
    * @param pos
    * position of the parent in the stack
    */
    - public static void joinPlan(Operator<? extends Serializable> op,
    + public static void joinPlan(Operator<? extends OperatorDesc> op,
    Task<? extends Serializable> oldTask, Task<? extends Serializable> task,
    GenMRProcContext opProcCtx, int pos, boolean split,
    boolean readMapJoinData, boolean readUnionData, boolean createLocalWork)
    throws SemanticException {
    Task<? extends Serializable> currTask = task;
    MapredWork plan = (MapredWork) currTask.getWork();
    - Operator<? extends Serializable> currTopOp = opProcCtx.getCurrTopOp();
    + Operator<? extends OperatorDesc> currTopOp = opProcCtx.getCurrTopOp();
    List<Task<? extends Serializable>> parTasks = null;

    // terminate the old task and make current task dependent on it
    @@ -471,7 +475,7 @@ public final class GenMapRedUtils {
    }

    if (currTopOp != null) {
    - List<Operator<? extends Serializable>> seenOps = opProcCtx.getSeenOps();
    + List<Operator<? extends OperatorDesc>> seenOps = opProcCtx.getSeenOps();
    String currAliasId = opProcCtx.getCurrAliasId();

    if (!seenOps.contains(currTopOp)) {
    @@ -500,7 +504,7 @@ public final class GenMapRedUtils {
    AbstractMapJoinOperator<? extends MapJoinDesc> oldMapJoin = mjCtx.getOldMapJoin();
    String taskTmpDir = null;
    TableDesc tt_desc = null;
    - Operator<? extends Serializable> rootOp = null;
    + Operator<? extends OperatorDesc> rootOp = null;

    boolean local = ((pos == -1) || (pos == (mjOp.getConf())
    .getPosBigTable())) ? false : true;
    @@ -552,7 +556,7 @@ public final class GenMapRedUtils {
    MapredWork cplan = getMapRedWork(parseCtx);
    Task<? extends Serializable> redTask = TaskFactory.get(cplan, parseCtx
    .getConf());
    - Operator<? extends Serializable> reducer = op.getChildOperators().get(0);
    + Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);

    // Add the reducer
    cplan.setReducer(reducer);
    @@ -560,7 +564,7 @@ public final class GenMapRedUtils {

    cplan.setNumReduceTasks(new Integer(desc.getNumReducers()));

    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap =
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
    opProcCtx.getOpTaskMap();
    opTaskMap.put(reducer, redTask);
    Task<? extends Serializable> currTask = opProcCtx.getCurrTask();
    @@ -584,7 +588,7 @@ public final class GenMapRedUtils {
    * processing context
    */
    public static void setTaskPlan(String alias_id,
    - Operator<? extends Serializable> topOp, MapredWork plan, boolean local,
    + Operator<? extends OperatorDesc> topOp, MapredWork plan, boolean local,
    GenMRProcContext opProcCtx) throws SemanticException {
    setTaskPlan(alias_id, topOp, plan, local, opProcCtx, null);
    }
    @@ -606,7 +610,7 @@ public final class GenMapRedUtils {
    * pruned partition list. If it is null it will be computed on-the-fly.
    */
    public static void setTaskPlan(String alias_id,
    - Operator<? extends Serializable> topOp, MapredWork plan, boolean local,
    + Operator<? extends OperatorDesc> topOp, MapredWork plan, boolean local,
    GenMRProcContext opProcCtx, PrunedPartitionList pList) throws SemanticException {
    ParseContext parseCtx = opProcCtx.getParseCtx();
    Set<ReadEntity> inputs = opProcCtx.getInputs();
    @@ -810,7 +814,7 @@ public final class GenMapRedUtils {
    MapredLocalWork localPlan = plan.getMapLocalWork();
    if (localPlan == null) {
    localPlan = new MapredLocalWork(
    - new LinkedHashMap<String, Operator<? extends Serializable>>(),
    + new LinkedHashMap<String, Operator<? extends OperatorDesc>>(),
    new LinkedHashMap<String, FetchWork>());
    }

    @@ -845,7 +849,7 @@ public final class GenMapRedUtils {
    * table descriptor
    */
    public static void setTaskPlan(String path, String alias,
    - Operator<? extends Serializable> topOp, MapredWork plan, boolean local,
    + Operator<? extends OperatorDesc> topOp, MapredWork plan, boolean local,
    TableDesc tt_desc) throws SemanticException {

    if(path == null || alias == null) {
    @@ -864,7 +868,7 @@ public final class GenMapRedUtils {
    MapredLocalWork localPlan = plan.getMapLocalWork();
    if (localPlan == null) {
    localPlan = new MapredLocalWork(
    - new LinkedHashMap<String, Operator<? extends Serializable>>(),
    + new LinkedHashMap<String, Operator<? extends OperatorDesc>>(),
    new LinkedHashMap<String, FetchWork>());
    }

    @@ -885,7 +889,7 @@ public final class GenMapRedUtils {
    * current top operator in the path
    */
    public static void setKeyAndValueDesc(MapredWork plan,
    - Operator<? extends Serializable> topOp) {
    + Operator<? extends OperatorDesc> topOp) {
    if (topOp == null) {
    return;
    }
    @@ -900,9 +904,9 @@ public final class GenMapRedUtils {
    }
    tagToSchema.set(tag, rs.getConf().getValueSerializeInfo());
    } else {
    - List<Operator<? extends Serializable>> children = topOp.getChildOperators();
    + List<Operator<? extends OperatorDesc>> children = topOp.getChildOperators();
    if (children != null) {
    - for (Operator<? extends Serializable> op : children) {
    + for (Operator<? extends OperatorDesc> op : children) {
    setKeyAndValueDesc(plan, op);
    }
    }
    @@ -935,7 +939,7 @@ public final class GenMapRedUtils {
    work.setMapperCannotSpanPartns(mapperCannotSpanPartns);
    work.setPathToAliases(new LinkedHashMap<String, ArrayList<String>>());
    work.setPathToPartitionInfo(new LinkedHashMap<String, PartitionDesc>());
    - work.setAliasToWork(new LinkedHashMap<String, Operator<? extends Serializable>>());
    + work.setAliasToWork(new LinkedHashMap<String, Operator<? extends OperatorDesc>>());
    work.setTagToValueDesc(new ArrayList<TableDesc>());
    work.setReducer(null);
    work.setHadoopSupportsSplittable(
    @@ -954,8 +958,8 @@ public final class GenMapRedUtils {
    * parse context
    */
    @SuppressWarnings("nls")
    - private static Operator<? extends Serializable> putOpInsertMap(
    - Operator<? extends Serializable> op, RowResolver rr, ParseContext parseCtx) {
    + public static Operator<? extends OperatorDesc> putOpInsertMap(
    + Operator<? extends OperatorDesc> op, RowResolver rr, ParseContext parseCtx) {
    OpParseContext ctx = new OpParseContext(rr);
    parseCtx.getOpParseCtx().put(op, ctx);
    return op;
    @@ -971,12 +975,12 @@ public final class GenMapRedUtils {
    * @param setReducer does the reducer needs to be set
    * @param pos position of the parent
    **/
    - public static void splitTasks(Operator<? extends Serializable> op,
    + public static void splitTasks(Operator<? extends OperatorDesc> op,
    Task<? extends Serializable> parentTask,
    Task<? extends Serializable> childTask, GenMRProcContext opProcCtx,
    boolean setReducer, boolean local, int posn) throws SemanticException {
    childTask.getWork();
    - Operator<? extends Serializable> currTopOp = opProcCtx.getCurrTopOp();
    + Operator<? extends OperatorDesc> currTopOp = opProcCtx.getCurrTopOp();

    ParseContext parseCtx = opProcCtx.getParseCtx();
    parentTask.addDependentTask(childTask);
    @@ -992,7 +996,7 @@ public final class GenMapRedUtils {
    Context baseCtx = parseCtx.getContext();
    String taskTmpDir = baseCtx.getMRTmpFileURI();

    - Operator<? extends Serializable> parent = op.getParentOperators().get(posn);
    + Operator<? extends OperatorDesc> parent = op.getParentOperators().get(posn);
    TableDesc tt_desc = PlanUtils.getIntermediateFileTableDesc(PlanUtils
    .getFieldSchemasFromRowSchema(parent.getSchema(), "temporarycol"));

    @@ -1007,11 +1011,11 @@ public final class GenMapRedUtils {
    desc.setCompressType(parseCtx.getConf().getVar(
    HiveConf.ConfVars.COMPRESSINTERMEDIATETYPE));
    }
    - Operator<? extends Serializable> fs_op = putOpInsertMap(OperatorFactory
    + Operator<? extends OperatorDesc> fs_op = putOpInsertMap(OperatorFactory
    .get(desc, parent.getSchema()), null, parseCtx);

    // replace the reduce child with this operator
    - List<Operator<? extends Serializable>> childOpList = parent
    + List<Operator<? extends OperatorDesc>> childOpList = parent
    .getChildOperators();
    for (int pos = 0; pos < childOpList.size(); pos++) {
    if (childOpList.get(pos) == op) {
    @@ -1020,30 +1024,31 @@ public final class GenMapRedUtils {
    }
    }

    - List<Operator<? extends Serializable>> parentOpList =
    - new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> parentOpList =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    parentOpList.add(parent);
    fs_op.setParentOperators(parentOpList);

    // create a dummy tableScan operator on top of op
    // TableScanOperator is implicitly created here for each MapOperator
    RowResolver rowResolver = opProcCtx.getParseCtx().getOpParseCtx().get(parent).getRowResolver();
    - Operator<? extends Serializable> ts_op = putOpInsertMap(OperatorFactory
    + Operator<? extends OperatorDesc> ts_op = putOpInsertMap(OperatorFactory
    .get(TableScanDesc.class, parent.getSchema()), rowResolver, parseCtx);

    - childOpList = new ArrayList<Operator<? extends Serializable>>();
    + childOpList = new ArrayList<Operator<? extends OperatorDesc>>();
    childOpList.add(op);
    ts_op.setChildOperators(childOpList);
    op.getParentOperators().set(posn, ts_op);

    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = opProcCtx.getMapCurrCtx();
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx =
    + opProcCtx.getMapCurrCtx();
    mapCurrCtx.put(ts_op, new GenMapRedCtx(childTask, null, null));

    String streamDesc = taskTmpDir;
    MapredWork cplan = (MapredWork) childTask.getWork();

    if (setReducer) {
    - Operator<? extends Serializable> reducer = op.getChildOperators().get(0);
    + Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);

    if (reducer.getClass() == JoinOperator.class) {
    String origStreamDesc;

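Two things happen in the `GenMapRedUtils` hunks above: the generic bound is retyped throughout, and `putOpInsertMap` is widened from `private` to `public` so code outside this class can register an operator's row resolver in the parse context. The payoff of the retyping is that `getConf()` on an operator can expose descriptor-level members without casting. A self-contained toy illustrating that benefit (all class names here are hypothetical stand-ins, not the Hive classes):

    import java.io.Serializable;

    // Hypothetical miniature of the new type hierarchy.
    interface OperatorDesc extends Serializable, Cloneable {
      Object clone() throws CloneNotSupportedException;
    }

    class FileSinkDesc implements OperatorDesc {
      private final String dirName;
      FileSinkDesc(String dirName) { this.dirName = dirName; }
      String getDirName() { return dirName; }
      @Override public Object clone() { return new FileSinkDesc(dirName); }
    }

    class Operator<T extends OperatorDesc> {
      private final T conf;
      Operator(T conf) { this.conf = conf; }
      T getConf() { return conf; }
    }

    public class BoundDemo {
      public static void main(String[] args) throws Exception {
        Operator<? extends OperatorDesc> op =
            new Operator<FileSinkDesc>(new FileSinkDesc("/tmp/out"));
        // Under the old Serializable bound, getConf() surfaced no
        // descriptor methods at all; now clone() is callable directly.
        OperatorDesc copy = (OperatorDesc) op.getConf().clone();
        System.out.println(copy instanceof FileSinkDesc); // prints true
      }
    }
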
    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GlobalLimitOptimizer.java Wed Aug 29 17:43:59 2012
    @@ -18,6 +18,8 @@

    package org.apache.hadoop.hive.ql.optimizer;

    +import java.util.Map;
    +
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.hive.conf.HiveConf;
    @@ -35,9 +37,7 @@ import org.apache.hadoop.hive.ql.parse.Q
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.parse.SplitSample;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    -
    -import java.io.Serializable;
    -import java.util.Map;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * This optimizer is used to reduce the input size for the query for queries which are
    @@ -58,7 +58,7 @@ public class GlobalLimitOptimizer implem

    public ParseContext transform(ParseContext pctx) throws SemanticException {
    Context ctx = pctx.getContext();
    - Map<String, Operator<? extends Serializable>> topOps = pctx.getTopOps();
    + Map<String, Operator<? extends OperatorDesc>> topOps = pctx.getTopOps();
    GlobalLimitCtx globalLimitCtx = pctx.getGlobalLimitCtx();
    Map<TableScanOperator, ExprNodeDesc> opToPartPruner = pctx.getOpToPartPruner();
    Map<TableScanOperator, PrunedPartitionList> opToPartList = pctx.getOpToPartList();

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GroupByOptimizer.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.LinkedList;
    @@ -57,6 +56,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
    import org.apache.hadoop.hive.ql.plan.GroupByDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

    /**
    @@ -175,7 +175,7 @@ public class GroupByOptimizer implements
    }

    for (String table : tblNames) {
    - Operator<? extends Serializable> topOp = pGraphContext.getTopOps().get(
    + Operator<? extends OperatorDesc> topOp = pGraphContext.getTopOps().get(
    table);
    if (topOp == null || (!(topOp instanceof TableScanOperator))) {
    // this is in a sub-query.

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/JoinReorder.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/JoinReorder.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/JoinReorder.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/JoinReorder.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer;

    -import java.io.Serializable;
    import java.util.HashSet;
    import java.util.Set;

    @@ -29,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.Ta
    import org.apache.hadoop.hive.ql.parse.ParseContext;
    import org.apache.hadoop.hive.ql.parse.QBJoinTree;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Implementation of rule-based join table reordering optimization. User passes
    @@ -42,7 +42,7 @@ public class JoinReorder implements Tran
    * the whole tree is traversed. Possible sizes: 0: the operator and its
    * subtree don't contain any big tables 1: the subtree of the operator
    * contains a big table 2: the operator is a big table
    - *
    + *
    * @param operator
    * The operator which output size is to be estimated
    * @param bigTables
    @@ -50,12 +50,12 @@ public class JoinReorder implements Tran
    * @return The estimated size - 0 (no streamed tables), 1 (streamed tables in
    * subtree) or 2 (a streamed table)
    */
    - private int getOutputSize(Operator<? extends Serializable> operator,
    + private int getOutputSize(Operator<? extends OperatorDesc> operator,
    Set<String> bigTables) {
    // If a join operator contains a big subtree, there is a chance that its
    // output is also big, so the output size is 1 (medium)
    if (operator instanceof JoinOperator) {
    - for (Operator<? extends Serializable> o : operator.getParentOperators()) {
    + for (Operator<? extends OperatorDesc> o : operator.getParentOperators()) {
    if (getOutputSize(o, bigTables) != 0) {
    return 1;
    }
    @@ -74,7 +74,7 @@ public class JoinReorder implements Tran
    // the biggest output from a parent
    int maxSize = 0;
    if (operator.getParentOperators() != null) {
    - for (Operator<? extends Serializable> o : operator.getParentOperators()) {
    + for (Operator<? extends OperatorDesc> o : operator.getParentOperators()) {
    int current = getOutputSize(o, bigTables);
    if (current > maxSize) {
    maxSize = current;
    @@ -87,7 +87,7 @@ public class JoinReorder implements Tran

    /**
    * Find all big tables from STREAMTABLE hints.
    - *
    + *
    * @param joinCtx
    * The join context
    * @return Set of all big tables
    @@ -107,7 +107,7 @@ public class JoinReorder implements Tran
    /**
    * Reorder the tables in a join operator appropriately (by reordering the tags
    * of the reduces sinks).
    - *
    + *
    * @param joinOp
    * The join operator to be processed
    * @param bigTables
    @@ -148,7 +148,7 @@ public class JoinReorder implements Tran
    * Transform the query tree. For each join, check which reduce sink will
    * output the biggest result (based on STREAMTABLE hints) and give it the
    * biggest tag so that it gets streamed.
    - *
    + *
    * @param pactx
    * current parse context
    */

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java Wed Aug 29 17:43:59 2012
    @@ -45,6 +45,7 @@ import org.apache.hadoop.hive.ql.parse.S
    import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    @@ -57,9 +58,9 @@ public final class MapJoinFactory {
    int pos = 0;
    int size = stack.size();
    assert size >= 2 && stack.get(size - 1) == op;
    - Operator<? extends Serializable> parent = (Operator<? extends Serializable>) stack
    - .get(size - 2);
    - List<Operator<? extends Serializable>> parOp = op.getParentOperators();
    + Operator<? extends OperatorDesc> parent =
    + (Operator<? extends OperatorDesc>) stack.get(size - 2);
    + List<Operator<? extends OperatorDesc>> parOp = op.getParentOperators();
    pos = parOp.indexOf(parent);
    assert pos < parOp.size();
    return pos;
    @@ -72,24 +73,24 @@ public final class MapJoinFactory {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    - Object... nodeOutputs) throws SemanticException {
    + Object... nodeOutputs) throws SemanticException {
    AbstractMapJoinOperator<MapJoinDesc> mapJoin = (AbstractMapJoinOperator<MapJoinDesc>) nd;
    GenMRProcContext ctx = (GenMRProcContext) procCtx;

    // find the branch on which this processor was invoked
    int pos = getPositionParent(mapJoin, stack);

    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
    .getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(mapJoin.getParentOperators().get(
    pos));
    Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
    MapredWork currPlan = (MapredWork) currTask.getWork();
    - Operator<? extends Serializable> currTopOp = mapredCtx.getCurrTopOp();
    + Operator<? extends OperatorDesc> currTopOp = mapredCtx.getCurrTopOp();
    String currAliasId = mapredCtx.getCurrAliasId();
    - Operator<? extends Serializable> reducer = mapJoin;
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = ctx
    - .getOpTaskMap();
    + Operator<? extends OperatorDesc> reducer = mapJoin;
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
    + ctx.getOpTaskMap();
    Task<? extends Serializable> opMapTask = opTaskMap.get(reducer);

    ctx.setCurrTopOp(currTopOp);
    @@ -138,11 +139,11 @@ public final class MapJoinFactory {
    : true;

    GenMapRedUtils.splitTasks(mapJoin, currTask, redTask, opProcCtx, false,
    - local, pos);
    + local, pos);

    currTask = opProcCtx.getCurrTask();
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = opProcCtx
    - .getOpTaskMap();
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
    + opProcCtx.getOpTaskMap();
    Task<? extends Serializable> opMapTask = opTaskMap.get(mapJoin);

    // If the plan for this reducer does not exist, initialize the plan
    @@ -195,9 +196,9 @@ public final class MapJoinFactory {
    if (listMapJoinOps.contains(mapJoin)) {
    ctx.setCurrAliasId(null);
    ctx.setCurrTopOp(null);
    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
    .getMapCurrCtx();
    - mapCurrCtx.put((Operator<? extends Serializable>) nd, new GenMapRedCtx(
    + mapCurrCtx.put((Operator<? extends OperatorDesc>) nd, new GenMapRedCtx(
    ctx.getCurrTask(), null, null));
    return null;
    }
    @@ -230,14 +231,15 @@ public final class MapJoinFactory {
    sel.setParentOperators(null);

    // Create a file sink operator for this file name
    - Operator<? extends Serializable> fs_op = OperatorFactory.get(
    + Operator<? extends OperatorDesc> fs_op = OperatorFactory.get(
    new FileSinkDesc(taskTmpDir, tt_desc, parseCtx.getConf().getBoolVar(
    HiveConf.ConfVars.COMPRESSINTERMEDIATE)), mapJoin.getSchema());

    assert mapJoin.getChildOperators().size() == 1;
    mapJoin.getChildOperators().set(0, fs_op);

    - List<Operator<? extends Serializable>> parentOpList = new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> parentOpList =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    parentOpList.add(mapJoin);
    fs_op.setParentOperators(parentOpList);

    @@ -247,9 +249,9 @@ public final class MapJoinFactory {
    ctx.setCurrAliasId(null);
    ctx.setCurrTopOp(null);

    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
    .getMapCurrCtx();
    - mapCurrCtx.put((Operator<? extends Serializable>) nd, new GenMapRedCtx(
    + mapCurrCtx.put((Operator<? extends OperatorDesc>) nd, new GenMapRedCtx(
    ctx.getCurrTask(), null, null));

    return null;
    @@ -263,8 +265,9 @@ public final class MapJoinFactory {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    - Object... nodeOutputs) throws SemanticException {
    - AbstractMapJoinOperator<? extends MapJoinDesc> mapJoin = (AbstractMapJoinOperator<? extends MapJoinDesc>) nd;
    + Object... nodeOutputs) throws SemanticException {
    + AbstractMapJoinOperator<? extends MapJoinDesc> mapJoin =
    + (AbstractMapJoinOperator<? extends MapJoinDesc>) nd;
    GenMRProcContext ctx = (GenMRProcContext) procCtx;

    ctx.getParseCtx();
    @@ -282,16 +285,16 @@ public final class MapJoinFactory {
    // find the branch on which this processor was invoked
    int pos = getPositionParent(mapJoin, stack);

    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
    .getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(mapJoin.getParentOperators().get(
    pos));
    Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
    MapredWork currPlan = (MapredWork) currTask.getWork();
    mapredCtx.getCurrAliasId();
    - Operator<? extends Serializable> reducer = mapJoin;
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = ctx
    - .getOpTaskMap();
    + Operator<? extends OperatorDesc> reducer = mapJoin;
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
    + ctx.getOpTaskMap();
    Task<? extends Serializable> opMapTask = opTaskMap.get(reducer);

    ctx.setCurrTask(currTask);
    @@ -321,7 +324,7 @@ public final class MapJoinFactory {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    - Object... nodeOutputs) throws SemanticException {
    + Object... nodeOutputs) throws SemanticException {
    GenMRProcContext ctx = (GenMRProcContext) procCtx;

    ParseContext parseCtx = ctx.getParseCtx();
    @@ -341,15 +344,15 @@ public final class MapJoinFactory {
    // find the branch on which this processor was invoked
    int pos = getPositionParent(mapJoin, stack);

    - Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
    + Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
    .getMapCurrCtx();
    GenMapRedCtx mapredCtx = mapCurrCtx.get(mapJoin.getParentOperators().get(
    pos));
    Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
    MapredWork currPlan = (MapredWork) currTask.getWork();
    - Operator<? extends Serializable> reducer = mapJoin;
    - HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = ctx
    - .getOpTaskMap();
    + Operator<? extends OperatorDesc> reducer = mapJoin;
    + HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
    + ctx.getOpTaskMap();
    Task<? extends Serializable> opMapTask = opTaskMap.get(reducer);

    // union result cannot be a map table

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.HashSet;
    @@ -69,6 +68,7 @@ import org.apache.hadoop.hive.ql.plan.Jo
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
    @@ -97,8 +97,8 @@ public class MapJoinProcessor implements
    }

    @SuppressWarnings("nls")
    - private Operator<? extends Serializable> putOpInsertMap(Operator<? extends Serializable> op,
    - RowResolver rr) {
    + private Operator<? extends OperatorDesc>
    + putOpInsertMap(Operator<? extends OperatorDesc> op, RowResolver rr) {
    OpParseContext ctx = new OpParseContext(rr);
    pGraphContext.getOpParseCtx().put(op, ctx);
    return op;
    @@ -120,18 +120,18 @@ public class MapJoinProcessor implements

    // create a new MapredLocalWork
    MapredLocalWork newLocalWork = new MapredLocalWork(
    - new LinkedHashMap<String, Operator<? extends Serializable>>(),
    + new LinkedHashMap<String, Operator<? extends OperatorDesc>>(),
    new LinkedHashMap<String, FetchWork>());

    - for (Map.Entry<String, Operator<? extends Serializable>> entry : newWork.getAliasToWork()
    - .entrySet()) {
    + for (Map.Entry<String, Operator<? extends OperatorDesc>> entry :
    + newWork.getAliasToWork().entrySet()) {
    String alias = entry.getKey();
    - Operator<? extends Serializable> op = entry.getValue();
    + Operator<? extends OperatorDesc> op = entry.getValue();

    // if the table scan is for big table; then skip it
    // tracing down the operator tree from the table scan operator
    - Operator<? extends Serializable> parentOp = op;
    - Operator<? extends Serializable> childOp = op.getChildOperators().get(0);
    + Operator<? extends OperatorDesc> parentOp = op;
    + Operator<? extends OperatorDesc> childOp = op.getChildOperators().get(0);
    while ((childOp != null) && (!childOp.equals(mapJoinOp))) {
    parentOp = childOp;
    assert parentOp.getChildOperators().size() == 1;
    @@ -218,10 +218,10 @@ public class MapJoinProcessor implements
    }

    public static String genMapJoinOpAndLocalWork(MapredWork newWork, JoinOperator op, int mapJoinPos)
    - throws SemanticException {
    + throws SemanticException {
    try {
    - LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtxMap = newWork
    - .getOpParseCtxMap();
    + LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap =
    + newWork.getOpParseCtxMap();
    QBJoinTree newJoinTree = newWork.getJoinTree();
    // generate the map join operator; already checked the map join
    MapJoinOperator newMapJoinOp = MapJoinProcessor.convertMapJoin(opParseCtxMap, op,
    @@ -256,9 +256,9 @@ public class MapJoinProcessor implements
    * @param noCheckOuterJoin
    */
    public static MapJoinOperator convertMapJoin(
    - LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtxMap,
    - JoinOperator op, QBJoinTree joinTree, int mapJoinPos, boolean noCheckOuterJoin)
    - throws SemanticException {
    + LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap,
    + JoinOperator op, QBJoinTree joinTree, int mapJoinPos, boolean noCheckOuterJoin)
    + throws SemanticException {
    // outer join cannot be performed on a table which is being cached
    JoinDesc desc = op.getConf();
    JoinCondDesc[] condns = desc.getConds();
    @@ -279,18 +279,22 @@ public class MapJoinProcessor implements
    // The join outputs a concatenation of all the inputs.
    QBJoinTree leftSrc = joinTree.getJoinSrc();

    - List<Operator<? extends Serializable>> parentOps = op.getParentOperators();
    - List<Operator<? extends Serializable>> newParentOps = new ArrayList<Operator<? extends Serializable>>();
    - List<Operator<? extends Serializable>> oldReduceSinkParentOps = new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> parentOps = op.getParentOperators();
    + List<Operator<? extends OperatorDesc>> newParentOps =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    + List<Operator<? extends OperatorDesc>> oldReduceSinkParentOps =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
    - HashMap<Byte, HashMap<String, ExprNodeDesc>> columnTransfer = new HashMap<Byte, HashMap<String, ExprNodeDesc>>();
    + HashMap<Byte, HashMap<String, ExprNodeDesc>> columnTransfer =
    + new HashMap<Byte, HashMap<String, ExprNodeDesc>>();

    // found a source which is not to be stored in memory
    if (leftSrc != null) {
    // assert mapJoinPos == 0;
    - Operator<? extends Serializable> parentOp = parentOps.get(0);
    + Operator<? extends OperatorDesc> parentOp = parentOps.get(0);
    assert parentOp.getParentOperators().size() == 1;
    - Operator<? extends Serializable> grandParentOp = parentOp.getParentOperators().get(0);
    + Operator<? extends OperatorDesc> grandParentOp =
    + parentOp.getParentOperators().get(0);
    oldReduceSinkParentOps.add(parentOp);
    grandParentOp.removeChild(parentOp);
    newParentOps.add(grandParentOp);
    @@ -300,9 +304,10 @@ public class MapJoinProcessor implements
    // Remove parent reduce-sink operators
    for (String src : joinTree.getBaseSrc()) {
    if (src != null) {
    - Operator<? extends Serializable> parentOp = parentOps.get(pos);
    + Operator<? extends OperatorDesc> parentOp = parentOps.get(pos);
    assert parentOp.getParentOperators().size() == 1;
    - Operator<? extends Serializable> grandParentOp = parentOp.getParentOperators().get(0);
    + Operator<? extends OperatorDesc> grandParentOp =
    + parentOp.getParentOperators().get(0);

    grandParentOp.removeChild(parentOp);
    oldReduceSinkParentOps.add(parentOp);
    @@ -389,7 +394,7 @@ public class MapJoinProcessor implements

    Operator[] newPar = new Operator[newParentOps.size()];
    pos = 0;
    - for (Operator<? extends Serializable> o : newParentOps) {
    + for (Operator<? extends OperatorDesc> o : newParentOps) {
    newPar[pos++] = o;
    }

    @@ -461,8 +466,8 @@ public class MapJoinProcessor implements

    // change the children of the original join operator to point to the map
    // join operator
    - List<Operator<? extends Serializable>> childOps = op.getChildOperators();
    - for (Operator<? extends Serializable> childOp : childOps) {
    + List<Operator<? extends OperatorDesc>> childOps = op.getChildOperators();
    + for (Operator<? extends OperatorDesc> childOp : childOps) {
    childOp.replaceParent(op, mapJoinOp);
    }

    @@ -482,7 +487,7 @@ public class MapJoinProcessor implements
    && HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTBUCKETMAPJOIN);


    - LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtxMap = pctx
    + LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap = pctx
    .getOpParseCtx();
    MapJoinOperator mapJoinOp = convertMapJoin(opParseCtxMap, op, joinTree, mapJoinPos,
    noCheckOuterJoin);
    @@ -577,7 +582,7 @@ public class MapJoinProcessor implements
    }

    private void genSelectPlan(ParseContext pctx, MapJoinOperator input) throws SemanticException {
    - List<Operator<? extends Serializable>> childOps = input.getChildOperators();
    + List<Operator<? extends OperatorDesc>> childOps = input.getChildOperators();
    input.setChildOperators(null);

    // create a dummy select - This select is needed by the walker to split the
    @@ -613,7 +618,7 @@ public class MapJoinProcessor implements

    // Insert the select operator in between.
    sel.setChildOperators(childOps);
    - for (Operator<? extends Serializable> ch : childOps) {
    + for (Operator<? extends OperatorDesc> ch : childOps) {
    ch.replaceParent(input, sel);
    }
    }
    @@ -764,12 +769,12 @@ public class MapJoinProcessor implements
    }

    private Boolean findGrandChildSubqueryMapjoin(MapJoinWalkerCtx ctx, MapJoinOperator mapJoin) {
    - Operator<? extends Serializable> parent = mapJoin;
    + Operator<? extends OperatorDesc> parent = mapJoin;
    while (true) {
    if (parent.getChildOperators() == null || parent.getChildOperators().size() != 1) {
    return null;
    }
    - Operator<? extends Serializable> ch = parent.getChildOperators().get(0);
    + Operator<? extends OperatorDesc> ch = parent.getChildOperators().get(0);
    if (ch instanceof MapJoinOperator) {
    if (!nonSubqueryMapJoin(ctx.getpGraphContext(), (MapJoinOperator) ch, mapJoin)) {
    if (ch.getParentOperators().indexOf(parent) == ((MapJoinOperator) ch).getConf()
    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java Wed Aug 29 17:43:59 2012
    @@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.udf.gen
    *
    */
    @Explain(displayName = "Group By Operator")
    -public class GroupByDesc implements java.io.Serializable {
    +public class GroupByDesc extends AbstractOperatorDesc {
    /**
    * Group-by Mode: COMPLETE: complete 1-phase aggregation: iterate, terminate
    * PARTIAL1: partial aggregation - first phase: iterate, terminatePartial
    @@ -54,9 +54,9 @@ public class GroupByDesc implements java
    private boolean groupKeyNotReductionKey;
    private boolean bucketGroup;

    - private java.util.ArrayList<ExprNodeDesc> keys;
    - private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators;
    - private java.util.ArrayList<java.lang.String> outputColumnNames;
    + private ArrayList<ExprNodeDesc> keys;
    + private ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators;
    + private ArrayList<java.lang.String> outputColumnNames;
    private float groupByMemoryUsage;
    private float memoryThreshold;

    @@ -65,9 +65,9 @@ public class GroupByDesc implements java

    public GroupByDesc(
    final Mode mode,
    - final java.util.ArrayList<java.lang.String> outputColumnNames,
    - final java.util.ArrayList<ExprNodeDesc> keys,
    - final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators,
    + final ArrayList<java.lang.String> outputColumnNames,
    + final ArrayList<ExprNodeDesc> keys,
    + final ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators,
    final boolean groupKeyNotReductionKey,float groupByMemoryUsage, float memoryThreshold) {
    this(mode, outputColumnNames, keys, aggregators, groupKeyNotReductionKey,
    false, groupByMemoryUsage, memoryThreshold);
    @@ -75,9 +75,9 @@ public class GroupByDesc implements java

    public GroupByDesc(
    final Mode mode,
    - final java.util.ArrayList<java.lang.String> outputColumnNames,
    - final java.util.ArrayList<ExprNodeDesc> keys,
    - final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators,
    + final ArrayList<java.lang.String> outputColumnNames,
    + final ArrayList<ExprNodeDesc> keys,
    + final ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators,
    final boolean groupKeyNotReductionKey, final boolean bucketGroup,float groupByMemoryUsage, float memoryThreshold) {
    this.mode = mode;
    this.outputColumnNames = outputColumnNames;
    @@ -120,21 +120,21 @@ public class GroupByDesc implements java
    }

    @Explain(displayName = "keys")
    - public java.util.ArrayList<ExprNodeDesc> getKeys() {
    + public ArrayList<ExprNodeDesc> getKeys() {
    return keys;
    }

    - public void setKeys(final java.util.ArrayList<ExprNodeDesc> keys) {
    + public void setKeys(final ArrayList<ExprNodeDesc> keys) {
    this.keys = keys;
    }

    @Explain(displayName = "outputColumnNames")
    - public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
    + public ArrayList<java.lang.String> getOutputColumnNames() {
    return outputColumnNames;
    }

    public void setOutputColumnNames(
    - java.util.ArrayList<java.lang.String> outputColumnNames) {
    + ArrayList<java.lang.String> outputColumnNames) {
    this.outputColumnNames = outputColumnNames;
    }

    @@ -155,12 +155,12 @@ public class GroupByDesc implements java
    }

    @Explain(displayName = "aggregations")
    - public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> getAggregators() {
    + public ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> getAggregators() {
    return aggregators;
    }

    public void setAggregators(
    - final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators) {
    + final ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators) {
    this.aggregators = aggregators;
    }

    @@ -180,7 +180,7 @@ public class GroupByDesc implements java
    public void setBucketGroup(boolean dataSorted) {
    bucketGroup = dataSorted;
    }
    -
    +
    /**
    * Checks if this grouping is like distinct, which means that all non-distinct grouping
    * columns behave like they were distinct - for example min and max operators.

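The Mode comment in the hunk above pairs each aggregation phase with the UDAF calls it drives: PARTIAL1 iterates raw rows and emits partials, while a final phase merges them. A rough, self-contained sketch of that two-phase split, using a stand-in integer sum rather than Hive's GenericUDAF API:

    // Hypothetical illustration of the PARTIAL1 / final-merge split; not Hive code.
    public class TwoPhaseSumDemo {
      public static void main(String[] args) {
        int[][] mapSplits = { {1, 2}, {3, 4, 5} };
        // PARTIAL1: each map task iterates its rows and emits a partial.
        int[] partials = new int[mapSplits.length];
        for (int m = 0; m < mapSplits.length; m++) {
          for (int v : mapSplits[m]) {
            partials[m] += v;          // iterate
          }                            // terminatePartial -> partials[m]
        }
        // Final phase: the reducer merges partials and terminates.
        int total = 0;
        for (int p : partials) {
          total += p;                  // merge
        }
        System.out.println(total);     // terminate -> 15
      }
    }
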
    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,13 +18,12 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    /**
    * HashTable Dummy Descriptor implementation.
    *
    */
    @Explain(displayName = "HashTable Dummy Operator")
    -public class HashTableDummyDesc implements Serializable {
    +public class HashTableDummyDesc extends AbstractOperatorDesc {
    private TableDesc tbl;

    public TableDesc getTbl() {

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,19 +18,20 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.Arrays;
    +import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    +
    /**
    * Join operator Descriptor implementation.
    *
    */
    @Explain(displayName = "Join Operator")
    -public class JoinDesc implements Serializable {
    +public class JoinDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;
    public static final int INNER_JOIN = 0;
    public static final int LEFT_OUTER_JOIN = 1;
    @@ -87,6 +88,53 @@ public class JoinDesc implements Seriali
    }
    }

    + @Override
    + public Object clone() {
    + JoinDesc ret = new JoinDesc();
    + Map<Byte,List<ExprNodeDesc>> cloneExprs = new HashMap<Byte,List<ExprNodeDesc>>();
    + cloneExprs.putAll(getExprs());
    + ret.setExprs(cloneExprs);
    + Map<Byte,List<ExprNodeDesc>> cloneFilters = new HashMap<Byte,List<ExprNodeDesc>>();
    + cloneFilters.putAll(getFilters());
    + ret.setFilters(cloneFilters);
    + ret.setConds(getConds().clone());
    + ret.setNoOuterJoin(getNoOuterJoin());
    + ret.setNullSafes(getNullSafes());
    + ret.setHandleSkewJoin(handleSkewJoin);
    + ret.setSkewKeyDefinition(getSkewKeyDefinition());
    + ret.setTagOrder(getTagOrder().clone());
    + if (getKeyTableDesc() != null) {
    + ret.setKeyTableDesc((TableDesc) getKeyTableDesc().clone());
    + }
    +
    + if (getBigKeysDirMap() != null) {
    + Map<Byte, String> cloneBigKeysDirMap = new HashMap<Byte, String>();
    + cloneBigKeysDirMap.putAll(getBigKeysDirMap());
    + ret.setBigKeysDirMap(cloneBigKeysDirMap);
    + }
    + if (getSmallKeysDirMap() != null) {
    + Map<Byte, Map<Byte, String>> cloneSmallKeysDirMap = new HashMap<Byte, Map<Byte,String>> ();
    + cloneSmallKeysDirMap.putAll(getSmallKeysDirMap());
    + ret.setSmallKeysDirMap(cloneSmallKeysDirMap);
    + }
    + if (getSkewKeysValuesTables() != null) {
    + Map<Byte, TableDesc> cloneSkewKeysValuesTables = new HashMap<Byte, TableDesc>();
    + cloneSkewKeysValuesTables.putAll(getSkewKeysValuesTables());
    + ret.setSkewKeysValuesTables(cloneSkewKeysValuesTables);
    + }
    + if (getOutputColumnNames() != null) {
    + List<String> cloneOutputColumnNames = new ArrayList<String>();
    + cloneOutputColumnNames.addAll(getOutputColumnNames());
    + ret.setOutputColumnNames(cloneOutputColumnNames);
    + }
    + if (getReversedExprs() != null) {
    + Map<String, Byte> cloneReversedExprs = new HashMap<String, Byte>();
    + cloneReversedExprs.putAll(getReversedExprs());
    + ret.setReversedExprs(cloneReversedExprs);
    + }
    + return ret;
    + }
    +
    public JoinDesc(final Map<Byte, List<ExprNodeDesc>> exprs,
    List<String> outputColumnNames, final boolean noOuterJoin,
    final JoinCondDesc[] conds) {

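Note the copy depth in the clone() added above: the expression and filter maps are copied into fresh HashMaps, but the List<ExprNodeDesc> values those maps hold are shared between the original and the clone. A minimal stand-alone sketch of that semantics, using plain collections rather than the Hive types:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ShallowMapCopyDemo {
      public static void main(String[] args) {
        Map<Byte, List<String>> exprs = new HashMap<Byte, List<String>>();
        List<String> cols = new ArrayList<String>();
        cols.add("key0");
        exprs.put(Byte.valueOf((byte) 0), cols);

        // Same copy style as JoinDesc.clone(): new map, shared value lists.
        Map<Byte, List<String>> copy = new HashMap<Byte, List<String>>();
        copy.putAll(exprs);

        copy.get(Byte.valueOf((byte) 0)).add("extra");
        // Prints [key0, extra]: mutating the copy's list is visible
        // through the original map as well.
        System.out.println(exprs.get(Byte.valueOf((byte) 0)));
      }
    }
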
    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,14 +18,13 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;

    /**
    * LateralViewForwardDesc.
    *
    */
    @Explain(displayName = "Lateral View Forward")
    -public class LateralViewForwardDesc implements Serializable {
    +public class LateralViewForwardDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;

    public LateralViewForwardDesc() {

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,15 +18,15 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    import java.util.ArrayList;

    +
    /**
    * LateralViewJoinDesc.
    *
    */
    @Explain(displayName = "Lateral View Join Operator")
    -public class LateralViewJoinDesc implements Serializable {
    +public class LateralViewJoinDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;

    private ArrayList<String> outputInternalColNames;

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,14 +18,13 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;

    /**
    * LimitDesc.
    *
    */
    @Explain(displayName = "Limit")
    -public class LimitDesc implements Serializable {
    +public class LimitDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;
    private int limit;
    private int leastRows = -1;

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ListSinkDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ListSinkDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ListSinkDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ListSinkDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,13 +18,12 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;

    /**
    * description for ListSinkOperator, just for explain result.
    */
    @Explain(displayName = "ListSink")
    -public class ListSinkDesc implements Serializable {
    +public class ListSinkDesc extends AbstractOperatorDesc {

    private static final long serialVersionUID = 1L;


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java Wed Aug 29 17:43:59 2012
    @@ -33,22 +33,22 @@ import org.apache.hadoop.hive.ql.exec.Op
    public class MapredLocalWork implements Serializable {
    private static final long serialVersionUID = 1L;

    - private LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork;
    + private LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork;
    private LinkedHashMap<String, FetchWork> aliasToFetchWork;
    private boolean inputFileChangeSensitive;
    private BucketMapJoinContext bucketMapjoinContext;
    private String tmpFileURI;
    private String stageID;

    - private List<Operator<? extends Serializable>> dummyParentOp ;
    + private List<Operator<? extends OperatorDesc>> dummyParentOp ;

    public MapredLocalWork() {

    }

    public MapredLocalWork(
    - final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork,
    - final LinkedHashMap<String, FetchWork> aliasToFetchWork) {
    + final LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork,
    + final LinkedHashMap<String, FetchWork> aliasToFetchWork) {
    this.aliasToWork = aliasToWork;
    this.aliasToFetchWork = aliasToFetchWork;

    @@ -61,18 +61,18 @@ public class MapredLocalWork implements
    }


    - public void setDummyParentOp(List<Operator<? extends Serializable>> op){
    + public void setDummyParentOp(List<Operator<? extends OperatorDesc>> op){
    this.dummyParentOp=op;
    }


    - public List<Operator<? extends Serializable>> getDummyParentOp(){
    + public List<Operator<? extends OperatorDesc>> getDummyParentOp(){
    return this.dummyParentOp;
    }


    @Explain(displayName = "Alias -> Map Local Operator Tree")
    - public LinkedHashMap<String, Operator<? extends Serializable>> getAliasToWork() {
    + public LinkedHashMap<String, Operator<? extends OperatorDesc>> getAliasToWork() {
    return aliasToWork;
    }

    @@ -85,7 +85,7 @@ public class MapredLocalWork implements
    }

    public void setAliasToWork(
    - final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork) {
    + final LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork) {
    this.aliasToWork = aliasToWork;
    }


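The point of tightening the wildcard from ? extends Serializable to ? extends OperatorDesc throughout these signatures is that code holding an operator out of aliasToWork can now use the descriptor contract directly. A hedged, self-contained sketch of what the stronger bound buys, with stand-in mini-types rather than Hive's real Operator class:

    import java.io.Serializable;

    public class BoundDemo {
      // Mirrors the OperatorDesc interface added by this patch.
      interface OperatorDesc extends Serializable, Cloneable {
        Object clone() throws CloneNotSupportedException;
      }

      // Stand-in for Operator<T>: just carries a conf object.
      static class Operator<T> {
        private final T conf;
        Operator(T conf) { this.conf = conf; }
        T getConf() { return conf; }
      }

      // Compiles because the bound guarantees clone(). With a bound of
      // Serializable this would not compile: Serializable declares no
      // clone() method.
      static Object copyConf(Operator<? extends OperatorDesc> op)
          throws CloneNotSupportedException {
        return op.getConf().clone();
      }

      public static void main(String[] args) throws Exception {
        OperatorDesc desc = new OperatorDesc() {
          public Object clone() { return this; } // trivial stand-in
        };
        System.out.println(copyConf(new Operator<OperatorDesc>(desc)) == desc);
      }
    }
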
    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java Wed Aug 29 17:43:59 2012
    @@ -19,7 +19,6 @@
    package org.apache.hadoop.hive.ql.plan;

    import java.io.ByteArrayOutputStream;
    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.LinkedHashMap;
    @@ -39,7 +38,7 @@ import org.apache.hadoop.hive.ql.parse.S
    *
    */
    @Explain(displayName = "Map Reduce")
    -public class MapredWork implements Serializable {
    +public class MapredWork extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;
    private String command;
    // map side work
    @@ -49,7 +48,7 @@ public class MapredWork implements Seria

    private LinkedHashMap<String, PartitionDesc> pathToPartitionInfo;

    - private LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork;
    + private LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork;

    private LinkedHashMap<String, PartitionDesc> aliasToPartnInfo;

    @@ -81,7 +80,7 @@ public class MapredWork implements Seria

    private String tmpHDFSFileURI;

    - private LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtxMap;
    + private LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap;

    private QBJoinTree joinTree;

    @@ -100,7 +99,7 @@ public class MapredWork implements Seria
    final String command,
    final LinkedHashMap<String, ArrayList<String>> pathToAliases,
    final LinkedHashMap<String, PartitionDesc> pathToPartitionInfo,
    - final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork,
    + final LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork,
    final TableDesc keyDesc, List<TableDesc> tagToValueDesc,
    final Operator<?> reducer, final Integer numReduceTasks,
    final MapredLocalWork mapLocalWork,
    @@ -167,12 +166,12 @@ public class MapredWork implements Seria
    }

    @Explain(displayName = "Alias -> Map Operator Tree")
    - public LinkedHashMap<String, Operator<? extends Serializable>> getAliasToWork() {
    + public LinkedHashMap<String, Operator<? extends OperatorDesc>> getAliasToWork() {
    return aliasToWork;
    }

    public void setAliasToWork(
    - final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork) {
    + final LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork) {
    this.aliasToWork = aliasToWork;
    }

    @@ -433,12 +432,13 @@ public class MapredWork implements Seria
    this.joinTree = joinTree;
    }

    - public LinkedHashMap<Operator<? extends Serializable>, OpParseContext> getOpParseCtxMap() {
    + public
    + LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> getOpParseCtxMap() {
    return opParseCtxMap;
    }

    public void setOpParseCtxMap(
    - LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtxMap) {
    + LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap) {
    this.opParseCtxMap = opParseCtxMap;
    }


    Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java?rev=1378659&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java (added)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java Wed Aug 29 17:43:59 2012
    @@ -0,0 +1,25 @@
    +/**
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements. See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership. The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License. You may obtain a copy of the License at
    + *
    + * http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.hadoop.hive.ql.plan;
    +
    +import java.io.Serializable;
    +
    +public interface OperatorDesc extends Serializable, Cloneable {
    + public Object clone() throws CloneNotSupportedException;
    +}

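AbstractOperatorDesc, which the modified *Desc classes now extend, is also added by this commit but its body does not appear in this excerpt. Judging from how the subclasses use it here -- most simply inherit clone(), and only JoinDesc, ReduceSinkDesc, SelectDesc and TableScanDesc override it -- a plausible minimal reconstruction (not the committed text verbatim) would be:

    package org.apache.hadoop.hive.ql.plan;

    // Plausible base implementation: satisfies the OperatorDesc contract and
    // forces subclasses that need copying to override clone() themselves.
    public class AbstractOperatorDesc implements OperatorDesc {
      @Override
      public Object clone() throws CloneNotSupportedException {
        throw new CloneNotSupportedException("clone not supported");
      }
    }
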
    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.Comparator;
    @@ -88,7 +87,7 @@ public final class PlanUtils {
    try {
    return new MapredWork("", new LinkedHashMap<String, ArrayList<String>>(),
    new LinkedHashMap<String, PartitionDesc>(),
    - new LinkedHashMap<String, Operator<? extends Serializable>>(),
    + new LinkedHashMap<String, Operator<? extends OperatorDesc>>(),
    new TableDesc(), new ArrayList<TableDesc>(), null, Integer.valueOf(1),
    null, Hive.get().getConf().getBoolVar(
    HiveConf.ConfVars.HIVE_COMBINE_INPUT_FORMAT_SUPPORTS_SPLITTABLE));

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,15 +18,16 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    +import java.util.ArrayList;
    import java.util.List;

    +
    /**
    * ReduceSinkDesc.
    *
    */
    @Explain(displayName = "Reduce Output Operator")
    -public class ReduceSinkDesc implements Serializable {
    +public class ReduceSinkDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;
    /**
    * Key columns are passed to reducer in the "key".
    @@ -91,6 +92,29 @@ public class ReduceSinkDesc implements S
    this.distinctColumnIndices = distinctColumnIndices;
    }

    + @Override
    + public Object clone() {
    + ReduceSinkDesc desc = new ReduceSinkDesc();
    + desc.setKeyCols((ArrayList<ExprNodeDesc>) getKeyCols().clone());
    + desc.setValueCols((ArrayList<ExprNodeDesc>) getValueCols().clone());
    + desc.setOutputKeyColumnNames((ArrayList<String>) getOutputKeyColumnNames().clone());
    + List<List<Integer>> distinctColumnIndicesClone = new ArrayList<List<Integer>>();
    + for (List<Integer> distinctColumnIndex : getDistinctColumnIndices()) {
    + List<Integer> tmp = new ArrayList<Integer>();
    + tmp.addAll(distinctColumnIndex);
    + distinctColumnIndicesClone.add(tmp);
    + }
    + desc.setDistinctColumnIndices(distinctColumnIndicesClone);
    + desc.setOutputValueColumnNames((ArrayList<String>) getOutputValueColumnNames().clone());
    + desc.setNumDistributionKeys(getNumDistributionKeys());
    + desc.setTag(getTag());
    + desc.setNumReducers(getNumReducers());
    + desc.setPartitionCols((ArrayList<ExprNodeDesc>) getPartitionCols().clone());
    + desc.setKeySerializeInfo((TableDesc) getKeySerializeInfo().clone());
    + desc.setValueSerializeInfo((TableDesc) getValueSerializeInfo().clone());
    + return desc;
    + }
    +
    public java.util.ArrayList<java.lang.String> getOutputKeyColumnNames() {
    return outputKeyColumnNames;
    }
    @@ -186,7 +210,7 @@ public class ReduceSinkDesc implements S

    /**
    * Returns the sort order of the key columns.
    - *
    + *
    * @return null, which means ascending order for all key columns, or a String
    * of the same length as key columns, that consists of only "+"
    * (ascending order) and "-" (descending order).
    @@ -196,7 +220,7 @@ public class ReduceSinkDesc implements S
    return keySerializeInfo.getProperties().getProperty(
    org.apache.hadoop.hive.serde.Constants.SERIALIZATION_SORT_ORDER);
    }
    -
    +
    public void setOrder(String orderStr) {
    keySerializeInfo.getProperties().setProperty(
    org.apache.hadoop.hive.serde.Constants.SERIALIZATION_SORT_ORDER,

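The getOrder() contract described above encodes sort direction as one character per key column. A tiny illustrative decoder of that convention (the "+"/"-" string format is from the javadoc above; the demo class itself is not Hive code):

    public class SortOrderDemo {
      public static void main(String[] args) {
        String order = "++-"; // e.g. via setOrder(): col0 asc, col1 asc, col2 desc
        for (int i = 0; i < order.length(); i++) {
          boolean asc = order.charAt(i) == '+';
          System.out.println("key column " + i + ": "
              + (asc ? "ascending" : "descending"));
        }
      }
    }
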
    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,8 +18,6 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    -
    import org.apache.hadoop.hive.ql.exec.RecordReader;
    import org.apache.hadoop.hive.ql.exec.RecordWriter;

    @@ -28,7 +26,7 @@ import org.apache.hadoop.hive.ql.exec.Re
    *
    */
    @Explain(displayName = "Transform Operator")
    -public class ScriptDesc implements Serializable {
    +public class ScriptDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;
    private String scriptCmd;
    // Describe how to deserialize data back from user script

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,17 +18,18 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    +import java.util.ArrayList;
    +

    /**
    * SelectDesc.
    *
    */
    @Explain(displayName = "Select Operator")
    -public class SelectDesc implements Serializable {
    +public class SelectDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;
    - private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList;
    - private java.util.ArrayList<java.lang.String> outputColumnNames;
    + private ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList;
    + private ArrayList<java.lang.String> outputColumnNames;
    private boolean selectStar;
    private boolean selStarNoCompute;

    @@ -40,45 +41,55 @@ public class SelectDesc implements Seria
    }

    public SelectDesc(
    - final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
    - final java.util.ArrayList<java.lang.String> outputColumnNames) {
    + final ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
    + final ArrayList<java.lang.String> outputColumnNames) {
    this(colList, outputColumnNames, false);
    }

    public SelectDesc(
    - final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
    - java.util.ArrayList<java.lang.String> outputColumnNames,
    - final boolean selectStar) {
    + final ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
    + ArrayList<java.lang.String> outputColumnNames,
    + final boolean selectStar) {
    this.colList = colList;
    this.selectStar = selectStar;
    this.outputColumnNames = outputColumnNames;
    }

    public SelectDesc(
    - final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
    - final boolean selectStar, final boolean selStarNoCompute) {
    + final ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
    + final boolean selectStar, final boolean selStarNoCompute) {
    this.colList = colList;
    this.selectStar = selectStar;
    this.selStarNoCompute = selStarNoCompute;
    }

    + @Override
    + public Object clone() {
    + SelectDesc ret = new SelectDesc();
    + ret.setColList((ArrayList<ExprNodeDesc>)getColList().clone());
    + ret.setOutputColumnNames((ArrayList<String>)getOutputColumnNames().clone());
    + ret.setSelectStar(selectStar);
    + ret.setSelStarNoCompute(selStarNoCompute);
    + return ret;
    + }
    +
    @Explain(displayName = "expressions")
    - public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> getColList() {
    + public ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> getColList() {
    return colList;
    }

    public void setColList(
    - final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList) {
    + final ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList) {
    this.colList = colList;
    }

    @Explain(displayName = "outputColumnNames")
    - public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
    + public ArrayList<java.lang.String> getOutputColumnNames() {
    return outputColumnNames;
    }

    public void setOutputColumnNames(
    - java.util.ArrayList<java.lang.String> outputColumnNames) {
    + ArrayList<java.lang.String> outputColumnNames) {
    this.outputColumnNames = outputColumnNames;
    }


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,7 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    +import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
    @@ -29,7 +29,7 @@ import org.apache.hadoop.hive.ql.metadat
    * things will be added here as table scan is invoked as part of local work.
    **/
    @Explain(displayName = "TableScan")
    -public class TableScanDesc implements Serializable {
    +public class TableScanDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;

    private String alias;
    @@ -71,6 +71,12 @@ public class TableScanDesc implements Se
    this.virtualCols = vcs;
    }

    + @Override
    + public Object clone() {
    + List<VirtualColumn> vcs = new ArrayList<VirtualColumn>(getVirtualCols());
    + return new TableScanDesc(getAlias(), vcs);
    + }
    +
    @Explain(displayName = "alias")
    public String getAlias() {
    return alias;

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,18 +18,16 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    -
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;

    /**
    * All member variables should have a setters and getters of the form get<member
    * name> and set<member name> or else they won't be recreated properly at run
    * time.
    - *
    + *
    */
    @Explain(displayName = "UDTF Operator")
    -public class UDTFDesc implements Serializable {
    +public class UDTFDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;

    private GenericUDTF genericUDTF;

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,14 +18,13 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;

    /**
    * unionDesc is a empty class currently. However, union has more than one input
    * (as compared with forward), and therefore, we need a separate class.
    **/
    @Explain(displayName = "Union")
    -public class UnionDesc implements Serializable {
    +public class UnionDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;

    private transient int numInputs;

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java Wed Aug 29 17:43:59 2012
    @@ -17,7 +17,6 @@
    */
    package org.apache.hadoop.hive.ql.ppd;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    @@ -31,6 +30,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
    import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
    import org.apache.hadoop.hive.ql.parse.RowResolver;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Context for Expression Walker for determining predicate pushdown candidates
    @@ -62,7 +62,7 @@ public class ExprWalkerInfo implements N

    protected static final Log LOG = LogFactory.getLog(OpProcFactory.class
    .getName());;
    - private Operator<? extends Serializable> op = null;
    + private Operator<? extends OperatorDesc> op = null;
    private RowResolver toRR = null;

    /**
    @@ -105,7 +105,7 @@ public class ExprWalkerInfo implements N
    newToOldExprMap = new HashMap<ExprNodeDesc, ExprNodeDesc>();
    }

    - public ExprWalkerInfo(Operator<? extends Serializable> op,
    + public ExprWalkerInfo(Operator<? extends OperatorDesc> op,
    final RowResolver toRR) {
    this.op = op;
    this.toRR = toRR;
    @@ -119,7 +119,7 @@ public class ExprWalkerInfo implements N
    /**
    * @return the op of this expression.
    */
    - public Operator<? extends Serializable> getOp() {
    + public Operator<? extends OperatorDesc> getOp() {
    return op;
    }


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java Wed Aug 29 17:43:59 2012
    @@ -17,7 +17,6 @@
    */
    package org.apache.hadoop.hive.ql.ppd;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    @@ -44,6 +43,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Expression factory for predicate pushdown processing. Each processor
    @@ -70,7 +70,7 @@ public final class ExprWalkerProcFactory
    ExprWalkerInfo ctx = (ExprWalkerInfo) procCtx;
    ExprNodeColumnDesc colref = (ExprNodeColumnDesc) nd;
    RowResolver toRR = ctx.getToRR();
    - Operator<? extends Serializable> op = ctx.getOp();
    + Operator<? extends OperatorDesc> op = ctx.getOp();
    String[] colAlias = toRR.reverseLookup(colref.getColumn());

    boolean isCandidate = true;
    @@ -230,8 +230,8 @@ public final class ExprWalkerProcFactory
    }

    public static ExprWalkerInfo extractPushdownPreds(OpWalkerInfo opContext,
    - Operator<? extends Serializable> op, ExprNodeDesc pred)
    - throws SemanticException {
    + Operator<? extends OperatorDesc> op, ExprNodeDesc pred)
    + throws SemanticException {
    List<ExprNodeDesc> preds = new ArrayList<ExprNodeDesc>();
    preds.add(pred);
    return extractPushdownPreds(opContext, op, preds);
    @@ -249,11 +249,11 @@ public final class ExprWalkerProcFactory
    * @throws SemanticException
    */
    public static ExprWalkerInfo extractPushdownPreds(OpWalkerInfo opContext,
    - Operator<? extends Serializable> op, List<ExprNodeDesc> preds)
    - throws SemanticException {
    + Operator<? extends OperatorDesc> op, List<ExprNodeDesc> preds)
    + throws SemanticException {
    // Create the walker, the rules dispatcher and the context.
    ExprWalkerInfo exprContext = new ExprWalkerInfo(op, opContext
    - .getRowResolver(op));
    + .getRowResolver(op));

    // create a walker which walks the tree in a DFS manner while maintaining
    // the operator stack. The dispatcher

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Wed Aug 29 17:43:59 2012
    @@ -17,7 +17,6 @@
    */
    package org.apache.hadoop.hive.ql.ppd;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.HashSet;
    @@ -58,6 +57,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
    import org.apache.hadoop.hive.ql.plan.FilterDesc;
    import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
    import org.apache.hadoop.hive.ql.plan.JoinDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.TableScanDesc;
    import org.apache.hadoop.hive.serde2.Deserializer;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    @@ -132,10 +132,10 @@ public final class OpProcFactory {
    // SELECT(*) because that's the way that the DAG was constructed. We
    // only want to get the predicates from the SELECT(*).
    ExprWalkerInfo childPreds = owi
    - .getPrunedPreds((Operator<? extends Serializable>) nd.getChildren()
    + .getPrunedPreds((Operator<? extends OperatorDesc>) nd.getChildren()
    .get(0));

    - owi.putPrunedPreds((Operator<? extends Serializable>) nd, childPreds);
    + owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, childPreds);
    return null;
    }

    @@ -173,7 +173,8 @@ public final class OpProcFactory {
    LOG.info("Processing for " + nd.getName() + "("
    + ((Operator) nd).getIdentifier() + ")");
    OpWalkerInfo owi = (OpWalkerInfo) procCtx;
    - Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
    + Operator<? extends OperatorDesc> op =
    + (Operator<? extends OperatorDesc>) nd;
    ExprNodeDesc predicate = (((FilterOperator) nd).getConf()).getPredicate();
    ExprWalkerInfo ewi = new ExprWalkerInfo();
    // Don't push a sampling predicate since createFilter() always creates filter
    @@ -186,7 +187,7 @@ public final class OpProcFactory {
    /* predicate is not deterministic */
    if (op.getChildren() != null && op.getChildren().size() == 1) {
    createFilter(op, owi
    - .getPrunedPreds((Operator<? extends Serializable>) (op
    + .getPrunedPreds((Operator<? extends OperatorDesc>) (op
    .getChildren().get(0))), owi);
    }
    return null;
    @@ -199,7 +200,7 @@ public final class OpProcFactory {
    }
    }
    logExpr(nd, ewi);
    - owi.putPrunedPreds((Operator<? extends Serializable>) nd, ewi);
    + owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, ewi);
    }
    // merge it with children predicates
    boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, ewi, null, false);
    @@ -233,7 +234,7 @@ public final class OpProcFactory {
    boolean hasUnpushedPredicates =
    mergeWithChildrenPred(nd, owi, null, null, false);
    ExprWalkerInfo prunePreds =
    - owi.getPrunedPreds((Operator<? extends Serializable>) nd);
    + owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd);
    if (prunePreds != null) {
    Set<String> toRemove = new HashSet<String>();
// we don't push down any expressions that refer to aliases that can't
    @@ -294,7 +295,7 @@ public final class OpProcFactory {
    private void applyFilterTransitivity(JoinOperator nd, OpWalkerInfo owi)
    throws SemanticException {
    ExprWalkerInfo prunePreds =
    - owi.getPrunedPreds((Operator<? extends Serializable>) nd);
    + owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd);
    if (prunePreds != null) {
    // We want to use the row resolvers of the parents of the join op
    // because the rowresolver refers to the output columns of an operator
    @@ -302,7 +303,7 @@ public final class OpProcFactory {
    // operator.
    Map<String, RowResolver> aliasToRR =
    new HashMap<String, RowResolver>();
    - for (Operator<? extends Serializable> o : (nd).getParentOperators()) {
    + for (Operator<? extends OperatorDesc> o : (nd).getParentOperators()) {
    for (String alias : owi.getRowResolver(o).getTableNames()){
    aliasToRR.put(alias, owi.getRowResolver(o));
    }
    @@ -386,7 +387,7 @@ public final class OpProcFactory {

    for (Entry<String, List<ExprNodeDesc>> aliasToFilters
    : newFilters.entrySet()){
    - owi.getPrunedPreds((Operator<? extends Serializable>) nd)
    + owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd)
    .addPushDowns(aliasToFilters.getKey(), aliasToFilters.getValue());
    }
    }
    @@ -513,8 +514,9 @@ public final class OpProcFactory {
    if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
    HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
    if (hasUnpushedPredicates) {
    - Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
    - Operator<? extends Serializable> childOperator = op.getChildOperators().get(0);
    + Operator<? extends OperatorDesc> op =
    + (Operator<? extends OperatorDesc>) nd;
    + Operator<? extends OperatorDesc> childOperator = op.getChildOperators().get(0);
    if(childOperator.getParentOperators().size()==1) {
    owi.getCandidateFilterOps().clear();
    }
    @@ -587,9 +589,10 @@ public final class OpProcFactory {
    // no-op for leafs
    return hasUnpushedPredicates;
    }
    - Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
    + Operator<? extends OperatorDesc> op =
    + (Operator<? extends OperatorDesc>) nd;
    ExprWalkerInfo childPreds = owi
    - .getPrunedPreds((Operator<? extends Serializable>) nd.getChildren()
    + .getPrunedPreds((Operator<? extends OperatorDesc>) nd.getChildren()
    .get(0));
    if (childPreds == null) {
    return hasUnpushedPredicates;
    @@ -614,7 +617,7 @@ public final class OpProcFactory {
    hasUnpushedPredicates = true;
    }
    }
    - owi.putPrunedPreds((Operator<? extends Serializable>) nd, ewi);
    + owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, ewi);
    return hasUnpushedPredicates;
    }

    @@ -624,9 +627,9 @@ public final class OpProcFactory {
    if (nd.getChildren() == null) {
    return null;
    }
    - Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
    + Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>)nd;
    ExprWalkerInfo ewi = new ExprWalkerInfo();
    - for (Operator<? extends Serializable> child : op.getChildOperators()) {
    + for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
    ExprWalkerInfo childPreds = owi.getPrunedPreds(child);
    if (childPreds == null) {
    continue;
    @@ -698,15 +701,15 @@ public final class OpProcFactory {
    }

    // add new filter op
    - List<Operator<? extends Serializable>> originalChilren = op
    + List<Operator<? extends OperatorDesc>> originalChilren = op
    .getChildOperators();
    op.setChildOperators(null);
    Operator<FilterDesc> output = OperatorFactory.getAndMakeChild(
    new FilterDesc(condn, false), new RowSchema(inputRR.getColumnInfos()),
    op);
    output.setChildOperators(originalChilren);
    - for (Operator<? extends Serializable> ch : originalChilren) {
    - List<Operator<? extends Serializable>> parentOperators = ch
    + for (Operator<? extends OperatorDesc> ch : originalChilren) {
    + List<Operator<? extends OperatorDesc>> parentOperators = ch
    .getParentOperators();
    int pos = parentOperators.indexOf(op);
    assert pos != -1;
    @@ -720,13 +723,13 @@ public final class OpProcFactory {
    HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
    // remove the candidate filter ops
    for (FilterOperator fop : owi.getCandidateFilterOps()) {
    - List<Operator<? extends Serializable>> children = fop.getChildOperators();
    - List<Operator<? extends Serializable>> parents = fop.getParentOperators();
    - for (Operator<? extends Serializable> parent : parents) {
    + List<Operator<? extends OperatorDesc>> children = fop.getChildOperators();
    + List<Operator<? extends OperatorDesc>> parents = fop.getParentOperators();
    + for (Operator<? extends OperatorDesc> parent : parents) {
    parent.getChildOperators().addAll(children);
    parent.removeChild(fop);
    }
    - for (Operator<? extends Serializable> child : children) {
    + for (Operator<? extends OperatorDesc> child : children) {
    child.getParentOperators().addAll(parents);
    child.removeParent(fop);
    }

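The filter-removal loop at the end of the hunk above is the standard DAG splice: every parent of the removed operator adopts its children, and every child adopts its parents. A compact stand-alone version of that rewiring, with a hypothetical Node type in place of Hive's Operator:

    import java.util.ArrayList;
    import java.util.List;

    public class SpliceDemo {
      static class Node {
        final String name;
        final List<Node> parents = new ArrayList<Node>();
        final List<Node> children = new ArrayList<Node>();
        Node(String name) { this.name = name; }
      }

      // Unlinks mid, wiring its parents directly to its children; mirrors
      // the parent.removeChild(fop) / child.removeParent(fop) loop above.
      static void splice(Node mid) {
        for (Node p : mid.parents) {
          p.children.addAll(mid.children);
          p.children.remove(mid);
        }
        for (Node c : mid.children) {
          c.parents.addAll(mid.parents);
          c.parents.remove(mid);
        }
      }

      public static void main(String[] args) {
        Node scan = new Node("scan");
        Node filter = new Node("filter");
        Node sel = new Node("select");
        scan.children.add(filter); filter.parents.add(scan);
        filter.children.add(sel);  sel.parents.add(filter);
        splice(filter);
        System.out.println(scan.children.get(0).name); // select
      }
    }
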
    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java Wed Aug 29 17:43:59 2012
    @@ -17,7 +17,6 @@
    */
    package org.apache.hadoop.hive.ql.ppd;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    @@ -30,6 +29,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
    import org.apache.hadoop.hive.ql.parse.OpParseContext;
    import org.apache.hadoop.hive.ql.parse.ParseContext;
    import org.apache.hadoop.hive.ql.parse.RowResolver;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Context class for operator walker of predicate pushdown.
    @@ -39,23 +39,24 @@ public class OpWalkerInfo implements Nod
    * Operator to Pushdown Predicates Map. This keeps track of the final pushdown
    * predicates for each operator as you walk the Op Graph from child to parent
    */
    - private final HashMap<Operator<? extends Serializable>, ExprWalkerInfo> opToPushdownPredMap;
    - private final Map<Operator<? extends Serializable>, OpParseContext> opToParseCtxMap;
    + private final HashMap<Operator<? extends OperatorDesc>, ExprWalkerInfo>
    + opToPushdownPredMap;
    + private final Map<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap;
    private final ParseContext pGraphContext;
    private final List<FilterOperator> candidateFilterOps;

    public OpWalkerInfo(ParseContext pGraphContext) {
    this.pGraphContext = pGraphContext;
    opToParseCtxMap = pGraphContext.getOpParseCtx();
    - opToPushdownPredMap = new HashMap<Operator<? extends Serializable>, ExprWalkerInfo>();
    + opToPushdownPredMap = new HashMap<Operator<? extends OperatorDesc>, ExprWalkerInfo>();
    candidateFilterOps = new ArrayList<FilterOperator>();
    }

    - public ExprWalkerInfo getPrunedPreds(Operator<? extends Serializable> op) {
    + public ExprWalkerInfo getPrunedPreds(Operator<? extends OperatorDesc> op) {
    return opToPushdownPredMap.get(op);
    }

    - public ExprWalkerInfo putPrunedPreds(Operator<? extends Serializable> op,
    + public ExprWalkerInfo putPrunedPreds(Operator<? extends OperatorDesc> op,
    ExprWalkerInfo value) {
    return opToPushdownPredMap.put(op, value);
    }
    @@ -64,7 +65,7 @@ public class OpWalkerInfo implements Nod
    return opToParseCtxMap.get(op).getRowResolver();
    }

    - public OpParseContext put(Operator<? extends Serializable> key,
    + public OpParseContext put(Operator<? extends OperatorDesc> key,
    OpParseContext value) {
    return opToParseCtxMap.put(key, value);
    }

    Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
    +++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Wed Aug 29 17:43:59 2012
    @@ -18,8 +18,11 @@

    package org.apache.hadoop.hive.ql.exec;

    -import java.io.Serializable;
    -import java.util.*;
    +import java.util.ArrayList;
    +import java.util.Arrays;
    +import java.util.HashMap;
    +import java.util.LinkedHashMap;
    +import java.util.Map;

    import junit.framework.TestCase;

    @@ -32,6 +35,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.FilterDesc;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.ScriptDesc;
    @@ -334,7 +338,8 @@ public class TestOperators extends TestC
    CollectOperator cdop2 = (CollectOperator) OperatorFactory
    .get(CollectDesc.class);
    cdop2.setConf(cd);
    - LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork = new LinkedHashMap<String, Operator<? extends Serializable>>();
    + LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork =
    + new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
    aliasToWork.put("a", cdop1);
    aliasToWork.put("b", cdop2);


    Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
    +++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Wed Aug 29 17:43:59 2012
    @@ -19,7 +19,6 @@
    package org.apache.hadoop.hive.ql.exec;

    import java.io.ByteArrayOutputStream;
    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.LinkedHashMap;

    @@ -30,6 +29,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.FilterDesc;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    import org.apache.hadoop.hive.ql.plan.TableDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    @@ -69,8 +69,8 @@ public class TestPlan extends TestCase {
    LinkedHashMap<String, PartitionDesc> pt = new LinkedHashMap<String, PartitionDesc>();
    pt.put("/tmp/testfolder", partDesc);

    - LinkedHashMap<String, Operator<? extends Serializable>> ao =
    - new LinkedHashMap<String, Operator<? extends Serializable>>();
    + LinkedHashMap<String, Operator<? extends OperatorDesc>> ao =
    + new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
    ao.put("a", op);

    MapredWork mrwork = new MapredWork();
    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkDeDuplication.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkDeDuplication.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkDeDuplication.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkDeDuplication.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.Iterator;
    @@ -55,6 +54,7 @@ import org.apache.hadoop.hive.ql.parse.R
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
    import org.apache.hadoop.hive.ql.plan.SelectDesc;

    @@ -155,9 +155,10 @@ public class ReduceSinkDeDuplication imp
    return null;
    }

    - List<Operator<? extends Serializable>> childOp = childReduceSink.getChildOperators();
    + List<Operator<? extends OperatorDesc>> childOp =
    + childReduceSink.getChildOperators();
    if (childOp != null && childOp.size() == 1) {
    - Operator<? extends Serializable> child = childOp.get(0);
    + Operator<? extends OperatorDesc> child = childOp.get(0);
    if (child instanceof GroupByOperator || child instanceof JoinOperator) {
    ctx.addRejectedReduceSinkOperator(childReduceSink);
    return null;
    @@ -165,7 +166,8 @@ public class ReduceSinkDeDuplication imp
    }

    ParseContext pGraphContext = ctx.getPctx();
    - HashMap<String, String> childColumnMapping = getPartitionAndKeyColumnMapping(childReduceSink);
    + HashMap<String, String> childColumnMapping =
    + getPartitionAndKeyColumnMapping(childReduceSink);
    ReduceSinkOperator parentRS = null;
    parentRS = findSingleParentReduceSink(childReduceSink, pGraphContext);
    if (parentRS == null) {
    @@ -173,7 +175,7 @@ public class ReduceSinkDeDuplication imp
    return null;
    }
    HashMap<String, String> parentColumnMapping = getPartitionAndKeyColumnMapping(parentRS);
    - Operator<? extends Serializable> stopBacktrackFlagOp = null;
    + Operator<? extends OperatorDesc> stopBacktrackFlagOp = null;
    if (parentRS.getParentOperators() == null
|| parentRS.getParentOperators().size() == 0) {
    stopBacktrackFlagOp = parentRS;
    @@ -202,10 +204,12 @@ public class ReduceSinkDeDuplication imp

    private void replaceReduceSinkWithSelectOperator(
    ReduceSinkOperator childReduceSink, ParseContext pGraphContext) throws SemanticException {
    - List<Operator<? extends Serializable>> parentOp = childReduceSink.getParentOperators();
    - List<Operator<? extends Serializable>> childOp = childReduceSink.getChildOperators();
    + List<Operator<? extends OperatorDesc>> parentOp =
    + childReduceSink.getParentOperators();
    + List<Operator<? extends OperatorDesc>> childOp =
    + childReduceSink.getChildOperators();

    - Operator<? extends Serializable> oldParent = childReduceSink;
    + Operator<? extends OperatorDesc> oldParent = childReduceSink;

    if (childOp != null && childOp.size() == 1
    && ((childOp.get(0)) instanceof ExtractOperator)) {
    @@ -213,7 +217,7 @@ public class ReduceSinkDeDuplication imp
    childOp = childOp.get(0).getChildOperators();
    }

    - Operator<? extends Serializable> input = parentOp.get(0);
    + Operator<? extends OperatorDesc> input = parentOp.get(0);
    input.getChildOperators().clear();

    RowResolver inputRR = pGraphContext.getOpParseCtx().get(input).getRowResolver();
    @@ -247,14 +251,14 @@ public class ReduceSinkDeDuplication imp

    // Insert the select operator in between.
    sel.setChildOperators(childOp);
    - for (Operator<? extends Serializable> ch : childOp) {
    + for (Operator<? extends OperatorDesc> ch : childOp) {
    ch.replaceParent(oldParent, sel);
    }

    }

    - private Operator<? extends Serializable> putOpInsertMap(
    - Operator<? extends Serializable> op, RowResolver rr, ParseContext pGraphContext) {
    + private Operator<? extends OperatorDesc> putOpInsertMap(
    + Operator<? extends OperatorDesc> op, RowResolver rr, ParseContext pGraphContext) {
    OpParseContext ctx = new OpParseContext(rr);
    pGraphContext.getOpParseCtx().put(op, ctx);
    return op;
    @@ -373,8 +377,9 @@ public class ReduceSinkDeDuplication imp
    private boolean backTrackColumnNames(
    HashMap<String, String> columnMapping,
    ReduceSinkOperator reduceSink,
    - Operator<? extends Serializable> stopBacktrackFlagOp, ParseContext pGraphContext) {
    - Operator<? extends Serializable> startOperator = reduceSink;
    + Operator<? extends OperatorDesc> stopBacktrackFlagOp,
    + ParseContext pGraphContext) {
    + Operator<? extends OperatorDesc> startOperator = reduceSink;
    while (startOperator != null && startOperator != stopBacktrackFlagOp) {
    startOperator = startOperator.getParentOperators().get(0);
    Map<String, ExprNodeDesc> colExprMap = startOperator.getColumnExprMap();
    @@ -423,7 +428,7 @@ public class ReduceSinkDeDuplication imp
    }

    private ReduceSinkOperator findSingleParentReduceSink(ReduceSinkOperator childReduceSink, ParseContext pGraphContext) {
    - Operator<? extends Serializable> start = childReduceSink;
    + Operator<? extends OperatorDesc> start = childReduceSink;
    while(start != null) {
    if (start.getParentOperators() == null
|| start.getParentOperators().size() != 1) {
    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    @@ -50,6 +49,7 @@ import org.apache.hadoop.hive.ql.parse.S
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.FetchWork;
    import org.apache.hadoop.hive.ql.plan.ListSinkDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.TableDesc;
    @@ -63,7 +63,7 @@ public class SimpleFetchOptimizer implem
    private final Log LOG = LogFactory.getLog(SimpleFetchOptimizer.class.getName());

    public ParseContext transform(ParseContext pctx) throws SemanticException {
    - Map<String, Operator<? extends Serializable>> topOps = pctx.getTopOps();
    + Map<String, Operator<? extends OperatorDesc>> topOps = pctx.getTopOps();
    if (pctx.getQB().isSimpleSelectQuery() && topOps.size() == 1) {
    // no join, no groupby, no distinct, no lateral view, no subq,
    // no CTAS or insert, not analyze command, and single sourced.
    @@ -234,8 +234,8 @@ public class SimpleFetchOptimizer implem
    pctx.getSemanticInputs().addAll(inputs);
    ListSinkOperator sink = new ListSinkOperator();
    sink.setConf(new ListSinkDesc(work.getSerializationNullFormat()));
    - sink.setParentOperators(new ArrayList<Operator<? extends Serializable>>());
    - Operator<? extends Serializable> parent = fileSink.getParentOperators().get(0);
    + sink.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
    + Operator<? extends OperatorDesc> parent = fileSink.getParentOperators().get(0);
    sink.getParentOperators().add(parent);
    parent.replaceChild(fileSink, sink);
    fileSink.setParentOperators(null);
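
    The hunk above splices the new sink into the operator DAG: the new sink
    adopts the old sink's parent, the parent swaps its child pointer via
    replaceChild, and the old sink is detached. A toy sketch of that splice
    (illustrative names only, not Hive's Operator API):

        import java.util.ArrayList;
        import java.util.List;

        class ToyOp {
          final String name;
          List<ToyOp> parents = new ArrayList<ToyOp>();
          List<ToyOp> children = new ArrayList<ToyOp>();
          ToyOp(String name) { this.name = name; }

          // Mirrors the replaceChild call above: swap one child in place.
          void replaceChild(ToyOp oldChild, ToyOp newChild) {
            children.set(children.indexOf(oldChild), newChild);
          }
        }

        public class SpliceSketch {
          public static void main(String[] args) {
            ToyOp parent = new ToyOp("select");
            ToyOp fileSink = new ToyOp("fileSink");
            parent.children.add(fileSink);
            fileSink.parents.add(parent);

            ToyOp listSink = new ToyOp("listSink");
            listSink.parents.add(parent);      // new sink adopts the old parent
            parent.replaceChild(fileSink, listSink);
            fileSink.parents = null;           // detach the replaced sink

            System.out.println(parent.children.get(0).name); // prints listSink
          }
        }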

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeBucketMapJoinOptimizer.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.LinkedHashMap;
    @@ -56,6 +55,7 @@ import org.apache.hadoop.hive.ql.parse.S
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.SMBJoinDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

    @@ -234,7 +234,7 @@ public class SortedMergeBucketMapJoinOpt
    List<Order> sortColumnsFirstTable)
    throws SemanticException {

    - Map<String, Operator<? extends Serializable>> topOps = this.pGraphContext
    + Map<String, Operator<? extends OperatorDesc>> topOps = this.pGraphContext
    .getTopOps();
    Map<TableScanOperator, Table> topToTable = this.pGraphContext
    .getTopToTable();

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyCtx.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer.index;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.LinkedHashMap;
    @@ -43,6 +42,7 @@ import org.apache.hadoop.hive.ql.lib.Rul
    import org.apache.hadoop.hive.ql.lib.RuleRegExp;
    import org.apache.hadoop.hive.ql.parse.ParseContext;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * RewriteCanApplyCtx class stores the context for the {@link RewriteCanApplyProcFactory}
    @@ -252,7 +252,8 @@ public final class RewriteCanApplyCtx im
    * @param topOp
    * @throws SemanticException
    */
    - void populateRewriteVars(Operator<? extends Serializable> topOp) throws SemanticException{
    + void populateRewriteVars(Operator<? extends OperatorDesc> topOp)
    + throws SemanticException{
    Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
    opRules.put(new RuleRegExp("R1", "FIL%"),
    RewriteCanApplyProcFactory.canApplyOnFilterOperator());

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteCanApplyProcFactory.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer.index;

    -import java.io.Serializable;
    import java.util.Iterator;
    import java.util.LinkedHashMap;
    import java.util.List;
    @@ -43,6 +42,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.plan.FilterDesc;
    import org.apache.hadoop.hive.ql.plan.GroupByDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Factory of methods used by {@link RewriteGBUsingIndex}
    @@ -204,8 +204,8 @@ public final class RewriteCanApplyProcFa
    SelectOperator operator = (SelectOperator)nd;
    canApplyCtx = (RewriteCanApplyCtx)ctx;

    - List<Operator<? extends Serializable>> childrenList = operator.getChildOperators();
    - Operator<? extends Serializable> child = childrenList.get(0);
    + List<Operator<? extends OperatorDesc>> childrenList = operator.getChildOperators();
    + Operator<? extends OperatorDesc> child = childrenList.get(0);
    if(child instanceof FileSinkOperator){
    Map<String, String> internalToAlias = new LinkedHashMap<String, String>();
    RowSchema rs = operator.getSchema();

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteGBUsingIndex.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer.index;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.HashMap;
    @@ -48,6 +47,7 @@ import org.apache.hadoop.hive.ql.optimiz
    import org.apache.hadoop.hive.ql.parse.OpParseContext;
    import org.apache.hadoop.hive.ql.parse.ParseContext;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;


    /**
    @@ -224,7 +224,7 @@ public class RewriteGBUsingIndex impleme
    boolean canApply = false;
    //Context for checking if this optimization can be applied to the input query
    RewriteCanApplyCtx canApplyCtx = RewriteCanApplyCtx.getInstance(parseContext);
    - Map<String, Operator<? extends Serializable>> topOps = parseContext.getTopOps();
    + Map<String, Operator<? extends OperatorDesc>> topOps = parseContext.getTopOps();

    canApplyCtx.setBaseTableName(baseTableName);
    canApplyCtx.populateRewriteVars(topOp);
    @@ -403,8 +403,8 @@ public class RewriteGBUsingIndex impleme
    */
    @SuppressWarnings("unchecked")
    private void rewriteOriginalQuery() throws SemanticException {
    - Map<String, Operator<? extends Serializable>> topOpMap =
    - (HashMap<String, Operator<? extends Serializable>>) parseContext.getTopOps().clone();
    + Map<String, Operator<? extends OperatorDesc>> topOpMap =
    + (HashMap<String, Operator<? extends OperatorDesc>>) parseContext.getTopOps().clone();
    Iterator<String> tsOpItr = tsOpToProcess.keySet().iterator();

    while(tsOpItr.hasNext()){
    @@ -416,7 +416,7 @@ public class RewriteGBUsingIndex impleme
    indexTableName, baseTableName, canApplyCtx.getAggFunction());
    rewriteQueryCtx.invokeRewriteQueryProc(topOp);
    parseContext = rewriteQueryCtx.getParseContext();
    - parseContext.setOpParseCtx((LinkedHashMap<Operator<? extends Serializable>,
    + parseContext.setOpParseCtx((LinkedHashMap<Operator<? extends OperatorDesc>,
    OpParseContext>) rewriteQueryCtx.getOpc());
    }
    LOG.info("Finished Rewriting query");
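
    The @SuppressWarnings("unchecked") above is needed because HashMap.clone()
    returns Object, so restoring the parameterized type is an unchecked cast.
    The clone is also shallow, which is what the rewrite wants: a stable key
    set to iterate over while the shared operator objects are mutated. A
    self-contained sketch of the idiom (toy value type):

        import java.util.HashMap;
        import java.util.Map;

        public class CloneSketch {
          // HashMap.clone() returns Object, so the cast back to the
          // parameterized type is unchecked and must be suppressed.
          @SuppressWarnings("unchecked")
          public static void main(String[] args) {
            HashMap<String, StringBuilder> topOps =
                new HashMap<String, StringBuilder>();
            topOps.put("t1", new StringBuilder("scan"));

            // Shallow copy: a new map, but the same value objects.
            Map<String, StringBuilder> snapshot =
                (HashMap<String, StringBuilder>) topOps.clone();

            topOps.remove("t1");                     // structural changes are isolated
            snapshot.get("t1").append("+rewritten"); // value mutations are shared
            System.out.println(snapshot.get("t1"));  // prints scan+rewritten
          }
        }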

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndex.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer.index;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.Iterator;
    @@ -51,6 +50,7 @@ import org.apache.hadoop.hive.ql.plan.Ag
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.GroupByDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.TableScanDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
    import org.apache.hadoop.hive.serde2.SerDeException;
    @@ -58,7 +58,6 @@ import org.apache.hadoop.hive.serde2.obj
    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
    -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

    @@ -80,8 +79,8 @@ public final class RewriteQueryUsingAggr
    Object... nodeOutputs) throws SemanticException {
    SelectOperator operator = (SelectOperator)nd;
    rewriteQueryCtx = (RewriteQueryUsingAggregateIndexCtx)ctx;
    - List<Operator<? extends Serializable>> childOps = operator.getChildOperators();
    - Operator<? extends Serializable> childOp = childOps.iterator().next();
    + List<Operator<? extends OperatorDesc>> childOps = operator.getChildOperators();
    + Operator<? extends OperatorDesc> childOp = childOps.iterator().next();

    //we need to set the colList, outputColumnNames, colExprMap,
    // rowSchema for only that SelectOperator which precedes the GroupByOperator
    @@ -136,9 +135,9 @@ public final class RewriteQueryUsingAggr
    // and add new ones
    Map<TableScanOperator, Table> topToTable =
    rewriteQueryCtx.getParseContext().getTopToTable();
    - Map<String, Operator<? extends Serializable>> topOps =
    + Map<String, Operator<? extends OperatorDesc>> topOps =
    rewriteQueryCtx.getParseContext().getTopOps();
    - Map<Operator<? extends Serializable>, OpParseContext> opParseContext =
    + Map<Operator<? extends OperatorDesc>, OpParseContext> opParseContext =
    rewriteQueryCtx.getParseContext().getOpParseCtx();

    //need this to set rowResolver for new scanOperator
    @@ -202,11 +201,11 @@ public final class RewriteQueryUsingAggr
    topOps.put(tabNameWithAlias, scanOperator);
    opParseContext.put(scanOperator, operatorContext);
    rewriteQueryCtx.getParseContext().setTopToTable(
    - (HashMap<TableScanOperator, Table>) topToTable);
    + (HashMap<TableScanOperator, Table>) topToTable);
    rewriteQueryCtx.getParseContext().setTopOps(
    - (HashMap<String, Operator<? extends Serializable>>) topOps);
    + (HashMap<String, Operator<? extends OperatorDesc>>) topOps);
    rewriteQueryCtx.getParseContext().setOpParseCtx(
    - (LinkedHashMap<Operator<? extends Serializable>, OpParseContext>) opParseContext);
    + (LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext>) opParseContext);

    return null;
    }

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/index/RewriteQueryUsingAggregateIndexCtx.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer.index;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    @@ -40,6 +39,7 @@ import org.apache.hadoop.hive.ql.parse.O
    import org.apache.hadoop.hive.ql.parse.ParseContext;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;

    /**
    @@ -67,8 +67,8 @@ public final class RewriteQueryUsingAggr
    }


    - private Map<Operator<? extends Serializable>, OpParseContext> opc =
    - new LinkedHashMap<Operator<? extends Serializable>, OpParseContext>();
    + private Map<Operator<? extends OperatorDesc>, OpParseContext> opc =
    + new LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext>();
    private final Hive hiveDb;
    private final ParseContext parseContext;
    //We need the GenericUDAFEvaluator for GenericUDAF function "sum"
    @@ -78,7 +78,7 @@ public final class RewriteQueryUsingAggr
    private final String aggregateFunction;
    private ExprNodeColumnDesc aggrExprNode = null;

    - public Map<Operator<? extends Serializable>, OpParseContext> getOpc() {
    + public Map<Operator<? extends OperatorDesc>, OpParseContext> getOpc() {
    return opc;
    }

    @@ -119,7 +119,7 @@ public final class RewriteQueryUsingAggr
    * @throws SemanticException
    */
    public void invokeRewriteQueryProc(
    - Operator<? extends Serializable> topOp) throws SemanticException{
    + Operator<? extends OperatorDesc> topOp) throws SemanticException{
    Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();

    // replace scan operator containing original table with index table

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcCtx.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcCtx.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcCtx.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcCtx.java Wed Aug 29 17:43:59 2012
    @@ -18,10 +18,9 @@

    package org.apache.hadoop.hive.ql.optimizer.lineage;

    -import java.io.Serializable;
    -
    import org.apache.hadoop.hive.ql.exec.Operator;
    import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * The processor context for the lineage information. This contains the
    @@ -33,41 +32,41 @@ public class ExprProcCtx implements Node
    /**
    * The lineage context that is being populated.
    */
    - private LineageCtx lctx;
    -
    + private final LineageCtx lctx;
    +
    /**
    * The input operator in case the current operator is not a leaf.
    */
    - private Operator<? extends Serializable> inpOp;
    -
    + private final Operator<? extends OperatorDesc> inpOp;
    +
    /**
    * Constructor.
    - *
    + *
    * @param lctx The lineage context that contains the dependencies for the inputs.
    * @param inpOp The input operator to the current operator.
    */
    public ExprProcCtx(LineageCtx lctx,
    - Operator<? extends Serializable> inpOp) {
    + Operator<? extends OperatorDesc> inpOp) {
    this.lctx = lctx;
    this.inpOp = inpOp;
    }
    -
    +
    /**
    * Gets the lineage context.
    - *
    + *
    * @return LineageCtx The lineage context.
    */
    public LineageCtx getLineageCtx() {
    return lctx;
    }
    -
    +
    /**
    * Gets the input operator.
    - *
    - * @return Operator The input operator - this is null in case the current
    + *
    + * @return Operator The input operator - this is null in case the current
    * operator is a leaf.
    */
    - public Operator<? extends Serializable> getInputOperator() {
    + public Operator<? extends OperatorDesc> getInputOperator() {
    return inpOp;
    }
    }

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/ExprProcFactory.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer.lineage;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.LinkedHashMap;
    @@ -48,6 +47,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
    import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Expression processor factory for lineage. Each processor is responsible to
    @@ -173,7 +173,7 @@ public class ExprProcFactory {
    * @throws SemanticException
    */
    public static Dependency getExprDependency(LineageCtx lctx,
    - Operator<? extends Serializable> inpOp, ExprNodeDesc expr)
    + Operator<? extends OperatorDesc> inpOp, ExprNodeDesc expr)
    throws SemanticException {

    // Create the walker, the rules dispatcher and the context.

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/LineageCtx.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/LineageCtx.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/LineageCtx.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/LineageCtx.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer.lineage;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.LinkedHashSet;
    @@ -32,6 +31,7 @@ import org.apache.hadoop.hive.ql.hooks.L
    import org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency;
    import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
    import org.apache.hadoop.hive.ql.parse.ParseContext;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * This class contains the lineage context that is passed
    @@ -53,13 +53,16 @@ public class LineageCtx implements NodeP
    * dependency vector for that tuple. This is used to generate the
    * dependency vectors during the walk of the operator tree.
    */
    - private final Map<Operator<? extends Serializable>, LinkedHashMap<ColumnInfo, Dependency>> depMap;
    + private final Map<Operator<? extends OperatorDesc>,
    + LinkedHashMap<ColumnInfo, Dependency>> depMap;

    /**
    * Constructor.
    */
    public Index() {
    - depMap = new LinkedHashMap<Operator<? extends Serializable>, LinkedHashMap<ColumnInfo, Dependency>>();
    + depMap =
    + new LinkedHashMap<Operator<? extends OperatorDesc>,
    + LinkedHashMap<ColumnInfo, Dependency>>();
    }

    /**
    @@ -69,7 +72,8 @@ public class LineageCtx implements NodeP
    * @return Dependency for that particular operator, columninfo tuple.
    * null if no dependency is found.
    */
    - public Dependency getDependency(Operator<? extends Serializable> op, ColumnInfo col) {
    + public Dependency getDependency(Operator<? extends OperatorDesc> op,
    + ColumnInfo col) {
    Map<ColumnInfo, Dependency> colMap = depMap.get(op);
    if (colMap == null) {
    return null;
    @@ -84,7 +88,7 @@ public class LineageCtx implements NodeP
    * @param col The column info whose dependency is being inserted.
    * @param dep The dependency.
    */
    - public void putDependency(Operator<? extends Serializable> op,
    + public void putDependency(Operator<? extends OperatorDesc> op,
    ColumnInfo col, Dependency dep) {
    LinkedHashMap<ColumnInfo, Dependency> colMap = depMap.get(op);
    if (colMap == null) {
    @@ -102,7 +106,7 @@ public class LineageCtx implements NodeP
    * @param ci The column info of the associated column.
    * @param dep The new dependency.
    */
    - public void mergeDependency(Operator<? extends Serializable> op,
    + public void mergeDependency(Operator<? extends OperatorDesc> op,
    ColumnInfo ci, Dependency dep) {
    Dependency old_dep = getDependency(op, ci);
    if (old_dep == null) {
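
    Index above is a two-level map: the outer key is the operator, the inner
    key the column, and the value its dependency. A toy sketch of the
    lazily-populated nested-map idiom behind putDependency (strings stand in
    for the Hive types):

        import java.util.LinkedHashMap;
        import java.util.Map;

        public class DepMapSketch {
          private final Map<String, LinkedHashMap<String, String>> depMap =
              new LinkedHashMap<String, LinkedHashMap<String, String>>();

          public String getDependency(String op, String col) {
            Map<String, String> colMap = depMap.get(op);
            return colMap == null ? null : colMap.get(col);
          }

          public void putDependency(String op, String col, String dep) {
            LinkedHashMap<String, String> colMap = depMap.get(op);
            if (colMap == null) {       // create the inner map on first use,
              colMap = new LinkedHashMap<String, String>();
              depMap.put(op, colMap);   // as the real putDependency does
            }
            colMap.put(col, dep);
          }

          public static void main(String[] args) {
            DepMapSketch idx = new DepMapSketch();
            idx.putDependency("TS_0", "key", "src.key");
            System.out.println(idx.getDependency("TS_0", "key")); // prints src.key
          }
        }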

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer.lineage;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.Iterator;
    @@ -57,6 +56,7 @@ import org.apache.hadoop.hive.ql.parse.S
    import org.apache.hadoop.hive.ql.plan.AggregationDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.JoinDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;

    /**
    @@ -71,8 +71,8 @@ public class OpProcFactory {
    *
    * @return Operator The parent operator in the current path.
    */
    - protected static Operator<? extends Serializable> getParent(Stack<Node> stack) {
    - return (Operator<? extends Serializable>)Utils.getNthAncestor(stack, 1);
    + protected static Operator<? extends OperatorDesc> getParent(Stack<Node> stack) {
    + return (Operator<? extends OperatorDesc>)Utils.getNthAncestor(stack, 1);
    }

    /**
    @@ -88,8 +88,8 @@ public class OpProcFactory {
    LineageCtx lCtx = (LineageCtx) procCtx;

    // The operators
    - Operator<? extends Serializable> op = (Operator<? extends Serializable>)nd;
    - Operator<? extends Serializable> inpOp = getParent(stack);
    + Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>)nd;
    + Operator<? extends OperatorDesc> inpOp = getParent(stack);

    // Create a single dependency list by concatenating the dependencies of all
    // the cols
    @@ -238,7 +238,7 @@ public class OpProcFactory {
    LineageCtx lCtx = (LineageCtx) procCtx;
    LateralViewJoinOperator op = (LateralViewJoinOperator)nd;
    boolean isUdtfPath = true;
    - Operator<? extends Serializable> inpOp = getParent(stack);
    + Operator<? extends OperatorDesc> inpOp = getParent(stack);
    ArrayList<ColumnInfo> cols = inpOp.getSchema().getSignature();

    if (inpOp instanceof SelectOperator) {
    @@ -317,7 +317,7 @@ public class OpProcFactory {
    LineageCtx lctx = (LineageCtx)procCtx;
    GroupByOperator gop = (GroupByOperator)nd;
    ArrayList<ColumnInfo> col_infos = gop.getSchema().getSignature();
    - Operator<? extends Serializable> inpOp = getParent(stack);
    + Operator<? extends OperatorDesc> inpOp = getParent(stack);
    int cnt = 0;

    for(ExprNodeDesc expr : gop.getConf().getKeys()) {
    @@ -401,11 +401,11 @@ public class OpProcFactory {

    // LineageCtx
    LineageCtx lCtx = (LineageCtx) procCtx;
    - Operator<? extends Serializable> op = (Operator<? extends Serializable>)nd;
    + Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>)nd;

    // Get the row schema of the input operator.
    // The row schema of the parent operator
    - Operator<? extends Serializable> inpOp = getParent(stack);
    + Operator<? extends OperatorDesc> inpOp = getParent(stack);
    RowSchema rs = op.getSchema();
    ArrayList<ColumnInfo> inp_cols = inpOp.getSchema().getSignature();
    int cnt = 0;
    @@ -439,12 +439,12 @@ public class OpProcFactory {
    ReduceSinkOperator rop = (ReduceSinkOperator)nd;

    ArrayList<ColumnInfo> col_infos = rop.getSchema().getSignature();
    - Operator<? extends Serializable> inpOp = getParent(stack);
    + Operator<? extends OperatorDesc> inpOp = getParent(stack);
    int cnt = 0;

    // The keys are included only in case the reduce sink feeds into
    // a group by operator through a chain of forward operators
    - Operator<? extends Serializable> op = rop.getChildOperators().get(0);
    + Operator<? extends OperatorDesc> op = rop.getChildOperators().get(0);
    while (op instanceof ForwardOperator) {
    op = op.getChildOperators().get(0);
    }
    @@ -483,11 +483,11 @@ public class OpProcFactory {

    // LineageCtx
    LineageCtx lCtx = (LineageCtx) procCtx;
    - Operator<? extends Serializable> op = (Operator<? extends Serializable>)nd;
    + Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>)nd;

    // Get the row schema of the input operator.
    // The row schema of the parent operator
    - Operator<? extends Serializable> inpOp = getParent(stack);
    + Operator<? extends OperatorDesc> inpOp = getParent(stack);
    RowSchema rs = op.getSchema();
    ArrayList<ColumnInfo> inp_cols = inpOp.getSchema().getSignature();
    int cnt = 0;
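
    The reduce-sink rule above walks past any chain of pass-through
    ForwardOperators before testing what the sink ultimately feeds into. A toy
    sketch of that skip-the-forwarders traversal (illustrative classes, not
    Hive's):

        import java.util.ArrayList;
        import java.util.List;

        class ToyOp {
          List<ToyOp> children = new ArrayList<ToyOp>();
        }
        class ToyForward extends ToyOp {}   // pass-through node
        class ToyGroupBy extends ToyOp {}

        public class ForwardSkipSketch {
          public static void main(String[] args) {
            ToyOp sink = new ToyOp();
            ToyOp fwd = new ToyForward();
            ToyOp gby = new ToyGroupBy();
            sink.children.add(fwd);
            fwd.children.add(gby);

            // Same shape as the loop in the hunk: follow single children
            // while the current node is a forwarder, then test the end.
            ToyOp op = sink.children.get(0);
            while (op instanceof ToyForward) {
              op = op.children.get(0);
            }
            System.out.println(op instanceof ToyGroupBy); // prints true
          }
        }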

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpProcFactory.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer.pcr;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.Stack;

    @@ -36,6 +35,7 @@ import org.apache.hadoop.hive.ql.optimiz
    import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * PcrOpProcFactory contains processors that process expression tree of filter operators
    @@ -70,7 +70,7 @@ public final class PcrOpProcFactory {
    Node tmp = stack.pop();
    Node tmp2 = stack.pop();
    TableScanOperator top = null;
    - Operator<? extends Serializable> pop = null;
    + Operator<? extends OperatorDesc> pop = null;
    if (tmp2 instanceof TableScanOperator) {
    top = (TableScanOperator) tmp2;
    pop = top;

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpWalkerCtx.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpWalkerCtx.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpWalkerCtx.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrOpWalkerCtx.java Wed Aug 29 17:43:59 2012
    @@ -18,13 +18,13 @@

    package org.apache.hadoop.hive.ql.optimizer.pcr;

    -import java.io.Serializable;
    import java.util.List;

    import org.apache.hadoop.hive.ql.exec.FilterOperator;
    import org.apache.hadoop.hive.ql.exec.Operator;
    import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
    import org.apache.hadoop.hive.ql.parse.ParseContext;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Context class for operator tree walker for partition condition remover.
    @@ -32,15 +32,16 @@ import org.apache.hadoop.hive.ql.parse.P
    public class PcrOpWalkerCtx implements NodeProcessorCtx {

    static public class OpToDeleteInfo {
    - private final Operator<? extends Serializable> parent;
    + private final Operator<? extends OperatorDesc> parent;
    private final FilterOperator operator;

    - public OpToDeleteInfo(Operator<? extends Serializable> parent, FilterOperator operator) {
    + public OpToDeleteInfo(Operator<? extends OperatorDesc> parent,
    + FilterOperator operator) {
    super();
    this.parent = parent;
    this.operator = operator;
    }
    - public Operator<? extends Serializable> getParent() {
    + public Operator<? extends OperatorDesc> getParent() {
    return parent;
    }
    public FilterOperator getOperator() {

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinResolver.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinResolver.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinResolver.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinResolver.java Wed Aug 29 17:43:59 2012
    @@ -46,10 +46,12 @@ import org.apache.hadoop.hive.ql.parse.P
    import org.apache.hadoop.hive.ql.parse.QBJoinTree;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.ConditionalResolverCommonJoin;
    +import
    + org.apache.hadoop.hive.ql.plan.ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx;
    import org.apache.hadoop.hive.ql.plan.ConditionalWork;
    import org.apache.hadoop.hive.ql.plan.JoinDesc;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    -import org.apache.hadoop.hive.ql.plan.ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;


    public class CommonJoinResolver implements PhysicalPlanResolver {
    @@ -112,14 +114,14 @@ public class CommonJoinResolver implemen
    JoinDesc joinDesc = joinOp.getConf();
    Byte[] order = joinDesc.getTagOrder();
    int numAliases = order.length;
    -
    +
    long aliasTotalKnownInputSize = 0;
    HashMap<String, Long> aliasToSize = new HashMap<String, Long>();
    try {
    // go over all the input paths, and calculate a known total size, known
    // size for each input alias.
    Utilities.getInputSummary(context, currWork, null).getLength();
    -
    +
    // set alias to size mapping; this can be used to determine, if one table
    // is chosen as the big table, the total size of the remaining tables,
    // which are going to be the small tables.
    @@ -140,9 +142,9 @@ public class CommonJoinResolver implemen
    }
    }
    }
    -
    +
    HashSet<Integer> bigTableCandidates = MapJoinProcessor.getBigTableCandidates(joinDesc.getConds());
    -
    +
    // no table could be the big table; there is no need to convert
    if (bigTableCandidates == null) {
    return null;
    @@ -160,7 +162,7 @@ public class CommonJoinResolver implemen
    if (!bigTableCandidates.contains(i)) {
    continue;
    }
    -
    +
    // create map join task and set big table as i
    // deep copy a new mapred work from xml
    InputStream in = new ByteArrayInputStream(xml.getBytes("UTF-8"));
    @@ -182,7 +184,7 @@ public class CommonJoinResolver implemen
    continue;
    }
    }
    -
    +
    // add into conditional task
    listWorks.add(newWork);
    listTasks.add(newTask);
    @@ -295,7 +297,7 @@ public class CommonJoinResolver implemen
    if (task.getWork() == null) {
    return null;
    }
    - Operator<? extends Serializable> reducerOp = task.getWork().getReducer();
    + Operator<? extends OperatorDesc> reducerOp = task.getWork().getReducer();
    if (reducerOp instanceof JoinOperator) {
    return (JoinOperator) reducerOp;
    } else {

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java Wed Aug 29 17:43:59 2012
    @@ -52,6 +52,7 @@ import org.apache.hadoop.hive.ql.plan.Jo
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.TableDesc;
    @@ -250,14 +251,14 @@ public final class GenMRSkewJoinProcesso
    throw new SemanticException(e);
    }

    - Operator<? extends Serializable>[] parentOps = new TableScanOperator[tags.length];
    + Operator<? extends OperatorDesc>[] parentOps = new TableScanOperator[tags.length];
    for (int k = 0; k < tags.length; k++) {
    - Operator<? extends Serializable> ts = OperatorFactory.get(
    + Operator<? extends OperatorDesc> ts = OperatorFactory.get(
    TableScanDesc.class, (RowSchema) null);
    ((TableScanOperator)ts).setTableDesc(tableDescList.get((byte)k));
    parentOps[k] = ts;
    }
    - Operator<? extends Serializable> tblScan_op = parentOps[i];
    + Operator<? extends OperatorDesc> tblScan_op = parentOps[i];

    ArrayList<String> aliases = new ArrayList<String>();
    String alias = src.toString();
    @@ -275,7 +276,7 @@ public final class GenMRSkewJoinProcesso
    newPlan.getPathToPartitionInfo().put(bigKeyDirPath, part);
    newPlan.getAliasToPartnInfo().put(alias, part);

    - Operator<? extends Serializable> reducer = clonePlan.getReducer();
    + Operator<? extends OperatorDesc> reducer = clonePlan.getReducer();
    assert reducer instanceof JoinOperator;
    JoinOperator cloneJoinOp = (JoinOperator) reducer;

    @@ -289,7 +290,7 @@ public final class GenMRSkewJoinProcesso
    mapJoinDescriptor.setNullSafes(joinDescriptor.getNullSafes());

    MapredLocalWork localPlan = new MapredLocalWork(
    - new LinkedHashMap<String, Operator<? extends Serializable>>(),
    + new LinkedHashMap<String, Operator<? extends OperatorDesc>>(),
    new LinkedHashMap<String, FetchWork>());
    Map<Byte, String> smallTblDirs = smallKeysDirMap.get(src);

    @@ -298,7 +299,7 @@ public final class GenMRSkewJoinProcesso
    continue;
    }
    Byte small_alias = tags[j];
    - Operator<? extends Serializable> tblScan_op2 = parentOps[j];
    + Operator<? extends OperatorDesc> tblScan_op2 = parentOps[j];
    localPlan.getAliasToWork().put(small_alias.toString(), tblScan_op2);
    Path tblDir = new Path(smallTblDirs.get(small_alias));
    localPlan.getAliasToFetchWork().put(small_alias.toString(),
    @@ -312,9 +313,9 @@ public final class GenMRSkewJoinProcesso
    .getAndMakeChild(mapJoinDescriptor, (RowSchema) null, parentOps);
    // change the children of the original join operator to point to the map
    // join operator
    - List<Operator<? extends Serializable>> childOps = cloneJoinOp
    + List<Operator<? extends OperatorDesc>> childOps = cloneJoinOp
    .getChildOperators();
    - for (Operator<? extends Serializable> childOp : childOps) {
    + for (Operator<? extends OperatorDesc> childOp : childOps) {
    childOp.replaceParent(cloneJoinOp, mapJoinOp);
    }
    mapJoinOp.setChildOperators(childOps);
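
    A note on "new TableScanOperator[tags.length]" in the hunk above: Java
    forbids creating an array of a bounded-wildcard type such as
    Operator<? extends OperatorDesc>[], but an array of a non-generic subclass
    is legal and assignable through array covariance. A toy sketch:

        interface ToyDesc {}
        class ToyScanDesc implements ToyDesc {}
        class ToyOperator<T extends ToyDesc> {}
        class ToyScanOperator extends ToyOperator<ToyScanDesc> {}

        public class ArraySketch {
          public static void main(String[] args) {
            // new ToyOperator<? extends ToyDesc>[2]  -- would not compile
            ToyOperator<? extends ToyDesc>[] ops = new ToyScanOperator[2];
            ops[0] = new ToyScanOperator();
            // Storing a different ToyOperator subtype here would compile but
            // throw ArrayStoreException at run time (array covariance).
            System.out.println(ops.length); // prints 2
          }
        }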

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LocalMapJoinProcFactory.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.optimizer.physical;

    -import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    @@ -47,6 +46,7 @@ import org.apache.hadoop.hive.ql.optimiz
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.HashTableDummyDesc;
    import org.apache.hadoop.hive.ql.plan.HashTableSinkDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    @@ -136,16 +136,18 @@ public final class LocalMapJoinProcFacto
    int bigTableAlias = (int) order[bigTable];

    // the parent ops for hashTableSinkOp
    - List<Operator<? extends Serializable>> smallTablesParentOp = new ArrayList<Operator<? extends Serializable>>();
    - List<Operator<? extends Serializable>> dummyOperators = new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> smallTablesParentOp =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    + List<Operator<? extends OperatorDesc>> dummyOperators =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    // get all parents
    - List<Operator<? extends Serializable>> parentsOp = mapJoinOp.getParentOperators();
    + List<Operator<? extends OperatorDesc>> parentsOp = mapJoinOp.getParentOperators();
    for (int i = 0; i < parentsOp.size(); i++) {
    if (i == bigTableAlias) {
    smallTablesParentOp.add(null);
    continue;
    }
    - Operator<? extends Serializable> parent = parentsOp.get(i);
    + Operator<? extends OperatorDesc> parent = parentsOp.get(i);
    // let hashtable Op be the child of this parent
    parent.replaceChild(mapJoinOp, hashTableSinkOp);
    // keep the parent id correct
    @@ -171,22 +173,23 @@ public final class LocalMapJoinProcFacto
    dummyOp.getConf().setTbl(tbl);
    // let the dummy op be the parent of mapjoin op
    mapJoinOp.replaceParent(parent, dummyOp);
    - List<Operator<? extends Serializable>> dummyChildren = new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> dummyChildren =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    dummyChildren.add(mapJoinOp);
    dummyOp.setChildOperators(dummyChildren);
    // add this dummy op to the dummy operator list
    dummyOperators.add(dummyOp);
    }
    hashTableSinkOp.setParentOperators(smallTablesParentOp);
    - for (Operator<? extends Serializable> op : dummyOperators) {
    + for (Operator<? extends OperatorDesc> op : dummyOperators) {
    context.addDummyParentOp(op);
    }
    return null;
    }

    - public void hasGroupBy(Operator<? extends Serializable> mapJoinOp,
    + public void hasGroupBy(Operator<? extends OperatorDesc> mapJoinOp,
    LocalMapJoinProcCtx localMapJoinProcCtx) throws Exception {
    - List<Operator<? extends Serializable>> childOps = mapJoinOp.getChildOperators();
    + List<Operator<? extends OperatorDesc>> childOps = mapJoinOp.getChildOperators();
    Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
    opRules.put(new RuleRegExp("R1", "GBY%"), LocalMapJoinProcFactory.getGroupByProc());
    // The dispatcher fires the processor corresponding to the closest

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java Wed Aug 29 17:43:59 2012
    @@ -47,12 +47,14 @@ import org.apache.hadoop.hive.ql.parse.P
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.ConditionalResolver;
    import org.apache.hadoop.hive.ql.plan.ConditionalResolverCommonJoin;
    +import
    + org.apache.hadoop.hive.ql.plan.ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx;
    import org.apache.hadoop.hive.ql.plan.ConditionalResolverSkewJoin;
    +import org.apache.hadoop.hive.ql.plan.ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx;
    import org.apache.hadoop.hive.ql.plan.ConditionalWork;
    import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    -import org.apache.hadoop.hive.ql.plan.ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx;
    -import org.apache.hadoop.hive.ql.plan.ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * An implementation of PhysicalPlanResolver. It iterates over each MapRedTask to see whether the task
    @@ -122,7 +124,8 @@ public class MapJoinResolver implements
    // replace the map join operator with a local_map_join operator in the operator tree
    // and return all the dummy parent
    LocalMapJoinProcCtx localMapJoinProcCtx= adjustLocalTask(localTask);
    - List<Operator<? extends Serializable>> dummyOps = localMapJoinProcCtx.getDummyParentOp();
    + List<Operator<? extends OperatorDesc>> dummyOps =
    + localMapJoinProcCtx.getDummyParentOp();

    // create new local work and setup the dummy ops
    MapredLocalWork newLocalWork = new MapredLocalWork();
    @@ -264,13 +267,13 @@ public class MapJoinResolver implements
    public static class LocalMapJoinProcCtx implements NodeProcessorCtx {
    private Task<? extends Serializable> currentTask;
    private ParseContext parseCtx;
    - private List<Operator<? extends Serializable>> dummyParentOp = null;
    + private List<Operator<? extends OperatorDesc>> dummyParentOp = null;
    private boolean isFollowedByGroupBy;

    public LocalMapJoinProcCtx(Task<? extends Serializable> task, ParseContext parseCtx) {
    currentTask = task;
    this.parseCtx = parseCtx;
    - dummyParentOp = new ArrayList<Operator<? extends Serializable>>();
    + dummyParentOp = new ArrayList<Operator<? extends OperatorDesc>>();
    isFollowedByGroupBy = false;
    }

    @@ -297,15 +300,15 @@ public class MapJoinResolver implements
    this.parseCtx = parseCtx;
    }

    - public void setDummyParentOp(List<Operator<? extends Serializable>> op) {
    + public void setDummyParentOp(List<Operator<? extends OperatorDesc>> op) {
    this.dummyParentOp = op;
    }

    - public List<Operator<? extends Serializable>> getDummyParentOp() {
    + public List<Operator<? extends OperatorDesc>> getDummyParentOp() {
    return this.dummyParentOp;
    }

    - public void addDummyParentOp(Operator<? extends Serializable> op) {
    + public void addDummyParentOp(Operator<? extends OperatorDesc> op) {
    this.dummyParentOp.add(op);
    }
    }

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java Wed Aug 29 17:43:59 2012
    @@ -21,11 +21,11 @@ import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.HashSet;
    +import java.util.Iterator;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Stack;
    -import java.util.Iterator;

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    @@ -35,6 +35,7 @@ import org.apache.hadoop.hive.ql.exec.Op
    import org.apache.hadoop.hive.ql.exec.TableScanOperator;
    import org.apache.hadoop.hive.ql.exec.Task;
    import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
    +import org.apache.hadoop.hive.ql.io.OneNullRowInputFormat;
    import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
    import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
    import org.apache.hadoop.hive.ql.lib.Dispatcher;
    @@ -48,8 +49,8 @@ import org.apache.hadoop.hive.ql.lib.Rul
    import org.apache.hadoop.hive.ql.parse.ParseContext;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    -import org.apache.hadoop.hive.ql.io.OneNullRowInputFormat;
    import org.apache.hadoop.hive.serde2.NullStructSerDe;

    /**
    @@ -179,7 +180,7 @@ public class MetadataOnlyOptimizer imple
    */
    class MetadataOnlyTaskDispatcher implements Dispatcher {

    - private PhysicalContext physicalContext;
    + private final PhysicalContext physicalContext;

    public MetadataOnlyTaskDispatcher(PhysicalContext context) {
    super();
    @@ -189,7 +190,8 @@ public class MetadataOnlyOptimizer imple
    private String getAliasForTableScanOperator(MapredWork work,
    TableScanOperator tso) {

    - for (Map.Entry<String, Operator<? extends Serializable>> entry : work.getAliasToWork().entrySet()) {
    + for (Map.Entry<String, Operator<? extends OperatorDesc>> entry :
    + work.getAliasToWork().entrySet()) {
    if (entry.getValue() == tso) {
    return entry.getKey();
    }
    @@ -250,7 +252,7 @@ public class MetadataOnlyOptimizer imple
    throws SemanticException {
    Task<? extends Serializable> task = (Task<? extends Serializable>) nd;

    - Collection<Operator<? extends Serializable>> topOperators
    + Collection<Operator<? extends OperatorDesc>> topOperators
    = task.getTopOperators();
    if (topOperators.size() == 0) {
    return null;
    @@ -273,7 +275,7 @@ public class MetadataOnlyOptimizer imple
    // Create a list of topOp nodes
    ArrayList<Node> topNodes = new ArrayList<Node>();
    // Get the top Nodes for this map-reduce task
    - for (Operator<? extends Serializable>
    + for (Operator<? extends OperatorDesc>
    workOperator : topOperators) {
    if (parseContext.getTopOps().values().contains(workOperator)) {
    topNodes.add(workOperator);

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java Wed Aug 29 17:43:59 2012
    @@ -17,7 +17,6 @@
    */
    package org.apache.hadoop.hive.ql.optimizer.unionproc;

    -import java.io.Serializable;
    import java.util.List;
    import java.util.Stack;

    @@ -28,6 +27,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
    import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
    import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext.UnionParseContext;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Operator factory for union processing.
    @@ -42,9 +42,9 @@ public final class UnionProcFactory {
    int pos = 0;
    int size = stack.size();
    assert size >= 2 && stack.get(size - 1) == union;
    - Operator<? extends Serializable> parent = (Operator<? extends Serializable>) stack
    - .get(size - 2);
    - List<Operator<? extends Serializable>> parUnion = union
    + Operator<? extends OperatorDesc> parent =
    + (Operator<? extends OperatorDesc>) stack.get(size - 2);
    + List<Operator<? extends OperatorDesc>> parUnion = union
    .getParentOperators();
    pos = parUnion.indexOf(parent);
    assert pos < parUnion.size();
    @@ -145,8 +145,8 @@ public final class UnionProcFactory {
    int start = stack.size() - 2;
    UnionOperator parentUnionOperator = null;
    while (start >= 0) {
    - Operator<? extends Serializable> parent =
    - (Operator<? extends Serializable>) stack.get(start);
    + Operator<? extends OperatorDesc> parent =
    + (Operator<? extends OperatorDesc>) stack.get(start);
    if (parent instanceof UnionOperator) {
    parentUnionOperator = (UnionOperator) parent;
    break;
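
    The second hunk above scans the walker's stack from just above the current
    node toward the root to find the nearest enclosing UnionOperator. A
    self-contained sketch of that nearest-ancestor-of-a-type scan (toy node
    classes, not Hive's):

        import java.util.Stack;

        class ToyNode { final String name; ToyNode(String n) { name = n; } }
        class ToyUnion extends ToyNode { ToyUnion(String n) { super(n); } }

        public class AncestorSketch {
          public static void main(String[] args) {
            // Stack as built by the tree walker: root first, current node last.
            Stack<ToyNode> stack = new Stack<ToyNode>();
            stack.push(new ToyNode("scan"));
            stack.push(new ToyUnion("union_1"));
            stack.push(new ToyNode("select"));
            stack.push(new ToyNode("fileSink")); // current node

            // Start just above the current node and walk toward the root,
            // as the hunk does with "int start = stack.size() - 2".
            ToyUnion found = null;
            for (int start = stack.size() - 2; start >= 0; start--) {
              ToyNode parent = stack.get(start);
              if (parent instanceof ToyUnion) {
                found = (ToyUnion) parent;
                break;
              }
            }
            System.out.println(found == null ? "none" : found.name); // prints union_1
          }
        }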

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Wed Aug 29 17:43:59 2012
    @@ -47,6 +47,7 @@ import org.apache.hadoop.hive.ql.plan.Fi
    import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
    import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * Parse Context: The current parse context. This is passed to the optimizer
    @@ -65,9 +66,9 @@ public class ParseContext {
    private HashMap<TableScanOperator, PrunedPartitionList> opToPartList;
    private HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
    private Map<TableScanOperator, ExprNodeDesc> opToSkewedPruner;
    - private HashMap<String, Operator<? extends Serializable>> topOps;
    - private HashMap<String, Operator<? extends Serializable>> topSelOps;
    - private LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx;
    + private HashMap<String, Operator<? extends OperatorDesc>> topOps;
    + private HashMap<String, Operator<? extends OperatorDesc>> topSelOps;
    + private LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx;
    private Map<JoinOperator, QBJoinTree> joinContext;
    private Map<MapJoinOperator, QBJoinTree> mapJoinContext;
    private HashMap<TableScanOperator, Table> topToTable;
    @@ -154,9 +155,9 @@ public class ParseContext {
    ASTNode ast,
    HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner,
    HashMap<TableScanOperator, PrunedPartitionList> opToPartList,
    - HashMap<String, Operator<? extends Serializable>> topOps,
    - HashMap<String, Operator<? extends Serializable>> topSelOps,
    - LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx,
    + HashMap<String, Operator<? extends OperatorDesc>> topOps,
    + HashMap<String, Operator<? extends OperatorDesc>> topSelOps,
    + LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx,
    Map<JoinOperator, QBJoinTree> joinContext,
    HashMap<TableScanOperator, Table> topToTable,
    List<LoadTableDesc> loadTableWork, List<LoadFileDesc> loadFileWork,
    @@ -299,7 +300,7 @@ public class ParseContext {
    /**
    * @return the topOps
    */
    - public HashMap<String, Operator<? extends Serializable>> getTopOps() {
    + public HashMap<String, Operator<? extends OperatorDesc>> getTopOps() {
    return topOps;
    }

    @@ -307,14 +308,14 @@ public class ParseContext {
    * @param topOps
    * the topOps to set
    */
    - public void setTopOps(HashMap<String, Operator<? extends Serializable>> topOps) {
    + public void setTopOps(HashMap<String, Operator<? extends OperatorDesc>> topOps) {
    this.topOps = topOps;
    }

    /**
    * @return the topSelOps
    */
    - public HashMap<String, Operator<? extends Serializable>> getTopSelOps() {
    + public HashMap<String, Operator<? extends OperatorDesc>> getTopSelOps() {
    return topSelOps;
    }

    @@ -323,14 +324,14 @@ public class ParseContext {
    * the topSelOps to set
    */
    public void setTopSelOps(
    - HashMap<String, Operator<? extends Serializable>> topSelOps) {
    + HashMap<String, Operator<? extends OperatorDesc>> topSelOps) {
    this.topSelOps = topSelOps;
    }

    /**
    * @return the opParseCtx
    */
    - public LinkedHashMap<Operator<? extends Serializable>, OpParseContext> getOpParseCtx() {
    + public LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> getOpParseCtx() {
    return opParseCtx;
    }

    @@ -339,7 +340,7 @@ public class ParseContext {
    * the opParseCtx to set
    */
    public void setOpParseCtx(
    - LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx) {
    + LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx) {
    this.opParseCtx = opParseCtx;
    }
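
    A note on the new map signatures above: because the wildcard is part of
    the map's value type, heterogeneous operators can still be stored and read
    back under the common bound; wildcard capture only restricts writes when
    the map reference as a whole is wildcarded. A toy sketch:

        import java.util.HashMap;

        interface ToyDesc {}
        class ToyScanDesc implements ToyDesc {}
        class ToyOperator<T extends ToyDesc> {}

        public class TopOpsSketch {
          public static void main(String[] args) {
            HashMap<String, ToyOperator<? extends ToyDesc>> topOps =
                new HashMap<String, ToyOperator<? extends ToyDesc>>();
            // put() accepts any concrete operator: its type is a subtype of
            // the declared value type ToyOperator<? extends ToyDesc>.
            topOps.put("src", new ToyOperator<ToyScanDesc>());
            ToyOperator<? extends ToyDesc> op = topOps.get("src");
            System.out.println(op != null); // prints true
          }
        }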


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java Wed Aug 29 17:43:59 2012
    @@ -25,6 +25,11 @@ import java.util.List;
    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.ql.ErrorMsg;

    +import java.util.Iterator;
    +import java.util.Map;
    +
    +import org.apache.hadoop.hive.ql.metadata.Table;
    +

    /**
    * Library of utility functions used in the parse code.
    @@ -95,5 +100,4 @@ public final class ParseUtils {
    }
    return colNames;
    }
    -
    }

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java Wed Aug 29 17:43:59 2012
    @@ -19,7 +19,6 @@
    package org.apache.hadoop.hive.ql.parse;

    import java.io.PrintStream;
    -import java.io.Serializable;
    import java.util.HashMap;
    import java.util.Stack;

    @@ -27,6 +26,7 @@ import org.apache.hadoop.hive.ql.exec.Op
    import org.apache.hadoop.hive.ql.lib.Node;
    import org.apache.hadoop.hive.ql.lib.NodeProcessor;
    import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;

    /**
    * PrintOpTreeProcessor.
    @@ -35,7 +35,8 @@ import org.apache.hadoop.hive.ql.lib.Nod
    public class PrintOpTreeProcessor implements NodeProcessor {

    private final PrintStream out;
    - private final HashMap<Operator<? extends Serializable>, Integer> opMap = new HashMap<Operator<? extends Serializable>, Integer>();
    + private final HashMap<Operator<? extends OperatorDesc>, Integer> opMap =
    + new HashMap<Operator<? extends OperatorDesc>, Integer>();
    private Integer curNum = 0;

    public PrintOpTreeProcessor() {
    @@ -46,11 +47,11 @@ public class PrintOpTreeProcessor implem
    out = o;
    }

    - private String getParents(Operator<? extends Serializable> op) {
    + private String getParents(Operator<? extends OperatorDesc> op) {
    StringBuilder ret = new StringBuilder("[");
    boolean first = true;
    if (op.getParentOperators() != null) {
    - for (Operator<? extends Serializable> parent : op.getParentOperators()) {
    + for (Operator<? extends OperatorDesc> parent : op.getParentOperators()) {
    if (!first) {
    ret.append(",");
    }
    @@ -62,11 +63,11 @@ public class PrintOpTreeProcessor implem
    return ret.toString();
    }

    - private String getChildren(Operator<? extends Serializable> op) {
    + private String getChildren(Operator<? extends OperatorDesc> op) {
    StringBuilder ret = new StringBuilder("[");
    boolean first = true;
    if (op.getChildOperators() != null) {
    - for (Operator<? extends Serializable> child : op.getChildOperators()) {
    + for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
    if (!first) {
    ret.append(",");
    }
    @@ -80,7 +81,7 @@ public class PrintOpTreeProcessor implem

    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
    Object... nodeOutputs) throws SemanticException {
    - Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
    + Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>) nd;
    if (opMap.get(op) == null) {
    opMap.put(op, curNum++);
    }
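
process() above numbers each operator the first time it is seen (opMap plus curNum) and getParents()/getChildren() print the links by those numbers. A standalone sketch of that number-on-first-visit idiom (illustrative names, not Hive code):

    import java.util.HashMap;
    import java.util.Map;

    class NodeNumberer {
        private final Map<Object, Integer> ids = new HashMap<Object, Integer>();
        private int next = 0;

        // Assign a stable id on first sight; reuse it on later visits.
        int idOf(Object node) {
            Integer id = ids.get(node);
            if (id == null) {
                id = next++;
                ids.put(node, id);
            }
            return id;
        }
    }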

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Wed Aug 29 17:43:59 2012
    @@ -219,7 +219,7 @@ public class QB {
    null != qbm.getAliasToTable() &&
    qbm.getAliasToTable().size() > 0) {
    Table tbl = getMetaData().getTableForAlias(alias);
    - skewedColNames = tbl.getSkewedColName();
    + skewedColNames = tbl.getSkewedColNames();
    }
    return skewedColNames;
    }

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Aug 29 17:43:59 2012
    @@ -142,6 +142,7 @@ import org.apache.hadoop.hive.ql.plan.Lo
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    import org.apache.hadoop.hive.ql.plan.MoveWork;
    +import org.apache.hadoop.hive.ql.plan.OperatorDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
    import org.apache.hadoop.hive.ql.plan.ScriptDesc;
    @@ -178,9 +179,9 @@ import org.apache.hadoop.mapred.InputFor
    public class SemanticAnalyzer extends BaseSemanticAnalyzer {
    private HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner;
    private HashMap<TableScanOperator, PrunedPartitionList> opToPartList;
    - private HashMap<String, Operator<? extends Serializable>> topOps;
    - private HashMap<String, Operator<? extends Serializable>> topSelOps;
    - private LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx;
    + private HashMap<String, Operator<? extends OperatorDesc>> topOps;
    + private HashMap<String, Operator<? extends OperatorDesc>> topSelOps;
    + private LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx;
    private List<LoadTableDesc> loadTableWork;
    private List<LoadFileDesc> loadFileWork;
    private Map<JoinOperator, QBJoinTree> joinContext;
    @@ -225,11 +226,11 @@ public class SemanticAnalyzer extends Ba
    opToPartList = new HashMap<TableScanOperator, PrunedPartitionList>();
    opToSamplePruner = new HashMap<TableScanOperator, sampleDesc>();
    nameToSplitSample = new HashMap<String, SplitSample>();
    - topOps = new HashMap<String, Operator<? extends Serializable>>();
    - topSelOps = new HashMap<String, Operator<? extends Serializable>>();
    + topOps = new HashMap<String, Operator<? extends OperatorDesc>>();
    + topSelOps = new HashMap<String, Operator<? extends OperatorDesc>>();
    loadTableWork = new ArrayList<LoadTableDesc>();
    loadFileWork = new ArrayList<LoadFileDesc>();
    - opParseCtx = new LinkedHashMap<Operator<? extends Serializable>, OpParseContext>();
    + opParseCtx = new LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext>();
    joinContext = new HashMap<JoinOperator, QBJoinTree>();
    topToTable = new HashMap<TableScanOperator, Table>();
    destTableId = 1;
    @@ -1467,7 +1468,7 @@ public class SemanticAnalyzer extends Ba
    }

    @SuppressWarnings("nls")
    - public <T extends Serializable> Operator<T> putOpInsertMap(Operator<T> op,
    + public <T extends OperatorDesc> Operator<T> putOpInsertMap(Operator<T> op,
    RowResolver rr) {
    OpParseContext ctx = new OpParseContext(rr);
    opParseCtx.put(op, ctx);
    @@ -6403,12 +6404,12 @@ public class SemanticAnalyzer extends Ba
    if ((leftOp instanceof UnionOperator) || (rightOp instanceof UnionOperator)) {
    if (leftOp instanceof UnionOperator) {
    // make left a child of right
    - List<Operator<? extends Serializable>> child =
    - new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> child =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    child.add(leftOp);
    rightOp.setChildOperators(child);

    - List<Operator<? extends Serializable>> parent = leftOp
    + List<Operator<? extends OperatorDesc>> parent = leftOp
    .getParentOperators();
    parent.add(rightOp);

    @@ -6417,12 +6418,12 @@ public class SemanticAnalyzer extends Ba
    return putOpInsertMap(leftOp, unionoutRR);
    } else {
    // make right a child of left
    - List<Operator<? extends Serializable>> child =
    - new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> child =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    child.add(rightOp);
    leftOp.setChildOperators(child);

    - List<Operator<? extends Serializable>> parent = rightOp
    + List<Operator<? extends OperatorDesc>> parent = rightOp
    .getParentOperators();
    parent.add(leftOp);
    UnionDesc uDesc = ((UnionOperator) rightOp).getConf();
    @@ -6433,22 +6434,22 @@ public class SemanticAnalyzer extends Ba
    }

    // Create a new union operator
    - Operator<? extends Serializable> unionforward = OperatorFactory
    + Operator<? extends OperatorDesc> unionforward = OperatorFactory
    .getAndMakeChild(new UnionDesc(), new RowSchema(unionoutRR
    .getColumnInfos()));

    // set union operator as child of each of leftOp and rightOp
    - List<Operator<? extends Serializable>> child =
    - new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> child =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    child.add(unionforward);
    rightOp.setChildOperators(child);

    - child = new ArrayList<Operator<? extends Serializable>>();
    + child = new ArrayList<Operator<? extends OperatorDesc>>();
    child.add(unionforward);
    leftOp.setChildOperators(child);

    - List<Operator<? extends Serializable>> parent =
    - new ArrayList<Operator<? extends Serializable>>();
    + List<Operator<? extends OperatorDesc>> parent =
    + new ArrayList<Operator<? extends OperatorDesc>>();
    parent.add(leftOp);
    parent.add(rightOp);
    unionforward.setParentOperators(parent);
    @@ -6554,8 +6555,8 @@ public class SemanticAnalyzer extends Ba
    RowResolver rwsch;

    // is the table already present
    - Operator<? extends Serializable> top = topOps.get(alias_id);
    - Operator<? extends Serializable> dummySel = topSelOps.get(alias_id);
    + Operator<? extends OperatorDesc> top = topOps.get(alias_id);
    + Operator<? extends OperatorDesc> dummySel = topSelOps.get(alias_id);
    if (dummySel != null) {
    top = dummySel;
    }
    @@ -6607,7 +6608,7 @@ public class SemanticAnalyzer extends Ba
    setupStats(tsDesc, qb.getParseInfo(), tab, alias, rwsch);

    top = putOpInsertMap(OperatorFactory.get(tsDesc,
    - new RowSchema(rwsch.getColumnInfos())), rwsch);
    + new RowSchema(rwsch.getColumnInfos())), rwsch);

    // Add this to the list of top operators - we always start from a table
    // scan
    @@ -6621,7 +6622,7 @@ public class SemanticAnalyzer extends Ba
    }

    // check if this table is sampled and needs more than input pruning
    - Operator<? extends Serializable> tableOp = top;
    + Operator<? extends OperatorDesc> tableOp = top;
    TableSample ts = qb.getParseInfo().getTabSample(alias);
    if (ts != null) {
    int num = ts.getNumerator();
    @@ -7129,10 +7130,10 @@ public class SemanticAnalyzer extends Ba
    ParseContext tempParseContext = getParseContext();
    GenMRProcContext procCtx = new GenMRProcContext(
    conf,
    - new HashMap<Operator<? extends Serializable>, Task<? extends Serializable>>(),
    - new ArrayList<Operator<? extends Serializable>>(), tempParseContext,
    + new HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>>(),
    + new ArrayList<Operator<? extends OperatorDesc>>(), tempParseContext,
    mvTask, rootTasks,
    - new LinkedHashMap<Operator<? extends Serializable>, GenMapRedCtx>(),
    + new LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx>(),
    inputs, outputs);

    // create a walker which walks the tree in a DFS manner while maintaining
    @@ -7275,15 +7276,15 @@ public class SemanticAnalyzer extends Ba
    // loop over all the tasks recursviely
    private void generateCountersTask(Task<? extends Serializable> task) {
    if (task instanceof ExecDriver) {
    - HashMap<String, Operator<? extends Serializable>> opMap = ((MapredWork) task
    + HashMap<String, Operator<? extends OperatorDesc>> opMap = ((MapredWork) task
    .getWork()).getAliasToWork();
    if (!opMap.isEmpty()) {
    - for (Operator<? extends Serializable> op : opMap.values()) {
    + for (Operator<? extends OperatorDesc> op : opMap.values()) {
    generateCountersOperator(op);
    }
    }

    - Operator<? extends Serializable> reducer = ((MapredWork) task.getWork())
    + Operator<? extends OperatorDesc> reducer = ((MapredWork) task.getWork())
    .getReducer();
    if (reducer != null) {
    LOG.info("Generating counters for operator " + reducer);
    @@ -7309,14 +7310,14 @@ public class SemanticAnalyzer extends Ba
    }
    }

    - private void generateCountersOperator(Operator<? extends Serializable> op) {
    + private void generateCountersOperator(Operator<? extends OperatorDesc> op) {
    op.assignCounterNameToEnum();

    if (op.getChildOperators() == null) {
    return;
    }

    - for (Operator<? extends Serializable> child : op.getChildOperators()) {
    + for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
    generateCountersOperator(child);
    }
    }
    @@ -7325,10 +7326,10 @@ public class SemanticAnalyzer extends Ba
    private void breakTaskTree(Task<? extends Serializable> task) {

    if (task instanceof ExecDriver) {
    - HashMap<String, Operator<? extends Serializable>> opMap = ((MapredWork) task
    + HashMap<String, Operator<? extends OperatorDesc>> opMap = ((MapredWork) task
    .getWork()).getAliasToWork();
    if (!opMap.isEmpty()) {
    - for (Operator<? extends Serializable> op : opMap.values()) {
    + for (Operator<? extends OperatorDesc> op : opMap.values()) {
    breakOperatorTree(op);
    }
    }
    @@ -7350,7 +7351,7 @@ public class SemanticAnalyzer extends Ba
    }

    // loop over all the operators recursviely
    - private void breakOperatorTree(Operator<? extends Serializable> topOp) {
    + private void breakOperatorTree(Operator<? extends OperatorDesc> topOp) {
    if (topOp instanceof ReduceSinkOperator) {
    topOp.setChildOperators(null);
    }
    @@ -7359,7 +7360,7 @@ public class SemanticAnalyzer extends Ba
    return;
    }

    - for (Operator<? extends Serializable> op : topOp.getChildOperators()) {
    + for (Operator<? extends OperatorDesc> op : topOp.getChildOperators()) {
    breakOperatorTree(op);
    }
    }
    @@ -7370,10 +7371,10 @@ public class SemanticAnalyzer extends Ba
    if (task instanceof ExecDriver) {
    MapredWork work = (MapredWork) task.getWork();
    work.deriveExplainAttributes();
    - HashMap<String, Operator<? extends Serializable>> opMap = work
    + HashMap<String, Operator<? extends OperatorDesc>> opMap = work
    .getAliasToWork();
    if (!opMap.isEmpty()) {
    - for (Operator<? extends Serializable> op : opMap.values()) {
    + for (Operator<? extends OperatorDesc> op : opMap.values()) {
    GenMapRedUtils.setKeyAndValueDesc(work, op);
    }
    }
    @@ -8334,7 +8335,7 @@ public class SemanticAnalyzer extends Ba

    /**
    * This code is commented out pending further testing/development
    - * for (Task<? extends Serializable> t: rootTasks)
    + * for (Task<? extends SerializableCloneable> t: rootTasks)
    * t.localizeMRTmpFiles(ctx);
    */
    }
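
The union handling above rewires the operator DAG: when neither branch is already a union, a fresh union operator becomes the sole child of both branches and records both as its parents. A self-contained sketch of that wiring (Node is an illustrative stand-in for Operator):

    import java.util.ArrayList;
    import java.util.List;

    class Node {
        List<Node> parents  = new ArrayList<Node>();
        List<Node> children = new ArrayList<Node>();
    }

    class UnionWiring {
        // Mirror the hunk above: 'union' becomes the only child of left and
        // right, and both become its parents. Each parent gets its own child
        // list, just as the original allocates a fresh ArrayList per setter.
        static Node joinUnder(Node left, Node right, Node union) {
            List<Node> child = new ArrayList<Node>();
            child.add(union);
            right.children = child;

            child = new ArrayList<Node>();
            child.add(union);
            left.children = child;

            List<Node> parent = new ArrayList<Node>();
            parent.add(left);
            parent.add(right);
            union.parents = parent;
            return union;
        }
    }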

    Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java?rev=1378659&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java (added)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AbstractOperatorDesc.java Wed Aug 29 17:43:59 2012
    @@ -0,0 +1,26 @@
    +/**
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements. See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership. The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License. You may obtain a copy of the License at
    + *
    + * http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.hadoop.hive.ql.plan;
    +
    +public class AbstractOperatorDesc implements OperatorDesc {
    + @Override
    + public Object clone() throws CloneNotSupportedException {
    + throw new CloneNotSupportedException("clone not supported");
    + }
    +}
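
AbstractOperatorDesc implements the new OperatorDesc interface, whose source does not appear in this excerpt. Judging from the clone() override here and the Serializable contract the descriptor classes below still rely on (each keeps its serialVersionUID), a plausible minimal form of the interface is (a sketch, not the verbatim file):

    package org.apache.hadoop.hive.ql.plan;

    import java.io.Serializable;

    public interface OperatorDesc extends Serializable, Cloneable {
      Object clone() throws CloneNotSupportedException;
    }

Note that the base class defaults clone() to throwing CloneNotSupportedException, so only descriptors that actually need copying (for example FilterDesc and ForwardDesc below) opt in by overriding it.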

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,14 +18,13 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;

    /**
    * CollectDesc.
    *
    */
    @Explain(displayName = "Collect")
    -public class CollectDesc implements Serializable {
    +public class CollectDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;
    Integer bufferSize;


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,14 +18,13 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;

    /**
    * ExtractDesc.
    *
    */
    @Explain(displayName = "Extract")
    -public class ExtractDesc implements Serializable {
    +public class ExtractDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;
    private ExprNodeDesc col;


    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,7 +18,6 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    import java.util.ArrayList;

    import org.apache.hadoop.fs.Path;
    @@ -28,7 +27,7 @@ import org.apache.hadoop.fs.Path;
    *
    */
    @Explain(displayName = "File Output Operator")
    -public class FileSinkDesc implements Serializable {
    +public class FileSinkDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;
    private String dirName;
    // normally statsKeyPref will be the same as dirName, but the latter

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,20 +18,20 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;
    import java.util.List;

    +
    /**
    * FilterDesc.
    *
    */
    @Explain(displayName = "Filter Operator")
    -public class FilterDesc implements Serializable {
    +public class FilterDesc extends AbstractOperatorDesc {

    /**
    * sampleDesc is used to keep track of the sampling descriptor.
    */
    - public static class sampleDesc {
    + public static class sampleDesc implements Cloneable {
    // The numerator of the TABLESAMPLE clause
    private int numerator;

    @@ -62,6 +62,12 @@ public class FilterDesc implements Seria
    public boolean getInputPruning() {
    return inputPruning;
    }
    +
    + @Override
    + public Object clone() {
    + sampleDesc desc = new sampleDesc(numerator, denominator, null, inputPruning);
    + return desc;
    + }
    }

    private static final long serialVersionUID = 1L;
    @@ -126,4 +132,13 @@ public class FilterDesc implements Seria
    this.isSortedFilter = isSortedFilter;
    }

    + @Override
    + public Object clone() {
    + FilterDesc filterDesc = new FilterDesc(getPredicate().clone(), getIsSamplingPred());
    + if (getIsSamplingPred()) {
    + filterDesc.setSampleDescr(getSampleDescr());
    + }
    + filterDesc.setSortedFilter(isSortedFilter());
    + return filterDesc;
    + }
    }
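
FilterDesc.clone() deep-clones the predicate, copies the sampling and sort flags by value, and reuses the sampling descriptor reference when one is set. This is the usual pattern for plan nodes: deep-copy whatever downstream passes may mutate, copy primitives directly. An illustrative standalone sketch (Pred and Filter are hypothetical, not Hive classes):

    // Deep-copied part: mutable, so the copy must own its own instance.
    class Pred implements Cloneable {
        String expr;
        Pred(String expr) { this.expr = expr; }
        @Override public Pred clone() { return new Pred(expr); }
    }

    class Filter implements Cloneable {
        Pred predicate;
        boolean sortedFilter;
        Filter(Pred p, boolean sorted) { predicate = p; sortedFilter = sorted; }

        @Override public Filter clone() {
            // Clone the predicate so edits to the copy cannot leak back;
            // the boolean flag is copied by value automatically.
            return new Filter(predicate.clone(), sortedFilter);
        }
    }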

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java Wed Aug 29 17:43:59 2012
    @@ -18,16 +18,20 @@

    package org.apache.hadoop.hive.ql.plan;

    -import java.io.Serializable;

    /**
    * ForwardDesc.
    *
    */
    @Explain(displayName = "Forward")
    -public class ForwardDesc implements Serializable {
    +public class ForwardDesc extends AbstractOperatorDesc {
    private static final long serialVersionUID = 1L;

    public ForwardDesc() {
    }
    +
    + @Override
    + public ForwardDesc clone() {
    + return new ForwardDesc();
    + }
    }
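
Unlike FilterDesc.clone(), which is declared to return Object, ForwardDesc.clone() uses a covariant return type (ForwardDesc), legal since Java 5, so callers get a typed copy without a cast. A minimal illustration (Leaf is hypothetical):

    class Leaf implements Cloneable {
        // Covariant override of Object.clone(): returns Leaf, not Object.
        @Override public Leaf clone() { return new Leaf(); }
    }

    class Demo {
        public static void main(String[] args) {
            Leaf copy = new Leaf().clone();  // no cast needed at the call site
            System.out.println(copy.getClass().getSimpleName());
        }
    }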
