Replace java.io.Serializable with org.apache.hadoop.hive.ql.plan.OperatorDesc



Project apache/hive in file ...hadoop.hive.ql.optimizer.pcr.PcrOpProcFactory.java (2012-08-29)
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.optimizer.pcr;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Stack;
 
@@ -36,6 +35,7 @@
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * PcrOpProcFactory contains processors that process expression tree of filter operators
@@ -70,7 +70,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
       Node tmp = stack.pop();
       Node tmp2 = stack.pop();
       TableScanOperator top = null;
-      Operator<? extends Serializable> pop = null;
+      Operator<? extends OperatorDesc> pop = null;
       if (tmp2 instanceof TableScanOperator) {
         top = (TableScanOperator) tmp2;
         pop = top;
Project apache/hive in file ...rg.apache.hadoop.hive.ql.exec.SkewJoinHandler.java (2012-08-29)
@@ -20,7 +20,6 @@
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -35,6 +34,7 @@
 import org.apache.hadoop.hive.ql.exec.persistence.RowContainer;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.JoinDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -94,11 +94,11 @@
   List<Object> dummyKey = null;
   String taskId;
 
-  private final CommonJoinOperator<? extends Serializable> joinOp;
+  private final CommonJoinOperator<? extends OperatorDesc> joinOp;
   private final int numAliases;
   private final JoinDesc conf;
 
-  public SkewJoinHandler(CommonJoinOperator<? extends Serializable> joinOp) {
+  public SkewJoinHandler(CommonJoinOperator<? extends OperatorDesc> joinOp) {
     this.joinOp = joinOp;
     numAliases = joinOp.numAliases;
     conf = joinOp.getConf();
Project apache/hive in file ....org.apache.hadoop.hive.ql.io.HiveInputFormat.java (2012-08-29)
@@ -21,7 +21,6 @@
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -39,6 +38,7 @@
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
@@ -416,8 +416,8 @@ protected void pushProjectionsAndFilters(JobConf jobConf, Class inputFormatClass
     }
 
     for (String alias : aliases) {
-      Operator<? extends Serializable> op = this.mrwork.getAliasToWork().get(
-          alias);
+      Operator<? extends OperatorDesc> op = this.mrwork.getAliasToWork().get(
+        alias);
       if (op != null && op instanceof TableScanOperator) {
         TableScanOperator tableScan = (TableScanOperator) op;
 
Project apache/hive in file ...doop.hive.ql.optimizer.BucketMapJoinOptimizer.java (2012-08-29)
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hive.ql.optimizer;
 
 import java.io.IOException;
-import java.io.Serializable;
 import java.net.URI;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -65,6 +64,7 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 
 /**
@@ -188,7 +188,8 @@ private boolean convertBucketMapJoin(Node nd, Stack<Node> stack, NodeProcessorCt
       LinkedHashMap<String, List<List<String>>> aliasToPartitionBucketFileNamesMapping =
           new LinkedHashMap<String, List<List<String>>>();
 
-      Map<String, Operator<? extends Serializable>> topOps = this.pGraphContext.getTopOps();
+      Map<String, Operator<? extends OperatorDesc>> topOps =
+        this.pGraphContext.getTopOps();
       Map<TableScanOperator, Table> topToTable = this.pGraphContext.getTopToTable();
 
       // (partition to bucket file names) and (partition to bucket number) for
Project apache/hive in file ...op.hive.ql.optimizer.index.RewriteCanApplyCtx.java (2012-08-29)
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.optimizer.index;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -43,6 +42,7 @@
 import org.apache.hadoop.hive.ql.lib.RuleRegExp;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * RewriteCanApplyCtx class stores the context for the {@link RewriteCanApplyProcFactory}
@@ -252,7 +252,8 @@ public  ParseContext getParseContext() {
    * @param topOp
    * @throws SemanticException
    */
-  void populateRewriteVars(Operator<? extends Serializable> topOp) throws SemanticException{
+  void populateRewriteVars(Operator<? extends OperatorDesc> topOp)
+    throws SemanticException{
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
     opRules.put(new RuleRegExp("R1", "FIL%"),
         RewriteCanApplyProcFactory.canApplyOnFilterOperator());