java.io.Serializable → org.apache.hadoop.hive.ql.plan.OperatorDesc

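All five hunks below apply the same mechanical migration: the wildcard bound on Hive's Operator generic is narrowed from java.io.Serializable to org.apache.hadoop.hive.ql.plan.OperatorDesc, and each file's import list is updated to match (the java.io.Serializable import is dropped where it is no longer referenced, and an OperatorDesc import is added). Because OperatorDesc itself extends Serializable, the new bound preserves serializability while telling callers which descriptor family the operator actually carries. A minimal self-contained sketch of the before/after pattern follows; the stand-in types are hypothetical simplifications, not the real Hive classes:

    import java.io.Serializable;
    import java.util.HashMap;

    // Hypothetical stand-ins for the Hive types named in the diffs (simplified).
    interface OperatorDesc extends Serializable, Cloneable {} // org.apache.hadoop.hive.ql.plan.OperatorDesc
    class Operator<T extends Serializable> {}                 // org.apache.hadoop.hive.ql.exec.Operator
    class OpParseContext {}                                   // org.apache.hadoop.hive.ql.parse.OpParseContext

    class MigrationSketch {
      // Before: the descriptor type was only known to be Serializable.
      HashMap<Operator<? extends Serializable>, OpParseContext> before =
          new HashMap<Operator<? extends Serializable>, OpParseContext>();

      // After: the narrower OperatorDesc bound still satisfies Serializable
      // (OperatorDesc extends it) but names the actual payload type, so
      // callers reach the descriptor API without casting.
      HashMap<Operator<? extends OperatorDesc>, OpParseContext> after =
          new HashMap<Operator<? extends OperatorDesc>, OpParseContext>();
    }

The same substitution recurs in map values (Map<String, Operator<? extends OperatorDesc>>), local variables, and method parameters such as getExprDependency in the last diff; the shape of the change is identical in every file.
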
Project apache/hive in file ....apache.hadoop.hive.ql.optimizer.ColumnPruner.java (2012-08-29)
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.optimizer;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -39,6 +38,7 @@
 import org.apache.hadoop.hive.ql.parse.OpParseContext;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * Implementation of one of the rule-based optimization steps. ColumnPruner gets
@@ -50,7 +50,7 @@
  */
 public class ColumnPruner implements Transform {
   protected ParseContext pGraphContext;
-  private HashMap<Operator<? extends Serializable>, OpParseContext> opToParseCtxMap;
+  private HashMap<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap;
 
   /**
    * empty constructor.
Project apache/hive in file ...hadoop.hive.ql.optimizer.GlobalLimitOptimizer.java (2012-08-29)
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.optimizer;
 
+import java.util.Map;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -35,9 +37,7 @@
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.SplitSample;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-
-import java.io.Serializable;
-import java.util.Map;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * This optimizer is used to reduce the input size for the query for queries which are
@@ -58,7 +58,7 @@
 
   public ParseContext transform(ParseContext pctx) throws SemanticException {
     Context ctx = pctx.getContext();
-    Map<String, Operator<? extends Serializable>> topOps = pctx.getTopOps();
+    Map<String, Operator<? extends OperatorDesc>> topOps = pctx.getTopOps();
     GlobalLimitCtx globalLimitCtx = pctx.getGlobalLimitCtx();
     Map<TableScanOperator, ExprNodeDesc> opToPartPruner = pctx.getOpToPartPruner();
     Map<TableScanOperator, PrunedPartitionList> opToPartList = pctx.getOpToPartList();
Project apache/hive in file ...che.hadoop.hive.ql.optimizer.GroupByOptimizer.java (2012-08-29)
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.optimizer;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.LinkedList;
@@ -57,6 +56,7 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 
 /**
@@ -175,7 +175,7 @@ private void checkBucketGroupBy(GroupByOperator curr)
       }
 
       for (String table : tblNames) {
-        Operator<? extends Serializable> topOp = pGraphContext.getTopOps().get(
+        Operator<? extends OperatorDesc> topOp = pGraphContext.getTopOps().get(
             table);
         if (topOp == null || (!(topOp instanceof TableScanOperator))) {
           // this is in a sub-query.
Project apache/hive in file ...l.optimizer.SortedMergeBucketMapJoinOptimizer.java (2012-08-29)
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.optimizer;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -56,6 +55,7 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.SMBJoinDesc;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 
@@ -234,7 +234,7 @@ private boolean isTableSorted(ParseContext pctx,
       List<Order> sortColumnsFirstTable)
       throws SemanticException {
 
-      Map<String, Operator<? extends Serializable>> topOps = this.pGraphContext
+      Map<String, Operator<? extends OperatorDesc>> topOps = this.pGraphContext
           .getTopOps();
       Map<TableScanOperator, Table> topToTable = this.pGraphContext
           .getTopToTable();
Project apache/hive in file ...oop.hive.ql.optimizer.lineage.ExprProcFactory.java (2012-08-29)
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.optimizer.lineage;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -48,6 +47,7 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * Expression processor factory for lineage. Each processor is responsible to
@@ -173,7 +173,7 @@ public static NodeProcessor getColumnProcessor() {
    * @throws SemanticException
    */
   public static Dependency getExprDependency(LineageCtx lctx,
-      Operator<? extends Serializable> inpOp, ExprNodeDesc expr)
+      Operator<? extends OperatorDesc> inpOp, ExprNodeDesc expr)
       throws SemanticException {
 
     // Create the walker, the rules dispatcher and the context.