java.io.Serializable → org.apache.hadoop.hive.ql.plan.OperatorDesc
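
These hunks all record the same mechanical migration in apache/hive: the generic bound on the Operator hierarchy is tightened from java.io.Serializable to the more specific org.apache.hadoop.hive.ql.plan.OperatorDesc. Each file therefore drops the Serializable import, adds the OperatorDesc import, and retypes every Operator<? extends Serializable> wildcard. Below is a minimal, self-contained sketch of the shape of that change; the names mirror Hive's, but the bodies are illustrative stubs rather than Hive's real implementations. The later sketches in this section reuse these stubs.

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

// Illustrative stub: in Hive, OperatorDesc extends Serializable, so the new
// bound is strictly tighter than the old one and code that only needed
// Serializable keeps compiling.
interface OperatorDesc extends Serializable {
}

// Illustrative stub of the operator hierarchy with the new bound on its
// configuration ("descriptor") type parameter.
abstract class Operator<T extends OperatorDesc> {
  private T conf;
  private List<Operator<? extends OperatorDesc>> childOperators =
      new ArrayList<Operator<? extends OperatorDesc>>();
  private List<Operator<? extends OperatorDesc>> parentOperators =
      new ArrayList<Operator<? extends OperatorDesc>>();

  public void setConf(T conf) {
    this.conf = conf;
  }

  public T getConf() {
    return conf;
  }

  public List<Operator<? extends OperatorDesc>> getChildOperators() {
    return childOperators;
  }

  public List<Operator<? extends OperatorDesc>> getParentOperators() {
    return parentOperators;
  }

  public void setParentOperators(List<Operator<? extends OperatorDesc>> parents) {
    this.parentOperators = parents;
  }

  // Swap oldChild for newChild in place, keeping sibling order.
  public void replaceChild(Operator<? extends OperatorDesc> oldChild,
      Operator<? extends OperatorDesc> newChild) {
    childOperators.set(childOperators.indexOf(oldChild), newChild);
  }
}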



Project apache/hive in file ...ql.optimizer.index.RewriteCanApplyProcFactory.java (2012-08-29)
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.optimizer.index;
 
-import java.io.Serializable;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -43,6 +42,7 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * Factory of methods used by {@link RewriteGBUsingIndex}
@@ -204,8 +204,8 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        SelectOperator operator = (SelectOperator)nd;
        canApplyCtx = (RewriteCanApplyCtx)ctx;
 
-       List<Operator<? extends Serializable>> childrenList = operator.getChildOperators();
-       Operator<? extends Serializable> child = childrenList.get(0);
+       List<Operator<? extends OperatorDesc>> childrenList = operator.getChildOperators();
+       Operator<? extends OperatorDesc> child = childrenList.get(0);
        if(child instanceof FileSinkOperator){
          Map<String, String> internalToAlias = new LinkedHashMap<String, String>();
          RowSchema rs = operator.getSchema();
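
In RewriteCanApplyProcFactory the change is purely a retyping of the wildcard; the traversal logic is untouched. A hedged sketch of the same caller-side pattern, reusing the stub Operator and OperatorDesc from above (FileSinkLikeOperator is a hypothetical stand-in for Hive's FileSinkOperator):

import java.util.List;

// Hypothetical subclass standing in for FileSinkOperator.
class FileSinkLikeOperator extends Operator<OperatorDesc> {
}

class ChildInspection {
  // Same shape as the rewritten hunk: fetch the children under the new
  // OperatorDesc bound, then branch on the concrete type of the first child.
  static boolean firstChildIsFileSink(Operator<? extends OperatorDesc> operator) {
    List<Operator<? extends OperatorDesc>> childrenList = operator.getChildOperators();
    if (childrenList.isEmpty()) {
      return false;
    }
    Operator<? extends OperatorDesc> child = childrenList.get(0);
    return child instanceof FileSinkLikeOperator;
  }
}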
Project apache/hive in file ....test.org.apache.hadoop.hive.ql.exec.TestPlan.java (2012-08-29)
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import java.io.ByteArrayOutputStream;
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 
@@ -30,6 +29,7 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -69,8 +69,8 @@ public void testPlan() throws Exception {
       LinkedHashMap<String, PartitionDesc> pt = new LinkedHashMap<String, PartitionDesc>();
       pt.put("/tmp/testfolder", partDesc);
 
-      LinkedHashMap<String, Operator<? extends Serializable>> ao =
-        new LinkedHashMap<String, Operator<? extends Serializable>>();
+      LinkedHashMap<String, Operator<? extends OperatorDesc>> ao =
+        new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
       ao.put("a", op);
 
       MapredWork mrwork = new MapredWork();
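
The test fixture changes the same way: the alias-to-root-operator map that feeds MapredWork is simply re-declared against the new bound (the TestOperators hunk below repeats this pattern). A sketch of that declaration, reusing the stubs above:

import java.util.LinkedHashMap;

class AliasToWorkSketch {
  // Old value type: Operator<? extends Serializable>.
  // New value type: Operator<? extends OperatorDesc>. Construction and
  // population are otherwise identical.
  static LinkedHashMap<String, Operator<? extends OperatorDesc>> build(
      Operator<? extends OperatorDesc> op) {
    LinkedHashMap<String, Operator<? extends OperatorDesc>> ao =
        new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
    ao.put("a", op);
    return ao;
  }
}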
Project apache/hive in file ....org.apache.hadoop.hive.ql.exec.TestOperators.java (2012-08-29)
@@ -18,8 +18,11 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import java.io.Serializable;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
 
 import junit.framework.TestCase;
 
@@ -32,6 +35,7 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.ScriptDesc;
@@ -334,7 +338,8 @@ public void testMapOperator() throws Throwable {
       CollectOperator cdop2 = (CollectOperator) OperatorFactory
           .get(CollectDesc.class);
       cdop2.setConf(cd);
-      LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork = new LinkedHashMap<String, Operator<? extends Serializable>>();
+      LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork =
+        new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
       aliasToWork.put("a", cdop1);
       aliasToWork.put("b", cdop2);
 
Project apache/hive in file ...apache.hadoop.hive.ql.optimizer.GenMROperator.java (2012-08-29)
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.optimizer;
 
-import java.io.Serializable;
 import java.util.Map;
 import java.util.Stack;
 
@@ -28,6 +27,7 @@
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * Processor for the rule - no specific rule fired.
@@ -39,7 +39,7 @@ public GenMROperator() {
 
   /**
    * Reduce Scan encountered.
-   * 
+   *
    * @param nd
    *          the reduce sink operator encountered
    * @param procCtx
@@ -49,10 +49,10 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
       Object... nodeOutputs) throws SemanticException {
     GenMRProcContext ctx = (GenMRProcContext) procCtx;
 
-    Map<Operator<? extends Serializable>, GenMapRedCtx> mapCurrCtx = ctx
+    Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
         .getMapCurrCtx();
     GenMapRedCtx mapredCtx = mapCurrCtx.get(stack.get(stack.size() - 2));
-    mapCurrCtx.put((Operator<? extends Serializable>) nd, new GenMapRedCtx(
+    mapCurrCtx.put((Operator<? extends OperatorDesc>) nd, new GenMapRedCtx(
         mapredCtx.getCurrTask(), mapredCtx.getCurrTopOp(), mapredCtx
         .getCurrAliasId()));
     return null;
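
GenMROperator additionally carries an unchecked cast from the graph-walker node to the wildcard operator type, and the migration does not change that: the cast is unchecked under both the old and the new bound, because the wildcard's type argument is erased at runtime. A sketch of the pattern, reusing the stubs above (Node here is a hypothetical stand-in for Hive's org.apache.hadoop.hive.ql.lib.Node, which Hive's real Operator implements):

import java.util.Map;

// Hypothetical stand-in for the graph-walker node interface.
interface Node {
}

class CastSketch {
  // Same pattern as the hunk: record the current node in a context map
  // keyed by operator. The cast compiles with an unchecked warning under
  // either bound.
  @SuppressWarnings("unchecked")
  static void record(Map<Operator<? extends OperatorDesc>, String> mapCurrCtx,
      Node nd) {
    mapCurrCtx.put((Operator<? extends OperatorDesc>) nd, "currAliasId");
  }
}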
Project apache/hive in file ...hadoop.hive.ql.optimizer.SimpleFetchOptimizer.java (2012-08-29)
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.optimizer;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -50,6 +49,7 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.ListSinkDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -63,7 +63,7 @@
   private final Log LOG = LogFactory.getLog(SimpleFetchOptimizer.class.getName());
 
   public ParseContext transform(ParseContext pctx) throws SemanticException {
-    Map<String, Operator<? extends Serializable>> topOps = pctx.getTopOps();
+    Map<String, Operator<? extends OperatorDesc>> topOps = pctx.getTopOps();
     if (pctx.getQB().isSimpleSelectQuery() && topOps.size() == 1) {
       // no join, no groupby, no distinct, no lateral view, no subq,
       // no CTAS or insert, not analyze command, and single sourced.
@@ -234,8 +234,8 @@ private ListSinkOperator completed(ParseContext pctx, FetchWork work) {
       pctx.getSemanticInputs().addAll(inputs);
       ListSinkOperator sink = new ListSinkOperator();
       sink.setConf(new ListSinkDesc(work.getSerializationNullFormat()));
-      sink.setParentOperators(new ArrayList<Operator<? extends Serializable>>());
-      Operator<? extends Serializable> parent = fileSink.getParentOperators().get(0);
+      sink.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
+      Operator<? extends OperatorDesc> parent = fileSink.getParentOperators().get(0);
       sink.getParentOperators().add(parent);
       parent.replaceChild(fileSink, sink);
       fileSink.setParentOperators(null);
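
The second SimpleFetchOptimizer hunk applies the retyping inside an operator-tree splice: the new ListSinkOperator takes over the FileSinkOperator's slot under its parent. A sketch of that rewiring, reusing the stub Operator above (the real Hive operators carry far more state than these stubs do):

import java.util.ArrayList;

class SpliceSketch {
  // Mirrors the rewritten hunk: give the new sink an empty parent list,
  // attach it under the old sink's first parent, swap the children, and
  // detach the old sink from the tree.
  static void replaceSink(Operator<? extends OperatorDesc> fileSink,
      Operator<? extends OperatorDesc> sink) {
    sink.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
    Operator<? extends OperatorDesc> parent = fileSink.getParentOperators().get(0);
    sink.getParentOperators().add(parent);
    parent.replaceChild(fileSink, sink);
    fileSink.setParentOperators(null);
  }
}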