org.apache.hadoop.conf.Configuration



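The diffs below show real-world adoption of org.apache.hadoop.conf.Configuration. Before reading them, a minimal sketch of typical Configuration usage may help; the class name and property keys here are illustrative, not taken from the diffs:

    import org.apache.hadoop.conf.Configuration;

    public class ConfigurationExample {
        public static void main(String[] args) {
            // An empty Configuration; by default it also loads
            // core-default.xml and core-site.xml from the classpath.
            Configuration conf = new Configuration();

            // set() stores a string property; get() can take a default
            // value that is returned when the key is absent.
            conf.set("example.key", "example-value");
            String value = conf.get("example.key", "fallback");

            // Typed accessors parse the stored string.
            int threads = conf.getInt("example.thread.count", 4);
            System.out.println(value + " / " + threads);
        }
    }
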
Project apache/cassandra in file ...org.apache.cassandra.thrift.ITransportFactory.java (2013-01-25)
@@ -21,6 +21,7 @@ package org.apache.cassandra.thrift;
  *
  */
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
@@ -30,5 +31,5 @@ import javax.security.auth.login.LoginException;
 
 public interface ITransportFactory
 {
-    TTransport openTransport(TSocket socket) throws LoginException, TTransportException;
+    TTransport openTransport(TSocket socket, Configuration conf) throws LoginException, TTransportException;
 }
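
The Cassandra change threads a Hadoop Configuration through the factory method so implementations can consult Hadoop settings (for example, login configuration for secure transports). A sketch of a minimal implementation of the revised interface, assuming a hypothetical class name and simple pass-through behavior, placed alongside the interface:

    import javax.security.auth.login.LoginException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.thrift.transport.TSocket;
    import org.apache.thrift.transport.TTransport;
    import org.apache.thrift.transport.TTransportException;

    // Hypothetical implementation: a plain factory can ignore the
    // Configuration, while a secure factory would read Hadoop login
    // settings from it before opening the socket.
    public class PlainTransportFactory implements ITransportFactory {
        @Override
        public TTransport openTransport(TSocket socket, Configuration conf)
                throws LoginException, TTransportException {
            socket.open();
            return socket;
        }
    }
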
Project apache/hbase in file ...che.hadoop.hbase.stargate.MiniClusterTestCase.java (2010-02-25)
@@ -25,6 +25,7 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -59,7 +60,7 @@ public class MiniClusterTestCase extends TestCase {
   // use a nonstandard port
   public static final int DEFAULT_TEST_PORT = 38080;
 
-  protected static HBaseConfiguration conf = new HBaseConfiguration();
+  protected static Configuration conf = HBaseConfiguration.create();
   protected static MiniZooKeeperCluster zooKeeperCluster;
   protected static MiniHBaseCluster hbaseCluster;
   protected static MiniDFSCluster dfsCluster;
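
This hunk shows the core HBase migration repeated in the entries below: direct instantiation of HBaseConfiguration was deprecated in favor of the static factory HBaseConfiguration.create(), which returns a plain Configuration with the HBase resource files added. A minimal sketch (the wrapping class name is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class CreateConfExample {
        public static Configuration createConf() {
            // Deprecated style: new HBaseConfiguration()
            // Replacement: a static factory that returns a plain
            // Configuration with hbase-default.xml and hbase-site.xml
            // already on its resource list.
            return HBaseConfiguration.create();
        }
    }
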
Project apache/hbase in file ....hbase.stargate.StorageClusterVersionResource.java (2009-12-31)
@@ -34,6 +34,7 @@ import javax.ws.rs.core.Response.ResponseBuilder;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.stargate.model.StorageClusterVersionModel;
@@ -59,7 +60,7 @@ public class StorageClusterVersionResource implements Constants {
     }
     try {
       RESTServlet server = RESTServlet.getInstance();
-      HBaseConfiguration hconf = server.getConfiguration();
+      Configuration hconf = server.getConfiguration();
       HBaseAdmin admin = new HBaseAdmin(hconf);
       StorageClusterVersionModel model = new StorageClusterVersionModel();
       model.setVersion(admin.getClusterStatus().getHBaseVersion());
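
Here only the declared type changes: since HBaseAdmin accepts any Configuration, the variable no longer needs the HBaseConfiguration subtype. A sketch of the same call chain outside the servlet, assuming an hbase-site.xml on the classpath and an illustrative class name:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    public class ClusterVersionExample {
        public static String clusterVersion() throws IOException {
            // HBaseAdmin takes the general Configuration type, so the
            // caller never needs to name the HBaseConfiguration subclass.
            Configuration conf = HBaseConfiguration.create();
            HBaseAdmin admin = new HBaseAdmin(conf);
            return admin.getClusterStatus().getHBaseVersion();
        }
    }
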
Project apache/hbase in file ...regionserver.tableindexed.IndexedRegionServer.java (2009-12-31)
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.regionserver.tableindexed;
 
 import java.io.IOException;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -37,7 +38,7 @@ import org.apache.hadoop.util.Progressable;
 public class IndexedRegionServer extends TransactionalRegionServer implements
     IndexedRegionInterface {
 
-  public IndexedRegionServer(HBaseConfiguration conf) throws IOException {
+  public IndexedRegionServer(Configuration conf) throws IOException {
     super(conf);
   }
 
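The same widening applies to constructors: once the superclass accepts a plain Configuration, subclasses simply forward it. A hypothetical base/subclass pair (both class names are invented) mirroring the IndexedRegionServer/TransactionalRegionServer relationship above:

    import org.apache.hadoop.conf.Configuration;

    class BaseServer {
        protected final Configuration conf;

        BaseServer(Configuration conf) {
            this.conf = conf;
        }
    }

    class ExampleServer extends BaseServer {
        ExampleServer(Configuration conf) {
            // Forward the general type; callers no longer construct
            // or reference HBaseConfiguration directly.
            super(conf);
        }
    }
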
Project apache/hbase in file ...c.java.org.apache.hadoop.hbase.io.hfile.HFile.java (2009-12-31)
@@ -38,19 +38,20 @@ import org.apache.commons.cli.Options;
 import org.apache.commons.cli.PosixParser;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hbase.io.HbaseMapWritable;
-import org.apache.hadoop.hbase.io.HeapSize;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
+import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.RawComparator;
@@ -1706,7 +1707,7 @@ public class HFile {
       boolean checkRow = cmd.hasOption("k");
       boolean checkFamily = cmd.hasOption("a");
       // get configuration, file system and get list of files
-      HBaseConfiguration conf = new HBaseConfiguration();
+      Configuration conf = HBaseConfiguration.create();
       conf.set("fs.defaultFS",
         conf.get(org.apache.hadoop.hbase.HConstants.HBASE_DIR));
       FileSystem fs = FileSystem.get(conf);
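
Besides the create() migration, this hunk reorders the imports alphabetically; the tail of the hunk also shows the most common consumer of a Configuration, FileSystem.get(). A minimal sketch of that resolution step (the printed URI depends on your core-site.xml):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class FsFromConf {
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            // FileSystem.get() reads the default filesystem URI from the
            // Configuration ("fs.defaultFS" in current Hadoop; older
            // releases used "fs.default.name") and returns a matching
            // FileSystem instance, e.g. local or HDFS.
            FileSystem fs = FileSystem.get(conf);
            System.out.println(fs.getUri());
        }
    }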