org.apache.hadoop.conf.Configuration → com.thinkaurelius.titan.diskstorage.configuration.Configuration



Project thinkaurelius/titan in file ...m.thinkaurelius.titan.hadoop.FaunusSerializer.java (2014-08-23)
@@ -5,17 +5,18 @@ import com.google.common.base.Predicate;
 import com.google.common.collect.*;
 import com.thinkaurelius.titan.diskstorage.ReadBuffer;
 import com.thinkaurelius.titan.diskstorage.StaticBuffer;
+import com.thinkaurelius.titan.diskstorage.configuration.Configuration;
 import com.thinkaurelius.titan.diskstorage.util.ReadArrayBuffer;
 import com.thinkaurelius.titan.graphdb.database.serialize.Serializer;
 import com.thinkaurelius.titan.graphdb.database.serialize.StandardSerializer;
 import com.thinkaurelius.titan.hadoop.FaunusPathElement.MicroElement;
 import com.thinkaurelius.titan.hadoop.config.ModifiableHadoopConfiguration;
+import com.thinkaurelius.titan.hadoop.config.TitanHadoopConfiguration;
 import com.thinkaurelius.titan.util.datastructures.IterablesUtil;
 import com.tinkerpop.blueprints.Direction;
 import com.tinkerpop.blueprints.util.ExceptionFactory;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
@@ -53,8 +54,8 @@ public class FaunusSerializer {
         Preconditions.checkNotNull(configuration);
         this.types = FaunusTypeManager.getTypeManager(configuration);
         this.configuration = configuration;
-        this.trackState = configuration.getBoolean(Tokens.TITAN_HADOOP_PIPELINE_TRACK_STATE, false);
-        this.trackPaths = configuration.getBoolean(Tokens.TITAN_HADOOP_PIPELINE_TRACK_PATHS, false);
+        this.trackState = configuration.get(TitanHadoopConfiguration.PIPELINE_TRACK_STATE);
+        this.trackPaths = configuration.get(TitanHadoopConfiguration.PIPELINE_TRACK_PATHS);
     }
 
     public void writeVertex(final FaunusVertex vertex, final DataOutput out) throws IOException {
@@ -180,7 +181,7 @@ public class FaunusSerializer {
         if (null == standardSerializer) { // N.B. standardSerializer is volatile
             synchronized (FaunusSerializer.class) {
                 if (null == standardSerializer) {
-                    int maxOutputBufSize = ModifiableHadoopConfiguration.of(configuration).get(KRYO_MAX_OUTPUT_SIZE);
+                    int maxOutputBufSize = configuration.get(KRYO_MAX_OUTPUT_SIZE);
                     standardSerializer = new StandardSerializer(true, maxOutputBufSize);
                 }
             }
Project thinkaurelius/titan in file ...aurelius.titan.hadoop.FaunusVertexQueryFilter.java (2014-08-23)
@@ -4,6 +4,7 @@ import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
 import com.thinkaurelius.titan.core.*;
 import com.thinkaurelius.titan.diskstorage.StaticBuffer;
+import com.thinkaurelius.titan.diskstorage.configuration.Configuration;
 import com.thinkaurelius.titan.diskstorage.keycolumnvalue.SliceQuery;
 import com.thinkaurelius.titan.graphdb.internal.OrderList;
 import com.thinkaurelius.titan.graphdb.internal.RelationCategory;
@@ -16,12 +17,8 @@ import com.thinkaurelius.titan.graphdb.transaction.StandardTitanTx;
 import com.thinkaurelius.titan.hadoop.config.ModifiableHadoopConfiguration;
 import com.thinkaurelius.titan.hadoop.config.TitanHadoopConfiguration;
 import com.tinkerpop.blueprints.Direction;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.util.DefaultQuery;
 import com.tinkerpop.gremlin.groovy.jsr223.GremlinGroovyScriptEngine;
-import org.apache.hadoop.conf.Configuration;
 
-import java.util.Collections;
 import java.util.Iterator;
 
 /**
@@ -48,12 +45,11 @@ public class FaunusVertexQueryFilter extends FaunusVertexQuery implements Iterab
     }
 
     public static FaunusVertexQueryFilter create(final Configuration configuration) {
-        ModifiableHadoopConfiguration faunusConf = ModifiableHadoopConfiguration.of(configuration);
         engine.put("v", new DummyVertex(FaunusTypeManager.getTypeManager(configuration)));
         try {
             // Can't default to v.query().relations() -- this causes a class cast exception when attempting to convert the Iterable return value of relations() to a FaunusVertexQueryFilter
-            FaunusVertexQueryFilter query = (FaunusVertexQueryFilter) engine.eval(faunusConf.get(TitanHadoopConfiguration.INPUT_VERTEX_QUERY_FILTER));
-            if (faunusConf.has(TitanHadoopConfiguration.INPUT_VERTEX_QUERY_FILTER)) {
+            FaunusVertexQueryFilter query = (FaunusVertexQueryFilter) engine.eval(configuration.get(TitanHadoopConfiguration.INPUT_VERTEX_QUERY_FILTER));
+            if (configuration.has(TitanHadoopConfiguration.INPUT_VERTEX_QUERY_FILTER)) {
                 query.setDoesFilter(true);
             }
             // Move relations() call down here for the side effect (sets the resultType)
Project thinkaurelius/titan in file ...kaurelius.titan.hadoop.StandardFaunusRelation.java (2014-08-23)
@@ -2,11 +2,11 @@ package com.thinkaurelius.titan.hadoop;
 
 import com.google.common.base.Preconditions;
 import com.thinkaurelius.titan.core.*;
+import com.thinkaurelius.titan.diskstorage.configuration.Configuration;
 import com.thinkaurelius.titan.graphdb.relations.EdgeDirection;
 import com.thinkaurelius.titan.graphdb.relations.RelationIdentifier;
 import com.tinkerpop.blueprints.Direction;
 
-import org.apache.hadoop.conf.Configuration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
Project thinkaurelius/titan in file ...p.formats.cassandra.TitanCassandraHadoopGraph.java (2014-08-23)
@@ -2,6 +2,7 @@ package com.thinkaurelius.titan.hadoop.formats.cassandra;
 
 import com.google.common.base.Preconditions;
 import com.thinkaurelius.titan.diskstorage.Entry;
+import com.thinkaurelius.titan.diskstorage.configuration.Configuration;
 import com.thinkaurelius.titan.diskstorage.util.StaticArrayBuffer;
 import com.thinkaurelius.titan.diskstorage.util.StaticArrayEntry;
 import com.thinkaurelius.titan.hadoop.FaunusVertex;
@@ -9,7 +10,6 @@ import com.thinkaurelius.titan.hadoop.formats.util.TitanHadoopGraph;
 import com.thinkaurelius.titan.hadoop.formats.util.input.TitanHadoopSetup;
 
 import org.apache.cassandra.db.Column;
-import org.apache.hadoop.conf.Configuration;
 
 import java.nio.ByteBuffer;
 import java.util.Iterator;
Project thinkaurelius/titan in file ...oop.formats.edgelist.rdf.RDFBlueprintsHandler.java (2014-08-23)
@@ -3,6 +3,7 @@ package com.thinkaurelius.titan.hadoop.formats.edgelist.rdf;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
 import com.thinkaurelius.titan.diskstorage.configuration.ConfigElement;
+import com.thinkaurelius.titan.diskstorage.configuration.Configuration;
 import com.thinkaurelius.titan.diskstorage.configuration.ModifiableConfiguration;
 import com.thinkaurelius.titan.hadoop.FaunusVertex;
 import com.thinkaurelius.titan.hadoop.StandardFaunusEdge;
@@ -11,7 +12,6 @@ import com.thinkaurelius.titan.hadoop.config.ModifiableHadoopConfiguration;
 import com.thinkaurelius.titan.hadoop.config.TitanHadoopConfiguration;
 import com.tinkerpop.blueprints.impls.sail.SailTokens;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
 import org.openrdf.model.Literal;
 import org.openrdf.model.Statement;
@@ -42,7 +42,8 @@ public class RDFBlueprintsHandler implements RDFHandler, Iterator<FaunusElement>
     private static final Logger logger = Logger.getLogger(RDFBlueprintsHandler.class);
 
     private final boolean useFragments;
-    private final Configuration configuration;
+    private final ModifiableHadoopConfiguration faunusConf;
+    private final Configuration rdfConf;
     private final Set<String> asProperties = new HashSet<String>();
     private final boolean literalAsProperty;
     private final RDFParser parser;
@@ -71,7 +72,7 @@ public class RDFBlueprintsHandler implements RDFHandler, Iterator<FaunusElement>
         dataTypeToClass = b.build();
     }
 
-    public RDFBlueprintsHandler(final Configuration configuration) throws IOException {
+    public RDFBlueprintsHandler(final ModifiableHadoopConfiguration configuration) throws IOException {
 
 
         // exclude fragments which are most likely to interfere in a Titan/Faunus pipeline
@@ -80,10 +81,9 @@ public class RDFBlueprintsHandler implements RDFHandler, Iterator<FaunusElement>
         //reservedFragments.add("type");
         reservedFragments.add("id");
 
-        ModifiableHadoopConfiguration faunusConf = ModifiableHadoopConfiguration.of(configuration);
-        ModifiableConfiguration rdfConf = faunusConf.getInputConf(ROOT_NS);
+        faunusConf = configuration;
+        rdfConf = faunusConf.getInputConf(ROOT_NS);
 
-        this.configuration = configuration;
         this.baseURI = rdfConf.get(RDF_BASE_URI);
         this.useFragments = rdfConf.get(RDF_USE_LOCALNAME);
         this.literalAsProperty = rdfConf.get(RDF_LITERAL_AS_PROPERTY);
@@ -173,14 +173,14 @@ public class RDFBlueprintsHandler implements RDFHandler, Iterator<FaunusElement>
 
     public void handleStatement(final Statement s) throws RDFHandlerException {
         if (this.asProperties.contains(s.getPredicate().toString())) {
-            final FaunusVertex subject = new FaunusVertex(this.configuration, Crc64.digest(s.getSubject().stringValue().getBytes()));
+            final FaunusVertex subject = new FaunusVertex(faunusConf, Crc64.digest(s.getSubject().stringValue().getBytes()));
             subject.setProperty(postProcess(s.getPredicate()), postProcess(s.getObject()));
             subject.setProperty(URI, s.getSubject().stringValue());
             if (this.useFragments)
                 subject.setProperty(NAME, createFragment(s.getSubject()));
             this.queue.add(subject);
         } else if (this.literalAsProperty && (s.getObject() instanceof Literal)) {
-            final FaunusVertex subject = new FaunusVertex(this.configuration, Crc64.digest(s.getSubject().stringValue().getBytes()));
+            final FaunusVertex subject = new FaunusVertex(faunusConf, Crc64.digest(s.getSubject().stringValue().getBytes()));
             subject.setProperty(postProcess(s.getPredicate()), castLiteral((Literal) s.getObject()));
             subject.setProperty(URI, s.getSubject().stringValue());
             if (this.useFragments)
@@ -188,20 +188,20 @@ public class RDFBlueprintsHandler implements RDFHandler, Iterator<FaunusElement>
             this.queue.add(subject);
         } else {
             long subjectId = Crc64.digest(s.getSubject().stringValue().getBytes());
-            final FaunusVertex subject = new FaunusVertex(this.configuration, subjectId);
+            final FaunusVertex subject = new FaunusVertex(faunusConf, subjectId);
             subject.setProperty(URI, s.getSubject().stringValue());
             if (this.useFragments)
                 subject.setProperty(NAME, createFragment(s.getSubject()));
             this.queue.add(subject);
 
             long objectId = Crc64.digest(s.getObject().stringValue().getBytes());
-            final FaunusVertex object = new FaunusVertex(this.configuration, objectId);
+            final FaunusVertex object = new FaunusVertex(faunusConf, objectId);
             object.setProperty(URI, s.getObject().stringValue());
             if (this.useFragments)
                 object.setProperty(NAME, createFragment(s.getObject()));
             this.queue.add(object);
 
-            final StandardFaunusEdge predicate = new StandardFaunusEdge(this.configuration, -1, subjectId, objectId, postProcess(s.getPredicate()));
+            final StandardFaunusEdge predicate = new StandardFaunusEdge(faunusConf, -1, subjectId, objectId, postProcess(s.getPredicate()));
             predicate.setProperty(URI, s.getPredicate().stringValue());
             if (null != s.getContext())
                 predicate.setProperty(CONTEXT, s.getContext().stringValue());