Removed import: org.junit.Before — Added static import: org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked



Project elasticsearch/elasticsearch in file ...rch.search.aggregations.bucket.DateRangeTests.java (2014-03-28)
@@ -30,7 +30,6 @@ import org.elasticsearch.test.ElasticsearchIntegrationTest;
 import org.hamcrest.Matchers;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
-import org.junit.Before;
 import org.junit.Test;
 
 import java.util.ArrayList;
@@ -40,6 +39,7 @@ import java.util.List;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -49,6 +49,7 @@ import static org.hamcrest.core.IsNull.nullValue;
 /**
  *
  */
+@ElasticsearchIntegrationTest.SuiteScopeTest
 public class DateRangeTests extends ElasticsearchIntegrationTest {
 
     private static IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception {
@@ -64,10 +65,9 @@ public class DateRangeTests extends ElasticsearchIntegrationTest {
         return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC);
     }
 
-    int numDocs;
-
-    @Before
-    public void init() throws Exception {
+    private static int numDocs;
+    @Override
+    public void setupSuiteScopeCluster() throws Exception {
         createIndex("idx");
         createIndex("idx_unmapped");
 
@@ -86,7 +86,13 @@ public class DateRangeTests extends ElasticsearchIntegrationTest {
         for (int i = docs.size(); i < numDocs; ++i) {
             docs.add(indexDoc(randomIntBetween(6, 10), randomIntBetween(1, 20), randomInt(100)));
         }
-
+        assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer"));
+        for (int i = 0; i < 2; i++) {
+            docs.add(client().prepareIndex("empty_bucket_idx", "type", ""+i).setSource(jsonBuilder()
+                    .startObject()
+                    .field("value", i*2)
+                    .endObject()));
+        }
         indexRandom(true, docs);
         ensureSearchable();
     }
@@ -1010,16 +1016,6 @@ public class DateRangeTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void emptyAggregation() throws Exception {
-        prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").execute().actionGet();
-        List<IndexRequestBuilder> builders = new ArrayList<>();
-        for (int i = 0; i < 2; i++) {
-            builders.add(client().prepareIndex("empty_bucket_idx", "type", ""+i).setSource(jsonBuilder()
-                    .startObject()
-                    .field("value", i*2)
-                    .endObject()));
-        }
-        indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()]));
-
         SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
                 .setQuery(matchAllQuery())
                 .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0).subAggregation(dateRange("date_range").addRange("0-1", 0, 1)))
Project elasticsearch/elasticsearch in file ...h.search.aggregations.bucket.DoubleTermsTests.java (2014-03-28)
@@ -34,7 +34,6 @@ import org.elasticsearch.search.aggregations.metrics.sum.Sum;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
 import org.elasticsearch.test.cache.recycler.MockBigArrays;
 import org.hamcrest.Matchers;
-import org.junit.Before;
 import org.junit.Test;
 
 import java.util.ArrayList;
@@ -45,6 +44,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -53,38 +53,42 @@ import static org.hamcrest.core.IsNull.notNullValue;
 /**
  *
  */
+@ElasticsearchIntegrationTest.SuiteScopeTest
 public class DoubleTermsTests extends ElasticsearchIntegrationTest {
 
     private static final int NUM_DOCS = 5; // TODO: randomize the size?
     private static final String SINGLE_VALUED_FIELD_NAME = "d_value";
     private static final String MULTI_VALUED_FIELD_NAME = "d_values";
 
-    @Before
-    public void init() throws Exception {
+    public void setupSuiteScopeCluster() throws Exception {
         createIndex("idx");
-
-        IndexRequestBuilder[] lowcardBuilders = new IndexRequestBuilder[NUM_DOCS];
-        for (int i = 0; i < lowcardBuilders.length; i++) {
-            lowcardBuilders[i] = client().prepareIndex("idx", "type").setSource(jsonBuilder()
+        List<IndexRequestBuilder> builders = new ArrayList<>();
+        for (int i = 0; i < NUM_DOCS; i++) {
+            builders.add(client().prepareIndex("idx", "type").setSource(jsonBuilder()
                     .startObject()
                     .field(SINGLE_VALUED_FIELD_NAME, (double) i)
-                    .field("num_tag", i < lowcardBuilders.length/2 + 1 ? 1 : 0) // used to test order by single-bucket sub agg
+                    .field("num_tag", i < NUM_DOCS/2 + 1 ? 1 : 0) // used to test order by single-bucket sub agg
                     .startArray(MULTI_VALUED_FIELD_NAME).value((double) i).value(i + 1d).endArray()
-                    .endObject());
+                    .endObject()));
 
         }
-        indexRandom(randomBoolean(), lowcardBuilders);
-        IndexRequestBuilder[] highCardBuilders = new IndexRequestBuilder[100]; // TODO: randomize the size?
-        for (int i = 0; i < highCardBuilders.length; i++) {
-            highCardBuilders[i] = client().prepareIndex("idx", "high_card_type").setSource(jsonBuilder()
+        for (int i = 0; i < 100; i++) {
+            builders.add(client().prepareIndex("idx", "high_card_type").setSource(jsonBuilder()
                     .startObject()
                     .field(SINGLE_VALUED_FIELD_NAME, (double) i)
                     .startArray(MULTI_VALUED_FIELD_NAME).value((double)i).value(i + 1d).endArray()
-                    .endObject());
+                    .endObject()));
         }
-        indexRandom(true, highCardBuilders);
 
         createIndex("idx_unmapped");
+        assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer"));
+        for (int i = 0; i < 2; i++) {
+            builders.add(client().prepareIndex("empty_bucket_idx", "type", ""+i).setSource(jsonBuilder()
+                    .startObject()
+                    .field(SINGLE_VALUED_FIELD_NAME, i*2)
+                    .endObject()));
+        }
+        indexRandom(true, builders);
         ensureSearchable();
     }
 
@@ -610,16 +614,6 @@ public class DoubleTermsTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void emptyAggregation() throws Exception {
-        prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer").execute().actionGet();
-        List<IndexRequestBuilder> builders = new ArrayList<>();
-        for (int i = 0; i < 2; i++) {
-            builders.add(client().prepareIndex("empty_bucket_idx", "type", ""+i).setSource(jsonBuilder()
-                    .startObject()
-                    .field(SINGLE_VALUED_FIELD_NAME, i*2)
-                    .endObject()));
-        }
-        indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()]));
-
         SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
                 .setQuery(matchAllQuery())
                 .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1l).minDocCount(0)
Project elasticsearch/elasticsearch in file ...rch.search.aggregations.bucket.IPv4RangeTests.java (2014-03-28)
@@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.metrics.max.Max;
 import org.elasticsearch.search.aggregations.metrics.sum.Sum;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
 import org.hamcrest.Matchers;
-import org.junit.Before;
 import org.junit.Test;
 
 import java.util.ArrayList;
@@ -36,6 +35,7 @@ import java.util.List;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -45,25 +45,39 @@ import static org.hamcrest.core.IsNull.nullValue;
 /**
  *
  */
+@ElasticsearchIntegrationTest.SuiteScopeTest
 public class IPv4RangeTests extends ElasticsearchIntegrationTest {
 
-    @Before
-    public void init() throws Exception {
-        prepareCreate("idx")
-                .addMapping("type", "ip", "type=ip", "ips", "type=ip")
-                .execute().actionGet();
-        IndexRequestBuilder[] builders = new IndexRequestBuilder[255]; // TODO randomize the size?
-        // TODO randomize the values in the docs?
-        for (int i = 0; i < builders.length; i++) {
-            builders[i] = client().prepareIndex("idx", "type").setSource(jsonBuilder()
-                    .startObject()
-                    .field("ip", "10.0.0." + (i))
-                    .startArray("ips").value("10.0.0." + i).value("10.0.0." + (i + 1)).endArray()
-                    .field("value", (i < 100 ? 1 : i < 200 ? 2 : 3))        // 100 1's, 100 2's, and 55 3's
-                    .endObject());
+    @Override
+    public void setupSuiteScopeCluster() throws Exception {
+        {
+            assertAcked(prepareCreate("idx")
+                    .addMapping("type", "ip", "type=ip", "ips", "type=ip"));
+            IndexRequestBuilder[] builders = new IndexRequestBuilder[255]; // TODO randomize the size?
+            // TODO randomize the values in the docs?
+            for (int i = 0; i < builders.length; i++) {
+                builders[i] = client().prepareIndex("idx", "type").setSource(jsonBuilder()
+                        .startObject()
+                        .field("ip", "10.0.0." + (i))
+                        .startArray("ips").value("10.0.0." + i).value("10.0.0." + (i + 1)).endArray()
+                        .field("value", (i < 100 ? 1 : i < 200 ? 2 : 3))        // 100 1's, 100 2's, and 55 3's
+                        .endObject());
+            }
+            indexRandom(true, builders);
+            createIndex("idx_unmapped");
+        }
+        {
+            assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer", "ip", "type=ip"));
+            List<IndexRequestBuilder> builders = new ArrayList<>();
+            for (int i = 0; i < 2; i++) {
+                builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(jsonBuilder()
+                        .startObject()
+                        .field("value", i * 2)
+                        .field("ip", "10.0.0.5")
+                        .endObject()));
+            }
+            indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()]));
         }
-        indexRandom(true, builders);
-        createIndex("idx_unmapped");
         ensureSearchable();
     }
 
@@ -819,17 +833,6 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void emptyAggregation() throws Exception {
-        prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer", "ip", "type=ip").execute().actionGet();
-        List<IndexRequestBuilder> builders = new ArrayList<>();
-        for (int i = 0; i < 2; i++) {
-            builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(jsonBuilder()
-                    .startObject()
-                    .field("value", i * 2)
-                    .field("ip", "10.0.0.5")
-                    .endObject()));
-        }
-        indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()]));
-
         SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
                 .setQuery(matchAllQuery())
                 .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0)
Project elasticsearch/elasticsearch in file ...rch.search.aggregations.bucket.LongTermsTests.java (2014-03-28)
@@ -33,7 +33,6 @@ import org.elasticsearch.search.aggregations.metrics.sum.Sum;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
 import org.elasticsearch.test.cache.recycler.MockBigArrays;
 import org.hamcrest.Matchers;
-import org.junit.Before;
 import org.junit.Test;
 
 import java.util.ArrayList;
@@ -43,6 +42,7 @@ import java.util.List;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -51,14 +51,15 @@ import static org.hamcrest.core.IsNull.notNullValue;
 /**
  *
  */
+@ElasticsearchIntegrationTest.SuiteScopeTest
 public class LongTermsTests extends ElasticsearchIntegrationTest {
 
     private static final int NUM_DOCS = 5; // TODO randomize the size?
     private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
     private static final String MULTI_VALUED_FIELD_NAME = "l_values";
 
-    @Before
-    public void init() throws Exception {
+    @Override
+    public void setupSuiteScopeCluster() throws Exception {
         createIndex("idx");
         IndexRequestBuilder[] lowCardBuilders = new IndexRequestBuilder[NUM_DOCS];
         for (int i = 0; i < lowCardBuilders.length; i++) {
@@ -77,13 +78,23 @@ public class LongTermsTests extends ElasticsearchIntegrationTest {
                     .field(SINGLE_VALUED_FIELD_NAME, i)
                     .startArray(MULTI_VALUED_FIELD_NAME).value(i).value(i + 1).endArray()
                     .endObject());
-                    
+
         }
         indexRandom(true, highCardBuilders);
         createIndex("idx_unmapped");
+
+        assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer"));
+        List<IndexRequestBuilder> builders = new ArrayList<>();
+        for (int i = 0; i < 2; i++) {
+            builders.add(client().prepareIndex("empty_bucket_idx", "type", ""+i).setSource(jsonBuilder()
+                    .startObject()
+                    .field(SINGLE_VALUED_FIELD_NAME, i * 2)
+                    .endObject()));
+        }
+        indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()]));
         ensureSearchable();
     }
-    
+
     private String key(Terms.Bucket bucket) {
         return randomBoolean() ? bucket.getKey() : key(bucket);
     }
@@ -602,16 +613,6 @@ public class LongTermsTests extends ElasticsearchIntegrationTest {
 
     @Test
     public void emptyAggregation() throws Exception {
-        prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer").execute().actionGet();
-        List<IndexRequestBuilder> builders = new ArrayList<>();
-        for (int i = 0; i < 2; i++) {
-            builders.add(client().prepareIndex("empty_bucket_idx", "type", ""+i).setSource(jsonBuilder()
-                    .startObject()
-                    .field(SINGLE_VALUED_FIELD_NAME, i*2)
-                    .endObject()));
-        }
-        indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()]));
-
         SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
                 .setQuery(matchAllQuery())
                 .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1l).minDocCount(0)
Project elasticsearch/elasticsearch in file ...org.elasticsearch.snapshots.RepositoriesTests.java (2014-09-10)
@@ -23,17 +23,24 @@ import com.google.common.collect.ImmutableList;
 import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse;
 import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse;
 import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
+import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
 import org.elasticsearch.cluster.metadata.RepositoryMetaData;
 import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.repositories.RepositoryException;
-import org.junit.After;
-import org.junit.Before;
+import org.elasticsearch.repositories.RepositoryVerificationException;
+import org.elasticsearch.snapshots.mockstore.MockRepositoryModule;
 import org.junit.Test;
 
+import java.io.File;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 
@@ -45,13 +52,23 @@ public class RepositoriesTests extends AbstractSnapshotTests {
     public void testRepositoryCreation() throws Exception {
         Client client = client();
 
+        File location = newTempDir(LifecycleScope.SUITE);
+
         logger.info("-->  creating repository");
         PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo-1")
                 .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
-                        .put("location", newTempDir(LifecycleScope.SUITE))
+                                .put("location", location)
                 ).get();
         assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
 
+        logger.info("--> verify the repository");
+        int numberOfFiles = location.listFiles().length;
+        VerifyRepositoryResponse verifyRepositoryResponse = client.admin().cluster().prepareVerifyRepository("test-repo-1").get();
+        assertThat(verifyRepositoryResponse.getNodes().length, equalTo(cluster().numDataAndMasterNodes()));
+
+        logger.info("--> verify that we didn't leave any files as a result of verification");
+        assertThat(location.listFiles().length, equalTo(numberOfFiles));
+
         logger.info("--> check that repository is really there");
         ClusterStateResponse clusterStateResponse = client.admin().cluster().prepareState().clear().setMetaData(true).get();
         MetaData metaData = clusterStateResponse.getState().getMetaData();
@@ -60,10 +77,10 @@ public class RepositoriesTests extends AbstractSnapshotTests {
         assertThat(repositoriesMetaData.repository("test-repo-1"), notNullValue());
         assertThat(repositoriesMetaData.repository("test-repo-1").type(), equalTo("fs"));
 
-        logger.info("-->  creating anoter repository");
+        logger.info("-->  creating another repository");
         putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo-2")
                 .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
-                        .put("location", newTempDir(LifecycleScope.SUITE))
+                                .put("location", newTempDir(LifecycleScope.SUITE))
                 ).get();
         assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
 
@@ -120,13 +137,12 @@ public class RepositoriesTests extends AbstractSnapshotTests {
 
     @Test
     public void repositoryAckTimeoutTest() throws Exception {
-
         logger.info("-->  creating repository test-repo-1 with 0s timeout - shouldn't ack");
         PutRepositoryResponse putRepositoryResponse = client().admin().cluster().preparePutRepository("test-repo-1")
                 .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
-                        .put("location", newTempDir(LifecycleScope.SUITE))
-                        .put("compress", randomBoolean())
-                        .put("chunk_size", randomIntBetween(5, 100))
+                                .put("location", newTempDir(LifecycleScope.SUITE))
+                                .put("compress", randomBoolean())
+                                .put("chunk_size", randomIntBetween(5, 100))
                 )
                 .setTimeout("0s").get();
         assertThat(putRepositoryResponse.isAcknowledged(), equalTo(false));
@@ -134,9 +150,9 @@ public class RepositoriesTests extends AbstractSnapshotTests {
         logger.info("-->  creating repository test-repo-2 with standard timeout - should ack");
         putRepositoryResponse = client().admin().cluster().preparePutRepository("test-repo-2")
                 .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
-                        .put("location", newTempDir(LifecycleScope.SUITE))
-                        .put("compress", randomBoolean())
-                        .put("chunk_size", randomIntBetween(5, 100))
+                                .put("location", newTempDir(LifecycleScope.SUITE))
+                                .put("compress", randomBoolean())
+                                .put("chunk_size", randomIntBetween(5, 100))
                 ).get();
         assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
 
@@ -150,5 +166,74 @@ public class RepositoriesTests extends AbstractSnapshotTests {
         assertThat(deleteRepositoryResponse.isAcknowledged(), equalTo(true));
     }
 
+    @Test
+    public void repositoryVerificationTest() throws Exception {
+        Client client = client();
+
+        Settings settings = ImmutableSettings.settingsBuilder()
+                .put("location", newTempDir(LifecycleScope.SUITE))
+                .put("random_control_io_exception_rate", 1.0).build();
+        logger.info("-->  creating repository that cannot write any files - should fail");
+        assertThrows(client.admin().cluster().preparePutRepository("test-repo-1")
+                        .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(settings),
+                RepositoryVerificationException.class);
+
+        logger.info("-->  creating repository that cannot write any files, but suppress verification - should be acked");
+        assertAcked(client.admin().cluster().preparePutRepository("test-repo-1")
+                .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(settings).setVerify(false));
+
+        logger.info("-->  verifying repository");
+        assertThrows(client.admin().cluster().prepareVerifyRepository("test-repo-1"), RepositoryVerificationException.class);
+
+        File location = newTempDir(LifecycleScope.SUITE);
+
+        logger.info("-->  creating repository");
+        try {
+            client.admin().cluster().preparePutRepository("test-repo-1")
+                    .setType(MockRepositoryModule.class.getCanonicalName())
+                    .setSettings(ImmutableSettings.settingsBuilder()
+                                    .put("location", location)
+                                    .put("localize_location", true)
+                    ).get();
+            fail("RepositoryVerificationException wasn't generated");
+        } catch (RepositoryVerificationException ex) {
+            assertThat(ex.getMessage(), containsString("is not shared"));
+        }
+    }
+
+    @Test
+    public void repositoryVerificationTimeoutTest() throws Exception {
+        Client client = client();
+
+        Settings settings = ImmutableSettings.settingsBuilder()
+                .put("location", newTempDir(LifecycleScope.SUITE))
+                .put("random_control_io_exception_rate", 1.0).build();
+        logger.info("-->  creating repository that cannot write any files - should fail");
+        assertThrows(client.admin().cluster().preparePutRepository("test-repo-1")
+                        .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(settings),
+                RepositoryVerificationException.class);
+
+        logger.info("-->  creating repository that cannot write any files, but suppress verification - should be acked");
+        assertAcked(client.admin().cluster().preparePutRepository("test-repo-1")
+                .setType(MockRepositoryModule.class.getCanonicalName()).setSettings(settings).setVerify(false));
+
+        logger.info("-->  verifying repository");
+        assertThrows(client.admin().cluster().prepareVerifyRepository("test-repo-1"), RepositoryVerificationException.class);
+
+        File location = newTempDir(LifecycleScope.SUITE);
+
+        logger.info("-->  creating repository");
+        try {
+            client.admin().cluster().preparePutRepository("test-repo-1")
+                    .setType(MockRepositoryModule.class.getCanonicalName())
+                    .setSettings(ImmutableSettings.settingsBuilder()
+                                    .put("location", location)
+                                    .put("localize_location", true)
+                    ).get();
+            fail("RepositoryVerificationException wasn't generated");
+        } catch (RepositoryVerificationException ex) {
+            assertThat(ex.getMessage(), containsString("is not shared"));
+        }
+    }
 
 }