
Example 51 with ExecutionException

use of java.util.concurrent.ExecutionException in project hbase by apache.

the class RegionMover method load.

/**
   * Loads the specified {@link #hostname} with the regions listed in {@link #filename}. The
   * RegionMover object has to be created using {@link #RegionMover(RegionMoverBuilder)}.
   * @return true if loading succeeded, false otherwise
   * @throws ExecutionException if the loading task fails
   * @throws InterruptedException if the loader thread was interrupted
   * @throws TimeoutException if the loading task's result is not available in time
   */
public boolean load() throws ExecutionException, InterruptedException, TimeoutException {
    setConf();
    ExecutorService loadPool = Executors.newFixedThreadPool(1);
    Future<Boolean> loadTask = loadPool.submit(new Load(this));
    loadPool.shutdown();
    try {
        if (!loadPool.awaitTermination((long) this.timeout, TimeUnit.SECONDS)) {
            LOG.warn("Timed out before finishing the loading operation. Timeout:" + this.timeout + "sec");
            loadPool.shutdownNow();
        }
    } catch (InterruptedException e) {
        loadPool.shutdownNow();
        Thread.currentThread().interrupt();
    }
    try {
        return loadTask.get(5, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        LOG.warn("Interrupted while loading Regions on " + this.hostname, e);
        throw e;
    } catch (ExecutionException e) {
        LOG.error("Error while loading regions on RegionServer " + this.hostname, e);
        throw e;
    }
}
Also used : ExecutorService(java.util.concurrent.ExecutorService) ExecutionException(java.util.concurrent.ExecutionException)
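
The method hands the real work to a single-thread pool and surfaces failures to the caller as ExecutionException, InterruptedException, or TimeoutException, so callers typically unwrap the cause before deciding what to do. A minimal caller sketch follows; the package import and the assumption that the mover was built with RegionMoverBuilder are taken from the Javadoc above, not from this snippet.

import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;

import org.apache.hadoop.hbase.util.RegionMover;   // package assumed from the HBase source tree

// Sketch of calling load() and unwrapping its failures. The RegionMover is assumed
// to have been created via RegionMover.RegionMoverBuilder, as the Javadoc requires.
public class LoadCaller {
    static boolean loadAndReport(RegionMover mover) throws InterruptedException {
        try {
            return mover.load();
        } catch (ExecutionException e) {
            // The real failure is the cause; ExecutionException is only the wrapper.
            System.err.println("Load task failed: " + e.getCause());
            return false;
        } catch (TimeoutException e) {
            System.err.println("Load did not finish within the configured timeout");
            return false;
        }
    }
}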

Example 52 with ExecutionException

use of java.util.concurrent.ExecutionException in project hbase by apache.

the class HBaseFsck method processRegionServers.

/**
   * Contacts each regionserver and fetches metadata about regions.
   * @param regionServerList - the list of region servers to connect to
   * @throws IOException if a remote or network exception occurs
   * @throws InterruptedException if interrupted while waiting for the work items to complete
   */
void processRegionServers(Collection<ServerName> regionServerList) throws IOException, InterruptedException {
    List<WorkItemRegion> workItems = new ArrayList<>(regionServerList.size());
    List<Future<Void>> workFutures;
    // loop to contact each region server in parallel
    for (ServerName rsinfo : regionServerList) {
        workItems.add(new WorkItemRegion(this, rsinfo, errors, connection));
    }
    workFutures = executor.invokeAll(workItems);
    for (int i = 0; i < workFutures.size(); i++) {
        WorkItemRegion item = workItems.get(i);
        Future<Void> f = workFutures.get(i);
        try {
            f.get();
        } catch (ExecutionException e) {
            LOG.warn("Could not process regionserver " + item.rsinfo.getHostAndPort(), e.getCause());
        }
    }
}
Also used : ServerName(org.apache.hadoop.hbase.ServerName) ArrayList(java.util.ArrayList) Future(java.util.concurrent.Future) ExecutionException(java.util.concurrent.ExecutionException)
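
The shape here is the standard invokeAll pattern: submit every work item, then walk the returned futures and inspect each one, logging the wrapped cause rather than the ExecutionException itself so the failing work item can still be identified. A generic sketch of the same pattern, using only JDK types:

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

// Generic sketch of the invokeAll pattern used above: run every task,
// then examine each Future and report the wrapped cause when a task fails.
public class InvokeAllPattern {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newFixedThreadPool(2);
        List<Callable<Void>> tasks = Arrays.asList(
                () -> null,                                    // a work item that succeeds
                () -> { throw new RuntimeException("boom"); }  // a work item that fails
        );
        List<Future<Void>> futures = executor.invokeAll(tasks);
        for (int i = 0; i < futures.size(); i++) {
            try {
                futures.get(i).get();
            } catch (ExecutionException e) {
                // As in processRegionServers, report the underlying cause, not the wrapper.
                System.err.println("Work item " + i + " failed: " + e.getCause());
            }
        }
        executor.shutdown();
    }
}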

Example 53 with ExecutionException

use of java.util.concurrent.ExecutionException in project hbase by apache.

the class TestAsyncTableGetMultiThreaded method test.

@Test
public void test() throws IOException, InterruptedException, ExecutionException {
    int numThreads = 20;
    AtomicBoolean stop = new AtomicBoolean(false);
    ExecutorService executor = Executors.newFixedThreadPool(numThreads, Threads.newDaemonThreadFactory("TestAsyncGet-"));
    List<Future<?>> futures = new ArrayList<>();
    IntStream.range(0, numThreads).forEach(i -> futures.add(executor.submit(() -> {
        run(stop);
        return null;
    })));
    Collections.shuffle(Arrays.asList(SPLIT_KEYS), new Random(123));
    Admin admin = TEST_UTIL.getAdmin();
    for (byte[] splitPoint : SPLIT_KEYS) {
        admin.split(TABLE_NAME, splitPoint);
        for (HRegion region : TEST_UTIL.getHBaseCluster().getRegions(TABLE_NAME)) {
            region.compact(true);
        }
        Thread.sleep(5000);
        admin.balancer(true);
        Thread.sleep(5000);
        ServerName metaServer = TEST_UTIL.getHBaseCluster().getServerHoldingMeta();
        ServerName newMetaServer = TEST_UTIL.getHBaseCluster().getRegionServerThreads().stream().map(t -> t.getRegionServer().getServerName()).filter(s -> !s.equals(metaServer)).findAny().get();
        admin.move(HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes(), Bytes.toBytes(newMetaServer.getServerName()));
        Thread.sleep(5000);
    }
    stop.set(true);
    executor.shutdown();
    for (Future<?> future : futures) {
        future.get();
    }
}
Also used : IntStream(java.util.stream.IntStream) Arrays(java.util.Arrays) BeforeClass(org.junit.BeforeClass) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Random(java.util.Random) ClientTests(org.apache.hadoop.hbase.testclassification.ClientTests) ArrayList(java.util.ArrayList) Future(java.util.concurrent.Future) ExecutorService(java.util.concurrent.ExecutorService) Threads(org.apache.hadoop.hbase.util.Threads) Bytes(org.apache.hadoop.hbase.util.Bytes) TABLES_ON_MASTER(org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.TABLES_ON_MASTER) AfterClass(org.junit.AfterClass) CompactingMemStore(org.apache.hadoop.hbase.regionserver.CompactingMemStore) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) LargeTests(org.apache.hadoop.hbase.testclassification.LargeTests) IOException(java.io.IOException) Test(org.junit.Test) Category(org.junit.experimental.categories.Category) Collectors(java.util.stream.Collectors) Executors(java.util.concurrent.Executors) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) org.apache.hadoop.hbase(org.apache.hadoop.hbase) IOUtils(org.apache.commons.io.IOUtils) List(java.util.List) ByteBufferPool(org.apache.hadoop.hbase.io.ByteBufferPool) HBASE_CLIENT_META_OPERATION_TIMEOUT(org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_META_OPERATION_TIMEOUT) Collections(java.util.Collections) Assert.assertEquals(org.junit.Assert.assertEquals) ArrayList(java.util.ArrayList) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Random(java.util.Random) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future) Test(org.junit.Test)
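
The test keeps every background reader's Future and only calls future.get() after flipping the stop flag, so any exception thrown by a worker resurfaces on the main thread as an ExecutionException and fails the test. A stripped-down sketch of that "collect futures, join at the end" shape, with a sleep standing in for the foreground splits and moves:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.IntStream;

// Background workers run until a stop flag flips; future.get() then rethrows
// any worker failure as an ExecutionException on the calling thread.
public class BackgroundWorkersSketch {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        AtomicBoolean stop = new AtomicBoolean(false);
        ExecutorService executor = Executors.newFixedThreadPool(4);
        List<Future<?>> futures = new ArrayList<>();
        IntStream.range(0, 4).forEach(i -> futures.add(executor.submit(() -> {
            while (!stop.get()) {
                Thread.sleep(10);   // stand-in for one unit of background work
            }
            return null;
        })));
        Thread.sleep(1000);         // stand-in for the foreground work (splits, moves, ...)
        stop.set(true);
        executor.shutdown();
        for (Future<?> future : futures) {
            future.get();           // propagates any worker exception to the caller
        }
    }
}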

Example 54 with ExecutionException

use of java.util.concurrent.ExecutionException in project hbase by apache.

the class TestAsyncTableBatch method test.

@Test
public void test() throws InterruptedException, ExecutionException, IOException {
    AsyncTableBase table = tableGetter.apply(TABLE_NAME);
    table.putAll(IntStream.range(0, COUNT).mapToObj(i -> new Put(getRow(i)).addColumn(FAMILY, CQ, Bytes.toBytes(i))).collect(Collectors.toList())).get();
    List<Result> results = table.getAll(IntStream.range(0, COUNT).mapToObj(i -> Arrays.asList(new Get(getRow(i)), new Get(Arrays.copyOf(getRow(i), 4)))).flatMap(l -> l.stream()).collect(Collectors.toList())).get();
    assertEquals(2 * COUNT, results.size());
    for (int i = 0; i < COUNT; i++) {
        assertEquals(i, Bytes.toInt(results.get(2 * i).getValue(FAMILY, CQ)));
        assertTrue(results.get(2 * i + 1).isEmpty());
    }
    Admin admin = TEST_UTIL.getAdmin();
    admin.flush(TABLE_NAME);
    TEST_UTIL.getHBaseCluster().getRegions(TABLE_NAME).forEach(r -> {
        byte[] startKey = r.getRegionInfo().getStartKey();
        int number = startKey.length == 0 ? 55 : Integer.parseInt(Bytes.toString(startKey));
        byte[] splitPoint = Bytes.toBytes(String.format("%03d", number + 55));
        try {
            admin.splitRegion(r.getRegionInfo().getRegionName(), splitPoint);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    });
    // we are not going to test the function of split so no assertion here. Just wait for a while
    // and then start our work.
    Thread.sleep(5000);
    table.deleteAll(IntStream.range(0, COUNT).mapToObj(i -> new Delete(getRow(i))).collect(Collectors.toList())).get();
    results = table.getAll(IntStream.range(0, COUNT).mapToObj(i -> new Get(getRow(i))).collect(Collectors.toList())).get();
    assertEquals(COUNT, results.size());
    results.forEach(r -> assertTrue(r.isEmpty()));
}
Also used : IntStream(java.util.stream.IntStream) Arrays(java.util.Arrays) BeforeClass(org.junit.BeforeClass) RunWith(org.junit.runner.RunWith) Parameters(org.junit.runners.Parameterized.Parameters) CompletableFuture(java.util.concurrent.CompletableFuture) ClientTests(org.apache.hadoop.hbase.testclassification.ClientTests) Function(java.util.function.Function) ArrayList(java.util.ArrayList) CoreMatchers.instanceOf(org.hamcrest.CoreMatchers.instanceOf) Assert.assertThat(org.junit.Assert.assertThat) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Assert.assertArrayEquals(org.junit.Assert.assertArrayEquals) After(org.junit.After) RegionCoprocessorEnvironment(org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment) Parameterized(org.junit.runners.Parameterized) Cell(org.apache.hadoop.hbase.Cell) Bytes(org.apache.hadoop.hbase.util.Bytes) Before(org.junit.Before) TableName(org.apache.hadoop.hbase.TableName) AfterClass(org.junit.AfterClass) RegionObserver(org.apache.hadoop.hbase.coprocessor.RegionObserver) Parameter(org.junit.runners.Parameterized.Parameter) Assert.assertTrue(org.junit.Assert.assertTrue) LargeTests(org.apache.hadoop.hbase.testclassification.LargeTests) IOException(java.io.IOException) Test(org.junit.Test) Category(org.junit.experimental.categories.Category) Collectors(java.util.stream.Collectors) UncheckedIOException(java.io.UncheckedIOException) ExecutionException(java.util.concurrent.ExecutionException) List(java.util.List) HBaseTestingUtility(org.apache.hadoop.hbase.HBaseTestingUtility) ForkJoinPool(java.util.concurrent.ForkJoinPool) ObserverContext(org.apache.hadoop.hbase.coprocessor.ObserverContext) Assert.assertEquals(org.junit.Assert.assertEquals) UncheckedIOException(java.io.UncheckedIOException) IOException(java.io.IOException) UncheckedIOException(java.io.UncheckedIOException) Test(org.junit.Test)
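
Besides the get() calls that turn async batch failures into ExecutionException, the snippet shows a second idiom: the forEach lambda cannot throw the checked IOException from splitRegion, so it wraps it in UncheckedIOException and lets it escape the stream pipeline. A self-contained sketch of that wrapping pattern, with a hypothetical mayFail helper standing in for the HBase call:

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Arrays;
import java.util.List;

// Checked exceptions cannot escape a forEach lambda directly, so the lambda
// wraps them in UncheckedIOException and the call site unwraps the cause.
public class LambdaCheckedExceptionSketch {
    static void mayFail(String name) throws IOException {
        if (name.isEmpty()) {
            throw new IOException("empty name");
        }
    }

    public static void main(String[] args) {
        List<String> names = Arrays.asList("a", "b", "");
        try {
            names.forEach(n -> {
                try {
                    mayFail(n);
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            });
        } catch (UncheckedIOException e) {
            // Recover the original checked exception at the call site.
            System.err.println("forEach failed: " + e.getCause().getMessage());
        }
    }
}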

Example 55 with ExecutionException

use of java.util.concurrent.ExecutionException in project hbase by apache.

the class TestAsyncTableBatch method testPartialSuccess.

@Test
public void testPartialSuccess() throws IOException, InterruptedException, ExecutionException {
    Admin admin = TEST_UTIL.getAdmin();
    HTableDescriptor htd = admin.getTableDescriptor(TABLE_NAME);
    htd.addCoprocessor(ErrorInjectObserver.class.getName());
    admin.modifyTable(TABLE_NAME, htd);
    AsyncTableBase table = tableGetter.apply(TABLE_NAME);
    table.putAll(Arrays.asList(SPLIT_KEYS).stream().map(k -> new Put(k).addColumn(FAMILY, CQ, k)).collect(Collectors.toList())).get();
    List<CompletableFuture<Result>> futures = table.get(Arrays.asList(SPLIT_KEYS).stream().map(k -> new Get(k)).collect(Collectors.toList()));
    for (int i = 0; i < SPLIT_KEYS.length - 1; i++) {
        assertArrayEquals(SPLIT_KEYS[i], futures.get(i).get().getValue(FAMILY, CQ));
    }
    try {
        futures.get(SPLIT_KEYS.length - 1).get();
    } catch (ExecutionException e) {
        assertThat(e.getCause(), instanceOf(RetriesExhaustedException.class));
    }
}
Also used : IntStream(java.util.stream.IntStream) Arrays(java.util.Arrays) BeforeClass(org.junit.BeforeClass) RunWith(org.junit.runner.RunWith) Parameters(org.junit.runners.Parameterized.Parameters) CompletableFuture(java.util.concurrent.CompletableFuture) ClientTests(org.apache.hadoop.hbase.testclassification.ClientTests) Function(java.util.function.Function) ArrayList(java.util.ArrayList) CoreMatchers.instanceOf(org.hamcrest.CoreMatchers.instanceOf) Assert.assertThat(org.junit.Assert.assertThat) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Assert.assertArrayEquals(org.junit.Assert.assertArrayEquals) After(org.junit.After) RegionCoprocessorEnvironment(org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment) Parameterized(org.junit.runners.Parameterized) Cell(org.apache.hadoop.hbase.Cell) Bytes(org.apache.hadoop.hbase.util.Bytes) Before(org.junit.Before) TableName(org.apache.hadoop.hbase.TableName) AfterClass(org.junit.AfterClass) RegionObserver(org.apache.hadoop.hbase.coprocessor.RegionObserver) Parameter(org.junit.runners.Parameterized.Parameter) Assert.assertTrue(org.junit.Assert.assertTrue) LargeTests(org.apache.hadoop.hbase.testclassification.LargeTests) IOException(java.io.IOException) Test(org.junit.Test) Category(org.junit.experimental.categories.Category) Collectors(java.util.stream.Collectors) UncheckedIOException(java.io.UncheckedIOException) ExecutionException(java.util.concurrent.ExecutionException) List(java.util.List) HBaseTestingUtility(org.apache.hadoop.hbase.HBaseTestingUtility) ForkJoinPool(java.util.concurrent.ForkJoinPool) ObserverContext(org.apache.hadoop.hbase.coprocessor.ObserverContext) Assert.assertEquals(org.junit.Assert.assertEquals) CompletableFuture(java.util.concurrent.CompletableFuture) ExecutionException(java.util.concurrent.ExecutionException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
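
When a per-request CompletableFuture fails, get() rethrows the failure wrapped in an ExecutionException, so the useful assertion is on getCause(), as the test does with RetriesExhaustedException. A minimal JDK-only sketch of that cause-inspection pattern:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

// A failed CompletableFuture rethrows its failure from get() wrapped in an
// ExecutionException; the original exception is available via getCause().
public class FailedFutureCauseSketch {
    public static void main(String[] args) throws InterruptedException {
        CompletableFuture<String> future = new CompletableFuture<>();
        future.completeExceptionally(new IllegalStateException("injected error"));
        try {
            future.get();
        } catch (ExecutionException e) {
            // e.getCause() is the IllegalStateException the future was completed with.
            System.out.println("cause: " + e.getCause().getClass().getSimpleName());
        }
    }
}

As in the test above, the interesting check is on the wrapped cause rather than on the ExecutionException itself.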

Aggregations

ExecutionException (java.util.concurrent.ExecutionException): 1341
IOException (java.io.IOException): 367
Test (org.junit.Test): 335
TimeoutException (java.util.concurrent.TimeoutException): 258
ArrayList (java.util.ArrayList): 237
Future (java.util.concurrent.Future): 218
ExecutorService (java.util.concurrent.ExecutorService): 152
CountDownLatch (java.util.concurrent.CountDownLatch): 103
List (java.util.List): 98
CancellationException (java.util.concurrent.CancellationException): 98
Callable (java.util.concurrent.Callable): 97
Test (org.testng.annotations.Test): 78
HashMap (java.util.HashMap): 69
Map (java.util.Map): 65
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 64
RejectedExecutionException (java.util.concurrent.RejectedExecutionException): 63
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 56
ParallelTest (com.hazelcast.test.annotation.ParallelTest): 47
QuickTest (com.hazelcast.test.annotation.QuickTest): 47
UncheckedExecutionException (com.google.common.util.concurrent.UncheckedExecutionException): 46