Example 6 with RetriesExhaustedWithDetailsException

Use of org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException in project hbase by apache.

The class TestConstraint, method testConstraintFails.

/**
   * Test that constraints will fail properly
   * @throws Exception
   */
@SuppressWarnings("unchecked")
@Test(timeout = 60000)
public void testConstraintFails() throws Exception {
    // create the table
    // it would be nice if this was also a method on the util
    HTableDescriptor desc = new HTableDescriptor(tableName);
    for (byte[] family : new byte[][] { dummy, test }) {
        desc.addFamily(new HColumnDescriptor(family));
    }
    // add a constraint that is sure to fail
    Constraints.add(desc, AllFailConstraint.class);
    util.getAdmin().createTable(desc);
    Table table = util.getConnection().getTable(tableName);
    // test that we do fail on violation
    Put put = new Put(row1);
    byte[] qualifier = new byte[0];
    put.addColumn(dummy, qualifier, "fail".getBytes());
    LOG.warn("Doing put in table");
    try {
        table.put(put);
        fail("This put should not have suceeded - AllFailConstraint was not run!");
    } catch (RetriesExhaustedWithDetailsException e) {
        List<Throwable> causes = e.getCauses();
        assertEquals("More than one failure cause - should only be the failure constraint exception", 1, causes.size());
        Throwable t = causes.get(0);
        assertEquals(ConstraintException.class, t.getClass());
    }
    table.close();
}
Also used : Table(org.apache.hadoop.hbase.client.Table) RetriesExhaustedWithDetailsException(org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) List(java.util.List) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
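The test depends on AllFailConstraint, which is not shown here. As a rough sketch of what such a constraint looks like under HBase's constraint API (assuming the usual BaseConstraint base class and its check(Put) hook; the real AllFailConstraint lives in the HBase test sources), it simply rejects every Put:

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.constraint.BaseConstraint;
import org.apache.hadoop.hbase.constraint.ConstraintException;

public class AllFailConstraint extends BaseConstraint {

    @Override
    public void check(Put p) throws ConstraintException {
        // Unconditionally reject the edit. On the client, the failed Put
        // surfaces inside a RetriesExhaustedWithDetailsException whose single
        // cause is this ConstraintException, which is what the test asserts.
        throw new ConstraintException("AllFailConstraint fails for all puts");
    }
}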

Example 7 with RetriesExhaustedWithDetailsException

Use of org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException in project hbase by apache.

The class BufferedMutatorExample, method run.

@Override
public int run(String[] args) throws InterruptedException, ExecutionException, TimeoutException {
    /** a callback invoked when an asynchronous write fails. */
    final BufferedMutator.ExceptionListener listener = new BufferedMutator.ExceptionListener() {

        @Override
        public void onException(RetriesExhaustedWithDetailsException e, BufferedMutator mutator) {
            for (int i = 0; i < e.getNumExceptions(); i++) {
                LOG.info("Failed to sent put " + e.getRow(i) + ".");
            }
        }
    };
    BufferedMutatorParams params = new BufferedMutatorParams(TABLE).listener(listener);
    //
    // step 1: create a single Connection and a BufferedMutator, shared by all worker threads.
    //
    try (final Connection conn = ConnectionFactory.createConnection(getConf());
        final BufferedMutator mutator = conn.getBufferedMutator(params)) {
        /** worker pool that operates on BufferedTable instances */
        final ExecutorService workerPool = Executors.newFixedThreadPool(POOL_SIZE);
        List<Future<Void>> futures = new ArrayList<>(TASK_COUNT);
        for (int i = 0; i < TASK_COUNT; i++) {
            futures.add(workerPool.submit(new Callable<Void>() {

                @Override
                public Void call() throws Exception {
                    //
                    // step 2: each worker sends edits to the shared BufferedMutator instance. They all use
                    // the same backing buffer, call-back "listener", and RPC executor pool.
                    //
                    Put p = new Put(Bytes.toBytes("someRow"));
                    p.addColumn(FAMILY, Bytes.toBytes("someQualifier"), Bytes.toBytes("some value"));
                    mutator.mutate(p);
                    // do work... maybe you want to call mutator.flush() after many edits to ensure
                    // any of this worker's edits are sent before exiting the Callable
                    return null;
                }
            }));
        }
        //
        // step 3: clean up the worker pool, shut down.
        //
        for (Future<Void> f : futures) {
            f.get(5, TimeUnit.MINUTES);
        }
        workerPool.shutdown();
    } catch (IOException e) {
        // exception while creating/destroying Connection or BufferedMutator
        LOG.info("exception while creating/destroying Connection or BufferedMutator", e);
    }
    // BufferedMutator.close() ensures all work is flushed. Could be the custom listener is
    // invoked from here.
    return 0;
}
Also used : BufferedMutator(org.apache.hadoop.hbase.client.BufferedMutator) Connection(org.apache.hadoop.hbase.client.Connection) ArrayList(java.util.ArrayList) IOException(java.io.IOException) Callable(java.util.concurrent.Callable) Put(org.apache.hadoop.hbase.client.Put) RetriesExhaustedWithDetailsException(org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException) BufferedMutatorParams(org.apache.hadoop.hbase.client.BufferedMutatorParams) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future)
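The listener above logs only the failed row. RetriesExhaustedWithDetailsException also exposes the per-row cause and the server that was contacted, so a listener can report considerably more; a minimal sketch, using only accessors from the public client API:

final BufferedMutator.ExceptionListener verboseListener = new BufferedMutator.ExceptionListener() {

    @Override
    public void onException(RetriesExhaustedWithDetailsException e, BufferedMutator mutator) {
        // Index i identifies one failed mutation: the row it targeted, the
        // underlying cause, and the host:port of the region server contacted.
        for (int i = 0; i < e.getNumExceptions(); i++) {
            LOG.info("Failed to send put " + e.getRow(i) + " to "
                + e.getHostnamePort(i) + ": " + e.getCause(i));
        }
    }
};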

Example 8 with RetriesExhaustedWithDetailsException

Use of org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException in project hbase by apache.

The class MultiThreadedWriterWithACL, method recordFailure.

private void recordFailure(final Table table, final Put put, final long keyBase, final long start, IOException e) {
    failedKeySet.add(keyBase);
    String exceptionInfo;
    if (e instanceof RetriesExhaustedWithDetailsException) {
        RetriesExhaustedWithDetailsException aggEx = (RetriesExhaustedWithDetailsException) e;
        exceptionInfo = aggEx.getExhaustiveDescription();
    } else {
        // For any other failure, log a stringified stack trace. (The
        // StringWriter/PrintWriter pair below is effectively unused, since
        // stringifyException formats the stack trace itself.)
        StringWriter stackWriter = new StringWriter();
        PrintWriter pw = new PrintWriter(stackWriter);
        e.printStackTrace(pw);
        pw.flush();
        exceptionInfo = StringUtils.stringifyException(e);
    }
    LOG.error("Failed to insert: " + keyBase + " after " + (System.currentTimeMillis() - start)
        + "ms; region information: " + getRegionDebugInfoSafe(table, put.getRow())
        + "; errors: " + exceptionInfo);
}
Also used : RetriesExhaustedWithDetailsException(org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException) StringWriter(java.io.StringWriter) PrintWriter(java.io.PrintWriter)
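The type check in recordFailure is a reusable pattern: getExhaustiveDescription() already aggregates every per-row failure into one string, while other IOExceptions need their stack trace stringified. A hypothetical standalone helper (not part of HBase) capturing the same idea:

import java.io.IOException;
import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.util.StringUtils;

final class WriteFailures {

    // Turn any client-side write failure into a readable description.
    static String describe(IOException e) {
        if (e instanceof RetriesExhaustedWithDetailsException) {
            // The aggregate exception knows every per-row cause and server.
            return ((RetriesExhaustedWithDetailsException) e).getExhaustiveDescription();
        }
        // Anything else: fall back to Hadoop's stack-trace formatting.
        return StringUtils.stringifyException(e);
    }
}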

Example 9 with RetriesExhaustedWithDetailsException

Use of org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException in project phoenix by apache.

The class FailWithoutRetriesIT, method testQuickFailure.

/**
     * If this test times out, then we didn't fail quickly enough; {@link Indexer} may not be
     * rethrowing the exception correctly.
     * <p>
     * We use a custom codec to enforce the thrown exception.
     * 
     * @throws Exception
     */
@Test(timeout = 300000)
public void testQuickFailure() throws Exception {
    // incorrectly setup indexing for the primary table - target index table doesn't exist, which
    // should quickly return to the client
    byte[] family = Bytes.toBytes("family");
    ColumnGroup fam1 = new ColumnGroup(getIndexTableName());
    // values are [col1]
    fam1.add(new CoveredColumn(family, CoveredColumn.ALL_QUALIFIERS));
    CoveredColumnIndexSpecifierBuilder builder = new CoveredColumnIndexSpecifierBuilder();
    // add the index family
    builder.addIndexGroup(fam1);
    // usually, we would create the index table here, but we don't for the sake of the test.
    // setup the primary table
    String primaryTable = Bytes.toString(table.getTableName());
    @SuppressWarnings("deprecation") HTableDescriptor pTable = new HTableDescriptor(primaryTable);
    pTable.addFamily(new HColumnDescriptor(family));
    // override the codec so we can use our test one
    builder.build(pTable, FailingTestCodec.class);
    // create the primary table
    HBaseAdmin admin = UTIL.getHBaseAdmin();
    admin.createTable(pTable);
    Configuration conf = new Configuration(UTIL.getConfiguration());
    // up the number of retries/wait time to make it obvious that we are failing with retries here
    conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 20);
    conf.setLong(HConstants.HBASE_CLIENT_PAUSE, 1000);
    HTable primary = new HTable(conf, primaryTable);
    primary.setAutoFlush(false, true);
    // do a simple put that should be indexed
    Put p = new Put(Bytes.toBytes("row"));
    p.add(family, null, Bytes.toBytes("value"));
    primary.put(p);
    try {
        primary.flushCommits();
        fail("Shouldn't have gotten a successful write to the primary table");
    } catch (RetriesExhaustedWithDetailsException e) {
        LOG.info("Correclty got a failure of the put!");
    }
    primary.close();
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTable(org.apache.hadoop.hbase.client.HTable) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) RetriesExhaustedWithDetailsException(org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException) Test(org.junit.Test) NeedsOwnMiniClusterTest(org.apache.phoenix.end2end.NeedsOwnMiniClusterTest)

Aggregations

RetriesExhaustedWithDetailsException (org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException) 9
Put (org.apache.hadoop.hbase.client.Put) 7
IOException (java.io.IOException) 3
Table (org.apache.hadoop.hbase.client.Table) 3
Test (org.junit.Test) 3
PrintWriter (java.io.PrintWriter) 2
StringWriter (java.io.StringWriter) 2
ArrayList (java.util.ArrayList) 2
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor) 2
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor) 2
HTable (org.apache.hadoop.hbase.client.HTable) 2
HashSet (java.util.HashSet) 1
List (java.util.List) 1
Callable (java.util.concurrent.Callable) 1
ExecutorService (java.util.concurrent.ExecutorService) 1
Future (java.util.concurrent.Future) 1
TimeoutException (java.util.concurrent.TimeoutException) 1
WebApplicationException (javax.ws.rs.WebApplicationException) 1
Configuration (org.apache.hadoop.conf.Configuration) 1
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo) 1