use of org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException in project hbase by apache.
the class TestConstraint method testConstraintFails.
/**
 * Test that constraints will fail properly.
 * @throws Exception
 */
@SuppressWarnings("unchecked")
@Test(timeout = 60000)
public void testConstraintFails() throws Exception {
  // create the table
  // it would be nice if this was also a method on the util
  HTableDescriptor desc = new HTableDescriptor(tableName);
  for (byte[] family : new byte[][] { dummy, test }) {
    desc.addFamily(new HColumnDescriptor(family));
  }
  // add a constraint that is sure to fail
  Constraints.add(desc, AllFailConstraint.class);
  util.getAdmin().createTable(desc);
  Table table = util.getConnection().getTable(tableName);
  // test that we do fail on violation
  Put put = new Put(row1);
  byte[] qualifier = new byte[0];
  put.addColumn(dummy, qualifier, Bytes.toBytes("fail"));
  LOG.warn("Doing put in table");
  try {
    table.put(put);
    fail("This put should not have succeeded - AllFailConstraint was not run!");
  } catch (RetriesExhaustedWithDetailsException e) {
    List<Throwable> causes = e.getCauses();
    assertEquals("More than one failure cause - should only be the failure constraint exception",
      1, causes.size());
    Throwable t = causes.get(0);
    assertEquals(ConstraintException.class, t.getClass());
  }
  table.close();
}
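The AllFailConstraint used above is not shown on this page. A minimal sketch of what such a constraint could look like, assuming the BaseConstraint base class and ConstraintException from org.apache.hadoop.hbase.constraint (the concrete class in the HBase test sources may differ):

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.constraint.BaseConstraint;
import org.apache.hadoop.hbase.constraint.ConstraintException;

// Hypothetical reconstruction: a constraint that rejects every Put. Server-side,
// the thrown ConstraintException surfaces to the client as the single cause inside
// the RetriesExhaustedWithDetailsException asserted in the test above.
public class AllFailConstraint extends BaseConstraint {
  @Override
  public void check(Put p) throws ConstraintException {
    throw new ConstraintException("AllFailConstraint fails for all puts");
  }
}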
use of org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException in project hbase by apache.
the class BufferedMutatorExample method run.
@Override
public int run(String[] args) throws InterruptedException, ExecutionException, TimeoutException {
  /** a callback invoked when an asynchronous write fails. */
  final BufferedMutator.ExceptionListener listener = new BufferedMutator.ExceptionListener() {
    @Override
    public void onException(RetriesExhaustedWithDetailsException e, BufferedMutator mutator) {
      for (int i = 0; i < e.getNumExceptions(); i++) {
        LOG.info("Failed to send put " + e.getRow(i) + ".");
      }
    }
  };
  BufferedMutatorParams params = new BufferedMutatorParams(TABLE).listener(listener);

  //
  // step 1: create a single Connection and a BufferedMutator, shared by all worker threads.
  //
  try (final Connection conn = ConnectionFactory.createConnection(getConf());
      final BufferedMutator mutator = conn.getBufferedMutator(params)) {
    /** worker pool that operates on BufferedTable instances */
    final ExecutorService workerPool = Executors.newFixedThreadPool(POOL_SIZE);
    List<Future<Void>> futures = new ArrayList<>(TASK_COUNT);
    for (int i = 0; i < TASK_COUNT; i++) {
      futures.add(workerPool.submit(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
          //
          // step 2: each worker sends edits to the shared BufferedMutator instance. They all use
          // the same backing buffer, call-back "listener", and RPC executor pool.
          //
          Put p = new Put(Bytes.toBytes("someRow"));
          p.addColumn(FAMILY, Bytes.toBytes("someQualifier"), Bytes.toBytes("some value"));
          mutator.mutate(p);
          // an explicit mutator.flush() here would ensure this worker's edits are sent before
          // exiting the Callable
          return null;
        }
      }));
    }

    //
    // step 3: wait on the workers, then shut down the pool.
    //
    for (Future<Void> f : futures) {
      f.get(5, TimeUnit.MINUTES);
    }
    workerPool.shutdown();
  } catch (IOException e) {
    // exception while creating/destroying Connection or BufferedMutator
    LOG.info("exception while creating/destroying Connection or BufferedMutator", e);
  }
  // closing the BufferedMutator (via try-with-resources) flushes any remaining work;
  // the custom listener may be invoked from here.
  return 0;
}
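The listener in this example only logs the Row of each failed action. RetriesExhaustedWithDetailsException also carries the per-action cause and the server that was contacted, so a listener can report considerably more. A sketch of a more detailed listener, assuming the same LOG, Bytes, and client API as the example above:

// Logs the row, the server, and the underlying cause for each failed action.
final BufferedMutator.ExceptionListener verboseListener = new BufferedMutator.ExceptionListener() {
  @Override
  public void onException(RetriesExhaustedWithDetailsException e, BufferedMutator mutator) {
    for (int i = 0; i < e.getNumExceptions(); i++) {
      LOG.warn("Failed to send put for row " + Bytes.toStringBinary(e.getRow(i).getRow())
          + " to server " + e.getHostnamePort(i), e.getCause(i));
    }
  }
};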
use of org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException in project hbase by apache.
the class MultiThreadedWriterWithACL method recordFailure.
private void recordFailure(final Table table, final Put put, final long keyBase, final long start,
    IOException e) {
  failedKeySet.add(keyBase);
  String exceptionInfo;
  if (e instanceof RetriesExhaustedWithDetailsException) {
    // an aggregate failure: describe every failed action, not just one stack trace
    RetriesExhaustedWithDetailsException aggEx = (RetriesExhaustedWithDetailsException) e;
    exceptionInfo = aggEx.getExhaustiveDescription();
  } else {
    // stringifyException already captures the full stack trace
    exceptionInfo = StringUtils.stringifyException(e);
  }
  LOG.error("Failed to insert: " + keyBase + " after " + (System.currentTimeMillis() - start)
      + "ms; region information: " + getRegionDebugInfoSafe(table, put.getRow()) + "; errors: "
      + exceptionInfo);
}
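getExhaustiveDescription() is the key difference here: for an aggregate failure it yields one entry per failed action (row, server, cause) rather than a single stack trace. A small hypothetical helper isolating the same branching, with an extra hint from mayHaveClusterIssues():

// Hypothetical helper: summarize a client-side write failure for logging.
static String describeFailure(IOException e) {
  if (e instanceof RetriesExhaustedWithDetailsException) {
    RetriesExhaustedWithDetailsException agg = (RetriesExhaustedWithDetailsException) e;
    // one entry per failed action, including the row, server, and cause
    String desc = agg.getExhaustiveDescription();
    return agg.mayHaveClusterIssues() ? desc + " (may have cluster issues)" : desc;
  }
  // a single failure: the stringified stack trace is enough
  return StringUtils.stringifyException(e);
}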
use of org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException in project phoenix by apache.
the class FailWithoutRetriesIT method testQuickFailure.
/**
 * If this test times out, then we didn't fail quickly enough. Perhaps {@link Indexer} isn't
 * rethrowing the exception correctly?
 * <p>
 * We use a custom codec to enforce the thrown exception.
 * @throws Exception
 */
@Test(timeout = 300000)
public void testQuickFailure() throws Exception {
  // incorrectly setup indexing for the primary table - target index table doesn't exist, which
  // should quickly return to the client
  byte[] family = Bytes.toBytes("family");
  ColumnGroup fam1 = new ColumnGroup(getIndexTableName());
  // values are [col1]
  fam1.add(new CoveredColumn(family, CoveredColumn.ALL_QUALIFIERS));
  CoveredColumnIndexSpecifierBuilder builder = new CoveredColumnIndexSpecifierBuilder();
  // add the index family
  builder.addIndexGroup(fam1);
  // usually, we would create the index table here, but we don't for the sake of the test.
  // setup the primary table
  String primaryTable = Bytes.toString(table.getTableName());
  @SuppressWarnings("deprecation")
  HTableDescriptor pTable = new HTableDescriptor(primaryTable);
  pTable.addFamily(new HColumnDescriptor(family));
  // override the codec so we can use our test one
  builder.build(pTable, FailingTestCodec.class);
  // create the primary table
  HBaseAdmin admin = UTIL.getHBaseAdmin();
  admin.createTable(pTable);
  Configuration conf = new Configuration(UTIL.getConfiguration());
  // up the number of retries/wait time to make it obvious that we are failing with retries here
  conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 20);
  conf.setLong(HConstants.HBASE_CLIENT_PAUSE, 1000);
  HTable primary = new HTable(conf, primaryTable);
  primary.setAutoFlush(false, true);
  // do a simple put that should be indexed
  Put p = new Put(Bytes.toBytes("row"));
  p.add(family, null, Bytes.toBytes("value"));
  primary.put(p);
  try {
    primary.flushCommits();
    fail("Shouldn't have gotten a successful write to the primary table");
  } catch (RetriesExhaustedWithDetailsException e) {
    LOG.info("Correctly got a failure of the put!");
  }
  primary.close();
}
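This test raises the retry count and pause to make slow failure obvious; a client that instead wants index write failures to surface immediately would turn the same knobs the other way. A sketch, assuming the same (deprecated) HTable API the test already uses:

// Fail fast instead of retrying: same configuration keys, opposite direction.
Configuration fastFailConf = new Configuration(UTIL.getConfiguration());
fastFailConf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1); // a single attempt
fastFailConf.setLong(HConstants.HBASE_CLIENT_PAUSE, 50); // short pause if it does retry
HTable fastFailTable = new HTable(fastFailConf, primaryTable);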