Use of org.apache.accumulo.hadoop.mapred.AccumuloOutputFormat in project accumulo by apache.
The example below is the testMapred method of the class AccumuloOutputFormatIT.
// Prevent regression of ACCUMULO-3709.
@Test
public void testMapred() throws Exception {
  try (AccumuloClient client = Accumulo.newClient().from(getClientProperties()).build()) {
    // create a table and put some data in it
    client.tableOperations().create(testName.getMethodName());

    JobConf job = new JobConf();
    BatchWriterConfig batchConfig = new BatchWriterConfig();
    // no flushes!!!!!
    batchConfig.setMaxLatency(0, TimeUnit.MILLISECONDS);
    // use a single thread to ensure our update session times out
    batchConfig.setMaxWriteThreads(1);
    // set the max memory so that we ensure we don't flush on the write.
    batchConfig.setMaxMemory(Long.MAX_VALUE);

    // configure the output format with the batch writer settings above and
    // obtain a RecordWriter directly, bypassing a full MapReduce job
    AccumuloOutputFormat outputFormat = new AccumuloOutputFormat();
    Properties props = Accumulo.newClientProperties().from(getClientProperties())
        .batchWriterConfig(batchConfig).build();
    AccumuloOutputFormat.configure().clientProperties(props).store(job);
    RecordWriter<Text,Mutation> writer = outputFormat.getRecordWriter(null, job, "Test", null);

    try {
      for (int i = 0; i < 3; i++) {
        Mutation m = new Mutation(new Text(String.format("%08d", i)));
        for (int j = 0; j < 3; j++) {
          m.put("cf1", "cq" + j, i + "_" + j);
        }
        writer.write(new Text(testName.getMethodName()), m);
      }
    } catch (Exception e) {
      e.printStackTrace();
      // we don't want the exception to come from write
    }

    // revoke WRITE so the buffered mutations are rejected when the writer is closed
    client.securityOperations().revokeTablePermission("root", testName.getMethodName(),
        TablePermission.WRITE);

    var ex = assertThrows(IOException.class, () -> writer.close(null));
    log.info(ex.getMessage(), ex);
    assertTrue(ex.getCause() instanceof MutationsRejectedException);
  }
}
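
For context, the configure().clientProperties(...).store(job) pattern exercised by the test is normally driven by a complete mapred job rather than by calling getRecordWriter directly. The sketch below shows how such a job might be wired up, assuming Accumulo 2.x and the old org.apache.hadoop.mapred API; the class name, table name, input path, client.properties location, and the defaultTable/createTables builder options are illustrative assumptions, not taken from the test above.

import java.io.IOException;
import java.util.Properties;

import org.apache.accumulo.core.client.Accumulo;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.hadoop.mapred.AccumuloOutputFormat;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;

public class AccumuloMapredExample {

  // Hypothetical mapper: turns "row,family,qualifier,value" lines into Mutations.
  public static class CsvMapper extends MapReduceBase
      implements Mapper<LongWritable,Text,Text,Mutation> {
    @Override
    public void map(LongWritable key, Text value, OutputCollector<Text,Mutation> output,
        Reporter reporter) throws IOException {
      String[] parts = value.toString().split(",");
      if (parts.length == 4) {
        Mutation m = new Mutation(parts[0]);
        m.put(parts[1], parts[2], parts[3]);
        // the output key names the destination table, just like the test's writer.write(...)
        output.collect(new Text("example_table"), m);
      }
    }
  }

  public static void main(String[] args) throws Exception {
    JobConf job = new JobConf(AccumuloMapredExample.class);
    job.setJobName("csv-to-accumulo");

    job.setMapperClass(CsvMapper.class);
    job.setNumReduceTasks(0); // map-only: mutations go straight to Accumulo
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Mutation.class);

    job.setInputFormat(TextInputFormat.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));

    // placeholder path to an Accumulo client.properties file
    Properties clientProps =
        Accumulo.newClientProperties().from("/path/to/client.properties").build();

    job.setOutputFormat(AccumuloOutputFormat.class);
    AccumuloOutputFormat.configure().clientProperties(clientProps).defaultTable("example_table")
        .createTables(true).store(job);

    JobClient.runJob(job);
  }
}

In this setup the framework, not the application code, obtains the RecordWriter and closes it at the end of each task, which is the close() path whose failure handling the test above verifies.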