Use of org.apache.accumulo.core.client.BatchWriterConfig in the project accumulo-examples (by Apache): class MapReduceIT, method runTest.
/**
 * End-to-end check of the {@code RowHash} MapReduce job: writes ten rows to a fresh
 * table, runs the job over the input column, then verifies that every output value is
 * the Base64-encoded MD5 digest of the corresponding input value.
 *
 * @param confFile client configuration file handed to the RowHash process via {@code -c}
 * @param c connector used to create, populate, and scan the test table
 * @param cluster mini cluster used to launch the RowHash job as a separate process
 */
static void runTest(String confFile, Connector c, MiniAccumuloClusterImpl cluster) throws AccumuloException, AccumuloSecurityException, TableExistsException, TableNotFoundException, MutationsRejectedException, IOException, InterruptedException, NoSuchAlgorithmException {
    c.tableOperations().create(tablename);
    BatchWriter bw = c.createBatchWriter(tablename, new BatchWriterConfig());
    for (int i = 0; i < 10; i++) {
        Mutation m = new Mutation("" + i);
        m.put(input_cf, input_cq, "row" + i);
        bw.addMutation(m);
    }
    bw.close();
    Process hash = cluster.exec(RowHash.class, Collections.singletonList(hadoopTmpDirArg), "-c", confFile, "-t", tablename, "--column", input_cfcq);
    assertEquals(0, hash.waitFor());
    Scanner s = c.createScanner(tablename, Authorizations.EMPTY);
    s.fetchColumn(new Text(input_cf), new Text(output_cq));
    // digest() resets the instance, so a single MessageDigest can be reused across rows.
    MessageDigest md = MessageDigest.getInstance("MD5");
    int i = 0;
    for (Entry<Key, Value> entry : s) {
        // Recompute Base64(MD5(value)) exactly as RowHash stores it.
        byte[] check = Base64.getEncoder().encode(md.digest(("row" + i).getBytes()));
        // JUnit convention: expected value first, actual value second.
        assertEquals(new String(check), entry.getValue().toString());
        i++;
    }
    // Guard against a vacuous pass: the scan must have yielded all ten hashed rows.
    assertEquals(10, i);
}
Use of org.apache.accumulo.core.client.BatchWriterConfig in the project accumulo-examples (by Apache): class BatchWriterOpts, method getBatchWriterConfig.
/**
 * Builds a {@link BatchWriterConfig} populated from this instance's batch options.
 *
 * @return a new configuration carrying this object's memory, thread-count, latency, and
 *         timeout settings (latency and timeout are interpreted in milliseconds)
 */
public BatchWriterConfig getBatchWriterConfig() {
    final BatchWriterConfig cfg = new BatchWriterConfig();
    cfg.setMaxMemory(this.batchMemory);
    cfg.setMaxWriteThreads(this.batchThreads);
    cfg.setMaxLatency(this.batchLatency, TimeUnit.MILLISECONDS);
    cfg.setTimeout(this.batchTimeout, TimeUnit.MILLISECONDS);
    return cfg;
}
Use of org.apache.accumulo.core.client.BatchWriterConfig in the project Presto (by prestodb): class TestIndexer, method testMutationIndexWithVisibilities.
// Verifies that the Indexer writes both index and metrics entries correctly when
// mutations carry column visibilities: each indexed value should appear once without a
// visibility and once with it, and metric cardinalities should be tracked per visibility.
// NOTE: the assertKeyValuePair calls below depend on the scanners' sorted key order;
// do not reorder them.
@Test
public void testMutationIndexWithVisibilities() throws Exception {
// Stand up an in-memory Accumulo with the data, index, and metrics tables, and attach
// the Indexer's metric-aggregation iterators to the metrics table.
Instance inst = new MockInstance();
Connector conn = inst.getConnector("root", new PasswordToken(""));
conn.tableOperations().create(table.getFullTableName());
conn.tableOperations().create(table.getIndexTableName());
conn.tableOperations().create(table.getMetricsTableName());
for (IteratorSetting s : Indexer.getMetricIterators(table)) {
conn.tableOperations().attachIterator(table.getMetricsTableName(), s);
}
// Index one row twice: m1 without visibilities and m1v with them, then flush so the
// writes are visible to the scans below.
Indexer indexer = new Indexer(conn, new Authorizations(), table, new BatchWriterConfig());
indexer.index(m1);
indexer.index(m1v);
indexer.flush();
// Phase 1a: the index table should hold each value/column pair twice — once with an
// empty visibility (from m1) and once with "private"/"moreprivate" (from m1v).
Scanner scan = conn.createScanner(table.getIndexTableName(), new Authorizations("private", "moreprivate"));
scan.setRange(new Range());
Iterator<Entry<Key, Value>> iter = scan.iterator();
assertKeyValuePair(iter.next(), AGE_VALUE, "cf_age", "row1", "");
assertKeyValuePair(iter.next(), AGE_VALUE, "cf_age", "row1", "private", "");
assertKeyValuePair(iter.next(), bytes("abc"), "cf_arr", "row1", "");
assertKeyValuePair(iter.next(), bytes("abc"), "cf_arr", "row1", "moreprivate", "");
assertKeyValuePair(iter.next(), M1_FNAME_VALUE, "cf_firstname", "row1", "");
assertKeyValuePair(iter.next(), M1_FNAME_VALUE, "cf_firstname", "row1", "private", "");
assertKeyValuePair(iter.next(), bytes("def"), "cf_arr", "row1", "");
assertKeyValuePair(iter.next(), bytes("def"), "cf_arr", "row1", "moreprivate", "");
assertKeyValuePair(iter.next(), bytes("ghi"), "cf_arr", "row1", "");
assertKeyValuePair(iter.next(), bytes("ghi"), "cf_arr", "row1", "moreprivate", "");
assertFalse(iter.hasNext());
scan.close();
// Phase 1b: metrics after indexing row1 twice — cardinality 1 per value per
// visibility, and a row count of 2 (one per index() call) with row1 as first/last row.
scan = conn.createScanner(table.getMetricsTableName(), new Authorizations("private", "moreprivate"));
scan.setRange(new Range());
iter = scan.iterator();
assertKeyValuePair(iter.next(), AGE_VALUE, "cf_age", "___card___", "1");
assertKeyValuePair(iter.next(), AGE_VALUE, "cf_age", "___card___", "private", "1");
assertKeyValuePair(iter.next(), Indexer.METRICS_TABLE_ROW_ID.array(), "___rows___", "___card___", "2");
assertKeyValuePair(iter.next(), Indexer.METRICS_TABLE_ROW_ID.array(), "___rows___", "___first_row___", "row1");
assertKeyValuePair(iter.next(), Indexer.METRICS_TABLE_ROW_ID.array(), "___rows___", "___last_row___", "row1");
assertKeyValuePair(iter.next(), bytes("abc"), "cf_arr", "___card___", "1");
assertKeyValuePair(iter.next(), bytes("abc"), "cf_arr", "___card___", "moreprivate", "1");
assertKeyValuePair(iter.next(), M1_FNAME_VALUE, "cf_firstname", "___card___", "1");
assertKeyValuePair(iter.next(), M1_FNAME_VALUE, "cf_firstname", "___card___", "private", "1");
assertKeyValuePair(iter.next(), bytes("def"), "cf_arr", "___card___", "1");
assertKeyValuePair(iter.next(), bytes("def"), "cf_arr", "___card___", "moreprivate", "1");
assertKeyValuePair(iter.next(), bytes("ghi"), "cf_arr", "___card___", "1");
assertKeyValuePair(iter.next(), bytes("ghi"), "cf_arr", "___card___", "moreprivate", "1");
assertFalse(iter.hasNext());
scan.close();
// Phase 2: index a second row (again with and without visibilities) and close the
// indexer, which flushes the remaining writes.
indexer.index(m2);
indexer.index(m2v);
indexer.close();
// Phase 2a: the index table should now interleave row1 and row2 entries in key order.
scan = conn.createScanner(table.getIndexTableName(), new Authorizations("private", "moreprivate"));
scan.setRange(new Range());
iter = scan.iterator();
assertKeyValuePair(iter.next(), AGE_VALUE, "cf_age", "row1", "");
assertKeyValuePair(iter.next(), AGE_VALUE, "cf_age", "row1", "private", "");
assertKeyValuePair(iter.next(), AGE_VALUE, "cf_age", "row2", "");
assertKeyValuePair(iter.next(), AGE_VALUE, "cf_age", "row2", "private", "");
assertKeyValuePair(iter.next(), bytes("abc"), "cf_arr", "row1", "");
assertKeyValuePair(iter.next(), bytes("abc"), "cf_arr", "row1", "moreprivate", "");
assertKeyValuePair(iter.next(), bytes("abc"), "cf_arr", "row2", "");
assertKeyValuePair(iter.next(), bytes("abc"), "cf_arr", "row2", "moreprivate", "");
assertKeyValuePair(iter.next(), M1_FNAME_VALUE, "cf_firstname", "row1", "");
assertKeyValuePair(iter.next(), M1_FNAME_VALUE, "cf_firstname", "row1", "private", "");
assertKeyValuePair(iter.next(), M2_FNAME_VALUE, "cf_firstname", "row2", "");
assertKeyValuePair(iter.next(), M2_FNAME_VALUE, "cf_firstname", "row2", "moreprivate", "");
assertKeyValuePair(iter.next(), bytes("def"), "cf_arr", "row1", "");
assertKeyValuePair(iter.next(), bytes("def"), "cf_arr", "row1", "moreprivate", "");
assertKeyValuePair(iter.next(), bytes("ghi"), "cf_arr", "row1", "");
assertKeyValuePair(iter.next(), bytes("ghi"), "cf_arr", "row1", "moreprivate", "");
assertKeyValuePair(iter.next(), bytes("ghi"), "cf_arr", "row2", "");
assertKeyValuePair(iter.next(), bytes("ghi"), "cf_arr", "row2", "moreprivate", "");
assertKeyValuePair(iter.next(), bytes("mno"), "cf_arr", "row2", "");
assertKeyValuePair(iter.next(), bytes("mno"), "cf_arr", "row2", "moreprivate", "");
assertFalse(iter.hasNext());
scan.close();
// Phase 2b: metrics after both rows — values shared by row1 and row2 (age, "abc",
// "ghi") have cardinality 2, row-specific values ("def", "mno", first names) stay at 1,
// and the row count is 4 (two index() calls per row) spanning row1..row2.
scan = conn.createScanner(table.getMetricsTableName(), new Authorizations("private", "moreprivate"));
scan.setRange(new Range());
iter = scan.iterator();
assertKeyValuePair(iter.next(), AGE_VALUE, "cf_age", "___card___", "2");
assertKeyValuePair(iter.next(), AGE_VALUE, "cf_age", "___card___", "private", "2");
assertKeyValuePair(iter.next(), Indexer.METRICS_TABLE_ROW_ID.array(), "___rows___", "___card___", "4");
assertKeyValuePair(iter.next(), Indexer.METRICS_TABLE_ROW_ID.array(), "___rows___", "___first_row___", "row1");
assertKeyValuePair(iter.next(), Indexer.METRICS_TABLE_ROW_ID.array(), "___rows___", "___last_row___", "row2");
assertKeyValuePair(iter.next(), bytes("abc"), "cf_arr", "___card___", "2");
assertKeyValuePair(iter.next(), bytes("abc"), "cf_arr", "___card___", "moreprivate", "2");
assertKeyValuePair(iter.next(), M1_FNAME_VALUE, "cf_firstname", "___card___", "1");
assertKeyValuePair(iter.next(), M1_FNAME_VALUE, "cf_firstname", "___card___", "private", "1");
assertKeyValuePair(iter.next(), M2_FNAME_VALUE, "cf_firstname", "___card___", "1");
assertKeyValuePair(iter.next(), M2_FNAME_VALUE, "cf_firstname", "___card___", "moreprivate", "1");
assertKeyValuePair(iter.next(), bytes("def"), "cf_arr", "___card___", "1");
assertKeyValuePair(iter.next(), bytes("def"), "cf_arr", "___card___", "moreprivate", "1");
assertKeyValuePair(iter.next(), bytes("ghi"), "cf_arr", "___card___", "2");
assertKeyValuePair(iter.next(), bytes("ghi"), "cf_arr", "___card___", "moreprivate", "2");
assertKeyValuePair(iter.next(), bytes("mno"), "cf_arr", "___card___", "1");
assertKeyValuePair(iter.next(), bytes("mno"), "cf_arr", "___card___", "moreprivate", "1");
assertFalse(iter.hasNext());
scan.close();
}
Use of org.apache.accumulo.core.client.BatchWriterConfig in the project YCSB (by brianfrankcooper): class AccumuloClient, method createBatchWriter.
/**
 * Creates a BatchWriter with the expected configuration.
 *
 * <p>Max latency, buffer size, and thread count are read from the properties
 * {@code accumulo.batchWriterMaxLatency} (default 30000 ms),
 * {@code accumulo.batchWriterSize} (default 100000 bytes), and
 * {@code accumulo.batchWriterThreads}. When no thread count is configured, half the
 * available processors (but at least one) is used to try to saturate the client machine.
 *
 * @param table The table to write to
 * @return a writer for the given table using the configured batching parameters
 * @throws TableNotFoundException if the table does not exist
 */
private BatchWriter createBatchWriter(String table) throws TableNotFoundException {
    BatchWriterConfig bwc = new BatchWriterConfig();
    bwc.setMaxLatency(Long.parseLong(getProperties().getProperty("accumulo.batchWriterMaxLatency", "30000")), TimeUnit.MILLISECONDS);
    bwc.setMaxMemory(Long.parseLong(getProperties().getProperty("accumulo.batchWriterSize", "100000")));
    final String numThreadsValue = getProperties().getProperty("accumulo.batchWriterThreads");
    // Default: try to saturate the client machine.
    int numThreads = Math.max(1, Runtime.getRuntime().availableProcessors() / 2);
    if (numThreadsValue != null) {
        numThreads = Integer.parseInt(numThreadsValue);
    }
    System.err.println("Using " + numThreads + " threads to write data");
    bwc.setMaxWriteThreads(numThreads);
    return connector.createBatchWriter(table, bwc);
}
Use of org.apache.accumulo.core.client.BatchWriterConfig in the project Hive (by Apache): class AccumuloTestSetup, method createAccumuloTable.
/**
 * Drops any pre-existing test table, recreates it, and loads three rows, each carrying
 * one column per supported Hive type (boolean, byte, short, int, long, string, float,
 * double, decimal, date, timestamp) under the "cf" column family.
 *
 * @param conn connector used for table management and writes
 */
private void createAccumuloTable(Connector conn) throws TableExistsException, TableNotFoundException, AccumuloException, AccumuloSecurityException {
    TableOperations tableOps = conn.tableOperations();
    // Start from a clean slate: remove the table if a previous run left it behind.
    if (tableOps.exists(TABLE_NAME)) {
        tableOps.delete(TABLE_NAME);
    }
    tableOps.create(TABLE_NAME);
    // One fixture value per row for each column type; index r selects the row.
    String[] strings = new String[] { "Hadoop, Accumulo", "Hive", "Test Strings" };
    boolean[] booleans = new boolean[] { true, false, true };
    byte[] bytes = new byte[] { Byte.MIN_VALUE, -1, Byte.MAX_VALUE };
    short[] shorts = new short[] { Short.MIN_VALUE, -1, Short.MAX_VALUE };
    int[] ints = new int[] { Integer.MIN_VALUE, -1, Integer.MAX_VALUE };
    long[] longs = new long[] { Long.MIN_VALUE, -1, Long.MAX_VALUE };
    float[] floats = new float[] { Float.MIN_VALUE, -1.0F, Float.MAX_VALUE };
    double[] doubles = new double[] { Double.MIN_VALUE, -1.0, Double.MAX_VALUE };
    HiveDecimal[] decimals = new HiveDecimal[] { HiveDecimal.create("3.14159"), HiveDecimal.create("2.71828"), HiveDecimal.create("0.57721") };
    Date[] dates = new Date[] { Date.valueOf("2014-01-01"), Date.valueOf("2014-03-01"), Date.valueOf("2014-05-01") };
    Timestamp[] timestamps = new Timestamp[] { new Timestamp(50), new Timestamp(100), new Timestamp(150) };
    final String cf = "cf";
    BatchWriter bw = conn.createBatchWriter(TABLE_NAME, new BatchWriterConfig());
    try {
        for (int r = 0; r < 3; r++) {
            Mutation m = new Mutation("key-" + r);
            m.put(cf, "cq-boolean", Boolean.toString(booleans[r]));
            // The byte column is written as raw bytes rather than a string rendering.
            m.put(cf.getBytes(), "cq-byte".getBytes(), new byte[] { bytes[r] });
            m.put(cf, "cq-short", Short.toString(shorts[r]));
            m.put(cf, "cq-int", Integer.toString(ints[r]));
            m.put(cf, "cq-long", Long.toString(longs[r]));
            m.put(cf, "cq-string", strings[r]);
            m.put(cf, "cq-float", Float.toString(floats[r]));
            m.put(cf, "cq-double", Double.toString(doubles[r]));
            m.put(cf, "cq-decimal", decimals[r].toString());
            m.put(cf, "cq-date", dates[r].toString());
            m.put(cf, "cq-timestamp", timestamps[r].toString());
            bw.addMutation(m);
        }
    } finally {
        // Flush and release the writer even if a mutation is rejected.
        bw.close();
    }
}
Aggregations