Search in sources :

Example 46 with BatchWriter

use of org.apache.accumulo.core.client.BatchWriter in project accumulo by apache.

From the class WorkMakerIT, method dontCreateWorkForEntriesWithNothingToReplicate.

@Test
public void dontCreateWorkForEntriesWithNothingToReplicate() throws Exception {
    String table = testName.getMethodName();
    conn.tableOperations().create(table);
    String tableId = conn.tableOperations().tableIdMap().get(table);
    String file = "hdfs://localhost:8020/accumulo/wal/123456-1234-1234-12345678";
    // Record a WAL as "created" in the status section, with no closed/replicated data yet
    Mutation m = new Mutation(new Path(file).toString());
    m.put(StatusSection.NAME, new Text(tableId), StatusUtil.fileCreatedValue(System.currentTimeMillis()));
    BatchWriter bw = ReplicationTable.getBatchWriter(conn);
    // FIX: the writer was never closed, leaking the BatchWriter's buffered state and
    // background resources; close it once the WorkMaker is done with it.
    try {
        bw.addMutation(m);
        bw.flush();
        // Assert that we have one record in the status section
        try (Scanner s = ReplicationTable.getScanner(conn)) {
            StatusSection.limit(s);
            Assert.assertEquals(1, Iterables.size(s));
            MockWorkMaker workMaker = new MockWorkMaker(conn);
            conn.tableOperations().setProperty(ReplicationTable.NAME, Property.TABLE_REPLICATION_TARGET.getKey() + "remote_cluster_1", "4");
            workMaker.setBatchWriter(bw);
            // If we don't shortcircuit out, we should get an exception because ServerConfiguration.getTableConfiguration
            // won't work with MockAccumulo
            workMaker.run();
        }
    } finally {
        bw.close();
    }
    // No work entries should have been created for a status with nothing to replicate
    try (Scanner s = ReplicationTable.getScanner(conn)) {
        WorkSection.limit(s);
        Assert.assertEquals(0, Iterables.size(s));
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Scanner(org.apache.accumulo.core.client.Scanner) Text(org.apache.hadoop.io.Text) Mutation(org.apache.accumulo.core.data.Mutation) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Test(org.junit.Test)

Example 47 with BatchWriter

use of org.apache.accumulo.core.client.BatchWriter in project accumulo by apache.

From the class WorkMakerIT, method singleUnitMultipleTargets.

@Test
public void singleUnitMultipleTargets() throws Exception {
    String table = testName.getMethodName();
    conn.tableOperations().create(table);
    Table.ID tableId = Table.ID.of(conn.tableOperations().tableIdMap().get(table));
    String file = "hdfs://localhost:8020/accumulo/wal/123456-1234-1234-12345678";
    // Record a WAL as "created" in the status section for the new table
    Mutation m = new Mutation(new Path(file).toString());
    m.put(StatusSection.NAME, new Text(tableId.getUtf8()), StatusUtil.fileCreatedValue(System.currentTimeMillis()));
    BatchWriter bw = ReplicationTable.getBatchWriter(conn);
    // FIX: the writer was never closed, leaking the BatchWriter's buffered state and
    // background resources; close it once the work record has been written.
    Set<ReplicationTarget> expectedTargets = new HashSet<>();
    try {
        bw.addMutation(m);
        bw.flush();
        // Assert that we have one record in the status section
        try (Scanner s = ReplicationTable.getScanner(conn)) {
            StatusSection.limit(s);
            Assert.assertEquals(1, Iterables.size(s));
            MockWorkMaker workMaker = new MockWorkMaker(conn);
            // One status unit should fan out into one work entry per configured target cluster
            Map<String, String> targetClusters = ImmutableMap.of("remote_cluster_1", "4", "remote_cluster_2", "6", "remote_cluster_3", "8");
            for (Entry<String, String> cluster : targetClusters.entrySet()) {
                expectedTargets.add(new ReplicationTarget(cluster.getKey(), cluster.getValue(), tableId));
            }
            workMaker.setBatchWriter(bw);
            workMaker.addWorkRecord(new Text(file), StatusUtil.fileCreatedValue(System.currentTimeMillis()), targetClusters, tableId);
        }
    } finally {
        bw.close();
    }
    // Verify the work section holds exactly the expected targets, no more, no fewer
    try (Scanner s = ReplicationTable.getScanner(conn)) {
        WorkSection.limit(s);
        Set<ReplicationTarget> actualTargets = new HashSet<>();
        for (Entry<Key, Value> entry : s) {
            Assert.assertEquals(file, entry.getKey().getRow().toString());
            Assert.assertEquals(WorkSection.NAME, entry.getKey().getColumnFamily());
            ReplicationTarget target = ReplicationTarget.from(entry.getKey().getColumnQualifier());
            actualTargets.add(target);
        }
        for (ReplicationTarget expected : expectedTargets) {
            Assert.assertTrue("Did not find expected target: " + expected, actualTargets.contains(expected));
            actualTargets.remove(expected);
        }
        Assert.assertTrue("Found extra replication work entries: " + actualTargets, actualTargets.isEmpty());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Scanner(org.apache.accumulo.core.client.Scanner) Table(org.apache.accumulo.core.client.impl.Table) ReplicationTable(org.apache.accumulo.core.replication.ReplicationTable) Text(org.apache.hadoop.io.Text) ReplicationTarget(org.apache.accumulo.core.replication.ReplicationTarget) Value(org.apache.accumulo.core.data.Value) Mutation(org.apache.accumulo.core.data.Mutation) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Key(org.apache.accumulo.core.data.Key) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 48 with BatchWriter

use of org.apache.accumulo.core.client.BatchWriter in project accumulo by apache.

From the class PluginIT, method checkIterator.

@Test
public void checkIterator() throws IOException, AccumuloException, AccumuloSecurityException, TableExistsException, TableNotFoundException {
    String tableName = "checkIterator";
    connector.tableOperations().create(tableName);
    // Seed two rows, each with two "allowed" and one "denied" column family entries
    BatchWriter bw = connector.createBatchWriter(tableName, new BatchWriterConfig());
    Mutation m = new Mutation("ROW1");
    m.put("allowed", "CQ1", "V1");
    m.put("denied", "CQ2", "V2");
    m.put("allowed", "CQ3", "V3");
    bw.addMutation(m);
    m = new Mutation("ROW2");
    m.put("allowed", "CQ1", "V1");
    m.put("denied", "CQ2", "V2");
    m.put("allowed", "CQ3", "V3");
    bw.addMutation(m);
    bw.close();
    // FIX: the scanner was never closed; use try-with-resources, consistent with
    // the scanner handling elsewhere in this file.
    try (Scanner scanner = connector.createScanner(tableName, Authorizations.EMPTY)) {
        // check filter
        IteratorSetting is = new IteratorSetting(5, CustomFilter.class);
        scanner.addScanIterator(is);
        int count = 0;
        for (Entry<Key, Value> entry : scanner) {
            count++;
            assertEquals("allowed", entry.getKey().getColumnFamily().toString());
        }
        assertEquals(4, count);
        // check filter negated
        scanner.clearScanIterators();
        CustomFilter.setNegate(is, true);
        scanner.addScanIterator(is);
        count = 0;
        for (Entry<Key, Value> entry : scanner) {
            count++;
            assertEquals("denied", entry.getKey().getColumnFamily().toString());
        }
        assertEquals(2, count);
    }
    // Marker file signals to the surrounding build that this check completed
    assertTrue(new File("target/accumulo-maven-plugin/" + instance.getInstanceName() + "/testCheckIteratorPassed").createNewFile());
}
Also used : Scanner(org.apache.accumulo.core.client.Scanner) IteratorSetting(org.apache.accumulo.core.client.IteratorSetting) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) File(java.io.File) Key(org.apache.accumulo.core.data.Key) Test(org.junit.Test)

Example 49 with BatchWriter

use of org.apache.accumulo.core.client.BatchWriter in project accumulo by apache.

From the class ProxyServerTest, method updateAndFlushClosesWriterOnExceptionFromFlush.

@Test
public void updateAndFlushClosesWriterOnExceptionFromFlush() throws Exception {
    // Partial mock: stub out writer acquisition and cell conversion, keep updateAndFlush real.
    // NOTE: EasyMock is record/replay — the expectation order below must match the calls
    // updateAndFlush makes at runtime.
    ProxyServer proxy = EasyMock.createMockBuilder(ProxyServer.class).addMockedMethod("getWriter", ByteBuffer.class, String.class, WriterOptions.class).addMockedMethod("addCellsToWriter", Map.class, BatchWriterPlusProblem.class).createMock();
    BatchWriter mockWriter = EasyMock.createMock(BatchWriter.class);
    BatchWriterPlusProblem writerHolder = new BatchWriterPlusProblem();
    writerHolder.writer = mockWriter;
    MutationsRejectedException rejected = EasyMock.createMock(MutationsRejectedException.class);
    final ByteBuffer login = ByteBuffer.wrap("my_login".getBytes(UTF_8));
    final String tableName = "table1";
    final Map<ByteBuffer, List<ColumnUpdate>> cells = new HashMap<>();
    EasyMock.expect(proxy.getWriter(login, tableName, null)).andReturn(writerHolder);
    proxy.addCellsToWriter(cells, writerHolder);
    EasyMock.expectLastCall();
    // Adding the cells succeeds; the failure comes later, from flush()
    writerHolder.exception = null;
    mockWriter.flush();
    EasyMock.expectLastCall().andThrow(rejected);
    // The writer must still be closed despite the flush failure
    mockWriter.close();
    EasyMock.expectLastCall();
    EasyMock.replay(proxy, mockWriter, rejected);
    try {
        proxy.updateAndFlush(login, tableName, cells);
        Assert.fail("Expected updateAndFlush to throw an exception");
    } catch (org.apache.accumulo.proxy.thrift.MutationsRejectedException e) {
    // pass
    }
    EasyMock.verify(proxy, mockWriter, rejected);
}
Also used : HashMap(java.util.HashMap) ByteBuffer(java.nio.ByteBuffer) BatchWriterPlusProblem(org.apache.accumulo.proxy.ProxyServer.BatchWriterPlusProblem) List(java.util.List) BatchWriter(org.apache.accumulo.core.client.BatchWriter) HashMap(java.util.HashMap) Map(java.util.Map) MutationsRejectedException(org.apache.accumulo.core.client.MutationsRejectedException) Test(org.junit.Test)

Example 50 with BatchWriter

use of org.apache.accumulo.core.client.BatchWriter in project accumulo by apache.

From the class ProxyServerTest, method updateAndFlushClosesWriterOnExceptionFromAddCells.

@Test
public void updateAndFlushClosesWriterOnExceptionFromAddCells() throws Exception {
    // Partial mock: stub out writer acquisition and cell conversion, keep updateAndFlush real.
    // NOTE: EasyMock is record/replay — the expectation order below must match the calls
    // updateAndFlush makes at runtime.
    ProxyServer proxy = EasyMock.createMockBuilder(ProxyServer.class).addMockedMethod("getWriter", ByteBuffer.class, String.class, WriterOptions.class).addMockedMethod("addCellsToWriter", Map.class, BatchWriterPlusProblem.class).createMock();
    BatchWriter mockWriter = EasyMock.createMock(BatchWriter.class);
    BatchWriterPlusProblem writerHolder = new BatchWriterPlusProblem();
    writerHolder.writer = mockWriter;
    MutationsRejectedException rejected = EasyMock.createMock(MutationsRejectedException.class);
    final ByteBuffer login = ByteBuffer.wrap("my_login".getBytes(UTF_8));
    final String tableName = "table1";
    final Map<ByteBuffer, List<ColumnUpdate>> cells = new HashMap<>();
    EasyMock.expect(proxy.getWriter(login, tableName, null)).andReturn(writerHolder);
    proxy.addCellsToWriter(cells, writerHolder);
    EasyMock.expectLastCall();
    // Simulate a failure while adding the cells themselves
    writerHolder.exception = rejected;
    // The writer must still be closed despite the add failure
    mockWriter.close();
    EasyMock.expectLastCall();
    EasyMock.replay(proxy, mockWriter, rejected);
    try {
        proxy.updateAndFlush(login, tableName, cells);
        Assert.fail("Expected updateAndFlush to throw an exception");
    } catch (org.apache.accumulo.proxy.thrift.MutationsRejectedException e) {
    // pass
    }
    EasyMock.verify(proxy, mockWriter, rejected);
}
Also used : HashMap(java.util.HashMap) ByteBuffer(java.nio.ByteBuffer) BatchWriterPlusProblem(org.apache.accumulo.proxy.ProxyServer.BatchWriterPlusProblem) List(java.util.List) BatchWriter(org.apache.accumulo.core.client.BatchWriter) HashMap(java.util.HashMap) Map(java.util.Map) MutationsRejectedException(org.apache.accumulo.core.client.MutationsRejectedException) Test(org.junit.Test)

Aggregations

BatchWriter (org.apache.accumulo.core.client.BatchWriter)402 Mutation (org.apache.accumulo.core.data.Mutation)360 Test (org.junit.Test)264 Value (org.apache.accumulo.core.data.Value)250 BatchWriterConfig (org.apache.accumulo.core.client.BatchWriterConfig)246 Text (org.apache.hadoop.io.Text)194 Key (org.apache.accumulo.core.data.Key)179 Scanner (org.apache.accumulo.core.client.Scanner)174 Connector (org.apache.accumulo.core.client.Connector)169 IteratorSetting (org.apache.accumulo.core.client.IteratorSetting)81 Authorizations (org.apache.accumulo.core.security.Authorizations)68 Range (org.apache.accumulo.core.data.Range)61 Entry (java.util.Map.Entry)51 Map (java.util.Map)50 BatchScanner (org.apache.accumulo.core.client.BatchScanner)46 MutationsRejectedException (org.apache.accumulo.core.client.MutationsRejectedException)44 TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException)40 HashMap (java.util.HashMap)38 ArrayList (java.util.ArrayList)36 Status (org.apache.accumulo.server.replication.proto.Replication.Status)32