
Example 51 with BatchWriter

Use of org.apache.accumulo.core.client.BatchWriter in project accumulo by apache.

From the class CloseWriteAheadLogReferences, method updateReplicationEntries:

/**
 * Given the set of WALs which have references in the metadata table, close any status messages that reference one of those WALs.
 *
 * @param conn
 *          Connector
 * @param closedWals
 *          {@link Set} of paths to WALs that are marked as closed or unreferenced in ZooKeeper
 */
protected long updateReplicationEntries(Connector conn, Set<String> closedWals) {
    BatchScanner bs = null;
    BatchWriter bw = null;
    long recordsClosed = 0;
    try {
        bw = conn.createBatchWriter(MetadataTable.NAME, new BatchWriterConfig());
        bs = conn.createBatchScanner(MetadataTable.NAME, Authorizations.EMPTY, 4);
        bs.setRanges(Collections.singleton(Range.prefix(ReplicationSection.getRowPrefix())));
        bs.fetchColumnFamily(ReplicationSection.COLF);
        Text replFileText = new Text();
        for (Entry<Key, Value> entry : bs) {
            Status status;
            try {
                status = Status.parseFrom(entry.getValue().get());
            } catch (InvalidProtocolBufferException e) {
                log.error("Could not parse Status protobuf for {}", entry.getKey(), e);
                continue;
            }
            // Ignore things that aren't completely replicated as we can't delete those anyways
            MetadataSchema.ReplicationSection.getFile(entry.getKey(), replFileText);
            String replFile = replFileText.toString();
            boolean isClosed = closedWals.contains(replFile);
            // We only want to clean up WALs (which is everything but rfiles) and only when
            // metadata doesn't have a reference to the given WAL
            if (!status.getClosed() && !replFile.endsWith(RFILE_SUFFIX) && isClosed) {
                try {
                    closeWal(bw, entry.getKey());
                    recordsClosed++;
                } catch (MutationsRejectedException e) {
                    log.error("Failed to submit delete mutation for {}", entry.getKey());
                    continue;
                }
            }
        }
    } catch (TableNotFoundException e) {
        log.error("Replication table was deleted", e);
    } finally {
        if (null != bs) {
            bs.close();
        }
        if (null != bw) {
            try {
                bw.close();
            } catch (MutationsRejectedException e) {
                log.error("Failed to write delete mutations for replication table", e);
            }
        }
    }
    return recordsClosed;
}
Also used : Status(org.apache.accumulo.server.replication.proto.Replication.Status) BatchScanner(org.apache.accumulo.core.client.BatchScanner) InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) Text(org.apache.hadoop.io.Text) TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Key(org.apache.accumulo.core.data.Key) MutationsRejectedException(org.apache.accumulo.core.client.MutationsRejectedException)
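The closeWal helper invoked above is not shown in this snippet. A plausible sketch of it, assuming it simply rewrites the same metadata key with a Status value whose closed flag is set (the project's actual implementation may differ), could look like the following, using the same classes listed above:

// Hedged sketch of a closeWal helper: write back a Status marked closed at the same key.
// Status is org.apache.accumulo.server.replication.proto.Replication.Status, as in the example.
protected void closeWal(BatchWriter bw, Key k) throws MutationsRejectedException {
    // Minimal "closed" status; the real helper may carry over the begin/end offsets of the existing Status.
    Status closed = Status.newBuilder().setClosed(true).build();
    Mutation m = new Mutation(k.getRow());
    m.put(k.getColumnFamily(), k.getColumnQualifier(), new Value(closed.toByteArray()));
    bw.addMutation(m);
}

Because the BatchWriter buffers mutations, the updated entry is not durable until flush() or close() succeeds, which is why the caller closes the writer in the finally block.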

Example 52 with BatchWriter

Use of org.apache.accumulo.core.client.BatchWriter in project accumulo by apache.

From the class MockConnectorTest, method testAggregation:

@Test
public void testAggregation() throws Exception {
    MockInstance mockInstance = new MockInstance();
    Connector c = mockInstance.getConnector("root", new PasswordToken(""));
    String table = "perDayCounts";
    c.tableOperations().create(table);
    IteratorSetting is = new IteratorSetting(10, "String Summation", SummingCombiner.class);
    Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("day")));
    SummingCombiner.setEncodingType(is, SummingCombiner.Type.STRING);
    c.tableOperations().attachIterator(table, is);
    String[][] keys = { { "foo", "day", "20080101" }, { "foo", "day", "20080101" }, { "foo", "day", "20080103" }, { "bar", "day", "20080101" }, { "bar", "day", "20080101" } };
    BatchWriter bw = c.createBatchWriter("perDayCounts", new BatchWriterConfig());
    for (String[] elt : keys) {
        Mutation m = new Mutation(new Text(elt[0]));
        m.put(new Text(elt[1]), new Text(elt[2]), new Value("1".getBytes()));
        bw.addMutation(m);
    }
    bw.close();
    Scanner s = c.createScanner("perDayCounts", Authorizations.EMPTY);
    Iterator<Entry<Key, Value>> iterator = s.iterator();
    assertTrue(iterator.hasNext());
    checkEntry(iterator.next(), "bar", "day", "20080101", "2");
    assertTrue(iterator.hasNext());
    checkEntry(iterator.next(), "foo", "day", "20080101", "2");
    assertTrue(iterator.hasNext());
    checkEntry(iterator.next(), "foo", "day", "20080103", "1");
    assertFalse(iterator.hasNext());
}
Also used : Connector(org.apache.accumulo.core.client.Connector) BatchScanner(org.apache.accumulo.core.client.BatchScanner) Scanner(org.apache.accumulo.core.client.Scanner) Text(org.apache.hadoop.io.Text) PasswordToken(org.apache.accumulo.core.client.security.tokens.PasswordToken) Entry(java.util.Map.Entry) IteratorSetting(org.apache.accumulo.core.client.IteratorSetting) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) MultiTableBatchWriter(org.apache.accumulo.core.client.MultiTableBatchWriter) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Test(org.junit.Test)
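The combiner above is attached to the table, so it applies to every scan and compaction. If aggregation is only wanted for a particular read, the same IteratorSetting can be added per scan instead. A minimal sketch, reusing the table and combiner configuration from the example (the iterator name "sumAtScan" is just an illustrative label):

// Apply the summing combiner only for this scan instead of attaching it to the table.
Scanner perScan = c.createScanner(table, Authorizations.EMPTY);
IteratorSetting scanTime = new IteratorSetting(10, "sumAtScan", SummingCombiner.class);
Combiner.setColumns(scanTime, Collections.singletonList(new IteratorSetting.Column("day")));
SummingCombiner.setEncodingType(scanTime, SummingCombiner.Type.STRING);
perScan.addScanIterator(scanTime);
for (Entry<Key, Value> e : perScan) {
    // Each row's "day" columns are summed on the server side for this scan only.
    System.out.println(e.getKey().getRow() + " " + e.getKey().getColumnQualifier() + " = " + e.getValue());
}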

Example 53 with BatchWriter

Use of org.apache.accumulo.core.client.BatchWriter in project accumulo by apache.

From the class MockConnectorTest, method testMockMultiTableBatchWriter:

@Test
public void testMockMultiTableBatchWriter() throws Exception {
    Connector c = new MockConnector("root", new MockInstance());
    c.tableOperations().create("a");
    c.tableOperations().create("b");
    MultiTableBatchWriter bw = c.createMultiTableBatchWriter(new BatchWriterConfig());
    Mutation m1 = new Mutation("r1");
    m1.put("cf1", "cq1", 1, "v1");
    BatchWriter b = bw.getBatchWriter("a");
    b.addMutation(m1);
    b.flush();
    b = bw.getBatchWriter("b");
    b.addMutation(m1);
    b.flush();
    Scanner scanner = c.createScanner("a", Authorizations.EMPTY);
    int count = Iterators.size(scanner.iterator());
    assertEquals(1, count);
    scanner = c.createScanner("b", Authorizations.EMPTY);
    count = Iterators.size(scanner.iterator());
    assertEquals(1, count);
}
Also used : Connector(org.apache.accumulo.core.client.Connector) BatchScanner(org.apache.accumulo.core.client.BatchScanner) Scanner(org.apache.accumulo.core.client.Scanner) MultiTableBatchWriter(org.apache.accumulo.core.client.MultiTableBatchWriter) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) Mutation(org.apache.accumulo.core.data.Mutation) MultiTableBatchWriter(org.apache.accumulo.core.client.MultiTableBatchWriter) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Test(org.junit.Test)
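The test above relies on the mock and never closes the MultiTableBatchWriter. Outside of tests, the parent writer owns the lifecycle of every per-table writer it hands out, so it should be flushed or closed once at the end. A minimal sketch, assuming tables "a" and "b" already exist:

MultiTableBatchWriter mtbw = c.createMultiTableBatchWriter(new BatchWriterConfig());
try {
    Mutation m = new Mutation("r1");
    m.put("cf1", "cq1", "v1");
    // getBatchWriter returns a writer bound to the named table; the parent owns its lifecycle.
    for (String tableName : new String[] { "a", "b" }) {
        mtbw.getBatchWriter(tableName).addMutation(m);
    }
} finally {
    // Flushes and closes every table-specific writer obtained above.
    mtbw.close();
}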

Example 54 with BatchWriter

Use of org.apache.accumulo.core.client.BatchWriter in project accumulo by apache.

From the class MockConnectorTest, method testSunnyDay:

@Test
public void testSunnyDay() throws Exception {
    Connector c = new MockConnector("root", new MockInstance());
    c.tableOperations().create("test");
    BatchWriter bw = c.createBatchWriter("test", new BatchWriterConfig());
    for (int i = 0; i < 100; i++) {
        int r = random.nextInt();
        Mutation m = new Mutation(asText(r));
        m.put(asText(random.nextInt()), asText(random.nextInt()), new Value(Integer.toHexString(r).getBytes()));
        bw.addMutation(m);
    }
    bw.close();
    BatchScanner s = c.createBatchScanner("test", Authorizations.EMPTY, 2);
    s.setRanges(Collections.singletonList(new Range()));
    Key key = null;
    int count = 0;
    for (Entry<Key, Value> entry : s) {
        if (key != null)
            assertTrue(key.compareTo(entry.getKey()) < 0);
        assertEquals(entry.getKey().getRow(), new Text(entry.getValue().get()));
        key = entry.getKey();
        count++;
    }
    assertEquals(100, count);
}
Also used : Connector(org.apache.accumulo.core.client.Connector) BatchScanner(org.apache.accumulo.core.client.BatchScanner) Text(org.apache.hadoop.io.Text) Range(org.apache.accumulo.core.data.Range) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) MultiTableBatchWriter(org.apache.accumulo.core.client.MultiTableBatchWriter) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Key(org.apache.accumulo.core.data.Key) Test(org.junit.Test)
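The BatchScanner above is never closed; a real BatchScanner holds query threads and server-side sessions, so it should be closed when done, and it does not guarantee sorted results across ranges (the mock happens to return them sorted). A minimal sketch that scopes the scan to two hypothetical row ranges and closes the scanner, assuming the same "test" table and that java.util.Arrays is imported:

BatchScanner bscan = c.createBatchScanner("test", Authorizations.EMPTY, 2);
try {
    // Restrict the scan to two row ranges instead of the whole table.
    bscan.setRanges(Arrays.asList(new Range("a", "m"), new Range("n", "z")));
    for (Entry<Key, Value> entry : bscan) {
        // Unlike Scanner, a real BatchScanner may return entries in any order across ranges.
        System.out.println(entry.getKey() + " -> " + entry.getValue());
    }
} finally {
    // Releases the scan threads and any open server-side sessions.
    bscan.close();
}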

Example 55 with BatchWriter

Use of org.apache.accumulo.core.client.BatchWriter in project accumulo by apache.

From the class MockConnectorTest, method testDelete:

@Test
public void testDelete() throws Exception {
    Connector c = new MockConnector("root", new MockInstance());
    c.tableOperations().create("test");
    BatchWriter bw = c.createBatchWriter("test", new BatchWriterConfig());
    Mutation m1 = new Mutation("r1");
    m1.put("cf1", "cq1", 1, "v1");
    bw.addMutation(m1);
    bw.flush();
    Mutation m2 = new Mutation("r1");
    m2.putDelete("cf1", "cq1", 2);
    bw.addMutation(m2);
    bw.flush();
    Scanner scanner = c.createScanner("test", Authorizations.EMPTY);
    int count = Iterators.size(scanner.iterator());
    assertEquals(0, count);
    try {
        c.tableOperations().create("test_this_$tableName");
        assertTrue(false);
    } catch (IllegalArgumentException iae) {
        // expected: the table name contains a character that is not allowed
    }
}
Also used : Connector(org.apache.accumulo.core.client.Connector) BatchScanner(org.apache.accumulo.core.client.BatchScanner) Scanner(org.apache.accumulo.core.client.Scanner) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) MultiTableBatchWriter(org.apache.accumulo.core.client.MultiTableBatchWriter) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Test(org.junit.Test)
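A delete only masks entries whose timestamps are less than or equal to its own, which is why the delete at timestamp 2 hides the insert at timestamp 1 above. A short sketch of the opposite case, continuing with the same mock connector and "test" table (and assuming the mock applies standard delete semantics):

// An older delete does not mask a newer insert.
BatchWriter w = c.createBatchWriter("test", new BatchWriterConfig());
Mutation insert = new Mutation("r2");
insert.put("cf1", "cq1", 5, "v5");        // insert at timestamp 5
w.addMutation(insert);
Mutation olderDelete = new Mutation("r2");
olderDelete.putDelete("cf1", "cq1", 3);   // delete at timestamp 3 < 5, so "v5" stays visible
w.addMutation(olderDelete);
w.close();
Scanner check = c.createScanner("test", Authorizations.EMPTY);
assertEquals(1, Iterators.size(check.iterator()));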

Aggregations

BatchWriter (org.apache.accumulo.core.client.BatchWriter): 402
Mutation (org.apache.accumulo.core.data.Mutation): 360
Test (org.junit.Test): 264
Value (org.apache.accumulo.core.data.Value): 250
BatchWriterConfig (org.apache.accumulo.core.client.BatchWriterConfig): 246
Text (org.apache.hadoop.io.Text): 194
Key (org.apache.accumulo.core.data.Key): 179
Scanner (org.apache.accumulo.core.client.Scanner): 174
Connector (org.apache.accumulo.core.client.Connector): 169
IteratorSetting (org.apache.accumulo.core.client.IteratorSetting): 81
Authorizations (org.apache.accumulo.core.security.Authorizations): 68
Range (org.apache.accumulo.core.data.Range): 61
Entry (java.util.Map.Entry): 51
Map (java.util.Map): 50
BatchScanner (org.apache.accumulo.core.client.BatchScanner): 46
MutationsRejectedException (org.apache.accumulo.core.client.MutationsRejectedException): 44
TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException): 40
HashMap (java.util.HashMap): 38
ArrayList (java.util.ArrayList): 36
Status (org.apache.accumulo.server.replication.proto.Replication.Status): 32