
Example 6 with CellBuilder

Use of org.apache.hadoop.hbase.CellBuilder in project hbase by apache.

From class ThriftHBaseServiceHandler, method mutateRowsTs.

@Override
public void mutateRowsTs(ByteBuffer tableName, List<BatchMutation> rowBatches, long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError, IllegalArgument, TException {
    List<Put> puts = new ArrayList<>();
    List<Delete> deletes = new ArrayList<>();
    CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
    for (BatchMutation batch : rowBatches) {
        byte[] row = getBytes(batch.row);
        List<Mutation> mutations = batch.mutations;
        Delete delete = new Delete(row);
        addAttributes(delete, attributes);
        Put put = new Put(row, timestamp);
        addAttributes(put, attributes);
        for (Mutation m : mutations) {
            byte[][] famAndQf = CellUtil.parseColumn(getBytes(m.column));
            if (m.isDelete) {
                // no qualifier, family only.
                if (famAndQf.length == 1) {
                    delete.addFamily(famAndQf[0], timestamp);
                } else {
                    delete.addColumns(famAndQf[0], famAndQf[1], timestamp);
                }
                delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
            } else {
                if (famAndQf.length == 1) {
                    LOG.warn("No column qualifier specified. Delete is the only mutation supported " + "over the whole column family.");
                }
                if (famAndQf.length == 2) {
                    try {
                        put.add(builder.clear()
                            .setRow(put.getRow())
                            .setFamily(famAndQf[0])
                            .setQualifier(famAndQf[1])
                            .setTimestamp(put.getTimestamp())
                            .setType(Cell.Type.Put)
                            .setValue(m.value != null ? getBytes(m.value) : HConstants.EMPTY_BYTE_ARRAY)
                            .build());
                    } catch (IOException e) {
                        throw new IllegalArgumentException(e);
                    }
                } else {
                    throw new IllegalArgumentException("Invalid famAndQf provided.");
                }
                put.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
            }
        }
        if (!delete.isEmpty()) {
            deletes.add(delete);
        }
        if (!put.isEmpty()) {
            puts.add(put);
        }
    }
    Table table = null;
    try {
        table = getTable(tableName);
        if (!puts.isEmpty()) {
            table.put(puts);
        }
        if (!deletes.isEmpty()) {
            table.delete(deletes);
        }
    } catch (IOException e) {
        LOG.warn(e.getMessage(), e);
        throw getIOError(e);
    } catch (IllegalArgumentException e) {
        LOG.warn(e.getMessage(), e);
        throw new IllegalArgument(Throwables.getStackTraceAsString(e));
    } finally {
        closeTable(table);
    }
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), Table (org.apache.hadoop.hbase.client.Table), ArrayList (java.util.ArrayList), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), IOException (java.io.IOException), Put (org.apache.hadoop.hbase.client.Put), CellBuilder (org.apache.hadoop.hbase.CellBuilder), BatchMutation (org.apache.hadoop.hbase.thrift.generated.BatchMutation), Mutation (org.apache.hadoop.hbase.thrift.generated.Mutation), IllegalArgument (org.apache.hadoop.hbase.thrift.generated.IllegalArgument)
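
The handler above reuses a single SHALLOW_COPY builder and calls clear() before building each cell. Below is a minimal, self-contained sketch of the same reuse pattern outside the Thrift handler; the class name CellBuilderReuseSketch and the row/family/qualifier/value literals are illustrative and not taken from the HBase source.

import java.util.Arrays;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class CellBuilderReuseSketch {

    public static void main(String[] args) throws Exception {
        // One SHALLOW_COPY builder is reused for every cell; clear() resets its
        // state between build() calls, mirroring the loop in mutateRowsTs above.
        CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
        Put put = new Put(Bytes.toBytes("row-1"));
        for (String qualifier : Arrays.asList("q1", "q2")) {
            put.add(builder.clear()
                .setRow(put.getRow())
                .setFamily(Bytes.toBytes("cf"))
                .setQualifier(Bytes.toBytes(qualifier))
                .setTimestamp(put.getTimestamp())
                .setType(Cell.Type.Put)
                .setValue(Bytes.toBytes("value-" + qualifier))
                .build());
        }
        System.out.println("Cells in put: " + put.size());
    }
}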

Example 7 with CellBuilder

Use of org.apache.hadoop.hbase.CellBuilder in project hbase by apache.

From class Mutation, method getCellBuilder.

/**
 * Get a CellBuilder instance that already has the relevant Type and Row set.
 * @param cellBuilderType e.g. CellBuilderType.SHALLOW_COPY
 * @param cellType e.g. Cell.Type.Put
 * @return CellBuilder which already has the relevant Type and Row set.
 */
protected CellBuilder getCellBuilder(CellBuilderType cellBuilderType, Cell.Type cellType) {
    CellBuilder builder = CellBuilderFactory.create(cellBuilderType).setRow(row).setType(cellType);
    return new CellBuilder() {

        @Override
        public CellBuilder setRow(byte[] row) {
            return this;
        }

        @Override
        public CellBuilder setType(Cell.Type type) {
            return this;
        }

        @Override
        public CellBuilder setRow(byte[] row, int rOffset, int rLength) {
            return this;
        }

        @Override
        public CellBuilder setFamily(byte[] family) {
            builder.setFamily(family);
            return this;
        }

        @Override
        public CellBuilder setFamily(byte[] family, int fOffset, int fLength) {
            builder.setFamily(family, fOffset, fLength);
            return this;
        }

        @Override
        public CellBuilder setQualifier(byte[] qualifier) {
            builder.setQualifier(qualifier);
            return this;
        }

        @Override
        public CellBuilder setQualifier(byte[] qualifier, int qOffset, int qLength) {
            builder.setQualifier(qualifier, qOffset, qLength);
            return this;
        }

        @Override
        public CellBuilder setTimestamp(long timestamp) {
            builder.setTimestamp(timestamp);
            return this;
        }

        @Override
        public CellBuilder setValue(byte[] value) {
            builder.setValue(value);
            return this;
        }

        @Override
        public CellBuilder setValue(byte[] value, int vOffset, int vLength) {
            builder.setValue(value, vOffset, vLength);
            return this;
        }

        @Override
        public Cell build() {
            return builder.build();
        }

        @Override
        public CellBuilder clear() {
            builder.clear();
            // reset the row and type
            builder.setRow(row);
            builder.setType(cellType);
            return this;
        }
    };
}
Also used: CellBuilderType (org.apache.hadoop.hbase.CellBuilderType), CellBuilder (org.apache.hadoop.hbase.CellBuilder)
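
Because the anonymous wrapper above silently ignores setRow and setType, callers only supply family, qualifier, timestamp and value. The sketch below shows that usage; it assumes Put exposes this builder via a public getCellBuilder(CellBuilderType) overload, as it does in recent HBase client versions, and uses illustrative row/column values.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class PinnedCellBuilderSketch {

    public static void main(String[] args) throws Exception {
        Put put = new Put(Bytes.toBytes("row-1"));
        // Row and Type are already pinned by the wrapper returned from getCellBuilder,
        // so setRow/setType calls are unnecessary (and would be no-ops anyway).
        Cell cell = put.getCellBuilder(CellBuilderType.SHALLOW_COPY)
            .setFamily(Bytes.toBytes("cf"))
            .setQualifier(Bytes.toBytes("q"))
            .setTimestamp(put.getTimestamp())
            .setValue(Bytes.toBytes("v"))
            .build();
        put.add(cell);
        System.out.println("Cells in put: " + put.size());
    }
}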

Example 8 with CellBuilder

Use of org.apache.hadoop.hbase.CellBuilder in project hbase by apache.

From class TestHFile, method testCorruptOutOfOrderHFileWrite.

@Test
public void testCorruptOutOfOrderHFileWrite() throws IOException {
    Path path = new Path(ROOT_DIR, testName.getMethodName());
    FSDataOutputStream mockedOutputStream = Mockito.mock(FSDataOutputStream.class);
    String columnFamily = "MyColumnFamily";
    String tableName = "MyTableName";
    HFileContext fileContext = new HFileContextBuilder()
        .withHFileName(testName.getMethodName() + "HFile")
        .withBlockSize(minBlockSize)
        .withColumnFamily(Bytes.toBytes(columnFamily))
        .withTableName(Bytes.toBytes(tableName))
        .withHBaseCheckSum(false)
        .withCompression(Compression.Algorithm.NONE)
        .withCompressTags(false)
        .build();
    HFileWriterImpl writer = new HFileWriterImpl(conf, cacheConf, path, mockedOutputStream, fileContext);
    CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
    byte[] row = Bytes.toBytes("foo");
    byte[] qualifier = Bytes.toBytes("qualifier");
    byte[] cf = Bytes.toBytes(columnFamily);
    byte[] val = Bytes.toBytes("fooVal");
    long firstTS = 100L;
    long secondTS = 101L;
    Cell firstCell = cellBuilder.setRow(row).setValue(val).setTimestamp(firstTS)
        .setQualifier(qualifier).setFamily(cf).setType(Cell.Type.Put).build();
    Cell secondCell = cellBuilder.setRow(row).setValue(val).setTimestamp(secondTS)
        .setQualifier(qualifier).setFamily(cf).setType(Cell.Type.Put).build();
    // secondCell sorts before firstCell because later timestamps come first,
    // so appending it after firstCell violates the writer's ordering check.
    writer.append(firstCell);
    try {
        writer.append(secondCell);
    } catch (IOException ie) {
        String message = ie.getMessage();
        Assert.assertTrue(message.contains("not lexically larger"));
        Assert.assertTrue(message.contains(tableName));
        Assert.assertTrue(message.contains(columnFamily));
        return;
    }
    Assert.fail("Exception wasn't thrown even though Cells were appended in the wrong order!");
}
Also used: Path (org.apache.hadoop.fs.Path), CellBuilder (org.apache.hadoop.hbase.CellBuilder), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), IOException (java.io.IOException), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)
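
The test depends on HBase's cell ordering: for the same row, family and qualifier, higher timestamps sort first, so the cell with timestamp 101 must be appended before the one with timestamp 100. Below is a small standalone sketch of that comparison, assuming CellComparator.getInstance() from the public HBase API; the class name TimestampOrderSketch is illustrative.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.util.Bytes;

public class TimestampOrderSketch {

    public static void main(String[] args) {
        CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
        byte[] row = Bytes.toBytes("foo");
        byte[] cf = Bytes.toBytes("MyColumnFamily");
        byte[] qualifier = Bytes.toBytes("qualifier");
        byte[] val = Bytes.toBytes("fooVal");

        Cell older = builder.clear().setRow(row).setFamily(cf).setQualifier(qualifier)
            .setTimestamp(100L).setType(Cell.Type.Put).setValue(val).build();
        Cell newer = builder.clear().setRow(row).setFamily(cf).setQualifier(qualifier)
            .setTimestamp(101L).setType(Cell.Type.Put).setValue(val).build();

        // Within the same row/family/qualifier, higher timestamps compare as
        // "smaller", i.e. they come first in HFile order.
        int cmp = CellComparator.getInstance().compare(newer, older);
        System.out.println("newer vs older: " + cmp); // expected to be negative
    }
}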

Example 9 with CellBuilder

Use of org.apache.hadoop.hbase.CellBuilder in project hbase by apache.

From class ExpAsStringVisibilityLabelServiceImpl, method setAuths.

@Override
public OperationStatus[] setAuths(byte[] user, List<byte[]> authLabels) throws IOException {
    assert labelsRegion != null;
    OperationStatus[] finalOpStatus = new OperationStatus[authLabels.size()];
    Put p = new Put(user);
    CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
    for (byte[] auth : authLabels) {
        p.add(builder.clear()
            .setRow(p.getRow())
            .setFamily(LABELS_TABLE_FAMILY)
            .setQualifier(auth)
            .setTimestamp(p.getTimestamp())
            .setType(Cell.Type.Put)
            .setValue(DUMMY_VALUE)
            .build());
    }
    this.labelsRegion.put(p);
    // This is a testing impl and so not doing any caching
    for (int i = 0; i < authLabels.size(); i++) {
        finalOpStatus[i] = new OperationStatus(OperationStatusCode.SUCCESS);
    }
    return finalOpStatus;
}
Also used: OperationStatus (org.apache.hadoop.hbase.regionserver.OperationStatus), CellBuilder (org.apache.hadoop.hbase.CellBuilder), Put (org.apache.hadoop.hbase.client.Put)

Aggregations

CellBuilder (org.apache.hadoop.hbase.CellBuilder): 9
IOException (java.io.IOException): 3
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 3
Put (org.apache.hadoop.hbase.client.Put): 3
Test (org.junit.Test): 3
File (java.io.File): 2
FileOutputStream (java.io.FileOutputStream): 2
ArrayList (java.util.ArrayList): 2
Cell (org.apache.hadoop.hbase.Cell): 2
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 2
KeyValue (org.apache.hadoop.hbase.KeyValue): 2
Delete (org.apache.hadoop.hbase.client.Delete): 2
Table (org.apache.hadoop.hbase.client.Table): 2
HFile (org.apache.hadoop.hbase.io.hfile.HFile): 2
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 2
BatchMutation (org.apache.hadoop.hbase.thrift.generated.BatchMutation): 2
IllegalArgument (org.apache.hadoop.hbase.thrift.generated.IllegalArgument): 2
Mutation (org.apache.hadoop.hbase.thrift.generated.Mutation): 2
Configuration (org.apache.hadoop.conf.Configuration): 1
Path (org.apache.hadoop.fs.Path): 1