Search in sources :

Example 6 with TColumn

use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.

The following example is taken from the class TestThriftHBaseServiceHandler, method testDeleteAllTimestamps.

/**
 * Verifies that a {@link TDelete} with type {@code DELETE_COLUMNS} removes every version of a
 * cell: two versions of the same column are written, confirmed via a multi-version get, and a
 * single deleteSingle call must then leave the row completely empty.
 */
@Test
public void testDeleteAllTimestamps() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    byte[] rowName = Bytes.toBytes("testDeleteAllTimestamps");
    ByteBuffer table = wrap(tableAname);
    List<TColumnValue> columnValues = new ArrayList<>(1);
    TColumnValue columnValueA =
        new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
    // Write the first version 10 ms in the past so the two puts get distinct timestamps.
    columnValueA.setTimestamp(EnvironmentEdgeManager.currentTime() - 10);
    columnValues.add(columnValueA);
    // The generated TPut constructor already stores columnValues; a separate
    // setColumnValues(columnValues) call would be redundant.
    TPut put = new TPut(wrap(rowName), columnValues);
    handler.put(table, put);
    // Second version of the same cell at the current time.
    columnValueA.setTimestamp(EnvironmentEdgeManager.currentTime());
    handler.put(table, put);
    TGet get = new TGet(wrap(rowName));
    get.setMaxVersions(2);
    TResult result = handler.get(table, get);
    // Both versions must be visible before the delete.
    assertEquals(2, result.getColumnValuesSize());
    TDelete delete = new TDelete(wrap(rowName));
    List<TColumn> deleteColumns = new ArrayList<>(1);
    TColumn deleteColumn = new TColumn(wrap(familyAname));
    deleteColumn.setQualifier(qualifierAname);
    deleteColumns.add(deleteColumn);
    delete.setColumns(deleteColumns);
    // DELETE_COLUMNS (all versions) is the default anyway; set explicitly for clarity.
    delete.setDeleteType(TDeleteType.DELETE_COLUMNS);
    handler.deleteSingle(table, delete);
    get = new TGet(wrap(rowName));
    result = handler.get(table, get);
    // Every version was deleted, so the row no longer exists at all.
    assertNull(result.getRow());
    assertEquals(0, result.getColumnValuesSize());
}
Also used : TGet(org.apache.hadoop.hbase.thrift2.generated.TGet) TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) ArrayList(java.util.ArrayList) TDelete(org.apache.hadoop.hbase.thrift2.generated.TDelete) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult) Test(org.junit.Test)

Example 7 with TColumn

use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.

The following example is taken from the class TestThriftHBaseServiceHandler, method testDeleteSingleTimestamp.

/**
 * Verifies that a {@link TDelete} with type {@code DELETE_COLUMN} removes only the newest
 * version of a cell: after writing two versions, a deleteSingle call must leave the row
 * present with exactly the older version remaining.
 */
@Test
public void testDeleteSingleTimestamp() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    byte[] rowName = Bytes.toBytes("testDeleteSingleTimestamp");
    ByteBuffer table = wrap(tableAname);
    // Two distinct timestamps; timestamp1 is strictly older.
    long timestamp1 = EnvironmentEdgeManager.currentTime() - 10;
    long timestamp2 = EnvironmentEdgeManager.currentTime();
    List<TColumnValue> columnValues = new ArrayList<>(1);
    TColumnValue columnValueA =
        new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
    columnValueA.setTimestamp(timestamp1);
    columnValues.add(columnValueA);
    // The generated TPut constructor already stores columnValues; a separate
    // setColumnValues(columnValues) call would be redundant.
    TPut put = new TPut(wrap(rowName), columnValues);
    handler.put(table, put);
    // Second version of the same cell at timestamp2.
    columnValueA.setTimestamp(timestamp2);
    handler.put(table, put);
    TGet get = new TGet(wrap(rowName));
    get.setMaxVersions(2);
    TResult result = handler.get(table, get);
    // Both versions must be visible before the delete.
    assertEquals(2, result.getColumnValuesSize());
    TDelete delete = new TDelete(wrap(rowName));
    List<TColumn> deleteColumns = new ArrayList<>(1);
    TColumn deleteColumn = new TColumn(wrap(familyAname));
    deleteColumn.setQualifier(qualifierAname);
    deleteColumns.add(deleteColumn);
    delete.setColumns(deleteColumns);
    // DELETE_COLUMN deletes only the most recent version of the column.
    delete.setDeleteType(TDeleteType.DELETE_COLUMN);
    handler.deleteSingle(table, delete);
    get = new TGet(wrap(rowName));
    result = handler.get(table, get);
    assertArrayEquals(rowName, result.getRow());
    assertEquals(1, result.getColumnValuesSize());
    // The older timestamp should remain.
    assertEquals(timestamp1, result.getColumnValues().get(0).getTimestamp());
}
Also used : TGet(org.apache.hadoop.hbase.thrift2.generated.TGet) TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) ArrayList(java.util.ArrayList) TDelete(org.apache.hadoop.hbase.thrift2.generated.TDelete) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) Test(org.junit.Test)

Example 8 with TColumn

use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.

The following example is taken from the class TestThriftHBaseServiceHandler, method testMutateRow.

/**
 * Put valueA to a row, make sure put has happened, then create a mutation object to put valueB
 * and delete valueA, then check that the row value is only valueB. This exercises
 * {@code mutateRow} applying a put and a delete atomically to the same row.
 */
@Test
public void testMutateRow() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    byte[] rowName = Bytes.toBytes("testMutateRow");
    ByteBuffer table = wrap(tableAname);
    List<TColumnValue> columnValuesA = new ArrayList<>(1);
    TColumnValue columnValueA =
        new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname));
    columnValuesA.add(columnValueA);
    // The generated TPut constructor already stores the column values; the extra
    // setColumnValues calls the original had were redundant.
    TPut putA = new TPut(wrap(rowName), columnValuesA);
    handler.put(table, putA);
    TGet get = new TGet(wrap(rowName));
    TResult result = handler.get(table, get);
    assertArrayEquals(rowName, result.getRow());
    // Confirm the initial put landed before mutating the row.
    List<TColumnValue> returnedColumnValues = result.getColumnValues();
    List<TColumnValue> expectedColumnValues = new ArrayList<>(1);
    expectedColumnValues.add(columnValueA);
    assertTColumnValuesEqual(expectedColumnValues, returnedColumnValues);
    List<TColumnValue> columnValuesB = new ArrayList<>(1);
    TColumnValue columnValueB =
        new TColumnValue(wrap(familyAname), wrap(qualifierBname), wrap(valueBname));
    columnValuesB.add(columnValueB);
    TPut putB = new TPut(wrap(rowName), columnValuesB);
    // Delete targets only family A / qualifier A, leaving the new cell untouched.
    TDelete delete = new TDelete(wrap(rowName));
    List<TColumn> deleteColumns = new ArrayList<>(1);
    TColumn deleteColumn = new TColumn(wrap(familyAname));
    deleteColumn.setQualifier(qualifierAname);
    deleteColumns.add(deleteColumn);
    delete.setColumns(deleteColumns);
    // Bundle the put and the delete into one atomic row mutation.
    List<TMutation> mutations = new ArrayList<>(2);
    TMutation mutationA = TMutation.put(putB);
    mutations.add(mutationA);
    TMutation mutationB = TMutation.deleteSingle(delete);
    mutations.add(mutationB);
    TRowMutations tRowMutations = new TRowMutations(wrap(rowName), mutations);
    handler.mutateRow(table, tRowMutations);
    result = handler.get(table, get);
    assertArrayEquals(rowName, result.getRow());
    // Only valueB should remain after the combined put + delete.
    returnedColumnValues = result.getColumnValues();
    expectedColumnValues = new ArrayList<>(1);
    expectedColumnValues.add(columnValueB);
    assertTColumnValuesEqual(expectedColumnValues, returnedColumnValues);
}
Also used : TGet(org.apache.hadoop.hbase.thrift2.generated.TGet) TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) ArrayList(java.util.ArrayList) TDelete(org.apache.hadoop.hbase.thrift2.generated.TDelete) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult) TMutation(org.apache.hadoop.hbase.thrift2.generated.TMutation) TRowMutations(org.apache.hadoop.hbase.thrift2.generated.TRowMutations) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) Test(org.junit.Test)

Example 9 with TColumn

use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.

The following example is taken from the class ThriftUtilities, method scanFromHBase.

/**
 * Converts an HBase {@link Scan} into its Thrift {@link TScan} representation, copying row
 * bounds, caching, versions, columns, time ranges, attributes, authorizations, read options,
 * and the serialized filter.
 *
 * @param in the HBase {@code Scan} to convert
 * @return the equivalent Thrift {@code TScan}
 * @throws IOException if serializing the scan's filter fails
 */
public static TScan scanFromHBase(Scan in) throws IOException {
    TScan out = new TScan();
    out.setStartRow(in.getStartRow());
    out.setStopRow(in.getStopRow());
    out.setCaching(in.getCaching());
    out.setMaxVersions(in.getMaxVersions());
    for (Map.Entry<byte[], NavigableSet<byte[]>> family : in.getFamilyMap().entrySet()) {
        if (family.getValue() != null && !family.getValue().isEmpty()) {
            // One TColumn per (family, qualifier) pair.
            for (byte[] qualifier : family.getValue()) {
                TColumn column = new TColumn();
                column.setFamily(family.getKey());
                column.setQualifier(qualifier);
                out.addToColumns(column);
            }
        } else {
            // No qualifiers: a family-only TColumn selects the whole family.
            TColumn column = new TColumn();
            column.setFamily(family.getKey());
            out.addToColumns(column);
        }
    }
    TTimeRange tTimeRange = new TTimeRange();
    tTimeRange.setMinStamp(in.getTimeRange().getMin()).setMaxStamp(in.getTimeRange().getMax());
    out.setTimeRange(tTimeRange);
    out.setBatchSize(in.getBatch());
    for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
        out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())),
            ByteBuffer.wrap(attribute.getValue()));
    }
    try {
        Authorizations authorizations = in.getAuthorizations();
        if (authorizations != null) {
            TAuthorization tAuthorization = new TAuthorization();
            tAuthorization.setLabels(authorizations.getLabels());
            out.setAuthorizations(tAuthorization);
        }
    } catch (DeserializationException e) {
        // DeserializationException is not in this method's throws clause, so wrap it.
        throw new RuntimeException(e);
    }
    out.setReversed(in.isReversed());
    out.setCacheBlocks(in.getCacheBlocks());
    out.setReadType(readTypeFromHBase(in.getReadType()));
    out.setLimit(in.getLimit());
    out.setConsistency(consistencyFromHBase(in.getConsistency()));
    out.setTargetReplicaId(in.getReplicaId());
    for (Map.Entry<byte[], TimeRange> entry : in.getColumnFamilyTimeRange().entrySet()) {
        if (entry.getValue() != null) {
            TTimeRange timeRange = new TTimeRange();
            timeRange.setMinStamp(entry.getValue().getMin())
                .setMaxStamp(entry.getValue().getMax());
            out.putToColFamTimeRangeMap(ByteBuffer.wrap(entry.getKey()), timeRange);
        }
    }
    if (in.getFilter() != null) {
        // The method already declares IOException, so propagate it directly rather than
        // wrapping it in a RuntimeException as the original did.
        out.setFilterBytes(filterFromHBase(in.getFilter()));
    }
    return out;
}
Also used : NavigableSet(java.util.NavigableSet) Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) TTimeRange(org.apache.hadoop.hbase.thrift2.generated.TTimeRange) TAuthorization(org.apache.hadoop.hbase.thrift2.generated.TAuthorization) IOException(java.io.IOException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException) TTimeRange(org.apache.hadoop.hbase.thrift2.generated.TTimeRange) TimeRange(org.apache.hadoop.hbase.io.TimeRange) TScan(org.apache.hadoop.hbase.thrift2.generated.TScan) Map(java.util.Map)

Example 10 with TColumn

use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.

The following example is taken from the class ThriftUtilities, method getFromThrift.

/**
 * Creates a {@link Get} (HBase) from a {@link TGet} (Thrift).
 *
 * This ignores any timestamps set on {@link TColumn} objects.
 *
 * Note: if both {@code filterString} and {@code filterBytes} are set on the input, the
 * deserialized {@code filterBytes} filter wins, because it is applied last.
 *
 * @param in the <code>TGet</code> to convert
 *
 * @return <code>Get</code> object
 *
 * @throws IOException if an invalid time range or max version parameter is given
 */
public static Get getFromThrift(TGet in) throws IOException {
    Get out = new Get(in.getRow());
    // Timestamp overwrites time range if both are set
    if (in.isSetTimestamp()) {
        out.setTimestamp(in.getTimestamp());
    } else if (in.isSetTimeRange()) {
        out.setTimeRange(in.getTimeRange().getMinStamp(), in.getTimeRange().getMaxStamp());
    }
    if (in.isSetMaxVersions()) {
        out.readVersions(in.getMaxVersions());
    }
    if (in.isSetFilterString()) {
        // Parse the human-readable filter expression into an HBase Filter.
        ParseFilter parseFilter = new ParseFilter();
        out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
    }
    if (in.isSetAttributes()) {
        addAttributes(out, in.getAttributes());
    }
    if (in.isSetAuthorizations()) {
        out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
    }
    if (in.isSetConsistency()) {
        out.setConsistency(consistencyFromThrift(in.getConsistency()));
    }
    if (in.isSetTargetReplicaId()) {
        out.setReplicaId(in.getTargetReplicaId());
    }
    if (in.isSetCacheBlocks()) {
        out.setCacheBlocks(in.isCacheBlocks());
    }
    if (in.isSetStoreLimit()) {
        out.setMaxResultsPerColumnFamily(in.getStoreLimit());
    }
    if (in.isSetStoreOffset()) {
        out.setRowOffsetPerColumnFamily(in.getStoreOffset());
    }
    if (in.isSetExistence_only()) {
        out.setCheckExistenceOnly(in.isExistence_only());
    }
    if (in.isSetColumns()) {
        for (TColumn column : in.getColumns()) {
            if (column.isSetQualifier()) {
                // Family + qualifier selects a single column.
                out.addColumn(column.getFamily(), column.getQualifier());
            } else {
                // Family only selects the whole column family.
                out.addFamily(column.getFamily());
            }
        }
    }
    if (in.isSetFilterBytes()) {
        // Applied after filterString above, so a serialized filter takes precedence.
        out.setFilter(filterFromThrift(in.getFilterBytes()));
    }
    return out;
}
Also used : Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) ParseFilter(org.apache.hadoop.hbase.filter.ParseFilter) TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) Get(org.apache.hadoop.hbase.client.Get) TGet(org.apache.hadoop.hbase.thrift2.generated.TGet)

Aggregations

TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn)19 ByteBuffer (java.nio.ByteBuffer)15 ArrayList (java.util.ArrayList)15 TColumnValue (org.apache.hadoop.hbase.thrift2.generated.TColumnValue)14 TPut (org.apache.hadoop.hbase.thrift2.generated.TPut)14 TResult (org.apache.hadoop.hbase.thrift2.generated.TResult)14 Test (org.junit.Test)14 TScan (org.apache.hadoop.hbase.thrift2.generated.TScan)10 TGet (org.apache.hadoop.hbase.thrift2.generated.TGet)8 TDelete (org.apache.hadoop.hbase.thrift2.generated.TDelete)7 TIllegalArgument (org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument)5 Map (java.util.Map)4 Authorizations (org.apache.hadoop.hbase.security.visibility.Authorizations)4 TAuthorization (org.apache.hadoop.hbase.thrift2.generated.TAuthorization)4 TTimeRange (org.apache.hadoop.hbase.thrift2.generated.TTimeRange)3 IOException (java.io.IOException)2 NavigableSet (java.util.NavigableSet)2 DeserializationException (org.apache.hadoop.hbase.exceptions.DeserializationException)2 ParseFilter (org.apache.hadoop.hbase.filter.ParseFilter)2 TCellVisibility (org.apache.hadoop.hbase.thrift2.generated.TCellVisibility)2