Search in sources :

Example 16 with TColumn

use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.

The class TestThriftHBaseServiceHandler, method testScan.

@Test
public void testScan() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    ByteBuffer tableName = wrap(tableAname);

    // Load ten rows, each holding a single cell in familyA:qualifierA.
    List<TColumnValue> cells = new ArrayList<>(1);
    cells.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
    for (int row = 0; row < 10; row++) {
        handler.put(tableName, new TPut(wrap(Bytes.toBytes("testScan" + row)), cells));
    }

    // Build a scan over the "testScan" prefix range, restricted to that one column.
    TColumn col = new TColumn();
    col.setFamily(familyAname);
    col.setQualifier(qualifierAname);
    List<TColumn> cols = new ArrayList<>(1);
    cols.add(col);
    TScan scan = new TScan();
    scan.setColumns(cols);
    scan.setStartRow(Bytes.toBytes("testScan"));
    scan.setStopRow(Bytes.toBytes("testScan\uffff"));

    // Open the scanner and verify all ten rows come back, in row-key order.
    int scannerId = handler.openScanner(tableName, scan);
    List<TResult> rows = handler.getScannerRows(scannerId, 10);
    assertEquals(10, rows.size());
    for (int row = 0; row < 10; row++) {
        assertArrayEquals(Bytes.toBytes("testScan" + row), rows.get(row).getRow());
    }

    // A second fetch past the end of the scan must yield nothing.
    rows = handler.getScannerRows(scannerId, 10);
    assertEquals(0, rows.size());

    // After closing, the scanner id must be rejected.
    handler.closeScanner(scannerId);
    try {
        handler.getScannerRows(scannerId, 10);
        fail("Scanner id should be invalid");
    } catch (TIllegalArgument e) {
        // expected: the scanner was closed above
    }
}
Also used : TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) ArrayList(java.util.ArrayList) TColumnValue(org.apache.hadoop.hbase.thrift2.generated.TColumnValue) ByteBuffer(java.nio.ByteBuffer) TResult(org.apache.hadoop.hbase.thrift2.generated.TResult) TIllegalArgument(org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument) TScan(org.apache.hadoop.hbase.thrift2.generated.TScan) TPut(org.apache.hadoop.hbase.thrift2.generated.TPut) Test(org.junit.Test)

Example 17 with TColumn

use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.

The class ThriftUtilities, method scanFromThrift.

/**
 * Converts a thrift2 {@link TScan} into a client-side {@link Scan}.
 *
 * Only fields the caller explicitly populated on the thrift object (as reported
 * by its generated {@code isSet*} flags) are copied over; everything else keeps
 * the {@link Scan} defaults. Note that if both {@code filterString} and
 * {@code filterBytes} are set, {@code filterBytes} wins because it is applied
 * last and overwrites the parsed filter.
 *
 * @param in thrift scan specification to convert
 * @return a new {@link Scan} reflecting the set fields of {@code in}
 * @throws IOException if the time range is invalid or the serialized filter
 *         in {@code filterBytes} cannot be deserialized
 */
public static Scan scanFromThrift(TScan in) throws IOException {
    Scan out = new Scan();
    if (in.isSetStartRow()) {
        out.withStartRow(in.getStartRow());
    }
    if (in.isSetStopRow()) {
        out.withStopRow(in.getStopRow());
    }
    if (in.isSetCaching()) {
        out.setCaching(in.getCaching());
    }
    if (in.isSetMaxVersions()) {
        out.readVersions(in.getMaxVersions());
    }
    if (in.isSetColumns()) {
        for (TColumn column : in.getColumns()) {
            // A TColumn without a qualifier selects the whole family.
            if (column.isSetQualifier()) {
                out.addColumn(column.getFamily(), column.getQualifier());
            } else {
                out.addFamily(column.getFamily());
            }
        }
    }
    // Time range is read directly (null when unset) rather than via isSet;
    // it is only applied when both bounds were provided.
    TTimeRange timeRange = in.getTimeRange();
    if (timeRange != null && timeRange.isSetMinStamp() && timeRange.isSetMaxStamp()) {
        out.setTimeRange(timeRange.getMinStamp(), timeRange.getMaxStamp());
    }
    if (in.isSetBatchSize()) {
        out.setBatch(in.getBatchSize());
    }
    if (in.isSetFilterString()) {
        // Parse the textual filter DSL into a Filter instance.
        ParseFilter parseFilter = new ParseFilter();
        out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
    }
    if (in.isSetAttributes()) {
        addAttributes(out, in.getAttributes());
    }
    if (in.isSetAuthorizations()) {
        out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
    }
    if (in.isSetReversed()) {
        out.setReversed(in.isReversed());
    }
    if (in.isSetCacheBlocks()) {
        out.setCacheBlocks(in.isCacheBlocks());
    }
    if (in.isSetColFamTimeRangeMap()) {
        // Per-column-family time ranges, keyed by family name.
        Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = in.getColFamTimeRangeMap();
        if (MapUtils.isNotEmpty(colFamTimeRangeMap)) {
            for (Map.Entry<ByteBuffer, TTimeRange> entry : colFamTimeRangeMap.entrySet()) {
                out.setColumnFamilyTimeRange(Bytes.toBytes(entry.getKey()), entry.getValue().getMinStamp(), entry.getValue().getMaxStamp());
            }
        }
    }
    if (in.isSetReadType()) {
        out.setReadType(readTypeFromThrift(in.getReadType()));
    }
    if (in.isSetLimit()) {
        out.setLimit(in.getLimit());
    }
    if (in.isSetConsistency()) {
        out.setConsistency(consistencyFromThrift(in.getConsistency()));
    }
    if (in.isSetTargetReplicaId()) {
        out.setReplicaId(in.getTargetReplicaId());
    }
    if (in.isSetFilterBytes()) {
        // Applied last: deliberately overrides any filter set from filterString.
        out.setFilter(filterFromThrift(in.getFilterBytes()));
    }
    return out;
}
Also used : Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) ParseFilter(org.apache.hadoop.hbase.filter.ParseFilter) TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) TTimeRange(org.apache.hadoop.hbase.thrift2.generated.TTimeRange) TScan(org.apache.hadoop.hbase.thrift2.generated.TScan) Scan(org.apache.hadoop.hbase.client.Scan) ByteBuffer(java.nio.ByteBuffer) Map(java.util.Map)

Example 18 with TColumn

use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.

The class ThriftUtilities, method getFromHBase.

/**
 * Converts a client-side {@link Get} into its thrift2 {@link TGet} representation.
 *
 * Row, time range, max versions, attributes, authorizations, consistency,
 * replica id, cache-blocks, store limit/offset, existence-only flag, the
 * requested columns, and any serialized filter are all carried over.
 *
 * @param in the client Get to convert
 * @return a new TGet mirroring {@code in}
 * @throws RuntimeException wrapping a DeserializationException if the
 *         authorizations cannot be read, or an IOException if the filter
 *         cannot be serialized
 */
public static TGet getFromHBase(Get in) {
    TGet tGet = new TGet();
    tGet.setRow(in.getRow());

    // Both time-range bounds are always copied (Get defaults to [0, MAX]).
    TTimeRange range = new TTimeRange();
    range.setMaxStamp(in.getTimeRange().getMax());
    range.setMinStamp(in.getTimeRange().getMin());
    tGet.setTimeRange(range);

    tGet.setMaxVersions(in.getMaxVersions());

    // Attribute keys are Strings client-side; thrift carries them as UTF-8 bytes.
    for (Map.Entry<String, byte[]> attr : in.getAttributesMap().entrySet()) {
        tGet.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attr.getKey())), ByteBuffer.wrap(attr.getValue()));
    }

    try {
        Authorizations auths = in.getAuthorizations();
        if (auths != null) {
            TAuthorization tAuth = new TAuthorization();
            tAuth.setLabels(auths.getLabels());
            tGet.setAuthorizations(tAuth);
        }
    } catch (DeserializationException e) {
        throw new RuntimeException(e);
    }

    tGet.setConsistency(consistencyFromHBase(in.getConsistency()));
    tGet.setTargetReplicaId(in.getReplicaId());
    tGet.setCacheBlocks(in.getCacheBlocks());
    tGet.setStoreLimit(in.getMaxResultsPerColumnFamily());
    tGet.setStoreOffset(in.getRowOffsetPerColumnFamily());
    tGet.setExistence_only(in.isCheckExistenceOnly());

    for (Map.Entry<byte[], NavigableSet<byte[]>> familyEntry : in.getFamilyMap().entrySet()) {
        NavigableSet<byte[]> qualifiers = familyEntry.getValue();
        if (qualifiers == null || qualifiers.isEmpty()) {
            // Whole-family request: a TColumn carrying only the family name.
            TColumn familyOnly = new TColumn();
            familyOnly.setFamily(familyEntry.getKey());
            tGet.addToColumns(familyOnly);
        } else {
            // One TColumn per requested qualifier within this family.
            for (byte[] qualifier : qualifiers) {
                TColumn col = new TColumn();
                col.setFamily(familyEntry.getKey());
                col.setQualifier(qualifier);
                tGet.addToColumns(col);
            }
        }
    }

    if (in.getFilter() != null) {
        try {
            tGet.setFilterBytes(filterFromHBase(in.getFilter()));
        } catch (IOException ioE) {
            throw new RuntimeException(ioE);
        }
    }
    return tGet;
}
Also used : Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) NavigableSet(java.util.NavigableSet) TGet(org.apache.hadoop.hbase.thrift2.generated.TGet) TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) TTimeRange(org.apache.hadoop.hbase.thrift2.generated.TTimeRange) TAuthorization(org.apache.hadoop.hbase.thrift2.generated.TAuthorization) IOException(java.io.IOException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException) Map(java.util.Map)

Example 19 with TColumn

use of org.apache.hadoop.hbase.thrift2.generated.TColumn in project hbase by apache.

The class ThriftUtilities, method deleteFromHBase.

/**
 * Converts a client-side {@link Delete} into its thrift2 {@link TDelete} representation.
 *
 * An empty family-cell map means "delete the whole row", in which case no
 * columns or delete type are set. All cells in the Delete must share one
 * delete type, since TDelete carries a single type for the whole operation.
 *
 * NOTE: one TColumn is emitted per family, so when a family holds several
 * cells the per-cell qualifier/timestamp assignments overwrite each other and
 * only the last cell of that family is reflected.
 *
 * @param in the client Delete to convert
 * @return a new TDelete mirroring {@code in}
 * @throws RuntimeException if the Delete mixes two different delete types
 */
public static TDelete deleteFromHBase(Delete in) {
    TDelete out = new TDelete(ByteBuffer.wrap(in.getRow()));
    // Row-level timestamp is only meaningful when explicitly set by the caller.
    long rowTimestamp = in.getTimestamp();
    if (rowTimestamp != HConstants.LATEST_TIMESTAMP) {
        out.setTimestamp(rowTimestamp);
    }
    for (Map.Entry<String, byte[]> attribute : in.getAttributesMap().entrySet()) {
        out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())), ByteBuffer.wrap(attribute.getValue()));
    }
    if (in.getDurability() != Durability.USE_DEFAULT) {
        out.setDurability(durabilityFromHBase(in.getDurability()));
    }
    // Hoisted: the original re-fetched the family-cell map on every use.
    Map<byte[], List<Cell>> familyCellMap = in.getFamilyCellMap();
    // Delete the whole row.
    if (familyCellMap.isEmpty()) {
        return out;
    }
    List<TColumn> columns = new ArrayList<>(familyCellMap.size());
    TDeleteType type = null;
    for (Map.Entry<byte[], List<Cell>> familyEntry : familyCellMap.entrySet()) {
        // The constructor already fixes the family; no per-cell setFamily needed.
        TColumn column = new TColumn(ByteBuffer.wrap(familyEntry.getKey()));
        for (Cell cell : familyEntry.getValue()) {
            TDeleteType cellDeleteType = deleteTypeFromHBase(cell.getType());
            if (type == null) {
                type = cellDeleteType;
            } else if (type != cellDeleteType) {
                throw new RuntimeException("Only one delete type is supported, but two were found: " + type + " and " + cellDeleteType);
            }
            // CellUtil.cloneQualifier never returns null, so set unconditionally.
            column.setQualifier(CellUtil.cloneQualifier(cell));
            long timestamp = cell.getTimestamp();
            if (timestamp != HConstants.LATEST_TIMESTAMP) {
                column.setTimestamp(timestamp);
            }
        }
        columns.add(column);
    }
    out.setColumns(columns);
    out.setDeleteType(type);
    return out;
}
Also used : TColumn(org.apache.hadoop.hbase.thrift2.generated.TColumn) ArrayList(java.util.ArrayList) TDelete(org.apache.hadoop.hbase.thrift2.generated.TDelete) TDeleteType(org.apache.hadoop.hbase.thrift2.generated.TDeleteType) List(java.util.List) ArrayList(java.util.ArrayList) Map(java.util.Map) Cell(org.apache.hadoop.hbase.Cell)

Aggregations

TColumn (org.apache.hadoop.hbase.thrift2.generated.TColumn)19 ByteBuffer (java.nio.ByteBuffer)15 ArrayList (java.util.ArrayList)15 TColumnValue (org.apache.hadoop.hbase.thrift2.generated.TColumnValue)14 TPut (org.apache.hadoop.hbase.thrift2.generated.TPut)14 TResult (org.apache.hadoop.hbase.thrift2.generated.TResult)14 Test (org.junit.Test)14 TScan (org.apache.hadoop.hbase.thrift2.generated.TScan)10 TGet (org.apache.hadoop.hbase.thrift2.generated.TGet)8 TDelete (org.apache.hadoop.hbase.thrift2.generated.TDelete)7 TIllegalArgument (org.apache.hadoop.hbase.thrift2.generated.TIllegalArgument)5 Map (java.util.Map)4 Authorizations (org.apache.hadoop.hbase.security.visibility.Authorizations)4 TAuthorization (org.apache.hadoop.hbase.thrift2.generated.TAuthorization)4 TTimeRange (org.apache.hadoop.hbase.thrift2.generated.TTimeRange)3 IOException (java.io.IOException)2 NavigableSet (java.util.NavigableSet)2 DeserializationException (org.apache.hadoop.hbase.exceptions.DeserializationException)2 ParseFilter (org.apache.hadoop.hbase.filter.ParseFilter)2 TCellVisibility (org.apache.hadoop.hbase.thrift2.generated.TCellVisibility)2