
Example 1 with TimeRange

Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.

From class ProtobufUtil, method toGet.

/**
   * Create a protocol buffer Get based on a client Get.
   *
   * @param get the client Get
   * @return a protocol buffer Get
   * @throws IOException if the client Get cannot be converted
   */
public static ClientProtos.Get toGet(final Get get) throws IOException {
    ClientProtos.Get.Builder builder = ClientProtos.Get.newBuilder();
    builder.setRow(UnsafeByteOperations.unsafeWrap(get.getRow()));
    builder.setCacheBlocks(get.getCacheBlocks());
    builder.setMaxVersions(get.getMaxVersions());
    if (get.getFilter() != null) {
        builder.setFilter(ProtobufUtil.toFilter(get.getFilter()));
    }
    for (Entry<byte[], TimeRange> cftr : get.getColumnFamilyTimeRange().entrySet()) {
        HBaseProtos.ColumnFamilyTimeRange.Builder b = HBaseProtos.ColumnFamilyTimeRange.newBuilder();
        b.setColumnFamily(UnsafeByteOperations.unsafeWrap(cftr.getKey()));
        b.setTimeRange(timeRangeToProto(cftr.getValue()));
        builder.addCfTimeRange(b);
    }
    TimeRange timeRange = get.getTimeRange();
    if (!timeRange.isAllTime()) {
        HBaseProtos.TimeRange.Builder timeRangeBuilder = HBaseProtos.TimeRange.newBuilder();
        timeRangeBuilder.setFrom(timeRange.getMin());
        timeRangeBuilder.setTo(timeRange.getMax());
        builder.setTimeRange(timeRangeBuilder.build());
    }
    Map<String, byte[]> attributes = get.getAttributesMap();
    if (!attributes.isEmpty()) {
        NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
        for (Map.Entry<String, byte[]> attribute : attributes.entrySet()) {
            attributeBuilder.setName(attribute.getKey());
            attributeBuilder.setValue(UnsafeByteOperations.unsafeWrap(attribute.getValue()));
            builder.addAttribute(attributeBuilder.build());
        }
    }
    if (get.hasFamilies()) {
        Column.Builder columnBuilder = Column.newBuilder();
        Map<byte[], NavigableSet<byte[]>> families = get.getFamilyMap();
        for (Map.Entry<byte[], NavigableSet<byte[]>> family : families.entrySet()) {
            NavigableSet<byte[]> qualifiers = family.getValue();
            columnBuilder.setFamily(UnsafeByteOperations.unsafeWrap(family.getKey()));
            columnBuilder.clearQualifier();
            if (qualifiers != null && qualifiers.size() > 0) {
                for (byte[] qualifier : qualifiers) {
                    columnBuilder.addQualifier(UnsafeByteOperations.unsafeWrap(qualifier));
                }
            }
            builder.addColumn(columnBuilder.build());
        }
    }
    if (get.getMaxResultsPerColumnFamily() >= 0) {
        builder.setStoreLimit(get.getMaxResultsPerColumnFamily());
    }
    if (get.getRowOffsetPerColumnFamily() > 0) {
        builder.setStoreOffset(get.getRowOffsetPerColumnFamily());
    }
    if (get.isCheckExistenceOnly()) {
        builder.setExistenceOnly(true);
    }
    if (get.getConsistency() != null && get.getConsistency() != Consistency.STRONG) {
        builder.setConsistency(toConsistency(get.getConsistency()));
    }
    Boolean loadColumnFamiliesOnDemand = get.getLoadColumnFamiliesOnDemandValue();
    if (loadColumnFamiliesOnDemand != null) {
        builder.setLoadColumnFamiliesOnDemand(loadColumnFamiliesOnDemand);
    }
    return builder.build();
}
Also used : NavigableSet(java.util.NavigableSet) ByteString(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) TimeRange(org.apache.hadoop.hbase.io.TimeRange) NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) Column(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column) Get(org.apache.hadoop.hbase.client.Get) Map(java.util.Map) HashMap(java.util.HashMap)
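
The conversion above is typically driven from the client side of an RPC. The following is a minimal sketch, not taken from the indexed sources, of building a client Get with an overall time range and a per-family range and handing it to the shaded ProtobufUtil; the class name ToGetExample is hypothetical.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical example class, not part of the HBase sources.
public class ToGetExample {

    public static ClientProtos.Get buildProtoGet() throws IOException {
        Get get = new Get(Bytes.toBytes("row-1"));
        get.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q1"));
        // Restrict the whole Get to the last hour; toGet() copies this into the proto's time range.
        long now = System.currentTimeMillis();
        get.setTimeRange(now - 3600_000L, now);
        // Per-family restriction; toGet() emits one ColumnFamilyTimeRange entry for it.
        get.setColumnFamilyTimeRange(Bytes.toBytes("cf"), 0L, now);
        return ProtobufUtil.toGet(get);
    }
}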

Example 2 with TimeRange

Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.

From class ProtobufUtil, method toGet (MutationProto overload).

/**
   * Convert a protocol buffer Mutate to a Get.
   * @param proto the protocol buffer Mutate to convert.
   * @param cellScanner scanner over the cells that carry the data for the mutation when the
   *          proto holds metadata only (may be null otherwise).
   * @return the converted client Get.
   * @throws IOException if the proto is malformed or its associated cells cannot be read.
   */
public static Get toGet(final MutationProto proto, final CellScanner cellScanner) throws IOException {
    MutationType type = proto.getMutateType();
    assert type == MutationType.INCREMENT || type == MutationType.APPEND : type.name();
    byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null;
    Get get = null;
    int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    if (cellCount > 0) {
        // The proto has metadata only and the data is separate to be found in the cellScanner.
        if (cellScanner == null) {
            throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + TextFormat.shortDebugString(proto));
        }
        for (int i = 0; i < cellCount; i++) {
            if (!cellScanner.advance()) {
                throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + " no cell returned: " + TextFormat.shortDebugString(proto));
            }
            Cell cell = cellScanner.current();
            if (get == null) {
                get = new Get(CellUtil.cloneRow(cell));
            }
            get.addColumn(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell));
        }
    } else {
        get = new Get(row);
        for (ColumnValue column : proto.getColumnValueList()) {
            byte[] family = column.getFamily().toByteArray();
            for (QualifierValue qv : column.getQualifierValueList()) {
                byte[] qualifier = qv.getQualifier().toByteArray();
                if (!qv.hasValue()) {
                    throw new DoNotRetryIOException("Missing required field: qualifier value");
                }
                get.addColumn(family, qualifier);
            }
        }
    }
    if (proto.hasTimeRange()) {
        TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
        get.setTimeRange(timeRange);
    }
    for (NameBytesPair attribute : proto.getAttributeList()) {
        get.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return get;
}
Also used : TimeRange(org.apache.hadoop.hbase.io.TimeRange) MutationType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType) NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Get(org.apache.hadoop.hbase.client.Get) QualifierValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ColumnValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) Cell(org.apache.hadoop.hbase.Cell) ByteBufferCell(org.apache.hadoop.hbase.ByteBufferCell)
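
The protoToTimeRange helper referenced above is internal to ProtobufUtil. If an equivalent conversion is needed elsewhere, it can be written directly against the generated HBaseProtos.TimeRange message; the sketch below is hypothetical (the class TimeRangeConversion is not part of HBase) and assumes the message exposes hasFrom()/getFrom() and hasTo()/getTo(), the same fields that toGet() in Example 1 populates.

import java.io.IOException;

import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

// Hypothetical helper, mirroring what ProtobufUtil's protoToTimeRange is expected to do.
public final class TimeRangeConversion {

    private TimeRangeConversion() {
    }

    public static TimeRange toTimeRange(HBaseProtos.TimeRange proto) throws IOException {
        // Missing bounds fall back to the open interval [0, Long.MAX_VALUE),
        // which matches the defaults of the no-arg TimeRange constructor.
        long from = proto.hasFrom() ? proto.getFrom() : 0L;
        long to = proto.hasTo() ? proto.getTo() : Long.MAX_VALUE;
        return new TimeRange(from, to);
    }
}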

Example 3 with TimeRange

Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.

From class TestTimeRangeTracker, method testTimeRangeInitialized.

@Test
public void testTimeRangeInitialized() {
    TimeRangeTracker src = new TimeRangeTracker();
    TimeRange tr = new TimeRange(System.currentTimeMillis());
    assertFalse(src.includesTimeRange(tr));
}
Also used : TimeRange(org.apache.hadoop.hbase.io.TimeRange) Test(org.junit.Test)
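
For contrast with the uninitialized tracker above, here is a small sketch (not from the indexed sources) of a tracker that has observed some timestamps; it assumes TimeRangeTracker#includeTimestamp(long) together with the includesTimeRange(TimeRange) check used in the test, and the class name is hypothetical.

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
import org.junit.Test;

// Hypothetical test class, not part of the HBase sources.
public class TimeRangeTrackerSketch {

    @Test
    public void initializedTrackerIncludesOverlappingRange() throws Exception {
        TimeRangeTracker tracker = new TimeRangeTracker();
        // Observing timestamps widens the tracked interval to [10, 20].
        tracker.includeTimestamp(10L);
        tracker.includeTimestamp(20L);
        // A query range that overlaps [10, 20] is reported as included ...
        assertTrue(tracker.includesTimeRange(new TimeRange(15L, 25L)));
        // ... while one that ends before the tracked interval is not.
        assertFalse(tracker.includesTimeRange(new TimeRange(1L, 5L)));
    }
}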

Example 4 with TimeRange

Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.

From class TestTimeRangeTracker, method testTimeRangeTrackerNullIsSameAsTimeRangeNull.

@Test
public void testTimeRangeTrackerNullIsSameAsTimeRangeNull() throws IOException {
    TimeRangeTracker src = new TimeRangeTracker(1, 2);
    byte[] bytes = Writables.getBytes(src);
    TimeRange tgt = TimeRangeTracker.getTimeRange(bytes);
    assertEquals(src.getMin(), tgt.getMin());
    assertEquals(src.getMax(), tgt.getMax());
}
Also used : TimeRange(org.apache.hadoop.hbase.io.TimeRange) Test(org.junit.Test)

Example 5 with TimeRange

Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.

From class TestTimeRangeTracker, method testRangeConstruction.

@Test
public void testRangeConstruction() throws IOException {
    TimeRange defaultRange = new TimeRange();
    assertEquals(0L, defaultRange.getMin());
    assertEquals(Long.MAX_VALUE, defaultRange.getMax());
    assertTrue(defaultRange.isAllTime());
    TimeRange oneArgRange = new TimeRange(0L);
    assertEquals(0L, oneArgRange.getMin());
    assertEquals(Long.MAX_VALUE, oneArgRange.getMax());
    assertTrue(oneArgRange.isAllTime());
    TimeRange oneArgRange2 = new TimeRange(1);
    assertEquals(1, oneArgRange2.getMin());
    assertEquals(Long.MAX_VALUE, oneArgRange2.getMax());
    assertFalse(oneArgRange2.isAllTime());
    TimeRange twoArgRange = new TimeRange(0L, Long.MAX_VALUE);
    assertEquals(0L, twoArgRange.getMin());
    assertEquals(Long.MAX_VALUE, twoArgRange.getMax());
    assertTrue(twoArgRange.isAllTime());
    TimeRange twoArgRange2 = new TimeRange(0L, Long.MAX_VALUE - 1);
    assertEquals(0L, twoArgRange2.getMin());
    assertEquals(Long.MAX_VALUE - 1, twoArgRange2.getMax());
    assertFalse(twoArgRange2.isAllTime());
    TimeRange twoArgRange3 = new TimeRange(1, Long.MAX_VALUE);
    assertEquals(1, twoArgRange3.getMin());
    assertEquals(Long.MAX_VALUE, twoArgRange3.getMax());
    assertFalse(twoArgRange3.isAllTime());
}
Also used : TimeRange(org.apache.hadoop.hbase.io.TimeRange) Test(org.junit.Test)
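
The constructor tests above imply TimeRange's half-open contract: the minimum timestamp is inclusive and the maximum exclusive. A short sketch (not from the indexed sources, assuming TimeRange#withinTimeRange(long); the class name is hypothetical) makes that contract explicit.

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.hbase.io.TimeRange;
import org.junit.Test;

// Hypothetical test class, not part of the HBase sources.
public class TimeRangeBoundsSketch {

    @Test
    public void boundsAreHalfOpen() throws Exception {
        // [10, 20): the minimum is inclusive, the maximum exclusive.
        TimeRange range = new TimeRange(10L, 20L);
        assertTrue(range.withinTimeRange(10L));
        assertTrue(range.withinTimeRange(19L));
        assertFalse(range.withinTimeRange(20L));
        // The no-arg range covers every timestamp.
        assertTrue(new TimeRange().withinTimeRange(Long.MAX_VALUE - 1));
    }
}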

Aggregations

TimeRange (org.apache.hadoop.hbase.io.TimeRange): 45
Test (org.junit.Test): 11
Map (java.util.Map): 10
Get (org.apache.hadoop.hbase.client.Get): 10
Scan (org.apache.hadoop.hbase.client.Scan): 10
Cell (org.apache.hadoop.hbase.Cell): 8
NavigableSet (java.util.NavigableSet): 7
NameBytesPair (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair): 7
HashMap (java.util.HashMap): 6
Filter (org.apache.hadoop.hbase.filter.Filter): 6
NameBytesPair (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair): 6
ByteString (com.google.protobuf.ByteString): 5
ArrayList (java.util.ArrayList): 5
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 5
Put (org.apache.hadoop.hbase.client.Put): 5
List (java.util.List): 4
Increment (org.apache.hadoop.hbase.client.Increment): 4
Result (org.apache.hadoop.hbase.client.Result): 4
Column (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column): 4
Column (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column): 4