Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.
Class ProtobufUtil, method toScan.
/**
 * Convert a client Scan to a protocol buffer Scan.
 *
 * @param scan the client Scan to convert
 * @return the converted protocol buffer Scan
 * @throws IOException if serializing the Scan (for example its filter) fails
 */
public static ClientProtos.Scan toScan(final Scan scan) throws IOException {
  ClientProtos.Scan.Builder scanBuilder = ClientProtos.Scan.newBuilder();
  scanBuilder.setCacheBlocks(scan.getCacheBlocks());
  if (scan.getBatch() > 0) {
    scanBuilder.setBatchSize(scan.getBatch());
  }
  if (scan.getMaxResultSize() > 0) {
    scanBuilder.setMaxResultSize(scan.getMaxResultSize());
  }
  if (scan.isSmall()) {
    scanBuilder.setSmall(scan.isSmall());
  }
  if (scan.getAllowPartialResults()) {
    scanBuilder.setAllowPartialResults(scan.getAllowPartialResults());
  }
  Boolean loadColumnFamiliesOnDemand = scan.getLoadColumnFamiliesOnDemandValue();
  if (loadColumnFamiliesOnDemand != null) {
    scanBuilder.setLoadColumnFamiliesOnDemand(loadColumnFamiliesOnDemand);
  }
  scanBuilder.setMaxVersions(scan.getMaxVersions());
  // Per-column-family time ranges are serialized separately from the scan-level range.
  for (Entry<byte[], TimeRange> cftr : scan.getColumnFamilyTimeRange().entrySet()) {
    HBaseProtos.ColumnFamilyTimeRange.Builder b = HBaseProtos.ColumnFamilyTimeRange.newBuilder();
    b.setColumnFamily(UnsafeByteOperations.unsafeWrap(cftr.getKey()));
    b.setTimeRange(timeRangeToProto(cftr.getValue()));
    scanBuilder.addCfTimeRange(b);
  }
  // The scan-level TimeRange is only serialized when it is narrower than the all-time default.
  TimeRange timeRange = scan.getTimeRange();
  if (!timeRange.isAllTime()) {
    HBaseProtos.TimeRange.Builder timeRangeBuilder = HBaseProtos.TimeRange.newBuilder();
    timeRangeBuilder.setFrom(timeRange.getMin());
    timeRangeBuilder.setTo(timeRange.getMax());
    scanBuilder.setTimeRange(timeRangeBuilder.build());
  }
  Map<String, byte[]> attributes = scan.getAttributesMap();
  if (!attributes.isEmpty()) {
    NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
    for (Map.Entry<String, byte[]> attribute : attributes.entrySet()) {
      attributeBuilder.setName(attribute.getKey());
      attributeBuilder.setValue(UnsafeByteOperations.unsafeWrap(attribute.getValue()));
      scanBuilder.addAttribute(attributeBuilder.build());
    }
  }
  byte[] startRow = scan.getStartRow();
  if (startRow != null && startRow.length > 0) {
    scanBuilder.setStartRow(UnsafeByteOperations.unsafeWrap(startRow));
  }
  byte[] stopRow = scan.getStopRow();
  if (stopRow != null && stopRow.length > 0) {
    scanBuilder.setStopRow(UnsafeByteOperations.unsafeWrap(stopRow));
  }
  if (scan.hasFilter()) {
    scanBuilder.setFilter(ProtobufUtil.toFilter(scan.getFilter()));
  }
  if (scan.hasFamilies()) {
    Column.Builder columnBuilder = Column.newBuilder();
    for (Map.Entry<byte[], NavigableSet<byte[]>> family : scan.getFamilyMap().entrySet()) {
      columnBuilder.setFamily(UnsafeByteOperations.unsafeWrap(family.getKey()));
      NavigableSet<byte[]> qualifiers = family.getValue();
      columnBuilder.clearQualifier();
      if (qualifiers != null && qualifiers.size() > 0) {
        for (byte[] qualifier : qualifiers) {
          columnBuilder.addQualifier(UnsafeByteOperations.unsafeWrap(qualifier));
        }
      }
      scanBuilder.addColumn(columnBuilder.build());
    }
  }
  if (scan.getMaxResultsPerColumnFamily() >= 0) {
    scanBuilder.setStoreLimit(scan.getMaxResultsPerColumnFamily());
  }
  if (scan.getRowOffsetPerColumnFamily() > 0) {
    scanBuilder.setStoreOffset(scan.getRowOffsetPerColumnFamily());
  }
  if (scan.isReversed()) {
    scanBuilder.setReversed(scan.isReversed());
  }
  if (scan.getConsistency() == Consistency.TIMELINE) {
    scanBuilder.setConsistency(toConsistency(scan.getConsistency()));
  }
  if (scan.getCaching() > 0) {
    scanBuilder.setCaching(scan.getCaching());
  }
  long mvccReadPoint = PackagePrivateFieldAccessor.getMvccReadPoint(scan);
  if (mvccReadPoint > 0) {
    scanBuilder.setMvccReadPoint(mvccReadPoint);
  }
  if (!scan.includeStartRow()) {
    scanBuilder.setIncludeStartRow(false);
  }
  if (scan.includeStopRow()) {
    scanBuilder.setIncludeStopRow(true);
  }
  if (scan.getReadType() != Scan.ReadType.DEFAULT) {
    scanBuilder.setReadType(toReadType(scan.getReadType()));
  }
  return scanBuilder.build();
}
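
A minimal usage sketch of this conversion, assuming the HBase 2.x shaded layout (the org.apache.hadoop.hbase.shaded.protobuf package names and the cf/q1 column are assumptions; adjust to the module actually on the classpath). It builds a client Scan with a bounded time range and converts it with toScan; only the non-default fields (here the start/stop rows, column, caching, and time range) end up in the resulting ClientProtos.Scan.

// Sketch only: package names assume the hbase-protocol-shaded module of HBase 2.x.
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanToProtoSketch {
  public static void main(String[] args) throws Exception {
    Scan scan = new Scan();
    scan.setStartRow(Bytes.toBytes("row-0000"));        // serialized because non-empty
    scan.setStopRow(Bytes.toBytes("row-9999"));
    scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q1"));
    scan.setCaching(100);                                // serialized because > 0
    scan.setTimeRange(0L, System.currentTimeMillis());   // narrower than all-time, so serialized

    ClientProtos.Scan protoScan = ProtobufUtil.toScan(scan);
    System.out.println(protoScan);                       // text dump of the populated fields
  }
}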
Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.
Class ProtobufUtil, method toMutation.
/**
 * Convert a client Increment to a protobuf Mutate.
 *
 * @param increment the client Increment to convert
 * @param builder the MutationProto.Builder to populate
 * @param nonce the nonce to attach, or HConstants.NO_NONCE to omit it
 * @return the converted protobuf Mutate
 */
public static MutationProto toMutation(final Increment increment, final MutationProto.Builder builder,
    long nonce) {
  builder.setRow(UnsafeByteOperations.unsafeWrap(increment.getRow()));
  builder.setMutateType(MutationType.INCREMENT);
  builder.setDurability(toDurability(increment.getDurability()));
  if (nonce != HConstants.NO_NONCE) {
    builder.setNonce(nonce);
  }
  // The Increment's TimeRange, which limits which existing cells are read when the
  // increment is applied, is carried over into the proto.
  TimeRange timeRange = increment.getTimeRange();
  setTimeRange(builder, timeRange);
  ColumnValue.Builder columnBuilder = ColumnValue.newBuilder();
  QualifierValue.Builder valueBuilder = QualifierValue.newBuilder();
  for (Map.Entry<byte[], List<Cell>> family : increment.getFamilyCellMap().entrySet()) {
    columnBuilder.setFamily(UnsafeByteOperations.unsafeWrap(family.getKey()));
    columnBuilder.clearQualifierValue();
    List<Cell> values = family.getValue();
    if (values != null && values.size() > 0) {
      for (Cell cell : values) {
        valueBuilder.clear();
        valueBuilder.setQualifier(UnsafeByteOperations.unsafeWrap(
            cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()));
        valueBuilder.setValue(UnsafeByteOperations.unsafeWrap(
            cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
        if (cell.getTagsLength() > 0) {
          valueBuilder.setTags(UnsafeByteOperations.unsafeWrap(
              cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()));
        }
        columnBuilder.addQualifierValue(valueBuilder.build());
      }
    }
    builder.addColumnValue(columnBuilder.build());
  }
  Map<String, byte[]> attributes = increment.getAttributesMap();
  if (!attributes.isEmpty()) {
    NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
    for (Map.Entry<String, byte[]> attribute : attributes.entrySet()) {
      attributeBuilder.setName(attribute.getKey());
      attributeBuilder.setValue(UnsafeByteOperations.unsafeWrap(attribute.getValue()));
      builder.addAttribute(attributeBuilder.build());
    }
  }
  return builder.build();
}
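
A hedged sketch of calling this overload directly; in normal client use it is invoked by the RPC layer rather than by application code. Package names again assume the shaded 2.x module, and the counter-row/cf/hits names are invented for illustration.

// Sketch only: normally invoked by the client RPC layer, shown standalone for illustration.
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.util.Bytes;

public class IncrementToProtoSketch {
  public static void main(String[] args) throws Exception {
    Increment increment = new Increment(Bytes.toBytes("counter-row"));
    increment.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("hits"), 1L);
    // A bounded TimeRange; it is copied into the proto's time_range field.
    increment.setTimeRange(0L, System.currentTimeMillis());

    MutationProto proto =
        ProtobufUtil.toMutation(increment, MutationProto.newBuilder(), HConstants.NO_NONCE);
    System.out.println(proto);
  }
}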
Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.
Class ProtobufUtil, method toGet.
/**
 * Convert a protocol buffer Mutate to a Get.
 *
 * @param proto the protocol buffer Mutate to convert
 * @param cellScanner the CellScanner carrying the cell data when the proto holds metadata only
 * @return the converted client Get
 * @throws IOException if the cell data advertised by the proto cannot be read
 */
public static Get toGet(final MutationProto proto, final CellScanner cellScanner) throws IOException {
  MutationType type = proto.getMutateType();
  assert type == MutationType.INCREMENT || type == MutationType.APPEND : type.name();
  byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null;
  Get get = null;
  int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
  if (cellCount > 0) {
    // The proto has metadata only and the data is separate to be found in the cellScanner.
    if (cellScanner == null) {
      throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: "
          + TextFormat.shortDebugString(proto));
    }
    for (int i = 0; i < cellCount; i++) {
      if (!cellScanner.advance()) {
        throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i
            + " no cell returned: " + TextFormat.shortDebugString(proto));
      }
      Cell cell = cellScanner.current();
      if (get == null) {
        get = new Get(Bytes.copy(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
      }
      get.addColumn(
          Bytes.copy(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()),
          Bytes.copy(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()));
    }
  } else {
    get = new Get(row);
    for (ColumnValue column : proto.getColumnValueList()) {
      byte[] family = column.getFamily().toByteArray();
      for (QualifierValue qv : column.getQualifierValueList()) {
        byte[] qualifier = qv.getQualifier().toByteArray();
        if (!qv.hasValue()) {
          throw new DoNotRetryIOException("Missing required field: qualifier value");
        }
        get.addColumn(family, qualifier);
      }
    }
  }
  // Carry the mutation's TimeRange over to the Get so the read is scoped to the same window.
  if (proto.hasTimeRange()) {
    TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
    get.setTimeRange(timeRange);
  }
  for (NameBytesPair attribute : proto.getAttributeList()) {
    get.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
  }
  return get;
}
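
A minimal sketch of how this direction might be exercised: rather than hand-building the MutationProto (which would require the relocated protobuf ByteString type), it reuses the toMutation overload shown earlier and then derives the Get that would be used to read the current cell values. Package names assume the shaded 2.x module, and the column names are invented.

// Sketch only: composes toMutation and toGet; package names assume hbase-protocol-shaded.
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.util.Bytes;

public class MutationProtoToGetSketch {
  public static void main(String[] args) throws Exception {
    Increment increment = new Increment(Bytes.toBytes("counter-row"));
    increment.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("hits"), 1L);

    MutationProto proto =
        ProtobufUtil.toMutation(increment, MutationProto.newBuilder(), HConstants.NO_NONCE);

    // No CellScanner is needed: toMutation serialized the cell data inline in the proto.
    Get get = ProtobufUtil.toGet(proto, null);
    System.out.println(get);  // a Get for counter-row, column cf:hits
  }
}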
Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.
Class ProtobufUtil, method toMutation (a second variant that wraps byte arrays with ByteStringer instead of UnsafeByteOperations).
/**
 * Convert a client Increment to a protobuf Mutate.
 *
 * @param increment the client Increment to convert
 * @param builder the MutationProto.Builder to populate
 * @param nonce the nonce to attach, or HConstants.NO_NONCE to omit it
 * @return the converted protobuf Mutate
 */
public static MutationProto toMutation(final Increment increment, final MutationProto.Builder builder,
    long nonce) {
  builder.setRow(ByteStringer.wrap(increment.getRow()));
  builder.setMutateType(MutationType.INCREMENT);
  builder.setDurability(toDurability(increment.getDurability()));
  if (nonce != HConstants.NO_NONCE) {
    builder.setNonce(nonce);
  }
  TimeRange timeRange = increment.getTimeRange();
  setTimeRange(builder, timeRange);
  ColumnValue.Builder columnBuilder = ColumnValue.newBuilder();
  QualifierValue.Builder valueBuilder = QualifierValue.newBuilder();
  for (Map.Entry<byte[], List<Cell>> family : increment.getFamilyCellMap().entrySet()) {
    columnBuilder.setFamily(ByteStringer.wrap(family.getKey()));
    columnBuilder.clearQualifierValue();
    List<Cell> values = family.getValue();
    if (values != null && values.size() > 0) {
      for (Cell cell : values) {
        valueBuilder.clear();
        valueBuilder.setQualifier(ByteStringer.wrap(
            cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()));
        valueBuilder.setValue(ByteStringer.wrap(
            cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
        if (cell.getTagsLength() > 0) {
          valueBuilder.setTags(ByteStringer.wrap(
              cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()));
        }
        columnBuilder.addQualifierValue(valueBuilder.build());
      }
    }
    builder.addColumnValue(columnBuilder.build());
  }
  Map<String, byte[]> attributes = increment.getAttributesMap();
  if (!attributes.isEmpty()) {
    NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
    for (Map.Entry<String, byte[]> attribute : attributes.entrySet()) {
      attributeBuilder.setName(attribute.getKey());
      attributeBuilder.setValue(ByteStringer.wrap(attribute.getValue()));
      builder.addAttribute(attributeBuilder.build());
    }
  }
  return builder.build();
}
Use of org.apache.hadoop.hbase.io.TimeRange in project hbase by apache.
Class ProtobufUtil, method toIncrement.
/**
 * Convert a protocol buffer Mutate to an Increment.
 *
 * @param proto the protocol buffer Mutate to convert
 * @param cellScanner the CellScanner carrying the cell data when the proto holds metadata only
 * @return the converted client Increment
 * @throws IOException if the cell data advertised by the proto cannot be read
 */
public static Increment toIncrement(final MutationProto proto, final CellScanner cellScanner)
    throws IOException {
  MutationType type = proto.getMutateType();
  assert type == MutationType.INCREMENT : type.name();
  byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null;
  Increment increment = null;
  int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
  if (cellCount > 0) {
    // The proto has metadata only and the data is separate to be found in the cellScanner.
    if (cellScanner == null) {
      throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: "
          + TextFormat.shortDebugString(proto));
    }
    for (int i = 0; i < cellCount; i++) {
      if (!cellScanner.advance()) {
        throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i
            + " no cell returned: " + TextFormat.shortDebugString(proto));
      }
      Cell cell = cellScanner.current();
      if (increment == null) {
        increment = new Increment(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
      }
      increment.add(cell);
    }
  } else {
    increment = new Increment(row);
    for (ColumnValue column : proto.getColumnValueList()) {
      byte[] family = column.getFamily().toByteArray();
      for (QualifierValue qv : column.getQualifierValueList()) {
        byte[] qualifier = qv.getQualifier().toByteArray();
        if (!qv.hasValue()) {
          throw new DoNotRetryIOException("Missing required field: qualifier value");
        }
        byte[] value = qv.getValue().toByteArray();
        byte[] tags = null;
        if (qv.hasTags()) {
          tags = qv.getTags().toByteArray();
        }
        increment.add(CellUtil.createCell(row, family, qualifier, qv.getTimestamp(),
            KeyValue.Type.Put, value, tags));
      }
    }
  }
  // Restore the TimeRange carried in the proto onto the client Increment.
  if (proto.hasTimeRange()) {
    TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
    increment.setTimeRange(timeRange.getMin(), timeRange.getMax());
  }
  increment.setDurability(toDurability(proto.getDurability()));
  for (NameBytesPair attribute : proto.getAttributeList()) {
    increment.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
  }
  return increment;
}
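
A hedged round-trip sketch combining the two directions: a client Increment is serialized with toMutation and deserialized with toIncrement, and the TimeRange set on the original survives the trip via the proto's time_range field. Package names and the cf/hits column are assumptions for illustration; both calls must come from the same ProtobufUtil variant so the generated MutationProto types match.

// Sketch only: package names assume the unshaded hbase-protocol module; the shaded
// org.apache.hadoop.hbase.shaded.protobuf variant is used the same way.
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.util.Bytes;

public class IncrementRoundTripSketch {
  public static void main(String[] args) throws Exception {
    Increment original = new Increment(Bytes.toBytes("counter-row"));
    original.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("hits"), 5L);
    original.setTimeRange(1000L, 2000L);  // only cells in [1000, 2000) are read when incrementing

    MutationProto proto =
        ProtobufUtil.toMutation(original, MutationProto.newBuilder(), HConstants.NO_NONCE);

    // Cells were serialized inline, so no CellScanner is needed on the way back.
    Increment restored = ProtobufUtil.toIncrement(proto, null);
    System.out.println(restored.getTimeRange());  // the [1000, 2000) range restored from the proto
  }
}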