Example 1 with Column

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column in the Apache HBase project.

From the class ProtobufUtil, method toScan:

/**
   * Convert a protocol buffer Scan to a client Scan
   *
   * @param proto the protocol buffer Scan to convert
   * @return the converted client Scan
   * @throws IOException if the protocol buffer scan cannot be converted, e.g. when its filter fails to deserialize
   */
public static Scan toScan(final ClientProtos.Scan proto) throws IOException {
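    // Defaults mirror a fresh client Scan: unbounded row range, inclusive
    // start row, exclusive stop row.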
    byte[] startRow = HConstants.EMPTY_START_ROW;
    byte[] stopRow = HConstants.EMPTY_END_ROW;
    boolean includeStartRow = true;
    boolean includeStopRow = false;
    if (proto.hasStartRow()) {
        startRow = proto.getStartRow().toByteArray();
    }
    if (proto.hasStopRow()) {
        stopRow = proto.getStopRow().toByteArray();
    }
    if (proto.hasIncludeStartRow()) {
        includeStartRow = proto.getIncludeStartRow();
    }
    if (proto.hasIncludeStopRow()) {
        includeStopRow = proto.getIncludeStopRow();
    }
    Scan scan = new Scan().withStartRow(startRow, includeStartRow).withStopRow(stopRow, includeStopRow);
    if (proto.hasCacheBlocks()) {
        scan.setCacheBlocks(proto.getCacheBlocks());
    }
    if (proto.hasMaxVersions()) {
        scan.setMaxVersions(proto.getMaxVersions());
    }
    if (proto.hasStoreLimit()) {
        scan.setMaxResultsPerColumnFamily(proto.getStoreLimit());
    }
    if (proto.hasStoreOffset()) {
        scan.setRowOffsetPerColumnFamily(proto.getStoreOffset());
    }
    if (proto.hasLoadColumnFamiliesOnDemand()) {
        scan.setLoadColumnFamiliesOnDemand(proto.getLoadColumnFamiliesOnDemand());
    }
    if (proto.getCfTimeRangeCount() > 0) {
        for (HBaseProtos.ColumnFamilyTimeRange cftr : proto.getCfTimeRangeList()) {
            TimeRange timeRange = protoToTimeRange(cftr.getTimeRange());
            scan.setColumnFamilyTimeRange(cftr.getColumnFamily().toByteArray(), timeRange);
        }
    }
    if (proto.hasTimeRange()) {
        TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
        scan.setTimeRange(timeRange);
    }
    if (proto.hasFilter()) {
        FilterProtos.Filter filter = proto.getFilter();
        scan.setFilter(ProtobufUtil.toFilter(filter));
    }
    if (proto.hasBatchSize()) {
        scan.setBatch(proto.getBatchSize());
    }
    if (proto.hasMaxResultSize()) {
        scan.setMaxResultSize(proto.getMaxResultSize());
    }
    if (proto.hasSmall()) {
        scan.setSmall(proto.getSmall());
    }
    if (proto.hasAllowPartialResults()) {
        scan.setAllowPartialResults(proto.getAllowPartialResults());
    }
    for (NameBytesPair attribute : proto.getAttributeList()) {
        scan.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    if (proto.getColumnCount() > 0) {
        for (Column column : proto.getColumnList()) {
            byte[] family = column.getFamily().toByteArray();
            if (column.getQualifierCount() > 0) {
                for (ByteString qualifier : column.getQualifierList()) {
                    scan.addColumn(family, qualifier.toByteArray());
                }
            } else {
                scan.addFamily(family);
            }
        }
    }
    if (proto.hasReversed()) {
        scan.setReversed(proto.getReversed());
    }
    if (proto.hasConsistency()) {
        scan.setConsistency(toConsistency(proto.getConsistency()));
    }
    if (proto.hasCaching()) {
        scan.setCaching(proto.getCaching());
    }
    if (proto.hasMvccReadPoint()) {
        PackagePrivateFieldAccessor.setMvccReadPoint(scan, proto.getMvccReadPoint());
    }
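    // A small scan is always served via pread; otherwise honor an
    // explicitly requested read type.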
    if (scan.isSmall()) {
        scan.setReadType(Scan.ReadType.PREAD);
    } else if (proto.hasReadType()) {
        scan.setReadType(toReadType(proto.getReadType()));
    }
    return scan;
}
Also used: TimeRange (org.apache.hadoop.hbase.io.TimeRange), NameBytesPair (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair), Column (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column), ByteString (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString), FilterProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos), Scan (org.apache.hadoop.hbase.client.Scan), HBaseProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos)
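
A quick way to exercise this converter is to round-trip a client Scan through its protocol buffer form. The sketch below is a hypothetical usage example: it assumes the companion ProtobufUtil.toScan(Scan) serializer defined in the same class, and the row keys, column family, and class name are made up for illustration.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanRoundTripExample {
    public static void main(String[] args) throws IOException {
        // Build a client Scan that populates several of the fields toScan() reads back.
        Scan scan = new Scan()
            .withStartRow(Bytes.toBytes("row-0000"), true)
            .withStopRow(Bytes.toBytes("row-9999"), false)
            .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q1"))
            .setCaching(100);

        // Serialize to the wire form (assumes the companion ProtobufUtil.toScan(Scan)
        // serializer in the same class), then convert back with the method shown above.
        ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
        Scan roundTripped = ProtobufUtil.toScan(proto);

        // Fields absent from the proto fall back to client defaults,
        // e.g. includeStopRow stays false.
        System.out.println(Bytes.toString(roundTripped.getStartRow())); // row-0000
        System.out.println(roundTripped.getCaching());                  // 100
    }
}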

Example 2 with Column

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column in the Apache HBase project.

From the class ProtobufUtil, method toGet:

/**
   * Convert a protocol buffer Get to a client Get
   *
   * @param proto the protocol buffer Get to convert
   * @return the converted client Get
   * @throws IOException if the protocol buffer get cannot be converted, e.g. when its filter fails to deserialize
   */
public static Get toGet(final ClientProtos.Get proto) throws IOException {
    if (proto == null) {
        return null;
    }
    byte[] row = proto.getRow().toByteArray();
    Get get = new Get(row);
    if (proto.hasCacheBlocks()) {
        get.setCacheBlocks(proto.getCacheBlocks());
    }
    if (proto.hasMaxVersions()) {
        get.setMaxVersions(proto.getMaxVersions());
    }
    if (proto.hasStoreLimit()) {
        get.setMaxResultsPerColumnFamily(proto.getStoreLimit());
    }
    if (proto.hasStoreOffset()) {
        get.setRowOffsetPerColumnFamily(proto.getStoreOffset());
    }
    if (proto.getCfTimeRangeCount() > 0) {
        for (HBaseProtos.ColumnFamilyTimeRange cftr : proto.getCfTimeRangeList()) {
            TimeRange timeRange = protoToTimeRange(cftr.getTimeRange());
            get.setColumnFamilyTimeRange(cftr.getColumnFamily().toByteArray(), timeRange);
        }
    }
    if (proto.hasTimeRange()) {
        TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
        get.setTimeRange(timeRange);
    }
    if (proto.hasFilter()) {
        FilterProtos.Filter filter = proto.getFilter();
        get.setFilter(ProtobufUtil.toFilter(filter));
    }
    for (NameBytesPair attribute : proto.getAttributeList()) {
        get.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    if (proto.getColumnCount() > 0) {
        for (Column column : proto.getColumnList()) {
            byte[] family = column.getFamily().toByteArray();
            if (column.getQualifierCount() > 0) {
                for (ByteString qualifier : column.getQualifierList()) {
                    get.addColumn(family, qualifier.toByteArray());
                }
            } else {
                get.addFamily(family);
            }
        }
    }
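    // An existence-only Get reports whether the row exists instead of returning its cells.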
    if (proto.hasExistenceOnly() && proto.getExistenceOnly()) {
        get.setCheckExistenceOnly(true);
    }
    if (proto.hasConsistency()) {
        get.setConsistency(toConsistency(proto.getConsistency()));
    }
    if (proto.hasLoadColumnFamiliesOnDemand()) {
        get.setLoadColumnFamiliesOnDemand(proto.getLoadColumnFamiliesOnDemand());
    }
    return get;
}
Also used: TimeRange (org.apache.hadoop.hbase.io.TimeRange), NameBytesPair (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair), Column (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column), ByteString (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString), Get (org.apache.hadoop.hbase.client.Get), FilterProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos), HBaseProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos)
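
To see how the Column list maps onto the client Get, one can hand-build a ClientProtos.Get with the generated protobuf builders and convert it. This is a minimal sketch; the row, family, and qualifier values are illustrative.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;

public class GetFromProtoExample {
    public static void main(String[] args) throws IOException {
        // Hand-build a protobuf Get: one Column with an explicit qualifier and
        // one Column with no qualifiers, which toGet() turns into addFamily().
        ClientProtos.Get proto = ClientProtos.Get.newBuilder()
            .setRow(ByteString.copyFromUtf8("row-1"))
            .addColumn(ClientProtos.Column.newBuilder()
                .setFamily(ByteString.copyFromUtf8("cf1"))
                .addQualifier(ByteString.copyFromUtf8("q1")))
            .addColumn(ClientProtos.Column.newBuilder()
                .setFamily(ByteString.copyFromUtf8("cf2")))
            .setExistenceOnly(true)
            .build();

        Get get = ProtobufUtil.toGet(proto);

        // cf1 arrives via addColumn(family, qualifier); cf2 via addFamily(family).
        System.out.println(get.getFamilyMap().size());   // 2
        System.out.println(get.isCheckExistenceOnly());  // true
    }
}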

Aggregations

TimeRange (org.apache.hadoop.hbase.io.TimeRange): 2 uses
ByteString (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString): 2 uses
Column (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column): 2 uses
FilterProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos): 2 uses
HBaseProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos): 2 uses
NameBytesPair (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair): 2 uses
Get (org.apache.hadoop.hbase.client.Get): 1 use
Scan (org.apache.hadoop.hbase.client.Scan): 1 use