Example 26 with ByteString

Use of org.apache.hbase.thirdparty.com.google.protobuf.ByteString in the Apache HBase project.

The class ProtobufUtil, method toBulkLoadDescriptor.

public static WALProtos.BulkLoadDescriptor toBulkLoadDescriptor(TableName tableName,
        ByteString encodedRegionName, Map<byte[], List<Path>> storeFiles,
        Map<String, Long> storeFilesSize, long bulkloadSeqId, List<String> clusterIds, boolean replicate) {
    BulkLoadDescriptor.Builder desc = BulkLoadDescriptor.newBuilder()
        .setTableName(ProtobufUtil.toProtoTableName(tableName))
        .setEncodedRegionName(encodedRegionName)
        .setBulkloadSeqNum(bulkloadSeqId)
        .setReplicate(replicate);
    if (clusterIds != null) {
        desc.addAllClusterIds(clusterIds);
    }
    for (Map.Entry<byte[], List<Path>> entry : storeFiles.entrySet()) {
        WALProtos.StoreDescriptor.Builder builder = StoreDescriptor.newBuilder()
            .setFamilyName(UnsafeByteOperations.unsafeWrap(entry.getKey()))
            // the store home dir is relative to the region directory
            .setStoreHomeDir(Bytes.toString(entry.getKey()));
        for (Path path : entry.getValue()) {
            String name = path.getName();
            builder.addStoreFile(name);
            Long size = storeFilesSize.get(name) == null ? (Long) 0L : storeFilesSize.get(name);
            builder.setStoreFileSizeBytes(size);
        }
        desc.addStores(builder);
    }
    return desc.build();
}
Also used : Path(org.apache.hadoop.fs.Path) BulkLoadDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor) ArrayList(java.util.ArrayList) List(java.util.List) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) Map(java.util.Map) HashMap(java.util.HashMap) StoreDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor)
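
A minimal usage sketch follows; the helper name exampleBulkLoadDescriptor and the table, region, file and size values ("test_table", "1588230740", "hfile-1", 1024 bytes) are illustrative assumptions, not code from the HBase sources. Besides the classes listed above it needs java.util.TreeMap and java.util.Collections.

// Hedged sketch: build a descriptor for one column family holding a single HFile.
static WALProtos.BulkLoadDescriptor exampleBulkLoadDescriptor() {
    TableName table = TableName.valueOf("test_table");
    // encoded region names are plain strings, so copyFromUtf8 is a convenient wrapper
    ByteString encodedRegion = ByteString.copyFromUtf8("1588230740");
    Map<byte[], List<Path>> storeFiles = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    storeFiles.put(Bytes.toBytes("cf"), Collections.singletonList(new Path("hfile-1")));
    Map<String, Long> storeFilesSize = new HashMap<>();
    storeFilesSize.put("hfile-1", 1024L);
    return ProtobufUtil.toBulkLoadDescriptor(table, encodedRegion, storeFiles, storeFilesSize,
        42L /* bulkloadSeqId */, null /* clusterIds, optional */, true /* replicate */);
}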

Example 27 with ByteString

Use of org.apache.hbase.thirdparty.com.google.protobuf.ByteString in the Apache HBase project.

The class ProtobufUtil, method toGet.

/**
 * Convert a protocol buffer Get to a client Get
 *
 * @param proto the protocol buffer Get to convert
 * @return the converted client Get
 * @throws IOException if the conversion fails, e.g. when the embedded filter cannot be deserialized
 */
public static Get toGet(final ClientProtos.Get proto) throws IOException {
    if (proto == null)
        return null;
    byte[] row = proto.getRow().toByteArray();
    Get get = new Get(row);
    if (proto.hasCacheBlocks()) {
        get.setCacheBlocks(proto.getCacheBlocks());
    }
    if (proto.hasMaxVersions()) {
        get.readVersions(proto.getMaxVersions());
    }
    if (proto.hasStoreLimit()) {
        get.setMaxResultsPerColumnFamily(proto.getStoreLimit());
    }
    if (proto.hasStoreOffset()) {
        get.setRowOffsetPerColumnFamily(proto.getStoreOffset());
    }
    if (proto.getCfTimeRangeCount() > 0) {
        for (HBaseProtos.ColumnFamilyTimeRange cftr : proto.getCfTimeRangeList()) {
            TimeRange timeRange = toTimeRange(cftr.getTimeRange());
            get.setColumnFamilyTimeRange(cftr.getColumnFamily().toByteArray(), timeRange.getMin(), timeRange.getMax());
        }
    }
    if (proto.hasTimeRange()) {
        TimeRange timeRange = toTimeRange(proto.getTimeRange());
        get.setTimeRange(timeRange.getMin(), timeRange.getMax());
    }
    if (proto.hasFilter()) {
        FilterProtos.Filter filter = proto.getFilter();
        get.setFilter(ProtobufUtil.toFilter(filter));
    }
    for (NameBytesPair attribute : proto.getAttributeList()) {
        get.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    if (proto.getColumnCount() > 0) {
        for (Column column : proto.getColumnList()) {
            byte[] family = column.getFamily().toByteArray();
            if (column.getQualifierCount() > 0) {
                for (ByteString qualifier : column.getQualifierList()) {
                    get.addColumn(family, qualifier.toByteArray());
                }
            } else {
                get.addFamily(family);
            }
        }
    }
    if (proto.hasExistenceOnly() && proto.getExistenceOnly()) {
        get.setCheckExistenceOnly(true);
    }
    if (proto.hasConsistency()) {
        get.setConsistency(toConsistency(proto.getConsistency()));
    }
    if (proto.hasLoadColumnFamiliesOnDemand()) {
        get.setLoadColumnFamiliesOnDemand(proto.getLoadColumnFamiliesOnDemand());
    }
    return get;
}
Also used : TimeRange(org.apache.hadoop.hbase.io.TimeRange) NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) Column(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) Get(org.apache.hadoop.hbase.client.Get) FilterProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos) HBaseProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos)
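
As a quick follow-up, here is a hedged round-trip sketch; the row, family and qualifier values ("row-1", "cf", "q1") and the wrapper name exampleToGet are assumptions for illustration only.

static Get exampleToGet() throws IOException {
    ClientProtos.Get protoGet = ClientProtos.Get.newBuilder()
        .setRow(ByteString.copyFromUtf8("row-1"))
        .setMaxVersions(3)
        .setCacheBlocks(false)
        .addColumn(Column.newBuilder()
            .setFamily(ByteString.copyFromUtf8("cf"))
            .addQualifier(ByteString.copyFromUtf8("q1")))
        .build();
    // The converted client Get reads up to 3 versions of cf:q1 for "row-1"
    // with block caching disabled.
    return ProtobufUtil.toGet(protoGet);
}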

Example 28 with ByteString

Use of org.apache.hbase.thirdparty.com.google.protobuf.ByteString in the Apache HBase project.

The class ProtobufUtil, method getRegionEncodedName.

/**
 * Find the HRegion encoded name based on a region specifier
 *
 * @param regionSpecifier the region specifier
 * @return the corresponding region's encoded name
 * @throws DoNotRetryIOException if the specifier type is unsupported
 */
public static String getRegionEncodedName(final RegionSpecifier regionSpecifier) throws DoNotRetryIOException {
    ByteString value = regionSpecifier.getValue();
    RegionSpecifierType type = regionSpecifier.getType();
    switch(type) {
        case REGION_NAME:
            return org.apache.hadoop.hbase.client.RegionInfo.encodeRegionName(value.toByteArray());
        case ENCODED_REGION_NAME:
            return value.toStringUtf8();
        default:
            throw new DoNotRetryIOException("Unsupported region specifier type: " + type);
    }
}
Also used : RegionSpecifierType(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString)
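
A short usage sketch for the ENCODED_REGION_NAME branch follows; the value "1588230740" is just an illustrative encoded region name and the wrapper exampleEncodedName is not part of ProtobufUtil.

static String exampleEncodedName() throws DoNotRetryIOException {
    RegionSpecifier spec = RegionSpecifier.newBuilder()
        .setType(RegionSpecifierType.ENCODED_REGION_NAME)
        .setValue(ByteString.copyFromUtf8("1588230740"))
        .build();
    // For ENCODED_REGION_NAME the UTF-8 value is returned as-is: "1588230740".
    // A REGION_NAME specifier would instead go through RegionInfo.encodeRegionName.
    return ProtobufUtil.getRegionEncodedName(spec);
}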

Example 29 with ByteString

Use of org.apache.hbase.thirdparty.com.google.protobuf.ByteString in the Apache HBase project.

The class QuotaTableUtil, method extractQuotaSnapshot.

/**
 * Extracts the {@link SpaceViolationPolicy} and {@link TableName} from the provided
 * {@link Result} and adds them to the given {@link Map}. If the result does not contain
 * the expected information or the serialized policy in the value is invalid, this method
 * will throw an {@link IllegalArgumentException}.
 *
 * @param result A row from the quota table.
 * @param snapshots A map of snapshots to add the result of this method into.
 */
public static void extractQuotaSnapshot(Result result, Map<TableName, SpaceQuotaSnapshot> snapshots) {
    byte[] row = Objects.requireNonNull(result).getRow();
    if (row == null || row.length == 0) {
        throw new IllegalArgumentException("Provided result had a null row");
    }
    final TableName targetTableName = getTableFromRowKey(row);
    Cell c = result.getColumnLatestCell(QUOTA_FAMILY_USAGE, QUOTA_QUALIFIER_POLICY);
    if (c == null) {
        throw new IllegalArgumentException("Result did not contain the expected column " + QUOTA_POLICY_COLUMN + ", " + result.toString());
    }
    ByteString buffer = UnsafeByteOperations.unsafeWrap(c.getValueArray(), c.getValueOffset(), c.getValueLength());
    try {
        QuotaProtos.SpaceQuotaSnapshot snapshot = QuotaProtos.SpaceQuotaSnapshot.parseFrom(buffer);
        snapshots.put(targetTableName, SpaceQuotaSnapshot.toSpaceQuotaSnapshot(snapshot));
    } catch (InvalidProtocolBufferException e) {
        throw new IllegalArgumentException("Result did not contain a valid SpaceQuota protocol buffer message", e);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) InvalidProtocolBufferException(org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException) Cell(org.apache.hadoop.hbase.Cell) QuotaProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos)
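
To show where this extractor typically sits, here is a hedged sketch that scans the quota table and feeds every row into extractQuotaSnapshot. The wrapper exampleReadSnapshots is an assumption of this sketch, and the scan helper makeQuotaSnapshotScan() is assumed to narrow the scan to the snapshot column; verify it against the QuotaTableUtil version you build against.

static Map<TableName, SpaceQuotaSnapshot> exampleReadSnapshots(Connection conn) throws IOException {
    Map<TableName, SpaceQuotaSnapshot> snapshots = new HashMap<>();
    // "hbase:quota" is the system quota table; without narrowing the scan,
    // rows lacking the policy column would make extractQuotaSnapshot throw.
    try (Table quotaTable = conn.getTable(TableName.valueOf("hbase:quota"));
         ResultScanner scanner = quotaTable.getScanner(QuotaTableUtil.makeQuotaSnapshotScan())) {
        for (Result result : scanner) {
            QuotaTableUtil.extractQuotaSnapshot(result, snapshots);
        }
    }
    return snapshots;
}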

Example 30 with ByteString

Use of org.apache.hbase.thirdparty.com.google.protobuf.ByteString in the Apache HBase project.

The class TestProtobufUtil, method testProcedureInfo.

@Test
public void testProcedureInfo() {
    ProcedureProtos.Procedure.Builder builder = createProcedureBuilder(1);
    ByteString stateBytes = ByteString.copyFrom(new byte[] { 65 });
    BytesValue state = BytesValue.newBuilder().setValue(stateBytes).build();
    builder.addStateMessage(Any.pack(state));
    ProcedureProtos.Procedure procedure = builder.build();
    String procJson = ProtobufUtil.toProcedureJson(Lists.newArrayList(procedure));
    assertEquals("[{" + "\"className\":\"java.lang.Object\"," + "\"procId\":\"1\"," + "\"submittedTime\":\"0\"," + "\"state\":\"RUNNABLE\"," + "\"lastUpdate\":\"0\"," + "\"stateMessage\":[{\"value\":\"QQ==\"}]" + "}]", procJson);
}
Also used : ByteString (org.apache.hbase.thirdparty.com.google.protobuf.ByteString) BytesValue (org.apache.hbase.thirdparty.com.google.protobuf.BytesValue) ProcedureProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos) Test (org.junit.Test)
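
One detail worth noting: the expected "stateMessage" value "QQ==" is simply the Base64 encoding of the single state byte 65 ('A') packed into the BytesValue. A one-line check with the standard JDK encoder (used here purely for illustration):

// Prints "QQ==", matching the value rendered into the procedure JSON above.
System.out.println(java.util.Base64.getEncoder().encodeToString(new byte[] { 65 }));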

Aggregations

ByteString (org.apache.hbase.thirdparty.com.google.protobuf.ByteString): 36
IOException (java.io.IOException): 22
ArrayList (java.util.ArrayList): 18
Test (org.junit.Test): 11
AggregateResponse (org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateResponse): 9
Connection (org.apache.hadoop.hbase.client.Connection): 8
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 7
List (java.util.List): 7
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 7
Scan (org.apache.hadoop.hbase.client.Scan): 7
Cell (org.apache.hadoop.hbase.Cell): 6
CoprocessorRpcUtils (org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils): 6
AggregateRequest (org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateRequest): 6
AggregateService (org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.AggregateService): 6
GetAuthsResponse (org.apache.hadoop.hbase.shaded.protobuf.generated.VisibilityLabelsProtos.GetAuthsResponse): 6
VisibilityLabelsResponse (org.apache.hadoop.hbase.shaded.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse): 6
RpcCallback (org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback): 6
RpcController (org.apache.hbase.thirdparty.com.google.protobuf.RpcController): 6
ByteBuffer (java.nio.ByteBuffer): 4
CellScanner (org.apache.hadoop.hbase.CellScanner): 4