Example 16 with ServerRpcController

Use of org.apache.hadoop.hbase.ipc.ServerRpcController in project phoenix by apache.

From class ConnectionQueryServicesImpl, method getTable.

@Override
public MetaDataMutationResult getTable(final PName tenantId, final byte[] schemaBytes, final byte[] tableBytes, final long tableTimestamp, final long clientTimestamp) throws SQLException {
    final byte[] tenantIdBytes = tenantId == null ? ByteUtil.EMPTY_BYTE_ARRAY : tenantId.getBytes();
    byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes, schemaBytes, tableBytes);
    return metaDataCoprocessorExec(tableKey, new Batch.Call<MetaDataService, MetaDataResponse>() {

        @Override
        public MetaDataResponse call(MetaDataService instance) throws IOException {
            ServerRpcController controller = new ServerRpcController();
            BlockingRpcCallback<MetaDataResponse> rpcCallback = new BlockingRpcCallback<MetaDataResponse>();
            GetTableRequest.Builder builder = GetTableRequest.newBuilder();
            builder.setTenantId(ByteStringer.wrap(tenantIdBytes));
            builder.setSchemaName(ByteStringer.wrap(schemaBytes));
            builder.setTableName(ByteStringer.wrap(tableBytes));
            builder.setTableTimestamp(tableTimestamp);
            builder.setClientTimestamp(clientTimestamp);
            builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
            instance.getTable(controller, builder.build(), rpcCallback);
            if (controller.getFailedOn() != null) {
                throw controller.getFailedOn();
            }
            return rpcCallback.get();
        }
    });
}
Also used: MetaDataResponse (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse), MetaDataService (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService), Batch (org.apache.hadoop.hbase.client.coprocessor.Batch), KeyValueBuilder (org.apache.phoenix.hbase.index.util.KeyValueBuilder), NonTxIndexBuilder (org.apache.phoenix.hbase.index.covered.NonTxIndexBuilder), ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder), PhoenixIndexBuilder (org.apache.phoenix.index.PhoenixIndexBuilder), BlockingRpcCallback (org.apache.hadoop.hbase.ipc.BlockingRpcCallback), IOException (java.io.IOException), PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException), ServerRpcController (org.apache.hadoop.hbase.ipc.ServerRpcController)
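
The controller/callback handshake above repeats in every Phoenix example on this page: build a ServerRpcController and a BlockingRpcCallback, invoke the coprocessor stub, rethrow any server-side failure recorded on the controller, then read the response. A minimal sketch of that pattern as a standalone helper (the helper class and its interface are hypothetical, not part of Phoenix or HBase):

import java.io.IOException;

import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;

final class CoprocessorRpcPattern {

    // Hypothetical functional interface: issues a single coprocessor RPC using
    // the supplied controller and callback.
    interface RpcInvocation<R> {
        void invoke(ServerRpcController controller, BlockingRpcCallback<R> callback) throws IOException;
    }

    // Runs the RPC, rethrows any failure recorded on the controller, then
    // returns the response the callback blocked on.
    static <R> R callAndCheck(RpcInvocation<R> rpc) throws IOException {
        ServerRpcController controller = new ServerRpcController();
        BlockingRpcCallback<R> callback = new BlockingRpcCallback<R>();
        rpc.invoke(controller, callback);
        if (controller.getFailedOn() != null) {
            throw controller.getFailedOn();
        }
        return callback.get();
    }
}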

Example 17 with ServerRpcController

Use of org.apache.hadoop.hbase.ipc.ServerRpcController in project phoenix by apache.

From class ConnectionQueryServicesImpl, method dropFunction.

@Override
public MetaDataMutationResult dropFunction(final List<Mutation> functionData, final boolean ifExists) throws SQLException {
    byte[][] rowKeyMetadata = new byte[2][];
    byte[] key = functionData.get(0).getRow();
    SchemaUtil.getVarChars(key, rowKeyMetadata);
    byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
    byte[] functionBytes = rowKeyMetadata[PhoenixDatabaseMetaData.FUNTION_NAME_INDEX];
    byte[] functionKey = SchemaUtil.getFunctionKey(tenantIdBytes, functionBytes);
    final MetaDataMutationResult result = metaDataCoprocessorExec(functionKey, new Batch.Call<MetaDataService, MetaDataResponse>() {

        @Override
        public MetaDataResponse call(MetaDataService instance) throws IOException {
            ServerRpcController controller = new ServerRpcController();
            BlockingRpcCallback<MetaDataResponse> rpcCallback = new BlockingRpcCallback<MetaDataResponse>();
            DropFunctionRequest.Builder builder = DropFunctionRequest.newBuilder();
            for (Mutation m : functionData) {
                MutationProto mp = ProtobufUtil.toProto(m);
                builder.addTableMetadataMutations(mp.toByteString());
            }
            builder.setIfExists(ifExists);
            builder.setClientVersion(VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER));
            instance.dropFunction(controller, builder.build(), rpcCallback);
            if (controller.getFailedOn() != null) {
                throw controller.getFailedOn();
            }
            return rpcCallback.get();
        }
    }, PhoenixDatabaseMetaData.SYSTEM_FUNCTION_NAME_BYTES);
    return result;
}
Also used: MetaDataResponse (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse), KeyValueBuilder (org.apache.phoenix.hbase.index.util.KeyValueBuilder), NonTxIndexBuilder (org.apache.phoenix.hbase.index.covered.NonTxIndexBuilder), ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder), PhoenixIndexBuilder (org.apache.phoenix.index.PhoenixIndexBuilder), IOException (java.io.IOException), PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException), ServerRpcController (org.apache.hadoop.hbase.ipc.ServerRpcController), MutationProto (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto), MetaDataService (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService), Batch (org.apache.hadoop.hbase.client.coprocessor.Batch), BlockingRpcCallback (org.apache.hadoop.hbase.ipc.BlockingRpcCallback), Mutation (org.apache.hadoop.hbase.client.Mutation), MetaDataMutationResult (org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult)
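
The loop that converts each client Mutation into a MutationProto and appends its ByteString to the request builder also appears in Example 18 below. Pulled out on its own, the serialization step looks roughly like this; the helper class and method name are assumptions, and the ProtobufUtil import path is assumed to be the same Phoenix utility used by the snippets above:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
// Assumption: same Phoenix ProtobufUtil as in the snippets above.
import org.apache.phoenix.protobuf.ProtobufUtil;

import com.google.protobuf.ByteString;

final class MutationSerialization {

    // Converts each HBase client Mutation to its protobuf form so it can be
    // embedded in a coprocessor request (e.g. via addTableMetadataMutations).
    static List<ByteString> toMutationByteStrings(List<Mutation> mutations) throws IOException {
        List<ByteString> encoded = new ArrayList<ByteString>();
        for (Mutation m : mutations) {
            MutationProto mp = ProtobufUtil.toProto(m);
            encoded.add(mp.toByteString());
        }
        return encoded;
    }
}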

Example 18 with ServerRpcController

Use of org.apache.hadoop.hbase.ipc.ServerRpcController in project phoenix by apache.

From class IndexUtil, method setIndexDisableTimeStamp.

public static MetaDataMutationResult setIndexDisableTimeStamp(String indexTableName, long minTimeStamp, HTableInterface metaTable, PIndexState newState) throws ServiceException, Throwable {
    byte[] indexTableKey = SchemaUtil.getTableKeyFromFullName(indexTableName);
    // Mimic the Put that gets generated by the client on an update of the index state
    Put put = new Put(indexTableKey);
    put.add(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.INDEX_STATE_BYTES, newState.getSerializedBytes());
    put.add(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP_BYTES, PLong.INSTANCE.toBytes(minTimeStamp));
    put.add(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES, PhoenixDatabaseMetaData.ASYNC_REBUILD_TIMESTAMP_BYTES, PLong.INSTANCE.toBytes(0));
    final List<Mutation> tableMetadata = Collections.<Mutation>singletonList(put);
    final Map<byte[], MetaDataResponse> results = metaTable.coprocessorService(MetaDataService.class, indexTableKey, indexTableKey, new Batch.Call<MetaDataService, MetaDataResponse>() {

        @Override
        public MetaDataResponse call(MetaDataService instance) throws IOException {
            ServerRpcController controller = new ServerRpcController();
            BlockingRpcCallback<MetaDataResponse> rpcCallback = new BlockingRpcCallback<MetaDataResponse>();
            UpdateIndexStateRequest.Builder builder = UpdateIndexStateRequest.newBuilder();
            for (Mutation m : tableMetadata) {
                MutationProto mp = ProtobufUtil.toProto(m);
                builder.addTableMetadataMutations(mp.toByteString());
            }
            instance.updateIndexState(controller, builder.build(), rpcCallback);
            if (controller.getFailedOn() != null) {
                throw controller.getFailedOn();
            }
            return rpcCallback.get();
        }
    });
    if (results.isEmpty()) {
        throw new IOException("Didn't get expected result size");
    }
    MetaDataResponse tmpResponse = results.values().iterator().next();
    return MetaDataMutationResult.constructFromProto(tmpResponse);
}
Also used: MetaDataResponse (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse), KeyValueBuilder (org.apache.phoenix.hbase.index.util.KeyValueBuilder), IOException (java.io.IOException), ServerRpcController (org.apache.hadoop.hbase.ipc.ServerRpcController), Put (org.apache.hadoop.hbase.client.Put), MutationProto (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto), MetaDataService (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService), Batch (org.apache.hadoop.hbase.client.coprocessor.Batch), BlockingRpcCallback (org.apache.hadoop.hbase.ipc.BlockingRpcCallback), Mutation (org.apache.hadoop.hbase.client.Mutation)
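
A hypothetical caller sketch for the utility above; the index name, the timestamp source, and the way the metaTable handle (an open HTableInterface on SYSTEM.CATALOG) is obtained are assumptions, while the method signature and PIndexState come from the snippet:

// Hypothetical usage sketch, not taken from Phoenix source.
static void disableIndex(HTableInterface metaTable) throws Throwable {
    MetaDataMutationResult result = IndexUtil.setIndexDisableTimeStamp(
            "MY_SCHEMA.MY_INDEX", System.currentTimeMillis(), metaTable, PIndexState.DISABLE);
    // Inspect result (e.g. its mutation code) to confirm the index state row was written.
}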

Example 19 with ServerRpcController

Use of org.apache.hadoop.hbase.ipc.ServerRpcController in project hbase by apache.

From class SecureBulkLoadEndpointClient, method prepareBulkLoad.

public String prepareBulkLoad(final TableName tableName) throws IOException {
    try {
        CoprocessorRpcChannel channel = table.coprocessorService(HConstants.EMPTY_START_ROW);
        SecureBulkLoadProtos.SecureBulkLoadService instance = ProtobufUtil.newServiceStub(SecureBulkLoadProtos.SecureBulkLoadService.class, channel);
        ServerRpcController controller = new ServerRpcController();
        CoprocessorRpcUtils.BlockingRpcCallback<PrepareBulkLoadResponse> rpcCallback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
        PrepareBulkLoadRequest request = PrepareBulkLoadRequest.newBuilder().setTableName(ProtobufUtil.toProtoTableName(tableName)).build();
        instance.prepareBulkLoad(controller, request, rpcCallback);
        PrepareBulkLoadResponse response = rpcCallback.get();
        if (controller.failedOnException()) {
            throw controller.getFailedOn();
        }
        return response.getBulkToken();
    } catch (Throwable throwable) {
        throw new IOException(throwable);
    }
}
Also used : PrepareBulkLoadRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest) CoprocessorRpcUtils(org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils) CoprocessorRpcChannel(org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel) IOException(java.io.IOException) PrepareBulkLoadResponse(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) ServerRpcController(org.apache.hadoop.hbase.ipc.ServerRpcController) SecureBulkLoadProtos(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos)
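
Note that the hbase examples check controller.failedOnException() while the phoenix examples null-check controller.getFailedOn(); both read the same failure recorded on the ServerRpcController, so the two idioms are interchangeable. A short sketch of the two equivalent checks:

// Idiom used in Examples 16 through 18 (phoenix):
if (controller.getFailedOn() != null) {
    throw controller.getFailedOn();
}
// Equivalent idiom used in Examples 19 and 20 (hbase):
if (controller.failedOnException()) {
    throw controller.getFailedOn();
}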

Example 20 with ServerRpcController

Use of org.apache.hadoop.hbase.ipc.ServerRpcController in project hbase by apache.

From class SecureBulkLoadEndpointClient, method cleanupBulkLoad.

public void cleanupBulkLoad(final String bulkToken) throws IOException {
    try {
        CoprocessorRpcChannel channel = table.coprocessorService(HConstants.EMPTY_START_ROW);
        SecureBulkLoadProtos.SecureBulkLoadService instance = ProtobufUtil.newServiceStub(SecureBulkLoadProtos.SecureBulkLoadService.class, channel);
        ServerRpcController controller = new ServerRpcController();
        CoprocessorRpcUtils.BlockingRpcCallback<CleanupBulkLoadResponse> rpcCallback = new CoprocessorRpcUtils.BlockingRpcCallback<>();
        CleanupBulkLoadRequest request = CleanupBulkLoadRequest.newBuilder().setBulkToken(bulkToken).build();
        instance.cleanupBulkLoad(controller, request, rpcCallback);
        if (controller.failedOnException()) {
            throw controller.getFailedOn();
        }
    } catch (Throwable throwable) {
        throw new IOException(throwable);
    }
}
Also used: CoprocessorRpcUtils (org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils), CleanupBulkLoadRequest (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest), CoprocessorRpcChannel (org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel), IOException (java.io.IOException), ServerRpcController (org.apache.hadoop.hbase.ipc.ServerRpcController), CleanupBulkLoadResponse (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse), SecureBulkLoadProtos (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos)
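
Examples 19 and 20 are two halves of the same lifecycle: prepareBulkLoad hands back a bulk token that cleanupBulkLoad later releases. A hypothetical caller sketch, assuming a SecureBulkLoadEndpointClient instance (client) and the target TableName have already been set up elsewhere:

// Hypothetical pairing of the two calls shown above.
String bulkToken = client.prepareBulkLoad(tableName);
try {
    // ... stage and load the HFiles using bulkToken ...
} finally {
    // Always release the token, even if the load fails.
    client.cleanupBulkLoad(bulkToken);
}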

Aggregations

ServerRpcController (org.apache.hadoop.hbase.ipc.ServerRpcController): 31
IOException (java.io.IOException): 24
Batch (org.apache.hadoop.hbase.client.coprocessor.Batch): 21
BlockingRpcCallback (org.apache.hadoop.hbase.ipc.BlockingRpcCallback): 17
MetaDataService (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService): 15
KeyValueBuilder (org.apache.phoenix.hbase.index.util.KeyValueBuilder): 13
ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder): 12
CoprocessorRpcUtils (org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils): 12
PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException): 12
NonTxIndexBuilder (org.apache.phoenix.hbase.index.covered.NonTxIndexBuilder): 12
PhoenixIndexBuilder (org.apache.phoenix.index.PhoenixIndexBuilder): 12
MetaDataResponse (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse): 11
Mutation (org.apache.hadoop.hbase.client.Mutation): 10
MutationProto (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto): 10
MetaDataMutationResult (org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult): 8
Table (org.apache.hadoop.hbase.client.Table): 6
SQLException (java.sql.SQLException): 5
Map (java.util.Map): 5
TreeMap (java.util.TreeMap): 5
HTableInterface (org.apache.hadoop.hbase.client.HTableInterface): 5