
Example 1 with Delete

Use of org.apache.hadoop.hbase.client.Delete in project camel by apache.

From the class HBaseProducer, method process:

public void process(Exchange exchange) throws Exception {
    try (Table table = endpoint.getTable()) {
        updateHeaders(exchange);
        String operation = (String) exchange.getIn().getHeader(HBaseConstants.OPERATION);
        Integer maxScanResult = exchange.getIn().getHeader(HBaseConstants.HBASE_MAX_SCAN_RESULTS, Integer.class);
        String fromRowId = (String) exchange.getIn().getHeader(HBaseConstants.FROM_ROW);
        String stopRowId = (String) exchange.getIn().getHeader(HBaseConstants.STOP_ROW);
        CellMappingStrategy mappingStrategy = endpoint.getCellMappingStrategyFactory().getStrategy(exchange.getIn());
        HBaseData data = mappingStrategy.resolveModel(exchange.getIn());
        List<Put> putOperations = new LinkedList<>();
        List<Delete> deleteOperations = new LinkedList<>();
        List<HBaseRow> getOperationResult = new LinkedList<>();
        List<HBaseRow> scanOperationResult = new LinkedList<>();
        for (HBaseRow hRow : data.getRows()) {
            hRow.apply(rowModel);
            if (HBaseConstants.PUT.equals(operation)) {
                putOperations.add(createPut(hRow));
            } else if (HBaseConstants.GET.equals(operation)) {
                HBaseRow getResultRow = getCells(table, hRow);
                getOperationResult.add(getResultRow);
            } else if (HBaseConstants.DELETE.equals(operation)) {
                deleteOperations.add(createDeleteRow(hRow));
            } else if (HBaseConstants.SCAN.equals(operation)) {
                scanOperationResult = scanCells(table, hRow, fromRowId, stopRowId, maxScanResult, endpoint.getFilters());
            }
        }
        //Check if we have something to add.
        if (!putOperations.isEmpty()) {
            table.put(putOperations);
        } else if (!deleteOperations.isEmpty()) {
            table.delete(deleteOperations);
        } else if (!getOperationResult.isEmpty()) {
            mappingStrategy.applyGetResults(exchange.getOut(), new HBaseData(getOperationResult));
        } else if (!scanOperationResult.isEmpty()) {
            mappingStrategy.applyScanResults(exchange.getOut(), new HBaseData(scanOperationResult));
        }
    }
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), Table (org.apache.hadoop.hbase.client.Table), CellMappingStrategy (org.apache.camel.component.hbase.mapping.CellMappingStrategy), HBaseData (org.apache.camel.component.hbase.model.HBaseData), HBaseRow (org.apache.camel.component.hbase.model.HBaseRow), LinkedList (java.util.LinkedList), Put (org.apache.hadoop.hbase.client.Put)
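
The producer collects all Delete operations for an exchange and issues them with a single table.delete call. The standalone sketch below shows the same batched row-delete pattern against the plain HBase client API; it is not part of the Camel component, and the table name and row keys are assumptions made up for illustration.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchDeleteSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             // "demo_table" and the row keys below are illustrative assumptions.
             Table table = connection.getTable(TableName.valueOf("demo_table"))) {
            List<Delete> deletes = new ArrayList<>();
            for (String rowKey : new String[] { "row-1", "row-2" }) {
                // One Delete per row, mirroring createDeleteRow(hRow) in the producer above.
                deletes.add(new Delete(Bytes.toBytes(rowKey)));
            }
            if (!deletes.isEmpty()) {
                // Same batched call the producer makes once per exchange.
                table.delete(deletes);
            }
        }
    }
}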

Example 2 with Delete

Use of org.apache.hadoop.hbase.client.Delete in project camel by apache.

From the class HBaseIdempotentRepository, method remove:

@Override
public boolean remove(Object o) {
    try {
        byte[] b = HBaseHelper.toBytes(o);
        if (table.exists(new Get(b))) {
            Delete delete = new Delete(b);
            table.delete(delete);
            return true;
        } else {
            return false;
        }
    } catch (Exception e) {
        LOG.warn("Error removing object {} from HBase repository.", o, e);
        return false;
    }
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), Get (org.apache.hadoop.hbase.client.Get), IOException (java.io.IOException)
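
The remove method checks for the row with a Get before issuing the Delete, which takes two separate RPCs and is therefore not atomic; when a specific column value can gate the removal, Table.checkAndDelete offers an atomic alternative. Below is a minimal sketch of the same check-then-delete pattern; the table name and key are assumed values for illustration only.

import java.io.IOException;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckThenDeleteSketch {
    // Same non-atomic check-then-delete as HBaseIdempotentRepository.remove above.
    static boolean removeIfPresent(Table table, byte[] key) throws IOException {
        if (table.exists(new Get(key))) {
            table.delete(new Delete(key));
            return true;
        }
        return false;
    }

    public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             // "idempotent_repo" and "message-42" are assumed names for this sketch.
             Table table = connection.getTable(TableName.valueOf("idempotent_repo"))) {
            System.out.println(removeIfPresent(table, Bytes.toBytes("message-42")));
        }
    }
}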

Example 3 with Delete

Use of org.apache.hadoop.hbase.client.Delete in project hbase by apache.

From the class MetaTableAccessor, method multiMutate:

/**
   * Performs an atomic multi-mutate operation against the given table.
   */
// Used by the RSGroup Coprocessor Endpoint. It had a copy/paste of the below. Need to reveal
// this facility for CPEP use or at least those CPEPs that are on their way to becoming part of
// core as is the intent for RSGroup eventually.
public static void multiMutate(Connection connection, final Table table, byte[] row, final List<Mutation> mutations) throws IOException {
    if (METALOG.isDebugEnabled()) {
        METALOG.debug(mutationsToString(mutations));
    }
    // TODO: Need rollback!!!!
    // TODO: Need Retry!!!
    // TODO: What for a timeout? Default write timeout? GET FROM HTABLE?
    // TODO: Review when we come through with ProcedureV2.
    RegionServerCallable<MutateRowsResponse, MultiRowMutationProtos.MultiRowMutationService.BlockingInterface> callable = new RegionServerCallable<MutateRowsResponse, MultiRowMutationProtos.MultiRowMutationService.BlockingInterface>(connection, table.getName(), row, null) {

        /*RpcController not used in this CPEP!*/
        @Override
        protected MutateRowsResponse rpcCall() throws Exception {
            final MutateRowsRequest.Builder builder = MutateRowsRequest.newBuilder();
            for (Mutation mutation : mutations) {
                if (mutation instanceof Put) {
                    builder.addMutationRequest(ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.PUT, mutation));
                } else if (mutation instanceof Delete) {
                    builder.addMutationRequest(ProtobufUtil.toMutation(ClientProtos.MutationProto.MutationType.DELETE, mutation));
                } else {
                    throw new DoNotRetryIOException("multi in MetaEditor doesn't support " + mutation.getClass().getName());
                }
            }
            // The call to #prepare that ran before this invocation will have populated HRegionLocation.
            HRegionLocation hrl = getLocation();
            RegionSpecifier region = ProtobufUtil.buildRegionSpecifier(RegionSpecifierType.REGION_NAME, hrl.getRegionInfo().getRegionName());
            builder.setRegion(region);
            // RpcController is not used by this CPEP, so null is passed to mutateRows below.
            return getStub().mutateRows(null, builder.build());
        }

        // Called at the end of the super.prepare call; sets the stub. The serviceName
        // argument is ignored because the stub is built from the table's coprocessor channel.
        @Override
        protected void setStubByServiceName(ServerName serviceName) throws IOException {
            CoprocessorRpcChannel channel = table.coprocessorService(getRow());
            setStub(MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel));
        }
    };
    int writeTimeout = connection.getConfiguration().getInt(HConstants.HBASE_RPC_WRITE_TIMEOUT_KEY, connection.getConfiguration().getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, HConstants.DEFAULT_HBASE_RPC_TIMEOUT));
    // The region location should be cached in connection. Call prepare so this callable picks
    // up the region location (see super.prepare method).
    callable.prepare(false);
    callable.call(writeTimeout);
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), CoprocessorRpcChannel (org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel), MultiRowMutationProtos (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos), Put (org.apache.hadoop.hbase.client.Put), RegionSpecifier (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier), RegionServerCallable (org.apache.hadoop.hbase.client.RegionServerCallable), MutateRowsResponse (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse), MutateRowsRequest (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest), Mutation (org.apache.hadoop.hbase.client.Mutation)
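
multiMutate accepts a mixed list of Put and Delete mutations and applies them atomically against the region that hosts the given anchor row, via the multi-row mutation coprocessor endpoint. The sketch below shows how a caller might assemble such a list; the family, qualifiers, and row keys are made-up assumptions, and both rows are presumed to live in the same region of a table that hosts the endpoint.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiMutateSketch {
    // Hypothetical helper: mixes a Put and a Delete in one atomic multi-mutate call.
    static void putAndDeleteAtomically(Connection connection, Table table) throws IOException {
        byte[] family = Bytes.toBytes("info");
        List<Mutation> mutations = new ArrayList<>();

        // Update one row...
        Put put = new Put(Bytes.toBytes("row-a"));
        put.addColumn(family, Bytes.toBytes("state"), Bytes.toBytes("OPEN"));
        mutations.add(put);

        // ...and delete a column from another row in the same region.
        Delete delete = new Delete(Bytes.toBytes("row-b"));
        delete.addColumns(family, Bytes.toBytes("old"));
        mutations.add(delete);

        // The anchor row selects the region; all mutations are applied atomically there.
        MetaTableAccessor.multiMutate(connection, table, Bytes.toBytes("row-a"), mutations);
    }
}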

Example 4 with Delete

Use of org.apache.hadoop.hbase.client.Delete in project hbase by apache.

From the class MetaTableAccessor, method removeRegionReplicasFromMeta:

/**
   * Deletes some replica columns corresponding to replicas for the passed rows
   * @param metaRows rows in hbase:meta
   * @param replicaIndexToDeleteFrom the replica ID we would start deleting from
   * @param numReplicasToRemove how many replicas to remove
   * @param connection connection we're using to access meta table
   * @throws IOException
   */
public static void removeRegionReplicasFromMeta(Set<byte[]> metaRows, int replicaIndexToDeleteFrom, int numReplicasToRemove, Connection connection) throws IOException {
    int absoluteIndex = replicaIndexToDeleteFrom + numReplicasToRemove;
    for (byte[] row : metaRows) {
        long now = EnvironmentEdgeManager.currentTime();
        Delete deleteReplicaLocations = new Delete(row);
        for (int i = replicaIndexToDeleteFrom; i < absoluteIndex; i++) {
            deleteReplicaLocations.addColumns(getCatalogFamily(), getServerColumn(i), now);
            deleteReplicaLocations.addColumns(getCatalogFamily(), getSeqNumColumn(i), now);
            deleteReplicaLocations.addColumns(getCatalogFamily(), getStartCodeColumn(i), now);
        }
        deleteFromMetaTable(connection, deleteReplicaLocations);
    }
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete)
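
Callers supply the meta row keys as a Set<byte[]>, which needs a byte-array-aware comparator because arrays do not implement value equality. Below is a hedged sketch of a call site; the row key and the replica indexes (start at 2, remove 2) are illustrative assumptions.

import java.io.IOException;
import java.util.Set;
import java.util.TreeSet;

import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.util.Bytes;

public class RemoveReplicasSketch {
    // Hypothetical call site: trims replica columns 2 and 3 from the given meta row.
    static void trimReplicas(Connection connection, byte[] metaRowKey) throws IOException {
        // byte[] has no sensible equals/hashCode, so back the set with a byte comparator.
        Set<byte[]> metaRows = new TreeSet<>(Bytes.BYTES_COMPARATOR);
        metaRows.add(metaRowKey);
        // Start deleting at replica index 2 and remove 2 replicas (indexes 2 and 3).
        MetaTableAccessor.removeRegionReplicasFromMeta(metaRows, 2, 2, connection);
    }
}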

Example 5 with Delete

Use of org.apache.hadoop.hbase.client.Delete in project hbase by apache.

From the class MetaTableAccessor, method deleteTableState:

/**
   * Remove state for table from meta
   * @param connection to use for deletion
   * @param table to delete state for
   */
public static void deleteTableState(Connection connection, TableName table) throws IOException {
    long time = EnvironmentEdgeManager.currentTime();
    Delete delete = new Delete(table.getName());
    delete.addColumns(getTableFamily(), getStateColumn(), time);
    deleteFromMetaTable(connection, delete);
    LOG.info("Deleted table " + table + " state from META");
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete)
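
deleteTableState keys a single Delete by the table name and removes only the state column from the table family in hbase:meta. A minimal sketch of a call site follows; the connection setup is standard and the table name is an assumption for illustration.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class DeleteTableStateSketch {
    public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create())) {
            // "demo_table" is an assumed name; its state entry is removed from hbase:meta.
            MetaTableAccessor.deleteTableState(connection, TableName.valueOf("demo_table"));
        }
    }
}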

Aggregations

Delete (org.apache.hadoop.hbase.client.Delete): 291
Put (org.apache.hadoop.hbase.client.Put): 146
Test (org.junit.Test): 142
Result (org.apache.hadoop.hbase.client.Result): 101
Table (org.apache.hadoop.hbase.client.Table): 99
Scan (org.apache.hadoop.hbase.client.Scan): 93
IOException (java.io.IOException): 85
Cell (org.apache.hadoop.hbase.Cell): 74
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 70
TableName (org.apache.hadoop.hbase.TableName): 65
Connection (org.apache.hadoop.hbase.client.Connection): 55
ArrayList (java.util.ArrayList): 52
InterruptedIOException (java.io.InterruptedIOException): 45
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 44
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 42
Get (org.apache.hadoop.hbase.client.Get): 41
CellScanner (org.apache.hadoop.hbase.CellScanner): 32
Mutation (org.apache.hadoop.hbase.client.Mutation): 32
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 27
Admin (org.apache.hadoop.hbase.client.Admin): 20