
Example 21 with Delete

Use of org.apache.hadoop.hbase.client.Delete in project hbase by apache.

From class WALSplitter, method getMutationsFromWALEntry:

/**
   * Constructs mutations from a WALEntry and, when a logEntry pair is
   * supplied, also reconstructs the WALKey and WALEdit from the passed-in
   * WALEntry.
   * @param entry the WALEntry to convert
   * @param cells scanner over the cells associated with the entry
   * @param logEntry pair in which to store the WALKey and WALEdit instances
   *          extracted from the passed-in WALEntry; may be null
   * @param durability the durability to set on each reconstructed mutation
   * @return list of MutationReplay instances to be replayed
   * @throws IOException
   */
public static List<MutationReplay> getMutationsFromWALEntry(WALEntry entry, CellScanner cells, Pair<WALKey, WALEdit> logEntry, Durability durability) throws IOException {
    if (entry == null) {
        // return an empty list
        return new ArrayList<>();
    }
    long replaySeqId = (entry.getKey().hasOrigSequenceNumber()) ? entry.getKey().getOrigSequenceNumber() : entry.getKey().getLogSequenceNumber();
    int count = entry.getAssociatedCellCount();
    List<MutationReplay> mutations = new ArrayList<>();
    Cell previousCell = null;
    Mutation m = null;
    WALKey key = null;
    WALEdit val = null;
    if (logEntry != null)
        val = new WALEdit();
    for (int i = 0; i < count; i++) {
        // Throw index out of bounds if our cell count is off
        if (!cells.advance()) {
            throw new ArrayIndexOutOfBoundsException("Expected=" + count + ", index=" + i);
        }
        Cell cell = cells.current();
        if (val != null)
            val.add(cell);
        boolean isNewRowOrType = previousCell == null || previousCell.getTypeByte() != cell.getTypeByte() || !CellUtil.matchingRow(previousCell, cell);
        if (isNewRowOrType) {
            // Create new mutation
            if (CellUtil.isDelete(cell)) {
                m = new Delete(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
                // Deletes don't have nonces.
                mutations.add(new MutationReplay(MutationType.DELETE, m, HConstants.NO_NONCE, HConstants.NO_NONCE));
            } else {
                m = new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
                // Puts might come from increment or append, thus we need nonces.
                long nonceGroup = entry.getKey().hasNonceGroup() ? entry.getKey().getNonceGroup() : HConstants.NO_NONCE;
                long nonce = entry.getKey().hasNonce() ? entry.getKey().getNonce() : HConstants.NO_NONCE;
                mutations.add(new MutationReplay(MutationType.PUT, m, nonceGroup, nonce));
            }
        }
        if (CellUtil.isDelete(cell)) {
            ((Delete) m).addDeleteMarker(cell);
        } else {
            ((Put) m).add(cell);
        }
        m.setDurability(durability);
        previousCell = cell;
    }
    // reconstruct WALKey
    if (logEntry != null) {
        org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey walKeyProto = entry.getKey();
        List<UUID> clusterIds = new ArrayList<>(walKeyProto.getClusterIdsCount());
        for (HBaseProtos.UUID uuid : entry.getKey().getClusterIdsList()) {
            clusterIds.add(new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits()));
        }
        key = new WALKey(walKeyProto.getEncodedRegionName().toByteArray(), TableName.valueOf(walKeyProto.getTableName().toByteArray()), replaySeqId, walKeyProto.getWriteTime(), clusterIds, walKeyProto.getNonceGroup(), walKeyProto.getNonce(), null);
        logEntry.setFirst(key);
        logEntry.setSecond(val);
    }
    return mutations;
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) ArrayList(java.util.ArrayList) Put(org.apache.hadoop.hbase.client.Put) HBaseProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos) WALEdit(org.apache.hadoop.hbase.regionserver.wal.WALEdit) Mutation(org.apache.hadoop.hbase.client.Mutation) UUID(java.util.UUID) Cell(org.apache.hadoop.hbase.Cell)
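
A minimal caller sketch for this method, assuming entry (a WALEntry) and cells (its CellScanner) have already been deserialized from a replay or replication request; those two names and the println sink are placeholders for illustration:

// entry and cells are hypothetical inputs here.
Pair<WALKey, WALEdit> walKeyAndEdit = new Pair<>();
List<WALSplitter.MutationReplay> toReplay =
    WALSplitter.getMutationsFromWALEntry(entry, cells, walKeyAndEdit, Durability.USE_DEFAULT);
for (WALSplitter.MutationReplay replay : toReplay) {
    // Each MutationReplay pairs a MutationType (PUT or DELETE) with the
    // reconstructed Mutation and any nonce information.
    System.out.println(replay.type + " on row " + Bytes.toString(replay.mutation.getRow()));
}
// walKeyAndEdit now holds the reconstructed WALKey and WALEdit.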

Example 22 with Delete

Use of org.apache.hadoop.hbase.client.Delete in project hbase by apache.

From class RequestConverter, method buildRegionAction:

/**
   * Create a protocol buffer multi request for a list of actions.
   * Propagates each Action's original index.
   *
   * @param regionName name of the region the actions are destined for
   * @param actions the client-side actions to convert
   * @return a RegionAction.Builder populated with the converted actions
   * @throws IOException
   */
public static RegionAction.Builder buildRegionAction(final byte[] regionName, final List<Action> actions, final RegionAction.Builder regionActionBuilder, final ClientProtos.Action.Builder actionBuilder, final MutationProto.Builder mutationBuilder) throws IOException {
    ClientProtos.CoprocessorServiceCall.Builder cpBuilder = null;
    for (Action action : actions) {
        Row row = action.getAction();
        actionBuilder.clear();
        actionBuilder.setIndex(action.getOriginalIndex());
        mutationBuilder.clear();
        if (row instanceof Get) {
            Get g = (Get) row;
            regionActionBuilder.addAction(actionBuilder.setGet(ProtobufUtil.toGet(g)));
        } else if (row instanceof Put) {
            regionActionBuilder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutation(MutationType.PUT, (Put) row, mutationBuilder)));
        } else if (row instanceof Delete) {
            regionActionBuilder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutation(MutationType.DELETE, (Delete) row, mutationBuilder)));
        } else if (row instanceof Append) {
            regionActionBuilder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutation(MutationType.APPEND, (Append) row, mutationBuilder, action.getNonce())));
        } else if (row instanceof Increment) {
            regionActionBuilder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutation((Increment) row, mutationBuilder, action.getNonce())));
        } else if (row instanceof RegionCoprocessorServiceExec) {
            RegionCoprocessorServiceExec exec = (RegionCoprocessorServiceExec) row;
            // DUMB COPY!!! FIX!!! Done to copy from c.g.p.ByteString to shaded ByteString.
            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations.unsafeWrap(exec.getRequest().toByteArray());
            if (cpBuilder == null) {
                cpBuilder = ClientProtos.CoprocessorServiceCall.newBuilder();
            } else {
                cpBuilder.clear();
            }
            regionActionBuilder.addAction(actionBuilder.setServiceCall(cpBuilder.setRow(UnsafeByteOperations.unsafeWrap(exec.getRow())).setServiceName(exec.getMethod().getService().getFullName()).setMethodName(exec.getMethod().getName()).setRequest(value)));
        } else if (row instanceof RowMutations) {
            throw new UnsupportedOperationException("No RowMutations in multi calls; use mutateRow");
        } else {
            throw new DoNotRetryIOException("Multi doesn't support " + row.getClass().getName());
        }
    }
    return regionActionBuilder;
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Action(org.apache.hadoop.hbase.client.Action) RegionAction(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Put(org.apache.hadoop.hbase.client.Put) RegionCoprocessorServiceExec(org.apache.hadoop.hbase.client.RegionCoprocessorServiceExec) RowMutations(org.apache.hadoop.hbase.client.RowMutations) Append(org.apache.hadoop.hbase.client.Append) Get(org.apache.hadoop.hbase.client.Get) Increment(org.apache.hadoop.hbase.client.Increment) Row(org.apache.hadoop.hbase.client.Row)
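
A hedged sketch of driving this converter with a small mixed batch; the region name, rows, and values are made up, and the three builders are created fresh as the signature expects:

Put put = new Put(Bytes.toBytes("row1"));
put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
Delete delete = new Delete(Bytes.toBytes("row2"));
List<Action> actions = new ArrayList<>();
// Each Action keeps the index its Row had in the caller's batch.
actions.add(new Action(put, 0));
actions.add(new Action(delete, 1));
RegionAction.Builder regionAction = RequestConverter.buildRegionAction(
    Bytes.toBytes("hypotheticalRegionName"), actions,
    RegionAction.newBuilder(), ClientProtos.Action.newBuilder(), MutationProto.newBuilder());
// regionAction now carries one protobuf Action per input, each tagged
// with its original index so results can be matched back to the batch.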

Example 23 with Delete

Use of org.apache.hadoop.hbase.client.Delete in project hbase by apache.

From class RequestConverter, method buildMutateRequest:

/**
   * Create a protocol buffer MultiRequest for conditioned row mutations.
   *
   * @param regionName name of the region the mutations are destined for
   * @param row row key the condition is evaluated against
   * @param family column family of the cell to check
   * @param qualifier column qualifier of the cell to check
   * @param comparator comparator for the expected value
   * @param compareType comparison operator for the condition
   * @param rowMutations the Put and Delete mutations to apply atomically
   * @return a multi request carrying the conditioned mutations
   * @throws IOException
   */
public static ClientProtos.MultiRequest buildMutateRequest(final byte[] regionName, final byte[] row, final byte[] family, final byte[] qualifier, final ByteArrayComparable comparator, final CompareType compareType, final RowMutations rowMutations) throws IOException {
    RegionAction.Builder builder = getRegionActionBuilderWithRegion(RegionAction.newBuilder(), regionName);
    builder.setAtomic(true);
    ClientProtos.Action.Builder actionBuilder = ClientProtos.Action.newBuilder();
    MutationProto.Builder mutationBuilder = MutationProto.newBuilder();
    Condition condition = buildCondition(row, family, qualifier, comparator, compareType);
    for (Mutation mutation : rowMutations.getMutations()) {
        MutationType mutateType = null;
        if (mutation instanceof Put) {
            mutateType = MutationType.PUT;
        } else if (mutation instanceof Delete) {
            mutateType = MutationType.DELETE;
        } else {
            throw new DoNotRetryIOException("RowMutations supports only put and delete, not " + mutation.getClass().getName());
        }
        mutationBuilder.clear();
        MutationProto mp = ProtobufUtil.toMutation(mutateType, mutation, mutationBuilder);
        actionBuilder.clear();
        actionBuilder.setMutation(mp);
        builder.addAction(actionBuilder.build());
    }
    ClientProtos.MultiRequest request = ClientProtos.MultiRequest.newBuilder().addRegionAction(builder.build()).setCondition(condition).build();
    return request;
}
Also used : Condition(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition) Delete(org.apache.hadoop.hbase.client.Delete) Action(org.apache.hadoop.hbase.client.Action) RegionAction(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction) MutationType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) RegionAction(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction) MutationProto(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto) Put(org.apache.hadoop.hbase.client.Put) Mutation(org.apache.hadoop.hbase.client.Mutation) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos)
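
A hedged usage sketch: mutate "row1" atomically only if cf:q still holds "expected". All rows, families, and values here are placeholders, and BinaryComparator stands in for the ByteArrayComparable parameter:

RowMutations rowMutations = new RowMutations(Bytes.toBytes("row1"));
Put put = new Put(Bytes.toBytes("row1"));
put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q2"), Bytes.toBytes("v2"));
rowMutations.add(put);
Delete delete = new Delete(Bytes.toBytes("row1"));
delete.addColumns(Bytes.toBytes("cf"), Bytes.toBytes("q3"));
rowMutations.add(delete);
ClientProtos.MultiRequest request = RequestConverter.buildMutateRequest(
    Bytes.toBytes("hypotheticalRegionName"), Bytes.toBytes("row1"),
    Bytes.toBytes("cf"), Bytes.toBytes("q"),
    new BinaryComparator(Bytes.toBytes("expected")), CompareType.EQUAL, rowMutations);
// The request wraps one atomic RegionAction plus the guard Condition.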

Example 24 with Delete

Use of org.apache.hadoop.hbase.client.Delete in project hbase by apache.

From class RowResource, method checkAndDelete:

/**
   * Validates the input request parameters, parses columns from the
   * CellSetModel, and invokes checkAndDelete on the table.
   *
   * @param model instance of CellSetModel
   * @return Response 200 OK, 304 Not Modified, 400 Bad Request
   */
Response checkAndDelete(final CellSetModel model) {
    Table table = null;
    Delete delete = null;
    try {
        table = servlet.getTable(tableResource.getName());
        if (model.getRows().size() != 1) {
            servlet.getMetrics().incrementFailedDeleteRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT).entity("Bad request" + CRLF).build();
        }
        RowModel rowModel = model.getRows().get(0);
        byte[] key = rowModel.getKey();
        if (key == null) {
            key = rowspec.getRow();
        }
        if (key == null) {
            servlet.getMetrics().incrementFailedDeleteRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT).entity("Bad request: Row key found to be null." + CRLF).build();
        }
        List<CellModel> cellModels = rowModel.getCells();
        int cellModelCount = cellModels.size();
        delete = new Delete(key);
        boolean retValue;
        CellModel valueToDeleteCell = rowModel.getCells().get(cellModelCount - 1);
        byte[] valueToDeleteColumn = valueToDeleteCell.getColumn();
        if (valueToDeleteColumn == null) {
            try {
                valueToDeleteColumn = rowspec.getColumns()[0];
            } catch (final ArrayIndexOutOfBoundsException e) {
                servlet.getMetrics().incrementFailedDeleteRequests(1);
                return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT).entity("Bad request: Column not specified for check." + CRLF).build();
            }
        }
        byte[][] parts;
        // Copy all the cells to the Delete request if extra cells are sent
        if (cellModelCount > 1) {
            for (int i = 0, n = cellModelCount - 1; i < n; i++) {
                CellModel cell = cellModels.get(i);
                byte[] col = cell.getColumn();
                if (col == null) {
                    servlet.getMetrics().incrementFailedDeleteRequests(1);
                    return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF).build();
                }
                parts = KeyValue.parseColumn(col);
                if (parts.length == 1) {
                    // Only Column Family is specified
                    delete.addFamily(parts[0], cell.getTimestamp());
                } else if (parts.length == 2) {
                    delete.addColumn(parts[0], parts[1], cell.getTimestamp());
                } else {
                    servlet.getMetrics().incrementFailedDeleteRequests(1);
                    return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT).entity("Bad request: Column to delete incorrectly specified." + CRLF).build();
                }
            }
        }
        parts = KeyValue.parseColumn(valueToDeleteColumn);
        if (parts.length == 2) {
            if (parts[1].length != 0) {
                // if that is the only cell passed to the rest api
                if (cellModelCount == 1) {
                    delete.addColumns(parts[0], parts[1]);
                }
                retValue = table.checkAndDelete(key, parts[0], parts[1], valueToDeleteCell.getValue(), delete);
            } else {
                // The case of empty qualifier.
                if (cellModelCount == 1) {
                    delete.addColumns(parts[0], Bytes.toBytes(StringUtils.EMPTY));
                }
                retValue = table.checkAndDelete(key, parts[0], Bytes.toBytes(StringUtils.EMPTY), valueToDeleteCell.getValue(), delete);
            }
        } else {
            servlet.getMetrics().incrementFailedDeleteRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT).entity("Bad request: Column to check incorrectly specified." + CRLF).build();
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("CHECK-AND-DELETE " + delete.toString() + ", returns " + retValue);
        }
        if (!retValue) {
            servlet.getMetrics().incrementFailedDeleteRequests(1);
            return Response.status(Response.Status.NOT_MODIFIED).type(MIMETYPE_TEXT).entity(" Delete check failed." + CRLF).build();
        }
        ResponseBuilder response = Response.ok();
        servlet.getMetrics().incrementSucessfulDeleteRequests(1);
        return response.build();
    } catch (Exception e) {
        servlet.getMetrics().incrementFailedDeleteRequests(1);
        return processException(e);
    } finally {
        if (table != null)
            try {
                table.close();
            } catch (IOException ioe) {
                LOG.debug("Exception received while closing the table", ioe);
            }
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Table(org.apache.hadoop.hbase.client.Table) IOException(java.io.IOException) IOException(java.io.IOException) RowModel(org.apache.hadoop.hbase.rest.model.RowModel) CellModel(org.apache.hadoop.hbase.rest.model.CellModel) ResponseBuilder(javax.ws.rs.core.Response.ResponseBuilder)
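
For reference, a hedged sketch of the CellSetModel payload a REST client might build for this handler; the column names and values are invented. Per the parsing above, any leading cells name columns to delete and the final cell supplies the check column and its expected value:

CellSetModel model = new CellSetModel();
RowModel rowModel = new RowModel(Bytes.toBytes("row1"));
// Extra cells (all but the last) become delete markers on the Delete.
rowModel.addCell(new CellModel(Bytes.toBytes("cf:stale"), Bytes.toBytes("")));
// The last cell is the check: column cf:check must equal "expected".
rowModel.addCell(new CellModel(Bytes.toBytes("cf:check"), Bytes.toBytes("expected")));
model.addRow(rowModel);
// The client would then submit this model against the row's URL (e.g. via
// PUT with the check=delete query parameter) to reach checkAndDelete.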

Example 25 with Delete

Use of org.apache.hadoop.hbase.client.Delete in project hbase by apache.

From class QuotaUtil, method deleteQuotas:

private static void deleteQuotas(final Connection connection, final byte[] rowKey, final byte[] qualifier) throws IOException {
    Delete delete = new Delete(rowKey);
    if (qualifier != null) {
        delete.addColumns(QUOTA_FAMILY_INFO, qualifier);
    }
    doDelete(connection, delete);
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete)
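
The same pattern as a self-contained sketch: delete all versions of one column from a quota row, or the whole row when no qualifier is given. The conf, rowKey, and qualifier values are placeholders, and the table name assumes the standard hbase:quota system table:

try (Connection connection = ConnectionFactory.createConnection(conf);
     Table table = connection.getTable(TableName.valueOf("hbase:quota"))) {
    Delete delete = new Delete(rowKey);
    if (qualifier != null) {
        // QuotaTableUtil.QUOTA_FAMILY_INFO is the "q" family used by QuotaUtil.
        delete.addColumns(QuotaTableUtil.QUOTA_FAMILY_INFO, qualifier);
    }
    table.delete(delete);
}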

Aggregations

Delete (org.apache.hadoop.hbase.client.Delete): 306
Test (org.junit.Test): 150
Put (org.apache.hadoop.hbase.client.Put): 149
Result (org.apache.hadoop.hbase.client.Result): 111
Table (org.apache.hadoop.hbase.client.Table): 101
Scan (org.apache.hadoop.hbase.client.Scan): 95
IOException (java.io.IOException): 86
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 79
Cell (org.apache.hadoop.hbase.Cell): 75
TableName (org.apache.hadoop.hbase.TableName): 66
ArrayList (java.util.ArrayList): 64
Connection (org.apache.hadoop.hbase.client.Connection): 55
InterruptedIOException (java.io.InterruptedIOException): 45
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 44
PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 42
Get (org.apache.hadoop.hbase.client.Get): 41
Mutation (org.apache.hadoop.hbase.client.Mutation): 33
CellScanner (org.apache.hadoop.hbase.CellScanner): 32
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 27
Admin (org.apache.hadoop.hbase.client.Admin): 20