Example 1 with Append

Use of org.apache.hadoop.hbase.client.Append in project hbase by apache.

From class RequestConverter, method buildRegionAction:

/**
   * Create a protocol buffer multi request for a list of actions.
   * Propagates each Action's original index.
   *
   * @param regionName the region the actions are sent to
   * @param actions the client actions to convert
   * @param regionActionBuilder reused builder for the resulting RegionAction
   * @param actionBuilder reused per-action builder; cleared before each action
   * @param mutationBuilder reused mutation builder; cleared before each action
   * @return a multi request
   * @throws IOException if an action cannot be converted to a protocol buffer
   */
public static RegionAction.Builder buildRegionAction(final byte[] regionName,
        final List<Action> actions, final RegionAction.Builder regionActionBuilder,
        final ClientProtos.Action.Builder actionBuilder,
        final MutationProto.Builder mutationBuilder) throws IOException {
    ClientProtos.CoprocessorServiceCall.Builder cpBuilder = null;
    for (Action action : actions) {
        Row row = action.getAction();
        actionBuilder.clear();
        actionBuilder.setIndex(action.getOriginalIndex());
        mutationBuilder.clear();
        if (row instanceof Get) {
            Get g = (Get) row;
            regionActionBuilder.addAction(actionBuilder.setGet(ProtobufUtil.toGet(g)));
        } else if (row instanceof Put) {
            regionActionBuilder.addAction(actionBuilder.setMutation(
                ProtobufUtil.toMutation(MutationType.PUT, (Put) row, mutationBuilder)));
        } else if (row instanceof Delete) {
            regionActionBuilder.addAction(actionBuilder.setMutation(
                ProtobufUtil.toMutation(MutationType.DELETE, (Delete) row, mutationBuilder)));
        } else if (row instanceof Append) {
            regionActionBuilder.addAction(actionBuilder.setMutation(
                ProtobufUtil.toMutation(MutationType.APPEND, (Append) row, mutationBuilder, action.getNonce())));
        } else if (row instanceof Increment) {
            regionActionBuilder.addAction(actionBuilder.setMutation(
                ProtobufUtil.toMutation((Increment) row, mutationBuilder, action.getNonce())));
        } else if (row instanceof RegionCoprocessorServiceExec) {
            RegionCoprocessorServiceExec exec = (RegionCoprocessorServiceExec) row;
            // DUMB COPY!!! FIX!!! Done to copy from c.g.p.ByteString to shaded ByteString.
            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value =
                org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations
                    .unsafeWrap(exec.getRequest().toByteArray());
            if (cpBuilder == null) {
                cpBuilder = ClientProtos.CoprocessorServiceCall.newBuilder();
            } else {
                cpBuilder.clear();
            }
            regionActionBuilder.addAction(actionBuilder.setServiceCall(cpBuilder
                .setRow(UnsafeByteOperations.unsafeWrap(exec.getRow()))
                .setServiceName(exec.getMethod().getService().getFullName())
                .setMethodName(exec.getMethod().getName())
                .setRequest(value)));
        } else if (row instanceof RowMutations) {
            throw new UnsupportedOperationException("No RowMutations in multi calls; use mutateRow");
        } else {
            throw new DoNotRetryIOException("Multi doesn't support " + row.getClass().getName());
        }
    }
    return regionActionBuilder;
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), Action (org.apache.hadoop.hbase.client.Action), RegionAction (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), Put (org.apache.hadoop.hbase.client.Put), RegionCoprocessorServiceExec (org.apache.hadoop.hbase.client.RegionCoprocessorServiceExec), RowMutations (org.apache.hadoop.hbase.client.RowMutations), Append (org.apache.hadoop.hbase.client.Append), Get (org.apache.hadoop.hbase.client.Get), Increment (org.apache.hadoop.hbase.client.Increment), Row (org.apache.hadoop.hbase.client.Row)
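
A minimal calling sketch, assuming the encoded regionName (byte[]) is already in hand; the pre-allocated builders mirror how the client request path reuses them across calls, and all variable names below are placeholders:

RegionAction.Builder regionActionBuilder = RegionAction.newBuilder();
ClientProtos.Action.Builder actionBuilder = ClientProtos.Action.newBuilder();
MutationProto.Builder mutationBuilder = MutationProto.newBuilder();

// Wrap one Append in an Action; its original index (0 here) is propagated into the proto.
List<Action> actions = new ArrayList<>();
actions.add(new Action(new Append(Bytes.toBytes("row1"))
        .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v")), 0));

RegionAction.Builder built = RequestConverter.buildRegionAction(
        regionName, actions, regionActionBuilder, actionBuilder, mutationBuilder);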

Example 2 with Append

Use of org.apache.hadoop.hbase.client.Append in project hbase by apache.

From class TestHTableWrapper, method checkAppend:

private void checkAppend() throws IOException {
    final byte[] appendValue = Bytes.toBytes("append");
    Append append = new Append(qualifierCol1).add(TEST_FAMILY, qualifierCol1, appendValue);
    Result appendResult = hTableInterface.append(append);
    byte[] appendedRow = appendResult.getRow();
    checkRowValue(appendedRow, appendValue);
}
Also used: Append (org.apache.hadoop.hbase.client.Append), Result (org.apache.hadoop.hbase.client.Result)
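
For context, a hedged sketch of the plain table-level call the wrapper above delegates to; table, rowKey, family, and qualifier are placeholders, and the add signature matches the one used in this test:

Append append = new Append(rowKey);                     // row whose cell we append to
append.add(family, qualifier, Bytes.toBytes("suffix")); // concatenated onto the existing value, if any
Result result = table.append(append);                   // atomic read-modify-write on the region server
byte[] newValue = result.getValue(family, qualifier);   // the post-append value (results are returned by default)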

Example 3 with Append

Use of org.apache.hadoop.hbase.client.Append in project phoenix by apache.

From class ConnectionQueryServicesImpl, method returnAllSequences:

// Take no locks, as this only gets run when there are no open connections
// so there's no danger of contention.
@SuppressWarnings("deprecation")
private void returnAllSequences(ConcurrentMap<SequenceKey, Sequence> sequenceMap) throws SQLException {
    List<Append> mutations = Lists.newArrayListWithExpectedSize(sequenceMap.size());
    for (Sequence sequence : sequenceMap.values()) {
        mutations.addAll(sequence.newReturns());
    }
    if (mutations.isEmpty()) {
        return;
    }
    HTableInterface hTable = this.getTable(SchemaUtil.getPhysicalName(
            PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_NAME_BYTES, this.getProps()).getName());
    SQLException sqlE = null;
    try {
        hTable.batch(mutations);
    } catch (IOException e) {
        sqlE = ServerUtil.parseServerException(e);
    } catch (InterruptedException e) {
        // restore the interrupt status
        Thread.currentThread().interrupt();
        sqlE = new SQLExceptionInfo.Builder(SQLExceptionCode.INTERRUPTED_EXCEPTION)
                .setRootCause(e).build().buildException();
    } finally {
        try {
            hTable.close();
        } catch (IOException e) {
            if (sqlE == null) {
                sqlE = ServerUtil.parseServerException(e);
            } else {
                sqlE.setNextException(ServerUtil.parseServerException(e));
            }
        }
        if (sqlE != null) {
            throw sqlE;
        }
    }
}
Also used: Append (org.apache.hadoop.hbase.client.Append), SQLException (java.sql.SQLException), Sequence (org.apache.phoenix.schema.Sequence), IOException (java.io.IOException), PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), SQLExceptionInfo (org.apache.phoenix.exception.SQLExceptionInfo)
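
A hedged sketch of the two-argument batch variant; the single-argument form above is the deprecated one the @SuppressWarnings annotation targets. Exception handling is elided since it would mirror the method above:

Object[] results = new Object[mutations.size()];
hTable.batch(mutations, results); // results[i] holds the server outcome for mutations.get(i)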

Example 4 with Append

Use of org.apache.hadoop.hbase.client.Append in project phoenix by apache.

From class ConnectionQueryServicesImpl, method createSequence:

@Override
public long createSequence(String tenantId, String schemaName, String sequenceName,
        long startWith, long incrementBy, long cacheSize, long minValue, long maxValue,
        boolean cycle, long timestamp) throws SQLException {
    SequenceKey sequenceKey = new SequenceKey(tenantId, schemaName, sequenceName, nSequenceSaltBuckets);
    Sequence newSequences = new Sequence(sequenceKey);
    Sequence sequence = sequenceMap.putIfAbsent(sequenceKey, newSequences);
    if (sequence == null) {
        sequence = newSequences;
    }
    try {
        sequence.getLock().lock();
        // Now that we have the lock we need, create the sequence
        Append append = sequence.createSequence(startWith, incrementBy, cacheSize, timestamp, minValue, maxValue, cycle);
        HTableInterface htable = this.getTable(SchemaUtil.getPhysicalName(
                PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_NAME_BYTES, this.getProps()).getName());
        htable.setAutoFlush(true);
        try {
            Result result = htable.append(append);
            return sequence.createSequence(result, minValue, maxValue, cycle);
        } catch (IOException e) {
            throw ServerUtil.parseServerException(e);
        } finally {
            Closeables.closeQuietly(htable);
        }
    } finally {
        sequence.getLock().unlock();
    }
}
Also used: Append (org.apache.hadoop.hbase.client.Append), SequenceKey (org.apache.phoenix.schema.SequenceKey), Sequence (org.apache.phoenix.schema.Sequence), IOException (java.io.IOException), PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), MetaDataMutationResult (org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult), Result (org.apache.hadoop.hbase.client.Result)
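
The putIfAbsent dance at the top of the method has a more compact JDK 8+ equivalent, shown purely for comparison; it relies on the single-argument Sequence constructor visible above:

Sequence sequence = sequenceMap.computeIfAbsent(sequenceKey, Sequence::new);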

Example 5 with Append

Use of org.apache.hadoop.hbase.client.Append in project hbase by apache.

From class RowResource, method append:

/**
 * Validates the input request parameters, parses columns from CellSetModel,
 * and invokes Append on HTable.
 *
 * @param model instance of CellSetModel
 * @return Response 200 OK, 304 Not modified, 400 Bad request
 */
Response append(final CellSetModel model) {
    Table table = null;
    Append append = null;
    try {
        table = servlet.getTable(tableResource.getName());
        if (model.getRows().size() != 1) {
            servlet.getMetrics().incrementFailedAppendRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                    .entity("Bad request: Number of rows specified is not 1." + CRLF).build();
        }
        RowModel rowModel = model.getRows().get(0);
        byte[] key = rowModel.getKey();
        if (key == null) {
            key = rowspec.getRow();
        }
        if (key == null) {
            servlet.getMetrics().incrementFailedAppendRequests(1);
            return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                    .entity("Bad request: Row key found to be null." + CRLF).build();
        }
        append = new Append(key);
        append.setReturnResults(returnResult);
        int i = 0;
        for (CellModel cell : rowModel.getCells()) {
            byte[] col = cell.getColumn();
            if (col == null) {
                try {
                    col = rowspec.getColumns()[i++];
                } catch (ArrayIndexOutOfBoundsException e) {
                    col = null;
                }
            }
            if (col == null) {
                servlet.getMetrics().incrementFailedAppendRequests(1);
                return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                        .entity("Bad request: Column found to be null." + CRLF).build();
            }
            byte[][] parts = CellUtil.parseColumn(col);
            if (parts.length != 2) {
                servlet.getMetrics().incrementFailedAppendRequests(1);
                return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
                        .entity("Bad request: Column incorrectly specified." + CRLF).build();
            }
            append.addColumn(parts[0], parts[1], cell.getValue());
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("APPEND " + append.toString());
        }
        Result result = table.append(append);
        if (returnResult) {
            if (result.isEmpty()) {
                servlet.getMetrics().incrementFailedAppendRequests(1);
                return Response.status(Response.Status.NOT_MODIFIED).type(MIMETYPE_TEXT)
                        .entity("Append return empty." + CRLF).build();
            }
            CellSetModel rModel = new CellSetModel();
            RowModel rRowModel = new RowModel(result.getRow());
            for (Cell cell : result.listCells()) {
                rRowModel.addCell(new CellModel(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell),
                        cell.getTimestamp(), CellUtil.cloneValue(cell)));
            }
            rModel.addRow(rRowModel);
            servlet.getMetrics().incrementSucessfulAppendRequests(1);
            return Response.ok(rModel).build();
        }
        servlet.getMetrics().incrementSucessfulAppendRequests(1);
        return Response.ok().build();
    } catch (Exception e) {
        servlet.getMetrics().incrementFailedAppendRequests(1);
        return processException(e);
    } finally {
        if (table != null) {
            try {
                table.close();
            } catch (IOException ioe) {
                LOG.debug("Exception received while closing the table " + table.getName(), ioe);
            }
        }
    }
}
Also used: Table (org.apache.hadoop.hbase.client.Table), CellSetModel (org.apache.hadoop.hbase.rest.model.CellSetModel), IOException (java.io.IOException), Result (org.apache.hadoop.hbase.client.Result), Append (org.apache.hadoop.hbase.client.Append), RowModel (org.apache.hadoop.hbase.rest.model.RowModel), CellModel (org.apache.hadoop.hbase.rest.model.CellModel), Cell (org.apache.hadoop.hbase.Cell)
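
For reference, a hedged sketch of a request payload this handler accepts: exactly one RowModel, with each cell naming its column in family:qualifier form so CellUtil.parseColumn splits it into two parts. Row, column, and value names below are placeholders:

CellSetModel model = new CellSetModel();
RowModel row = new RowModel(Bytes.toBytes("row1"));                       // single row, as the handler requires
row.addCell(new CellModel(Bytes.toBytes("cf:q"), Bytes.toBytes("tail"))); // column parsed by CellUtil.parseColumn
model.addRow(row);
// Sent to the row endpoint with the check=append query parameter, RowResource dispatches it to append(model).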

Aggregations

Append (org.apache.hadoop.hbase.client.Append): 62
Test (org.junit.Test): 31
Result (org.apache.hadoop.hbase.client.Result): 26
Increment (org.apache.hadoop.hbase.client.Increment): 25
Put (org.apache.hadoop.hbase.client.Put): 23
IOException (java.io.IOException): 17
Get (org.apache.hadoop.hbase.client.Get): 17
Delete (org.apache.hadoop.hbase.client.Delete): 16
Table (org.apache.hadoop.hbase.client.Table): 15
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 10
TableName (org.apache.hadoop.hbase.TableName): 10
RowMutations (org.apache.hadoop.hbase.client.RowMutations): 10
Cell (org.apache.hadoop.hbase.Cell): 9
CheckAndMutateResult (org.apache.hadoop.hbase.client.CheckAndMutateResult): 8
Mutation (org.apache.hadoop.hbase.client.Mutation): 7
ArrayList (java.util.ArrayList): 5
CheckAndMutate (org.apache.hadoop.hbase.client.CheckAndMutate): 5
MutationProto (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto): 5
ByteString (org.apache.hbase.thirdparty.com.google.protobuf.ByteString): 5
List (java.util.List): 4