
Example 61 with DoNotRetryIOException

Use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

The class ProtobufUtil, method toIncrement:

/**
   * Convert a protocol buffer Mutate to an Increment
   *
   * @param proto the protocol buffer Mutate to convert
   * @return the converted client Increment
   * @throws IOException
   */
public static Increment toIncrement(final MutationProto proto, final CellScanner cellScanner) throws IOException {
    MutationType type = proto.getMutateType();
    assert type == MutationType.INCREMENT : type.name();
    byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null;
    Increment increment = null;
    int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    if (cellCount > 0) {
        // The proto has metadata only and the data is separate to be found in the cellScanner.
        if (cellScanner == null) {
            throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + TextFormat.shortDebugString(proto));
        }
        for (int i = 0; i < cellCount; i++) {
            if (!cellScanner.advance()) {
                throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + " no cell returned: " + TextFormat.shortDebugString(proto));
            }
            Cell cell = cellScanner.current();
            if (increment == null) {
                increment = new Increment(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
            }
            increment.add(cell);
        }
    } else {
        increment = new Increment(row);
        for (ColumnValue column : proto.getColumnValueList()) {
            byte[] family = column.getFamily().toByteArray();
            for (QualifierValue qv : column.getQualifierValueList()) {
                byte[] qualifier = qv.getQualifier().toByteArray();
                if (!qv.hasValue()) {
                    throw new DoNotRetryIOException("Missing required field: qualifier value");
                }
                byte[] value = qv.getValue().toByteArray();
                byte[] tags = null;
                if (qv.hasTags()) {
                    tags = qv.getTags().toByteArray();
                }
                increment.add(CellUtil.createCell(row, family, qualifier, qv.getTimestamp(), KeyValue.Type.Put, value, tags));
            }
        }
    }
    if (proto.hasTimeRange()) {
        TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
        increment.setTimeRange(timeRange.getMin(), timeRange.getMax());
    }
    increment.setDurability(toDurability(proto.getDurability()));
    for (NameBytesPair attribute : proto.getAttributeList()) {
        increment.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return increment;
}
Also used : TimeRange(org.apache.hadoop.hbase.io.TimeRange) MutationType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType) NameBytesPair(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Increment(org.apache.hadoop.hbase.client.Increment) QualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) Cell(org.apache.hadoop.hbase.Cell)
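
A usage sketch (not part of the HBase source above) of the round trip that toIncrement is the receiving end of. It assumes ProtobufUtil.toMutation(MutationType, Mutation) as the serializing counterpart; when the proto carries its column values inline (no associated cell count), toIncrement can be called with a null CellScanner.

import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.util.Bytes;

public class IncrementRoundTrip {
    public static void main(String[] args) throws Exception {
        // Build a client-side Increment with one counter column.
        Increment increment = new Increment(Bytes.toBytes("row-1"));
        increment.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("counter"), 1L);
        // Serialize to the wire format (column values inlined), then convert back.
        // Because no associated cell count is set, the DoNotRetryIOException branches are not hit.
        MutationProto proto = ProtobufUtil.toMutation(MutationType.INCREMENT, increment);
        Increment back = ProtobufUtil.toIncrement(proto, null);
        System.out.println(Bytes.toString(back.getRow()));
    }
}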

Example 62 with DoNotRetryIOException

Use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

The class ProtobufUtil, method toPut:

/**
   * Convert a protocol buffer Mutate to a Put.
   *
   * @param proto The protocol buffer MutationProto to convert
   * @param cellScanner If non-null, the Cell data that goes with this proto.
   * @return A client Put.
   * @throws IOException
   */
public static Put toPut(final MutationProto proto, final CellScanner cellScanner) throws IOException {
    // TODO: Server-side at least why do we convert back to the Client types?  Why not just pb it?
    MutationType type = proto.getMutateType();
    assert type == MutationType.PUT : type.name();
    long timestamp = proto.hasTimestamp() ? proto.getTimestamp() : HConstants.LATEST_TIMESTAMP;
    Put put = proto.hasRow() ? new Put(proto.getRow().toByteArray(), timestamp) : null;
    int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    if (cellCount > 0) {
        // The proto has metadata only and the data is separate to be found in the cellScanner.
        if (cellScanner == null) {
            throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + toShortString(proto));
        }
        for (int i = 0; i < cellCount; i++) {
            if (!cellScanner.advance()) {
                throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + " no cell returned: " + toShortString(proto));
            }
            Cell cell = cellScanner.current();
            if (put == null) {
                put = new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), timestamp);
            }
            put.add(cell);
        }
    } else {
        if (put == null) {
            throw new IllegalArgumentException("row cannot be null");
        }
        // The proto has the metadata and the data itself
        for (ColumnValue column : proto.getColumnValueList()) {
            byte[] family = column.getFamily().toByteArray();
            for (QualifierValue qv : column.getQualifierValueList()) {
                if (!qv.hasValue()) {
                    throw new DoNotRetryIOException("Missing required field: qualifier value");
                }
                ByteBuffer qualifier = qv.hasQualifier() ? qv.getQualifier().asReadOnlyByteBuffer() : null;
                ByteBuffer value = qv.hasValue() ? qv.getValue().asReadOnlyByteBuffer() : null;
                long ts = timestamp;
                if (qv.hasTimestamp()) {
                    ts = qv.getTimestamp();
                }
                byte[] allTagsBytes;
                if (qv.hasTags()) {
                    allTagsBytes = qv.getTags().toByteArray();
                    if (qv.hasDeleteType()) {
                        byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null;
                        put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts, fromDeleteType(qv.getDeleteType()), null, allTagsBytes));
                    } else {
                        List<Tag> tags = TagUtil.asList(allTagsBytes, 0, (short) allTagsBytes.length);
                        Tag[] tagsArray = new Tag[tags.size()];
                        put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray));
                    }
                } else {
                    if (qv.hasDeleteType()) {
                        byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null;
                        put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts, fromDeleteType(qv.getDeleteType())));
                    } else {
                        put.addImmutable(family, qualifier, ts, value);
                    }
                }
            }
        }
    }
    put.setDurability(toDurability(proto.getDurability()));
    for (NameBytesPair attribute : proto.getAttributeList()) {
        put.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return put;
}
Also used : MutationType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType) KeyValue(org.apache.hadoop.hbase.KeyValue) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) QualifierValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ByteBuffer(java.nio.ByteBuffer) Put(org.apache.hadoop.hbase.client.Put) NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) ColumnValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) Tag(org.apache.hadoop.hbase.Tag) Cell(org.apache.hadoop.hbase.Cell) ByteBufferCell(org.apache.hadoop.hbase.ByteBufferCell)
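
A usage sketch (assumed usage, not HBase source) of the metadata-only path that the DoNotRetryIOException checks above guard. It assumes ProtobufUtil.toMutationNoData(MutationType, Mutation) as the counterpart that emits a proto carrying only an associated cell count, with the cells themselves travelling in a separate CellScanner, here taken straight from the Put.

import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.util.Bytes;

public class PutMetadataOnlyRoundTrip {
    public static void main(String[] args) throws Exception {
        Put put = new Put(Bytes.toBytes("row-1"));
        put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
        // Metadata-only proto: only the cell count is recorded, the data stays out of the proto.
        MutationProto proto = ProtobufUtil.toMutationNoData(MutationType.PUT, put);
        CellScanner cells = put.cellScanner();
        // Passing the matching CellScanner succeeds; passing null here would trigger the
        // "Cell count of N but no cellScanner" DoNotRetryIOException shown above.
        Put back = ProtobufUtil.toPut(proto, cells);
        System.out.println(back.size());
    }
}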

Example 63 with DoNotRetryIOException

Use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

The class TableNamespaceManager, method getMaxRegions:

public static long getMaxRegions(NamespaceDescriptor ns) throws IOException {
    String value = ns.getConfigurationValue(KEY_MAX_REGIONS);
    long maxRegions = 0;
    if (StringUtils.isNotEmpty(value)) {
        try {
            maxRegions = Long.parseLong(value);
        } catch (NumberFormatException exp) {
            throw new DoNotRetryIOException("NumberFormatException while getting max regions.", exp);
        }
    } else {
        // The property is not set, so assume it's the max long value.
        maxRegions = Long.MAX_VALUE;
    }
    return maxRegions;
}
Also used : DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException)
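
A usage sketch of how getMaxRegions is fed. The literal key "hbase.namespace.quota.maxregions" is an assumption here (the method above only references it as KEY_MAX_REGIONS); when the key is absent, the quota falls back to Long.MAX_VALUE as shown above.

import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.master.TableNamespaceManager;

public class NamespaceRegionQuota {
    public static void main(String[] args) throws Exception {
        NamespaceDescriptor ns = NamespaceDescriptor.create("ns1")
                // Key name assumed; the snippet only shows the KEY_MAX_REGIONS constant.
                .addConfiguration("hbase.namespace.quota.maxregions", "10")
                .build();
        System.out.println(TableNamespaceManager.getMaxRegions(ns)); // prints 10
    }
}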

Example 64 with DoNotRetryIOException

Use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

The class TableNamespaceManager, method getMaxTables:

public static long getMaxTables(NamespaceDescriptor ns) throws IOException {
    String value = ns.getConfigurationValue(KEY_MAX_TABLES);
    long maxTables = 0;
    if (StringUtils.isNotEmpty(value)) {
        try {
            maxTables = Long.parseLong(value);
        } catch (NumberFormatException exp) {
            throw new DoNotRetryIOException("NumberFormatException while getting max tables.", exp);
        }
    } else {
        // The property is not set, so assume it's the max long value.
        maxTables = Long.MAX_VALUE;
    }
    return maxTables;
}
Also used : DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException)
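
A usage sketch of the failure path: a value that does not parse as a long makes getMaxTables throw DoNotRetryIOException, so a caller fails fast instead of retrying. The literal key "hbase.namespace.quota.maxtables" is an assumption (the method above only references KEY_MAX_TABLES).

import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.master.TableNamespaceManager;

public class NamespaceTableQuota {
    public static void main(String[] args) throws Exception {
        NamespaceDescriptor ns = NamespaceDescriptor.create("ns1")
                // Key name assumed; a non-numeric value exercises the NumberFormatException branch.
                .addConfiguration("hbase.namespace.quota.maxtables", "not-a-number")
                .build();
        try {
            TableNamespaceManager.getMaxTables(ns);
        } catch (DoNotRetryIOException e) {
            System.out.println("quota value rejected: " + e.getMessage());
        }
    }
}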

Example 65 with DoNotRetryIOException

Use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

The class AsyncBatchRpcRetryingCaller, method onComplete:

private void onComplete(Map<byte[], RegionRequest> actionsByRegion, int tries, ServerName serverName, MultiResponse resp) {
    List<Action> failedActions = new ArrayList<>();
    actionsByRegion.forEach((rn, regionReq) -> {
        RegionResult regionResult = resp.getResults().get(rn);
        if (regionResult != null) {
            regionReq.actions.forEach(action -> onComplete(action, regionReq, tries, serverName, regionResult, failedActions));
        } else {
            Throwable t = resp.getException(rn);
            Throwable error;
            if (t == null) {
                LOG.error("Server sent us neither results nor exceptions for " + Bytes.toStringBinary(rn));
                error = new RuntimeException("Invalid response");
            } else {
                error = translateException(t);
                logException(tries, () -> Stream.of(regionReq), error, serverName);
                conn.getLocator().updateCachedLocation(regionReq.loc, error);
                if (error instanceof DoNotRetryIOException || tries >= maxAttempts) {
                    failAll(regionReq.actions.stream(), tries, error, serverName);
                    return;
                }
                addError(regionReq.actions, error, serverName);
                failedActions.addAll(regionReq.actions);
            }
        }
    });
    if (!failedActions.isEmpty()) {
        tryResubmit(failedActions.stream(), tries);
    }
}
Also used : RegionResult(org.apache.hadoop.hbase.client.MultiResponse.RegionResult) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) ArrayList(java.util.ArrayList)
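
A minimal sketch (not HBase source) of the retry decision embedded in onComplete above: a DoNotRetryIOException, or exhausting the configured attempts, fails the actions permanently, while any other error leaves them eligible for resubmission via tryResubmit.

import org.apache.hadoop.hbase.DoNotRetryIOException;

final class RetryDecision {
    // Mirrors the condition "error instanceof DoNotRetryIOException || tries >= maxAttempts"
    // used by the caller above to pick between failAll(...) and collecting failedActions.
    static boolean shouldFailPermanently(Throwable error, int tries, int maxAttempts) {
        return error instanceof DoNotRetryIOException || tries >= maxAttempts;
    }
}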

Aggregations

DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 77 usages
IOException (java.io.IOException): 28 usages
Cell (org.apache.hadoop.hbase.Cell): 18 usages
ArrayList (java.util.ArrayList): 12 usages
ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException): 12 usages
MutationType (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType): 12 usages
TableName (org.apache.hadoop.hbase.TableName): 11 usages
InterruptedIOException (java.io.InterruptedIOException): 10 usages
HBaseIOException (org.apache.hadoop.hbase.HBaseIOException): 10 usages
Delete (org.apache.hadoop.hbase.client.Delete): 10 usages
Put (org.apache.hadoop.hbase.client.Put): 10 usages
Test (org.junit.Test): 10 usages
AccessDeniedException (org.apache.hadoop.hbase.security.AccessDeniedException): 9 usages
User (org.apache.hadoop.hbase.security.User): 8 usages
Mutation (org.apache.hadoop.hbase.client.Mutation): 7 usages
ByteString (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString): 7 usages
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 6 usages
NameBytesPair (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair): 6 usages
ByteBufferCell (org.apache.hadoop.hbase.ByteBufferCell): 5 usages
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 5 usages