Example 56 with DoNotRetryIOException

use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

the class RequestConverter method buildNoDataRegionAction.

/**
   * Create a protocol buffer multi request with NO data for a list of actions (the data is
   * carried outside of protobuf). The request notes only attributes such as whether to write
   * the WAL, and its presence in the protobuf serves as a placeholder for the data that is
   * coming along otherwise. Note that Get is different: it contains no 'data' and is always
   * carried fully by protobuf. We return references to the data by adding them to the passed-in
   * <code>cells</code> param.
   *
   * <p>Propagates each Action's original index.
   *
   * @param regionName the region the actions are directed at
   * @param actions the actions to encode
   * @param cells Place to stuff references to actual data.
   * @return a multi request that does not carry any data.
   * @throws IOException
   */
public static RegionAction.Builder buildNoDataRegionAction(final byte[] regionName,
        final Iterable<Action> actions, final List<CellScannable> cells,
        final RegionAction.Builder regionActionBuilder, final ClientProtos.Action.Builder actionBuilder,
        final MutationProto.Builder mutationBuilder) throws IOException {
    RegionAction.Builder builder = getRegionActionBuilderWithRegion(regionActionBuilder, regionName);
    ClientProtos.CoprocessorServiceCall.Builder cpBuilder = null;
    for (Action action : actions) {
        Row row = action.getAction();
        actionBuilder.clear();
        actionBuilder.setIndex(action.getOriginalIndex());
        mutationBuilder.clear();
        if (row instanceof Get) {
            Get g = (Get) row;
            builder.addAction(actionBuilder.setGet(ProtobufUtil.toGet(g)));
        } else if (row instanceof Put) {
            Put p = (Put) row;
            cells.add(p);
            builder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutationNoData(MutationType.PUT, p, mutationBuilder)));
        } else if (row instanceof Delete) {
            Delete d = (Delete) row;
            int size = d.size();
            // If the Delete carries Cells, put metadata only in the pb and send the kvs
            // along the side in 'cells'; a coordinates-only Delete is serialized fully
            // in the pb instead.
            if (size > 0) {
                cells.add(d);
                builder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutationNoData(MutationType.DELETE, d, mutationBuilder)));
            } else {
                builder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutation(MutationType.DELETE, d, mutationBuilder)));
            }
        } else if (row instanceof Append) {
            Append a = (Append) row;
            cells.add(a);
            builder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutationNoData(MutationType.APPEND, a, mutationBuilder, action.getNonce())));
        } else if (row instanceof Increment) {
            Increment i = (Increment) row;
            cells.add(i);
            builder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutationNoData(MutationType.INCREMENT, i, mutationBuilder, action.getNonce())));
        } else if (row instanceof RegionCoprocessorServiceExec) {
            RegionCoprocessorServiceExec exec = (RegionCoprocessorServiceExec) row;
            // DUMB COPY!!! FIX!!! Done to copy from c.g.p.ByteString to shaded ByteString.
            org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value = org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations.unsafeWrap(exec.getRequest().toByteArray());
            if (cpBuilder == null) {
                cpBuilder = ClientProtos.CoprocessorServiceCall.newBuilder();
            } else {
                cpBuilder.clear();
            }
            builder.addAction(actionBuilder.setServiceCall(cpBuilder
                    .setRow(UnsafeByteOperations.unsafeWrap(exec.getRow()))
                    .setServiceName(exec.getMethod().getService().getFullName())
                    .setMethodName(exec.getMethod().getName())
                    .setRequest(value)));
        } else if (row instanceof RowMutations) {
            throw new UnsupportedOperationException("No RowMutations in multi calls; use mutateRow");
        } else {
            throw new DoNotRetryIOException("Multi doesn't support " + row.getClass().getName());
        }
    }
    return builder;
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Action(org.apache.hadoop.hbase.client.Action) RegionAction(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Put(org.apache.hadoop.hbase.client.Put) RegionCoprocessorServiceExec(org.apache.hadoop.hbase.client.RegionCoprocessorServiceExec) RowMutations(org.apache.hadoop.hbase.client.RowMutations) Append(org.apache.hadoop.hbase.client.Append) Get(org.apache.hadoop.hbase.client.Get) Increment(org.apache.hadoop.hbase.client.Increment) Row(org.apache.hadoop.hbase.client.Row)
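
For context, here is a minimal caller sketch. It is not from the hbase sources: it assumes an HBase 2.x-era client where Action is non-generic and RequestConverter lives under the shaded protobuf package, and the region name is hypothetical. One Put is registered; the returned RegionAction carries metadata only, while the actual data comes back through the cells list.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.client.Action;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
import org.apache.hadoop.hbase.util.Bytes;

public class NoDataRegionActionSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical region name, for illustration only.
        byte[] regionName = Bytes.toBytes("someRegionName");
        Put put = new Put(Bytes.toBytes("row1"))
                .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
        List<Action> actions = new ArrayList<>();
        actions.add(new Action(put, 0));  // the original index 0 is propagated into the pb

        List<CellScannable> cells = new ArrayList<>();  // receives references to the Put's data
        RegionAction.Builder regionAction = RequestConverter.buildNoDataRegionAction(
                regionName, actions, cells,
                RegionAction.newBuilder(),
                ClientProtos.Action.newBuilder(),
                MutationProto.newBuilder());

        // The pb carries metadata only; the KeyValues travel out-of-band via 'cells'.
        System.out.println("actions in pb: " + regionAction.getActionCount()
                + ", out-of-band payloads: " + cells.size());
    }
}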

Example 57 with DoNotRetryIOException

use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

the class ProtobufUtil method toDelete.

/**
   * Convert a protocol buffer Mutate to a Delete
   *
   * @param proto the protocol buffer Mutate to convert
   * @param cellScanner if non-null, the data that goes with this delete.
   * @return the converted client Delete
   * @throws IOException if the proto advertises associated cells that the cellScanner cannot supply
   */
public static Delete toDelete(final MutationProto proto, final CellScanner cellScanner) throws IOException {
    MutationType type = proto.getMutateType();
    assert type == MutationType.DELETE : type.name();
    long timestamp = proto.hasTimestamp() ? proto.getTimestamp() : HConstants.LATEST_TIMESTAMP;
    Delete delete = proto.hasRow() ? new Delete(proto.getRow().toByteArray(), timestamp) : null;
    int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    if (cellCount > 0) {
        // The proto has metadata only and the data is separate to be found in the cellScanner.
        if (cellScanner == null) {
            // TextFormat should be fine for a Delete since it carries no data, just coordinates.
            throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + TextFormat.shortDebugString(proto));
        }
        for (int i = 0; i < cellCount; i++) {
            if (!cellScanner.advance()) {
                // TextFormat should be fine for a Delete since it carries no data, just coordinates.
                throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + " no cell returned: " + TextFormat.shortDebugString(proto));
            }
            Cell cell = cellScanner.current();
            if (delete == null) {
                delete = new Delete(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), timestamp);
            }
            delete.addDeleteMarker(cell);
        }
    } else {
        if (delete == null) {
            throw new IllegalArgumentException("row cannot be null");
        }
        for (ColumnValue column : proto.getColumnValueList()) {
            byte[] family = column.getFamily().toByteArray();
            for (QualifierValue qv : column.getQualifierValueList()) {
                DeleteType deleteType = qv.getDeleteType();
                byte[] qualifier = null;
                if (qv.hasQualifier()) {
                    qualifier = qv.getQualifier().toByteArray();
                }
                long ts = HConstants.LATEST_TIMESTAMP;
                if (qv.hasTimestamp()) {
                    ts = qv.getTimestamp();
                }
                if (deleteType == DeleteType.DELETE_ONE_VERSION) {
                    delete.addColumn(family, qualifier, ts);
                } else if (deleteType == DeleteType.DELETE_MULTIPLE_VERSIONS) {
                    delete.addColumns(family, qualifier, ts);
                } else if (deleteType == DeleteType.DELETE_FAMILY_VERSION) {
                    delete.addFamilyVersion(family, ts);
                } else {
                    delete.addFamily(family, ts);
                }
            }
        }
    }
    delete.setDurability(toDurability(proto.getDurability()));
    for (NameBytesPair attribute : proto.getAttributeList()) {
        delete.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return delete;
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) MutationType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType) NameBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) QualifierValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ColumnValue(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue) DeleteType(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType) Cell(org.apache.hadoop.hbase.Cell) ByteBufferCell(org.apache.hadoop.hbase.ByteBufferCell)
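
A round-trip sketch for the ColumnValue branch above. It is not from the hbase sources and assumes the shaded ProtobufUtil of an HBase 2.x-era client: toMutation serializes the Delete's coordinates inline in the pb, so toDelete(proto, null) can rebuild it without a cellScanner.

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteRoundTripSketch {
    public static void main(String[] args) throws Exception {
        Delete original = new Delete(Bytes.toBytes("row1"));
        original.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"));  // one version: DELETE_ONE_VERSION
        original.addFamily(Bytes.toBytes("g"));                      // whole family

        MutationProto proto = ProtobufUtil.toMutation(MutationType.DELETE, original);
        Delete restored = ProtobufUtil.toDelete(proto, null);  // no cellScanner: data is in the pb

        System.out.println("families restored: " + restored.getFamilyCellMap().keySet().size());  // 2
    }
}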

Example 58 with DoNotRetryIOException

use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

the class ProtobufUtil method getRegionEncodedName.

/**
   * Find the HRegion encoded name based on a region specifier
   *
   * @param regionSpecifier the region specifier
   * @return the corresponding region's encoded name
   * @throws DoNotRetryIOException if the specifier type is unsupported
   */
public static String getRegionEncodedName(final RegionSpecifier regionSpecifier) throws DoNotRetryIOException {
    ByteString value = regionSpecifier.getValue();
    RegionSpecifierType type = regionSpecifier.getType();
    switch(type) {
        case REGION_NAME:
            return HRegionInfo.encodeRegionName(value.toByteArray());
        case ENCODED_REGION_NAME:
            return value.toStringUtf8();
        default:
            throw new DoNotRetryIOException("Unsupported region specifier type: " + type);
    }
}
Also used : RegionSpecifierType(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) ByteString(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString)
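
A small usage sketch, again hypothetical rather than from the sources, assuming the same shaded classes. An ENCODED_REGION_NAME specifier is returned verbatim as UTF-8, whereas a REGION_NAME specifier would be hashed through HRegionInfo.encodeRegionName first.

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;

public class RegionSpecifierSketch {
    public static void main(String[] args) throws Exception {
        RegionSpecifier spec = RegionSpecifier.newBuilder()
                .setType(RegionSpecifierType.ENCODED_REGION_NAME)
                .setValue(ByteString.copyFromUtf8("1588230740"))  // hbase:meta's well-known encoded name
                .build();
        // Prints the encoded name back unchanged; an unsupported specifier type
        // would throw DoNotRetryIOException instead.
        System.out.println(ProtobufUtil.getRegionEncodedName(spec));
    }
}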

Example 59 with DoNotRetryIOException

use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

the class ProtobufUtil method toGet.

/**
   * Convert a protocol buffer Mutate (INCREMENT or APPEND) to a Get.
   * @param proto the protocol buffer Mutate to convert.
   * @param cellScanner if non-null, the data that goes with this mutation.
   * @return the converted client Get.
   * @throws IOException
   */
public static Get toGet(final MutationProto proto, final CellScanner cellScanner) throws IOException {
    MutationType type = proto.getMutateType();
    assert type == MutationType.INCREMENT || type == MutationType.APPEND : type.name();
    byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null;
    Get get = null;
    int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    if (cellCount > 0) {
        // The proto has metadata only and the data is separate to be found in the cellScanner.
        if (cellScanner == null) {
            throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + TextFormat.shortDebugString(proto));
        }
        for (int i = 0; i < cellCount; i++) {
            if (!cellScanner.advance()) {
                throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + " no cell returned: " + TextFormat.shortDebugString(proto));
            }
            Cell cell = cellScanner.current();
            if (get == null) {
                get = new Get(Bytes.copy(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
            }
            get.addColumn(Bytes.copy(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()), Bytes.copy(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()));
        }
    } else {
        get = new Get(row);
        for (ColumnValue column : proto.getColumnValueList()) {
            byte[] family = column.getFamily().toByteArray();
            for (QualifierValue qv : column.getQualifierValueList()) {
                byte[] qualifier = qv.getQualifier().toByteArray();
                if (!qv.hasValue()) {
                    throw new DoNotRetryIOException("Missing required field: qualifier value");
                }
                get.addColumn(family, qualifier);
            }
        }
    }
    if (proto.hasTimeRange()) {
        TimeRange timeRange = protoToTimeRange(proto.getTimeRange());
        get.setTimeRange(timeRange);
    }
    for (NameBytesPair attribute : proto.getAttributeList()) {
        get.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return get;
}
Also used : TimeRange(org.apache.hadoop.hbase.io.TimeRange) MutationType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType) NameBytesPair(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) Get(org.apache.hadoop.hbase.client.Get) QualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) Cell(org.apache.hadoop.hbase.Cell)
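
Note that Examples 59 and 60 come from the unshaded org.apache.hadoop.hbase.protobuf.ProtobufUtil (see the imports above). A minimal sketch under that assumption: a server-side caller can turn an INCREMENT mutation into a Get in order to read the current values before applying the increment.

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.util.Bytes;

public class MutationToGetSketch {
    public static void main(String[] args) throws Exception {
        Increment inc = new Increment(Bytes.toBytes("counter-row"));
        inc.addColumn(Bytes.toBytes("f"), Bytes.toBytes("hits"), 1L);

        MutationProto proto = ProtobufUtil.toMutation(MutationType.INCREMENT, inc);
        Get get = ProtobufUtil.toGet(proto, null);  // cell data is inline, so no cellScanner

        System.out.println("columns to read back in family 'f': "
                + get.getFamilyMap().get(Bytes.toBytes("f")).size());
    }
}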

Example 60 with DoNotRetryIOException

use of org.apache.hadoop.hbase.DoNotRetryIOException in project hbase by apache.

the class ProtobufUtil method toAppend.

/**
   * Convert a protocol buffer Mutate to an Append
   * @param proto the protocol buffer Mutate to convert
   * @param cellScanner if non-null, the data that goes with this append.
   * @return the converted client Append
   * @throws IOException
   */
public static Append toAppend(final MutationProto proto, final CellScanner cellScanner) throws IOException {
    MutationType type = proto.getMutateType();
    assert type == MutationType.APPEND : type.name();
    byte[] row = proto.hasRow() ? proto.getRow().toByteArray() : null;
    Append append = null;
    int cellCount = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
    if (cellCount > 0) {
        // The proto has metadata only and the data is separate to be found in the cellScanner.
        if (cellScanner == null) {
            throw new DoNotRetryIOException("Cell count of " + cellCount + " but no cellScanner: " + toShortString(proto));
        }
        for (int i = 0; i < cellCount; i++) {
            if (!cellScanner.advance()) {
                throw new DoNotRetryIOException("Cell count of " + cellCount + " but at index " + i + " no cell returned: " + toShortString(proto));
            }
            Cell cell = cellScanner.current();
            if (append == null) {
                append = new Append(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
            }
            append.add(cell);
        }
    } else {
        append = new Append(row);
        for (ColumnValue column : proto.getColumnValueList()) {
            byte[] family = column.getFamily().toByteArray();
            for (QualifierValue qv : column.getQualifierValueList()) {
                byte[] qualifier = qv.getQualifier().toByteArray();
                if (!qv.hasValue()) {
                    throw new DoNotRetryIOException("Missing required field: qualifier value");
                }
                byte[] value = qv.getValue().toByteArray();
                byte[] tags = null;
                if (qv.hasTags()) {
                    tags = qv.getTags().toByteArray();
                }
                append.add(CellUtil.createCell(row, family, qualifier, qv.getTimestamp(), KeyValue.Type.Put, value, tags));
            }
        }
    }
    append.setDurability(toDurability(proto.getDurability()));
    for (NameBytesPair attribute : proto.getAttributeList()) {
        append.setAttribute(attribute.getName(), attribute.getValue().toByteArray());
    }
    return append;
}
Also used : MutationType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType) Append(org.apache.hadoop.hbase.client.Append) NameBytesPair(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) QualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) ColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) Cell(org.apache.hadoop.hbase.Cell)
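
A companion round-trip sketch for toAppend, with the same caveats (unshaded ProtobufUtil, and an HBase 1.x-era client where Append.add(family, qualifier, value) is available). Serializing with toMutation keeps the cell data inline, so toAppend(proto, null) takes the ColumnValue branch rather than the cellScanner branch.

import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.util.Bytes;

public class AppendRoundTripSketch {
    public static void main(String[] args) throws Exception {
        Append original = new Append(Bytes.toBytes("row1"));
        original.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("-suffix"));

        MutationProto proto = ProtobufUtil.toMutation(MutationType.APPEND, original);
        Append restored = ProtobufUtil.toAppend(proto, null);  // data is inline in the pb

        System.out.println("cells restored: " + restored.size());  // 1
    }
}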

Aggregations

DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 77 uses
IOException (java.io.IOException): 28
Cell (org.apache.hadoop.hbase.Cell): 18
ArrayList (java.util.ArrayList): 12
ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException): 12
MutationType (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType): 12
TableName (org.apache.hadoop.hbase.TableName): 11
InterruptedIOException (java.io.InterruptedIOException): 10
HBaseIOException (org.apache.hadoop.hbase.HBaseIOException): 10
Delete (org.apache.hadoop.hbase.client.Delete): 10
Put (org.apache.hadoop.hbase.client.Put): 10
Test (org.junit.Test): 10
AccessDeniedException (org.apache.hadoop.hbase.security.AccessDeniedException): 9
User (org.apache.hadoop.hbase.security.User): 8
Mutation (org.apache.hadoop.hbase.client.Mutation): 7
ByteString (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString): 7
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 6
NameBytesPair (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair): 6
ByteBufferCell (org.apache.hadoop.hbase.ByteBufferCell): 5
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 5