Example 76 with DeserializationException

Use of org.apache.hadoop.hbase.exceptions.DeserializationException in project hbase by apache.

Class ProtobufUtil, method parseMetaRegionStateFrom.

/**
 * Get the Meta region state from the passed data bytes. Can handle both old and new style
 * server names.
 * @param data protobuf serialized data with meta server name.
 * @param replicaId replica ID for this region
 * @return RegionState instance corresponding to the serialized data.
 * @throws DeserializationException if the data is invalid.
 */
public static RegionState parseMetaRegionStateFrom(final byte[] data, int replicaId) throws DeserializationException {
    RegionState.State state = RegionState.State.OPEN;
    ServerName serverName;
    if (data != null && data.length > 0 && ProtobufUtil.isPBMagicPrefix(data)) {
        try {
            int prefixLen = ProtobufUtil.lengthOfPBMagic();
            ZooKeeperProtos.MetaRegionServer rl = ZooKeeperProtos.MetaRegionServer.parser().parseFrom(data, prefixLen, data.length - prefixLen);
            if (rl.hasState()) {
                state = RegionState.State.convert(rl.getState());
            }
            HBaseProtos.ServerName sn = rl.getServer();
            serverName = ServerName.valueOf(sn.getHostName(), sn.getPort(), sn.getStartCode());
        } catch (InvalidProtocolBufferException e) {
            throw new DeserializationException("Unable to parse meta region location");
        }
    } else {
        // old style of meta region location?
        serverName = parseServerNameFrom(data);
    }
    if (serverName == null) {
        state = RegionState.State.OFFLINE;
    }
    return new RegionState(RegionReplicaUtil.getRegionInfoForReplica(RegionInfoBuilder.FIRST_META_REGIONINFO, replicaId), state, serverName);
}
Also used : RegionState(org.apache.hadoop.hbase.master.RegionState) ServerName(org.apache.hadoop.hbase.ServerName) InvalidProtocolBufferException(org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException) ZooKeeperProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos) HBaseProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)
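
A minimal usage sketch (not part of the HBase code base) showing the two non-protobuf inputs this parser accepts; the host name, port, and replica ID are invented, and it assumes the shaded ProtobufUtil, RegionState, and Bytes classes listed above are on the classpath.

import org.apache.hadoop.hbase.master.RegionState;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class MetaRegionStateParseSketch {
    public static void main(String[] args) throws Exception {
        // Old-style znode content: plain "host:port" bytes, no PBUF magic prefix.
        byte[] legacy = Bytes.toBytes("rs1.example.org:16020");
        RegionState parsed = ProtobufUtil.parseMetaRegionStateFrom(legacy, 0);
        System.out.println(parsed.getServerName()); // rs1.example.org,16020,-1
        System.out.println(parsed.getState());      // OPEN (the default once a server name is present)

        // Null or empty data yields no server name, so the region is reported OFFLINE.
        RegionState offline = ProtobufUtil.parseMetaRegionStateFrom(null, 0);
        System.out.println(offline.getState());     // OFFLINE
    }
}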

Example 77 with DeserializationException

Use of org.apache.hadoop.hbase.exceptions.DeserializationException in project hbase by apache.

Class ProtobufUtil, method parseServerNameFrom.

/**
 * Get a ServerName from the passed-in data bytes.
 * @param data Data with a serialized server name in it. Can handle the old-style
 * server name where it was just host and port. Also works with data that begins
 * with the pb 'PBUF' magic prefix followed by a protobuf message containing a
 * serialized {@link ServerName}.
 * @return null if <code>data</code> is null, otherwise a ServerName instance
 * parsed from the passed data.
 * @throws DeserializationException if the data cannot be parsed
 */
public static ServerName parseServerNameFrom(final byte[] data) throws DeserializationException {
    if (data == null || data.length <= 0)
        return null;
    if (ProtobufMagic.isPBMagicPrefix(data)) {
        int prefixLen = ProtobufMagic.lengthOfPBMagic();
        try {
            ZooKeeperProtos.Master rss = ZooKeeperProtos.Master.PARSER.parseFrom(data, prefixLen, data.length - prefixLen);
            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName sn = rss.getMaster();
            return ServerName.valueOf(sn.getHostName(), sn.getPort(), sn.getStartCode());
        } catch (/*InvalidProtocolBufferException*/ IOException e) {
            // Fail fast if the protobuf payload cannot be parsed.
            throw new DeserializationException(e);
        }
    }
    // The string could be old style -- pre HBASE-1502 -- which was
    // hostname and port separated by a colon rather than hostname, port and
    // start code delimited by a ','.
    String str = Bytes.toString(data);
    int index = str.indexOf(ServerName.SERVERNAME_SEPARATOR);
    if (index != -1) {
        // Presume it is a ServerName serialized with versioned bytes.
        return ServerName.parseVersionedServerName(data);
    }
    // Presume it is in hostname:port format.
    String hostname = Addressing.parseHostname(str);
    int port = Addressing.parsePort(str);
    return ServerName.valueOf(hostname, port, -1L);
}
Also used : IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) HBaseIOException(org.apache.hadoop.hbase.HBaseIOException) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) ZooKeeperProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos) HBaseProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)
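
A small, hypothetical driver for parseServerNameFrom() illustrating the pre-HBASE-1502 "hostname:port" fallback and the null pass-through; the host name and port are invented, and the shaded ProtobufUtil shown above is assumed to be importable.

import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class ParseServerNameSketch {
    public static void main(String[] args) throws Exception {
        // Old "hostname:port" layout: no start code, so -1 is filled in.
        ServerName sn = ProtobufUtil.parseServerNameFrom(Bytes.toBytes("rs1.example.org:16020"));
        System.out.println(sn); // rs1.example.org,16020,-1

        // Null input is passed through as null rather than raising an exception.
        System.out.println(ProtobufUtil.parseServerNameFrom(null)); // null
    }
}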

Example 78 with DeserializationException

Use of org.apache.hadoop.hbase.exceptions.DeserializationException in project hbase by apache.

Class TestGet, method testDynamicFilter.

@Test
public void testDynamicFilter() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    String localPath = conf.get("hbase.local.dir") + File.separator + "jars" + File.separator;
    File jarFile = new File(localPath, "MockFilter.jar");
    jarFile.delete();
    assertFalse("Should be deleted: " + jarFile.getPath(), jarFile.exists());
    ClientProtos.Get getProto1 = ClientProtos.Get.parseFrom(Base64.getDecoder().decode(PB_GET));
    ClientProtos.Get getProto2 = ClientProtos.Get.parseFrom(Base64.getDecoder().decode(PB_GET_WITH_FILTER_LIST));
    try {
        ProtobufUtil.toGet(getProto1);
        fail("Should not be able to load the filter class");
    } catch (IOException ioe) {
        assertTrue(ioe.getCause() instanceof ClassNotFoundException);
    }
    try {
        ProtobufUtil.toGet(getProto2);
        fail("Should not be able to load the filter class");
    } catch (IOException ioe) {
        assertTrue(ioe.getCause() instanceof InvocationTargetException);
        InvocationTargetException ite = (InvocationTargetException) ioe.getCause();
        assertTrue(ite.getTargetException() instanceof DeserializationException);
    }
    FileOutputStream fos = new FileOutputStream(jarFile);
    fos.write(Base64.getDecoder().decode(MOCK_FILTER_JAR));
    fos.close();
    Get get1 = ProtobufUtil.toGet(getProto1);
    assertEquals("test.MockFilter", get1.getFilter().getClass().getName());
    Get get2 = ProtobufUtil.toGet(getProto2);
    assertTrue(get2.getFilter() instanceof FilterList);
    List<Filter> filters = ((FilterList) get2.getFilter()).getFilters();
    assertEquals(3, filters.size());
    assertEquals("test.MockFilter", filters.get(0).getClass().getName());
    assertEquals("my.MockFilter", filters.get(1).getClass().getName());
    assertTrue(filters.get(2) instanceof KeyOnlyFilter);
}
Also used : KeyOnlyFilter(org.apache.hadoop.hbase.filter.KeyOnlyFilter) Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) FilterList(org.apache.hadoop.hbase.filter.FilterList) IOException(java.io.IOException) InvocationTargetException(java.lang.reflect.InvocationTargetException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException) Filter(org.apache.hadoop.hbase.filter.Filter) KeyOnlyFilter(org.apache.hadoop.hbase.filter.KeyOnlyFilter) FileOutputStream(java.io.FileOutputStream) File(java.io.File) ClientProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos) Test(org.junit.Test)
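
The failure path exercised above relies on the filter-side contract that a Filter class exposes a static parseFrom(byte[]) which may throw DeserializationException; HBase invokes it reflectively, which is why the test sees an InvocationTargetException wrapping the DeserializationException. A hypothetical filter honoring that contract might look like the sketch below (the class name and behavior are invented; it is not the MockFilter used by the test).

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.FilterBase;

public class MyMockFilter extends FilterBase {
    // Invoked via reflection when a serialized Get/Scan names this filter class.
    public static MyMockFilter parseFrom(byte[] pbBytes) throws DeserializationException {
        if (pbBytes == null || pbBytes.length == 0) {
            // This is what surfaces as ite.getTargetException() in the test above.
            throw new DeserializationException("empty filter bytes");
        }
        return new MyMockFilter();
    }

    @Override
    public ReturnCode filterCell(Cell c) {
        return ReturnCode.INCLUDE; // a "null" filter; this sketch only models deserialization
    }

    @Override
    public byte[] toByteArray() {
        return new byte[0]; // nothing to serialize in this sketch
    }
}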

Example 79 with DeserializationException

Use of org.apache.hadoop.hbase.exceptions.DeserializationException in project hbase by apache.

Class RawAsyncHBaseAdmin, method split.

private CompletableFuture<Void> split(final RegionInfo hri, byte[] splitPoint) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    TableName tableName = hri.getTable();
    final SplitTableRegionRequest request;
    try {
        request = RequestConverter.buildSplitTableRegionRequest(hri, splitPoint, ng.getNonceGroup(), ng.newNonce());
    } catch (DeserializationException e) {
        future.completeExceptionally(e);
        return future;
    }
    addListener(this.procedureCall(tableName, request, MasterService.Interface::splitRegion, SplitTableRegionResponse::getProcId, new SplitTableRegionProcedureBiConsumer(tableName)), (ret, err2) -> {
        if (err2 != null) {
            future.completeExceptionally(err2);
        } else {
            future.complete(ret);
        }
    });
    return future;
}
Also used : TableName(org.apache.hadoop.hbase.TableName) CompletableFuture(java.util.concurrent.CompletableFuture) SplitTableRegionRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest) MasterService(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)
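
The interesting part of this example is error handling: a DeserializationException raised while building the request is routed into the returned CompletableFuture instead of being thrown to the caller. Below is a generic, JDK-only sketch of that pattern; it is not HBase code, and the request/builder names are invented.

import java.util.concurrent.CompletableFuture;

public class FailFutureSketch {
    static class BuildException extends Exception {
        BuildException(String msg) { super(msg); }
    }

    // Stand-in for RequestConverter.buildSplitTableRegionRequest(...).
    static String buildRequest(boolean ok) throws BuildException {
        if (!ok) {
            throw new BuildException("could not build request");
        }
        return "request";
    }

    static CompletableFuture<String> submit(boolean ok) {
        CompletableFuture<String> future = new CompletableFuture<>();
        String request;
        try {
            request = buildRequest(ok);
        } catch (BuildException e) {
            // Same shape as split() above: fail the future and return early.
            future.completeExceptionally(e);
            return future;
        }
        future.complete(request); // stand-in for the asynchronous RPC completing
        return future;
    }

    public static void main(String[] args) {
        submit(false).whenComplete((ret, err) ->
            System.out.println(err != null ? "failed: " + err : "ok: " + ret));
    }
}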

Example 80 with DeserializationException

Use of org.apache.hadoop.hbase.exceptions.DeserializationException in project hbase by apache.

Class ZKUtil, method parseWALPositionFrom.

/**
 * @param bytes - Content of a WAL position znode.
 * @return long - The current WAL position.
 * @throws DeserializationException if the WAL position cannot be parsed
 */
public static long parseWALPositionFrom(final byte[] bytes) throws DeserializationException {
    if (bytes == null) {
        throw new DeserializationException("Unable to parse null WAL position.");
    }
    if (ProtobufUtil.isPBMagicPrefix(bytes)) {
        int pblen = ProtobufUtil.lengthOfPBMagic();
        ReplicationProtos.ReplicationHLogPosition.Builder builder = ReplicationProtos.ReplicationHLogPosition.newBuilder();
        ReplicationProtos.ReplicationHLogPosition position;
        try {
            ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
            position = builder.build();
        } catch (IOException e) {
            throw new DeserializationException(e);
        }
        return position.getPosition();
    } else {
        if (bytes.length > 0) {
            return Bytes.toLong(bytes);
        }
        return 0;
    }
}
Also used : ReplicationProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos) IOException(java.io.IOException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)
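
A minimal sketch (illustrative only, not from the HBase tests) exercising the non-protobuf fallbacks of parseWALPositionFrom(); it assumes ZKUtil lives in org.apache.hadoop.hbase.zookeeper and that Bytes is importable as in the example above.

import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;

public class WalPositionParseSketch {
    public static void main(String[] args) throws DeserializationException {
        // Legacy znode content: a raw 8-byte long with no PBUF prefix.
        System.out.println(ZKUtil.parseWALPositionFrom(Bytes.toBytes(1024L))); // 1024

        // Empty (but non-null) content is treated as position 0.
        System.out.println(ZKUtil.parseWALPositionFrom(new byte[0])); // 0

        // Null content is rejected outright.
        try {
            ZKUtil.parseWALPositionFrom(null);
        } catch (DeserializationException expected) {
            System.out.println("null rejected: " + expected.getMessage());
        }
    }
}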

Aggregations

DeserializationException (org.apache.hadoop.hbase.exceptions.DeserializationException): 83
IOException (java.io.IOException): 57
InvalidProtocolBufferException (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException): 15
FilterProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos): 13
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 12
KeeperException (org.apache.zookeeper.KeeperException): 12
ArrayList (java.util.ArrayList): 11
ServerName (org.apache.hadoop.hbase.ServerName): 9
Cell (org.apache.hadoop.hbase.Cell): 8
CompareOperator (org.apache.hadoop.hbase.CompareOperator): 8
InterruptedIOException (java.io.InterruptedIOException): 7
CellVisibility (org.apache.hadoop.hbase.security.visibility.CellVisibility): 7
ByteArrayInputStream (java.io.ByteArrayInputStream): 6
Tag (org.apache.hadoop.hbase.Tag): 6
HBaseProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos): 6
Map (java.util.Map): 5
HBaseIOException (org.apache.hadoop.hbase.HBaseIOException): 5
TableName (org.apache.hadoop.hbase.TableName): 5
FilterList (org.apache.hadoop.hbase.filter.FilterList): 5
List (java.util.List): 4