Search in sources :

Example 16 with InvalidProtocolBufferException

use of org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException in project hbase by apache.

From the class SingleColumnValueExcludeFilter, method parseFrom:

/**
   * Deserializes a {@link SingleColumnValueExcludeFilter} from its protobuf wire form.
   * @param pbBytes A pb serialized {@link SingleColumnValueExcludeFilter} instance
   * @return An instance of {@link SingleColumnValueExcludeFilter} made from <code>bytes</code>
   * @throws DeserializationException if the bytes cannot be parsed or the comparator cannot be rebuilt
   * @see #toByteArray
   */
public static SingleColumnValueExcludeFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
    final FilterProtos.SingleColumnValueExcludeFilter proto;
    try {
        proto = FilterProtos.SingleColumnValueExcludeFilter.parseFrom(pbBytes);
    } catch (InvalidProtocolBufferException e) {
        throw new DeserializationException(e);
    }
    // The exclude filter embeds its parent SingleColumnValueFilter's state; unpack from there.
    final FilterProtos.SingleColumnValueFilter parent = proto.getSingleColumnValueFilter();
    final CompareOp op = CompareOp.valueOf(parent.getCompareOp().name());
    final ByteArrayComparable cmp;
    try {
        cmp = ProtobufUtil.toComparator(parent.getComparator());
    } catch (IOException ioe) {
        throw new DeserializationException(ioe);
    }
    // Optional protobuf fields map to null when absent.
    final byte[] family = parent.hasColumnFamily() ? parent.getColumnFamily().toByteArray() : null;
    final byte[] qualifier = parent.hasColumnQualifier() ? parent.getColumnQualifier().toByteArray() : null;
    return new SingleColumnValueExcludeFilter(family, qualifier, op, cmp,
        parent.getFilterIfMissing(), parent.getLatestVersionOnly());
}
Also used : InvalidProtocolBufferException(org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException) FilterProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos) CompareOp(org.apache.hadoop.hbase.filter.CompareFilter.CompareOp) IOException(java.io.IOException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)

Example 17 with InvalidProtocolBufferException

use of org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException in project hbase by apache.

From the class FuzzyRowFilter, method parseFrom:

/**
   * Deserializes a {@link FuzzyRowFilter} from its protobuf wire form.
   * @param pbBytes A pb serialized {@link FuzzyRowFilter} instance
   * @return An instance of {@link FuzzyRowFilter} made from <code>bytes</code>
   * @throws DeserializationException if the bytes cannot be parsed
   * @see #toByteArray
   */
public static FuzzyRowFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
    final FilterProtos.FuzzyRowFilter proto;
    try {
        proto = FilterProtos.FuzzyRowFilter.parseFrom(pbBytes);
    } catch (InvalidProtocolBufferException e) {
        throw new DeserializationException(e);
    }
    // Rebuild the (key bytes, key mask) pairs; presize the list to the serialized count.
    final int n = proto.getFuzzyKeysDataCount();
    final ArrayList<Pair<byte[], byte[]>> keys = new ArrayList<>(n);
    for (int idx = 0; idx < n; idx++) {
        final BytesBytesPair entry = proto.getFuzzyKeysData(idx);
        keys.add(new Pair<>(entry.getFirst().toByteArray(), entry.getSecond().toByteArray()));
    }
    return new FuzzyRowFilter(keys);
}
Also used : BytesBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair) InvalidProtocolBufferException(org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException) ArrayList(java.util.ArrayList) FilterProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException) BytesBytesPair(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair) Pair(org.apache.hadoop.hbase.util.Pair)

Example 18 with InvalidProtocolBufferException

use of org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException in project hbase by apache.

From the class SnapshotManifest, method readDataManifest:

/*
   * Read the SnapshotDataManifest file stored under the snapshot working directory.
   * Returns null when the manifest file does not exist; throws
   * CorruptedSnapshotException when the file is present but unparseable.
   */
private SnapshotDataManifest readDataManifest() throws IOException {
    // try-with-resources closes the stream on every path, replacing the
    // original manual null-check/finally idiom.
    try (FSDataInputStream in = fs.open(new Path(workingDir, DATA_MANIFEST_NAME))) {
        CodedInputStream cin = CodedInputStream.newInstance(in);
        // Apply the configured limit so large manifests are not rejected by
        // protobuf's default parse size cap.
        cin.setSizeLimit(manifestSizeLimit);
        return SnapshotDataManifest.parseFrom(cin);
    } catch (FileNotFoundException e) {
        // Missing manifest is a legitimate state, not an error.
        return null;
    } catch (InvalidProtocolBufferException e) {
        throw new CorruptedSnapshotException("unable to parse data manifest " + e.getMessage(), e);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) CodedInputStream(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream) FileNotFoundException(java.io.FileNotFoundException) InvalidProtocolBufferException(org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream)

Example 19 with InvalidProtocolBufferException

use of org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException in project hbase by apache.

From the class SnapshotManifestV2, method loadRegionManifests:

/**
 * Loads every per-region manifest found under the snapshot directory, parsing
 * them concurrently on the supplied executor.
 *
 * @param conf configuration (unused here but part of the established signature)
 * @param executor executor the parse tasks run on
 * @param fs filesystem holding the snapshot
 * @param snapshotDir directory containing the region manifest files
 * @param desc snapshot descriptor (unused here but part of the established signature)
 * @param manifestSizeLimit protobuf parse size limit applied per manifest
 * @return the parsed region manifests, or null when no manifest files exist
 * @throws IOException if a manifest cannot be read or parsed, or the wait is interrupted
 */
static List<SnapshotRegionManifest> loadRegionManifests(final Configuration conf, final Executor executor, final FileSystem fs, final Path snapshotDir, final SnapshotDescription desc, final int manifestSizeLimit) throws IOException {
    // Collect only the files carrying the region-manifest name prefix.
    FileStatus[] manifestFiles = FSUtils.listStatus(fs, snapshotDir, new PathFilter() {

        @Override
        public boolean accept(Path path) {
            return path.getName().startsWith(SNAPSHOT_MANIFEST_PREFIX);
        }
    });
    if (manifestFiles == null || manifestFiles.length == 0) {
        return null;
    }
    final ExecutorCompletionService<SnapshotRegionManifest> completionService = new ExecutorCompletionService<>(executor);
    for (final FileStatus st : manifestFiles) {
        completionService.submit(new Callable<SnapshotRegionManifest>() {

            @Override
            public SnapshotRegionManifest call() throws IOException {
                // try-with-resources guarantees the stream closes even if parsing throws.
                try (FSDataInputStream stream = fs.open(st.getPath())) {
                    CodedInputStream cin = CodedInputStream.newInstance(stream);
                    cin.setSizeLimit(manifestSizeLimit);
                    return SnapshotRegionManifest.parseFrom(cin);
                }
            }
        });
    }
    ArrayList<SnapshotRegionManifest> regionsManifest = new ArrayList<>(manifestFiles.length);
    try {
        for (int i = 0; i < manifestFiles.length; ++i) {
            regionsManifest.add(completionService.take().get());
        }
    } catch (InterruptedException e) {
        // Restore the interrupt status before translating to an IOException,
        // and keep the original exception as the cause (the original code lost both).
        Thread.currentThread().interrupt();
        InterruptedIOException iioe = new InterruptedIOException(e.getMessage());
        iioe.initCause(e);
        throw iioe;
    } catch (ExecutionException e) {
        Throwable t = e.getCause();
        if (t instanceof InvalidProtocolBufferException) {
            throw (InvalidProtocolBufferException) t;
        }
        // Use the cause-carrying constructor instead of initCause.
        throw new IOException("ExecutionException", t);
    }
    return regionsManifest;
}
Also used : Path(org.apache.hadoop.fs.Path) InterruptedIOException(java.io.InterruptedIOException) PathFilter(org.apache.hadoop.fs.PathFilter) FileStatus(org.apache.hadoop.fs.FileStatus) CodedInputStream(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream) ArrayList(java.util.ArrayList) InvalidProtocolBufferException(org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException) SnapshotRegionManifest(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest) ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) ExecutionException(java.util.concurrent.ExecutionException)

Aggregations

InvalidProtocolBufferException (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException)19 DeserializationException (org.apache.hadoop.hbase.exceptions.DeserializationException)15 FilterProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos)12 IOException (java.io.IOException)9 ArrayList (java.util.ArrayList)4 FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream)3 Path (org.apache.hadoop.fs.Path)2 CompareOp (org.apache.hadoop.hbase.filter.CompareFilter.CompareOp)2 CodedInputStream (org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream)2 ComparatorProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos)2 HBaseProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos)2 FileNotFoundException (java.io.FileNotFoundException)1 InterruptedIOException (java.io.InterruptedIOException)1 IllegalCharsetNameException (java.nio.charset.IllegalCharsetNameException)1 TreeSet (java.util.TreeSet)1 ExecutionException (java.util.concurrent.ExecutionException)1 ExecutorCompletionService (java.util.concurrent.ExecutorCompletionService)1 FileStatus (org.apache.hadoop.fs.FileStatus)1 PathFilter (org.apache.hadoop.fs.PathFilter)1 ServerName (org.apache.hadoop.hbase.ServerName)1