Search in sources:

Example 1 with InvalidProtocolBufferException

Use of org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException in the Apache HBase project.

This example is the parseFrom method of the QualifierFilter class.

/**
 * Deserializes a {@link QualifierFilter} from its protobuf representation.
 * @param pbBytes A pb serialized {@link QualifierFilter} instance
 * @return An instance of {@link QualifierFilter} made from <code>bytes</code>
 * @throws org.apache.hadoop.hbase.exceptions.DeserializationException if the bytes cannot be parsed
 * @see #toByteArray
 */
public static QualifierFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
    final FilterProtos.QualifierFilter proto;
    try {
        proto = FilterProtos.QualifierFilter.parseFrom(pbBytes);
    } catch (InvalidProtocolBufferException e) {
        // Any protobuf parse failure is surfaced as a deserialization problem.
        throw new DeserializationException(e);
    }
    final CompareOperator op = CompareOperator.valueOf(proto.getCompareFilter().getCompareOp().name());
    // The comparator is optional in the wire format; absent means null.
    ByteArrayComparable comparator = null;
    if (proto.getCompareFilter().hasComparator()) {
        try {
            comparator = ProtobufUtil.toComparator(proto.getCompareFilter().getComparator());
        } catch (IOException ioe) {
            throw new DeserializationException(ioe);
        }
    }
    return new QualifierFilter(op, comparator);
}
Also used : CompareOperator(org.apache.hadoop.hbase.CompareOperator) InvalidProtocolBufferException(org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException) FilterProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos) IOException(java.io.IOException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)

Example 2 with InvalidProtocolBufferException

Use of org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException in the Apache HBase project.

This example is the getSnapshotSizesForTable method of the TableQuotaSnapshotStore class.

/**
 * Fetches any serialized snapshot sizes from the quota table for the {@code tn} provided. Any
 * malformed records are skipped with a warning printed out.
 *
 * @param tn the table whose recorded snapshot sizes should be summed
 * @return the total size, in bytes, of all snapshots recorded for {@code tn}
 * @throws IOException if the quota table cannot be opened or scanned
 */
long getSnapshotSizesForTable(TableName tn) throws IOException {
    // try-with-resources closes both the Table and the ResultScanner on every exit path,
    // replacing the original manual try/finally (whose null-check on the scanner was dead code:
    // the scanner was always assigned before the try).
    try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME);
            ResultScanner rs = quotaTable.getScanner(QuotaTableUtil.createScanForSpaceSnapshotSizes(tn))) {
        long size = 0L;
        // Should just be a single row (for our table)
        for (Result result : rs) {
            // May have multiple columns, one for each snapshot
            CellScanner cs = result.cellScanner();
            while (cs.advance()) {
                Cell current = cs.current();
                try {
                    long snapshotSize = QuotaTableUtil.parseSnapshotSize(current);
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("Saw snapshot size of " + snapshotSize + " for " + current);
                    }
                    size += snapshotSize;
                } catch (InvalidProtocolBufferException e) {
                    // Skip the malformed record, but keep the cause in the log instead of
                    // swallowing the stack trace as the original did.
                    LOG.warn("Failed to parse snapshot size from cell: " + current, e);
                }
            }
        }
        return size;
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) InvalidProtocolBufferException(org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException) Scan(org.apache.hadoop.hbase.client.Scan) CellScanner(org.apache.hadoop.hbase.CellScanner) Cell(org.apache.hadoop.hbase.Cell) Result(org.apache.hadoop.hbase.client.Result)

Example 3 with InvalidProtocolBufferException

Use of org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException in the Apache HBase project.

This example is the read method of the ProcedureWALFormatReader class.

/**
 * Reads every entry from the given procedure WAL file, dispatching each entry by type, and then
 * merges the procedures observed in this file into the global procedure map. A protobuf parse
 * failure stops reading and marks the WAL as corrupted through the loader.
 */
public void read(ProcedureWALFile log) throws IOException {
    localTracker = log.getTracker();
    if (localTracker.isPartial()) {
        LOG.info("Rebuilding tracker for {}", log);
    }
    long entryCount = 0;
    FSDataInputStream in = log.getStream();
    try {
        boolean moreEntries = true;
        while (moreEntries) {
            ProcedureWALEntry entry = ProcedureWALFormat.readEntry(in);
            if (entry == null) {
                // File ended without an explicit EOF entry; treat it as a truncated log.
                LOG.warn("Nothing left to decode. Exiting with missing EOF, log={}", log);
                break;
            }
            entryCount++;
            switch (entry.getType()) {
                case PROCEDURE_WAL_INIT:
                    readInitEntry(entry);
                    break;
                case PROCEDURE_WAL_INSERT:
                    readInsertEntry(entry);
                    break;
                case PROCEDURE_WAL_UPDATE:
                case PROCEDURE_WAL_COMPACT:
                    readUpdateEntry(entry);
                    break;
                case PROCEDURE_WAL_DELETE:
                    readDeleteEntry(entry);
                    break;
                case PROCEDURE_WAL_EOF:
                    // Normal end of the log.
                    moreEntries = false;
                    break;
                default:
                    throw new CorruptedWALProcedureStoreException("Invalid entry: " + entry);
            }
        }
        LOG.info("Read {} entries in {}", entryCount, log);
    } catch (InvalidProtocolBufferException e) {
        // Entries decoded before the failure are kept; the loader decides what to do with
        // the corrupted remainder.
        LOG.error("While reading entry #{} in {}", entryCount, log, e);
        loader.markCorruptedWAL(log, e);
    }
    if (!localProcedureMap.isEmpty()) {
        log.setProcIds(localProcedureMap.getMinModifiedProcId(), localProcedureMap.getMaxModifiedProcId());
        if (localTracker.isPartial()) {
            localTracker.setMinMaxModifiedProcIds(localProcedureMap.getMinModifiedProcId(), localProcedureMap.getMaxModifiedProcId());
        }
        procedureMap.merge(localProcedureMap);
    }
// Do not reset the partial flag for local tracker, as here the local tracker only know the
// procedures which are modified in this file.
}
Also used : InvalidProtocolBufferException(org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) ProcedureWALEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry)

Example 4 with InvalidProtocolBufferException

Use of org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException in the Apache HBase project.

This example is the parseFrom method of the ColumnValueFilter class.

/**
 * Deserializes a {@link ColumnValueFilter} from its protobuf bytes.
 * @param pbBytes pbBytes
 * @return a ColumnValueFilter
 * @throws DeserializationException deserialization exception
 */
public static ColumnValueFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
    final FilterProtos.ColumnValueFilter proto;
    try {
        proto = FilterProtos.ColumnValueFilter.parseFrom(pbBytes);
    } catch (InvalidProtocolBufferException e) {
        // Malformed protobuf payload -> deserialization failure.
        throw new DeserializationException(e);
    }
    // Map the wire-format operator name onto the client-side enum.
    final CompareOperator op = CompareOperator.valueOf(proto.getCompareOp().name());
    final ByteArrayComparable cmp;
    try {
        cmp = ProtobufUtil.toComparator(proto.getComparator());
    } catch (IOException ioe) {
        throw new DeserializationException(ioe);
    }
    return new ColumnValueFilter(proto.getFamily().toByteArray(),
        proto.getQualifier().toByteArray(), op, cmp);
}
Also used : CompareOperator(org.apache.hadoop.hbase.CompareOperator) InvalidProtocolBufferException(org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException) FilterProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos) IOException(java.io.IOException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)

Example 5 with InvalidProtocolBufferException

Use of org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException in the Apache HBase project.

This example is the parseFrom method of the DependentColumnFilter class.

/**
 * Deserializes a {@link DependentColumnFilter} from its protobuf representation.
 * @param pbBytes A pb serialized {@link DependentColumnFilter} instance
 * @return An instance of {@link DependentColumnFilter} made from <code>bytes</code>
 * @throws DeserializationException if the bytes cannot be parsed
 * @see #toByteArray
 */
public static DependentColumnFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
    final FilterProtos.DependentColumnFilter proto;
    try {
        proto = FilterProtos.DependentColumnFilter.parseFrom(pbBytes);
    } catch (InvalidProtocolBufferException e) {
        throw new DeserializationException(e);
    }
    final CompareOperator op = CompareOperator.valueOf(proto.getCompareFilter().getCompareOp().name());
    // Comparator is optional on the wire; absent means null.
    ByteArrayComparable cmp = null;
    if (proto.getCompareFilter().hasComparator()) {
        try {
            cmp = ProtobufUtil.toComparator(proto.getCompareFilter().getComparator());
        } catch (IOException ioe) {
            throw new DeserializationException(ioe);
        }
    }
    // Family and qualifier are also optional; pass null when missing.
    return new DependentColumnFilter(
        proto.hasColumnFamily() ? proto.getColumnFamily().toByteArray() : null,
        proto.hasColumnQualifier() ? proto.getColumnQualifier().toByteArray() : null,
        proto.getDropDependentColumn(), op, cmp);
}
Also used : CompareOperator(org.apache.hadoop.hbase.CompareOperator) InvalidProtocolBufferException(org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException) FilterProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos) IOException(java.io.IOException) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)

Aggregations

InvalidProtocolBufferException (org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException)25 DeserializationException (org.apache.hadoop.hbase.exceptions.DeserializationException)15 IOException (java.io.IOException)12 FilterProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos)12 CompareOperator (org.apache.hadoop.hbase.CompareOperator)8 ArrayList (java.util.ArrayList)4 FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream)3 Path (org.apache.hadoop.fs.Path)2 Cell (org.apache.hadoop.hbase.Cell)2 Result (org.apache.hadoop.hbase.client.Result)2 Table (org.apache.hadoop.hbase.client.Table)2 ComparatorProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos)2 HBaseProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos)2 JsonArray (org.apache.hbase.thirdparty.com.google.gson.JsonArray)2 JsonElement (org.apache.hbase.thirdparty.com.google.gson.JsonElement)2 CodedInputStream (org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream)2 EOFException (java.io.EOFException)1 FileNotFoundException (java.io.FileNotFoundException)1 InterruptedIOException (java.io.InterruptedIOException)1 IllegalCharsetNameException (java.nio.charset.IllegalCharsetNameException)1