
Example 1 with InvalidProtocolBufferException

Use of com.google.protobuf.InvalidProtocolBufferException in project druid, by druid-io.

From the class ProtoBufInputRowParser, the method buildStringKeyMap:

private Map<String, Object> buildStringKeyMap(ByteBuffer input) {
    final Descriptors.Descriptor descriptor = getDescriptor(descriptorFileInClasspath);
    final Map<String, Object> theMap = Maps.newHashMap();
    try {
        DynamicMessage message = DynamicMessage.parseFrom(descriptor, ByteString.copyFrom(input));
        Map<Descriptors.FieldDescriptor, Object> allFields = message.getAllFields();
        for (Map.Entry<Descriptors.FieldDescriptor, Object> entry : allFields.entrySet()) {
            String name = entry.getKey().getName();
            if (theMap.containsKey(name)) {
                // Perhaps throw an exception here instead?
                // throw new RuntimeException("duplicate key " + name + " in " + message);
                continue;
            }
            Object value = entry.getValue();
            if (value instanceof Descriptors.EnumValueDescriptor) {
                Descriptors.EnumValueDescriptor desc = (Descriptors.EnumValueDescriptor) value;
                value = desc.getName();
            }
            theMap.put(name, value);
        }
    } catch (InvalidProtocolBufferException e) {
        log.warn(e, "Problem parsing protobuf input");
    }
    return theMap;
}
Also used: InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) ByteString(com.google.protobuf.ByteString) DynamicMessage(com.google.protobuf.DynamicMessage) Descriptors(com.google.protobuf.Descriptors) Map(java.util.Map)
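
The same descriptor-driven flattening can be tried end to end outside Druid. The sketch below is our own illustration, not Druid code: the class name DynamicParseSketch is invented, and the compiled well-known type com.google.protobuf.Timestamp stands in for a descriptor loaded from the classpath. It serializes a message, re-parses it generically with DynamicMessage, and flattens the fields into a map as buildStringKeyMap does.

import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors;
import com.google.protobuf.DynamicMessage;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Timestamp;
import java.util.HashMap;
import java.util.Map;

public class DynamicParseSketch {
    public static void main(String[] args) {
        // Serialize a known message, then re-parse it generically via its descriptor.
        byte[] wire = Timestamp.newBuilder().setSeconds(42).setNanos(7).build().toByteArray();
        Descriptors.Descriptor descriptor = Timestamp.getDescriptor();
        Map<String, Object> theMap = new HashMap<>();
        try {
            DynamicMessage message = DynamicMessage.parseFrom(descriptor, ByteString.copyFrom(wire));
            for (Map.Entry<Descriptors.FieldDescriptor, Object> entry : message.getAllFields().entrySet()) {
                theMap.put(entry.getKey().getName(), entry.getValue());
            }
        } catch (InvalidProtocolBufferException e) {
            // Malformed input: log and fall through with a partial map, as the Druid parser does.
        }
        System.out.println(theMap); // prints seconds=42 and nanos=7 (map order may vary)
    }
}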

Example 2 with InvalidProtocolBufferException

Use of com.google.protobuf.InvalidProtocolBufferException in project hbase, by apache.

From the class SparkSQLPushDownFilter, the method parseFrom:

/**
   * @param pbBytes a protobuf-serialized instance
   * @return an instance of SparkSQLPushDownFilter
   * @throws org.apache.hadoop.hbase.exceptions.DeserializationException if the bytes cannot be parsed
   */
@SuppressWarnings("unused")
public static SparkSQLPushDownFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
    SparkFilterProtos.SQLPredicatePushDownFilter proto;
    try {
        proto = SparkFilterProtos.SQLPredicatePushDownFilter.parseFrom(pbBytes);
    } catch (InvalidProtocolBufferException e) {
        throw new DeserializationException(e);
    }
    String encoder = proto.getEncoderClassName();
    BytesEncoder enc = JavaBytesEncoder.create(encoder);
    // Load the DynamicLogicExpression
    DynamicLogicExpression dynamicLogicExpression = DynamicLogicExpressionBuilder.build(proto.getDynamicLogicExpression(), enc);
    // Load valuesFromQuery
    final List<ByteString> valueFromQueryArrayList = proto.getValueFromQueryArrayList();
    byte[][] valueFromQueryArray = new byte[valueFromQueryArrayList.size()][];
    for (int i = 0; i < valueFromQueryArrayList.size(); i++) {
        valueFromQueryArray[i] = valueFromQueryArrayList.get(i).toByteArray();
    }
    // Load the mapping from HBase family/qualifier to Spark SQL columnName
    HashMap<ByteArrayComparable, HashMap<ByteArrayComparable, String>> currentCellToColumnIndexMap = new HashMap<>();
    for (SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping sqlPredicatePushDownCellToColumnMapping : proto.getCellToColumnMappingList()) {
        byte[] familyArray = sqlPredicatePushDownCellToColumnMapping.getColumnFamily().toByteArray();
        ByteArrayComparable familyByteComparable = new ByteArrayComparable(familyArray, 0, familyArray.length);
        HashMap<ByteArrayComparable, String> qualifierMap = currentCellToColumnIndexMap.get(familyByteComparable);
        if (qualifierMap == null) {
            qualifierMap = new HashMap<>();
            currentCellToColumnIndexMap.put(familyByteComparable, qualifierMap);
        }
        byte[] qualifierArray = sqlPredicatePushDownCellToColumnMapping.getQualifier().toByteArray();
        ByteArrayComparable qualifierByteComparable = new ByteArrayComparable(qualifierArray, 0, qualifierArray.length);
        qualifierMap.put(qualifierByteComparable, sqlPredicatePushDownCellToColumnMapping.getColumnName());
    }
    return new SparkSQLPushDownFilter(dynamicLogicExpression, valueFromQueryArray, currentCellToColumnIndexMap, encoder);
}
Also used: HashMap(java.util.HashMap) ByteString(com.google.protobuf.ByteString) SparkFilterProtos(org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos) InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) BytesEncoder(org.apache.hadoop.hbase.spark.datasources.BytesEncoder) JavaBytesEncoder(org.apache.hadoop.hbase.spark.datasources.JavaBytesEncoder) DeserializationException(org.apache.hadoop.hbase.exceptions.DeserializationException)
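
The wrap-and-rethrow pattern above, translating the protobuf-level InvalidProtocolBufferException into a domain-level DeserializationException, can be sketched in isolation. This minimal version is ours: the nested DeserializationException is a hypothetical stand-in for the HBase class of the same name, and the compiled wrapper type com.google.protobuf.StringValue replaces SparkFilterProtos.

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.StringValue;

public class ParseFromSketch {
    /** Hypothetical stand-in for org.apache.hadoop.hbase.exceptions.DeserializationException. */
    static class DeserializationException extends Exception {
        DeserializationException(Throwable cause) { super(cause); }
    }

    static StringValue parseFrom(final byte[] pbBytes) throws DeserializationException {
        try {
            // Any parse failure is translated into the caller-facing exception type.
            return StringValue.parseFrom(pbBytes);
        } catch (InvalidProtocolBufferException e) {
            throw new DeserializationException(e);
        }
    }

    public static void main(String[] args) throws Exception {
        byte[] wire = StringValue.newBuilder().setValue("hello").build().toByteArray();
        System.out.println(parseFrom(wire).getValue()); // hello
    }
}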

Example 3 with InvalidProtocolBufferException

Use of com.google.protobuf.InvalidProtocolBufferException in project Signal-Android, by WhisperSystems.

From the class WebRtcCallService, the method onMessage:

@Override
public void onMessage(DataChannel.Buffer buffer) {
    Log.w(TAG, "onMessage...");
    try {
        byte[] data = new byte[buffer.data.remaining()];
        buffer.data.get(data);
        Data dataMessage = Data.parseFrom(data);
        if (dataMessage.hasConnected()) {
            Log.w(TAG, "hasConnected...");
            Intent intent = new Intent(this, WebRtcCallService.class);
            intent.setAction(ACTION_CALL_CONNECTED);
            intent.putExtra(EXTRA_CALL_ID, dataMessage.getConnected().getId());
            startService(intent);
        } else if (dataMessage.hasHangup()) {
            Log.w(TAG, "hasHangup...");
            Intent intent = new Intent(this, WebRtcCallService.class);
            intent.setAction(ACTION_REMOTE_HANGUP);
            intent.putExtra(EXTRA_CALL_ID, dataMessage.getHangup().getId());
            startService(intent);
        } else if (dataMessage.hasVideoStreamingStatus()) {
            Log.w(TAG, "hasVideoStreamingStatus...");
            Intent intent = new Intent(this, WebRtcCallService.class);
            intent.setAction(ACTION_REMOTE_VIDEO_MUTE);
            intent.putExtra(EXTRA_CALL_ID, dataMessage.getVideoStreamingStatus().getId());
            intent.putExtra(EXTRA_MUTE, !dataMessage.getVideoStreamingStatus().getEnabled());
            startService(intent);
        }
    } catch (InvalidProtocolBufferException e) {
        Log.w(TAG, e);
    }
}
Also used: InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) Data(org.thoughtcrime.securesms.webrtc.WebRtcDataProtos.Data) Intent(android.content.Intent)
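
The shape of onMessage (copy the bytes out of the buffer, parse, then branch on whichever variant of the message is present) can be sketched with the compiled well-known type com.google.protobuf.Value, whose oneof gives the same hasXxx() dispatch. DispatchSketch and its frame contents are our own illustrative choices.

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Value;
import java.nio.ByteBuffer;

public class DispatchSketch {
    public static void main(String[] args) {
        // Simulate an incoming frame: copy the buffer contents before parsing,
        // since a callback buffer may be direct and short-lived.
        ByteBuffer buffer = ByteBuffer.wrap(
                Value.newBuilder().setStringValue("hangup").build().toByteArray());
        byte[] data = new byte[buffer.remaining()];
        buffer.get(data);
        try {
            Value message = Value.parseFrom(data);
            // Branch on the variant that is set, like hasConnected()/hasHangup() above.
            if (message.hasStringValue()) {
                System.out.println("string variant: " + message.getStringValue());
            } else if (message.hasNumberValue()) {
                System.out.println("number variant: " + message.getNumberValue());
            }
        } catch (InvalidProtocolBufferException e) {
            // Malformed frame: log and drop, as the Signal handler does.
        }
    }
}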

Example 4 with InvalidProtocolBufferException

Use of com.google.protobuf.InvalidProtocolBufferException in project hbase, by apache.

From the class ProtobufUtil, the method toScanMetrics:

public static ScanMetrics toScanMetrics(final byte[] bytes) {
    Parser<MapReduceProtos.ScanMetrics> parser = MapReduceProtos.ScanMetrics.PARSER;
    MapReduceProtos.ScanMetrics pScanMetrics = null;
    try {
        pScanMetrics = parser.parseFrom(bytes);
    } catch (InvalidProtocolBufferException e) {
    // Ignored; there are just no key values to add.
    }
    ScanMetrics scanMetrics = new ScanMetrics();
    if (pScanMetrics != null) {
        for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
            if (pair.hasName() && pair.hasValue()) {
                scanMetrics.setCounter(pair.getName(), pair.getValue());
            }
        }
    }
    return scanMetrics;
}
Also used: MapReduceProtos(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos) InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) ScanMetrics(org.apache.hadoop.hbase.client.metrics.ScanMetrics) HBaseProtos(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos)
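
toScanMetrics deliberately swallows the parse failure and falls back to empty metrics. Below is our minimal sketch of that lenient Parser<T> pattern, with com.google.protobuf.Timestamp as an assumed stand-in for MapReduceProtos.ScanMetrics.

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Parser;
import com.google.protobuf.Timestamp;

public class LenientParseSketch {
    // Parse bytes, falling back to the default instance instead of propagating failure.
    static Timestamp parseOrDefault(byte[] bytes) {
        Parser<Timestamp> parser = Timestamp.parser();
        try {
            return parser.parseFrom(bytes);
        } catch (InvalidProtocolBufferException e) {
            // Swallowed, as in toScanMetrics: an unparseable payload yields defaults.
            return Timestamp.getDefaultInstance();
        }
    }

    public static void main(String[] args) {
        byte[] good = Timestamp.newBuilder().setSeconds(5).build().toByteArray();
        System.out.println(parseOrDefault(good).getSeconds()); // 5
        System.out.println(parseOrDefault(new byte[] { (byte) 0xFF }).getSeconds()); // 0 (fallback)
    }
}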

Example 5 with InvalidProtocolBufferException

Use of com.google.protobuf.InvalidProtocolBufferException in project hadoop, by apache.

From the class FSDirectory, the method addEncryptionZone:

private void addEncryptionZone(INodeWithAdditionalFields inode, XAttrFeature xaf) {
    if (xaf == null) {
        return;
    }
    XAttr xattr = xaf.getXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE);
    if (xattr == null) {
        return;
    }
    try {
        final HdfsProtos.ZoneEncryptionInfoProto ezProto = HdfsProtos.ZoneEncryptionInfoProto.parseFrom(xattr.getValue());
        ezManager.unprotectedAddEncryptionZone(inode.getId(), PBHelperClient.convert(ezProto.getSuite()), PBHelperClient.convert(ezProto.getCryptoProtocolVersion()), ezProto.getKeyName());
    } catch (InvalidProtocolBufferException e) {
        NameNode.LOG.warn("Error parsing protocol buffer of " + "EZ XAttr " + xattr.getName() + " dir:" + inode.getFullPathName());
    }
}
Also used: HdfsProtos(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos) InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) XAttr(org.apache.hadoop.fs.XAttr)
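
addEncryptionZone parses stored bytes, converts the proto into domain objects, and only warns on failure so that loading continues. A minimal sketch of that parse-convert-warn pattern, ours rather than Hadoop's, using com.google.protobuf.Duration and java.time.Duration as assumed stand-ins for the HDFS types:

import com.google.protobuf.InvalidProtocolBufferException;

public class ConvertSketch {
    // Parse a stored proto Duration and convert it to a domain type,
    // warning rather than failing on malformed bytes, as addEncryptionZone does.
    static java.time.Duration readStoredDuration(byte[] stored) {
        try {
            com.google.protobuf.Duration proto = com.google.protobuf.Duration.parseFrom(stored);
            return java.time.Duration.ofSeconds(proto.getSeconds(), proto.getNanos());
        } catch (InvalidProtocolBufferException e) {
            System.err.println("Error parsing protocol buffer: " + e.getMessage());
            return null;
        }
    }

    public static void main(String[] args) {
        byte[] stored = com.google.protobuf.Duration.newBuilder()
                .setSeconds(3).setNanos(500_000_000).build().toByteArray();
        System.out.println(readStoredDuration(stored)); // PT3.5S
    }
}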

Aggregations

Co-occurring types across the indexed sources, with usage counts:

InvalidProtocolBufferException (com.google.protobuf.InvalidProtocolBufferException): 260
ServerRequest (com.pokegoapi.main.ServerRequest): 46
ByteString (com.google.protobuf.ByteString): 42
IOException (java.io.IOException): 41
RequestFailedException (com.pokegoapi.exceptions.request.RequestFailedException): 39
InvalidProtocolBufferException (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException): 22
HashMap (java.util.HashMap): 21
ArrayList (java.util.ArrayList): 19
List (java.util.List): 18
Map (java.util.Map): 17
Any (com.google.protobuf.Any): 16
RunnerApi (org.apache.beam.model.pipeline.v1.RunnerApi): 15
HashSet (java.util.HashSet): 11
Key (org.apache.accumulo.core.data.Key): 10
Value (org.apache.accumulo.core.data.Value): 10
Status (org.apache.accumulo.server.replication.proto.Replication.Status): 10
Text (org.apache.hadoop.io.Text): 10
JsonToken (com.fasterxml.jackson.core.JsonToken): 9
ByteString (org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.ByteString): 9
ContractExeException (org.tron.core.exception.ContractExeException): 9