use of org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException in project druid by druid-io.
the class ProtoBufInputRowParser method buildStringKeyMap.
private Map<String, Object> buildStringKeyMap(ByteBuffer input) {
  final Descriptors.Descriptor descriptor = getDescriptor(descriptorFileInClasspath);
  final Map<String, Object> theMap = Maps.newHashMap();
  try {
    DynamicMessage message = DynamicMessage.parseFrom(descriptor, ByteString.copyFrom(input));
    Map<Descriptors.FieldDescriptor, Object> allFields = message.getAllFields();
    for (Map.Entry<Descriptors.FieldDescriptor, Object> entry : allFields.entrySet()) {
      String name = entry.getKey().getName();
      if (theMap.containsKey(name)) {
        continue;
        // Perhaps throw an exception here?
        // throw new RuntimeException("duplicate key " + name + " in " + message);
      }
      Object value = entry.getValue();
      if (value instanceof Descriptors.EnumValueDescriptor) {
        Descriptors.EnumValueDescriptor desc = (Descriptors.EnumValueDescriptor) value;
        value = desc.getName();
      }
      theMap.put(name, value);
    }
  } catch (InvalidProtocolBufferException e) {
    log.warn(e, "Problem parsing protobuf input");
  }
  return theMap;
}
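Whether that parse succeeds depends entirely on the input bytes matching the descriptor. Below is a minimal, self-contained sketch of the same DynamicMessage pattern using the stock com.google.protobuf classes (not Beam's vendored copy), with the well-known Timestamp type standing in for Druid's event schema; the class and variable names are illustrative only.

import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors;
import com.google.protobuf.DynamicMessage;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Timestamp;
import java.util.HashMap;
import java.util.Map;

public class DynamicParseSketch {
  public static void main(String[] args) {
    // Serialize a message; in the parser above this is the raw event payload.
    byte[] payload = Timestamp.newBuilder().setSeconds(1234).setNanos(5).build().toByteArray();
    Descriptors.Descriptor descriptor = Timestamp.getDescriptor();
    Map<String, Object> theMap = new HashMap<>();
    try {
      DynamicMessage message = DynamicMessage.parseFrom(descriptor, ByteString.copyFrom(payload));
      for (Map.Entry<Descriptors.FieldDescriptor, Object> entry : message.getAllFields().entrySet()) {
        Object value = entry.getValue();
        // Enum fields are unwrapped to their names, as in buildStringKeyMap.
        if (value instanceof Descriptors.EnumValueDescriptor) {
          value = ((Descriptors.EnumValueDescriptor) value).getName();
        }
        theMap.putIfAbsent(entry.getKey().getName(), value); // first value wins on duplicates
      }
    } catch (InvalidProtocolBufferException e) {
      // Malformed bytes: like the parser above, fall through to an empty map.
    }
    System.out.println(theMap); // {seconds=1234, nanos=5}
  }
}

Swapping putIfAbsent for the containsKey/continue check is behaviorally equivalent: the first occurrence of a field name wins and later ones are silently dropped.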
use of org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException in project hbase by apache.
the class SparkSQLPushDownFilter method parseFrom.
/**
 * @param pbBytes A protobuf-serialized instance
 * @return An instance of SparkSQLPushDownFilter
 * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
 */
@SuppressWarnings("unused")
public static SparkSQLPushDownFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
  SparkFilterProtos.SQLPredicatePushDownFilter proto;
  try {
    proto = SparkFilterProtos.SQLPredicatePushDownFilter.parseFrom(pbBytes);
  } catch (InvalidProtocolBufferException e) {
    throw new DeserializationException(e);
  }
  String encoder = proto.getEncoderClassName();
  BytesEncoder enc = JavaBytesEncoder.create(encoder);
  // Load DynamicLogicExpression
  DynamicLogicExpression dynamicLogicExpression =
      DynamicLogicExpressionBuilder.build(proto.getDynamicLogicExpression(), enc);
  // Load valuesFromQuery
  final List<ByteString> valueFromQueryArrayList = proto.getValueFromQueryArrayList();
  byte[][] valueFromQueryArray = new byte[valueFromQueryArrayList.size()][];
  for (int i = 0; i < valueFromQueryArrayList.size(); i++) {
    valueFromQueryArray[i] = valueFromQueryArrayList.get(i).toByteArray();
  }
  // Load mapping from HBase family/qualifier to Spark SQL columnName
  HashMap<ByteArrayComparable, HashMap<ByteArrayComparable, String>> currentCellToColumnIndexMap =
      new HashMap<>();
  for (SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping sqlPredicatePushDownCellToColumnMapping
      : proto.getCellToColumnMappingList()) {
    byte[] familyArray = sqlPredicatePushDownCellToColumnMapping.getColumnFamily().toByteArray();
    ByteArrayComparable familyByteComparable =
        new ByteArrayComparable(familyArray, 0, familyArray.length);
    HashMap<ByteArrayComparable, String> qualifierMap =
        currentCellToColumnIndexMap.get(familyByteComparable);
    if (qualifierMap == null) {
      qualifierMap = new HashMap<>();
      currentCellToColumnIndexMap.put(familyByteComparable, qualifierMap);
    }
    byte[] qualifierArray = sqlPredicatePushDownCellToColumnMapping.getQualifier().toByteArray();
    ByteArrayComparable qualifierByteComparable =
        new ByteArrayComparable(qualifierArray, 0, qualifierArray.length);
    qualifierMap.put(qualifierByteComparable, sqlPredicatePushDownCellToColumnMapping.getColumnName());
  }
  return new SparkSQLPushDownFilter(dynamicLogicExpression, valueFromQueryArray,
      currentCellToColumnIndexMap, encoder);
}
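The contract worth copying here is that callers never see the raw protobuf exception: any parse failure is rethrown as HBase's checked DeserializationException. A hedged sketch of that wrap-and-rethrow shape, with the stock protobuf Struct standing in for SparkFilterProtos and a local stand-in for the HBase exception class:

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Struct;

public class ParseFromSketch {
  /** Stand-in for org.apache.hadoop.hbase.exceptions.DeserializationException. */
  static class DeserializationException extends Exception {
    DeserializationException(Throwable cause) {
      super(cause);
    }
  }

  // Same shape as parseFrom above: one checked exception type at the boundary.
  static Struct parseFrom(final byte[] pbBytes) throws DeserializationException {
    try {
      return Struct.parseFrom(pbBytes);
    } catch (InvalidProtocolBufferException e) {
      throw new DeserializationException(e);
    }
  }

  public static void main(String[] args) {
    try {
      parseFrom(new byte[] {(byte) 0xFF, 0x01}); // deliberately malformed
    } catch (DeserializationException e) {
      System.out.println("rejected: " + e.getCause().getMessage());
    }
  }
}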
use of org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException in project Signal-Android by WhisperSystems.
the class WebRtcCallService method onMessage.
@Override
public void onMessage(DataChannel.Buffer buffer) {
  Log.w(TAG, "onMessage...");
  try {
    byte[] data = new byte[buffer.data.remaining()];
    buffer.data.get(data);
    Data dataMessage = Data.parseFrom(data);
    if (dataMessage.hasConnected()) {
      Log.w(TAG, "hasConnected...");
      Intent intent = new Intent(this, WebRtcCallService.class);
      intent.setAction(ACTION_CALL_CONNECTED);
      intent.putExtra(EXTRA_CALL_ID, dataMessage.getConnected().getId());
      startService(intent);
    } else if (dataMessage.hasHangup()) {
      Log.w(TAG, "hasHangup...");
      Intent intent = new Intent(this, WebRtcCallService.class);
      intent.setAction(ACTION_REMOTE_HANGUP);
      intent.putExtra(EXTRA_CALL_ID, dataMessage.getHangup().getId());
      startService(intent);
    } else if (dataMessage.hasVideoStreamingStatus()) {
      Log.w(TAG, "hasVideoStreamingStatus...");
      Intent intent = new Intent(this, WebRtcCallService.class);
      intent.setAction(ACTION_REMOTE_VIDEO_MUTE);
      intent.putExtra(EXTRA_CALL_ID, dataMessage.getVideoStreamingStatus().getId());
      intent.putExtra(EXTRA_MUTE, !dataMessage.getVideoStreamingStatus().getEnabled());
      startService(intent);
    }
  } catch (InvalidProtocolBufferException e) {
    Log.w(TAG, e);
  }
}
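The step that is easy to get wrong is the one before parsing: DataChannel.Buffer wraps a ByteBuffer, and parseFrom needs a byte[] sized to exactly the readable region. A minimal sketch of that extraction with the WebRTC and Android dependencies stripped out; the well-known BytesValue type is a stand-in for Signal's Data message.

import com.google.protobuf.ByteString;
import com.google.protobuf.BytesValue;
import com.google.protobuf.InvalidProtocolBufferException;
import java.nio.ByteBuffer;

public class BufferParseSketch {
  static BytesValue parse(ByteBuffer buffer) {
    // Copy exactly buffer.remaining() bytes, as onMessage does with buffer.data.
    byte[] data = new byte[buffer.remaining()];
    buffer.get(data);
    try {
      return BytesValue.parseFrom(data);
    } catch (InvalidProtocolBufferException e) {
      return null; // onMessage just logs the failure and drops the frame
    }
  }

  public static void main(String[] args) {
    ByteBuffer wire = ByteBuffer.wrap(BytesValue.of(ByteString.copyFromUtf8("ping")).toByteArray());
    System.out.println(parse(wire).getValue().toStringUtf8()); // ping
  }
}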
use of org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException in project hbase by apache.
the class ProtobufUtil method toScanMetrics.
public static ScanMetrics toScanMetrics(final byte[] bytes) {
  Parser<MapReduceProtos.ScanMetrics> parser = MapReduceProtos.ScanMetrics.PARSER;
  MapReduceProtos.ScanMetrics pScanMetrics = null;
  try {
    pScanMetrics = parser.parseFrom(bytes);
  } catch (InvalidProtocolBufferException e) {
    // Ignored: there are just no key values to add.
  }
  ScanMetrics scanMetrics = new ScanMetrics();
  if (pScanMetrics != null) {
    for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
      if (pair.hasName() && pair.hasValue()) {
        scanMetrics.setCounter(pair.getName(), pair.getValue());
      }
    }
  }
  return scanMetrics;
}
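Note the deliberately lenient error policy: a parse failure is swallowed and the method degrades to an empty ScanMetrics rather than failing the scan. A sketch of the same policy against the stock Parser API, with the well-known Int64Value standing in for the metrics proto:

import com.google.protobuf.Int64Value;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Parser;

public class LenientParseSketch {
  static long counterOrZero(byte[] bytes) {
    Parser<Int64Value> parser = Int64Value.parser();
    Int64Value value = null;
    try {
      value = parser.parseFrom(bytes);
    } catch (InvalidProtocolBufferException e) {
      // Ignored, mirroring toScanMetrics: bad bytes just mean no counters.
    }
    return value == null ? 0L : value.getValue();
  }

  public static void main(String[] args) {
    System.out.println(counterOrZero(Int64Value.of(42).toByteArray())); // 42
    System.out.println(counterOrZero(new byte[] {(byte) 0xFF, 0x01})); // 0
  }
}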
use of org.apache.beam.vendor.grpc.v1p43p2.com.google.protobuf.InvalidProtocolBufferException in project hadoop by apache.
the class FSDirectory method addEncryptionZone.
private void addEncryptionZone(INodeWithAdditionalFields inode, XAttrFeature xaf) {
  if (xaf == null) {
    return;
  }
  XAttr xattr = xaf.getXAttr(CRYPTO_XATTR_ENCRYPTION_ZONE);
  if (xattr == null) {
    return;
  }
  try {
    final HdfsProtos.ZoneEncryptionInfoProto ezProto =
        HdfsProtos.ZoneEncryptionInfoProto.parseFrom(xattr.getValue());
    ezManager.unprotectedAddEncryptionZone(inode.getId(),
        PBHelperClient.convert(ezProto.getSuite()),
        PBHelperClient.convert(ezProto.getCryptoProtocolVersion()),
        ezProto.getKeyName());
  } catch (InvalidProtocolBufferException e) {
    NameNode.LOG.warn("Error parsing protocol buffer of EZ XAttr " + xattr.getName()
        + " dir:" + inode.getFullPathName());
  }
}
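The shape to notice is two null guards followed by a parse that logs and moves on, so a single corrupt xattr cannot abort loading the whole directory tree. A self-contained sketch with the Hadoop types stubbed out; StringValue is a hypothetical stand-in for ZoneEncryptionInfoProto, and an inode id replaces the path in the log line.

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.StringValue;
import java.util.logging.Logger;

public class XAttrParseSketch {
  private static final Logger LOG = Logger.getLogger(XAttrParseSketch.class.getName());

  static void addEncryptionZone(long inodeId, byte[] xattrValue) {
    if (xattrValue == null) {
      return; // no encryption-zone xattr on this inode; nothing to do
    }
    try {
      // getValue() here plays the role of ezProto.getKeyName() above.
      StringValue ezProto = StringValue.parseFrom(xattrValue);
      LOG.info("EZ on inode " + inodeId + " uses key " + ezProto.getValue());
    } catch (InvalidProtocolBufferException e) {
      // Warn and continue, as FSDirectory does: one bad xattr is not fatal.
      LOG.warning("Error parsing EZ XAttr on inode " + inodeId);
    }
  }

  public static void main(String[] args) {
    addEncryptionZone(16386, StringValue.of("myKey").toByteArray());
    addEncryptionZone(16387, null); // guard path: silently skipped
  }
}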