Use of com.google.protobuf.DescriptorProtos.FileDescriptorProto in project tesla by linking12.
The class ServiceResolver, method fromFileDescriptorSet:
public static ServiceResolver fromFileDescriptorSet(FileDescriptorSet descriptorSet) {
    ImmutableMap<String, FileDescriptorProto> descriptorProtoIndex = computeDescriptorProtoIndex(descriptorSet);
    Map<String, FileDescriptor> descriptorCache = new HashMap<>();
    ImmutableList.Builder<FileDescriptor> result = ImmutableList.builder();
    for (FileDescriptorProto descriptorProto : descriptorSet.getFileList()) {
        try {
            result.add(descriptorFromProto(descriptorProto, descriptorProtoIndex, descriptorCache));
        } catch (DescriptorValidationException e) {
            logger.warn("Skipped descriptor " + descriptorProto.getName() + " due to error", e);
            continue;
        }
    }
    return new ServiceResolver(result.build());
}
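A minimal sketch of how such a resolver is typically fed: the FileDescriptorSet is generated ahead of time (for example with protoc --include_imports --descriptor_set_out) and parsed from a file. The descriptor file name below is hypothetical and not part of the project code.

import com.google.protobuf.DescriptorProtos.FileDescriptorSet;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class ResolverUsageSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical descriptor set, e.g. produced by:
        //   protoc --include_imports --descriptor_set_out=services.dsc my_service.proto
        try (InputStream in = new FileInputStream("services.dsc")) {
            FileDescriptorSet descriptorSet = FileDescriptorSet.parseFrom(in);
            ServiceResolver resolver = ServiceResolver.fromFileDescriptorSet(descriptorSet);
            // resolver can now look up service and method descriptors for the compiled protos.
        }
    }
}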
Use of com.google.protobuf.DescriptorProtos.FileDescriptorProto in project tesla by linking12.
The class ServiceResolver, method descriptorFromProto:
private static FileDescriptor descriptorFromProto(FileDescriptorProto descriptorProto,
        ImmutableMap<String, FileDescriptorProto> descriptorProtoIndex,
        Map<String, FileDescriptor> descriptorCache) throws DescriptorValidationException {
    String descriptorName = descriptorProto.getName();
    if (descriptorCache.containsKey(descriptorName)) {
        return descriptorCache.get(descriptorName);
    }
    ImmutableList.Builder<FileDescriptor> dependencies = ImmutableList.builder();
    for (String dependencyName : descriptorProto.getDependencyList()) {
        if (!descriptorProtoIndex.containsKey(dependencyName)) {
            throw new IllegalArgumentException("Could not find dependency: " + dependencyName);
        }
        FileDescriptorProto dependencyProto = descriptorProtoIndex.get(dependencyName);
        dependencies.add(descriptorFromProto(dependencyProto, descriptorProtoIndex, descriptorCache));
    }
    FileDescriptor[] empty = new FileDescriptor[0];
    return FileDescriptor.buildFrom(descriptorProto, dependencies.build().toArray(empty));
}
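The recursion above exists because FileDescriptor.buildFrom only accepts dependencies that are themselves already-built FileDescriptor objects. A self-contained sketch of that constraint, with made-up file and message names, building a descriptor that imports another:

import com.google.protobuf.DescriptorProtos.DescriptorProto;
import com.google.protobuf.DescriptorProtos.FieldDescriptorProto;
import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.DescriptorValidationException;
import com.google.protobuf.Descriptors.FileDescriptor;

public class DependencySketch {
    public static void main(String[] args) throws DescriptorValidationException {
        // "common.proto" declares a message Pair with two string fields.
        FileDescriptorProto common = FileDescriptorProto.newBuilder()
            .setName("common.proto")
            .addMessageType(DescriptorProto.newBuilder()
                .setName("Pair")
                .addField(FieldDescriptorProto.newBuilder()
                    .setName("key").setNumber(1)
                    .setType(FieldDescriptorProto.Type.TYPE_STRING))
                .addField(FieldDescriptorProto.newBuilder()
                    .setName("value").setNumber(2)
                    .setType(FieldDescriptorProto.Type.TYPE_STRING)))
            .build();

        // "wrapper.proto" imports common.proto and uses Pair as a field type.
        FileDescriptorProto wrapperFile = FileDescriptorProto.newBuilder()
            .setName("wrapper.proto")
            .addDependency("common.proto")
            .addMessageType(DescriptorProto.newBuilder()
                .setName("Wrapper")
                .addField(FieldDescriptorProto.newBuilder()
                    .setName("pair").setNumber(1)
                    .setType(FieldDescriptorProto.Type.TYPE_MESSAGE)
                    .setTypeName(".Pair")))
            .build();

        // The dependency must be built first, then passed to buildFrom.
        FileDescriptor commonDescriptor = FileDescriptor.buildFrom(common, new FileDescriptor[0]);
        FileDescriptor wrapperDescriptor =
            FileDescriptor.buildFrom(wrapperFile, new FileDescriptor[] { commonDescriptor });

        Descriptor wrapper = wrapperDescriptor.findMessageTypeByName("Wrapper");
        System.out.println(wrapper.getFullName());
    }
}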
Use of com.google.protobuf.DescriptorProtos.FileDescriptorProto in project atlasdb by palantir.
The class ColumnValueDescription, method hydrateFromProto:
public static ColumnValueDescription hydrateFromProto(TableMetadataPersistence.ColumnValueDescription message) {
    ValueType type = ValueType.hydrateFromProto(message.getType());
    Compression compression = Compression.hydrateFromProto(message.getCompression());
    if (!message.hasClassName()) {
        return new ColumnValueDescription(type, compression);
    }
    Validate.isTrue(type == ValueType.BLOB);
    if (message.hasFormat()) {
        try {
            Format format = Format.hydrateFromProto(message.getFormat());
            Descriptor protoDescriptor = null;
            if (message.hasProtoFileDescriptorTree()) {
                FileDescriptor fileDescriptor = hydrateFileDescriptorTree(message.getProtoFileDescriptorTree());
                protoDescriptor = fileDescriptor.findMessageTypeByName(message.getProtoMessageName());
            } else if (message.hasProtoFileDescriptor()) {
                FileDescriptorProto fileProto = FileDescriptorProto.parseFrom(message.getProtoFileDescriptor());
                FileDescriptor fileDescriptor = FileDescriptor.buildFrom(fileProto, new FileDescriptor[0]);
                protoDescriptor = fileDescriptor.findMessageTypeByName(message.getProtoMessageName());
            }
            return new ColumnValueDescription(format, message.getClassName(), message.getCanonicalClassName(), compression, protoDescriptor);
        } catch (Exception e) {
            log.error("Failed to parse FileDescriptorProto.", e);
        }
    }
    /*
     * All the code in the rest of this method is to support old protos that don't have a format field.
     * Format and canonicalClassName were added at the same time.
     *
     * Once we upgrade all the old protos (after 3.6.0), we can remove the below code.
     */
    Format format = Format.hydrateFromProto(message.getFormat());
    Descriptor protoDescriptor = null;
    if (message.hasProtoFileDescriptor()) {
        try {
            FileDescriptorProto fileProto = FileDescriptorProto.parseFrom(message.getProtoFileDescriptor());
            FileDescriptor fileDescriptor = FileDescriptor.buildFrom(fileProto, new FileDescriptor[0]);
            protoDescriptor = fileDescriptor.findMessageTypeByName(message.getProtoMessageName());
        } catch (Exception e) {
            log.warn("Failed to parse FileDescriptorProto.", e);
        }
    }
    return new ColumnValueDescription(format, message.getClassName(), message.getCanonicalClassName(), compression, protoDescriptor);
}
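The hasProtoFileDescriptor branch shows the general pattern for reviving a descriptor that was persisted as serialized FileDescriptorProto bytes. A minimal sketch of that pattern, independent of the AtlasDB types and with hypothetical parameter names, using the rebuilt descriptor to decode a stored blob:

import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.FileDescriptor;
import com.google.protobuf.DynamicMessage;
import com.google.protobuf.InvalidProtocolBufferException;

public class StoredDescriptorSketch {
    // Rebuilds a message Descriptor from stored FileDescriptorProto bytes and parses a blob
    // with it, so the value can be read without the generated message class on the classpath.
    static DynamicMessage decode(byte[] fileDescriptorBytes, String messageName, byte[] blob)
            throws Exception {
        FileDescriptorProto fileProto = FileDescriptorProto.parseFrom(fileDescriptorBytes);
        // A descriptor stored without its dependency tree can only be rebuilt if it has no imports.
        FileDescriptor fileDescriptor = FileDescriptor.buildFrom(fileProto, new FileDescriptor[0]);
        Descriptor messageDescriptor = fileDescriptor.findMessageTypeByName(messageName);
        if (messageDescriptor == null) {
            throw new InvalidProtocolBufferException("Unknown message type: " + messageName);
        }
        return DynamicMessage.parseFrom(messageDescriptor, blob);
    }
}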
Use of com.google.protobuf.DescriptorProtos.FileDescriptorProto in project beam by apache.
The class BeamRowToStorageApiProto, method getDescriptorFromSchema:
/**
* Given a Beam Schema, returns a protocol-buffer Descriptor that can be used to write data using
* the BigQuery Storage API.
*/
public static Descriptor getDescriptorFromSchema(Schema schema) throws DescriptorValidationException {
    DescriptorProto descriptorProto = descriptorSchemaFromBeamSchema(schema);
    FileDescriptorProto fileDescriptorProto = FileDescriptorProto.newBuilder().addMessageType(descriptorProto).build();
    FileDescriptor fileDescriptor = FileDescriptor.buildFrom(fileDescriptorProto, new FileDescriptor[0]);
    return Iterables.getOnlyElement(fileDescriptor.getMessageTypes());
}
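A hedged usage sketch, assuming the public signature shown above and access to BeamRowToStorageApiProto in org.apache.beam.sdk.io.gcp.bigquery; the schema fields are made up for illustration:

import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.DescriptorValidationException;
import org.apache.beam.sdk.schemas.Schema;

public class BeamSchemaDescriptorSketch {
    public static void main(String[] args) throws DescriptorValidationException {
        // A small Beam schema; field names are illustrative.
        Schema schema = Schema.builder()
            .addStringField("name")
            .addInt64Field("count")
            .build();

        // Derive a protobuf Descriptor that the Storage Write API sink can write with.
        Descriptor descriptor = BeamRowToStorageApiProto.getDescriptorFromSchema(schema);

        // Inspect the fields that were generated from the Beam schema.
        descriptor.getFields().forEach(f -> System.out.println(f.getName() + " : " + f.getType()));
    }
}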
Use of com.google.protobuf.DescriptorProtos.FileDescriptorProto in project beam by apache.
The class TableRowToStorageApiProto, method getDescriptorFromTableSchema:
/**
* Given a BigQuery TableSchema, returns a protocol-buffer Descriptor that can be used to write
* data using the BigQuery Storage API.
*/
public static Descriptor getDescriptorFromTableSchema(TableSchema jsonSchema) throws DescriptorValidationException {
    DescriptorProto descriptorProto = descriptorSchemaFromTableSchema(jsonSchema);
    FileDescriptorProto fileDescriptorProto = FileDescriptorProto.newBuilder().addMessageType(descriptorProto).build();
    FileDescriptor fileDescriptor = FileDescriptor.buildFrom(fileDescriptorProto, new FileDescriptor[0]);
    return Iterables.getOnlyElement(fileDescriptor.getMessageTypes());
}
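A similar sketch for the TableSchema variant, assuming the single-argument signature shown above (later Beam releases add further parameters); the column names are illustrative, and the DynamicMessage at the end simply demonstrates that the derived Descriptor is usable for building row payloads:

import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.DescriptorValidationException;
import com.google.protobuf.DynamicMessage;
import java.util.Arrays;

public class TableSchemaDescriptorSketch {
    public static void main(String[] args) throws DescriptorValidationException {
        // A two-column BigQuery schema; names and modes are made up.
        TableSchema tableSchema = new TableSchema().setFields(Arrays.asList(
            new TableFieldSchema().setName("name").setType("STRING").setMode("REQUIRED"),
            new TableFieldSchema().setName("count").setType("INT64").setMode("NULLABLE")));

        Descriptor descriptor = TableRowToStorageApiProto.getDescriptorFromTableSchema(tableSchema);

        // Build a row payload dynamically against the derived descriptor.
        DynamicMessage message = DynamicMessage.newBuilder(descriptor)
            .setField(descriptor.findFieldByName("name"), "widget")
            .setField(descriptor.findFieldByName("count"), 42L)
            .build();
        System.out.println(message);
    }
}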