Usage example of com.google.cloud.bigquery.storage.v1.ProtoRows in the apache/beam project.
From the class SplittingIterable, method iterator().
/**
 * Returns an iterator that repackages the underlying {@code StorageApiWritePayload} elements
 * into {@code ProtoRows} batches, closing a batch once the accumulated serialized size
 * exceeds {@code splitSize}.
 *
 * <p>If an element's schema hash differs from {@code currentDescriptor.hash}, the descriptor
 * is refreshed via {@code updateSchema} and the payload is re-validated against it.
 */
@Override
public Iterator<ProtoRows> iterator() {
  return new Iterator<ProtoRows>() {
    final Iterator<StorageApiWritePayload> underlyingIterator = underlying.iterator();

    @Override
    public boolean hasNext() {
      return underlyingIterator.hasNext();
    }

    @Override
    public ProtoRows next() {
      if (!hasNext()) {
        throw new NoSuchElementException();
      }
      ProtoRows.Builder inserts = ProtoRows.newBuilder();
      long bytesSize = 0;
      while (underlyingIterator.hasNext()) {
        StorageApiWritePayload payload = underlyingIterator.next();
        if (payload.getSchemaHash() != currentDescriptor.hash) {
          // Schema doesn't match. Try and get an updated schema hash (from the base table).
          currentDescriptor = updateSchema.apply(payload.getSchemaHash());
          // Validate that the record can now be parsed against the refreshed descriptor.
          try {
            DynamicMessage msg =
                DynamicMessage.parseFrom(currentDescriptor.descriptor, payload.getPayload());
            // Message.getUnknownFields() never returns null, so only the emptiness check is
            // needed; any unknown fields mean the record still diverges from the table schema.
            if (!msg.getUnknownFields().asMap().isEmpty()) {
              throw new RuntimeException("Record schema does not match table. Unknown fields: " + msg.getUnknownFields());
            }
          } catch (InvalidProtocolBufferException e) {
            throw new RuntimeException(e);
          }
        }
        ByteString byteString = ByteString.copyFrom(payload.getPayload());
        inserts.addSerializedRows(byteString);
        bytesSize += byteString.size();
        // The element that pushes the batch over splitSize is still included, so a batch
        // may exceed splitSize by up to one row's worth of bytes.
        if (bytesSize > splitSize) {
          break;
        }
      }
      return inserts.build();
    }
  };
}
Usage example of com.google.cloud.bigquery.storage.v1.ProtoRows in the apache/beam project.
From the class FakeDatasetService, method getStreamAppendClient().
/**
 * Returns a fake {@code StreamAppendClient} that decodes appended proto rows using the
 * supplied {@code descriptor} and records them on the in-memory write stream identified by
 * {@code streamName}.
 *
 * <p>{@code appendRows} throws {@code RuntimeException} if the stream does not exist or if a
 * row contains fields unknown to the descriptor (i.e. a schema mismatch).
 */
@Override
public StreamAppendClient getStreamAppendClient(String streamName, Descriptor descriptor) {
  return new StreamAppendClient() {
    // Descriptor used to decode serialized rows back into TableRows. A final field
    // initialized directly replaces the original field + instance-initializer pair.
    private final Descriptor protoDescriptor = descriptor;

    @Override
    public ApiFuture<AppendRowsResponse> appendRows(long offset, ProtoRows rows) throws Exception {
      // Class-level lock so concurrent clients see a consistent view of writeStreams.
      synchronized (FakeDatasetService.class) {
        Stream stream = writeStreams.get(streamName);
        if (stream == null) {
          throw new RuntimeException("No such stream: " + streamName);
        }
        List<TableRow> tableRows = Lists.newArrayListWithExpectedSize(rows.getSerializedRowsCount());
        for (ByteString bytes : rows.getSerializedRowsList()) {
          DynamicMessage msg = DynamicMessage.parseFrom(protoDescriptor, bytes);
          // Message.getUnknownFields() never returns null; unknown entries indicate the
          // appended row does not match the table schema.
          if (!msg.getUnknownFields().asMap().isEmpty()) {
            throw new RuntimeException("Unknown fields set in append! " + msg.getUnknownFields());
          }
          // Reuse the already-parsed message instead of parsing the same bytes a second time.
          tableRows.add(TableRowToStorageApiProto.tableRowFromMessage(msg));
        }
        stream.appendRows(offset, tableRows);
      }
      return ApiFutures.immediateFuture(AppendRowsResponse.newBuilder().build());
    }

    @Override
    public void close() throws Exception {}

    @Override
    public void pin() {}

    @Override
    public void unpin() throws Exception {}
  };
}
Aggregations