Usage example of com.google.cloud.bigquery.storage.v1.ArrowRecordBatch in the Apache Beam project.
From the class BigQueryIOStorageReadTest, the method createResponseArrow:
/**
 * Builds a {@link ReadRowsResponse} whose payload is an IPC-serialized Arrow record batch.
 *
 * <p>The batch has two columns read from {@code arrowSchema}: field 0 is a {@code VarCharVector}
 * populated from {@code name}, field 1 a {@code BigIntVector} populated from {@code number}.
 * The two lists are assumed to be the same length.
 *
 * @param arrowSchema Arrow schema describing the two-column layout
 * @param name values for the string column (field 0)
 * @param number values for the int64 column (field 1)
 * @param progressAtResponseStart stream progress fraction at the start of this response
 * @param progressAtResponseEnd stream progress fraction at the end of this response
 * @return a response carrying the serialized batch, its row count, and the progress stats
 */
private ReadRowsResponse createResponseArrow(
    org.apache.arrow.vector.types.pojo.Schema arrowSchema,
    List<String> name,
    List<Long> number,
    double progressAtResponseStart,
    double progressAtResponseEnd) {
  ArrowRecordBatch batchProto;
  try (VectorSchemaRoot root = VectorSchemaRoot.create(arrowSchema, allocator)) {
    root.allocateNew();
    int rowCount = name.size();
    root.setRowCount(rowCount);
    // Column 0 holds the names, column 1 the numbers (matches the schema's field order).
    VarCharVector nameVector = (VarCharVector) root.getFieldVectors().get(0);
    BigIntVector numberVector = (BigIntVector) root.getFieldVectors().get(1);
    for (int row = 0; row < rowCount; row++) {
      nameVector.set(row, new Text(name.get(row)));
      numberVector.set(row, number.get(row));
    }
    VectorUnloader unloader = new VectorUnloader(root);
    try (org.apache.arrow.vector.ipc.message.ArrowRecordBatch unloaded =
        unloader.getRecordBatch()) {
      try (ByteArrayOutputStream bytes = new ByteArrayOutputStream()) {
        // IPC-serialize the batch so it can travel inside the proto message.
        MessageSerializer.serialize(new WriteChannel(Channels.newChannel(bytes)), unloaded);
        batchProto =
            ArrowRecordBatch.newBuilder()
                .setRowCount(unloaded.getLength())
                .setSerializedRecordBatch(ByteString.copyFrom(bytes.toByteArray()))
                .build();
      } catch (IOException e) {
        throw new RuntimeException("Error writing to byte array output stream", e);
      }
    }
  }
  return ReadRowsResponse.newBuilder()
      .setArrowRecordBatch(batchProto)
      .setRowCount(name.size())
      .setStats(
          StreamStats.newBuilder()
              .setProgress(
                  Progress.newBuilder()
                      .setAtResponseStart(progressAtResponseStart)
                      .setAtResponseEnd(progressAtResponseEnd)))
      .build();
}
Another usage of com.google.cloud.bigquery.storage.v1.ArrowRecordBatch in the Apache Beam project.
From the class BigQueryStorageArrowReader, the method processReadRowsResponse:
/**
 * Prepares this reader to iterate the rows of the Arrow record batch carried in
 * {@code readRowsResponse}.
 *
 * <p>Records the batch's row count, allocates a fresh {@code RootAllocator}, decodes the Arrow
 * schema from {@code protoSchema}, and builds a row iterator over the serialized batch bytes.
 *
 * @param readRowsResponse the server response containing a serialized Arrow record batch
 * @throws IOException if the schema or record batch bytes cannot be decoded
 */
@Override
public void processReadRowsResponse(ReadRowsResponse readRowsResponse) throws IOException {
  com.google.cloud.bigquery.storage.v1.ArrowRecordBatch recordBatch =
      readRowsResponse.getArrowRecordBatch();
  rowCount = recordBatch.getRowCount();
  // NOTE(review): a fresh allocator is assigned on every call and the previous one is not
  // closed here — confirm a cleanup path elsewhere releases it, otherwise this leaks.
  this.alloc = new RootAllocator(Long.MAX_VALUE);
  // Decode the schema inside try-with-resources so the stream is always closed after reading.
  Schema arrowSchema;
  try (InputStream input = protoSchema.getSerializedSchema().newInput()) {
    arrowSchema = ArrowConversion.arrowSchemaFromInput(input);
  }
  this.recordBatchIterator =
      ArrowConversion.rowsFromSerializedRecordBatch(
          arrowSchema, recordBatch.getSerializedRecordBatch().newInput(), this.alloc);
}
Aggregations