Use of com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference in the project java-bigquerystorage by googleapis.
From the class ITBigQueryStorageTest, method testColumnPartitionedTableByDateField.
/**
 * Verifies that a table partitioned on a DATE column can be read both without a filter
 * (all partitions) and with a row restriction on the partitioning column (one partition).
 */
@Test
public void testColumnPartitionedTableByDateField() throws InterruptedException, IOException {
  String tableName = "test_column_partition_table_by_date";
  // DDL creates the partitioned table and seeds it with one row per partition date.
  String ddl =
      String.format(
          " CREATE TABLE %s.%s (num_field INT64, date_field DATE) "
              + " PARTITION BY date_field "
              + " OPTIONS( "
              + " description=\"a table partitioned by date_field\" "
              + " ) "
              + "AS "
              + " SELECT 1, CAST(\"2019-01-01\" AS DATE)"
              + " UNION ALL"
              + " SELECT 2, CAST(\"2019-01-02\" AS DATE)"
              + " UNION ALL"
              + " SELECT 3, CAST(\"2019-01-03\" AS DATE)",
          DATASET,
          tableName);
  RunQueryJobAndExpectSuccess(QueryJobConfiguration.newBuilder(ddl).build());

  TableReference table =
      TableReference.newBuilder()
          .setProjectId(ServiceOptions.getDefaultProjectId())
          .setDatasetId(DATASET)
          .setTableId(tableName)
          .build();

  // Unfiltered read returns every row across all three partitions.
  List<GenericData.Record> allRows = ReadAllRows(table, /* filter = */ null);
  assertEquals("Actual rows read: " + allRows.toString(), 3, allRows.size());

  // A filter on the partitioning column restricts the read to the 2019-01-02 partition.
  List<GenericData.Record> filteredRows =
      ReadAllRows(table, /* filter = */ "date_field = CAST(\"2019-01-02\" AS DATE)");
  assertEquals("Actual rows read: " + filteredRows.toString(), 1, filteredRows.size());
  assertEquals(2L, filteredRows.get(0).get("num_field"));
}
Use of com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference in the project java-bigquerystorage by googleapis.
From the class ITBigQueryStorageTest, method testIngestionTimePartitionedTable.
/**
 * Verifies reads over an ingestion-time (DAY) partitioned table, including a filter on the
 * {@code _PARTITIONDATE} pseudo-column.
 */
@Test
public void testIngestionTimePartitionedTable() throws InterruptedException, IOException {
  Field numField =
      Field.newBuilder("num_field", LegacySQLTypeName.INTEGER)
          .setMode(Mode.REQUIRED)
          .setDescription("IntegerDescription")
          .build();
  com.google.cloud.bigquery.Schema tableSchema = com.google.cloud.bigquery.Schema.of(numField);
  TableId testTableId = TableId.of(DATASET, "test_date_partitioned_table");
  StandardTableDefinition tableDefinition =
      StandardTableDefinition.newBuilder()
          .setTimePartitioning(TimePartitioning.of(TimePartitioning.Type.DAY))
          .setSchema(tableSchema)
          .build();
  bigquery.create(TableInfo.of(testTableId, tableDefinition));

  // Simulate ingestion for 2019-01-01 and 2019-01-02 using partition decorators.
  RunQueryAppendJobAndExpectSuccess(
      TableId.of(DATASET, testTableId.getTable() + "$20190101"), "SELECT 1 AS num_field");
  RunQueryAppendJobAndExpectSuccess(
      TableId.of(DATASET, testTableId.getTable() + "$20190102"), "SELECT 2 AS num_field");

  TableReference tableReference =
      TableReference.newBuilder()
          .setProjectId(ServiceOptions.getDefaultProjectId())
          .setDatasetId(testTableId.getDataset())
          .setTableId(testTableId.getTable())
          .build();

  // Without a filter both ingested rows come back.
  List<GenericData.Record> unfilteredRows = ReadAllRows(tableReference, /* filter = */ null);
  assertEquals("Actual rows read: " + unfilteredRows.toString(), 2, unfilteredRows.size());

  // Filtering on the pseudo-column prunes the read to the 2019-01-02 partition only.
  List<GenericData.Record> partitionFilteredRows =
      ReadAllRows(tableReference, /* filter = */ "_PARTITIONDATE > \"2019-01-01\"");
  assertEquals(
      "Actual rows read: " + partitionFilteredRows.toString(), 1, partitionFilteredRows.size());
  assertEquals(2L, partitionFilteredRows.get(0).get("num_field"));
}
Use of com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference in the project java-bigquerystorage by googleapis.
From the class ITBigQueryStorageTest, method testDateAndTimeSqlTypes.
/**
 * Verifies how the BigQuery SQL date/time types (DATE, DATETIME, TIME, TIMESTAMP) are mapped
 * into the Avro read schema and its record values: DATE -> INT with the {@code date} logical
 * type, DATETIME -> STRING with a {@code datetime} logical-type property, TIME -> LONG with
 * {@code time-micros}, TIMESTAMP -> LONG with {@code timestamp-micros}.
 */
@Test
public void testDateAndTimeSqlTypes() throws InterruptedException, IOException {
String table_name = "test_date_and_time_sql_types";
// Create a single-row table containing one column of each date/time SQL type.
String createTableStatement = String.format(" CREATE TABLE %s.%s " + " (date_field DATE NOT NULL," + " datetime_field DATETIME NOT NULL," + " time_field TIME NOT NULL," + " timestamp_field TIMESTAMP NOT NULL)" + " OPTIONS( " + " description=\"a table with date and time column types\" " + " ) " + "AS " + " SELECT " + " CAST(\"2019-05-31\" AS DATE)," + " CAST(\"2019-04-30 21:47:59.999999\" AS DATETIME)," + " CAST(\"21:47:59.999999\" AS TIME)," + " CAST(\"2019-04-30 19:24:19.123456 UTC\" AS TIMESTAMP)", DATASET, table_name);
RunQueryJobAndExpectSuccess(QueryJobConfiguration.newBuilder(createTableStatement).build());
TableReference tableReference = TableReference.newBuilder().setTableId(table_name).setDatasetId(DATASET).setProjectId(ServiceOptions.getDefaultProjectId()).build();
List<GenericData.Record> rows = ReadAllRows(/* tableReference = */
tableReference, /* filter = */
null);
assertEquals("Actual rows read: " + rows.toString(), 1, rows.size());
GenericData.Record record = rows.get(0);
Schema avroSchema = record.getSchema();
// Assertion messages include the full (pretty-printed) schema / row for easier debugging.
String actualSchemaMessage = String.format("Unexpected schema. Actual schema:%n%s", avroSchema.toString(/* pretty = */
true));
String rowAssertMessage = String.format("Row not matching expectations: %s", record.toString());
assertEquals(actualSchemaMessage, Schema.Type.RECORD, avroSchema.getType());
assertEquals(actualSchemaMessage, "__root__", avroSchema.getName());
assertEquals(actualSchemaMessage, 4, avroSchema.getFields().size());
// DATE arrives as an Avro INT counting days since the epoch (date logical type).
assertEquals(actualSchemaMessage, Schema.Type.INT, avroSchema.getField("date_field").schema().getType());
assertEquals(actualSchemaMessage, LogicalTypes.date(), avroSchema.getField("date_field").schema().getLogicalType());
assertEquals(rowAssertMessage, LocalDate.of(/* year = */
2019, /* month = */
5, /* dayOfMonth = */
31), LocalDate.ofEpochDay((int) record.get("date_field")));
// DATETIME arrives as an Avro STRING; its logical type is exposed only as the
// "logicalType" object property, not as a registered Avro LogicalType.
assertEquals(actualSchemaMessage, Schema.Type.STRING, avroSchema.getField("datetime_field").schema().getType());
assertEquals(actualSchemaMessage, "datetime", avroSchema.getField("datetime_field").schema().getObjectProp("logicalType"));
assertEquals(rowAssertMessage, new Utf8("2019-04-30T21:47:59.999999"), (Utf8) record.get("datetime_field"));
// TIME arrives as an Avro LONG in microseconds of the day (time-micros logical type);
// multiply by 1_000 to convert micros to the nanos LocalTime expects.
assertEquals(actualSchemaMessage, Schema.Type.LONG, avroSchema.getField("time_field").schema().getType());
assertEquals(actualSchemaMessage, LogicalTypes.timeMicros(), avroSchema.getField("time_field").schema().getLogicalType());
assertEquals(rowAssertMessage, LocalTime.of(/* hour = */
21, /* minute = */
47, /* second = */
59, /* nanoOfSecond = */
999_999_000), LocalTime.ofNanoOfDay(1_000L * (long) record.get("time_field")));
// TIMESTAMP arrives as an Avro LONG of microseconds since the epoch (timestamp-micros).
assertEquals(actualSchemaMessage, Schema.Type.LONG, avroSchema.getField("timestamp_field").schema().getType());
assertEquals(actualSchemaMessage, LogicalTypes.timestampMicros(), avroSchema.getField("timestamp_field").schema().getLogicalType());
ZonedDateTime expected_timestamp = ZonedDateTime.parse("2019-04-30T19:24:19Z", DateTimeFormatter.ISO_INSTANT.withZone(ZoneOffset.UTC)).withNano(123_456_000);
long actual_timestamp_micros = (long) record.get("timestamp_field");
// Split the micros value into whole seconds plus the nano remainder for Instant.
ZonedDateTime actual_timestamp = ZonedDateTime.ofInstant(Instant.ofEpochSecond(/* epochSecond = */
actual_timestamp_micros / 1_000_000, (actual_timestamp_micros % 1_000_000) * 1_000), ZoneOffset.UTC);
assertEquals(rowAssertMessage, expected_timestamp, actual_timestamp);
}
Use of com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference in the project java-bigquerystorage by googleapis.
From the class ITBigQueryStorageTest, method ReadAllRows.
/**
 * Reads all the rows from the specified table reference and returns a list as generic Avro
 * records.
 *
 * @param tableReference the table to read; callers set its project, dataset, and table ids
 * @param filter Optional. If specified, it will be used to restrict returned data.
 * @return all rows of the table, each copied into an independent generic Avro record
 */
List<GenericData.Record> ReadAllRows(TableReference tableReference, String filter) throws IOException {
final List<GenericData.Record> rows = new ArrayList<>();
// Delegate the streaming read; null snapshot means "read at the current time".
ProcessRowsAtSnapshot(/* tableReference = */
tableReference, /* snapshotInMillis = */
null, /* filter = */
filter, new AvroRowConsumer() {
@Override
public void accept(GenericData.Record record) {
// clone the record since that reference will be reused by the reader.
rows.add(new GenericRecordBuilder(record).build());
}
});
return rows;
}
Use of com.google.cloud.bigquery.storage.v1beta1.TableReferenceProto.TableReference in the project java-bigquerystorage by googleapis.
From the class BigQueryStorageClientTest, method createReadSessionTest.
/**
 * Exercises {@code createReadSession} against the mock service: checks the returned session
 * and that the outgoing request carries the arguments and standard API client header.
 */
@Test
@SuppressWarnings("all")
public void createReadSessionTest() {
  // Canned response the mock service will hand back.
  String sessionName = "name3373707";
  ReadSession expected = ReadSession.newBuilder().setName(sessionName).build();
  mockBigQueryStorage.addResponse(expected);

  String parent = "parent-995424086";
  int requestedStreams = 1017221410;
  TableReference tableReference = TableReference.newBuilder().build();

  ReadSession actual = client.createReadSession(tableReference, parent, requestedStreams);
  Assert.assertEquals(expected, actual);

  // Exactly one request should have reached the mock, echoing the call arguments.
  List<AbstractMessage> requests = mockBigQueryStorage.getRequests();
  Assert.assertEquals(1, requests.size());
  CreateReadSessionRequest request = (CreateReadSessionRequest) requests.get(0);
  Assert.assertEquals(tableReference, request.getTableReference());
  Assert.assertEquals(parent, request.getParent());
  Assert.assertEquals(requestedStreams, request.getRequestedStreams());

  // The client must also attach the default API client header.
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
Aggregations