Usage example of com.google.cloud.bigtable.beam.CloudBigtableTableConfiguration from the project java-docs-samples by GoogleCloudPlatform: class WorkloadGenerator, method generateWorkload.
/**
 * Launches a Dataflow pipeline that generates a steady read workload against a Bigtable table,
 * then schedules a one-shot task to cancel the job after the configured duration.
 *
 * @param options workload options supplying project, instance, table, rate, and duration
 * @return the running {@link PipelineResult} (not yet finished when returned)
 */
static PipelineResult generateWorkload(BigtableWorkloadOptions options) {
  CloudBigtableTableConfiguration bigtableTableConfig =
      new CloudBigtableTableConfiguration.Builder()
          .withProjectId(options.getProject())
          .withInstanceId(options.getBigtableInstanceId())
          .withTableId(options.getBigtableTableId())
          .build();
  Pipeline p = Pipeline.create(options);
  // Initiates a new pipeline every second
  p.apply(GenerateSequence.from(0).withRate(options.getWorkloadRate(), new Duration(1000)))
      .apply(ParDo.of(new ReadFromTableFn(bigtableTableConfig)));
  System.out.println("Beginning to generate read workload.");
  PipelineResult pipelineResult = p.run();
  // Cancel the workload after the scheduled time.
  ScheduledThreadPoolExecutor exec = new ScheduledThreadPoolExecutor(1);
  exec.schedule(
      () -> {
        try {
          cancelJob(options, (DataflowPipelineJob) pipelineResult);
        } catch (IOException e) {
          e.printStackTrace();
          System.out.println("Unable to cancel job.");
        }
      },
      options.getWorkloadDurationMinutes(),
      TimeUnit.MINUTES);
  // BUG FIX: without shutdown() the executor's non-daemon worker thread keeps the JVM
  // alive forever. shutdown() still lets the already-scheduled cancellation task run
  // (ScheduledThreadPoolExecutor executes existing delayed tasks after shutdown by default),
  // and the executor terminates once it completes.
  exec.shutdown();
  return pipelineResult;
}
Usage example of com.google.cloud.bigtable.beam.CloudBigtableTableConfiguration from the project java-docs-samples by GoogleCloudPlatform: class HelloWorldWrite, method main.
/**
 * Entry point: builds a Beam pipeline that writes two sample rows to a Cloud Bigtable
 * table, then blocks until the pipeline finishes.
 */
public static void main(String[] args) {
  // [START bigtable_beam_helloworld_create_pipeline]
  BigtableOptions options =
      PipelineOptionsFactory.fromArgs(args).withValidation().as(BigtableOptions.class);
  Pipeline pipeline = Pipeline.create(options);
  // [END bigtable_beam_helloworld_create_pipeline]
  // [START bigtable_beam_helloworld_write_config]
  CloudBigtableTableConfiguration tableConfig =
      new CloudBigtableTableConfiguration.Builder()
          .withProjectId(options.getBigtableProjectId())
          .withInstanceId(options.getBigtableInstanceId())
          .withTableId(options.getBigtableTableId())
          .build();
  // [END bigtable_beam_helloworld_write_config]
  // [START bigtable_beam_helloworld_write_transforms]
  pipeline
      .apply(Create.of("phone#4c410523#20190501", "phone#4c410523#20190502"))
      .apply(
          ParDo.of(
              new DoFn<String, Mutation>() {
                // Turns each row key into a Put mutation containing a single cell.
                @ProcessElement
                public void processElement(@Element String rowkey, OutputReceiver<Mutation> out) {
                  long now = System.currentTimeMillis();
                  Put put = new Put(Bytes.toBytes(rowkey));
                  put.addColumn(
                      Bytes.toBytes("stats_summary"),
                      Bytes.toBytes("os_build"),
                      now,
                      Bytes.toBytes("android"));
                  out.output(put);
                }
              }))
      .apply(CloudBigtableIO.writeToTable(tableConfig));
  // [END bigtable_beam_helloworld_write_transforms]
  pipeline.run().waitUntilFinish();
}
Usage example of com.google.cloud.bigtable.beam.CloudBigtableTableConfiguration from the project java-docs-samples by GoogleCloudPlatform: class WorkloadGeneratorTest, method testGenerateWorkload.
/**
 * Verifies that a single read pass through the workload DoFn connects to the table and
 * logs the expected message. Uses Create.of(1L) instead of an unbounded sequence so the
 * test pipeline terminates.
 */
@Test
public void testGenerateWorkload() {
  BigtableWorkloadOptions options =
      PipelineOptionsFactory.create().as(BigtableWorkloadOptions.class);
  options.setBigtableInstanceId(instanceId);
  options.setBigtableTableId(TABLE_ID);
  options.setRegion(REGION_ID);
  Pipeline p = Pipeline.create(options);
  CloudBigtableTableConfiguration bigtableTableConfig =
      new CloudBigtableTableConfiguration.Builder()
          .withProjectId(options.getProject())
          .withInstanceId(options.getBigtableInstanceId())
          .withTableId(options.getBigtableTableId())
          .build();
  // Initiates a new pipeline every second
  p.apply(Create.of(1L)).apply(ParDo.of(new ReadFromTableFn(bigtableTableConfig)));
  p.run().waitUntilFinish();
  String output = bout.toString();
  // BUG FIX: the original called assertThat(output.contains(...)) — a Truth subject with no
  // terminal assertion, which never fails regardless of the output. Assert on the string
  // subject itself so the check is actually enforced.
  assertThat(output).contains("Connected to table");
}
Usage example of com.google.cloud.bigtable.beam.CloudBigtableTableConfiguration from the project java-bigtable-hbase by googleapis: class CloudBigtableBeamIT, method testWriteToBigtable.
/**
 * Integration test: runs a Dataflow pipeline that writes rows to Bigtable, waits for it to
 * finish, then scans the table with a key-only filter and verifies the expected row count.
 */
@Test
public void testWriteToBigtable() throws IOException {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  properties.applyTo(options);
  options.setAppName("testWriteToBigtable-" + System.currentTimeMillis());
  LOG.info(String.format("Started writeToBigtable test with jobName as: %s", options.getAppName()));

  // Assemble the table configuration, applying optional endpoint overrides when present.
  CloudBigtableTableConfiguration.Builder builder =
      new CloudBigtableTableConfiguration.Builder()
          .withProjectId(properties.getProjectId())
          .withInstanceId(properties.getInstanceId())
          .withTableId(tableName.getNameAsString());
  properties
      .getDataEndpoint()
      .ifPresent(host -> builder.withConfiguration(BIGTABLE_HOST_KEY, host));
  properties
      .getAdminEndpoint()
      .ifPresent(host -> builder.withConfiguration(BIGTABLE_ADMIN_HOST_KEY, host));
  CloudBigtableTableConfiguration config = builder.build();

  // Random row-key prefixes seed the write transform.
  List<String> rowKeyPrefixes = new ArrayList<>();
  for (int i = 0; i < PREFIX_COUNT; i++) {
    rowKeyPrefixes.add(RandomStringUtils.randomAlphanumeric(10));
  }

  Pipeline pipeline = Pipeline.create(options);
  pipeline
      .apply("Keys", Create.of(rowKeyPrefixes))
      .apply("Create Puts", ParDo.of(WRITE_ONE_TENTH_PERCENT))
      .apply("Write to BT", CloudBigtableIO.writeToTable(config));
  PipelineResult.State finalState = pipeline.run().waitUntilFinish();
  Assert.assertEquals(PipelineResult.State.DONE, finalState);

  // Count rows with a key-only scan so values are never transferred.
  try (ResultScanner scanner =
      connection.getTable(tableName).getScanner(new Scan().setFilter(new KeyOnlyFilter()))) {
    int rowCount = 0;
    while (scanner.next() != null) {
      rowCount++;
    }
    Assert.assertEquals(TOTAL_ROW_COUNT, rowCount);
  }
}
Aggregations