Use of com.google.cloud.bigquery.datatransfer.v1.ProjectName in project google-cloud-java by GoogleCloudPlatform, from the class ReportErrorsServiceClientTest, method reportErrorEventExceptionTest.
@Test
@SuppressWarnings("all")
public void reportErrorEventExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
  mockReportErrorsService.addException(exception);

  try {
    ProjectName projectName = ProjectName.create("[PROJECT]");
    ReportedErrorEvent event = ReportedErrorEvent.newBuilder().build();
    client.reportErrorEvent(projectName, event);
    Assert.fail("No exception raised");
  } catch (ApiException e) {
    Assert.assertEquals(Status.INVALID_ARGUMENT.getCode(), e.getStatusCode());
  }
}
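The excerpt assumes that mockReportErrorsService and client are wired up elsewhere in the test class. A rough sketch of the usual GAPIC-style fixture, which points the client at an in-process mock server, is shown below; the field and class names (MockReportErrorsService, ReportErrorsServiceSettings) are assumptions based on the generated test scaffolding, not taken from the excerpt.

import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import java.io.IOException;
import java.util.Arrays;

// Fields of the test class (names assumed from the generated scaffolding).
private static MockReportErrorsService mockReportErrorsService;
private static MockServiceHelper serviceHelper;
private ReportErrorsServiceClient client;

@BeforeClass
public static void startStaticServer() {
  mockReportErrorsService = new MockReportErrorsService();
  serviceHelper = new MockServiceHelper(
      "in-process-1", Arrays.<MockGrpcService>asList(mockReportErrorsService));
  serviceHelper.start();
}

@Before
public void setUp() throws IOException {
  serviceHelper.reset();
  // Point the client at the in-process mock instead of the real API endpoint.
  ReportErrorsServiceSettings settings =
      ReportErrorsServiceSettings.newBuilder()
          .setTransportChannelProvider(serviceHelper.createChannelProvider())
          .setCredentialsProvider(NoCredentialsProvider.create())
          .build();
  client = ReportErrorsServiceClient.create(settings);
}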
Use of com.google.cloud.bigquery.datatransfer.v1.ProjectName in project google-cloud-java by GoogleCloudPlatform, from the class GroupServiceClientTest, method createGroupTest.
@Test
@SuppressWarnings("all")
public void createGroupTest() {
  GroupName name2 = GroupName.create("[PROJECT]", "[GROUP]");
  String displayName = "displayName1615086568";
  GroupName parentName = GroupName.create("[PROJECT]", "[GROUP]");
  String filter = "filter-1274492040";
  boolean isCluster = false;
  Group expectedResponse =
      Group.newBuilder()
          .setNameWithGroupName(name2)
          .setDisplayName(displayName)
          .setParentNameWithGroupName(parentName)
          .setFilter(filter)
          .setIsCluster(isCluster)
          .build();
  mockGroupService.addResponse(expectedResponse);

  ProjectName name = ProjectName.create("[PROJECT]");
  Group group = Group.newBuilder().build();

  Group actualResponse = client.createGroup(name, group);
  Assert.assertEquals(expectedResponse, actualResponse);

  List<GeneratedMessageV3> actualRequests = mockGroupService.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  CreateGroupRequest actualRequest = (CreateGroupRequest) actualRequests.get(0);
  Assert.assertEquals(name, actualRequest.getNameAsProjectName());
  Assert.assertEquals(group, actualRequest.getGroup());
}
Use of com.google.cloud.bigquery.datatransfer.v1.ProjectName in project google-cloud-java by GoogleCloudPlatform, from the class GroupServiceClientTest, method createGroupExceptionTest.
@Test
@SuppressWarnings("all")
public void createGroupExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
  mockGroupService.addException(exception);

  try {
    ProjectName name = ProjectName.create("[PROJECT]");
    Group group = Group.newBuilder().build();
    client.createGroup(name, group);
    Assert.fail("No exception raised");
  } catch (ApiException e) {
    Assert.assertEquals(Status.INVALID_ARGUMENT.getCode(), e.getStatusCode());
  }
}
Use of com.google.cloud.bigquery.datatransfer.v1.ProjectName in project spring-cloud-gcp by spring-cloud, from the class SecretManagerTemplate, method createSecretInternal.
/**
 * Creates a new secret for the GCP project.
 *
 * <p>Note that the {@link Secret} object does not contain the secret payload. You must
 * create versions of the secret, which store the payload of the secret.
 */
private void createSecretInternal(String secretId, String projectId) {
  ProjectName projectName = ProjectName.of(projectId);
  Secret secret =
      Secret.newBuilder()
          .setReplication(
              Replication.newBuilder()
                  .setAutomatic(Replication.Automatic.getDefaultInstance()))
          .build();
  CreateSecretRequest request =
      CreateSecretRequest.newBuilder()
          .setParent(projectName.toString())
          .setSecretId(secretId)
          .setSecret(secret)
          .build();

  this.secretManagerServiceClient.createSecret(request);
}
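Because the newly created Secret carries no payload, the actual secret data has to be attached afterwards as a secret version. A minimal, illustrative follow-up step using the v1 Secret Manager client is sketched below; it is not taken from the template itself, and the "my-secret-value" payload is a placeholder.

import com.google.cloud.secretmanager.v1.SecretName;
import com.google.cloud.secretmanager.v1.SecretPayload;
import com.google.cloud.secretmanager.v1.SecretVersion;
import com.google.protobuf.ByteString;

// Attach the actual secret data to the secret created above by adding a version.
SecretName secretName = SecretName.of(projectId, secretId);
SecretPayload payload =
    SecretPayload.newBuilder()
        .setData(ByteString.copyFromUtf8("my-secret-value"))
        .build();
SecretVersion version =
    this.secretManagerServiceClient.addSecretVersion(secretName, payload);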
Use of com.google.cloud.bigquery.datatransfer.v1.ProjectName in project java-docs-samples by GoogleCloudPlatform, from the class WorkloadGeneratorTest, method testPipeline.
@Test
public void testPipeline() throws IOException, InterruptedException {
  String workloadJobName = "bigtable-workload-generator-test-" + new Date().getTime();
  final int WORKLOAD_DURATION = 10;
  final int WAIT_DURATION = WORKLOAD_DURATION * 60 * 1000;
  int rate = 1000;

  BigtableWorkloadOptions options =
      PipelineOptionsFactory.create().as(BigtableWorkloadOptions.class);
  options.setBigtableInstanceId(instanceId);
  options.setBigtableTableId(TABLE_ID);
  options.setWorkloadRate(rate);
  options.setRegion(REGION_ID);
  options.setWorkloadDurationMinutes(WORKLOAD_DURATION);
  options.setRunner(DataflowRunner.class);
  options.setJobName(workloadJobName);

  final PipelineResult pipelineResult = WorkloadGenerator.generateWorkload(options);

  MetricServiceClient metricServiceClient = MetricServiceClient.create();
  ProjectName name = ProjectName.of(projectId);

  // Wait for the workload to run, then query metrics for that same period.
  Thread.sleep(WAIT_DURATION);
  long startMillis = System.currentTimeMillis() - WAIT_DURATION;
  TimeInterval interval =
      TimeInterval.newBuilder()
          .setStartTime(Timestamps.fromMillis(startMillis))
          .setEndTime(Timestamps.fromMillis(System.currentTimeMillis()))
          .build();
  ListTimeSeriesRequest request =
      ListTimeSeriesRequest.newBuilder()
          .setName(name.toString())
          .setFilter("metric.type=\"bigtable.googleapis.com/server/request_count\"")
          .setInterval(interval)
          .build();
  ListTimeSeriesPagedResponse response = metricServiceClient.listTimeSeries(request);

  long startRequestCount = 0;
  long endRequestCount = 0;
  for (TimeSeries ts : response.iterateAll()) {
    startRequestCount = ts.getPoints(0).getValue().getInt64Value();
    endRequestCount = ts.getPoints(ts.getPointsCount() - 1).getValue().getInt64Value();
  }
  // A bare assertThat(boolean) performs no check; isTrue() makes it assert.
  assertThat(endRequestCount - startRequestCount > rate).isTrue();

  // Stop the running job.
  String jobId = ((DataflowPipelineJob) pipelineResult).getJobId();
  DataflowClient client = DataflowClient.create(options);
  Job job = client.getJob(jobId);
  assertThat(job.getCurrentState().equals("JOB_STATE_CANCELLED")).isTrue();
}
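As excerpted, the final assertion expects JOB_STATE_CANCELLED even though no cancellation call appears before it. One way to stop the pipeline is Beam's PipelineResult.cancel(); the bounded polling loop below is only an illustrative sketch (it reuses the pipelineResult, client, and jobId variables from the test above) of waiting until Dataflow reports the cancelled state before asserting on it.

// Request cancellation of the submitted Dataflow job, then poll until the
// service reports the cancelled state (bounded so the test cannot hang forever).
pipelineResult.cancel();
Job job = client.getJob(jobId);
for (int i = 0; i < 30 && !"JOB_STATE_CANCELLED".equals(job.getCurrentState()); i++) {
  Thread.sleep(10_000);
  job = client.getJob(jobId);
}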