Use of com.google.api.services.dataflow.model.Job in project beam by apache.
From class DataflowPipelineJobTest, method testCancelUnterminatedJobThatSucceeds.
@Test
public void testCancelUnterminatedJobThatSucceeds() throws IOException {
  Dataflow.Projects.Locations.Jobs.Update update =
      mock(Dataflow.Projects.Locations.Jobs.Update.class);
  when(mockJobs.update(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID), any(Job.class)))
      .thenReturn(update);
  when(update.execute()).thenReturn(new Job().setCurrentState("JOB_STATE_CANCELLED"));

  DataflowPipelineJob job =
      new DataflowPipelineJob(DataflowClient.create(options), JOB_ID, options, null);
  assertEquals(State.CANCELLED, job.cancel());

  // cancel() must have issued exactly one update request asking the service to cancel the job.
  Job content = new Job();
  content.setProjectId(PROJECT_ID);
  content.setId(JOB_ID);
  content.setRequestedState("JOB_STATE_CANCELLED");
  verify(mockJobs).update(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID), eq(content));
  verifyNoMoreInteractions(mockJobs);
}
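The snippet relies on fixtures defined elsewhere in DataflowPipelineJobTest: the mocked Jobs resource, the project/region/job constants, and the pipeline options. A minimal sketch of what that setup might look like; the constant values, the deep-stub wiring, and the field names are assumptions, not the file's actual contents.

// Hypothetical fixture sketch; names mirror the snippet above, values are invented.
private static final String PROJECT_ID = "some-project";
private static final String REGION_ID = "some-region";
private static final String JOB_ID = "1234";

@Mock private Dataflow.Projects.Locations.Jobs mockJobs;
private DataflowPipelineOptions options;

@Before
public void setup() throws IOException {
  MockitoAnnotations.initMocks(this);
  options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setProject(PROJECT_ID);
  options.setRegion(REGION_ID);
  // DataflowClient.create(options) reads the Dataflow stub from the options, so wire the
  // mocked Jobs resource into a deep-stubbed Dataflow instance (assumed setup, not Beam's).
  Dataflow mockDataflow = mock(Dataflow.class, RETURNS_DEEP_STUBS);
  when(mockDataflow.projects().locations().jobs()).thenReturn(mockJobs);
  options.setDataflowClient(mockDataflow);
}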
Use of com.google.api.services.dataflow.model.Job in project beam by apache.
From class DataflowPipelineTranslatorTest, method testNamesOverridden.
/**
 * Test that in translation the name for a collection (in this case just a Create output) is
 * overridden to be what the Dataflow service expects.
 */
@Test
public void testNamesOverridden() throws Exception {
  DataflowPipelineOptions options = buildPipelineOptions();
  DataflowRunner runner = DataflowRunner.fromOptions(options);
  options.setStreaming(false);
  DataflowPipelineTranslator translator = DataflowPipelineTranslator.fromOptions(options);

  Pipeline pipeline = Pipeline.create(options);
  pipeline.apply("Jazzy", Create.of(3)).setName("foobizzle");
  runner.replaceTransforms(pipeline);
  Job job =
      translator.translate(pipeline, runner, Collections.<DataflowPackage>emptyList()).getJob();

  // The Create step
  Step step = job.getSteps().get(0);

  // This is the name that is "set by the user" that the Dataflow translator must override
  String userSpecifiedName =
      Structs.getString(
          Structs.getListOfMaps(step.getProperties(), PropertyNames.OUTPUT_INFO, null).get(0),
          PropertyNames.USER_NAME);

  // This is the calculated name that must actually be used
  String calculatedName =
      Structs.getString(step.getProperties(), PropertyNames.USER_NAME) + ".out0";

  assertThat(userSpecifiedName, equalTo(calculatedName));
}
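Several of the translator tests here call a buildPipelineOptions() helper that the excerpts never show. A plausible sketch of such a helper, assuming it only assembles DataflowPipelineOptions with fake GCP settings; the specific values and the empty files-to-stage list are assumptions.

// Hypothetical helper; the real test class may configure more (credentials, GCS stubs).
private static DataflowPipelineOptions buildPipelineOptions() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setProject("some-project");
  options.setRegion("some-region");
  options.setTempLocation("gs://somebucket/some/path");
  options.setFilesToStage(new ArrayList<>()); // avoid scanning the classpath for jars to stage
  return options;
}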
Use of com.google.api.services.dataflow.model.Job in project beam by apache.
From class DataflowMetricsTest, method testEmptyMetricUpdates.
@Test
public void testEmptyMetricUpdates() throws IOException {
  Job modelJob = new Job();
  modelJob.setCurrentState(State.RUNNING.toString());

  DataflowPipelineJob job = mock(DataflowPipelineJob.class);
  when(job.getState()).thenReturn(State.RUNNING);
  job.jobId = JOB_ID;

  // The service reports no metric updates at all for this job.
  JobMetrics jobMetrics = new JobMetrics();
  jobMetrics.setMetrics(null);
  DataflowClient dataflowClient = mock(DataflowClient.class);
  when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

  DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
  MetricQueryResults result = dataflowMetrics.queryMetrics();
  assertThat(ImmutableList.copyOf(result.counters()), is(empty()));
  assertThat(ImmutableList.copyOf(result.distributions()), is(empty()));
}
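For contrast, a populated response would attach MetricUpdate entries to the JobMetrics before stubbing the client. A minimal sketch of the non-empty case, using the Dataflow model classes from the same package as Job; the metric name, context, and value are invented.

// Hypothetical counter update; a structured name plus a scalar value (assumed shape).
MetricStructuredName name = new MetricStructuredName();
name.setName("elementCount");
name.setOrigin("user");
name.setContext(ImmutableMap.of("step", "s2"));

MetricUpdate update = new MetricUpdate();
update.setName(name);
update.setScalar(new BigDecimal(42)); // counters arrive as scalar values

JobMetrics jobMetrics = new JobMetrics();
jobMetrics.setMetrics(ImmutableList.of(update));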
Use of com.google.api.services.dataflow.model.Job in project beam by apache.
From class DataflowPipelineTranslatorTest, method testSubnetworkConfigMissing.
@Test
public void testSubnetworkConfigMissing() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  Pipeline p = buildPipeline(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(p, DataflowRunner.fromOptions(options), Collections.<DataflowPackage>emptyList())
          .getJob();
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertNull(job.getEnvironment().getWorkerPools().get(0).getSubnetwork());
}
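The complementary case, where a subnetwork is configured explicitly, should survive translation into the worker pool. A sketch of that positive test, assuming DataflowPipelineWorkerPoolOptions exposes a setSubnetwork setter; the subnetwork path is invented.

@Test
public void testSubnetworkConfig() throws IOException {
  final String subnetwork = "regions/some-region/subnetworks/some-subnetwork"; // invented value
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setSubnetwork(subnetwork);
  Pipeline p = buildPipeline(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(p, DataflowRunner.fromOptions(options), Collections.<DataflowPackage>emptyList())
          .getJob();
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  // The configured subnetwork should appear verbatim on the translated worker pool.
  assertEquals(subnetwork, job.getEnvironment().getWorkerPools().get(0).getSubnetwork());
}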
Use of com.google.api.services.dataflow.model.Job in project beam by apache.
From class DataflowPipelineTranslatorTest, method testScalingAlgorithmNone.
@Test
public void testScalingAlgorithmNone() throws IOException {
  final DataflowPipelineWorkerPoolOptions.AutoscalingAlgorithmType noScaling =
      DataflowPipelineWorkerPoolOptions.AutoscalingAlgorithmType.NONE;
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setAutoscalingAlgorithm(noScaling);
  Pipeline p = buildPipeline(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(p, DataflowRunner.fromOptions(options), Collections.<DataflowPackage>emptyList())
          .getJob();
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  assertEquals(
      "AUTOSCALING_ALGORITHM_NONE",
      job.getEnvironment().getWorkerPools().get(0).getAutoscalingSettings().getAlgorithm());
  assertEquals(
      0,
      job.getEnvironment().getWorkerPools().get(0).getAutoscalingSettings().getMaxNumWorkers().intValue());
}
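For comparison, enabling throughput-based autoscaling should surface both the algorithm and the worker cap in the translated environment. A sketch along the same lines, assuming THROUGHPUT_BASED maps to the service string "AUTOSCALING_ALGORITHM_BASIC" and that maxNumWorkers is set via DataflowPipelineWorkerPoolOptions; the worker count is invented.

@Test
public void testScalingAlgorithmBasic() throws IOException {
  final DataflowPipelineWorkerPoolOptions.AutoscalingAlgorithmType basicScaling =
      DataflowPipelineWorkerPoolOptions.AutoscalingAlgorithmType.THROUGHPUT_BASED;
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setAutoscalingAlgorithm(basicScaling);
  options.setMaxNumWorkers(42); // invented cap
  Pipeline p = buildPipeline(options);
  p.traverseTopologically(new RecordingPipelineVisitor());
  Job job =
      DataflowPipelineTranslator.fromOptions(options)
          .translate(p, DataflowRunner.fromOptions(options), Collections.<DataflowPackage>emptyList())
          .getJob();
  assertEquals(1, job.getEnvironment().getWorkerPools().size());
  // Assumed service-side name for throughput-based autoscaling.
  assertEquals(
      "AUTOSCALING_ALGORITHM_BASIC",
      job.getEnvironment().getWorkerPools().get(0).getAutoscalingSettings().getAlgorithm());
  assertEquals(
      42,
      job.getEnvironment().getWorkerPools().get(0).getAutoscalingSettings().getMaxNumWorkers().intValue());
}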