Use of org.apache.beam.sdk.PipelineResult.State in project beam by apache.
From the class PortableRunnerTest, the method stagesAndRunsJob:
@Test
public void stagesAndRunsJob() throws Exception {
  createJobServer(JobState.Enum.DONE, JobApi.MetricResults.getDefaultInstance());
  PortableRunner runner =
      PortableRunner.create(options, ManagedChannelFactory.createInProcess());
  State state = runner.run(p).waitUntilFinish();
  assertThat(state, is(State.DONE));
}
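For contrast, a minimal sketch of the failure path, assuming the same createJobServer test helper accepts other terminal states; this variant is illustrative and not taken from the Beam repository:

@Test
public void stagesAndFailsJob() throws Exception {
  // Hypothetical variation: stub the job service to report FAILED instead of DONE.
  createJobServer(JobState.Enum.FAILED, JobApi.MetricResults.getDefaultInstance());
  PortableRunner runner =
      PortableRunner.create(options, ManagedChannelFactory.createInProcess());
  // waitUntilFinish should then surface the terminal FAILED state.
  State state = runner.run(p).waitUntilFinish();
  assertThat(state, is(State.FAILED));
}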
Use of org.apache.beam.sdk.PipelineResult.State in project beam by apache.
From the class DataflowPipelineJobTest, the method testWaitToFinishMessagesFail:
@Test
public void testWaitToFinishMessagesFail() throws Exception {
  Dataflow.Projects.Locations.Jobs.Get statusRequest =
      mock(Dataflow.Projects.Locations.Jobs.Get.class);
  Job statusResponse = new Job();
  statusResponse.setCurrentState("JOB_STATE_" + State.DONE.name());
  when(mockJobs.get(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID))).thenReturn(statusRequest);
  when(statusRequest.execute()).thenReturn(statusResponse);

  MonitoringUtil.JobMessagesHandler jobHandler = mock(MonitoringUtil.JobMessagesHandler.class);
  Dataflow.Projects.Locations.Jobs.Messages mockMessages =
      mock(Dataflow.Projects.Locations.Jobs.Messages.class);
  Messages.List listRequest = mock(Dataflow.Projects.Locations.Jobs.Messages.List.class);
  when(mockJobs.messages()).thenReturn(mockMessages);
  when(mockMessages.list(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID))).thenReturn(listRequest);
  when(listRequest.setPageToken(eq((String) null))).thenReturn(listRequest);
  when(listRequest.execute()).thenThrow(SocketTimeoutException.class);

  DataflowPipelineJob job =
      new DataflowPipelineJob(DataflowClient.create(options), JOB_ID, options, ImmutableMap.of());
  State state = job.waitUntilFinish(Duration.standardMinutes(5), jobHandler, fastClock, fastClock);
  assertEquals(null, state);
}
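The null return is how waitUntilFinish signals that no terminal state was reached within the bounded wait: here the message-list call keeps throwing SocketTimeoutException, so the five simulated minutes elapse without a result. A caller would typically guard for that case along these lines (an illustrative sketch reusing the names from the test above):

State state = job.waitUntilFinish(Duration.standardMinutes(5), jobHandler, fastClock, fastClock);
if (state == null) {
  // The job did not reach a terminal state within the timeout; it may still be running.
  // A real caller might poll again, cancel the job, or report a timeout to the user.
}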
Use of org.apache.beam.sdk.PipelineResult.State in project beam by apache.
From the class DataflowPipelineJobTest, the method testCancelTerminatedJobWithStaleState:
/**
* Test that {@link DataflowPipelineJob#cancel} doesn't throw if the Dataflow service returns
* non-terminal state even though the cancel API call failed, which can happen in practice.
*
* <p>TODO: delete this code if the API calls become consistent.
*/
@Test
public void testCancelTerminatedJobWithStaleState() throws IOException {
  Dataflow.Projects.Locations.Jobs.Get statusRequest =
      mock(Dataflow.Projects.Locations.Jobs.Get.class);
  Job statusResponse = new Job();
  statusResponse.setCurrentState("JOB_STATE_RUNNING");
  when(mockJobs.get(PROJECT_ID, REGION_ID, JOB_ID)).thenReturn(statusRequest);
  when(statusRequest.execute()).thenReturn(statusResponse);

  Dataflow.Projects.Locations.Jobs.Update update =
      mock(Dataflow.Projects.Locations.Jobs.Update.class);
  when(mockJobs.update(eq(PROJECT_ID), eq(REGION_ID), eq(JOB_ID), any(Job.class))).thenReturn(update);
  when(update.execute()).thenThrow(new IOException("Job has terminated in state SUCCESS"));

  DataflowPipelineJob job =
      new DataflowPipelineJob(DataflowClient.create(options), JOB_ID, options, null);
  State returned = job.cancel();
  assertThat(returned, equalTo(State.RUNNING));
  expectedLogs.verifyWarn("Cancel failed because job is already terminated.");
}
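For comparison, a sketch of the ordinary cancel path under the same mocks: if update.execute() succeeds instead of throwing, cancel() is expected to return the cancelled state. The stubbing below is an assumed variation, not verbatim from the test class:

Job cancelResponse = new Job();
cancelResponse.setCurrentState("JOB_STATE_CANCELLED");
// Assumed variation: the update call succeeds rather than throwing an IOException.
when(update.execute()).thenReturn(cancelResponse);
State returned = job.cancel();
assertThat(returned, equalTo(State.CANCELLED));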
Use of org.apache.beam.sdk.PipelineResult.State in project beam by apache.
From the class DataStoreReadWriteIT, the method testDataStoreV1SqlWriteRead_withoutKey:
@Test
public void testDataStoreV1SqlWriteRead_withoutKey() {
  BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new DataStoreV1TableProvider());
  String projectId = options.getProject();
  String createTableStatement =
      "CREATE EXTERNAL TABLE TEST( \n"
          + " `content` VARCHAR \n"
          + ") \n"
          + "TYPE 'datastoreV1' \n"
          + "LOCATION '" + projectId + "/" + KIND + "'";
  sqlEnv.executeDdl(createTableStatement);

  String insertStatement = "INSERT INTO TEST VALUES ( '3000' )";
  BeamSqlRelUtils.toPCollection(writePipeline, sqlEnv.parseQuery(insertStatement));
  writePipeline.run().waitUntilFinish();

  String selectTableStatement = "SELECT * FROM TEST";
  PCollection<Row> output =
      BeamSqlRelUtils.toPCollection(readPipeline, sqlEnv.parseQuery(selectTableStatement));
  assertThat(output.getSchema(), equalTo(SOURCE_SCHEMA_WITHOUT_KEY));

  PipelineResult.State state = readPipeline.run().waitUntilFinish(Duration.standardMinutes(5));
  assertThat(state, equalTo(State.DONE));
}
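This test checks only the schema and the terminal pipeline state; verifying the row contents would follow the same PAssert pattern as the BigQuery example below, registered before readPipeline.run(). A minimal sketch, assuming the written value comes back as the single `content` column:

PAssert.that(output)
    .containsInAnyOrder(Row.withSchema(SOURCE_SCHEMA_WITHOUT_KEY).addValues("3000").build());
PipelineResult.State state = readPipeline.run().waitUntilFinish(Duration.standardMinutes(5));
assertThat(state, equalTo(State.DONE));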
Use of org.apache.beam.sdk.PipelineResult.State in project beam by apache.
From the class BigQueryReadWriteIT, the method testSQLRead_withDirectRead_withProjectPushDown:
@Test
public void testSQLRead_withDirectRead_withProjectPushDown() {
  BeamSqlEnv sqlEnv = BeamSqlEnv.inMemory(new BigQueryTableProvider());
  String createTableStatement =
      "CREATE EXTERNAL TABLE TEST( \n"
          + " c_bigint BIGINT, \n"
          + " c_tinyint TINYINT, \n"
          + " c_smallint SMALLINT, \n"
          + " c_integer INTEGER, \n"
          + " c_float FLOAT, \n"
          + " c_double DOUBLE, \n"
          + " c_boolean BOOLEAN, \n"
          + " c_timestamp TIMESTAMP, \n"
          + " c_varchar VARCHAR, \n "
          + " c_char CHAR, \n"
          + " c_arr ARRAY<VARCHAR> \n"
          + ") \n"
          + "TYPE 'bigquery' \n"
          + "LOCATION '" + bigQueryTestingTypes.tableSpec() + "' \n"
          + "TBLPROPERTIES '{ " + METHOD_PROPERTY + ": \"" + Method.DIRECT_READ.toString() + "\" }'";
  sqlEnv.executeDdl(createTableStatement);

  String insertStatement =
      "INSERT INTO TEST VALUES ("
          + "9223372036854775807, " + "127, " + "32767, " + "2147483647, " + "1.0, " + "1.0, "
          + "TRUE, " + "TIMESTAMP '2018-05-28 20:17:40.123', " + "'varchar', " + "'char', "
          + "ARRAY['123', '456']" + ")";
  BeamSqlRelUtils.toPCollection(pipeline, sqlEnv.parseQuery(insertStatement));
  pipeline.run().waitUntilFinish(Duration.standardMinutes(5));

  String selectTableStatement = "SELECT c_integer, c_varchar, c_tinyint FROM TEST";
  BeamRelNode relNode = sqlEnv.parseQuery(selectTableStatement);
  PCollection<Row> output = BeamSqlRelUtils.toPCollection(readPipeline, relNode);

  assertThat(relNode, instanceOf(BeamPushDownIOSourceRel.class));
  // IO projects fields in the same order they are defined in the schema.
  assertThat(relNode.getRowType().getFieldNames(), containsInAnyOrder("c_tinyint", "c_integer", "c_varchar"));
  // Field reordering is done in a Calc.
  assertThat(
      output.getSchema(),
      equalTo(
          Schema.builder()
              .addNullableField("c_integer", INT32)
              .addNullableField("c_varchar", STRING)
              .addNullableField("c_tinyint", BYTE)
              .build()));
  PAssert.that(output).containsInAnyOrder(row(output.getSchema(), 2147483647, "varchar", (byte) 127));

  PipelineResult.State state = readPipeline.run().waitUntilFinish(Duration.standardMinutes(5));
  assertThat(state, equalTo(State.DONE));
}
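One detail worth noting: the terminal state of the insert pipeline is discarded, so a failed write would only surface later as a missing row in the read assertion. A stricter variant would check it the same way the read pipeline is checked (a minimal sketch):

PipelineResult.State insertState = pipeline.run().waitUntilFinish(Duration.standardMinutes(5));
assertThat(insertState, equalTo(State.DONE));
// waitUntilFinish(Duration) returns null on timeout, so this also fails fast
// if the insert job never reaches a terminal state.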