Use of org.apache.beam.runners.dataflow.util.CloudObject in project beam by apache.
The class TimerOrElementTest, method testCoderCanBeDecodedFromCloudObject.
@Test
public void testCoderCanBeDecodedFromCloudObject() {
  CloudObject cloudObject =
      CloudObject.forClassName(
          "com.google.cloud.dataflow.sdk.util.TimerOrElement$TimerOrElementCoder");
  List<CloudObject> component =
      Collections.singletonList(
          CloudObjects.asCloudObject(
              KvCoder.of(VarLongCoder.of(), ByteArrayCoder.of()), /*sdkComponents=*/ null));
  Structs.addList(cloudObject, PropertyNames.COMPONENT_ENCODINGS, component);
  Coder<?> decoded = CloudObjects.coderFromCloudObject(cloudObject);
  assertThat(decoded, instanceOf(TimerOrElementCoder.class));
  TimerOrElementCoder<?> decodedCoder = (TimerOrElementCoder<?>) decoded;
  assertThat(decodedCoder.getKeyCoder(), equalTo(VarLongCoder.of()));
  assertThat(decodedCoder.getElementCoder(), equalTo(ByteArrayCoder.of()));
}
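The behavior exercised here is the CloudObject round trip: CloudObjects.asCloudObject translates a coder into its cloud representation, and CloudObjects.coderFromCloudObject reconstructs an equal coder from it. A minimal sketch of that round trip, reusing only the calls that appear in the test above:

  // Minimal round-trip sketch (assumes the same imports as the test above).
  Coder<?> original = KvCoder.of(VarLongCoder.of(), ByteArrayCoder.of());
  CloudObject spec = CloudObjects.asCloudObject(original, /*sdkComponents=*/ null);
  Coder<?> roundTripped = CloudObjects.coderFromCloudObject(spec);
  assertEquals(original, roundTripped); // coders compare structurally by their components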
Use of org.apache.beam.runners.dataflow.util.CloudObject in project beam by apache.
The class CloudSourceUtilsTest, method testFlattenBaseSpecs.
@Test
public void testFlattenBaseSpecs() throws Exception {
  // G = grandparent, P = parent, C = child.
  CloudObject grandparent = CloudObject.forClassName("text");
  addString(grandparent, "G", "g_g");
  addString(grandparent, "GP", "gp_g");
  addString(grandparent, "GC", "gc_g");
  addString(grandparent, "GPC", "gpc_g");
  CloudObject parent = CloudObject.forClassName("text");
  addString(parent, "P", "p_p");
  addString(parent, "PC", "pc_p");
  addString(parent, "GP", "gp_p");
  addString(parent, "GPC", "gpc_p");
  CloudObject child = CloudObject.forClassName("text");
  addString(child, "C", "c_c");
  addString(child, "PC", "pc_c");
  addString(child, "GC", "gc_c");
  addString(child, "GPC", "gpc_c");
  Source source = new Source();
  source.setBaseSpecs(new ArrayList<Map<String, Object>>());
  source.getBaseSpecs().add(grandparent);
  source.getBaseSpecs().add(parent);
  source.setSpec(child);
  source.setCodec(CloudObjects.asCloudObject(StringUtf8Coder.of(), /*sdkComponents=*/ null));
  Source flat = CloudSourceUtils.flattenBaseSpecs(source);
  assertNull(flat.getBaseSpecs());
  assertEquals(
      StringUtf8Coder.class.getName(), getString(flat.getCodec(), PropertyNames.OBJECT_TYPE_NAME));
  CloudObject flatSpec = CloudObject.fromSpec(flat.getSpec());
  assertEquals("g_g", getString(flatSpec, "G"));
  assertEquals("p_p", getString(flatSpec, "P"));
  assertEquals("c_c", getString(flatSpec, "C"));
  assertEquals("gp_p", getString(flatSpec, "GP"));
  assertEquals("gc_c", getString(flatSpec, "GC"));
  assertEquals("pc_c", getString(flatSpec, "PC"));
  assertEquals("gpc_c", getString(flatSpec, "GPC"));
}
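The assertions encode the merge rule: flattenBaseSpecs folds the base specs in order and applies the main spec last, so for any key defined in several places the most specific spec wins (parent over grandparent, child over both). A reduced sketch of the same rule with a single overridden key, using only calls from the test above:

  // Reduced sketch: one key defined in both a base spec and the main spec.
  CloudObject base = CloudObject.forClassName("text");
  addString(base, "key", "from_base");
  CloudObject main = CloudObject.forClassName("text");
  addString(main, "key", "from_main");
  Source s = new Source();
  s.setBaseSpecs(new ArrayList<Map<String, Object>>());
  s.getBaseSpecs().add(base);
  s.setSpec(main);
  Source flattened = CloudSourceUtils.flattenBaseSpecs(s);
  // The main spec's value overrides the base spec's value.
  assertEquals("from_main", getString(CloudObject.fromSpec(flattened.getSpec()), "key"));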
Use of org.apache.beam.runners.dataflow.util.CloudObject in project beam by apache.
The class UserParDoFnFactoryTest, method getCloudObject.
private CloudObject getCloudObject(DoFn<?, ?> fn, WindowingStrategy<?, ?> windowingStrategy) {
  CloudObject object = CloudObject.forClassName("DoFn");
  @SuppressWarnings({"rawtypes", "unchecked"}) // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
  DoFnInfo<?, ?> info =
      DoFnInfo.forFn(
          fn,
          windowingStrategy,
          null, /* side input views */
          null, /* input coder */
          new TupleTag<>(PropertyNames.OUTPUT), /* main output id */
          DoFnSchemaInformation.create(),
          Collections.emptyMap());
  object.set(
      PropertyNames.SERIALIZED_FN,
      StringUtils.byteArrayToJsonString(SerializableUtils.serializeToByteArray(info)));
  return object;
}
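This helper packages a DoFn for the Dataflow worker: the DoFnInfo (the fn plus its windowing strategy, main output tag, and schema information) is Java-serialized, encoded as a JSON-safe string, and stored under PropertyNames.SERIALIZED_FN. The tests below call a one-argument getCloudObject(fn); presumably an overload, not excerpted here, that supplies a default windowing strategy. A hedged sketch of the inverse step, assuming Beam's StringUtils.jsonStringToByteArray and SerializableUtils.deserializeFromByteArray counterparts exist as named:

  // Sketch only: recover the DoFnInfo from the CloudObject built above.
  CloudObject object =
      getCloudObject(
          new TestDoFn(Collections.<TupleTag<String>>emptyList()), WindowingStrategy.globalDefault());
  byte[] serializedFn =
      StringUtils.jsonStringToByteArray(Structs.getString(object, PropertyNames.SERIALIZED_FN));
  DoFnInfo<?, ?> recovered =
      (DoFnInfo<?, ?>) SerializableUtils.deserializeFromByteArray(serializedFn, "DoFnInfo");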
Use of org.apache.beam.runners.dataflow.util.CloudObject in project beam by apache.
The class UserParDoFnFactoryTest, method testFactoryDoesNotReuseAfterAborted.
@Test
public void testFactoryDoesNotReuseAfterAborted() throws Exception {
  PipelineOptions options = PipelineOptionsFactory.create();
  CounterSet counters = new CounterSet();
  TestDoFn initialFn = new TestDoFn(Collections.<TupleTag<String>>emptyList());
  CloudObject cloudObject = getCloudObject(initialFn);
  ParDoFn parDoFn =
      factory.create(
          options, cloudObject, null, MAIN_OUTPUT,
          ImmutableMap.<TupleTag<?>, Integer>of(MAIN_OUTPUT, 0),
          BatchModeExecutionContext.forTesting(options, "testStage"),
          TestOperationContext.create(counters));
  Receiver rcvr = new OutputReceiver();
  parDoFn.startBundle(rcvr);
  parDoFn.processElement(WindowedValue.valueInGlobalWindow("foo"));
  TestDoFn fn = (TestDoFn) ((SimpleParDoFn) parDoFn).getDoFnInfo().getDoFn();
  parDoFn.abort();
  assertThat(fn.state, equalTo(TestDoFn.State.TORN_DOWN));
  // The aborted fn has been torn down, so the factory must not hand it back;
  // the next create() call should produce a fresh instance.
  ParDoFn secondParDoFn =
      factory.create(
          options, cloudObject.clone(), null, MAIN_OUTPUT,
          ImmutableMap.<TupleTag<?>, Integer>of(MAIN_OUTPUT, 0),
          BatchModeExecutionContext.forTesting(options, "testStage"),
          TestOperationContext.create(counters));
  secondParDoFn.startBundle(rcvr);
  secondParDoFn.processElement(WindowedValue.valueInGlobalWindow("foo"));
  TestDoFn secondFn = (TestDoFn) ((SimpleParDoFn) secondParDoFn).getDoFnInfo().getDoFn();
  assertThat(secondFn, not(theInstance(fn)));
  assertThat(fn.state, equalTo(TestDoFn.State.TORN_DOWN));
  assertThat(secondFn.state, equalTo(TestDoFn.State.PROCESSING));
}
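The test pins down the factory's contract after a failure: abort() tears the fn down, and a later create() from the same CloudObject yields a new instance rather than the cached, torn-down one. A hedged sketch of the caller-side pattern this enables, reusing the names from the test above and assuming ParDoFn exposes finishBundle() alongside startBundle(), processElement(), and abort():

  // Sketch only: abort a failed bundle, then retry with a freshly created ParDoFn.
  ParDoFn attempt =
      factory.create(
          options, cloudObject.clone(), null, MAIN_OUTPUT,
          ImmutableMap.<TupleTag<?>, Integer>of(MAIN_OUTPUT, 0),
          BatchModeExecutionContext.forTesting(options, "testStage"),
          TestOperationContext.create(counters));
  try {
    attempt.startBundle(new OutputReceiver());
    attempt.processElement(WindowedValue.valueInGlobalWindow("retry"));
    attempt.finishBundle();
  } catch (Exception e) {
    attempt.abort(); // the torn-down fn will not be reused by the factory
    throw e;
  }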
Use of org.apache.beam.runners.dataflow.util.CloudObject in project beam by apache.
The class UserParDoFnFactoryTest, method testCleanupRegistered.
@Test
public void testCleanupRegistered() throws Exception {
  PipelineOptions options = PipelineOptionsFactory.create();
  CounterSet counters = new CounterSet();
  DoFn<?, ?> initialFn = new TestStatefulDoFn();
  CloudObject cloudObject =
      getCloudObject(
          initialFn,
          WindowingStrategy.globalDefault().withWindowFn(FixedWindows.of(Duration.millis(10))));
  TimerInternals timerInternals = mock(TimerInternals.class);
  DataflowStepContext stepContext = mock(DataflowStepContext.class);
  when(stepContext.timerInternals()).thenReturn(timerInternals);
  DataflowExecutionContext<DataflowStepContext> executionContext = mock(DataflowExecutionContext.class);
  TestOperationContext operationContext = TestOperationContext.create(counters);
  when(executionContext.getStepContext(operationContext)).thenReturn(stepContext);
  when(executionContext.getSideInputReader(any(), any(), any())).thenReturn(NullSideInputReader.empty());
  ParDoFn parDoFn =
      factory.create(
          options, cloudObject, Collections.emptyList(), MAIN_OUTPUT,
          ImmutableMap.of(MAIN_OUTPUT, 0), executionContext, operationContext);
  Receiver rcvr = new OutputReceiver();
  parDoFn.startBundle(rcvr);
  IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(10));
  parDoFn.processElement(WindowedValue.of("foo", new Instant(1), firstWindow, PaneInfo.NO_FIRING));
  verify(stepContext)
      .setStateCleanupTimer(
          SimpleParDoFn.CLEANUP_TIMER_ID,
          firstWindow,
          IntervalWindow.getCoder(),
          firstWindow.maxTimestamp().plus(Duration.millis(1L)),
          firstWindow.maxTimestamp().plus(Duration.millis(1L)));
}
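What the verification checks is the cleanup schedule for a stateful DoFn's state: after the first element in a window, SimpleParDoFn registers a cleanup timer one millisecond past the window's maximum timestamp (used here as both the firing time and the output timestamp). A small sketch of that timestamp arithmetic, using the same window as above:

  // Sketch: the cleanup-timer firing time expected by the verification above.
  IntervalWindow window = new IntervalWindow(new Instant(0), new Instant(10));
  Instant cleanupTime = window.maxTimestamp().plus(Duration.millis(1L));
  // For a [0, 10) window, maxTimestamp() is 9 ms (end minus one millisecond),
  // so the cleanup timer is set for 10 ms.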