use of com.google.api.services.dataflow.model.SourceSplitResponse in project beam by apache.
the class BatchDataflowWorkerTest method testIsSplitResponseTooLarge.
@Test
public void testIsSplitResponseTooLarge() throws IOException {
  SourceSplitResponse splitResponse = new SourceSplitResponse();
  splitResponse.setShards(
      ImmutableList.<SourceSplitShard>of(new SourceSplitShard(), new SourceSplitShard()));
  assertThat(DataflowApiUtils.computeSerializedSizeBytes(splitResponse), greaterThan(0L));
}
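For orientation, here is a minimal sketch of the kind of size guard this test exercises, using only the DataflowApiUtils.computeSerializedSizeBytes utility called above. The helper name isResponseTooLarge and the byte limit are illustrative assumptions, not the worker's actual API:

// Hypothetical sketch, not BatchDataflowWorker's implementation: serialize the
// response the same way the test does and compare it against a byte budget.
static boolean isResponseTooLarge(SourceSplitResponse response, long limitBytes)
    throws IOException {
  return DataflowApiUtils.computeSerializedSizeBytes(response) > limitBytes;
}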
use of com.google.api.services.dataflow.model.SourceSplitResponse in project beam by apache.
the class WorkerCustomSourcesSplitOnlySourceTest method testAllSplitsAreReturned.
@Test
public void testAllSplitsAreReturned() throws Exception {
  final long apiSizeLimitForTest = 500 * 1024;
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setAppName("TestAppName");
  options.setProject("test-project");
  options.setRegion("some-region1");
  options.setTempLocation("gs://test/temp/location");
  options.setGcpCredential(new TestCredential());
  options.setRunner(DataflowRunner.class);
  options.setPathValidatorClass(NoopPathValidator.class);
  // Generate a CountingSource and split it into the desired number of splits
  // (desired size = 1 byte), triggering the re-split with a larger bundle size.
  // Thus below we expect to produce 'numberOfSplits' splits.
  com.google.api.services.dataflow.model.Source source =
      WorkerCustomSourcesTest.translateIOToCloudSource(
          CountingSource.upTo(numberOfSplits), options);
  SourceSplitResponse split =
      WorkerCustomSourcesTest.performSplit(
          source, options, 1L, null /* numBundles limit */, apiSizeLimitForTest);
  assertThat(
      split.getBundles().size(), lessThanOrEqualTo(WorkerCustomSources.DEFAULT_NUM_BUNDLES_LIMIT));
  List<OffsetBasedSource<?>> originalSplits = new ArrayList<>(numberOfSplits);
  // Collect all the splits.
  for (DerivedSource derivedSource : split.getBundles()) {
    Object deserializedSource =
        WorkerCustomSources.deserializeFromCloudSource(derivedSource.getSource().getSpec());
    if (deserializedSource instanceof SplittableOnlyBoundedSource) {
      SplittableOnlyBoundedSource<?> splittableOnlySource =
          (SplittableOnlyBoundedSource<?>) deserializedSource;
      originalSplits.addAll((List) splittableOnlySource.split(1L, options));
    } else {
      originalSplits.add((OffsetBasedSource<?>) deserializedSource);
    }
  }
  assertEquals(numberOfSplits, originalSplits.size());
  for (int i = 0; i < originalSplits.size(); i++) {
    OffsetBasedSource<?> offsetBasedSource = (OffsetBasedSource<?>) originalSplits.get(i);
    assertEquals(i, offsetBasedSource.getStartOffset());
    assertEquals(i + 1, offsetBasedSource.getEndOffset());
  }
}
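To make the two-level structure above concrete, here is a hypothetical helper, not Beam's implementation, showing the general idea of folding surplus splits into fixed-size groups so a response stays under a bundle-count limit. In the real worker each group is wrapped in a SplittableOnlyBoundedSource, and the actual grouping policy differs from the even ceiling-division chunks assumed here (java.util.List, java.util.ArrayList, and org.apache.beam.sdk.io.BoundedSource imports assumed):

// Illustrative only: group splits into at most 'limit' chunks of roughly
// equal size; each chunk would then be wrapped in a composite source that
// the worker can split again later.
static <T> List<List<BoundedSource<T>>> chunkSplits(List<BoundedSource<T>> splits, int limit) {
  List<List<BoundedSource<T>>> chunks = new ArrayList<>();
  int chunkSize = (splits.size() + limit - 1) / limit; // ceiling division
  for (int start = 0; start < splits.size(); start += chunkSize) {
    chunks.add(splits.subList(start, Math.min(start + chunkSize, splits.size())));
  }
  return chunks;
}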
use of com.google.api.services.dataflow.model.SourceSplitResponse in project beam by apache.
the class WorkerCustomSourcesTest method testLargeNumberOfSplitsReturnsSplittableOnlyBoundedSources.
@Test
public void testLargeNumberOfSplitsReturnsSplittableOnlyBoundedSources() throws Exception {
  final long apiSizeLimitForTest = 500 * 1024;
  // Generate a CountingSource and split it into the desired number of splits
  // (desired size = 1 byte), triggering the re-split with a larger bundle size.
  // Thus below we expect to produce 451 splits.
  com.google.api.services.dataflow.model.Source source =
      translateIOToCloudSource(CountingSource.upTo(451), options);
  SourceSplitResponse split =
      performSplit(source, options, 1L, null /* numBundles limit */, apiSizeLimitForTest);
  assertEquals(WorkerCustomSources.DEFAULT_NUM_BUNDLES_LIMIT, split.getBundles().size());
  // Expected shape: four composite bundles covering offsets [0,100), [100,200),
  // [200,300), [300,355), then 96 singleton bundles [355,356) ... [450,451).
  for (int i = 0; i <= 3; ++i) {
    DerivedSource derivedSource = split.getBundles().get(i);
    // Make sure that we are setting the flag telling Dataflow that we need further splits.
    assertFalse(derivedSource.getSource().getDoesNotNeedSplitting());
    Object deserializedSource =
        WorkerCustomSources.deserializeFromCloudSource(derivedSource.getSource().getSpec());
    assertTrue(deserializedSource instanceof SplittableOnlyBoundedSource);
    SplittableOnlyBoundedSource<?> splittableOnlySource =
        (SplittableOnlyBoundedSource<?>) deserializedSource;
    List<? extends BoundedSource<?>> splitSources = splittableOnlySource.split(1L, options);
    int expectedNumSplits = i < 3 ? 100 : 55;
    assertEquals(expectedNumSplits, splitSources.size());
    for (int j = 0; j < splitSources.size(); ++j) {
      assertTrue(splitSources.get(j) instanceof OffsetBasedSource);
      OffsetBasedSource<?> offsetBasedSource = (OffsetBasedSource<?>) splitSources.get(j);
      assertEquals(i * 100 + j, offsetBasedSource.getStartOffset());
      assertEquals(i * 100 + j + 1, offsetBasedSource.getEndOffset());
    }
  }
  for (int i = 4; i < WorkerCustomSources.DEFAULT_NUM_BUNDLES_LIMIT; ++i) {
    DerivedSource derivedSource = split.getBundles().get(i);
    // Make sure that we are not setting the flag telling Dataflow that we need
    // further splits for the individual counting sources.
    assertTrue(derivedSource.getSource().getDoesNotNeedSplitting());
    Object deserializedSource =
        WorkerCustomSources.deserializeFromCloudSource(derivedSource.getSource().getSpec());
    assertTrue(deserializedSource instanceof OffsetBasedSource);
    OffsetBasedSource<?> offsetBasedSource = (OffsetBasedSource<?>) deserializedSource;
    assertEquals(351 + i, offsetBasedSource.getStartOffset());
    assertEquals(351 + i + 1, offsetBasedSource.getEndOffset());
  }
}
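A quick arithmetic check of the shape asserted above (not part of the test, just plain Java confirming the numbers add up):

public class SplitShapeArithmetic {
  public static void main(String[] args) {
    int compositeOffsets = 100 + 100 + 100 + 55; // offsets covered by bundles 0..3
    int singletonBundles = 100 - 4;              // bundles 4..99, one offset each (355..450)
    System.out.println(compositeOffsets + singletonBundles); // 451, matching upTo(451)
  }
}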
use of com.google.api.services.dataflow.model.SourceSplitResponse in project beam by apache.
the class WorkerCustomSourcesTest method testOversplittingDesiredBundleSizeScaledFirst.
@Test
public void testOversplittingDesiredBundleSizeScaledFirst() throws Exception {
  // Create a source that greatly oversplits but, with coalescing/compression, would still
  // fit under the API limit. Test that the API limit gets applied first, so oversplitting
  // is reduced.
  com.google.api.services.dataflow.model.Source source =
      translateIOToCloudSource(CountingSource.upTo(8000), options);
  // Without either limit, produces 1000 bundles, total size ~500kb.
  // With only the numBundles limit of 100, produces 100 bundles, total size ~72kb.
  // With only the apiSize limit of 10kb, produces 72 bundles, total size ~40kb
  // (over the limit, but acceptable).
  // With both the numBundles limit of 100 and the apiSize limit of 10kb, should produce
  // 72 bundles.
  // On the other hand, if the numBundles limit of 100 were applied first, we'd get 100 bundles.
  SourceSplitResponse bundledWithOnlyNumBundlesLimit =
      performSplit(source, options, 8L, 100 /* numBundles limit */, 10000 * 1024L);
  assertEquals(100, bundledWithOnlyNumBundlesLimit.getBundles().size());
  assertThat(
      DataflowApiUtils.computeSerializedSizeBytes(bundledWithOnlyNumBundlesLimit),
      greaterThan(10 * 1024L));
  SourceSplitResponse bundledWithOnlySizeLimit =
      performSplit(source, options, 8L, 1000000 /* numBundles limit */, 10 * 1024L);
  int numBundlesWithOnlySizeLimit = bundledWithOnlySizeLimit.getBundles().size();
  assertThat(numBundlesWithOnlySizeLimit, lessThan(100));
  SourceSplitResponse bundledWithSizeLimit = performSplit(source, options, 8L, 100, 10 * 1024L);
  assertEquals(numBundlesWithOnlySizeLimit, bundledWithSizeLimit.getBundles().size());
}
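The ordering the test pins down can be sketched as follows. This is a hypothetical, heavily simplified helper, not WorkerCustomSources' real logic, and its proportional scaling will not reproduce the exact 72-bundle figure from the comments; it only illustrates applying the API byte limit before the bundle-count cap:

// Hypothetical and simplified: shrink the bundle count to satisfy the API
// byte limit first, then apply the bundle-count cap. Reversing the order
// would lock in the capped count even when the response is still too large.
static int effectiveBundleCount(
    int uncappedBundles, long estimatedResponseBytes, long apiByteLimit, int numBundlesCap) {
  int bundles = uncappedBundles;
  if (estimatedResponseBytes > apiByteLimit) {
    // Scale the count down proportionally so the serialized response fits the budget.
    bundles = (int) Math.max(1, bundles * apiByteLimit / estimatedResponseBytes);
  }
  return Math.min(bundles, numBundlesCap); // the count cap is applied last
}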
use of com.google.api.services.dataflow.model.SourceSplitResponse in project beam by apache.
the class WorkerCustomSourcesTest method testSplitAndReadBundlesBack.
@Test
public void testSplitAndReadBundlesBack() throws Exception {
  com.google.api.services.dataflow.model.Source source =
      translateIOToCloudSource(CountingSource.upTo(10L), options);
  List<WindowedValue<Integer>> elems = readElemsFromSource(options, source);
  assertEquals(10L, elems.size());
  for (long i = 0; i < 10L; i++) {
    assertEquals(valueInGlobalWindow(i), elems.get((int) i));
  }
  SourceSplitResponse response =
      performSplit(
          source,
          options,
          16L /* desiredBundleSizeBytes, room for two longs */,
          null /* numBundles limit */,
          null /* API size limit */);
  assertEquals("SOURCE_SPLIT_OUTCOME_SPLITTING_HAPPENED", response.getOutcome());
  List<DerivedSource> bundles = response.getBundles();
  assertEquals(5, bundles.size());
  for (int i = 0; i < 5; ++i) {
    DerivedSource bundle = bundles.get(i);
    assertEquals("SOURCE_DERIVATION_MODE_INDEPENDENT", bundle.getDerivationMode());
    com.google.api.services.dataflow.model.Source bundleSource = bundle.getSource();
    assertTrue(bundleSource.getDoesNotNeedSplitting());
    bundleSource.setCodec(source.getCodec());
    List<WindowedValue<Integer>> xs = readElemsFromSource(options, bundleSource);
    assertThat(
        "Failed on bundle " + i,
        xs,
        contains(valueInGlobalWindow(0L + 2 * i), valueInGlobalWindow(1L + 2 * i)));
    assertTrue(bundle.getSource().getMetadata().getEstimatedSizeBytes() > 0);
  }
}
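Why exactly five bundles: each long is assumed to occupy 8 bytes under the source's coder, as the inline comment about room for two longs implies, so a 16-byte desired bundle size packs two elements per bundle. A standalone check of the arithmetic:

public class BundleCountArithmetic {
  public static void main(String[] args) {
    long bytesPerElement = 8L;     // assumed wire size of one long
    long desiredBundleBytes = 16L; // the value passed to performSplit above
    long numElements = 10L;
    System.out.println(numElements * bytesPerElement / desiredBundleBytes); // prints 5
  }
}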