Use of org.apache.flink.connector.file.src.FileSourceSplit in project flink by apache.
From the class AdapterTestBase, method buildSplits:
static Queue<FileSourceSplit> buildSplits(int numSplits) {
    final Queue<FileSourceSplit> splits = new ArrayDeque<>();
    final long rangeForSplit = FILE_LEN / numSplits;

    // The first numSplits - 1 splits each cover an equal range of the file.
    for (int i = 0; i < numSplits - 1; i++) {
        splits.add(
                new FileSourceSplit(
                        "ID-" + i, testPath, i * rangeForSplit, rangeForSplit, 0, FILE_LEN));
    }

    // The last split absorbs the remainder so the whole file is covered.
    final long startOfLast = (numSplits - 1) * rangeForSplit;
    splits.add(
            new FileSourceSplit(
                    "ID-" + (numSplits - 1), testPath, startOfLast, FILE_LEN - startOfLast, 0, FILE_LEN));
    return splits;
}
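The helper above relies on the test base's FILE_LEN and testPath fields, which are not part of this excerpt. The following self-contained sketch repeats the same logic with made-up values and checks that the generated splits tile the file without gaps or overlap; the file length, path, and coverage check are illustrative and not taken from the Flink test.

import java.util.ArrayDeque;
import java.util.Queue;

import org.apache.flink.connector.file.src.FileSourceSplit;
import org.apache.flink.core.fs.Path;

public class SplitCoverageSketch {

    // Made-up stand-ins for AdapterTestBase's FILE_LEN and testPath.
    private static final long FILE_LEN = 10_000L;
    private static final Path TEST_PATH = new Path("file:///tmp/test-file");

    public static void main(String[] args) {
        final Queue<FileSourceSplit> splits = buildSplits(3);

        long covered = 0L;
        for (FileSourceSplit split : splits) {
            // Each split must start exactly where the previous one ended.
            if (split.offset() != covered) {
                throw new IllegalStateException("Gap or overlap before split " + split.splitId());
            }
            covered += split.length();
        }
        if (covered != FILE_LEN) {
            throw new IllegalStateException("Splits cover " + covered + " of " + FILE_LEN + " bytes");
        }
        System.out.println(splits.size() + " splits cover the full " + covered + " bytes");
    }

    // Same logic as AdapterTestBase#buildSplits above, repeated so the sketch compiles on its own.
    static Queue<FileSourceSplit> buildSplits(int numSplits) {
        final Queue<FileSourceSplit> splits = new ArrayDeque<>();
        final long rangeForSplit = FILE_LEN / numSplits;
        for (int i = 0; i < numSplits - 1; i++) {
            splits.add(new FileSourceSplit("ID-" + i, TEST_PATH, i * rangeForSplit, rangeForSplit, 0, FILE_LEN));
        }
        final long startOfLast = (numSplits - 1) * rangeForSplit;
        splits.add(new FileSourceSplit(
                "ID-" + (numSplits - 1), TEST_PATH, startOfLast, FILE_LEN - startOfLast, 0, FILE_LEN));
        return splits;
    }
}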
Use of org.apache.flink.connector.file.src.FileSourceSplit in project flink by apache.
From the class FileSourceReaderTest, method testRequestSplitWhenNoSplitRestored:
@Test
public void testRequestSplitWhenNoSplitRestored() throws Exception {
    final TestingReaderContext context = new TestingReaderContext();
    final FileSourceReader<String, FileSourceSplit> reader = createReader(context);

    // No splits were restored, so starting the reader must trigger exactly one split request.
    reader.start();
    reader.close();

    assertEquals(1, context.getNumSplitRequests());
}
Use of org.apache.flink.connector.file.src.FileSourceSplit in project flink by apache.
From the class FileSourceReaderTest, method testNoSplitRequestWhenSplitRestored:
@Test
public void testNoSplitRequestWhenSplitRestored() throws Exception {
    final TestingReaderContext context = new TestingReaderContext();
    final FileSourceReader<String, FileSourceSplit> reader = createReader(context);

    // Hand the reader a restored split before it starts, so it already has work queued.
    reader.addSplits(Collections.singletonList(createTestFileSplit()));
    reader.start();
    reader.close();

    // With a split restored, the reader must not ask the enumerator for another one.
    assertEquals(0, context.getNumSplitRequests());
}
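Taken together, the two tests pin down that the reader requests a split from the enumerator on start() only when it holds none. The createReader and createTestFileSplit helpers are not part of this excerpt; as a rough illustration, a createTestFileSplit stand-in could reuse the six-argument FileSourceSplit constructor from the buildSplits snippet above (the id, path, and length here are made up):

// Hypothetical stand-in for FileSourceReaderTest#createTestFileSplit; the real helper may
// differ. It only needs to produce one split the reader can treat as restored state.
private static FileSourceSplit createTestFileSplit() {
    // (id, path, offset, length, fileModificationTime, fileSize), as in buildSplits above.
    return new FileSourceSplit(
            "restored-split", new Path("file:///tmp/test-file"), 0L, 1024L, 0L, 1024L);
}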
Use of org.apache.flink.connector.file.src.FileSourceSplit in project flink by apache.
From the class StreamFormatAdapterTest, method simpleReadTest:
private void simpleReadTest(int batchSize) throws IOException {
    // Cap the fetch size so the file is read in batches of at most `batchSize` bytes.
    final Configuration config = new Configuration();
    config.set(StreamFormat.FETCH_IO_SIZE, new MemorySize(batchSize));

    final StreamFormatAdapter<Integer> format = new StreamFormatAdapter<>(new CheckpointedIntFormat());
    final BulkFormat.Reader<Integer> reader =
            format.createReader(
                    config, new FileSourceSplit("test-id", testPath, 0L, FILE_LEN, 0L, FILE_LEN));

    final List<Integer> result = new ArrayList<>();
    readNumbers(reader, result, NUM_NUMBERS);

    verifyIntListResult(result);
}
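CheckpointedIntFormat, readNumbers, and verifyIntListResult are helpers of the test base and are not shown here. As a sketch of what a readNumbers-style drain loop could look like, the standard BulkFormat.Reader iteration is readBatch() until it returns null, next() per record, then releaseBatch(); the method body below is illustrative, not the Flink test code.

// Illustrative drain loop in the spirit of readNumbers: pulls up to `num` records from the
// reader into `result`. Assumes imports of BulkFormat, RecordAndPosition, List, and IOException.
static void readNumbers(BulkFormat.Reader<Integer> reader, List<Integer> result, int num)
        throws IOException {
    BulkFormat.RecordIterator<Integer> batch;
    while (result.size() < num && (batch = reader.readBatch()) != null) {
        RecordAndPosition<Integer> record;
        while (result.size() < num && (record = batch.next()) != null) {
            result.add(record.getRecord());
        }
        // Return the batch (and any pooled buffers backing it) before fetching the next one.
        batch.releaseBatch();
    }
    reader.close();
}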
Use of org.apache.flink.connector.file.src.FileSourceSplit in project flink by apache.
From the class LimitableBulkFormatTest, method testSwallowExceptionWhenLimited:
@Test
public void testSwallowExceptionWhenLimited() throws IOException {
    long limit = 1000L;
    LimitableBulkFormat<String, FileSourceSplit> format =
            (LimitableBulkFormat<String, FileSourceSplit>)
                    LimitableBulkFormat.create(new StreamFormatAdapter<>(new FailedFormat()), limit);

    BulkFormat.Reader<String> reader =
            format.createReader(
                    new Configuration(),
                    new FileSourceSplit("id", new Path(file.toURI()), 0, file.length()));

    // Pretend the global limit has already been exceeded before this reader does any work.
    format.globalNumberRead().set(limit + 1);

    // Once over the limit, readBatch() must not surface the failure from the wrapped format.
    reader.readBatch();
}
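Outside this failure scenario, the same wrapper is how a record limit is applied to an arbitrary BulkFormat: the limit is tracked by the shared globalNumberRead() counter seen above. A minimal usage sketch, assuming only the classes already shown; the inner format is passed in rather than constructed here, and LimitableBulkFormat's package has moved between Flink versions, so adjust the import to your version.

// Illustrative helper: wrap any BulkFormat so that at most `limit` records are produced in
// total, then open a reader over a single whole-file split.
static BulkFormat.Reader<String> openLimitedReader(
        BulkFormat<String, FileSourceSplit> innerFormat, File file, long limit) throws IOException {
    final BulkFormat<String, FileSourceSplit> limited = LimitableBulkFormat.create(innerFormat, limit);
    // Same single-split pattern as the test above; drain the reader with a
    // readBatch()/next()/releaseBatch() loop like the readNumbers sketch earlier.
    return limited.createReader(
            new Configuration(),
            new FileSourceSplit("id", new Path(file.toURI()), 0, file.length()));
}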