Use of org.apache.flink.core.fs.FileInputSplit in project flink by apache.
From the class RowCsvInputFormatTest, method testRemovingTrailingCR.
private static void testRemovingTrailingCR(String lineBreakerInFile, String lineBreakerSetup) throws IOException {
    String fileContent = FIRST_PART + lineBreakerInFile + SECOND_PART + lineBreakerInFile;
    // create input file
    File tempFile = File.createTempFile("CsvInputFormatTest", "tmp");
    tempFile.deleteOnExit();
    tempFile.setWritable(true);
    OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
    wrt.write(fileContent);
    wrt.close();
    TypeInformation[] fieldTypes = new TypeInformation[] { BasicTypeInfo.STRING_TYPE_INFO };
    RowCsvInputFormat inputFormat = new RowCsvInputFormat(new Path(tempFile.toURI().toString()), fieldTypes);
    inputFormat.configure(new Configuration());
    inputFormat.setDelimiter(lineBreakerSetup);
    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    inputFormat.open(splits[0]);
    Row result = inputFormat.nextRecord(new Row(1));
    assertNotNull("Expecting to not return null", result);
    assertEquals(FIRST_PART, result.getField(0));
    result = inputFormat.nextRecord(result);
    assertNotNull("Expecting to not return null", result);
    assertEquals(SECOND_PART, result.getField(0));
}
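The test above only reads from the first split. For reference, the same RowCsvInputFormat can be driven over every FileInputSplit it produces with a plain read loop. The following is a minimal sketch, not part of the Flink test: the file path, class name, and field types are placeholders, and only the APIs already used in the snippet above are assumed.

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.io.RowCsvInputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.FileInputSplit;
import org.apache.flink.core.fs.Path;
import org.apache.flink.types.Row;

public class RowCsvReadAllSplits {
    public static void main(String[] args) throws Exception {
        // Placeholder path; point this at a real CSV file.
        Path csvPath = new Path("file:///tmp/input.csv");
        TypeInformation[] fieldTypes = new TypeInformation[] { BasicTypeInfo.STRING_TYPE_INFO };

        RowCsvInputFormat format = new RowCsvInputFormat(csvPath, fieldTypes);
        format.configure(new Configuration());

        // One reuse object for the whole run, as in the test above.
        Row reuse = new Row(1);
        for (FileInputSplit split : format.createInputSplits(1)) {
            format.open(split);
            while (!format.reachedEnd()) {
                Row row = format.nextRecord(reuse);
                if (row == null) {
                    break; // e.g. a skipped line when lenient parsing is enabled
                }
                System.out.println(row.getField(0));
            }
            format.close();
        }
    }
}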
Use of org.apache.flink.core.fs.FileInputSplit in project flink by apache.
From the class TextInputFormatTest, method testSimpleRead.
@Test
public void testSimpleRead() {
    final String FIRST = "First line";
    final String SECOND = "Second line";
    try {
        // create input file
        File tempFile = File.createTempFile("TextInputFormatTest", "tmp");
        tempFile.deleteOnExit();
        tempFile.setWritable(true);
        PrintStream ps = new PrintStream(tempFile);
        ps.println(FIRST);
        ps.println(SECOND);
        ps.close();
        TextInputFormat inputFormat = new TextInputFormat(new Path(tempFile.toURI().toString()));
        Configuration parameters = new Configuration();
        inputFormat.configure(parameters);
        FileInputSplit[] splits = inputFormat.createInputSplits(1);
        assertTrue("expected at least one input split", splits.length >= 1);
        inputFormat.open(splits[0]);
        String result = "";
        assertFalse(inputFormat.reachedEnd());
        result = inputFormat.nextRecord("");
        assertNotNull("Expecting first record here", result);
        assertEquals(FIRST, result);
        assertFalse(inputFormat.reachedEnd());
        result = inputFormat.nextRecord(result);
        assertNotNull("Expecting second record here", result);
        assertEquals(SECOND, result);
        assertTrue(inputFormat.reachedEnd() || null == inputFormat.nextRecord(result));
    } catch (Throwable t) {
        System.err.println("test failed with exception: " + t.getMessage());
        t.printStackTrace(System.err);
        fail("Test erroneous");
    }
}
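Outside a test, the same open/nextRecord/reachedEnd cycle is typically wrapped in a small helper that drains all splits. A minimal sketch, assuming the TextInputFormat API shown above; the class and method names are illustrative only.

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.FileInputSplit;
import org.apache.flink.core.fs.Path;

public class ReadAllLines {
    // Collects every line under 'path', reading each input split in turn.
    static List<String> readAll(Path path) throws Exception {
        TextInputFormat format = new TextInputFormat(path);
        format.setCharsetName("UTF-8"); // the default; shown for completeness
        format.configure(new Configuration());

        List<String> lines = new ArrayList<>();
        for (FileInputSplit split : format.createInputSplits(1)) {
            format.open(split);
            String line;
            while (!format.reachedEnd() && (line = format.nextRecord("")) != null) {
                lines.add(line);
            }
            format.close();
        }
        return lines;
    }
}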
Use of org.apache.flink.core.fs.FileInputSplit in project flink by apache.
From the class CsvInputFormatTest, method testPojoTypeWithMappingInfoAndPartialField.
@Test
public void testPojoTypeWithMappingInfoAndPartialField() throws Exception {
    File tempFile = File.createTempFile("CsvReaderPojoType", "tmp");
    tempFile.deleteOnExit();
    tempFile.setWritable(true);
    OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile));
    wrt.write("123,3.123,AAA,BBB\n");
    wrt.write("456,1.123,BBB,AAA\n");
    wrt.close();
    @SuppressWarnings("unchecked")
    PojoTypeInfo<PojoItem> typeInfo = (PojoTypeInfo<PojoItem>) TypeExtractor.createTypeInfo(PojoItem.class);
    CsvInputFormat<PojoItem> inputFormat = new PojoCsvInputFormat<PojoItem>(
            new Path(tempFile.toURI().toString()), typeInfo,
            new String[] { "field1", "field4" }, new boolean[] { true, false, false, true });
    inputFormat.configure(new Configuration());
    FileInputSplit[] splits = inputFormat.createInputSplits(1);
    inputFormat.open(splits[0]);
    PojoItem item = new PojoItem();
    inputFormat.nextRecord(item);
    assertEquals(123, item.field1);
    assertEquals("BBB", item.field4);
}
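The PojoItem class referenced above is not part of this excerpt. From the four-column input and the assertions, a Flink-compatible POJO of roughly the following shape is assumed; only field1 (int) and field4 (String) are actually asserted, so the types of field2 and field3 are guesses.

// Assumed shape of the POJO used above, not taken from the Flink sources.
// Flink POJOs need public fields (or getters/setters) and a public
// no-argument constructor.
public class PojoItem {
    public int field1;
    public String field2;
    public Double field3;
    public String field4;

    public PojoItem() {
    }
}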
Use of org.apache.flink.core.fs.FileInputSplit in project flink by apache.
From the class CsvInputFormatTest, method ignoreInvalidLines.
private void ignoreInvalidLines(int bufferSize) {
    try {
        final String fileContent =
                "#description of the data\n"
                + "header1|header2|header3|\n"
                + "this is|1|2.0|\n"
                + "//a comment\n"
                + "a test|3|4.0|\n"
                + "#next|5|6.0|\n"
                + "asdasdas";
        final FileInputSplit split = createTempFile(fileContent);
        final TupleTypeInfo<Tuple3<String, Integer, Double>> typeInfo =
                TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class, Double.class);
        final CsvInputFormat<Tuple3<String, Integer, Double>> format =
                new TupleCsvInputFormat<Tuple3<String, Integer, Double>>(PATH, "\n", "|", typeInfo);
        format.setLenient(true);
        format.setBufferSize(bufferSize);
        final Configuration parameters = new Configuration();
        format.configure(parameters);
        format.open(split);
        Tuple3<String, Integer, Double> result = new Tuple3<String, Integer, Double>();
        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals("this is", result.f0);
        assertEquals(Integer.valueOf(1), result.f1);
        assertEquals(new Double(2.0), result.f2);
        assertEquals(65, (long) format.getCurrentState());
        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals("a test", result.f0);
        assertEquals(Integer.valueOf(3), result.f1);
        assertEquals(new Double(4.0), result.f2);
        assertEquals(91, (long) format.getCurrentState());
        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals("#next", result.f0);
        assertEquals(Integer.valueOf(5), result.f1);
        assertEquals(new Double(6.0), result.f2);
        assertEquals(104, (long) format.getCurrentState());
        result = format.nextRecord(result);
        assertNull(result);
        assertEquals(fileContent.length(), (long) format.getCurrentState());
    } catch (Exception ex) {
        ex.printStackTrace();
        fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage());
    }
}
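The getCurrentState() calls above come from Flink's CheckpointableInputFormat contract: for DelimitedInputFormat subclasses such as TupleCsvInputFormat, the state is the byte offset already consumed within the split, which is what the assertions check. The sketch below shows, outside the test, how such an offset could be recorded and later used to resume reading with reopen(). It is a hedged sketch, not taken from the test; the file path is a placeholder and the failure/restore scenario is simulated in a single method.

import org.apache.flink.api.java.io.TupleCsvInputFormat;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.FileInputSplit;
import org.apache.flink.core.fs.Path;

public class ResumeFromOffset {
    public static void main(String[] args) throws Exception {
        TupleTypeInfo<Tuple3<String, Integer, Double>> typeInfo =
                TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class, Double.class);
        TupleCsvInputFormat<Tuple3<String, Integer, Double>> format =
                new TupleCsvInputFormat<>(new Path("file:///tmp/data.csv"), "\n", "|", typeInfo);
        format.configure(new Configuration());

        FileInputSplit split = format.createInputSplits(1)[0];
        format.open(split);

        Tuple3<String, Integer, Double> reuse = new Tuple3<>();
        format.nextRecord(reuse);
        // Byte offset consumed so far within the split.
        Long offset = format.getCurrentState();
        format.close();

        // Later (e.g. after a failure): continue from the recorded offset
        // instead of re-reading the split from the beginning.
        format.reopen(split, offset);
        Tuple3<String, Integer, Double> record;
        while (!format.reachedEnd() && (record = format.nextRecord(reuse)) != null) {
            System.out.println(record);
        }
        format.close();
    }
}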
Use of org.apache.flink.core.fs.FileInputSplit in project flink by apache.
From the class CsvInputFormatTest, method ignoreMultiCharPrefixComments.
@Test
public void ignoreMultiCharPrefixComments() {
    try {
        final String fileContent =
                "//description of the data\n"
                + "//successive commented line\n"
                + "this is|1|2.0|\n"
                + "a test|3|4.0|\n"
                + "//next|5|6.0|\n";
        final FileInputSplit split = createTempFile(fileContent);
        final TupleTypeInfo<Tuple3<String, Integer, Double>> typeInfo =
                TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class, Double.class);
        final CsvInputFormat<Tuple3<String, Integer, Double>> format =
                new TupleCsvInputFormat<Tuple3<String, Integer, Double>>(PATH, "\n", "|", typeInfo);
        format.setCommentPrefix("//");
        final Configuration parameters = new Configuration();
        format.configure(parameters);
        format.open(split);
        Tuple3<String, Integer, Double> result = new Tuple3<String, Integer, Double>();
        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals("this is", result.f0);
        assertEquals(Integer.valueOf(1), result.f1);
        assertEquals(new Double(2.0), result.f2);
        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals("a test", result.f0);
        assertEquals(Integer.valueOf(3), result.f1);
        assertEquals(new Double(4.0), result.f2);
        result = format.nextRecord(result);
        assertNull(result);
    } catch (Exception ex) {
        ex.printStackTrace();
        fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage());
    }
}
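Both CsvInputFormatTest snippets rely on a createTempFile helper and a PATH constant that are not shown in this excerpt. A plausible sketch of what they might look like follows, assuming the helper writes the content to a temporary file and returns one FileInputSplit covering the whole file; the actual Flink test class may differ in details such as the constant's value.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

import org.apache.flink.core.fs.FileInputSplit;
import org.apache.flink.core.fs.Path;

class CsvTestFiles {
    // Dummy path passed to the format's constructor; the data actually read
    // comes from the FileInputSplit handed to open(), not from this constant.
    static final Path PATH = new Path("an/ignored/file/");

    // Writes the content to a temp file and returns one split covering it all.
    static FileInputSplit createTempFile(String content) throws IOException {
        File tempFile = File.createTempFile("test_contents", "tmp");
        tempFile.deleteOnExit();
        try (OutputStreamWriter wrt =
                new OutputStreamWriter(new FileOutputStream(tempFile), StandardCharsets.UTF_8)) {
            wrt.write(content);
        }
        return new FileInputSplit(0, new Path(tempFile.toURI().toString()),
                0, tempFile.length(), new String[] { "localhost" });
    }
}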