Search in sources :

Example 26 with Configuration

Example usage of org.apache.flink.configuration.Configuration in the Apache Flink project.

From the class BlockingCheckpointsTest, method testBlockingNonInterruptibleCheckpoint:

@Test
public void testBlockingNonInterruptibleCheckpoint() throws Exception {
    // Wire up a task whose operator/backend block inside a non-interruptible checkpoint.
    final Configuration taskConfiguration = new Configuration();
    final StreamConfig streamConfig = new StreamConfig(taskConfiguration);
    streamConfig.setStreamOperator(new TestOperator());
    streamConfig.setStateBackend(new LockingStreamStateBackend());

    final Task task = createTask(taskConfiguration);

    // Start the task and block until it has entered the checkpoint
    // (signalled through the shared latch).
    task.startTaskThread();
    IN_CHECKPOINT_LATCH.await();

    // Cancel the task and wait for the executing thread. Unless cancellation
    // properly closes the blocking streams, this join never returns.
    task.cancelExecution();
    task.getExecutingThread().join();

    // Cancellation must end in CANCELED with no failure cause recorded.
    assertEquals(ExecutionState.CANCELED, task.getExecutionState());
    assertNull(task.getFailureCause());
}
Also used : Task(org.apache.flink.runtime.taskmanager.Task) Configuration(org.apache.flink.configuration.Configuration) StreamConfig(org.apache.flink.streaming.api.graph.StreamConfig) Test(org.junit.Test)

Example 27 with Configuration

Example usage of org.apache.flink.configuration.Configuration in the Apache Flink project.

From the class CsvInputFormatTest, method testRemovingTrailingCR:

/**
 * Writes {@code FIRST_PART} and {@code SECOND_PART} to a temp file separated by
 * {@code lineBreakerInFile}, then reads the file back with the CSV input format
 * configured with {@code lineBreakerSetup} as the record delimiter and asserts
 * that both parts are returned without trailing line-break characters.
 *
 * @param lineBreakerInFile the line terminator actually written to the file
 * @param lineBreakerSetup  the delimiter configured on the input format
 */
private void testRemovingTrailingCR(String lineBreakerInFile, String lineBreakerSetup) {
    String fileContent = CsvInputFormatTest.FIRST_PART + lineBreakerInFile + CsvInputFormatTest.SECOND_PART + lineBreakerInFile;
    try {
        // create the input file; deleteOnExit keeps the temp dir clean even on failure
        File tempFile = File.createTempFile("CsvInputFormatTest", "tmp");
        tempFile.deleteOnExit();
        tempFile.setWritable(true);
        // try-with-resources guarantees the writer (and file handle) is closed
        // even if write() throws — the original leaked it on exception
        // NOTE(review): uses the platform default charset; fine for the ASCII
        // test fixture, but confirm FIRST_PART/SECOND_PART stay ASCII-only
        try (OutputStreamWriter wrt = new OutputStreamWriter(new FileOutputStream(tempFile))) {
            wrt.write(fileContent);
        }
        final TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
        final CsvInputFormat<Tuple1<String>> inputFormat = new TupleCsvInputFormat<Tuple1<String>>(new Path(tempFile.toURI().toString()), typeInfo);
        Configuration parameters = new Configuration();
        inputFormat.configure(parameters);
        inputFormat.setDelimiter(lineBreakerSetup);
        FileInputSplit[] splits = inputFormat.createInputSplits(1);
        inputFormat.open(splits[0]);
        // first record: the part before the first line breaker
        Tuple1<String> result = inputFormat.nextRecord(new Tuple1<String>());
        assertNotNull("Expecting to not return null", result);
        assertEquals(FIRST_PART, result.f0);
        // second record: the part before the trailing line breaker
        result = inputFormat.nextRecord(result);
        assertNotNull("Expecting to not return null", result);
        assertEquals(SECOND_PART, result.f0);
    } catch (Throwable t) {
        // rethrow with the cause attached so the full stack trace reaches the
        // test report — the original printed to stderr and called a bare fail()
        throw new AssertionError("Test erroneous", t);
    }
}
Also used : Path(org.apache.flink.core.fs.Path) Configuration(org.apache.flink.configuration.Configuration) FileInputSplit(org.apache.flink.core.fs.FileInputSplit) FileOutputStream(java.io.FileOutputStream) OutputStreamWriter(java.io.OutputStreamWriter) File(java.io.File)

Example 28 with Configuration

Example usage of org.apache.flink.configuration.Configuration in the Apache Flink project.

From the class CsvInputFormatTest, method testDoubleFields:

/**
 * Reads two pipe-delimited lines of five double fields each and asserts that
 * every field is parsed into the expected {@link Double} value, then verifies
 * that the format reports end-of-input.
 *
 * @throws IOException if creating the temp file for the split fails
 */
@Test
public void testDoubleFields() throws IOException {
    try {
        final String fileContent = "11.1|22.2|33.3|44.4|55.5\n66.6|77.7|88.8|99.9|00.0|\n";
        final FileInputSplit split = createTempFile(fileContent);
        final TupleTypeInfo<Tuple5<Double, Double, Double, Double, Double>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(Double.class, Double.class, Double.class, Double.class, Double.class);
        final CsvInputFormat<Tuple5<Double, Double, Double, Double, Double>> format = new TupleCsvInputFormat<Tuple5<Double, Double, Double, Double, Double>>(PATH, typeInfo);
        format.setFieldDelimiter("|");
        format.configure(new Configuration());
        format.open(split);
        Tuple5<Double, Double, Double, Double, Double> result = new Tuple5<Double, Double, Double, Double, Double>();
        // first line
        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals(Double.valueOf(11.1), result.f0);
        assertEquals(Double.valueOf(22.2), result.f1);
        assertEquals(Double.valueOf(33.3), result.f2);
        assertEquals(Double.valueOf(44.4), result.f3);
        assertEquals(Double.valueOf(55.5), result.f4);
        // second line
        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals(Double.valueOf(66.6), result.f0);
        assertEquals(Double.valueOf(77.7), result.f1);
        assertEquals(Double.valueOf(88.8), result.f2);
        assertEquals(Double.valueOf(99.9), result.f3);
        assertEquals(Double.valueOf(00.0), result.f4);
        // no third record; the format must report end of input
        result = format.nextRecord(result);
        assertNull(result);
        assertTrue(format.reachedEnd());
    } catch (Exception ex) {
        // attach the cause so the stack trace is preserved in the test report
        // (a bare fail() with ex.getMessage() discards it)
        throw new AssertionError("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage(), ex);
    }
}
Also used : FileInputSplit(org.apache.flink.core.fs.FileInputSplit) Configuration(org.apache.flink.configuration.Configuration) IOException(java.io.IOException) ParseException(org.apache.flink.api.common.io.ParseException) Test(org.junit.Test)

Example 29 with Configuration

Example usage of org.apache.flink.configuration.Configuration in the Apache Flink project.

From the class CsvInputFormatTest, method testReadFirstN:

/**
 * Verifies that a {@code Tuple2} CSV format reads only the first two of five
 * pipe-delimited fields per line, for two lines, and then reports end-of-input.
 *
 * @throws IOException if creating the temp file for the split fails
 */
@Test
public void testReadFirstN() throws IOException {
    try {
        final String fileContent = "111|222|333|444|555|\n666|777|888|999|000|\n";
        final FileInputSplit split = createTempFile(fileContent);
        final TupleTypeInfo<Tuple2<Integer, Integer>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, Integer.class);
        final CsvInputFormat<Tuple2<Integer, Integer>> format = new TupleCsvInputFormat<Tuple2<Integer, Integer>>(PATH, typeInfo);
        format.setFieldDelimiter("|");
        format.configure(new Configuration());
        format.open(split);
        Tuple2<Integer, Integer> result = new Tuple2<Integer, Integer>();
        // first line: only the leading two fields are materialized
        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals(Integer.valueOf(111), result.f0);
        assertEquals(Integer.valueOf(222), result.f1);
        // second line
        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals(Integer.valueOf(666), result.f0);
        assertEquals(Integer.valueOf(777), result.f1);
        // no third record; the format must report end of input
        result = format.nextRecord(result);
        assertNull(result);
        assertTrue(format.reachedEnd());
    } catch (Exception ex) {
        // attach the cause so the stack trace is preserved in the test report
        // (a bare fail() with ex.getMessage() discards it)
        throw new AssertionError("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage(), ex);
    }
}
Also used : FileInputSplit(org.apache.flink.core.fs.FileInputSplit) Configuration(org.apache.flink.configuration.Configuration) IOException(java.io.IOException) ParseException(org.apache.flink.api.common.io.ParseException) Test(org.junit.Test)

Example 30 with Configuration

Example usage of org.apache.flink.configuration.Configuration in the Apache Flink project.

From the class CsvInputFormatTest, method testReadSparseWithPositionSetter:

/**
 * Verifies sparse parsing: with the included-field positions {0, 3, 7}, the
 * format projects each ten-field line onto a {@code Tuple3}, for two lines,
 * and then reports end-of-input.
 *
 * @throws IOException if creating the temp file for the split fails
 */
@Test
public void testReadSparseWithPositionSetter() throws IOException {
    try {
        final String fileContent = "111|222|333|444|555|666|777|888|999|000|\n000|999|888|777|666|555|444|333|222|111|";
        final FileInputSplit split = createTempFile(fileContent);
        final TupleTypeInfo<Tuple3<Integer, Integer, Integer>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, Integer.class, Integer.class);
        // include only the fields at positions 0, 3 and 7 of each line
        final CsvInputFormat<Tuple3<Integer, Integer, Integer>> format = new TupleCsvInputFormat<Tuple3<Integer, Integer, Integer>>(PATH, typeInfo, new int[] { 0, 3, 7 });
        format.setFieldDelimiter("|");
        format.configure(new Configuration());
        format.open(split);
        Tuple3<Integer, Integer, Integer> result = new Tuple3<Integer, Integer, Integer>();
        // first line: fields 0, 3, 7 -> 111, 444, 888
        result = format.nextRecord(result);
        assertNotNull(result);
        assertEquals(Integer.valueOf(111), result.f0);
        assertEquals(Integer.valueOf(444), result.f1);
        assertEquals(Integer.valueOf(888), result.f2);
        // second line: fields 0, 3, 7 -> 000, 777, 333
        result = format.nextRecord(result);
        assertNotNull(result);
        // the literal "000" parses to 0; avoid the octal-looking literal 000
        assertEquals(Integer.valueOf(0), result.f0);
        assertEquals(Integer.valueOf(777), result.f1);
        assertEquals(Integer.valueOf(333), result.f2);
        // no third record; the format must report end of input
        result = format.nextRecord(result);
        assertNull(result);
        assertTrue(format.reachedEnd());
    } catch (Exception ex) {
        // attach the cause so the stack trace is preserved in the test report
        // (a bare fail() with ex.getMessage() discards it)
        throw new AssertionError("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage(), ex);
    }
}
Also used : FileInputSplit(org.apache.flink.core.fs.FileInputSplit) Configuration(org.apache.flink.configuration.Configuration) IOException(java.io.IOException) ParseException(org.apache.flink.api.common.io.ParseException) Test(org.junit.Test)

Aggregations

Configuration (org.apache.flink.configuration.Configuration)630 Test (org.junit.Test)452 IOException (java.io.IOException)137 FileInputSplit (org.apache.flink.core.fs.FileInputSplit)93 File (java.io.File)92 JobID (org.apache.flink.api.common.JobID)74 ExecutionConfig (org.apache.flink.api.common.ExecutionConfig)68 JobVertex (org.apache.flink.runtime.jobgraph.JobVertex)49 ActorGateway (org.apache.flink.runtime.instance.ActorGateway)46 JobGraph (org.apache.flink.runtime.jobgraph.JobGraph)45 Path (org.apache.flink.core.fs.Path)44 ActorRef (akka.actor.ActorRef)43 ArrayList (java.util.ArrayList)43 Tuple2 (org.apache.flink.api.java.tuple.Tuple2)39 FiniteDuration (scala.concurrent.duration.FiniteDuration)38 LocalFlinkMiniCluster (org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster)36 BeforeClass (org.junit.BeforeClass)35 AkkaActorGateway (org.apache.flink.runtime.instance.AkkaActorGateway)33 MetricRegistry (org.apache.flink.runtime.metrics.MetricRegistry)33 JobVertexID (org.apache.flink.runtime.jobgraph.JobVertexID)32