Search in sources:

Example 36 with JobID

Use of org.apache.flink.api.common.JobID in the Apache Flink project.

The following snippet shows the class BlobRecoveryITCase, method testBlobServerRecovery.

/**
 * Verifies that blobs stored through one {@code BlobServer} can be retrieved through a second
 * server sharing the same HA storage path, and that deleting all blobs afterwards leaves the
 * shared "blob" storage directory empty.
 *
 * @param config configuration carrying the HA cluster id and HA storage path
 * @throws IOException if a blob operation or file-system access fails
 */
public static void testBlobServerRecovery(final Configuration config) throws IOException {
    // Expected HA storage layout: <HA_STORAGE_PATH>/<HA_CLUSTER_ID>/blob
    final String clusterId = config.getString(HighAvailabilityOptions.HA_CLUSTER_ID);
    String storagePath = config.getString(HighAvailabilityOptions.HA_STORAGE_PATH) + "/" + clusterId;
    Random rand = new Random();
    // Two servers sharing the HA storage: uploads go through the first,
    // all reads are verified against the second.
    BlobServer[] server = new BlobServer[2];
    InetSocketAddress[] serverAddress = new InetSocketAddress[2];
    BlobClient client = null;
    try {
        for (int i = 0; i < server.length; i++) {
            server[i] = new BlobServer(config);
            serverAddress[i] = new InetSocketAddress("localhost", server[i].getPort());
        }
        // Connect to the FIRST server for all uploads.
        client = new BlobClient(serverAddress[0], config);
        // Random data
        byte[] expected = new byte[1024];
        rand.nextBytes(expected);
        BlobKey[] keys = new BlobKey[2];
        // Put data
        // Request 1: content-addressable upload of the full buffer
        keys[0] = client.put(expected);
        // Request 2: content-addressable upload of a sub-range (offset 32, length 256)
        keys[1] = client.put(expected, 32, 256);
        JobID[] jobId = new JobID[] { new JobID(), new JobID() };
        String[] testKey = new String[] { "test-key-1", "test-key-2" };
        // Request 3: upload scoped by (job id, name)
        client.put(jobId[0], testKey[0], expected);
        // Request 4: scoped upload of the same sub-range as request 2
        client.put(jobId[1], testKey[1], expected, 32, 256);
        // check that the storage directory exists
        final Path blobServerPath = new Path(storagePath, "blob");
        FileSystem fs = blobServerPath.getFileSystem();
        assertTrue("Unknown storage dir: " + blobServerPath, fs.exists(blobServerPath));
        // Close the client and connect to the other server — recovery works iff the
        // second server can serve blobs uploaded through the first.
        client.close();
        client = new BlobClient(serverAddress[1], config);
        // Verify request 1: the full buffer must round-trip byte-for-byte
        try (InputStream is = client.get(keys[0])) {
            byte[] actual = new byte[expected.length];
            BlobUtils.readFully(is, actual, 0, expected.length, null);
            for (int i = 0; i < expected.length; i++) {
                assertEquals(expected[i], actual[i]);
            }
        }
        // Verify request 2: only the sub-range starting at offset 32 was stored
        try (InputStream is = client.get(keys[1])) {
            byte[] actual = new byte[256];
            BlobUtils.readFully(is, actual, 0, 256, null);
            // NOTE(review): the loop runs i in [32, 256), i.e. compares 224 bytes of
            // the 256 stored — presumably a latent off-by-range in the check; confirm
            // against the writer side before tightening.
            for (int i = 32, j = 0; i < 256; i++, j++) {
                assertEquals(expected[i], actual[j]);
            }
        }
        // Verify request 3: job/name-scoped blob round-trips in full
        try (InputStream is = client.get(jobId[0], testKey[0])) {
            byte[] actual = new byte[expected.length];
            BlobUtils.readFully(is, actual, 0, expected.length, null);
            for (int i = 0; i < expected.length; i++) {
                assertEquals(expected[i], actual[i]);
            }
        }
        // Verify request 4: job/name-scoped sub-range (same caveat as request 2)
        try (InputStream is = client.get(jobId[1], testKey[1])) {
            byte[] actual = new byte[256];
            BlobUtils.readFully(is, actual, 0, 256, null);
            for (int i = 32, j = 0; i < 256; i++, j++) {
                assertEquals(expected[i], actual[j]);
            }
        }
        // Remove again — deletes go through the SECOND server's client
        client.delete(keys[0]);
        client.delete(keys[1]);
        client.delete(jobId[0], testKey[0]);
        client.delete(jobId[1], testKey[1]);
        // Verify everything is clean: the HA root must still exist, but the
        // "blob" subdirectory must be gone (or the test dumps what was left behind).
        assertTrue("HA storage directory does not exist", fs.exists(new Path(storagePath)));
        if (fs.exists(blobServerPath)) {
            final org.apache.flink.core.fs.FileStatus[] recoveryFiles = fs.listStatus(blobServerPath);
            ArrayList<String> filenames = new ArrayList<String>(recoveryFiles.length);
            for (org.apache.flink.core.fs.FileStatus file : recoveryFiles) {
                filenames.add(file.toString());
            }
            fail("Unclean state backend: " + filenames);
        }
    } finally {
        // Best-effort teardown: shut down both servers, then close the client.
        for (BlobServer s : server) {
            if (s != null) {
                s.shutdown();
            }
        }
        if (client != null) {
            client.close();
        }
    }
}
Also used : InetSocketAddress(java.net.InetSocketAddress) ArrayList(java.util.ArrayList) Random(java.util.Random) FileSystem(org.apache.flink.core.fs.FileSystem) Path(org.apache.flink.core.fs.Path) InputStream(java.io.InputStream) JobID(org.apache.flink.api.common.JobID)

Example 37 with JobID

Use of org.apache.flink.api.common.JobID in the Apache Flink project.

The following snippet shows the class BlobServerPutTest, method testPutChunkedStreamSuccessful.

@Test
public void testPutChunkedStreamSuccessful() {
    BlobServer blobServer = null;
    BlobClient blobClient = null;
    try {
        Configuration conf = new Configuration();
        blobServer = new BlobServer(conf);
        InetSocketAddress address = new InetSocketAddress("localhost", blobServer.getPort());
        blobClient = new BlobClient(address, conf);

        byte[] payload = new byte[2000000];
        rnd.nextBytes(payload);

        // Content-addressable upload (library-style blob) from a chunked stream:
        // the server must assemble the chunks and hand back a non-null key.
        BlobKey contentKey = blobClient.put(new ChunkedInputStream(payload, 19));
        assertNotNull(contentKey);

        // Upload scoped to a (job id, name) pair, again from a chunked stream.
        JobID jobId = new JobID();
        String name = "my test key";
        blobClient.put(jobId, name, new ChunkedInputStream(payload, 17));
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        // Close the client first, then take the server down.
        if (blobClient != null) {
            try {
                blobClient.close();
            } catch (Throwable t) {
                t.printStackTrace();
            }
        }
        if (blobServer != null) {
            blobServer.shutdown();
        }
    }
}
Also used : Configuration(org.apache.flink.configuration.Configuration) InetSocketAddress(java.net.InetSocketAddress) JobID(org.apache.flink.api.common.JobID) IOException(java.io.IOException) Test(org.junit.Test)

Example 38 with JobID

Use of org.apache.flink.api.common.JobID in the Apache Flink project.

The following snippet shows the class BlobServerPutTest, method testPutStreamSuccessful.

@Test
public void testPutStreamSuccessful() {
    BlobServer blobServer = null;
    BlobClient blobClient = null;
    try {
        Configuration conf = new Configuration();
        blobServer = new BlobServer(conf);
        InetSocketAddress address = new InetSocketAddress("localhost", blobServer.getPort());
        blobClient = new BlobClient(address, conf);

        byte[] payload = new byte[2000000];
        rnd.nextBytes(payload);

        // Content-addressable upload (library-style blob) from a plain stream:
        // a non-null key signals the server accepted the content.
        BlobKey contentKey = blobClient.put(new ByteArrayInputStream(payload));
        assertNotNull(contentKey);

        // Upload scoped to a (job id, name) pair.
        JobID jobId = new JobID();
        String name = "my test key";
        blobClient.put(jobId, name, new ByteArrayInputStream(payload));
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        // Close the client first, then take the server down.
        if (blobClient != null) {
            try {
                blobClient.close();
            } catch (Throwable t) {
                t.printStackTrace();
            }
        }
        if (blobServer != null) {
            blobServer.shutdown();
        }
    }
}
Also used : Configuration(org.apache.flink.configuration.Configuration) ByteArrayInputStream(java.io.ByteArrayInputStream) InetSocketAddress(java.net.InetSocketAddress) JobID(org.apache.flink.api.common.JobID) IOException(java.io.IOException) Test(org.junit.Test)

Example 39 with JobID

Use of org.apache.flink.api.common.JobID in the Apache Flink project.

The following snippet shows the class BlobServerPutTest, method testPutNamedBufferFails.

@Test
public void testPutNamedBufferFails() {
    //setWritable doesn't work on Windows.
    assumeTrue(!OperatingSystem.isWindows());
    BlobServer server = null;
    BlobClient client = null;
    File tempFileDir = null;
    try {
        Configuration config = new Configuration();
        server = new BlobServer(config);
        // make sure the blob server cannot create any files in its storage dir
        // by revoking write permission on the directory two levels above the
        // server's temporary-file location
        tempFileDir = server.createTemporaryFilename().getParentFile().getParentFile();
        assertTrue(tempFileDir.setExecutable(true, false));
        assertTrue(tempFileDir.setReadable(true, false));
        assertTrue(tempFileDir.setWritable(false, false));
        InetSocketAddress serverAddress = new InetSocketAddress("localhost", server.getPort());
        client = new BlobClient(serverAddress, config);
        byte[] data = new byte[2000000];
        rnd.nextBytes(data);
        // put under job and name scope — must be rejected because the server
        // cannot write its storage directory
        try {
            JobID jid = new JobID();
            String stringKey = "my test key";
            client.put(jid, stringKey, data);
            fail("This should fail.");
        } catch (IOException e) {
            assertTrue(e.getMessage(), e.getMessage().contains("Server side error"));
        }
        // A second put on the same client must now fail with IllegalStateException;
        // NOTE(review): this implies the client closes itself after the server-side
        // error above — confirm against BlobClient's error handling.
        try {
            JobID jid = new JobID();
            String stringKey = "another key";
            client.put(jid, stringKey, data);
            fail("Client should be closed");
        } catch (IllegalStateException e) {
        // expected
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        // set writable again to make sure we can remove the directory
        if (tempFileDir != null) {
            tempFileDir.setWritable(true, false);
        }
        if (client != null) {
            try {
                client.close();
            } catch (Throwable t) {
                t.printStackTrace();
            }
        }
        if (server != null) {
            server.shutdown();
        }
    }
}
Also used : Configuration(org.apache.flink.configuration.Configuration) InetSocketAddress(java.net.InetSocketAddress) IOException(java.io.IOException) File(java.io.File) JobID(org.apache.flink.api.common.JobID) IOException(java.io.IOException) Test(org.junit.Test)

Example 40 with JobID

Use of org.apache.flink.api.common.JobID in the Apache Flink project.

The following snippet shows the class TaskMetricGroupTest, method testGenerateScopeCustom.

/**
 * Verifies that custom scope formats configured via METRICS_SCOPE_NAMING_* are applied:
 * the task-level scope "<tm_id>.<job_id>.<task_id>.<task_attempt_id>" must drive both
 * the scope components and the metric identifier of a TaskMetricGroup.
 */
@Test
public void testGenerateScopeCustom() {
    Configuration cfg = new Configuration();
    cfg.setString(ConfigConstants.METRICS_SCOPE_NAMING_TM, "abc");
    cfg.setString(ConfigConstants.METRICS_SCOPE_NAMING_TM_JOB, "def");
    cfg.setString(ConfigConstants.METRICS_SCOPE_NAMING_TASK, "<tm_id>.<job_id>.<task_id>.<task_attempt_id>");
    MetricRegistry registry = new MetricRegistry(MetricRegistryConfiguration.fromConfiguration(cfg));
    try {
        JobID jid = new JobID();
        AbstractID vertexId = new AbstractID();
        AbstractID executionId = new AbstractID();
        TaskManagerMetricGroup tmGroup = new TaskManagerMetricGroup(registry, "theHostName", "test-tm-id");
        TaskManagerJobMetricGroup jmGroup = new TaskManagerJobMetricGroup(registry, tmGroup, jid, "myJobName");
        TaskMetricGroup taskGroup = new TaskMetricGroup(registry, jmGroup, vertexId, executionId, "aTaskName", 13, 2);
        assertArrayEquals(new String[] { "test-tm-id", jid.toString(), vertexId.toString(), executionId.toString() }, taskGroup.getScopeComponents());
        assertEquals(String.format("test-tm-id.%s.%s.%s.name", jid, vertexId, executionId), taskGroup.getMetricIdentifier("name"));
    } finally {
        // Shut the registry down even when an assertion fails; otherwise a failing
        // test leaks the registry across the rest of the suite.
        registry.shutdown();
    }
}
Also used : Configuration(org.apache.flink.configuration.Configuration) MetricRegistryConfiguration(org.apache.flink.runtime.metrics.MetricRegistryConfiguration) MetricRegistry(org.apache.flink.runtime.metrics.MetricRegistry) AbstractID(org.apache.flink.util.AbstractID) JobID(org.apache.flink.api.common.JobID) Test(org.junit.Test)

Aggregations

JobID (org.apache.flink.api.common.JobID)335 Test (org.junit.Test)274 JobVertexID (org.apache.flink.runtime.jobgraph.JobVertexID)88 IOException (java.io.IOException)74 Configuration (org.apache.flink.configuration.Configuration)72 ExecutionAttemptID (org.apache.flink.runtime.executiongraph.ExecutionAttemptID)61 ExecutionConfig (org.apache.flink.api.common.ExecutionConfig)48 ActorGateway (org.apache.flink.runtime.instance.ActorGateway)47 JobVertex (org.apache.flink.runtime.jobgraph.JobVertex)44 ExecutionVertex (org.apache.flink.runtime.executiongraph.ExecutionVertex)42 JobGraph (org.apache.flink.runtime.jobgraph.JobGraph)38 ArrayList (java.util.ArrayList)37 MetricRegistry (org.apache.flink.runtime.metrics.MetricRegistry)32 KeyGroupRange (org.apache.flink.runtime.state.KeyGroupRange)31 HashMap (java.util.HashMap)29 AllocationID (org.apache.flink.runtime.clusterframework.types.AllocationID)29 FiniteDuration (scala.concurrent.duration.FiniteDuration)28 IntermediateDataSetID (org.apache.flink.runtime.jobgraph.IntermediateDataSetID)24 File (java.io.File)23 UUID (java.util.UUID)23