
Example 1 with IPCLoggerChannel

Use of org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel in project hadoop by apache.

From the class TestJournalNode, method testHttpServer.

@Test(timeout = 100000)
public void testHttpServer() throws Exception {
    String urlRoot = jn.getHttpServerURI();
    // Check default servlets.
    String pageContents = DFSTestUtil.urlGet(new URL(urlRoot + "/jmx"));
    assertTrue("Bad contents: " + pageContents, pageContents.contains("Hadoop:service=JournalNode,name=JvmMetrics"));
    // Create some edits on server side
    byte[] EDITS_DATA = QJMTestUtil.createTxnData(1, 3);
    IPCLoggerChannel ch = new IPCLoggerChannel(conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
    ch.newEpoch(1).get();
    ch.setEpoch(1);
    ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
    ch.sendEdits(1L, 1, 3, EDITS_DATA).get();
    ch.finalizeLogSegment(1, 3).get();
    // Attempt to retrieve via HTTP, ensure we get the data back
    // including the header we expected
    byte[] retrievedViaHttp = DFSTestUtil.urlGetBytes(new URL(urlRoot + "/getJournal?segmentTxId=1&jid=" + journalId));
    byte[] expected = Bytes.concat(
        Ints.toByteArray(HdfsServerConstants.NAMENODE_LAYOUT_VERSION),
        // layout flags section
        new byte[] { 0, 0, 0, 0 },
        EDITS_DATA);
    assertArrayEquals(expected, retrievedViaHttp);
    // Attempt to fetch a non-existent file, check that we get an
    // error status code
    URL badUrl = new URL(urlRoot + "/getJournal?segmentTxId=12345&jid=" + journalId);
    HttpURLConnection connection = (HttpURLConnection) badUrl.openConnection();
    try {
        assertEquals(404, connection.getResponseCode());
    } finally {
        connection.disconnect();
    }
}
Also used: HttpURLConnection (java.net.HttpURLConnection), IPCLoggerChannel (org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel), URL (java.net.URL), Test (org.junit.Test)
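
The assertion above pins down the servlet's wire format: a 4-byte big-endian layout version, a 4-byte layout-flags section that is all zeros here, then the raw edit bytes. As a minimal plain-JDK sketch (no Guava; the helper name is illustrative, not part of the test), the same expected prefix can be built like this:

import java.nio.ByteBuffer;

static byte[] expectedSegmentBytes(int layoutVersion, byte[] editsData) {
    // ByteBuffer is big-endian by default, matching Ints.toByteArray above.
    ByteBuffer buf = ByteBuffer.allocate(8 + editsData.length);
    buf.putInt(layoutVersion); // 4-byte layout version header
    buf.putInt(0);             // 4-byte all-zero layout flags section
    buf.put(editsData);        // the edits written via ch.sendEdits(...)
    return buf.array();
}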

Example 2 with IPCLoggerChannel

Use of org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel in project hadoop by apache.

From the class TestJournalNode, method testJournal.

@Test(timeout = 100000)
public void testJournal() throws Exception {
    // Before any edits are written, all batch counters and lag gauges read zero.
    MetricsRecordBuilder metrics = MetricsAsserts.getMetrics(journal.getMetricsForTests().getName());
    MetricsAsserts.assertCounter("BatchesWritten", 0L, metrics);
    MetricsAsserts.assertCounter("BatchesWrittenWhileLagging", 0L, metrics);
    MetricsAsserts.assertGauge("CurrentLagTxns", 0L, metrics);
    MetricsAsserts.assertGauge("LastJournalTimestamp", 0L, metrics);
    long beginTimestamp = System.currentTimeMillis();
    IPCLoggerChannel ch = new IPCLoggerChannel(conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
    // Become the writer for epoch 1, then write a single-transaction batch.
    ch.newEpoch(1).get();
    ch.setEpoch(1);
    ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
    ch.sendEdits(1L, 1, 1, "hello".getBytes(Charsets.UTF_8)).get();
    metrics = MetricsAsserts.getMetrics(journal.getMetricsForTests().getName());
    MetricsAsserts.assertCounter("BatchesWritten", 1L, metrics);
    MetricsAsserts.assertCounter("BatchesWrittenWhileLagging", 0L, metrics);
    MetricsAsserts.assertGauge("CurrentLagTxns", 0L, metrics);
    long lastJournalTimestamp = MetricsAsserts.getLongGauge("LastJournalTimestamp", metrics);
    assertTrue(lastJournalTimestamp > beginTimestamp);
    beginTimestamp = lastJournalTimestamp;
    // Advertise a committed txid (100) far beyond the highest written txid,
    // so the next batch counts as written-while-lagging with a 98-txn lag.
    ch.setCommittedTxId(100L);
    ch.sendEdits(1L, 2, 1, "goodbye".getBytes(Charsets.UTF_8)).get();
    metrics = MetricsAsserts.getMetrics(journal.getMetricsForTests().getName());
    MetricsAsserts.assertCounter("BatchesWritten", 2L, metrics);
    MetricsAsserts.assertCounter("BatchesWrittenWhileLagging", 1L, metrics);
    MetricsAsserts.assertGauge("CurrentLagTxns", 98L, metrics);
    lastJournalTimestamp = MetricsAsserts.getLongGauge("LastJournalTimestamp", metrics);
    assertTrue(lastJournalTimestamp > beginTimestamp);
}
Also used: IPCLoggerChannel (org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel), MetricsRecordBuilder (org.apache.hadoop.metrics2.MetricsRecordBuilder), Test (org.junit.Test)
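
The 98 asserted for CurrentLagTxns is plain arithmetic: the channel advertised a committed txid of 100 while the highest txid actually written is 2, so the journal trails by 100 - 2 = 98 transactions. A hypothetical one-liner capturing that relationship (the real bookkeeping lives inside the journal's metrics, not in a helper like this):

// Hypothetical helper mirroring the lag gauge asserted above:
// committedTxId 100 minus highestWrittenTxId 2 yields 98.
static long currentLagTxns(long committedTxId, long highestWrittenTxId) {
    return Math.max(0, committedTxId - highestWrittenTxId);
}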

Example 3 with IPCLoggerChannel

Use of org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel in project hadoop by apache.

From the class TestIPCLoggerChannel, method setupMock.

@Before
public void setupMock() {
    conf.setInt(DFSConfigKeys.DFS_QJOURNAL_QUEUE_SIZE_LIMIT_KEY, LIMIT_QUEUE_SIZE_MB);
    // Route the channel to a mock object instead of a real IPC proxy.
    ch = new IPCLoggerChannel(conf, FAKE_NSINFO, JID, FAKE_ADDR) {

        @Override
        protected QJournalProtocol getProxy() throws IOException {
            return mockProxy;
        }
    };
    ch.setEpoch(1);
}
Also used: QJournalProtocol (org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocol), IPCLoggerChannel (org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel), IOException (java.io.IOException), Before (org.junit.Before)
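
The mockProxy field referenced by the override is declared elsewhere in the test class. A plausible declaration, assuming Mockito (which Hadoop's test code uses for mocking), would be:

import static org.mockito.Mockito.mock;

// Assumed declaration for the mockProxy used above: every call the channel
// issues through getProxy() lands on this mock rather than a live server.
private final QJournalProtocol mockProxy = mock(QJournalProtocol.class);

With the proxy mocked, the test can stall or fail individual RPCs and observe how the channel's queue behaves against the configured size limit.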

Example 4 with IPCLoggerChannel

Use of org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel in project hadoop by apache.

From the class TestJournalNode, method setup.

@Before
public void setup() throws Exception {
    // Start each run from a clean edits directory under the test base dir.
    File editsDir = new File(MiniDFSCluster.getBaseDirectory() + File.separator + "TestJournalNode");
    FileUtil.fullyDelete(editsDir);
    conf.set(DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_KEY, editsDir.getAbsolutePath());
    // Bind the RPC server to an ephemeral port so parallel tests do not collide.
    conf.set(DFSConfigKeys.DFS_JOURNALNODE_RPC_ADDRESS_KEY, "0.0.0.0:0");
    jn = new JournalNode();
    jn.setConf(conf);
    jn.start();
    journalId = "test-journalid-" + GenericTestUtils.uniqueSequenceId();
    journal = jn.getOrCreateJournal(journalId);
    journal.format(FAKE_NSINFO);
    ch = new IPCLoggerChannel(conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
}
Also used: IPCLoggerChannel (org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel), File (java.io.File), Before (org.junit.Before)
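
A setup like this needs a matching teardown so each test starts against a fresh JournalNode. A minimal sketch of what that counterpart could look like (the real test class defines its own equivalent):

import org.junit.After;

@After
public void teardown() throws Exception {
    if (jn != null) {
        // Shut down the JournalNode's RPC and HTTP servers between tests.
        jn.stop(0);
    }
}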

Aggregations

IPCLoggerChannel (org.apache.hadoop.hdfs.qjournal.client.IPCLoggerChannel): 4
Before (org.junit.Before): 2
Test (org.junit.Test): 2
File (java.io.File): 1
IOException (java.io.IOException): 1
HttpURLConnection (java.net.HttpURLConnection): 1
URL (java.net.URL): 1
QJournalProtocol (org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocol): 1
MetricsRecordBuilder (org.apache.hadoop.metrics2.MetricsRecordBuilder): 1