Example 6 with LogCapturer

Use of org.apache.hadoop.test.GenericTestUtils.LogCapturer in the Apache Hadoop project.

From the class TestNativeAzureFileSystemClientLogging, method testLoggingEnabled:

@Test
public void testLoggingEnabled() throws Exception {
    LogCapturer logs = LogCapturer.captureLogs(new Log4JLogger(Logger.getRootLogger()));
    // Update the configuration for this test (enable storage client logging).
    updateFileSystemConfiguration(true);
    performWASBOperations();
    assertTrue(verifyStorageClientLogs(logs.getOutput(), TEMP_DIR));
}
Also used: Log4JLogger (org.apache.commons.logging.impl.Log4JLogger), LogCapturer (org.apache.hadoop.test.GenericTestUtils.LogCapturer), Test (org.junit.Test)
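
Every example on this page follows the same capture/assert lifecycle: attach, exercise, detach, inspect. A minimal sketch of that pattern, assuming a JUnit 4 test; the SomeService class is hypothetical and stands in for whatever class is under test:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
import org.junit.Test;

public class LogCapturerLifecycleSketch {

    // Hypothetical class under test, used only for illustration.
    static class SomeService {
        private static final Log LOG = LogFactory.getLog(SomeService.class);

        void doWork() {
            LOG.info("work completed");
        }
    }

    @Test
    public void capturesAndAsserts() throws Exception {
        // Attach the capturer to the logger whose output we care about.
        LogCapturer logs = GenericTestUtils.LogCapturer.captureLogs(
                LogFactory.getLog(SomeService.class));
        try {
            // Exercise the code whose logging we want to verify.
            new SomeService().doWork();
        } finally {
            // Detach in finally so a test failure cannot leak the
            // capturer into later tests.
            logs.stopCapturing();
        }
        // getOutput() returns everything logged while capturing was active.
        GenericTestUtils.assertMatches(logs.getOutput(), "work completed");
    }
}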

Example 7 with LogCapturer

Use of org.apache.hadoop.test.GenericTestUtils.LogCapturer in the Apache Hadoop project.

From the class TestEncryptedTransfer, method testEncryptedWrite:

private void testEncryptedWrite(int numDns) throws IOException {
    setEncryptionConfigKeys();
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDns).build();
    fs = getFileSystem(conf);
    LogCapturer logs = GenericTestUtils.LogCapturer.captureLogs(LogFactory.getLog(SaslDataTransferServer.class));
    LogCapturer logs1 = GenericTestUtils.LogCapturer.captureLogs(LogFactory.getLog(DataTransferSaslUtil.class));
    try {
        writeTestDataToFile(fs);
    } finally {
        logs.stopCapturing();
        logs1.stopCapturing();
    }
    assertEquals(PLAIN_TEXT, DFSTestUtil.readFile(fs, TEST_PATH));
    if (resolverClazz == null) {
        // Test client and server negotiate cipher option
        GenericTestUtils.assertDoesNotMatch(logs.getOutput(), "Server using cipher suite");
        // Check the IOStreamPair
        GenericTestUtils.assertDoesNotMatch(logs1.getOutput(), "Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.");
    }
}
Also used: SaslDataTransferServer (org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferServer), LogCapturer (org.apache.hadoop.test.GenericTestUtils.LogCapturer), DataTransferSaslUtil (org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil)
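
A note on assertMatches and assertDoesNotMatch as used above: the second argument is a regular expression searched for anywhere in the captured output. That find-anywhere semantics is an assumption based on the usual GenericTestUtils implementation; a sketch of the equivalent check:

import java.util.regex.Pattern;

public class MatchSemanticsSketch {

    // Assumed equivalent of the assertions above: regex find() semantics,
    // i.e. the pattern may match any substring of the output.
    static boolean found(String output, String pattern) {
        return Pattern.compile(pattern).matcher(output).find();
    }

    public static void main(String[] args) {
        String output = "INFO Creating IOStreamPair of CryptoInputStream and CryptoOutputStream.";
        System.out.println(found(output, "Creating IOStreamPair"));      // true
        System.out.println(found(output, "Server using cipher suite"));  // false
    }
}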

Example 8 with LogCapturer

Use of org.apache.hadoop.test.GenericTestUtils.LogCapturer in the Apache Hadoop project.

From the class TestEncryptedTransfer, method testClientThatDoesNotSupportEncryption:

@Test
public void testClientThatDoesNotSupportEncryption() throws IOException {
    // Set short retry timeouts so this test runs faster
    conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 10);
    writeUnencryptedAndThenRestartEncryptedCluster();
    DFSClient client = DFSClientAdapter.getDFSClient((DistributedFileSystem) fs);
    DFSClient spyClient = Mockito.spy(client);
    Mockito.doReturn(false).when(spyClient).shouldEncryptData();
    DFSClientAdapter.setDFSClient((DistributedFileSystem) fs, spyClient);
    LogCapturer logs = GenericTestUtils.LogCapturer.captureLogs(LogFactory.getLog(DataNode.class));
    try {
        assertEquals(PLAIN_TEXT, DFSTestUtil.readFile(fs, TEST_PATH));
        if (resolverClazz != null && !resolverClazz.endsWith("TestTrustedChannelResolver")) {
            fail("Should not have been able to read without encryption enabled.");
        }
    } catch (IOException ioe) {
        GenericTestUtils.assertExceptionContains("Could not obtain block:", ioe);
    } finally {
        logs.stopCapturing();
    }
    if (resolverClazz == null) {
        GenericTestUtils.assertMatches(logs.getOutput(), "Failed to read expected encryption handshake from client at");
    }
}
Also used: DataNode (org.apache.hadoop.hdfs.server.datanode.DataNode), LogCapturer (org.apache.hadoop.test.GenericTestUtils.LogCapturer), IOException (java.io.IOException), Test (org.junit.Test)
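
The spy in this example is plain Mockito: wrap a real object so one method is stubbed while every other call runs the real code, here forcing the "client refuses encryption" path. A self-contained sketch; the Client class is hypothetical and stands in for DFSClient:

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;

public class SpySketch {

    // Hypothetical stand-in for DFSClient, used only for illustration.
    public static class Client {
        public boolean shouldEncryptData() {
            return true;
        }
    }

    public static void main(String[] args) {
        Client spyClient = spy(new Client());
        // Stub only this method; unstubbed calls still reach the real
        // implementation. doReturn(..).when(..) avoids invoking the real
        // method during stubbing, which matters for spies.
        doReturn(false).when(spyClient).shouldEncryptData();
        System.out.println(spyClient.shouldEncryptData()); // prints false
    }
}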

Example 9 with LogCapturer

Use of org.apache.hadoop.test.GenericTestUtils.LogCapturer in the Apache Hadoop project.

From the class TestDataStream, method testDfsClient:

@Test(timeout = 60000)
public void testDfsClient() throws IOException, InterruptedException {
    LogCapturer logs = GenericTestUtils.LogCapturer.captureLogs(LogFactory.getLog(DataStreamer.class));
    byte[] toWrite = new byte[PACKET_SIZE];
    new Random(1).nextBytes(toWrite);
    final Path path = new Path("/file1");
    final DistributedFileSystem dfs = cluster.getFileSystem();
    FSDataOutputStream out = dfs.create(path, false);
    out.write(toWrite);
    out.write(toWrite);
    out.hflush();
    // Wait long enough to cross the slow IO warning threshold
    Thread.sleep(15 * 1000);
    out.write(toWrite);
    out.write(toWrite);
    out.hflush();
    // Give log capture time to catch up on a busy cluster
    Thread.sleep(5 * 1000);
    out.close();
    logs.stopCapturing();
    GenericTestUtils.assertDoesNotMatch(logs.getOutput(), "Slow ReadProcessor read fields for block");
}
Also used: Path (org.apache.hadoop.fs.Path), Random (java.util.Random), LogCapturer (org.apache.hadoop.test.GenericTestUtils.LogCapturer), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), Test (org.junit.Test)
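
The hflush() calls are what give the sleeps meaning: hflush() pushes the buffered packets into the DataNode pipeline and waits for acknowledgements, and the "Slow ReadProcessor" warning is logged when reading those acknowledgements exceeds the client's slow IO threshold. A sketch of that write/flush pattern against a throwaway MiniDFSCluster (a sketch only; the test above reuses a cluster set up elsewhere in the class):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;

public class HflushSketch {
    public static void main(String[] args) throws Exception {
        MiniDFSCluster cluster =
                new MiniDFSCluster.Builder(new Configuration()).build();
        try {
            DistributedFileSystem dfs = cluster.getFileSystem();
            try (FSDataOutputStream out = dfs.create(new Path("/sketch"), false)) {
                out.write(new byte[1024]);
                // Blocks until the pipeline acknowledges the packets; a
                // slow acknowledgement here is what would trigger the
                // warning the test watches for.
                out.hflush();
            }
        } finally {
            cluster.shutdown();
        }
    }
}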

Example 10 with LogCapturer

Use of org.apache.hadoop.test.GenericTestUtils.LogCapturer in the Apache Hadoop project.

From the class TestSaslDataTransfer, method testServerSaslNoClientSasl:

@Test
public void testServerSaslNoClientSasl() throws Exception {
    HdfsConfiguration clusterConf = createSecureConfig("authentication,integrity,privacy");
    // Set short retry timeouts so this test runs faster
    clusterConf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 10);
    startCluster(clusterConf);
    HdfsConfiguration clientConf = new HdfsConfiguration(clusterConf);
    clientConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "");
    LogCapturer logs = GenericTestUtils.LogCapturer.captureLogs(LogFactory.getLog(DataNode.class));
    try {
        doTest(clientConf);
        Assert.fail("Should fail if SASL data transfer protection is not " + "configured or not supported in client");
    } catch (IOException e) {
        GenericTestUtils.assertMatches(e.getMessage(), "could only be replicated to 0 nodes");
    } finally {
        logs.stopCapturing();
    }
    GenericTestUtils.assertMatches(logs.getOutput(), "Failed to read expected SASL data transfer protection " + "handshake from client at");
}
Also used: DataNode (org.apache.hadoop.hdfs.server.datanode.DataNode), LogCapturer (org.apache.hadoop.test.GenericTestUtils.LogCapturer), IOException (java.io.IOException), HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), Test (org.junit.Test)
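
The client-versus-cluster split above leans on the configuration copy constructor: copy the secured cluster config, then blank one key so only the client loses its SASL settings. A minimal sketch with the key name written out (in the test it comes from the DFS_DATA_TRANSFER_PROTECTION_KEY constant):

import org.apache.hadoop.conf.Configuration;

public class ConfigOverrideSketch {
    public static void main(String[] args) {
        Configuration clusterConf = new Configuration();
        clusterConf.set("dfs.data.transfer.protection",
                "authentication,integrity,privacy");
        // Copy, then override: the cluster keeps SASL protection while
        // the client config advertises none.
        Configuration clientConf = new Configuration(clusterConf);
        clientConf.set("dfs.data.transfer.protection", "");
        System.out.println(clusterConf.get("dfs.data.transfer.protection"));
        System.out.println("[" + clientConf.get("dfs.data.transfer.protection") + "]");
    }
}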

Aggregations

LogCapturer (org.apache.hadoop.test.GenericTestUtils.LogCapturer): 14
Test (org.junit.Test): 10
IOException (java.io.IOException): 4
Configuration (org.apache.hadoop.conf.Configuration): 4
Path (org.apache.hadoop.fs.Path): 4
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 3
Log4JLogger (org.apache.commons.logging.impl.Log4JLogger): 2
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 2
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster): 2
DataTransferSaslUtil (org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil): 2
SaslDataTransferServer (org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferServer): 2
DataNode (org.apache.hadoop.hdfs.server.datanode.DataNode): 2
FakeTimer (org.apache.hadoop.util.FakeTimer): 2
File (java.io.File): 1
URI (java.net.URI): 1
Random (java.util.Random): 1
CountDownLatch (java.util.concurrent.CountDownLatch): 1
Pattern (java.util.regex.Pattern): 1
FileChecksum (org.apache.hadoop.fs.FileChecksum): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1