Search in sources :

Example 1 with DurationStats

Use of org.apache.hadoop.fs.swift.util.DurationStats in the Apache Hadoop project.

From the class TestWriteManySmallFiles, method testScaledWriteThenRead.

/**
 * Scale test: write {@code getOperationCount()} small files into one directory,
 * list the directory, read every file back and verify its contents, then
 * recursively delete the directory. Per-operation timings are accumulated in
 * {@link DurationStats} instances and logged at the end for offline analysis.
 *
 * @throws Throwable on any test failure or filesystem error
 */
@Test(timeout = SWIFT_BULK_IO_TEST_TIMEOUT)
public void testScaledWriteThenRead() throws Throwable {
    Path dir = new Path("/test/manysmallfiles");
    // time the initial cleanup delete separately from the final delete
    Duration rm1 = new Duration();
    fs.delete(dir, true);
    rm1.finished();
    fs.mkdirs(dir);
    // baseline listing cost of the empty directory
    Duration ls1 = new Duration();
    fs.listStatus(dir);
    ls1.finished();
    long count = getOperationCount();
    SwiftTestUtils.noteAction("Beginning Write of " + count + " files ");
    DurationStats writeStats = new DurationStats("write");
    DurationStats readStats = new DurationStats("read");
    // zero-padded names keep listings lexically ordered: part-00000000, ...
    String format = "%08d";
    for (long l = 0; l < count; l++) {
        String name = String.format(format, l);
        Path p = new Path(dir, "part-" + name);
        Duration d = new Duration();
        SwiftTestUtils.writeTextFile(fs, p, name, false);
        d.finished();
        writeStats.add(d);
        // NOTE(review): a 1s pause per file dominates test runtime; presumably
        // this throttles requests against the Swift endpoint — confirm before removing
        Thread.sleep(1000);
    }
    //at this point, the directory is full.
    SwiftTestUtils.noteAction("Beginning ls");
    Duration ls2 = new Duration();
    // FileSystem.listStatus already returns FileStatus[]; the cast was redundant
    FileStatus[] status2 = fs.listStatus(dir);
    ls2.finished();
    assertEquals("Not enough entries in the directory", count, status2.length);
    SwiftTestUtils.noteAction("Beginning read");
    for (long l = 0; l < count; l++) {
        String name = String.format(format, l);
        Path p = new Path(dir, "part-" + name);
        Duration d = new Duration();
        // each file's content is exactly its zero-padded name
        String result = SwiftTestUtils.readBytesToString(fs, p, name.length());
        assertEquals(name, result);
        d.finished();
        readStats.add(d);
    }
    //do a recursive delete
    SwiftTestUtils.noteAction("Beginning delete");
    Duration rm2 = new Duration();
    fs.delete(dir, true);
    rm2.finished();
    //print the stats in a CSV-ish form for easy extraction from logs
    LOG.info(String.format("'filesystem','%s'", fs.getUri()));
    LOG.info(writeStats.toString());
    LOG.info(readStats.toString());
    LOG.info(String.format("'rm1',%d,'ls1',%d", rm1.value(), ls1.value()));
    LOG.info(String.format("'rm2',%d,'ls2',%d", rm2.value(), ls2.value()));
}
Also used : Path(org.apache.hadoop.fs.Path) DurationStats(org.apache.hadoop.fs.swift.util.DurationStats) FileStatus(org.apache.hadoop.fs.FileStatus) Duration(org.apache.hadoop.fs.swift.util.Duration) Test(org.junit.Test)

Example 2 with DurationStats

Use of org.apache.hadoop.fs.swift.util.DurationStats in the Apache Hadoop project.

From the class TestSwiftRestClient, method testPutAndDelete.

/**
 * Round-trip a one-byte object through the Swift REST client: upload it,
 * verify it exists via a HEAD request (logging headers and timing), delete it,
 * then verify a subsequent HEAD fails with {@link FileNotFoundException}.
 * Finally logs the client's accumulated per-operation statistics.
 *
 * @throws Throwable on any test failure or client error
 */
@Test(timeout = SWIFT_TEST_TIMEOUT)
public void testPutAndDelete() throws Throwable {
    assumeEnabled();
    SwiftRestClient client = createClient();
    client.authenticate();
    Path path = new Path("restTestPutAndDelete");
    SwiftObjectPath sobject = SwiftObjectPath.fromPath(serviceURI, path);
    byte[] stuff = new byte[1];
    stuff[0] = 'a';
    client.upload(sobject, new ByteArrayInputStream(stuff), stuff.length);
    //check file exists and time the HEAD request
    Duration head = new Duration();
    Header[] responseHeaders = client.headRequest("expect success", sobject, SwiftRestClient.NEWEST);
    head.finished();
    LOG.info("head request duration " + head);
    for (Header header : responseHeaders) {
        LOG.info(header.toString());
    }
    //delete the file
    client.delete(sobject);
    //check file is gone: the HEAD must throw, so its return value is discarded
    try {
        client.headRequest("expect fail", sobject, SwiftRestClient.NEWEST);
        Assert.fail("Expected deleted file, but object is still present: " + sobject);
    } catch (FileNotFoundException expected) {
        //expected: the object was deleted above
    }
    //dump accumulated operation statistics for offline analysis
    for (DurationStats stats : client.getOperationStatistics()) {
        LOG.info(stats);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) SwiftObjectPath(org.apache.hadoop.fs.swift.util.SwiftObjectPath) DurationStats(org.apache.hadoop.fs.swift.util.DurationStats) Header(org.apache.commons.httpclient.Header) ByteArrayInputStream(java.io.ByteArrayInputStream) FileNotFoundException(java.io.FileNotFoundException) Duration(org.apache.hadoop.fs.swift.util.Duration) SwiftObjectPath(org.apache.hadoop.fs.swift.util.SwiftObjectPath) Test(org.junit.Test)

Aggregations

Path (org.apache.hadoop.fs.Path)2 Duration (org.apache.hadoop.fs.swift.util.Duration)2 DurationStats (org.apache.hadoop.fs.swift.util.DurationStats)2 Test (org.junit.Test)2 ByteArrayInputStream (java.io.ByteArrayInputStream)1 FileNotFoundException (java.io.FileNotFoundException)1 Header (org.apache.commons.httpclient.Header)1 FileStatus (org.apache.hadoop.fs.FileStatus)1 SwiftObjectPath (org.apache.hadoop.fs.swift.util.SwiftObjectPath)1