Use of org.apache.hadoop.fs.swift.util.Duration in project hadoop by Apache.
The class TestWriteManySmallFiles, method testScaledWriteThenRead.
@Test(timeout = SWIFT_BULK_IO_TEST_TIMEOUT)
public void testScaledWriteThenRead() throws Throwable {
  Path dir = new Path("/test/manysmallfiles");
  Duration rm1 = new Duration();
  fs.delete(dir, true);
  rm1.finished();
  fs.mkdirs(dir);
  Duration ls1 = new Duration();
  fs.listStatus(dir);
  ls1.finished();
  long count = getOperationCount();
  SwiftTestUtils.noteAction("Beginning Write of " + count + " files ");
  DurationStats writeStats = new DurationStats("write");
  DurationStats readStats = new DurationStats("read");
  String format = "%08d";
  for (long l = 0; l < count; l++) {
    String name = String.format(format, l);
    Path p = new Path(dir, "part-" + name);
    Duration d = new Duration();
    SwiftTestUtils.writeTextFile(fs, p, name, false);
    d.finished();
    writeStats.add(d);
    Thread.sleep(1000);
  }
  // at this point, the directory is full
  SwiftTestUtils.noteAction("Beginning ls");
  Duration ls2 = new Duration();
  FileStatus[] status2 = (FileStatus[]) fs.listStatus(dir);
  ls2.finished();
  assertEquals("Not enough entries in the directory", count, status2.length);
  SwiftTestUtils.noteAction("Beginning read");
  for (long l = 0; l < count; l++) {
    String name = String.format(format, l);
    Path p = new Path(dir, "part-" + name);
    Duration d = new Duration();
    String result = SwiftTestUtils.readBytesToString(fs, p, name.length());
    assertEquals(name, result);
    d.finished();
    readStats.add(d);
  }
  // do a recursive delete
  SwiftTestUtils.noteAction("Beginning delete");
  Duration rm2 = new Duration();
  fs.delete(dir, true);
  rm2.finished();
  // print the stats
  LOG.info(String.format("'filesystem','%s'", fs.getUri()));
  LOG.info(writeStats.toString());
  LOG.info(readStats.toString());
  LOG.info(String.format("'rm1',%d,'ls1',%d", rm1.value(), ls1.value()));
  LOG.info(String.format("'rm2',%d,'ls2',%d", rm2.value(), ls2.value()));
}
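This first example shows the timing idiom that the rest of this page repeats: construct a Duration immediately before the operation, call finished() when it returns, and either read value() directly or feed the sample into a DurationStats accumulator. The following minimal sketch isolates that pattern. It uses only the Duration and DurationStats calls visible in the test above; the simulatedOperation() helper and the assumption that value() reports elapsed milliseconds are illustrative, not taken from the Hadoop source.

import org.apache.hadoop.fs.swift.util.Duration;
import org.apache.hadoop.fs.swift.util.DurationStats;

public class DurationIdiomSketch {

  public static void main(String[] args) throws Exception {
    // accumulate per-operation timings under a named bucket
    DurationStats listStats = new DurationStats("list");

    for (int i = 0; i < 3; i++) {
      // start timing just before the operation under test
      Duration d = new Duration();
      simulatedOperation();
      // stop the clock as soon as the operation returns
      d.finished();
      // add the sample to the running statistics
      listStats.add(d);
      // value() is the elapsed time of this single sample
      System.out.println("sample " + i + " took " + d.value() + " ms");
    }
    // DurationStats.toString() summarises the accumulated samples
    System.out.println(listStats);
  }

  // hypothetical stand-in for the filesystem call being timed
  private static void simulatedOperation() throws InterruptedException {
    Thread.sleep(50);
  }
}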
Use of org.apache.hadoop.fs.swift.util.Duration in project hadoop by Apache.
The class SwiftRestClient, method perform.
/**
 * Performs the HTTP request, validates the response code and returns
 * the received data. HTTP status codes are converted into exceptions.
 * @param reason why this operation is taking place; used for statistics
 * @param uri URI to source
 * @param processor HttpMethodProcessor
 * @param <M> method type
 * @param <R> result type
 * @return result of the HTTP request
 * @throws IOException IO problems
 * @throws SwiftBadRequestException the status code indicated "Bad request"
 * @throws SwiftInvalidResponseException the status code is out of range
 * for the action (excluding 404 responses)
 * @throws SwiftInternalStateException the internal state of this client
 * is invalid
 * @throws FileNotFoundException a 404 response was returned
 */
private <M extends HttpMethod, R> R perform(String reason, URI uri,
    HttpMethodProcessor<M, R> processor)
    throws IOException, SwiftBadRequestException, SwiftInternalStateException,
    SwiftInvalidResponseException, FileNotFoundException {
  checkNotNull(uri);
  checkNotNull(processor);
  final M method = processor.createMethod(uri.toString());
  // retry policy
  HttpMethodParams methodParams = method.getParams();
  methodParams.setParameter(HttpMethodParams.RETRY_HANDLER,
      new DefaultHttpMethodRetryHandler(retryCount, false));
  methodParams.setIntParameter(HttpConnectionParams.CONNECTION_TIMEOUT,
      connectTimeout);
  methodParams.setSoTimeout(socketTimeout);
  method.addRequestHeader(HEADER_USER_AGENT, SWIFT_USER_AGENT);
  Duration duration = new Duration();
  boolean success = false;
  try {
    int statusCode = 0;
    try {
      statusCode = exec(method);
    } catch (IOException e) {
      // rethrow with extra diagnostics and wiki links
      throw ExceptionDiags.wrapException(uri.toString(), method.getName(), e);
    }
    // Look at the response and see if it was valid or not.
    // Valid is more than a simple 200; even a 404 "not found" is considered
    // valid, which it is for many methods.
    // Validate the allowed status codes for this operation.
    int[] allowedStatusCodes = processor.getAllowedStatusCodes();
    boolean validResponse = isStatusCodeExpected(statusCode, allowedStatusCodes);
    if (!validResponse) {
      IOException ioe = buildException(uri, method, statusCode);
      throw ioe;
    }
    R r = processor.extractResult(method);
    success = true;
    return r;
  } catch (IOException e) {
    // always release the connection on failure
    method.releaseConnection();
    throw e;
  } finally {
    duration.finished();
    durationStats.add(method.getName() + " " + reason, duration, success);
  }
}
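The structure worth noting in perform() is how timing and outcome are recorded together: the Duration starts before the request, success only flips to true once the result has been extracted, and the finally block records the sample whether the call returned normally or threw. Below is a stripped-down sketch of that shape; doRequest() is a hypothetical stand-in for exec()/extractResult(), and a plain log line stands in for the client's duration statistics table.

import java.io.IOException;

import org.apache.hadoop.fs.swift.util.Duration;

public class TimedCallSketch {

  /**
   * Run an operation, timing it and recording whether it succeeded.
   * The finally block runs on both the success and failure paths,
   * so the timing sample is never lost.
   */
  static String timedCall(String reason) throws IOException {
    Duration duration = new Duration();
    boolean success = false;
    try {
      String result = doRequest();   // hypothetical stand-in for exec()/extractResult()
      success = true;                // only reached if no exception was thrown
      return result;
    } finally {
      duration.finished();
      // the real client feeds this into its duration statistics;
      // here we just log the outcome and elapsed time
      System.out.println(reason + " success=" + success
          + " duration=" + duration);
    }
  }

  // hypothetical operation being timed
  private static String doRequest() throws IOException {
    return "ok";
  }

  public static void main(String[] args) throws IOException {
    System.out.println(timedCall("demo GET"));
  }
}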
Use of org.apache.hadoop.fs.swift.util.Duration in project hadoop by Apache.
The class TestSwiftRestClient, method testPutAndDelete.
@Test(timeout = SWIFT_TEST_TIMEOUT)
public void testPutAndDelete() throws Throwable {
  assumeEnabled();
  SwiftRestClient client = createClient();
  client.authenticate();
  Path path = new Path("restTestPutAndDelete");
  SwiftObjectPath sobject = SwiftObjectPath.fromPath(serviceURI, path);
  byte[] stuff = new byte[1];
  stuff[0] = 'a';
  client.upload(sobject, new ByteArrayInputStream(stuff), stuff.length);
  // check the file exists
  Duration head = new Duration();
  Header[] responseHeaders = client.headRequest("expect success", sobject,
      SwiftRestClient.NEWEST);
  head.finished();
  LOG.info("head request duration " + head);
  for (Header header : responseHeaders) {
    LOG.info(header.toString());
  }
  // delete the file
  client.delete(sobject);
  // check the file is gone
  try {
    Header[] headers = client.headRequest("expect fail", sobject,
        SwiftRestClient.NEWEST);
    Assert.fail("Expected deleted file, but object is still present: " + sobject);
  } catch (FileNotFoundException e) {
    // expected
  }
  for (DurationStats stats : client.getOperationStatistics()) {
    LOG.info(stats);
  }
}