Example use of org.apache.crail.CrailFile in the Apache incubator-crail project: class CrailHDFS, method open().
@Override
public FSDataInputStream open(Path path, int bufferSize) throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
    // Resolve the path to a Crail file; any lookup failure is surfaced as IOException.
    CrailFile file;
    try {
        file = dfs.lookup(path.toUri().getRawPath()).get().asFile();
    } catch (Exception e) {
        throw new IOException(e);
    }
    if (file == null) {
        // Lookup yielded no file node for this path.
        throw new IOException("Failed to open file, path " + path.toString());
    }
    // Open a buffered stream sized to the file's current capacity.
    // NOTE(review): the HDFS bufferSize hint is ignored here — presumably intentional; confirm.
    CrailBufferedInputStream stream;
    try {
        stream = file.getBufferedInputStream(file.getCapacity());
    } catch (Exception e) {
        throw new IOException(e);
    }
    if (stream == null) {
        throw new IOException("Failed to open file, path " + path.toString());
    }
    return new CrailHDFSInputStream(stream);
}
Example use of org.apache.crail.CrailFile in the Apache incubator-crail project: class CrailFsck, method blockStatistics().
/**
 * Collects per-block statistics for the given path and prints them.
 * Directories and multi-files are expanded one level: block statistics are
 * gathered for each child entry; a plain data file is walked directly.
 *
 * @param filename Crail path of a file, directory, or multi-file
 * @throws Exception on lookup or store failures
 */
public void blockStatistics(String filename) throws Exception {
    HashMap<String, AtomicInteger> stats = new HashMap<String, AtomicInteger>();
    CrailConfiguration conf = new CrailConfiguration();
    CrailStore fs = CrailStore.newInstance(conf);
    // Close the store even when lookup/walk throws (was leaked on exception before).
    try {
        CrailNode node = fs.lookup(filename).get();
        if (node.getType() == CrailNodeType.DIRECTORY) {
            walkChildBlocks(stats, fs, node.asDirectory().listEntries());
        } else if (node.getType() == CrailNodeType.DATAFILE) {
            CrailFile file = node.asFile();
            walkBlocks(stats, fs, file.getPath(), 0, file.getCapacity());
        } else if (node.getType() == CrailNodeType.MULTIFILE) {
            walkChildBlocks(stats, fs, node.asMultiFile().listEntries());
        }
        printStats(stats);
    } finally {
        fs.close();
    }
}

/** Walks the blocks of every child entry; shared by directory and multi-file handling. */
private void walkChildBlocks(HashMap<String, AtomicInteger> stats, CrailStore fs, Iterator<String> iter) throws Exception {
    while (iter.hasNext()) {
        String path = iter.next();
        CrailFile child = fs.lookup(path).get().asFile();
        walkBlocks(stats, fs, child.getPath(), 0, child.getCapacity());
    }
}
Example use of org.apache.crail.CrailFile in the Apache incubator-crail project: class CrailHadoopFileSystem, method create().
/**
 * Creates a Crail data file for the given HDFS path and returns an output stream to it.
 * If the first create fails because the parent directory is missing, the parent
 * chain is created via mkdirs and the create is retried once.
 * NOTE(review): overwrite/permission/replication/blockSize hints are not forwarded
 * to Crail — presumably intentional for this adapter; confirm.
 */
@Override
public FSDataOutputStream create(Path path, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress) throws IOException {
    CrailFile fileInfo = null;
    try {
        fileInfo = dfs.create(path.toUri().getRawPath(), CrailNodeType.DATAFILE, CrailStorageClass.PARENT, CrailLocationClass.PARENT, true).get().asFile();
    } catch (Exception e) {
        // getMessage() may be null (e.g. wrapped NPE); guard it so we rethrow the
        // original cause as IOException instead of dying with a secondary NPE here.
        String msg = e.getMessage();
        if (msg != null && msg.contains(RpcErrors.messages[RpcErrors.ERR_PARENT_MISSING])) {
            fileInfo = null; // parent missing: fall through to mkdirs + retry
        } else {
            throw new IOException(e);
        }
    }
    if (fileInfo == null) {
        Path parent = path.getParent();
        this.mkdirs(parent, FsPermission.getDirDefault());
        try {
            fileInfo = dfs.create(path.toUri().getRawPath(), CrailNodeType.DATAFILE, CrailStorageClass.PARENT, CrailLocationClass.PARENT, true).get().asFile();
        } catch (Exception e) {
            throw new IOException(e);
        }
    }
    CrailBufferedOutputStream outputStream = null;
    if (fileInfo != null) {
        try {
            // Make the new entry visible in its parent directory before handing out a stream.
            fileInfo.syncDir();
            outputStream = fileInfo.getBufferedOutputStream(Integer.MAX_VALUE);
        } catch (Exception e) {
            throw new IOException(e);
        }
    }
    if (outputStream != null) {
        return new CrailHDFSOutputStream(outputStream, statistics);
    } else {
        throw new IOException("Failed to create file, path " + path.toString());
    }
}
Example use of org.apache.crail.CrailFile in the Apache incubator-crail project: class CrailBenchmark, method readRandom().
/**
 * Benchmarks random reads of {@code size} bytes from {@code filename}, {@code loop} times,
 * using either the buffered or the direct Crail input stream, and prints throughput/latency.
 *
 * @param filename path of the file to read
 * @param size     read size in bytes per operation
 * @param loop     number of read operations
 * @param buffered true = CrailBufferedInputStream, false = CrailInputStream (async direct)
 */
void readRandom(String filename, int size, int loop, boolean buffered) throws Exception {
    System.out.println("readRandom, filename " + filename + ", size " + size + ", loop " + loop + ", buffered " + buffered);
    // Pick a buffer of exactly `size` bytes: reuse the pool buffer, slice it down, or
    // allocate off-heap when the request exceeds the pool buffer size.
    CrailBuffer buf = null;
    if (size == CrailConstants.BUFFER_SIZE) {
        buf = fs.allocateBuffer();
    } else if (size < CrailConstants.BUFFER_SIZE) {
        CrailBuffer _buf = fs.allocateBuffer();
        _buf.clear().limit(size);
        buf = _buf.slice();
    } else {
        buf = OffHeapBuffer.wrap(ByteBuffer.allocateDirect(size));
    }
    // warmup
    ConcurrentLinkedQueue<CrailBuffer> bufferQueue = new ConcurrentLinkedQueue<CrailBuffer>();
    bufferQueue.add(buf);
    warmUp(filename, warmup, bufferQueue);
    // benchmark
    System.out.println("starting benchmark...");
    fs.getStatistics().reset();
    CrailFile file = fs.lookup(filename).get().asFile();
    CrailBufferedInputStream bufferedStream = file.getBufferedInputStream(file.getCapacity());
    CrailInputStream directStream = file.getDirectInputStream(file.getCapacity());
    double sumbytes = 0;
    double ops = 0;
    // Number of whole `size`-chunks we can seek to without reading past EOF.
    long _range = file.getCapacity() - ((long) buf.capacity());
    _range = _range / size;
    double range = (double) _range;
    Random random = new Random();
    long start = System.currentTimeMillis();
    long end = start;
    // Close both streams even if a read throws (they were leaked on exception before).
    try {
        while (ops < loop) {
            if (buffered) {
                buf.clear();
                double _offset = range * random.nextDouble();
                long offset = (long) _offset * size; // aligned to a multiple of `size`
                bufferedStream.seek(offset);
                double ret = (double) bufferedStream.read(buf.getByteBuffer());
                if (ret > 0) {
                    sumbytes = sumbytes + ret;
                    ops = ops + 1.0;
                } else {
                    break; // EOF / short read: stop the benchmark
                }
            } else {
                buf.clear();
                double _offset = range * random.nextDouble();
                long offset = (long) _offset * size;
                directStream.seek(offset);
                double ret = (double) directStream.read(buf).get().getLen();
                if (ret > 0) {
                    sumbytes = sumbytes + ret;
                    ops = ops + 1.0;
                } else {
                    break;
                }
            }
        }
        end = System.currentTimeMillis();
    } finally {
        bufferedStream.close();
        directStream.close();
    }
    double executionTime = ((double) (end - start)) / 1000.0;
    double throughput = 0.0;
    double latency = 0.0;
    double sumbits = sumbytes * 8.0;
    if (executionTime > 0) {
        throughput = sumbits / executionTime / 1000.0 / 1000.0; // Mbit/s
        latency = 1000000.0 * executionTime / ops;              // usec per op
    }
    System.out.println("execution time " + executionTime);
    System.out.println("ops " + ops);
    System.out.println("sumbytes " + sumbytes);
    System.out.println("throughput " + throughput);
    System.out.println("latency " + latency);
    fs.getStatistics().print("close");
}
Example use of org.apache.crail.CrailFile in the Apache incubator-crail project: class CrailBenchmark, method writeAsync().
/**
 * Benchmarks asynchronous writes: keeps up to {@code batch} writes in flight,
 * completing the oldest before issuing the next, then drains the remainder.
 *
 * @param filename      file to create and write
 * @param size          buffer size per write in bytes
 * @param loop          total number of write operations
 * @param batch         number of outstanding async writes
 * @param storageClass  Crail storage class for the new file
 * @param locationClass Crail location class for the new file
 * @param skipDir       if true, do not enumerate/sync the parent directory on create
 */
void writeAsync(String filename, int size, int loop, int batch, int storageClass, int locationClass, boolean skipDir) throws Exception {
    System.out.println("writeAsync, filename " + filename + ", size " + size + ", loop " + loop + ", batch " + batch + ", storageClass " + storageClass + ", locationClass " + locationClass);
    ConcurrentLinkedQueue<CrailBuffer> bufferQueue = new ConcurrentLinkedQueue<CrailBuffer>();
    for (int i = 0; i < batch; i++) {
        CrailBuffer buf = null;
        if (size == CrailConstants.BUFFER_SIZE) {
            buf = fs.allocateBuffer();
        } else if (size < CrailConstants.BUFFER_SIZE) {
            CrailBuffer _buf = fs.allocateBuffer();
            _buf.clear().limit(size);
            buf = _buf.slice();
        } else {
            buf = OffHeapBuffer.wrap(ByteBuffer.allocateDirect(size));
        }
        bufferQueue.add(buf);
    }
    // warmup
    warmUp(filename, warmup, bufferQueue);
    // benchmark
    System.out.println("starting benchmark...");
    LinkedBlockingQueue<Future<CrailResult>> futureQueue = new LinkedBlockingQueue<Future<CrailResult>>();
    // Key by the Future itself, not future.hashCode(): hash codes are not unique, and a
    // collision would silently recycle the wrong buffer while a write is still in flight.
    HashMap<Future<CrailResult>, CrailBuffer> futureMap = new HashMap<Future<CrailResult>, CrailBuffer>();
    fs.getStatistics().reset();
    long _loop = (long) loop;
    long _bufsize = (long) CrailConstants.BUFFER_SIZE;
    // NOTE(review): capacity hint uses BUFFER_SIZE, not `size` — confirm intended when size differs.
    long _capacity = _loop * _bufsize;
    double sumbytes = 0;
    double ops = 0;
    CrailFile file = fs.create(filename, CrailNodeType.DATAFILE, CrailStorageClass.get(storageClass), CrailLocationClass.get(locationClass), !skipDir).get().asFile();
    CrailOutputStream directStream = file.getDirectOutputStream(_capacity);
    long start = System.currentTimeMillis();
    // Prime the pipeline with batch-1 outstanding writes.
    for (int i = 0; i < batch - 1 && ops < loop; i++) {
        CrailBuffer buf = bufferQueue.poll();
        buf.clear();
        Future<CrailResult> future = directStream.write(buf);
        futureQueue.add(future);
        futureMap.put(future, buf);
        ops = ops + 1.0;
    }
    // Steady state: issue one write, then complete the oldest and recycle its buffer.
    while (ops < loop) {
        CrailBuffer buf = bufferQueue.poll();
        buf.clear();
        Future<CrailResult> future = directStream.write(buf);
        futureQueue.add(future);
        futureMap.put(future, buf);
        future = futureQueue.poll();
        future.get();
        buf = futureMap.remove(future);
        bufferQueue.add(buf);
        sumbytes = sumbytes + buf.capacity();
        ops = ops + 1.0;
    }
    // Drain: wait for all remaining in-flight writes.
    while (!futureQueue.isEmpty()) {
        Future<CrailResult> future = futureQueue.poll();
        future.get();
        CrailBuffer buf = futureMap.remove(future);
        sumbytes = sumbytes + buf.capacity();
        ops = ops + 1.0;
    }
    long end = System.currentTimeMillis();
    double executionTime = ((double) (end - start)) / 1000.0;
    double throughput = 0.0;
    double latency = 0.0;
    double sumbits = sumbytes * 8.0;
    if (executionTime > 0) {
        throughput = sumbits / executionTime / 1000.0 / 1000.0; // Mbit/s
        latency = 1000000.0 * executionTime / ops;              // usec per op
    }
    directStream.close();
    System.out.println("execution time " + executionTime);
    System.out.println("ops " + ops);
    System.out.println("sumbytes " + sumbytes);
    System.out.println("throughput " + throughput);
    System.out.println("latency " + latency);
    fs.getStatistics().print("close");
}
Aggregations