Use of java.nio.BufferOverflowException in project hbase by apache.
The class ByteBufferOutputStream, method checkSizeAndGrow.
protected void checkSizeAndGrow(int extra) {
  long capacityNeeded = curBuf.position() + (long) extra;
  if (capacityNeeded > curBuf.limit()) {
    // guarantee it's possible to fit
    if (capacityNeeded > MAX_ARRAY_SIZE) {
      throw new BufferOverflowException();
    }
    // double until hit the cap
    long nextCapacity = Math.min(curBuf.capacity() * 2L, MAX_ARRAY_SIZE);
    // but make sure there is enough if twice the existing capacity is still too small
    nextCapacity = Math.max(nextCapacity, capacityNeeded);
    ByteBuffer newBuf = allocate((int) nextCapacity, curBuf.isDirect());
    curBuf.flip();
    ByteBufferUtils.copyFromBufferToBuffer(curBuf, newBuf);
    curBuf = newBuf;
  }
}
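checkSizeAndGrow relies on HBase-internal helpers (MAX_ARRAY_SIZE, allocate, ByteBufferUtils.copyFromBufferToBuffer). The sketch below shows the same grow-or-fail pattern using only JDK classes; the 32-byte initial capacity and the Integer.MAX_VALUE - 8 cap are assumptions for illustration, not the HBase constants.

import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;

public class GrowableBuffer {
    // Assumed cap mirroring the usual "max array size" guard; not the HBase constant.
    private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;

    private ByteBuffer curBuf = ByteBuffer.allocate(32);

    void checkSizeAndGrow(int extra) {
        long capacityNeeded = curBuf.position() + (long) extra;
        if (capacityNeeded <= curBuf.limit()) {
            return; // enough room already
        }
        if (capacityNeeded > MAX_ARRAY_SIZE) {
            throw new BufferOverflowException(); // cannot grow any further
        }
        // Double the capacity, but never past the cap and never below what is needed.
        long nextCapacity = Math.min(curBuf.capacity() * 2L, MAX_ARRAY_SIZE);
        nextCapacity = Math.max(nextCapacity, capacityNeeded);
        ByteBuffer newBuf = ByteBuffer.allocate((int) nextCapacity);
        curBuf.flip();      // switch the old buffer to read mode
        newBuf.put(curBuf); // copy existing bytes; leaves newBuf positioned for appending
        curBuf = newBuf;
    }

    void write(byte[] data) {
        checkSizeAndGrow(data.length);
        curBuf.put(data);
    }
}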
Use of java.nio.BufferOverflowException in project hbase by apache.
The class CellBlockBuilder, method encodeCellsTo.
private void encodeCellsTo(OutputStream os, CellScanner cellScanner, Codec codec, CompressionCodec compressor) throws IOException {
  Compressor poolCompressor = null;
  try {
    if (compressor != null) {
      if (compressor instanceof Configurable) {
        ((Configurable) compressor).setConf(this.conf);
      }
      poolCompressor = CodecPool.getCompressor(compressor);
      os = compressor.createOutputStream(os, poolCompressor);
    }
    Codec.Encoder encoder = codec.getEncoder(os);
    while (cellScanner.advance()) {
      encoder.write(cellScanner.current());
    }
    encoder.flush();
  } catch (BufferOverflowException | IndexOutOfBoundsException e) {
    throw new DoNotRetryIOException(e);
  } finally {
    os.close();
    if (poolCompressor != null) {
      CodecPool.returnCompressor(poolCompressor);
    }
  }
}
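The interesting part here is the catch clause: a BufferOverflowException or IndexOutOfBoundsException raised while encoding is rewrapped as a DoNotRetryIOException so the RPC layer fails fast instead of retrying. A minimal sketch of that wrap-and-rethrow pattern, using a plain IOException in place of the HBase-specific DoNotRetryIOException, could look like this:

import java.io.IOException;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;

public final class EncodeIntoBuffer {
    // Encodes the payload into a caller-supplied buffer. Buffer overflows and index
    // errors are rewrapped as IOException so callers see a single, checked failure
    // type instead of an unchecked runtime exception.
    static void encode(ByteBuffer out, byte[] payload) throws IOException {
        try {
            out.putInt(payload.length);
            out.put(payload);
        } catch (BufferOverflowException | IndexOutOfBoundsException e) {
            throw new IOException("Payload does not fit in the supplied buffer", e);
        }
    }

    public static void main(String[] args) {
        ByteBuffer small = ByteBuffer.allocate(8);
        try {
            encode(small, new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }); // 4 + 8 bytes > capacity 8
        } catch (IOException expected) {
            System.out.println("wrapped: " + expected.getCause());
        }
    }
}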
Use of java.nio.BufferOverflowException in project jmonkeyengine by jMonkeyEngine.
The class FieldSerializer, method writeObject.
public void writeObject(ByteBuffer buffer, Object object) throws IOException {
    // Add the null/non-null marker
    buffer.put((byte) (object != null ? 0x1 : 0x0));
    if (object == null) {
        // Nothing left to do
        return;
    }
    SavedField[] fields = savedFields.get(object.getClass());
    if (fields == null)
        throw new IOException("The " + object.getClass() + " is not registered" + " in the serializer!");
    for (SavedField savedField : fields) {
        Object val = null;
        try {
            val = savedField.field.get(object);
        } catch (IllegalAccessException e) {
            throw new SerializerException("Unable to access field:" + savedField.field + " on:" + object, e);
        }
        Serializer serializer = savedField.serializer;
        if (log.isLoggable(Level.FINER)) {
            log.log(Level.FINER, "Writing field:{0} using serializer:{1}", new Object[] { savedField.field, serializer });
        }
        try {
            if (serializer != null) {
                serializer.writeObject(buffer, val);
            } else {
                Serializer.writeClassAndObject(buffer, val);
            }
        } catch (BufferOverflowException boe) {
            throw boe;
        } catch (Exception e) {
            throw new SerializerException("Error writing object for field:" + savedField.field, e);
        }
    }
}
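Note that writeObject rethrows BufferOverflowException unchanged while wrapping every other failure in a SerializerException, which leaves the overflow visible to whichever code owns the ByteBuffer. A common way to exploit that is a caller-side retry with a larger buffer, sketched below; the Writer interface, the 256-byte starting capacity, and the 1 MiB cap are assumptions for illustration, not jMonkeyEngine's actual handling.

import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public final class RetryWithLargerBuffer {
    interface Writer {
        void writeObject(ByteBuffer buffer, Object object) throws Exception;
    }

    // Serializes the object, doubling the scratch buffer whenever the writer
    // signals BufferOverflowException, up to a fixed size limit.
    static ByteBuffer serialize(Writer writer, Object object) throws Exception {
        int capacity = 256;
        final int maxCapacity = 1 << 20; // arbitrary 1 MiB cap for this sketch
        while (true) {
            ByteBuffer buffer = ByteBuffer.allocate(capacity);
            try {
                writer.writeObject(buffer, object);
                buffer.flip();
                return buffer;
            } catch (BufferOverflowException overflow) {
                if (capacity >= maxCapacity) {
                    throw overflow; // give up rather than grow without bound
                }
                capacity *= 2;      // retry with a larger buffer
            }
        }
    }

    public static void main(String[] args) throws Exception {
        // Toy writer: encodes the object's toString() as UTF-8 bytes.
        Writer utf8Writer = (buffer, object) ->
                buffer.put(object.toString().getBytes(StandardCharsets.UTF_8));
        ByteBuffer result = serialize(utf8Writer, "x".repeat(1000));
        System.out.println("encoded " + result.remaining() + " bytes");
    }
}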
Use of java.nio.BufferOverflowException in project hs4j by killme2008.
The class AbstractIoBuffer, method putString.
/**
 * {@inheritDoc}
 */
@Override
public IoBuffer putString(CharSequence val, int fieldSize, CharsetEncoder encoder) throws CharacterCodingException {
    checkFieldSize(fieldSize);
    if (fieldSize == 0) {
        return this;
    }
    autoExpand(fieldSize);
    boolean utf16 = encoder.charset().name().startsWith("UTF-16");
    if (utf16 && (fieldSize & 1) != 0) {
        throw new IllegalArgumentException("fieldSize is not even.");
    }
    int oldLimit = limit();
    int end = position() + fieldSize;
    if (oldLimit < end) {
        throw new BufferOverflowException();
    }
    if (val.length() == 0) {
        if (!utf16) {
            put((byte) 0x00);
        } else {
            put((byte) 0x00);
            put((byte) 0x00);
        }
        position(end);
        return this;
    }
    CharBuffer in = CharBuffer.wrap(val);
    limit(end);
    encoder.reset();
    for (;;) {
        CoderResult cr;
        if (in.hasRemaining()) {
            cr = encoder.encode(in, buf(), true);
        } else {
            cr = encoder.flush(buf());
        }
        if (cr.isUnderflow() || cr.isOverflow()) {
            break;
        }
        cr.throwException();
    }
    limit(oldLimit);
    if (position() < end) {
        if (!utf16) {
            put((byte) 0x00);
        } else {
            put((byte) 0x00);
            put((byte) 0x00);
        }
    }
    position(end);
    return this;
}
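The same guard can be written against a plain java.nio.ByteBuffer. The sketch below is a simplified, hypothetical putFixedString helper that keeps only the core idea of putString: confine the encoder to a fixed-size field by temporarily lowering the limit, NUL-pad the unused tail, and throw BufferOverflowException up front when the field would cross the limit. It omits the IoBuffer autoExpand and UTF-16 handling shown above.

import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.nio.charset.StandardCharsets;

public final class FixedFieldWriter {
    // Encodes val into a fixed-size field of the buffer, padding the rest with
    // NUL bytes, and fails fast if the field would cross the buffer's limit.
    static void putFixedString(ByteBuffer buf, CharSequence val, int fieldSize,
                               CharsetEncoder encoder) throws CharacterCodingException {
        int end = buf.position() + fieldSize;
        if (end > buf.limit()) {
            // Same guard as in putString above: the field must fit before the limit.
            throw new BufferOverflowException();
        }
        int oldLimit = buf.limit();
        buf.limit(end); // confine the encoder to the field
        encoder.reset();
        CharBuffer in = CharBuffer.wrap(val);
        CoderResult cr = encoder.encode(in, buf, true);
        if (cr.isError()) {
            cr.throwException();
        }
        encoder.flush(buf);
        while (buf.position() < end) {
            buf.put((byte) 0x00); // NUL-pad the unused tail of the field
        }
        buf.limit(oldLimit);
    }

    public static void main(String[] args) throws Exception {
        ByteBuffer buf = ByteBuffer.allocate(16);
        putFixedString(buf, "abc", 8, StandardCharsets.UTF_8.newEncoder());
        System.out.println("position after field: " + buf.position()); // 8
    }
}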
Use of java.nio.BufferOverflowException in project robovm by robovm.
The class FileChannelTest, method test_map_Private.
/**
 * @tests java.nio.channels.FileChannel#map(MapMode,long,long)
 */
public void test_map_Private() throws IOException {
    this.writeDataToFile(fileOfReadWriteFileChannel);
    MappedByteBuffer mapped = readWriteFileChannel.map(MapMode.PRIVATE, 0, CONTENT_LENGTH);
    assertEquals(CONTENT_LENGTH, mapped.limit());
    // test copy on write if private
    ByteBuffer returnByPut = mapped.put(TEST_BYTES);
    assertSame(returnByPut, mapped);
    ByteBuffer checkBuffer = ByteBuffer.allocate(CONTENT_LENGTH);
    mapped.force();
    readWriteFileChannel.read(checkBuffer);
    assertEquals(CONTENT, new String(checkBuffer.array(), "iso8859-1"));
    // test overflow
    try {
        mapped.put(("test" + CONTENT).getBytes("iso8859-1"));
        fail("should throw BufferOverflowException.");
    } catch (BufferOverflowException ex) {
        // expected;
    }
}
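The contract the test exercises in its overflow branch is the general ByteBuffer one: a bulk put larger than remaining() transfers nothing and throws BufferOverflowException. A minimal standalone check of that behavior, assuming an ordinary heap buffer rather than a private file mapping, might look like this:

import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;

public class BufferOverflowDemo {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(4); // fixed capacity, like a mapped region
        byte[] tooLarge = new byte[5];              // one byte more than remaining()
        try {
            buffer.put(tooLarge);
            throw new AssertionError("expected BufferOverflowException");
        } catch (BufferOverflowException expected) {
            // No bytes were transferred: position is unchanged.
            System.out.println("put of " + tooLarge.length + " bytes into remaining="
                    + buffer.remaining() + " overflowed as expected");
        }
    }
}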