Use of org.haiku.pkg.HpkException in the project haikudepotserver (by haiku):
In the class AttributeWriter, the method write:
/**
 * <p>Writes a human-readable rendering of the supplied attribute — and, recursively, its
 * child attributes — indented by the supplied number of spaces.</p>
 *
 * @param indent the number of leading spaces to emit before the attribute; must be &gt;= 0.
 * @param context the context from which the attribute's value can be derived.
 * @param attribute the attribute to render.
 * @throws IOException if the output cannot be written or the attribute's value cannot be obtained.
 */
private void write(int indent, AttributeContext context, Attribute attribute) throws IOException {
    Preconditions.checkNotNull(context);
    Preconditions.checkNotNull(attribute);
    Preconditions.checkState(indent >= 0);

    for (int i = 0; i < indent; i++) {
        write(' ');
    }

    write(attribute.getAttributeId().getName());
    write(" : ");
    write(attribute.getAttributeType().name());
    write(" : ");

    try {
        switch (attribute.getAttributeType()) {
            case RAW:
                ByteSource byteSource = (ByteSource) attribute.getValue(context);
                write(String.format("%d bytes", byteSource.size()));
                // for heap-backed raw data, also show where in the heap the payload lives.
                if (byteSource instanceof RawHeapAttribute.HeapByteSource) {
                    HeapCoordinates coordinates = ((RawHeapAttribute.HeapByteSource) byteSource).getHeapCoordinates();
                    write(String.format(" {off:%d, len:%d}", coordinates.getOffset(), coordinates.getLength()));
                }
                break;
            case INT: // fall through — both kinds render via toString() of the value.
            case STRING:
                write(attribute.getValue(context).toString());
                break;
            default:
                write("???");
                break;
        }
    } catch (HpkException e) {
        throw new IOException("unable to process an attribute '" + attribute + "'", e);
    }

    write("\n");

    // recurse into any child attributes, indenting a further two spaces.
    if (attribute.hasChildAttributes()) {
        for (Attribute childAttribute : attribute.getChildAttributes()) {
            write(indent + 2, context, childAttribute);
        }
    }
}
Use of org.haiku.pkg.HpkException in the project haikudepotserver (by haiku):
In the class HpkHeapReader, the method readHeapChunk:
/**
 * <p>This will read a chunk of the heap into the supplied buffer. It is assumed that the buffer will be
 * of the correct length for the uncompressed heap chunk size.</p>
 *
 * @param index the zero-based index of the heap chunk to read.
 * @param buffer the buffer into which the uncompressed chunk data is written.
 * @throws IOException if the underlying file cannot be read or decompressed.
 */
private void readHeapChunk(int index, byte[] buffer) throws IOException {

    randomAccessFile.seek(getHeapChunkAbsoluteFileOffset(index));
    int chunkUncompressedLength = getHeapChunkUncompressedLength(index);

    if (isHeapChunkCompressed(index) || HeapCompression.NONE == compression) {

        switch (compression) {

            case NONE:
                // a chunk cannot be flagged as compressed when no compression scheme is configured.
                throw new IllegalStateException();

            case ZLIB: {
                byte[] deflatedBuffer = new byte[getHeapChunkCompressedLength(index)];
                readFully(deflatedBuffer);

                Inflater inflater = new Inflater();

                try {
                    inflater.setInput(deflatedBuffer);

                    int read;

                    if (chunkUncompressedLength != (read = inflater.inflate(buffer))) {
                        // the final chunk may legitimately inflate to less than the nominal
                        // chunk size; any earlier chunk must inflate to exactly that size.
                        if (index < getHeapChunkCount() - 1) {
                            String message = String.format(
                                    "a compressed heap chunk inflated to %d bytes; was expecting %d",
                                    read, chunkUncompressedLength);

                            if (inflater.needsInput()) {
                                message += "; needs input";
                            }

                            if (inflater.needsDictionary()) {
                                message += "; needs dictionary";
                            }

                            throw new HpkException(message);
                        }
                    }

                    if (!inflater.finished()) {
                        throw new HpkException(String.format(
                                "incomplete inflation of input data while reading chunk %d", index));
                    }
                } catch (DataFormatException dfe) {
                    throw new HpkException("unable to inflate (decompress) heap chunk " + index, dfe);
                } finally {
                    // release the native zlib memory held by the inflater; without this it
                    // would only be reclaimed at finalization, leaking under heavy use.
                    inflater.end();
                }
            }
            break;

            default:
                throw new IllegalStateException("unsupported compression; " + compression);
        }
    } else {
        int read;

        if (chunkUncompressedLength != (read = randomAccessFile.read(buffer, 0, chunkUncompressedLength))) {
            throw new HpkException(String.format(
                    "problem reading chunk %d of heap; only read %d of %d bytes",
                    index, read, buffer.length));
        }
    }
}
Aggregations