Use of org.haiku.pkg.heap.HeapCoordinates in project haikudepotserver by haiku.
The class AttributeWriter, method write.
private void write(int indent, AttributeContext context, Attribute attribute) throws IOException {
    Preconditions.checkNotNull(context);
    Preconditions.checkNotNull(attribute);
    Preconditions.checkState(indent >= 0);

    for (int i = 0; i < indent; i++) {
        write(' ');
    }

    write(attribute.getAttributeId().getName());
    write(" : ");
    write(attribute.getAttributeType().name());
    write(" : ");

    try {
        switch (attribute.getAttributeType()) {

            case RAW:
                ByteSource byteSource = (ByteSource) attribute.getValue(context);
                write(String.format("%d bytes", byteSource.size()));

                // raw data held in the heap also knows where it lives; surface the
                // offset and length carried by its HeapCoordinates.
                if (byteSource instanceof RawHeapAttribute.HeapByteSource) {
                    HeapCoordinates coordinates = ((RawHeapAttribute.HeapByteSource) byteSource).getHeapCoordinates();
                    write(String.format(" {off:%d, len:%d}", coordinates.getOffset(), coordinates.getLength()));
                }
                break;

            case INT:
                write(attribute.getValue(context).toString());
                break;

            case STRING:
                write(attribute.getValue(context).toString());
                break;

            default:
                write("???");
                break;

        }
    } catch (HpkException e) {
        throw new IOException("unable to process an attribute '" + attribute.toString() + "'", e);
    }

    write("\n");

    if (attribute.hasChildAttributes()) {
        for (Attribute childAttribute : attribute.getChildAttributes()) {
            write(indent + 2, context, childAttribute);
        }
    }
}
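HeapCoordinates itself is a small value holder. The sketch below is inferred only from the calls visible on this page (a two-argument constructor plus getOffset() and getLength()); it is not the project's source, which may carry validation and further members.

// Minimal sketch of HeapCoordinates as used above; the field types and the
// absence of validation are assumptions, not taken from haikudepotserver.
public final class HeapCoordinates {

    private final long offset; // position of the raw data within the heap
    private final long length; // number of bytes of raw data at that position

    public HeapCoordinates(long offset, long length) {
        this.offset = offset;
        this.length = length;
    }

    public long getOffset() {
        return offset;
    }

    public long getLength() {
        return length;
    }
}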
The class AttributeIterator, method readRawHeap.
private Attribute readRawHeap(AttributeId attributeId) {
    BigInteger rawLength = readUnsignedLeb128();
    BigInteger rawOffset = readUnsignedLeb128();

    if (rawLength.compareTo(BigInteger.valueOf(Integer.MAX_VALUE)) > 0) {
        throw new HpkException("the length of the heap data is too large");
    }

    if (rawOffset.compareTo(BigInteger.valueOf(Integer.MAX_VALUE)) > 0) {
        throw new HpkException("the offset of the heap data is too large");
    }

    return new RawHeapAttribute(attributeId, new HeapCoordinates(rawOffset.longValue(), rawLength.longValue()));
}
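readUnsignedLeb128() itself is not shown on this page. As an assumption about what such a method does, the sketch below decodes the conventional unsigned LEB128 layout (seven payload bits per byte, high bit set while more bytes follow); the method name, the in-memory input, and the exact byte order are illustrative rather than the project's implementation.

// Illustrative ULEB128 decode over an in-memory array; the real
// AttributeIterator reads its bytes from the heap instead.
static BigInteger decodeUnsignedLeb128(byte[] data, int start) {
    BigInteger result = BigInteger.ZERO;
    int shift = 0;
    for (int index = start; ; index++) {
        int b = data[index] & 0xff;
        result = result.or(BigInteger.valueOf(b & 0x7f).shiftLeft(shift));
        if ((b & 0x80) == 0) { // high bit clear marks the final byte
            return result;
        }
        shift += 7;
    }
}

The checks against Integer.MAX_VALUE above then guarantee that both values fit when narrowed later, even though they are handed to HeapCoordinates as long values.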
The class AttributeIterator, method readBufferForInt.
private byte[] readBufferForInt(int encoding) {
    ensureValidEncodingForInt(encoding);
    int bytesToRead = 1 << encoding;
    byte[] buffer = new byte[bytesToRead];
    context.getHeapReader().readHeap(buffer, 0, new HeapCoordinates(offset, bytesToRead));
    offset += bytesToRead;
    return buffer;
}
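The width of the integer being read is 1 << encoding, so each increment of the encoding doubles the number of bytes fetched. A throwaway illustration, assuming ensureValidEncodingForInt accepts encodings 0 through 3:

// Prints the byte widths implied by the shift above: encodings 0..3 map to
// 1, 2, 4 and 8 byte integers.
for (int encoding = 0; encoding <= 3; encoding++) {
    System.out.printf("encoding %d -> %d byte(s)%n", encoding, 1 << encoding);
}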
The class AttributeIterator, method readRawInline.
private Attribute readRawInline(AttributeId attributeId) {
    BigInteger length = readUnsignedLeb128();

    if (length.compareTo(BigInteger.valueOf(Integer.MAX_VALUE)) > 0) {
        throw new HpkException("the length of the inline data is too large");
    }

    byte[] buffer = new byte[length.intValue()];
    context.getHeapReader().readHeap(buffer, 0, new HeapCoordinates(offset, length.intValue()));
    offset += length.intValue();

    return new RawInlineAttribute(attributeId, buffer);
}
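Both raw flavours lean on the same call shape: allocate a buffer and have the heap reader fill it from the region a HeapCoordinates describes. readRawInline does the copy eagerly, while readRawHeap only records the coordinates for later. A sketch of that shape, assuming HeapReader names whatever context.getHeapReader() returns and taking the readHeap(byte[], int, HeapCoordinates) signature from the calls above; the helper itself is hypothetical.

// Hypothetical helper showing the recurring read pattern on this page.
static byte[] readRange(HeapReader heapReader, long offset, int length) {
    byte[] buffer = new byte[length];
    heapReader.readHeap(buffer, 0, new HeapCoordinates(offset, length));
    return buffer;
}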
The class HpkStringTable, method readStrings.
// TODO: could avoid the big read into a buffer by reading the heap byte by byte or with a buffer.
private String[] readStrings() {
    String[] result = new String[(int) expectedCount];
    byte[] stringsDataBuffer = new byte[(int) heapLength];

    heapReader.readHeap(stringsDataBuffer, 0, new HeapCoordinates(heapOffset, heapLength));

    // now work through the data and parse out the individual strings.
    int stringIndex = 0;
    int offset = 0;

    while (offset < stringsDataBuffer.length) {

        if (0 == stringsDataBuffer[offset]) {
            if (stringIndex != result.length) {
                throw new HpkException(String.format(
                        "expected to read %d package strings from the strings table, but actually found %d",
                        expectedCount, stringIndex));
            }
            return result;
        }

        if (stringIndex >= expectedCount) {
            throw new HpkException("have already read all of the strings from the string table, but have not exhausted the string table data");
        }

        int start = offset;

        while (offset < stringsDataBuffer.length && 0 != stringsDataBuffer[offset]) {
            offset++;
        }

        if (offset < stringsDataBuffer.length) {
            result[stringIndex] = new String(stringsDataBuffer, start, offset - start, Charsets.UTF_8);
            stringIndex++;
            offset++;
        }
    }

    throw new HpkException("expected to find the null-terminator for the list of strings, but was not able to find one; did read " + stringIndex + " of " + expectedCount);
}
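The loop above expects the strings region to contain expectedCount null-terminated UTF-8 strings followed by one extra 0 byte that closes the table. Contrived data showing that layout; the values are made up for illustration only.

// Two null-terminated strings plus the closing 0 byte; with expectedCount == 2,
// readStrings() would return { "alpha", "beta" }.
byte[] stringsDataBuffer = "alpha\0beta\0\0".getBytes(java.nio.charset.StandardCharsets.UTF_8);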