
Example 1 with DataReader

Use of info.ata4.io.DataReader in project disunity by ata4.

The class SerializedFileReader, method readMetadata.

private void readMetadata(DataReader in) throws IOException {
    SerializedFileMetadata metadata = serialized.metadata();
    SerializedFileHeader header = serialized.header();
    DataBlock metadataBlock = serialized.metadataBlock();
    metadataBlock.markBegin(in);
    metadata.version(header.version());
    in.readStruct(metadata);
    metadataBlock.markEnd(in);
    L.log(Level.FINER, "metadataBlock: {0}", metadataBlock);
}
Also used : DataBlock(info.ata4.util.io.DataBlock)
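
The readStruct(metadata) call above presumably hands the DataReader to the struct's own read method; BundleHeader in Example 4 below shows that shape explicitly. A minimal sketch of such a struct, written here only for illustration (the class, its fields, and whether disunity routes this through a dedicated Struct interface are assumptions, not taken from these examples):

import java.io.IOException;
import info.ata4.io.DataReader;

// Hypothetical struct mirroring the read(DataReader) shape seen in
// BundleHeader (Example 4); field names and types are made up.
public class SampleHeader {

    private int version;
    private long dataOffset;

    public void read(DataReader in) throws IOException {
        // fields are read sequentially, in declaration order
        version = in.readInt();
        dataOffset = in.readLong();
    }

    public int version() {
        return version;
    }

    public long dataOffset() {
        return dataOffset;
    }
}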

Example 2 with DataReader

Use of info.ata4.io.DataReader in project disunity by ata4.

The class SerializedFileReader, method readObjects.

private void readObjects(DataReader in) throws IOException {
    long ofsMin = Long.MAX_VALUE;
    long ofsMax = Long.MIN_VALUE;
    SerializedFileHeader header = serialized.header();
    SerializedFileMetadata metadata = serialized.metadata();
    Map<Long, ObjectInfo> objectInfoMap = metadata.objectInfoTable().infoMap();
    Map<Integer, TypeRoot<Type>> typeTreeMap = metadata.typeTree().typeMap();
    List<SerializedObjectData> objectData = serialized.objectData();
    for (Map.Entry<Long, ObjectInfo> infoEntry : objectInfoMap.entrySet()) {
        ObjectInfo info = infoEntry.getValue();
        long id = infoEntry.getKey();
        long ofs = header.dataOffset() + info.offset();
        ofsMin = Math.min(ofsMin, ofs);
        ofsMax = Math.max(ofsMax, ofs + info.length());
        SerializedObjectData object = new SerializedObjectData(id);
        object.info(info);
        // create and read object data buffer
        ByteBuffer buf = ByteBufferUtils.allocate((int) info.length());
        in.position(ofs);
        in.readBuffer(buf);
        object.buffer(buf);
        // get type tree if possible
        TypeRoot<Type> typeRoot = typeTreeMap.get(info.typeID());
        if (typeRoot != null) {
            object.typeTree(typeRoot.nodes());
        }
        objectData.add(object);
    }
    DataBlock objectDataBlock = serialized.objectDataBlock();
    objectDataBlock.offset(ofsMin);
    objectDataBlock.endOffset(ofsMax);
    L.log(Level.FINER, "objectDataBlock: {0}", objectDataBlock);
}
Also used : ByteBuffer(java.nio.ByteBuffer) ObjectInfo(info.ata4.junity.serialize.objectinfo.ObjectInfo) DataBlock(info.ata4.util.io.DataBlock) TypeRoot(info.ata4.junity.serialize.typetree.TypeRoot) Map(java.util.Map)
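
Each iteration of the loop above reduces to a seek-and-fill on the DataReader: compute the absolute offset, allocate a buffer of the object's length, position the reader, and fill the buffer. A standalone sketch of just that step, using only the calls that already appear above (the helper name is hypothetical):

// Hypothetical helper equivalent to the buffer-reading lines in readObjects();
// like the original, it assumes the object length fits into an int.
private ByteBuffer readObjectBuffer(DataReader in, long absoluteOffset, long length) throws IOException {
    ByteBuffer buf = ByteBufferUtils.allocate((int) length);
    in.position(absoluteOffset);
    in.readBuffer(buf);
    return buf;
}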

Example 3 with DataReader

Use of info.ata4.io.DataReader in project disunity by ata4.

The class SerializedFileReader, method readHeader.

private void readHeader(DataReader in) throws IOException {
    DataBlock headerBlock = serialized.headerBlock();
    headerBlock.markBegin(in);
    in.readStruct(serialized.header());
    headerBlock.markEnd(in);
    L.log(Level.FINER, "headerBlock: {0}", headerBlock);
}
Also used : DataBlock(info.ata4.util.io.DataBlock)
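
The markBegin/markEnd pair brackets the byte range the header occupies, which is what the FINER log line prints. A rough stand-in for what DataBlock appears to record, shown only for illustration (this is not the actual info.ata4.util.io.DataBlock class, which may track more than this):

import java.io.IOException;
import info.ata4.io.DataReader;

// Illustrative approximation of a DataBlock-style region marker.
class RegionMarker {

    private long offset;
    private long endOffset;

    void markBegin(DataReader in) throws IOException {
        offset = in.position();
    }

    void markEnd(DataReader in) throws IOException {
        endOffset = in.position();
    }

    long length() {
        return endOffset - offset;
    }

    @Override
    public String toString() {
        return "[" + offset + ", " + endOffset + ")";
    }
}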

Example 4 with DataReader

Use of info.ata4.io.DataReader in project disunity by ata4.

The class BundleHeader, method read.

@Override
public void read(DataReader in) throws IOException {
    signature = in.readStringNull();
    streamVersion = in.readInt();
    unityVersion = new UnityVersion(in.readStringNull());
    unityRevision = new UnityVersion(in.readStringNull());
    if (signature.equals(SIGNATURE_FS)) {
        // FS signature
        // Expect streamVersion == 6
        completeFileSize = in.readLong();
        compressedDataHeaderSize = in.readInt();
        dataHeaderSize = in.readInt();
        flags = in.readInt();
        headerSize = (int) in.position();
        if ((flags & 0x80) == 0) {
            // The data header is part of the bundle header
            headerSize += compressedDataHeaderSize;
        }
    // else it's at the end of the file
    } else {
        // Web or Raw signature
        minimumStreamedBytes = in.readUnsignedInt();
        headerSize = in.readInt();
        numberOfLevelsToDownload = in.readInt();
        int numberOfLevels = in.readInt();
        levelByteEnd.clear();
        for (int i = 0; i < numberOfLevels; i++) {
            levelByteEnd.add(new ImmutablePair<>(in.readUnsignedInt(), in.readUnsignedInt()));
        }
        if (streamVersion >= 2) {
            completeFileSize = in.readUnsignedInt();
        }
        if (streamVersion >= 3) {
            dataHeaderSize = in.readUnsignedInt();
        }
        in.readByte();
    }
}
Also used : ImmutablePair(org.apache.commons.lang3.tuple.ImmutablePair) UnityVersion(info.ata4.junity.UnityVersion)
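
A short usage sketch for reading the header on its own: load the bundle into a byte buffer, wrap it in a DataReader via DataReaders.forByteBuffer (which appears in Example 5), and call read(). The method, the direct BundleHeader construction, and the file handling below are illustrative assumptions; only the accessors shown in these examples are relied on.

// Hypothetical driver; reading the whole bundle into memory and constructing
// BundleHeader directly are assumptions for illustration only.
static void printHeaderInfo(Path bundleFile) throws IOException {
    byte[] data = Files.readAllBytes(bundleFile);
    DataReader in = DataReaders.forByteBuffer(ByteBuffer.wrap(data));
    BundleHeader header = new BundleHeader();
    header.read(in);
    // dataHeaderAtEndOfFile() reflects the 0x80 flag bit parsed in read()
    System.out.println("valid signature: " + header.hasValidSignature());
    System.out.println("data header at end of file: " + header.dataHeaderAtEndOfFile());
}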

Example 5 with DataReader

Use of info.ata4.io.DataReader in project disunity by ata4.

The class BundleReader, method read.

public Bundle read() throws BundleException, IOException {
    bundle = new Bundle();
    in.position(0);
    BundleHeader header = bundle.header();
    in.readStruct(header);
    // check signature
    if (!header.hasValidSignature()) {
        throw new BundleException("Invalid signature");
    }
    List<BundleEntryInfo> entryInfos = bundle.entryInfos();
    if (header.compressedDataHeaderSize() > 0) {
        if (header.dataHeaderAtEndOfFile()) {
            in.position(header.completeFileSize() - header.compressedDataHeaderSize());
        }
        // build a bounded input stream over the data header, which may or
        // may not be compressed depending on the scheme below
        InputStream headerIn = new BoundedInputStream(in.stream(), header.compressedDataHeaderSize());
        DataReader inData;
        switch (header.dataHeaderCompressionScheme()) {
            default:
            case 0:
                // Not compressed
                inData = DataReaders.forInputStream(headerIn);
                break;
            case 1:
                // LZMA
                inData = DataReaders.forInputStream(new CountingInputStream(new LzmaInputStream(headerIn)));
                break;
            case 3:
                // LZ4
                byte[] compressed = new byte[header.compressedDataHeaderSize()];
                byte[] decompressed = new byte[(int) header.dataHeaderSize()];
                headerIn.read(compressed);
                LZ4JavaSafeFastDecompressor.INSTANCE.decompress(compressed, decompressed);
                inData = DataReaders.forByteBuffer(ByteBuffer.wrap(decompressed));
                break;
        }
        // Block info: not captured for now
        {
            // 16 bytes unknown
            byte[] unknown = new byte[16];
            inData.readBytes(unknown);
            int storageBlocks = inData.readInt();
            for (int i = 0; i < storageBlocks; ++i) {
                inData.readUnsignedInt();
                inData.readUnsignedInt();
                inData.readUnsignedShort();
            }
        }
        int files = inData.readInt();
        for (int i = 0; i < files; i++) {
            BundleEntryInfo entryInfo = new BundleEntryInfoFS();
            inData.readStruct(entryInfo);
            entryInfos.add(entryInfo);
        }
    } else {
        // raw or web header
        long dataHeaderSize = header.dataHeaderSize();
        if (dataHeaderSize == 0) {
            // old stream versions don't store the data header size, so use a large
            // fixed number instead
            dataHeaderSize = 4096;
        }
        InputStream is = dataInputStream(0, dataHeaderSize);
        DataReader inData = DataReaders.forInputStream(is);
        int files = inData.readInt();
        for (int i = 0; i < files; i++) {
            BundleEntryInfo entryInfo = new BundleEntryInfo();
            inData.readStruct(entryInfo);
            entryInfos.add(entryInfo);
        }
    }
    // sort entries by offset so that they're in the order in which they
    // appear in the file, which is convenient for compressed bundles
    entryInfos.sort((a, b) -> Long.compare(a.offset(), b.offset()));
    List<BundleEntry> entries = bundle.entries();
    entryInfos.forEach(entryInfo -> {
        entries.add(new BundleInternalEntry(entryInfo, this::inputStreamForEntry));
    });
    return bundle;
}
Also used : LzmaInputStream(net.contrapunctus.lzma.LzmaInputStream) BoundedInputStream(org.apache.commons.io.input.BoundedInputStream) CountingInputStream(org.apache.commons.io.input.CountingInputStream) InputStream(java.io.InputStream) DataReader(info.ata4.io.DataReader)
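
Putting Example 5 to use end to end: construct a BundleReader over a bundle file, call read(), then walk the entry infos, which read() has already sorted by offset. The constructor signature and the file name below are assumptions that the snippet above does not show.

// Hypothetical driver; whether BundleReader takes a Path (rather than, say,
// a DataReader) is an assumption.
void dumpEntries() throws BundleException, IOException {
    BundleReader reader = new BundleReader(Paths.get("example.unity3d"));
    Bundle bundle = reader.read();
    // entryInfos() comes back sorted by offset, i.e. in file order
    bundle.entryInfos().forEach(System.out::println);
}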

Aggregations

UnityVersion (info.ata4.junity.UnityVersion) 3
DataBlock (info.ata4.util.io.DataBlock) 3
DataReader (info.ata4.io.DataReader) 1
UnityHash128 (info.ata4.junity.UnityHash128) 1
FileIdentifierTable (info.ata4.junity.serialize.fileidentifier.FileIdentifierTable) 1
FileIdentifierV1 (info.ata4.junity.serialize.fileidentifier.FileIdentifierV1) 1
FileIdentifierV2 (info.ata4.junity.serialize.fileidentifier.FileIdentifierV2) 1
ObjectIdentifierTable (info.ata4.junity.serialize.objectidentifier.ObjectIdentifierTable) 1
ObjectInfo (info.ata4.junity.serialize.objectinfo.ObjectInfo) 1
ObjectInfoTableV1 (info.ata4.junity.serialize.objectinfo.ObjectInfoTableV1) 1
ObjectInfoTableV2 (info.ata4.junity.serialize.objectinfo.ObjectInfoTableV2) 1
ObjectInfoV3 (info.ata4.junity.serialize.objectinfo.ObjectInfoV3) 1
TypeRoot (info.ata4.junity.serialize.typetree.TypeRoot) 1
TypeTreeV1 (info.ata4.junity.serialize.typetree.TypeTreeV1) 1
TypeTreeV2 (info.ata4.junity.serialize.typetree.TypeTreeV2) 1
TypeTreeV3 (info.ata4.junity.serialize.typetree.TypeTreeV3) 1
TypeV2 (info.ata4.junity.serialize.typetree.TypeV2) 1
Node (info.ata4.util.collection.Node) 1
InputStream (java.io.InputStream) 1
ByteBuffer (java.nio.ByteBuffer) 1