Package info.ata4.io

Examples of info.ata4.io.DataOutputWriter
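All snippets on this page come from code that reads and writes binary Unity asset data. They share one basic pattern: wrap a little-endian ByteBuffer (or an OutputStream) in a DataOutputWriter via newWriter() and write primitives, strings and structs into it. The sketch below illustrates that pattern using only calls that appear in the snippets further down (newWriter, writeInt, writeStringNull, align); the class name, buffer size and written values are placeholders, not part of the original code.

        import info.ata4.io.DataOutputWriter;

        import java.io.IOException;
        import java.nio.ByteBuffer;
        import java.nio.ByteOrder;

        public class DataOutputWriterSketch {
            public static void main(String[] args) throws IOException {
                // back the writer with a small little-endian buffer (size is arbitrary)
                ByteBuffer bb = ByteBuffer.allocateDirect(64);
                bb.order(ByteOrder.LITTLE_ENDIAN);

                // wrap the buffer and write a few primitives
                DataOutputWriter out = DataOutputWriter.newWriter(bb);
                out.writeInt(9);                 // 32-bit integer
                out.writeStringNull("example");  // null-terminated string
                out.align(16);                   // pad up to a 16-byte boundary

                // the buffer position advances with the writer,
                // so flipping it yields the written block
                bb.flip();
                System.out.println("bytes written: " + bb.limit());
            }
        }

The first example below reads a mesh vertex buffer channel by channel through DataInputReader, the reading counterpart of DataOutputWriter; the remaining snippets show the writer itself in use.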


        // despite the name, the dataSize field holds the raw vertex bytes
        ByteBuffer vertexBuffer = mesh.vertexData.dataSize;
        vertexBuffer.order(ByteOrder.LITTLE_ENDIAN);
       
        L.log(Level.FINE, "Vertex buffer size: {0}", vertexBuffer.capacity());

        DataInputReader in = DataInputReader.newReader(vertexBuffer);

        List<StreamInfo> streams = mesh.vertexData.streams;
        List<ChannelInfo> channels = mesh.vertexData.channels;
       
        for (StreamInfo stream : streams) {
            // skip empty channels
            if (stream.channelMask.longValue() == 0) {
                continue;
            }

            vertexBuffer.position(stream.offset.intValue());

            // read vertex data from each vertex and channel
            for (int i = 0; i < mesh.vertexData.vertexCount; i++) {
                for (int j = 0; j < CHANNEL_COUNT; j++) {
                    // skip unselected channels
                    if ((stream.channelMask.longValue() & 1 << j) == 0) {
                        continue;
                    }
                   
                    boolean half = false;
                    ChannelInfo channel = null;

                    // channels may not be available in older versions
                    if (!channels.isEmpty()) {
                        channel = channels.get(j);
                        half = channel.format == 1;
                    }

                    switch (j) {
                        case CHANNEL_VERTS:
                            Vector3f v = new Vector3f();
                            v.setHalf(half);
                            v.read(in);
                            vertices.add(v);
                            break;

                        case CHANNEL_NORMALS:
                            Vector3f vn = new Vector3f();
                            vn.setHalf(half);
                            vn.read(in);
                            normals.add(vn);
                            if (half && channel != null && channel.dimension == 4) {
                                in.skipBytes(2); // padding?
                            }
                            break;

                        case CHANNEL_COLORS:
                            Color32 c = new Color32();
                            c.read(in);
                            colors.add(c);
                            break;

                        case CHANNEL_UV1:
                        case CHANNEL_UV2:
                            Vector2f vt = new Vector2f();
                            vt.setHalf(half);
                            vt.read(in);
                            if (j == CHANNEL_UV1) {
                                uv1.add(vt);
                            } else {
                                uv2.add(vt);
                            }
                            break;

                        case CHANNEL_TANGENTS:
                            Vector4f t = new Vector4f();
                            t.setHalf(half);
                            t.read(in);
                            tangents.add(t);
                            break;
                    }
                }
               
                in.align(stream.stride.intValue());
            }
        }
    }
View Full Code Here



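Reading each sub-mesh's index buffer, again through DataInputReader: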
    private void readIndexBuffer() throws IOException {
        mesh.indexBuffer.order(ByteOrder.LITTLE_ENDIAN);
        DataInputReader in = DataInputReader.newReader(mesh.indexBuffer);

        for (SubMesh subMesh : mesh.subMeshes) {
            List<Integer> subMeshIndices = new ArrayList<>();
            List<Integer> subMeshTriangles = new ArrayList<>();
           
            in.position(subMesh.firstByte.longValue());
            for (long j = 0; j < subMesh.indexCount.longValue(); j++) {
                subMeshIndices.add(in.readUnsignedShort());
            }
           
            // topology/isTriStrip == 0 means a plain triangle list;
            // otherwise the indices form triangle strips
            if (subMesh.topology.longValue() == 0) {
                // use indices as is
View Full Code Here

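Writing a TGA header into a ByteBuffer with DataOutputWriter and appending the image data behind it: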
                if (tgaSaveMipMaps || j == 0) {
                    ByteBuffer bbTga = ByteBuffer.allocateDirect(TGAHeader.SIZE + imageSize);
                    bbTga.order(ByteOrder.LITTLE_ENDIAN);

                    // write TGA header
                    DataOutputWriter out = DataOutputWriter.newWriter(bbTga);
                    header.write(out);

                    // write image data
                    bb.limit(bb.position() + imageSize);
                    bbTga.put(bb);
View Full Code Here

       
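The same pattern for a DDS texture: the header is written through DataOutputWriter, then the texture's image buffer is appended: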
        ByteBuffer bbTex = ByteBuffer.allocateDirect(DDSHeader.SIZE + tex.imageBuffer.capacity());
        bbTex.order(ByteOrder.LITTLE_ENDIAN);
       
        // write header
        DataOutputWriter out = DataOutputWriter.newWriter(bbTex);
        header.write(out);
       
        // write data
        bbTex.put(tex.imageBuffer);
       
View Full Code Here

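Writing a struct database file: a DataOutputWriter wraps a BufferedOutputStream and writes the version, the field node table, the version string table and the mapping data: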
        L.info("Saving struct database");
       
        // write database file
        File dbFile = new File(FILENAME);
        try (BufferedOutputStream bos = new BufferedOutputStream(FileUtils.openOutputStream(dbFile))) {
            DataOutputWriter out = DataOutputWriter.newWriter(bos);
           
            // write header
            out.writeInt(VERSION);

            // write field node table
            Set<TypeField> fieldNodes = new HashSet<>(ftm.values());
            Map<TypeField, Integer> fieldNodeMap = new HashMap<>();

            out.writeInt(fieldNodes.size());

            int index = 0;
            for (TypeField fieldNode : fieldNodes) {
                fieldNodeMap.put(fieldNode, index++);
                fieldNode.write(out);
            }

            // write version string table
            Set<UnityVersion> versions = new HashSet<>();
            Map<UnityVersion, Integer> versionMap = new HashMap<>();

            for (Map.Entry<Pair<Integer, UnityVersion>, TypeField> entry : ftm.entrySet()) {
                versions.add(entry.getKey().getRight());
            }

            out.writeInt(versions.size());

            index = 0;
            for (UnityVersion version : versions) {
                versionMap.put(version, index++);
                out.writeStringNull(version.toString());
            }

            // write mapping data
            out.writeInt(ftm.entrySet().size());

            for (Map.Entry<Pair<Integer, UnityVersion>, TypeField> entry : ftm.entrySet()) {
                index = fieldNodeMap.get(entry.getValue());
                Pair<Integer, UnityVersion> fieldNodeKey = entry.getKey();

                int classID = fieldNodeKey.getLeft();
                UnityVersion version = fieldNodeKey.getRight();

                out.writeInt(index);
                out.writeInt(classID);
                out.writeInt(versionMap.get(version));
            }
        } catch (IOException ex) {
            L.log(Level.SEVERE, "Can't write struct database", ex);
        }
    }
View Full Code Here

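Writing an asset file: the type tree, object table and reference table are serialized through a byte-swapped DataOutputWriter, aligned to 16 bytes, then written to the file together with the header, padding and object data: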
            throw new AssetException("Only format 9 is supported right now");
        }
       
        // build struct info
        ByteArrayOutputStream bosStruct = new ByteArrayOutputStream();
        DataOutputWriter outStruct = DataOutputWriter.newWriter(bosStruct);
        outStruct.setSwap(true);
       
        typeTree.setFormat(header.getFormat());
        typeTree.write(outStruct);
        objTable.write(outStruct);
        refTable.write(outStruct);
       
        // align block to 16 bytes
        int structSize = bosStruct.size();
        int structAlign = 16;
        outStruct.align(structAlign);
       
        ByteBuffer bbStruct = ByteBuffer.wrap(bosStruct.toByteArray());
       
        // calculate padding
        int minSize = 4096;
        int padding = Math.max(0, minSize - AssetHeader.SIZE - bbStruct.limit());
       
        // configure header
        header.setTreeSize(structSize);
        header.setDataOffset(AssetHeader.SIZE + bbStruct.limit() + padding);
        header.setFileSize(header.getDataOffset() + bbData.limit());
       
        // open file
        ByteBuffer bb = ByteBufferUtils.openReadWrite(file, 0, header.getFileSize());
        DataOutputWriter out = DataOutputWriter.newWriter(bb);
       
        // write header
        header.write(out);

        // write struct
        bb.put(bbStruct);
       
        // write padding
        out.skipBytes(padding);
       
        // write data
        bb.put(bbData);
    }
View Full Code Here

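Building an asset bundle: the entry table and entry data are written into one buffer, optionally LZMA-compressed, and prepended with the bundle header: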
        // first entry starts after the header
        int bundleDataOffset = align(bundleHeaderSize);
       
        // allocate data buffer
        ByteBuffer bbData = ByteBuffer.allocateDirect(bundleDataOffset + bundleDataSize);
        // write through a duplicate so the writer's position stays independent
        // of the absolute puts into bbData below
        DataOutputWriter out = DataOutputWriter.newWriter(bbData.duplicate());
       
        // write bundle entries
        out.writeInt(entrySet.size());
        for (Map.Entry<String, ByteBuffer> entry : entrySet) {
            String name = entry.getKey();
            ByteBuffer buffer = entry.getValue();
            buffer.rewind();
           
            out.writeStringNull(name);
            out.writeInt(bundleDataOffset);
            out.writeInt(buffer.limit());
           
            bbData.position(bundleDataOffset);
            bbData.put(buffer);
           
            bundleDataOffset += align(buffer.limit());
        }
       
        bbData.flip();
       
        int dataSizeC = bbData.limit();
        int dataSizeU = dataSizeC;
       
        // compress bundle data if required
        if (isCompressed()) {
            L.log(Level.INFO, "Compressing asset bundle, this may take a while");
            bbData = LzmaBufferUtils.encode(bbData);
            dataSizeC = bbData.limit();
        }
       
        // configure header
        int headerSize = header.getSize();
        int bundleSize = headerSize + dataSizeC;
        header.setCompressed(isCompressed());
        header.setDataOffset(headerSize);
        header.setFileSize1(bundleSize);
        header.setFileSize2(bundleSize);
        header.setUnknown1(assets);
        header.setUnknown2(bundleHeaderSize);
       
        List<Pair<Integer, Integer>> offsetMap = header.getOffsetMap();
        offsetMap.clear();
       
        // TODO: Original asset bundles have ascending lengths for each asset
        // file. The exact calculation of these values is not yet known, so use
        // the maximum size for each entry for now to avoid crashes.
        for (int i = 0; i < assets; i++) {
            offsetMap.add(new ImmutablePair<>(dataSizeC, dataSizeU));
        }
       
        // create bundle buffer
        ByteBuffer bb = ByteBuffer.allocateDirect(bundleSize);
        out = DataOutputWriter.newWriter(bb);
        out.writeStruct(header);
        out.writeBuffer(bbData);
        bb.flip();
       
        // encode bundle buffer
        for (AssetBundleCodec codec : codecsSave) {
            L.log(Level.INFO, "Encoding: {0}", codec.getName());
View Full Code Here


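Unpacking bit-packed integer values by wrapping the packed data in a BitInputStream: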
        if (pbv.numItems == 0 || pbv.bitSize == 0) {
            return new int[]{};
        }
       
        // the values are packed with a variable bit length
        BitInputStream bis = new BitInputStream(new ByteBufferInputStream(pbv.data));
        bis.setBitLength(pbv.bitSize);
       
        int numItems = pbv.numItems.intValue();
        int[] items = new int[numItems];
        for (int i = 0; i < items.length; i++) {
            items[i] = bis.read();
        }
       
        return items;
    }
View Full Code Here

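Extracting objects into standalone .asset files, each with its own object path and type tree: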
                continue;
            }

            String className = ClassID.getNameForID(path.getClassID(), true);
           
            AssetFile subAsset = new AssetFile();
            subAsset.getHeader().setFormat(asset.getHeader().getFormat());
           
            ObjectPath subFieldPath = new ObjectPath();
            subFieldPath.setClassID1(path.getClassID1());
            subFieldPath.setClassID2(path.getClassID2());
            subFieldPath.setLength(path.getLength());
            subFieldPath.setOffset(0);
            subFieldPath.setPathID(1);
            subAsset.getPaths().add(subFieldPath);
           
            TypeTree subTypeTree = subAsset.getTypeTree();
            subTypeTree.setEngineVersion(typeTree.getEngineVersion());
            subTypeTree.setVersion(-2);
            subTypeTree.setFormat(typeTree.getFormat());
            subTypeTree.getFields().put(path.getClassID(), typeTree.getFields().get(path.getClassID()));

            subAsset.setDataBuffer(asset.getPathBuffer(path));
           
            Path subAssetDir = outputDir.resolve(className);
            if (Files.notExists(subAssetDir)) {
                Files.createDirectories(subAssetDir);
            }
           
            // probe asset name
            String subAssetName = getObjectName(asset, path);
            if (subAssetName != null) {
                // remove any chars that could cause troubles on various file systems
                subAssetName = FilenameSanitizer.sanitizeName(subAssetName);
            } else {
                // use numeric names
                subAssetName = String.format("%06d", path.getPathID());
            }
            subAssetName += ".asset";
           
            Path subAssetFile = subAssetDir.resolve(subAssetName);
            if (Files.notExists(subAssetFile)) {
                L.log(Level.INFO, "Writing {0}", subAssetFile);
                subAsset.save(subAssetFile);
            }
        }
    }
View Full Code Here
