/// <summary>
/// Registers an asset entry in the database under its string id, rejecting duplicates.
/// NOTE(review): appears to be the legacy camelCase twin of AddAsset — confirm which
/// one callers still use before removing either.
/// </summary>
/// <param name="assetEntry">The entry to add; keyed by its strID.</param>
/// <exception cref="Exception">Thrown when an entry with the same strID is already registered.</exception>
public void addAsset(AssetEntry assetEntry)
{
    // TryGetValue replaces the ContainsKey + indexer pair: one dictionary lookup
    // instead of two, and it hands us the existing entry for the error message.
    if (assets.TryGetValue(assetEntry.strID, out var existing))
    {
        throw new Exception("Asset key [" + assetEntry + "] already exists in db[" + existing + "]");
    }
    assets[assetEntry.strID] = assetEntry;
}
/// <summary>
/// Registers an asset entry in the database under its string id, rejecting duplicates.
/// </summary>
/// <param name="assetEntry">The entry to add; keyed by its StringId.</param>
/// <exception cref="Exception">Thrown when an entry with the same StringId is already registered.</exception>
public void AddAsset(AssetEntry assetEntry)
{
    // TryGetValue replaces the ContainsKey + indexer pair: one dictionary lookup
    // instead of two, and it hands us the existing entry for the error message.
    if (assets.TryGetValue(assetEntry.StringId, out var existing))
    {
        throw new Exception("Asset key [" + assetEntry + "] already exists in db[" + existing + "]");
    }
    assets[assetEntry.StringId] = assetEntry;
}
// Extracts the first `size` bytes of the given asset via its owning AssetFile
// (no output stream, decompression enabled).
// NOTE(review): `af` is read from ae.File and then immediately compared against
// ae.File again, so this guard can only ever fire if the File getter returns a
// different instance per call — looks vestigial (possibly meant `af != this` in an
// earlier version); confirm intent before removing.
private byte[] extractPart(AssetEntry ae, int size)
{
    AssetFile af = ae.File;
    if (af != ae.File)
    {
        throw new Exception("Incorrect af found for asset[" + ae + "]");
    }
    return(af.extractPart(ae, size, null, false));
}
//static AssetCache cache = AssetCache.inst;

/// <summary>
/// Extracts up to <paramref name="maxBytesToRead"/> bytes of the given entry from this
/// asset file. Because the content may be stored compressed, the returned array can be
/// larger than the requested maximum.
/// </summary>
/// <param name="entry">The entry to read; must belong to this asset file.</param>
/// <param name="maxBytesToRead">The maximum number of bytes to read from the source.</param>
/// <param name="os">Optional stream the extracted bytes are also written to; may be null.</param>
/// <param name="nodecomp">When true, the raw (still-compressed) bytes are returned.</param>
/// <returns>The bytes read; may exceed the requested size when the data is compressed.</returns>
/// <exception cref="Exception">Thrown when the entry belongs to a different asset file.</exception>
public byte[] extractPart(AssetEntry entry, int maxBytesToRead, Stream os, bool nodecomp)
{
    // Guard: refuse entries that belong to another file.
    if (entry.file != this)
        throw new Exception("Extract called on wrong asset file[" + file + "] for asset:" + entry);

    byte[] result = extractPart1(entry, maxBytesToRead, nodecomp);
    os?.Write(result, 0, result.Length);
    return result;
}
/// <summary>Returns the AssetFile that owns the given entry.</summary>
private AssetFile getAssetFile(AssetEntry ae)
{
    return ae.File;
}
/// <summary>Extracts the entire asset, writes it to the given stream, and flushes.</summary>
/// <param name="ae">The entry to extract.</param>
/// <param name="fos">Destination stream; flushed after the write.</param>
private void extract(AssetEntry ae, Stream fos)
{
    byte[] payload = extract(ae);
    fos.Write(payload, 0, payload.Length);
    fos.Flush();
}
/// <summary>
/// Looks up the entry for the given file name and returns its stored hash formatted
/// as a hex string (BitConverter "AA-BB-..." form).
/// </summary>
/// <param name="v">The asset file name to look up.</param>
internal string getHash(string v)
{
    AssetEntry entry = getEntryForFileName(v);
    return BitConverter.ToString(entry.Hash);
}
/// <summary>Extracts the full contents of the given entry via its owning asset file.</summary>
/// <param name="ae">The entry to extract.</param>
/// <returns>The extracted bytes.</returns>
private byte[] extract(AssetEntry ae)
{
    return ae.File.extract(ae);
}
/// <summary>
/// Core extraction routine: reads up to <paramref name="maxBytesToRead"/> bytes of
/// <paramref name="entry"/> from this asset file on disk, inflating zlib-compressed
/// entries unless <paramref name="nodecomp"/> is set. Because inflation proceeds in
/// whole reads, the returned array may be larger than the requested maximum.
/// </summary>
/// <param name="entry">The entry to read; offset/size/compression come from its fields.</param>
/// <param name="maxBytesToRead">Maximum bytes the caller wants from the source.</param>
/// <param name="nodecomp">When true, the raw (possibly compressed) bytes are returned.</param>
/// <returns>The extracted bytes.</returns>
/// <exception cref="Exception">Wraps any failure with file/offset/id/size diagnostics.</exception>
private byte[] extractPart1(AssetEntry entry, int maxBytesToRead, bool nodecomp)
{
    try
    {
        if (nodecomp || !entry.compressed)
        {
            using (FileStream stream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read))
            {
                // Raw path: copy maxBytesToRead bytes straight from the entry's offset.
                byte[] data = new byte[maxBytesToRead];
                stream.Seek(entry.offset, SeekOrigin.Begin);
                long bytesRead = stream.Read(data, 0, maxBytesToRead);
                // A short read only counts as an error when the entry actually holds
                // at least maxBytesToRead bytes.
                // NOTE(review): a single Stream.Read is not guaranteed to fill the
                // buffer even mid-file — a read loop would be more robust; confirm
                // whether short reads can occur against these asset files.
                if (entry.size >= maxBytesToRead && bytesRead != maxBytesToRead)
                {
                    throw new Exception("Not enough bytes read, expected [" + maxBytesToRead + "], got: " + bytesRead);
                }
                return(data);
            }
        }
        else
        {
            // COMPRESSED
            // NOTE: entry.size doesn't indicate the size of the uncompressed data
            // (entry.sizeD does). Decide whether to inflate everything or stop early.
            bool readAll = maxBytesToRead >= entry.size;
            using (FileStream stream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read))
            {
                long pos = stream.Seek(entry.offset, SeekOrigin.Begin); // NOTE(review): pos is unused
                // unzipCache is a shared scratch buffer reused across extractions;
                // serialize access to it.
                lock (unzipLock)
                {
                    //Debug.Log("decompress asset:" + entry.strID + ", size:" + entry.size + ", sizeD:" + entry.sizeD);
                    // Grow the shared buffer to the declared decompressed size if needed.
                    if (unzipCache.Length < entry.sizeD)
                    {
                        unzipCache = new byte[entry.sizeD];
                    }
                    int writeIndex = 0;
                    using (ZlibStream ds = new ZlibStream(stream, Ionic.Zlib.CompressionMode.Decompress))
                    {
                        int numRead;
                        // Inflate into the cache until the stream ends, or — for a
                        // partial read — until we have at least maxBytesToRead bytes.
                        while ((numRead = ds.Read(unzipCache, writeIndex, unzipCache.Length - writeIndex)) > 0)
                        {
                            writeIndex += numRead;
                            if (!readAll && writeIndex >= maxBytesToRead)
                            {
                                break;
                            }
                        }
                        //if (writeIndex != entry.sizeD)
                        //    Debug.LogWarning("expected to read " + entry.sizeD + " bytes, but only got " + writeIndex);
                        // Copy only the bytes actually produced out of the shared cache,
                        // since unzipCache may be larger and is reused by other callers.
                        byte[] outData = new byte[writeIndex];
                        Array.Copy(unzipCache, outData, outData.Length);
                        return(outData);
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Re-wrap with enough context (file, offset, id, compression, size) to
        // locate the offending entry from a log line alone.
        throw new Exception("failure in file " + file + ", @ " + entry.offset + ", id:" + entry.strID + ", compressed?" + entry.compressed + ", filesize:" + entry.size + "\n\t", ex);
    }
}
/// <summary>Extracts the entry's stored bytes without decompressing them.</summary>
/// <param name="entry">The entry to read.</param>
public byte[] extractNoDecomp(AssetEntry entry)
{
    return extractPart(entry, entry.size, null, true);
}
/// <summary>Extracts the full entry and writes the resulting bytes to the given stream.</summary>
/// <param name="entry">The entry to extract.</param>
/// <param name="fos">Destination stream for the extracted bytes.</param>
public void extract(AssetEntry entry, Stream fos)
{
    extractPart(entry, entry.size, fos, false);
}
/// <summary>Extracts the given entry into a byte array (decompressed when applicable).</summary>
/// <param name="entry">The entry to extract.</param>
/// <returns>The extracted bytes.</returns>
public byte[] extract(AssetEntry entry)
{
    return extractPart(entry, entry.size, null, false);
}