/// <summary>
/// Exercises Memory in Tree mode against anonymous objects that all reference
/// the same instance: the expected size grows by a fixed 16 bytes for each
/// additional field (24 -> 40 -> 56).
/// </summary>
public void Tree()
{
    var treeMemory = new Memory(Memory.Mode.Tree);
    var shared = new SealedC();

    treeMemory.SizeOf(new { a = shared }).Should().Be(24);
    treeMemory.SizeOf(new { a = shared, b = shared }).Should().Be(40);
    treeMemory.SizeOf(new { a = shared, b = shared, c = shared }).Should().Be(56);
}
/// <summary>
/// Sizes the BSP storage arrays from the lump directory: each lump records a
/// raw byte size, so dividing by the marshalled size of one element yields
/// the element count to allocate.
/// </summary>
// NOTE(review): the reader parameter is unused here; presumably kept for
// signature parity with the other Initialize* methods — confirm before removing.
protected void InitializeCounts(BinaryReader reader)
{
    var lumps = this.header.lumps;

    this.brushes = new InternalBspBrush[lumps[(int)Quake3LumpType.Brushes].size / Memory.SizeOf(typeof(InternalBspBrush))];
    this.leafBrushes = new int[lumps[(int)Quake3LumpType.LeafBrushes].size / Memory.SizeOf(typeof(int))];
    this.vertices = new InternalBspVertex[lumps[(int)Quake3LumpType.Vertices].size / Memory.SizeOf(typeof(InternalBspVertex))];
    this.planes = new InternalBspPlane[lumps[(int)Quake3LumpType.Planes].size / Memory.SizeOf(typeof(InternalBspPlane))];
    this.nodes = new InternalBspNode[lumps[(int)Quake3LumpType.Nodes].size / Memory.SizeOf(typeof(InternalBspNode))];
    this.models = new InternalBspModel[lumps[(int)Quake3LumpType.Models].size / Memory.SizeOf(typeof(InternalBspModel))];
    this.leaves = new InternalBspLeaf[lumps[(int)Quake3LumpType.Leaves].size / Memory.SizeOf(typeof(InternalBspLeaf))];
    this.leafFaces = new int[lumps[(int)Quake3LumpType.LeafFaces].size / Memory.SizeOf(typeof(int))];
    this.faces = new InternalBspFace[lumps[(int)Quake3LumpType.Faces].size / Memory.SizeOf(typeof(InternalBspFace))];
    this.elements = new int[lumps[(int)Quake3LumpType.Elements].size / Memory.SizeOf(typeof(int))];
}
/// <summary>
/// Creates an index buffer of the requested type and length; the byte size is
/// the index count times the per-index size (4 bytes for 32-bit indices,
/// otherwise 2 bytes). Optionally mirrors the data in a system-memory
/// shadow buffer.
/// </summary>
public HardwareIndexBuffer(HardwareBufferManagerBase manager, IndexType type, int numIndices, BufferUsage usage,
                           bool useSystemMemory, bool useShadowBuffer)
    : base(usage, useSystemMemory, useShadowBuffer)
{
    this.type = type;
    this.numIndices = numIndices;
    this.Manager = manager;

    // calc the index buffer size: pick the per-index size from the index type
    this.indexSize = type == IndexType.Size32
                         ? Memory.SizeOf(typeof(int))
                         : Memory.SizeOf(typeof(short));
    sizeInBytes = numIndices * this.indexSize;

    // create a shadow buffer if required
    if (useShadowBuffer)
    {
        shadowBuffer = new DefaultHardwareIndexBuffer(this.Manager, type, numIndices, BufferUsage.Dynamic);
    }
}
/// <summary>
/// Builds the bounding-hierarchy over the supplied primitive list and logs
/// progress, timing, and memory statistics.
/// </summary>
public void build(PrimitiveList primitives)
{
    this.primitives = primitives;
    int count = primitives.getNumPrimitives();

    UI.printDetailed(UI.Module.ACCEL, "Getting bounding box ...");
    bounds = primitives.getWorldBounds(null);

    // Start from the identity ordering; the hierarchy build permutes these indices.
    objects = new int[count];
    for (int idx = 0; idx < count; ++idx)
    {
        objects[idx] = idx;
    }

    UI.printDetailed(UI.Module.ACCEL, "Creating tree ...");
    int initialSize = 3 * (2 * 6 * count + 1);
    var tempTree = new List<int>((initialSize + 3) / 4);
    var stats = new BuildStats();
    var timer = new Timer();
    timer.start();
    buildHierarchy(tempTree, objects, stats);
    timer.end();

    UI.printDetailed(UI.Module.ACCEL, "Trimming tree ...");
    tree = tempTree.ToArray();

    // display stats
    stats.printStats();
    UI.printDetailed(UI.Module.ACCEL, " * Creation time: {0}", timer);
    UI.printDetailed(UI.Module.ACCEL, " * Usage of init: {0,9:0.00}%", (double)(100 * tree.Length) / initialSize);
    UI.printDetailed(UI.Module.ACCEL, " * Tree memory: {0}", Memory.SizeOf(tree));
    UI.printDetailed(UI.Module.ACCEL, " * Indices memory: {0}", Memory.SizeOf(objects));
}
/// <summary>
/// Utility method for helping to calculate offsets: returns the size in
/// bytes of one element of the given vertex element type.
/// </summary>
public static int GetTypeSize(VertexElementType type)
{
    // All packed-colour layouts occupy a single 32-bit integer.
    if (type == VertexElementType.Color_ABGR ||
        type == VertexElementType.Color_ARGB ||
        type == VertexElementType.Color)
    {
        return Memory.SizeOf(typeof(int));
    }

    // FloatN / ShortN / UByteN types are N consecutive components of the base type.
    if (type == VertexElementType.Float1) { return Memory.SizeOf(typeof(float)); }
    if (type == VertexElementType.Float2) { return Memory.SizeOf(typeof(float)) * 2; }
    if (type == VertexElementType.Float3) { return Memory.SizeOf(typeof(float)) * 3; }
    if (type == VertexElementType.Float4) { return Memory.SizeOf(typeof(float)) * 4; }
    if (type == VertexElementType.Short1) { return Memory.SizeOf(typeof(short)); }
    if (type == VertexElementType.Short2) { return Memory.SizeOf(typeof(short)) * 2; }
    if (type == VertexElementType.Short3) { return Memory.SizeOf(typeof(short)) * 3; }
    if (type == VertexElementType.Short4) { return Memory.SizeOf(typeof(short)) * 4; }
    if (type == VertexElementType.UByte4) { return Memory.SizeOf(typeof(byte)) * 4; }

    // Unrecognised element type: report zero size (keep the compiler happy).
    return 0;
}
// Measures the exact number of bytes allocated on the current thread by a
// single invocation of func, then asserts that sut.SizeOf reports that same
// value plus an optional adjustment.
private void ExactSize <T>(Func <T> func, int adjustment = 0)
{
    // Settle the heap first so the allocation counter delta below only
    // reflects the allocations made by func itself.
    GC.Collect();
    var s = GC.GetAllocatedBytesForCurrentThread();
    var t = func();
    GC.Collect();
    var e = GC.GetAllocatedBytesForCurrentThread();
    var actual = sut.SizeOf(t);
    // Second invocation happens after the measurement window closed, so it
    // cannot affect e - s; presumably it exists to keep the allocation path
    // live / defeat dead-code elimination — NOTE(review): confirm intent.
    t = func();
    // e - s is the byte delta attributable to the single func() call measured above.
    actual.Should().Be(e - s + adjustment);
}
/// <summary>
/// Builds the kd-tree over the given primitive list: gathers per-axis split
/// candidates, radix-sorts them, recursively builds the tree, then reports
/// timing/memory statistics and optionally dumps the leaves as .obj/.mtl
/// files for visual inspection.
/// </summary>
public void build(PrimitiveList primitives)
{
    UI.printDetailed(UI.Module.ACCEL, "KDTree settings");
    UI.printDetailed(UI.Module.ACCEL, " * Max Leaf Size: {0}", maxPrims);
    UI.printDetailed(UI.Module.ACCEL, " * Max Depth: {0}", MAX_DEPTH);
    UI.printDetailed(UI.Module.ACCEL, " * Traversal cost: {0}", TRAVERSAL_COST);
    UI.printDetailed(UI.Module.ACCEL, " * Intersect cost: {0}", INTERSECT_COST);
    UI.printDetailed(UI.Module.ACCEL, " * Empty bonus: {0}", EMPTY_BONUS);
    UI.printDetailed(UI.Module.ACCEL, " * Dump leaves: {0}", dump ? "enabled" : "disabled");
    Timer total = new Timer();
    total.start();
    this.primitiveList = primitives;
    // get the object space bounds
    bounds = primitives.getWorldBounds(null);
    int nPrim = primitiveList.getNumPrimitives(), nSplits = 0;
    BuildTask task = new BuildTask(nPrim);
    Timer prepare = new Timer();
    prepare.start();
    // Collect split candidates: for each primitive and axis, either one
    // PLANAR event (primitive is flat in that axis) or an OPENED/CLOSED pair.
    for (int i = 0; i < nPrim; i++)
    {
        for (int axis = 0; axis < 3; axis++)
        {
            float ls = primitiveList.getPrimitiveBound(i, 2 * axis + 0);
            float rs = primitiveList.getPrimitiveBound(i, 2 * axis + 1);
            if (ls == rs)
            {
                // flat in this dimension
                task.splits[nSplits] = pack(ls, PLANAR, axis, i);
                nSplits++;
            }
            else
            {
                task.splits[nSplits + 0] = pack(ls, OPENED, axis, i);
                task.splits[nSplits + 1] = pack(rs, CLOSED, axis, i);
                nSplits += 2;
            }
        }
    }
    task.n = nSplits;
    prepare.end();
    Timer t = new Timer();
    List<int> tempTree = new List<int>();
    List<int> tempList = new List<int>();
    // Reserve the two root slots before the recursion fills them in.
    tempTree.Add(0);
    tempTree.Add(1);
    t.start();
    // sort it
    Timer sorting = new Timer();
    sorting.start();
    radix12(task.splits, task.n);
    sorting.end();
    // build the actual tree
    BuildStats stats = new BuildStats();
    buildTree(bounds.getMinimum().x, bounds.getMaximum().x,
              bounds.getMinimum().y, bounds.getMaximum().y,
              bounds.getMinimum().z, bounds.getMaximum().z,
              task, 1, tempTree, 0, tempList, stats);
    t.end();
    // write out arrays; null out the temporaries to free some memory
    task = null;
    tree = tempTree.ToArray();
    tempTree = null;
    this.primitives = tempList.ToArray();
    tempList = null;
    total.end();
    // display some extra info
    stats.printStats();
    UI.printDetailed(UI.Module.ACCEL, " * Node memory: {0}", Memory.SizeOf(tree));
    UI.printDetailed(UI.Module.ACCEL, " * Object memory: {0}", Memory.SizeOf(this.primitives));
    UI.printDetailed(UI.Module.ACCEL, " * Prepare time: {0}", prepare);
    UI.printDetailed(UI.Module.ACCEL, " * Sorting time: {0}", sorting);
    UI.printDetailed(UI.Module.ACCEL, " * Tree creation: {0}", t);
    UI.printDetailed(UI.Module.ACCEL, " * Build time: {0}", total);
    if (dump)
    {
        try
        {
            // One material per leaf-occupancy count, colour-ramped from blue
            // (sparse leaves) through green/yellow/red to magenta (densest).
            UI.printInfo(UI.Module.ACCEL, "Dumping mtls to {0}.mtl ...", dumpPrefix);
            StreamWriter mtlFile = new StreamWriter(dumpPrefix + ".mtl");
            int maxN = stats.maxObjects;
            for (int n = 0; n <= maxN; n++)
            {
                float blend = (float)n / (float)maxN;
                Color nc;
                if (blend < 0.25)
                {
                    nc = Color.blend(Color.BLUE, Color.GREEN, blend / 0.25f);
                }
                else if (blend < 0.5)
                {
                    nc = Color.blend(Color.GREEN, Color.YELLOW, (blend - 0.25f) / 0.25f);
                }
                else if (blend < 0.75)
                {
                    nc = Color.blend(Color.YELLOW, Color.RED, (blend - 0.50f) / 0.25f);
                }
                else
                {
                    nc = Color.MAGENTA;
                }
                mtlFile.WriteLine(string.Format("newmtl mtl{0}", n));
                float[] rgb = nc.getRGB();
                mtlFile.WriteLine("Ka 0.1 0.1 0.1");
                mtlFile.WriteLine(string.Format("Kd {0}g {1}g {2}g", rgb[0], rgb[1], rgb[2]));
                mtlFile.WriteLine("illum 1\n");
            }
            StreamWriter objFile = new StreamWriter(dumpPrefix + ".obj");
            UI.printInfo(UI.Module.ACCEL, "Dumping tree to {0}.obj ...", dumpPrefix);
            dumpObj(0, 0, maxN, new BoundingBox(bounds), objFile, mtlFile);
            objFile.Close();
            mtlFile.Close();
        }
        catch (Exception e)
        {
            // Dumping is diagnostic only; a failure must not abort the build.
            Console.WriteLine(e);
        }
    }
}
/// <summary>
/// Decodes a DDS stream into raw pixel data: validates the magic/header,
/// determines face count (cubemap), depth (volume) and pixel format, then
/// copies — or, when the render system lacks DXT support, decompresses —
/// every mip of every face into one contiguous byte array.
/// </summary>
public override Codec.DecodeResult Decode(Stream input)
{
    using (var br = new BinaryReader(input))
    {
        // Read 4 character code
        var fileType = br.ReadInt32();
        using (var wrap = BufferBase.Wrap(fileType, 2))
        {
            _flipEndian(wrap, sizeof(uint), 1);
        }
        if (FOURCC('D', 'D', 'S', ' ') != fileType)
        {
            throw new AxiomException("This is not a DDS file!");
        }
        // Read header in full
        var header = DDSHeader.Read(br);
        // Endian flip if required, all 32-bit values
        using (var wrap = BufferBase.Wrap(header, Memory.SizeOf(typeof(DDSHeader))))
        {
            _flipEndian(wrap, 4, Memory.SizeOf(typeof(DDSHeader)) / 4);
        }
        // Check some sizes
        if (header.size != DDS_HEADER_SIZE)
        {
            throw new AxiomException("DDS header size mismatch!");
        }
        if (header.pixelFormat.size != DDS_PIXELFORMAT_SIZE)
        {
            throw new AxiomException("DDS header size mismatch!");
        }
        var imgData = new ImageData();
        imgData.depth = 1; // (deal with volume later)
        imgData.width = header.width;
        imgData.height = header.height;
        var numFaces = 1; // assume one face until we know otherwise
        if ((header.caps.caps1 & DDSCAPS_MIPMAP) != 0)
        {
            // mipMapCount includes the top level, numMipMaps excludes it.
            imgData.numMipMaps = header.mipMapCount - 1;
        }
        else
        {
            imgData.numMipMaps = 0;
        }
        imgData.flags = 0;
        var decompressDXT = false;
        // Figure out basic image type
        if ((header.caps.caps2 & DDSCAPS2_CUBEMAP) != 0)
        {
            imgData.flags |= ImageFlags.CubeMap;
            numFaces = 6;
        }
        else if ((header.caps.caps2 & DDSCAPS2_VOLUME) != 0)
        {
            imgData.flags |= ImageFlags.Volume;
            imgData.depth = header.depth;
        }
        // Pixel format
        var sourceFormat = PixelFormat.Unknown;
        if ((header.pixelFormat.flags & DDPF_FOURCC) != 0)
        {
            sourceFormat = _convertFourCCFormat(header.pixelFormat.fourCC);
        }
        else
        {
            sourceFormat = _convertPixelFormat(header.pixelFormat.rgbBits, header.pixelFormat.redMask,
                                               header.pixelFormat.greenMask, header.pixelFormat.blueMask,
                                               (header.pixelFormat.flags & DDPF_ALPHAPIXELS) != 0
                                                   ? header.pixelFormat.alphaMask
                                                   : 0);
        }
        if (PixelUtil.IsCompressed(sourceFormat))
        {
            if (!Root.Instance.RenderSystem.Capabilities.HasCapability(Capabilities.TextureCompressionDXT))
            {
                // We'll need to decompress
                decompressDXT = true;
                // Convert format
                switch (sourceFormat)
                {
                    case PixelFormat.DXT1:
                        // source can be either 565 or 5551 depending on whether alpha present
                        // unfortunately you have to read a block to figure out which
                        // Note that we upgrade to 32-bit pixel formats here, even
                        // though the source is 16-bit; this is because the interpolated
                        // values will benefit from the 32-bit results, and the source
                        // from which the 16-bit samples are calculated may have been
                        // 32-bit so can benefit from this.
                        var block = DXTColorBlock.Read(br);
                        using (var wrap = BufferBase.Wrap(block.colour_0, sizeof(ushort)))
                        {
                            _flipEndian(wrap, sizeof(ushort), 1);
                        }
                        using (var wrap = BufferBase.Wrap(block.colour_1, sizeof(ushort)))
                        {
                            _flipEndian(wrap, sizeof(ushort), 1);
                        }
                        // skip back since we'll need to read this again
                        br.BaseStream.Seek(0 - (long)Memory.SizeOf(typeof(DXTColorBlock)), SeekOrigin.Current);
                        // colour_0 <= colour_1 means transparency in DXT1
                        if (block.colour_0 <= block.colour_1)
                        {
                            imgData.format = PixelFormat.BYTE_RGBA;
                        }
                        else
                        {
                            imgData.format = PixelFormat.BYTE_RGB;
                        }
                        break;
                    case PixelFormat.DXT2:
                    case PixelFormat.DXT3:
                    case PixelFormat.DXT4:
                    case PixelFormat.DXT5:
                        // full alpha present, formats vary only in encoding
                        imgData.format = PixelFormat.BYTE_RGBA;
                        break;
                    default:
                        // all other cases need no special format handling
                        break;
                }
            }
            else
            {
                // Use original format
                imgData.format = sourceFormat;
                // Keep DXT data compressed
                imgData.flags |= ImageFlags.Compressed;
            }
        }
        else // not compressed
        {
            // Don't test against DDPF_RGB since greyscale DDS doesn't set this
            // just derive any other kind of format
            imgData.format = sourceFormat;
        }
        // Calculate total size from number of mipmaps, faces and size
        imgData.size = Image.CalculateSize(imgData.numMipMaps, numFaces, imgData.width,
                                           imgData.height, imgData.depth, imgData.format);
        // Now deal with the data
        var dest = new byte[imgData.size];
        var destBuffer = BufferBase.Wrap(dest);
        // all mips for a face, then each face
        for (var i = 0; i < numFaces; ++i)
        {
            var width = imgData.width;
            var height = imgData.height;
            var depth = imgData.depth;
            for (var mip = 0; mip <= imgData.numMipMaps; ++mip)
            {
                var dstPitch = width * PixelUtil.GetNumElemBytes(imgData.format);
                if (PixelUtil.IsCompressed(sourceFormat))
                {
                    // Compressed data
                    if (decompressDXT)
                    {
                        DXTColorBlock col;
                        DXTInterpolatedAlphaBlock iAlpha;
                        DXTExplicitAlphaBlock eAlpha;
                        // 4x4 block of decompressed colour
                        var tempColours = new ColorEx[16];
                        var destBpp = PixelUtil.GetNumElemBytes(imgData.format);
                        // sx/sy clamp the block to the image edge for tiny mips.
                        var sx = Utility.Min(width, 4);
                        var sy = Utility.Min(height, 4);
                        var destPitchMinus4 = dstPitch - destBpp * sx;
                        // slices are done individually
                        for (var z = 0; z < depth; ++z)
                        {
                            // 4x4 blocks in x/y
                            for (var y = 0; y < height; y += 4)
                            {
                                for (var x = 0; x < width; x += 4)
                                {
                                    if (sourceFormat == PixelFormat.DXT2 || sourceFormat == PixelFormat.DXT3)
                                    {
                                        // explicit alpha
                                        eAlpha = DXTExplicitAlphaBlock.Read(br);
                                        using (var wrap = BufferBase.Wrap(eAlpha.alphaRow, eAlpha.alphaRow.Length * sizeof(ushort)))
                                        {
                                            _flipEndian(wrap, sizeof(ushort), 4);
                                        }
                                        _unpackDXTAlpha(eAlpha, tempColours);
                                    }
                                    else if (sourceFormat == PixelFormat.DXT4 || sourceFormat == PixelFormat.DXT5)
                                    {
                                        // interpolated alpha
                                        iAlpha = DXTInterpolatedAlphaBlock.Read(br);
                                        using (var wrap = BufferBase.Wrap(iAlpha.alpha_0, 1))
                                        {
                                            _flipEndian(wrap, sizeof(ushort), 1);
                                        }
                                        using (var wrap = BufferBase.Wrap(iAlpha.alpha_1, 1))
                                        {
                                            _flipEndian(wrap, sizeof(ushort), 1);
                                        }
                                        _unpackDXTAlpha(iAlpha, tempColours);
                                    }
                                    // always read colour
                                    col = DXTColorBlock.Read(br);
                                    using (var wrap = BufferBase.Wrap(col.colour_0, sizeof(ushort)))
                                    {
                                        _flipEndian(wrap, sizeof(ushort), 1);
                                    }
                                    using (var wrap = BufferBase.Wrap(col.colour_1, sizeof(ushort)))
                                    {
                                        _flipEndian(wrap, sizeof(ushort), 1);
                                    }
                                    _unpackDXTColor(sourceFormat, col, tempColours);
                                    // write 4x4 block to uncompressed version
                                    for (var by = 0; by < sy; ++by)
                                    {
                                        for (var bx = 0; bx < sx; ++bx)
                                        {
                                            PixelConverter.PackColor(tempColours[by * 4 + bx], imgData.format, destBuffer);
                                            destBuffer += destBpp;
                                        }
                                        // advance to next row
                                        destBuffer += destPitchMinus4;
                                    }
                                    // next block. Our dest pointer is 4 lines down
                                    // from where it started
                                    if (x + 4 >= width)
                                    {
                                        // Jump back to the start of the line
                                        destBuffer += -destPitchMinus4;
                                    }
                                    else
                                    {
                                        // Jump back up 4 rows and 4 pixels to the
                                        // right to be at the next block to the right
                                        destBuffer += -(dstPitch * sy + destBpp * sx);
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        // load directly
                        // DDS format lies! sizeOrPitch is not always set for DXT!!
                        var dxtSize = PixelUtil.GetMemorySize(width, height, depth, imgData.format);
                        using (var src = BufferBase.Wrap(br.ReadBytes(dxtSize)))
                        {
                            Memory.Copy(src, destBuffer, dxtSize);
                        }
                        destBuffer += dxtSize;
                    }
                }
                else
                {
                    // Final data - trim incoming pitch
                    int srcPitch;
                    if ((header.flags & DDSD_PITCH) != 0)
                    {
                        srcPitch = header.sizeOrPitch / Utility.Max(1, mip * 2);
                    }
                    else
                    {
                        // assume same as final pitch
                        srcPitch = dstPitch;
                    }
                    Contract.Requires(dstPitch <= srcPitch);
                    var srcAdvance = (long)(srcPitch - dstPitch);
                    for (var z = 0; z < imgData.depth; ++z)
                    {
                        for (var y = 0; y < imgData.height; ++y)
                        {
                            // Copy one row, then skip any source padding bytes.
                            using (var src = BufferBase.Wrap(br.ReadBytes(dstPitch)))
                            {
                                Memory.Copy(src, destBuffer, dstPitch);
                            }
                            if (srcAdvance > 0)
                            {
                                br.BaseStream.Seek(srcAdvance, SeekOrigin.Current);
                            }
                            destBuffer += dstPitch;
                        }
                    }
                }
                // Next mip: halve each dimension, clamping at 1.
                if (width != 1)
                {
                    width /= 2;
                }
                if (height != 1)
                {
                    height /= 2;
                }
                if (depth != 1)
                {
                    depth /= 2;
                }
            }
        }
        // NOTE(review): destBuffer is not disposed if an exception is thrown
        // above — consider a try/finally.
        destBuffer.Dispose();
        return(new DecodeResult(new MemoryStream(dest), imgData));
    }
}
/// <summary>
/// Encodes raw image data to a DDS file on disk. Supports plain 2D, cubemap
/// and volume textures in A8R8G8B8 / X8R8G8B8 / R8G8B8 / FLOAT32_R formats;
/// anything else (mipmaps, non-square volumes, non-power-of-two sizes, other
/// pixel formats) raises NotImplementedException.
/// </summary>
/// <param name="input">Stream holding the raw pixel data to write after the header.</param>
/// <param name="outFileName">Destination file path.</param>
/// <param name="data">Image metadata; must be an ImageData instance.</param>
/// <exception cref="NotImplementedException">For any unsupported texture configuration.</exception>
public override void EncodeToFile(Stream input, string outFileName, Codec.CodecData data)
{
    // Unwrap codecDataPtr - data is cleaned by calling function
    var imgData = (ImageData)data;

    // Check size for cube map faces: if the total size matches six faces'
    // worth of data, treat the image as a cubemap.
    var isCubeMap = imgData.size ==
                    Image.CalculateSize(imgData.numMipMaps, 6, imgData.width, imgData.height, imgData.depth,
                                        imgData.format);

    // Establish texture attributes
    var isVolume = imgData.depth > 1;
    var isFloat32r = imgData.format == PixelFormat.FLOAT32_R;
    var hasAlpha = false;
    var notImplemented = false;
    var notImplementedString = string.Empty;

    // Check for all the 'not implemented' conditions
    if (imgData.numMipMaps != 0)
    {
        // No mip map functionality yet
        notImplemented = true;
        notImplementedString += " mipmaps";
    }
    if ((isVolume == true) && (imgData.width != imgData.height))
    {
        // Square textures only
        notImplemented = true;
        notImplementedString += " non square textures";
    }
    // Round width up to the next power of two and compare.
    var size = 1;
    while (size < imgData.width)
    {
        size <<= 1;
    }
    if (size != imgData.width)
    {
        // Power two textures only
        notImplemented = true;
        notImplementedString += " non power two textures";
    }
    switch (imgData.format)
    {
        case PixelFormat.A8R8G8B8:
        case PixelFormat.X8R8G8B8:
        case PixelFormat.R8G8B8:
        case PixelFormat.FLOAT32_R:
            break;
        default:
            // No crazy FOURCC or 565 et al. file formats at this stage
            notImplemented = true;
            notImplementedString = " unsupported pixel format";
            break;
    }

    // Except if any 'not implemented' conditions were met
    if (notImplemented)
    {
        throw new NotImplementedException(string.Format("DDS encoding for{0} not supported", notImplementedString));
    }
    else
    {
        // Build header and write to disk
        // Variables for some DDS header flags
        var ddsHeaderFlags = 0;
        var ddsHeaderRgbBits = 0;
        var ddsHeaderSizeOrPitch = 0;
        var ddsHeaderCaps1 = 0;
        var ddsHeaderCaps2 = 0;
        var ddsMagic = this.DDS_MAGIC;

        // Initalise the header flags
        ddsHeaderFlags = (isVolume)
                             ? DDSD_CAPS | DDSD_WIDTH | DDSD_HEIGHT | DDSD_DEPTH | DDSD_PIXELFORMAT
                             : DDSD_CAPS | DDSD_WIDTH | DDSD_HEIGHT | DDSD_PIXELFORMAT;

        // Initalise the rgbBits flags
        switch (imgData.format)
        {
            case PixelFormat.A8R8G8B8:
                ddsHeaderRgbBits = 8 * 4;
                hasAlpha = true;
                break;
            case PixelFormat.X8R8G8B8:
                ddsHeaderRgbBits = 8 * 4;
                break;
            case PixelFormat.R8G8B8:
                ddsHeaderRgbBits = 8 * 3;
                break;
            case PixelFormat.FLOAT32_R:
                ddsHeaderRgbBits = 32;
                break;
            default:
                ddsHeaderRgbBits = 0;
                break;
        }

        // Initalise the SizeOrPitch flags (power two textures for now)
        ddsHeaderSizeOrPitch = ddsHeaderRgbBits * imgData.width;

        // Initalise the caps flags
        ddsHeaderCaps1 = (isVolume || isCubeMap) ? DDSCAPS_COMPLEX | DDSCAPS_TEXTURE : DDSCAPS_TEXTURE;
        if (isVolume)
        {
            ddsHeaderCaps2 = DDSCAPS2_VOLUME;
        }
        else if (isCubeMap)
        {
            ddsHeaderCaps2 = DDSCAPS2_CUBEMAP | DDSCAPS2_CUBEMAP_POSITIVEX | DDSCAPS2_CUBEMAP_NEGATIVEX |
                             DDSCAPS2_CUBEMAP_POSITIVEY | DDSCAPS2_CUBEMAP_NEGATIVEY | DDSCAPS2_CUBEMAP_POSITIVEZ |
                             DDSCAPS2_CUBEMAP_NEGATIVEZ;
        }

        // Populate the DDS header information
        var ddsHeader = new DDSHeader();
        ddsHeader.size = DDS_HEADER_SIZE;
        ddsHeader.flags = ddsHeaderFlags;
        ddsHeader.width = imgData.width;
        ddsHeader.height = imgData.height;
        ddsHeader.depth = isVolume ? imgData.depth : 0;
        ddsHeader.depth = isCubeMap ? 6 : ddsHeader.depth;
        ddsHeader.mipMapCount = 0;
        ddsHeader.sizeOrPitch = ddsHeaderSizeOrPitch;
        ddsHeader.reserved1 = new int[11];
        ddsHeader.reserved2 = 0;
        ddsHeader.pixelFormat.size = DDS_PIXELFORMAT_SIZE;
        ddsHeader.pixelFormat.flags = (hasAlpha) ? DDPF_RGB | DDPF_ALPHAPIXELS : DDPF_RGB;
        ddsHeader.pixelFormat.flags = (isFloat32r) ? DDPF_FOURCC : ddsHeader.pixelFormat.flags;
        ddsHeader.pixelFormat.fourCC = (isFloat32r) ? D3DFMT_R32F : 0;
        ddsHeader.pixelFormat.rgbBits = ddsHeaderRgbBits;
        ddsHeader.pixelFormat.alphaMask = hasAlpha ? unchecked((int)0xFF000000) : 0x00000000;
        ddsHeader.pixelFormat.alphaMask = isFloat32r ? 0x00000000 : ddsHeader.pixelFormat.alphaMask;
        ddsHeader.pixelFormat.redMask = isFloat32r ? unchecked((int)0xFFFFFFFF) : 0x00FF0000;
        ddsHeader.pixelFormat.greenMask = isFloat32r ? 0x00000000 : 0x0000FF00;
        ddsHeader.pixelFormat.blueMask = isFloat32r ? 0x00000000 : 0x000000FF;
        ddsHeader.caps.caps1 = ddsHeaderCaps1;
        ddsHeader.caps.caps2 = ddsHeaderCaps2;
        ddsHeader.caps.reserved[0] = 0;
        ddsHeader.caps.reserved[1] = 0;

        // Swap endian
        using (var wrap = BufferBase.Wrap(ddsMagic, 2))
        {
            _flipEndian(wrap, sizeof(uint), 1);
        }
        using (var wrap = BufferBase.Wrap(ddsHeader, Memory.SizeOf(typeof(DDSHeader))))
        {
            _flipEndian(wrap, 4, Memory.SizeOf(typeof(DDSHeader)) / 4);
        }

        // Write the file
        using (var br = new BinaryWriter(File.Open(outFileName, FileMode.OpenOrCreate, FileAccess.Write)))
        {
            br.Write(ddsMagic);
            ddsHeader.Write(br);
            // XXX flipEndian on each pixel chunk written unless isFloat32r ?
            // FIX: Stream.Read is allowed to return fewer bytes than requested,
            // so loop until the buffer is full (or the stream ends) instead of
            // assuming a single call fills it.
            var inputData = new byte[(int)input.Length];
            var totalRead = 0;
            while (totalRead < inputData.Length)
            {
                var bytesRead = input.Read(inputData, totalRead, inputData.Length - totalRead);
                if (bytesRead <= 0)
                {
                    break; // end of stream; remaining bytes stay zero (matches old worst case)
                }
                totalRead += bytesRead;
            }
            br.Write(inputData);
        }
    }
}
//[Benchmark]
// Runs Memory.SizeOf over the pre-built int-array fixture.
public void ArrayInt() => _memory.SizeOf(_intArray);
/// <summary>
/// Writes the entire array at the given offset; the byte count is derived
/// from the array's element size times its length, then delegated to the
/// three-argument overload.
/// </summary>
public void Write(System.Array val, int offset)
{
    var elementSize = Memory.SizeOf(val.GetType().GetElementType());
    var byteCount = elementSize * val.Length;
    Write(val, offset, byteCount);
}