private ShaderMixinObjectId()
{
    objectIdBuilder = new ObjectIdBuilder();
    buffer = Marshal.AllocHGlobal(65536);
    memStream = new NativeMemoryStream(buffer, 65536);
    writer = new HashSerializationWriter(memStream);
    writer.Context.SerializerSelector = new SerializerSelector("Default", "Hash");
}
private ShaderMixinObjectId()
{
    objectIdBuilder = new ObjectIdBuilder();
    buffer = Marshal.AllocHGlobal(65536);
    memStream = new NativeMemoryStream(buffer, 65536);
    writer = new HashSerializationWriter(memStream);
    writer.Context.SerializerSelector = new SerializerSelector();
    writer.Context.SerializerSelector.RegisterProfile("Default");
    writer.Context.SerializerSelector.RegisterSerializer(new ParameterKeyHashSerializer());
    writer.Context.SerializerSelector.RegisterSerializer(new ParameterCollectionHashSerializer());

    if (parameters == null)
        parameters = new ParameterCollection();
}
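// Usage sketch (not from the engine source): a minimal illustration of the buffer/stream
// pattern the two constructors above rely on (System and System.Runtime.InteropServices
// namespaces assumed). Unmanaged memory is allocated, wrapped in a NativeMemoryStream,
// written, rewound, and read back; this assumes NativeMemoryStream behaves like a regular
// seekable Stream over the unmanaged block. The helper name WriteAndReadBack is hypothetical.
static byte[] WriteAndReadBack(byte[] payload)
{
    // Allocate an unmanaged block, as the constructors above do with a fixed 64 KiB buffer.
    var buffer = Marshal.AllocHGlobal(payload.Length);
    try
    {
        var stream = new NativeMemoryStream(buffer, payload.Length);

        // Write managed bytes into the unmanaged block through the stream.
        stream.Write(payload, 0, payload.Length);

        // Rewind and read them back, mirroring the "Position = 0" pattern used elsewhere.
        stream.Position = 0;
        var readBack = new byte[payload.Length];
        stream.Read(readBack, 0, readBack.Length);
        return readBack;
    }
    finally
    {
        // The constructors above keep their buffer alive for the object's lifetime;
        // this standalone sketch frees it immediately.
        Marshal.FreeHGlobal(buffer);
    }
}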
/// <summary>
/// Creates an in-memory binary blob as a <see cref="Blob"/> that will also be stored using the active <see cref="IOdbBackend"/>.
/// Even if the <see cref="Blob"/> is new (not yet in the ODB), the memory will be copied.
/// </summary>
/// <param name="data">The data.</param>
/// <param name="size">The size.</param>
/// <returns>The <see cref="Blob"/> containing the given data, with its reference count incremented.</returns>
public Blob CreateBlob(IntPtr data, int size)
{
    // Generate hash
    ObjectId objectId;
    var nativeMemoryStream = new NativeMemoryStream(data, size);
    using (var digestStream = new DigestStream(Stream.Null))
    {
        nativeMemoryStream.CopyTo(digestStream);
        objectId = digestStream.CurrentHash;
    }

    lock (LoadedBlobs)
    {
        var blob = Lookup(objectId);

        // Blob doesn't exist yet, so let's create it and save it to the ODB.
        if (blob == null)
        {
            // Go back to the beginning of the stream after the hash pass above
            nativeMemoryStream.Position = 0;

            // Create blob
            blob = new Blob(this, objectId, data, size);
            blob.AddReference();

            // Write to disk
            backendWrite.Write(objectId, nativeMemoryStream, size, false);

            // Add blob to cache
            LoadedBlobs.Add(objectId, blob);
        }

        return blob;
    }
}
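// Usage sketch (not from the engine source): feeding CreateBlob above with managed data by
// pinning the array and passing its address and length (GCHandle lives in
// System.Runtime.InteropServices). The containing type name ObjectDatabase and the helper
// name CreateBlobFromBytes are assumptions for illustration; the documented contract only
// guarantees the returned blob's reference count was incremented, so releasing that
// reference is left to whatever convention the caller uses.
static Blob CreateBlobFromBytes(ObjectDatabase objectDatabase, byte[] data)
{
    // Pin the managed array so it has a stable address for the duration of the call.
    var handle = GCHandle.Alloc(data, GCHandleType.Pinned);
    try
    {
        // CreateBlob copies the memory even when the blob is new, so the pin only
        // needs to last for this call.
        return objectDatabase.CreateBlob(handle.AddrOfPinnedObject(), data.Length);
    }
    finally
    {
        handle.Free();
    }
}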
internal void Load(Stream stream)
{
    if (stream == null)
        throw new ArgumentNullException(nameof(stream));

    if (AudioEngine.IsDisposed)
        throw new ObjectDisposedException("Audio Engine");

    // Create a native memory stream to extract the LZ4 audio stream.
    nativeDataBuffer = Utilities.AllocateMemory((int)stream.Length);
    var nativeStream = new NativeMemoryStream(nativeDataBuffer, stream.Length);
    stream.CopyTo(nativeStream);
    nativeStream.Position = 0;

    var waveStreamReader = new SoundStream(nativeStream);
    var waveFormat = waveStreamReader.Format;
    if (waveFormat.Channels > 2)
        throw new NotSupportedException("The wave file contains more than 2 data channels. Only mono and stereo formats are currently supported.");
    if (waveFormat.Encoding != WaveFormatEncoding.Pcm || waveFormat.BitsPerSample != 16)
        throw new NotSupportedException("The wave file audio format is not supported. Only 16-bit PCM encoded formats are currently supported.");

    WaveFormat = waveFormat;
    WaveDataPtr = nativeDataBuffer + (int)nativeStream.Position;
    WaveDataSize = (int)waveStreamReader.Length;
    Name = "Sound Effect " + soundEffectCreationCount;

    AdaptAudioDataImpl();

    // Register the sound with the AudioEngine so that it is properly freed if the AudioEngine is disposed before this instance.
    AudioEngine.RegisterSound(this);

    // Create the default instance only when we actually load; previously it was created on demand,
    // which sometimes resulted in useless creations and bugs within the editor.
    DefaultInstance = CreateInstance();

    // Copy back values that might have been set before the default instance was created.
    DefaultInstance.Pan = defaultPan;
    DefaultInstance.Volume = defaultVolume;
    DefaultInstance.IsLooped = defaultIsLooped;

    Interlocked.Increment(ref soundEffectCreationCount);
}
internal void Load(Stream stream)
{
    if (stream == null)
        throw new ArgumentNullException("stream");

    if (AudioEngine.IsDisposed)
        throw new ObjectDisposedException("Audio Engine");

    // Create a native memory stream to extract the LZ4 audio stream.
    nativeDataBuffer = Utilities.AllocateMemory((int)stream.Length);
    var nativeStream = new NativeMemoryStream(nativeDataBuffer, stream.Length);
    stream.CopyTo(nativeStream);
    nativeStream.Position = 0;

    var waveStreamReader = new SoundStream(nativeStream);
    var waveFormat = waveStreamReader.Format;
    if (waveFormat.Channels > 2)
        throw new NotSupportedException("The wave file contains more than 2 data channels. Only mono and stereo formats are currently supported.");
    if (waveFormat.Encoding != WaveFormatEncoding.Pcm || waveFormat.BitsPerSample != 16)
        throw new NotSupportedException("The wave file audio format is not supported. Only 16-bit PCM encoded formats are currently supported.");

    WaveFormat = waveFormat;
    WaveDataPtr = nativeDataBuffer + (int)nativeStream.Position;
    WaveDataSize = (int)waveStreamReader.Length;
    Name = "Sound Effect " + soundEffectCreationCount;

    AdaptAudioDataImpl();

    // Register the sound with the AudioEngine so that it is properly freed if the AudioEngine is disposed before this instance.
    AudioEngine.RegisterSound(this);

    Interlocked.Increment(ref soundEffectCreationCount);
}
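// Usage sketch (not from the engine source): the first step shared by both Load variants
// above, in isolation. A managed Stream is copied into an unmanaged buffer and re-exposed
// as a seekable NativeMemoryStream so a parser (SoundStream above) can read it without
// further managed copies. Marshal.AllocHGlobal stands in for the engine's
// Utilities.AllocateMemory to keep the sketch self-contained; the helper name
// CopyToNativeStream is hypothetical.
static NativeMemoryStream CopyToNativeStream(Stream source, out IntPtr nativeBuffer)
{
    // Allocate an unmanaged block large enough for the whole source stream.
    // The caller owns nativeBuffer and must eventually release it with Marshal.FreeHGlobal.
    nativeBuffer = Marshal.AllocHGlobal((int)source.Length);

    // Wrap the unmanaged block as a stream and copy the managed data into it.
    var nativeStream = new NativeMemoryStream(nativeBuffer, source.Length);
    source.CopyTo(nativeStream);

    // Rewind so the caller can parse from the start, as Load does before creating SoundStream.
    nativeStream.Position = 0;
    return nativeStream;
}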