Example #1
        // Used during deserialization to load the music data from a stream.
        internal void Load(Stream stream)
        {
#if SILICONSTUDIO_PLATFORM_ANDROID
            var virtualStream = stream as VirtualFileStream;
            if (virtualStream == null)
            {
                throw new InvalidOperationException("Expecting VirtualFileStream. Music files needs to be stored on the virtual file system in a non-compressed form.");
            }

            var fileStream = virtualStream.InternalStream as FileStream;
            if (fileStream == null)
            {
                throw new InvalidOperationException("Expecting FileStream in VirtualFileStream.InternalStream. Music files needs to be stored on the virtual file system in a non-compressed form.");
            }

            FileName      = fileStream.Name;
            StartPosition = virtualStream.StartPosition;
            Length        = virtualStream.Length;
#else
            // Make a memory copy of the stream so that the source can be properly disposed
            var memoryStream = new MemoryStream();
            stream.CopyTo(memoryStream);
            Stream          = memoryStream;
            Stream.Position = 0;
#endif

            ResetStateToDefault();
            Name = "SoundMusic " + soundMusicCreationCount;

            AudioEngine.RegisterSound(this);

            Interlocked.Increment(ref soundMusicCreationCount);
        }
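
The memory-copy branch exists so the caller can dispose its source stream as soon as the call returns. A minimal sketch of that ownership contract, assuming a caller-owned FileStream and a pre-existing soundMusic instance (both illustrative; this internal method is normally reached through the public SoundMusic.Load shown in Example #2):

        // Hedged illustration of the ownership contract on non-Android platforms.
        using (var source = File.OpenRead("music.wav"))    // illustrative path; on Android a non-compressed VirtualFileStream is required instead
        {
            soundMusic.Load(source);                       // copies the data into a private MemoryStream
        }
        // Disposing 'source' here is safe: the music data now lives in soundMusic.Stream.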
Example #2
        /// <summary>
        /// Create and Load a sound music from an input file.
        /// </summary>
        /// <param name="engine">The audio engine in which to load the soundMusic</param>
        /// <param name="stream">The stream.</param>
        /// <returns>A new instance of soundMusic ready to be played</returns>
        /// <exception cref="System.ArgumentNullException">engine
        /// or
        /// filename</exception>
        /// <exception cref="System.ObjectDisposedException">The AudioEngine in which to create the voice is disposed.</exception>
        /// <exception cref="System.ArgumentException">engine or stream</exception>
        /// <exception cref="ObjectDisposedException">The AudioEngine in which to create the voice is disposed.</exception>
        /// <exception cref="ArgumentNullException">File ' + filename + ' does not exist.</exception>
        /// <remarks>On all platform the wav format is supported.
        /// For compressed formats, it is the task of the build engine to automatically adapt the original files to the best hardware specific format.</remarks>
        public static SoundMusic Load(AudioEngine engine, Stream stream)
        {
            if (engine == null)
            {
                throw new ArgumentNullException("engine");
            }

            if (stream == null)
            {
                throw new ArgumentNullException("stream");
            }

            if (engine.IsDisposed)
            {
                throw new ObjectDisposedException("The AudioEngine in which to create the voice is disposed.");
            }

            // TODO: Not portable on WindowsStore

            var ret = new SoundMusic(engine, stream);

            ret.ResetStateToDefault();
            ret.Name = "SoundMusic " + soundMusicCreationCount;

            engine.RegisterSound(ret);

            Interlocked.Increment(ref soundMusicCreationCount);

            return ret;
        }
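
A hedged usage sketch of this factory method; the AudioEngine construction and the Play call below are assumptions about the surrounding API and are not shown in the snippet:

        // Usage sketch (assumed: AudioEngine has an accessible constructor and SoundMusic exposes a Play() method).
        using (var engine = new AudioEngine())
        using (var stream = File.OpenRead("music.wav"))      // illustrative file name
        {
            var music = SoundMusic.Load(engine, stream);     // throws if engine/stream is null or engine is disposed
            music.Play();                                    // assumed playback entry point
        }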
Example #3
        /// <summary>
        /// Create and load a sound effect from an input wav stream.
        /// </summary>
        /// <param name="engine">The audio engine in which to create the sound effect</param>
        /// <param name="stream">A stream corresponding to a wav file.</param>
        /// <returns>A new SoundEffect instance ready to be played</returns>
        /// <exception cref="ArgumentNullException"><paramref name="engine"/> or <paramref name="stream"/> is null.</exception>
        /// <exception cref="NotSupportedException">The wave file has more than 2 channels or is not encoded in 16-bit PCM.</exception>
        /// <exception cref="InvalidOperationException">The content of the stream does not correspond to a valid wave file.</exception>
        /// <exception cref="OutOfMemoryException">There is not enough memory to load the specified file.</exception>
        /// <exception cref="ObjectDisposedException">The audio engine has already been disposed.</exception>
        /// <remarks>Only the 16-bit PCM WAV format is supported.</remarks>
        public static SoundEffect Load(AudioEngine engine, Stream stream)
        {
            if (engine == null)
            {
                throw new ArgumentNullException("engine");
            }

            if (stream == null)
            {
                throw new ArgumentNullException("stream");
            }

            if (engine.IsDisposed)
            {
                throw new ObjectDisposedException("Audio Engine");
            }

            // create a native memory stream to extract the lz4 audio stream.
            var newSdEff = new SoundEffect(engine)
            {
                nativeDataBuffer = Utilities.AllocateMemory((int)stream.Length)
            };

            var nativeStream = new NativeMemoryStream(newSdEff.nativeDataBuffer, stream.Length);

            stream.CopyTo(nativeStream);
            nativeStream.Position = 0;

            var waveStreamReader = new SoundStream(nativeStream);
            var waveFormat       = waveStreamReader.Format;

            if (waveFormat.Channels > 2)
            {
                throw new NotSupportedException("The wave file contains more than 2 data channels. Only mono and stereo formats are currently supported.");
            }

            if (waveFormat.Encoding != WaveFormatEncoding.Pcm || waveFormat.BitsPerSample != 16)
            {
                throw new NotSupportedException("The wave file audio format is not supported. Only 16bits PCM encoded formats are currently supported.");
            }

            newSdEff.WaveFormat   = waveFormat;
            newSdEff.WaveDataPtr  = newSdEff.nativeDataBuffer + (int)nativeStream.Position;
            newSdEff.WaveDataSize = (int)waveStreamReader.Length;
            newSdEff.Name         = "Sound Effect " + soundEffectCreationCount;

            newSdEff.AdaptAudioDataImpl();

            // register the sound to the AudioEngine so that it will be properly freed if AudioEngine is disposed before this.
            engine.RegisterSound(newSdEff);

            Interlocked.Increment(ref soundEffectCreationCount);

            return newSdEff;
        }
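
A hedged usage sketch; the AudioEngine construction and the Play call are assumptions about the surrounding API, and the WAV data must be 16-bit PCM with at most two channels, as enforced above:

        // Usage sketch for SoundEffect.Load (only 16-bit PCM, mono or stereo WAV data is accepted).
        using (var engine = new AudioEngine())                // assumed constructor
        using (var wav = File.OpenRead("explosion.wav"))      // illustrative 16-bit PCM file
        {
            var effect = SoundEffect.Load(engine, wav);       // throws NotSupportedException for other formats
            effect.Play();                                    // assumed fire-and-forget playback helper
        }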
Example #4
        /// <summary>
        /// Create a dynamic sound effect instance with the given sound properties.
        /// </summary>
        /// <param name="engine">The engine in which the dynamicSoundEffectInstance is created</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz), of audio content. Must between 8000 Hz and 48000 Hz</param>
        /// <param name="channels">Number of channels in the audio data.</param>
        /// <param name="encoding">Encoding of a sound data sample</param>
        /// <returns>A new DynamicSoundEffectInstance instance ready to filled with data and then played</returns>
        /// <exception cref="ArgumentOutOfRangeException">This exception is thrown for one of the following reason:
        /// <list type="bullet">
        /// <item>The value specified for sampleRate is less than 8000 Hz or greater than 48000 Hz. </item>
        /// <item>The value specified for channels is something other than mono or stereo. </item>
        /// <item>The value specified for data encoding is something other than 8 or 16 bits. </item>
        /// </list>
        ///  </exception>
        /// <exception cref="ArgumentNullException"><paramref name="engine"/> is null.</exception>
        public DynamicSoundEffectInstance(AudioEngine engine, int sampleRate, AudioChannels channels, AudioDataEncoding encoding)
            : base(engine)
        {
            if (engine == null)
            {
                throw new ArgumentNullException("engine");
            }

            if (sampleRate < 8000 || 48000 < sampleRate)
            {
                throw new ArgumentOutOfRangeException("sampleRate");
            }

            if (channels != AudioChannels.Mono && channels != AudioChannels.Stereo)
            {
                throw new ArgumentOutOfRangeException("channels");
            }

            if (encoding != AudioDataEncoding.PCM_8Bits && encoding != AudioDataEncoding.PCM_16Bits)
            {
                throw new ArgumentOutOfRangeException("encoding");
            }

            waveFormat = new WaveFormat(sampleRate, (int)encoding, (int)channels);

            Interlocked.Increment(ref totalNbOfInstances);
            Interlocked.Increment(ref numberOfInstances);

            // first instance of dynamic sound effect instance => we create the workerThead and the associated event.
            if (numberOfInstances == 1)
            {
                instancesNeedingBuffer = new ThreadSafeQueue <DynamicSoundEffectInstance>(); // to be sure that there is no remaining request from previous sessions
                awakeWorkerThread      = new AutoResetEvent(false);
                CreateWorkerThread();
            }

            Name = "Dynamic Sound Effect Instance - " + totalNbOfInstances;

            CreateVoice(WaveFormat);

            InitializeDynamicSound();

            AudioEngine.RegisterSound(this);

            ResetStateToDefault();
        }
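
A hedged usage sketch of the constructor's contract, assuming an existing AudioEngine named engine; SubmitBuffer and Play mirror the XNA-style dynamic-audio API this class appears to follow and are assumptions, not members shown in the snippet:

        // Usage sketch: 44.1 kHz, stereo, 16-bit PCM -- all values inside the documented ranges.
        var dynamicSound = new DynamicSoundEffectInstance(engine, 44100, AudioChannels.Stereo, AudioDataEncoding.PCM_16Bits);
        var oneSecondOfSilence = new byte[44100 * 2 * 2];     // stereo, 16-bit => 4 bytes per frame
        dynamicSound.SubmitBuffer(oneSecondOfSilence);        // assumed buffer-submission method
        dynamicSound.Play();                                   // assumed playback start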