Example No. 1
        public AudioCase(AudioClip clip, AudioSource source, AudioType type, AudioCallback callback = null)
        {
            this.source = source;
            this.type   = type;

            this.source.clip = clip;
        }
Example No. 2
    private IEnumerator DelayedCallback(float time, AudioCallback callback)
    {
        yield return(StartCoroutine(WaitForRealSeconds(time)));

        //yield return new WaitForSeconds(time);
        callback();
    }
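In the Unity-flavored snippets of this listing, callback() is always invoked with no arguments, so the AudioCallback type they share is presumably a plain void delegate; its declaration never appears in these examples. A minimal sketch of that assumed shape plus a hypothetical call site (audioManager and introClip are invented names):

    // Assumed delegate shape, matching how callback() is invoked above.
    public delegate void AudioCallback();

    // Hypothetical call site for the PlaySoundWithCallback examples further down:
    // play a clip, then run the lambda once the clip's length has elapsed.
    audioManager.PlaySoundWithCallback(introClip, () => Debug.Log("Intro finished"));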
Example No. 3
        public SDLAudio()
        {
            this.on = Settings.Audio_PlaybackEnabled;
            if (on)
            {
                try {
                    Mixer.Initialize();
                    // TODO: find another way to adjust volume
                    this.Volume = Settings.Audio_PlaybackVolume;

                    callback     = new AudioCallback(Signed16BigCallback);
                    audio_stream = new AudioStream(Settings.Audio_PlaybackFrequency,
                                                   AudioFormat.Signed16Big,
                                                   SoundChannel.Mono, 2205, callback, "My Nes Sound");

                    samples_buffer = new short[samples_count];

                    Mixer.OpenAudio(audio_stream);

                    audio_stream.Paused = true;
                    r_pos = w_pos = 0;
                } catch (Exception ex) {
                    Console.WriteLine(ex.ToString());
                }
            }
            recorder = new WaveRecorder();
        }
Example No. 4
		public SDLAudio ()
		{
			this.on = Settings.Audio_PlaybackEnabled;
			if (on) {
				try {
					Mixer.Initialize ();
					// TODO: find another way to adjust volume
					this.Volume = Settings.Audio_PlaybackVolume;

					callback = new AudioCallback (Signed16BigCallback);
					audio_stream = new AudioStream (Settings.Audio_PlaybackFrequency,
					                                AudioFormat.Signed16Big,
					                                SoundChannel.Mono, 2205, callback, "My Nes Sound");

					samples_buffer = new short[samples_count];

					Mixer.OpenAudio (audio_stream);
                   
					audio_stream.Paused = true;
					r_pos = w_pos = 0;
                    
				} catch (Exception ex) {
					Console.WriteLine (ex.ToString ());
				}
			}
			recorder = new WaveRecorder ();
		}
Example No. 5
        /// <summary>
        /// Creates an AudioStream
        /// </summary>
        /// <param name="sampleFrequency">Frequency</param>
        /// <param name="format">format of stream data</param>
        /// <param name="channels">Mono or Stereo</param>
        /// <param name="samples">number of samples</param>
        /// <param name="callback">Method callback to get more data</param>
        /// <param name="data">data object</param>
        public AudioStream(int sampleFrequency, AudioFormat format, SoundChannel channels, short samples, AudioCallback callback, object data)
        {
            this.samples = samples;
            this.queue = new Queue<short[]>(5);
            this.sampleFrequency = sampleFrequency;

            // To keep compiler happy, we must 'initialize' these values
            spec.padding = 0;
            spec.size = 0;
            spec.silence = 0;

            spec.freq = sampleFrequency;
            spec.format = (short)format;
            spec.channels = (byte)channels;
            if (callback != null)
            {
                spec.callback = Marshal.GetFunctionPointerForDelegate(callback);
            }
            spec.samples = samples;
            spec.userdata = data;
            if (((ushort)spec.format & 0x8000) != 0x8000)    // signed
            {
                this.offset = 2 << ((byte)spec.format - 2);
                //this.offset = 0;
            }
            //else
            //{
            //    this.offset = 2 << ((byte)spec.format - 2);
            //}
        }
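A usage sketch tying this constructor to the Mixer calls from Example No. 3. The callback signature (IntPtr userData, IntPtr stream, int len) is an assumption modeled on SDL-style audio callbacks and is not confirmed by these snippets; the concrete numbers are illustrative only.

    // Assumed callback signature: fill 'stream' with 'len' bytes of PCM data on demand.
    static void FillBuffer(IntPtr userData, IntPtr stream, int len)
    {
        short[] samples = new short[len / 2];
        // ... synthesize or copy audio into 'samples' here ...
        Marshal.Copy(samples, 0, stream, samples.Length);
    }

    // Hypothetical setup mirroring Example No. 3: open the stream, then unpause to start playback.
    Mixer.Initialize();
    AudioStream stream = new AudioStream(44100, AudioFormat.Signed16Big, SoundChannel.Mono,
                                         2205, new AudioCallback(FillBuffer), "demo");
    Mixer.OpenAudio(stream);
    stream.Paused = false;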
Example No. 6
 private static IEnumerator WaitForComplete(AudioSource source, AudioCallback callback)
 {
     while (source.isPlaying)
     {
         yield return(null);
     }
     callback.Stop();
 }
Example No. 7
        public AudioCase OnComplete(AudioCallback callback)
        {
            onAudioEnded = callback;

            endCoroutine = Tween.InvokeCoroutine(OnAudioEndCoroutine(source.clip.length));

            return(this);
        }
Example No. 8
 public static void CreateAudioManager()
 {
     if (null == obj)
     {
         obj = new GameObject(CALLBACK_COMPONENT_NAME);
     }
     obj.AddComponent <AudioCallbackWrapper>();
     UnityEngine.Object.DontDestroyOnLoad(obj);
     m_AudioCallBack = AudioCallback.Instance;
 }
Example No. 9
        public static WinAudioCallback SetCallback(AudioCallback AC)
        {
            int mix;

            WinAudioCallback wac = new WinAudioCallback(AC);

            mixerOpen(out mix, 0, wac.Handle, 0, CALLBACK_WINDOW);

            return(wac);
        }
Example No. 10
 /// <summary>
 /// Creates an AudioStream
 /// </summary>
 /// <param name="sampleFrequency">Frequency</param>
 /// <param name="format">format of stream data</param>
 /// <param name="channels">Mono or Stereo</param>
 /// <param name="samples">number of samples</param>
 public AudioStream(int sampleFrequency, AudioFormat format, SoundChannel channels, short samples)
     : this(sampleFrequency, format, channels, samples, null, null)
 {
     if (format != AudioFormat.Unsigned16Little)
     {
         throw new AudioException(Events.StringManager.GetString("SupportedAudioFormats"));
     }
     callback      = new AudioCallback(Unsigned16LittleStream);
     spec.callback = Marshal.GetFunctionPointerForDelegate(callback);
 }
Example No. 11
        public static T OnStart <T>(this T _container, AudioCallback _onStart) where T : BaseAudio
        {
            if (_container == null)
            {
                return(_container);
            }

            _container.onStart = _onStart;

            return(_container);
        }
Example No. 12
        public static T OnEveryComplete <T>(this T _container, AudioCallback _onEveryComplete) where T : BaseAudio
        {
            if (_container == null)
            {
                return(_container);
            }

            _container.onEveryComplete = _onEveryComplete;

            return(_container);
        }
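Since both extension methods hand the container back, calls can be chained fluently. A hypothetical example, assuming AudioCallback is a parameterless delegate (as the other snippets suggest) and music is some BaseAudio instance; neither comes from the source:

    // Hypothetical chained configuration; the lambdas convert to AudioCallback.
    music.OnStart(() => Debug.Log("music started"))
         .OnEveryComplete(() => Debug.Log("loop finished"));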
Example No. 13
    public void PlaySoundWithCallback(SoundClipId clipId, AudioCallback callback)
    {
        PlayBGSoundClip(clipId);

        if (audioCallBackCoroutine != null)
        {
            StopCoroutine(audioCallBackCoroutine);
        }

        audioCallBackCoroutine = StartCoroutine(DelayedCallback(bgm.clip.length, callback));
    }
Example No. 14
 static public int get_onMusicVolumeChanged(IntPtr l)
 {
     try {
         AudioCallback self = (AudioCallback)checkSelf(l);
         pushValue(l, true);
         pushValue(l, self.onMusicVolumeChanged);
         return(2);
     }
     catch (Exception e) {
         return(error(l, e));
     }
 }
Example No. 15
 static public int constructor(IntPtr l)
 {
     try {
         AudioCallback o;
         o = new AudioCallback();
         pushValue(l, true);
         pushValue(l, o);
         return(2);
     }
     catch (Exception e) {
         return(error(l, e));
     }
 }
Example No. 16
    private static AudioCallback PlaySoundUsing(AudioSource source, AudioClip clip, AudioContext context, Vector3 position)
    {
        source.clip = clip;
        source.transform.position = position;
        source.Apply(context);
        source.time = 0f;
        source.Play();
        freeVoices.Remove(source);

        AudioCallback callback = new AudioCallback(source, clip, context);

        // return to free voices when done
        instance.StartCoroutine(WaitForComplete(source, callback));

        return(callback);
    }
Example No. 17
        int ISoundPcmDriver.Open(AudioCallback callback, ToneChannel tc)
        {
            (this as ISoundPcmDriver).Lock();

            Sdl2SoundChannel channel = new Sdl2SoundChannel();

            channel.Handle = this.CreateHandle();
            if (channel.Handle == 0)
            {
                return(0);
            }

            channel.Callback    = callback;
            channel.ToneChannel = tc;
            channel.Available   = 1;

            this.channels.Add(channel);

            (this as ISoundPcmDriver).Unlock();

            return(channel.Handle);
        }
Example No. 18
 public void RegisterCallback(AudioCallback callback)
 {
     _callback = callback;
     Mixers.Playback.MixerLineChanged += new WaveLib.AudioMixer.Mixer.MixerLineChangeHandler(RunCallback);
 }
Example No. 19
 private void PlayDialogClip(AudioClip clip, AudioCallback callback)
 {
     AudioSource.PlayClipAtPoint (clip, transform.position);
     StartCoroutine (DelayedCallback (clip.length, callback));
 }
Example No. 20
 private System.Collections.IEnumerator DelayedCallback(float para_time, AudioCallback para_callback)
 {
     yield return new WaitForSeconds(para_time);
     para_callback();
 }
Example No. 21
        /// <summary>
        /// Creates an AudioStream
        /// </summary>
        /// <param name="sampleFrequency">Frequency</param>
        /// <param name="format">format of stream data</param>
        /// <param name="channels">Mono or Stereo</param>
        /// <param name="samples">number of samples</param>
        /// <param name="callback">Method callback to get more data</param>
        /// <param name="data">data object</param>
        public AudioStream(int sampleFrequency, AudioFormat format, SoundChannel channels, short samples, AudioCallback callback, object data)
        {
            this.samples         = samples;
            this.queue           = new Queue <short[]>(5);
            this.sampleFrequency = sampleFrequency;

            // To keep compiler happy, we must 'initialize' these values
            spec.padding = 0;
            spec.size    = 0;
            spec.silence = 0;

            spec.freq     = sampleFrequency;
            spec.format   = (short)format;
            spec.channels = (byte)channels;
            if (callback != null)
            {
                spec.callback = Marshal.GetFunctionPointerForDelegate(callback);
            }
            spec.samples  = samples;
            spec.userdata = data;
            if (((ushort)spec.format & 0x8000) != 0x8000)    // signed
            {
                this.offset = 2 << ((byte)spec.format - 2);
                //this.offset = 0;
            }
            //else
            //{
            //    this.offset = 2 << ((byte)spec.format - 2);
            //}
        }
Example No. 22
 public void RegisterCallback(AudioCallback callback)
 {
     _callback = callback;
     Mixers.Playback.MixerLineChanged += new WaveLib.AudioMixer.Mixer.MixerLineChangeHandler(RunCallback);
 }
Example No. 23
        /// <summary>
        /// Used to map a spoken command to a callback.  When the command is recognized, 
        /// the callback is invoked.
        /// </summary>
        /// <param name="name">The audio command to listen for.</param>
        /// <param name="callBack">The method to invoke when the spoken command is recognized.</param>
        public void Subscribe(string name, AudioCallback callBack)
        {
            if (string.IsNullOrEmpty(name)) {
                throw new ArgumentException("Name argument was either null or empty.", "name");
            }

            if (callBack == null) {
                throw new ArgumentException("Callback argument was null.", "callback");
            }

            if (!this.AudioCallbacks.ContainsKey(name)) {
                AudioCallbacks.Add(name, callBack);

                // To update the grammar the recognizer needs to be restarted
                if (Status == RecognizerStatus.Running || Status == RecognizerStatus.Error) {
                    Stop();
                    Start();
                }
            }
            else {
                throw new ArgumentException("A key already exists with the name " + name + ".", "name");
            }
        }
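A hypothetical subscription, assuming AudioCallback can wrap a parameterless handler and that recognizer is an instance of the class defining Subscribe (neither name appears in the snippet):

    // Map the spoken word "pause" to a handler; Subscribe restarts the recognizer
    // internally so the updated grammar takes effect.
    recognizer.Subscribe("pause", new AudioCallback(OnPauseSpoken));

    static void OnPauseSpoken()
    {
        Console.WriteLine("Heard: pause");
    }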
Example No. 24
    public void PlayVibrationWithCallback(AudioCallback callback)
    {
        //spt_playerControls.controllerVibration("Both", vibeForce, vibeTime);
        spt_victoryListener.vibrationz = true;
        spt_victoryListener.vibrationForce = vibeForce;
        spt_victoryListener.vibrationTime = vibeTime;
        spt_victoryListener.Both = true;

        StartCoroutine(DelayedCallback(vibeTime, callback));
    }
Example No. 25
 /// <summary>
 /// Creates an AudioStream
 /// </summary>
 /// <param name="sampleFrequency">Frequency</param>
 /// <param name="format">format of stream data</param>
 /// <param name="channels">Mono or Stereo</param>
 /// <param name="samples">number of samples</param>
 public AudioStream(int sampleFrequency, AudioFormat format, SoundChannel channels, short samples)
     : this(sampleFrequency, format, channels, samples, null, null)
 {
     if (format != AudioFormat.Unsigned16Little)
     {
         throw new AudioException(Events.StringManager.GetString("SupportedAudioFormats"));
     }
     callback = new AudioCallback(Unsigned16LittleStream);
     spec.callback = Marshal.GetFunctionPointerForDelegate(callback);
 }
Example No. 26
 public void addAudioCallback(AudioCallback callback)
 {
     this.callbacks.Add(callback);
     //Debug.Log(callback);
 }
Example No. 27
 public void removeAudioCallback(AudioCallback callback)
 {
     this.callbacks.Remove(callback);
 }
Example No. 28
 public void PlaySoundWithCallback(AudioSource clip, AudioCallback callback)
 {
     clip.Play();
     StartCoroutine(DelayedCallback(40, callback));
 }
Example No. 29
        private void Go()
        {
            if (File.Exists(fileName))
            {
                filePath = "";
                fileDirectory = "";
            }
            else if (File.Exists(Path.Combine(fileDirectory, fileName)))
            {
                filePath = "";
            }

            string file = Path.Combine(Path.Combine(filePath, fileDirectory), fileName);

            textDisplay = new TextSprite(" ", new SdlDotNet.Graphics.Font(file, 20), Color.Red);
            Video.WindowIcon();
            Video.WindowCaption = "SDL.NET - StreamingAudio";
            screen = Video.SetVideoMode(width, height);

            switch (streamChoice)
            {
                case StreamChoice.InternalCallback:
                    stream = new AudioStream(playbackFreq, AudioFormat.Unsigned16Little, SoundChannel.Mono, samples);
                    buffer16 = new short[samples];
                    break;
                case StreamChoice.CustomCallback:
                    callback = new AudioCallback(Unsigned16LittleCallback);
                    stream = new AudioStream(playbackFreq, AudioFormat.Unsigned16Little, SoundChannel.Mono, samples, callback, "Hello World");
                    buffer16 = new short[samples];
                    break;
                case StreamChoice.CustomCallback8Bit:
                    callback = new AudioCallback(Unsigned8Callback);
                    stream = new AudioStream(playbackFreq, AudioFormat.Unsigned8, SoundChannel.Mono, samples, new AudioCallback(Unsigned8Callback), "Hello World");
                    buffer8 = new byte[samples];
                    break;
                default:
                    stream = new AudioStream(playbackFreq, AudioFormat.Unsigned16Little, SoundChannel.Mono, samples);
                    buffer16 = new short[samples];
                    break;
            }

            offset = stream.Offset;
            volume = 0.9 * 32768;

            buffer16 = new short[samples];

            osc.Rate = 20;
            osc.Amplitude = 1;

            osc2.Rate = 3;
            osc2.Amplitude = 10;

            stream.Paused = false;
            textDisplay.Text = SdlDotNetExamplesBrowser.StringManager.GetString(
                        "StreamingAudioDirections", CultureInfo.CurrentUICulture);
            textDisplay.TextWidth = 350;
            Events.Run();
        }
Example No. 30
 private void PlayDialogClip(AudioClip clip, AudioCallback callback)
 {
     AudioSource.PlayClipAtPoint(clip, transform.position);
     StartCoroutine(DelayedCallback(clip.length, callback));
 }
Example No. 31
 public void PlaySoundWithCallback(AudioClip clip, float endVolume, AudioCallback callback)
 {
     audioSource.GetComponent<AudioSource>().PlayOneShot(audioSource.clip, endVolume);
     StartCoroutine(DelayedCallback(audioSource.clip.length, callback));
 }
Example No. 32
 public void PlaySoundWithCallback(AudioClip clip, AudioCallback callback)
 {
     gameObject.GetComponent <AudioSource>().PlayOneShot(clip);
     StartCoroutine(DelayedCallback(clip.length, callback));
 }
Example No. 33
 public void PlaySoundWithCallback(AudioClip clip, AudioCallback callback)
 {
     audio.PlayOneShot(clip);
     StartCoroutine(DelayedCallback(clip.length, callback));
 }
Example No. 34
 public void RegisterCallback(AudioCallback callback)
 {
     _callback = callback;
 }
Example No. 35
 public void RegisterCallback(AudioCallback callback)
 {
     _callback = callback;
     Mixers.Playback.MixerLineChanged  += RunCallback;
     Mixers.Recording.MixerLineChanged += RunMicroCallback;
 }
Example No. 36
 public void RegisterCallback(AudioCallback callback)
 {
     _callback = callback;
 }
Example No. 37
 private WinAudioCallback(AudioCallback AC)
 {
     this.ac = AC;
     this.CreateHandle(new CreateParams());
 }
Example No. 38
 private static extern void audio_subscribe(IntPtr handle, AudioCallback callback);
Example No. 39
 private IEnumerator DelayedCallback(float time, AudioCallback callback)
 {
     yield return new WaitForSeconds(time);
     callback();
 }
Example No. 40
 /// <summary>
 /// Create a new low-level audio device
 /// </summary>
 /// <param name="numSamples">Number of samples per second</param>
 /// <param name="bitsPerSample">Number of bits per sample</param>
 /// <param name="numChannels">Number of channels</param>
 /// <param name="blockAlign">Block align</param>
 /// <param name="avgBps">Average bytes per second</param>
 public AudioDevice(int numSamples, uint bitsPerSample, uint numChannels, uint blockAlign, uint avgBps)
 {
     handle   = audio_open(numSamples, bitsPerSample, numChannels, blockAlign, avgBps);
     callback = CallbackFun;
     audio_subscribe(handle, callback);
 }
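Hypothetical parameter values for 16-bit mono PCM at 44.1 kHz, using the usual WAVE relationships blockAlign = channels * bytesPerSample and avgBps = sampleRate * blockAlign; the values are illustrative, not taken from the source:

    // 44.1 kHz, 16-bit, mono: blockAlign = 1 * 2 = 2 bytes, avgBps = 44100 * 2 = 88200.
    AudioDevice device = new AudioDevice(44100, 16, 1, 2, 88200);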
Example No. 41
 public void PlaySoundWithCallback(AudioClip clip, float endVolume, AudioCallback callback)
 {
     audioSource.audio.PlayOneShot(audioSource.clip, endVolume);
     StartCoroutine(DelayedCallback(audioSource.clip.length, callback));
 }
Example No. 42
        /// <summary>
        /// Opens the media by initializing the DirectShow graph
        //
        //                                          +----------------+          +----------------+       +-----------------------+
        //     +---------------------+              | LavVideo       |          | VobSub         |       | EVR+CustomPresenter   |
        //     | LavSplitterSource   |              |----------------|          |----------------|       |-----------------------|
        //     +---------------------+              |                |          |                |       |                       |
        //     |                     |              |                |          |                |       |    VIDEO              |
        //     |             video  +|->+---------+<-+ IN       OUT +->+------+<-+ VID_IN   OUT +-> +-+ <-+   RENDERER           |
        //     |                     |              +----------------+          |                |       |                       |
        //     |             audio  +|->+------+                                |                |       +-----------------------+
        //     |                     |         |    +----------------+      +-+<-+ TXT_IN        |
        //     |          subtitle  +|->+--+   |    | LavAudio       |      |   |                |
        //     +---------------------+    |   |    |----------------|      |   +----------------+       +-----------------------+
        //                                 |   |    |                |      |                            | DShow output device   |
        //                                 |   |    |                |     xxx                           |-----------------------|
        //                                 |   +--+<-+ IN       OUT +->+--x | x-----------------------+  |                       |
        //                                 |        +----------------+      |                            |    AUDIO              |
        //                                 |                                |                           <-+   RENDERER           |
        //                                 |                                |                            |                       |
        //                                 +--------------------------------+                            +-----------------------+
        //
        /// </summary>
//        protected virtual void OpenSource()
//        {
//            /* Make sure we clean up any remaining mess */
//            //if (m_graph != null) RemoveAllFilters(m_graph);
//            FreeResources();

//            if (m_sourceUri == null)
//                return;

//            string fileSource = m_sourceUri.OriginalString;

//            if (string.IsNullOrEmpty(fileSource))
//                return;

//            try
//            {
//                if (m_graph != null) Marshal.ReleaseComObject(m_graph);

//                /* Creates the GraphBuilder COM object */
//                m_graph = new FilterGraphNoThread() as IGraphBuilder;

//                if (m_graph == null)
//                    throw new Exception("Could not create a graph");

//                /* Add our preferred audio renderer */
//                var audioRenderer = InsertAudioRenderer(AudioRenderer);
//                if (_audioRenderer != null) Marshal.ReleaseComObject(_audioRenderer);
//                _audioRenderer = audioRenderer;

//                if ((System.Environment.OSVersion.Platform == PlatformID.Win32NT &&
//                (System.Environment.OSVersion.Version.Major == 5)))
//                    VideoRenderer = VideoRendererType.VideoMixingRenderer9;

//                IBaseFilter renderer = CreateVideoRenderer(VideoRenderer, m_graph, 2);
//                if (_renderer != null) Marshal.ReleaseComObject(_renderer);
//                _renderer = renderer;
//                //if (_renderer != null)
//                //    m_graph.AddFilter((IBaseFilter)_renderer, "Renderer");

//                var filterGraph = m_graph as IFilterGraph2;

//                if (filterGraph == null)
//                    throw new Exception("Could not QueryInterface for the IFilterGraph2");

//                ILAVAudioSettings lavAudioSettings;
//                ILAVAudioStatus lavStatus;
//                IBaseFilter audioDecoder = FilterProvider.GetAudioFilter(out lavAudioSettings, out lavStatus);
//                if (audioDecoder != null)
//                {
//                    if (_audio != null) Marshal.ReleaseComObject(_audio);
//                    _audio = audioDecoder;
//                    lavAudioSettings.SetRuntimeConfig(true);
//                    m_graph.AddFilter((IBaseFilter)_audio, "LavAudio");
//                }

//                ILAVSplitterSettings splitterSettings;
//                IFileSourceFilter splitter = FilterProvider.GetSplitterSource(out splitterSettings);
//                //IBaseFilter splitter = FilterProvider.GetSplitter(out splitterSettings);

//                if (splitter != null)
//                {
//                    splitter.Load(fileSource, null);
//                    if (_splitter != null) Marshal.ReleaseComObject(_splitter);
//                    _splitter = splitter;
//                    splitterSettings.SetRuntimeConfig(true);
//                    m_graph.AddFilter((IBaseFilter)splitter, "LavSplitter");
//                }

//                int hr = 0;


//                /* We will want to enum all the pins on the source filter */
//                IEnumPins pinEnum;

//                hr = ((IBaseFilter)splitter).EnumPins(out pinEnum);
//                DsError.ThrowExceptionForHR(hr);

//                IntPtr fetched = IntPtr.Zero;
//                IPin[] pins = { null };

//                /* Counter for how many pins successfully rendered */


//                if (VideoRenderer == VideoRendererType.VideoMixingRenderer9)
//                {
//                    var mixer = renderer as IVMRMixerControl9;

//                    if (mixer != null)
//                    {
//                        VMR9MixerPrefs dwPrefs;
//                        mixer.GetMixingPrefs(out dwPrefs);
//                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
//                        dwPrefs |= VMR9MixerPrefs.RenderTargetRGB;
//                        //mixer.SetMixingPrefs(dwPrefs);
//                    }
//                }

//                // Test using FFDShow Video Decoder Filter
//                ILAVVideoSettings lavVideoSettings;
//                IBaseFilter lavVideo = FilterProvider.GetVideoFilter(out lavVideoSettings);
//                if (_video != null) Marshal.ReleaseComObject(_video);
//                _video = lavVideo;

//                IBaseFilter vobSub = FilterProvider.GetVobSubFilter();

//                if (vobSub != null)
//                {
//                    m_graph.AddFilter(vobSub, "VobSub");
//                    IDirectVobSub vss = vobSub as IDirectVobSub;
//                    if (_vobsub != null) Marshal.ReleaseComObject(_vobsub);
//                    _vobsub = vss;
//                    InitSubSettings();
//                }

//                if (lavVideo != null)
//                {
//                    lavVideoSettings.SetRuntimeConfig(true);
//                    m_graph.AddFilter(lavVideo, "LavVideo");
//                }

//                int ret;

//                IBaseFilter dcDsp = FilterProvider.GetDCDSPFilter();
//                if (dcDsp != null)
//                {
//                    _dspFilter = (IDCDSPFilterInterface)dcDsp;

//                    //hr = i.set_PCMDataBeforeMainDSP(true);
//                    hr = m_graph.AddFilter((IBaseFilter)dcDsp, "VobSub");

//                    ret = m_graph.Connect(DsFindPin.ByName((IBaseFilter)splitter, "Audio"), DsFindPin.ByDirection(audioDecoder, PinDirection.Input, 0));
//                    ret = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)audioDecoder, PinDirection.Output, 0), DsFindPin.ByDirection(_dspFilter, PinDirection.Input, 0));
//                    ret = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_dspFilter, PinDirection.Output, 0), DsFindPin.ByDirection(audioRenderer, PinDirection.Input, 0));

//                    //bool d = false;
//                    //int delay = 0;
//                    //hr = i.get_EnableDelay(ref d);
//                    int cnt = 0;
//                    object intf = null;
//                    //hr = i.set_EnableDelay(true);
//                    //hr = i.set_Delay(0);
//                    hr = _dspFilter.set_AddFilter(0, TDCFilterType.ftEqualizer);
//                    hr = _dspFilter.get_FilterCount(ref cnt);
//                    hr = _dspFilter.get_FilterInterface(0, out intf);
//                    _equalizer = (IDCEqualizer)intf;
//                    hr = _dspFilter.set_AddFilter(0, TDCFilterType.ftDownMix);
//                    hr = _dspFilter.get_FilterInterface(0, out intf);
//                    _downmix = (IDCDownMix)intf;
//                    hr = _dspFilter.set_AddFilter(0, TDCFilterType.ftAmplify);
//                    hr = _dspFilter.get_FilterInterface(0, out intf);
//                    _amplify = (IDCAmplify)intf;

//                    _equalizer.set_Seperate(false);
//                }

//                bool subconnected = false;
//                ret = m_graph.Connect(DsFindPin.ByName((IBaseFilter)splitter, "Video"), DsFindPin.ByDirection(lavVideo, PinDirection.Input, 0));
//                ret = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)lavVideo, PinDirection.Output, 0), DsFindPin.ByDirection(vobSub, PinDirection.Input, 0));
//                if (ret == 0)
//                {
//                    int lc;
//                    ((IDirectVobSub)vobSub).get_LanguageCount(out lc);
//                    subconnected = (lc != 0);
//                    IPin pn = DsFindPin.ByName((IBaseFilter)splitter, "Subtitle");
//                    if (pn != null)
//                    {
//                        ret = m_graph.Connect(pn, DsFindPin.ByDirection(vobSub, PinDirection.Input, 1));
//                        ((IDirectVobSub)vobSub).get_LanguageCount(out lc);
//                        subconnected = (lc != 0);
//                    }
//                    ret = m_graph.Connect(DsFindPin.ByDirection(vobSub, PinDirection.Output, 0),
//                                          DsFindPin.ByDirection(renderer, PinDirection.Input, 0));
//                }
//                else
//                {
//                    ret = m_graph.Connect(DsFindPin.ByDirection(lavVideo, PinDirection.Output, 0),
//                                      DsFindPin.ByDirection(renderer, PinDirection.Input, 0));
//                }

//                /* Loop over each pin of the source filter */
//                while (pinEnum.Next(pins.Length, pins, fetched) == 0)
//                {
//                    IPin cTo;
//                    pins[0].ConnectedTo(out cTo);
//                    if (cTo == null)
//                    {
//                        // this should not happen if the filter graph is manually connected properly
//                        ret = filterGraph.RenderEx(pins[0], AMRenderExFlags.RenderToExistingRenderers, IntPtr.Zero);
//                    }
//                    else
//                    {
//                        Marshal.ReleaseComObject(cTo);
//                    }
//                    Marshal.ReleaseComObject(pins[0]);
//                }

//                if (lavVideoSettings != null)
//                {
//                    if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_CUDA) != 0)
//                    {
//                        ret = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_CUDA);
//                    }
//                    else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_QuickSync) != 0)
//                    {
//                        ret = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_QuickSync);
//                    }
//                    else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2Native) != 0)
//                    {
//                        ret = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2Native);
//                    }
//                    else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2) != 0)
//                    {
//                        ret = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2);
//                    }
//                    else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2CopyBack) != 0)
//                    {
//                        ret = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2CopyBack);
//                    }
//                }

//                //hr = m_graph.RenderFile(fileSource, null);

//                Marshal.ReleaseComObject(pinEnum);

//                IAMStreamSelect selector = splitter as IAMStreamSelect;
//                int numstreams;
//                selector.Count(out numstreams);
//                AMMediaType mt;
//                AMStreamSelectInfoFlags fl;
//                SubtitleStreams.Clear();
//                VideoStreams.Clear();
//                AudioStreams.Clear();
//                for (int i = 0; i < numstreams; i++)
//                {
//                    int lcid;
//                    int group;
//                    string name;
//                    object o, o2;
//                    selector.Info(i, out mt, out fl, out lcid, out group, out name, out o, out o2);
//                    switch (group)
//                    {
//                        case 0:
//                            VideoStreams.Add(name);
//                            break;
//                        case 1:
//                            AudioStreams.Add(name);
//                            break;
//                        case 2:
//                            SubtitleStreams.Add(name);
//                            break;
//                    }

//                    if (o != null) Marshal.ReleaseComObject(o);
//                    if (o2 != null) Marshal.ReleaseComObject(o2);
//                }

//                OnPropertyChanged("SubtitleStreams");
//                OnPropertyChanged("VideoStreams");
//                OnPropertyChanged("AudioStreams");

//                //Marshal.ReleaseComObject(splitter);


//                /* Configure the graph in the base class */
//                SetupFilterGraph(m_graph);

//#if DEBUG
//                /* Adds the GB to the ROT so we can view
//* it in graphedit */
//                m_dsRotEntry = new DsROTEntry(m_graph);
//#endif

//                //if (_splitterSettings != null)
//                //{
//                // Marshal.ReleaseComObject(_splitterSettings);
//                // _splitterSettings = null;
//                //}
//                if (_splitterSettings != null) Marshal.ReleaseComObject(_splitterSettings);
//                _splitterSettings = (ILAVSplitterSettings)splitter;
//                //ret = _splitterSettings.SetRuntimeConfig(true);
//                //if (ret != 0)
//                // throw new Exception("Could not set SetRuntimeConfig to true");

//                //string sss = "*:*";

//                //LAVSubtitleMode mode = LAVSubtitleMode.LAVSubtitleMode_NoSubs;
//                //mode = _splitterSettings.GetSubtitleMode();
//                //if (mode != LAVSubtitleMode.LAVSubtitleMode_Default)
//                // throw new Exception("Could not set GetAdvancedSubtitleConfige");

//                //ret = _splitterSettings.SetSubtitleMode(LAVSubtitleMode.LAVSubtitleMode_Advanced);
//                //if (ret != 1)
//                // throw new Exception("Could not set SetAdvancedSubtitleConfige");

//                //ret = _splitterSettings.SetAdvancedSubtitleConfig(sss);
//                //if (ret != 1)
//                // throw new Exception("Could not set SetAdvancedSubtitleConfige");

//                //sss = "";
//                //ret = _splitterSettings.GetAdvancedSubtitleConfig(out sss);
//                //if (ret != 0)
//                // throw new Exception("Could not set GetAdvancedSubtitleConfige");

//                //IPin sub = DsFindPin.ByDirection((IBaseFilter)splitter, PinDirection.Output, 2);
//                //PinInfo pi;
//                //sub.QueryPinInfo(out pi);
//                SIZE a, b;
//                if ((_displayControl).GetNativeVideoSize(out a, out b) == 0)
//                {
//                    if (a.cx > 0 && a.cy > 0)
//                    {
//                        HasVideo = true;
//                        SetNativePixelSizes(a);
//                    }
//                }

//                if (!subconnected)
//                {
//                    InvokeNoSubtitleLoaded(new EventArgs());
//                }
//                else
//                {
//                    InitSubSettings();
//                }
//                /* Sets the NaturalVideoWidth/Height */
//                //SetNativePixelSizes(renderer);

//                //InvokeMediaFailed(new MediaFailedEventArgs(sss, null));
//            }
//            catch (Exception ex)
//            {
//                /* This exception will usually happen if the media does
//                * not exist or could not be opened because the
//                * proper filters are not installed */
//                FreeResources();

//                /* Fire our failed event */
//                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
//            }
//            finally
//            {
//                string filters = string.Join(Environment.NewLine, EnumAllFilters(m_graph).ToArray());
//                System.Diagnostics.Debug.WriteLine(filters);
//            }
//            InvokeMediaOpened();
//        }
        protected virtual void OpenSource()
        {
            _eqEnabled = false;
            //if (m_graph != null)
            //{
            //    //RemoveAllFilters(m_graph);
            //    Marshal.ReleaseComObject(m_graph);
            //}

            /* Make sure we clean up any remaining mess */
            FreeResources();

            if (m_sourceUri == null)
            {
                return;
            }

            string fileSource = m_sourceUri.OriginalString;

            if (string.IsNullOrEmpty(fileSource))
            {
                return;
            }

            try
            {
                int hr = 0;

                /* Creates the GraphBuilder COM object */
                m_graph = new FilterGraphNoThread() as IGraphBuilder;

                if (_displayControl != null)
                {
                    Marshal.ReleaseComObject(_displayControl);
                    _displayControl = null;
                }

                if (_displayControlVMR != null)
                {
                    Marshal.ReleaseComObject(_displayControlVMR);
                    _displayControlVMR = null;
                }

                if (m_graph == null)
                {
                    throw new Exception("Could not create a graph");
                }

                var filterGraph = m_graph as IFilterGraph2;

                var flt = EnumAllFilters(m_graph).ToList();

                if (filterGraph == null)
                {
                    throw new Exception("Could not QueryInterface for the IFilterGraph2");
                }

                /* Add our preferred audio renderer */
                var audioRenderer = InsertAudioRenderer(AudioRenderer);
                if (audioRenderer != null)
                {
                    if (_audioRenderer != null)
                    {
                        Marshal.ReleaseComObject(_audioRenderer);
                    }
                    _audioRenderer = audioRenderer;
                }

                if ((System.Environment.OSVersion.Platform == PlatformID.Win32NT &&
                     (System.Environment.OSVersion.Version.Major == 5)))
                {
                    VideoRenderer = VideoRendererType.VideoMixingRenderer9;
                }

                if (_presenterSettings != null)
                {
                    Marshal.ReleaseComObject(_presenterSettings);
                }
                if (_renderer != null)
                {
                    Marshal.ReleaseComObject(_renderer);
                }

                IBaseFilter renderer = InsertVideoRenderer(VideoRenderer, m_graph, 1);
                _renderer = renderer;

                ILAVAudioSettings lavAudioSettings;
                ILAVAudioStatus   lavStatus;
                IBaseFilter       audioDecoder = FilterProvider.GetAudioFilter(out lavAudioSettings, out lavStatus);
                if (audioDecoder != null)
                {
                    if (_audio != null)
                    {
                        Marshal.ReleaseComObject(_audio);
                    }
                    _audio         = audioDecoder;
                    _audioStatus   = lavStatus;
                    _audioSettings = lavAudioSettings;

                    hr = (int)lavAudioSettings.SetRuntimeConfig(true);
                    hr = m_graph.AddFilter((IBaseFilter)audioDecoder, "LavAudio");
                    DsError.ThrowExceptionForHR(hr);
#if DEBUG
                    hr = (int)lavAudioSettings.SetTrayIcon(true);
#endif
                }

                ILAVSplitterSettings splitterSettings;
                IFileSourceFilter    splitter = FilterProvider.GetSplitterSource(out splitterSettings);

                if (splitter != null)
                {
                    if (_splitter != null)
                    {
                        Marshal.ReleaseComObject(_splitter);
                    }
                    _splitter = splitter;

                    _splitterSettings = (ILAVSplitterSettings)splitterSettings;

                    hr = splitterSettings.SetRuntimeConfig(true);
                    hr = splitter.Load(fileSource, null);
                    if (hr != 0)
                    {
                        throw new Exception("Playback of this file is not supported!");
                    }
                    hr = m_graph.AddFilter((IBaseFilter)splitter, "LavSplitter");
                    DsError.ThrowExceptionForHR(hr);
                }

                IEnumPins pinEnum;
                hr = ((IBaseFilter)splitter).EnumPins(out pinEnum);
                DsError.ThrowExceptionForHR(hr);

                IntPtr fetched = IntPtr.Zero;
                IPin[] pins    = { null };

                if (VideoRenderer == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = _renderer as IVMRMixerControl9;

                    if (mixer != null)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetRGB;
                        mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                ILAVVideoSettings lavVideoSettings;
                IBaseFilter       lavVideo = FilterProvider.GetVideoFilter(out lavVideoSettings);

                if (lavVideo != null)
                {
                    if (_video != null)
                    {
                        Marshal.ReleaseComObject(_video);
                    }
                    _video = lavVideo;

                    if (lavVideoSettings != null)
                    {
                        _videoSettings = lavVideoSettings;

                        lavVideoSettings.SetRuntimeConfig(true);
                        hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_None);

                        // check for best acceleration available
                        //if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_CUDA) != 0)
                        //{
                        //    hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_CUDA);
                        //    hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                        //}
                        //else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_QuickSync) != 0)
                        //{
                        //    hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_QuickSync);
                        //    hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                        //}
                        //else
                        if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2Native) != 0)
                        {
                            hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2Native);
                            hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                        }
                        //else
                        //if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2CopyBack) != 0)
                        //{
                        //    hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2CopyBack);
                        //    hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                        //}

#if DEBUG
                        hr = lavVideoSettings.SetTrayIcon(true);
#endif
                    }

                    hr = m_graph.AddFilter(_video, "LavVideo");
                    DsError.ThrowExceptionForHR(hr);
                }

                IBaseFilter vobSub = FilterProvider.GetVobSubFilter();

                if (vobSub != null)
                {
                    try
                    {
                        hr = m_graph.AddFilter(vobSub, "VobSub");
                        DsError.ThrowExceptionForHR(hr);
                        IDirectVobSub vss = vobSub as IDirectVobSub;
                        if (_vobsub != null)
                        {
                            Marshal.ReleaseComObject(_vobsub);
                        }
                        _vobsub = vss;
                        InitSubSettings();
                    }
                    catch { }
                }

                hr = m_graph.Connect(DsFindPin.ByName((IBaseFilter)splitter, "Audio"), DsFindPin.ByDirection(_audio, PinDirection.Input, 0));
                if (hr == 0)
                {
                    HasAudio = true;
                }
                else
                {
                    HasAudio = false;
                }


                IBaseFilter dcDsp = FilterProvider.GetDCDSPFilter();
                if (dcDsp != null)
                {
                    if (_dspFilter != null)
                    {
                        Marshal.ReleaseComObject(_dspFilter);
                    }
                    _dspFilter = (IDCDSPFilterInterface)dcDsp;

                    if (HasAudio)
                    {
                        hr = m_graph.AddFilter((IBaseFilter)_dspFilter, "AudioProcessor");
                        hr = _dspFilter.set_EnableBitrateConversionBeforeDSP(true);
                        hr = ((IDCDSPFilterVisualInterface)_dspFilter).set_VISafterDSP(true);
                        hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_audio, PinDirection.Output, 0), DsFindPin.ByDirection(_dspFilter, PinDirection.Input, 0));
                        DsError.ThrowExceptionForHR(hr);
                        hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_dspFilter, PinDirection.Output, 0), DsFindPin.ByDirection(_audioRenderer, PinDirection.Input, 0));

                        var cb = new AudioCallback(this);
                        hr = _dspFilter.set_CallBackPCM(cb);

                        object intf = null;
                        hr         = _dspFilter.set_AddFilter(0, TDCFilterType.ftEqualizer);
                        hr         = _dspFilter.get_FilterInterface(0, out intf);
                        _equalizer = (IDCEqualizer)intf;
                        _equalizer.set_Seperate(false);
                    }
                }
                else
                {
                    if (HasAudio)
                    {
                        hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_audio, PinDirection.Output, 0), DsFindPin.ByDirection(_audioRenderer, PinDirection.Input, 0));
                    }
                }

                bool subconnected = false;

                hr = m_graph.Connect(DsFindPin.ByName((IBaseFilter)_splitter, "Video"), DsFindPin.ByDirection(_video, PinDirection.Input, 0));
                if (hr == 0)
                {
                    HasVideo = true;
                }
                else
                {
                    HasVideo = false;
                }

                if (HasVideo)
                {
                    hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_video, PinDirection.Output, 0), DsFindPin.ByDirection(vobSub, PinDirection.Input, 0));
                    DsError.ThrowExceptionForHR(hr);
                    if (hr == 0)
                    {
                        int lc;
                        ((IDirectVobSub)vobSub).get_LanguageCount(out lc);
                        subconnected = (lc != 0);
                        IPin pn = DsFindPin.ByName((IBaseFilter)splitter, "Subtitle");
                        if (pn != null)
                        {
                            hr = m_graph.Connect(pn, DsFindPin.ByDirection(vobSub, PinDirection.Input, 1));
                            ((IDirectVobSub)vobSub).get_LanguageCount(out lc);
                            subconnected = (lc != 0);
                        }
                        hr = m_graph.Connect(DsFindPin.ByDirection(vobSub, PinDirection.Output, 0),
                                             DsFindPin.ByDirection(_renderer, PinDirection.Input, 0));
                    }
                    else
                    {
                        if (_vobsub != null)
                        {
                            Marshal.ReleaseComObject(_vobsub);
                        }
                        _vobsub = null;
                        hr      = m_graph.Connect(DsFindPin.ByDirection(_video, PinDirection.Output, 0),
                                                  DsFindPin.ByDirection(_renderer, PinDirection.Input, 0));
                    }
                }

                /* Loop over each pin of the source filter */
                while (pinEnum.Next(pins.Length, pins, fetched) == 0)
                {
                    IPin cTo;
                    pins[0].ConnectedTo(out cTo);
                    if (cTo == null)
                    {
                        // this should not happen if the filter graph is manually connected properly
                        hr = filterGraph.RenderEx(pins[0], AMRenderExFlags.RenderToExistingRenderers, IntPtr.Zero);
                    }
                    else
                    {
                        Marshal.ReleaseComObject(cTo);
                    }
                    Marshal.ReleaseComObject(pins[0]);
                }

                Marshal.ReleaseComObject(pinEnum);

                var selector = splitter as IAMStreamSelect;
                int numstreams;
                selector.Count(out numstreams);
                AMMediaType             mt;
                AMStreamSelectInfoFlags fl;
                SubtitleStreams.Clear();
                VideoStreams.Clear();
                AudioStreams.Clear();
                for (int i = 0; i < numstreams; i++)
                {
                    int    lcid;
                    int    group;
                    string name;
                    object o, o2;
                    selector.Info(i, out mt, out fl, out lcid, out group, out name, out o, out o2);
                    switch (group)
                    {
                    case 0:
                        VideoStreams.Add(name);
                        break;

                    case 1:
                        AudioStreams.Add(name);
                        break;

                    case 2:
                        SubtitleStreams.Add(name);
                        break;
                    }

                    if (o != null)
                    {
                        Marshal.ReleaseComObject(o);
                    }
                    if (o2 != null)
                    {
                        Marshal.ReleaseComObject(o2);
                    }
                }

                OnPropertyChanged("SubtitleStreams");
                OnPropertyChanged("VideoStreams");
                OnPropertyChanged("AudioStreams");

                /* Configure the graph in the base class */
                SetupFilterGraph(m_graph);

#if DEBUG
                /* Adds the GB to the ROT so we can view
                 * it in graphedit */
                m_dsRotEntry = new DsROTEntry(m_graph);
#endif

                SIZE a, b;
                if (HasVideo && _displayControl != null && (_displayControl).GetNativeVideoSize(out a, out b) == 0)
                {
                    var sz = MediaPlayerBase.GetVideoSize(_renderer, PinDirection.Input, 0);
                    if (a.cx > 0 && a.cy > 0)
                    {
                        SetNativePixelSizes(a);
                    }
                }

                if (!subconnected)
                {
                    InvokeNoSubtitleLoaded(new EventArgs());
                }
                else
                {
                    InitSubSettings();
                }
            }
            catch (Exception ex)
            {
                /* This exception will usually happen if the media does
                 * not exist or could not be opened because the
                 * proper filters are not installed */
                FreeResources();

                /* Fire our failed event */
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
            }

            InvokeMediaOpened();
        }
Example No. 43
 public void PlaySoundWithCallback(AudioClip clip, AudioCallback callback)
 {
     audioSource.PlayOneShot(clip);
     StartCoroutine(DelayedCallback(clip.length, callback));
 }
Example No. 44
 public CallbackWrapper(AudioCallback callback)
 {
     _callback = callback;
 }
Example No. 45
    private IEnumerator DelayedCallback(float time, AudioCallback callback)
    {
        yield return(new WaitForSeconds(time));

        callback();
    }
Example No. 46
        private void Go()
        {
            if (File.Exists(fileName))
            {
                filePath      = "";
                fileDirectory = "";
            }
            else if (File.Exists(Path.Combine(fileDirectory, fileName)))
            {
                filePath = "";
            }

            string file = Path.Combine(Path.Combine(filePath, fileDirectory), fileName);

            textDisplay = new TextSprite(" ", new SdlDotNet.Graphics.Font(file, 20), Color.Red);
            Video.WindowIcon();
            Video.WindowCaption = "SDL.NET - StreamingAudio";
            screen = Video.SetVideoMode(width, height);

            switch (streamChoice)
            {
            case StreamChoice.InternalCallback:
                stream   = new AudioStream(playbackFreq, AudioFormat.Unsigned16Little, SoundChannel.Mono, samples);
                buffer16 = new short[samples];
                break;

            case StreamChoice.CustomCallback:
                callback = new AudioCallback(Unsigned16LittleCallback);
                stream   = new AudioStream(playbackFreq, AudioFormat.Unsigned16Little, SoundChannel.Mono, samples, callback, "Hello World");
                buffer16 = new short[samples];
                break;

            case StreamChoice.CustomCallback8Bit:
                callback = new AudioCallback(Unsigned8Callback);
                stream   = new AudioStream(playbackFreq, AudioFormat.Unsigned8, SoundChannel.Mono, samples, new AudioCallback(Unsigned8Callback), "Hello World");
                buffer8  = new byte[samples];
                break;

            default:
                stream   = new AudioStream(playbackFreq, AudioFormat.Unsigned16Little, SoundChannel.Mono, samples);
                buffer16 = new short[samples];
                break;
            }

            offset = stream.Offset;
            volume = 0.9 * 32768;

            buffer16 = new short[samples];

            osc.Rate      = 20;
            osc.Amplitude = 1;

            osc2.Rate      = 3;
            osc2.Amplitude = 10;

            stream.Paused    = false;
            textDisplay.Text = SdlDotNetExamplesBrowser.StringManager.GetString(
                "StreamingAudioDirections", CultureInfo.CurrentUICulture);
            textDisplay.TextWidth = 350;
            Events.Run();
        }