/// <summary>
/// Plays the given audio source through the streamer and completes when the
/// streamer raises <c>OnAudioStop</c>.
/// </summary>
/// <param name="audioSource">The audio to send to the streamer.</param>
/// <param name="cancellationToken">
/// Cancels playback by clearing the streamer's queue once this call is in control.
/// </param>
public async Task PlayAudio(IAudioSource audioSource, CancellationToken cancellationToken = default)
{
    ThrowIfStale();

    // RunContinuationsAsynchronously keeps the OnAudioStop handler's thread from
    // inlining the await continuation below (deadlock/latency hazard otherwise).
    var tcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
    EventHandler handler = (a, b) => { tcs.TrySetResult(true); };
    AudioStreamer.OnAudioStop += handler;

    var inControl = false;
    try
    {
        // FIX: the registration was previously leaked; on a long-lived token each
        // PlayAudio call accumulated another callback. Dispose it when playback ends.
        using (cancellationToken.Register(() =>
        {
            if (inControl)
            {
                AudioStreamer.ClearAudio();
            }
        }))
        {
            inControl = true;
            AudioStreamer.SendAudio(audioSource, cancellationToken: cancellationToken);
            AudioStreamer.QueueSilence();
            // Completes when OnAudioStop fires (including after ClearAudio on cancellation).
            await tcs.Task;
        }
    }
    finally
    {
        inControl = false;
        AudioStreamer.OnAudioStop -= handler;
    }
}
/// <summary>
/// Replaces any existing audio streamer with a fresh one built from the current
/// audio source and the <c>audioSettings</c> field.
/// </summary>
/// <param name="settings">
/// Requested stream settings. NOTE(review): currently unused — the method reads
/// the <c>audioSettings</c> field instead; confirm which is intended.
/// </param>
private void SetupAudioStream(AudioStreamSettings settings)
{
    logger.Debug("StartAudioStream(...)");

    // Detach from the previous streamer before replacing it.
    if (audioStreamer != null)
    {
        audioStreamer.StateChanged -= AudioStreamer_StateChanged;
    }

    try
    {
        audioStreamer = new AudioStreamer(audioSource);
        audioStreamer.Setup(audioSettings.EncodingParams, audioSettings.NetworkParams);
        audioStreamer.StateChanged += AudioStreamer_StateChanged;
    }
    catch (Exception ex)
    {
        logger.Error(ex);

        // Tear down whatever was constructed before the failure, then surface it.
        var failed = audioStreamer;
        if (failed != null)
        {
            failed.Close();
            failed.StateChanged -= AudioStreamer_StateChanged;
            audioStreamer = null;
        }
        throw;
    }
}
/// <summary>
/// Builds an <see cref="AudioStreamer"/> bound to this voice socket's UDP
/// endpoint and encryption parameters, wiring its start/stop events to the
/// speaking-state notifications.
/// </summary>
/// <returns>A configured, not-yet-started streamer.</returns>
/// <exception cref="InvalidOperationException">The socket is not ready.</exception>
public AudioStreamer CreateAudioStreamer()
{
    if (!Ready)
    {
        throw new InvalidOperationException("Voice socket is not ready to create audio streamer.");
    }

    var endPointInfo = new VoiceEndPointInfo
    {
        SocketEndPoint = GetUdpEndpoint(),
        LocalPort = LocalPort,
        Ssrc = Ssrc,
        EncryptionKey = SecretKey,
        EncryptionMode = EncryptionMode,
    };

    var encoder = new OpusAudioEncoder(Connectivity.VoiceSampleRate, Connectivity.VoiceChannels);
    var streamer = new AudioStreamer(encoder, endPointInfo, cancellationToken: CancellationTokenSource.Token);

    // Mirror streamer lifecycle into the speaking indicator.
    streamer.OnAudioStart += async (a, b) => await BeginSpeaking();
    streamer.OnAudioStop += async (a, b) => await EndSpeaking();

    return streamer;
}
/// <summary>
/// Called when a new track requires audio decoding (typically because it is
/// about to start playing). Attaches a Shoutcast media stream source to the
/// streamer based on metadata packed into <c>track.Tag</c>.
/// </summary>
/// <param name="track">
/// The track that needs audio streaming. Its Tag is a '$'-separated string whose
/// third field is the stream type and whose last field is the stream URL.
/// </param>
/// <param name="streamer">
/// The AudioStreamer object to which a MediaStreamSource should be attached to
/// commence playback.
/// </param>
protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
{
    var data = track.Tag.ToString().Split('$');
    var url = data[data.Length - 1]; // last field: stream URL
    var type = data[2];              // third field: stream type — assumes Tag always has >= 3 fields; TODO confirm with the producer

#if DEBUG
    System.Diagnostics.Debug.WriteLine("AudioStreamer:OnBeginStreaming - Type: " + type);
#endif

    // FIX: ToLowerInvariant instead of culture-sensitive ToLower — under e.g. the
    // Turkish culture, "SHOUTCAST".ToLower() would not match "shoutcast".
    switch (type.ToLowerInvariant())
    {
        case "shoutcast":
            {
                mms = new Silverlight.Media.ShoutcastMediaStreamSource(new Uri(url), true);
                mms.MetadataChanged += mms_MetadataChanged;
                mms.Connected += mms_Connected;
                mms.Closed += mms_Closed;
                streamer.SetSource(mms);
            }
            break;
    }
}
/// <summary>
/// Disposes the audio streamer, if one exists, and clears the reference.
/// </summary>
/// <returns>Always true.</returns>
private bool DestroyAudioStreamer()
{
    if (mAudioStreamer == null)
    {
        return true;
    }

    mAudioStreamer.Dispose();
    mAudioStreamer = null;
    return true;
}
/// <summary>
/// Called when a new track requires audio decoding. Feeds the streamer a MIDI
/// media stream source and wires its completion event.
/// </summary>
/// <param name="track">The track that needs audio streaming.</param>
/// <param name="streamer">The AudioStreamer the MediaStreamSource is attached to.</param>
protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
{
    System.Diagnostics.Debug.WriteLine("OnBeginStreaming");

    var source = new MidiStreamSource();

    // Raised when a track finishes or the user switches tracks.
    source.StreamComplete += mss_StreamComplete;

    streamer.SetSource(source);
}
/// <summary>
/// Creates the websocket-backed audio streamer, subscribes to its connection and
/// message events, and starts streaming to the URL derived from the session id
/// entered in the UI.
/// </summary>
/// <returns>Always true.</returns>
private bool CreateAudioStreamer()
{
    // Instantiate and wire up connection/message callbacks.
    mAudioStreamer = new AudioStreamer();
    mAudioStreamer.MessageAvailable += OnWebsocketMessage;
    mAudioStreamer.Opened += OnWebsocketConnectionOpened;
    mAudioStreamer.Closed += OnWebsocketConnectionClosed;

    // Open the connection for the session entered in the text box.
    mAudioStreamer.StartStreaming(Utils.ParsingWeboscketUrl(tb_SessionId.Text));
    return true;
}
/// <summary>
/// Called when a new track requires audio decoding (typically because it is
/// about to start playing). Attaches a Shoutcast media stream source built from
/// the URL stored in <c>track.Tag</c> and completes the agent when it closes.
/// </summary>
/// <param name="track">The track that needs audio streaming; Tag holds the stream URL.</param>
/// <param name="streamer">
/// The AudioStreamer object to which a MediaStreamSource should be attached to
/// commence playback.
/// </param>
protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
{
    // Serialize access to the shared static media stream source.
    lock (AudioTrackStreamer.syncRoot)
    {
        AudioTrackStreamer.mss = new ShoutcastMediaStreamSource(new Uri(track.Tag));
        AudioTrackStreamer.mss.MetadataChanged += AudioTrackStreamer.MetadataChanged;
        AudioTrackStreamer.mss.Closed += (s, e) => this.NotifyComplete();
        streamer.SetSource(AudioTrackStreamer.mss);
    }
}
/// <summary>
/// Called when a new track requires audio decoding (typically because it is
/// about to start playing). Simulates a streaming feed with a generated sine
/// wave at the frequency carried in <c>track.Tag</c>.
/// </summary>
/// <param name="track">The track that needs audio streaming; Tag holds the frequency.</param>
/// <param name="streamer">
/// The AudioStreamer object to which a MediaStreamSource should be attached to
/// commence playback.
/// </param>
protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
{
    // FIX: parse with the invariant culture so a tag like "440.5" is read the
    // same on every device locale (current-culture parsing fails where ',' is
    // the decimal separator). Assumes the tag was written with invariant
    // formatting — TODO confirm at the producer.
    double freq = Convert.ToDouble(track.Tag, System.Globalization.CultureInfo.InvariantCulture);

    // Use sine wave audio generator to simulate a streaming audio feed.
    SineMediaStreamSource mss = new SineMediaStreamSource(freq, 1.0, TimeSpan.FromSeconds(5));

    // Raised when a track is complete or the user switches tracks.
    mss.StreamComplete += new EventHandler(mss_StreamComplete);

    streamer.SetSource(mss);
}
/// <summary>
/// Called when a new track requires audio decoding (typically because it is
/// about to start playing).
/// </summary>
/// <param name="track">The track that needs audio streaming; Tag holds a frequency.</param>
/// <param name="streamer">
/// The AudioStreamer object to which a MediaStreamSource should be attached to
/// commence playback.
/// </param>
/// <remarks>
/// NOTE(review): with the sine-source block below commented out, this override
/// never calls streamer.SetSource(...), so nothing is attached and playback will
/// not start from here — confirm that is intentional.
/// </remarks>
protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
{
    // Set the source of streamer to a media stream source.
    // freq is parsed but currently unused while the block below is disabled;
    // kept so the commented-out code still works when re-enabled. Note the
    // parse uses the current culture and can throw on a malformed Tag.
    double freq = Convert.ToDouble(track.Tag);

    /* Windev : Uncomment below stuff if u want to listen to original sine waves.
    // Use sine wave audio generator to simulate a streaming audio feed
    SineMediaStreamSource mss = new SineMediaStreamSource(freq, 1.0, TimeSpan.FromSeconds(5));

    // Event handler for when a track is complete or the user switches tracks
    mss.StreamComplete += new EventHandler(mss_StreamComplete);

    // Set the source
    streamer.SetSource(mss);
    */
}
/// <summary>
/// Background-agent entry point abused as a long-running worker: loads widget
/// settings from isolated storage, registers a native COM dll via reflection,
/// then polls forever, refreshing widget data while the device is locked.
/// </summary>
/// <param name="track">Unused by this implementation.</param>
/// <param name="streamer">Unused — no MediaStreamSource is ever attached.</param>
/// <remarks>
/// NOTE(review): this method never returns (infinite while-loop) and never calls
/// NotifyComplete(); it relies on the OS keeping the agent alive — confirm this
/// is the intended lock-screen-widget hack before touching the control flow.
/// </remarks>
protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
{
    // Load persisted widget settings; fall back to defaults on any failure
    // (missing file, corrupt XML) — deliberate best-effort, do not let it throw.
    using (var store = IsolatedStorageFile.GetUserStoreForApplication())
    {
        using (var stream = new IsolatedStorageFileStream("widgets.xml", FileMode.OpenOrCreate, store))
        {
            using (var reader = new StreamReader(stream))
            {
                try
                {
                    var serial = new XmlSerializer(typeof(appdata));
                    appSettings = (appdata)serial.Deserialize(stream);
                }
                catch
                {
                    appSettings = new appdata();
                }
            }
        }
    }

    // Register the native COM dll through the (non-public-API) ComBridge,
    // located by reflection so the project has no compile-time reference.
    Assembly a = Assembly.Load("Microsoft.Phone.InteropServices, Version=7.0.0.0, Culture=neutral, PublicKeyToken=24eec0d8c86cda1e");
    Type comBridgeType = a.GetType("Microsoft.Phone.InteropServices.ComBridge");
    MethodInfo dynMethod = comBridgeType.GetMethod("RegisterComDll", BindingFlags.Public | BindingFlags.Static);
    object retValue = dynMethod.Invoke(null, new object[] { "liblw.dll", new Guid("E79018CB-46A6-432D-8077-8C0863533001") });

    // Instantiate the COM object and show a native message box to signal start.
    instance = (Imangodll) new Cmangodll();
    instance.MessageBox7("BG Agent Fired", "lockwidgets", 0, out o);

    checkForDataChange();

    // Poll every 5 s; only refresh while the shell reports the device as locked.
    while (true)
    {
        System.Threading.Thread.Sleep(5000);
        if (instance.StringCall("aygshell", "SHIsLocked", "") == 1)
        {
            checkForDataChange();
        }
        // NOTE(review): forced GC.Collect() each iteration — presumably to stay
        // under the background-agent memory cap; verify it is still needed.
        GC.Collect();
        System.Diagnostics.Debug.WriteLine("1: " + Microsoft.Phone.Info.DeviceExtendedProperties.GetValue("ApplicationCurrentMemoryUsage"));
        System.Diagnostics.Debug.WriteLine("1: " + Microsoft.Phone.Info.DeviceExtendedProperties.GetValue("ApplicationPeakMemoryUsage"));
    }
}
/// <summary>
/// Called when a new track requires audio decoding (typically because it is
/// about to start playing). Attaches a Shoutcast media stream source built from
/// the URL in <c>track.Tag</c>; any failure is logged and swallowed.
/// </summary>
/// <param name="track">The track that needs audio streaming; Tag holds the stream URL.</param>
/// <param name="streamer">
/// The AudioStreamer object to which a MediaStreamSource should be attached to
/// commence playback.
/// </param>
protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
{
    // Serialize access to the shared static media stream source.
    lock (AudioTrackStreamer.syncRoot)
    {
        try
        {
            AudioTrackStreamer.mss = new ShoutcastMediaStreamSource(new Uri(track.Tag));
            AudioTrackStreamer.mss.MetadataChanged += AudioTrackStreamer.MetadataChanged;
            AudioTrackStreamer.mss.Closed += (s, e) => this.NotifyComplete();
            streamer.SetSource(AudioTrackStreamer.mss);
        }
        catch (Exception ex)
        {
            // Best-effort logging only; the agent is not aborted on failure.
            Debug.WriteLine("----------===============");
            Debug.WriteLine("OnBeginStreaming:" + ex);
        }
    }
}
/// <summary>
/// Stops video/audio capture sources and their streamers, then closes the
/// communication service. Safe to call when streaming was never started.
/// </summary>
private void StopStreaming()
{
    logger.Debug("StopStreaming()");

    // Unhook and close the video capture source first.
    if (videoSource != null)
    {
        videoSource.CaptureStarted -= VideoSource_CaptureStarted;
        videoSource.CaptureStopped -= VideoSource_CaptureStopped;
        videoSource.Close();
    }

    videoStreamer?.Close();

    audioSource?.Stop();

    // The audio streamer is fully torn down and released here (unlike the
    // video streamer, whose reference is kept).
    if (audioStreamer != null)
    {
        audioStreamer.Close();
        audioStreamer.StateChanged -= AudioStreamer_StateChanged;
        audioStreamer = null;
    }

    communicationService?.Close();
}
/// <summary>
/// Starts screen/camera and audio capture plus the matching network streamers
/// for the current session, then opens the communication service and kicks off
/// capture and streaming.
/// </summary>
private void StartStreaming()
{
    logger.Debug("ScreenStreamer::StartStreaming()");

    var videoSettings = Session.VideoSettings;
    if (videoSettings.Enabled)
    {
        // Work on clones so the session's stored settings are not mutated.
        var captureDevice = (VideoCaptureDevice)videoSettings.CaptureDevice.Clone();
        if (captureDevice.CaptureMode == CaptureMode.Screen)
        {
            var screenDevice = (ScreenCaptureDevice)captureDevice;

            // Clamp the capture region to the encoder's maximum dimensions.
            if (screenDevice.CaptureRegion.Width > Config.MaxVideoEncoderWidth)
            {
                screenDevice.CaptureRegion.Width = Config.MaxVideoEncoderWidth;
            }
            if (screenDevice.CaptureRegion.Height > Config.MaxVideoEncoderHeight)
            {
                screenDevice.CaptureRegion.Height = Config.MaxVideoEncoderHeight;
            }

            // Round down to even dimensions for the encoder.
            screenDevice.Resolution = MediaToolkit.Utils.GraphicTools.DecreaseToEven(screenDevice.CaptureRegion.Size);
        }

        var videoEncoderSettings = (VideoEncoderSettings)videoSettings.EncoderSettings.Clone();
        if (videoSettings.UseEncoderResoulutionFromSource)
        {
            // Encoder follows the capture resolution.
            videoEncoderSettings.Width = captureDevice.Resolution.Width;
            videoEncoderSettings.Height = captureDevice.Resolution.Height;
        }
        else
        {
            // Capture follows the configured encoder resolution.
            captureDevice.Resolution = videoEncoderSettings.Resolution;
        }

        // NOTE(review): if CaptureMode is neither UvcDevice nor Screen,
        // videoSource stays null and Setup below throws — confirm those are
        // the only modes that can reach this point.
        if (captureDevice.CaptureMode == CaptureMode.UvcDevice)
        {
            videoSource = new VideoCaptureSource();
        }
        else if (captureDevice.CaptureMode == CaptureMode.Screen)
        {
            videoSource = new ScreenSource();
        }
        videoSource.Setup(captureDevice);
        videoSource.CaptureStarted += VideoSource_CaptureStarted;
        videoSource.CaptureStopped += VideoSource_CaptureStopped;

        if (Session.TransportMode == TransportMode.Tcp || Session.IsMulticast)
        {
            videoStreamer = new VideoStreamer(videoSource);
            videoStreamer.Setup(videoEncoderSettings, videoSettings.NetworkSettings);
            //videoStreamer.Setup(videoSettings.EncoderSettings, videoSettings.NetworkSettings);
        }
        else
        {
            // currently not supported...
            // NOTE(review): videoStreamer stays null here, yet videoStreamer.Start()
            // is called unconditionally below when video is enabled — verify this
            // branch is unreachable in practice.
        }
    }

    var audioSettings = Session.AudioSettings;
    if (audioSettings.Enabled)
    {
        audioSource = new AudioSource();
        var captureDevice = audioSettings.CaptureDevice;
        var deviceId = captureDevice.DeviceId;
        var captureProps = captureDevice.Properties;
        audioSource.Setup(deviceId, captureProps);

        if (Session.TransportMode == TransportMode.Tcp || Session.IsMulticast)
        {
            audioStreamer = new AudioStreamer(audioSource);
            audioStreamer.Setup(audioSettings.EncoderSettings, audioSettings.NetworkSettings);
            audioStreamer.StateChanged += AudioStreamer_StateChanged;
        }
        else
        {
            // currently not supported...
            // NOTE(review): same null-streamer hazard as the video branch above.
        }
    }

    communicationService = new CommunicationService(this);
    communicationService.Open();

    // Start capture before the streamers so they have data to consume.
    if (videoSettings.Enabled)
    {
        videoSource.Start();
        videoStreamer.Start();
    }
    if (audioSettings.Enabled)
    {
        audioSource.Start();
        audioStreamer.Start();
    }
}
/// <summary>
/// Background-agent entry point for starting playback of <paramref name="track"/>.
/// Runs the streaming pipeline asynchronously and always reports completion via
/// NotifyComplete() when it finishes, fails, or is cancelled.
/// (async void is forced by the base-class override signature.)
/// </summary>
/// <param name="track">The track that needs audio streaming.</param>
/// <param name="streamer">The AudioStreamer the MediaStreamSource is attached to.</param>
protected override async void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
{
    var sourceText = null == track ? "<no track>" : null == track.Source ? "<none>" : track.Source.ToString();
    var tagText = null == track ? "<no track>" : track.Tag ?? "<none>";
    Debug.WriteLine("AudioTrackStreamer.OnBeginStreaming() track.Source {0} track.Tag {1}", sourceText, tagText);

    try
    {
        Debug.Assert(null == _mediaStreamFacade, "_mediaStreamFacade is in use");

        StartPoll();

        // Replace a token source that a previous stop has already cancelled.
        if (_cancellationTokenSource.IsCancellationRequested)
            _cancellationTokenSource = new CancellationTokenSource();

        await RunStreamingAsync(track, streamer).ConfigureAwait(false);
    }
    catch (OperationCanceledException)
    {
        // Cancellation is the normal stop path; nothing to report.
    }
    catch (Exception ex)
    {
        Debug.WriteLine("AudioTrackStreamer.OnBeginStreaming() failed: " + ex.ExtendedMessage());
    }
    finally
    {
        StopPoll();
        Debug.WriteLine("AudioTrackStreamer.OnBeginStreaming() play done NotifyComplete");
        NotifyComplete();
    }
}
/// <summary>
/// Called when a new track requires audio decoding (typically because it is
/// about to start playing). No MediaStreamSource is provided here; the agent
/// immediately reports the request as complete.
/// </summary>
/// <param name="track">The track that needs audio streaming.</param>
/// <param name="streamer">
/// The AudioStreamer object a MediaStreamSource would be attached to.
/// </param>
protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
{
    // TODO: attach a MediaStreamSource via streamer.SetSource(...) when
    // app-side decoding is implemented.
    NotifyComplete();
}
/// <summary>
/// Resolves the track's URL, fixes up its title from the known track list,
/// builds a MediaStreamSource through the media stream facade, hands it to the
/// streamer, and waits for playback to finish. On failure after the facade was
/// created, the facade is cleaned up before returning.
/// </summary>
/// <param name="track">Track whose Tag carries the absolute stream URL; may be null.</param>
/// <param name="streamer">Streamer the created MediaStreamSource is attached to.</param>
async Task RunStreamingAsync(AudioTrack track, AudioStreamer streamer)
{
    IMediaStreamFacade mediaStreamFacade = null;

    try
    {
        // Bail out quietly on a missing or unusable URL.
        if (null == track || null == track.Tag)
        {
            Debug.WriteLine("AudioTrackStreamer.RunStreamingAsync() null url");
            return;
        }

        Uri url;
        if (!Uri.TryCreate(track.Tag, UriKind.Absolute, out url))
        {
            Debug.WriteLine("AudioTrackStreamer.RunStreamingAsync() invalid url: " + track.Tag);
            return;
        }

        // Prefer the catalog title for this URL; fall back to "Unknown".
        var defaultTitle = "Unknown";
        var mediaTrack = TrackManager.Tracks.Where(t => null != t).FirstOrDefault(t => t.Url == url);
        if (null != mediaTrack)
            defaultTitle = mediaTrack.Title;

        _metadataHandler.DefaultTitle = defaultTitle;

        // Update the system track title inside an edit scope if it differs.
        if (!string.Equals(track.Title, defaultTitle))
        {
            track.BeginEdit();
            track.Title = defaultTitle;
            track.EndEdit();
        }

        mediaStreamFacade = await InitializeMediaStreamAsync().ConfigureAwait(false);

        Debug.Assert(null != mediaStreamFacade);

        // Use the richer track-based factory when we matched a catalog entry.
        MediaStreamSource mss;
        if (null != mediaTrack)
            mss = await mediaStreamFacade.CreateMediaStreamSourceAsync(mediaTrack, _cancellationTokenSource.Token).ConfigureAwait(false);
        else
            mss = await mediaStreamFacade.CreateMediaStreamSourceAsync(url, _cancellationTokenSource.Token).ConfigureAwait(false);

        if (null == mss)
        {
            Debug.WriteLine("AudioTrackStreamer.RunStreamingAsync() unable to create media stream source");
            // Falls through to the facade cleanup at the bottom.
        }
        else
        {
            streamer.SetSource(mss);

            // Success path: wait for playback to complete, then return WITHOUT
            // cleaning up the facade (it is still in use / cleaned up elsewhere).
            await mediaStreamFacade.PlayingTask.ConfigureAwait(false);

            return;
        }
    }
    catch (OperationCanceledException)
    {
        // Cancelled before the facade mattered; skip cleanup (matches original flow).
        return;
    }
    catch (Exception ex)
    {
        Debug.WriteLine("AudioTrackStreamer.RunStreamingAsync() failed: " + ex.ExtendedMessage());
        // Falls through to the facade cleanup below.
    }

    // Only reached on failure paths after (possibly) creating the facade.
    if (null == mediaStreamFacade)
        return;

    await CleanupMediaStreamFacadeAsync(mediaStreamFacade).ConfigureAwait(false);
}
/// <summary>
/// Creates the music component over the supplied audio streamer and logger.
/// </summary>
/// <param name="a_audioStreamer">Streamer used for audio playback.</param>
/// <param name="a_logger">Logger used for diagnostics.</param>
public Music(AudioStreamer a_audioStreamer, Logger a_logger)
{
    m_logger = a_logger;
    m_audioStreamer = a_audioStreamer;
}