public void Handle(AudioTrackAddedRemoved message)
{
    if (!this.IsGroup && this.UserOrGroupId == AppGlobalStateManager.Current.LoggedInUserId)
    {
        Execute.ExecuteOnUIThread(delegate
        {
            if (message.Added && this.AlbumId == 0)
            {
                this.AllTracks.LoadData(true, false, null, false);
                return;
            }
            AudioHeader audioHeader = this.AllTracks.Collection.FirstOrDefault(h => h.Track.aid == message.Audio.aid);
            if (audioHeader != null)
            {
                if (message.IsSavedAudiosAlbum && this.AlbumId != AllAlbumsViewModel.SAVED_ALBUM_ID)
                {
                    audioHeader.NotifyChanged();
                    return;
                }
                this.AllTracks.Delete(audioHeader);
            }
        });
    }
}
private void Grid_Tap(object sender, System.Windows.Input.GestureEventArgs e)
{
    string tag = (sender as Grid).Tag.ToString();
    AudioHeader track = this.VM.AllTracks.Collection.First(h => h.Track.UniqueId == tag);
    this.NavigateToAudioPlayer(track, this.VM.AllTracks.Collection, true);
}
public void Reordered(AudioHeader item, AudioHeader before, AudioHeader after)
{
    if (item == null) return;
    AudioService.Instance.ReorderAudio(
        item.Track.aid,
        AppGlobalStateManager.Current.LoggedInUserId,
        this.AlbumId,
        after == null ? 0L : after.Track.aid,
        before == null ? 0L : before.Track.aid,
        (Action<BackendResult<long, ResultCode>>)(res => { }));
}
/// <summary>
/// Calculates the File Quality Coefficient by extracting information from the MPEG file header.
/// For implementation details see the wiki page for this coefficient
/// (http://code.google.com/p/p2p-player/wiki/ImplicitQoS#Coefficiente_di_Qualità_del_File).
/// </summary>
/// <param name="filepath">Path of the file</param>
/// <returns>File quality coefficient of the given file</returns>
private static FileQualityCoefficient calculateFQ(String filepath)
{
    AudioFile mpegFile = new AudioFile(filepath);
    AudioHeader header;
    if (AudioHeader.Find(out header, mpegFile, 0))
    {
        // Bitrate component: logarithmic ratio against a 192 kbps reference, truncated to two decimals.
        double brComp = Math.Truncate((Math.Log10(header.AudioBitrate / 192.0) + 0.78) * 100.0) / 100.0;

        double srComp;
        switch (header.AudioSampleRate)
        {
            case 32000: srComp = 0.2; break;
            case 44100: srComp = 0.8; break;
            case 48000: srComp = 1.0; break;
            default: srComp = 0.0; break;
        }

        double cmComp;
        switch (header.ChannelMode)
        {
            case ChannelMode.SingleChannel: cmComp = 0.2; break;
            case ChannelMode.DualChannel:   cmComp = 0.5; break;
            case ChannelMode.JointStereo:   cmComp = 0.8; break;
            case ChannelMode.Stereo:        cmComp = 1.0; break;
            default: cmComp = 0.0; break;
        }

        return new FileQualityCoefficient(brComp, cmComp, srComp);
    }
    return new FileQualityCoefficient();
}
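A quick worked example (not from the original project) of the bitrate component used above; the helper name is illustrative only:

// Sketch: evaluating the brComp formula for common MP3 bitrates, assuming the same
// 192 kbps reference point as calculateFQ. "BitrateComponent" is a hypothetical helper.
static double BitrateComponent(double kbps) => Math.Truncate((Math.Log10(kbps / 192.0) + 0.78) * 100.0) / 100.0;
// BitrateComponent(192) == 0.78   (log10(1) = 0, so only the 0.78 offset remains)
// BitrateComponent(320) == 1.00   (log10(320/192) ≈ 0.22, pushing the component to its ceiling)
// BitrateComponent(128) == 0.60   (log10(128/192) ≈ -0.18, lowering the score)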
private void NavigateToAudioPlayer(AudioHeader track, IEnumerable enumerable, bool need_navigate = false)
{
    if (track == null) return;
    if (track.IsContentRestricted)
    {
        track.ShowContentRestrictedMessage();
    }
    else if (track.Track.UniqueId == BGAudioPlayerWrapper.Instance.Track.GetTagId())
    {
        if (need_navigate)
        {
            Navigator.Current.NavigateToAudioPlayer(true);
        }
    }
    else
    {
        List<AudioObj> tracks = new List<AudioObj>();
        foreach (object item in enumerable)
        {
            AudioHeader current = item as AudioHeader;
            if (current != null)
            {
                tracks.Add(current.Track);
            }
        }
        PlaylistManager.SetAudioAgentPlaylist(tracks, CurrentMediaSource.AudioSource);
        if (!track.TryAssignTrack()) return;
        if (need_navigate)
        {
            Navigator.Current.NavigateToAudioPlayer(true);
        }
        else
        {
            BGAudioPlayerWrapper.Instance.Play();
        }
    }
}
private void AllAudios_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
    ExtendedLongListSelector longListSelector = sender as ExtendedLongListSelector;
    AudioHeader audioHeader = (longListSelector != null ? longListSelector.SelectedItem : null) as AudioHeader;
    if (audioHeader == null) return;
    audioHeader.TryAssignTrack();
    longListSelector.SelectedItem = null;
}
private void TrackAction(AudioPageViewModel.Audios.AudioData2 item)
{
    if (BGAudioPlayerWrapper.Instance.Track != null && item.UniqueId == BGAudioPlayerWrapper.Instance.Track.Tag)
    {
        BGAudioPlayerWrapper.Instance.Pause();
    }
    else
    {
        AudioObj a = new AudioObj();
        a.duration = item.duration.ToString();
        a.artist = item.artist;
        a.title = item.title;
        a.url = item.url;
        a.id = item.id;
        a.owner_id = item.owner_id;
        BGAudioPlayerWrapper.Instance.Track = AudioTrackHelper.CreateTrack(a);
        BGAudioPlayerWrapper.Instance.Play();

        // TODO: build the playlist from the list that is currently open.
        List<AudioObj> tracks = new List<AudioObj>();
        foreach (AudioPageViewModel.Audios.AudioData2 adata in (base.DataContext as AudioPageViewModel).audios.items)
        {
            AudioObj o = new AudioObj();
            o.artist = adata.artist;
            o.duration = adata.duration.ToString();
            o.id = adata.id;
            o.owner_id = adata.owner_id;
            o.title = adata.title;
            o.url = adata.url;
            tracks.Add(o);
        }
        PlaylistManager.SetAudioAgentPlaylist(tracks, CurrentMediaSource.AudioSource);

        AudioHeader track = new AudioHeader(a);
        if (!track.TryAssignTrack()) return;
        //if (need_navigate)
        //    Navigator.Current.NavigateToAudioPlayer(true);
        //else
        //    BGAudioPlayerWrapper.Instance.Play();
    }
}
private void HandleAudioSelectionChanged(object sender, object selectedItem, bool fromSearch)
{
    ListBox listBox = sender as ListBox;
    ExtendedLongListSelector longListSelector = sender as ExtendedLongListSelector;
    AudioHeader track = selectedItem as AudioHeader;
    if (listBox != null)
    {
        ((Selector)listBox).SelectedItem = null;
    }
    if (longListSelector != null)
    {
        longListSelector.SelectedItem = null;
    }
    if (track == null) return;

    if (this._pageMode == AudioPage.PageMode.PickAudio)
    {
        ParametersRepository.SetParameterForId("PickedAudio", track.Track);
        if (this._albumId != 0L)
        {
            ((Page)this).NavigationService.RemoveBackEntrySafe();
        }
        Navigator.Current.GoBack();
    }
    else if (listBox != null)
    {
        if (fromSearch)
        {
            CurrentMediaSource.AudioSource = StatisticsActionSource.search;
        }
        this.NavigateToAudioPlayer(track, ((ItemsControl)listBox).ItemsSource, true);
    }
    else
    {
        if (longListSelector == null) return;
        if (fromSearch)
        {
            CurrentMediaSource.AudioSource = StatisticsActionSource.search;
        }
        IEnumerable enumerable = !longListSelector.IsFlatList
            ? this.GetExtendedSelectorGroupedItems(longListSelector.ItemsSource)
            : longListSelector.ItemsSource;
        this.NavigateToAudioPlayer(track, enumerable, true);
    }
}
public override SpeakerStreamStatus SendFrame(Guid speakerStreamInstance, AudioHeader audioHeader, byte[] data)
{
    Toolbox.Log.Trace("Speaker header: {0}", audioHeader);
    DemoSpeakerStreamSession s = Container.StreamManager.GetSession(DemoDeviceConstants.DeviceSpeakerChannel) as DemoSpeakerStreamSession;
    if (s != null)
    {
        s.StoreFrameForLoopback(audioHeader, data);
    }
    Container.ConnectionManager.SendSpeakerData(data);
    Container.ConnectionManager.SendInfo(Constants.Speaker1.ToString(), "SpeakerFrame, len=" + data.Length);
    return SpeakerStreamStatus.DataSent;
}
public override SpeakerStreamStatus SendFrame(Guid speakerStreamInstance, AudioHeader audioHeader, byte[] data)
{
    Toolbox.Log.Trace("Speaker header: {0}", audioHeader);
    BeiaDeviceDriver_LightSpeakerStreamSession s = Container.StreamManager.GetSession(1 /* TODO: Specify correct channel number */) as BeiaDeviceDriver_LightSpeakerStreamSession;
    if (s != null)
    {
        s.StoreFrameForLoopback(audioHeader, data);
    }
    // TODO: Make request to device for sending data to the speaker.
    return SpeakerStreamStatus.DataSent;
}
private void EditTrackItem_Tap(object sender, RoutedEventArgs e)
{
    FrameworkElement frameworkElement = sender as FrameworkElement;
    if (frameworkElement == null) return;
    AudioHeader dataContext = frameworkElement.DataContext as AudioHeader;
    if (dataContext == null) return;
    Navigator.Current.NavigateToEditAudio(dataContext.Track);
}
public THP_Parser(Stream stream)
{
    using (var reader = new FileReader(stream, true))
    {
        reader.SetByteOrder(true);
        FileHeader = reader.ReadStruct<Header>();
        bool isVersion11 = FileHeader.Version == 0x00011000;

        reader.SeekBegin(FileHeader.ComponentsOffset);
        uint numComponents = reader.ReadUInt32();
        byte[] components = reader.ReadBytes(16);
        for (int i = 0; i < components.Length; i++)
        {
            if (components[i] == (byte)ComponentType.Video)
            {
                Video = new VideoHeader(reader, isVersion11);
            }
            if (components[i] == (byte)ComponentType.Audio)
            {
                Audio = new AudioHeader(reader, isVersion11);
            }
        }
        bool hasVideo = components.Any(x => x == (byte)ComponentType.Video);
        bool hasAudio = components.Any(x => x == (byte)ComponentType.Audio);

        reader.SeekBegin(FileHeader.FirstFrameOffset);
        for (int i = 0; i < FileHeader.FrameCount; i++)
        {
            long startFrame = reader.Position;
            var frame = new FrameHeader(reader, hasAudio ? Audio.NumChannels : 0);
            Frames.Add(frame);
            if (i == 0)
            {
                reader.SeekBegin(startFrame + FileHeader.FirstFrameLength);
            }
            else
            {
                reader.SeekBegin(startFrame + Frames[i - 1].NextFrameSize);
            }
        }
    }
}
private void GoToMessage_OnClicked(object sender, RoutedEventArgs e)
{
    long message_id = 0;
    object dataContext = ((FrameworkElement)sender).DataContext;
    switch (this.pivot.SelectedIndex)
    {
        case 0:
            AlbumPhoto albumPhoto = dataContext as AlbumPhoto;
            message_id = albumPhoto != null ? albumPhoto.MessageId : 0L;
            break;
        case 1:
            VideoHeader videoHeader = dataContext as VideoHeader;
            message_id = videoHeader != null ? videoHeader.MessageId : 0L;
            break;
        case 2:
            AudioHeader audioHeader = dataContext as AudioHeader;
            message_id = audioHeader != null ? audioHeader.MessageId : 0L;
            break;
        case 3:
            DocumentHeader documentHeader = dataContext as DocumentHeader;
            message_id = documentHeader != null ? documentHeader.MessageId : 0L;
            break;
        case 4:
            LinkHeader linkHeader = dataContext as LinkHeader;
            message_id = linkHeader != null ? linkHeader.MessageId : 0L;
            break;
    }
    if (message_id == 0L) return;

    long peerId = this.ViewModel.PeerId;
    if (this.ViewModel.IsChat)
    {
        peerId -= 2000000000L;
    }
    Navigator.Current.NavigateToConversation(peerId, this.ViewModel.IsChat, false, "", message_id, false);
}
private void DeleteTrackItem_Tap(object sender, RoutedEventArgs e)
{
    FrameworkElement frameworkElement = sender as FrameworkElement;
    if (frameworkElement == null) return;
    AudioHeader dataContext = frameworkElement.DataContext as AudioHeader;
    if (dataContext == null) return;
    this.DeleteAudios(new List<AudioHeader>() { dataContext });
}
private void Next_Tap(object sender, System.Windows.Input.GestureEventArgs e)
{
    DialogService dialog = new DialogService();
    dialog.SetStatusBarBackground = true;
    dialog.HideOnNavigation = false;
    PlaylistUC ucPlaylist = new PlaylistUC();
    dialog.Child = ucPlaylist;
    dialog.Opened += delegate(object s, EventArgs ev)
    {
        PlaylistViewModel vm = new PlaylistViewModel();
        vm.Shuffle = this.VM.Shuffle;
        ucPlaylist.DataContext = vm;
        vm.Audios.LoadData(false, false, delegate(BackendResult<List<AudioObj>, ResultCode> res)
        {
            Execute.ExecuteOnUIThread(delegate
            {
                // Scroll the playlist to the track just before the currently playing one.
                AudioHeader audioHeader = vm.Audios.Collection.FirstOrDefault(i => i.IsCurrentTrack);
                if (audioHeader != null)
                {
                    int num = vm.Audios.Collection.IndexOf(audioHeader);
                    if (num > 0)
                    {
                        audioHeader = vm.Audios.Collection[num - 1];
                    }
                    ucPlaylist.AllAudios.ScrollTo(audioHeader);
                }
            });
        }, false);
    };
    dialog.Show(null);
}
/// <summary>
/// Reads information from an MPEG file and fills the tag fields.
/// </summary>
/// <param name="filename">Filename from which to extract the information for the tag</param>
/// <returns>True if both the tag and the audio header could be read; otherwise false</returns>
private bool _fillTagMpeg(string filename)
{
    using (AudioFile mpegFile = new AudioFile(filename))
    {
        TagLib.Tag fileTag = mpegFile.GetTag(TagLib.TagTypes.Id3v2, false);
        if (fileTag == null)
        {
            // Fall back to an ID3v1 tag when no ID3v2 tag is present.
            fileTag = mpegFile.GetTag(TagLib.TagTypes.Id3v1, false);
        }
        if (fileTag != null)
        {
            this.Title = fileTag.Title;
            this.Album = fileTag.Album;
            this.Artist = fileTag.Performers.FirstOrDefault();
            this.Genre = fileTag.Genres.FirstOrDefault();
            this.Track = fileTag.Track;
            this.Year = fileTag.Year;
        }
        else
        {
            return false;
        }

        AudioHeader header;
        if (AudioHeader.Find(out header, mpegFile, 0))
        {
            this.Bitrate = header.AudioBitrate;
            this.Length = (int)mpegFile.Properties.Duration.TotalSeconds;
            this.Channels = header.AudioChannels;
            this.SampleRate = header.AudioSampleRate;
        }
        else
        {
            return false;
        }

        FileInfo fi = new FileInfo(filename);
        this.FileSize = fi.Length;
    }
    return true;
}
private void Temp_Click(object sender, System.Windows.Input.GestureEventArgs e)
{
    string tag = (sender as Grid).Tag.ToString();
    AudioHeader track = this.VM.AllTracks.Collection.First(h => h.Track.UniqueId == tag);
    if (track.Track.UniqueId == BGAudioPlayerWrapper.Instance.Track.GetTagId())
    {
        // Toggle play/pause when the tapped track is already the current one.
        if (BGAudioPlayerWrapper.Instance.PlayerState == PlayState.Playing)
        {
            BGAudioPlayerWrapper.Instance.Pause();
        }
        else
        {
            BGAudioPlayerWrapper.Instance.Play();
        }
    }
    else
    {
        this.NavigateToAudioPlayer(track, this.VM.AllTracks.Collection, false);
    }
}
private void audiosList_OnSelectionChanged(object sender, SelectionChangedEventArgs e)
{
    AudioHeader selectedItem = this.audiosList.SelectedItem as AudioHeader;
    this.audiosList.SelectedItem = null;
    if (selectedItem == null) return;

    if (selectedItem.IsContentRestricted)
    {
        selectedItem.ShowContentRestrictedMessage();
    }
    else
    {
        List<AudioObj> tracks = ((IEnumerable)this.audiosList.ItemsSource)
            .OfType<AudioHeader>()
            .Select(item => item.Track)
            .ToList();
        PlaylistManager.SetAudioAgentPlaylist(tracks, CurrentMediaSource.AudioSource);
        if (!selectedItem.TryAssignTrack()) return;
        Navigator.Current.NavigateToAudioPlayer(false);
    }
}
// TODO: IsSavedAudiosAlbum
internal async void DeleteAudios(List<AudioHeader> list)
{
    AudioService.Instance.DeleteAudios(list.Select(a => a.Track.aid).ToList());
    await AudioCacheManager.Instance.ClearCache(list.Select(h => h.Track.Id));
    foreach (AudioHeader current in list)
    {
        AudioTrackAddedRemoved trackAddedRemoved = new AudioTrackAddedRemoved();
        trackAddedRemoved.Added = false;
        trackAddedRemoved.Audio = current.Track;
        EventAggregator.Current.Publish(trackAddedRemoved);
    }
}
public void TestRiffHeader()
{
    var riffHeader = new AudioHeader().GetRiffHeader(new NAudio.Wave.WaveFormat(48000, 2));
    Assert.AreEqual(58, riffHeader.Length);
}
/// <summary>
/// Start the capture process.
/// </summary>
public void Start()
{
    try
    {
        // If not capturing.
        if (!_capture.Capturing)
        {
            // If headers have not been written.
            if (!_hasHeaders)
            {
                // If in live streaming mode then there is no need to set the duration.
                if (!_isLiveStreaming)
                {
                    // Write the header initially.
                    _hasHeaders = true;
                    VideoHeader videoHeader = new VideoHeader();
                    AudioHeader audioHeader = new AudioHeader();
                    VideoAudioHeader headers = new VideoAudioHeader();

                    // Select what needs to be captured.
                    switch (_active)
                    {
                        case MediaActiveType.Video | MediaActiveType.Audio:
                            // Video and audio capture.
                            videoHeader.ContainsVideo = true;
                            videoHeader.Duration = 0.0;
                            videoHeader.FrameRate = VideoFrameRate;
                            videoHeader.FrameSizeHeight = VideoFrameSize.Height;
                            videoHeader.FrameSizeWidth = VideoFrameSize.Width;
                            videoHeader.ImageType = _imageType;
                            videoHeader.CompressionAlgorithm = _compressionAlgorithm;
                            audioHeader.ContainsAudio = true;
                            audioHeader.Channels = AudioChannels;
                            audioHeader.Duration = 0.0;
                            audioHeader.SampleSize = AudioSampleSize;
                            audioHeader.SamplingRate = AudioSamplingRate;
                            audioHeader.SoundType = _soundType;
                            audioHeader.CompressionAlgorithm = _compressionAlgorithm;
                            break;

                        case MediaActiveType.Video:
                            // Video capture.
                            videoHeader.ContainsVideo = true;
                            videoHeader.Duration = 0.0;
                            videoHeader.FrameRate = VideoFrameRate;
                            videoHeader.FrameSizeHeight = VideoFrameSize.Height;
                            videoHeader.FrameSizeWidth = VideoFrameSize.Width;
                            videoHeader.ImageType = _imageType;
                            videoHeader.CompressionAlgorithm = _compressionAlgorithm;
                            break;

                        case MediaActiveType.Audio:
                            // Audio capture.
                            audioHeader.ContainsAudio = true;
                            audioHeader.Channels = AudioChannels;
                            audioHeader.Duration = 0.0;
                            audioHeader.SampleSize = AudioSampleSize;
                            audioHeader.SamplingRate = AudioSamplingRate;
                            audioHeader.SoundType = _soundType;
                            audioHeader.CompressionAlgorithm = _compressionAlgorithm;
                            break;
                    }

                    // Add the header.
                    headers.MediaFormat = Nequeo.Media.Streaming.MediaFormat;
                    headers.Video = videoHeader;
                    headers.Audio = audioHeader;
                    _mux.WriteHeader(headers);
                }
            }

            // Build the graph.
            if (!_capture.Cued)
            {
                _capture.Cue();
            }

            // Select what needs to be captured.
            switch (_active)
            {
                case MediaActiveType.Video | MediaActiveType.Audio:
                    // Video and audio capture: start sample capture and create the samplers.
                    _capture.StartSnapshotImageSound();
                    ImageSampler();
                    SoundSampler();
                    break;

                case MediaActiveType.Video:
                    // Video capture.
                    _capture.StartSnapshotImage();
                    ImageSampler();
                    break;

                case MediaActiveType.Audio:
                    // Audio capture.
                    _capture.StartSnapshotSound();
                    SoundSampler();
                    break;
            }
        }
    }
    catch (Exception)
    {
        try
        {
            // If the engine has been created, stop and release it.
            if (_capture != null)
            {
                _capture.Stop();
            }
            if (_imageCapture != null)
            {
                _imageCapture.Stop();
                _imageCapture.Dispose();
            }
            if (_soundCapture != null)
            {
                _soundCapture.Stop();
                _soundCapture.Dispose();
            }
            _imageCapture = null;
            _soundCapture = null;
        }
        catch { }
    }

    // Un-pause.
    _isPaused = false;
}
private void SaveStream(string destKey)
{
    AudioHeader header = new AudioHeader();
    AudioProcessor.SavePcm(Buffer[destKey], header);
}
private void InitShoutcastStream(string url, int timeout, bool skipMetaInfo)
{
    HttpWebResponse response = null;
    if (netStream != null)
    {
        netStream.Close();
        netStream = null;
    }

    HttpWebRequest request = (HttpWebRequest)HttpWebRequest.Create(url);
    request.Headers.Clear();
    request.Headers.Add("Icy-MetaData", "1");
    request.Proxy = AppConfig.GetWebProxy();
    request.KeepAlive = false;
    request.UserAgent = ProTONEConstants.PlayerUserAgent;
    request.ServicePoint.Expect100Continue = false;
    request.Timeout = timeout;

    try
    {
        ToggleAllowUnsafeHeaderParsing(true);
        response = (HttpWebResponse)request.GetResponse();

        bool icyHeaderFound = false;
        Dictionary<string, string> nvc = new Dictionary<string, string>();
        foreach (string key in response.Headers.AllKeys)
        {
            if (response.Headers[key] != null)
            {
                string headerName = key.ToLowerInvariant();
                nvc.Add(headerName, response.Headers[key].ToLowerInvariant());
                icyHeaderFound |= headerName.StartsWith("icy");
            }
        }

        if (!icyHeaderFound)
        {
            // No ICY header => this is NOT a shoutcast stream.
            connected = false;
            // Exit with no exception.
            return;
        }

        try { metaInt = int.Parse(nvc["icy-metaint"]); } catch { }
        try { if (skipMetaInfo == false) { bitrate = int.Parse(nvc["icy-br"]); } } catch { }
        try { contentType = nvc["content-type"]; } catch { }

        switch (contentType)
        {
            case "audio/mpg":
            case "audio/mpeg":
            //case "audio/aac":
            //case "audio/aacp":
                this.ContentType = contentType;
                break;

            default:
                throw new NotSupportedException(string.Format("Unsupported content type: {0}.", contentType));
        }

        receivedBytes = 0;
        netStream = response.GetResponseStream();

        if (skipMetaInfo == false)
        {
            // Scan the start of the stream for a valid MPEG-1 Layer III frame header
            // to detect the actual sample rate and bitrate.
            int passes = 0;
            while (passes < 100 * 1024)
            {
                passes++;
                byte[] buff = new byte[sizeof(int)];
                int result = netStream.Read(buff, 0, buff.Length);
                if (result > 0)
                {
                    ByteVector vector = new ByteVector(buff, result);
                    AudioHeader hdr = AudioHeader.Unknown;
                    if (AudioHeader.Find(out hdr, vector) == true &&
                        hdr.Version == TagLib.Mpeg.Version.Version1 &&
                        hdr.AudioLayer == 3)
                    {
                        sampleRate = hdr.AudioSampleRate;
                        bitrate = hdr.AudioBitrate;
                        break;
                    }
                }
            }

            Dictionary<string, string> data = new Dictionary<string, string>();
            data.Add("TXT_FREQUENCY", sampleRate.ToString());
            data.Add("TXT_BITRATE", bitrate.ToString());
            data.Add("Content-Type", contentType);
            MediaRenderer.DefaultInstance.FireStreamPropertyChanged(data);
        }

        connected = true;
    }
    catch (Exception)
    {
        connected = false;
        throw;
    }
    finally
    {
        ToggleAllowUnsafeHeaderParsing(false);
    }
}
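The icy-metaint value parsed above is the number of audio bytes the server sends between in-band metadata blocks; this method only stores it. A minimal sketch of how that interval is usually consumed follows (ReadIcyMetadataBlock, its stream argument, and the UTF-8 decoding are illustrative assumptions, not code from this project):

// Sketch only: after every metaInt audio bytes a SHOUTcast server inserts one length byte,
// followed by (lengthByte * 16) bytes of metadata such as "StreamTitle='Artist - Title';".
// Assumes System.IO and System.Text are available.
private string ReadIcyMetadataBlock(Stream stream)
{
    int lengthByte = stream.ReadByte();        // 0 means no metadata in this block
    if (lengthByte <= 0) return string.Empty;

    byte[] meta = new byte[lengthByte * 16];
    int read = 0;
    while (read < meta.Length)
    {
        int n = stream.Read(meta, read, meta.Length - read);
        if (n <= 0) break;                     // stream ended unexpectedly
        read += n;
    }
    // The metadata encoding varies between servers; UTF-8 is assumed here.
    return Encoding.UTF8.GetString(meta, 0, read).TrimEnd('\0');
}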
/// <summary>
/// Stop the capture process.
/// </summary>
public void Stop()
{
    try
    {
        // If capturing.
        if (_capture.Capturing)
        {
            // Stop the capture.
            _capture.Stop();
            if (_imageCapture != null)
            {
                _imageCapture.Stop();
            }
            if (_soundCapture != null)
            {
                _soundCapture.Stop();
            }

            // If not paused then stop the capture.
            if (!_isPaused)
            {
                if (_imageCapture != null)
                {
                    _imageCapture.Dispose();
                }
                if (_soundCapture != null)
                {
                    _soundCapture.Dispose();
                }
                _imageCapture = null;
                _soundCapture = null;

                // Un-pause.
                _isPaused = false;

                // If in live streaming mode then there is no need to set the duration.
                if (!_isLiveStreaming)
                {
                    // Read the header and set the duration of the video and audio.
                    VideoAudioHeader headers = _mux.ReadHeader();
                    VideoHeader videoHeader = headers.Video.HasValue ? headers.Video.Value : new VideoHeader();
                    AudioHeader audioHeader = headers.Audio.HasValue ? headers.Audio.Value : new AudioHeader();

                    // Get the video and audio duration.
                    double videoDuration = _mux.VideoDuration;
                    double audioDuration = _mux.AudioDuration;

                    // Select what needs to be captured.
                    switch (_active)
                    {
                        case MediaActiveType.Video | MediaActiveType.Audio:
                            // Video and audio capture.
                            videoHeader.Duration = videoDuration;
                            audioHeader.Duration = audioDuration;
                            break;

                        case MediaActiveType.Video:
                            // Video capture.
                            videoHeader.Duration = videoDuration;
                            break;

                        case MediaActiveType.Audio:
                            // Audio capture.
                            audioHeader.Duration = audioDuration;
                            break;
                    }

                    // Write the header.
                    headers.Video = videoHeader;
                    headers.Audio = audioHeader;
                    _mux.WriteHeader(headers);
                }
            }
        }
    }
    catch { }
}
public void StoreFrameForLoopback(AudioHeader ah, byte[] data)
{
    _currentSpeakerHeader = ah;
    _currentSpeakerData = data;
}