Example #1
 private void StopPlayback()
 {
     lock (this)
     {
         if (this.CurrentBuffer != null)
         {
             this.CurrentBuffer.Stop();
             this.CurrentBuffer.Dispose();
             this.CurrentBuffer = null;
         }
         if (this.CurrentStream != null)
         {
             this.CurrentStream.Close();
             this.CurrentStream = null;
         }
         if (this.AudioUpdateThread != null)
         {
             this.AudioUpdateThread = null;
         }
     }
     this.btnPause.Enabled = false;
     this.btnPause.Text    = "Pa&use";
     this.btnStop.Enabled  = false;
     this.AudioIsLooping   = false;
 }
Example #2
        /// <summary>
        /// Cleaning up all the native resources
        /// </summary>
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (OutputQueue != null)
                {
                    OutputQueue.Stop(true);
                }

                if (outputBuffers != null)
                {
                    foreach (var b in outputBuffers)
                    {
                        // Null-conditional guards against a queue that was never created.
                        OutputQueue?.FreeBuffer(b.Buffer);
                    }

                    outputBuffers.Clear();
                    outputBuffers = null;
                }

                if (fileStream != null)
                {
                    fileStream.Close();
                    fileStream = null;
                }

                if (OutputQueue != null)
                {
                    OutputQueue.Dispose();
                    OutputQueue = null;
                }
            }
        }
Example #3
        /// <summary>
        /// Loads the audio stream from the given byte array. The status check that would throw a
        /// ContentLoadException on a non-Ok status is currently commented out below.
        /// </summary>
        /// <param name="audiodata">The full byte array of the audio stream.</param>

        void LoadAudioStream(byte[] audiodata)
        {
            AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE);

            //long pac = afs.DataPacketCount;
            afs.ParseBytes(audiodata, false);  // returns AudioFileStreamStatus; ignored here
            AudioStreamBasicDescription asbd = afs.StreamBasicDescription;

            Rate = (float)asbd.SampleRate;
            Size = (int)afs.DataByteCount;

            if (asbd.ChannelsPerFrame == 1)
            {
                Format = asbd.BitsPerChannel == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
            }
            else
            {
                Format = asbd.BitsPerChannel == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;
            }

            byte [] d = new byte[afs.DataByteCount];
            Array.Copy(audiodata, afs.DataOffset, d, 0, afs.DataByteCount);

            _data = d;

            var _dblDuration = (Size / ((asbd.BitsPerChannel / 8) * ((asbd.ChannelsPerFrame == 0) ? 1 : asbd.ChannelsPerFrame))) / asbd.SampleRate;

            _duration = TimeSpan.FromSeconds(_dblDuration);

            afs.Close();
            //if(status != AudioFileStreamStatus.Ok) {
            //    throw new Content.ContentLoadException("Could not load audio data. The status code was " + status);
            //}
        }
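The duration math used throughout these examples is plain PCM arithmetic: the data byte count divided by bytes-per-frame gives the frame count, and frames divided by the sample rate gives seconds. A minimal standalone sketch, using assumed sample values rather than anything from the examples above:

using System;

class PcmDurationDemo
{
    static void Main()
    {
        // Assumed values: 16-bit stereo PCM at 44.1 kHz.
        int    dataBytes      = 1764000;
        int    channels       = 2;
        int    bitsPerChannel = 16;
        double sampleRate     = 44100.0;

        int    bytesPerFrame = (bitsPerChannel / 8) * channels;      // 4 bytes per frame
        double seconds       = (dataBytes / bytesPerFrame) / sampleRate;

        Console.WriteLine(TimeSpan.FromSeconds(seconds));            // 00:00:10
    }
}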
Example #4
        public WavStreamPlayer()
        {
            afs = new AudioFileStream(AudioFileType.WAVE);

            // event handlers, these are never triggered
            afs.PacketDecoded += OnPacketDecoded;
            afs.PropertyFound += OnPropertyFound;
        }
Example #5
        void HandlePacketDecoded(object sender, PacketReceivedEventArgs e)
        {
            AudioFileStream afs = (AudioFileStream)sender;

            byte[] audioData = new byte[e.Bytes];
            Marshal.Copy(e.InputData, audioData, 0, e.Bytes);
            //Console.WriteLine ("Packet decoded ");
            AudioStreamBasicDescription asbd = afs.StreamBasicDescription;

            Rate = (float)asbd.SampleRate;
            Size = e.Bytes;

            if (asbd.ChannelsPerFrame == 1)
            {
                if (asbd.BitsPerChannel == 8)
                {
                    Format = ALFormat.Mono8;
                }
                else if (asbd.BitsPerChannel == 0)                 // This shouldn't happen; hacking around bad data for now.
                {
                    //TODO: Remove this when sound's been fixed on iOS and other devices.
                    Format = ALFormat.Mono16;
                    Debug.WriteLine("Warning, bad decoded audio packet in SoundEffect.HandlePacketDecoded. Squelching sound.");
                    _duration = TimeSpan.Zero;
                    _data     = audioData;
                    return;
                }
                else
                {
                    Format = ALFormat.Mono16;
                }
            }
            else
            {
                if (asbd.BitsPerChannel == 8)
                {
                    Format = ALFormat.Stereo8;
                }
                else
                {
                    Format = ALFormat.Stereo16;
                }
            }
            _data = audioData;


            var _dblDuration = (e.Bytes / ((asbd.BitsPerChannel / 8) * asbd.ChannelsPerFrame)) / asbd.SampleRate;

            _duration = TimeSpan.FromSeconds(_dblDuration);
//			Console.WriteLine ("From Data: " + _name + " - " + Format + " = " + Rate + " / " + Size + " -- "  + Duration);
//			Console.WriteLine("Duration: " + _dblDuration
//			                        + " / size: " + e.Bytes
//			                        + " bits: " + asbd.BitsPerChannel
//			                        + " channels: " + asbd.ChannelsPerFrame
//			                        + " rate: " + asbd.SampleRate);
        }
Example #6
        void LoadAudioStream(byte[] audiodata)
        {
            AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE);

            //long pac = afs.DataPacketCount;
            afs.PacketDecoded += HandlePacketDecoded;

            afs.ParseBytes(audiodata, false);
            afs.Close();
        }
Example #7
 public void Reset()
 {
     if (fileStream != null)
     {
         fileStream.Close();
         fileStream                = new AudioFileStream(AudioFileType.MP3);
         currentByteCount          = 0;
         fileStream.PacketDecoded += AudioPacketDecoded;
         fileStream.PropertyFound += AudioPropertyFound;
     }
 }
Example #8
        public void Start()
        {
            if (StationURL == null)
            {
                return;
            }

            audioFileStream = new AudioFileStream(AudioFileType.MP3);
            audioFileStream.PropertyFound += StreamPropertyListenerProc;
            audioFileStream.PacketDecoded += StreamPacketsProc;

            var request = new NSUrlRequest(StationURL);

            new NSUrlConnection(request, new ConnectionDelegate(audioFileStream));
        }
Example #9
        void HandlePacketDecoded(object sender, PacketReceivedEventArgs e)
        {
            AudioFileStream afs = (AudioFileStream)sender;

            byte[] audioData = new byte[e.Bytes];
            Marshal.Copy(e.InputData, audioData, 0, e.Bytes);
            //Console.WriteLine ("Packet decoded ");
            AudioStreamBasicDescription asbd = afs.StreamBasicDescription;

            Rate = (float)asbd.SampleRate;
            Size = e.Bytes;

            if (asbd.ChannelsPerFrame == 1)
            {
                if (asbd.BitsPerChannel == 8)
                {
                    Format = ALFormat.Mono8;
                }
                else
                {
                    Format = ALFormat.Mono16;
                }
            }
            else
            {
                if (asbd.BitsPerChannel == 8)
                {
                    Format = ALFormat.Stereo8;
                }
                else
                {
                    Format = ALFormat.Stereo16;
                }
            }
            _data = audioData;

            var _dblDuration = (e.Bytes / ((asbd.BitsPerChannel / 8) * asbd.ChannelsPerFrame)) / asbd.SampleRate;

            _duration = TimeSpan.FromSeconds(_dblDuration);
//			Console.WriteLine ("From Data: " + _name + " - " + Format + " = " + Rate + " / " + Size + " -- "  + Duration);
//			Console.WriteLine("Duration: " + _dblDuration
//			                        + " / size: " + e.Bytes
//			                        + " bits: " + asbd.BitsPerChannel
//			                        + " channels: " + asbd.ChannelsPerFrame
//			                        + " rate: " + asbd.SampleRate);
        }
Example #10
        public MainWindow()
        {
            InitializeComponent();

            BassManager.Initialize("../../../Bass/", -1, 44100, InitializationConfig._3D,
                new WindowInteropHelper(this).Handle, null);
            TagsLibManager.Initialize();

            var info = BassManager.Information;
            
            PluginManager.LoadPlugin(BassPlugin.BassFlac);
            PluginManager.LoadPlugin(BassPlugin.BassApe);
            PluginManager.LoadPlugin(BassPlugin.BassWma);

            PluginManager.FreePlugin(BassPlugin.BassWma);

            fileStream = new AudioFileStream(@"E:\CloudMusic\Perfume - STAR TRAIN.mp3", StreamCreateFileConfig.None);
        }
Example #11
        public MainWindow()
        {
            InitializeComponent();

            BassManager.Initialize("../../../Bass/", -1, 44100, InitializationConfig._3D,
                                   new WindowInteropHelper(this).Handle, null);
            TagsLibManager.Initialize();

            var info = BassManager.Information;

            PluginManager.LoadPlugin(BassPlugin.BassFlac);
            PluginManager.LoadPlugin(BassPlugin.BassApe);
            PluginManager.LoadPlugin(BassPlugin.BassWma);

            PluginManager.FreePlugin(BassPlugin.BassWma);

            fileStream = new AudioFileStream(@"E:\CloudMusic\Perfume - STAR TRAIN.mp3", StreamCreateFileConfig.None);
        }
Example #12
        public override void ViewDidLoad()
        {
            base.ViewDidLoad ();

            View.BackgroundColor = UIColor.White;

            using (var image = UIImage.FromFile ("XamarinLogo.png")) {
                logo = new UIImageView (image) {
                    Frame = new RectangleF (
                        new PointF (View.Center.X - image.Size.Width / 2, View.Center.Y - image.Size.Height / 2),
                        image.Size)
                };
                Add (logo);
            }

            audioFileStream = new AudioFileStream (AudioFileType.MP3);
            audioFileStream.PacketDecoded += OnPacketDecoded;
            audioFileStream.PropertyFound += OnPropertyFound;

            GetAudio ();
        }
Example #13
        public void CafNoData()
        {
            using (AudioFileStream afs = new AudioFileStream(AudioFileType.CAF)) {
                Assert.That(afs.StreamBasicDescription.BitsPerChannel, Is.EqualTo(0), "StreamBasicDescription.BitsPerChannel");
                Assert.That(afs.StreamBasicDescription.BytesPerFrame, Is.EqualTo(0), "StreamBasicDescription.BytesPerFrame");
                Assert.That(afs.StreamBasicDescription.BytesPerPacket, Is.EqualTo(0), "StreamBasicDescription.BytesPerPacket");
                Assert.That(afs.StreamBasicDescription.ChannelsPerFrame, Is.EqualTo(0), "StreamBasicDescription.ChannelsPerFrame");
                Assert.That(afs.StreamBasicDescription.Format, Is.EqualTo((AudioFormatType)0), "StreamBasicDescription.Format");
                Assert.That(afs.StreamBasicDescription.FormatFlags, Is.EqualTo((AudioFormatFlags)0), "StreamBasicDescription.FormatFlags");
                Assert.That(afs.StreamBasicDescription.FramesPerPacket, Is.EqualTo(0), "StreamBasicDescription.FramesPerPacket");
                Assert.That(afs.StreamBasicDescription.Reserved, Is.EqualTo(0), "StreamBasicDescription.Reserved");
                Assert.That(afs.StreamBasicDescription.SampleRate, Is.EqualTo(0), "StreamBasicDescription.SampleRate");

                int offset;
                var packet = afs.FrameToPacket(0, out offset);
                Assert.That(packet, Is.LessThanOrEqualTo(0), "FrameToPacket");                          // -1 on first run
                Assert.That(offset, Is.EqualTo(0), "offset");

                long frame = afs.PacketToFrame(packet);
                Assert.That(frame, Is.LessThanOrEqualTo(0), "PacketToFrame");                           // -1 on first run
            }
        }
Example #14
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            View.BackgroundColor = UIColor.White;

            using (var image = UIImage.FromFile("XamarinLogo.png")) {
                logo = new UIImageView(image)
                {
                    Frame = new RectangleF(
                        new PointF(View.Center.X - image.Size.Width / 2, View.Center.Y - image.Size.Height / 2),
                        image.Size)
                };
                Add(logo);
            }

            audioFileStream = new AudioFileStream(AudioFileType.MP3);
            audioFileStream.PacketDecoded += OnPacketDecoded;
            audioFileStream.PropertyFound += OnPropertyFound;

            GetAudio();
        }
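GetAudio is not shown in Examples 12 and 14. A minimal stand-in, assuming a plain blocking download; the URL and the use of WebClient are assumptions, not the sample's actual code:

        // Hypothetical stand-in for the GetAudio call above.
        void GetAudio()
        {
            using (var wc = new System.Net.WebClient())
            {
                // Assumed URL; the real sample streams its own file.
                byte[] bytes = wc.DownloadData("https://example.com/sample.mp3");
                audioFileStream.ParseBytes(bytes, false);
            }
        }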
Example #15
        /// <summary>
        /// Cleaning up all the native resources
        /// </summary>
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                OutputQueue?.Stop(true);

                if (_outputBuffers != null)
                {
                    foreach (var b in _outputBuffers)
                    {
                        if (b != null)
                        {
                            OutputQueue?.FreeBuffer(b.Buffer);
                        }
                    }

                    _outputBuffers.Clear();
                    _outputBuffers = null;
                }

                if (_audioFileStream != null)
                {
                    _audioFileStream.Close();
                    _audioFileStream = null;
                }

                OutputQueue?.Dispose();
                OutputQueue = null;
            }
        }
Example #16
		/// <summary>
		/// Cleaning up all the native resources
		/// </summary>
		protected virtual void Dispose (bool disposing)
		{
			if (disposing) {
				if (OutputQueue != null)
					OutputQueue.Stop (false);
				
				if (outputBuffers != null && OutputQueue != null)
					foreach (var b in outputBuffers)
						OutputQueue.FreeBuffer (b.Buffer);
				
				if (fileStream != null) {
					fileStream.Close ();
					fileStream = null;
				}
				
				if (OutputQueue != null) {
					OutputQueue.Dispose ();
					OutputQueue = null;
				}
			}
		}
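The Dispose(bool) overloads in Examples 15 and 16 assume the standard IDisposable pattern; a minimal sketch of the public entry point that would pair with them, assumed here since the full classes are not shown:

        public void Dispose()
        {
            Dispose(true);
            // No finalizer work remains once native resources are released explicitly.
            GC.SuppressFinalize(this);
        }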
Example #17
		/// <summary>
		/// Cleaning up all the native resources
		/// </summary>
		protected virtual void Dispose (bool disposing)
		{
			if (disposing) {
				previousCancellation.Cancel ();
				previousPlayAsync.WaitOrCanceled ().Wait ();

				Reset ();
				using (fileStream)
					fileStream = null;
				availableBuffers = null;
				outputBuffers = null;
			}
		}
Example #18
        protected virtual void Dispose(bool disposing)
        {
            // Release unmanaged buffers, flush output, close files.
            if (disposing){
                if (outputBuffers != null && OutputQueue != null)
                    foreach (var b in outputBuffers)
                        OutputQueue.FreeBuffer (b);

                if (fileStream != null){
                    fileStream.Close ();
                    fileStream = null;
                }
                if (OutputQueue != null){
                    OutputQueue.Dispose ();
                    OutputQueue = null;
                }
            }
        }
Example #19
		void LoadAudioStream (byte[] audiodata)
		{
			AudioFileStream afs = new AudioFileStream (AudioFileType.WAVE);
			//long pac = afs.DataPacketCount;
			afs.PacketDecoded += HandlePacketDecoded;

			afs.ParseBytes (audiodata, false);
			afs.Close ();
		}
Example #20
 private void PlayFile(FileInfo FI)
 {
     lock (this) {
         if (this.DS == null)
         {
             this.DS = new DirectSound();
             this.DS.SetCooperativeLevel(this.Handle, CooperativeLevel.Normal);
         }
         this.StopPlayback();
         var bd = new SoundBufferDescription {
             Format      = new WaveFormat(FI.AudioFile.SampleRate, 16, FI.AudioFile.Channels),
             BufferBytes = this.AudioBufferSize,
             Flags       = BufferFlags.GlobalFocus | BufferFlags.StickyFocus | BufferFlags.ControlVolume | BufferFlags.GetCurrentPosition2 | BufferFlags.ControlPositionNotify
         };
         this.CurrentBuffer = new SecondarySoundBuffer(this.DS, bd);
         if (this.AudioUpdateTrigger == null)
         {
             this.AudioUpdateTrigger = new AutoResetEvent(false);
         }
         var chunkSize       = this.AudioBufferSize / this.AudioBufferMarkers;
         var updatePositions = new NotificationPosition[this.AudioBufferMarkers];
         for (var i = 0; i < this.AudioBufferMarkers; ++i)
         {
             updatePositions[i] = new NotificationPosition()
             {
                 WaitHandle = this.AudioUpdateTrigger,
                 Offset     = chunkSize * i
             };
         }
         this.CurrentBuffer.SetNotificationPositions(updatePositions);
         this.CurrentStream = FI.AudioFile.OpenStream();
         {
             var bytes     = new byte[this.CurrentBuffer.Capabilities.BufferBytes];
             var readbytes = this.CurrentStream.Read(bytes, 0, this.CurrentBuffer.Capabilities.BufferBytes);
             if (readbytes < this.CurrentBuffer.Capabilities.BufferBytes)
             {
                 Array.Clear(bytes, readbytes, this.CurrentBuffer.Capabilities.BufferBytes - readbytes);
             }
             DataStream audiodata2;
             var        audiodata1 = this.CurrentBuffer.Lock(0, this.CurrentBuffer.Capabilities.BufferBytes, LockFlags.EntireBuffer, out audiodata2);
             audiodata1.Write(bytes, 0, this.CurrentBuffer.Capabilities.BufferBytes);
             this.CurrentBuffer.Unlock(audiodata1, audiodata2);
         }
         if (this.CurrentStream.Position < this.CurrentStream.Length)
         {
             this.AudioUpdateTrigger.Reset();
             this.AudioUpdateThread = new Thread(this.AudioUpdate);
             this.AudioUpdateThread.Start();
             this.btnPause.Enabled = true;
             this.btnStop.Enabled  = true;
             this.AudioIsLooping   = true;
         }
         else
         {
             this.CurrentStream.Close();
             this.CurrentStream  = null;
             this.AudioIsLooping = false;
         }
         this.CurrentBuffer.Play(0, (this.AudioIsLooping ? PlayFlags.Looping : PlayFlags.None));
     }
 }
Example #21
		/// <summary>
		/// Begins playing the audio stored in the supplied stream
		/// </summary>
		/// <returns>The async.</returns>
		/// <param name="stream">Stream.</param>
		/// <param name="monitor">Progress monitor.</param>
		/// <param name="token">Token.</param>
		public async Task PlayAsync (Stream stream, IProgress<TimeSpan> monitor, CancellationToken token)
		{
			// Cancel the previous 'PlayAsync' call and then set up the cancellation for this invocation
			var cancellation = CancellationTokenSource.CreateLinkedTokenSource (token);
			previousCancellation.Cancel ();
			previousCancellation = cancellation;

			await previousPlayAsync.WaitOrCanceled ();
			token.ThrowIfCancellationRequested ();

			Reset ();
			fileStream = new AudioFileStream (AudioFileType.MP3);
			fileStream.PacketDecoded += AudioPacketDecoded;
			fileStream.PropertyFound += AudioPropertyFound;

			previousPlayAsync = BeginPlayingAsync (stream, monitor, cancellation);
			await previousPlayAsync.WaitOrCanceled ();
			// If the original token is cancelled we should throw a cancelled exception.
			token.ThrowIfCancellationRequested ();
		}
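WaitOrCanceled is a project-specific helper that is not shown in these examples. A plausible sketch, offered purely as an assumption, is an extension method that awaits the task while swallowing its cancellation:

using System;
using System.Threading.Tasks;

static class TaskExtensions
{
    // Hypothetical helper: completes when 'task' finishes, without
    // propagating OperationCanceledException to the awaiter.
    public static async Task WaitOrCanceled(this Task task)
    {
        try { await task.ConfigureAwait(false); }
        catch (OperationCanceledException) { }
    }
}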
Example #22
 public SessionDelegate(AudioFileStream audioFileStream)
 {
     this.audioFileStream = audioFileStream;
 }
Example #23
 public SessionDelegate(AudioFileStream audioFileStream)
 {
     this.audioFileStream = audioFileStream;
 }
Example #24
        /// <summary>
        /// Loads the audio stream from the given byte array. The status check that would throw a
        /// ContentLoadException on a non-Ok status is currently commented out below.
        /// </summary>
        /// <param name="audiodata">The full byte array of the audio stream.</param>

		void LoadAudioStream (byte[] audiodata)
		{
			AudioFileStream afs = new AudioFileStream (AudioFileType.WAVE);
			//long pac = afs.DataPacketCount;
			AudioFileStreamStatus status = afs.ParseBytes (audiodata, false);
            AudioStreamBasicDescription asbd = afs.StreamBasicDescription;
            
            Rate = (float)asbd.SampleRate;
            Size = (int)afs.DataByteCount;
            
            if (asbd.ChannelsPerFrame == 1)
                Format = asbd.BitsPerChannel == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
            else
                Format = asbd.BitsPerChannel == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;

            _data = audiodata;

            var _dblDuration = (Size / ((asbd.BitsPerChannel / 8) * ((asbd.ChannelsPerFrame == 0) ? 1 : asbd.ChannelsPerFrame))) / asbd.SampleRate;
            _duration = TimeSpan.FromSeconds(_dblDuration);

			afs.Close ();
            //if(status != AudioFileStreamStatus.Ok) {
            //    throw new Content.ContentLoadException("Could not load audio data. The status code was " + status);
            //}
		}
Example #25
        private void PlatformLoadAudioStream(Stream s, out TimeSpan duration)
        {
            byte[] buffer;

#if OPENAL && !(MONOMAC || IOS)
            ALFormat format;
            int      size;
            int      freq;

            var stream = s;

            buffer = AudioLoader.Load(stream, out format, out size, out freq);

            Format = format;
            Size   = size;
            Rate   = freq;

            // NOTE: dividing size by freq alone assumes one byte per sample frame;
            // Example 37 derives the true bytes-per-second from the format.
            duration = TimeSpan.FromSeconds((float)size / freq);
#endif

#if MONOMAC || IOS
            var audiodata = new byte[s.Length];
            s.Read(audiodata, 0, (int)s.Length);

            using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
            {
                afs.ParseBytes(audiodata, false);
                Size = (int)afs.DataByteCount;

                buffer = new byte[afs.DataByteCount];
                Array.Copy(audiodata, afs.DataOffset, buffer, 0, afs.DataByteCount);

                AudioStreamBasicDescription asbd = afs.DataFormat;
                int channelsPerFrame             = asbd.ChannelsPerFrame;
                int bitsPerChannel = asbd.BitsPerChannel;

                // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel are invalid because of a bug in Xamarin.iOS
                // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
                if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
                {
                    NSError err;
                    using (NSData nsData = NSData.FromArray(audiodata))
                        using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
                        {
                            channelsPerFrame = (int)player.NumberOfChannels;
                            bitsPerChannel   = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);

                            Rate     = (float)player.SoundSetting.SampleRate;
                            duration = TimeSpan.FromSeconds(player.Duration);
                        }
                }
                else
                {
                    Rate = (float)asbd.SampleRate;
                    double durationSec = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
                    duration = TimeSpan.FromSeconds(durationSec);
                }

                if (channelsPerFrame == 1)
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
                }
                else
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
                }
            }
#endif
            // bind buffer
            SoundBuffer = new OALSoundBuffer();
            SoundBuffer.BindDataBuffer(buffer, Format, Size, (int)Rate);
        }
Example #26
 private void PlayFile(FileInfo FI)
 {
     lock (this)
     {
         if (this.AudioDevice == null)
         {
             this.AudioDevice = new Device();
             AudioDevice.SetCooperativeLevel(this, CooperativeLevel.Normal);
         }
         this.StopPlayback();
         WaveFormat fmt = new WaveFormat();
         fmt.FormatTag             = WaveFormatTag.Pcm;
         fmt.Channels              = FI.AudioFile.Channels;
         fmt.SamplesPerSecond      = FI.AudioFile.SampleRate;
         fmt.BitsPerSample         = 16;
         fmt.BlockAlign            = (short)(FI.AudioFile.Channels * (fmt.BitsPerSample / 8));
         fmt.AverageBytesPerSecond = fmt.SamplesPerSecond * fmt.BlockAlign;
         BufferDescription BD = new BufferDescription(fmt);
         BD.BufferBytes = this.AudioBufferSize;
         BD.GlobalFocus = true;
         BD.StickyFocus = true;
         if (this.chkBufferedPlayback.Checked)
         {
             BD.ControlPositionNotify = true;
             this.CurrentBuffer       = new SecondaryBuffer(BD, this.AudioDevice);
             if (this.AudioUpdateTrigger == null)
             {
                 this.AudioUpdateTrigger = new AutoResetEvent(false);
             }
             int ChunkSize = this.AudioBufferSize / this.AudioBufferMarkers;
             BufferPositionNotify[] UpdatePositions = new BufferPositionNotify[this.AudioBufferMarkers];
             for (int i = 0; i < this.AudioBufferMarkers; ++i)
             {
                 UpdatePositions[i] = new BufferPositionNotify();
                 UpdatePositions[i].EventNotifyHandle = this.AudioUpdateTrigger.SafeWaitHandle.DangerousGetHandle();
                 UpdatePositions[i].Offset            = ChunkSize * i;
             }
             Notify N = new Notify(this.CurrentBuffer);
             N.SetNotificationPositions(UpdatePositions);
             this.CurrentStream = FI.AudioFile.OpenStream();
             this.CurrentBuffer.Write(0, this.CurrentStream, this.CurrentBuffer.Caps.BufferBytes, LockFlag.EntireBuffer);
             if (this.CurrentStream.Position < this.CurrentStream.Length)
             {
                 this.AudioUpdateTrigger.Reset();
                 this.AudioUpdateThread = new Thread(new ThreadStart(this.AudioUpdate));
                 this.AudioUpdateThread.Start();
                 this.btnPause.Enabled = true;
                 this.btnStop.Enabled  = true;
                 this.AudioIsLooping   = true;
             }
             else
             {
                 this.CurrentStream.Close();
                 this.CurrentStream  = null;
                 this.AudioIsLooping = false;
             }
         }
         else
         {
             this.CurrentStream    = FI.AudioFile.OpenStream(true);
             this.CurrentBuffer    = new SecondaryBuffer(this.CurrentStream, BD, this.AudioDevice);
             this.btnPause.Enabled = true;
             this.btnStop.Enabled  = true;
         }
         this.CurrentBuffer.Play(0, (this.AudioIsLooping ? BufferPlayFlags.Looping : BufferPlayFlags.Default));
     }
 }
Example #27
		public StreamingPlayback (AudioFileType type)
		{
			fileStream = new AudioFileStream (type);
			fileStream.PacketDecoded += AudioPacketDecoded;
			fileStream.PropertyFound += AudioPropertyFound;
		}
Example #28
		public void Reset ()
		{
			if (fileStream != null) {
				fileStream.Close ();
				fileStream = new AudioFileStream (AudioFileType.MP3);
				currentByteCount = 0;
				fileStream.PacketDecoded += AudioPacketDecoded;
				fileStream.PropertyFound += AudioPropertyFound;
			}
		}
Example #29
			public ConnectionDelegate (AudioFileStream stream)
			{
				audioFileStream = stream;
			}
Example #30
 public ConnectionDelegate(AudioFileStream stream)
 {
     audioFileStream = stream;
 }
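These delegates only capture the AudioFileStream; the interesting work happens in the data callback, which feeds each downloaded chunk into the parser. A sketch of what that override typically looks like, assuming Xamarin.iOS's NSUrlConnectionDelegate (the original override is not shown here):

        public override void ReceivedData(NSUrlConnection connection, NSData data)
        {
            // Forward raw network bytes to the parser; 'false' means the
            // data is contiguous with what was parsed before.
            audioFileStream.ParseBytes(data.ToArray(), false);
        }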
Example #31
		public void Start ()
		{
			if (StationURL == null)
				return;

			audioFileStream = new AudioFileStream (AudioFileType.MP3);
			audioFileStream.PropertyFound += StreamPropertyListenerProc;
			audioFileStream.PacketDecoded += StreamPacketsProc;

			var request = new NSUrlRequest (StationURL);
			new NSUrlConnection (request, new ConnectionDelegate (audioFileStream));
		}
Example #32
		void Reset ()
		{
			if (fileStream != null) {
				fileStream.Dispose ();
				fileStream = null;
			}

			if (outputQueue != null) {
				outputQueue.RemoveListener (AudioQueueProperty.IsRunning, EmitFinishedEvent);

				outputQueue.Stop (true);
				outputQueue.Reset ();
				foreach (AudioBuffer buf in outputBuffers.Values) {
					buf.PacketDescriptions.Clear ();
					outputQueue.FreeBuffer (buf.Buffer);
				}
				outputQueue.Dispose ();

				availableBuffers = null;
				outputBuffers = null;
				outputQueue = null;
			}
		}
Example #33
        private void PlatformLoadAudioStream(Stream s)
        {
#if OPENAL && !(MONOMAC || IOS)
            ALFormat format;
            int      size;
            int      freq;

            var stream = s;
#if ANDROID
            var needsDispose = false;
            try
            {
                // If seek is not supported (usually an indicator of a stream opened into the AssetManager), then copy
                // into a temporary MemoryStream.
                if (!s.CanSeek)
                {
                    needsDispose = true;
                    stream       = new MemoryStream();
                    s.CopyTo(stream);
                    stream.Position = 0;
                }
#endif
                _data = AudioLoader.Load(stream, out format, out size, out freq);
#if ANDROID
            }
            finally
            {
                if (needsDispose)
                {
                    stream.Dispose();
                }
            }
#endif
            Format = format;
            Size   = size;
            Rate   = freq;
#endif

#if MONOMAC || IOS
            var audiodata = new byte[s.Length];
            s.Read(audiodata, 0, (int)s.Length);

            using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
            {
                afs.ParseBytes(audiodata, false);
                Size = (int)afs.DataByteCount;

                _data = new byte[afs.DataByteCount];
                Array.Copy(audiodata, afs.DataOffset, _data, 0, afs.DataByteCount);

                AudioStreamBasicDescription asbd = afs.DataFormat;
                int channelsPerFrame             = asbd.ChannelsPerFrame;
                int bitsPerChannel = asbd.BitsPerChannel;

                // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel are invalid because of a bug in Xamarin.iOS
                // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
                if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
                {
                    NSError err;
                    using (NSData nsData = NSData.FromArray(audiodata))
                        using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
                        {
                            channelsPerFrame = (int)player.NumberOfChannels;
                            bitsPerChannel   = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);

                            Rate      = (float)player.SoundSetting.SampleRate;
                            _duration = TimeSpan.FromSeconds(player.Duration);
                        }
                }
                else
                {
                    Rate = (float)asbd.SampleRate;
                    double duration = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
                    _duration = TimeSpan.FromSeconds(duration);
                }

                if (channelsPerFrame == 1)
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
                }
                else
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
                }
            }
#endif
        }
Example #34
 public StreamingPlayback(AudioFileType type)
 {
     fileStream = new AudioFileStream(type);
     fileStream.PacketDecoded += AudioPacketDecoded;
     fileStream.PropertyFound += AudioPropertyFound;
 }
Example #35
 public void Start()
 {
     audioFileStream = new AudioFileStream (AudioFileType.MP3);
     audioFileStream.PropertyFound += StreamPropertyListenerProc;
     audioFileStream.PacketDecoded += StreamPacketsProc;
 }
Example #36
        private void PlatformLoadAudioStream(Stream s)
        {
#if OPENAL && !(MONOMAC || IOS)
            
            ALFormat format;
            int size;
            int freq;

            var stream = s;
#if ANDROID
            var needsDispose = false;
            try
            {
                // If seek is not supported (usually an indicator of a stream opened into the AssetManager), then copy
                // into a temporary MemoryStream.
                if (!s.CanSeek)
                {
                    needsDispose = true;
                    stream = new MemoryStream();
                    s.CopyTo(stream);
                    stream.Position = 0;
                }
#endif
                _data = AudioLoader.Load(stream, out format, out size, out freq);
#if ANDROID
            }
            finally
            {
                if (needsDispose)
                    stream.Dispose();
            }
#endif
            Format = format;
            Size = size;
            Rate = freq;

#endif

#if MONOMAC || IOS

            var audiodata = new byte[s.Length];
            s.Read(audiodata, 0, (int)s.Length);

            using (AudioFileStream afs = new AudioFileStream (AudioFileType.WAVE))
            {
                afs.ParseBytes (audiodata, false);
                Size = (int)afs.DataByteCount;

                _data = new byte[afs.DataByteCount];
                Array.Copy (audiodata, afs.DataOffset, _data, 0, afs.DataByteCount);

                AudioStreamBasicDescription asbd = afs.DataFormat;
                int channelsPerFrame = asbd.ChannelsPerFrame;
                int bitsPerChannel = asbd.BitsPerChannel;

                // There is a random chance that properties asbd.ChannelsPerFrame and asbd.BitsPerChannel are invalid because of a bug in Xamarin.iOS
                // See: https://bugzilla.xamarin.com/show_bug.cgi?id=11074 (Failed to get buffer attributes error when playing sounds)
                if (channelsPerFrame <= 0 || bitsPerChannel <= 0)
                {
                    NSError err;
                    using (NSData nsData = NSData.FromArray(audiodata))
                    using (AVAudioPlayer player = AVAudioPlayer.FromData(nsData, out err))
                    {
                        channelsPerFrame = (int)player.NumberOfChannels;
                        bitsPerChannel = player.SoundSetting.LinearPcmBitDepth.GetValueOrDefault(16);

                        Rate = (float)player.SoundSetting.SampleRate;
                        _duration = TimeSpan.FromSeconds(player.Duration);
                    }
                }
                else
                {
                    Rate = (float)asbd.SampleRate;
                    double duration = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
                    _duration = TimeSpan.FromSeconds(duration);
                }

                if (channelsPerFrame == 1)
                    Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
                else
                    Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
            }

#endif
        }
Example #37
        private void PlatformLoadAudioStream(Stream s, out TimeSpan duration)
        {
            byte[] buffer;

#if OPENAL && !(MONOMAC || IOS)
            ALFormat format;
            int      size;
            int      freq;

            var stream = s;

            buffer = AudioLoader.Load(stream, out format, out size, out freq);

            Format = format;
            Size   = size;
            Rate   = freq;

            var bytesPerSecond = freq;
            if (format == ALFormat.Mono16 || format == ALFormat.Stereo8)
            {
                bytesPerSecond *= 2;
            }
            else if (format == ALFormat.Stereo16)
            {
                bytesPerSecond *= 4;
            }

            duration = TimeSpan.FromSeconds((float)size / bytesPerSecond);
#endif

#if MONOMAC || IOS
            var audiodata = new byte[s.Length];
            s.Read(audiodata, 0, (int)s.Length);

            using (AudioFileStream afs = new AudioFileStream(AudioFileType.WAVE))
            {
                afs.ParseBytes(audiodata, false);
                Size = (int)afs.DataByteCount;

                buffer = new byte[afs.DataByteCount];
                Array.Copy(audiodata, afs.DataOffset, buffer, 0, afs.DataByteCount);

                AudioStreamBasicDescription asbd = afs.DataFormat;
                int channelsPerFrame             = asbd.ChannelsPerFrame;
                int bitsPerChannel = asbd.BitsPerChannel;

                Rate = (float)asbd.SampleRate;
                double durationSec = (Size / ((bitsPerChannel / 8) * channelsPerFrame)) / asbd.SampleRate;
                duration = TimeSpan.FromSeconds(durationSec);

                if (channelsPerFrame == 1)
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Mono8 : ALFormat.Mono16;
                }
                else
                {
                    Format = (bitsPerChannel == 8) ? ALFormat.Stereo8 : ALFormat.Stereo16;
                }
            }
#endif
            // bind buffer
            SoundBuffer = new OALSoundBuffer();
            SoundBuffer.BindDataBuffer(buffer, Format, Size, (int)Rate);
        }
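The format-to-bytes-per-second mapping in Example 37 can be restated as a small helper, shown here as a sketch assuming the OpenTK ALFormat enum these examples use:

        static int BytesPerSecond(ALFormat format, int sampleRate)
        {
            switch (format)
            {
                case ALFormat.Mono8:    return sampleRate;      // 1 byte per frame
                case ALFormat.Mono16:
                case ALFormat.Stereo8:  return sampleRate * 2;  // 2 bytes per frame
                case ALFormat.Stereo16: return sampleRate * 4;  // 4 bytes per frame
                default: throw new ArgumentOutOfRangeException(nameof(format));
            }
        }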