Example #1
 private void OnPlaybackStopped(object sender, StoppedEventArgs args)
 {
     OutputDevice.Dispose();
     OutputDevice = null;
     AudioFile.Dispose();
     AudioFile = null;
 }
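This handler pairs with the usual NAudio setup. A minimal sketch of the surrounding state it assumes (the field names come from the snippet; the WaveOutEvent type and the StartPlayback helper are assumptions):

 // requires: using NAudio.Wave;
 private WaveOutEvent OutputDevice;        // any IWavePlayer would do
 private AudioFileReader AudioFile;

 private void StartPlayback(string path)   // hypothetical helper
 {
     OutputDevice = new WaveOutEvent();
     OutputDevice.PlaybackStopped += OnPlaybackStopped;
     AudioFile = new AudioFileReader(path);
     OutputDevice.Init(AudioFile);
     OutputDevice.Play();
 }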
Example #2
 public void PlayFile(FileInfo file)
 {
     FileName = file.Name;
     WavePlayer.Stop();
     AudioFile?.Dispose();
     AudioFile = new AudioFileReader(file.FullName);
     WavePlayer.Init(AudioFile);
     TotalLength = AudioFile.TotalTime;
 }
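Here WavePlayer and AudioFile are presumably fields of the enclosing class (an IWavePlayer and an AudioFileReader, as in Example #1). The null-conditional Dispose on AudioFile is what lets PlayFile be called repeatedly to switch tracks without leaking the previous reader.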
Example #3
        public void Stop()
        {
            _isPlaying = false;

            _audioQueue.Stop(true);
            _audioFile.Dispose();
            _audioQueue.Dispose();
            _isPrepared = false;
        }
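This Stop method comes from an AudioToolbox (Xamarin.iOS) player. A minimal sketch of the fields it assumes; the names are taken from the snippet, the types are an assumption:

        // requires: using AudioToolbox;
        OutputAudioQueue _audioQueue;   // assumed; a recorder would use InputAudioQueue
        AudioFile        _audioFile;
        bool             _isPlaying;
        bool             _isPrepared;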
Example #4
        public void Dispose()
        {
            foreach (var instance in fireAndForgetQueue)
            {
                instance.Dispose();
            }
            fireAndForgetQueue.Clear();

            audioFile.Dispose();
            audioFile = null;
        }
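Here fireAndForgetQueue is presumably a collection of disposable, fire-and-forget playback instances: each instance is disposed before the list is cleared so no stale reference survives, and the shared audioFile is torn down last.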
Example #5
        public void Dispose()
        {
            _isRecording = false;

            _queue.Stop(true);
            _audioFile.Dispose();
            _queue.Dispose();

            _queue     = null;
            _audioFile = null;
        }
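The order matters in this recording teardown: Stop(true) halts the queue immediately and synchronously, so by the time _audioFile is disposed no recording callback should still be writing to it. (This reading assumes _queue is an AudioToolbox InputAudioQueue; the snippet does not show the declarations.)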
Example #6
        private void DisposeCurrentSong()
        {
            SongTimer.Enabled = false;

            if (OutputDevice != null)
            {
                OutputDevice.PlaybackStopped -= OutputDevice_PlaybackStopped;
                OutputDevice.Dispose();
            }
            if (AudioFile != null)
            {
                AudioFile.Dispose();
            }

            pictureBoxAlbumArt.Image = null;
            textBoxCurrentSong.Text  = string.Empty;
            CurrentSong = string.Empty;

            SongTimer.Enabled = true;
        }
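Detaching the PlaybackStopped handler before calling Dispose is deliberate: disposing an output device typically raises PlaybackStopped, and unsubscribing first keeps a handler like the one in Example #1 from running in the middle of teardown.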
Example #7
        public void OpenWaveFile(string fileName, MMDevice device)
        {
            CloseWaveFile();
            mDrawSource = CodecFactory.Instance.GetCodec(fileName).ToSampleSource().ToMono();
            mDevice     = device;

            mFilename = fileName;

            AudioFile file = new AudioFile(fileName, ReadStyle.Average);

            if (file.Tag.Performers != null && file.Tag.Title != null)
            {
                label1.Text = file.Tag.Performers[0] + " - " + file.Tag.Title;
            }
            else
            {
                label1.Text = Filename;
            }
            file.Dispose();

            //ThreadStart starter = OpenWaveFileThread;
            //starter += () =>
            //{
            //    // comment
            //};
            //Thread thread = new Thread(starter) { IsBackground = true };
            ////MainForm.ThreadStartEvent();
            //thread.Start();

            Task nt = Task.Run(() => OpenWaveFileThread());

            var awaiter = nt.GetAwaiter();

            awaiter.OnCompleted(() => {
                MainForm.ThreadFinishEvent();
            });
        }
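One detail worth noting at the end of OpenWaveFile: TaskAwaiter.OnCompleted captures the current SynchronizationContext, so when this runs on the WinForms UI thread, MainForm.ThreadFinishEvent() is marshalled back to that thread after the task finishes. A sketch of the same continuation without the manual awaiter (assuming the method is called on the UI thread):

            Task.Run(() => OpenWaveFileThread())
                .ContinueWith(_ => MainForm.ThreadFinishEvent(),
                              TaskScheduler.FromCurrentSynchronizationContext());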
Example #8
        bool DoConvertFile(CFUrl sourceURL, NSUrl destinationURL, AudioFormatType outputFormatType, double outputSampleRate)
        {
            // in this sample we should never be on the main thread here
            Debug.Assert(!NSThread.IsMain);

            // transition thread state to State::Running before continuing
            AppDelegate.ThreadStateSetRunning();

            Debug.WriteLine("DoConvertFile");

            // get the source file
            AudioFile sourceFile = AudioFile.Open(sourceURL, AudioFilePermission.Read);

            var srcFormat = (AudioStreamBasicDescription)sourceFile.DataFormat;
            var dstFormat = new AudioStreamBasicDescription();

            // setup the output file format
            dstFormat.SampleRate = (outputSampleRate == 0 ? srcFormat.SampleRate : outputSampleRate); // set sample rate
            if (outputFormatType == AudioFormatType.LinearPCM)
            {
                // if the output format is PCM create a 16-bit int PCM file format description as an example
                dstFormat.Format           = AudioFormatType.LinearPCM;
                dstFormat.ChannelsPerFrame = srcFormat.ChannelsPerFrame;
                dstFormat.BitsPerChannel   = 16;
                dstFormat.BytesPerPacket   = dstFormat.BytesPerFrame = 2 * dstFormat.ChannelsPerFrame;
                dstFormat.FramesPerPacket  = 1;
                dstFormat.FormatFlags      = AudioFormatFlags.LinearPCMIsPacked | AudioFormatFlags.LinearPCMIsSignedInteger;
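                // worked example: for a stereo source this describes 16-bit interleaved
                // PCM where BytesPerFrame = BytesPerPacket = 2 bytes * 2 channels = 4,
                // and FramesPerPacket = 1 means each packet holds exactly one frame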
            }
            else
            {
                // compressed format - need to set at least format, sample rate and channel fields for kAudioFormatProperty_FormatInfo
                dstFormat.Format           = outputFormatType;
                dstFormat.ChannelsPerFrame = (outputFormatType == AudioFormatType.iLBC ? 1 : srcFormat.ChannelsPerFrame); // for iLBC num channels must be 1

                // use AudioFormat API to fill out the rest of the description
                AudioFormatError afe = AudioStreamBasicDescription.GetFormatInfo(ref dstFormat);
                if (afe != AudioFormatError.None)
                {
                    Debug.Print("Cannot create destination format {0:x}", afe);

                    AppDelegate.ThreadStateSetDone();
                    return(false);
                }
            }

            // create the AudioConverter
            AudioConverterError ce;
            var converter = AudioConverter.Create(srcFormat, dstFormat, out ce);

            Debug.Assert(ce == AudioConverterError.None);

            converter.InputData += EncoderDataProc;

            // if the source has a cookie, get it and set it on the Audio Converter
            ReadCookie(sourceFile, converter);

            // get the actual formats back from the Audio Converter
            srcFormat = converter.CurrentInputStreamDescription;
            dstFormat = converter.CurrentOutputStreamDescription;

            // if encoding to AAC set the bitrate to 192k which is a nice value for this demo
            // kAudioConverterEncodeBitRate is a UInt32 value containing the number of bits per second to aim for when encoding data
            if (dstFormat.Format == AudioFormatType.MPEG4AAC)
            {
                uint outputBitRate = 192000; // 192k

                // ignore errors as setting may be invalid depending on format specifics such as sample rate
                try
                {
                    converter.EncodeBitRate = outputBitRate;
                }
                catch
                {
                }

                // get it back and print it out
                outputBitRate = converter.EncodeBitRate;
                Debug.Print("AAC Encode Bitrate: {0}", outputBitRate);
            }

            // can the Audio Converter resume conversion after an interruption?
            // this property may be queried at any time after construction of the Audio Converter after setting its output format
            // there's no clear reason to prefer construction time, interruption time, or potential resumption time but we prefer
            // construction time since it means less code to execute during or after interruption time
            bool canResumeFromInterruption;

            try
            {
                canResumeFromInterruption = converter.CanResumeFromInterruption;
                Debug.Print("Audio Converter {0} continue after interruption!", canResumeFromInterruption ? "CAN" : "CANNOT");
            }
            catch (Exception e)
            {
                // if the property is unimplemented (kAudioConverterErr_PropertyNotSupported, or paramErr returned in the case of PCM),
                // then the codec being used is not a hardware codec so we're not concerned about codec state
                // we are always going to be able to resume conversion after an interruption

                canResumeFromInterruption = false;
                Debug.Print("CanResumeFromInterruption: {0}", e.Message);
            }

            // create the destination file
            var destinationFile = AudioFile.Create(destinationURL, AudioFileType.CAF, dstFormat, AudioFileFlags.EraseFlags);

            // set up source buffers and data proc info struct
            afio            = new AudioFileIO(32 * 1024); // 32Kb
            afio.SourceFile = sourceFile;
            afio.SrcFormat  = srcFormat;

            if (srcFormat.BytesPerPacket == 0)
            {
                // if the source format is VBR, we need to get the maximum packet size
                // use kAudioFilePropertyPacketSizeUpperBound which returns the theoretical maximum packet size
                // in the file (without actually scanning the whole file to find the largest packet,
                // as may happen with kAudioFilePropertyMaximumPacketSize)
                afio.SrcSizePerPacket = sourceFile.PacketSizeUpperBound;

                // how many packets can we read for our buffer size?
                afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket;

                // allocate memory for the PacketDescription structures describing the layout of each packet
                afio.PacketDescriptions = new AudioStreamPacketDescription [afio.NumPacketsPerRead];
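                // worked example (hypothetical numbers): with the 32 KB buffer above and
                // a PacketSizeUpperBound of 1024 bytes, NumPacketsPerRead = 32768 / 1024
                // = 32, so 32 packet descriptions are allocated per read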
            }
            else
            {
                // CBR source format
                afio.SrcSizePerPacket  = srcFormat.BytesPerPacket;
                afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket;
            }

            // set up output buffers
            int       outputSizePerPacket = dstFormat.BytesPerPacket; // this will be non-zero if the format is CBR
            const int theOutputBufSize    = 32 * 1024;                // 32Kb
            var       outputBuffer        = Marshal.AllocHGlobal(theOutputBufSize);

            AudioStreamPacketDescription[] outputPacketDescriptions = null;

            if (outputSizePerPacket == 0)
            {
                // if the destination format is VBR, we need to get max size per packet from the converter
                outputSizePerPacket = (int)converter.MaximumOutputPacketSize;

                // allocate memory for the PacketDescription structures describing the layout of each packet
                outputPacketDescriptions = new AudioStreamPacketDescription [theOutputBufSize / outputSizePerPacket];
            }
            int numOutputPackets = theOutputBufSize / outputSizePerPacket;

            // if the destination format has a cookie, get it and set it on the output file
            WriteCookie(converter, destinationFile);

            // write destination channel layout
            if (srcFormat.ChannelsPerFrame > 2)
            {
                WriteDestinationChannelLayout(converter, sourceFile, destinationFile);
            }

            long         totalOutputFrames = 0; // used for debugging
            long         outputFilePos     = 0;
            AudioBuffers fillBufList       = new AudioBuffers(1);
            bool         error             = false;

            // loop to convert data
            Debug.WriteLine("Converting...");
            while (true)
            {
                // set up output buffer list
                fillBufList[0] = new AudioBuffer()
                {
                    NumberChannels = dstFormat.ChannelsPerFrame,
                    DataByteSize   = theOutputBufSize,
                    Data           = outputBuffer
                };

                // this will block if we're interrupted
                var wasInterrupted = AppDelegate.ThreadStatePausedCheck();

                if (wasInterrupted && !canResumeFromInterruption)
                {
                    // this is our interruption termination condition
                    // an interruption has occurred but the Audio Converter cannot continue
                    Debug.WriteLine("Cannot resume from interruption");
                    error = true;
                    break;
                }

                // convert data
                int ioOutputDataPackets = numOutputPackets;
                var fe = converter.FillComplexBuffer(ref ioOutputDataPackets, fillBufList, outputPacketDescriptions);
                // if interrupted in the process of the conversion call, we must handle the error appropriately
                if (fe != AudioConverterError.None)
                {
                    Debug.Print("FillComplexBuffer: {0}", fe);
                    error = true;
                    break;
                }

                if (ioOutputDataPackets == 0)
                {
                    // this is the EOF condition
                    break;
                }

                // write to output file
                var inNumBytes = fillBufList[0].DataByteSize;

                var we = destinationFile.WritePackets(false, inNumBytes, outputPacketDescriptions, outputFilePos, ref ioOutputDataPackets, outputBuffer);
                if (we != 0)
                {
                    Debug.Print("WritePackets: {0}", we);
                    error = true;
                    break;
                }

                // advance output file packet position
                outputFilePos += ioOutputDataPackets;

                if (dstFormat.FramesPerPacket != 0)
                {
                    // the format has constant frames per packet
                    totalOutputFrames += (ioOutputDataPackets * dstFormat.FramesPerPacket);
                }
                else
                {
                    // variable frames per packet require doing this for each packet (adding up the number of sample frames of data in each packet)
                    for (var i = 0; i < ioOutputDataPackets; ++i)
                    {
                        totalOutputFrames += outputPacketDescriptions[i].VariableFramesInPacket;
                    }
                }
            }

            Marshal.FreeHGlobal(outputBuffer);

            if (!error)
            {
                // write out any of the leading and trailing frames for compressed formats only
                if (dstFormat.BitsPerChannel == 0)
                {
                    // sanity check: the output frame count we tallied should jibe with what was written
                    Debug.Print("Total number of output frames counted: {0}", totalOutputFrames);
                    WritePacketTableInfo(converter, destinationFile);
                }

                // write the cookie again - sometimes codecs will update cookies at the end of a conversion
                WriteCookie(converter, destinationFile);
            }

            converter.Dispose();
            destinationFile.Dispose();
            sourceFile.Dispose();

            // transition thread state to State.Done before continuing
            AppDelegate.ThreadStateSetDone();

            return(!error);
        }
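A hypothetical call site for DoConvertFile, consistent with the sample's requirement that conversion run off the main thread (the file names and the LinearPCM/44.1 kHz output are placeholders, not from the original):

        void StartConversion()  // hypothetical helper
        {
            var src = CFUrl.FromFile("input.m4a");
            var dst = NSUrl.FromFilename(Path.Combine(Path.GetTempPath(), "output.caf"));

            Task.Run(() =>
            {
                bool ok = DoConvertFile(src, dst, AudioFormatType.LinearPCM, 44100);
                Debug.WriteLine(ok ? "conversion finished" : "conversion failed");
            });
        }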