Inheritance: AVFrameAbs
Example 1
    protected void Click_Handler(object sender, EventArgs e)
    {
        Gtk.FileChooserDialog dialog = new FileChooserDialog("Choose a file",
                                                             this,
                                                             FileChooserAction.Open,
                                                             new object[] {
            Gtk.Stock.Open,
            Gtk.ResponseType.Ok
        });
        // Run() returns the ResponseType of the clicked button; bail out if the user cancelled.
        if (dialog.Run() != (int)Gtk.ResponseType.Ok)
        {
            dialog.Destroy();
            return;
        }

        string file = dialog.Filename;

        dialog.Destroy();
        //writer.Seek(44, SeekOrigin.Begin);
        workingThread = new Thread(new ThreadStart(() =>
        {
            var stream     = FFMpegBase.Instance.GetAVStream(file);
            IAVFrame frame = null;
            while (((frame = stream.GetNext()) != null) && (!closing))
            {
                if (frame.FrameType == AVFrameType.Video)
                {
                    SharpFFmpeg.VideoFrame video = (SharpFFmpeg.VideoFrame)frame;
                    if (frame.Decode())
                    {
                        var data = video.ImgData;
                        DrawImage(data);
                    }
                }
                else if (frame.FrameType == AVFrameType.Audio)
                {
                    SharpFFmpeg.AudioFrame audio = (SharpFFmpeg.AudioFrame)frame;
                    if (audio.Decode())
                    {
                        var data = audio.WaveDate;
                        audioPlayer.PutSample(data);
                    }
                }
                frame.Close();
            }
            audioPlayer.Stop();
            stream.Close();
        }));
        workingThread.Start();
    }
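DrawImage and audioPlayer are helpers of the sample project, not part of SharpFFmpeg, so their code does not appear on this page. A minimal sketch of what a Gtk# DrawImage could look like is below; it assumes ImgData holds packed 24-bit RGB, and frameWidth, frameHeight and imageWidget are hypothetical members of the window. Because GetNext() runs on a worker thread, Application.Invoke keeps the widget update on the GTK main loop.

    // Sketch only: marshal the decoded RGB buffer into a Gdk.Pixbuf and update a Gtk.Image
    // on the UI thread. frameWidth, frameHeight and imageWidget are hypothetical fields.
    void DrawImage(byte[] rgbData)
    {
        Gtk.Application.Invoke(delegate
        {
            var pixbuf = new Gdk.Pixbuf(rgbData,
                                        Gdk.Colorspace.Rgb,
                                        false,            // no alpha channel
                                        8,                // bits per sample
                                        frameWidth,
                                        frameHeight,
                                        frameWidth * 3);  // rowstride for packed RGB24
            imageWidget.Pixbuf = pixbuf;
        });
    }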
Example 2
		void PlayUsingASound (AudioFrame frame)
		{
            var type = frame.WaveDate;
			int ret = 0;
			if (pcm == IntPtr.Zero) {
				int dir;
                int rate = type.sample_rate == 0 ? 44100 : type.sample_rate; // ALSA wants the sample rate here, not bit_rate

                int channel = type.channel == 0 ? 2 : type.channel;

				string device = "default";
				ret = Asound.snd_pcm_open (out pcm, device, 
				                    Asound.snd_pcm_stream_t.SND_PCM_STREAM_PLAYBACK,
				                    0);
				if (ret < 0) {
					string err = Asound._snd_strerror(ret);
					Console.WriteLine(err);
					pcm = IntPtr.Zero;
					return;
				}
				else
				{
					Console.WriteLine("open audio device ok, pcm is {0}", pcm);
				}
				IntPtr param = Asound.snd_pcm_hw_params_alloca ();
				ret = Asound.snd_pcm_hw_params_any (pcm, param);
				ret = Asound.snd_pcm_hw_params_set_access (pcm, param, Asound.snd_pcm_access_t.SND_PCM_ACCESS_RW_INTERLEAVED);
				ret = Asound.snd_pcm_hw_params_set_format (pcm, param, Asound.snd_pcm_format_t.SND_PCM_FORMAT_S16_LE);
				ret = Asound.snd_pcm_hw_params_set_channels (pcm, param, channel);
				int val = rate;
				ret = Asound.snd_pcm_hw_params_set_rate_near (pcm, param, ref val, out dir);
                ulong frames = (ulong)type.nb_samples;
				ret = Asound.snd_pcm_hw_params_set_period_size_near (pcm, param, ref frames, out dir);
				ret = Asound.snd_pcm_hw_params (pcm, param);
				Asound.snd_pcm_params_free(param);
			}
			if (pcm == IntPtr.Zero)
				return;

            //long r = Asound.snd_pcm_writei(pcm, type, (ulong)type.nb_samples);
            //Console.WriteLine("snd_pcm_writei {0}:{1}, return {2}", type.sample, type.nb_samples, r);
		}
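The device write itself is commented out at the end of the method. The sketch below fills in that step under two assumptions: the Asound wrapper also exposes ALSA's standard snd_pcm_prepare, and snd_pcm_writei accepts the WaveDataType buffer exactly as the commented-out call passes it.

		// Sketch only: hand one decoded buffer to ALSA and recover once from an underrun.
		// Asound.snd_pcm_prepare is assumed to exist in the wrapper; the snd_pcm_writei call
		// mirrors the commented-out line above.
		void WriteUsingASound (WaveDataType type)
		{
			long written = Asound.snd_pcm_writei (pcm, type, (ulong)type.nb_samples);
			if (written < 0) {
				Console.WriteLine (Asound._snd_strerror ((int)written));
				Asound.snd_pcm_prepare (pcm);   // reset the device after an underrun (-EPIPE)
				Asound.snd_pcm_writei (pcm, type, (ulong)type.nb_samples);
			}
		}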
Example 3
        public IAVFrame GetNext()
        {
            IAVFrame frame   = null;
            IntPtr   pPacket = Marshal.AllocHGlobal(Marshal.SizeOf(new AV.AVPacket()));

            if (AV.av_read_frame(rawFormatCtx, pPacket) != 0)
            {
                Marshal.FreeHGlobal(pPacket);
                pPacket = IntPtr.Zero;
                return(null);
            }


            AV.AVPacket packet = new NativeGetter <AV.AVPacket>(pPacket).Get();
            if (!decoderTable.ContainsKey(packet.stream_index) ||
                !mediaTypeTable.ContainsKey(packet.stream_index))
            {
                Marshal.FreeHGlobal(pPacket);
                pPacket = IntPtr.Zero;
                return(null);
            }

            var codec = decoderTable[packet.stream_index];
            var type  = mediaTypeTable[packet.stream_index];

            switch (type)
            {
            case AV.AVMediaType.AVMEDIA_TYPE_AUDIO:
                frame = new AudioFrame(pPacket, codec);
                return(frame);

            case AV.AVMediaType.AVMEDIA_TYPE_VIDEO:
                frame = new VideoFrame(pPacket, codec);
                return(frame);

            default:
                // Free the packet before throwing so it is not leaked for unsupported streams.
                Marshal.FreeHGlobal(pPacket);
                throw new Exception("Unsupported media type " + type);
            }
        }
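GetNext() transfers ownership of the allocated packet to the returned frame, and the callers in Examples 1 and 4 release it with frame.Close(). A small convenience wrapper, not part of the library, can make that release automatic; AVStreamReader is a placeholder for whatever type FFMpegBase.Instance.GetAVStream() actually returns, and System.Collections.Generic is assumed to be imported.

        // Sketch only: iterate over frames and guarantee Close() even if the consumer throws.
        public static IEnumerable<IAVFrame> ReadFrames(AVStreamReader stream)
        {
            IAVFrame frame;
            while ((frame = stream.GetNext()) != null)
            {
                try
                {
                    yield return frame;   // hand the packet wrapper to the caller
                }
                finally
                {
                    frame.Close();        // always release the underlying AVPacket
                }
            }
        }

Callers can then foreach over ReadFrames(stream) instead of repeating the manual while loop of Examples 1 and 4.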
Example 4
        //private void WriteWaveHeader(BinaryWriter writer, WaveDataType format)
        //{
        //    WAVE_Header wav_Header = new WAVE_Header();
        //    wav_Header.RIFF_ID[0] = 'R';
        //    wav_Header.RIFF_ID[1] = 'I';
        //    wav_Header.RIFF_ID[2] = 'F';
        //    wav_Header.RIFF_ID[3] = 'F';
        //    wav_Header.File_Size = waveDataSize + 36;
        //    wav_Header.RIFF_Type[0] = 'W';
        //    wav_Header.RIFF_Type[1] = 'A';
        //    wav_Header.RIFF_Type[2] = 'V';
        //    wav_Header.RIFF_Type[3] = 'E';

        //    wav_Header.FMT_ID[0] = 'f';
        //    wav_Header.FMT_ID[1] = 'm';
        //    wav_Header.FMT_ID[2] = 't';
        //    wav_Header.FMT_ID[3] = ' ';
        //    wav_Header.FMT_Size = 16;
        //    wav_Header.FMT_Tag = 0x0001;
        //    wav_Header.FMT_Channel = (ushort)format.channel;
        //    wav_Header.FMT_SamplesPerSec = format.sample_rate;

        //    var nBlockAlign = (short)(format.channel * (format.bit_per_sample / 8));
        //    var nAvgBytesPerSec = format.sample_rate * nBlockAlign;

        //    wav_Header.AvgBytesPerSec = nAvgBytesPerSec;
        //    wav_Header.BlockAlign = (ushort)nBlockAlign;
        //    wav_Header.BitsPerSample = (ushort)format.bit_per_sample;

        //    wav_Header.DATA_ID[0] = 'd';
        //    wav_Header.DATA_ID[1] = 'a';
        //    wav_Header.DATA_ID[2] = 't';
        //    wav_Header.DATA_ID[3] = 'a';
        //    wav_Header.DATA_Size = waveDataSize;


        //    int waveHdrSize = Marshal.SizeOf(wav_Header);
        //    var ptr = Marshal.AllocHGlobal(waveHdrSize);
        //    byte[] data = new byte[waveHdrSize];
        //    Marshal.StructureToPtr(wav_Header, ptr, false);
        //    Marshal.Copy(ptr, data, 0, waveHdrSize);
        //    writer.Seek(0, SeekOrigin.Begin);
        //    writer.Write(data);
        //    writer.Flush();
        //    Marshal.FreeHGlobal(ptr);
        //}


        private void button1_Click(object sender, EventArgs e)
        {
            OpenFileDialog dialog = new OpenFileDialog();

            // Use the selection only if the user confirmed the dialog.
            if (dialog.ShowDialog() != DialogResult.OK)
                return;
            string file = dialog.FileName;
            //writer.Seek(44, SeekOrigin.Begin);
            WaveDataType first = null;

            workingThread = new Thread(new ThreadStart(() =>
            {
                var stream     = FFMpegBase.Instance.GetAVStream(file);
                IAVFrame frame = null;
                while (((frame = stream.GetNext()) != null) && (!closing))
                {
                    if (frame.FrameType == AVFrameType.Video)
                    {
                        SharpFFmpeg.VideoFrame video = (SharpFFmpeg.VideoFrame)frame;
                        if (frame.Decode())
                        {
                            var data = video.ImgData;
                            DrawImage(data);
                        }
                    }
                    else if (frame.FrameType == AVFrameType.Audio)
                    {
                        SharpFFmpeg.AudioFrame audio = (SharpFFmpeg.AudioFrame)frame;
                        if (audio.Decode())
                        {
                            var data = audio.WaveDate;
                            audioPlayer.PutSample(data);
                        }
                    }
                    frame.Close();
                }
                stream.Close();
                audioPlayer.Stop();
            }));
            workingThread.Start();
        }
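The commented-out WriteWaveHeader above builds a WAVE_Header struct and round-trips it through unmanaged memory. The same 44-byte PCM header can be written directly with BinaryWriter, as sketched below; the channel, sample-rate and bits-per-sample values correspond to the WaveDataType fields used in the commented code, and System.IO plus System.Text are assumed to be imported.

        // Sketch only: write the canonical 44-byte PCM WAV header that WAVE_Header describes.
        private static void WriteWaveHeader(BinaryWriter writer, int channels, int sampleRate,
                                            int bitsPerSample, int waveDataSize)
        {
            short blockAlign = (short)(channels * (bitsPerSample / 8));
            int avgBytesPerSec = sampleRate * blockAlign;

            writer.Seek(0, SeekOrigin.Begin);
            writer.Write(Encoding.ASCII.GetBytes("RIFF"));
            writer.Write(waveDataSize + 36);       // RIFF chunk size: file size minus 8 bytes
            writer.Write(Encoding.ASCII.GetBytes("WAVE"));
            writer.Write(Encoding.ASCII.GetBytes("fmt "));
            writer.Write(16);                      // fmt chunk size for plain PCM
            writer.Write((ushort)0x0001);          // format tag: PCM
            writer.Write((ushort)channels);
            writer.Write(sampleRate);
            writer.Write(avgBytesPerSec);
            writer.Write((ushort)blockAlign);
            writer.Write((ushort)bitsPerSample);
            writer.Write(Encoding.ASCII.GetBytes("data"));
            writer.Write(waveDataSize);            // raw PCM byte count that follows
            writer.Flush();
        }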
Example 5
        private void PlayUsingWaveOut(AudioFrame frame)
        {
            var type = frame.WaveDate;
            int ret;
            int size = type.size;
            int rate = type.sample_rate == 0 ? 44100 : type.sample_rate;
            int bit = type.bit_per_sample == 0 ? 16 : type.bit_per_sample;
            int channel = type.channel == 0 ? 2 : type.channel;
            if (waveOut == IntPtr.Zero){
                WaveLib.WaveFormat fmt = new WaveLib.WaveFormat(rate, bit, channel);
                ret = WaveNative.waveOutOpen(out waveOut, -1, fmt, null, 0, WaveNative.CALLBACK_NULL);
                if (ret != WaveNative.MMSYSERR_NOERROR)
                    throw new Exception("can not open wave device");
            }


            queue.Enqueue(type);
            //ret = WriteWaveOut(frame);
        }
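WriteWaveOut is commented out, so this handler only queues the decoded buffers. One way to push a dequeued buffer to the already-opened waveOut handle is sketched below with raw winmm.dll P/Invoke rather than the WaveLib wrapper, because the wrapper's write-side signatures are not shown in these examples; how the PCM bytes are pulled out of WaveDataType is also not shown, so the method simply takes a byte[].

        // Sketch only: standard winmm.dll declarations plus a blocking single-buffer write.
        // Requires System.Runtime.InteropServices and System.Threading.
        [StructLayout(LayoutKind.Sequential)]
        private struct WAVEHDR
        {
            public IntPtr lpData;
            public uint dwBufferLength;
            public uint dwBytesRecorded;
            public IntPtr dwUser;
            public uint dwFlags;
            public uint dwLoops;
            public IntPtr lpNext;
            public IntPtr reserved;
        }

        private const uint WHDR_DONE = 0x00000001;

        [DllImport("winmm.dll")] private static extern int waveOutPrepareHeader(IntPtr hwo, IntPtr pHdr, int size);
        [DllImport("winmm.dll")] private static extern int waveOutWrite(IntPtr hwo, IntPtr pHdr, int size);
        [DllImport("winmm.dll")] private static extern int waveOutUnprepareHeader(IntPtr hwo, IntPtr pHdr, int size);

        private void WriteWaveOut(byte[] pcmData)
        {
            int hdrSize = Marshal.SizeOf(typeof(WAVEHDR));
            IntPtr buffer = Marshal.AllocHGlobal(pcmData.Length);
            Marshal.Copy(pcmData, 0, buffer, pcmData.Length);

            var hdr = new WAVEHDR { lpData = buffer, dwBufferLength = (uint)pcmData.Length };
            IntPtr pHdr = Marshal.AllocHGlobal(hdrSize);
            Marshal.StructureToPtr(hdr, pHdr, false);

            waveOutPrepareHeader(waveOut, pHdr, hdrSize);
            waveOutWrite(waveOut, pHdr, hdrSize);

            // Poll until the driver marks the buffer as played, then release everything.
            while ((((WAVEHDR)Marshal.PtrToStructure(pHdr, typeof(WAVEHDR))).dwFlags & WHDR_DONE) == 0)
                Thread.Sleep(1);

            waveOutUnprepareHeader(waveOut, pHdr, hdrSize);
            Marshal.FreeHGlobal(pHdr);
            Marshal.FreeHGlobal(buffer);
        }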
Example 6
        public IAVFrame GetNext()
        {
            IAVFrame frame = null;
            IntPtr pPacket = Marshal.AllocHGlobal(Marshal.SizeOf(new AV.AVPacket()));
            if (AV.av_read_frame(rawFormatCtx, pPacket) != 0)
            {
                Marshal.FreeHGlobal(pPacket);
                pPacket = IntPtr.Zero;
                return null;
            }


            AV.AVPacket packet = new NativeGetter<AV.AVPacket>(pPacket).Get();
            if (! decoderTable.ContainsKey(packet.stream_index) ||
                ! mediaTypeTable.ContainsKey(packet.stream_index))
            {
                Marshal.FreeHGlobal(pPacket);
                pPacket = IntPtr.Zero;
                return null;
            }

            var codec = decoderTable[packet.stream_index];
            var type = mediaTypeTable[packet.stream_index];
            switch(type)
            {
                case AV.AVMediaType.AVMEDIA_TYPE_AUDIO:
                    frame = new AudioFrame(pPacket, codec);
                    return frame;
                case AV.AVMediaType.AVMEDIA_TYPE_VIDEO:
                    frame = new VideoFrame(pPacket, codec);
                    return frame;
                default:
                    // Free the packet before throwing so it is not leaked for unsupported streams.
                    Marshal.FreeHGlobal(pPacket);
                    throw new Exception("Unsupported media type " + type);
            }

        }