Example 1
        internal H264Decoder()
        {
            try
            {
                RegisterFFmpegBinaries();
                var pCodec = ffmpeg.avcodec_find_decoder(AVCodecID.AV_CODEC_ID_H264);
                if (pCodec == null)
                {
                    throw new Exception("H.264 decoder not found");
                }

                _pCodecCtx               = ffmpeg.avcodec_alloc_context3(pCodec);
                _pCodecCtx->qcompress    = 1F;
                _pCodecCtx->frame_number = 1;
                _pCodecCtx->codec_type   = AVMediaType.AVMEDIA_TYPE_VIDEO;
                _pCodecParserCtx         = ffmpeg.av_parser_init((int)AVCodecID.AV_CODEC_ID_H264);
                if (null == _pCodecParserCtx)
                {
                    throw new Exception("_pCodecParserCtx is null");
                }

                if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) != 0)
                {
                    _pCodecCtx->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
                }


                var ret = ffmpeg.avcodec_open2(_pCodecCtx, pCodec, null);
                if (ret < 0)
                {
                    throw new Exception($"avcodec_open2 failed with error code {ret}");
                }

                _pFrame = ffmpeg.av_frame_alloc();
            }
            catch (Exception ex)
            {
                throw new Exception("failed to initialize the FFmpeg H.264 decoder (are the FFmpeg binaries available?)", ex);
            }
        }
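
The constructor in Example 1 only allocates the parser, the codec context and the frame; the fields are presumably consumed by a decode routine elsewhere in the class. A minimal sketch of such a routine, assuming the field types shown above (the Decode method itself, its packet handling and the placeholder comment are not part of the original source):

        internal unsafe void Decode(byte[] data)
        {
            AVPacket *pkt = ffmpeg.av_packet_alloc();
            try
            {
                fixed (byte *pData = data)
                {
                    int offset = 0;
                    while (offset < data.Length)
                    {
                        // let the parser split the raw byte stream into complete packets
                        int consumed = ffmpeg.av_parser_parse2(_pCodecParserCtx, _pCodecCtx,
                                                               &pkt->data, &pkt->size, pData + offset, data.Length - offset,
                                                               ffmpeg.AV_NOPTS_VALUE, ffmpeg.AV_NOPTS_VALUE, 0);
                        if (consumed < 0)
                        {
                            break;
                        }
                        offset += consumed;
                        if (pkt->size == 0)
                        {
                            continue;
                        }

                        // feed the parsed packet to the decoder and drain every completed frame
                        if (ffmpeg.avcodec_send_packet(_pCodecCtx, pkt) < 0)
                        {
                            continue;
                        }
                        while (ffmpeg.avcodec_receive_frame(_pCodecCtx, _pFrame) == 0)
                        {
                            // _pFrame now holds a decoded picture; convert or render it here
                        }
                    }
                }
            }
            finally
            {
                ffmpeg.av_packet_free(&pkt);
            }
        }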
Example 2
        public void StartPlay(NetworkStream stream)
        {
            try
            {
                AVCodec *mCodec = ffmpeg.avcodec_find_decoder(AVCodecID.AV_CODEC_ID_H264);
                if (mCodec == null)
                {
                    Console.WriteLine("can not find h264 decoder");
                    return;
                }

                AVCodecContext *mCodecContext = ffmpeg.avcodec_alloc_context3(mCodec);
                if (mCodecContext == null)
                {
                    Console.WriteLine("can not allcote codec context");
                    return;
                }

                if (ffmpeg.avcodec_open2(mCodecContext, mCodec, null) < 0)
                {
                    Console.WriteLine("can not open codec");
                    ffmpeg.avcodec_free_context(&mCodecContext);
                    return;
                }

                AVCodecParserContext *parser = ffmpeg.av_parser_init((int)AVCodecID.AV_CODEC_ID_H264);
                if (parser == null)
                {
                    Console.WriteLine("can not initialize parser");
                    ffmpeg.avcodec_close(mCodecContext);
                    ffmpeg.avcodec_free_context(&mCodecContext);
                    return;
                }

                //AVFrame is used to store the decoded pixel data (YUV)
                //allocate memory
                AVFrame *pFrame = ffmpeg.av_frame_alloc();
                //YUV420
                AVFrame *pFrameYUV = ffmpeg.av_frame_alloc();
                //memory can only actually be allocated once the AVFrame's pixel format and picture size are specified
                //allocate the buffer
                int   out_buffer_size = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_YUV420P, 720, 1280);
                byte *out_buffer      = (byte *)ffmpeg.av_malloc((ulong)out_buffer_size);
                //initialize the buffer
                ffmpeg.avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AVPixelFormat.AV_PIX_FMT_YUV420P, 720, 1280);

                //parameters for conversion (scaling): source width/height, destination width/height, pixel format, etc.
                //SwsContext* sws_ctx = ffmpeg.sws_getContext(mCodecContext->width, mCodecContext->height, AVPixelFormat.AV_PIX_FMT_YUV420P /*pCodecCtx->pix_fmt*/, mCodecContext->width, mCodecContext->height, AVPixelFormat.AV_PIX_FMT_YUV420P, ffmpeg.SWS_BICUBIC, null, null, null);
                SwsContext *sws_ctx = ffmpeg.sws_getContext(720, 1280, AVPixelFormat.AV_PIX_FMT_YUV420P /*pCodecCtx->pix_fmt*/, 720, 1280, AVPixelFormat.AV_PIX_FMT_YUV420P, ffmpeg.SWS_BICUBIC, null, null, null);


                AVPacket packet;
                ffmpeg.av_init_packet(&packet);

                byte[] inbuffer = new byte[4096];
                int    cur_size, ret, got_picture;
                int    readLen    = 0;
                bool   first_time = true;

                sdlvideo.SDL_Init(720, 1280);

                while (true)
                {
                    readLen  = 0;
                    cur_size = stream.Read(inbuffer, 0, 4096);
                    if (cur_size <= 0)
                    {
                        Console.WriteLine("read eof");
                        break;
                    }

                    while (cur_size > 0)
                    {
                        fixed(byte *cur_ptr = inbuffer)
                        {
                            int len = ffmpeg.av_parser_parse2(parser, mCodecContext,
                                                              &packet.data, &packet.size, cur_ptr + readLen, cur_size,
                                                              ffmpeg.AV_NOPTS_VALUE, ffmpeg.AV_NOPTS_VALUE, -1);

                            readLen  += len;
                            cur_size -= len;
                        }

                        if (packet.size == 0)
                        {
                            continue;
                        }

                        Console.WriteLine($"[Packet]Size:{packet.size}");

                        ret = ffmpeg.avcodec_decode_video2(mCodecContext, pFrame, &got_picture, &packet);
                        if (ret < 0)
                        {
                            Console.WriteLine("Decode Error.\n");
                            break;
                        }
                        // read the decoded frame data
                        if (got_picture > 0)
                        {
                            if (first_time)
                            {
                                Console.WriteLine($"width:{mCodecContext->width}\nheight:{mCodecContext->height}\n\n");
                                first_time = false;
                            }
                            //convert the AVFrame to YUV420 pixel format at the given width/height
                            ffmpeg.sws_scale(sws_ctx, pFrame->data, pFrame->linesize, 0, mCodecContext->height, pFrameYUV->data, pFrameYUV->linesize);

                            //play the YUV data with SDL
                            var data = out_buffer;
                            sdlvideo.SDL_Display(mCodecContext->width, mCodecContext->height, (IntPtr)data, out_buffer_size, pFrameYUV->linesize[0]);
                        }
                    }
                }

                Console.WriteLine("exit loop");
                ffmpeg.av_parser_close(parser);
                ffmpeg.avcodec_close(mCodecContext);
                ffmpeg.avcodec_free_context(&mCodecContext);
                // todo notify stopped
            }
            catch (Exception ex)
            {
                Console.WriteLine("H264SocketParser.thread error:", ex);
            }
        }
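
avcodec_decode_video2, used above, is deprecated and has been removed from recent FFmpeg releases. With a 4.x or newer binding, the decode step would typically use the send/receive pair instead; a minimal sketch of just that block, reusing the variables from the example (got_picture is no longer needed):

                        ret = ffmpeg.avcodec_send_packet(mCodecContext, &packet);
                        if (ret < 0)
                        {
                            Console.WriteLine("Decode Error.\n");
                            break;
                        }
                        // one avcodec_receive_frame call per decoded picture replaces the got_picture flag
                        while (ffmpeg.avcodec_receive_frame(mCodecContext, pFrame) == 0)
                        {
                            //convert the frame to YUV420 and hand it to SDL, as in the original loop
                            ffmpeg.sws_scale(sws_ctx, pFrame->data, pFrame->linesize, 0, mCodecContext->height, pFrameYUV->data, pFrameYUV->linesize);
                            sdlvideo.SDL_Display(mCodecContext->width, mCodecContext->height, (IntPtr)out_buffer, out_buffer_size, pFrameYUV->linesize[0]);
                        }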
Example 3
        public async Task Play(ICoreHandler handler, Message message, string param)
        {
            Video vid = (await client.Search.GetVideosAsync(param)).FirstOrDefault();

            if (vid != null)
            {
                if (vid.Duration <= TimeSpan.FromMinutes(10))
                {
                    StreamManifest streamManifest = await client.Videos.Streams.GetManifestAsync(vid.Id);

                    AudioOnlyStreamInfo audio = streamManifest.GetAudioOnly().FirstOrDefault();
                    if (audio != null)
                    {
                        MemoryStream stream = new MemoryStream();
                        await client.Videos.Streams.CopyToAsync(audio, stream);
                        stream.Position = 0; // rewind so the decode loop below can read from the start

                        Debug(audio.AudioCodec);
                        unsafe {
                            AVPacket *pkt   = ffmpeg.av_packet_alloc();
                            AVCodec * codec = ffmpeg.avcodec_find_decoder_by_name(audio.AudioCodec);
                            if (codec == null)
                            {
                                Error($"Codec {audio.AudioCodec} not found.");
                                return;
                            }
                            AVCodecParserContext *parser = ffmpeg.av_parser_init((int)codec->id);
                            if (parser == null)
                            {
                                Error("Could not allocate audio codec context.");
                                return;
                            }
                            AVCodecContext *context = ffmpeg.avcodec_alloc_context3(codec);
                            if (context == null)
                            {
                                Error("Could not allocate audio codec context.");
                                return;
                            }
                            if (ffmpeg.avcodec_open2(context, codec, null) < 0)
                            {
                                Error("Could not open audio codec context.");
                                return;
                            }
                            AVFrame *decoded_frame = null;
                            while (stream.Length - stream.Position > 0)
                            {
                                if (decoded_frame == null)
                                {
                                    decoded_frame = ffmpeg.av_frame_alloc();
                                }
                                // read a chunk of compressed data and hand it to the parser
                                byte[] buffer = new byte[4096];
                                int bytesRead = stream.Read(buffer, 0, buffer.Length);
                                if (bytesRead <= 0)
                                {
                                    break;
                                }
                                IntPtr unmanagedPointer = Marshal.AllocHGlobal(bytesRead);
                                Marshal.Copy(buffer, 0, unmanagedPointer, bytesRead);
                                ffmpeg.av_parser_parse2(parser, context, &pkt->data, &pkt->size, (byte *)unmanagedPointer, bytesRead, ffmpeg.AV_NOPTS_VALUE, ffmpeg.AV_NOPTS_VALUE, 0);
                                int ret = ffmpeg.avcodec_send_packet(context, pkt);
                                while (ret >= 0)
                                {
                                    ret = ffmpeg.avcodec_receive_frame(context, decoded_frame);
                                    if (ret < 0)
                                    {
                                        break;
                                    }
                                    int data_size = ffmpeg.av_get_bytes_per_sample(context->sample_fmt);
                                    // interleave the planar samples into a single PCM buffer
                                    byte[] pcm = new byte[data_size * decoded_frame->nb_samples * context->channels];
                                    int current = 0;
                                    for (int i = 0; i < decoded_frame->nb_samples; i++)
                                    {
                                        for (uint ch = 0; ch < context->channels; ch++)
                                        {
                                            Marshal.Copy((IntPtr)decoded_frame->data[ch] + (data_size * i), pcm, current, data_size);
                                            current += data_size;
                                        }
                                    }
                                    message.TransferSound(pcm);
                                }
                                Marshal.FreeHGlobal(unmanagedPointer);
                            }
                        }
                    }
                }
                else
                {
                    Warn("Video too long.");
                }
            }
            else
            {
                Warn("No video by that term.");
            }

            /*DiscordGuildTextChannel Channel = client.GetChannel(e.Message.ChannelId).Result as DiscordGuildTextChannel;
             * Snowflake GuildId = Channel.GuildId;
             * DiscordGuild Guild = await client.GetGuild(GuildId);
             * Snowflake ChannelId = e.Message.ChannelId;
             * DiscordVoiceState voiceState = e.Shard.Cache.GetVoiceState(GuildId, e.Message.Author.Id);
             * if (voiceState == null) {
             *  return;
             * } else {
             *  if (!SharedObjectStorage.VoiceModuleObjects.Keys.Contains(voiceState.ChannelId.Value)) {
             *      VoiceModule music = new VoiceModule(e.Shard, voiceState.ChannelId.Value);
             *      SharedObjectStorage.VoiceModuleObjects.Add(voiceState.ChannelId.Value, music);
             *      if (Tempnamelist.Count <= 2) {
             *          Tempnamelist.Add($"Temp{i++}");
             *      }
             *      string filename = Tempnamelist[0];
             *      Tempnamelist.RemoveAt(0);
             *      voice = new YoutubeVoiceProvider();
             *      voice.DoQuery(query);
             *      client.CreateMessage(e.Message.ChannelId, "" + String.Join("\n", voice.Result));
             *      this.GuildId = ((await client.GetChannel(e.Message.ChannelId)) as DiscordGuildTextChannel).GuildId;
             *      UserId = e.Message.Author.Id;
             *      e.Shard.Gateway.OnMessageCreated += Gateway_OnMessageCreated1;
             *      stopsignal.WaitOne();
             *      e.Shard.Gateway.OnMessageCreated -= Gateway_OnMessageCreated1;
             *      voice.DownloadToFileByQuery($"Temp/{filename}").Wait();
             *      if (new FileInfo($"Temp/{filename}").Length <= 100) { return; }
             *      client.CreateMessage(e.Message.ChannelId, "Playing: " + voice.CurrentSelection);
             *      Converter c = new FFmpegConverter();
             *      c.TempfileClosed += TempfileClosed;
             *      music.Transfer(new FileInfo($"Temp/{filename}"), c, playCancellationTokenSource);
             *  }
             * }*/
        }
Example 4
 public static CodecParserContext FromNative(AVCodecParserContext *ptr, bool isOwner) => new CodecParserContext(ptr, isOwner);
Example 5
 protected CodecParserContext(AVCodecParserContext *ptr, bool isOwner) : base(NativeUtils.NotNull((IntPtr)ptr), isOwner)
 {
 }
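
Examples 4 and 5 come from a managed wrapper type around AVCodecParserContext. A plausible use, assumed here purely for illustration, is to wrap the raw pointer returned by av_parser_init and hand ownership to the managed object:

 // hypothetical usage of the wrapper: the managed object takes ownership of the native parser
 AVCodecParserContext *native = ffmpeg.av_parser_init((int)AVCodecID.AV_CODEC_ID_H264);
 CodecParserContext parser = CodecParserContext.FromNative(native, isOwner: true);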
Example 6
        private async void ButtonConnect_Click(object sender, RoutedEventArgs e)
        {
            string connectAddr = InputIP.Text + ":" + InputPort.Text;
            string connectPSK  = InputPSK.Text;

            ButtonConnect.IsEnabled = false;
            Debug.WriteLine("Connect to " + connectAddr);

            var myStatus = new CMsgRemoteClientBroadcastStatus()
            {
                Version         = 8,
                MinVersion      = 6,
                ConnectPort     = SteamDiscoveryTransport.STEAM_DISCOVERY_PORT,
                Hostname        = "my-fake-name",
                EnabledServices = (uint)ERemoteClientService.KEremoteClientServiceGameStreaming,
                Ostype          = (int)EOSType.Windows10,
                Is64Bit         = true,
                Euniverse       = (int)EUniverse.Public,
                GamesRunning    = false,
            };

            myStatus.Users.Add(new CMsgRemoteClientBroadcastStatus.Types.User()
            {
                Steamid   = 76561198009414634,                  // adjust to yours
                AuthKeyId = 00000000                            // removed
            });
            byte[] psk = Hexlify.StringToByteArray(connectPSK); // you can get this (and the AuthKeyId above) in C:\Program Files (x86)\Steam\userdata\[your user id]\config\localconfig.vdf under the SharedAuth section

            TlsPskClient tlsClient = new TlsPskClient();

            tlsClient.Connect(connectAddr, "steam", psk);
            SteamRemote steamRemote = new SteamRemote(tlsClient, false, 1337, new Random().NextLong(), myStatus);

            steamRemote.MyApps.Add(new CMsgRemoteClientAppStatus.Types.AppStatus()
            {
                AppId    = 391540,
                AppState = 4
            });
            await steamRemote.Start();

            CMsgRemoteClientStartStreamResponse startResponse = await steamRemote.StartStream(new CMsgRemoteClientStartStream()
            {
                AppId              = 391540,
                LaunchOption       = -1,
                MaximumResolutionX = 1920,
                MaximumResolutionY = 1080,
                AudioChannelCount  = 2
            });

            SteamStreamClient stream = new SteamStreamClient(new IPEndPoint(IPAddress.Parse(InputIP.Text), (int)startResponse.StreamPort), startResponse.AuthToken.ToByteArray(), this);

            stream.Connect();

            unsafe
            {
                ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
                //ffmpeg.av_log_set_level(ffmpeg.AV_LOG_DEBUG);
                cb = libav_log;
                ffmpeg.av_log_set_callback(new av_log_set_callback_callback_func()
                {
                    Pointer = Marshal.GetFunctionPointerForDelegate(cb)
                });

                AVCodec *codec = ffmpeg.avcodec_find_decoder(AVCodecID.AV_CODEC_ID_H264);
                if (codec == null)
                {
                    throw new InvalidOperationException("Unsupported codec");
                }
                avparser         = ffmpeg.av_parser_init((int)codec->id);
                avparser->flags |= ffmpeg.PARSER_FLAG_COMPLETE_FRAMES;
                avctx            = ffmpeg.avcodec_alloc_context3(codec);
                if (ffmpeg.avcodec_open2(avctx, codec, null) < 0)
                {
                    throw new Exception("Could not open codec");
                }
            }
        }
Example 7
 public static extern void av_parser_close(AVCodecParserContext *s);
Example 8
 public static extern int av_parser_change(AVCodecParserContext *s, AVCodecContext *avctx, byte **poutbuf, int *poutbuf_size, byte *buf, int buf_size, int keyframe);
Example 9
 public static extern int av_parser_parse2(AVCodecParserContext *s, AVCodecContext *avctx, byte **poutbuf, int *poutbuf_size, byte *buf, int buf_size, long pts, long dts, long pos);
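
Examples 7 to 9 show only the raw P/Invoke signatures; to compile, each extern declaration has to be bound to the native avcodec library with a DllImport attribute (and System.Runtime.InteropServices imported). A minimal sketch, in which the library name "avcodec-58" and the Cdecl calling convention are assumptions that depend on the FFmpeg build in use:

 // assumed binding; the dll name varies with the FFmpeg version (e.g. avcodec-58, avcodec-59)
 [DllImport("avcodec-58", CallingConvention = CallingConvention.Cdecl)]
 public static extern int av_parser_parse2(AVCodecParserContext *s, AVCodecContext *avctx, byte **poutbuf, int *poutbuf_size, byte *buf, int buf_size, long pts, long dts, long pos);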