// Start is called before the first frame update
 void Start()
 {
     Debug.Log("current directory: " + Environment.CurrentDirectory);
     Debug.Log("platform: " + (RuntimeInformation.IsOSPlatform(OSPlatform.Windows) == true ? "Windows" : "Other"));
     RegisterFFmpegBinaries();
     Debug.Log(ffmpeg.av_version_info());
     this.streamer      = new RtpVideoStreamer("rtp://127.0.0.1:9000/test/");
     this.circularColor = 0;
     this.srcFrame      = ffmpeg.av_frame_alloc();
 }
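Both examples call RegisterFFmpegBinaries() before touching any other FFmpeg.AutoGen API, but the helper itself is not part of the listing. Below is a minimal sketch, assuming the native libav* DLLs ship in a Plugins/FFmpeg folder inside the Unity project and that FFmpeg.AutoGen's ffmpeg.RootPath is used to point the bindings at them; the folder layout and the extra usings are assumptions, not taken from the original.

// Minimal sketch of a RegisterFFmpegBinaries() helper (assumed, not from the original).
// Requires: using System; using System.IO; using System.Runtime.InteropServices;
//           using FFmpeg.AutoGen; using UnityEngine;
private void RegisterFFmpegBinaries()
{
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        // Hypothetical location of the native FFmpeg DLLs inside the project.
        var probe = Path.Combine(Environment.CurrentDirectory, "Assets", "Plugins", "FFmpeg", "x86_64");
        if (Directory.Exists(probe))
        {
            Debug.Log("FFmpeg binaries found in: " + probe);
            ffmpeg.RootPath = probe; // FFmpeg.AutoGen resolves the native libraries from here
        }
        else
        {
            Debug.LogError("FFmpeg binaries not found at: " + probe);
        }
    }
    // On other platforms, rely on the system-wide FFmpeg installation.
}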
Example No. 2
    // Start is called before the first frame update
    void Start()
    {
        RegisterFFmpegBinaries();

        // Prepare textures and initial values
        screenWidth  = GetComponent<Camera>().pixelWidth;
        screenHeight = GetComponent<Camera>().pixelHeight;
        Debug.Log("Width: " + screenWidth + ", Height: " + screenHeight);

        // Allocate the RTP stream
        this.streamer = new RtpVideoStreamer("rtp://127.0.0.1:9000/test/", screenWidth, screenHeight);
        // Allocate the frame to be sent
        this.srcFrame         = ffmpeg.av_frame_alloc();
        this.srcFrame->format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
        this.srcFrame->width  = screenWidth;
        this.srcFrame->height = screenHeight;
        ffmpeg.av_frame_get_buffer(this.srcFrame, 32);
        // Allocate an RGB24 -> YUV420P conversion context for testing
        this._convertContext = ffmpeg.sws_getContext(
            screenWidth, screenHeight, AVPixelFormat.AV_PIX_FMT_RGB24,
            screenWidth, screenHeight, AVPixelFormat.AV_PIX_FMT_YUV420P,
            ffmpeg.SWS_BICUBIC, null, null, null);

        var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_RGB24, screenWidth, screenHeight, 1);

        this._convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
        this._convertDstData          = new byte_ptrArray4();
        this._convertDstLinesize      = new int_array4();

        // Set target frame rate (optional)
        Application.targetFrameRate = frameRate;

        tempRenderTexture = new RenderTexture(screenWidth, screenHeight, 0);
        tempTexture2D     = new Texture2D(screenWidth, screenHeight, TextureFormat.RGB24, false);
        frameQueue        = new Queue<byte[]>();

        frameNumber = 0;

        captureFrameTime = 1.0f / (float)frameRate;
        lastFrameTime    = Time.time;

        // Kill the encoder thread if running from a previous execution
        if (encoderThread != null && (threadIsProcessing || encoderThread.IsAlive))
        {
            threadIsProcessing = false;
            encoderThread.Join();
        }

        // Start a new encoder thread
        threadIsProcessing = true;
        encoderThread      = new Thread(EncodeAndSave);
        encoderThread.Start();
    }
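This Start() only wires the pipeline up; the per-frame capture and the EncodeAndSave worker it references are not part of the listing. The two sketches below show one way they could fit together. They are assumptions rather than the original implementation: frames are assumed to be grabbed in OnRenderImage and queued as raw RGB24 bytes, the worker is assumed to describe each buffer with av_image_fill_arrays and convert it into srcFrame via the sws context created in Start(), and WriteFrame(AVFrame*) is a hypothetical name for whatever RtpVideoStreamer actually exposes.

// Sketch of the capture side (assumed). OnRenderImage runs after the camera renders;
// at the target interval the image is copied into tempTexture2D and its raw RGB24
// bytes are queued for the encoder thread.
void OnRenderImage(RenderTexture source, RenderTexture destination)
{
    if (Time.time - lastFrameTime >= captureFrameTime)
    {
        lastFrameTime = Time.time;

        // Copy the camera output into the readable temporary texture.
        Graphics.Blit(source, tempRenderTexture);
        RenderTexture.active = tempRenderTexture;
        tempTexture2D.ReadPixels(new Rect(0, 0, screenWidth, screenHeight), 0, 0);
        RenderTexture.active = null;

        // Hand the raw RGB24 bytes to the encoder thread.
        lock (frameQueue)
        {
            frameQueue.Enqueue(tempTexture2D.GetRawTextureData());
        }
    }

    // Pass the image through so the game view keeps rendering normally.
    Graphics.Blit(source, destination);
}

The worker then drains the queue on its own thread, converts each buffer, and pushes the frame out over RTP. Note that Unity stores texture rows bottom-up, so without an extra flip the streamed video may appear upside down.

// Sketch of the EncodeAndSave() worker (assumed; the original is not shown).
unsafe void EncodeAndSave()
{
    while (threadIsProcessing)
    {
        byte[] rgbBytes = null;
        lock (frameQueue)
        {
            if (frameQueue.Count > 0)
                rgbBytes = frameQueue.Dequeue();
        }

        if (rgbBytes == null)
        {
            Thread.Sleep(1);
            continue;
        }

        // Copy the managed RGB24 pixels into the unmanaged buffer allocated in Start().
        Marshal.Copy(rgbBytes, 0, this._convertedFrameBufferPtr, rgbBytes.Length);

        // Describe that buffer as a single-plane RGB24 image for sws_scale.
        ffmpeg.av_image_fill_arrays(
            ref this._convertDstData, ref this._convertDstLinesize,
            (byte*)this._convertedFrameBufferPtr,
            AVPixelFormat.AV_PIX_FMT_RGB24, screenWidth, screenHeight, 1);

        // Convert RGB24 -> YUV420P into the reusable srcFrame.
        ffmpeg.sws_scale(
            this._convertContext,
            this._convertDstData, this._convertDstLinesize, 0, screenHeight,
            this.srcFrame->data, this.srcFrame->linesize);

        this.srcFrame->pts = frameNumber++;

        // Hypothetical call; the real RtpVideoStreamer API may differ.
        this.streamer.WriteFrame(this.srcFrame);
    }
}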