Example 1
    /// <summary>
    /// Creates the Y/U/V (and optional alpha) plane textures, uploads them to the
    /// GPU and hands their native pointers to the decoder plugin, then refreshes
    /// the materials that sample them.
    /// </summary>
    void AllocateTextures()
    {
        // All planes share the same format/wrap settings; only the size differs,
        // so the creation boilerplate lives in one local helper.
        Texture2D CreatePlaneTexture(int width, int height)
        {
            var texture = new Texture2D(width, height, TextureFormat.Alpha8, false)
            {
                wrapMode = TextureWrapMode.Clamp
            };

            // Apply() uploads the texture so GetNativeTexturePtr() returns a valid handle.
            texture.Apply();
            return texture;
        }

        yTexture = CreatePlaneTexture(Width, Height);
        // Chroma planes are half-size in both dimensions — presumably YUV 4:2:0.
        uTexture = CreatePlaneTexture(Width / 2, Height / 2);
        vTexture = CreatePlaneTexture(Width / 2, Height / 2);

        VideoDecoderPlugin.SetYTexture(videoContext, yTexture.GetNativeTexturePtr());
        VideoDecoderPlugin.SetUTexture(videoContext, uTexture.GetNativeTexturePtr());
        VideoDecoderPlugin.SetVTexture(videoContext, vTexture.GetNativeTexturePtr());

        if (HasAlphaChannel)
        {
            // Alpha plane is full resolution, unlike the chroma planes.
            aTexture = CreatePlaneTexture(Width, Height);
            VideoDecoderPlugin.SetATexture(videoContext, aTexture.GetNativeTexturePtr());
        }

        UpdateMaterialsTextures();
    }
Example 2
    /// <summary>
    /// Per-frame playback driver: advances the playback clock, handles looping,
    /// asks the plugin to decode the frame for the current time, and restarts or
    /// finishes playback when the stream ends.
    /// </summary>
    public void Update()
    {
        // Nothing to do while stopped/paused or before a video has been loaded.
        if (!IsPlaying || !IsVideoLoaded)
        {
            return;
        }

        // In audio-sync mode the AudioSource is the master clock;
        // otherwise advance our own clock scaled by playback speed.
        elapsedTime = UseAudioTime ? AudioSource.time : elapsedTime + Time.deltaTime * speed;

        // In audio-sync mode, a stopped AudioSource signals end of playback.
        bool syncAudioSourceStopped = UseAudioTime && !AudioSource.isPlaying;

        if (IsCycled && (elapsedTime > Duration || syncAudioSourceStopped))
        {
            Restart();
        }

        // (local renamed from misspelled "nextFrameSuccesfullyDecoded")
        bool nextFrameSuccessfullyDecoded = VideoDecoderPlugin.DecodeFrameAtTime(videoContext, elapsedTime);

        if (!nextFrameSuccessfullyDecoded || syncAudioSourceStopped)
        {
            if (IsCycled)
            {
                // NOTE(review): when syncAudioSourceStopped is true Restart() may
                // already have run above — confirm Restart() tolerates being
                // called twice in one frame.
                Restart();
            }
            else
            {
                FinishPlaying();
            }
        }
    }
Example 3
 /// <summary>
 /// Releases the native decoder context if one is allocated.
 /// Safe to call when no context exists (no-op).
 /// </summary>
 void DestroyContext()
 {
     if (videoContext == IntPtr.Zero)
     {
         return;
     }

     VideoDecoderPlugin.DestroyVideoContext(videoContext);
     // Clear the handle so repeated calls (and IsVideoLoaded-style checks) see no context.
     videoContext = IntPtr.Zero;
 }
Example 4
    /// <summary>
    /// Drives decoding from the render thread: once per frame, after rendering
    /// completes, issues the plugin render event that performs the decode/upload.
    /// Runs until the coroutine is stopped externally.
    /// </summary>
    private IEnumerator DecodeCoroutine()
    {
        // Event id dispatched to the native plugin; keep in sync with the
        // GL.IssuePluginEvent call in LoadVideo().
        const int DecodeRenderEventId = 773;

        // Reuse one yield instruction instead of allocating a new
        // WaitForEndOfFrame every frame (avoids per-frame GC garbage).
        var waitForEndOfFrame = new WaitForEndOfFrame();

        while (true)
        {
            yield return waitForEndOfFrame;

            GL.IssuePluginEvent(VideoDecoderPlugin.GetRenderEventFunc(), DecodeRenderEventId);
        }
    }
Example 5
    /// <summary>
    /// Resets the native video plugin settings on component wake-up and logs
    /// an error if the plugin reports one afterwards.
    /// </summary>
    public void Awake()
    {
        VideoDecoderPlugin.ResetVideoPluginSettings();

        if (VideoDecoderPlugin.GetLastPluginError() == VideoPluginEror.NoError)
        {
            return;
        }

        Debug.LogError("VideoDecoderPlugin has an error = " + VideoDecoderPlugin.GetLastPluginError());
    }
Example 6
    /// <summary>
    /// Ensures a video is loaded, then decodes and displays its final frame.
    /// Does nothing if loading fails.
    /// </summary>
    public void ShowLastFrame()
    {
        if (!IsVideoLoaded)
        {
            LoadVideo();

            // LoadVideo() may fail; bail out if there is still no video to decode.
            if (!IsVideoLoaded)
            {
                return;
            }
        }

        VideoDecoderPlugin.DecodeLastFrame(videoContext);
    }
Example 7
    /// <summary>
    /// Creates a native decoder context for <c>FullVideoPath</c>, reads the video
    /// metadata (frame size, duration), allocates the plane textures and renders
    /// the first frame. On a plugin error, playback is stopped and the context handle
    /// is cleared so the video counts as not loaded.
    /// </summary>
    void LoadVideo()
    {
        videoContext = VideoDecoderPlugin.CreateVideoContext(FullVideoPath, HasAlphaChannel, videoFileOffset, videoFileLenght);

        if (VideoDecoderPlugin.GetLastPluginError() != VideoPluginEror.NoError)
        {
            Debug.LogError("CreateVideoContext has an error = " + VideoDecoderPlugin.GetLastPluginError());
            Stop();
            // Clear the (possibly partially created) handle so later checks see no context.
            videoContext = IntPtr.Zero;
            return;
        }

        Width    = VideoDecoderPlugin.GetVideoFrameWidth(videoContext);
        Height   = VideoDecoderPlugin.GetVideoFrameHeight(videoContext);
        Duration = VideoDecoderPlugin.GetVideoDuration(videoContext);
        // Audio counts as present only when a clip is actually assigned to the AudioSource.
        HasAudio = AudioSource != null && AudioSource.clip != null;

        AllocateTextures();

        ShowFirstFrame();
        // 773 is the plugin render-event id; presumably must match the id used in
        // DecodeCoroutine() — confirm against the native plugin.
        GL.IssuePluginEvent(VideoDecoderPlugin.GetRenderEventFunc(), 773);
    }