Code example #1
0
 //	Begins playback once the decoder has been initialized; no-op in any other state.
 public void startDecoding()
 {
     if (decoderState != NativeClass.DecoderState.INITIALIZED)
     {
         return;
     }
     if (!NativeClass.nativeStartDecoding(decoderID))
     {
         print(LOG_TAG + " Decoding not start.");
         return;
     }

     //	Enter buffering and rebase the playback clock to "now".
     decoderState         = NativeClass.DecoderState.BUFFERING;
     globalStartTime      = curRealTime;
     hangTime             = curRealTime - globalStartTime;
     isVideoReadyToReplay = isAudioReadyToReplay = false;

     //	Per-channel audio path: a coroutine feeds the AudioSources while a
     //	background worker pulls PCM data from the native decoder.
     if (isAudioEnabled && !isAllAudioChEnabled)
     {
         StartCoroutine("audioPlay");
         backgroundWorker = new BackgroundWorker();
         backgroundWorker.WorkerSupportsCancellation = true;
         backgroundWorker.DoWork += pullAudioData;
         backgroundWorker.RunWorkerAsync();
     }
 }
Code example #2
0
 //	Requests a seek to the given time (seconds). Out-of-range or replay-pending
 //	targets are clamped to 0. Returns false when a seek is already in flight or
 //	playback has not started yet.
 public bool setSeekTime(float seekTime)
 {
     if (decoderState == NativeClass.DecoderState.SEEK_FRAME || decoderState < NativeClass.DecoderState.START)
     {
         return(false);
     }
     lastState    = decoderState;
     decoderState = NativeClass.DecoderState.SEEK_FRAME;

     //	Clamp targets beyond either stream's duration (or negative) back to 0.
     var pastEnd = (isVideoEnabled && seekTime > videoTotalTime) || (isAudioEnabled && seekTime > audioTotalTime);
     var setTime = seekTime;
     if (pastEnd || isVideoReadyToReplay || isAudioReadyToReplay || seekTime < 0.0f)
     {
         print(LOG_TAG + " Seek over end. ");
         setTime = 0.0f;
     }
     print(LOG_TAG + " set seek time: " + setTime);
     hangTime = setTime;
     NativeClass.nativeSetSeekTime(decoderID, setTime);
     NativeClass.nativeSetVideoTime(decoderID, setTime);

     //	Discard queued audio so stale samples are not played after the jump.
     if (isAudioEnabled)
     {
         lock (_lock)
         {
             audioDataBuff.Clear();
         }
         audioProgressTime = firstAudioFrameTime = -1.0;
         foreach (var src in audioSource)
         {
             src.Stop();
         }
     }
     return(true);
 }
Code example #3
0
        //  Decodes a single frame at the given time and installs it on the target
        //  object's material as a YUV thumbnail.
        //  obj      : target GameObject; must have a MeshRenderer whose material exposes _YTex/_UTex/_VTex.
        //  filePath : path of the media file on disk.
        //  time     : timestamp (seconds) of the frame to load.
        public static void loadVideoThumb(GameObject obj, string filePath, float time)
        {
            if (!File.Exists(filePath))
            {
                print(LOG_TAG + " File not found!");
                return;
            }
            var decID     = -1;
            var width     = 0;
            var height    = 0;
            var totalTime = 0.0f;

            NativeClass.nativeCreateDecoder(filePath, ref decID);
            NativeClass.nativeGetVideoFormat(decID, ref width, ref height, ref totalTime);
            if (!NativeClass.nativeStartDecoding(decID))
            {
                print(LOG_TAG + " Decoding not start.");
                //  Fix: release the decoder created above — this early return
                //  previously leaked it (the MeshRenderer branch below already
                //  destroys it, so this branch must too).
                NativeClass.nativeDestroyDecoder(decID);
                return;
            }
            //  YUV 4:2:0 planes as three single-channel textures; U/V are half resolution.
            var thumbY   = new Texture2D(width, height, TextureFormat.Alpha8, false);
            var thumbU   = new Texture2D(width / 2, height / 2, TextureFormat.Alpha8, false);
            var thumbV   = new Texture2D(width / 2, height / 2, TextureFormat.Alpha8, false);
            var thumbMat = getMaterial(obj);

            if (thumbMat == null)
            {
                print(LOG_TAG + " Target has no MeshRenderer.");
                NativeClass.nativeDestroyDecoder(decID);
                return;
            }
            thumbMat.SetTexture("_YTex", thumbY);
            thumbMat.SetTexture("_UTex", thumbU);
            thumbMat.SetTexture("_VTex", thumbV);
            //  Native side renders directly into the textures via their native pointers.
            NativeClass.nativeLoadThumbnail(decID, time, thumbY.GetNativeTexturePtr(), thumbU.GetNativeTexturePtr(), thumbV.GetNativeTexturePtr());
            NativeClass.nativeDestroyDecoder(decID);
        }
Code example #4
0
        //  Per-frame decoder state machine. Video progress is driven from Update():
        //  the elapsed wall-clock time is pushed to the native side via nativeSetVideoTime.
        private void Update()
        {
            switch (decoderState)
            {
            case NativeClass.DecoderState.START:
                if (isVideoEnabled)
                {
                    //  Prevent empty texture generate green screen.(default 0,0,0 in YUV which is green in RGB)
                    if (useDefault && NativeClass.nativeIsContentReady(decoderID))
                    {
                        getTextureFromNative();
                        setTextures(videoTexYch, videoTexUch, videoTexVch);
                        useDefault = false;
                    }

                    //	Update video frame by dspTime.
                    var setTime = curRealTime - globalStartTime;

                    //	Normal update frame. videoTotalTime <= 0 means duration unknown; keep playing.
                    if (setTime < videoTotalTime || videoTotalTime <= 0)
                    {
                        //  After a seek issued while paused (seekPreview set in the
                        //  SEEK_FRAME case below): show one decoded frame, then re-pause.
                        if (seekPreview && NativeClass.nativeIsContentReady(decoderID))
                        {
                            setPause();
                            seekPreview = false;
                            unmute();
                        }
                        else
                        {
                            //  Push the playback clock to native, then schedule the
                            //  texture update on the render thread.
                            NativeClass.nativeSetVideoTime(decoderID, (float)setTime);
                            GL.IssuePluginEvent(NativeClass.GetRenderEventFunc(), decoderID);
                        }
                    }
                    else
                    {
                        //  Playback clock passed the known duration: video stream is done.
                        isVideoReadyToReplay = true;
                    }
                }
                //  Buffer underrun before EOF: drop to BUFFERING and remember where we hung.
                if (NativeClass.nativeIsVideoBufferEmpty(decoderID) && !NativeClass.nativeIsEOF(decoderID))
                {
                    decoderState = NativeClass.DecoderState.BUFFERING;
                    hangTime     = curRealTime - globalStartTime;
                }
                break;

            case NativeClass.DecoderState.SEEK_FRAME:
                //  Wait for the native seek to finish, then rebase the playback clock
                //  so elapsed time continues from the seek target (hangTime).
                if (NativeClass.nativeIsSeekOver(decoderID))
                {
                    globalStartTime = curRealTime - hangTime;
                    decoderState    = NativeClass.DecoderState.START;
                    if (lastState == NativeClass.DecoderState.PAUSE)
                    {
                        //  Seek was issued while paused: preview one frame muted, then re-pause.
                        seekPreview = true;
                        mute();
                    }
                }
                break;

            case NativeClass.DecoderState.BUFFERING:
                //  Resume as soon as data is available again, or the stream reached EOF.
                if (!NativeClass.nativeIsVideoBufferEmpty(decoderID) || NativeClass.nativeIsEOF(decoderID))
                {
                    decoderState    = NativeClass.DecoderState.START;
                    globalStartTime = curRealTime - hangTime;
                }
                break;

            case NativeClass.DecoderState.PAUSE:
            case NativeClass.DecoderState.EOF:
            default:
                break;
            }
            //  Once every enabled stream has finished, enter EOF and notify listeners.
            if (isVideoEnabled || isAudioEnabled)
            {
                if ((!isVideoEnabled || isVideoReadyToReplay) && (!isAudioEnabled || isAudioReadyToReplay))
                {
                    decoderState         = NativeClass.DecoderState.EOF;
                    isVideoReadyToReplay = isAudioReadyToReplay = false;
                    if (onVideoEnd != null)
                    {
                        onVideoEnd.Invoke();
                    }
                }
            }
        }
Code example #5
0
 //	Unity teardown hook: stop playback and release native resources.
 private void OnDestroy()
 {
     stopDecoding();
     //	Detach the native log callback so the plugin stops calling into managed code.
     NativeClass.nativeRegistLogHandler(IntPtr.Zero);
 }
Code example #6
0
        //	Coroutine: asynchronously creates a native decoder for the given media path
        //	and polls it once per frame until native initialization succeeds or fails.
        //	On success, queries the video/audio formats, prepares textures and audio
        //	sources, and fires onInitComplete; on failure, sets INIT_FAIL.
        private IEnumerator initDecoderAsync(string path)
        {
            print(LOG_TAG + " init Decoder.");
            decoderState = NativeClass.DecoderState.INITIALIZING;
            mediaPath    = path;
            decoderID    = -1;
            NativeClass.nativeCreateDecoderAsync(mediaPath, ref decoderID);

            //	Native state: 1 = ready, -1 = failed; anything else is still initializing.
            //	Always yield at least one frame before the first poll.
            var result = 0;
            while (true)
            {
                yield return(null);

                result = NativeClass.nativeGetDecoderState(decoderID);
                if (result == 1 || result == -1)
                {
                    break;
                }
            }

            if (result != 1)
            {
                print(LOG_TAG + " Init fail.");
                decoderState = NativeClass.DecoderState.INIT_FAIL;
                yield break;
            }

            print(LOG_TAG + " Init success.");
            isVideoEnabled = NativeClass.nativeIsVideoEnabled(decoderID);
            if (isVideoEnabled)
            {
                //	A non-positive duration from native means the length is unknown.
                var duration = 0.0f;
                NativeClass.nativeGetVideoFormat(decoderID, ref videoWidth, ref videoHeight, ref duration);
                videoTotalTime = duration > 0 ? duration : -1.0f;
                print(LOG_TAG + " Video format: (" + videoWidth + ", " + videoHeight + ")");
                if (videoTotalTime > 0)
                {
                    print(LOG_TAG + " Total time: " + videoTotalTime);
                }
                //	Clear textures until real frames arrive.
                setTextures(null, null, null);
                useDefault = true;
            }

            //	Initialize audio.
            isAudioEnabled = NativeClass.nativeIsAudioEnabled(decoderID);
            print(LOG_TAG + " isAudioEnabled = " + isAudioEnabled);
            if (isAudioEnabled)
            {
                if (isAllAudioChEnabled)
                {
                    NativeClass.nativeSetAudioAllChDataEnable(decoderID, isAllAudioChEnabled);
                    getAudioFormat();
                }
                else
                {
                    getAudioFormat();
                    initAudioSource();
                }
            }

            decoderState = NativeClass.DecoderState.INITIALIZED;
            if (onInitComplete != null)
            {
                onInitComplete.Invoke();
            }
        }
Code example #7
0
 //	True while a seek is pending or decoded content is not yet ready to display.
 //	Never queries the native side before the decoder is initialized.
 public bool isSeeking()
 {
     if (decoderState < NativeClass.DecoderState.INITIALIZED)
     {
         return(false);
     }
     return(decoderState == NativeClass.DecoderState.SEEK_FRAME || !NativeClass.nativeIsContentReady(decoderID));
 }