コード例 #1
0
        //  Audio data retrieval (all channels, interleaved).

        public void getAllAudioChannelData(out float[] data, out double time, out int samplesPerChannel)
        {
            //  Guard: this API is only meaningful in all-channel audio mode.
            if (!isAllAudioChEnabled)
            {
                print(LOG_TAG + " this function only works for isAllAudioEnabled == true.");
                data = null;
                time = 0;
                samplesPerChannel = 0;
                return;
            }

            var nativeBuffPtr = new IntPtr();
            var frameLength   = 0;
            var nativeTime    = DecoderNative.nativeGetAudioData(decoderID, ref nativeBuffPtr, ref frameLength);

            //  Copy the interleaved samples out of native memory, then release the
            //  native buffer. When no frame is available, data stays null.
            float[] managedBuff = null;
            if (frameLength > 0)
            {
                managedBuff = new float[frameLength * audioChannels];
                Marshal.Copy(nativeBuffPtr, managedBuff, 0, managedBuff.Length);
                DecoderNative.nativeFreeAudioData(decoderID);
            }

            data = managedBuff;
            time = nativeTime;
            samplesPerChannel = frameLength;
        }
コード例 #2
0
        //  Strong reference to the log delegate. Marshal.GetFunctionPointerForDelegate
        //  does NOT keep the delegate alive; without this field the GC could collect
        //  the local delegate while native code still holds the function pointer,
        //  crashing on the next native log call.
        private static Action <string> nativeLogDelegate;

        //  Registers the managed log handler with the native decoder plugin.
        private void registNativeLog()
        {
            nativeLogDelegate = new Action <string>(DecoderNative.nativeLogHandler);
            var intptr_delegate = Marshal.GetFunctionPointerForDelegate(nativeLogDelegate);

            DecoderNative.nativeRegistLogHandler(intptr_delegate);
        }
コード例 #3
0
        //  Fetches media metadata as parallel key/value string arrays. The native
        //  side returns CoTaskMem-allocated arrays of ANSI string pointers, which
        //  are freed here after marshaling. Outputs are empty arrays (never null)
        //  when the file has no metadata.
        public static void getMetaData(string filePath, out string[] key, out string[] value)
        {
            var keyptr    = IntPtr.Zero;
            var valptr    = IntPtr.Zero;
            var metaCount = DecoderNative.nativeGetMetaData(filePath, out keyptr, out valptr);

            //  Guard: with no metadata the native pointers may be null, and
            //  Marshal.Copy throws ArgumentNullException on a null source even
            //  when the requested length is zero.
            if (metaCount <= 0 || keyptr == IntPtr.Zero || valptr == IntPtr.Zero)
            {
                key   = new string[0];
                value = new string[0];
                return;
            }

            var keys = new IntPtr[metaCount];
            var vals = new IntPtr[metaCount];

            Marshal.Copy(keyptr, keys, 0, metaCount);
            Marshal.Copy(valptr, vals, 0, metaCount);
            var keyArray = new string[metaCount];
            var valArray = new string[metaCount];

            for (var i = 0; i < metaCount; i++)
            {
                keyArray[i] = Marshal.PtrToStringAnsi(keys[i]);
                valArray[i] = Marshal.PtrToStringAnsi(vals[i]);
                //  Free each native string, then the arrays themselves below.
                Marshal.FreeCoTaskMem(keys[i]);
                Marshal.FreeCoTaskMem(vals[i]);
            }
            Marshal.FreeCoTaskMem(keyptr);
            Marshal.FreeCoTaskMem(valptr);
            key   = keyArray;
            value = valArray;
        }
コード例 #4
0
 public override void stopDecoding()
 {
     //  Nothing to tear down unless a decoder was at least being initialized.
     if (decoderState < DecoderNative.DecoderState.INITIALIZING)
     {
         return;
     }

     print(LOG_TAG + " stop decoding.");
     decoderState = DecoderNative.DecoderState.STOP;
     releaseTextures();

     if (isAudioEnabled)
     {
         //  Halt audio playback and cancel the background pull worker.
         StopCoroutine("audioPlay");
         backgroundWorker.CancelAsync();
         if (audioSource != null)
         {
             for (var ch = 0; ch < SWAP_BUFFER_NUM; ch++)
             {
                 var src = audioSource[ch];
                 if (src == null)
                 {
                     continue;
                 }
                 Destroy(src.clip);
                 Destroy(src);
                 audioSource[ch] = null;
             }
         }
     }

     //  Release the native decoder and reset all playback state flags.
     DecoderNative.nativeDestroyDecoder(decoderID);
     decoderID            = -1;
     decoderState         = DecoderNative.DecoderState.NOT_INITIALIZED;
     isVideoEnabled       = isAudioEnabled = false;
     isVideoReadyToReplay = isAudioReadyToReplay = false;
     isAllAudioChEnabled  = false;
 }
コード例 #5
0
        public override void startDecoding()
        {
            //  Only a fully initialized decoder may be started.
            if (decoderState != DecoderNative.DecoderState.INITIALIZED)
            {
                return;
            }

            if (!DecoderNative.nativeStartDecoding(decoderID))
            {
                print(LOG_TAG + " Decoding not start.");
                return;
            }

            //  Enter buffering and anchor the playback clock to the audio DSP clock.
            decoderState    = DecoderNative.DecoderState.BUFFERING;
            globalStartTime = AudioSettings.dspTime;
            hangTime        = AudioSettings.dspTime - globalStartTime;

            isVideoReadyToReplay = isAudioReadyToReplay = false;

            //  Per-channel audio playback: a coroutine consumes samples that a
            //  background worker pulls from the native decoder.
            if (isAudioEnabled && !isAllAudioChEnabled)
            {
                StartCoroutine("audioPlay");
                backgroundWorker = new BackgroundWorker { WorkerSupportsCancellation = true };
                backgroundWorker.DoWork += pullAudioData;
                backgroundWorker.RunWorkerAsync();
            }
        }
コード例 #6
0
 public void setVideoEnable(bool isEnable)
 {
     //  Toggle video decoding on the native side.
     DecoderNative.nativeSetVideoEnable(decoderID, isEnable);
     if (!isEnable)
     {
         return;
     }

     //  When re-enabling, seek to the current position so video resyncs with playback.
     setSeekTime(getVideoCurrentTime());
 }
コード例 #7
0
        //  Background worker body: continuously pulls decoded audio frames from the
        //  native plugin into audioDataBuff until the decoder leaves a running state.
        //  Runs off the main thread; all audioDataBuff access is guarded by _lock.
        private void pullAudioData(object sender, DoWorkEventArgs e)
        {
            var    dataPtr          = IntPtr.Zero;  //	Pointer to get audio data from native.
            var    tempBuff         = new float[0]; //	Reused managed copy buffer; resized only when the frame size changes.
            var    audioFrameLength = 0;
            double lastTime         = -1.0;         //	Timestamp of the last queued frame; avoids queueing the same data set twice.

            audioDataBuff = new List <float>();
            while (decoderState >= DecoderNative.DecoderState.START)
            {
                if (decoderState != DecoderNative.DecoderState.SEEK_FRAME)
                {
                    double audioNativeTime =
                        DecoderNative.nativeGetAudioData(decoderID, ref dataPtr, ref audioFrameLength);
                    if (0 < audioNativeTime && lastTime != audioNativeTime &&
                        decoderState != DecoderNative.DecoderState.SEEK_FRAME &&
                        audioFrameLength != 0)
                    {
                        //	Remember when the first frame arrived for progress bookkeeping.
                        if (firstAudioFrameTime == -1.0)
                        {
                            firstAudioFrameTime = audioNativeTime;
                        }

                        lastTime          = audioNativeTime;
                        //	Native reports per-channel length; expand to interleaved sample count.
                        audioFrameLength *= audioChannels;
                        if (tempBuff.Length != audioFrameLength)
                        {
                            tempBuff = new float[audioFrameLength];
                        }
                        Marshal.Copy(dataPtr, tempBuff, 0, audioFrameLength);
                        lock (_lock)
                        {
                            audioDataBuff.AddRange(tempBuff);
                        }
                    }

                    //	-1.0 means no frame was produced, so there is nothing to free.
                    if (audioNativeTime != -1.0)
                    {
                        DecoderNative.nativeFreeAudioData(decoderID);
                    }
                }

                //	Sleep unconditionally. Previously the sleep was inside the non-seek
                //	branch, so the loop busy-waited at 100% CPU during SEEK_FRAME.
                Thread.Sleep(2);
            }

            //	Decoder stopped: drop the buffer so other threads see playback is over.
            lock (_lock)
            {
                audioDataBuff.Clear();
                audioDataBuff = null;
            }
        }
コード例 #8
0
        private void getAudioFormat()
        {
            //  Pull channel count, sample rate, and duration from the native decoder.
            var channels  = 0;
            var frequency = 0;
            var duration  = 0.0f;

            DecoderNative.nativeGetAudioFormat(decoderID, ref channels, ref frequency, ref duration);

            audioChannels  = channels;
            audioFrequency = frequency;
            //  A non-positive duration from native means "unknown"; store it as -1.
            audioTotalTime = duration > 0 ? duration : -1.0f;

            print(LOG_TAG + " audioChannel " + audioChannels);
            print(LOG_TAG + " audioFrequency " + audioFrequency);
            print(LOG_TAG + " audioTotalTime " + audioTotalTime);
        }
コード例 #9
0
        //  Requests a seek to seekTime (seconds). Returns false when the decoder is
        //  not in a seekable state (not started, or a seek is already in flight).
        //  Out-of-range targets are clamped to 0.
        public bool setSeekTime(float seekTime)
        {
            if (decoderState == DecoderNative.DecoderState.SEEK_FRAME ||
                decoderState < DecoderNative.DecoderState.START)
            {
                return(false);
            }

            lastState    = decoderState;
            decoderState = DecoderNative.DecoderState.SEEK_FRAME;

            //  Clamp invalid targets (past the end, mid-replay, or negative) to 0.
            var setTime = 0.0f;
            if (isVideoEnabled && seekTime > videoTotalTime ||
                isAudioEnabled && !isAllAudioChEnabled && seekTime > audioTotalTime ||
                isVideoReadyToReplay || isAudioReadyToReplay ||
                seekTime < 0.0f)
            {
                print(LOG_TAG + " Seek over end. ");
                setTime = 0.0f;
            }
            else
            {
                setTime = seekTime;
            }

            print(LOG_TAG + " set seek time: " + setTime);
            hangTime = setTime;
            DecoderNative.nativeSetSeekTime(decoderID, setTime);
            DecoderNative.nativeSetVideoTime(decoderID, setTime);

            if (isAudioEnabled && !isAllAudioChEnabled)
            {
                lock (_lock)
                {
                    //  pullAudioData sets audioDataBuff to null when the worker exits;
                    //  guard against that race instead of throwing NullReferenceException.
                    if (audioDataBuff != null)
                    {
                        audioDataBuff.Clear();
                    }
                }

                audioProgressTime = firstAudioFrameTime = -1.0;
                //  stopDecoding can null the array or its elements; guard both.
                if (audioSource != null)
                {
                    foreach (var src in audioSource)
                    {
                        if (src != null)
                        {
                            src.Stop();
                        }
                    }
                }
            }

            return(true);
        }
コード例 #10
0
        private void getTextureFromNative()
        {
            //  Drop any previously created external textures first.
            ReleaseTexture();

            var ptrY = new IntPtr();
            var ptrU = new IntPtr();
            var ptrV = new IntPtr();

            DecoderNative.nativeCreateTexture(decoderID, ref ptrY, ref ptrU, ref ptrV);

            //  Wrap the native YUV planes as Unity textures. The chroma planes are
            //  created at half resolution in each dimension.
            videoTexYch = Texture2D.CreateExternalTexture(
                videoWidth, videoHeight, TextureFormat.Alpha8, false, false, ptrY);
            videoTexUch = Texture2D.CreateExternalTexture(
                videoWidth / 2, videoHeight / 2, TextureFormat.Alpha8, false, false, ptrU);
            videoTexVch = Texture2D.CreateExternalTexture(
                videoWidth / 2, videoHeight / 2, TextureFormat.Alpha8, false, false, ptrV);
        }
コード例 #11
0
        //  Render event

        private void getTextureFromNative()
        {
            //  Release old textures, query the video format, then wrap fresh native
            //  YUV plane textures as Unity external textures.
            releaseTextures();

            var ptrY     = new IntPtr();
            var ptrU     = new IntPtr();
            var ptrV     = new IntPtr();
            var duration = 0.0f;

            DecoderNative.nativeGetVideoFormat(decoderID, ref videoWidth, ref videoHeight, ref duration);
            //  A non-positive duration from native means "unknown"; store it as -1.
            videoTotalTime = duration > 0 ? duration : -1.0f;
            print(LOG_TAG + " Video format: (" + videoWidth + ", " + videoHeight + ")");
            if (videoTotalTime > 0)
            {
                print(LOG_TAG + " Total time: " + videoTotalTime);
            }

            DecoderNative.nativeCreateTexture(decoderID, ref ptrY, ref ptrU, ref ptrV);
            //  Chroma planes are created at half resolution in each dimension.
            videoTexYch = Texture2D.CreateExternalTexture(
                videoWidth, videoHeight, TextureFormat.Alpha8, false, false, ptrY);
            videoTexUch = Texture2D.CreateExternalTexture(
                videoWidth / 2, videoHeight / 2, TextureFormat.Alpha8, false, false, ptrU);
            videoTexVch = Texture2D.CreateExternalTexture(
                videoWidth / 2, videoHeight / 2, TextureFormat.Alpha8, false, false, ptrV);
        }
コード例 #12
0
        //  Renders a single frame at `time` (seconds) into Y/U/V thumbnail textures
        //  bound to obj's material, using a temporary native decoder that is always
        //  destroyed before returning.
        public static void loadVideoThumb(GameObject obj, string filePath, float time)
        {
            if (!File.Exists(filePath))
            {
                print(LOG_TAG + " File not found!");
                return;
            }

            //  Validate the target renderer BEFORE creating native resources. The
            //  original dereferenced GetComponent<MeshRenderer>() unconditionally,
            //  so a missing renderer threw NullReferenceException instead of ever
            //  reaching its own "no MeshRenderer" log message.
            var meshRenderer = obj.GetComponent <MeshRenderer>();
            if (meshRenderer == null || meshRenderer.material == null)
            {
                print(LOG_TAG + " Target has no MeshRenderer.");
                return;
            }
            var thumbMat = meshRenderer.material;

            var decID     = -1;
            var width     = 0;
            var height    = 0;
            var totalTime = 0.0f;

            DecoderNative.nativeCreateDecoder(filePath, ref decID);
            DecoderNative.nativeGetVideoFormat(decID, ref width, ref height, ref totalTime);
            if (!DecoderNative.nativeStartDecoding(decID))
            {
                print(LOG_TAG + " Decoding not start.");
                //  Destroy the decoder on the failure path too; the original returned
                //  here without cleanup and leaked the native decoder instance.
                DecoderNative.nativeDestroyDecoder(decID);
                return;
            }

            //  Chroma planes are half resolution in each dimension.
            var thumbY = new Texture2D(width, height, TextureFormat.Alpha8, false);
            var thumbU = new Texture2D(width / 2, height / 2, TextureFormat.Alpha8, false);
            var thumbV = new Texture2D(width / 2, height / 2, TextureFormat.Alpha8, false);

            thumbMat.SetTexture("_YTex", thumbY);
            thumbMat.SetTexture("_UTex", thumbU);
            thumbMat.SetTexture("_VTex", thumbV);

            DecoderNative.nativeLoadThumbnail(decID, time, thumbY.GetNativeTexturePtr(), thumbU.GetNativeTexturePtr(),
                                              thumbV.GetNativeTexturePtr());
            DecoderNative.nativeDestroyDecoder(decID);
        }
コード例 #13
0
        //  Video progress is triggered using Update. Progress time would be set by nativeSetVideoTime.
        //  Per-frame state machine driving rendering, buffering, seeking, and
        //  end-of-playback detection for the native decoder.
        private void Update()
        {
            switch (decoderState)
            {
            case DecoderNative.DecoderState.START:
                if (isVideoEnabled)
                {
                    //  Prevent empty texture generate green screen.(default 0,0,0 in YUV which is green in RGB)
                    if (useDefault && DecoderNative.nativeIsContentReady(decoderID))
                    {
                        getTextureFromNative();
                        setTextures(videoTexYch, videoTexUch, videoTexVch);
                        useDefault = false;
                    }

                    //	Update video frame by dspTime.
                    var setTime = curRealTime - globalStartTime;

                    //	Normal update frame.
                    if (setTime < videoTotalTime || videoTotalTime <= 0)
                    {
                        //	Seek-from-pause preview: once the target frame is decoded,
                        //	re-pause and restore audio (muted during the preview).
                        if (seekPreview && DecoderNative.nativeIsContentReady(decoderID))
                        {
                            setPause();
                            seekPreview = false;
                            unmute();
                        }
                        else
                        {
                            //	Push the playback clock to native and schedule a render event.
                            DecoderNative.nativeSetVideoTime(decoderID, (float)setTime);
                            GL.IssuePluginEvent(DecoderNative.GetRenderEventFunc(), decoderID);
                        }
                    }
                    else
                    {
                        //	Playback clock passed the known duration: video stream finished.
                        isVideoReadyToReplay = true;
                    }
                }
                //	Decoded-frame buffer ran dry before end of file: fall back to buffering.
                if (DecoderNative.nativeIsVideoBufferEmpty(decoderID) && !DecoderNative.nativeIsEOF(decoderID))
                {
                    decoderState = DecoderNative.DecoderState.BUFFERING;
                    hangTime     = curRealTime - globalStartTime;
                }
                break;

            case DecoderNative.DecoderState.SEEK_FRAME:
                if (DecoderNative.nativeIsSeekOver(decoderID))
                {
                    //	Re-anchor the clock so playback resumes from the seek target.
                    globalStartTime = curRealTime - hangTime;
                    decoderState    = DecoderNative.DecoderState.START;
                    if (lastState == DecoderNative.DecoderState.PAUSE)
                    {
                        //	Seek started while paused: show one muted preview frame,
                        //	then the START case above re-pauses.
                        seekPreview = true;
                        mute();
                    }
                }
                break;

            case DecoderNative.DecoderState.BUFFERING:
                //	Resume once frames are available again (or the file has ended).
                if (!DecoderNative.nativeIsVideoBufferEmpty(decoderID) || DecoderNative.nativeIsEOF(decoderID))
                {
                    decoderState    = DecoderNative.DecoderState.START;
                    globalStartTime = curRealTime - hangTime;
                }
                break;

            case DecoderNative.DecoderState.PAUSE:
            case DecoderNative.DecoderState.EOF:
            default:
                break;
            }
            //	When every enabled stream has finished, enter EOF and notify listeners.
            if (isVideoEnabled || isAudioEnabled)
            {
                if ((!isVideoEnabled || isVideoReadyToReplay) && (!isAudioEnabled || isAudioReadyToReplay))
                {
                    decoderState         = DecoderNative.DecoderState.EOF;
                    isVideoReadyToReplay = isAudioReadyToReplay = false;
                    if (onVideoEnd != null)
                    {
                        onVideoEnd.Invoke();
                    }
                }
            }
        }
コード例 #14
0
 private void OnDestroy()
 {
     //  Tear down the native decoder first, then detach the log callback so
     //  native code cannot call into a destroyed component.
     stopDecoding();
     DecoderNative.nativeRegistLogHandler(IntPtr.Zero);
 }
コード例 #15
0
        //  Coroutine: asynchronously creates a native decoder for `path`, polling the
        //  native state once per frame until it reports success (1) or failure (-1).
        //  On success, records which streams exist and fires onInitComplete.
        //  (Removed the large blocks of commented-out video/audio setup code that
        //  obscured the flow; stream format setup is not performed here.)
        private IEnumerator initDecoderAsync(string path)
        {
            print(LOG_TAG + " init Decoder.");
            decoderState = DecoderNative.DecoderState.INITIALIZING;
            mediaPath    = path;
            decoderID    = -1;
            DecoderNative.nativeCreateDecoderAsync(mediaPath, ref decoderID);

            //  Wait for the native decoder to reach a terminal init state.
            var result = 0;
            do
            {
                yield return(null);

                result = DecoderNative.nativeGetDecoderState(decoderID);
            } while (!(result == 1 || result == -1));

            if (result == 1)
            {
                print(LOG_TAG + " Init success.");
                //  Record which streams the media provides.
                isVideoEnabled = DecoderNative.nativeIsVideoEnabled(decoderID);
                isAudioEnabled = DecoderNative.nativeIsAudioEnabled(decoderID);

                decoderState = DecoderNative.DecoderState.INITIALIZED;
                if (onInitComplete != null)
                {
                    onInitComplete.Invoke();
                }
            }
            else
            {
                print(LOG_TAG + " Init fail.");
                decoderState = DecoderNative.DecoderState.INIT_FAIL;
            }
        }
コード例 #16
0
 //  True while a seek is pending, or while decoded content is not yet ready to present.
 public bool isSeeking()
 {
     //  Never query the native side before the decoder has been initialized.
     if (decoderState < DecoderNative.DecoderState.INITIALIZED)
     {
         return(false);
     }
     return(decoderState == DecoderNative.DecoderState.SEEK_FRAME ||
            !DecoderNative.nativeIsContentReady(decoderID));
 }