Code example #1
File: VideoPlayer.cs Project: zwcloud/FNA
        private void InitAudioStream()
        {
            // The number of buffers to queue into the source.
            const int NUM_BUFFERS = 4;

            // Generate the source.
            IntPtr audioPtr = IntPtr.Zero;

            do
            {
                audioPtr = TheoraPlay.THEORAPLAY_getAudio(Video.theoraDecoder);
            } while (audioPtr == IntPtr.Zero);
            TheoraPlay.THEORAPLAY_AudioPacket packet = TheoraPlay.getAudioPacket(audioPtr);
            audioStream = new DynamicSoundEffectInstance(
                packet.freq,
                (AudioChannels)packet.channels
                );
            audioStream.BufferNeeded += OnBufferRequest;
            UpdateVolume();

            // Fill and queue the buffers.
            for (int i = 0; i < NUM_BUFFERS; i += 1)
            {
                if (!StreamAudio())
                {
                    break;
                }
            }
        }
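The loop above blocks indefinitely on THEORAPLAY_getAudio until the decoder yields its first packet. As a hedged alternative, here is a minimal sketch (not part of the FNA source) of the same wait bounded by a timeout, yielding between polls instead of spinning; it assumes the TheoraPlay bindings and the Video.theoraDecoder field used above, and the helper name and timeout are illustrative.

        private IntPtr WaitForFirstAudioPacket(TimeSpan timeout)
        {
            // Illustrative helper: poll for the first audio packet, but give up
            // after `timeout` and sleep between polls so we don't burn a core.
            var wait = System.Diagnostics.Stopwatch.StartNew();
            IntPtr audioPtr = IntPtr.Zero;
            while (audioPtr == IntPtr.Zero && wait.Elapsed < timeout)
            {
                audioPtr = TheoraPlay.THEORAPLAY_getAudio(Video.theoraDecoder);
                if (audioPtr == IntPtr.Zero)
                {
                    System.Threading.Thread.Sleep(1);
                }
            }
            return audioPtr; // IntPtr.Zero means no audio arrived within the timeout.
        }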
Code example #2
        public void Dispose()
        {
            // Stop and unassign the decoder.
            if (theoraDecoder != IntPtr.Zero)
            {
                TheoraPlay.THEORAPLAY_stopDecode(theoraDecoder);
                theoraDecoder = IntPtr.Zero;
            }

            // Free and unassign the video stream.
            if (videoStream != IntPtr.Zero)
            {
                TheoraPlay.THEORAPLAY_freeVideo(videoStream);
                videoStream = IntPtr.Zero;
            }

            // Free and unassign the audio stream.
            if (audioStream != IntPtr.Zero)
            {
                TheoraPlay.THEORAPLAY_freeAudio(audioStream);
                audioStream = IntPtr.Zero;
            }

            IsDisposed = true;
        }
Code example #3
File: VideoPlayer.cs Project: zwcloud/FNA
        public void Stop()
        {
            checkDisposed();

            // Check the player state before attempting anything.
            if (State == MediaState.Stopped)
            {
                return;
            }

            // Update the player state.
            State = MediaState.Stopped;

            // Wait for the player to end if it's still going.
            System.Console.Write("Signaled Theora player to stop, waiting...");
            timer.Stop();
            timer.Reset();
            if (audioStream != null)
            {
                audioStream.Stop();
                audioStream.Dispose();
                audioStream = null;
            }
            if (previousFrame != IntPtr.Zero)
            {
                TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
            }
            Video.AttachedToPlayer = false;
            Video.Dispose();
            System.Console.WriteLine(" Done!");
        }
Code example #4
        public void Stop()
        {
            checkDisposed();

            // Check the player state before attempting anything.
            if (State == MediaState.Stopped)
            {
                return;
            }

            // Update the player state.
            State = MediaState.Stopped;

            // Wait for the player to end if it's still going.
            if (!audioDecoderThread.IsAlive)
            {
                return;
            }
            Log.Editor.Write("Signaled Theora player to stop, waiting...");
            timer.Stop();
            timer.Reset();
            audioDecoderThread.Join();
            if (previousFrame != IntPtr.Zero)
            {
                TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
            }
            Video.Dispose();
            Log.Editor.Write("Theora player stopped!");
        }
Code example #5
        private void DecodeAudio()
        {
            const int bufferSize = 4096 * 2;

            while (State != MediaState.Stopped && _theoraVideo != null)
            {
                var theoraDecoder = _theoraVideo.TheoraDecoder;

                while (State != MediaState.Stopped && TheoraPlay.THEORAPLAY_availableAudio(theoraDecoder) == 0)
                {
                    continue;
                }

                var data = new List <float>();
                TheoraPlay.THEORAPLAY_AudioPacket currentAudio;
                while (data.Count < bufferSize && TheoraPlay.THEORAPLAY_availableAudio(theoraDecoder) > 0)
                {
                    var audioPtr = TheoraPlay.THEORAPLAY_getAudio(theoraDecoder);
                    currentAudio = TheoraPlay.getAudioPacket(audioPtr);
                    data.AddRange(TheoraPlay.getSamples(currentAudio.samples, currentAudio.frames * currentAudio.channels));
                    TheoraPlay.THEORAPLAY_freeAudio(audioPtr);
                }

                if (State == MediaState.Playing)
                {
                    _fmodTheoraStream.Stream(data.ToArray());
                }
            }
        }
Code example #6
        public void InitializeVideo(string fileName)
        {
            // set finished to true before we start so that anyone polling on this flag afterwards won't get stuck in a loop in the case
            //  where initialization fails.
            IsFinished = true;

            // Initialize the decoder.
            try
            {
                _theoraDecoder = TheoraPlay.THEORAPLAY_startDecodeFile(
                    fileName,
                    30,         // Arbitrarily 1 second in a 30fps movie.
                    //#if !VIDEOPLAYER_OPENGL
                    //                TheoraPlay.THEORAPLAY_VideoFormat.THEORAPLAY_VIDFMT_RGBA
                    //#else
                    TheoraPlay.THEORAPLAY_VideoFormat.THEORAPLAY_VIDFMT_IYUV
                    //#endif
                    );

                var counter = 0;
                // Wait until the decoder is ready.
                while (TheoraPlay.THEORAPLAY_isInitialized(_theoraDecoder) == 0 && counter < 100)
                {
                    Thread.Sleep(10);
                    counter++;
                }
                if (counter >= 100)
                {
                    Log.Editor.WriteError("Could not initialize {0}. Operation timed out sorry", fileName);
                    return;
                }
                // Initialize the video stream pointer and get our first frame.
                if (TheoraPlay.THEORAPLAY_hasVideoStream(_theoraDecoder) != 0)
                {
                    while (_videoStream == IntPtr.Zero)
                    {
                        _videoStream = TheoraPlay.THEORAPLAY_getVideo(_theoraDecoder);
                        Thread.Sleep(10);
                    }

                    var frame = GetVideoFrame(_videoStream);

                    // We get the FramesPerSecond from the first frame.
                    FramesPerSecond = (float)frame.fps;
                    Width           = (int)frame.width;
                    Height          = (int)frame.height;
                    IsFinished      = false;
                }
                _disposed = false;
            }
            catch (BadImageFormatException exception)
            {
                Log.Editor.WriteError("There was a problem initializing Theoraplay possibly running on x64, we only support x86 for the moment.{1} {0} {1} {2}", exception.Message, Environment.NewLine, exception.StackTrace);
            }
            catch (Exception exception)
            {
                Log.Editor.WriteError("There was a problem initializing video with Theoraplay. {1} {0} {1} {2}", exception.Message, Environment.NewLine, exception.StackTrace);
            }
        }
Code example #7
        private bool StreamAudio(int buffer)
        {
            // The size of our abstracted buffer.
            const int BUFFER_SIZE = 4096 * 2;

            // Store our abstracted buffer into here.
            List <float> data = new List <float>();

            // Be sure we have an audio stream first!
            if (Video.audioStream == IntPtr.Zero)
            {
                return(false); // NOPE
            }

            // Add to the buffer from the decoder until it's large enough.
            while (data.Count < BUFFER_SIZE && State != MediaState.Stopped)
            {
                data.AddRange(
                    TheoraPlay.getSamples(
                        currentAudio.samples,
                        currentAudio.frames * currentAudio.channels
                        )
                    );

                // We've copied the audio, so free this.
                TheoraPlay.THEORAPLAY_freeAudio(Video.audioStream);

                do
                {
                    Video.audioStream = TheoraPlay.THEORAPLAY_getAudio(Video.theoraDecoder);
                    if (State == MediaState.Stopped)
                    {
                        // Screw it, just bail out ASAP.
                        return(false);
                    }
                } while (Video.audioStream == IntPtr.Zero);
                currentAudio = TheoraPlay.getAudioPacket(Video.audioStream);

                if ((BUFFER_SIZE - data.Count) < 4096)
                {
                    break;
                }
            }

            // If we actually got data, buffer it into OpenAL.
            if (data.Count > 0)
            {
                AL.BufferData(
                    buffer,
                    (currentAudio.channels == 2) ? ALFormat.StereoFloat32Ext : ALFormat.MonoFloat32Ext,
                    data.ToArray(),
                    data.Count * 2 * currentAudio.channels, // Dear OpenAL: WTF?! Love, flibit
                    currentAudio.freq
                    );
                return(true);
            }
            return(false);
        }
Code example #8
        /// <summary>
        /// This method is to override some default behaviour in TheoraPlay-CS that we don't want for Android. Probably a better solution
        /// would be to update TheoraPlay-CS, but I'm not 100% sure what the change should be as it appears to vary by platform.
        /// </summary>
        /// <param name="videoStream"></param>
        /// <returns></returns>
        private static unsafe TheoraPlay.THEORAPLAY_VideoFrame GetVideoFrame(IntPtr videoStream)
        {
#if __ANDROID__
            var framePtr = (TheoraPlay.THEORAPLAY_VideoFrame *)videoStream;
            return(*framePtr);
#else
            return(TheoraPlay.getVideoFrame(videoStream));
#endif
        }
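The comment above leaves open what the TheoraPlay-CS change should be. One platform-neutral option, shown purely as an assumption (it requires a runtime with the generic Marshal.PtrToStructure overload), is to marshal the frame struct instead of dereferencing an unsafe pointer:

        // Illustrative alternative, not the actual TheoraPlay-CS fix: marshal the
        // struct out of native memory, which behaves the same on every platform
        // at the cost of one extra copy.
        private static TheoraPlay.THEORAPLAY_VideoFrame GetVideoFrameMarshalled(IntPtr videoStream)
        {
            return System.Runtime.InteropServices.Marshal
                .PtrToStructure<TheoraPlay.THEORAPLAY_VideoFrame>(videoStream);
        }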
Code example #9
        internal void Initialize()
        {
            if (!IsDisposed)
            {
                Dispose(); // We need to start from the beginning, don't we? :P
            }

            // Initialize the decoder.
            theoraDecoder = TheoraPlay.THEORAPLAY_startDecodeFile(
                _fileName,
                150, // Arbitrarily 5 seconds in a 30fps movie.
#if VIDEOPLAYER_OPENGL
                TheoraPlay.THEORAPLAY_VideoFormat.THEORAPLAY_VIDFMT_IYUV
#else
                // Use the TheoraPlay software converter.
                TheoraPlay.THEORAPLAY_VideoFormat.THEORAPLAY_VIDFMT_RGBA
#endif
                );

            // Wait until the decoder is ready.
            while (TheoraPlay.THEORAPLAY_isInitialized(theoraDecoder) == 0)
            {
                Thread.Sleep(10);
            }

            // Initialize the audio stream pointer and get our first packet.
            if (TheoraPlay.THEORAPLAY_hasAudioStream(theoraDecoder) != 0)
            {
                while (audioStream == IntPtr.Zero)
                {
                    audioStream = TheoraPlay.THEORAPLAY_getAudio(theoraDecoder);
                    Thread.Sleep(10);
                }
            }

            // Initialize the video stream pointer and get our first frame.
            if (TheoraPlay.THEORAPLAY_hasVideoStream(theoraDecoder) != 0)
            {
                while (videoStream == IntPtr.Zero)
                {
                    videoStream = TheoraPlay.THEORAPLAY_getVideo(theoraDecoder);
                    Thread.Sleep(10);
                }

                TheoraPlay.THEORAPLAY_VideoFrame frame = TheoraPlay.getVideoFrame(videoStream);

                // We get the FramesPerSecond from the first frame.
                FramesPerSecond = (float)frame.fps;
                Width           = (int)frame.width;
                Height          = (int)frame.height;
            }

            IsDisposed = false;
        }
Code example #10
        private bool StreamAudio(int buffer)
        {
            // The size of our abstracted buffer.
            const int BUFFER_SIZE = 4096 * 2;

            // Store our abstracted buffer into here.
            List <float> data = new List <float>();

            // We'll store this here, so alBufferData can use it too.
            TheoraPlay.THEORAPLAY_AudioPacket currentAudio;
            currentAudio.channels = 0;
            currentAudio.freq     = 0;

            // Add to the buffer from the decoder until it's large enough.
            while (
                State != MediaState.Stopped &&
                TheoraPlay.THEORAPLAY_availableAudio(Video.TheoraDecoder) == 0
                )
            {
                ;
            }
            while (
                data.Count < BUFFER_SIZE &&
                State != MediaState.Stopped &&
                TheoraPlay.THEORAPLAY_availableAudio(Video.TheoraDecoder) > 0
                )
            {
                IntPtr audioPtr = TheoraPlay.THEORAPLAY_getAudio(Video.TheoraDecoder);
                currentAudio = TheoraPlay.getAudioPacket(audioPtr);
                data.AddRange(
                    TheoraPlay.getSamples(
                        currentAudio.samples,
                        currentAudio.frames * currentAudio.channels
                        )
                    );
                TheoraPlay.THEORAPLAY_freeAudio(audioPtr);
            }

            // If we actually got data, buffer it into OpenAL.
            if (data.Count > 0)
            {
                AL.BufferData(
                    buffer,
                    (currentAudio.channels == 2) ? ALFormat.StereoFloat32Ext : ALFormat.MonoFloat32Ext,
                    data.ToArray(),
                    data.Count * 2 * currentAudio.channels, // Dear OpenAL: WTF?! Love, flibit
                    currentAudio.freq
                    );
                return(true);
            }
            return(false);
        }
Code example #11
        private bool StreamAudio()
        {
            // The size of our abstracted buffer.
            const int BUFFER_SIZE = 4096 * 2;

            // Store our abstracted buffer into here.
            List <float> data = new List <float>();

            // We'll store this here, so alBufferData can use it too.
            TheoraPlay.THEORAPLAY_AudioPacket currentAudio;

            // There might be an initial period of silence, so forcibly push through.
            while (audioStream.State == SoundState.Stopped &&
                   TheoraPlay.THEORAPLAY_availableAudio(Video.theoraDecoder) == 0)
            {
                ;
            }

            // Add to the buffer from the decoder until it's large enough.
            while (data.Count < BUFFER_SIZE)
            {
                IntPtr audioPtr = TheoraPlay.THEORAPLAY_getAudio(Video.theoraDecoder);
                if (audioPtr == IntPtr.Zero)
                {
                    // FIXME: THEORAPLAY_availableAudio has rounding issues! -flibit
                    break;
                }
                currentAudio = TheoraPlay.getAudioPacket(audioPtr);
                data.AddRange(
                    TheoraPlay.getSamples(
                        currentAudio.samples,
                        currentAudio.frames * currentAudio.channels
                        )
                    );
                TheoraPlay.THEORAPLAY_freeAudio(audioPtr);
            }

            // If we actually got data, queue it!
            if (data.Count > 0)
            {
                audioStream.SubmitFloatBufferEXT(data.ToArray());
            }
            else if (TheoraPlay.THEORAPLAY_isDecoding(Video.theoraDecoder) == 0)
            {
                // Okay, we ran out. No need for this!
                audioStream.BufferNeeded -= OnBufferRequest;
                return(false);
            }
            return(true);
        }
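StreamAudio above is pumped by the DynamicSoundEffectInstance.BufferNeeded event that examples #1 and #12 wire to OnBufferRequest. A plausible shape for that handler is sketched below; it is an assumption rather than the verbatim FNA handler, and simply keeps a few buffers queued ahead of playback.

        private void OnBufferRequest(object sender, EventArgs args)
        {
            // Sketch only: top the stream up whenever XNA asks for more data.
            while (audioStream.PendingBufferCount < 3)
            {
                if (!StreamAudio())
                {
                    break; // Decoder ran dry; StreamAudio already unhooked this handler.
                }
            }
        }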
Code example #12
        private void InitializeTheoraStream()
        {
            // Start the video if it hasn't been yet.
            if (Video.IsDisposed)
            {
                Video.Initialize();
            }

            // Grab the first bit of audio. We're trying to start the decoding ASAP.
            if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.theoraDecoder) != 0)
            {
                // Generate the source.
                IntPtr audioPtr = IntPtr.Zero;
                do
                {
                    // The decoder miiight not be ready yet.
                    audioPtr = TheoraPlay.THEORAPLAY_getAudio(Video.theoraDecoder);
                } while (audioPtr == IntPtr.Zero);
                TheoraPlay.THEORAPLAY_AudioPacket packet = TheoraPlay.getAudioPacket(audioPtr);
                audioStream = new DynamicSoundEffectInstance(
                    packet.freq,
                    (AudioChannels)packet.channels
                    );
                audioStream.BufferNeeded += OnBufferRequest;
                UpdateVolume();

                // Fill and queue the buffers.
                for (int i = 0; i < 4; i += 1)
                {
                    if (!StreamAudio())
                    {
                        break;
                    }
                }
            }

            // Grab the first bit of video.
            if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
            {
                currentVideo  = TheoraPlay.getVideoFrame(Video.videoStream);
                previousFrame = Video.videoStream;
                do
                {
                    // The decoder miiight not be ready yet.
                    Video.videoStream = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                } while (Video.videoStream == IntPtr.Zero);
                nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
            }
        }
Code example #13
        private bool StreamAudio()
        {
            // The size of our abstracted buffer.
            const int BUFFER_SIZE = 4096 * 2;

            // Store our abstracted buffer into here.
            List <float> data = new List <float>();

            // We'll store this here, so alBufferData can use it too.
            TheoraPlay.THEORAPLAY_AudioPacket currentAudio;
            currentAudio.channels = 0;
            currentAudio.freq     = 0;

            // There might be an initial period of silence, so forcibly push through.
            while (audioStream.State == SoundState.Stopped &&
                   TheoraPlay.THEORAPLAY_availableAudio(Video.theoraDecoder) == 0)
            {
                ;
            }

            // Add to the buffer from the decoder until it's large enough.
            while (data.Count < BUFFER_SIZE &&
                   TheoraPlay.THEORAPLAY_availableAudio(Video.theoraDecoder) > 0)
            {
                IntPtr audioPtr = TheoraPlay.THEORAPLAY_getAudio(Video.theoraDecoder);
                currentAudio = TheoraPlay.getAudioPacket(audioPtr);
                data.AddRange(
                    TheoraPlay.getSamples(
                        currentAudio.samples,
                        currentAudio.frames * currentAudio.channels
                        )
                    );
                TheoraPlay.THEORAPLAY_freeAudio(audioPtr);
            }

            // If we actually got data, buffer it into OpenAL.
            if (data.Count > 0)
            {
                audioStream.SubmitFloatBuffer(
                    data.ToArray(),
                    currentAudio.channels,
                    currentAudio.freq
                    );
                return(true);
            }
            return(false);
        }
Code example #14
        public void UpdateVideo(float elapsedFrameTime)
        {
            while (_currentVideo.playms <= elapsedFrameTime && TheoraPlay.THEORAPLAY_availableVideo(_theoraDecoder) != 0)
            {
                _currentVideo = _nextVideo;

                var nextFrame = TheoraPlay.THEORAPLAY_getVideo(_theoraDecoder);

                if (nextFrame != IntPtr.Zero)
                {
                    TheoraPlay.THEORAPLAY_freeVideo(_previousFrame);
                    _previousFrame = _videoStream;
                    _videoStream   = nextFrame;
                    _nextVideo     = GetVideoFrame(_videoStream);
                }
            }
            IsFinished = TheoraPlay.THEORAPLAY_isDecoding(_theoraDecoder) == 0;
        }
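UpdateVideo compares elapsedFrameTime against playms, which is in milliseconds, so the caller has to feed it a millisecond clock measured from the start of playback. A minimal driving sketch is shown below; the _theoraVideo field matches examples #5 and #18, while the Stopwatch field and method name are illustrative assumptions.

        // Illustrative caller, not part of the original sources: drive UpdateVideo
        // once per frame from a millisecond timer started when playback began.
        private System.Diagnostics.Stopwatch playbackTimer = System.Diagnostics.Stopwatch.StartNew();

        private void OnUpdateFrame()
        {
            _theoraVideo.UpdateVideo((float)playbackTimer.Elapsed.TotalMilliseconds);
            if (_theoraVideo.IsFinished)
            {
                playbackTimer.Stop();
            }
        }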
Code example #15
        internal void Initialize()
        {
            if (!IsDisposed)
            {
                Dispose();                 // We need to start from the beginning, don't we? :P
            }

            // Initialize the decoder.
            theoraDecoder = TheoraPlay.THEORAPLAY_startDecodeFile(
                fileName,
                150,                 // Max frames to buffer. Arbitrarily set to 5 seconds, assuming 30fps.
                TheoraPlay.THEORAPLAY_VideoFormat.THEORAPLAY_VIDFMT_IYUV
                );

            // Wait until the decoder is ready.
            while (TheoraPlay.THEORAPLAY_isInitialized(theoraDecoder) == 0)
            {
                Thread.Sleep(10);
            }

            // Initialize the video stream pointer and get our first frame.
            if (TheoraPlay.THEORAPLAY_hasVideoStream(theoraDecoder) != 0)
            {
                while (videoStream == IntPtr.Zero)
                {
                    videoStream = TheoraPlay.THEORAPLAY_getVideo(theoraDecoder);
                    Thread.Sleep(10);
                }

                TheoraPlay.THEORAPLAY_VideoFrame frame = TheoraPlay.getVideoFrame(videoStream);

                // We get the FramesPerSecond from the first frame.
                FramesPerSecond = (float)frame.fps;
                Width           = (int)frame.width;
                Height          = (int)frame.height;
            }

            IsDisposed = false;
        }
Code example #16
        public void Terminate()
        {
            // Stop and unassign the decoder.
            if (_theoraDecoder != IntPtr.Zero)
            {
                TheoraPlay.THEORAPLAY_stopDecode(_theoraDecoder);
                _theoraDecoder = IntPtr.Zero;
            }

            // Free and unassign the video stream.
            if (_videoStream != IntPtr.Zero)
            {
                TheoraPlay.THEORAPLAY_freeVideo(_videoStream);
                _videoStream = IntPtr.Zero;
            }
            _currentVideo = new TheoraPlay.THEORAPLAY_VideoFrame();
            _nextVideo    = new TheoraPlay.THEORAPLAY_VideoFrame();
            TheoraPlay.THEORAPLAY_freeVideo(_previousFrame);
            _previousFrame = IntPtr.Zero;
            _videoStream   = IntPtr.Zero;
            _disposed      = true;
        }
Code example #17
        internal void Dispose()
        {
            if (AttachedToPlayer)
            {
                return;                 // NOPE. VideoPlayer will do the honors.
            }

            // Stop and unassign the decoder.
            if (theoraDecoder != IntPtr.Zero)
            {
                TheoraPlay.THEORAPLAY_stopDecode(theoraDecoder);
                theoraDecoder = IntPtr.Zero;
            }

            // Free and unassign the video stream.
            if (videoStream != IntPtr.Zero)
            {
                TheoraPlay.THEORAPLAY_freeVideo(videoStream);
                videoStream = IntPtr.Zero;
            }

            IsDisposed = true;
        }
Code example #18
        private void DecodeAudio()
        {
            const int bufferSize = 4096 * 2;

            while (State != MediaState.Stopped && _theoraVideo != null)
            {
                var theoraDecoder = _theoraVideo.TheoraDecoder;

                while (State != MediaState.Stopped && TheoraPlay.THEORAPLAY_availableAudio(theoraDecoder) == 0)
                {
                    // don't use all of the cpu while waiting for data
                    Thread.Sleep(1);

                    // If the game object has somehow been disposed without the state being set to Stopped, then the thread will never
                    // exit, so check for that explicitly here.
                    if (GameObj != null && GameObj.Disposed)
                    {
                        return;
                    }
                }

                var data = new List <float>();
                TheoraPlay.THEORAPLAY_AudioPacket currentAudio;
                while (data.Count < bufferSize && TheoraPlay.THEORAPLAY_availableAudio(theoraDecoder) > 0)
                {
                    var audioPtr = TheoraPlay.THEORAPLAY_getAudio(theoraDecoder);
                    currentAudio = TheoraPlay.getAudioPacket(audioPtr);
                    data.AddRange(TheoraPlay.getSamples(currentAudio.samples, currentAudio.frames * currentAudio.channels));
                    TheoraPlay.THEORAPLAY_freeAudio(audioPtr);
                }

                if (State == MediaState.Playing)
                {
                    _fmodTheoraStream.Stream(data.ToArray());
                }
            }
        }
Code example #19
File: VideoPlayer.cs Project: BibleUs/FNA
        public void Play(Video video)
        {
            checkDisposed();

            // We need to assign this regardless of what happens next.
            Video = video;
            video.AttachedToPlayer = true;

            // FIXME: This is a part of the Duration hack!
            Video.Duration = TimeSpan.MaxValue;

            // Check the player state before attempting anything.
            if (State != MediaState.Stopped)
            {
                return;
            }

            // Update the player state now, for the thread we're about to make.
            State = MediaState.Playing;

            // Start the video if it hasn't been yet.
            if (Video.IsDisposed)
            {
                video.Initialize();
            }

            // Grab the first bit of audio. We're trying to start the decoding ASAP.
            if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.theoraDecoder) != 0)
            {
                InitAudioStream();
            }

            // Grab the first bit of video, set up the texture.
            if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
            {
                currentVideo  = TheoraPlay.getVideoFrame(Video.videoStream);
                previousFrame = Video.videoStream;
                do
                {
                    // The decoder miiight not be ready yet.
                    Video.videoStream = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                } while (Video.videoStream == IntPtr.Zero);
                nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);

                // The VideoPlayer will use the GraphicsDevice that is set now.
                if (currentDevice != Video.GraphicsDevice)
                {
                    GL_dispose();
                    currentDevice = Video.GraphicsDevice;
                    GL_initialize();
                }

                RenderTargetBinding overlap = videoTexture[0];
                videoTexture[0] = new RenderTargetBinding(
                    new RenderTarget2D(
                        currentDevice,
                        (int)currentVideo.width,
                        (int)currentVideo.height,
                        false,
                        SurfaceFormat.Color,
                        DepthFormat.None,
                        0,
                        RenderTargetUsage.PreserveContents
                        )
                    );
                if (overlap.RenderTarget != null)
                {
                    overlap.RenderTarget.Dispose();
                }
                GL_setupTextures(
                    (int)currentVideo.width,
                    (int)currentVideo.height
                    );
            }

            // Initialize the thread!
            FNAPlatform.Log("Starting Theora player...");
            timer.Start();
            if (audioStream != null)
            {
                audioStream.Play();
            }
            FNAPlatform.Log("Started!");
        }
Code example #20
        public Texture2D GetTexture()
        {
            checkDisposed();

            // Be sure we can even get something from TheoraPlay...
            if (State == MediaState.Stopped ||
                Video.theoraDecoder == IntPtr.Zero ||
                previousFrame == IntPtr.Zero ||
                Video.videoStream == IntPtr.Zero ||
                TheoraPlay.THEORAPLAY_isInitialized(Video.theoraDecoder) == 0)
            {
                return(videoTexture); // Screw it, give them the old one.
            }

            // Assign this locally, or else the thread will ruin your face.
            frameLocked = true;

#if VIDEOPLAYER_OPENGL
            // Set up an environment to muck about in.
            GL_pushState();

            // Bind our shader program.
            GL.UseProgram(shaderProgram);

            // Set uniform values.
            GL.Uniform1(
                GL.GetUniformLocation(shaderProgram, "samp0"),
                0
                );
            GL.Uniform1(
                GL.GetUniformLocation(shaderProgram, "samp1"),
                1
                );
            GL.Uniform1(
                GL.GetUniformLocation(shaderProgram, "samp2"),
                2
                );

            // Set up the vertex pointers/arrays.
            GL.VertexAttribPointer(
                0,
                2,
                VertexAttribPointerType.Float,
                false,
                2 * sizeof(float),
                vert_pos
                );
            GL.VertexAttribPointer(
                1,
                2,
                VertexAttribPointerType.Float,
                false,
                2 * sizeof(float),
                vert_tex
                );
            GL.EnableVertexAttribArray(0);
            GL.EnableVertexAttribArray(1);

            // Bind our target framebuffer.
            GL.BindFramebuffer(FramebufferTarget.Framebuffer, rgbaFramebuffer);

            // Prepare YUV GL textures with our current frame data
            GL.ActiveTexture(TextureUnit.Texture0);
            GL.BindTexture(TextureTarget.Texture2D, yuvTextures[0]);
            GL.TexSubImage2D(
                TextureTarget.Texture2D,
                0,
                0,
                0,
                (int)currentVideo.width,
                (int)currentVideo.height,
                PixelFormat.Luminance,
                PixelType.UnsignedByte,
                currentVideo.pixels
                );
            GL.ActiveTexture(TextureUnit.Texture1);
            GL.BindTexture(TextureTarget.Texture2D, yuvTextures[1]);
            GL.TexSubImage2D(
                TextureTarget.Texture2D,
                0,
                0,
                0,
                (int)currentVideo.width / 2,
                (int)currentVideo.height / 2,
                PixelFormat.Luminance,
                PixelType.UnsignedByte,
                new IntPtr(
                    currentVideo.pixels.ToInt64() +
                    (currentVideo.width * currentVideo.height)
                    )
                );
            GL.ActiveTexture(TextureUnit.Texture2);
            GL.BindTexture(TextureTarget.Texture2D, yuvTextures[2]);
            GL.TexSubImage2D(
                TextureTarget.Texture2D,
                0,
                0,
                0,
                (int)currentVideo.width / 2,
                (int)currentVideo.height / 2,
                PixelFormat.Luminance,
                PixelType.UnsignedByte,
                new IntPtr(
                    currentVideo.pixels.ToInt64() +
                    (currentVideo.width * currentVideo.height) +
                    (currentVideo.width / 2 * currentVideo.height / 2)
                    )
                );

            // Flip the viewport, because loldirectx
            GL.Viewport(
                0,
                0,
                (int)currentVideo.width,
                (int)currentVideo.height
                );

            // Draw the YUV textures to the framebuffer with our shader.
            GL.DrawArrays(BeginMode.TriangleStrip, 0, 4);

            // Clean up after ourselves.
            GL_popState();
#else
            // Just copy it to an array, since it's RGBA anyway.
            try
            {
                byte[] theoraPixels = TheoraPlay.getPixels(
                    currentVideo.pixels,
                    (int)currentVideo.width * (int)currentVideo.height * 4
                    );

                // TexImage2D.
                videoTexture.SetData <byte>(theoraPixels);
            }
            catch (Exception e)
            {
                System.Console.WriteLine(
                    "WARNING: THEORA FRAME COPY FAILED: " +
                    e.Message
                    );
                frameLocked = false;
                return(videoTexture); // Hope this still has something in it...
            }
#endif

            // Release the lock on the frame, we're done.
            frameLocked = false;

            return(videoTexture);
        }
Code example #21
        public void Play(OgvComponent video)
        {
            checkDisposed();

            // We need to assign this regardless of what happens next.
            Video = video;

            // FIXME: This is a part of the Duration hack!
            Video.Duration = TimeSpan.MaxValue;

            // Check the player state before attempting anything.
            if (State != MediaState.Stopped)
            {
                return;
            }

            // In rare cases, the thread might still be going. Wait until it's done.
            if (audioDecoderThread != null && audioDecoderThread.IsAlive)
            {
                Stop();
            }

            // Create new Thread instances in case we use this player multiple times.
            audioDecoderThread = new Thread(new ThreadStart(this.DecodeAudio));

            // Update the player state now, for the thread we're about to make.
            State = MediaState.Playing;

            // Start the video if it hasn't been yet.
            if (Video.IsDisposed)
            {
                video.Initialize();
            }

            // Grab the first bit of audio. We're trying to start the decoding ASAP.
            if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.TheoraDecoder) != 0)
            {
                audioDecoderThread.Start();
            }
            else
            {
                audioStarted = true; // Welp.
            }

            // Grab the first bit of video, set up the texture.
            if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.TheoraDecoder) != 0)
            {
                currentVideo  = TheoraPlay.getVideoFrame(Video.VideoStream);
                previousFrame = Video.VideoStream;
                do
                {
                    // The decoder miiight not be ready yet.
                    Video.VideoStream = TheoraPlay.THEORAPLAY_getVideo(Video.TheoraDecoder);
                } while (Video.VideoStream == IntPtr.Zero);
                nextVideo = TheoraPlay.getVideoFrame(Video.VideoStream);

                Texture overlap = videoTexture;
                videoTexture = new Texture(
//                    Game.Instance.GraphicsDevice,
//                    (int)currentVideo.width,
//                    (int)currentVideo.height,
//                    false,
//                    SurfaceFormat.Color
                    );
                overlap.Dispose();
#if VIDEOPLAYER_OPENGL
                GL_setupTargets(
                    (int)currentVideo.width,
                    (int)currentVideo.height
                    );
#endif
            }

            // Initialize the thread!
            Log.Editor.Write("Starting Theora player...");
            while (!audioStarted)
            {
                ;
            }
            timer.Start();
            if (audioSourceIndex != -1)
            {
                AL.SourcePlay(audioSourceIndex);
            }
            Log.Editor.Write(" Done starting Theora player!");
        }
Code example #22
        public Texture GetTexture()
        {
            checkDisposed();

            // Be sure we can even get something from TheoraPlay...
            if (State == MediaState.Stopped ||
                Video.TheoraDecoder == IntPtr.Zero ||
                TheoraPlay.THEORAPLAY_isInitialized(Video.TheoraDecoder) == 0 ||
                TheoraPlay.THEORAPLAY_hasVideoStream(Video.TheoraDecoder) == 0)
            {
                return(videoTexture); // Screw it, give them the old one.
            }

            // Get the latest video frames.
            bool missedFrame = false;

            while (nextVideo.playms <= timer.ElapsedMilliseconds && !missedFrame)
            {
                currentVideo = nextVideo;
                IntPtr nextFrame = TheoraPlay.THEORAPLAY_getVideo(Video.TheoraDecoder);
                if (nextFrame != IntPtr.Zero)
                {
                    TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                    previousFrame     = Video.VideoStream;
                    Video.VideoStream = nextFrame;
                    nextVideo         = TheoraPlay.getVideoFrame(Video.VideoStream);
                    missedFrame       = false;
                }
                else
                {
                    // Don't mind me, just ignoring that complete failure above!
                    missedFrame = true;
                }

                if (TheoraPlay.THEORAPLAY_isDecoding(Video.TheoraDecoder) == 0)
                {
                    // FIXME: This is part of the Duration hack!
                    Video.Duration = new TimeSpan(0, 0, 0, 0, (int)currentVideo.playms);

                    // Stop and reset the timer. If we're looping, the loop will start it again.
                    timer.Stop();
                    timer.Reset();

                    // If looping, go back to the start. Otherwise, we'll be exiting.
                    if (IsLooped && State == MediaState.Playing)
                    {
                        // Wait for the audio thread to end.
                        State = MediaState.Stopped;
                        if (audioDecoderThread.ThreadState != System.Threading.ThreadState.Unstarted && audioDecoderThread.IsAlive == true)
                        {
                            audioDecoderThread.Join();
                        }

                        // Now we pretend we're playing again.
                        State = MediaState.Playing;

                        // Free everything and start over.
                        TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                        previousFrame = IntPtr.Zero;
                        Video.Dispose();
                        Video.Initialize();

                        // Grab the initial audio again.
                        if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.TheoraDecoder) != 0)
                        {
                            audioDecoderThread = new Thread(new ThreadStart(DecodeAudio));
                            audioDecoderThread.Start();
                        }
                        else
                        {
                            audioStarted = true; // Welp.
                        }

                        // Grab the initial video again.
                        if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.TheoraDecoder) != 0)
                        {
                            currentVideo  = TheoraPlay.getVideoFrame(Video.VideoStream);
                            previousFrame = Video.VideoStream;
                            do
                            {
                                // The decoder miiight not be ready yet.
                                Video.VideoStream = TheoraPlay.THEORAPLAY_getVideo(Video.TheoraDecoder);
                            } while (Video.VideoStream == IntPtr.Zero);
                            nextVideo = TheoraPlay.getVideoFrame(Video.VideoStream);
                        }

                        // FIXME: Maybe use an actual thread synchronization technique.
                        while (!audioStarted)
                        {
                            ;
                        }

                        // Start! Again!
                        timer.Start();
                        if (audioSourceIndex != -1)
                        {
                            AL.SourcePlay(audioSourceIndex);
                        }
                    }
                    else
                    {
                        // Stop everything, clean up. We out.
                        State = MediaState.Stopped;
                        if (audioDecoderThread.ThreadState != System.Threading.ThreadState.Unstarted && audioDecoderThread.IsAlive == true)
                        {
                            audioDecoderThread.Join();
                        }
                        TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                        Video.Dispose();

                        // We're done, so give them the last frame.
                        return(videoTexture);
                    }
                }
            }

#if VIDEOPLAYER_OPENGL
            // Set up an environment to muck about in.
            GL_pushState();

            // Bind our shader program.
            GL.UseProgram(shaderProgram);

            // Set uniform values.
            GL.Uniform1(
                GL.GetUniformLocation(shaderProgram, "samp0"),
                0
                );
            GL.Uniform1(
                GL.GetUniformLocation(shaderProgram, "samp1"),
                1
                );
            GL.Uniform1(
                GL.GetUniformLocation(shaderProgram, "samp2"),
                2
                );

            // Set up the vertex pointers/arrays.
            GL.VertexAttribPointer(
                0,
                2,
                VertexAttribPointerType.Float,
                false,
                2 * sizeof(float),
                vert_pos
                );
            GL.VertexAttribPointer(
                1,
                2,
                VertexAttribPointerType.Float,
                false,
                2 * sizeof(float),
                vert_tex
                );
            GL.EnableVertexAttribArray(0);
            GL.EnableVertexAttribArray(1);

            // Bind our target framebuffer.
            GL.BindFramebuffer(FramebufferTarget.Framebuffer, rgbaFramebuffer);

            // Prepare YUV GL textures with our current frame data
            GL.ActiveTexture(TextureUnit.Texture0);
            GL.BindTexture(TextureTarget.Texture2D, yuvTextures[0]);
            GL.TexSubImage2D(
                TextureTarget.Texture2D,
                0,
                0,
                0,
                (int)currentVideo.width,
                (int)currentVideo.height,
                PixelFormat.Luminance,
                PixelType.UnsignedByte,
                currentVideo.pixels
                );
            GL.ActiveTexture(TextureUnit.Texture1);
            GL.BindTexture(TextureTarget.Texture2D, yuvTextures[1]);
            GL.TexSubImage2D(
                TextureTarget.Texture2D,
                0,
                0,
                0,
                (int)currentVideo.width / 2,
                (int)currentVideo.height / 2,
                PixelFormat.Luminance,
                PixelType.UnsignedByte,
                new IntPtr(
                    currentVideo.pixels.ToInt64() +
                    (currentVideo.width * currentVideo.height)
                    )
                );
            GL.ActiveTexture(TextureUnit.Texture2);
            GL.BindTexture(TextureTarget.Texture2D, yuvTextures[2]);
            GL.TexSubImage2D(
                TextureTarget.Texture2D,
                0,
                0,
                0,
                (int)currentVideo.width / 2,
                (int)currentVideo.height / 2,
                PixelFormat.Luminance,
                PixelType.UnsignedByte,
                new IntPtr(
                    currentVideo.pixels.ToInt64() +
                    (currentVideo.width * currentVideo.height) +
                    (currentVideo.width / 2 * currentVideo.height / 2)
                    )
                );

            // Flip the viewport, because loldirectx
            GL.Viewport(
                0,
                0,
                (int)currentVideo.width,
                (int)currentVideo.height
                );

            // Draw the YUV textures to the framebuffer with our shader.
            GL.DrawArrays(BeginMode.TriangleStrip, 0, 4);

            // Clean up after ourselves.
            GL_popState();
#else
            // Just copy it to an array, since it's RGBA anyway.
            try
            {
                byte[] theoraPixels = TheoraPlay.getPixels(
                    currentVideo.pixels,
                    (int)currentVideo.width * (int)currentVideo.height * 4
                    );

                // TexImage2D.
                videoTexture.SetData <byte>(theoraPixels);
            }
            catch (Exception e)
            {
                System.Console.WriteLine(
                    "WARNING: THEORA FRAME COPY FAILED: " +
                    e.Message
                    );
                frameLocked = false;
                return(videoTexture); // Hope this still has something in it...
            }
#endif

            return(videoTexture);
        }
Code example #23
        private void RunVideo()
        {
            // FIXME: Maybe use an actual thread synchronization technique.
            while (!audioStarted && State != MediaState.Stopped)
            {
                ;
            }

            while (State != MediaState.Stopped)
            {
                // Someone needs to look at their memory management...
                if (Game.Instance == null)
                {
                    System.Console.WriteLine("Game exited before video player! Halting...");
                    State = MediaState.Stopped;
                }

                // Sleep when paused, update the video state when playing.
                if (State == MediaState.Paused)
                {
                    // Pause the OpenAL source.
                    if (AL.GetSourceState(audioSourceIndex) == ALSourceState.Playing)
                    {
                        AL.SourcePause(audioSourceIndex);
                    }

                    // Stop the timer in here so we know when we really stopped.
                    if (timer.IsRunning)
                    {
                        timer.Stop();
                    }

                    // Arbitrarily 1 frame in a 30fps movie.
                    Thread.Sleep(33);
                }
                else
                {
                    // Start the timer, whether we're starting or unpausing.
                    if (!timer.IsRunning)
                    {
                        timer.Start();
                    }

                    // If we're getting here, we should be playing the audio...
                    if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.theoraDecoder) != 0)
                    {
                        if (AL.GetSourceState(audioSourceIndex) != ALSourceState.Playing)
                        {
                            AL.SourcePlay(audioSourceIndex);
                        }
                    }

                    // Get the next video frame from the decoder, if a stream exists.
                    if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
                    {
                        // Only step when it's time to do so.
                        if (nextVideo.playms <= timer.ElapsedMilliseconds)
                        {
                            // Wait until GetTexture() is done.

                            // FIXME: Maybe use an actual thread synchronization technique.
                            while (frameLocked)
                            {
                                ;
                            }

                            // Assign the new currentVideo, free the old one.
                            currentVideo = nextVideo;

                            // Get the next frame ready, free the old one.
                            IntPtr oldestFrame = previousFrame;
                            previousFrame     = Video.videoStream;
                            Video.videoStream = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                            if (Video.videoStream != IntPtr.Zero)
                            {
                                // Assign next frame, if it exists.
                                nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);

                                // Then free the _really_ old frame.
                                TheoraPlay.THEORAPLAY_freeVideo(oldestFrame);
                            }
                        }
                    }

                    // If we're done decoding, we hit the end.
                    if (TheoraPlay.THEORAPLAY_isDecoding(Video.theoraDecoder) == 0)
                    {
                        // FIXME: This is a part of the Duration hack!
                        Video.Duration = new TimeSpan(0, 0, 0, 0, (int)currentVideo.playms);

                        // Stop and reset the timer.
                        // If we're looping, the loop will start it again.
                        timer.Stop();
                        timer.Reset();

                        // If looping, go back to the start. Otherwise, we'll be exiting.
                        if (IsLooped && State == MediaState.Playing)
                        {
                            // Wait for the audio thread to end.
                            State = MediaState.Stopped;
                            audioDecoderThread.Join();

                            // Now we pretend we're playing again.
                            State = MediaState.Playing;

                            // Free everything and start over.
                            TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                            previousFrame = IntPtr.Zero;
                            Video.Dispose();
                            Video.Initialize();

                            // Grab the initial audio again.
                            if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.theoraDecoder) != 0)
                            {
                                currentAudio       = TheoraPlay.getAudioPacket(Video.audioStream);
                                audioDecoderThread = new Thread(new ThreadStart(DecodeAudio));
                                audioDecoderThread.Start();
                            }
                            else
                            {
                                audioStarted = true; // Welp.
                            }

                            // Grab the initial video again.
                            if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
                            {
                                currentVideo  = TheoraPlay.getVideoFrame(Video.videoStream);
                                previousFrame = Video.videoStream;
                                do
                                {
                                    // The decoder miiight not be ready yet.
                                    Video.videoStream = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                                } while (Video.videoStream == IntPtr.Zero);
                                nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
                            }

                            // FIXME: Maybe use an actual thread synchronization technique.
                            while (!audioStarted && State != MediaState.Stopped)
                            {
                                ;
                            }
                        }
                        else
                        {
                            State = MediaState.Stopped;
                        }
                    }
                }
            }

            // Reset the video timer.
            timer.Stop();
            timer.Reset();

            // Stop the decoding, we don't need it anymore.
            audioDecoderThread.Join();

            // We're desperately trying to keep this until the very end.
            TheoraPlay.THEORAPLAY_freeVideo(previousFrame);

            // We're not playing any video anymore.
            Video.Dispose();
        }
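Several loops above spin on audioStarted and frameLocked behind the comment "FIXME: Maybe use an actual thread synchronization technique." One way to answer that FIXME, sketched here as an assumption rather than FNA's actual fix, is a ManualResetEventSlim that the audio thread sets once its OpenAL source is playing.

        // Illustrative sketch: replace the `while (!audioStarted) ;` spins with an event.
        private readonly System.Threading.ManualResetEventSlim audioStartedEvent =
            new System.Threading.ManualResetEventSlim(false);

        // The audio decoder thread calls this once AL.SourcePlay has been issued.
        private void SignalAudioStarted()
        {
            audioStartedEvent.Set();
        }

        // RunVideo (and the loop-restart path) would then wait with a timeout instead of spinning.
        private bool WaitForAudioStart(int timeoutMilliseconds)
        {
            return audioStartedEvent.Wait(timeoutMilliseconds);
        }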
Code example #24
File: VideoPlayer.cs Project: zwcloud/FNA
        public void Play(Video video)
        {
            checkDisposed();

            // We need to assign this regardless of what happens next.
            Video = video;
            video.AttachedToPlayer = true;

            // FIXME: This is a part of the Duration hack!
            Video.Duration = TimeSpan.MaxValue;

            // Check the player state before attempting anything.
            if (State != MediaState.Stopped)
            {
                return;
            }

            // Update the player state now, for the thread we're about to make.
            State = MediaState.Playing;

            // Start the video if it hasn't been yet.
            if (Video.IsDisposed)
            {
                video.Initialize();
            }

            // Grab the first bit of audio. We're trying to start the decoding ASAP.
            if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.theoraDecoder) != 0)
            {
                InitAudioStream();
            }

            // Grab the first bit of video, set up the texture.
            if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
            {
                currentVideo  = TheoraPlay.getVideoFrame(Video.videoStream);
                previousFrame = Video.videoStream;
                do
                {
                    // The decoder miiight not be ready yet.
                    Video.videoStream = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                } while (Video.videoStream == IntPtr.Zero);
                nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);

                Texture2D overlap = videoTexture;
                videoTexture = new Texture2D(
                    currentDevice,
                    (int)currentVideo.width,
                    (int)currentVideo.height,
                    false,
                    SurfaceFormat.Color
                    );
                overlap.Dispose();
#if VIDEOPLAYER_OPENGL
                GL_setupTargets(
                    (int)currentVideo.width,
                    (int)currentVideo.height
                    );
#endif
            }

            // Initialize the thread!
            System.Console.Write("Starting Theora player...");
            timer.Start();
            if (audioStream != null)
            {
                audioStream.Play();
            }
            System.Console.WriteLine(" Done!");
        }
Code example #25
File: VideoPlayer.cs Project: zwcloud/FNA
        public Texture2D GetTexture()
        {
            checkDisposed();

            // Be sure we can even get something from TheoraPlay...
            if (State == MediaState.Stopped ||
                Video.theoraDecoder == IntPtr.Zero ||
                TheoraPlay.THEORAPLAY_isInitialized(Video.theoraDecoder) == 0 ||
                TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) == 0)
            {
                return(videoTexture);                // Screw it, give them the old one.
            }

            // Get the latest video frames.
            bool missedFrame = false;

            while (nextVideo.playms <= timer.ElapsedMilliseconds && !missedFrame)
            {
                currentVideo = nextVideo;
                IntPtr nextFrame = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                if (nextFrame != IntPtr.Zero)
                {
                    TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                    previousFrame     = Video.videoStream;
                    Video.videoStream = nextFrame;
                    nextVideo         = TheoraPlay.getVideoFrame(Video.videoStream);
                    missedFrame       = false;
                }
                else
                {
                    // Don't mind me, just ignoring that complete failure above!
                    missedFrame = true;
                }

                if (TheoraPlay.THEORAPLAY_isDecoding(Video.theoraDecoder) == 0)
                {
                    // FIXME: This is part of the Duration hack!
                    Video.Duration = new TimeSpan(0, 0, 0, 0, (int)currentVideo.playms);

                    // Stop and reset the timer. If we're looping, the loop will start it again.
                    timer.Stop();
                    timer.Reset();

                    // If looping, go back to the start. Otherwise, we'll be exiting.
                    if (IsLooped && State == MediaState.Playing)
                    {
                        // Kill the audio, no matter what.
                        if (audioStream != null)
                        {
                            audioStream.Stop();
                            audioStream.Dispose();
                            audioStream = null;
                        }

                        // Free everything and start over.
                        TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                        previousFrame          = IntPtr.Zero;
                        Video.AttachedToPlayer = false;
                        Video.Dispose();
                        Video.AttachedToPlayer = true;
                        Video.Initialize();

                        // Grab the initial audio again.
                        if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.theoraDecoder) != 0)
                        {
                            InitAudioStream();
                        }

                        // Grab the initial video again.
                        if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
                        {
                            currentVideo  = TheoraPlay.getVideoFrame(Video.videoStream);
                            previousFrame = Video.videoStream;
                            do
                            {
                                // The decoder miiight not be ready yet.
                                Video.videoStream = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                            } while (Video.videoStream == IntPtr.Zero);
                            nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
                        }

                        // Start! Again!
                        timer.Start();
                        if (audioStream != null)
                        {
                            audioStream.Play();
                        }
                    }
                    else
                    {
                        // Stop everything, clean up. We out.
                        State = MediaState.Stopped;
                        if (audioStream != null)
                        {
                            audioStream.Stop();
                            audioStream.Dispose();
                            audioStream = null;
                        }
                        TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                        Video.AttachedToPlayer = false;
                        Video.Dispose();

                        // We're done, so give them the last frame.
                        return(videoTexture);
                    }
                }
            }

#if VIDEOPLAYER_OPENGL
            // Set up an environment to muck about in.
            GL_pushState();

            // Bind our shader program.
            currentDevice.GLDevice.glUseProgram(shaderProgram);

            // We're using client-side arrays like CAVEMEN
            currentDevice.GLDevice.BindVertexBuffer(OpenGLDevice.OpenGLVertexBuffer.NullBuffer);

            // Set up the vertex pointers/arrays.
            currentDevice.GLDevice.AttributeEnabled[0] = true;
            currentDevice.GLDevice.AttributeEnabled[1] = true;
            for (int i = 2; i < currentDevice.GLDevice.AttributeEnabled.Length; i += 1)
            {
                currentDevice.GLDevice.AttributeEnabled[i] = false;
            }
            currentDevice.GLDevice.FlushGLVertexAttributes();
            currentDevice.GLDevice.VertexAttribPointer(
                0,
                2,
                VertexElementFormat.Single,
                false,
                2 * sizeof(float),
                vertPosPtr
                );
            currentDevice.GLDevice.VertexAttribPointer(
                1,
                2,
                VertexElementFormat.Single,
                false,
                2 * sizeof(float),
                vertTexPtr
                );

            // Bind our target framebuffer.
            currentDevice.GLDevice.BindDrawFramebuffer(rgbaFramebuffer);

            // Prepare YUV GL textures with our current frame data
            currentDevice.GLDevice.glActiveTexture(
                OpenGLDevice.GLenum.GL_TEXTURE0
                );
            currentDevice.GLDevice.glBindTexture(
                OpenGLDevice.GLenum.GL_TEXTURE_2D,
                yuvTextures[0]
                );
            currentDevice.GLDevice.glTexSubImage2D(
                OpenGLDevice.GLenum.GL_TEXTURE_2D,
                0,
                0,
                0,
                (int)currentVideo.width,
                (int)currentVideo.height,
                OpenGLDevice.GLenum.GL_LUMINANCE,
                OpenGLDevice.GLenum.GL_UNSIGNED_BYTE,
                currentVideo.pixels
                );
            currentDevice.GLDevice.glActiveTexture(
                OpenGLDevice.GLenum.GL_TEXTURE0 + 1
                );
            currentDevice.GLDevice.glBindTexture(
                OpenGLDevice.GLenum.GL_TEXTURE_2D,
                yuvTextures[1]
                );
            currentDevice.GLDevice.glTexSubImage2D(
                OpenGLDevice.GLenum.GL_TEXTURE_2D,
                0,
                0,
                0,
                (int)(currentVideo.width / 2),
                (int)(currentVideo.height / 2),
                OpenGLDevice.GLenum.GL_LUMINANCE,
                OpenGLDevice.GLenum.GL_UNSIGNED_BYTE,
                new IntPtr(
                    currentVideo.pixels.ToInt64() +
                    (currentVideo.width * currentVideo.height)
                    )
                );
            currentDevice.GLDevice.glActiveTexture(
                OpenGLDevice.GLenum.GL_TEXTURE0 + 2
                );
            currentDevice.GLDevice.glBindTexture(
                OpenGLDevice.GLenum.GL_TEXTURE_2D,
                yuvTextures[2]
                );
            currentDevice.GLDevice.glTexSubImage2D(
                OpenGLDevice.GLenum.GL_TEXTURE_2D,
                0,
                0,
                0,
                (int)(currentVideo.width / 2),
                (int)(currentVideo.height / 2),
                OpenGLDevice.GLenum.GL_LUMINANCE,
                OpenGLDevice.GLenum.GL_UNSIGNED_BYTE,
                new IntPtr(
                    currentVideo.pixels.ToInt64() +
                    (currentVideo.width * currentVideo.height) +
                    (currentVideo.width / 2 * currentVideo.height / 2)
                    )
                );

            // Flip the viewport, because loldirectx
            currentDevice.GLDevice.glViewport(
                0,
                0,
                (int)currentVideo.width,
                (int)currentVideo.height
                );

            // Draw the YUV textures to the framebuffer with our shader.
            currentDevice.GLDevice.glDrawArrays(
                OpenGLDevice.GLenum.GL_TRIANGLE_STRIP,
                0,
                4
                );

            // Clean up after ourselves.
            GL_popState();
#else
            // Just copy it to an array, since it's RGBA anyway.
            try
            {
                byte[] theoraPixels = TheoraPlay.getPixels(
                    currentVideo.pixels,
                    (int)currentVideo.width * (int)currentVideo.height * 4
                    );

                // TexImage2D.
                videoTexture.SetData<byte>(theoraPixels);
            }
            catch (Exception e)
            {
                // I hope we've still got something in videoTexture!
                System.Console.WriteLine(
                    "WARNING: THEORA FRAME COPY FAILED: " +
                    e.Message
                    );
            }
#endif

            return(videoTexture);
        }
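A hedged sketch of how GetTexture() is usually consumed: it has to be polled every frame while the player is running, because the call above is also what advances decoding and handles looping. The player and spriteBatch fields are assumed to already exist on the Game:

        protected override void Draw(GameTime gameTime)
        {
            GraphicsDevice.Clear(Color.Black);
            if (player.State == MediaState.Playing)
            {
                // Poll the current frame; this also drives the decoder forward.
                Texture2D frame = player.GetTexture();
                spriteBatch.Begin();
                spriteBatch.Draw(frame, GraphicsDevice.Viewport.Bounds, Color.White);
                spriteBatch.End();
            }
            base.Draw(gameTime);
        }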
Code Example #26
        public Texture2D GetTexture()
        {
            checkDisposed();

            // Be sure we can even get something from TheoraPlay...
            if (State == MediaState.Stopped ||
                Video.theoraDecoder == IntPtr.Zero ||
                TheoraPlay.THEORAPLAY_isInitialized(Video.theoraDecoder) == 0 ||
                TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) == 0)
            {
                // Screw it, give them the old one.
                return(videoTexture[0].RenderTarget as Texture2D);
            }

            // Get the latest video frames.
            bool missedFrame = false;

            while (nextVideo.playms <= timer.ElapsedMilliseconds && !missedFrame)
            {
                currentVideo = nextVideo;
                IntPtr nextFrame = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                if (nextFrame != IntPtr.Zero)
                {
                    TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                    previousFrame     = Video.videoStream;
                    Video.videoStream = nextFrame;
                    nextVideo         = TheoraPlay.getVideoFrame(Video.videoStream);
                    missedFrame       = false;
                }
                else
                {
                    // Don't mind me, just ignoring that complete failure above!
                    missedFrame = true;
                }

                if (TheoraPlay.THEORAPLAY_isDecoding(Video.theoraDecoder) == 0)
                {
                    // FIXME: This is part of the Duration hack!
                    Video.Duration = new TimeSpan(0, 0, 0, 0, (int)currentVideo.playms);

                    // Stop and reset the timer. If we're looping, the loop will start it again.
                    timer.Stop();
                    timer.Reset();

                    // If looping, go back to the start. Otherwise, we'll be exiting.
                    if (IsLooped && State == MediaState.Playing)
                    {
                        // Kill the audio, no matter what.
                        if (audioStream != null)
                        {
                            audioStream.Stop();
                            audioStream.Dispose();
                            audioStream = null;
                        }

                        // Free everything and start over.
                        TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                        previousFrame          = IntPtr.Zero;
                        Video.AttachedToPlayer = false;
                        Video.Dispose();
                        Video.AttachedToPlayer = true;
                        Video.Initialize();

                        // Grab the initial audio again.
                        if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.theoraDecoder) != 0)
                        {
                            InitAudioStream();
                        }

                        // Grab the initial video again.
                        if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
                        {
                            currentVideo  = TheoraPlay.getVideoFrame(Video.videoStream);
                            previousFrame = Video.videoStream;
                            do
                            {
                                // The decoder miiight not be ready yet.
                                Video.videoStream = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                            } while (Video.videoStream == IntPtr.Zero);
                            nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
                        }

                        // Start! Again!
                        timer.Start();
                        if (audioStream != null)
                        {
                            audioStream.Play();
                        }
                    }
                    else
                    {
                        // Stop everything, clean up. We out.
                        State = MediaState.Stopped;
                        if (audioStream != null)
                        {
                            audioStream.Stop();
                            audioStream.Dispose();
                            audioStream = null;
                        }
                        TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                        Video.AttachedToPlayer = false;
                        Video.Dispose();

                        // We're done, so give them the last frame.
                        return(videoTexture[0].RenderTarget as Texture2D);
                    }
                }
            }

            // Set up an environment to muck about in.
            GL_pushState();

            // Prepare YUV GL textures with our current frame data
            currentDevice.GLDevice.SetTextureData2DPointer(
                yuvTextures[0],
                currentVideo.pixels
                );
            currentDevice.GLDevice.SetTextureData2DPointer(
                yuvTextures[1],
                new IntPtr(
                    currentVideo.pixels.ToInt64() +
                    (currentVideo.width * currentVideo.height)
                    )
                );
            currentDevice.GLDevice.SetTextureData2DPointer(
                yuvTextures[2],
                new IntPtr(
                    currentVideo.pixels.ToInt64() +
                    (currentVideo.width * currentVideo.height) +
                    (currentVideo.width / 2 * currentVideo.height / 2)
                    )
                );

            // Draw the YUV textures to the framebuffer with our shader.
            currentDevice.DrawPrimitives(
                PrimitiveType.TriangleStrip,
                0,
                2
                );

            // Clean up after ourselves.
            GL_popState();

            // Finally.
            return(videoTexture[0].RenderTarget as Texture2D);
        }
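The pointer offsets passed to SetTextureData2DPointer above assume TheoraPlay returns a packed I420 buffer: one full-resolution Y plane followed by quarter-resolution U and V planes. A small worked sketch of those offsets, using a hypothetical 640x360 frame:

        // Packed I420 layout, hypothetical 640x360 frame.
        int width = 640, height = 360;
        long yOffset = 0;                                    // Y plane starts at currentVideo.pixels.
        long uOffset = width * height;                       // 230400: right after the Y plane.
        long vOffset = uOffset + (width / 2) * (height / 2); // 230400 + 57600 = 288000.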
Code Example #27
        public void Play(Video video)
        {
            checkDisposed();

            // We need to assign this regardless of what happens next.
            Video = video;
            Video.AttachedToPlayer = true;

            // FIXME: This is a part of the Duration hack!
            if (Video.needsDurationHack)
            {
                Video.Duration = TimeSpan.MaxValue;
            }

            // Check the player state before attempting anything.
            if (State != MediaState.Stopped)
            {
                return;
            }

            // Update the player state now, before initializing
            State = MediaState.Playing;

            // Hook up the decoder to this player
            InitializeTheoraStream();

            // Set up the texture data
            if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
            {
                // The VideoPlayer will use the GraphicsDevice that is set now.
                if (currentDevice != Video.GraphicsDevice)
                {
                    GL_dispose();
                    currentDevice = Video.GraphicsDevice;
                    GL_initialize();
                }

                RenderTargetBinding overlap = videoTexture[0];
                videoTexture[0] = new RenderTargetBinding(
                    new RenderTarget2D(
                        currentDevice,
                        (int)currentVideo.width,
                        (int)currentVideo.height,
                        false,
                        SurfaceFormat.Color,
                        DepthFormat.None,
                        0,
                        RenderTargetUsage.PreserveContents
                        )
                    );
                if (overlap.RenderTarget != null)
                {
                    overlap.RenderTarget.Dispose();
                }
                GL_setupTextures(
                    (int)currentVideo.width,
                    (int)currentVideo.height
                    );
            }

            // The player can finally start now!
            FNALoggerEXT.LogInfo("Starting Theora player...");
            timer.Start();
            if (audioStream != null)
            {
                audioStream.Play();
            }
            FNALoggerEXT.LogInfo("Started!");
        }
Code Example #28
        public Texture2D GetTexture()
        {
            checkDisposed();

            if (Video == null)
            {
                throw new InvalidOperationException();
            }

            // Be sure we can even get something from TheoraPlay...
            if (State == MediaState.Stopped ||
                Video.theoraDecoder == IntPtr.Zero ||
                TheoraPlay.THEORAPLAY_isInitialized(Video.theoraDecoder) == 0 ||
                TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) == 0)
            {
                // Screw it, give them the old one.
                return(videoTexture[0].RenderTarget as Texture2D);
            }

            // Get the latest video frames.
            bool hasFrames = true;

            while (nextVideo.playms <= timer.ElapsedMilliseconds && hasFrames)
            {
                currentVideo = nextVideo;
                hasFrames    = TheoraPlay.THEORAPLAY_availableVideo(Video.theoraDecoder) > 0;
                if (hasFrames)
                {
                    IntPtr nextFrame = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                    TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                    previousFrame     = Video.videoStream;
                    Video.videoStream = nextFrame;
                    nextVideo         = TheoraPlay.getVideoFrame(Video.videoStream);
                }
            }

            // Check for the end...
            if (TheoraPlay.THEORAPLAY_isDecoding(Video.theoraDecoder) == 0)
            {
                // FIXME: This is part of the Duration hack!
                if (Video.needsDurationHack)
                {
                    Video.Duration = new TimeSpan(0, 0, 0, 0, (int)currentVideo.playms);
                }

                // Stop and reset the timer. If we're looping, the loop will start it again.
                timer.Stop();
                timer.Reset();

                // Kill whatever audio/video we've got
                if (audioStream != null)
                {
                    audioStream.Stop();
                    audioStream.Dispose();
                    audioStream = null;
                }
                TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                Video.AttachedToPlayer = false;
                Video.Dispose();

                // If looping, go back to the start. Otherwise, we'll be exiting.
                if (IsLooped && State == MediaState.Playing)
                {
                    // Starting over!
                    Video.AttachedToPlayer = true;
                    InitializeTheoraStream();

                    // Start! Again!
                    timer.Start();
                    if (audioStream != null)
                    {
                        audioStream.Play();
                    }
                }
                else
                {
                    // We out, give them the last frame.
                    State = MediaState.Stopped;
                    return(videoTexture[0].RenderTarget as Texture2D);
                }
            }

            // Set up an environment to muck about in.
            GL_pushState();

            // Prepare YUV GL textures with our current frame data
            currentDevice.GLDevice.SetTextureData2DPointer(
                yuvTextures[0],
                currentVideo.pixels
                );
            currentDevice.GLDevice.SetTextureData2DPointer(
                yuvTextures[1],
                new IntPtr(
                    currentVideo.pixels.ToInt64() +
                    (currentVideo.width * currentVideo.height)
                    )
                );
            currentDevice.GLDevice.SetTextureData2DPointer(
                yuvTextures[2],
                new IntPtr(
                    currentVideo.pixels.ToInt64() +
                    (currentVideo.width * currentVideo.height) +
                    (currentVideo.width / 2 * currentVideo.height / 2)
                    )
                );

            // Draw the YUV textures to the framebuffer with our shader.
            currentDevice.DrawPrimitives(
                PrimitiveType.TriangleStrip,
                0,
                2
                );

            // Clean up after ourselves.
            GL_popState();

            // Finally.
            return(videoTexture[0].RenderTarget as Texture2D);
        }