/// <summary>
/// Caches the flash overlay image, resets the timeout, and registers a media
/// player event listener that mirrors player events into CurrentState.
/// </summary>
void Awake()
{
    flash = GetComponentInChildren<Image>();
    timeout = InitialTimeout;

    Player.Events.AddListener((mp, e, code) =>
    {
        switch (e)
        {
            case MediaPlayerEvent.EventType.Closing:
            case MediaPlayerEvent.EventType.FinishedPlaying:
                CurrentState = VideoState.None;
                break;
            case MediaPlayerEvent.EventType.Error:
                Debug.LogError("ERROR VID: " + Link.tag + " || " + Link.title);
                CurrentState = VideoState.Error;
                break;
            case MediaPlayerEvent.EventType.ReadyToPlay:
                CurrentState = VideoState.Ready;
                break;
            case MediaPlayerEvent.EventType.Started:
                // Materials are only built once real playback has begun.
                WordManager.Inst.CreateMaterialsForTagPair(Index, Link.tag, WrongTag);
                CurrentState = VideoState.Playing;
                break;
        }
    });
}
// Drives the capture state machine: starts recording when ready, stops it
// after a fixed tick budget (~240 calls), then hands the file off for upload.
void processVideo()
{
    if (videostate == VideoState.VIDEO_READY)
    {
        videostate = VideoState.VIDEO_STARTED;
        MLog("Starting video");
        MLCamera.StartVideoCapture(filepath);
        MLog("Started video");
        count = 0; // restart the tick counter for this recording
    }

    count++;

    // Stop once the tick budget is exhausted.
    if (count > 240 && videostate == VideoState.VIDEO_STARTED)
    {
        videostate = VideoState.VIDEO_ENDED;
        MLog("Stopping video");
        MLCamera.StopVideoCapture();
        MLog("Stopped video");
    }

    if (videostate == VideoState.VIDEO_ENDED)
    {
        sendVideo(); // upload / forward the captured clip
    }
}
/// <summary>
/// Switches the UI into the paused state: pauses the media element, enables
/// the transport/volume controls, and swaps the Pause button for Play.
/// </summary>
private void GoToPauseState()
{
    ME_MainShow.LoadedBehavior = MediaState.Manual;
    ME_MainShow.Pause();
    CurrentState = VideoState.Pause;
    progressTimer.Start();

    //BTN_KuaiJin.IsEnabled = true;
    //BTN_KuaiTui.IsEnabled = true;
    // UC_ProgressBar.ProgressValue = 0;
    UC_ProgressBar.IsEnabled = true;
    BTN_Vdown.IsEnabled = true;
    BTN_Vup.IsEnabled = true;

    // Swap pause/play buttons.
    BTN_Pause.Visibility = Visibility.Collapsed;
    BTN_Play.Visibility = Visibility.Visible;
    BTN_Pause.IsEnabled = true;
    BTN_Play.IsEnabled = true;

    TBK_Fail.Visibility = Visibility.Collapsed;
    BD_DownConsole.Visibility = Visibility.Visible;

    // Stop and hide the buffering spinner.
    this.RTF_Buffering.BeginAnimation(RotateTransform.AngleProperty, null);
    EP_Buffering.Visibility = Visibility.Collapsed;
}
/// <summary>
/// Switches the UI into the buffering state: disables the transport/volume
/// controls and shows the spinning buffering indicator.
/// </summary>
private void GoToBufferingState()
{
    CurrentState = VideoState.Buffering;
    progressTimer.Start();

    //BTN_KuaiJin.IsEnabled = false;
    //BTN_KuaiTui.IsEnabled = false;
    UC_ProgressBar.ProgressValue = 0;
    UC_ProgressBar.IsEnabled = false;
    BTN_Vdown.IsEnabled = false;
    BTN_Vup.IsEnabled = false;

    // Show a disabled Pause button while buffering.
    BTN_Pause.Visibility = Visibility.Visible;
    BTN_Play.Visibility = Visibility.Collapsed;
    BTN_Pause.IsEnabled = false;
    BTN_Play.IsEnabled = false;

    TBK_Fail.Visibility = Visibility.Collapsed;
    BD_DownConsole.Visibility = Visibility.Visible;

    // Start the rotating buffering spinner.
    this.RTF_Buffering.BeginAnimation(RotateTransform.AngleProperty, ANI_Rotate);
    EP_Buffering.Visibility = Visibility.Visible;
}
// Handles the "end of clip" transition: loops, pauses on the final frame,
// or stops, then fires the onVideoEnd callback for listeners.
protected void eventEnd()
{
    _state = VideoState.END;
    Debug.Log(getStamp() + videoPlayer.clip.name + " | eventEnd", transform);

    if (videoPlayer.isLooping)
    {
        Debug.Log(getStamp() + "event end > looping");
        solveLooping();
    }
    else if (pauseAtEnd)
    {
        Debug.Log(getStamp() + videoPlayer.clip.name + " | eventEnd | paused");
        videoPlayer.Pause();
        Debug.Log("end > pauseatend (player is playing ? " + videoPlayer.isPlaying);
        _state = VideoState.PAUSED;
    }
    else
    {
        Debug.Log(getStamp() + "player not looping, calling stop");
        stop(); // not visible
    }

    //callback on video end
    if (onVideoEnd != null)
    {
        onVideoEnd();
    }
}
/// <summary>
/// Reads playback state out of a VMD advanced decoder in the target process.
/// Returns null while no frame has been decoded yet (curFrame still negative).
/// </summary>
private VideoState? ReadVmdDecoder(IntPtr decoderAddr)
{
    var coktelDecoderAddr = MemoryReader.ReadIntPtr(decoderAddr + vmdAdvancedDecoderDecoderOffset);
    var curFrame = MemoryReader.ReadInt32(coktelDecoderAddr + coktelDecoderCurFrameOffset);
    if (curFrame <= -1)
    {
        return null;
    }

    var state = new VideoState
    {
        CurrentFrame = (uint)curFrame,
        FrameCount = MemoryReader.ReadUInt32(coktelDecoderAddr + coktelDecoderFrameCountOffset),
        FrameRate = ReadRational(coktelDecoderAddr + coktelDecoderFrameRateOffset)
    };

    // Only an actual VMDDecoder instance exposes the backing stream, so the
    // file name is read conditionally after an RTTI base-class check.
    if (RttiReader.HasBaseClass(coktelDecoderAddr, ".?AVVMDDecoder@Video@@"))
    {
        var streamPtrVal = MemoryReader.ReadIntPtr(coktelDecoderAddr + vmdDecoderStreamOffset);
        state.FileName = ReadFileName(streamPtrVal);
    }

    return state;
}
// Records the stop transition and logs it for tracing; subclasses may extend.
virtual protected void eventStop()
{
    _state = VideoState.STOP;
    Debug.Log(getStamp() + videoPlayer.clip.name + " | eventStop", transform);
    Debug.Log("event stop");
}
/// <summary>
/// Reads playback state out of an AVI decoder in the target process. Returns
/// null when there is no video track, or no frame has been decoded yet.
/// </summary>
private VideoState? ReadAviDecoder(IntPtr decoderAddr)
{
    var videoTrackPtrVal = MemoryReader.ReadIntPtr(decoderAddr + videoDecNextVideoTrackOffset);
    if (videoTrackPtrVal == IntPtr.Zero)
    {
        return null;
    }

    var curFrame = MemoryReader.ReadInt32(videoTrackPtrVal + aviTrackCurFrameOffset);
    if (curFrame == -1)
    {
        return null;
    }

    var streamHeaderAddr = videoTrackPtrVal + aviTrackVidsHeaderOffset;

    var state = new VideoState();
    state.CurrentFrame = (uint)curFrame;
    state.FrameCount = MemoryReader.ReadUInt32(videoTrackPtrVal + aviTrackFrameCountOffset);

    var fileStreamPtrVal = MemoryReader.ReadIntPtr(decoderAddr + aviDecFileStreamOffset);
    state.FileName = ReadFileName(fileStreamPtrVal);

    // Frame rate comes from the stream header's rate/scale pair.
    state.FrameRate = new Rational
    {
        Numerator = (int)MemoryReader.ReadUInt32(streamHeaderAddr + aviStreamHeadRateOffset),
        Denominator = (int)MemoryReader.ReadUInt32(streamHeaderAddr + aviStreamHeadScaleOffset)
    };

    return state;
}
// Refreshes the observable clock/position properties and rebuilds the
// diagnostic overlay text handed to the video renderer, then updates the
// subtitle for the current video clock.
void updateObservableVariables()
{
    VideoClock = getVideoClock();
    AudioClock = audioPlayer.getAudioClock();
    // Master position: follow the audio clock when the stream has audio,
    // otherwise fall back to the video clock.
    PositionSeconds = videoDecoder.HasAudio == true ? AudioClock : VideoClock;

    // Assemble the multi-line debug overlay.
    StringBuilder builder = new StringBuilder();
    builder.AppendLine("State: " + VideoState.ToString());
    builder.AppendLine("Resolution: " + videoDecoder.Width + " x " + videoDecoder.Height + "@" + videoDecoder.FramesPerSecond.ToString("0.##"));
    // Queue occupancy, shown as "current/max" for free, video and audio packets.
    builder.Append("Free Packets (" + videoDecoder.FrameQueue.FreePacketQueueState.ToString() + ") ");
    builder.AppendLine(": " + videoDecoder.FrameQueue.NrFreePacketsInQueue + "/" + videoDecoder.FrameQueue.MaxFreePackets);
    builder.Append("Video Packets (" + videoDecoder.FrameQueue.VideoPacketQueueState.ToString() + ") ");
    builder.AppendLine(": " + videoDecoder.FrameQueue.NrVideoPacketsInQueue + "/" + videoDecoder.FrameQueue.MaxVideoPackets);
    builder.Append("Audio Packets (" + videoDecoder.FrameQueue.AudioPacketQueueState.ToString() + ") ");
    builder.AppendLine(": " + videoDecoder.FrameQueue.NrAudioPacketsInQueue + "/" + videoDecoder.FrameQueue.MaxAudioPackets);
    builder.AppendLine("Audio State: " + audioPlayer.Status.ToString());
    builder.AppendLine("Buffering: " + videoDecoder.FrameQueue.IsBuffering.ToString());
    builder.AppendLine("Packet Errors (V / A): " + videoDecoder.FrameQueue.NrVideoPacketReadErrors.ToString() + " / " + videoDecoder.FrameQueue.NrAudioPacketReadErrors.ToString());
    builder.AppendLine("Nr Frames Dropped: " + NrFramesDropped + " / " + NrFramesRendered);
    builder.AppendLine("Video Clock: " + VideoClock.ToString("#.####"));
    builder.AppendLine("Audio Clock: " + AudioClock.ToString("#.####"));
    builder.AppendLine("Frame Pts: " + framePts);
    builder.AppendLine("Keyframe: " + isKeyFrame.ToString());

    videoRender.InfoText = builder.ToString();
    // Subtitle lookup is keyed by the video clock, not the master position.
    videoRender.SubtitleItem = Subtitles.getSubtitle(VideoClock);
}
// Fired when the player reports that playback ended; shows the "next title"
// box only when the stream actually ran to its full duration.
// NOTE(review): Duration and CurrentTime are compared with == — if these are
// floating-point seconds rather than exact tick values, the check may never
// be true. Confirm the VideoState contract before relying on this.
public async void OnPlaybackEnded(VideoState obj)
{
    if (obj.Duration == obj.CurrentTime)
    {
        await DisplayNextTitleBox();
    }
}
// Applies a playback command (Play/Pause/Stop/SeekToBegining) to the local
// VideoPlayer instance; unknown commands are ignored.
void HandleStateCommand(VideoState result)
{
    switch (result)
    {
        case VideoState.Play:
            LoadVideo();
            break;

        case VideoState.Pause:
            // Only pause when something is actually playing.
            if (VideoPlayer?.IsPlaying == true)
            {
                VideoPlayer.Pause();
            }
            break;

        case VideoState.Stop:
            VideoPlayer?.Stop();
            VideoPlayer?.Reset();
            break;

        case VideoState.SeekToBegining:
            VideoPlayer?.Reset();
            break;

        default:
            break;
    }
}
/// <summary>
/// Updates the buttons on the UI thread based on the current state, then
/// records the state that was rendered.
/// </summary>
/// <param name="currentState">current UI state</param>
private void UpdateUI(VideoState currentState)
{
    Dispatcher.BeginInvoke(delegate
    {
        switch (currentState)
        {
            case VideoState.CameraNotSupported:
                // No usable camera: disable everything.
                btnStartRecording.IsEnabled = false;
                btnTakeVideo.IsEnabled = false;
                break;

            case VideoState.Initialized:
                btnStartRecording.Text = RecordingStartCaption;
                btnStartRecording.IconUri = new Uri(StartIconUri, UriKind.Relative);
                btnTakeVideo.IsEnabled = false;
                break;

            case VideoState.Ready:
                btnStartRecording.Text = RecordingStartCaption;
                btnStartRecording.IconUri = new Uri(StartIconUri, UriKind.Relative);
                btnTakeVideo.IsEnabled = true;
                break;

            case VideoState.Recording:
                btnStartRecording.Text = RecordingStopCaption;
                btnStartRecording.IconUri = new Uri(StopIconUri, UriKind.Relative);
                btnTakeVideo.IsEnabled = false;
                break;

            default:
                break;
        }

        currentVideoState = currentState;
    });
}
/// <summary>
/// Creates a new Video Player. Automatically creates the required Texture2D
/// on the specified GraphicsDevice.
/// </summary>
/// <param name="FileName">The video file to open</param>
/// <param name="graphicsDevice">XNA Graphics Device</param>
/// <exception cref="Exception">Wraps any failure while building the graph.</exception>
public VideoPlayer(string FileName, GraphicsDevice graphicsDevice)
{
    try
    {
        // Initial state and source file.
        currentState = VideoState.Stopped;
        filename = FileName;

        // Open DirectShow interfaces.
        InitInterfaces();

        // Create a SampleGrabber filter and add it to the FilterGraph.
        SampleGrabber sg = new SampleGrabber();
        ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
        DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));

        // Request RGB24 video samples from the grabber.
        AMMediaType mt = new AMMediaType();
        mt.majorType = MEDIATYPE_Video; // Video
        mt.subType = MEDIASUBTYPE_RGB24; // RGB24
        mt.formatType = FORMAT_VideoInfo; // VideoInfo
        DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

        // Construct the rest of the FilterGraph from the source file.
        DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));

        // Buffer samples and deliver every frame to our callback.
        DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
        DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
        DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

        // Hide the default video window.
        IVideoWindow pVideoWindow = (IVideoWindow)gb;
        DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

        // Read back the negotiated video format.
        AMMediaType MediaType = new AMMediaType();
        DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
        VideoInfoHeader pVideoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

        // Store video information.
        videoHeight = pVideoHeader.BmiHeader.Height;
        videoWidth = pVideoHeader.BmiHeader.Width;
        avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
        bitRate = pVideoHeader.BitRate;
        DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));

        // Frame buffers and the output texture.
        videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
        bgrData = new byte[(videoHeight * videoWidth) * 3]; // BGR24 format (3 bytes per pixel)
        outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);
    }
    catch (Exception e)
    {
        // BUG FIX: the message literal previously contained a raw line break
        // ("Unable to Load or <newline> Play the video file"), which is not a
        // valid C# string literal. Also preserve the root cause as the inner
        // exception, matching the other constructor overloads in this file.
        throw new Exception("Unable to Load or Play the video file", e);
    }
}
// Updates the cached state and raises OnVideoStateChanged for subscribers.
private void ChangeVideoState(VideoState state)
{
    _currentState = state;

    // Copy the delegate before invoking so a concurrent unsubscribe
    // between the null check and the call cannot cause a NullReferenceException.
    var handler = OnVideoStateChanged;
    if (handler != null)
    {
        handler(state);
    }
}
// Playback-started handler: remember the reported position, persist the
// playback record, restart the poll timer and reveal the title overlay.
public async void OnPlayerPlayed(VideoState obj)
{
    LastKnownPosition = obj.CurrentTime;
    await UpdatePlayback();
    Timer.Start();
    ShowTitleInCurrentPlayback = true;
}
/// <summary>
/// Pauses the video.
/// </summary>
public void Pause()
{
    // Stopping the FilterGraph keeps the current position, so Stop() here
    // acts as a pause; only our own state flag distinguishes the two.
    m_mediaControl.Stop();
    currentState = VideoState.Paused;
}
// Tears down the capture pipeline. Safe to call repeatedly: a null _capture
// means disposal already happened and the method returns immediately.
public void Dispose()
{
    if (_capture == null)
    {
        return;
    }

    _capture.Stop();
    _capture.ImageGrabbed -= CaptureOnImageGrabbed;
    _capture.Dispose();
    _capture = null;
    VideoState = VideoState.Unknown;
}
// Toolbar "stop" handler: halts the capture if it is running, then refreshes
// the toolbar/button states.
private void toolStripButtonStop_Click(object sender, EventArgs e)
{
    if (VideoState != VideoState.Stopped)
    {
        capture.Stop();
        VideoState = VideoState.Stopped;
    }

    UpdateUI();
}
// Per-frame driver: once the player is prepared and playing, advances the
// internal state machine (IDLE -> PLAY via eventPlay, resume from
// STOP/PAUSED via eventResume, leave SPAWN once the saved head frame is
// reached).
private void Update()
{
    checkCanvasVisibility();
    solveSkippable();
    if (videoPlayer.isPlaying && videoPlayer.isPrepared)
    {
        //return;
        switch (_state)
        {
            case VideoState.IDLE:
                // Wait until the player has actually started producing frames
                // before firing the play event.
                if (videoPlayer.isPlaying && videoPlayer.frame > 1)
                {
                    Debug.Log(getStamp() + videoPlayer.clip.name + " has started playing, frame count (" + videoPlayer.frame + ") is positive, calling eventPlay()");
                    eventPlay();
                }
                break;
            case VideoState.PLAY:
                headCatchup();
                checkForEndOfVideo();
                break;
            case VideoState.STOP:
                // Player resumed while we thought it was stopped.
                if (videoPlayer.isPlaying)
                {
                    Debug.Log(getStamp() + "resume from stop");
                    eventResume();
                }
                break;
            case VideoState.PAUSED:
                if (videoPlayer.isPlaying)
                {
                    Debug.Log(getStamp() + "resume from pause");
                    eventResume();
                }
                break;
            case VideoState.SPAWN:
                // Hold in SPAWN until playback catches up with the head frame.
                if (videoPlayer.frame >= frameHead)
                {
                    Debug.Log(getStamp() + "is now ready after spawning");
                    _state = VideoState.PLAY;
                }
                break;
        }
    }
}
// Reads the current frame, frame count and frame rate from a Smacker video
// track located in the target process's memory.
private VideoState ReadSmkVideoTrack(IntPtr videoTrackAddr)
{
    return new VideoState
    {
        CurrentFrame = MemoryReader.ReadUInt32(videoTrackAddr + smkTrackCurFrameOffset),
        FrameCount = (uint)MemoryReader.ReadInt32(videoTrackAddr + smkTrackFrameCountOffset),
        FrameRate = ReadRational(videoTrackAddr + smkTrackFrameRateOffset)
    };
}
/// <summary>
/// Returns true when the player is in a state that allows playback
/// (i.e. not in ERROR and not in NOT_READY).
/// </summary>
private bool canPlay()
{
    VideoState state = GetStateEnum();
    // BUG FIX: the original condition used || ("state != ERROR || state !=
    // NOT_READY"), which is true for EVERY state (no state equals both), so
    // playback was always allowed. The intended check requires both.
    return state != VideoState.ERROR && state != VideoState.NOT_READY;
}
/// <summary>
/// Starts playing the video. Calling it while already playing is a no-op.
/// </summary>
public void Play()
{
    if (currentState == VideoState.Playing)
    {
        return;
    }

    // Start the FilterGraph.
    m_mediaControl.Run();
    currentState = VideoState.Playing;
}
// Plays the clip mapped to the given state, but only the first time it is
// requested. Returns true once the clip has been played (now or previously).
public bool PlayVideo(VideoState videoState)
{
    int slot = (int)videoState;

    if (!_videoPlayedList[slot])
    {
        this.gameObject.SetActive(true);
        videoMaterial.material = _videoList[slot];
        StartPlayVideo();
        PlayAudio(videoState);
        _videoPlayedList[slot] = true; // mark as consumed
    }

    return _videoPlayedList[slot];
}
/// <summary>
/// Stops playing the video and rewinds the graph back to the start.
/// </summary>
public void Stop()
{
    // Stop the FilterGraph.
    m_mediaControl.Stop();

    // Reset the playback position to the beginning.
    m_mediaSeeking.SetPositions(new DsOptInt64(0), SeekingFlags.AbsolutePositioning, new DsOptInt64(0), SeekingFlags.NoPositioning);

    currentState = VideoState.Stopped;
}
/// <summary>
/// Pauses the video.
/// </summary>
public void Pause()
{
    // Signal the worker threads to wind down.
    StoppingEvent.Set();

    // Stopping the FilterGraph keeps the current position, so this acts as
    // a pause.
    DsError.ThrowExceptionForHR(FG_MediaControl.Stop());

    currentState = VideoState.Paused;
}
// Builds a platform video player around the XB1 wrapper and allocates a
// dynamic texture sized to the stream's reported frame dimensions.
public MyVideoPlayer(string filename) // : base(filename)
{
    m_wrapper = XB1Interface.XB1Interface.CreateVideoPlayer(filename);
    //m_texture = MyRwTextures.CreateDynamicTexture(VideoWidth, VideoHeight, VideoFormat);
    videoState = VideoState.Stopped;

    int width = 0, height = 0;
    m_wrapper.GetVideoFrameSize(ref width, ref height);
    m_texture = MyRwTextures.CreateDynamicTexture(width, height, VideoFormat);
}
// Update is called once per frame. While playing, flags the clip as
// Finished once the last frame has been reached.
void Update()
{
    if (videoState != VideoState.Playing)
    {
        return;
    }

    if (player.time > 0 && ((ulong)player.frame == player.frameCount))
    {
        // Debug.Log("Change to Idle");
        videoState = VideoState.Finished;
    }
}
// Puts the UI into the failure state: stops the buffering spinner, shows the
// failure message and disables the playback controls.
private void GoToFailState()
{
    ME_MainShow.LoadedBehavior = MediaState.Manual;

    // Stop the spinner and swap it for the failure text.
    this.RTF_Buffering.BeginAnimation(RotateTransform.AngleProperty, null);
    TBK_Fail.Visibility = Visibility.Visible;
    EP_Buffering.Visibility = Visibility.Collapsed;

    BTN_Pause.IsEnabled = false;
    BTN_Play.IsEnabled = false;
    BTN_Play.Visibility = Visibility.Visible;
    BTN_Pause.Visibility = Visibility.Collapsed;
    UC_ProgressBar.SL_Progress.IsEnabled = false;

    CurrentState = VideoState.Failed;
}
// Toggles between play and pause, marking the session as actively watching.
public void TogglePlay()
{
    videoState = VideoState.Watching;

    if (playing)
    {
        Pause();
    }
    else
    {
        Play();
    }
}
/// <summary>
/// Stops playing the video and rewinds the graph back to the beginning.
/// </summary>
public void Stop()
{
    // Signal the worker threads to wind down.
    StoppingEvent.Set();

    // Stop the FilterGraph.
    DsError.ThrowExceptionForHR(FG_MediaControl.Stop());

    // Reset the playback position to the start.
    DsError.ThrowExceptionForHR(FG_MediaSeeking.SetPositions(0, AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning));

    currentState = VideoState.Stopped;
}
// GET endpoint: resolves the streaming URL for the given asset via the Azure
// helper singleton; failures come back as a 500 carrying the error detail.
public HttpResponseMessage GetStreamUrl(string assetId)
{
    try
    {
        VideoState state = AzureHelper.AzureInstance.GetStreamUrl(assetId);
        return Request.CreateResponse(HttpStatusCode.OK, state);
    }
    catch (System.Exception e)
    {
        return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, e.Message, e);
    }
}
// Pause handler: persists the reported position, stops the poll timer,
// re-renders, and halts any running "next title" countdown.
public async void OnPlayerPaused(VideoState obj)
{
    ShowTitleInCurrentPlayback = true;
    LastKnownPosition = obj.CurrentTime;
    await UpdatePlayback();
    Timer.Stop();
    StateHasChanged();

    if (NextTitleBox != null && NextTitleBox.Display)
    {
        NextTitleBox.StopTimer();
    }
}
/// <summary>
/// Creates a new Video Player from a file, wiring a DirectShow SampleGrabber
/// into the filter graph so decoded RGB32 frames can be copied out via the
/// ISampleGrabberCB callback implemented by this class.
/// </summary>
/// <param name="FileName">The video file to open</param>
/// <exception cref="NotSupportedException">SampleGrabber COM class missing (old DirectX).</exception>
/// <exception cref="Exception">Any other failure, with the cause as InnerException.</exception>
protected VideoPlayer(string FileName)
{
    try
    {
        // Set video state
        currentState = VideoState.Stopped;
        // Store Filename
        filename = FileName;
        // Open DirectShow Interfaces
        InitInterfaces();
        // Create a SampleGrabber Filter and add it to the FilterGraph
        //SampleGrabber sg = new SampleGrabber();
        var comtype = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (comtype == null)
            throw new NotSupportedException("DirectX (8.1 or higher) not installed?");
        m_comObject = Activator.CreateInstance(comtype);
        ISampleGrabber sampleGrabber = (ISampleGrabber)m_comObject;
        m_graphBuilder.AddFilter((IBaseFilter)m_comObject, "Grabber");
        // Setup Media type info for the SampleGrabber
        AMMediaType mt = new AMMediaType();
        mt.majorType = MEDIATYPE_Video; // Video
        mt.subType = MEDIASUBTYPE_RGB32; // RGB32
        mt.formatType = FORMAT_VideoInfo; // VideoInfo
        sampleGrabber.SetMediaType(mt);
        // Construct the rest of the FilterGraph
        m_graphBuilder.RenderFile(filename, null);
        // Set SampleGrabber Properties
        sampleGrabber.SetBufferSamples(true);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
        // Hide Default Video Window
        IVideoWindow pVideoWindow = (IVideoWindow)m_graphBuilder;
        //pVideoWindow.put_AutoShow(OABool.False);
        pVideoWindow.put_AutoShow(0);
        // Create AMMediaType to capture video information
        AMMediaType MediaType = new AMMediaType();
        sampleGrabber.GetConnectedMediaType(MediaType);
        VideoInfoHeader pVideoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);
        // Store video information
        videoHeight = pVideoHeader.BmiHeader.Height;
        videoWidth = pVideoHeader.BmiHeader.Width;
        avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
        bitRate = pVideoHeader.BitRate;
        m_mediaSeeking.GetDuration(out videoDuration);
        // Create byte arrays to hold video data
        m_videoDataRgba = new MySwapQueue<byte[]>(() => new byte[(videoHeight * videoWidth) * 4]); // RGBA format (4 bytes per pixel)
    }
    catch (Exception e)
    {
        throw new Exception("Unable to Load or Play the video file", e);
    }
}
/// <summary>
/// Updates the Output Frame data using data from the video stream. Call this in Game.Update().
/// </summary>
public void Update()
{
    //using (MyRenderStats.Measure("VideoUpdate-CopyTexture", MyStatTypeEnum.Max))
    {
        // RefreshRead() is still invoked for its side effect, but its result
        // is deliberately ignored: after switching to fullscreen the flag
        // stopped being refreshed, so OnFrame must run every tick regardless.
        m_videoDataRgba.RefreshRead();
        OnFrame(m_videoDataRgba.Read);

        // Track the playback position and flag completion.
        m_mediaSeeking.GetCurrentPosition(out currentPosition);
        if (currentPosition >= videoDuration)
        {
            currentState = VideoState.Stopped;
        }
    }
}
/// <summary>
/// Stops playing the video: kills the worker threads, stops the FilterGraph
/// and rewinds the position to the start.
/// </summary>
// NOTE(review): Thread.Abort is unreliable (and unsupported on .NET Core+);
// a cooperative cancellation flag would be safer — confirm the target runtime
// before changing.
public void Stop()
{
    // End Threads (note: the "= null" assignments run unconditionally,
    // they are separate statements from the if bodies).
    if (updateThread != null) updateThread.Abort(); updateThread = null;
    if (waitThread != null) waitThread.Abort(); waitThread = null;
    // Stop the FilterGraph
    DsError.ThrowExceptionForHR(mc.Stop());
    // Reset the current position
    DsError.ThrowExceptionForHR(ms.SetPositions(0, AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning));
    // Update VideoState
    currentState = VideoState.Stopped;
}
// Mirrors player state changes into the view-model: stores the new state and
// enables/disables each command for the OPEN / PLAYING / PAUSED / CLOSED
// states.
void videoPlayer_StateChanged(object sender, VideoState newVideoState)
{
    VideoState = newVideoState;
    switch (videoState)
    {
        case VideoState.OPEN:
        {
            // Media loaded but not started: only play, close and markers apply.
            PlayCommand.IsExecutable = true;
            PauseCommand.IsExecutable = false;
            ScreenShotCommand.IsExecutable = false;
            CloseCommand.IsExecutable = true;
            SeekCommand.IsExecutable = false;
            StepForwardCommand.IsExecutable = false;
            StepBackwardCommand.IsExecutable = false;
            FrameByFrameCommand.IsExecutable = false;
            SetLeftMarkerCommand.IsExecutable = true;
            SetRightMarkerCommand.IsExecutable = true;
            break;
        }
        case VideoState.PLAYING:
        {
            // Everything except Play itself is available while playing.
            PlayCommand.IsExecutable = false;
            PauseCommand.IsExecutable = true;
            ScreenShotCommand.IsExecutable = true;
            CloseCommand.IsExecutable = true;
            SeekCommand.IsExecutable = true;
            StepForwardCommand.IsExecutable = true;
            StepBackwardCommand.IsExecutable = true;
            FrameByFrameCommand.IsExecutable = true;
            SetLeftMarkerCommand.IsExecutable = true;
            SetRightMarkerCommand.IsExecutable = true;
            break;
        }
        case VideoState.PAUSED:
        {
            // Same as PLAYING, but Play replaces Pause.
            PlayCommand.IsExecutable = true;
            PauseCommand.IsExecutable = false;
            ScreenShotCommand.IsExecutable = true;
            CloseCommand.IsExecutable = true;
            SeekCommand.IsExecutable = true;
            StepForwardCommand.IsExecutable = true;
            StepBackwardCommand.IsExecutable = true;
            FrameByFrameCommand.IsExecutable = true;
            SetLeftMarkerCommand.IsExecutable = true;
            SetRightMarkerCommand.IsExecutable = true;
            break;
        }
        case VideoState.CLOSED:
        {
            // Nothing loaded: only (re)opening via Play remains possible.
            PlayCommand.IsExecutable = true;
            PauseCommand.IsExecutable = false;
            ScreenShotCommand.IsExecutable = false;
            CloseCommand.IsExecutable = false;
            SeekCommand.IsExecutable = false;
            StepForwardCommand.IsExecutable = false;
            StepBackwardCommand.IsExecutable = false;
            FrameByFrameCommand.IsExecutable = false;
            IsTimeRangeEnabled = false;
            SetLeftMarkerCommand.IsExecutable = false;
            SetRightMarkerCommand.IsExecutable = false;
            break;
        }
    }
}
/// <summary>Marks the player as playing.</summary>
public void Play()
{
    videoState = VideoState.Playing;
}
/// <summary>
/// Pauses the video: kills the worker threads and stops the FilterGraph
/// (which keeps the current position).
/// </summary>
// NOTE(review): Thread.Abort is unreliable (and unsupported on .NET Core+);
// a cooperative cancellation flag would be safer — confirm the target runtime
// before changing.
public void Pause()
{
    // End threads (note: the "= null" assignments run unconditionally,
    // they are separate statements from the if bodies).
    if (updateThread != null) updateThread.Abort(); updateThread = null;
    if (waitThread != null) waitThread.Abort(); waitThread = null;
    // Stop the FilterGraph (but remembers the current position)
    m_mediaControl.Stop();
    // Update VideoState
    currentState = VideoState.Paused;
}
// Begins frame capture. A no-op when already running; throws when the
// pipeline has not been initialised into the Stopped state yet.
public void Start()
{
    if (VideoState == VideoState.Running)
    {
        return;
    }

    if (VideoState != VideoState.Stopped)
    {
        throw new InvalidOperationException("Cannot start video - not initialised");
    }

    // Schedule the first frame according to the configured spacing.
    _nextFrame = DateTime.Now.AddMilliseconds(_profile.FrameSpacingMs);
    _capture.Start();
    VideoState = VideoState.Running;
}
// Stub player variant: no decoder or texture is created here, only the
// playback state is initialised.
public MyVideoPlayer(string filename) // : base(filename)
{
    //m_texture = MyRwTextures.CreateDynamicTexture(VideoWidth, VideoHeight, VideoFormat);
    videoState = VideoState.Stopped;
}
// Halts frame capture. A no-op when already stopped; throws when the
// pipeline is in any state other than Running.
public void Stop()
{
    if (VideoState == VideoState.Stopped)
    {
        return;
    }

    if (VideoState != VideoState.Running)
    {
        throw new InvalidOperationException("Cannot stop video - not initialised");
    }

    _capture.Stop();
    VideoState = VideoState.Stopped;
}
// XB1 video player: creates the native wrapper and sizes the dynamic
// texture from the stream's reported frame dimensions.
public MyVideoPlayer(string filename) // : base(filename)
{
    m_wrapper = XB1Interface.XB1Interface.CreateVideoPlayer(filename);
    //m_texture = MyRwTextures.CreateDynamicTexture(VideoWidth, VideoHeight, VideoFormat);
    videoState = VideoState.Stopped;

    int frameW = 0, frameH = 0;
    m_wrapper.GetVideoFrameSize(ref frameW, ref frameH);
    m_texture = MyRwTextures.CreateDynamicTexture(frameW, frameH, VideoFormat);
}
// Looks up the pre-measured duration for the clip mapped to the given state.
public float GetVideoDuration(VideoState videoState)
{
    return _videoDurationList[(int)videoState];
}
// Plays the audio track associated with the clip mapped to the given state.
void PlayAudio(VideoState videoState)
{
    videoAudioSource.clip = videoAudioList[(int)videoState];
    videoAudioSource.Play();
}
/// <summary>
/// Stops playing the video: kills the worker threads, stops the FilterGraph
/// and rewinds the position to the start.
/// </summary>
// NOTE(review): Thread.Abort is unreliable (and unsupported on .NET Core+);
// a cooperative cancellation flag would be safer — confirm the target runtime
// before changing.
public void Stop()
{
    // End Threads (note: the "= null" assignments run unconditionally,
    // they are separate statements from the if bodies).
    if (updateThread != null) updateThread.Abort(); updateThread = null;
    if (waitThread != null) waitThread.Abort(); waitThread = null;
    // Stop the FilterGraph
    m_mediaControl.Stop();
    // Reset the current position
    m_mediaSeeking.SetPositions(new DsOptInt64(0), SeekingFlags.AbsolutePositioning, new DsOptInt64(0), SeekingFlags.NoPositioning);
    // Update VideoState
    currentState = VideoState.Stopped;
}
/// <summary>
/// Waits for the video to finish, then calls the OnVideoComplete event.
/// Runs on a dedicated background thread, polling the playback position.
/// </summary>
// NOTE(review): the bare catch swallows everything — presumably intended to
// absorb the ThreadAbortException raised when the player is stopped, but it
// also hides real failures; confirm before tightening.
private void WaitForCompletion()
{
    // Sleep roughly one frame per poll; avgTimePerFrame / 10000 converts
    // the DirectShow reference-time value (100 ns units — TODO confirm) to
    // milliseconds. Fall back to 20 ms when the frame time is unknown.
    int waitTime = avgTimePerFrame != 0 ? (int)((float)avgTimePerFrame / 10000) : 20;
    try
    {
        while (videoDuration > currentPosition)
        {
            Thread.Sleep(waitTime);
        }
        if (OnVideoComplete != null)
            OnVideoComplete.Invoke(this, EventArgs.Empty);
        currentState = VideoState.Stopped;
    }
    catch
    {
    }
}
// Opens the capture source described by the profile and hooks up the frame
// callback. Logs and rethrows on failure so callers still see the error.
private void Initialise()
{
    try
    {
        _capture = new Capture(_profile.FilePath);
        _capture.ImageGrabbed += CaptureOnImageGrabbed;
        VideoState = VideoState.Stopped;
    }
    catch (Exception ex)
    {
        Log.ErrorFormat("Unable to open file {0}", ex, _profile.FilePath);
        throw;
    }
}
/// <summary>
/// Creates a new Video Player. Automatically creates the required Texture2D
/// on the specified GraphicsDevice.
/// </summary>
/// <param name="FileName">The video file to open</param>
/// <param name="graphicsDevice">XNA Graphics Device</param>
// NOTE(review): the bare catch below discards the original exception, so the
// real failure cause is lost — consider passing it as InnerException like the
// sibling constructor in this file does.
public VideoPlayer(string FileName, GraphicsDevice graphicsDevice)
{
    try
    {
        // Set video state
        currentState = VideoState.Stopped;
        // Store Filename
        filename = FileName;
        // Open DirectShow Interfaces
        InitInterfaces();
        // Create a SampleGrabber Filter and add it to the FilterGraph
        SampleGrabber sg = new SampleGrabber();
        ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
        DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));
        // Setup Media type info for the SampleGrabber
        AMMediaType mt = new AMMediaType();
        //mt.majorType = MediaType.Video; // Video
        mt.majorType = MEDIATYPE_Video;
        mt.subType = MediaSubType.RGB24; // RGB24
        mt.formatType = FormatType.VideoInfo; // VideoInfo
        DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));
        // Construct the rest of the FilterGraph
        DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));
        // Set SampleGrabber Properties
        DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
        DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
        DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));
        // Hide Default Video Window
        IVideoWindow pVideoWindow = (IVideoWindow)gb;
        DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));
        // Create AMMediaType to capture video information
        AMMediaType MediaType = new AMMediaType();
        DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
        VideoInfoHeader pVideoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);
        // Store video information
        videoHeight = pVideoHeader.BmiHeader.Height;
        videoWidth = pVideoHeader.BmiHeader.Width;
        avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
        bitRate = pVideoHeader.BitRate;
        DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));
        // Create byte arrays to hold video data
        videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
        bgrData = new byte[(videoHeight * videoWidth) * 3]; // BGR24 format (3 bytes per pixel)
        // Create Output Frame Texture2D with the height and width of the video
        outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, false, SurfaceFormat.Color);
    }
    catch
    {
        throw new Exception("Unable to Load or Play the video file");
    }
}
/// <summary>
/// Pauses the video: kills the worker threads and stops the FilterGraph
/// (which keeps the current position).
/// </summary>
// NOTE(review): Thread.Abort is unreliable (and unsupported on .NET Core+);
// a cooperative cancellation flag would be safer — confirm the target runtime
// before changing.
public void Pause()
{
    // End threads (note: the "= null" assignments run unconditionally,
    // they are separate statements from the if bodies).
    if (updateThread != null) updateThread.Abort(); updateThread = null;
    if (waitThread != null) waitThread.Abort(); waitThread = null;
    // Stop the FilterGraph (but remembers the current position)
    DsError.ThrowExceptionForHR(mc.Stop());
    // Update VideoState
    currentState = VideoState.Paused;
}
/// <summary>
/// Starts playing the video. Calling it while already playing is a no-op.
/// </summary>
public void Play()
{
    if (currentState == VideoState.Playing)
    {
        return;
    }

    // Create the frame-copy and completion-watch threads.
    updateThread = new Thread(new ThreadStart(UpdateBuffer));
    waitThread = new Thread(new ThreadStart(WaitForCompletion));

    // Start the FilterGraph.
    DsError.ThrowExceptionForHR(mc.Run());

    // Start the worker threads.
    updateThread.Start();
    waitThread.Start();

    currentState = VideoState.Playing;
}
/// <summary>Marks the player as stopped.</summary>
public void Stop()
{
    videoState = VideoState.Stopped;
}
/// <summary>
/// Starts playing the video. Calling it while already playing is a no-op.
/// </summary>
public void Play()
{
    if (currentState == VideoState.Playing)
    {
        return;
    }

    // Create background threads for frame copying and completion watching.
    updateThread = new Thread(new ThreadStart(UpdateBuffer));
    updateThread.IsBackground = true;
    waitThread = new Thread(new ThreadStart(WaitForCompletion));
    waitThread.IsBackground = true;

    // Start the FilterGraph.
    m_mediaControl.Run();

    // Start the worker threads.
    updateThread.Start();
    waitThread.Start();

    currentState = VideoState.Playing;
}