/// <summary>
/// Stores the IMediaSeeking interface and initializes position-format state.
/// Passing null clears the current format and resets the duration.
/// </summary>
/// <param name="mediaSeeking">Seeking interface of the graph, or null.</param>
protected void SetMediaSeekingInterface(IMediaSeeking mediaSeeking)
{
    m_mediaSeeking = mediaSeeking;

    if (mediaSeeking == null)
    {
        /* No seeking interface available: clear format and duration */
        CurrentPositionFormat = MediaPositionFormat.None;
        Duration = 0;
        return;
    }

    /* Translate our preferred format into its DirectShow GUID
     * and ask the graph to switch to it */
    Guid requestedFormat = ConvertPositionFormat(PreferedPositionFormat);
    mediaSeeking.SetTimeFormat(requestedFormat);

    /* The graph may have rejected the request, so read back
     * whichever format is actually in effect now */
    Guid activeFormat;
    mediaSeeking.GetTimeFormat(out activeFormat);
    CurrentPositionFormat = ConvertPositionFormat(activeFormat);

    SetDuration();
}
/// <summary>
/// Builds the playback filter graph for the given media file and caches the
/// control, seeking and audio interfaces. On any failure the constructor
/// returns early and the instance remains invalid (isValidate stays false);
/// a failing render HRESULT is recorded on errorStack.
/// </summary>
/// <param name="file">Path of the media file to render.</param>
public PlayerCore(string file)
{
    graphBuilder = (new FilterGraph()) as IFilterGraph2;
    if (graphBuilder == null)
    {
        return;
    }

    mediaControl = graphBuilder as IMediaControl;
    mediaSeeking = graphBuilder as IMediaSeeking;
    audioControl = graphBuilder as IBasicAudio;
    if (mediaControl == null || mediaSeeking == null || audioControl == null)
    {
        return;
    }

    // Prefer a registered ISupport handler for this file extension;
    // otherwise fall back to the default RenderFile path.
    FileInfo fileInfo = new FileInfo(file);
    ISupport handler = Supports.Instance[fileInfo.Extension];
    int result = (handler != null)
        ? handler.RenderGraph(graphBuilder, file)
        : mediaControl.RenderFile(file);

    fileName = file;

    if (result != 0)
    {
        // Record the failing HRESULT and leave the player invalid.
        errorStack.Push(result);
        return;
    }

    mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);
    isValidate = true;

    // Capture the native video size when the graph exposes a video window.
    window = graphBuilder as IVideoWindow;
    if (window != null)
    {
        int w = 0;
        int h = 0;
        window.get_Width(out w);
        window.get_Height(out h);
        nativeSize = new Size(w, h);
    }
}
/// <summary>
/// Exercises the IMediaSeeking time-format methods: queries the preferred
/// format, confirms it is supported, reads the current format, verifies it
/// is in use, and sets it back to itself.
/// </summary>
void TestFormats()
{
    Guid format;

    // Which time format does the graph prefer?
    int hr = m_ims.QueryPreferredFormat(out format);
    Marshal.ThrowExceptionForHR(hr);

    // The preferred format should certainly be supported.
    hr = m_ims.IsFormatSupported(format);
    Marshal.ThrowExceptionForHR(hr);
    // May legitimately return an S_ success code other than S_OK.
    Debug.Assert(hr == 0);

    // Read the format currently in effect...
    hr = m_ims.GetTimeFormat(out format);
    Marshal.ThrowExceptionForHR(hr);

    // ...and check the graph agrees that format is in use.
    hr = m_ims.IsUsingTimeFormat(format);
    Marshal.ThrowExceptionForHR(hr);

    // Setting the format back to the current value is the only set
    // operation we can be sure is supported.
    hr = m_ims.SetTimeFormat(format);
    Marshal.ThrowExceptionForHR(hr);
}
/// <summary>
/// Plays a specific shot by seeking the filter graph to a frame range.
/// </summary>
/// <param name="start">First frame of the shot (absolute frame number).</param>
/// <param name="stop">Stop frame of the shot (absolute frame number).</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the graph does not support seeking. (The original code would
/// have thrown an opaque NullReferenceException here instead.)
/// </exception>
public void PlayShot(int start, int stop)
{
    long l_start = (long)start;
    long l_stop = (long)stop;

    IMediaSeeking i_media_seeking = m_FilterGraph as IMediaSeeking;
    if (i_media_seeking == null)
    {
        // FIX: the original dereferenced the 'as' cast without a null check.
        throw new InvalidOperationException("Filter graph does not expose IMediaSeeking.");
    }

    // Switch the graph to frame-based positioning so start/stop are frame numbers.
    i_media_seeking.SetTimeFormat(TimeFormat.Frame);
    i_media_seeking.SetPositions(l_start,
        DirectShowLib.AMSeekingSeekingFlags.AbsolutePositioning,
        l_stop,
        DirectShowLib.AMSeekingSeekingFlags.AbsolutePositioning);
}
/// <summary>
/// Resets playback range: start position becomes 0 and the end position the
/// last frame. Needed after PlayShot has narrowed the playback window.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the graph does not support seeking. (The original code would
/// have thrown an opaque NullReferenceException here instead.)
/// </exception>
public void Reset()
{
    IMediaSeeking i_media_seeking = m_FilterGraph as IMediaSeeking;
    if (i_media_seeking == null)
    {
        // FIX: the original dereferenced the 'as' cast without a null check.
        throw new InvalidOperationException("Filter graph does not expose IMediaSeeking.");
    }

    i_media_seeking.SetTimeFormat(TimeFormat.Frame);

    // FIX: dropped the unused local that captured GetDuration's HRESULT;
    // if GetDuration fails, endFrame keeps its initial value of 0.
    long endFrame = 0;
    i_media_seeking.GetDuration(out endFrame);

    i_media_seeking.SetPositions(0,
        DirectShowLib.AMSeekingSeekingFlags.AbsolutePositioning,
        endFrame,
        DirectShowLib.AMSeekingSeekingFlags.AbsolutePositioning);
}
/// <summary>
/// Renders the temp file into a freshly built DirectShow filter graph,
/// reads the graph's seeking capabilities, switches seeking to MediaTime
/// units and routes graph events to this window's message loop.
/// </summary>
private void CreateGraph()
{
    var graph = (IGraphBuilder)new FilterGraph();
    graphBuilder = graph;
    graph.RenderFile(tempFileName, null);

    // The single graph object implements all three control interfaces.
    mediaControl = (IMediaControl)graph;
    mediaSeeking = (IMediaSeeking)graph;
    mediaEvent = (IMediaEventEx)graph;

    mediaSeeking.GetCapabilities(out seekingCapabilities);
    mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);
    mediaEvent.SetNotifyWindow(Handle, WM_GRAPHNOTIFY, IntPtr.Zero);
}
/// <summary>
/// Queries the current video source for its capabilities regarding seeking
/// and time info. The graph should be fully constructed for accurate
/// information. Each capability is verified both by its advertised flag and
/// by actually attempting the operation.
/// </summary>
protected void QuerySeekingCapabilities()
{
    try
    {
        _mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);

        // Get capabilities from the graph and see what it supports that interests us.
        AMSeekingSeekingCapabilities caps;
        int r = _mediaSeeking.GetCapabilities(out caps);
        long lTest = 0;
        double dblTest = 0;
        if (r != 0)
        {
            _seek_canGetCurrentPos = false;
            _seek_canSeek = false;
            _seek_canGetDuration = false;
        }
        else
        {
            // We could read the capabilities: determine whether each one works,
            // both by checking the advertisement and by actually trying it out.
            _seek_canSeek = ((caps & AMSeekingSeekingCapabilities.CanSeekAbsolute) == AMSeekingSeekingCapabilities.CanSeekAbsolute)
                && (_mediaSeeking.SetPositions(0, AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning) == 0);
            _seek_canGetDuration = ((caps & AMSeekingSeekingCapabilities.CanGetDuration) == AMSeekingSeekingCapabilities.CanGetDuration)
                && (_mediaSeeking.GetDuration(out lTest) == 0);
            _seek_canGetCurrentPos = ((caps & AMSeekingSeekingCapabilities.CanGetCurrentPos) == AMSeekingSeekingCapabilities.CanGetCurrentPos)
                && (_mediaSeeking.GetCurrentPosition(out lTest) == 0);
        }

        // Check capabilities for the IMediaPosition interface.
        _pos_canSeek = (_mediaPosition.put_CurrentPosition(0) == 0);
        _pos_canGetDuration = (_mediaPosition.get_Duration(out dblTest) == 0);
        _pos_canGetCurrentPos = (_mediaPosition.get_CurrentPosition(out dblTest) == 0);
    }
    catch (Exception)
    {
        // FIX: clear every capability flag on failure. The original reset only
        // _seek_canSeek and _pos_canSeek, which could leave the other four
        // flags stale if the exception occurred after they had been set.
        _seek_canSeek = false;
        _seek_canGetCurrentPos = false;
        _seek_canGetDuration = false;
        _pos_canSeek = false;
        _pos_canGetCurrentPos = false;
        _pos_canGetDuration = false;
    }
}
/// <summary>
/// Lazily builds the DirectShow playback graph for <c>file</c>: renders the
/// file, caches the control/event/seeking/frame-step interfaces, hosts the
/// video window inside this control, and seeds the duration UI. No-op when
/// the graph already exists.
/// </summary>
private void open()
{
    int hr;
    if (this.GraphBuilder == null)
    {
        this.GraphBuilder = (IGraphBuilder) new FilterGraph();
        hr = GraphBuilder.RenderFile(file, null); // render the file
        DsError.ThrowExceptionForHR(hr);

        // Query the graph for the interfaces we drive playback with.
        this.MediaControl = (IMediaControl)this.GraphBuilder;
        this.MediaEventEx = (IMediaEventEx)this.GraphBuilder;
        MediaSeeking = (IMediaSeeking)this.GraphBuilder;
        // Seek positions/durations are expressed in frames from here on.
        MediaSeeking.SetTimeFormat(TIME_FORMAT_FRAME);
        // NOTE(review): playback rate is forced to 0.3x here — confirm this
        // slow-motion default is intentional.
        MediaSeeking.SetRate(0.3);
        this.VideoFrameStep = (IVideoFrameStep)this.GraphBuilder;
        // MediaPosition= (IMediaPosition)this.GraphBuilder;
        this.VideoWindow = this.GraphBuilder as IVideoWindow;
        this.BasicVideo = this.GraphBuilder as IBasicVideo;
        this.BasicAudio = this.GraphBuilder as IBasicAudio;

        // Route graph events to this window's message loop.
        hr = this.MediaEventEx.SetNotifyWindow(this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero);
        DsError.ThrowExceptionForHR(hr);

        // Host the video window inside this control.
        hr = this.VideoWindow.put_Owner(this.Handle);
        DsError.ThrowExceptionForHR(hr);
        hr = this.VideoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren);
        DsError.ThrowExceptionForHR(hr);
        this.Focus();
        hr = InitVideoWindow(1, 1);
        DsError.ThrowExceptionForHR(hr);

        // Duration is in frame units (see SetTimeFormat above); seed the UI.
        long time;
        MediaSeeking.GetDuration(out time);
        label20.Text = time.ToString();
        trackBar1.SetRange(0, (int)time);
        // NOTE(review): this thread is created but never Start()ed here —
        // presumably started elsewhere; verify.
        t = new Thread(new ThreadStart(updateTimeBarThread));
    }
}
/// <summary>
/// Builds the DirectShow graph using LAV Filters:
/// LAV Splitter source -> LAV decoder -> Color Space Converter ->
/// Sample Grabber -> Null Renderer. The reference clock is removed so the
/// graph runs as fast as possible, then the seeking/control/event interfaces
/// are cached and seeking is switched to MediaTime units. On any failure the
/// graph resources are freed.
/// </summary>
void SetupLAVGraph()
{
    //System.IO.FileStream fs = null;
    try
    {
        try
        {
            /* Creates the GraphBuilder COM object */
            _graph = new FilterGraphNoThread() as IGraphBuilder;
            if (_graph == null)
            {
                throw new Exception("Could not create a graph");
            }

            //if (_graphLogLocation != null)
            //{
            //    fs = System.IO.File.Create(_graphLogLocation);
            //    int r = _graph.SetLogFile(fs.Handle);
            //}

            //
            // Creating FileSource filter
            //
            IBaseFilter sourceFilter = GraphHelper.CreateLAVSplitterSourceFilter(_graph, _sourceFilePath, out IPin parserVideoOutputPin, out IPin parserAudioOutputPin);

            //
            // Creating renderer (Null Renderer: frames are consumed, not displayed)
            //
            Type videoRendererFilterType = null;
            IBaseFilter videoRenderer = null;
            GraphHelper.CreateFilter(NULL_RENDERER, "Null Renderer", ref videoRendererFilterType, ref videoRenderer);
            int hr = _graph.AddFilter(videoRenderer, "Null Renderer");
            DsError.ThrowExceptionForHR(hr);
            IPin videoRendererInputPin = DsFindPin.ByDirection(videoRenderer, PinDirection.Input, 0);

            //
            // Creating Video filter
            //

            // Hook up the sample grabber
            var sampleGrabberFilter = (IBaseFilter)_sampleGrabber;
            hr = _graph.AddFilter(sampleGrabberFilter, "Sample Grabber");
            DsError.ThrowExceptionForHR(hr);
            IPin sampleGrabberInputPin = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Input, 0);
            IPin sampleGrabberOuputPin = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Output, 0);

            // Insert the Color Space Converter ahead of the grabber
            Type filterType = null;
            IBaseFilter colorSpaceConverter = null;
            GraphHelper.CreateFilter(GraphHelper.CLSID_COLOR_SPACE_CONVERTER, GraphHelper.COLOR_SPACE_CONVERTER_FRIENDLYNAME, ref filterType, ref colorSpaceConverter);
            hr = _graph.AddFilter(colorSpaceConverter, GraphHelper.COLOR_SPACE_CONVERTER_FRIENDLYNAME);
            DsError.ThrowExceptionForHR(hr);
            IPin colorSpaceConverterInputPin = DsFindPin.ByDirection(colorSpaceConverter, PinDirection.Input, 0);

            // Splitter video pin -> LAV decoder -> color space converter.
            GraphHelper.ConnectLAVSplitterAndRendererWithLAVDecoder(_graph, parserVideoOutputPin, colorSpaceConverterInputPin);
            IPin colorSpaceConverterOutputPin = DsFindPin.ByDirection(colorSpaceConverter, PinDirection.Output, 0);
            hr = _graph.ConnectDirect(colorSpaceConverterOutputPin, sampleGrabberInputPin, null);
            DsError.ThrowExceptionForHR(hr);
            hr = _graph.Connect(sampleGrabberOuputPin, videoRendererInputPin);
            DsError.ThrowExceptionForHR(hr);

            // Removes the clock to run the graph as fast as possible
            ((IMediaFilter)_graph).SetSyncSource(null);

            // Pins are no longer needed once connected; release the COM references.
            GraphHelper.SafeRelease(parserAudioOutputPin);
            GraphHelper.SafeRelease(parserVideoOutputPin);
            GraphHelper.SafeRelease(videoRendererInputPin);
            GraphHelper.SafeRelease(sampleGrabberInputPin);
            GraphHelper.SafeRelease(sampleGrabberOuputPin);
            GraphHelper.SafeRelease(colorSpaceConverterInputPin);
            GraphHelper.SafeRelease(colorSpaceConverterOutputPin);

            _mediaSeeking = _graph as IMediaSeeking;
            _mediaControl = _graph as IMediaControl;
            _mediaEvent = _graph as IMediaEventEx;

            /* Attempt to set the time format */
            hr = _mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);
            DsError.ThrowExceptionForHR(hr);
        }
        catch (Exception ex)
        {
            this.TraceError(ex, ex.Message);
            /* This exception usually happens when the media does not exist
             * or could not be opened because the proper filters are not
             * installed */
            FreeResources();
        }
    }
    finally
    {
        //if (_graphLogLocation != null && fs != null)
        //    fs.Close();
    }
}
/// <summary>
/// Tears down any previous DirectShow playback state, then builds a new
/// graph for the given video file (with an explicit AVI splitter), hosts
/// the video window inside videoPanel, switches seeking to frame units and
/// initializes the rate/volume UI controls.
/// </summary>
/// <param name="videoPath">Path of the video file to load.</param>
private void loadVideo(String videoPath)
{
    videoFilepath = videoPath;
    videoFileName.Text = getDisplayVideoName();

    // Release the previous graph's interfaces before building a new one.
    if (graph != null)
    {
        graph = null;
    }
    if (mediaControl != null)
    {
        // Stop media playback
        this.mediaControl.Stop();
        mediaControl = null;
    }
    if (videoWindow != null)
    {
        // Detach the video window from our panel before dropping it.
        videoWindow.put_Owner(IntPtr.Zero);
        videoWindow = null;
    }
    if (mediaSeeking != null)
    {
        mediaSeeking = null;
    }
    if (basicAudio != null)
    {
        basicAudio = null;
    }
    // Encourage release of the dropped COM wrappers.
    GC.Collect();
    /* if (mediaPosition != null) { mediaPosition = null; }*/

    // Build the new graph; the one graph object implements every interface.
    graph = (IGraphBuilder)new FilterGraph();
    mediaControl = (IMediaControl)graph;
    //mediaPosition = (IMediaPosition)graph;
    videoWindow = (IVideoWindow)graph;
    mediaSeeking = (IMediaSeeking)graph;
    basicAudio = (IBasicAudio)graph;

    // Add an AVI splitter explicitly, then let RenderFile complete the graph.
    AviSplitter spliter = new AviSplitter();
    graph.AddFilter((IBaseFilter)spliter, null);
    graph.RenderFile(videoPath, null);
    graph.SetDefaultSyncSource();
    /*
    AMSeekingSeekingCapabilities cap = AMSeekingSeekingCapabilities.CanGetCurrentPos;
    if (mediaSeeking.CheckCapabilities(ref cap) > 0)
    {
        this.consoleErreur.AppendText("Impossible de recuperer la position de la frame");
    }
    */

    // Host the video window inside videoPanel, filling it completely.
    videoWindow.put_Owner(videoPanel.Handle);
    videoWindow.put_MessageDrain(videoPanel.Handle);
    videoWindow.put_WindowStyle(WindowStyle.Child);
    videoWindow.put_WindowStyleEx(WindowStyleEx.ControlParent);
    videoWindow.put_Left(0);
    videoWindow.put_Top(0);
    videoWindow.put_Width(videoPanel.Width);
    videoWindow.put_Height(videoPanel.Height);

    //positionTrackbar.Enabled = true;
    speedTrackBar.Enabled = true;

    // Seek in frame units; mirror the current playback rate into the UI.
    mediaSeeking.SetTimeFormat(TimeFormat.Frame);
    double rate;
    mediaSeeking.GetRate(out rate);
    rateText.Text = rate.ToString();
    speedTrackBar.Value = (int)(speedTrackBar.Maximum * rate / 2);

    // Center the volume slider and apply the corresponding attenuation
    // (range -5000..0 in hundredths of a decibel).
    trackBar1.Value = trackBar1.Maximum / 2;
    this.basicAudio.put_Volume(-5000 + 5000 * trackBar1.Value / trackBar1.Maximum);
    //mediaPosition.put_Rate(0.5);

    running = false;
    frameChanged = false;
}
/// <summary>
/// Builds the DirectShow playback graph by rendering the temp file, then
/// caches the control/seeking/event interfaces, records the seeking
/// capabilities, selects MediaTime seeking and registers this window for
/// graph event notifications.
/// </summary>
private void CreateGraph()
{
    graphBuilder = (IGraphBuilder)new FilterGraph();
    graphBuilder.RenderFile(tempFileName, null);

    // All control interfaces live on the same underlying graph object.
    mediaControl = (IMediaControl)graphBuilder;
    mediaSeeking = (IMediaSeeking)graphBuilder;
    mediaEvent = (IMediaEventEx)graphBuilder;

    mediaSeeking.GetCapabilities(out seekingCapabilities);
    mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);

    // Deliver graph events as WM_GRAPHNOTIFY messages to this window.
    mediaEvent.SetNotifyWindow(Handle, WM_GRAPHNOTIFY, IntPtr.Zero);
}
/// <summary>
/// Opens a video file for preview through DirectShow: builds the render
/// graph, reads resolution and frame rate, sizes the video window by the
/// configured zoom factor, switches seeking to frame units, and — for AVI
/// files — scans the stream through VFW to collect keyframe indices.
/// Spanish strings are user-facing UI text and are left untouched.
/// </summary>
/// <param name="fileName">Path of the video file to open.</param>
public void openVid(string fileName)
{
    // Bail out early when the file is missing; reset preview state.
    if (!File.Exists(fileName))
    {
        errorMsg("El archivo '" + fileName + "' no existe.");
        videoPanel.Visible = false;
        isVideoLoaded = false;
        drawPositions();
        return;
    }

    // Leaving AviSynth preview mode: drop the clip and hide its picture box.
    if (VideoBoxType == PreviewType.AviSynth)
    {
        avsClip = null;
        //butPause.Enabled = true;
        //butPlayR.Enabled = true;
        //butPlay.Enabled = true;
        //butStop.Enabled = true;
        videoPictureBox.Visible = false;
        //closeVidDShow(); // added
    }

    // Tear down any previous DirectShow playback before building a new graph.
    if (mediaControl != null)
    {
        mediaControl.Stop();
        videoWindow.put_Visible(DirectShowLib.OABool.False);
        videoWindow.put_Owner(IntPtr.Zero);
    }

    // Build the DirectShow graph and cache the interfaces we need.
    graphBuilder = (IGraphBuilder)new FilterGraph();
    graphBuilder.RenderFile(fileName, null);
    mediaControl = (IMediaControl)graphBuilder;
    // mediaEventEx = (IMediaEventEx)this.graphBuilder;
    mediaSeeking = (IMediaSeeking)graphBuilder;
    mediaPosition = (IMediaPosition)graphBuilder;
    basicVideo = graphBuilder as IBasicVideo;
    videoWindow = graphBuilder as IVideoWindow;
    VideoBoxType = PreviewType.DirectShow;

    // Extract stream information (resolution, average time per frame).
    int x, y;
    double atpf;
    basicVideo.GetVideoSize(out x, out y);
    if (x == 0 || y == 0)
    {
        errorMsg("No se puede abrir un vídeo sin dimensiones.");
        videoPanel.Visible = false;
        isVideoLoaded = false;
        drawPositions();
        return;
    }
    if (videoInfo == null) videoInfo = new VideoInfo(fileName);
    videoInfo.Resolution = new Size(x, y);
    basicVideo.get_AvgTimePerFrame(out atpf);
    // Frame rate = 1 / (seconds per frame), rounded to 3 decimals.
    videoInfo.FrameRate = Math.Round(1 / atpf, 3);
    //labelResFPS.Text = x.ToString() + "x" + y.ToString() + " @ " + videoInfo.FrameRate.ToString() + " fps";
    textResX.Text = x.ToString();
    textResY.Text = y.ToString();
    textFPS.Text = videoInfo.FrameRate.ToString();

    // Populate the details tree only when the MediaInfo DLLs are present.
    if (File.Exists(Application.StartupPath+"\\MediaInfo.dll") && File.Exists(Application.StartupPath+"\\MediaInfoWrapper.dll"))
    {
        treeView1.Enabled = true;
        try { RetrieveMediaFileInfo(fileName); }
        catch { treeView1.Enabled = false; }
    }
    else treeView1.Enabled = false;

    if (x != 0)
    {
        // Scale the preview by the zoom factor stored in config (e.g. "50%");
        // fall back to 50% when the config value is missing or malformed.
        vidScaleFactor.Enabled = true;
        try { vidScaleFactor.Text = getFromConfigFile("mainW_Zoom"); }
        catch { vidScaleFactor.Text = "50%"; };
        double p = double.Parse(vidScaleFactor.Text.Substring(0, vidScaleFactor.Text.IndexOf('%')));
        p = p / 100;
        int new_x = (int)(x * p);
        int new_y = (int)(y * p);
        // NOTE(review): put_Height receives the scaled width and put_Width the
        // scaled height — these look swapped, though SetWindowPosition below
        // resizes the window again anyway. Confirm before changing.
        videoWindow.put_Height(new_x);
        videoWindow.put_Width(new_y);
        videoWindow.put_Owner(videoPanel.Handle);
        videoPanel.Size = new System.Drawing.Size(new_x, new_y);
        videoWindow.SetWindowPosition(0, 0, videoPanel.Width, videoPanel.Height);
        videoWindow.put_WindowStyle(WindowStyle.Child);
        videoWindow.put_Visible(DirectShowLib.OABool.True);
    }
    else vidScaleFactor.Enabled = false;

    // Start the UI refresh timer.
    actualizaFrames.Interval = 10;
    actualizaFrames.Enabled = true;
    //mediaControl.Run();
    drawPositions();
    framesFin.Enabled = true;
    buttonAddFrameInicio.Enabled = buttonAddFrameInicio.Visible = true;
    framesInicio.Enabled = true;
    buttonAddFrameFin.Enabled = buttonAddFrameFin.Visible = true;
    butClip.Enabled = false;

    // Seek in frame units from here on; compute the total frame count.
    mediaSeeking.SetTimeFormat(DirectShowLib.TimeFormat.Frame);
    videoInfo.FrameTotal = VideoUnitConversion.getTotal(mediaSeeking, videoInfo.FrameRate);
    seekBar.Maximum = FrameTotal;
    seekBar.TickFrequency = seekBar.Maximum / 10;

    // VFW keyframe scan ( __ AVI files ONLY __ ).
    // 1935960438 is the FOURCC 'vids' stream type; 0x20 = OF_SHARE_DENY_WRITE.
    try
    {
        AVIFileWrapper.AVIFileInit();
        int aviFile = 0;
        IntPtr aviStream;
        int res = AVIFileWrapper.AVIFileOpen(ref aviFile, fileName, 0x20, 0);
        res = AVIFileWrapper.AVIFileGetStream(aviFile, out aviStream, 1935960438, 0);
        videoInfo.KeyFrames = new ArrayList();
        int nFrames = FrameTotal;
        for (int i = 0; i < nFrames; i++)
        {
            if (isKeyFrame(aviStream.ToInt32(), i)) videoInfo.KeyFrames.Add(i);
        }
        setStatus(videoInfo.KeyFrames.Count + " detectados");
        AVIFileWrapper.AVIStreamRelease(aviStream);
        AVIFileWrapper.AVIFileRelease(aviFile);
        AVIFileWrapper.AVIFileExit();
        KeyframesAvailable = true;
        /*
        nextK.Enabled = true;
        prevK.Enabled = true;
        drawKeyFrameBox();
        */
    }
    catch
    {
        // Not an AVI (or VFW failed): no keyframe data available.
        /*
        nextK.Enabled = false;
        prevK.Enabled = false;
        keyFrameBox.Visible = false;
        */
        KeyframesAvailable = false;
    }

    // Nudge the subtitle grid selection so selection-changed handlers fire.
    if (openFile != null && al.Count > 1 && gridASS.RowCount>0)
    {
        gridASS.Rows[1].Selected = true;
        gridASS.Rows[0].Selected = true;
        gridASS.Rows[1].Selected = false;
    }

    // Precompute the frame-number -> time lookup for every frame.
    frameTime = new Hashtable();
    for (int i = 0; i < FrameTotal; i++)
        frameTime.Add(i, new Tiempo((double)((double)i / videoInfo.FrameRate)));

    isVideoLoaded = true;
    updateMenuEnables();
    mediaControl.Pause();
    setStatus("Vídeo " + fileName + " cargado. [DirectShow]");
    script.GetHeader().SetHeaderValue("Video File", fileName);
    if (isKeyframeGuessNeeded()) KeyframeGuess(false);
    //FrameIndex = 0;
}
/// <summary>
/// Form-load handler for the translation assistant: fills the grid from the
/// script lines, builds the DirectShow preview graph, wires up all UI event
/// handlers, restores settings from the config file, and optionally shows
/// the translation-style popup. Spanish strings are user-facing UI text and
/// are left untouched.
/// </summary>
private void translateW_Load(object sender, EventArgs e)
{
    //this.MaximumSize = this.Size;
    //this.MinimumSize = this.Size;
    toolStripStatusLabel2.Text = "Cargando el Asistente de Traducción...";

    // Load the script: one grid row per line, collecting actor names for
    // autocomplete unless the script already ships its own list.
    autoComplete = new ArrayList();
    al = mW.al;
    gridCont.RowCount = al.Count;
    bool hasAutoComplete = (mW.script.GetHeader().GetHeaderValue("AutoComplete") != string.Empty);
    for (int i = 0; i < al.Count; i++)
    {
        lineaASS lass = (lineaASS)al[i];
        gridCont[0, i].Value = lass.personaje;
        if (!autoComplete.Contains(lass.personaje) && !hasAutoComplete)
            if (lass.personaje.Trim()!="")
                autoComplete.Add(lass.personaje);
        gridCont[1, i].Value = lass.texto;
    }
    if (hasAutoComplete) InsertAutoCompleteFromScript();
    labelLineaActual.Text = "1 de " + (al.Count) + " (0%)";
    textPersonaje.Text = gridCont[0, 0].Value.ToString();
    textOrig.Text = gridCont[1, 0].Value.ToString();

    // Load the video: build the graph and cache the playback interfaces.
    graphBuilder = (IGraphBuilder)new FilterGraph();
    graphBuilder.RenderFile(videoInfo.FileName, null);
    mediaControl = (IMediaControl)graphBuilder;
    // mediaEventEx = (IMediaEventEx)this.graphBuilder;
    mediaSeeking = (IMediaSeeking)graphBuilder;
    mediaPosition = (IMediaPosition)graphBuilder;
    basicVideo = graphBuilder as IBasicVideo;
    basicAudio = graphBuilder as IBasicAudio;
    videoWindow = graphBuilder as IVideoWindow;
    try
    {
        int x, y;
        double atpf;
        basicVideo.GetVideoSize(out x, out y);
        basicVideo.get_AvgTimePerFrame(out atpf);
        videoInfo.FrameRate = Math.Round(1 / atpf, 3);
        // Fit the video to the panel width, preserving aspect ratio.
        int new_x = videoPanel.Width;
        int new_y = (new_x * y) / x;
        // NOTE(review): put_Height receives the width and put_Width the
        // height — these look swapped, though SetWindowPosition below
        // resizes the window again anyway. Confirm before changing.
        videoWindow.put_Height(new_x);
        videoWindow.put_Width(new_y);
        videoWindow.put_Owner(videoPanel.Handle);
        videoPanel.Size = new System.Drawing.Size(new_x, new_y);
        videoWindow.SetWindowPosition(0, 0, videoPanel.Width, videoPanel.Height);
        videoWindow.put_WindowStyle(WindowStyle.Child);
        videoWindow.put_Visible(DirectShowLib.OABool.True);
        mediaSeeking.SetTimeFormat(DirectShowLib.TimeFormat.Frame);
        mediaControl.Run();
    }
    catch
    {
        mW.errorMsg("Imposible cargar el vídeo. Debe haber algún problema con el mismo, y el asistente será muy inestable");
    }

    // Wire up timers & event handlers.
    timer1.Tick += new EventHandler(timer1_Tick);
    timer1.Enabled = true;
    timer2.Tick += new EventHandler(timer2_Tick);
    AutoSaveTimer.Tick += new EventHandler(timer3_Tick);
    AutoSaveTimer.Enabled = true;
    gridCont.CellClick += new DataGridViewCellEventHandler(gridCont_CellClick);
    textPersonaje.TextChanged += new EventHandler(textPersonaje_TextChanged);
    textTradu.TextChanged += new EventHandler(textTradu_TextChanged);
    textTradu.KeyUp += new KeyEventHandler(textBox1_KeyUp);
    textTradu.KeyDown += new KeyEventHandler(textTradu_KeyDown);
    textTradu.KeyPress += new KeyPressEventHandler(textTradu_KeyPress);
    textPersonaje.KeyDown += new KeyEventHandler(textPersonaje_KeyDown);
    textPersonaje.KeyPress += new KeyPressEventHandler(textPersonaje_KeyPress);
    button8.GotFocus += new EventHandler(button8_GotFocus);
    button9.GotFocus += new EventHandler(button9_GotFocus);
    gridCont.DoubleClick += new EventHandler(gridCont_DoubleClick);
    gridCont.SelectionChanged += new EventHandler(gridCont_SelectionChanged);
    gridCont.KeyUp += new KeyEventHandler(gridCont_KeyUp);
    listBox1.KeyUp += new KeyEventHandler(listBox1_KeyUp);
    textToAdd.KeyPress += new KeyPressEventHandler(textToAdd_KeyPress);
    progressBar1.MouseDown += new MouseEventHandler(progressBar1_MouseDown);
    tiempoInicio_.TimeValidated += new TimeTextBox.OnTimeTextBoxValidated(tiempo_TimeValidated);
    tiempoFin_.TimeValidated += new TimeTextBox.OnTimeTextBoxValidated(tiempo_TimeValidated);
    this.Move += new EventHandler(translateW_Move);
    //textTradu.ContextMenu = new ASSTextBoxRegExDefaultContextMenu(textTradu);
    mediaControl.Pause();

    // Restore checkbox state from the config file; ignore missing keys.
    try
    {
        checkAutoComplete.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_autoC"));
        checkTagSSA.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_tagSSA"));
        checkComment.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_Comment"));
        // NOTE(review): the "_aud" key feeds checkVideo and "_vid" feeds
        // checkAudio — possibly swapped keys; confirm against the code that
        // writes these settings before changing.
        checkVideo.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_aud"));
        checkAudio.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_vid"));
        checkSaveTime.Checked = Convert.ToBoolean(mW.getFromConfigFile("translateW_preTime"));
    }
    catch {}
    // Autosave interval from config, defaulting to 30 seconds.
    try { AutoSaveTimer.Interval = int.Parse(mW.getFromConfigFile("translateW_AutoSaveInterval")); }
    catch { AutoSaveTimer.Interval = 30000; }

    // End of initialization.
    textTradu.Focus();
    // Reference dictionaries from config, with built-in defaults
    // (format "Name|URL").
    try
    {
        string[] bleh = mW.getFromConfigFileA("translateW_Reference");
        for (int i = 0; i < bleh.Length; i++) Diccionarios.Add(bleh[i]);
    }
    catch
    {
        Diccionarios.Add("WordReference|http://www.wordreference.com/");
        Diccionarios.Add("Wikipedia|http://es.wikipedia.org");
        Diccionarios.Add("RAE|http://www.rae.es");
        Diccionarios.Add("Dictionary|http://dictionary.reference.com/");
    }
    diccionarios.DataSource = Diccionarios;
    CreateReferenceTabs();
    UpdateStatusFile();

    // Session statistics ticking once per second.
    TiempoInicio = DateTime.Now;
    Estadisticas.Interval = 1000;
    Estadisticas.Tick += new EventHandler(Estadisticas_Tick);
    Estadisticas.Enabled = true;
    InitRPC();
    archivosView.KeyDown += new KeyEventHandler(archivosView_KeyDown);
    archivosView.SelectedIndexChanged += new EventHandler(archivosView_SelectedIndexChanged);

    // Spell checking follows the main window's configuration.
    textTradu.EnableSpellChecking = mW.spellEnabled;
    if (mW.spellEnabled)
    {
        textTradu.DictionaryPath = mW.dictDir;
        textTradu.Dictionary = mW.ActiveDict;
    }
    toolStripStatusLabel2.Text = "Asistente cargado correctamente.";

    // Optionally ask the user which translation style to use.
    bool showpopup = true;
    try { showpopup = Convert.ToBoolean(mW.getFromConfigFile("translateW_ShowPopup")); }
    catch { mW.updateReplaceConfigFile("translateW_ShowPopup", showpopup.ToString()); }
    if (showpopup)
    {
        TranslationStyle estilo = TranslationStyle.FromScriptWithActors;
        translateW_Popup pop = new translateW_Popup(mW);
        switch (pop.ShowDialog())
        {
            case DialogResult.Yes: estilo = TranslationStyle.FromScriptWithActors; break;
            case DialogResult.No: estilo = TranslationStyle.FromScriptWithoutActors; break;
            case DialogResult.Cancel: estilo = TranslationStyle.FromScratch; break;
            case DialogResult.Ignore: estilo = TranslationStyle.FromScratchAudio; break;
        }
        // Map the chosen style onto the three mode checkboxes.
        switch (estilo)
        {
            case TranslationStyle.FromScriptWithActors: modeSelector.Checked = true; splitText.Checked = false; audioMode.Checked = false; break;
            case TranslationStyle.FromScriptWithoutActors: modeSelector.Checked = true; splitText.Checked = true; audioMode.Checked = false; break;
            case TranslationStyle.FromScratch: modeSelector.Checked = false; splitText.Checked = false; audioMode.Checked = false; break;
            case TranslationStyle.FromScratchAudio: modeSelector.Checked = false; splitText.Checked = true; audioMode.Checked = true; break;
        }
    }
}
/// <summary>
/// Builds the audio graph for the current file: source filter -> sample
/// grabber (delivering buffers to grabberAudio) -> either the default
/// renderer chosen by Render() or, when useNullRenderer is set, a Null
/// Renderer substituted for the audio renderer. Caches the control,
/// seeking, event and audio interfaces and sets isValid on success; on any
/// failure the graph is destroyed and AudioSourceError is raised.
/// </summary>
/// <param name="audioSubType">Media subtype the sample grabber should accept.</param>
private void CreateFilters(Guid audioSubType)
{
    isValid = false;
    int r;

    // grabber
    grabberAudio = new GrabberAudio(this);

    // objects
    graphObject = null;
    grabberObjectAudio = null;

    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;

        // create source device's object
        r = graph.AddSourceFilter(fileName, "source", out sourceBase);
        if (sourceBase == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }

        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObjectAudio = Activator.CreateInstance(type);
        sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
        grabberBaseAudio = (IBaseFilter)grabberObjectAudio;

        // add grabber filters to graph
        r = graph.AddFilter(grabberBaseAudio, "grabberAudio");

        // set media type: only accept audio of the requested subtype
        AMMediaType mediaType = new AMMediaType
        {
            MajorType = MediaType.Audio,
            SubType = audioSubType,
            FormatType = FormatType.WaveEx
        };
        r = sampleGrabberAudio.SetMediaType(mediaType);

        // render pin
        // TODO: Improve this! We can't always assume that the second pin will always be the audio pin -- we need to find it.
        IPin sbPin = Tools.GetOutPin(sourceBase, 1);
        if (sbPin == null)
        {
            sbPin = Tools.GetOutPin(sourceBase, 0);
        }
        r = graph.Render(sbPin);

        // verify the grabber's output actually got connected, and capture
        // the negotiated media type so we can read the wave format.
        IPin outPin = Tools.GetOutPin(grabberBaseAudio, 0);
        AMMediaType mt = new AMMediaType();
        r = outPin.ConnectionMediaType(mt);
        if (!Tools.IsPinConnected(outPin))
        {
            throw new ApplicationException("Failed obtaining media information");
        }

        // disable clock, if someone requested it
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)graphObject;
            r = mediaFilter.SetSyncSource(null);
        }

        // copy the negotiated WAVEFORMATEX out of the media type
        wavFormat = new WaveFormatEx();
        Marshal.PtrToStructure(mt.FormatPtr, wavFormat);
        Marshal.ReleaseComObject(outPin);

        // configure sample grabber: stream samples to the callback rather
        // than buffering them (callback mode 1 = BufferCB)
        r = sampleGrabberAudio.SetBufferSamples(false);
        r = sampleGrabberAudio.SetOneShot(false);
        r = sampleGrabberAudio.SetCallback(grabberAudio, 1);

        if (useNullRenderer)
        {
            // Get a list of all the filters connected to the sample grabber
            List <Tools.FilterInfo2> filtersInfo2 = new List <Tools.FilterInfo2>();
            Tools.FilterInfo2 testFilterInfo2 = Tools.GetNextFilter(grabberBaseAudio, PinDirection.Output, 0);
            while (true)
            {
                filtersInfo2.Add(testFilterInfo2);
                testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0);
                if (testFilterInfo2.Filter == null)
                {
                    break;
                }
            }

            // Remove the last filter, the audio renderer
            r = graph.RemoveFilter(filtersInfo2[filtersInfo2.Count - 1].Filter);

            // create null renderer
            type = Type.GetTypeFromCLSID(Clsid.NullRenderer);
            if (type == null)
            {
                throw new ApplicationException("Failed creating null renderer");
            }
            nullRendererObjectAudio = Activator.CreateInstance(type);
            IBaseFilter nullRendererAudio = (IBaseFilter)nullRendererObjectAudio;

            // add grabber filters to graph
            r = graph.AddFilter(nullRendererAudio, "nullRenderer");

            // reconnect the grabber's output to the null renderer instead
            //outPin = Tools.GetOutPin(filtersInfo2[filtersInfo2.Count - 2].Filter, 0);
            outPin = Tools.GetOutPin(grabberBaseAudio, 0);
            IPin inPin = Tools.GetInPin(nullRendererAudio, 0);
            if (graph.Connect(outPin, inPin) < 0)
            {
                throw new ApplicationException("Failed obtaining media audio information");
            }
            Marshal.ReleaseComObject(outPin);
            Marshal.ReleaseComObject(inPin);
        }

        // configure video window: suppress any auto-shown video window
        IVideoWindow window = (IVideoWindow)graphObject;
        if (window != null)
        {
            window.put_AutoShow(false);
            window = null;
        }

        // get media control
        mediaControl = (IMediaControl)graphObject;

        // get media seek control
        mediaSeekControl = (IMediaSeeking)graphObject;
        mediaSeekControl.SetTimeFormat(TimeFormat.MediaTime);

        // get media events' interface
        mediaEvent = (IMediaEventEx)graphObject;

        // get media audio control
        basicAudio = (IBasicAudio)graphObject;

        isValid = true;
    }
    catch (Exception exception)
    {
        DestroyFilters();

        // provide information to clients
        AudioSourceError?.Invoke(this, new AudioSourceErrorEventArgs(exception.Message));
    }
}
/// <summary>
/// This method sets up the DirectShow filter graph and obtains the interfaces
/// necessary to control playback for VideoTextures created from video files.
/// This method works for .avi, .mpeg, and .wmv files.
/// </summary>
/// <param name="filename">The .avi, .mpeg, or .wmv video file.</param>
/// <exception cref="Exception">Thrown for unsupported video types; DirectShow
/// failures surface as COMException via DsError.ThrowExceptionForHR.</exception>
private void SetupGraph(string filename)
{
    try
    {
        int hr;

        // 1. Start building the graph, using FilterGraph and CaptureGraphBuilder2
        graphBuilder = (IGraphBuilder) new FilterGraph();
        ICaptureGraphBuilder2 builder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
        hr = builder.SetFiltergraph(graphBuilder);
        DsError.ThrowExceptionForHR(hr);

        // 2. Add the source filter for the video file input.
        IBaseFilter sourceFilter;
        hr = graphBuilder.AddSourceFilter(filename, filename, out sourceFilter);
        DsError.ThrowExceptionForHR(hr);

        // 3. Get the SampleGrabber interface, configure it, and add it to the graph.
        ISampleGrabber sampGrabber = (ISampleGrabber) new SampleGrabber();
        ConfigureSampleGrabber(sampGrabber);
        hr = graphBuilder.AddFilter((IBaseFilter)sampGrabber, "SampleGrabber");
        DsError.ThrowExceptionForHR(hr);

        // 4. Add the null renderer (since we don't want to render in a separate window.)
        IBaseFilter nullRenderer = (IBaseFilter) new NullRenderer();
        hr = graphBuilder.AddFilter(nullRenderer, "Null Renderer");
        DsError.ThrowExceptionForHR(hr);

        // 5. Render the stream. The way the stream is rendered depends on its type.
        switch (vidType)
        {
            case VideoType.AVI:
            case VideoType.MPEG:
                hr = builder.RenderStream(null, null, sourceFilter, (IBaseFilter)sampGrabber, nullRenderer);
                break;
            case VideoType.WMV:
                // WMV needs the media type pinned to Video for RenderStream.
                hr = builder.RenderStream(null, MediaType.Video, sourceFilter, (IBaseFilter)sampGrabber, nullRenderer);
                break;
            default:
                throw new Exception("Unsupported Video type: " + vidType);
        }
        DsError.ThrowExceptionForHR(hr);

        // 6. Now that everything is configured and set up, save the width, height, stride information for use later.
        SaveSizeInfo(sampGrabber);

        // 7. Obtain the interfaces that we will use to control the execution of the filter graph.
        mediaControl = graphBuilder as IMediaControl;
        mediaSeeking = graphBuilder as IMediaSeeking;
        mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);
        long duration;
        mediaSeeking.GetDuration(out duration);
        // MediaTime durations are in 100-ns units; /10,000 converts to milliseconds.
        length = (int)(duration / 10000);
    }
    catch (Exception ex)
    {
        // Log and rethrow; callers are responsible for handling setup failure.
        Console.WriteLine(ex.Message);
        throw;
    }
}