/// <summary>
/// Initializes a new instance of the <see cref="Country"/> class.
/// </summary>
/// <param name="id">Unique identifier of the country.</param>
/// <param name="country">Display name of the country.</param>
/// <param name="code">Country code string.</param>
/// <param name="standard">Analog video standard used in this country.</param>
public Country(int id, string country, string code, AnalogVideoStandard standard)
{
  // Assignments are independent; stored straight into the backing fields.
  _standard = standard;
  _code = code;
  _name = country;
  _id = id;
}
/// <summary>
/// Private constructor: puts the capture component into its unconfigured
/// state — negative sentinels for the numeric settings and no video standard.
/// </summary>
private Capture()
{
  _currentVideoStandard = AnalogVideoStandard.None;
  _availableVideoStandard = AnalogVideoStandard.None;
  _imageWidth = -1;
  _frameRate = -1;
}
/// <summary>
/// Applies the given analog video standard (TV mode) to the capture
/// filter's video decoder interface, when the filter exposes one.
/// Throws (via DsError) if the decoder rejects the format.
/// </summary>
/// <param name="mode">Analog video standard to select.</param>
public void SetTVMode(AnalogVideoStandard mode)
{
  if (this.DX.CaptureFilter == null)
  {
    return;
  }
  IAMAnalogVideoDecoder decoder = this.DX.CaptureFilter as IAMAnalogVideoDecoder;
  if (decoder == null)
  {
    // Filter has no analog video decoder interface; nothing to set.
    return;
  }
  int hr = decoder.put_TVFormat(mode);
  DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Sets the new video format on the video decoder interface.
/// On success the current format field is updated and logged; a failing
/// HRESULT is now logged instead of being silently ignored.
/// </summary>
/// <param name="newVideoFormat">The new video decoder format</param>
private void SetVideoDecoder(AnalogVideoStandard newVideoFormat)
{
  // Nothing to do without a decoder interface or with no real format requested.
  if (_analogVideoDecoder == null || newVideoFormat == AnalogVideoStandard.None)
  {
    return;
  }
  int hr = _analogVideoDecoder.put_TVFormat(newVideoFormat);
  if (hr == 0)
  {
    _currentVideoFormat = newVideoFormat;
    Log.Log.Info("Set new video format to: {0}", _currentVideoFormat);
  }
  else
  {
    // Previously a failure was swallowed silently, leaving the old format
    // active with no trace in the log. Record the HRESULT for diagnosis.
    Log.Log.Info("Failed to set video format to: {0}, hr: 0x{1:X}", newVideoFormat, hr);
  }
}
/// <summary>
/// Reads the currently selected analog video standard (TV mode) from the
/// capture filter's video decoder interface.
/// </summary>
/// <returns>The current standard, or <c>AnalogVideoStandard.None</c> when the
/// capture filter is missing or exposes no decoder interface.</returns>
public AnalogVideoStandard GetTVMode()
{
  if (this.DX.CaptureFilter == null)
  {
    return AnalogVideoStandard.None;
  }
  IAMAnalogVideoDecoder decoder = this.DX.CaptureFilter as IAMAnalogVideoDecoder;
  if (decoder == null)
  {
    return AnalogVideoStandard.None;
  }
  // Renamed from the misleading "none": this local receives the live format.
  AnalogVideoStandard format = AnalogVideoStandard.None;
  DsError.ThrowExceptionForHR(decoder.get_TVFormat(out format));
  return format;
}
/// <summary>
/// Sets TV Mode for device.
/// </summary>
/// <param name="mode">TV Mode to set (analog video standard).</param>
public void SetTVMode(AnalogVideoStandard mode)
{
  // A null CaptureFilter yields a null decoder here ("as" on null is null),
  // so a single check covers both the missing-filter and missing-interface cases.
  IAMAnalogVideoDecoder pDecoder = DX.CaptureFilter as IAMAnalogVideoDecoder;
  if (pDecoder == null)
  {
    return;
  }
  DsError.ThrowExceptionForHR(pDecoder.put_TVFormat(mode));
  // No ReleaseComObject on pDecoder: it is just another interface on the
  // capture filter's RCW, so releasing it would release the filter itself.
}
// ====================================================================

#region TV Mode

/// <summary>
/// Sets TV Mode for device.
/// </summary>
/// <param name="mode">TV Mode to set (analog video standard).</param>
public void SetTVMode(AnalogVideoStandard mode)
{
  // Validate the camera exists, then delegate the actual work to it.
  _ThrowIfCameraWasNotCreated();
  _Camera.SetTVMode(mode);
}
/// <summary>
/// Prepares the scanner for a frequency sweep: stores the scan range and
/// channel list, resets the current frequency, mutes the tuner and programs
/// the requested analog video standard on the tuner's decoder.
/// </summary>
/// <param name="fromFreq">First frequency of the scan range.</param>
/// <param name="toFreq">Last frequency of the scan range.</param>
/// <param name="stepFreq">Step between scanned frequencies.</param>
/// <param name="channelList">Channel list the scan works against.</param>
/// <param name="analogVideoStandart">Analog video standard to program on the tuner.</param>
public void Start(int fromFreq, int toFreq, int stepFreq, ChannelList channelList, AnalogVideoStandard analogVideoStandart)
{
  // Remember what to scan (plain field stores; order is irrelevant).
  this.chanList = channelList;
  this.analogVideoStandart = analogVideoStandart;
  this.fromFreq = fromFreq;
  this.toFreq = toFreq;
  this.stepFreq = stepFreq;
  currFreq = fromFreq;
  terminated = false;
  // Silence the tuner during the sweep, then program the requested standard.
  tuner.Mute = true;
  // NOTE(review): the HRESULT of put_TVFormat is ignored here, so a failure
  // to program the standard goes unnoticed — confirm that is acceptable.
  tuner.AnalogVideoDecoder.put_TVFormat(analogVideoStandart);
}
/// <summary>
/// Creates the capture filter instance(s) named in the configuration file,
/// adds them to the DirectShow graph and connects them to the crossbar.
/// </summary>
/// <param name="graph">The stored graph (configuration)</param>
/// <param name="capBuilder">The Capture graph builder</param>
/// <param name="graphBuilder">The graphBuilder</param>
/// <param name="tuner">The tuner component</param>
/// <param name="crossbar">The crossbar component</param>
/// <param name="tvAudio">The tvaudio component</param>
/// <returns>true, if the graph building was successful</returns>
private bool CreateConfigurationBasedFilterInstance(Graph graph, ICaptureGraphBuilder2 capBuilder, IFilterGraph2 graphBuilder, Tuner tuner, Crossbar crossbar, TvAudio tvAudio)
{
  // Device names the stored configuration expects to find.
  string audioDeviceName = graph.Capture.AudioCaptureName;
  string videoDeviceName = graph.Capture.Name;
  DsDevice[] devices;
  bool videoConnected = false;
  bool audioConnected = false;
  //get a list of all video capture devices
  try
  {
    if (tuner.TunerName == "Adaptec USB TvTuner")
    {
      // Special case: enumerate this device under VideoInputDevice instead.
      Log.Log.WriteFile("analog: Adaptec USB device detected!");
      devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    }
    else
    {
      devices = DsDevice.GetDevicesOfCat(FilterCategory.AMKSCapture); //shouldn't be VideoInputDevice
      // Sort so devices matching the tuner/audio/crossbar names are tried first.
      devices = DeviceSorter.Sort(devices, tuner.TunerName, tvAudio.TvAudioName, crossbar.CrossBarName);
    }
  }
  catch (Exception)
  {
    Log.Log.WriteFile("analog: AddTvCaptureFiler error in allocating devices collection");
    return(false);
  }
  if (devices.Length == 0)
  {
    Log.Log.WriteFile("analog: AddTvCaptureFilter no tvcapture devices found");
    return(false);
  }
  //try each video capture filter
  for (int i = 0; i < devices.Length; i++)
  {
    bool filterUsed = false;
    IBaseFilter tmp;
    // Skip devices known to be unusable as capture filters.
    if (_badCaptureDevices.Contains(devices[i].Name))
    {
      Log.Log.WriteFile("analog: AddTvCaptureFilter bypassing: {0}", devices[i].Name);
      continue;
    }
    Log.Log.WriteFile("analog: AddTvCaptureFilter try:{0} {1}", devices[i].Name, i);
    // if video capture filter is in use, then we can skip it
    if (DevicesInUse.Instance.IsUsed(devices[i]))
    {
      Log.Log.WriteFile("analog: 
Device: {0} in use?", devices[i].Name);
      continue;
    }
    // Only consider the devices named in the stored configuration.
    if (!videoDeviceName.Equals(devices[i].Name) && (audioDeviceName == null || !audioDeviceName.Equals(devices[i].Name)))
    {
      continue;
    }
    int hr;
    try
    {
      // add video capture filter to graph
      hr = graphBuilder.AddSourceFilterForMoniker(devices[i].Mon, null, devices[i].Name, out tmp);
    }
    catch (Exception)
    {
      Log.Log.WriteFile("analog: cannot add filter to graph");
      continue;
    }
    if (hr != 0)
    {
      //cannot add video capture filter to graph, try next one
      if (tmp != null)
      {
        Log.Log.WriteFile("analog: cannot add filter: {0} to graph", devices[i].Name);
        graphBuilder.RemoveFilter(tmp);
        Release.ComObject("TvCaptureFilter", tmp);
      }
      continue;
    }
    // connect crossbar->video capture filter
    if (videoDeviceName.Equals(devices[i].Name) && FilterGraphTools.ConnectPin(graphBuilder, crossbar.VideoOut, tmp, graph.Capture.VideoIn))
    {
      _filterVideoCapture = tmp;
      _videoCaptureDevice = devices[i];
      if (_audioCaptureDevice != _videoCaptureDevice)
      {
        DevicesInUse.Instance.Add(_videoCaptureDevice);
      }
      Log.Log.WriteFile("analog: AddTvCaptureFilter connected video to crossbar successfully");
      videoConnected = true;
      filterUsed = true;
    }
    // crossbar->audio capture filter
    // Many video capture are also the audio capture filter, so we can always try it again
    if (audioDeviceName.Equals(devices[i].Name) && FilterGraphTools.ConnectPin(graphBuilder, crossbar.AudioOut, tmp, graph.Capture.AudioIn))
    {
      _filterAudioCapture = tmp;
      _audioCaptureDevice = devices[i];
      if (_audioCaptureDevice != _videoCaptureDevice)
      {
        DevicesInUse.Instance.Add(_audioCaptureDevice);
      }
      Log.Log.WriteFile("analog: AddTvCaptureFilter connected audio to crossbar successfully");
      audioConnected = true;
      filterUsed = true;
    }
    // _audioCaptureDevice should never be null - avoids null exception crashes with Encoder.cs
    // NOTE(review): this else binds to the audio-connect "if" above, so the
    // device is recorded as the audio capture device even when its audio pin
    // did not connect — apparently intentional per the comment above; confirm.
    else
    {
      _audioCaptureDevice = devices[i];
    }
    if (!filterUsed)
    {
      // cannot connect crossbar->video capture filter, remove filter from graph
      // and continue with the next video capture filter
      Log.Log.WriteFile("analog: AddTvCaptureFilter failed to connect to crossbar");
      graphBuilder.RemoveFilter(tmp);
      Release.ComObject("capture filter", tmp);
    }
    else
    {
      // A connection was made; restart the scan so remaining pins can be
      // matched against every device again.
      i = -1; // Go through the devices again from the start...
    }
    if (videoConnected && audioConnected)
    {
      break;
    }
  }
  if (_filterVideoCapture != null)
  {
    // Video capture filter found: pick up its optional interfaces and
    // apply the stored capture configuration.
    if (graph.Capture.TeletextPin != -1)
    {
      _pinVBI = DsFindPin.ByDirection(_filterVideoCapture, PinDirection.Output, graph.Capture.TeletextPin);
    }
    _videoProcAmp = _filterVideoCapture as IAMVideoProcAmp;
    _analogVideoDecoder = _filterVideoCapture as IAMAnalogVideoDecoder;
    _streamConfig = _filterVideoCapture as IAMStreamConfig;
    _videoFormats = graph.Capture.AvailableVideoStandard;
    _defaultVideoProcAmpValues = graph.Capture.VideoProcAmpValues;
    _frameRate = graph.Capture.FrameRate;
    _imageWidth = graph.Capture.ImageWidth;
    _imageHeight = graph.Capture.ImageHeight;
    CheckCapabilitiesStreamConfig(graph, capBuilder);
    SetCaptureConfiguration(graph);
  }
  return(_filterVideoCapture != null);
}
// ----------------- Constructor ---------------------

/// <summary>
/// Retrieve capabilities of a video device.
/// Queries the VideoStreamConfigCaps of the stream config interface and
/// copies frame-size limits, frame-rate limits and the analog video
/// standard into this object. In DEBUG builds every capability structure
/// is walked (for tracing); the extracted fields end up holding the values
/// of the last iteration, index 0.
/// </summary>
internal VideoCapabilities(IAMStreamConfig videoStreamConfig)
{
  if ( videoStreamConfig == null )
    throw new ArgumentNullException( "videoStreamConfig" );
  AMMediaType mediaType = null;
  VideoStreamConfigCaps caps = null;
  IntPtr pCaps = IntPtr.Zero;
#if DSHOWNET
  // This interop variant returns the media type as a raw pointer.
  IntPtr pMediaType;
#endif
  try
  {
    // Ensure this device reports capabilities
    int c, size;
    int hr = videoStreamConfig.GetNumberOfCapabilities( out c, out size );
    if ( hr != 0 )
      Marshal.ThrowExceptionForHR( hr );
    if ( c <= 0 )
      throw new NotSupportedException( "This video device does not report capabilities." );
    // Guard against a device whose caps struct is larger than the one marshaled below.
    if ( size > Marshal.SizeOf( typeof( VideoStreamConfigCaps ) ) )
      throw new NotSupportedException( "Unable to retrieve video device capabilities. This video device requires a larger VideoStreamConfigCaps structure." );
    if ( c > 1 )
      Debug.WriteLine("This video device supports " + c + " capability structures. Only the first structure will be used." );
    // Alloc memory for structure
    pCaps = Marshal.AllocCoTaskMem( Marshal.SizeOf( typeof( VideoStreamConfigCaps ) ) );
#if DEBUG
    // Walk all capability structures, last to first, so every one is traced.
    for(int i = c - 1; i >= 0; i--)
    {
#if DSHOWNET
      hr = videoStreamConfig.GetStreamCaps(i, out pMediaType, pCaps);
#else
      hr = videoStreamConfig.GetStreamCaps(i, out mediaType, pCaps);
#endif
#else
    // Retrieve first (and hopefully only) capabilities struct
#if DSHOWNET
    hr = videoStreamConfig.GetStreamCaps( 0, out pMediaType, pCaps );
#else
    hr = videoStreamConfig.GetStreamCaps( 0, out mediaType, pCaps );
#endif
#endif
    if ( hr != 0 )
      Marshal.ThrowExceptionForHR( hr );
#if DSHOWNET
    // Convert pointers to managed structures
    mediaType = (AMMediaType)Marshal.PtrToStructure(pMediaType, typeof(AMMediaType));
#endif
    // Convert pointers to managed structures
    caps = (VideoStreamConfigCaps) Marshal.PtrToStructure(pCaps, typeof(VideoStreamConfigCaps));
    // Extract info: frame intervals are inverted into rates. The 10,000,000
    // factor suggests intervals in 100 ns units (DirectShow REFERENCE_TIME) —
    // confirm; the larger interval yields the minimum rate and vice versa.
    InputSize = caps.InputSize;
    MinFrameSize = caps.MinOutputSize;
    MaxFrameSize = caps.MaxOutputSize;
    FrameSizeGranularityX = caps.OutputGranularityX;
    FrameSizeGranularityY = caps.OutputGranularityY;
    MinFrameRate = (double)10000000 / caps.MaxFrameInterval;
    MaxFrameRate = (double)10000000 / caps.MinFrameInterval;
    //#if NEWCODE
    this.AnalogVideoStandard = caps.VideoStandard;
    //#endif
#if DEBUG
    if (caps.VideoStandard > AnalogVideoStandard.None)
    {
      Debug.WriteLine("Caps=" + caps.InputSize.ToString() + " " + caps.MinOutputSize.ToString() + " " + caps.MaxOutputSize.ToString() + " " + MinFrameRate.ToString() + "-" + MaxFrameRate.ToString() + " " + caps.VideoStandard.ToString());
      Debug.WriteLine("MediaType=" + mediaType.majorType.ToString() + " " + mediaType.subType.ToString() + " " + mediaType.formatType.ToString() + " " + mediaType.formatSize.ToString() + " " + mediaType.fixedSizeSamples.ToString() + " " + mediaType.sampleSize.ToString() + " " + mediaType.temporalCompression.ToString());
    }
    // Closes the for-loop whose opening brace sits under #if DEBUG above.
    }
#endif
  }
  finally
  {
    // Always release the unmanaged caps buffer and the media type.
    if ( pCaps != IntPtr.Zero )
      Marshal.FreeCoTaskMem( pCaps );
    pCaps = IntPtr.Zero;
    if ( mediaType != null )
      DsUtils.FreeAMMediaType( mediaType );
    mediaType = null;
  }
}