/// <summary>
/// Sets up an SDL video surface with a YUY2 overlay and configures the first
/// V4L adapter (/dev/video0) for 720x576 interlaced YUYV capture on the
/// "Television" input using the PAL standard, then hooks SDL quit/key events.
/// </summary>
public TVViewer()
{
	// initialize sdl stuff
	screen = Sdl.SDL_SetVideoMode(720, 576, 32, Sdl.SDL_HWSURFACE | Sdl.SDL_DOUBLEBUF);
	yuvOverlay = Sdl.SDL_CreateYUVOverlay(720, 576, Sdl.SDL_YUY2_OVERLAY, screen);
	rect = new Sdl.SDL_Rect(0, 0, 720, 576);
	// NOTE(review): double-dereference pulls a pointer out of the native
	// SDL_Overlay struct at hard-coded byte offset 20 (presumably the pixel
	// plane array) — this depends on the native struct layout and pointer
	// size; verify against the SDL headers in use.
	pixels = Marshal.ReadIntPtr(Marshal.ReadIntPtr(yuvOverlay, 20));

	// initialize adapter
	adapter = new Adapter("/dev/video0");
	// NOTE(review): sets the Mute control to 0 — assumed to mean "unmuted";
	// confirm against the driver's control semantics.
	adapter.SetControlValue(Control.Mute, 0);
	// set desired capture method
	adapter.CaptureMethod = CaptureMethod.ReadWrite;
	// set video format: full PAL frame, packed YUYV, interlaced fields
	VideoCaptureFormat format = new VideoCaptureFormat(720, 576);
	format.PixelFormat = v4l2_pix_format_id.YUYV;
	format.Field = v4l2_field.Interlaced;
	adapter.SetFormat(format);
	// select input and standard by their "Name" property
	adapter.Input = adapter.Inputs[adapter.Inputs.IndexOf("Name", "Television")];
	adapter.Standard = adapter.Standards[adapter.Standards.IndexOf("Name", "PAL")];
	Events.Quit += quit;
	Events.KeyboardDown += keyDown;
}
/// <summary>
/// Creates a tuner.
/// </summary>
/// <param name="adapter">The parental Video4Linux adapter.</param>
/// <param name="index">The index of the tuner.</param>
/// <param name="type">The type of the tuner.</param>
internal Tuner(Adapter adapter, uint index, TunerType type)
{
	this.adapter = adapter;
	// Prepare the kernel tuner struct, then populate it from the driver.
	tuner = new v4l2_tuner();
	tuner.index = index;
	tuner.type = type;
	getTuner();
}
/// <summary>
/// Opens both capture devices, streams raw frames from each in lock-step,
/// and publishes the stereo-processed result into <c>lastImage</c> (guarded
/// by <c>lastlock</c>). Loops until <c>_enabled</c> becomes false; streaming
/// is always stopped on exit via the finally block.
/// </summary>
public void Start()
{
	var adapterone = new Adapter(_deviceone);
	var adaptertwo = new Adapter(_devicetwo);
	VideoCaptureFormat fone, ftwo;
	// Init configures each adapter and allocates its per-frame read buffer.
	bufferone = Init(adapterone,out fone);
	buffertwo = Init(adaptertwo,out ftwo);
	// NOTE(review): frame dimensions are taken from camera one only —
	// assumes both cameras negotiated the same format; confirm.
	_w = (int)fone.Width;
	_h = (int)fone.Height;
	//lastImage = new byte[_w * _h * 2];
	adapterone.StartStreaming();
	adaptertwo.StartStreaming();
	Stopwatch sw = new Stopwatch();
	sw.Start();
	//WL("started "+(Console.KeyAvailable?"haskey":"nokey"));
	try
	{
		while(_enabled)
		{
			// Read one buffer's worth from each device. The returned byte
			// counts are only consumed by the commented-out debug line below.
			int rone = adapterone.VideoStream.Read(bufferone,0,bufferone.Length);
			int rtwo = adaptertwo.VideoStream.Read(buffertwo,0,buffertwo.Length);
			//WL("Read "+rone+","+rtwo+" bytes");
			long stamp = sw.ElapsedTicks;
			//RawToBitmap("_1",fone,bufferone,false);
			//RawToBitmap("_2",ftwo,buffertwo,false);
			//RawToPGM("C1",stamp,fone,bufferone,true);
			//RawToPGM("C2",stamp,ftwo,buffertwo,true);
			//WL("ellapsed "+sw.ElapsedMilliseconds);
			// Publish the processed frame; lock protects concurrent readers
			// of lastImage on other threads.
			lock(lastlock)
			{
				//DiffImage(fone.Width,fone.Height,bufferone,buffertwo,lastImage);
				lastImage = StereoProcess.Go(fone.Width,fone.Height,bufferone,buffertwo);
			}
			//RawToPGM("D",stamp,fone,bufferdif,true);
			//WL("ellapsed "+sw.ElapsedMilliseconds);
		}
	}
	finally
	{
		adapterone.StopStreaming();
		adaptertwo.StopStreaming();
	}
}
/// <summary>
/// Dumps the adapter's capabilities, driver identification, inputs, outputs,
/// and supported standards to the console via <c>WL</c>.
/// </summary>
/// <param name="adapter">The adapter to describe.</param>
static void Info(Adapter adapter)
{
	WL("= Capabilities");
	foreach(AdapterCapability capability in adapter.Capabilities)
	{
		WL(capability.ToString());
	}

	WL("= Driver");
	WL(adapter.Driver);
	WL("= BusInfo");
	WL(adapter.BusInfo);

	WL("=Inputs");
	foreach(Input input in adapter.Inputs)
	{
		WL(input.Name + " " + input.Status + " " + input.Type + " " + input.SupportedStandards);
	}

	WL("=Outputs");
	foreach(Output output in adapter.Outputs)
	{
		WL(output.Name + " " + output.Status + " " + output.Type + " " + output.SupportedStandards);
	}

	WL("=Standard");
	foreach(Standard standard in adapter.Standards)
	{
		WL(standard.Name + " " + standard.FrameLines);
	}
}
/// <summary>
/// Reads one chunk of raw video data from the adapter's stream into
/// <paramref name="buffer"/>. The number of bytes actually read is
/// deliberately ignored (best-effort fill of the buffer).
/// </summary>
/// <param name="adapter">The adapter whose video stream is read.</param>
/// <param name="buffer">Destination buffer; at most its full length is read.</param>
static void Read(Adapter adapter, byte[] buffer)
{
	// The return value (bytes read) was only used by a commented-out debug
	// line; the dead local holding it has been removed.
	adapter.VideoStream.Read(buffer, 0, buffer.Length);
}
/// <summary>
/// Requests a 358x288 greyscale capture format on the adapter and allocates
/// a read buffer for it.
/// </summary>
/// <param name="adapter">The capture adapter to configure.</param>
/// <param name="format">Receives the capture format that was requested.</param>
/// <returns>A newly allocated buffer for raw frame data.</returns>
static byte[] Init(Adapter adapter, out VideoCaptureFormat format)
{
	format = new VideoCaptureFormat()
	{
		Field = Video4Linux.Analog.Kernel.v4l2_field.Any,
		Width = 358,
		Height = 288,
		PixelFormat = Video4Linux.Analog.Kernel.v4l2_pix_format_id.GREY
	};
	adapter.SetFormat(format);

	// NOTE(review): the *2 factor looks sized for a 2-byte-per-pixel format,
	// while GREY is presumably 1 byte/pixel — the buffer may be twice the
	// frame size. Kept as-is to preserve behavior; confirm before tightening.
	int frameBytes = (int)(format.Width * format.Height * 2);
	return new byte[frameBytes];
}