Example #1
        private void mItCaptureDevice_Click(object sender, EventArgs e)
        {
            // The menu item that raised the event.
            ToolStripMenuItem item = (ToolStripMenuItem)sender;

            // Remember the selected device name.
            this.pbMain.Tag = item.Text;

            // Get device.
            VideoDevice videoDevice = (VideoDevice)item.Tag;

            // Uncheck all camera menu items.
            foreach (ToolStripMenuItem mItem in this.captureToolStripMenuItem.DropDown.Items)
            {
                mItem.Checked = false;
            }

            item.Checked = true;

            try
            {
                // Stop the previous stream, if any.
                if (this.camera1 != null)
                {
                    this.camera1.Stop();
                }

                // Create and start the new stream.
                this.camera1 = new Emgu.CV.Capture(videoDevice.Index);
                this.camera1.Start();
            }
            catch (Exception exception)
            {
                Console.WriteLine(exception.ToString());
            }
        }
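The handler above starts the stream but nothing shows where the frames go. A minimal sketch of the usual wiring, assuming the Emgu.CV 2.x API (the ImageGrabbed event and RetrieveBgrFrame()) that matches the Capture class used above:

            // Hook up once, right after creating the capture (assumption: Emgu.CV 2.x API).
            this.camera1.ImageGrabbed += (s, args) =>
            {
                // RetrieveBgrFrame() returns the last grabbed frame as Image<Bgr, byte>;
                // ToBitmap() copies the pixels, so the frame can be disposed immediately.
                using (var frame = this.camera1.RetrieveBgrFrame())
                {
                    Bitmap bitmap = frame.ToBitmap();
                    // ImageGrabbed fires on the capture thread; marshal the UI update.
                    this.pbMain.BeginInvoke((Action)(() => this.pbMain.Image = bitmap));
                }
            };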
Example #2
        public H264(VideoDevice device)
        {
            this.device = device;

            // Pi4 only supports two:
            // "H.264", H264
            // "Motion-JPEG", MJPEG
            sImageFormatDescription formatDesc = device.enumerateFormats(inputBufferType)
                                                 .first(i => i.pixelFormat == inputPixelFormat, "The h.264 decoder requires hardware capable of decoding h.264. The provided video device can't do that.");

            // Logger.logVerbose( "Compressed format: {0}", formatDesc );
            inputFormat = new sImageFormat(ref formatDesc);

            // We're going to use NV12, but generally speaking the Pi4 supports the following:
            // "Planar YUV 4:2:0", YUV420
            // "Planar YVU 4:2:0", YVU420
            // "Y/CbCr 4:2:0", NV12
            // "Y/CrCb 4:2:0", NV21
            // "16-bit RGB 5-6-5", RGB565
            formatDesc = device.enumerateFormats(outputBufferType)
                         .first(i => i.pixelFormat == outputPixelFormat, "The h.264 decoder requires hardware capable of decoding h.264 into NV12. The provided video device can't do that.");
            outputFormat = new sImageFormat(ref formatDesc);

            inputSize  = SizeSupported.query(device, inputPixelFormat);
            outputSize = SizeSupported.query(device, outputPixelFormat);
        }
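The comments above list what a Pi4 exposes; on other hardware it helps to dump what the driver actually reports. A minimal sketch reusing only the enumerateFormats call from this constructor (the logFormats helper is hypothetical, and the eBufferType parameter type is assumed from the other snippets on this page):

        static void logFormats(VideoDevice device, eBufferType bufferType)
        {
            // enumerateFormats yields one sImageFormatDescription per format the driver supports.
            foreach (sImageFormatDescription desc in device.enumerateFormats(bufferType))
                Logger.logInfo("{0}", desc);
        }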
Example #3
        /// <summary>
        /// Handles selecting a capture device from the menu.
        /// </summary>
        /// <param name="sender">The menu item that was clicked.</param>
        /// <param name="e">Event arguments.</param>
        private void tsmiCaptureDevice_Click(object sender, EventArgs e)
        {
            // The menu item that raised the event.
            ToolStripMenuItem item = (ToolStripMenuItem)sender;

            // Get device.
            VideoDevice videoDevice = (VideoDevice)item.Tag;

            // Uncheck all cameras.
            foreach (ToolStripMenuItem mItem in this.tsmiCameraCapture.DropDown.Items)
            {
                mItem.Checked = false;
            }

            // Check only this camera.
            item.Checked = true;

            // Disconnect from the camera.
            this.DisconnectFromCamera();

            // Stop the image timer.
            this.StopSendImageTimer();

            // Connect to camera.
            this.ConnecToCamera(videoDevice.MonikerString);
        }
Example #4
 public void dbgPrintBuffers(VideoDevice device)
 {
     for (int i = 0; i < buffersCount; i++)
     {
         Logger.logInfo("#{0}: {1}", i, buffers[i].queryStatus(device.file));
     }
 }
Example #5
 public LinuxEngine(iSimdUtils simd, Rational? displayRefresh)
 {
     MiscUtils.simd      = simd;
     h264                = new H264(VideoDevice.open(h264DecoderDevice));
     this.displayRefresh = displayRefresh;
     // Logger.logVerbose( "Decoder: {0}", h264.ToString() );
     // PrintSizeofs.print();
 }
Example #6
 public DecodedQueue(VideoDevice videoDevice, int dbc) :
     base(allocateBuffers(videoDevice, dbc), videoDevice.file)
 {
     for (int i = 0; i < buffersCount; i++)
     {
         var db = new DecodedBuffer(i, videoDevice);
         buffers[i] = db;
     }
 }
Example #7
File: Camera.cs Project: mauricio-bv/iot
        private Camera()
        {
            // You can select other size and other format, this is a very basic one supported by all types of webcams including old ones
            VideoConnectionSettings settings = new VideoConnectionSettings(0, (640, 480), Iot.Device.Media.PixelFormat.JPEG);

            Device    = VideoDevice.Create(settings);
            IsRunning = true;
            new Thread(() => { TakePictures(); }).Start();
        }
Example #8
        /// <summary>
        /// Initiate the camera
        /// </summary>
        public Camera()
        {
            // You can select other size and other format, this is a very basic one supported by all types of webcams including old ones
            VideoConnectionSettings settings = new VideoConnectionSettings(0, (640, 480), Iot.Device.Media.PixelFormat.JPEG);

            _device = VideoDevice.Create(settings);
            // If the device has sufficient RAM, enabling pooling significantly improves frames per second by reducing GC pressure.
            _device.ImageBufferPoolingEnabled = true;
        }
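A hypothetical helper for this Camera class, relying only on the Capture(path) overload used by other snippets on this page:

        // Grab a single frame and write it to disk in the configured format (JPEG here).
        public void SavePicture(string path)
        {
            _device.Capture(path);
        }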
Example #9
        public Nv12Texture exportNv12(iGlesRenderDevice gles, VideoDevice device, ref sPixelFormatMP pixelFormat, ref ColorFormat colorFormat)
        {
            device.file.call(eControlCode.QUERYBUF, ref buffer);

            sDmaBuffer dma     = exportOutputBuffer(device, ref pixelFormat);
            ITexture   texture = gles.importNv12Texture(ref dma, ref colorFormat);

            // Logger.logVerbose( "Exported NV12 texture: {0}", dma );
            return(new Nv12Texture(texture));
        }
Example #10
        /// <summary>Export all buffers from V4L2, import them into GLES in Diligent Engine</summary>
        public void exportTextures(IRenderDevice renderDevice, VideoDevice device, ref sPixelFormatMP pixelFormat, ref ColorFormat color)
        {
            iGlesRenderDevice gles = ComLightCast.cast <iGlesRenderDevice>(renderDevice);

            textures = new Nv12Texture[buffers.Length];
            for (int i = 0; i < buffers.Length; i++)
            {
                textures[i] = buffers[i].exportNv12(gles, device, ref pixelFormat, ref color);
            }
        }
Example #11
        /// <summary>
        /// Method which gets available video input devices.
        /// </summary>
        public void GetAvailableCameraDevices()
        {
            Cameras = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
            var webCams = new VideoDevice[Cameras.Length];

            for (var i = 0; i < Cameras.Length; i++)
            {
                webCams[i] = new VideoDevice(Cameras[i].Name, Cameras[i].ClassID);
                comboBox1.Items.Add(webCams[i].ToString());
            }
        }
Example #12
        public EncodedQueue(VideoDevice device, int encodedBuffersCount, ref sPixelFormatMP encodedFormat) :
            base(allocateBuffers(device, encodedBuffersCount), device.file)
        {
            bufferCapacity = encodedFormat.getPlaneFormat(0).sizeImage;

            // Create encoded buffers, this does the memory mapping
            for (int i = 0; i < buffersCount; i++)
            {
                var eb = new EncodedBuffer(device.file, i);
                buffers[i] = eb;
            }
        }
Example #13
        static void Main()
        {
            var renderForm = new RenderForm("");

            SharpDX.DXGI.Factory f = new SharpDX.DXGI.Factory();
            SharpDX.DXGI.Adapter a = f.GetAdapter(1);


            SharpDX.Direct3D11.Device  d  = new Device(a, DeviceCreationFlags.VideoSupport | DeviceCreationFlags.BgraSupport);
            SharpDX.Direct3D11.Device2 d2 = d.QueryInterface <SharpDX.Direct3D11.Device2>();
            //d.Dispose();

            /* var device = new RenderDevice(, 1);
             * RenderContext context = new RenderContext(device);
             *
             * var d = device.Device;*/
            var multithread = d.QueryInterface <SharpDX.Direct3D.DeviceMultithread>();

            multithread.SetMultithreadProtected(true);

            // Create a DXGI Device Manager
            var dxgiDeviceManager = new SharpDX.MediaFoundation.DXGIDeviceManager();

            dxgiDeviceManager.ResetDevice(d);

            VideoDevice vd = d.QueryInterface <VideoDevice>();

            //vd.VideoDecoderProfileCount

            /*VideoDecoderDescription desc = new VideoDecoderDescription()
             * {
             *
             * }*/

            VideoContext ctx = d.ImmediateContext.QueryInterface <VideoContext>();

            /*var swapChain = new DX11SwapChain(device, renderForm.Handle);
             *
             * /*var dx = new VideoDecoderDescription()
             * {
             *
             * }*/
            /*int i = vd.VideoDecoderProfileCount;
             *
             * renderForm.ResizeEnd += (s, e) => swapChain.Resize();
             *
             * RenderLoop.Run(renderForm, () =>
             * {
             *  context.Context.ClearRenderTargetView(swapChain.RenderView, new SharpDX.Color4(1, 1, 1, 1));
             *
             *  swapChain.Present(1, SharpDX.DXGI.PresentFlags.None);
             * });*/
        }
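The Main above is hard to follow through the commented-out experiments. The live code path reduces to the sketch below; it uses only calls already present in the snippet, except that Factory1 and adapter index 0 are substituted as safer defaults (the hard-coded adapter 1 may not exist on every machine):

        static void initVideoPipeline()
        {
            using (var factory = new SharpDX.DXGI.Factory1())
            using (var adapter = factory.GetAdapter(0))
            using (var device = new SharpDX.Direct3D11.Device(adapter,
                       DeviceCreationFlags.VideoSupport | DeviceCreationFlags.BgraSupport))
            {
                // Media Foundation requires multithread-protected access to the device.
                using (var multithread = device.QueryInterface<SharpDX.Direct3D.DeviceMultithread>())
                    multithread.SetMultithreadProtected(true);

                // Hand the device to Media Foundation.
                using (var dxgiDeviceManager = new SharpDX.MediaFoundation.DXGIDeviceManager())
                {
                    dxgiDeviceManager.ResetDevice(device);

                    // The video device/context interfaces drive hardware decoding.
                    using (var videoDevice = device.QueryInterface<VideoDevice>())
                    using (var videoContext = device.ImmediateContext.QueryInterface<VideoContext>())
                        Console.WriteLine("Decoder profiles: " + videoDevice.VideoDecoderProfileCount);
                }
            }
        }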
Example #14
        /// <summary>Allocate encoded buffers</summary>
        static int allocateBuffers(VideoDevice device, int encodedBuffersCount)
        {
            int buffersCount = device.allocateEncodedBuffers(encodedBuffersCount);

            if (buffersCount == encodedBuffersCount)
            {
                Logger.logVerbose("eControlCode.REQBUFS completed, created {0} encoded buffers", buffersCount);
            }
            else
            {
                Logger.logInfo("eControlCode.REQBUFS: asked for {0} encoded buffer, GPU driver created {1} instead", buffersCount, buffersCount);
            }
            return(buffersCount);
        }
Example #15
        static int allocateBuffers(VideoDevice videoDevice, int buffersCount)
        {
            // Allocate decoded buffers
            int c = videoDevice.allocateDecodedFrames(buffersCount);

            if (c == buffersCount)
            {
                Logger.logVerbose("allocateDecodedFrames created {0} frames", buffersCount);
            }
            else
            {
                Logger.logVerbose("allocateDecodedFrames asked for {0} frames, allocated {1} instead", buffersCount, c);
            }
            return(c);
        }
Example #16
        public sDmaBuffer exportOutputBuffer(VideoDevice device, ref sPixelFormatMP pixelFormat)
        {
            sExportBuffer     eb          = device.exportOutputBuffer(bufferIndex);
            sPlanePixelFormat planeFormat = pixelFormat.getPlaneFormat(0);

            sDmaBuffer dma = new sDmaBuffer()
            {
                fd          = eb.fd,
                offset      = 0,
                stride      = planeFormat.bytesPerLine,
                imageSize   = planeFormat.bytesPerLine * pixelFormat.size.cy,
                sizePixels  = pixelFormat.size,
                bufferIndex = bufferIndex
            };

            return(dma);
        }
Example #17
        private static IEnumerable <Guid> FindVideoFormatCompatibleProfiles(VideoDevice videoDevice, AVCodec codec)
        {
            for (var i = 0; i < videoDevice.VideoDecoderProfileCount; ++i)
            {
                videoDevice.GetVideoDecoderProfile(i, out var profile);

                // TODO Check profile id

                videoDevice.CheckVideoDecoderFormat(profile, DecoderOuputFormat, out var supported);
                if (supported)
                {
                    yield return(profile);
                }
            }

            yield break;
        }
Example #18
        private void ComboBoxCamera_Loaded(object sender, RoutedEventArgs e)
        {
            var devs = VideoDevice.Devices().ToList();

            ComboBoxCamera.ItemsSource       = devs;
            ComboBoxCamera.DisplayMemberPath = nameof(VideoDevice.Name);
            var devIndex = devs.FindIndex(dev => dev.SymbolicLink == Config.Raw.DeviceId);

            if (devIndex >= 0)
            {
                var res = devs[devIndex].Resolutions.ToList();
                ComboBoxResolution.ItemsSource = res;
                var resIndex = res.FindIndex(r => r.Width == Config.Raw.Width && r.Height == Config.Raw.Height);
                ComboBoxResolution.SelectedIndex = resIndex >= 0 ? resIndex : 0;
            }
            ComboBoxCamera.SelectedIndex = devIndex >= 0 ? devIndex : 0;
        }
Example #19
        static void Main(string[] args)
        {
            VideoConnectionSettings settings = new VideoConnectionSettings(0)
            {
                CaptureSize  = (2560, 1920),
                PixelFormat  = PixelFormat.JPEG,
                ExposureType = ExposureType.Auto
            };

            using VideoDevice device = VideoDevice.Create(settings);

            // Get the supported formats of the device
            foreach (PixelFormat item in device.GetSupportedPixelFormats())
            {
                Console.Write($"{item} ");
            }
            Console.WriteLine();

            // Get the resolutions of the format
            foreach ((uint Width, uint Height) in device.GetPixelFormatResolutions(PixelFormat.YUYV))
            {
                Console.Write($"{Width}x{Height} ");
            }
            Console.WriteLine();

            // Query v4l2 controls default and current value
            VideoDeviceValue value = device.GetVideoDeviceValue(VideoDeviceValueType.Rotate);

            Console.WriteLine($"{value.Name} Min: {value.Minimum} Max: {value.Maximum} Step: {value.Step} Default: {value.DefaultValue} Current: {value.CurrentValue}");

            string path = Directory.GetCurrentDirectory();

            // Take photos
            device.Capture($"{path}/jpg_direct_output.jpg");

            // Change capture setting
            device.Settings.PixelFormat = PixelFormat.YUV420;

            // Convert pixel format
            Color[] colors = VideoDevice.Yv12ToRgb(device.Capture(), settings.CaptureSize);
            Bitmap  bitmap = VideoDevice.RgbToBitmap(settings.CaptureSize, colors);

            bitmap.Save($"{path}/yuyv_to_jpg.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
        }
    }
Example #20
    public void DoSomething()
    {
        // Get video device here. It will likely be best to
        // delegate that to another specialized service
        // that is injected into this class.
        VideoDevice device = ...;

        var graphics = this.graphicsFactory.Create(device);

        try
        {
            // Do something with graphics
        }
        finally
        {
            this.graphicsFactory.Release(graphics);
        }
    }
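The comments above describe a create/release factory pattern. A minimal sketch of the contract it implies; every name here is hypothetical, since the snippet shows only the call sites:

    public interface IGraphicsFactory
    {
        // Create a graphics object bound to the given video device.
        IGraphics Create(VideoDevice device);

        // Hand the graphics object back for cleanup or pooling.
        void Release(IGraphics graphics);
    }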
Example #21
        /// <summary>
        /// Get list of all available devices on the PC.
        /// </summary>
        /// <returns>Array of available video devices.</returns>
        private VideoDevice[] GetDevices()
        {
            // Set up the capture method:
            // find the system's cameras with the DirectShow.Net DLL, thanks to Charles Lorette.
            //DsDevice[] systemCameras = DsDevice.GetDevicesOfCat(AForge.Video.DirectShow.FilterCategory.VideoInputDevice);

            // Enumerate video devices.
            FilterInfoCollection systemCameras = new FilterInfoCollection(FilterCategory.VideoInputDevice);

            VideoDevice[] videoDevices = new VideoDevice[systemCameras.Count];

            for (int index = 0; index < systemCameras.Count; index++)
            {
                videoDevices[index] = new VideoDevice(index, systemCameras[index].Name, systemCameras[index].MonikerString);
            }

            return(videoDevices);
        }
Example #22
        public DecodedBuffer(int bufferIndex, VideoDevice device)
            : base(bufferIndex)
        {
            // Create small unmanaged buffer for our 2 planes
            planes = new PlanesArray(2);

            buffer.index    = bufferIndex;
            buffer.type     = eBufferType.VideoCaptureMPlane;
            buffer.field    = eField.Progressive;
            buffer.memory   = eMemory.MemoryMap;
            buffer.length   = 2;
            buffer.m.planes = planes;

            device.file.call(eControlCode.QUERYBUF, ref buffer);
            // Logger.logVerbose( "Decoded buffer: {0}", buffer );

            buffer.flags |= eBufferFlags.NoCacheInvalidate;
        }
Example #23
 private void button1_Click(object sender, EventArgs e)
 {
     _videoDevice = new VideoDevice(0, null, null, 0, string.Empty, string.Empty, 0, @"E:\杂项\宝宝聊天照片\VID_20150715_153556.mp4", null, null);
     _videoDevice.StartVideoSource(panel1.Handle);
     //var param = new ZWVideoParam();
     //param.CameraIp = "192.168.100.19";
     //param.LocalIp = null;
     //param.CameraPort = 8000;
     //param.FuncMask = 15;
     //param.AddressNum = 35;
     //param.UserName = "******";
     //param.Password = "******";
     //param.UrlOrAddressName = @"E:\杂项\宝宝聊天照片\VID_20150715_153556.mp4";
     //onRealImgReady = new RealTimeImageCallback(OnRealImgReadyProc);
     //gchOnRealImgReady = GCHandle.Alloc(onRealImgReady);
     //_zwVideo = ZWVideoAPI.InitialVideoSource(ref param, null, onRealImgReady, null,ZW_PIX_FMT.ZW_PIX_FMT_NONE,null, null, ZW_PIX_FMT.ZW_PIX_FMT_NONE, IntPtr.Zero);
     //ZWVideoAPI.StartVideoSource(_zwVideo, panel1.Handle);
     //ZW_CODEC_ID codec = ZWVideoAPI.GetVideoCodec(_zwVideo);
 }
Example #24
        public static SizeSupported query(VideoDevice device, ePixelFormat pixelFormat)
        {
            sFrameSizeEnum fse = device.frameSizeFirst(pixelFormat);

            switch (fse.type)
            {
            case eFrameSizeType.Discrete:
                break;

            case eFrameSizeType.Continuous:
            case eFrameSizeType.Stepwise:
                return(new ContinuousSizes(ref fse));

            default:
                throw new ApplicationException();
            }

            return(new DiscreteSizes(device.frameSizeEnum(pixelFormat)));
        }
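This mirrors V4L2's frame-size enumeration contract: a driver reports either a discrete list of sizes or a single stepwise/continuous range. A minimal usage sketch, assuming only the query method shown above plus a useful ToString override on SizeSupported:

        static void logSupportedSizes(VideoDevice device, ePixelFormat pixelFormat)
        {
            // Returns either DiscreteSizes or ContinuousSizes, both derived from SizeSupported.
            SizeSupported sizes = SizeSupported.query(device, pixelFormat);
            Logger.logInfo("Supported sizes for {0}: {1}", pixelFormat, sizes);
        }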
Example #25
        public EventSubscription(VideoDevice videoDevice)
        {
            device = videoDevice.file;

            eEventType[] eventTypes = new eEventType[]
            {
                eEventType.EndOfStream,
                eEventType.SourceChange,
            };

            foreach (var e in eventTypes)
            {
                // Subscribe for the events
                sEventSubscription evt = new sEventSubscription()
                {
                    type = e
                };
                device.call(eControlCode.SUBSCRIBE_EVENT, ref evt);
            }
        }
Example #26
        private void doWork()
        {
            int dataCount = Int32.Parse(AddDevNumtextBox.Text); // number of records to generate

            // Configure the progress bar.
            this.Invoke((EventHandler) delegate { this.AddDevprogressBar.Minimum = 0; });
            this.Invoke((EventHandler) delegate { this.AddDevprogressBar.Step = 1; });
            this.Invoke((EventHandler) delegate { this.AddDevprogressBar.Maximum = dataCount; });
            //AddDevprogressBar.Maximum = dataCount;

            string name       = "视频主机";                      // prefix for the asset name ("video host")
            string deviceCode = StringUtil.getDateTimeNum(); // prefix for the asset code and unique device ID

            string[] sArray       = AddIpAddresstextBox.Text.Split('.');
            int      Port         = Int32.Parse(AddPorttextBox.Text);
            int      IPCount      = Int32.Parse(sArray[3]);              // last octet of the IP address
            int      channelCount = Int32.Parse(AddChanneltextBox.Text); // number of channels

            for (int i = 0; i < dataCount;)
            {
                int left = dataCount - i;
                // Limit how many rows a single insert adds.
                int           count   = left > AddDeviceHelper.ONCE_INSERT ? AddDeviceHelper.ONCE_INSERT : left;
                List <Device> devices = new List <Device>();
                for (int j = 0; j < count; j++, i++)
                {
                    VideoDevice video = new VideoDevice();
                    video.setIp(string.Format("{0}.{1}.{2}.{3}", sArray[0], sArray[1], sArray[2], IPCount++));
                    video.setPort(Port.ToString());
                    video.setDeviceCode(deviceCode + i.ToString());
                    video.setDeviceName(name + i.ToString());
                    video.setChannelCount(channelCount);
                    devices.Add(video);

                    this.Invoke((EventHandler) delegate { this.AddDevprogressBar.Value += AddDevprogressBar.Step; });
                    //AddDevprogressBar.Value += AddDevprogressBar.Step; // advance the progress bar one step
                }
                AddDeviceHelper.multiExcute(LoginMysqlFm.getNewDbHelper(), devices); // perform the batch insert
            }
            MessageBox.Show("添加成功");
        }
Example #27
        private bool RestartVideoMonitor(bool enableVideo)
        {
            lock (this.syncLock)
            {
                if (this.VideoMonitor != null)
                {
                    this.VideoMonitor.Dispose();
                    this.VideoMonitor = null;
                }

                if (!enableVideo)
                {
                    return(false);
                }

                this.VideoMonitor = new VideoDevice(true);
                this.VideoMonitor.OnFrameAvailable += this.OnVideoDeviceFrameAvailable;
                this.VideoMonitor.StartRecording();

                return(true);
            }
        }
Example #28
        static void Main(string[] args)
        {
            CompareType <v4l2_capability>();
            CompareType <v4l2_fmtdesc>();
            CompareType <v4l2_requestbuffers>();
            CompareType <int>();
            CompareType <v4l2_control>();
            CompareType <v4l2_queryctrl>();
            CompareType <v4l2_cropcap>();
            CompareType <v4l2_crop>();
            CompareType <v4l2_format>();
            CompareType <v4l2_format_aligned>();
            CompareType <v4l2_frmsizeenum>();
            CompareType <v4l2_buffer>();

            VideoConnectionSettings settings = new VideoConnectionSettings(0)
            {
                CaptureSize  = (1920, 1080),
                PixelFormat  = PixelFormat.MJPEG,
                ExposureType = ExposureType.Auto
            };

            using VideoDevice device = VideoDevice.Create(settings);

            string path = Directory.GetCurrentDirectory();

            // Take photos
            device.Capture($"{path}/jpg_direct_output.jpg");

            // Change capture setting
            // device.Settings.PixelFormat = PixelFormat.YUV420;

            // Convert pixel format
            //  Color[] colors = VideoDevice.Yv12ToRgb(device.Capture(), settings.CaptureSize);
            //  Bitmap bitmap = VideoDevice.RgbToBitmap(settings.CaptureSize, colors);
            //  bitmap.Save($"{path}/yuyv_to_jpg.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
        }
    }
Example #29
        public VideoTextures exportTextures(iGlesRenderDevice gles, VideoDevice device, ref sPixelFormatMP pixelFormat)
        {
            sExportBuffer eb = new sExportBuffer()
            {
                type  = eBufferType.VideoCaptureMPlane,
                index = bufferIndex,
                plane = 0,
                flags = eFileFlags.O_RDONLY | eFileFlags.O_CLOEXEC
            };

            device.file.call(eControlCode.EXPBUF, ref eb);

            sPlanePixelFormat planeFormat = pixelFormat.getPlaneFormat(0);

            sDmaBuffer dma = new sDmaBuffer()
            {
                fd          = eb.fd,
                offset      = 0,
                stride      = planeFormat.bytesPerLine,
                imageSize   = planeFormat.bytesPerLine * pixelFormat.size.cy,
                sizePixels  = pixelFormat.size,
                bufferIndex = bufferIndex
            };
            ITexture luma = gles.importLumaTexture(ref dma);

            Logger.logVerbose("Exported luma texture: {0}", dma);

            // I asked V4L2 for 2 planes, however QUERYBUF returned a single plane, with the complete NV12 image in it.
            // No big deal, we have EGL_DMA_BUF_PLANE0_OFFSET_EXT for that; that's where the sDmaBuffer.offset field goes.
            dma.offset     = dma.imageSize;
            dma.sizePixels = chromaSize(pixelFormat.size);
            dma.imageSize  = dma.stride * dma.sizePixels.cy;
            ITexture chroma = gles.importChromaTexture(ref dma);

            Logger.logVerbose("Exported chroma texture: {0}", dma);

            return(new VideoTextures(luma, chroma));
        }
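The chromaSize helper is not shown on this page. For NV12 the interleaved CbCr plane has the same stride as luma but half the rows, so a plausible implementation looks like this sketch; the CSize type name is a guess, with the cx/cy fields inferred from the pixelFormat.size usage above:

        // Assumption: the size struct is a value type with public cx/cy fields.
        static CSize chromaSize(CSize luma)
        {
            luma.cy /= 2;   // NV12 chroma plane: full width (CbCr interleaved), half the rows
            return luma;
        }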
Example #30
        public DecoderThread(VideoDevice device, iVideoTrackReader reader, EncodedQueue encoded, DecodedQueue decoded, int shutdownEvent, iDecoderEvents eventsSink,
                             iAudioTrackReader audioReader, Audio.iDecoderQueues audioQueue)
        {
            this.device        = device;
            this.reader        = reader;
            this.encoded       = encoded;
            this.decoded       = decoded;
            this.shutdownEvent = shutdownEvent;
            this.audioReader   = audioReader;
            this.audioQueue    = audioQueue;

            // Enqueue decoded buffers
            decoded.enqueueAll();

            this.eventsSink = eventsSink;
            seekEventHandle = EventHandle.create();

            // Remaining work is done in the thread
            thread = new Thread(threadMain);
            thread.IsBackground = true;
            thread.Name         = "Media player thread";
            Logger.logInfo("Launching the video decoding thread");
            thread.Start();
        }
Example #31
        private void webcamSelector_SelectedIndexChanged(object sender, EventArgs e)
        {
            if (webcamSelector.SelectedItem == null) return;
            webcam = (VideoDevice)webcamSelector.SelectedItem;

            if (liveSession == null) return;
            SktParticipant.List me = liveSession.GetParticipants(SktConversation.PARTICIPANTFILTER.MYSELF);
            SktVideo mirror = me[0].GetVideo();
            mirror.SelectVideoSource(SktVideo.MEDIATYPE.MEDIA_VIDEO, webcam.name, webcam.path, false);
            this.Text = webcam.name;
        }