        public void SendTestPatternSample(object state)
        {
            try
            {
                if (SampleReady != null && !_isDisposing)
                {
                    lock (_vpxEncoder)
                    {
                        unsafe
                        {
                            byte[] sampleBuffer  = null;
                            byte[] encodedBuffer = null;

                            var stampedTestPattern = _testPattern.Clone() as System.Drawing.Image;
                            AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                            sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                            fixed(byte *p = sampleBuffer)
                            {
                                byte[] convertedFrame = null;
                                _colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, (int)_width, (int)_height, (int)_stride, VideoSubTypesEnum.I420, ref convertedFrame);

                                fixed(byte *q = convertedFrame)
                                {
                                    int encodeResult = _vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        throw new ApplicationException("VPX encode of video sample failed.");
                                    }
                                }
                            }

                            stampedTestPattern.Dispose();

                            SampleReady?.Invoke(encodedBuffer);
                        }
                    }
                }
            }
            catch (Exception excp)
            {
                logger.LogError("Exception SendTestPatternSample. " + excp);
            }
        }
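
A note on the helper: BitmapToRGB24 is called here but not shown in these excerpts. A minimal sketch of what such a helper typically looks like, assuming the standard System.Drawing LockBits pattern and a top-down bitmap (the actual implementation in the source may differ):

        private static byte[] BitmapToRGB24(System.Drawing.Bitmap bitmap)
        {
            // Lock the bitmap so the raw pixel bytes can be copied out.
            var rect = new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height);
            var bmpData = bitmap.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadOnly,
                                          System.Drawing.Imaging.PixelFormat.Format24bppRgb);

            try
            {
                // Stride * height covers the whole buffer including row padding. Note that
                // GDI+ stores 24bpp pixels in BGR order, which is why the converter above
                // is fed VideoSubTypesEnum.BGR24.
                int length = bmpData.Stride * bitmap.Height;
                byte[] buffer = new byte[length];
                System.Runtime.InteropServices.Marshal.Copy(bmpData.Scan0, buffer, 0, length);
                return buffer;
            }
            finally
            {
                bitmap.UnlockBits(bmpData);
            }
        }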
Example 2
        /// <summary>
        /// Used when the video source is originating as bitmaps produced locally. For example
        /// the audio scope generates bitmaps in response to an audio signal. The generated bitmaps
        /// then need to be encoded and transmitted to the remote party.
        /// </summary>
        /// <param name="bmp">The locally generated bitmap to transmit to the remote party.</param>
        private void LocalBitmapAvailable(Bitmap bmp)
        {
            if (_vpxEncoder == null)
            {
                _extBmpWidth  = bmp.Width;
                _extBmpHeight = bmp.Height;
                _extBmpStride = (int)VideoUtils.GetStride(bmp);

                _vpxEncoder = new VpxEncoder();
                int res = _vpxEncoder.InitEncoder((uint)bmp.Width, (uint)bmp.Height, (uint)_extBmpStride);
                if (res != 0)
                {
                    throw new ApplicationException("VPX encoder initialisation failed.");
                }
                _imgEncConverter = new ImageConvert();
            }

            var sampleBuffer = VideoUtils.BitmapToRGB24(bmp);

            unsafe
            {
                fixed(byte *p = sampleBuffer)
                {
                    byte[] convertedFrame = null;
                    _imgEncConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, _extBmpWidth, _extBmpHeight, _extBmpStride, VideoSubTypesEnum.I420, ref convertedFrame);

                    fixed(byte *q = convertedFrame)
                    {
                        byte[] encodedBuffer = null;
                        int    encodeResult  = _vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                        if (encodeResult != 0)
                        {
                            throw new ApplicationException("VPX encode of video sample failed.");
                        }

                        base.SendVp8Frame(_rtpVideoTimestampPeriod, (int)SDPMediaFormatsEnum.VP8, encodedBuffer);
                    }
                }
            }

            bmp.Dispose();
        }
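
VideoUtils.GetStride is likewise referenced without being shown. It can be approximated by briefly locking the bitmap and reading BitmapData.Stride, which is the same probe the later examples perform inline; a sketch under that assumption:

        private static int GetStride(System.Drawing.Bitmap bitmap)
        {
            var rect = new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height);
            // Lock only long enough to read the stride; no pixel data is copied.
            var bmpData = bitmap.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadOnly, bitmap.PixelFormat);
            int stride = bmpData.Stride;
            bitmap.UnlockBits(bmpData);
            return stride;
        }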
Example 3
        private async Task ExpireConnections()
        {
            try
            {
                logger.LogDebug("Starting expire connections thread.");

                byte[] encodedBuffer = null;

                if (File.Exists(_expiredImagePath))
                {
                    Bitmap expiredImage = new Bitmap(_expiredImagePath);

                    // Get the stride.
                    Rectangle rect = new Rectangle(0, 0, expiredImage.Width, expiredImage.Height);
                    System.Drawing.Imaging.BitmapData bmpData =
                        expiredImage.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite,
                                              expiredImage.PixelFormat);

                    // Read the stride from the locked bitmap data.
                    int stride = bmpData.Stride;

                    expiredImage.UnlockBits(bmpData);

                    // Initialise the video codec and color converter.
                    SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
                    vpxEncoder.InitEncoder((uint)expiredImage.Width, (uint)expiredImage.Height, (uint)stride);

                    SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

                    byte[] sampleBuffer = BitmapToRGB24(expiredImage);

                    unsafe
                    {
                        fixed(byte *p = sampleBuffer)
                        {
                            byte[] convertedFrame = null;
                            colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, expiredImage.Width, expiredImage.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                            fixed(byte *q = convertedFrame)
                            {
                                int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                if (encodeResult != 0)
                                {
                                    logger.LogWarning("VPX encode of expired image failed.");
                                }
                            }
                        }
                    }

                    expiredImage.Dispose();
                    vpxEncoder.Dispose();
                }

                while (!_exit)
                {
                    foreach (var conn in _webRtcConnections.Where(x => DateTime.Now.Subtract(x.Value.CreatedAt).TotalSeconds > _connectionTimeLimitSeconds).Select(x => x.Value))
                    {
                        OnMp4MediaSampleReady    -= conn.SendMedia;
                        OnTestPatternSampleReady -= conn.SendMedia;

                        if (conn.WebRtcSession.IsDtlsNegotiationComplete && !conn.WebRtcSession.IsClosed && encodedBuffer != null)
                        {
                            // Send the expired frame 3 times as a crude attempt to cope with packet loss.
                            conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                            await Task.Delay(1);

                            conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                            await Task.Delay(1);

                            conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                        }

                        conn.WebRtcSession.Close("expired");
                    }

                    await Task.Delay(1000);
                }
            }
            catch (Exception excp)
            {
                logger.LogError("Exception ExpireConnections. " + excp);
            }
        }
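
VP8_TIMESTAMP_SPACING, used when advancing LastVideoTimeStamp above, is not defined in these excerpts. RTP video runs on a 90 kHz clock, so at a nominal 30 frames per second the per-frame increment is 90000 / 30 = 3000 ticks; a plausible definition (the frame rate, and hence the constant, is an assumption):

        // 90,000 Hz RTP video clock / 30 fps = 3,000 timestamp ticks per frame.
        private const uint VP8_TIMESTAMP_SPACING = 3000;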
Example 4
        private async void SampleTestPattern()
        {
            try
            {
                logger.LogDebug("Starting test pattern sampling thread.");

                _isTestPatternSampling = true;

                Bitmap testPattern = new Bitmap(_testPatternImagePath);

                // Get the stride.
                Rectangle rect = new Rectangle(0, 0, testPattern.Width, testPattern.Height);
                System.Drawing.Imaging.BitmapData bmpData =
                    testPattern.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite,
                                         testPattern.PixelFormat);

                // Read the stride from the locked bitmap data.
                int stride = bmpData.Stride;

                testPattern.UnlockBits(bmpData);

                // Initialise the video codec and color converter.
                SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
                vpxEncoder.InitEncoder((uint)testPattern.Width, (uint)testPattern.Height, (uint)stride);

                SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

                byte[] sampleBuffer  = null;
                byte[] encodedBuffer = null;
                int    sampleCount   = 0;
                uint   rtpTimestamp  = 0;

                while (!_exit)
                {
                    if (OnTestPatternSampleReady == null)
                    {
                        logger.LogDebug("No active clients, test pattern sampling paused.");
                        break;
                    }
                    else
                    {
                        var stampedTestPattern = testPattern.Clone() as System.Drawing.Image;
                        AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                        sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                        unsafe
                        {
                            fixed(byte *p = sampleBuffer)
                            {
                                byte[] convertedFrame = null;
                                colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, testPattern.Width, testPattern.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                                fixed(byte *q = convertedFrame)
                                {
                                    int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        logger.LogWarning("VPX encode of video sample failed.");
                                        // Dispose the cloned bitmap before skipping this frame.
                                        stampedTestPattern.Dispose();
                                        continue;
                                    }
                                }
                            }

                            stampedTestPattern.Dispose();

                            OnTestPatternSampleReady?.Invoke(SDPMediaTypesEnum.video, rtpTimestamp, encodedBuffer);

                            sampleCount++;
                            rtpTimestamp += VP8_TIMESTAMP_SPACING;
                        }

                        await Task.Delay(30);
                    }
                }

                testPattern.Dispose();
                vpxEncoder.Dispose();
            }
            catch (Exception excp)
            {
                logger.LogError("Exception SampleTestPattern. " + excp);
            }
            finally
            {
                logger.LogDebug("test pattern sampling thread stopped.");
                _isTestPatternSampling = false;
            }
        }
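
AddTimeStampAndLocation stamps the cloned test pattern before it is encoded, but its body is not included. A hypothetical sketch using System.Drawing text rendering (the font, positions and layout are assumptions, not the source implementation):

        private static void AddTimeStampAndLocation(System.Drawing.Image image, string timeStamp, string location)
        {
            using (var g = System.Drawing.Graphics.FromImage(image))
            using (var font = new System.Drawing.Font("Arial", 14))
            {
                // Location label at the top of the frame, timestamp at the bottom.
                g.DrawString(location, font, System.Drawing.Brushes.White, new System.Drawing.PointF(5, 5));
                g.DrawString(timeStamp, font, System.Drawing.Brushes.White, new System.Drawing.PointF(5, image.Height - 25));
            }
        }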
Example 5
        /// <summary>
        /// Starts the Media Foundation sampling.
        /// </summary>
        unsafe private void SampleMp4Media()
        {
            try
            {
                logger.LogDebug("Starting mp4 media sampling thread.");

                _isMp4Sampling = true;

                VpxEncoder vpxEncoder     = null;
                uint       vp8Timestamp   = 0;
                uint       mulawTimestamp = 0;

                while (!_exit)
                {
                    if (OnMp4MediaSampleReady == null)
                    {
                        logger.LogDebug("No active clients, media sampling paused.");
                        break;
                    }
                    else
                    {
                        byte[] sampleBuffer = null;
                        var    sample       = _mediaSource.GetSample(ref sampleBuffer);

                        if (sample != null && sample.HasVideoSample)
                        {
                            if (vpxEncoder == null ||
                                (vpxEncoder.GetWidth() != sample.Width || vpxEncoder.GetHeight() != sample.Height || vpxEncoder.GetStride() != sample.Stride))
                            {
                                if (vpxEncoder != null)
                                {
                                    vpxEncoder.Dispose();
                                }

                                vpxEncoder = InitialiseVpxEncoder((uint)sample.Width, (uint)sample.Height, (uint)sample.Stride);
                            }

                            byte[] vpxEncodedBuffer = null;

                            unsafe
                            {
                                fixed(byte *p = sampleBuffer)
                                {
                                    int encodeResult = vpxEncoder.Encode(p, sampleBuffer.Length, 1, ref vpxEncodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        logger.LogWarning("VPX encode of video sample failed.");
                                    }
                                }
                            }

                            OnMp4MediaSampleReady?.Invoke(SDPMediaTypesEnum.video, vp8Timestamp, vpxEncodedBuffer);

                            //Console.WriteLine($"Video SeqNum {videoSeqNum}, timestamp {videoTimestamp}, buffer length {vpxEncodedBuffer.Length}, frame count {sampleProps.FrameCount}.");

                            vp8Timestamp += VP8_TIMESTAMP_SPACING;
                        }
                        else if (sample != null && sample.HasAudioSample)
                        {
                            uint sampleDuration = (uint)(sampleBuffer.Length / 2);

                            byte[] mulawSample = new byte[sampleDuration];
                            int    sampleIndex = 0;

                            for (int index = 0; index < sampleBuffer.Length; index += 2)
                            {
                                var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                                mulawSample[sampleIndex++] = ulawByte;
                            }

                            OnMp4MediaSampleReady?.Invoke(SDPMediaTypesEnum.audio, mulawTimestamp, mulawSample);

                            //Console.WriteLine($"Audio SeqNum {audioSeqNum}, timestamp {audioTimestamp}, buffer length {mulawSample.Length}.");

                            mulawTimestamp += sampleDuration;
                        }
                    }
                }

                // The encoder is only created once a video sample arrives, so it may be null here.
                vpxEncoder?.Dispose();
            }
            catch (Exception excp)
            {
                logger.LogWarning("Exception SampleMp4Media. " + excp.Message);
            }
            finally
            {
                logger.LogDebug("mp4 sampling thread stopped.");
                _isMp4Sampling = false;
            }
        }
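
InitialiseVpxEncoder, called when the sample resolution changes, is not shown here. It presumably wraps the same construct-and-init pattern seen in LocalBitmapAvailable above; a sketch on that assumption:

        private VpxEncoder InitialiseVpxEncoder(uint width, uint height, uint stride)
        {
            var vpxEncoder = new VpxEncoder();
            int res = vpxEncoder.InitEncoder(width, height, stride);

            if (res != 0)
            {
                // Mirror the failure handling used by the other examples.
                throw new ApplicationException("VPX encoder initialisation failed.");
            }

            return vpxEncoder;
        }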
Example 6
        unsafe private static void OnProcessSampleEvent(int mediaTypeID, uint dwSampleFlags, long llSampleTime, long llSampleDuration, uint dwSampleSize, ref byte[] sampleBuffer)
        {
            try
            {
                if (OnMediaSampleReady == null)
                {
                    if (!_mfSampleGrabber.Paused)
                    {
                        _mfSampleGrabber.Pause();
                        logger.LogDebug("No active clients, media sampling paused.");
                    }
                }
                else
                {
                    if (mediaTypeID == 0)
                    {
                        if (!_vpxEncoderReady)
                        {
                            logger.LogWarning("Video sample cannot be processed as the VPX encoder is not in a ready state.");
                        }
                        else
                        {
                            byte[] vpxEncodedBuffer = null;

                            unsafe
                            {
                                fixed(byte *p = sampleBuffer)
                                {
                                    int encodeResult = _vpxEncoder.Encode(p, (int)dwSampleSize, 1, ref vpxEncodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        logger.LogWarning("VPX encode of video sample failed.");
                                    }
                                }
                            }

                            OnMediaSampleReady?.Invoke(SDPMediaTypesEnum.video, _vp8Timestamp, vpxEncodedBuffer);

                            //Console.WriteLine($"Video SeqNum {videoSeqNum}, timestamp {videoTimestamp}, buffer length {vpxEncodedBuffer.Length}, frame count {sampleProps.FrameCount}.");

                            _vp8Timestamp += VP8_TIMESTAMP_SPACING;
                        }
                    }
                    else
                    {
                        uint sampleDuration = (uint)(sampleBuffer.Length / 2);

                        byte[] mulawSample = new byte[sampleDuration];
                        int    sampleIndex = 0;

                        // ToDo: Find a way to wire up the Media foundation WAVE_FORMAT_MULAW codec so the encoding below is not necessary.
                        for (int index = 0; index < sampleBuffer.Length; index += 2)
                        {
                            var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                            mulawSample[sampleIndex++] = ulawByte;
                        }

                        OnMediaSampleReady?.Invoke(SDPMediaTypesEnum.audio, _mulawTimestamp, mulawSample);

                        //Console.WriteLine($"Audio SeqNum {audioSeqNum}, timestamp {audioTimestamp}, buffer length {mulawSample.Length}.");

                        _mulawTimestamp += sampleDuration;
                    }
                }
            }
            catch (Exception excp)
            {
                logger.LogWarning("Exception MfSampleGrabber_OnProcessSampleEvent. " + excp.Message);
            }
        }
Example 7
        private static void SendTestPattern()
        {
            try
            {
                unsafe
                {
                    Bitmap testPattern = new Bitmap(TEST_PATTERN_IMAGE_PATH);

                    // Get the stride.
                    Rectangle rect = new Rectangle(0, 0, testPattern.Width, testPattern.Height);
                    System.Drawing.Imaging.BitmapData bmpData =
                        testPattern.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite,
                                             testPattern.PixelFormat);

                    // Read the stride from the locked bitmap data.
                    int stride = bmpData.Stride;

                    testPattern.UnlockBits(bmpData);

                    // Initialise the video codec and color converter.
                    SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
                    vpxEncoder.InitEncoder((uint)testPattern.Width, (uint)testPattern.Height, (uint)stride);

                    SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

                    byte[] sampleBuffer  = null;
                    byte[] encodedBuffer = null;
                    int    sampleCount   = 0;
                    uint   rtpTimestamp  = 0;

                    while (!_exit)
                    {
                        if (OnTestPatternSampleReady != null)
                        {
                            var stampedTestPattern = testPattern.Clone() as System.Drawing.Image;
                            AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                            sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                            fixed(byte *p = sampleBuffer)
                            {
                                byte[] convertedFrame = null;
                                colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, testPattern.Width, testPattern.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                                fixed(byte *q = convertedFrame)
                                {
                                    int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        logger.LogWarning("VPX encode of video sample failed.");
                                        // Dispose the clone and wait before retrying to avoid a busy loop.
                                        stampedTestPattern.Dispose();
                                        Thread.Sleep(30);
                                        continue;
                                    }
                                }
                            }

                            stampedTestPattern.Dispose();
                            stampedTestPattern = null;

                            OnTestPatternSampleReady(rtpTimestamp, encodedBuffer);

                            encodedBuffer = null;

                            sampleCount++;
                            rtpTimestamp += VP8_TIMESTAMP_SPACING;
                        }

                        Thread.Sleep(30);
                    }
                }
            }
            catch (Exception excp)
            {
                logger.LogError("Exception SendTestPattern. " + excp);
            }
        }
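
SendTestPattern blocks in a Thread.Sleep loop, so it is meant to run on its own thread. A minimal usage sketch (the hosting code is an assumption):

        // Run the test pattern generator on a dedicated background thread.
        var testPatternThread = new Thread(SendTestPattern) { IsBackground = true };
        testPatternThread.Start();

        // Setting _exit = true later causes the loop, and the thread, to finish.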
Example 8
        /// <summary>
        /// Starts the Media Foundation sampling.
        /// </summary>
        unsafe private static void StartMedia()
        {
            try
            {
                logger.LogDebug("Starting media sampling thread.");

                _isSampling = true;

                while (true)
                {
                    if (OnMediaSampleReady == null)
                    {
                        logger.LogDebug("No active clients, media sampling paused.");
                        break;
                    }
                    else
                    {
                        byte[] sampleBuffer = null;
                        var    sample       = _mediaSource.GetSample(ref sampleBuffer);

                        if (sample != null && sample.HasVideoSample)
                        {
                            if (_vpxEncoder == null ||
                                (_vpxEncoder.GetWidth() != sample.Width || _vpxEncoder.GetHeight() != sample.Height || _vpxEncoder.GetStride() != sample.Stride))
                            {
                                OnVideoResolutionChanged((uint)sample.Width, (uint)sample.Height, (uint)sample.Stride);
                            }

                            byte[] vpxEncodedBuffer = null;

                            unsafe
                            {
                                fixed(byte *p = sampleBuffer)
                                {
                                    int encodeResult = _vpxEncoder.Encode(p, sampleBuffer.Length, 1, ref vpxEncodedBuffer);

                                    if (encodeResult != 0)
                                    {
                                        logger.LogWarning("VPX encode of video sample failed.");
                                    }
                                }
                            }

                            OnMediaSampleReady?.Invoke(SDPMediaTypesEnum.video, _vp8Timestamp, vpxEncodedBuffer);

                            //Console.WriteLine($"Video SeqNum {videoSeqNum}, timestamp {videoTimestamp}, buffer length {vpxEncodedBuffer.Length}, frame count {sampleProps.FrameCount}.");

                            _vp8Timestamp += VP8_TIMESTAMP_SPACING;
                        }
                        else if (sample != null && sample.HasAudioSample)
                        {
                            uint sampleDuration = (uint)(sampleBuffer.Length / 2);

                            byte[] mulawSample = new byte[sampleDuration];
                            int    sampleIndex = 0;

                            // ToDo: Find a way to wire up the Media foundation WAVE_FORMAT_MULAW codec so the encoding below is not necessary.
                            for (int index = 0; index < sampleBuffer.Length; index += 2)
                            {
                                var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                                mulawSample[sampleIndex++] = ulawByte;
                            }

                            OnMediaSampleReady?.Invoke(SDPMediaTypesEnum.audio, _mulawTimestamp, mulawSample);

                            //Console.WriteLine($"Audio SeqNum {audioSeqNum}, timestamp {audioTimestamp}, buffer length {mulawSample.Length}.");

                            _mulawTimestamp += sampleDuration;
                        }
                    }
                }
            }
            catch (Exception excp)
            {
                logger.LogWarning("Exception OnProcessSampleEvent. " + excp.Message);
            }
            finally
            {
                logger.LogDebug("Media sampling thread stopped.");

                _isSampling = false;
            }
        }
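
The PCM-to-μ-law loop above appears verbatim in three of these examples. It halves the payload because each 16-bit PCM sample becomes a single μ-law byte; it could be factored into a shared helper, sketched here (the helper name is an assumption; MuLawEncoder is NAudio's NAudio.Codecs.MuLawEncoder):

        private static byte[] Pcm16ToMuLaw(byte[] pcmBuffer)
        {
            // Two bytes of 16-bit PCM in, one mu-law byte out.
            byte[] mulawSample = new byte[pcmBuffer.Length / 2];
            int sampleIndex = 0;

            for (int index = 0; index < pcmBuffer.Length; index += 2)
            {
                mulawSample[sampleIndex++] = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(pcmBuffer, index));
            }

            return mulawSample;
        }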
Example 9
        private void SampleWebCam(MFVideoSampler videoSampler, VideoMode videoMode, CancellationTokenSource cts)
        {
            try
            {
                Thread.CurrentThread.Name = "vidsampler_" + videoMode.DeviceIndex + "_" + videoMode.Width + "_" + videoMode.Height;

                var vpxEncoder = new VpxEncoder();
                // TODO: The last parameter passed to the vpx encoder init needs to be the frame stride not the width.
                vpxEncoder.InitEncoder(Convert.ToUInt32(videoMode.Width), Convert.ToUInt32(videoMode.Height), Convert.ToUInt32(videoMode.Width));

                // var videoSampler = new MFVideoSampler();
                //videoSampler.Init(videoMode.DeviceIndex, videoMode.Width, videoMode.Height);
                // videoSampler.InitFromFile();

                while (!_stop && !cts.IsCancellationRequested)
                {
                    byte[] videoSample = null;
                    var    sample      = videoSampler.GetSample(ref videoSample);

                    //if (result == NAudio.MediaFoundation.MediaFoundationErrors.MF_E_HW_MFT_FAILED_START_STREAMING)
                    //{
                    //    logger.Warn("A sample could not be acquired from the local webcam. Check that it is not already in use.");
                    //    OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use.");
                    //    break;
                    //}
                    //else if (result != 0)
                    //{
                    //    logger.Warn("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
                    //    OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
                    //    break;
                    //}
                    //else
                    if (sample?.HasVideoSample == true)
                    {
                        // This event sends the raw bitmap to the WPF UI.
                        OnLocalVideoSampleReady?.Invoke(videoSample, videoSampler.Width, videoSampler.Height);

                        // This event encodes the sample and forwards it to the RTP manager for network transmission.
                        if (OnLocalVideoEncodedSampleReady != null)
                        {
                            IntPtr rawSamplePtr = Marshal.AllocHGlobal(videoSample.Length);
                            Marshal.Copy(videoSample, 0, rawSamplePtr, videoSample.Length);

                            byte[] yuv = null;

                            unsafe
                            {
                                // TODO: using width instead of stride.
                                _imageConverter.ConvertRGBtoYUV((byte *)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), Convert.ToInt32(videoMode.Width), VideoSubTypesEnum.I420, ref yuv);
                                //_imageConverter.ConvertToI420((byte*)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), ref yuv);
                            }

                            Marshal.FreeHGlobal(rawSamplePtr);

                            IntPtr yuvPtr = Marshal.AllocHGlobal(yuv.Length);
                            Marshal.Copy(yuv, 0, yuvPtr, yuv.Length);

                            byte[] encodedBuffer = null;

                            unsafe
                            {
                                int encodeResult = vpxEncoder.Encode((byte *)yuvPtr, yuv.Length, _encodingSample++, ref encodedBuffer);
                                if (encodeResult != 0)
                                {
                                    logger.LogWarning("VPX encode of webcam sample failed.");
                                }
                            }

                            Marshal.FreeHGlobal(yuvPtr);

                            OnLocalVideoEncodedSampleReady(encodedBuffer);
                        }
                    }
                }

                videoSampler.Stop();
                vpxEncoder.Dispose();
            }
            catch (Exception excp)
            {
                logger.LogError($"Exception SampleWebCam. {excp.Message}");
            }
        }
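
SampleWebCam takes a CancellationTokenSource so callers can stop the capture loop. A minimal usage sketch (the MFVideoSampler initialisation follows the commented-out lines above and is an assumption):

        var cts = new CancellationTokenSource();
        var videoSampler = new MFVideoSampler();
        videoSampler.Init(videoMode.DeviceIndex, videoMode.Width, videoMode.Height);

        // Run the capture loop on its own thread; cancel via cts (or set _stop) to end it.
        new Thread(() => SampleWebCam(videoSampler, videoMode, cts)).Start();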