Example #1
        private void SendSingleFrame(IEnumerable<byte> bytes, bool isCRCIncluded)
        {
            if (forwardCRCFromApplication.Value && !isCRCIncluded)
            {
                this.Log(LogLevel.Error, "CRC needs to be provided by the application but is missing");
                return;
            }

            var bytesArray = bytes.ToArray();
            var newLength  = isCRCIncluded ? 64 : 60;

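            // pad short frames up to the Ethernet minimum length (60 bytes, or 64 when the 4-byte CRC is already included)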
            if (bytesArray.Length < newLength)
            {
                Array.Resize(ref bytesArray, newLength);
            }

            var addCrc = !isCRCIncluded && !forwardCRCFromApplication.Value;

            if (!Misc.TryCreateFrameOrLogWarning(this, bytesArray, out var frame, addCrc))
            {
                return;
            }

            this.Log(LogLevel.Debug, "Sending packet, length {0}", frame.Bytes.Length);
            FrameReady?.Invoke(frame);
        }
Example #2
        private void TransmitLoop(CancellationToken token)
        {
            while (true)
            {
                byte[] buffer = null;
                if (stream == null)
                {
                    return;
                }
                try
                {
                    buffer = LibC.ReadDataWithTimeout(stream.Handle, MTU, 1000, () => token.IsCancellationRequested);
                }
                catch (ArgumentException)
                {
                    // stream was closed
                    return;
                }
                catch (ObjectDisposedException)
                {
                    return;
                }

                if (token.IsCancellationRequested)
                {
                    return;
                }
                if (buffer == null || buffer.Length == 0)
                {
                    continue;
                }
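                // wrap the bytes read from the host-side stream in an Ethernet frame with an appended CRC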
                var ethernetFrame = EthernetFrame.CreateEthernetFrameWithCRC(buffer);
                FrameReady?.Invoke(ethernetFrame);
            }
        }
Example #3
        public void ReceiveFrame(EthernetFrame frame)
        {
            var ethernetPacket = frame.UnderlyingPacket;

            this.Log(LogLevel.Noisy, "Ethernet packet details: {0}", ethernetPacket);
#if DEBUG_PACKETS
            this.Log(LogLevel.Noisy, Misc.PrettyPrintCollectionHex(frame.Bytes));
#endif

            switch (ethernetPacket.Type)
            {
            case EthernetPacketType.Arp:
                if (TryHandleArp((ARPPacket)ethernetPacket.PayloadPacket, out var arpResponse))
                {
                    var ethernetResponse = new EthernetPacket((PhysicalAddress)MAC, ethernetPacket.SourceHwAddress, EthernetPacketType.None);
                    ethernetResponse.PayloadPacket = arpResponse;

                    this.Log(LogLevel.Noisy, "Sending response: {0}", ethernetResponse);
                    EthernetFrame.TryCreateEthernetFrame(ethernetResponse.Bytes, true, out var response);
                    FrameReady?.Invoke(response);
                }
                break;

            case EthernetPacketType.IpV4:
                var ipv4Packet = (IPv4Packet)ethernetPacket.PayloadPacket;
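                // remember which MAC address this source IP came from, so later replies can be addressed directly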
                arpTable[ipv4Packet.SourceAddress] = ethernetPacket.SourceHwAddress;
                HandleIPv4(ipv4Packet);
                break;

            default:
                this.Log(LogLevel.Warning, "Unsupported packet type: {0}", ethernetPacket.Type);
                break;
            }
        }
Example #4
    //DateTime dtLastFrame = DateTime.Now;
    //int counter = 0;
    private void ProcessFrame(byte[] frame)
    {
        CurrentFrame = frame;

        _context.Post(delegate
        {
            //added try/catch because sometimes jpeg images are corrupted
            try
            {
                using (var stream = new MemoryStream(frame))
                {
                    var old = Bitmap;
                    Bitmap  = new Bitmap(stream);

                    FrameReady?.Invoke(this, new FrameReadyEventArgs
                    {
                        FrameBuffer = CurrentFrame,
                        Bitmap      = Bitmap,
                    });

                    if (old != null)
                    {
                        old.Dispose();
                    }
                }
            }
            catch
            {
            }
        }, null);
    }
Example #5
        //DateTime dtLastFrame = DateTime.Now;
        //int counter = 0;

        private void ProcessFrame(byte[] frame)
        {
            CurrentFrame = frame;

            // 2018-03-07, Alex
            // Check if code is executed in application context (wpf)
            _context.Post(delegate
            {
                //added try/catch because sometimes jpeg images are corrupted
                try
                {
                    FrameReady?.Invoke(this, new FrameReadyEventArgs
                    {
                        FrameBuffer = CurrentFrame,
                    });
                }
                catch (Exception ex)
                {
                    //  log.Error("ProcessFrame failed", ex);
                }
            }, null);

            // 2018-03-07, Alex
            // Check if code is executed without any context (owin self-host)
        }
Example #6
 private void ProcessFrame(object sender, FrameReadyEventArgs e)
 {
     if (!Paused)
     {
         FrameReady?.Invoke(this, e);
     }
 }
Example #7
        private void SendSingleFrame(IEnumerable<byte> bytes, bool isCRCIncluded)
        {
            if (forwardCRCFromApplication.Value && !isCRCIncluded)
            {
                this.Log(LogLevel.Error, "CRC needs to be provided by the application but is missing");
                return;
            }

            var bytesArray = bytes.ToArray();
            var newLength  = isCRCIncluded ? 64 : 60;

            if (bytesArray.Length < newLength)
            {
                Array.Resize(ref bytesArray, newLength);
            }

            var addCrc = !isCRCIncluded && !forwardCRCFromApplication.Value;

            if (!Misc.TryCreateFrameOrLogWarning(this, bytesArray, out var frame, addCrc))
            {
                return;
            }

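            // fill in transport-layer and/or IPv4 header checksums when the corresponding options are enabled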
            if (insertProtocolChecksum.Value)
            {
                frame.FillWithChecksums(new EtherType[] {}, new [] { IPProtocolType.ICMP, IPProtocolType.ICMPV6, IPProtocolType.TCP, IPProtocolType.UDP });
            }
            if (insertIPHeaderChecksum.Value)
            {
                frame.FillWithChecksums(new [] { EtherType.IpV4 }, new IPProtocolType[] {});
            }

            this.Log(LogLevel.Debug, "Sending packet, length {0}", frame.Bytes.Length);
            FrameReady?.Invoke(frame);
        }
Example #8
        private void OnFrameGrabbed(object sender, EventArgs e)
        {
            // Retrieve the frame from the camera
            _capture.Retrieve(_frame);

            using (UMat ugray = new UMat())
            {
                // Convert to grayscale
                CvInvoke.CvtColor(_frame, ugray, ColorConversion.Bgr2Gray);

                // Normalizes brightness and increases contrast of the image
                CvInvoke.EqualizeHist(ugray, ugray);

                // Detect the faces from the gray scale image and store the locations as rectangles.
                // The first dimension is the channel,
                // The second dimension is the index of the rectangle in the specific channel
                var faceBounds = _faceDetector.DetectMultiScale(
                    ugray,
                    1.1,
                    10,
                    new Size(20, 20)
                    );

                _logger.LogTrace("Got frame. Found {0} faces.", faceBounds.Length);

                // Publish a completed frame
                FrameReady?.Invoke(this, new VideoFrame(
                                       DateTime.Now,
                                       faceBounds.Select(bounds => new Face(bounds)).ToList(),
                                       _frame.Bitmap
                                       ));
            }
        }
Example #9
 public void CaptureFrameStart()
 {
     if (_capture != null)
     {
         if (_captureInProgress)
         {
             _captureInProgress = false; //Flag the state of the camera
         }
         else
         {
             RetrieveCaptureInformation(); //Get Camera information
             _capture.SetCaptureProperty(CapProp.Monochrome, 1);
             Task.Run(() =>
             {
                 while (KeepGoing)
                 {
                     if (!Paused)
                     {
                         Thread.Sleep(100);
                         RetrieveImage();
                         FrameReady?.Invoke(this, new FrameReadyEventArgs(CurrentImage));
                     }
                 }
             });
             _captureInProgress = true; //Flag the state of the camera
         }
     }
     else
     {
         SetupCapture();
         CaptureFrameStart(); //recursively retry as a fallback
     }
 }
Example #10
        private async Task ReadPacketAsync()
        {
            var buffer = new byte[Mtu];

            while (!cts.IsCancellationRequested)
            {
                try
                {
                    if (await deviceFile.ReadAsync(buffer, 0, buffer.Length, cts.Token) > 0)
                    {
                        if (!Misc.TryCreateFrameOrLogWarning(this, buffer, out var frame, addCrc: true))
                        {
                            return;
                        }
                        FrameReady?.Invoke(frame);
                        this.NoisyLog("Frame of length {0} received from host.", frame.Bytes.Length);
                    }
                }
                catch (IOException)
                {
                    if (networkInterface.OperationalStatus != OperationalStatus.Up)
                    {
                        this.NoisyLog("I/O exception while interface is not up, waiting {0}s.", Misc.NormalizeDecimal(GracePeriod.TotalSeconds));
                        // probably the interface is not opened yet
                        await Task.Delay(GracePeriod);
                    }
                    else
                    {
                        throw;
                    }
                }
            }
        }
Example #11
        private VideoFrame QueueFrame(MemoryStream stream, long frameIndex)
        {
            var frame = new VideoFrame(stream.ToArray(), frameIndex, TimeSpan.FromSeconds(frameIndex / _frameRate), _frameDuration);

            _frames.Push(frame);
            FrameReady?.Invoke(this, new FrameReadyArgs(frame));
            return frame;
        }
Example #12
 private void ProcessFrame(byte[] frame)
 {
     CurrentFrame = frame;
     _context.Post(delegate
     {
         // tell whoever's listening that we have a frame to draw
         FrameReady?.Invoke(this, new FrameReadyEventArgs(CurrentFrame));
     }, null);
 }
Example #13
        private void transmitFrame()
        {
            var td = new transmitDescriptor(machine.SystemBus);

            td.Fetch(transmitDescriptorBase | transmitDescriptorOffset);

            if (!td.Enable)
            {
                return; //if the descriptor is disabled there is nothing to send (just return)
            }

            var packetBytes = machine.SystemBus.ReadBytes(td.PacketAddress, (int)td.Length);

            if (!Misc.TryCreateFrameOrLogWarning(this, packetBytes, out var packet, addCrc: true))
            {
                return;
            }

            this.Log(LogLevel.Info, "Sending packet length {0}", packet.Bytes.Length);
            this.Log(LogLevel.Info, "Packet address = 0x{0:X}", td.PacketAddress);
            FrameReady?.Invoke(packet);

            registers.Status |= 1u << 3;

            if (td.Wrap)
            {
                transmitDescriptorOffset = 0;
            }
            else
            {
                if (transmitDescriptorOffset != 0x3f8)
                {
                    transmitDescriptorOffset += 8;
                }
                else
                {
                    transmitDescriptorOffset = 0;
                }
            }

            if (td.InterruptEnable && ((registers.Control & (1u << 2)) != 0))
            {
                //if interrupts enabled
                registers.Status |= 1u << 3; //transmitter interrupt bit
                this.IRQ.Set();
                this.IRQ.Unset();
            }

            td.Enable            = false;
            td.Wrap              = false;
            td.InterruptEnable   = false;
            td.Length            = 0;
            td.UnderrunError     = false;
            td.AttemptLimitError = false;
            td.WriteBack();
        }
Example #14
 public void HandleFrame(SKImage frame)
 {
     if (!paused)
     {
         FrameReady?.Invoke(this, new FrameReadyEventArgs()
         {
             FrameBuffer = null, Image = frame
         });
     }
 }
Example #15
        private void Finished()
        {
            if (mode == Mode.WaitingForPacket)
            {
                this.DebugLog("Packet received, LEN {7} {0:x} {1:x} {2:x} {3:x} (...) {4:x} {5:x} {6:x}", request[0], request[1], request[2], request[3], request[currentLength - 5], request[currentLength - 4], request[currentLength - 3], currentLength);
                var frame = new byte[currentLength];
                Array.Copy(request, 0, frame, 0, currentLength);
                //TODO: CRC handling
                if (!Misc.TryCreateFrameOrLogWarning(this, frame, out var ethernetFrame, addCrc: false))
                {
                    return;
                }
                FrameReady?.Invoke(ethernetFrame);
                mode              = Mode.Standard;
                currentLength     = 4;
                transmissionEnded = true;
                if (interruptAfterTransmision)
                {
                    SignalInterrupt();
                }
            }
            if (mode == Mode.SendingPacket)
            {
                mode          = Mode.Standard;
                currentLength = 4;
                lock (packetQueue)
                {
                    if (packetQueue.Count > 0)
                    {
                        SignalInterrupt();
                    }
                }
            }
            if (mode == Mode.Special)
            {
                currentLength = 4;
                mode          = Mode.Standard;
                return;
            }
            switch (lastPacketType)
            {
            case 0xC0:
                mode = Mode.WaitingForPacket;
                var encodedLength = request[3] + (request[4] << 8);
                this.DebugLog("Encoded length is 0x{0:X}.", encodedLength);
                currentLength     = Align(encodedLength + 1 + 1);
                transmissionEnded = false;
                break;

            case 0xF:
                lastPacketType = 0;
                break;
            }
        }
Example #16
 public void FrameReceived(byte[] bytes, SKImage sKImage)
 {
     if (!Paused)
     {
         Main.Post(delegate
         {
             FrameReady?.Invoke(this, new FrameReadyEventArgs()
             {
                 FrameBuffer = bytes, Image = sKImage
             });
         }, null);
     }
 }
Example #17
        public void UpdateFrame()
        {
            // get it on the UI thread
            m_context.Post(delegate
            {
                // update event data
                m_frame_ready_event_param.Width     = m_image_width;
                m_frame_ready_event_param.Height    = m_image_height;
                m_frame_ready_event_param.FrameData = m_frame_buffer.ByteBuffer;

                // tell whoever's listening that we have a frame to draw
                FrameReady?.Invoke(this, m_frame_ready_event_param);
            }, null);
        }
Example #18
        private void HandleUdpResponse(IPEndPoint source, UdpPacket response)
        {
            var ipPacket       = new IPv4Packet(IP, source.Address);
            var ethernetPacket = new EthernetPacket((PhysicalAddress)MAC, arpTable[source.Address], EthernetPacketType.None);

            ipPacket.PayloadPacket       = response;
            ethernetPacket.PayloadPacket = ipPacket;
            response.UpdateCalculatedValues();

            this.Log(LogLevel.Noisy, "Sending UDP response: {0}", response);

            EthernetFrame.TryCreateEthernetFrame(ethernetPacket.Bytes, true, out var ethernetFrame);
            FrameReady?.Invoke(ethernetFrame);
        }
Example #19
        private void SendPacket()
        {
            var slot = readSlots[readerSlotNumber.Value];

            if (!Misc.TryCreateFrameOrLogWarning(this, slot.Read(), out var frame, addCrc: true))
            {
                return;
            }

            this.Log(LogLevel.Noisy, "Sending packet of length {0} bytes.", frame.Length);
            FrameReady?.Invoke(frame);

            readerEventPending.Value = true;
            RefreshIrq();
        }
Example #20
        private void ProcessFrame(byte[] frameBuffer)
        {
            _context.Post(delegate
            {
                // create a simple GDI+ happy Bitmap
                //bitmap = new Bitmap(new MemoryStream(frameBuffer));

                bitmap = BitmapFactory.DecodeByteArray(frameBuffer, 0, frameBuffer.Length);

                // tell whoever's listening that we have a frame to draw
                FrameReady?.Invoke(this, new FrameReadyEventArgs {
                    FrameBuffer = CurrentFrame, Bitmap = bitmap
                });
            }, null);
        }
Example #21
        private void Capture_FrameReady(object sender, FrameArgs e)
        {
            lock (matLocker)
            {
                if (mat != null)
                {
                    mat.Dispose();
                    mat = null;
                }

                mat          = e.Mat;
                e.MatDispose = false;
                FrameReady?.Invoke(this, mat);
            }
        }
Example #22
        public void WriteDataBulk(USBPacket packet)
        {
            if (packet.data == null)
            {
                return;
            }

            byte[] packetToSend;
            if (packet.data[5] != 64)
            {
                packetToSend = new byte[packet.data.Length - 8];
                Array.Copy(packet.data, 8, packetToSend, 0, packetToSend.Length);
            }
            else
            {
                packetToSend = new byte[packet.data.Length - 12];
                Array.Copy(packet.data, 12, packetToSend, 0, packetToSend.Length);

                if ((packetToSend[14] & 0xF0) == 0x40) //IP packet
                {
                    ushort cksum;
                    IPHeaderLength = (ushort)((packetToSend[14] & 0x0F) * 4);
                    if (packetToSend[23] == 0x06) // TCP packet
                    {
                        IPpacket tcpPacket = new IPpacket(IPHeaderLength, IPpacket.PacketType.TCP);
                        tcpPacket.ReadFromBuffer(packetToSend);
                        cksum  = tcpPacket.GetChecksum();
                        cksum -= 1;
                        packetToSend[MACHeaderLegth + IPHeaderLength + 16] = (byte)((cksum >> 8) & 0xFF);
                        packetToSend[MACHeaderLegth + IPHeaderLength + 17] = (byte)((cksum) & 0xFF);
                    }
                    else if (packetToSend[23] == 0x11) // UDP packet
                    {
                        IPpacket udpPacket = new IPpacket(IPHeaderLength, IPpacket.PacketType.UDP);
                        udpPacket.ReadFromBuffer(packetToSend);
                        cksum  = udpPacket.GetChecksum();
                        cksum -= 1;
                        packetToSend[MACHeaderLegth + IPHeaderLength + 6] = (byte)((cksum >> 8) & 0xFF);
                        packetToSend[MACHeaderLegth + IPHeaderLength + 7] = (byte)((cksum) & 0xFF);
                    }
                }
            }

            if (Misc.TryCreateFrameOrLogWarning(this, packetToSend, out var frame, addCrc: true))
            {
                FrameReady?.Invoke(frame);
            }
        }
Example #23
        private void TransmitPacket()
        {
            var packetSize = transmitBufferEnd - transmitBufferStart; // -1 for the per packet control byte, but transmitBufferEnd points to the last byte of the packet
            var data       = new byte[packetSize];

            Array.Copy(ethernetBuffer, transmitBufferStart + 1, data, 0, packetSize);
            if (!Misc.TryCreateFrameOrLogWarning(this, data, out var frame, addCrc: true))
            {
                return;
            }
            // status vector is not implemented yet
            this.Log(LogLevel.Debug, "Sending frame {0}.", frame);
            FrameReady?.Invoke(frame);
            transmitPacketInterrupt.Value = true;
            RefreshInterruptStatus();
        }
Example #24
        public byte Transmit()
        {
            lock (lockObj)
            {
                int  len;
                byte whichPacket;

                if ((transmitControl & TransmitEnabled) == 0)
                {
                    return 0;
                }
                if (txFifo.Count == 0)
                {
                    return 0;
                }
                while (txFifo.Count > 0)
                {
                    whichPacket = txFifo.Dequeue();
                    var currentBuffer = memoryBuffer[whichPacket];
                    len  = currentBuffer.Data[2];
                    len |= currentBuffer.Data[3] << 8;
                    len -= 6;

                    byte[] indata = new byte[len];

                    for (int j = 0; j < len; j++)
                    {
                        indata[j] = currentBuffer.Data[j + 4];
                    }

                    if ((control & ControlAutorelease) != 0)
                    {
                        currentBuffer.IsAllocated = false;
                    }
                    else
                    {
                        sentFifo.Enqueue((byte)whichPacket);
                    }
                    if (Misc.TryCreateFrameOrLogWarning(this, indata, out var frame, addCrc: true))
                    {
                        FrameReady?.Invoke(frame);
                    }
                }
                Update();
                return 0;
            }
        }
Example #25
        public byte Transmit()
        {
            lock (lockObj)
            {
                int  len;
                byte whichPacket;

                if ((transmitControl & TransmitEnabled) == 0)
                {
                    return 0;
                }
                if (txFifo.Count == 0)
                {
                    return 0;
                }
                while (txFifo.Count > 0)
                {
                    whichPacket = txFifo.Dequeue();
                    var currentBuffer = memoryBuffer[whichPacket];
                    len  = currentBuffer.Data[2];
                    len |= currentBuffer.Data[3] << 8;
                    len -= 6;

                    byte[] indata = new byte[len];

                    for (int j = 0; j < len; j++)
                    {
                        indata[j] = currentBuffer.Data[j + 4];
                    }

                    if ((control & ControlAutorelease) != 0)
                    {
                        currentBuffer.IsAllocated = false;
                    }
                    else
                    {
                        sentFifo.Enqueue((byte)whichPacket);
                    }
                    var frame = EthernetFrame.CreateEthernetFrameWithCRC(indata);
                    FrameReady?.Invoke(frame);
                }
                Update();
                return 0;
            }
        }
Example #26
        // https://github.com/MarekKowalski/HoloFace/blob/master/HoloFace/Assets/HololensCameraUWP.cs
        private void MediaFrameReader_FrameArrived(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
        {
            using (var frame = sender.TryAcquireLatestFrame()) {
                // https://docs.microsoft.com/en-us/windows/mixed-reality/locatable-camera
                var coordinateSystem = frame?.CoordinateSystem;
                var cameraIntrinsics = frame?.VideoMediaFrame?.CameraIntrinsics;

                var ht = coordinateSystem.TryGetTransformTo(originalFrameOfReference.CoordinateSystem);

                Matrix4 webcamToWorldMatrix = new Matrix4(
                    ht?.M11 ?? 1, ht?.M21 ?? 0, ht?.M31 ?? 0, ht?.Translation.X ?? 0,
                    ht?.M12 ?? 0, ht?.M22 ?? 1, ht?.M32 ?? 0, ht?.Translation.Y ?? 0,
                    -ht?.M13 ?? 0, -ht?.M23 ?? 0, -ht?.M33 ?? 1, -ht?.Translation.Z ?? 0,
                    0, 0, 0, 1);

                using (var bitmap = frame?.VideoMediaFrame?.SoftwareBitmap) {
                    if (bitmap == null)
                    {
                        return;
                    }

                    Width  = bitmap.PixelWidth;
                    Height = bitmap.PixelHeight;

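                    // build a projection matrix from the camera intrinsics (focal lengths and principal point)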
                    var projectionMatrix = new Matrix4();
                    projectionMatrix.M11 = 2 * cameraIntrinsics.FocalLength.X / Width;
                    projectionMatrix.M22 = 2 * cameraIntrinsics.FocalLength.Y / Height;
                    projectionMatrix.M13 = -2 * (cameraIntrinsics.PrincipalPoint.X - Width / 2) / Width;
                    projectionMatrix.M23 = 2 * (cameraIntrinsics.PrincipalPoint.Y - Height / 2) / Height;
                    projectionMatrix.M33 = -1;
                    projectionMatrix.M44 = -1;
                    ProjectionMatrix     = projectionMatrix;

                    var copy = SoftwareBitmap.Copy(bitmap);
                    FrameReady?.Invoke(new FrameData()
                    {
                        bitmap = copy,
                        webcamToWorldMatrix = webcamToWorldMatrix,
                        projectionMatrix    = projectionMatrix
                    });
                }
            }
        }
Example #27
        private void Proc()
        {
            double lastMs  = 0;
            double fps     = capture.Get(CaptureProperty.Fps);
            char   lastKey = (char)255;

            if (fps < 1)
            {
                fps = 30;
            }

            while (true)
            {
                Mat       mat = new Mat();
                FrameArgs arg = new FrameArgs(mat, lastKey);
                if (capture.Read(mat) && !mat.Empty())
                {
                    Cv2.Flip(mat, mat, FlipMode.Y);

                    FrameReady?.Invoke(this, arg);

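                    // sleep just long enough to pace the loop at roughly the capture frame rate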
                    int sleep = (int)Math.Max(1, (1000.0 / fps) - (stopwatch.ElapsedMilliseconds - lastMs));
                    lastMs  = stopwatch.ElapsedMilliseconds;
                    lastKey = (char)Cv2.WaitKey(sleep);
                }
                else
                {
                    IsRunning = false;
                    return;
                }

                if (arg.MatDispose)
                {
                    mat.Dispose();
                }

                if (arg.Break)
                {
                    IsRunning = false;
                    return;
                }
            }
        }
Example #28
        private void TransmitLoop(CancellationToken token)
        {
            while (true)
            {
                if (token.IsCancellationRequested)
                {
                    this.Log(LogLevel.Noisy, "Requested thread cancellation - stopping reading from the TAP device file.");
                    return;
                }

                if (stream == null)
                {
                    this.Log(LogLevel.Error, "Stream null on receiving the frame from the TAP interface - stopping reading from the TAP device file");
                    return;
                }
                try
                {
                    var buffer    = new byte[MTU];
                    int bytesRead = stream.Read(buffer, 0, MTU);
                    if (bytesRead > 0)
                    {
                        var packet = new byte[bytesRead];
                        Array.Copy(buffer, packet, bytesRead);
                        this.Log(LogLevel.Noisy, "Received {0} bytes frame", bytesRead);
                        if (Misc.TryCreateFrameOrLogWarning(this, packet, out var frame, addCrc: true))
                        {
                            FrameReady?.Invoke(frame);
                        }
                    }
                }
                catch (ArgumentException e)
                {
                    this.Log(LogLevel.Error, "Stream was most likely closed - stopping reading from the TAP device file. Exception message: {0}", e.Message);
                    return;
                }
                catch (ObjectDisposedException e)
                {
                    this.Log(LogLevel.Error, "Error reading data - stopping reading from the TAP device file. Exception message: {0}", e.Message);
                    return;
                }
            }
        }
Example #29
        private void OnTimerTick(object sender, EventArgs e)
        {
            using (UMat ugray = new UMat())
                using (Mat frame = _capture.QueryFrame())
                {
                    if (frame == null)
                    {
                        _logger.LogWarning("Frame not ready from camera. Maybe framerate is too high?");
                        return;
                    }

                    // Resize the frame
                    CvInvoke.ResizeForFrame(frame, frame, new Size(_frameWidth, _frameHeigth), Inter.Cubic, scaleDownOnly: true);

                    // Convert to grayscale
                    CvInvoke.CvtColor(frame, ugray, ColorConversion.Bgr2Gray);

                    // Normalizes brightness and increases contrast of the image
                    CvInvoke.EqualizeHist(ugray, ugray);

                    // Detect the faces from the gray scale image and store the locations as rectangles.
                    // The first dimension is the channel,
                    // The second dimension is the index of the rectangle in the specific channel
                    var faceBounds = _faceDetector.DetectMultiScale(
                        ugray,
                        1.1,
                        10,
                        new Size(20, 20)
                        );

                    _logger.LogTrace("Got frame. Found {0} faces.", faceBounds.Length);

                    // Publish a completed frame
                    FrameReady?.Invoke(this, new VideoFrame(
                                           DateTime.Now,
                                           faceBounds.Select(bounds => new VideoFrame.Face(bounds, -1, -1, -1)).ToList(),
                                           frame.ToImage<Bgr, byte>(),
                                           _frameWidth,
                                           _frameHeigth
                                           ));
                }
        }
Example #30
        private void SendSingleFrame(IEnumerable<byte> bytes, bool isCRCIncluded)
        {
            var frame = EthernetFrame.CreateEthernetFrameWithoutCRC(bytes.ToArray());

            if (!isCRCIncluded)
            {
                if (checksumGeneratorEnabled.Value)
                {
                    this.Log(LogLevel.Noisy, "Generating checksum for the frame");
                    frame.FillWithChecksums(new [] { EtherType.IpV4 }, new [] { IPProtocolType.TCP, IPProtocolType.UDP });
                }
                else
                {
                    this.Log(LogLevel.Warning, "The frame has no CRC, but the automatic checksum generation is disabled");
                }
            }

            this.Log(LogLevel.Noisy, "Sending packet, length {0}", frame.Bytes.Length);
            FrameReady?.Invoke(frame);
        }