Code Example #1
        /// <summary>
        /// Called when a frame is received by the listener.
        /// </summary>
        /// <param name="sender">The sender.</param>
        /// <param name="args">The <see cref="FrameReceivedEventArgs"/> instance containing the event data.</param>
        protected void OnFrameReceivedHandler(object sender, FrameReceivedEventArgs args)
        {
            var stream = args.Stream;
            var frame  = args.Frame;

            switch (frame.FrameType)
            {
            case FrameType.Headers:
                if (ForbiddenHeaders.HasForbiddenHeader(stream.Headers))
                {
                    stream.WriteRst(ResetStatusCode.ProtocolError);
                    return;
                }
                ProcessRequest(stream, frame);
                break;

            case FrameType.Data:
                ProcessIncomingData(stream, frame);
                break;

            case FrameType.Settings:
                ProcessSettings(frame as SettingsFrame);
                break;
            }
        }
Code Example #2
        void target_FrameReceived(object sender, FrameReceivedEventArgs e)
        {
            imageAvailable = false;
            int paddingLeft, paddingRight;

            if (null == normalizedImage)
            {
                imageAvailable = e.TryGetRawImage(ImageType.Normalized,
                                                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Left,
                                                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Top,
                                                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Width,
                                                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Height,
                                                  out normalizedImage, out imageMetrics, out paddingLeft, out paddingRight);
            }
            else
            {
                imageAvailable = e.UpdateRawImage(ImageType.Normalized, normalizedImage,
                                                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Left,
                                                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Top,
                                                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Width,
                                                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Height);
            }

            if (!imageAvailable)
            {
                return;
            }
            DisableRawImage();

            ShowInWPF();
            CaptureVideo();

            imageAvailable = false;
            EnableRawImage();
        }
Code Example #3
        private void FrameReceivedHandler(object sender, FrameReceivedEventArgs args)
        {
            var stream = args.Stream;
            var method = stream.Headers.GetValue(":method");
            // Guard against a missing :method pseudo-header before lower-casing,
            // as the variant in Code Example #39 does.
            if (!string.IsNullOrEmpty(method))
            {
                method = method.ToLower();
            }

            try
            {
                switch (method)
                {
                case "dir":
                case "get":
                    if (args.Frame is DataFrame)
                    {
                        SaveDataFrame(stream, (DataFrame)args.Frame);
                    }
                    else if (args.Frame is HeadersFrame)
                    {
                        Http2Logger.LogConsole("Headers received for stream: " + args.Frame.StreamId + " status:" + ((HeadersFrame)args.Frame).Headers.GetValue(":status"));
                    }
                    break;
                }
            }
            catch (Exception)
            {
                stream.WriteRst(ResetStatusCode.InternalError);
                stream.Dispose();
            }
        }
Code Example #4
File: PollingClient.cs Project: orf53975/PollingTcp
        private void TransportLayerOnFrameReceived(object sender, FrameReceivedEventArgs<TServerDataFrameType> frameReceivedEventArgs)
        {
            this.receivedDataTimeoutTimer.Change(this.receiveTimeout, Timeout.InfiniteTimeSpan);

            if (this.connectionState == ConnectionState.Connecting && this.expectConnectionEstablishement)
            {
                lock (this.connectionStateLock)
                {
                    if (this.connectionState == ConnectionState.Connecting && this.expectConnectionEstablishement)
                    {
                        var data = frameReceivedEventArgs.Frame.Payload;

                        var assignedClientId = BitConverter.ToInt32(data, 0);

                        this.StartConnection(assignedClientId);
                    }
                    else
                    {
                        Console.WriteLine("Received connection response while not in connecting mode anymore or no one would like to accept. State: {0}, Accept: {1}.", this.connectionState, this.expectConnectionEstablishement);
                    }
                }
            }
            else if (this.connectionState == ConnectionState.Connected)
            {
                this.OnFrameReceived(new FrameReceivedEventArgs<TServerDataFrameType>
                {
                    Frame = frameReceivedEventArgs.Frame
                });
            }
        }
Code Example #5
 public void FrameReceived(object sender, FrameReceivedEventArgs e)
 {
     lock (pendingLock)
     {
         PendingMsgs.Add(e.ReceivedFrame);
     }
 }
Code Example #6
        public static void OnDeviceDataReceieved(object sender, FrameReceivedEventArgs e)
        {
            N2kFrame    n2kFrame = (N2kFrame)e.ReceivedFrame;
            CANStreamer stream   = sender as CANStreamer;

            CANDevice dev = UpdateAddDevice(n2kFrame, stream.Name);
        }
Code Example #7
        private void FrameReceivedEventHandler(object o, FrameReceivedEventArgs a)
        {
            if (a.Frame.MailboxId == (uint)CNXMsgIds.BlockQueryResponse + (uint)mBlockId)
            {
                byte[] data = a.Frame.Data;

                if (data.Length > 3)
                {
                    if (!BitConverter.IsLittleEndian)
                    {
                        Array.Reverse(data, 0, 4);
                    }
                    mLastOffset = BitConverter.ToUInt32(data, 0);
                }
                // crc supplied
                if (data.Length > 5)
                {
                    if (!BitConverter.IsLittleEndian)
                    {
                        Array.Reverse(data, 4, 2);
                    }
                    mDeviceCRC = BitConverter.ToUInt16(data, 4);
                }
                // do we have some version information
                if (data.Length > 6)
                {
                    mDeviceVersion = data[6];
                }

                object[] arg = new object[] { (Block)(a.Frame.MailboxId & 0xff), mLastOffset, mLastOffset, mDeviceCRC, mBlockCRC, mDeviceVersion, mBlockVersion };
                CNXLog.InfoFormat("BlockQueryResponse {0}, offset 0x{1:x} ({2}), reported crc 0x{3:x}, block crc 0x{4:x}, reported version {5}, block version {6}.", arg);

                BlockStateMachine(BlockEvent.Frame);
            }
        }
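The endianness handling above deserves a closer look: BitConverter reads values in host byte order, so a little-endian wire payload must be byte-swapped only on big-endian hosts. A minimal self-contained sketch of the same idiom follows; the payload layout is illustrative, not the CNX frame format.

using System;

class LittleEndianDecodeSketch
{
    // Read a little-endian uint32, swapping only when the host is big-endian,
    // mirroring the Array.Reverse/BitConverter idiom in Code Example #7.
    // Note: like the original, the swap mutates the buffer in place.
    static uint ReadUInt32LE(byte[] data, int offset)
    {
        if (!BitConverter.IsLittleEndian)
        {
            Array.Reverse(data, offset, 4);
        }
        return BitConverter.ToUInt32(data, offset);
    }

    static void Main()
    {
        byte[] payload = { 0x78, 0x56, 0x34, 0x12 };            // 0x12345678, little-endian
        Console.WriteLine("0x{0:X}", ReadUInt32LE(payload, 0)); // prints 0x12345678
    }
}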
Code Example #8
        private void OnTouchTargetFrameReceived(object sender, FrameReceivedEventArgs e)
        {
            //lock (this)
            //{
            // get image from Surface
            if (_surfaceImage == null)
            {
                e.TryGetRawImage(
                    ImageType.Normalized,
                    0, 0,
                    Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.WorkingAreaWidth,
                    Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.WorkingAreaHeight,
                    out _surfaceImage,
                    out _surfaceMetrics);
            }
            else
            {
                e.UpdateRawImage(
                    ImageType.Normalized,
                    _surfaceImage,
                    0, 0,
                    Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.WorkingAreaWidth,
                    Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.WorkingAreaHeight);
            }

            // create EmguCV image and fire event
            _emguCvImage      = CreateEmguCvImage(_surfaceImage, _surfaceMetrics);
            _currentTimeStamp = DateTime.Now.Ticks;
            _imageEvent.Set();
            if (OnNewImage != null)
            {
                OnNewImage(_emguCvImage);
            }
            //}
        }
Code Example #9
 static void OnFrameReceived(FrameReceivedEventArgs eventArgs)
 {
     if (s_CameraFrameReceived != null)
     {
         RaiseFrameReceivedEvent();
     }
 }
Code Example #10
        private void contactTarget_FrameReceived(object sender, FrameReceivedEventArgs e)
        {
            int paddingLeft, paddingRight;

            if (raw == null)
            {
                imageAvailable = e.TryGetRawImage(ImageType.Normalized,
                                                  Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Left,
                                                  Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Top,
                                                  Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Width,
                                                  Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Height,
                                                  out raw, out imageMetrics, out paddingLeft, out paddingRight);
            }
            else
            {
                imageAvailable = e.UpdateRawImage(ImageType.Normalized, raw,
                                                  Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Left,
                                                  Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Top,
                                                  Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Width,
                                                  Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Height);
            }
            if (!imageAvailable)
            {
                return;
            }

            var r = CreateEmguCvImage(raw, imageMetrics);

            ProcessFrame(r);
            DetectInputs();
        }
Code Example #11
 private void ReceiveCallback(IAsyncResult ar)
 {
     // Bug in the original: EndRead's return value (the number of bytes actually
     // read) is discarded; see the corrected sketch after this example.
     NET_STREAM.EndRead(ar);
     EventArgs       = new FrameReceivedEventArgs();
     EventArgs.Frame = RECEIVE_BUFFER;
     FrameReceived(this, EventArgs);
     NET_STREAM.BeginRead(RECEIVE_BUFFER, 0, RECEIVE_BUFFER.Length, new AsyncCallback(ReceiveCallback), null);
 }
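The callback above has two latent bugs: it discards EndRead's return value, so neither the number of valid bytes nor a zero-byte read (the peer closing) is detected, and it hands the shared RECEIVE_BUFFER to subscribers while the next BeginRead can overwrite it. Below is a corrected sketch under assumed names; the event carries a plain byte[] because the original FrameReceivedEventArgs shape is not shown.

using System;
using System.Net.Sockets;

class FrameReader
{
    private readonly NetworkStream stream;
    private readonly byte[] buffer = new byte[4096];

    public event EventHandler<byte[]> FrameReceived; // assumed payload shape

    public FrameReader(NetworkStream stream)
    {
        this.stream = stream;
    }

    public void Start()
    {
        stream.BeginRead(buffer, 0, buffer.Length, ReceiveCallback, null);
    }

    private void ReceiveCallback(IAsyncResult ar)
    {
        int bytesRead = stream.EndRead(ar); // 0 means the peer closed the connection
        if (bytesRead == 0)
        {
            return;
        }

        // Copy only the valid bytes so the next read cannot overwrite a frame
        // that a subscriber is still holding.
        byte[] frame = new byte[bytesRead];
        Array.Copy(buffer, frame, bytesRead);
        FrameReceived?.Invoke(this, frame);

        stream.BeginRead(buffer, 0, buffer.Length, ReceiveCallback, null);
    }
}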
Code Example #12
        void target_FrameReceived(object sender, FrameReceivedEventArgs e)
        {
            if (++count < 30)
            {
                return;
            }
            else
            {
                count          = 0;
                imageAvailable = false;
                int paddingLeft, paddingRight;
                if (null == normalizedImage)
                {
                    imageAvailable = e.TryGetRawImage(ImageType.Normalized,
                                                      Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Left,
                                                      Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Top,
                                                      Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Width,
                                                      Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Height,
                                                      out normalizedImage, out imageMetrics, out paddingLeft, out paddingRight);
                }
                else
                {
                    imageAvailable = e.UpdateRawImage(ImageType.Normalized, normalizedImage,
                                                      Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Left,
                                                      Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Top,
                                                      Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Width,
                                                      Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Height);
                }

                if (!imageAvailable)
                {
                    return;
                }

                DisableRawImage();

                //System.IO.MemoryStream stream = new System.IO.MemoryStream(normalizedImage);
                //BmpBitmapDecoder decoder = new BmpBitmapDecoder(stream, BitmapCreateOptions.PreservePixelFormat, BitmapCacheOption.OnLoad);
                //ImageSource source = decoder.Frames[0];
                //source.Freeze();
                //iCapturedFrame.Source = source;

                GCHandle h      = GCHandle.Alloc(normalizedImage, GCHandleType.Pinned);
                IntPtr   ptr    = h.AddrOfPinnedObject();
                Bitmap   bitmap = new Bitmap(imageMetrics.Width,
                                             imageMetrics.Height,
                                             imageMetrics.Stride,
                                             System.Drawing.Imaging.PixelFormat.Format8bppIndexed,
                                             ptr);


                tracker.ProcessImage(bitmap);

                // Release the wrapper and unpin the buffer (this assumes
                // ProcessImage does not retain the bitmap beyond this call).
                bitmap.Dispose();
                h.Free();

                imageAvailable = false;
                //if (!tracker.TrackingDisabled)
                EnableRawImage();
            }
        }
Code Example #13
        protected virtual void OnFrameReceived(FrameReceivedEventArgs<TClientDataFrameType> e)
        {
            EventHandler<FrameReceivedEventArgs<TClientDataFrameType>> handler = this.FrameReceived;

            if (handler != null)
            {
                handler(this, e);
            }
        }
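Code Example #13 is the standard .NET pattern for raising a generic event: the delegate is copied to a local so a subscriber detaching on another thread between the null check and the invocation cannot cause a NullReferenceException. A self-contained sketch of the whole publisher/subscriber shape follows; the type names mirror the PollingTcp ones but are written here purely for illustration.

using System;

class FrameReceivedEventArgs<TFrame> : EventArgs
{
    public TFrame Frame { get; set; }
}

class FrameSource<TFrame>
{
    public event EventHandler<FrameReceivedEventArgs<TFrame>> FrameReceived;

    // C# 6's ?.Invoke reads the delegate field exactly once, collapsing the
    // copy-check-invoke dance from Code Example #13 into a single expression.
    protected virtual void OnFrameReceived(FrameReceivedEventArgs<TFrame> e)
    {
        FrameReceived?.Invoke(this, e);
    }

    public void Push(TFrame frame)
    {
        OnFrameReceived(new FrameReceivedEventArgs<TFrame> { Frame = frame });
    }
}

class Demo
{
    static void Main()
    {
        var source = new FrameSource<string>();
        source.FrameReceived += (s, e) => Console.WriteLine("got: " + e.Frame);
        source.Push("hello");
    }
}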
Code Example #14
        private static void TxtCameraOnFrameReceived(object sender, FrameReceivedEventArgs frameReceivedEventArgs)
        {
            // When a new frame arrives, we decode it and fire an event
            DecodeBitmap(frameReceivedEventArgs.FrameData, frameReceivedEventArgs.DataLength);

            CameraFrameDecoded?.Invoke(null, new FrameDecodedEventArgs(FirstFrame));

            FirstFrame = false;
        }
Code Example #15
        private static void Decoder_OnFrameReceived(object sender, FrameReceivedEventArgs e)
        {
            Console.WriteLine($"Frame received at: {DateTime.Now}");

            // HH-mm-ss-fff: include milliseconds so two frames arriving within the
            // same second don't collide and make FileMode.CreateNew throw.
            using (FileStream fs = new FileStream($"{FramesDir}\\{DateTime.Now:HH-mm-ss-fff}.jpeg", FileMode.CreateNew))
            {
                fs.Write(e.Frame, 0, e.Frame.Length);
                fs.Flush();
            }
        }
Code Example #16
File: RtpStream.cs Project: cucacutexice/AIGA
        internal void RaiseFrameReceivedEvent(object[] args)
        {
            if (!RtpEvents.FireEvent(FrameReceived, args))
            {
                FrameReceivedEventArgs ea = (FrameReceivedEventArgs)args[1];

                eventLog.WriteEntry("FrameReceivedEvent - " + ea.RtpStream.SSRC,
                                    EventLogEntryType.Warning, (int)RtpEL.ID.FrameReceived);
            }
        }
Code Example #17
        internal void RaiseFrameReceivedEvent(object[] args)
        {
            if (!RtpEvents.FireEvent(FrameReceived, args))
            {
                FrameReceivedEventArgs ea = (FrameReceivedEventArgs)args[1];

                eventLog.WriteEntry(string.Format(CultureInfo.CurrentCulture, Strings.FrameReceivedEvent,
                                                  ea.RtpStream.SSRC), EventLogEntryType.Warning, (int)RtpEL.ID.FrameReceived);
            }
        }
Code Example #18
File: Program.cs Project: valoni/NETMF-Toolkit
        static void xbee_OnPacketReceived(object sender, FrameReceivedEventArgs e)
        {
            XBeeResponse response = e.Response;

            if (response != null)
            {
                Console.WriteLine(response.ToString() + "\r\n==============================");
            }

            AtCommandResponse res = response as AtCommandResponse;

            if (res != null)
            {
                //if (res.ParseValue() is ZNetNodeDiscover)
                //{
                //    ZNetNodeDiscover nd = res.ParseValue() as ZNetNodeDiscover;

                //    if (nd.NodeIdentifier == "SENSOR")
                //    {
                //        (sender as XBee).Execute(new RemoteAtRequest(nd.SerialNumber, nd.ShortAddress, new ForceSample()));
                //        //sender.SendCommand(new AtRemoteCommand(nd.SerialNumber, nd.ShortAddress, new XBeeSensorSample()));
                //    }
                //    else
                //    {
                //        ZNetTxRequest x = new ZNetTxRequest(nd.SerialNumber, nd.ShortAddress, Encoding.ASCII.GetBytes(DateTime.Now.ToLongTimeString()));
                //        (sender as XBee).Execute(x);
                //    }

                //}
                return;
            }

            RemoteAtResponse res2 = response as RemoteAtResponse;

            if (res2 != null)
            {
                //if (res2.ParseValue() is ForceSampleData)
                //{
                //    ForceSampleData d = res2.ParseValue() as ForceSampleData;

                //    double mVanalog = (((float)d.AD2) / 1023.0) * 1200.0;
                //    double temp_C = (mVanalog - 500.0) / 10.0 - 4.0;
                //    double lux = (((float)d.AD1) / 1023.0) * 1200.0;

                //    mVanalog = (((float)d.AD3) / 1023.0) * 1200.0;
                //    double hum = ((mVanalog * (108.2 / 33.2)) - 0.16) / (5 * 0.0062 * 1000.0);

                //    temperature = temp_C;
                //}
            }
        }
Code Example #19
        void target_FrameReceived(object sender, FrameReceivedEventArgs e)
        {
            if (++count < 30)
            {
                return;
            }
            else
            {
                count          = 0;
                imageAvailable = false;
                int paddingLeft, paddingRight;
                if (null == normalizedImage)
                {
                    imageAvailable = e.TryGetRawImage(ImageType.Normalized,
                                                      Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Left,
                                                      Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Top,
                                                      Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Width,
                                                      Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Height,
                                                      out normalizedImage, out imageMetrics, out paddingLeft, out paddingRight);
                }
                else
                {
                    imageAvailable = e.UpdateRawImage(ImageType.Normalized, normalizedImage,
                                                      Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Left,
                                                      Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Top,
                                                      Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Width,
                                                      Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Height);
                }

                if (!imageAvailable)
                {
                    return;
                }

                DisableRawImage();

                tracker.ProcessImage(normalizedImage, imageMetrics, captureFrame);
                captureFrame = false;

                imageAvailable = false;
                EnableRawImage();
            }
        }
Code Example #20
        private void FrameReceivedEventHandler(object o, FrameReceivedEventArgs a)
        {
            switch ((CNXMsgIds)a.Frame.MailboxId)
            {
            case CNXMsgIds.DriverMessageAck:
                mCommsServer.Send(RTTMesg.CreateDriverMessageAck(BitConverter.ToUInt16(a.Frame.Data, 0)));
                break;

            case CNXMsgIds.DriverStatus:
                CNXLog.WarnFormat("Driver status recieved {0}.", (CANLib.DriverStatus)a.Frame.Data[0]);
                RTTMesg.RTTOutMsgIds state = RTTMesg.RTTOutMsgIds.UNKNOWN;
                switch ((CANLib.DriverStatus)a.Frame.Data[0])
                {
                case DriverStatus.Break:
                    state = RTTMesg.RTTOutMsgIds.DriverStatusOnBreak;
                    break;

                case DriverStatus.Normal:
                    state = RTTMesg.RTTOutMsgIds.DriverStatusNormal;
                    break;

                case DriverStatus.OutOfVehicle:
                    state = RTTMesg.RTTOutMsgIds.DriverStatusOutOfVehicle;
                    break;

                default:
                    CNXLog.WarnFormat("CNXMsgIds.DriverStatus {0} out of range.", a.Frame.Data[0]);
                    break;
                }
                if (state != RTTMesg.RTTOutMsgIds.UNKNOWN)
                {
                    mCommsServer.Send(RTTMesg.CreateDriverStatus(state));
                }
                break;

            default:
                break;
            }
        }
Code Example #21
File: Program.cs Project: joshjliu/Zigbee-3
        static void xbee_FrameReceived(object sender, FrameReceivedEventArgs e)
        {
            Console.WriteLine("received a packet: " + e.Response);

            NodeDiscover nd = NodeDiscover.Parse((e.Response as AtCommandResponse));

            if (nd != null && nd.ShortAddress != null)
            {
                Console.WriteLine(nd);

                if (nd.NodeIdentifier == "SLAVE")
                {
                    Console.WriteLine("Sending \"Hallo\" to the SLAVE...");
                    (sender as XBee).ExecuteNonQuery(new TxRequest64(nd.SerialNumber, Encoding.ASCII.GetBytes("Hallo")));
                }
            }

            if (e.Response is RxResponse64)
            {
                Console.WriteLine("Recevied Rx64");
                Console.WriteLine(ByteUtil.PrintBytes((e.Response as RxResponse64).Value));
            }
        }
Code Example #22
        private void FrameReceivedEventHandler(object o, FrameReceivedEventArgs a)
        {
            try
            {
                switch (a.Frame.MailboxId)
                {
                case (uint)CNXMsgIds.ProductId:
                    CNXLog.InfoFormat("DeviceCataloge {0}", a.Frame.ToString());
                    ThreadPool.QueueUserWorkItem(new WaitCallback(UpdateDeviceCatalogue), a.Frame);
                    break;

                case (uint)CNXMsgIds.Fareset:
                    CNXLog.InfoFormat("Frame detected Fareset {0}.", a.Frame.Data[0]);
                    break;

                case (uint)CNXMsgIds.Destination:
                    CNXLog.InfoFormat("Frame detected Destination {0}.", CNXCANMsgHelper.UnpackDestinationFrameData(a.Frame.Data));
                    break;

                case (uint)CNXMsgIds.RouteTrip:
                    string route = string.Empty;
                    int?   trip  = null;
                    CNXCANMsgHelper.UnpackFareboxData(a.Frame.Data, out route, out trip);
                    CNXLog.InfoFormat("Frame detected Farebox route code {0} trip No {0}.", route, trip);
                    break;

                default:
                    break;
                }

                CNXLog.Debug("FrameReceivedEventHandler");
            }
            catch (Exception e)
            {
                CNXLog.ErrorFormat("CANCommsServer Frame Rx {0}.", e.ToString());
            }
        }
Code Example #23
        /// <summary>
        /// Deals with frames coming in from CAN.
        /// </summary>
        /// <param name="o">Event source object.</param>
        /// <param name="a">Event parameters.</param>
        public void FrameReceivedEventHandler(object o, FrameReceivedEventArgs a)
        {
            try
            {
                switch ((CNXMsgIds)a.Frame.MailboxId)
                {
                case CNXMsgIds.DuressState:
                    if (CNXCANMsgHelper.DuressFrame(a.Frame))
                    {
                        CNXLog.WarnFormat("TrackerState Duress Active frame recieved.");
                        mLocationMessage.Alarm = true;
                        StateMachine(TrackingEvent.ALARM_ACTIVE);
                    }
                    break;

                default:
                    break;
                }
            }
            catch (Exception e)
            {
                CNXLog.Error(string.Format("TrackerState Frame error {0}", a.Frame.ToString()), e);
            }
        }
Code Example #24
        private void contactTarget_FrameReceived(object sender, FrameReceivedEventArgs e)
        {
            int padRight, padLeft;

            if (rawImageBytes == null)
            {
                e.TryGetRawImage(ImageType.Normalized,
                    0,
                    InteractiveSurface.DefaultInteractiveSurface.Top,
                    InteractiveSurface.DefaultInteractiveSurface.Width,
                    InteractiveSurface.DefaultInteractiveSurface.Height,
                    out rawImageBytes, out rawImageMetrics, out padLeft, out padRight);
            }
            else
            {
                e.UpdateRawImage(ImageType.Normalized, rawImageBytes,
                    0,
                    InteractiveSurface.DefaultInteractiveSurface.Top,
                    InteractiveSurface.DefaultInteractiveSurface.Width,
                    InteractiveSurface.DefaultInteractiveSurface.Height);
            }
        }
Code Example #25
 /// <summary>
 /// Called when the ContactTargetFrameReceived event is raised.
 /// </summary>
 /// <param name="sender">The sender.</param>
 /// <param name="args">The <see cref="Microsoft.Surface.Core.FrameReceivedEventArgs"/> instance containing the event data.</param>
 private static void OnContactTargetFrameReceived(object sender, FrameReceivedEventArgs args)
 {
     imageAvailable = false;
     int paddingLeft, paddingRight;
     if (normalizedImage == null)
     {
         imageAvailable = args.TryGetRawImage(
             ImageType.Normalized,
             InteractiveSurface.DefaultInteractiveSurface.Left,
             InteractiveSurface.DefaultInteractiveSurface.Top,
             InteractiveSurface.DefaultInteractiveSurface.Width,
             InteractiveSurface.DefaultInteractiveSurface.Height,
             out normalizedImage,
             out normalizedMetrics,
             out paddingLeft,
             out paddingRight);
     }
     else
     {
         imageAvailable = args.UpdateRawImage(
              ImageType.Normalized,
              normalizedImage,
              InteractiveSurface.DefaultInteractiveSurface.Left,
              InteractiveSurface.DefaultInteractiveSurface.Top,
              InteractiveSurface.DefaultInteractiveSurface.Width,
              InteractiveSurface.DefaultInteractiveSurface.Height);
     }
 }
Code Example #26
        private void FrameReceivedHandler(object sender, FrameReceivedEventArgs args)
        {
            var stream = args.Stream;
            var method = stream.Headers.GetValue(":method");
            if (!string.IsNullOrEmpty(method)) 
                method = method.ToLower();

            try
            {
                if (args.Frame is DataFrame)
                {
                    switch (method)
                    {
                        case "post":
                        case "put":
                            SaveDataFrame(stream, (DataFrame) args.Frame);
                            // Avoid a leading \ in the filename
                            AddFileToRootFileList(stream.Headers.GetValue(":path").Substring(1));
                            break;
                    }
                } 
                else if (args.Frame is HeadersFrame)
                {
                    switch (method)
                    {
                        case "get":
                        case "dir":
                            try
                            {
                                string path = stream.Headers.GetValue(":path").Trim('/');
                                SendFile(path, stream);
                            }
                            catch (FileNotFoundException e)
                            {
                                Http2Logger.LogDebug("File not found: " + e.FileName);
                                WriteStatus(stream, StatusCode.Code404NotFound, true);
                            }

                            break;
                        case "delete":
                            WriteStatus(stream, StatusCode.Code401Forbidden, true);
                            break;

                        default:
                            Http2Logger.LogDebug("Received headers with Status: " + stream.Headers.GetValue(":status"));
                            break;
                    }
                }
            }
            catch (Exception e)
            {
                Http2Logger.LogDebug("Error: " + e.Message);
                stream.WriteRst(ResetStatusCode.InternalError);
                stream.Dispose();
            }
        }
Code Example #27
        private void OnTouchTargetFrameReceived(object sender, FrameReceivedEventArgs e)
        {
            ReadOnlyTouchPointCollection touches = touchTarget.GetState();
            prev_x = 0.0;
            prev_y = 0.0;

            getcords(touches);
        }
Code Example #28
        private static void Con_FrameReceived(object sender, FrameReceivedEventArgs e)
        {
//            Console.WriteLine($"Frame received. Type: {e.Frame.ServiceType}");
            if (e.Frame.ServiceType.ServiceTypeIdentifier == ServiceTypeIdentifier.TUNNELING_REQ)
            {
                var req = (TunnelingRequest)e.Frame.ServiceType;

                if (req.CemiFrame.DestinationAddress.IsGroupAddress())
                {
                    var addr = ((GroupAddress)req.CemiFrame.DestinationAddress).ToString();
                    if (req.CemiFrame.Apci.Type == Cemi.APCIType.GroupValue_Write)
                    {
                        if (addr.EndsWith("/2/3") || addr.EndsWith("/2/23") || addr.EndsWith("/2/43") || addr.EndsWith("/2/63"))
                        {
                            var value = DPType5.DPT_SCALING.Decode(req.CemiFrame.Apci.Data);
                            Console.WriteLine($"++ {req.CemiFrame.Apci.Type} for \"{addr}\": {value}%");
                        }
                        else if (addr.EndsWith("/1/12") || addr.EndsWith("/1/22") || addr.EndsWith("/1/32") || addr.EndsWith("/1/42") || addr.EndsWith("/1/52"))
                        {
                            var value = DPType14.DPT_ELECTRIC_CURRENT.Decode(req.CemiFrame.Apci.Data);
                            Console.WriteLine($"++ {req.CemiFrame.Apci.Type} for \"{addr}\": {value}A");
                        }
                        else if (addr.EndsWith("/47"))
                        {
                            var value = DPType7.DPT_TIMEPERIOD_HRS.Decode(req.CemiFrame.Apci.Data);
                            Console.WriteLine($"++ {req.CemiFrame.Apci.Type} for \"{addr}\": {value}h");
                        }
                        else if (addr.EndsWith("5/0") || addr.EndsWith("/2/7") || addr.EndsWith("/2/9"))
                        {
                            var value = DPType9.DPT_TEMPERATURE.Decode(req.CemiFrame.Apci.Data);
                            Console.WriteLine($"++ {req.CemiFrame.Apci.Type} for \"{addr}\": {value}°C");
                        }
                        else if (addr.EndsWith("0/7/0"))
                        {
                            var value = DPType11.DPT_DATE.Decode(req.CemiFrame.Apci.Data);
                            Console.WriteLine($"++ {req.CemiFrame.Apci.Type} for \"{addr}\": {value}");
                        }
                        else if (addr.EndsWith("0/7/1"))
                        {
                            var value = DPType10.DPT_TIMEOFDAY.Decode(req.CemiFrame.Apci.Data);
                            Console.WriteLine($"++ {req.CemiFrame.Apci.Type} for \"{addr}\": {value}");
                        }
                        else
                        {
                            Console.WriteLine($"{req.CemiFrame.Apci.Type} for \"{addr}\" - ACPI DATA: {req.CemiFrame.Apci.Data.ToHex()} - Payload: {req.CemiFrame.Payload.ToHex()}");
                        }
                    }
                    else if ((req.CemiFrame.Apci.Type == Cemi.APCIType.GroupValue_Read) && addr.EndsWith("29/0/0"))
                    {
                        SendReadAnswerAsync();
                    }
                    if (req.CemiFrame.Apci.Type == Cemi.APCIType.GroupValue_Response)
                    {
                        Console.WriteLine($"--{req.CemiFrame.Apci.Type} for \"{addr}\" - ACPI DATA: {req.CemiFrame.Apci.Data.ToHex()} - Payload: {req.CemiFrame.Payload.ToHex()}");
                    }
                    else
                    {
                        //Console.WriteLine($"{req.CemiFrame.Apci.Type} for \"{addr}\" - ACPI DATA: {req.CemiFrame.Apci.Data.ToHexString()} - Payload: {req.CemiFrame.Payload.ToHexString()}");
                    }
                }
            }
        }
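The handler above keys a datapoint decoder off the group-address suffix with a long if/else chain. When the address-to-datapoint mapping grows, a table-driven dispatch keeps it declarative; the sketch below shows only the shape, with stand-in lambdas rather than the library's real DPType* converters.

using System;
using System.Collections.Generic;

class SuffixDispatchSketch
{
    // Suffix -> decoder table; the lambdas are placeholders, not real KNX DPT decoders.
    static readonly List<(string Suffix, Func<byte[], string> Decode)> Decoders =
        new List<(string Suffix, Func<byte[], string> Decode)>
        {
            ("/2/3", data => (data[0] * 100 / 255) + "%"),                    // DPT 5.001-style scaling
            ("/7/0", data => "date payload: " + BitConverter.ToString(data)), // placeholder
        };

    static string DecodeFor(string addr, byte[] data)
    {
        foreach (var entry in Decoders)
        {
            if (addr.EndsWith(entry.Suffix))
            {
                return entry.Decode(data);
            }
        }
        return BitConverter.ToString(data); // fall back to a hex dump
    }

    static void Main()
    {
        Console.WriteLine(DecodeFor("1/2/3", new byte[] { 0x80 })); // prints 50%
    }
}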
Code Example #29
File: MainForm.cs Project: kendemu/ardrone3-pcap
 void reader_OnFrameReceived(object sender, FrameReceivedEventArgs e)
 {
     if (e.Frame.Type != FrameType.DATA_LL)
     {
         AddFrameToList(e.Frame);
     }
 }
Code Example #30
        private void OnTouchTargetFrameReceived(object sender, FrameReceivedEventArgs e)
        {
            // Lock on syncObject so normalizedImage isn't changed while the Update method is using it
            lock (syncObject)
            {
                if (normalizedImage == null)
                {
                    // get rawimage data for a specific area
                    if (e.TryGetRawImage(
                            ImageType.Normalized,
                            0, 0,
                            InteractiveSurface.PrimarySurfaceDevice.WorkingAreaWidth,
                            InteractiveSurface.PrimarySurfaceDevice.WorkingAreaHeight,
                            out normalizedImage,
                            out normalizedMetrics))
                    {

                        scale = (InteractiveSurface.PrimarySurfaceDevice == null)
                                    ? 1.0f
                                    : (float)InteractiveSurface.PrimarySurfaceDevice.WorkingAreaWidth / normalizedMetrics.Width;
                        normalizedImageUpdated = true;
                    }
                }
                else
                {
                    // get the updated rawimage data for the specified area
                    normalizedImageUpdated = e.UpdateRawImage(
                        ImageType.Normalized,
                        normalizedImage,
                        0, 0,
                        InteractiveSurface.PrimarySurfaceDevice.WorkingAreaWidth,
                        InteractiveSurface.PrimarySurfaceDevice.WorkingAreaHeight);
                }

            }
        }
Code Example #31
File: MainWindow.xaml.cs Project: fingerx/VrpnNet
        //Display the received image on screen
        void imager_FrameReceived(object sender, FrameReceivedEventArgs e)
        {
            if (e.IsBoundFrame && useChannelBinding) //We only want to render a bound frame; this prevents stray unbound frames from crashing the code
            {
                image.WritePixels(new Int32Rect(0, 0, width, height), e.Image, width * bytesPerPixel, 0);
                if (imageFormat == PixelFormats.Rgba128Float)
                {
                    float[] temp = new float[width * height * 4];
                    for (int i = 0; i < width * height * 4; i++)
                    {
                        temp[i] = BitConverter.ToSingle(e.Image, i * 4);
                    }
                }
            }
            else if (!useChannelBinding && !e.IsBoundFrame)
            {
                //With channel binding turned off we have to compile the color frame manually

                //This is easy for the grayscale case
                if (imageFormat == PixelFormats.Gray8 || imageFormat == PixelFormats.Gray16 || imageFormat == PixelFormats.Gray32Float)
                {
                    rawImage = e.Image;
                }
                else if (imageFormat == PixelFormats.Rgb24 || imageFormat == PixelFormats.Rgb48)
                {
                    int channelOffset = 0;

                    //Get the offset of the channel based on its index. Similar to the binding, in a real application this would probably be done by name, not index
                    for (int i = 0; i < imager.Channels.Length; i++)
                    {
                        if (imager.Channels[i].Name == e.Channel)
                        {
                            channelOffset = i * (bytesPerPixel / 3);
                        }
                    }

                    for (int i = 0; i < (e.Image.Length / bytesPerChannel); i++)
                    {
                        for (int j = 0; j < bytesPerChannel; j++)
                        {
                            rawImage[i * bytesPerPixel + channelOffset + j] = e.Image[i * bytesPerChannel + j];
                        }
                    }
                }
                else if (imageFormat == PixelFormats.Rgba64 || imageFormat == PixelFormats.Rgba128Float)
                {
                    int channelOffset = 0;

                    //Get the offset of the channel based on its index. Similar to the binding, in a real application this would probably be done by name, not index
                    for (int i = 0; i < imager.Channels.Length; i++)
                    {
                        if (imager.Channels[i].Name == e.Channel)
                        {
                            channelOffset = i * (bytesPerPixel / 4);
                        }
                    }

                    for (int i = 0; i < (e.Image.Length / bytesPerChannel); i++)
                    {
                        for (int j = 0; j < bytesPerChannel; j++)
                        {
                            rawImage[i * bytesPerPixel + channelOffset + j] = e.Image[i * bytesPerChannel + j];
                        }
                    }
                }
                else if (imageFormat == PixelFormats.Bgra32)
                {
                    int channelOffset = 0;

                    //Get the offset of the channel based on its index. Similar to the binding, in a real application this would probably be done by name, not index
                    for (int i = 0; i < imager.Channels.Length; i++)
                    {
                        if (imager.Channels[i].Name == e.Channel)
                        {
                            //Again, for BGRA we have to swap the order since we are assuming RGBA in the channel order
                            if (i == 0)
                            {
                                channelOffset = 2 * (bytesPerPixel / 4);
                            }
                            else if (i == 2)
                            {
                                channelOffset = 0;
                            }
                            else
                            {
                                channelOffset = i * (bytesPerPixel / 4);
                            }
                        }
                    }

                    for (int i = 0; i < e.Image.Length; i++)
                    {
                        rawImage[i * bytesPerPixel + channelOffset] = e.Image[i];
                    }
                }

                //Finally, we can write the raw image to the display.  As you can see, channel binding does a lot of the work for you.
                image.WritePixels(new Int32Rect(0, 0, width, height), rawImage, width * bytesPerPixel, 0);
            }
        }
Code Example #32
File: XBeeDevice.cs Project: Condrat/JellyFish12000
 void m_XBee_FrameReceived2(object sender, FrameReceivedEventArgs e)
 {
     //if (e.Response is RxResponse64)
     //{
     if (e.Response is ZNetRxResponse)
     {
         MainForm.ConsoleWriteLine("RX: " + ByteUtil.PrintBytes((e.Response as ZNetRxResponse).Value));
     }
         //Console.WriteLine("Recevied Rx64");
         //Console.WriteLine(ByteUtil.PrintBytes((e.Response as RxResponse64).Value));
     //}
 }
Code Example #33
File: Program.cs Project: valoni/NETMF-Toolkit
        static void xbee_OnPacketReceived(object sender, FrameReceivedEventArgs e)
        {
            XBeeResponse response = e.Response;

            Debug.Print(response.ToString());
        }
Code Example #34
 void contactTarget_FrameReceived(object sender, FrameReceivedEventArgs e)
 {
 }
Code Example #35
        //  private void getImage(object sender, DoWorkEventArgs args)
        private void getImage(FrameReceivedEventArgs e)
        {
            //    FrameReceivedEventArgs e = args.Argument as FrameReceivedEventArgs;
            bool imageAvailable = false;
            int paddingLeft, paddingRight;
            try
            {
                if (normalizedImage == null)
                {
                    imageAvailable = e.TryGetRawImage(Microsoft.Surface.Core.ImageType.Normalized,
                       0, 0,
                        Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Width,
                        Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Height,
                        out normalizedImage,
                        out normalizedMetrics,
                        out paddingLeft,
                        out paddingRight);
                }
                else
                {

                    imageAvailable = e.UpdateRawImage(Microsoft.Surface.Core.ImageType.Normalized,
                         normalizedImage,
                         0, 0,
                         Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Width,
                         Microsoft.Surface.Core.InteractiveSurface.PrimarySurfaceDevice.Height);
                }

            }

            catch (Exception exc)
            {
                Console.WriteLine(exc.Message);
            }

            if (imageAvailable)
            {
                imageAvailable = false;

                GCHandle h = GCHandle.Alloc(normalizedImage, GCHandleType.Pinned);
                IntPtr ptr = h.AddrOfPinnedObject();
                System.Drawing.Bitmap imageBitmap = new System.Drawing.Bitmap(normalizedMetrics.Width,
                                      normalizedMetrics.Height,
                                      normalizedMetrics.Stride,
                                      System.Drawing.Imaging.PixelFormat.Format8bppIndexed,
                                      ptr);

                ImageHelper.BinarizeImage(imageBitmap);
                System.Drawing.Bitmap imgClone = new System.Drawing.Bitmap(imageBitmap);
                imgClone.Palette = imageBitmap.Palette;
                DateTime now = DateTime.Now;
                snapshots.Add(now, imgClone);

                imageBitmap.Dispose(); // imgClone owns its own pixel copy now
                h.Free();              // unpin normalizedImage so the GC can move it again

                //  imgClone.Save(".\\aft\\BEFORE" + DateTime.Now.ToString("hh-mm-ss-fff") + ".jpg");

            }
        }
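Code Examples #12, #35, and #40 all share one idiom: pin a managed byte[] with GCHandle.Alloc so a Bitmap can alias it without copying. The handle must be freed once the wrapper is done with the buffer, as the cleanup lines in #12 and #35 show; #40 keeps its wrapping Bitmap in a longer-lived field, so its release point depends on that field's lifetime. A minimal self-contained sketch of the pin-wrap-free pattern, with illustrative dimensions:

using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;

class PinnedBitmapSketch
{
    static void Main()
    {
        const int width = 4, height = 2, stride = 4; // stride: bytes per row, a multiple of 4
        byte[] pixels = new byte[stride * height];

        GCHandle handle = GCHandle.Alloc(pixels, GCHandleType.Pinned);
        try
        {
            // The Bitmap aliases the pinned buffer and is only valid while
            // the handle stays allocated.
            using (var bmp = new Bitmap(width, height, stride,
                                        PixelFormat.Format8bppIndexed,
                                        handle.AddrOfPinnedObject()))
            {
                Console.WriteLine(bmp.Width + "x" + bmp.Height + " " + bmp.PixelFormat);
            }
        }
        finally
        {
            handle.Free(); // unpin so the GC can move or collect the array
        }
    }
}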
Code Example #36
        public void FrameReceivedEventHandler(object o, FrameReceivedEventArgs a)
        {
            try
            {
                byte loading;
                switch ((CNXMsgIds)a.Frame.MailboxId)
                {
                case CNXMsgIds.PassengerCountEvent:
                    CNXLog.Info(a.Frame.ToString());
                    mAPCDataSet.AddSensorEvent(a.Frame.Data[0], a.Frame.Data[1]);
                    break;

                case CNXMsgIds.ResetLoading:
                    CNXLog.Info(a.Frame.ToString());
                    if (a.Frame.DataLength == 8)
                    {
                        mAPCDataSet.FlushData(a.Frame.Data);
                        CNXLog.WarnFormat("APC data flushed.");
                    }
                    if (mAPCDataSet.ZeroLoading())
                    {
                        TrackingService.PassengerLoading = mAPCDataSet.Loading;
                        CNXLog.WarnFormat("Loading Zeroed.");
                    }
                    break;

                case CNXMsgIds.PassengerBoardings:
                    loading = mAPCDataSet.AddPassengerEvent(a.Frame.Data);
                    TrackingService.PassengerLoading = loading;
                    CNXLog.WarnFormat("Boarding {0}. Frame {1}.", mAPCDataSet.Loading, a.Frame.ToString());
                    break;

                case CNXMsgIds.PassengerLoad:
                    // there's an assumption here that the counts will turn up in door order
                    int door  = a.Frame.Data[0];
                    int count = ((door - 1) << 1);
                    mCounts[count++] = a.Frame.Data[1];
                    mCounts[count]   = a.Frame.Data[2];
                    if (door > mDoors)
                    {
                        // don't tell APC about higher doors until lower door counts arrive.
                        mDoors = door;
                    }
                    else
                    {
                        byte[] counts = new byte[mDoors * 2];
                        Array.Copy(mCounts, counts, counts.Length);
                        loading = mAPCDataSet.AddPassengerEvent(counts);
                        TrackingService.PassengerLoading = loading;
                        CNXLog.WarnFormat("Loading {0}. Frame {1}.", mAPCDataSet.Loading, a.Frame.ToString());
                    }
                    break;

                default:
                    break;
                }
            }
            catch (Exception e)
            {
                CNXLog.Error(string.Format("APC Frame error {0}", a.Frame.ToString()), e);
            }
        }
Code Example #38
File: App1.cs Project: schancellor/PassSwipe
        public void OnContactRecordGesture(object sender, FrameReceivedEventArgs e)
        {
            if (isTouching)
            {
                if (normalizedImage == null)
                {
                    e.TryGetRawImage(
                        ImageType.Normalized,
                        0, 0,
                        InteractiveSurface.DefaultInteractiveSurface.Width,
                        InteractiveSurface.DefaultInteractiveSurface.Height,
                        out normalizedImage,
                        out normalizedMetrics);
                }
                else //updates raw image data
                {
                    e.UpdateRawImage(
                        ImageType.Normalized,
                        normalizedImage,
                        0, 0,
                        InteractiveSurface.DefaultInteractiveSurface.Width,
                        InteractiveSurface.DefaultInteractiveSurface.Height);
                }

                capture.OnContactRecordHelper(normalizedImage, normalizedMetrics);
            }
        }
Code Example #39
        private void FrameReceivedHandler(object sender, FrameReceivedEventArgs args)
        {
            var stream = args.Stream;
            var method = stream.Headers.GetValue(":method");

            if (!string.IsNullOrEmpty(method))
            {
                method = method.ToLower();
            }

            try
            {
                if (args.Frame is DataFrame)
                {
                    switch (method)
                    {
                    case "post":
                    case "put":
                        SaveDataFrame(stream, (DataFrame)args.Frame);
                        // Avoid a leading \ in the filename
                        AddFileToRootFileList(stream.Headers.GetValue(":path").Substring(1));
                        break;
                    }
                }
                else if (args.Frame is HeadersFrame)
                {
                    byte[] binary;
                    switch (method)
                    {
                    case "get":
                    case "dir":
                        try
                        {
                            string path = stream.Headers.GetValue(":path").Trim('/');
                            // check if root is requested, in which case send index.html
                            if (string.IsNullOrEmpty(path))
                            {
                                path = IndexHtml;
                            }

                            binary = _fileHelper.GetFile(path);
                            WriteStatus(stream, StatusCode.Code200Ok, false);
                            SendDataTo(stream, binary);
                            Http2Logger.LogDebug("File sent: " + path);
                        }
                        catch (FileNotFoundException e)
                        {
                            Http2Logger.LogDebug("File not found: " + e.FileName);
                            WriteStatus(stream, StatusCode.Code404NotFound, true);
                        }

                        break;

                    case "delete":
                        WriteStatus(stream, StatusCode.Code401Forbidden, true);
                        break;

                    default:
                        Http2Logger.LogDebug("Received headers with Status: " + stream.Headers.GetValue(":status"));
                        break;
                    }
                }
            }
            catch (Exception)
            {
                stream.WriteRst(ResetStatusCode.InternalError);
                stream.Dispose();
            }
        }
Code Example #40
        void target_FrameReceived(object sender, FrameReceivedEventArgs e)
        {
            imageAvailable = false;
            int paddingLeft, paddingRight;
            if (normalizedImage == null)
            {
                imageAvailable = e.TryGetRawImage(ImageType.Normalized,
                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Left,
                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Top,
                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Width,
                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Height,
                  out normalizedImage, out imageMetrics, out paddingLeft, out paddingRight);
            }
            else
            {
                imageAvailable = e.UpdateRawImage(ImageType.Normalized, normalizedImage,
                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Left,
                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Top,
                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Width,
                  Microsoft.Surface.Core.InteractiveSurface.DefaultInteractiveSurface.Height);
            }

            if (!imageAvailable)
                return;

            DisableRawImage();

            // NOTE: this handle is never freed, so normalizedImage stays pinned for
            // the life of the process; see the pin-wrap-free sketch after Code Example #35.
            GCHandle h = GCHandle.Alloc(normalizedImage, GCHandleType.Pinned);
            IntPtr ptr = h.AddrOfPinnedObject();
            frame = new Bitmap(imageMetrics.Width,
                                  imageMetrics.Height,
                                  imageMetrics.Stride,
                                  System.Drawing.Imaging.PixelFormat.Format8bppIndexed,
                                  ptr);

            Convert8bppBMPToGrayscale(frame);

            //convert the bitmap into an EmguCV image <Gray,byte>
            Image<Gray, byte> imageFrame = new Image<Gray, byte>(frame);
            //process the frame for tracking the blob
            imageFrame = processFrame(imageFrame);

            iCapturedFrame.Source = Bitmap2BitmapImage(imageFrame.ToBitmap());

            /* save the first 40 images captured
             *
             * if (i < 40)
             * {
             * flipper.Save("capture-" + i + ".bmp");
             * i++;
             * }
             *
             */

            imageAvailable = false;
            EnableRawImage();
        }
Code Example #43
File: App1.cs Project: MrHayato/SurfaceShooter
 private void TouchTargetOnFrameReceived(object sender, FrameReceivedEventArgs frameReceivedEventArgs)
 {
     //Todo
 }
Code Example #44
File: XBeeDevice.cs Project: Condrat/JellyFish12000
        void m_XBee_FrameReceived(object sender, FrameReceivedEventArgs e)
        {
            if (e.Response is AtCommandResponse)
            {
                NodeDiscover nd = NodeDiscover.Parse((e.Response as AtCommandResponse));
                if (nd != null && nd.ShortAddress != null)
                {
                    m_Nodes.Add(nd);

                    //MainForm.ConsoleWriteLine("XBee: Discovered Node: " + nd);
                }
            }

            if (e.Response is RxResponse64)
            {
                MainForm.ConsoleWriteLine("RX: " + ByteUtil.PrintBytes((e.Response as RxResponse64).Value));
                //Console.WriteLine("Recevied Rx64");
                //Console.WriteLine(ByteUtil.PrintBytes((e.Response as RxResponse64).Value));
            }
            if (e.Response is ZNetRxResponse)
            {
                MainForm.ConsoleWriteLine("RX: " + ByteUtil.PrintBytes((e.Response as ZNetRxResponse).Value));
            }
        }
Code Example #45
 private void _decoder_OnFrameReceived(object sender, FrameReceivedEventArgs e)
 {
     Dispatcher.Invoke(() => player.Source = e.Frame.ToImageSource());
 }