//Create functions to pass as delegate
// Delegate target for gesture packets: advances to the next model in
// models_names and asks the offload server to switch to it.
// Returns null — the model-change request is sent directly, so the
// dispatcher has nothing to reply with.
static SocketData handleGesture(SocketData d)
{
    Console.WriteLine(d.message_type);

    // Wrap around instead of incrementing unboundedly: the original
    // model_idx++ eventually indexed past the end of models_names and
    // threw IndexOutOfRangeException after enough gestures.
    model_idx = (model_idx + 1) % models_names.Length;
    string s = models_names[model_idx];

    //send register task
    offload_client.sendDataPacket(new SocketData(PacketType.MODEL_CHANGE, 1280, 720, 0, Encoding.ASCII.GetBytes(s)));
    return null;
}
// Routes a locally received packet to its handler based on message type.
// Currently only REGISTER_TASK_REPLY has a local handler; everything else
// is ignored.
static void localPacketDispatch(SocketData d)
{
    if (d.message_type == PacketType.REGISTER_TASK_REPLY)
    {
        configureOffload(d);
    }
}
// Handles a REGISTER_TASK_REPLY from the offload server: marks the offload
// link as ready, resets to the first model, and builds the MODEL_CHANGE
// packet that selects it.
// Returns the MODEL_CHANGE packet for the caller to send.
static SocketData configureOffload(SocketData d)
{
    if (d.message_type == PacketType.REGISTER_TASK_REPLY)
    {
        offload_client_ready = true;
    }

    // Start from the first model in the list.
    model_idx = 0;
    string modelName = models_names[model_idx];

    //send register task
    return new SocketData(PacketType.MODEL_CHANGE, 1280, 720, 0, Encoding.ASCII.GetBytes(modelName));
}
// Reads one framed SocketData from the socket: the fixed-size header first,
// then message_length bytes of body (if any).
// Returns null when the header read fails (connection gone / short read).
public static SocketData receiveDataPacket(Socket socket)
{
    byte[] header = receiveLength(socket, (UInt32)SocketData.getHeaderSize());
    if (header == null)
    {
        return null;
    }

    SocketData d = new SocketData(header);
    if (d.message_length > 0)
    {
        d.data = receiveLength(socket, d.message_length);
    }
    return d;
}
// Displays the captured frame locally, then ships its raw bytes to the
// server as a VIDEO_FRAME packet tagged with this client's id.
private void Process(Image<Bgr, byte> frame)
{
    CvInvoke.Imshow("frame", frame);
    CvInvoke.WaitKey(1);

    uint byteCount = (uint)frame.Bytes.Length;
    Console.WriteLine("Byte[] Length = {0}", byteCount);

    SocketData packet = new SocketData(PacketType.VIDEO_FRAME, clientID, 13, 14, byteCount, frame.Bytes);
    CAPINetworkUtility.sendDataPacket(socket, packet);
}
// Sends a CONNECTION_ACCEPT handshake packet (no payload) back to the
// newly connected client, echoing this client's id in stream_id.
private void AcceptConnection(Socket socket)
{
    SocketData accept = new SocketData
    {
        message_type = (UInt32)PacketType.CONNECTION_ACCEPT,
        stream_id = (uint)clientID,
        frame_id = 42,
        message_length = 0,
        message_id = 0,
        data = null,
    };
    CAPINetworkUtility.sendDataPacket(socket, accept);
}
// Converts a network packet carrying a YUV frame into a BGR image and raises
// the FrameFetched event with it. Always returns null — consumers receive
// the frame through the event, not through the dispatcher's reply path.
private SocketData FetchImageFromNetwork(SocketData d /*frame is passed*/) //converts frame into usable frame
{
    #region declarations
    Image<Bgr, Byte> return_image;
    #endregion

    #region Convert to usable image + place in return_image using Peter's DLLs.
    // Contact Peter Zientara about this piece of code.
    if (d == null ) return null;

    // 3 bytes per pixel (BGR) for the converted output.
    int size = stream_width * stream_height * 3;
    byte[] rgb_data = new byte[size];
    unsafe
    {
        // Copy the managed YUV payload into unmanaged memory for the native converter.
        IntPtr byteArray = Marshal.AllocHGlobal(d.data.Length);
        Marshal.Copy(d.data, 0, byteArray, d.data.Length);
        IntPtr rgb_data_ptr;
        rgb_data_ptr = convertYUVtoRGB(byteArray, stream_width, stream_height);
        Marshal.FreeHGlobal(byteArray);
        // NOTE(review): rgb_data_ptr is never freed here — presumably the native
        // side owns/reuses that buffer; confirm against convertYUVtoRGB's contract.
        Marshal.Copy(rgb_data_ptr, rgb_data, 0, size);
    }
    Image<Bgr, Byte> converted_image = new Image<Bgr, Byte>(stream_width, stream_height);
    Buffer.BlockCopy(rgb_data, 0, converted_image.Data, 0, size);
    //CvInvoke.cvShowImage("frame", image);
    //CvInvoke.cvWaitKey(1);
    return_image = converted_image;
    #endregion

    // Publish the frame to subscribers; nothing is returned to the caller.
    FrameFetchedEventArgs argsNetwork = new FrameFetchedEventArgs();
    argsNetwork.Frame = return_image;
    FrameFetched?.Invoke(this, argsNetwork);
    return null;
}
// Thin forwarding wrapper: hands the incoming packet to the frame assembler.
public void addDataPacket(SocketData d)
{
    assembler.addDataPacket(d);
}
// Adds one packet segment to the in-flight frame for its stream.
// Returns the assembled frame's bytes when this segment completes a frame,
// otherwise null (including when the packet starts a brand-new frame).
public byte[] addDataPacket(SocketData d)
{
    int streamId = (int)d.stream_id;

    // Single TryGetValue lookup instead of ContainsKey + TryGetValue,
    // which hashed the key twice for every packet on an existing stream.
    H264_TimeoutFrame tf;
    if (live_packets.TryGetValue(streamId, out tf))
    {
        return tf.p.addSegment((int)d.message_id, d.data);
    }

    // First segment seen for this stream: start a new timeout-tracked frame.
    live_packets.Add(streamId, new H264_TimeoutFrame(packet_timeout_ms, (int)d.frame_id, streamId, d.data, removeFromLive, (int)d.message_id));
    return null;
}
// Delegate target for VIDEO_FRAME packets: decodes an incoming H.264 packet,
// ships the decoded frame to the offload server, and draws the ROI regions
// the server sends back. Always returns null (nothing is replied through
// the dispatcher).
static SocketData processFrame(SocketData d)
{
    //Mat img = new Mat((int)d.stream_id, (int)d.message_id, Emgu.CV.CvEnum.DepthType.Cv8U, 4);
    //CvInvoke.Imdecode(d.data, Emgu.CV.CvEnum.LoadImageType.AnyColor, img);
    //CvInvoke.Imshow("image", img);
    //writ.Write(d.data);
    #region - Android / Glass Testing
    if (d != null)
    {
        if (h264_encode)
        {
            byte[] frame_data = null;
            int decodedFrameLength = 0;
            unsafe
            {
                // Copy the packet payload into unmanaged memory for the native decoder.
                IntPtr byteArray = Marshal.AllocHGlobal(d.data.Length);
                Marshal.Copy(d.data, 0, byteArray, d.data.Length);
                decodedFrameLength = decode_packet(byteArray, d.data.Length, width, height);
                Marshal.FreeHGlobal(byteArray);
            }
            if (decodedFrameLength > 0)
            {
                // Pull the decoded frame (4 bytes per pixel, BGRA) out of the native decoder.
                frame_data = new byte[width * height * 4];
                IntPtr byteArray = getFrame();
                Marshal.Copy(byteArray, frame_data, 0, width * height * 4);
                Image<Bgra, byte> x = new Image<Bgra, byte>(width, height);
                Buffer.BlockCopy(frame_data, 0, x.Data, 0, width * height * 4);
                //blocking call to offload///// - will combine in future versions
                offload_client.sendDataPacket(new SocketData(PacketType.VIDEO_FRAME, (uint)width, (uint)height, 0, frame_data));
                SocketData work_reply = offload_client.receiveDataPacket();
                ///////////////////////////////
                // NOTE(review): frame_id == 0 appears to signal "regions available";
                // confirm against the offload server's reply convention.
                if (work_reply.frame_id == 0)
                {
                    ROIRegion[] r = ROIRegion.extract_regions(work_reply.data, (int)work_reply.message_length);
                    foreach (ROIRegion reg in r)
                    {
                        // Each region is a quadrilateral given by four corner points;
                        // draw it in red and emit guidance for it.
                        Point[] contour = new Point[4];
                        contour[0] = new Point((int)reg.x0, (int)reg.y0);
                        contour[1] = new Point((int)reg.x1, (int)reg.y1);
                        contour[2] = new Point((int)reg.x2, (int)reg.y2);
                        contour[3] = new Point((int)reg.x3, (int)reg.y3);
                        x.Draw(contour, new Bgra(0, 0, 255, 255), 3);
                        write_guidance(contour, x);
                    }
                    CvInvoke.Imshow("decoded_frame", x);
                    CvInvoke.WaitKey(1);
                }
                else
                {
                    // No usable regions in the reply; still show the decoded frame.
                    Console.WriteLine("no good draw");
                    CvInvoke.Imshow("decoded_frame", x);
                    CvInvoke.WaitKey(1);
                }
            }
        }
        else
        {
            /* int size = width * height * 4; byte[] rgb_data = new byte[size]; unsafe { IntPtr byteArray = Marshal.AllocHGlobal(d.data.Length); Marshal.Copy(d.data, 0, byteArray, d.data.Length); IntPtr rgb_data_ptr; rgb_data_ptr = convertYUVtoRGB(byteArray, width, height); Marshal.FreeHGlobal(byteArray); Marshal.Copy(rgb_data_ptr, rgb_data, 0, size); } */
            // Raw (non-H.264) path: treat the payload as an 8-bit grayscale frame.
            // NOTE(review): the copy length is hard-coded to 640*480 while the image
            // is width x height — confirm these always agree in this mode.
            Image<Gray, Byte> image = new Image<Gray, Byte>(width, height);
            Buffer.BlockCopy(d.data, 0, image.Data, 0, 640 * 480);
            CvInvoke.Imshow("frame", image);
            CvInvoke.WaitKey(1);
            // image.Resize(640, 360, Emgu.CV.CvEnum.Inter.Cubic);
            // return image.Bytes ;
            return null;
        }
    }
    #endregion
    #region -- Glove Testing
    /* if(d != null){ int size = width * height * 4; byte[] rgb_data = new byte[size]; unsafe { IntPtr byteArray = Marshal.AllocHGlobal(d.data.Length); Marshal.Copy(d.data, 0, byteArray, d.data.Length); IntPtr rgb_data_ptr; rgb_data_ptr = convertYUVtoRGB(byteArray, width, height); Marshal.FreeHGlobal(byteArray); Marshal.Copy(rgb_data_ptr, rgb_data, 0, size); } Image<Bgra, Byte> image = new Image<Bgra, Byte>(width, height); Buffer.BlockCopy(rgb_data, 0, image.Data, 0, size); CvInvoke.Imshow("frame", image); CvInvoke.WaitKey(1); } */
    #endregion
    return null;
}
// Looks up the registered handler for this packet's message type, invokes
// it, and sends any reply packet the handler produced back to the peer.
// Packets with no registered handler are silently dropped (as before).
private void socketDataDispatch(SocketData pkt)
{
    // Single TryGetValue lookup instead of ContainsKey + indexer,
    // which hashed the key twice for every dispatched packet.
    PacketProcess del;
    if (!delegateFunctions.TryGetValue(pkt.message_type, out del))
    {
        return;
    }

    SocketData returnPacket = del(pkt);
    if (returnPacket != null)
    {
        sendDataPacket(returnPacket);
    }
}
// Reads one framed SocketData from the stream: the fixed-size header first,
// then message_length bytes of body (if any).
// A failed header read tears the connection down via Shutdown() and yields null.
private SocketData ReceiveDataPacket(NetworkStream stream)
{
    byte[] header = readStream(stream, (UInt32)SocketData.getHeaderSize());
    if (header == null)
    {
        Shutdown();
        return null;
    }

    SocketData d = new SocketData(header);
    if (d.message_length > 0)
    {
        d.data = readStream(stream, d.message_length);
    }
    return d;
}
// Stamps the packet with this client's id, then serializes and sends it.
public void sendDataPacket(SocketData data)
{
    data.stream_id = clientId; //append clientID
    byte[] wire = data.toByteArray();
    sendData(wire);
}
// Serializes the packet and writes it to the socket in one blocking Send
// (a blocking Socket.Send transmits the whole buffer or throws).
public static void sendDataPacket(Socket socket, SocketData data)
{
    byte[] payload = data.toByteArray();
    socket.Send(payload);
}