Example #1
        private void FrameReceived(object sender, RtpStream.FrameReceivedEventArgs frea)
        {
            FrameWithTicks frame = new FrameWithTicks(frea.Frame);

            /* A number of considerations led to this design.
             *
             * To get the most accurate timestamp possible, the frame is stamped with the time
             * (by wrapping it in the FrameWithTicks structure), and then the ProcessFrame method
             * is called from the ThreadPool.
             *
             * However, this caused problems because processing the frames and saving them to disk
             * were contending for time on the thread pool.  Due to the nature of Archiver's design,
             * it's important that frames are processed at a higher priority than they are saved to
             * disk: this minimizes the overhead of exceptions (CPU utilization) during high-stress
             * periods, so that frames are lost with the fewest hiccups on the server.
             *
             * So here we choose to process the frame on this thread only if we know that it won't
             * get immediate attention on the thread pool.
             */

            int workerThreads, ioThreads;

            ThreadPool.GetAvailableThreads(out workerThreads, out ioThreads);

            if (workerThreads > 0)
            {
                ThreadPool.QueueUserWorkItem(new WaitCallback(ProcessFrame), frame);
            }
            else
            {
                Trace.WriteLine("No threads available.  Processing frame on EventThrower thread.");
                ProcessFrame(frame);
            }
        }
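The FrameWithTicks structure used above is not shown here. A minimal sketch, assuming it does nothing more than pair the received BufferChunk with a timestamp taken at the moment of receipt (the field names are hypothetical):

        // Hypothetical sketch of the FrameWithTicks wrapper assumed above: it pairs the
        // received frame with the tick count captured as early as possible, so ProcessFrame
        // still has an accurate receive timestamp even if it runs later on the thread pool.
        internal struct FrameWithTicks
        {
            public readonly BufferChunk Frame;
            public readonly long Ticks;

            public FrameWithTicks(BufferChunk frame)
            {
                Frame = frame;
                Ticks = DateTime.Now.Ticks;
            }
        }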
Example #2
        private void Odbior(object sender, RtpStream.FrameReceivedEventArgs ea)
        {
            // Look up the per-stream form data and its controls once, instead of
            // repeating the cast chain on every access.
            formy         form   = (formy)polaczenia[ea.RtpStream];
            NumericUpDown X      = (NumericUpDown)form.pola[0];
            NumericUpDown Y      = (NumericUpDown)form.pola[1];
            NumericUpDown height = (NumericUpDown)form.pola[2];
            NumericUpDown width  = (NumericUpDown)form.pola[3];
            PictureBox    box    = (PictureBox)form.obrazy;

            // Decode the received frame once; calling Image.FromStream repeatedly on the
            // same MemoryStream is wasteful and fails once the stream position has moved.
            MemoryStream ms       = new MemoryStream(ea.Frame.Buffer);
            Image        newImage = Image.FromStream(ms);

            double d = difference(box.Image, newImage, ea);

            if (d > 10)
            {
                // "Wykryto ruch kamera" = "Motion detected, camera"
                this.Invoke((MethodInvoker) delegate() { d_komunikat.Text = "Wykryto ruch kamera: " + (1 + d_tlp_kamery.Controls.IndexOf(box)).ToString(); });
            }
            else
            {
                this.Invoke((MethodInvoker) delegate() { d_komunikat.Text = ""; });
            }

            box.Image = newImage;
            this.Invoke((MethodInvoker) delegate()
            {
                // Resize the PictureBox to the new frame and clamp the region-of-interest
                // controls to the frame dimensions.
                box.Width      = newImage.Width;
                box.Height     = newImage.Height;
                X.Maximum      = newImage.Width;
                Y.Maximum      = newImage.Height;
                height.Maximum = newImage.Height - Y.Value;
                width.Maximum  = newImage.Width - X.Value;

                // Draw the selected region of interest on the displayed frame.
                Graphics g = Graphics.FromImage(box.Image);
                g.DrawRectangle(new Pen(Color.Red), (int)X.Value, (int)Y.Value, (int)width.Value, (int)height.Value);
                box.Refresh();
            });
        }
Example #3
 // Whenever we receive a frame, we have to decode and display it, as shown below.
 private void FrameReceived(object sender, RtpStream.FrameReceivedEventArgs ea)
 {
     System.IO.MemoryStream ms = new MemoryStream(ea.Frame.Buffer);
     IFFmpeg.avcodec_find_decoder(IFFmpeg.CodecID.CODEC_ID_H263);
     IFFmpeg.DecodeFrame(Image.FromStream(ms), native);
     IFFmpeg.ConvertYUV2RGB(yuv, Image.FromStream(ms));
     pictureBox2.Image = Image.FromStream(ms);
 }
Example #4
        private void Odbior(object sender, RtpStream.FrameReceivedEventArgs ea)
        {
            // save the frame to memory
            MemoryStream ms = new MemoryStream(ea.Frame.Buffer);

            // load the video frame into the control
            pb_picture.Image = Image.FromStream(ms);
        }
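One caveat with this pattern: GDI+ requires the stream passed to Image.FromStream to remain open for the lifetime of the returned Image. A hedged variant that copies the decoded frame so the MemoryStream can be released immediately:

        private void Odbior(object sender, RtpStream.FrameReceivedEventArgs ea)
        {
            using (MemoryStream ms = new MemoryStream(ea.Frame.Buffer))
            using (Image decoded = Image.FromStream(ms))
            {
                // Deep-copy the frame so the displayed image no longer depends on the stream.
                pb_picture.Image = new Bitmap(decoded);
            }
        }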
Example #5
        static void stream_FrameReceived(object sender, RtpStream.FrameReceivedEventArgs ea)
        {
            Console.Out.WriteLine("Frame received of length: " + ea.Frame.Length);

            byte[] hash = MD5.Create().ComputeHash(ea.Frame.Buffer, ea.Frame.Index, ea.Frame.Length);

            // compare arrays:
            for (int i = 0; i < oracleHash.Length; i++)
            {
                if (oracleHash[i] != hash[i])
                {
                    Console.Out.WriteLine("Hash values not equal!");
                    return;
                }
            }
            Console.Out.WriteLine("Hash value is good!");
        }
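oracleHash is not defined in the snippet; presumably it is the MD5 of the payload computed on the sending side before transmission. A minimal sketch of that setup (the helper name is hypothetical):

        // Hypothetical sender-side setup for the check above: hash the payload once
        // before sending it, so the receiver can compare against this "oracle" value.
        static byte[] oracleHash;

        static void PrepareOracle(byte[] payload)
        {
            oracleHash = MD5.Create().ComputeHash(payload);
        }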
Example #6
        private void FrameReceived(object sender, RtpStream.FrameReceivedEventArgs ea)
        {
            BufferChunk frame = ea.Frame;

            if (frame.Length != data.Length)
            {
                throw new TestCaseException(string.Format("Lengths don't match! lengthSent: {0}, lengthRecv'd: {1}",
                                                          data.Length, frame.Length));
            }

            for (int i = 0; i < frame.Length; i++)
            {
                if (frame[i] != data[i])
                {
                    throw new TestCaseException(string.Format("Bytes don't match! Index: {0}, byteSent: {1}, byteRecv'd: {2}",
                                                              i.ToString(), data[i].ToString(), frame[i].ToString()));
                }
            }

            are.Set();
        }
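The are.Set() call suggests the test thread blocks until the handler has verified the echoed bytes. A minimal sketch of that driver side, assuming are is an AutoResetEvent and the timeout value is purely illustrative:

        // Hypothetical driver side of the test: after sending `data`, wait for
        // FrameReceived to signal that the bytes round-tripped correctly.
        AutoResetEvent are = new AutoResetEvent(false);

        void WaitForEcho()
        {
            if (!are.WaitOne(TimeSpan.FromSeconds(5)))
            {
                throw new TestCaseException("Timed out waiting for the echoed frame.");
            }
        }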
Example #7
        private void FrameReceived(object sender, RtpStream.FrameReceivedEventArgs ea)
        {
            try
            {
                if (firstFrame)
                {
                    Bitmap DrawImage = new Bitmap(video_width, video_height);

                    EncoderParameter epQuality = new EncoderParameter(Encoder.Quality, video_quality);
                    // Store the quality parameter in the list of encoder parameters
                    EncoderParameters epParameters = new EncoderParameters(1);
                    epParameters.Param[0] = epQuality;

                    MemoryStream ms = new MemoryStream();
                    DrawImage.Save(ms, GetImageCodecInfo(ImageFormat.Jpeg), epParameters);

                    Array.Copy(ms.GetBuffer(), 0, JpegHeader, 0, offset);
                    firstFrame = false;
                }

                byte[] data = new byte[ea.Frame.Buffer.Length + offset];
                Array.Copy(JpegHeader, data, JpegHeader.Length);
                Array.Copy(ea.Frame.Buffer, 0, data, offset, ea.Frame.Buffer.Length);
                System.IO.MemoryStream msImage = new MemoryStream(data);

                VsImage img   = new VsImage((Bitmap)Image.FromStream(msImage));
                int     index = GetIndex(ea.RtpStream.IPAddress);

                if (index != -1 && vsRtpStream[index] != null && vsRtpStream[index].FrameOut != null)
                {
                    vsRtpStream[index].FrameOut(this, new VsImageEventArgs(img));
                }
                img.Dispose();
                img = null;
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }
Example #8
        private void FrameReceived(object sender, RtpStream.FrameReceivedEventArgs ea)
        {
            try
            {
                if (firstFrame)
                {
                    Console.WriteLine("First Frame");
                    Bitmap DrawImage = new Bitmap(ImgWidth, ImgHeight);

                    EncoderParameter epQuality = new EncoderParameter(Encoder.Quality, Quality);
                    // Store the quality parameter in the list of encoder parameters
                    EncoderParameters epParameters = new EncoderParameters(1);
                    epParameters.Param[0] = epQuality;

                    MemoryStream ms = new MemoryStream();
                    DrawImage.Save(ms, GetImageCodecInfo(ImageFormat.Jpeg), epParameters);

                    offset     = search_jpegoffset(ms);
                    JpegHeader = new byte[offset];
                    Array.Copy(ms.GetBuffer(), 0, JpegHeader, 0, offset);
                    firstFrame = false;
                }

                byte[] data = new byte[ea.Frame.Buffer.Length + offset];
                Array.Copy(JpegHeader, data, JpegHeader.Length);
                Array.Copy(ea.Frame.Buffer, 0, data, offset, ea.Frame.Buffer.Length);
                System.IO.MemoryStream msImage = new MemoryStream(data);

                Console.WriteLine("Received New Frame");

                pictureBox1.Image = Image.FromStream(msImage);
            }
            catch (Exception ex)
            {
                Console.WriteLine("Received Error :" + ex.Message);
            }
        }
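search_jpegoffset is not shown. Assuming its job is to find where the reusable JPEG header ends (i.e. where the entropy-coded scan data begins), a hypothetical sketch would locate the Start Of Scan marker (0xFF 0xDA) and skip its segment; this is an illustration, not the original implementation:

        // Hypothetical sketch of a search_jpegoffset-style helper: return the offset
        // just past the SOS (Start Of Scan, 0xFF 0xDA) segment, i.e. the length of the
        // JPEG header that can be prepended to every received frame.
        static int FindJpegScanOffset(MemoryStream ms)
        {
            byte[] jpeg = ms.ToArray();
            for (int i = 0; i + 3 < jpeg.Length; i++)
            {
                if (jpeg[i] == 0xFF && jpeg[i + 1] == 0xDA)
                {
                    // The two big-endian bytes after the marker give the SOS segment length,
                    // which includes the length field itself.
                    int segmentLength = (jpeg[i + 2] << 8) | jpeg[i + 3];
                    return i + 2 + segmentLength;
                }
            }
            return -1;  // no SOS marker found
        }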
Example #9
 private void FrameReceived(object sender, RtpStream.FrameReceivedEventArgs ea)
 {
     ShowMessage(string.Format("{0}: {1}", ea.RtpStream.Properties.Name, (string)ea.Frame));
 }
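The (string)ea.Frame cast relies on BufferChunk's explicit string conversion. A hedged equivalent that decodes the payload explicitly, assuming the frame carries UTF-8 text and using the Buffer/Index/Length members seen in the other examples:

 // Explicit decode of the frame payload, assuming it carries UTF-8 text.
 private void FrameReceived(object sender, RtpStream.FrameReceivedEventArgs ea)
 {
     string text = Encoding.UTF8.GetString(ea.Frame.Buffer, ea.Frame.Index, ea.Frame.Length);
     ShowMessage(string.Format("{0}: {1}", ea.RtpStream.Properties.Name, text));
 }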
Example #10
        void RtpStream_FrameReceived(object sender, RtpStream.FrameReceivedEventArgs ea)
        {
            //   System.Diagnostics.Debug.WriteLine(ea.RtpStream.Properties.CName+""+ea.RtpStream.Properties.Name);
            window.Dispatcher.Invoke((Action)(() => {
                if (image == null && vpList != null)
                {
                    System.IO.MemoryStream ms = new System.IO.MemoryStream(ea.Frame.Buffer);

                    int sizeBytes = 0;
                    Byte[] buffer = new Byte[4];
                    ms.Read(buffer, 0, 4);
                    sizeBytes = BitConverter.ToInt32(buffer, 0);
                    Byte[] imageData = new Byte[sizeBytes];
                    ms.Read(imageData, 0, imageData.Length);
                    string[] hostIPS = ea.RtpStream.Properties.CName.Split('#');

                    foreach (var ip in hostIPS)
                    {
                        if (vpList.ContainsKey(IPAddress.Parse(ip)))
                        {
                            vpList[IPAddress.Parse(ip)].prev.Source = GetImage(imageData).Source;
                            break;
                        }
                    }
                    Byte[] audio = new Byte[ms.Length - ms.Position];
                    ms.Read(audio, 0, audio.Length);

                    {
                        waveProvider.AddSamples(audio, 0, audio.Length);
                        if (waveOut.PlaybackState != PlaybackState.Playing)
                        {
                            waveOut.Play();
                        }
                    }
                }
                else if (vpList == null && image != null)
                {
                    image.Source = GetImage(ea.Frame.Buffer).Source;
                }
                else if (vpList == null && image == null)
                {
                    System.IO.MemoryStream ms = new System.IO.MemoryStream(ea.Frame.Buffer);

                    Byte[] audio = new Byte[ms.Length];
                    ms.Read(audio, 0, audio.Length);
                    if (ea.RtpStream.FramesReceived > 10)
                    {
                        waveProvider.AddSamples(audio, 0, audio.Length);
                    }
                    //        if (waveOut.PlaybackState != PlaybackState.Playing)
                    waveOut.Play();
                    string[] hostIPS = ea.RtpStream.Properties.CName.Split('#');
                    foreach (var ip in hostIPS)
                    {
                        IPAddress IP = IPAddress.Parse(ip);
                        if (apList.ContainsKey(IP))
                        {
                            if (apList[IP].canRecord)
                            {
                                string Path = AppDomain.CurrentDomain.BaseDirectory + "\\" + IP.ToString().Replace('.', '\\').ToString() + "\\" + DateTime.Now.ToString("yy.MM.dd.HH.mm.ss.ffff") + ".wav";
                                if (!waveWriters.ContainsKey(IP))
                                {
                                    waveWriters.Add(IP, new WaveFileWriter(Path, AudioConf.audio.sourceStream.WaveFormat));
                                }

                                waveWriters[IP].WriteData(audio, 0, audio.Length);
                                waveWriters[IP].Flush();
                            }
                        }
                    }
                }
            }));
        }
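This handler assumes each combined frame is laid out as a 4-byte little-endian image length, the image bytes, and then raw audio samples. A hypothetical sender-side packing routine matching that layout:

        // Hypothetical packing that matches the layout parsed above:
        // [4-byte image length][image bytes][audio bytes].
        static byte[] PackFrame(byte[] imageBytes, byte[] audioBytes)
        {
            using (MemoryStream ms = new MemoryStream())
            {
                ms.Write(BitConverter.GetBytes(imageBytes.Length), 0, 4);
                ms.Write(imageBytes, 0, imageBytes.Length);
                ms.Write(audioBytes, 0, audioBytes.Length);
                return ms.ToArray();
            }
        }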
Example #11
        /// <summary>
        /// Processes a message received over the RTP network interface.
        /// </summary>
        private void HandleFrameReceived(object sender, RtpStream.FrameReceivedEventArgs args)
        {
            Debug.Assert(args.RtpStream == this.m_RtpStream);

            BufferChunk chunk = args.Frame;

            try {
                // Attempt to deserialize the contents of the frame.
                using (MemoryStream stream = new MemoryStream(chunk.Buffer, chunk.Index, chunk.Length)) {
                    object message = this.m_Serializer.Deserialize(stream);

                    // If the message is chunked, process it with the Chunk Assembler.
                    // (Messages don't have to be chunked, in which case they are processed
                    // immediately below).  If the chunk is the last remaining chunk composing
                    // a message, then the Assembler returns the completed message.
                    if (message is Chunk)
                    {
                        Chunk chunked = ((Chunk)message);
                        // Ensure that no more NACKs are sent for this chunk, if it was recovered.
                        if (chunked.FrameSequence > ulong.MinValue)
                        {
                            this.m_Sender.NackManager.Discard(this.m_RtpStream.SSRC, chunked.FrameSequence);
                        }

                        // Ensure that no futile NACKs are sent for unrecoverable chunks.
                        if (chunked.OldestRecoverableFrame - 1 > ulong.MinValue)
                        {
                            this.m_Sender.NackManager.Discard(this.m_RtpStream.SSRC,
                                                              new Range(ulong.MinValue, chunked.OldestRecoverableFrame - 1));
                        }

                        message = this.m_Assembler.Add(chunked);
                    }
                    else if (message is RtpNackMessage)
                    {
                        // Otherwise, if the message is a NACK, delegate it to the RTP sender,
                        // which attempts to resend chunks from its buffer.
                        RtpNackMessage nack = ((RtpNackMessage)message);
                        this.m_Sender.ProcessNack(nack);
                        return;
                    }

                    // If we have a valid/complete message, queue it for processing on the separate
                    // message execution thread.
                    if (message is IEnumerable <object> )
                    {
                        foreach (object decoded in (IEnumerable <object>)message)
                        {
                            if (decoded is Message)
                            {
                                // NOTE: Added by CMPRINCE for testing network performance
                                // Save this message
                                this.m_Model.ViewerState.Diagnostic.LogMessageRecv((Message)decoded);

                                Debug.WriteLine(string.Format("Received message: {0} bytes, type {1}",
                                                              chunk.Length, decoded.GetType()), this.GetType().ToString());
                                Debug.Indent();
                                try {
                                    this.m_Queue.ProcessMessage((Message)decoded);
                                } finally {
                                    Debug.Unindent();
                                }
                            }
                            else
                            {
                                Trace.WriteLine("Received invalid message: " + decoded != null ? decoded.GetType().ToString() : null,
                                                this.GetType().ToString());
                            }
                        }
                    }
                    else if (message != null)
                    {
                        Trace.WriteLine("Received invalid message: " + message.GetType().ToString(),
                                        this.GetType().ToString());
                    }
                }
            }

            catch (Exception e) {
                // The application should not crash on account of malformed messages sent from remote clients.
                // Therefore, we catch all exceptions.  We only want to print an error message.
                Trace.WriteLine("Error deserializing a message: " + e.ToString() + "\r\n" + e.StackTrace, this.GetType().ToString());
            }
        }
Example #12
        private double difference(Image OrginalImage, Image SecoundImage, RtpStream.FrameReceivedEventArgs ea)
        {
            double percent = 0;
            int    X       = ((int)((NumericUpDown)(((formy)(polaczenia[ea.RtpStream])).pola[0])).Value);
            int    Y       = ((int)((NumericUpDown)(((formy)(polaczenia[ea.RtpStream])).pola[1])).Value);
            int    height  = ((int)((NumericUpDown)(((formy)(polaczenia[ea.RtpStream])).pola[2])).Value);
            int    width   = ((int)((NumericUpDown)(((formy)(polaczenia[ea.RtpStream])).pola[3])).Value);

            try
            {
                float counter = 0;

                Random rand = new Random();

                Bitmap bt1 = new Bitmap(OrginalImage);
                Bitmap bt2 = new Bitmap(SecoundImage);
                if (((NumericUpDown)(((formy)(polaczenia[ea.RtpStream])).pola[2])).Value != 0 && ((NumericUpDown)(((formy)(polaczenia[ea.RtpStream])).pola[3])).Value != 0)
                {
                    float total = height * width;

                    Color pixel_image1;
                    Color pixel_image2;


                    for (int i = 0; i < Math.Round(total * 0.1); i++)
                    {
                        int x = rand.Next(X, X + width);
                        int y = rand.Next(Y, Y + height);

                        byte srednia1 = (byte)((bt1.GetPixel(x, y).R + bt1.GetPixel(x, y).G + bt1.GetPixel(x, y).B) / 3);
                        pixel_image1 = Color.FromArgb(srednia1, srednia1, srednia1);
                        byte srednia2 = (byte)((bt2.GetPixel(x, y).R + bt2.GetPixel(x, y).G + bt2.GetPixel(x, y).B) / 3);
                        pixel_image2 = Color.FromArgb(srednia2, srednia2, srednia2);

                        if (Math.Abs(pixel_image1.R - pixel_image2.R) > 10)
                        {
                            counter++;
                        }
                    }
                    percent = (counter / Math.Round(total * 0.1)) * 100;
                }
                else
                {
                    int size_H = bt1.Size.Height;
                    int size_W = bt1.Size.Width;

                    float total = size_H * size_W;

                    Color pixel_image1;
                    Color pixel_image2;


                    for (int i = 0; i < Math.Round(total * 0.1); i++)
                    {
                        int x = rand.Next(bt1.Size.Width);
                        int y = rand.Next(bt1.Size.Height);

                        byte srednia1 = (byte)((bt1.GetPixel(x, y).R + bt1.GetPixel(x, y).G + bt1.GetPixel(x, y).B) / 3);
                        pixel_image1 = Color.FromArgb(srednia1, srednia1, srednia1);
                        byte srednia2 = (byte)((bt2.GetPixel(x, y).R + bt2.GetPixel(x, y).G + bt2.GetPixel(x, y).B) / 3);
                        pixel_image2 = Color.FromArgb(srednia2, srednia2, srednia2);

                        if (Math.Abs(pixel_image1.R - pixel_image2.R) > 10)
                        {
                            counter++;
                        }
                    }
                    percent = (counter / Math.Round(total * 0.1)) * 100;
                }
            }
            catch (Exception) { percent = 0; }
            return(percent);
        }
Example #13
 private void FrameReceived(object sender, RtpStream.FrameReceivedEventArgs ea)
 {
     System.IO.MemoryStream ms = new MemoryStream(ea.Frame.Buffer);
     pictureBox_Receive.Image = Image.FromStream(ms);
 }