Example #1
        static void Main(string[] args)
        {
            Capture                    file;
            Image <Bgr, Byte>          frame;                                     //Frame from video
            Image <Bgr, Byte>          frame2;                                    //Clone of frame - addresses null frame.Data
            Stack <Image <Bgr, Byte> > frames = new Stack <Image <Bgr, Byte> >(); //Contains frames

            file = new Capture("competition_1_1_xvid.avi");
            VideoWriter videoOutput = new VideoWriter("output.avi", 0, 30, 720, 480, true);

            while (true)
            {
                frame = file.QueryFrame();
                if (frame == null)
                {
                    break;                      //Break after last frame
                }
                frame2 = frame.Clone();
                frames.Push(frame2);
            }
            Console.WriteLine("Number of frames: {0}", frames.Count);

            while (frames.Count > 0)
            {
                videoOutput.WriteFrame(frames.Pop());
            }

            videoOutput.Dispose();
        }
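The Stack above writes the frames back out in reverse order, since the last frame pushed is the first one popped. If the original order should be kept, a Queue-based variant works; the sketch below assumes the same Emgu CV 2.x Capture/VideoWriter API used in this example (method name and file names are illustrative).

        static void WriteFramesInOriginalOrder(string inputFile, string outputFile)
        {
            // Buffer frames in a Queue so they come back out in capture order.
            Queue <Image <Bgr, Byte> > orderedFrames = new Queue <Image <Bgr, Byte> >();

            using (Capture source = new Capture(inputFile))
            using (VideoWriter output = new VideoWriter(outputFile, 0, 30, 720, 480, true))
            {
                Image <Bgr, Byte> current;
                while ((current = source.QueryFrame()) != null)
                {
                    orderedFrames.Enqueue(current.Clone()); // clone: QueryFrame reuses its internal buffer
                }

                while (orderedFrames.Count > 0)
                {
                    using (Image <Bgr, Byte> frame = orderedFrames.Dequeue())
                    {
                        output.WriteFrame(frame);
                    }
                }
            }
        }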
 protected override void Process()
 {
     while (RunCaptureThread)
     {
         FVideoWriter.WriteFrame(Image.Image);
     }
 }
        void StopRecording()
        {
            if (!_isRecording)
            {
                return;
            }

            //CvInvoke.CV_FOURCC('P','I','M','1')    = MPEG-1 codec
            //CvInvoke.CV_FOURCC('M','J','P','G')    = motion-jpeg codec (does not work well)
            //CvInvoke.CV_FOURCC('M', 'P', '4', '2') = MPEG-4.2 codec
            //CvInvoke.CV_FOURCC('D', 'I', 'V', '3') = MPEG-4.3 codec
            //CvInvoke.CV_FOURCC('D', 'I', 'V', 'X') = MPEG-4 codec
            //CvInvoke.CV_FOURCC('U', '2', '6', '3') = H263 codec
            //CvInvoke.CV_FOURCC('I', '2', '6', '3') = H263I codec
            //CvInvoke.CV_FOURCC('F', 'L', 'V', '1') = FLV1 codec

            using (VideoWriter vw = new VideoWriter(_fileName, 0, 30, 640, 480, true))
            {
                for (int i = 0; i < _videoArray.Count(); i++)
                {
                    vw.WriteFrame <Rgb, Byte>(_videoArray[i]);
                }
            }
            _fileName = string.Empty;
            _videoArray.Clear();
            _isRecording = false;
        }
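The FOURCC comments above list the codec codes; passing one of them as the second VideoWriter argument selects the codec directly instead of using 0. A minimal sketch, assuming the Emgu CV 2.x CvInvoke.CV_FOURCC helper and the same 640x480 frame size (method and parameter names are illustrative):

        void SaveWithExplicitCodec(List <Image <Rgb, Byte> > frames, string fileName)
        {
            // CV_FOURCC('D','I','V','X') selects the MPEG-4 codec from the list above.
            int codec = CvInvoke.CV_FOURCC('D', 'I', 'V', 'X');

            using (VideoWriter vw = new VideoWriter(fileName, codec, 30, 640, 480, true))
            {
                foreach (Image <Rgb, Byte> frame in frames)
                {
                    vw.WriteFrame(frame);
                }
            }
        }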
        public void CreateVideo(int lastStep, int snapTime)
        {
            if (!Directory.Exists(outputPath))
            {
                Directory.CreateDirectory(outputPath);
            }
            var a = new VideoWriter(outputPath + videoName, Emgu.CV.CvInvoke.CV_FOURCC('M', 'J', 'P', 'G'), 30, width, height, true);

            for (var i = 0; i <= lastStep; i++)
            {
                if ((i + 1) % 100 == 0)
                {
                    GC.Collect();
                }
                if ((i + 1) % 50 == 0)
                {
                    System.Console.WriteLine("making movie: " + i);
                }

                if ((i + 1) % snapTime != 0)
                {
                    continue;
                }
                using (var sourceImage = Image.FromFile(inputPath + i + ".jpg"))
                using (var addPic = new Bitmap(sourceImage))
                using (var frame = new Image <Rgb, byte>(addPic))
                {
                    a.WriteFrame(frame);
                }
                Thread.Sleep(20);
            }

            a.Dispose();
        }
Example #5
        private void Record(int num)
        {
            if (num == 0)
            {
                nameVideo = String.Format("{0}{1}{2}", directorioK1V, DateTime.Now.ToString("MMddyyyyHmmss"), ".avi");

                using (VideoWriter vi = new VideoWriter(nameVideo, 0, 30, 640, 480, true))
                {
                    for (int i = 0; i < videoColor1.Count(); i++)
                    {
                        vi.WriteFrame <Bgr, Byte>(videoColor1[i]);
                    }
                }

                nameVideo = string.Empty;
                videoColor1.Clear();
            }
            else
            {
                nameVideo = String.Format("{0}{1}{2}", directorioK2V, DateTime.Now.ToString("MMddyyyyHmmss"), ".avi");

                using (VideoWriter vi = new VideoWriter(nameVideo, 0, 30, 640, 480, true))
                {
                    for (int i = 0; i < videoColor2.Count(); i++)
                    {
                        vi.WriteFrame <Bgr, Byte>(videoColor2[i]);
                    }
                }

                nameVideo = string.Empty;
                videoColor2.Clear();
            }
        } //end of Record()
Example #6
        public static void CreateVideo(CrowdCountingRegression regression, int N, int M, int width, int height, IEnumerable <string> frames, List <IList <PointF> > outputs)
        {
            MathN::Matrix <double> shouldBe = PeoplePositions.GridQuantize(outputs, N, M, width, height);

            using (VideoWriter numberVideoWriter = new VideoWriter("D:\\video_predictions.avi", fps: 1, width: width, height: height, isColor: true))
                using (VideoWriter differenceVideoWriter = new VideoWriter("D:\\video_differences.avi", fps: 1, width: width, height: height, isColor: true))
                {
                    int cellHeight = height / N;
                    int cellWidth  = width / M;

                    MathN::Matrix <double> prediction = regression.Predict(new List <string>(frames));
                    int frameID = 0;
                    foreach (string framePath in frames)
                    {
                        using (Image <Bgr, Byte> countFrame = new Image <Bgr, Byte>(framePath))
                            using (Image <Bgr, Byte> differenceFrame = new Image <Bgr, Byte>(framePath))
                            {
                                for (int i = 1; i < N; ++i)
                                {
                                    LineSegment2D line = new LineSegment2D(
                                        new Point(0, i * cellHeight),
                                        new Point(width, i * cellHeight));
                                    countFrame.Draw(line, new Bgr(Color.Yellow), 2);
                                    differenceFrame.Draw(line, new Bgr(Color.Red), 2);
                                }

                                for (int j = 1; j < M; ++j)
                                {
                                    LineSegment2D line = new LineSegment2D(
                                        new Point(j * cellWidth, 0),
                                        new Point(j * cellWidth, height));
                                    countFrame.Draw(line, new Bgr(Color.Yellow), 2);
                                    differenceFrame.Draw(line, new Bgr(Color.Red), 2);
                                }


                                for (int i = 0; i < N; ++i)
                                {
                                    for (int j = 0; j < M; ++j)
                                    {
                                        double    cellPrediction = prediction[frameID, i * M + j];
                                        int       cellShouldBe   = (int)Math.Round(shouldBe[frameID, i * M + j]);
                                        Rectangle rect           = new Rectangle(j * cellWidth, i * cellHeight, cellWidth, cellHeight);

                                        drawText(countFrame, rect, Brushes.Yellow, String.Format("{0:0.0}", cellPrediction));

                                        double difference       = (cellPrediction - cellShouldBe);
                                        string differenceString = difference > 0 ? "+" + String.Format("{0:0.0}", difference) : String.Format("{0:0.0}", difference);
                                        drawText(differenceFrame, rect, Brushes.Red, differenceString);
                                    }
                                }

                                numberVideoWriter.WriteFrame(countFrame);
                                differenceVideoWriter.WriteFrame(differenceFrame);
                            }

                        frameID++;
                    }
                }
        }
Example #7
 private void frameTimeEvent(object source, ElapsedEventArgs e)
 {  //Event handler that grabs the next frame of the video for a 30 fps recording
     if (isRecording)
     {
         recordVid.WriteFrame <Bgr, Byte>(_cameraCapture.QueryFrame());
     }
 }
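For the handler above to approximate 30 fps, the timer that raises it has to fire roughly every 33 ms. A minimal sketch of that wiring, assuming System.Timers.Timer and the handler name used in this example (the frameTimer field is illustrative):

 private System.Timers.Timer frameTimer;

 private void StartFrameTimer()
 {
     // Raise frameTimeEvent about every 33 ms, i.e. roughly 30 frames per second.
     frameTimer = new System.Timers.Timer(1000.0 / 30.0);
     frameTimer.Elapsed  += frameTimeEvent;
     frameTimer.AutoReset = true;
     frameTimer.Start();
 }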
Example #8
 private void ProcessFrame()
 {
     this.CurrentImage = m_Capture.QueryFrame();
     m_ImageBox.Image  = this.CurrentImage;
     if (m_IsRecording)
     {
         m_VideoWriter.WriteFrame <Bgr, Byte>(this.CurrentImage);
     }
 }
Example #9
        private void depth_FrameReady(object sender, DepthImageFrameReadyEventArgs e)
        {
            current_frame_number_for_stop++;
            if (waiting == 0)
            {
                waiting += 2;
            }
            else
            {
                waiting++;
            }
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    int count       = 0;
                    int frameNumber = 0;
                    if (depthFrame.Timestamp == 0)
                    {
                        waiting -= 1;
                        return;
                    }

                    if (depthFrame.Timestamp < FirstTimeStamp)
                    {
                        FirstTimeStamp = depthFrame.Timestamp;
                    }

                    frameNumber         = GetRealCurrentFrame(depthFrame.Timestamp - FirstTimeStamp);
                    count               = frameNumber - PreDepthFrameNumber;
                    PreDepthFrameNumber = frameNumber;
                    //Console.WriteLine("Depth {0} {1} {2} {3} {4}", FirstTimeStamp, depthFrame.Timestamp, depthFrame.Timestamp - FirstTimeStamp, frameNumber, count);

                    depthFrame.CopyDepthImagePixelDataTo(depthPixels);
                    //int minDepth = depthFrame.MinDepth;
                    //int maxDepth = depthFrame.MaxDepth;
                    int width  = depthFrame.Width;
                    int height = depthFrame.Height;
                    //Console.WriteLine("Depth:{0} {1}" ,DepthTS,count);

                    colorizer.TransformAndConvertDepthFrame(depthPixels, _colorPixels);

                    var depthImg = ImageConverter.Array2Image(_colorPixels, width, height, width * 4).Convert <Bgr, byte>();
                    if (depthImg.Ptr != IntPtr.Zero)
                    {
                        for (int i = 0; i < count; i++)
                        {
                            if (depthWriter != null)
                            {
                                depthWriter.WriteFrame(depthImg);
                            }
                        }
                    }
                }
            }
            waiting -= 1;
        }
        //Save-video function, called when t > trigger + T/2 or from the UI Test button; saves the ImageCollection buffer to video
        private void SaveVideo(bool fromTestButton)
        {
            string vEventfileName = fileHandler.getVideoFileName() + "event" + (fromTestButton?"test":"") + (counter).ToString() + ".avi"; //

            using (VideoWriter vw = new VideoWriter(vEventfileName, 0, 32 / frameAcceptance, 640, 480, true))
            {
                for (int i = 0; i < _videoArray.Count(); i++)
                {
                    vw.WriteFrame <Emgu.CV.Structure.Rgb, Byte>(_videoArray[i]);
                }
            }
        }
Example #11
        private void SaveVideo()
        {
            string vEventfileName = vfileName + "event" + vidCounter.ToString() + ".avi"; //was eventCounter.tostring()

            using (VideoWriter vw = new VideoWriter(vEventfileName, 0, frameRate / frameAcceptance, 640, 480, true))
            {
                for (int i = 0; i < _videoArray.Count(); i++)
                {
                    vw.WriteFrame <Emgu.CV.Structure.Rgb, Byte>(_videoArray[i]);
                }
            }
        }
Example #12
        private void WriteImage(Image <TColor, TDepth> image)
        {
            var bgrImage = image.Convert <Bgr, byte>();

            image.Dispose();

            var resizedImage = bgrImage.Resize(VideoWriterWidth, VideoWriterHeight, INTER.CV_INTER_CUBIC);

            bgrImage.Dispose();

            _videoWriter.WriteFrame(resizedImage);
            //resizedImage.Dispose();
        }
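The commented-out Dispose above leaves the resized frame alive after it has been written. A sketch of the same convert-resize-write step with deterministic cleanup, assuming the same _videoWriter field and size constants, and a containing class that is generic over TColor/TDepth:

        private void WriteImageAndDispose(Image <TColor, TDepth> image)
        {
            // Every intermediate image is released as soon as the frame has been written.
            using (image)
            using (var bgrImage = image.Convert <Bgr, byte>())
            using (var resizedImage = bgrImage.Resize(VideoWriterWidth, VideoWriterHeight, INTER.CV_INTER_CUBIC))
            {
                _videoWriter.WriteFrame(resizedImage);
            }
        }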
Example #13
        private void TimerEventProcessor(object sender, EventArgs e)
        {
            Image <Bgr, Byte> frame = cap.QueryFrame(); // Query a frame from the camera

            pictureBox1.Image = frame.ToBitmap();       // Convert the frame to a bitmap and hand it to the pictureBox control

            //Recording mode
            if (_isRecording)
            {
                //Write the frame into the video
                video.WriteFrame <Bgr, byte>(frame);
            }
        }
        public void writeFromBuffer()
        {
            for (int i = 0; i < buffer.Count; i++)
            {
                videoWriter.WriteFrame(buffer[i]);
            }
            buffer.Clear();
            videoWriter.Dispose();

            // We are theoretically done with writing the video... so we notify all registered listeners
            DoneWritingRallyVideoEventArgs e = new DoneWritingRallyVideoEventArgs(this.starttime);

            donewritingrallyvideo(this, e);
        }
Example #15
        private void color_FrameReady(object sender, ColorImageFrameReadyEventArgs e)
        {
            current_frame_number_for_stop++;
            if (waiting == 0)
            {
                waiting += 2;
            }
            else
            {
                waiting++;
            }
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    int count       = 0;
                    int frameNumber = 0;
                    if (colorFrame.Timestamp == 0)
                    {
                        waiting -= 1;
                        return;
                    }

                    if (colorFrame.Timestamp < FirstTimeStamp)
                    {
                        FirstTimeStamp = colorFrame.Timestamp;
                    }

                    frameNumber         = GetRealCurrentFrame(colorFrame.Timestamp - FirstTimeStamp);
                    count               = frameNumber - PreColorFrameNumber;
                    PreColorFrameNumber = frameNumber;
                    //Console.WriteLine("Color {0} {1} {2} {3} {4}", FirstTimeStamp, colorFrame.Timestamp, colorFrame.Timestamp - FirstTimeStamp, frameNumber,count);

                    colorFrame.CopyPixelDataTo(this._colorPixels);
                    var img = ImageConverter.Array2Image(_colorPixels, 640, 480, 640 * 4).Convert <Bgr, byte>();
                    if (img.Ptr != IntPtr.Zero)
                    {
                        for (int i = 0; i < count; i++)
                        {
                            if (colorWriter != null)
                            {
                                colorWriter.WriteFrame(img);
                            }
                        }
                    }
                }
            }
            waiting -= 1;
        }
Example #16
        public static Dictionary <string, Image <Hsv, byte> > getWithoutBackground(Dictionary <string, Image <Bgr, byte> > images)
        {
            Dictionary <string, Image <Hsv, byte> > outImages = new Dictionary <string, Image <Hsv, byte> >();

            try
            {
                VideoWriter videoW = new VideoWriter("test.avi", 4, images["Image:0"].Width, images["Image:0"].Height, true);

                //write the images into the video
                for (int i = 0; i < images.Count; i++)
                {
                    string key = "Image:" + i;
                    videoW.WriteFrame(images[key]);
                }

                //create a new background subtractor
                Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG subtractor = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG(3, 4, backgroundRatio, 0.01);


                Image <Gray, byte> backgroundmodel    = new Image <Gray, byte>(images["Image:" + 0].Width, images["Image:" + 0].Height);
                Image <Bgr, byte>  bgrOhneHintergrund = new Image <Bgr, byte>(backgroundmodel.Width, backgroundmodel.Height);

                for (int i = 0; i < images.Count; i++)
                {
                    string key = "Image:" + i;
                    subtractor.Update(images[key]);
                    backgroundmodel = subtractor.ForegroundMask;
                    backgroundmodel = backgroundmodel.Mul(1.0 / 255.0);
                    backgroundmodel._Dilate(dilatationErosionNumIter);
                    backgroundmodel._Erode(dilatationErosionNumIter);

                    bgrOhneHintergrund[0] = images[key][0].Mul(backgroundmodel);
                    bgrOhneHintergrund[1] = images[key][1].Mul(backgroundmodel);
                    bgrOhneHintergrund[2] = images[key][2].Mul(backgroundmodel);

                    outImages.Add(key, bgrOhneHintergrund.Convert <Hsv, byte>());
                }
            }
            catch (Exception e) {
                foreach (KeyValuePair <string, Emgu.CV.Image <Bgr, byte> > kvp in images)
                {
                    outImages.Add(kvp.Key, kvp.Value.Convert <Hsv, byte>());
                }
                MessageBox.Show(e.ToString());
                exception = true;
            }
            return(outImages);
        }
Example #17
 public static bool SaveFrameInAVIFormat(VideoWriter output_writer, Image <Bgr, byte> frame)
 {
     try
     {
         using (frame)
         {
             output_writer.WriteFrame(frame);
             return(true);
         }
     }
     catch (Exception e)
     {
         Debug.WriteLine(e.Message);
         return(false);
     }
 }
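A short usage sketch for the helper above, assuming an Emgu CV 2.x Capture as the frame source and a writer that is already open (names are illustrative). Each queried frame is cloned so the helper's using block disposes a private copy:

 public static void RecordFromCamera(Capture camera, VideoWriter writer, int framesToWrite)
 {
     for (int i = 0; i < framesToWrite; i++)
     {
         Image <Bgr, byte> frame = camera.QueryFrame();
         if (frame == null)
         {
             break;                                        // no more frames available
         }
         if (!SaveFrameInAVIFormat(writer, frame.Clone())) // clone so the helper owns what it disposes
         {
             break;                                        // stop on the first failed write
         }
     }
 }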
Example #18
        private void bg_DoWork(object sender, DoWorkEventArgs e)
        {
            if (rec)
            {
                try {
                    vw.WriteFrame((Image <Bgr, Byte>)e.Argument);
                }
                catch (InvalidOperationException exp) {
                    //Do nothing
                    //This exception happens when bg workers try to save
                    //the frame while the program is closed and video saved
                }
            }

            e.Result = 0;
        }
Example #19
 public bool AddFrame(Bitmap cImage)
 {
     try
     {
         Image <Gray, byte> frame = new Image <Gray, byte>(cImage);
         writer.WriteFrame <Gray, byte>(frame);
         //Both codes trigger the same exception
         //IntPtr writerPtr = writer.Ptr;
         //IntPtr framePtr = frame.Ptr;
         //CvInvoke.cvWriteFrame(writerPtr, framePtr);
         return(true);
     }
     catch (Exception e)
     {
         Console.WriteLine("Exception capturing video: {0}", e.Message);
         return(false);
     }
 }
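One common reason a write like the one above throws is a mismatch between frame and writer: a single-channel Gray frame sent to a writer opened with isColor = true, or a frame whose size differs from the size the writer was created with. A sketch of a variant that converts and resizes to match, assuming writer was opened as a 640x480 color VideoWriter:

 public bool AddFrameAsColor(Bitmap cImage)
 {
     try
     {
         using (Image <Gray, byte> gray = new Image <Gray, byte>(cImage))
         using (Image <Bgr, byte> bgr = gray.Convert <Bgr, byte>())
         using (Image <Bgr, byte> sized = bgr.Resize(640, 480, INTER.CV_INTER_LINEAR))
         {
             writer.WriteFrame(sized); // channel count and frame size now match the writer
             return true;
         }
     }
     catch (Exception e)
     {
         Console.WriteLine("Exception capturing video: {0}", e.Message);
         return false;
     }
 }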
Example #20
        public void write()
        {
            int codec = Emgu.CV.CvInvoke.CV_FOURCC('P', 'I', 'M', '1');

            int fps = 25;

            if (list_timestamps.Count > 0)
            {
                String  tempvideopath = Program.getConfiguration().Mediafolderpath + @"\" + list_timestamps[0].ToString() + ".mpg";
                Capture tempcapture   = new Capture(tempvideopath);
                fps = (int)tempcapture.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
                tempcapture.Dispose();
            }

            VideoWriter videowriter = new VideoWriter(videopath, codec, fps, 640, 480, true);


            for (int i = 0; i < list_timestamps.Count; i++)
            {
                videopath = Program.getConfiguration().Mediafolderpath + @"\" + list_timestamps[i].ToString() + ".mpg";
                try
                {
                    Capture           joincapture = new Capture(videopath);
                    Image <Bgr, byte> frame       = joincapture.QueryFrame();
                    for (int n = 1; n < 15; n++)
                    {
                        joincapture.QueryFrame();
                    }

                    while (frame != null)
                    {
                        videowriter.WriteFrame(frame);
                        frame = joincapture.QueryFrame();
                    }
                    joincapture.Dispose();

                    // Notify main frame to update its progressbar
                    ExportVideoProgressEventArgs e = new ExportVideoProgressEventArgs(i);
                    DoneAppendingRallyVideoEvent(this, e);
                }
                catch (NullReferenceException) { Console.WriteLine("unreadable video file"); }
            }
            videowriter.Dispose();
        }
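The writer above is created at 640x480, so the joined clips are expected to have that frame size; when a source clip differs, resizing each frame before writing keeps WriteFrame from being fed frames of the wrong size. A minimal sketch, assuming Emgu CV 2.x Image.Resize (the method name is illustrative):

        static void WriteFrameResized(VideoWriter writer, Image <Bgr, byte> frame)
        {
            // Pass the frame through unchanged when it already matches the writer's 640x480 size.
            if (frame.Width == 640 && frame.Height == 480)
            {
                writer.WriteFrame(frame);
                return;
            }

            using (Image <Bgr, byte> resized = frame.Resize(640, 480, INTER.CV_INTER_LINEAR))
            {
                writer.WriteFrame(resized);
            }
        }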
Example #21
 private void OnCameraCapture_ImageGrabbed(object sender, EventArgs e)
 {
     currentFrame = capture.RetrieveBgrFrame();
     if (currentFrame == null)
     {
         capture.Stop();
         capture.Dispose();
         return;
     }
     ++frameCount;
     if (isRecordCamVideo)
     {
         videoWriter.WriteFrame <Bgr, Byte>(currentFrame);
     }
     imageBoxCameraCapture.Image = currentFrame;
     imageBoxResult.Image        = currentFrame.Sub(previousFrame);
     previousFrame = currentFrame.Copy(); //use 'Copy' here instead of '=' (assignment would alias the same image)
     stripCameraCapture.BeginInvoke(new SetLabelText(SetStatusLabelText), labelCameraFrameCounter, frameCount);
 }
Example #22
        static void ReadWriteVideo(string input, string output)
        {
            var video = new VideoReader(input);

            video.LoadMetadataAsync().Wait();
            video.Load();

            using (var writer = new VideoWriter(File.Create(output),
                                                video.Metadata.Width, video.Metadata.Height, video.Metadata.AvgFramerate,
                                                new H264Encoder()
            {
                Format = "flv"
            }.Create()))
            {
                writer.OpenWrite(true);
                //video.CopyTo(writer);

                var frame = new VideoFrame(video.Metadata.Width, video.Metadata.Height);
                while (true)
                {
                    // read next frame
                    var f = video.NextFrame(frame);
                    if (f == null)
                    {
                        break;
                    }


                    for (int i = 0; i < 100; i++)
                    {
                        for (int j = 0; j < 100; j++)
                        {
                            var px = frame.GetPixels(i, j).Span;
                            px[0] = 255;
                            px[1] = 0;
                            px[2] = 0;
                        }
                    }

                    writer.WriteFrame(frame);
                }
            }
        }
Example #23
        public override IDataContainer PreProcess(IDataContainer dataContainer)
        {
            _debugOutputImage = new Image <Rgb, byte>(Width, Height);

            var rgbImage = dataContainer.OfType <RgbImageData>().ToArray();

            if (!rgbImage.Any())
            {
                return(null);
            }

            _debugOutputImage += rgbImage.First().Image.Copy();

            var devices = dataContainer.OfType <Device>().ToArray();
            //var unknownDevices = dataContainer.OfType<Device>().Where(d => !d.IsIdentified).ToArray();
            var hands = dataContainer.OfType <Hand>().ToArray();

            foreach (var device in devices)
            {
                var polyline = new List <Point>();
                foreach (var point in device.Shape.Points)
                {
                    var x = point.X * Width;
                    var y = point.Y * Height;

                    polyline.Add(new Point((int)x, (int)y));
                }

                var centerX = (int)(device.SmoothedCenter.X / 320 * Width);
                var centerY = (int)(device.SmoothedCenter.Y / 240 * Height);

                _debugOutputImage.DrawPolyline(polyline.ToArray(), true, device.IsIdentified ? Rgbs.Red : Rgbs.White, 5);

                if (device.IsIdentified)
                {
                    _debugOutputImage.Draw(string.Format("Id {0}", device.DeviceId), ref EmguFontBig, new Point(centerX, centerY), Rgbs.Red);
                }
            }

            foreach (var hand in hands)
            {
                var resizedHandSegment = hand.Segment.Resize(_debugOutputImage.Width, _debugOutputImage.Height, INTER.CV_INTER_CUBIC).Mul(255);

                //_debugOutputImage = _debugOutputImage.Copy(resizedHandSegment.Not());
                _debugOutputImage = _debugOutputImage.AddWeighted(resizedHandSegment.Convert <Rgb, byte>(), 1.0, 0.5, 0.0);

                resizedHandSegment.Dispose();

                var point      = new Point((int)(hand.RelativeCenter.X * Width), (int)(hand.RelativeCenter.Y * Height));
                var labelPoint = new Point((int)(hand.RelativeCenter.X * Width + 30), (int)(hand.RelativeCenter.Y * Height));

                _debugOutputImage.Draw(new CircleF(point, 10), Rgbs.Red, 6);
                _debugOutputImage.Draw(string.Format("Id {0} (d={1:F0})", hand.Id, hand.Depth), ref EmguFontBig, labelPoint, Rgbs.Red);
            }

            var debugOutputImageCopy = _debugOutputImage.Copy();

            Task.Factory.StartNew(() =>
            {
                var bitmapSource = debugOutputImageCopy.ToBitmapSource(true);
                debugOutputImageCopy.Dispose();
                return(bitmapSource);
            }).ContinueWith(t => DebugOutputBitmapSource = t.Result);

            Stage(new RgbImageData(this, "DataRenderer", _debugOutputImage.Copy()));

            if (_videoWriter != null)
            {
                _videoWriter.WriteFrame(_debugOutputImage.Convert <Bgr, byte>());
            }

            _debugOutputImage.Dispose();

            Push();

            return(base.PreProcess(dataContainer));
        }
Example #24
        private void ProcessData(ColorSpacePoint[] depthMappedToColorPoints, Bitmap bitmapItem)
        {
            Image <Bgr, byte> img;
            int BytePerPixel = 4;
            int stride       = 1920 * BytePerPixel;

            //Lock the bitmap memory and get a pointer to its start
            BitmapData Bmp_Data_color = bitmapItem.LockBits(Rect_color, ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
            IntPtr     Ptr_color      = Bmp_Data_color.Scan0;

            //Lock the bitmap memory and get a pointer to its start
            BitmapData Bmp_Data_display = bmp_display.LockBits(Rect_display, ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
            IntPtr     Ptr_display      = Bmp_Data_display.Scan0;

            unsafe
            {
                //Cast the pointers to byte* (only allowed inside unsafe code in C#)
                byte *P_color   = (byte *)(void *)Ptr_color;
                byte *P_display = (byte *)(void *)Ptr_display;

                for (int y = 0; y < 424; y++)
                {
                    for (int x = 0; x < 512; x++)
                    {
                        if (!float.IsNegativeInfinity(depthMappedToColorPoints[y * 512 + x].X) &&
                            !float.IsNegativeInfinity(depthMappedToColorPoints[y * 512 + x].Y) &&
                            (depthMappedToColorPoints[y * 512 + x].X >= 0) &&
                            (depthMappedToColorPoints[y * 512 + x].Y >= 0) &&
                            (depthMappedToColorPoints[y * 512 + x].Y <= 1080) &&
                            (depthMappedToColorPoints[y * 512 + x].X <= 1920)
                            )
                        {
                            //Get the byte offset of the corresponding color pixel
                            //+0.5 rounds to the nearest integer
                            int yOfMemory = (int)(depthMappedToColorPoints[y * 512 + x].Y + 0.5);
                            if (yOfMemory + 1 > 1080)
                            {
                                yOfMemory--;                      //clamp values of 1080 or greater
                            }
                            //int adress = (int)(depthMappedToColorPoints[y * 512 + x].X) + (int)(depthMappedToColorPoints[y * 512 + x].Y ) * 1920;

                            int adress = (int)(depthMappedToColorPoints[y * 512 + x].X + 0.5) + yOfMemory * 1920;
                            adress = adress * BytePerPixel;

                            //Compute the byte offset into the display bitmap (bmp_display)
                            int index = y * 512 + x;
                            index = index * BytePerPixel;

                            int i = y * 512 + x;

                            if (Math.Abs(depth[i] - (int)MaxV) < 500)
                            {
                                //Copy the color information from the mapped color pixel
                                //into the display bitmap (bmp_display)
                                P_display[index]     = P_color[adress];
                                P_display[index + 1] = P_color[adress + 1];
                                P_display[index + 2] = P_color[adress + 2];
                                P_display[index + 3] = P_color[adress + 3];
                            }
                            else
                            {
                                P_display[index]     = 0xff;
                                P_display[index + 1] = 0xff;
                                P_display[index + 2] = 0xff;
                                P_display[index + 3] = 0xff;
                            }
                        }
                        else
                        {
                            //Get the byte offset of the corresponding depth pixel
                            int index = y * 512 + x;
                            index *= BytePerPixel;
                            //Invalid mapped points are filled with a solid color
                            P_display[index]     = 0xff;
                            P_display[index + 1] = 0xff;
                            P_display[index + 2] = 0xff;
                            P_display[index + 3] = 0xff;
                        }
                    }
                }
            }
            //Update the bitmaps
            bitmapItem.UnlockBits(Bmp_Data_color);
            bmp_display.UnlockBits(Bmp_Data_display);
            pictureBox1.Image = bmp_display;
            //pictureBox2.Image = bitmapItem;
            if (VW_OPEN == true)
            {
                img = new Image <Bgr, Byte>(bmp_display);
                VW.WriteFrame(img);
                img.Dispose();
            }
            bitmapItem = null;
            //bmp_display.Dispose();
            //bmp_color.Dispose();
        }
        private void previewBtn_Click(object sender, RoutedEventArgs e)
        {
            if (previewBtn.Content.ToString() == "Preview Stream")
            {
                if (kinect_sensor != null)
                {
                    // disable all other buttons
                    DeactivateReplay();
                    gestureCaptureBtn.IsEnabled     = false;
                    gestureRecognitionBtn.IsEnabled = false;
                    gestureReplayBtn.IsEnabled      = false;
                    previewBtn.Content = "Stop Stream";
                    isStreaming        = true;
                    kinect_data_manager.ifShowJointStatus = true;

                    frame_rec_buffer.Clear();

                    kinect_sensor.Start();
                }
            }
            else
            {
                if (kinect_sensor != null)
                {
                    kinect_sensor.Stop();

                    gestureCaptureBtn.IsEnabled     = true;
                    gestureReplayBtn.IsEnabled      = true;
                    gestureRecognitionBtn.IsEnabled = true;


                    isStreaming = false;
                    kinect_data_manager.ifShowJointStatus = false;

                    // save recorded frame to disk
                    if (frame_rec_buffer != null && saveVideoCheckBox.IsChecked.Value)
                    {
                        // create video writer
                        int fwidth  = (int)groupBox3.Width + 20;
                        int fheight = (int)groupBox3.Height + 20;

                        SaveFileDialog saveDialog = new SaveFileDialog();
                        saveDialog.Filter           = "avi files (*.avi)|*.avi";
                        saveDialog.FilterIndex      = 2;
                        saveDialog.RestoreDirectory = true;

                        if (saveDialog.ShowDialog().Value)
                        {
                            statusbarLabel.Content = "Saving video...";

                            string      videofile   = saveDialog.FileName.ToString();
                            VideoWriter videoWriter = new VideoWriter(videofile, CvInvoke.CV_FOURCC('M', 'J', 'P', 'G'), 15,
                                                                      fwidth, fheight, true);

                            if (videoWriter == null)
                            {
                                MessageBox.Show("Fail to save video. Check if codec has been installed.");
                            }
                            else
                            {
                                for (int i = 0; i < frame_rec_buffer.Count; i++)
                                {
                                    // write to video file
                                    Emgu.CV.Image <Bgr, byte> cvImg =
                                        new Emgu.CV.Image <Bgr, byte>(frame_rec_buffer[i] as Bitmap);

                                    videoWriter.WriteFrame <Bgr, byte>(cvImg);
                                }

                                videoWriter.Dispose();

                                statusbarLabel.Content = "Video saved to " + videofile;
                            }
                        }
                    }

                    frame_rec_buffer.Clear();

                    previewBtn.Content = "Preview Stream";

                    // save tracked elbow speed
                    //FileStream file = File.Open("d:\\temp\\test.txt", FileMode.Create);
                    //StreamWriter writer = new StreamWriter(file);
                    //for (int i = 0; i < motion_assessor.jointStatusSeq.Count; i++)
                    //    writer.WriteLine(motion_assessor.jointStatusSeq[i][JointType.HandRight].abs_speed);
                    //writer.Close();
                }
            }
        }
Example #26
        void FrameGrabber(object sender, EventArgs e)
        {
            try
            {
                //Get the current frame from the capture device
                currentFrame = grabber.QueryFrame().Resize(520, 340, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }
            catch (NullReferenceException e1)
            {
                _motionHistory     = new MotionHistory(2.0, 0.05, 0.5);
                _forgroundDetector = null;
                motionQueue.Clear(); helpQueue.Clear();
                grabber = new Capture(vidlist[excnt]);
                excnt++;
                if (excnt == 5)
                {
                    excnt = 0;
                }
                currentFrame = grabber.QueryFrame().Resize(520, 340, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                green1       = false; green2 = false; green3 = false; green4 = false;
                red1         = false; red2 = false; red3 = false; red4 = false;
            }

            //Convert it to Grayscale
            gray = currentFrame.Convert <Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t      = t + 1;
                result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                //MessageBox.Show("wiidth " + f.rect.Width + " height " + f.rect.Height + " area " + f.rect.Width * f.rect.Height);
                if (f.rect.Width > 80)
                {
                    continue;
                }

                //draw the detected face on the frame in light green
                if (showHand)
                {
                    currentFrame.Draw(f.rect, new Bgr(Color.LightGreen), 2);
                }

                int nearespos = nearestPosition(f.rect.X, f.rect.Y);

                if (helpQueue.ToArray().ToList().IndexOf(nearespos) == -1)
                {
                    //lbAlerts.Items.Add("Help request at #" + nearespos.ToString());

                    dgAlerts.Rows.Add("Help Request", nearespos.ToString());
                    DB_Connect.InsertQuery("INSERT INTO alert_tab(exam_id,position_id,alert_type,alert_time) VALUES(" + examid + "," + nearespos.ToString() + ",'H','" + DateTime.Now + "')");
                    dgAlerts.FirstDisplayedScrollingRowIndex = dgAlerts.RowCount - 1;

                    //GCM - help
                    //AndroidGCMPushNotification apnGCM = new AndroidGCMPushNotification();
                    //string strResponse = apnGCM.SendNotification(regID, nearespos.ToString() + " "+ DateTime.Now, "H");

                    if (nearespos == 1)
                    {
                        green1 = true;
                    }
                    else if (nearespos == 2)
                    {
                        green2 = true;
                    }
                    else if (nearespos == 3)
                    {
                        green3 = true;
                    }
                    else if (nearespos == 4)
                    {
                        green4 = true;
                    }

                    if (helpQueue.Count == 10)
                    {
                        helpQueue.Dequeue();
                        helpQueue.Enqueue(nearespos);
                    }
                    else
                    {
                        helpQueue.Enqueue(nearespos);
                    }
                }
            }


            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = ProcessFrame(currentFrame);

            if (captureOutput == null && xdoc.Descendants("RecordVideo").First().Value == "1")
            {
                MessageBox.Show("reording start");
                captureOutput = new VideoWriter(@"video" + examid + ".avi", (int)grabber.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FOURCC), 15, 520, 340, true);
            }

            if (currentFrame != null && xdoc.Descendants("RecordVideo").First().Value == "1")
            {
                captureOutput.WriteFrame <Bgr, Byte>(currentFrame);
            }
        }