// On new frame ("YeniFrame"): store a copy of the frame, run motion detection,
// record the dimensions, then notify subscribers outside the lock.
private void Kamera_YeniFrame(object sender, NewFrameEventArgs e)
{
    try
    {
        Monitor.Enter(this);

        // dispose the previous frame before replacing it
        if (sonframe != null)
        {
            sonframe.Dispose();
        }

        // clear the last-error marker for this frame
        sonframeHata = null;

        sonframe = (Bitmap)e.Frame.Clone();

        // apply motion detector, if one is configured
        if (mdetector != null)
        {
            mdetector.ProcessFrame(sonframe);
        }

        // remember image dimensions
        width = sonframe.Width;
        height = sonframe.Height;
    }
    catch (ThreadAbortException)
    {
        // Only reset an abort that is actually in progress. The original code
        // called Thread.ResetAbort() from a catch-all, which itself throws
        // InvalidOperationException whenever no abort is pending.
        Thread.ResetAbort();
    }
    catch (Exception)
    {
        // best-effort: a single bad frame must not kill the capture thread
    }
    finally
    {
        Monitor.Exit(this);
    }

    // notify client (outside the lock, as before)
    if (YeniFrame != null)
    {
        YeniFrame(this, new EventArgs());
    }
}
// On new frame: cache a copy of the incoming bitmap, run motion detection,
// record the dimensions, then notify subscribers.
private void video_NewFrame(object sender, CameraEventArgs e)
{
    // lock — the original paired Monitor.Enter/Exit without try/finally, so any
    // exception from Clone()/ProcessFrame() left the monitor permanently held.
    Monitor.Enter(this);
    try
    {
        // dispose old frame before overwriting
        if (lastFrame != null)
        {
            lastFrame.Dispose();
        }
        lastFrame = (Bitmap)e.Bitmap.Clone();

        // apply motion detector
        if (motionDetecotor != null)
        {
            motionDetecotor.ProcessFrame(ref lastFrame);
        }

        // image dimension
        width = lastFrame.Width;
        height = lastFrame.Height;
    }
    finally
    {
        // unlock — always released now, even on exception
        Monitor.Exit(this);
    }

    // notify client
    if (NewFrame != null)
    {
        NewFrame(this, new EventArgs());
    }
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <returns>Returns amount of motion, which is provided <see cref="IMotionDetector.MotionLevel"/>
/// property of the <see cref="MotionDetectionAlgorithm">motion detection algorithm in use</see>.</returns>
///
/// <remarks><para>The method first of all applies motion detection algorithm to the specified video
/// frame to calculate <see cref="IMotionDetector.MotionLevel">motion level</see> and
/// <see cref="IMotionDetector.MotionFrame">motion frame</see>. After this it applies motion processing algorithm
/// (if it was set) to do further post processing, like highlighting motion areas, counting moving
/// objects, etc.</para>
///
/// <para><note>In the case if <see cref="MotionZones"/> property is set, this method will perform
/// motion filtering right after motion algorithm is done and before passing motion frame to motion
/// processing algorithm. The method does filtering right on the motion frame, which is produced
/// by motion detection algorithm. At the same time the method recalculates motion level and returns
/// new value, which takes motion zones into account (but the new value is not set back to motion detection
/// algorithm' <see cref="IMotionDetector.MotionLevel"/> property).
/// </note></para>
/// </remarks>
///
public float ProcessFrame(UnmanagedImage videoFrame)
{
    lock ( _sync )
    {
        // no detector configured — report no motion
        if (_detector == null)
        {
            return(0);
        }

        _videoWidth = videoFrame.Width;
        _videoHeight = videoFrame.Height;

        // total pixel count, computed once and cached.
        // NOTE(review): _area is never recomputed if the video resolution
        // changes later — confirm whether that can happen for this source.
        if (_area == 0)
        {
            _area = _videoWidth * _videoHeight;
        }

        // call motion detection
        _detector.ProcessFrame(videoFrame);
        var motionLevel = _detector.MotionLevel;

        // check if motion zones are specified (and a motion frame exists to filter)
        if (_detector.MotionFrame != null && _motionZones != null)
        {
            if (_zonesFrame == null)
            {
                CreateMotionZonesFrame();
            }

            // CreateMotionZonesFrame() may still leave _zonesFrame null, and the
            // zones mask must match the current frame size exactly
            if (_zonesFrame != null && (_videoWidth == _zonesFrame.Width) && (_videoHeight == _zonesFrame.Height))
            {
                unsafe
                {
                    // pointers to background and current frames
                    var zonesPtr = (byte *)_zonesFrame.ImageData.ToPointer();
                    var motionPtr = (byte *)_detector.MotionFrame.ImageData.ToPointer();

                    motionLevel = 0;

                    // AND the motion frame with the zones mask in place, and
                    // recount motion pixels (low bit) to recompute the level
                    for (int i = 0, frameSize = _zonesFrame.Stride * _videoHeight; i < frameSize; i++, zonesPtr++, motionPtr++)
                    {
                        *motionPtr &= *zonesPtr;
                        motionLevel += (*motionPtr & 1);
                    }
                    // normalise by total pixel count
                    motionLevel /= _area;
                }
            }
        }

        // call motion post processing (highlighting, object counting, ...)
        if ((_processor != null) && (_detector.MotionFrame != null))
        {
            _processor.ProcessFrame(videoFrame, _detector.MotionFrame);
        }

        return(motionLevel);
    }
}
// Handles a new frame from the video source: stores a (possibly mirrored and/or
// flipped) copy, runs motion detection, raises Alarm according to the configured
// detection mode, and finally notifies NewFrame subscribers.
private void video_NewFrame(object sender, NewFrameEventArgs e)
{
    try
    {
        bool motionDetected = false;

        lock (this)
        {
            // release the previous frame before overwriting it
            if (mLastFrame != null)
            {
                mLastFrame.Dispose();
            }
            mLastFrame = new Bitmap(e.Frame);

            // optional horizontal / vertical mirroring
            if (this.Mirror)
            {
                mLastFrame.RotateFlip(RotateFlipType.RotateNoneFlipX);
            }
            if (this.Flip)
            {
                mLastFrame.RotateFlip(RotateFlipType.RotateNoneFlipY);
            }

            // run the motion detector, if one is configured
            if (mMotionDetecotor != null)
            {
                mMotionDetecotor.ProcessFrame(mLastFrame);

                // counting detectors report moving objects; others expose a level
                ICountingMotionDetector counting = mMotionDetecotor as ICountingMotionDetector;
                motionDetected = (counting != null)
                    ? (counting.ObjectsCount > 0)
                    : (mMotionDetecotor.MotionLevel > 0.005);
            }

            // remember the frame dimensions
            mImageWidth = mLastFrame.Width;
            mImageHeight = mLastFrame.Height;
        }

        // raise the alarm according to the configured detection mode
        if (this.Alarm != null)
        {
            bool fire = (motionDetected && this.DetectMode == DETECTMODE.MOTION)
                     || (!motionDetected && this.DetectMode == DETECTMODE.STILLNESS);
            if (fire)
            {
                Alarm(this, new EventArgs());
            }
        }
    }
    catch (Exception)
    {
        // best-effort: ignore failures while processing a single frame
    }

    // notify client
    if (NewFrame != null)
    {
        NewFrame(this, new NewFrameEventArgs(null));
    }
}
// Handles a new frame: keeps both a processed copy (lastFrame) and an untouched
// raw copy (lastRawFrame) of the incoming image, runs motion detection, and
// raises the Alarm / NewFrame events.
private void video_NewFrame(object sender, NewFrameEventArgs e)
{
    try
    {
        // lock
        Monitor.Enter(this);

        // release the previous frames before overwriting them
        if (lastFrame != null)
        {
            lastFrame.Dispose();
        }
        if (lastRawFrame != null)
        {
            lastRawFrame.Dispose();
        }

        lastFrame = (Bitmap)e.Frame.Clone();
        // Clone() does not actually create a fully independent copy, so keep a
        // separate raw bitmap (taken before motion detection) to be transmitted
        // via MJPEGStreams.
        lastRawFrame = new Bitmap(e.Frame);

        // run the motion detector, if one is configured
        if (motionDetecotor != null)
        {
            motionDetecotor.ProcessFrame(ref lastFrame);

            // raise the alarm once the motion level reaches the threshold
            bool thresholdReached = motionDetecotor.MotionLevel >= alarmLevel;
            if (thresholdReached && (Alarm != null))
            {
                Alarm(this, new EventArgs());
            }
        }

        // remember the frame dimensions
        width = lastFrame.Width;
        height = lastFrame.Height;
    }
    catch (Exception)
    {
        // best-effort: ignore per-frame failures
    }
    finally
    {
        // unlock
        Monitor.Exit(this);
    }

    // notify client
    if (NewFrame != null)
    {
        NewFrame(this, new EventArgs());
    }
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <returns>Returns amount of motion, which is provided <see cref="IMotionDetector.MotionLevel"/>
/// property of the <see cref="MotionDetectionAlgorithm">motion detection algorithm in use</see>.</returns>
///
/// <remarks><para>The method first of all applies motion detection algorithm to the specified video
/// frame to calculate <see cref="IMotionDetector.MotionLevel">motion level</see> and
/// <see cref="IMotionDetector.MotionFrame">motion frame</see>. After this it applies motion processing algorithm
/// (if it was set) to do further post processing, like highlighting motion areas, counting moving
/// objects, etc.</para>
///
/// <para><note>In the case if <see cref="MotionZones"/> property is set, this method will perform
/// motion filtering right after motion algorithm is done and before passing motion frame to motion
/// processing algorithm. The method does filtering right on the motion frame, which is produced
/// by motion detection algorithm. At the same time the method recalculates motion level and returns
/// new value, which takes motion zones into account (but the new value is not set back to motion detection
/// algorithm' <see cref="IMotionDetector.MotionLevel"/> property).
/// </note></para>
/// </remarks>
///
public float ProcessFrame(UnmanagedImage videoFrame)
{
    lock ( sync )
    {
        // no detector configured — report no motion
        if (detector == null)
        {
            return(0);
        }

        videoWidth = videoFrame.Width;
        videoHeight = videoFrame.Height;

        float motionLevel = 0;

        // call motion detection
        detector.ProcessFrame(videoFrame);
        motionLevel = detector.MotionLevel;

        // check if motion zones are specified. FIX: also require a motion frame —
        // the zone-filtering branch dereferenced detector.MotionFrame without the
        // null check that the post-processing branch below already performs.
        if ((motionZones != null) && (detector.MotionFrame != null))
        {
            if (zonesFrame == null)
            {
                CreateMotionZonesFrame( );
            }

            // FIX: CreateMotionZonesFrame() may legitimately leave zonesFrame
            // null, so guard before touching Width/Height/ImageData.
            if ((zonesFrame != null) && (videoWidth == zonesFrame.Width) && (videoHeight == zonesFrame.Height))
            {
                unsafe
                {
                    // pointers to background and current frames
                    byte *zonesPtr = (byte *)zonesFrame.ImageData.ToPointer( );
                    byte *motionPtr = (byte *)detector.MotionFrame.ImageData.ToPointer( );

                    motionLevel = 0;

                    // AND the motion frame with the zones mask in place and
                    // recount motion pixels (low bit) to recompute the level
                    for (int i = 0, frameSize = zonesFrame.Stride * videoHeight; i < frameSize; i++, zonesPtr++, motionPtr++)
                    {
                        *motionPtr &= *zonesPtr;
                        motionLevel += (*motionPtr & 1);
                    }
                    // normalise by total pixel count
                    motionLevel /= (videoWidth * videoHeight);
                }
            }
        }

        // call motion post processing
        if ((processor != null) && (detector.MotionFrame != null))
        {
            processor.ProcessFrame(videoFrame, detector.MotionFrame);
        }

        return(motionLevel);
    }
}
// Handles a new frame: clears the last video-source error, caches a copy of the
// frame, runs motion detection, raises Alarm when the motion level reaches the
// threshold, and finally notifies NewFrame subscribers.
private void video_NewFrame(object sender, NewFrameEventArgs e)
{
    try
    {
        // lock
        Monitor.Enter(this);

        // drop the previous frame before storing the new one
        if (lastFrame != null)
        {
            lastFrame.Dispose();
        }

        // a successfully delivered frame clears any pending error
        lastVideoSourceError = null;

        lastFrame = (Bitmap)e.Frame.Clone();

        // run the motion detector, if one is configured
        if (motionDetecotor != null)
        {
            motionDetecotor.ProcessFrame(lastFrame);

            // raise the alarm once the motion level reaches the threshold
            bool thresholdReached = motionDetecotor.MotionLevel >= alarmLevel;
            if (thresholdReached && (Alarm != null))
            {
                Alarm(this, new EventArgs());
            }
        }

        // remember the frame dimensions
        width = lastFrame.Width;
        height = lastFrame.Height;
    }
    catch (Exception)
    {
        // best-effort: ignore per-frame failures
    }
    finally
    {
        // unlock
        Monitor.Exit(this);
    }

    // notify client
    if (NewFrame != null)
    {
        NewFrame(this, new EventArgs());
    }
}
/// <summary>
/// Process a new video frame: run the motion detection algorithm, optionally
/// filter the resulting motion frame by the configured motion zones (recomputing
/// the motion level over the zones), then apply the post-processing algorithm.
/// </summary>
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
/// <returns>Amount of motion as reported by the detector, or recalculated over
/// the motion zones when zones are set; 0 when no detector is configured.</returns>
public float ProcessFrame(UnmanagedImage videoFrame)
{
    lock (sync)
    {
        // no detector configured — report no motion
        if (detector == null)
        {
            return(0);
        }

        videoWidth = videoFrame.Width;
        videoHeight = videoFrame.Height;

        float motionLevel = 0;

        // call motion detection
        detector.ProcessFrame(videoFrame);
        motionLevel = detector.MotionLevel;

        // zone filtering. FIX: also require a motion frame — this branch
        // dereferenced detector.MotionFrame without the null check that the
        // post-processing branch below already performs.
        if ((motionZones != null) && (detector.MotionFrame != null))
        {
            if (zonesFrame == null)
            {
                CreateMotionZonesFrame();
            }

            // FIX: CreateMotionZonesFrame() may leave zonesFrame null, so guard
            // before touching Width/Height/ImageData.
            if ((zonesFrame != null) && (videoWidth == zonesFrame.Width) && (videoHeight == zonesFrame.Height))
            {
                unsafe
                {
                    // pointers to zones mask and motion frame
                    byte *zonesPtr = (byte *)zonesFrame.ImageData.ToPointer();
                    byte *motionPtr = (byte *)detector.MotionFrame.ImageData.ToPointer();

                    motionLevel = 0;

                    // AND the motion frame with the zones mask in place and
                    // recount motion pixels (low bit) to recompute the level
                    for (int i = 0, frameSize = zonesFrame.Stride * videoHeight; i < frameSize; i++, zonesPtr++, motionPtr++)
                    {
                        *motionPtr &= *zonesPtr;
                        motionLevel += (*motionPtr & 1);
                    }
                    // normalise by total pixel count
                    motionLevel /= (videoWidth * videoHeight);
                }
            }
        }

        // call motion post processing
        if ((processor != null) && (detector.MotionFrame != null))
        {
            processor.ProcessFrame(videoFrame, detector.MotionFrame);
        }

        return(motionLevel);
    }
}
// Handles a new frame from the camera: caches a copy of the bitmap, runs motion
// detection, raises Alarm when the motion level crosses the threshold, and
// finally notifies NewFrame subscribers.
private void Video_NewFrame(object sender, CameraEventArgs e)
{
    try
    {
        // lock
        Monitor.Enter(this);

        // drop the previous frame before storing the new one
        lastFrame?.Dispose();
        lastFrame = (Bitmap)e.Bitmap.Clone();

        // run the motion detector, if one is configured
        if (motionDetecotor != null)
        {
            motionDetecotor.ProcessFrame(ref lastFrame);

            // raise the alarm once the motion level reaches the threshold
            bool thresholdReached = motionDetecotor.MotionLevel >= alarmLevel;
            if (thresholdReached && (Alarm != null))
            {
                Alarm(this, new EventArgs());
            }
        }

        // remember the frame dimensions
        width = lastFrame.Width;
        height = lastFrame.Height;
    }
    catch (Exception)
    {
        // best-effort: ignore per-frame failures
    }
    finally
    {
        // unlock
        Monitor.Exit(this);
    }

    // notify client
    NewFrame?.Invoke(this, new EventArgs());
}
/// <summary>
/// Handles the Kinect sensor's AllFramesReady event: copies skeleton/color/depth
/// data, finds the nearest tracked user, then dispatches motion detection, face
/// tracking, gesture detection and posture detection for that user.
/// </summary>
/// <param name="sender">Event source (the Kinect sensor).</param>
/// <param name="allFramesReadyEventArgs">Provides access to the color, depth and skeleton frames.</param>
private void OnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
{
    ColorImageFrame colorImageFrame = null;
    DepthImageFrame depthImageFrame = null;
    SkeletonFrame skeletonFrame = null;

    try
    {
        colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
        depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
        skeletonFrame = allFramesReadyEventArgs.OpenSkeletonFrame();

        // all three streams must be available; otherwise skip this event
        if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
        {
            return;
        }

        // resize the skeleton array if necessary
        if (this.skeletons.Length != skeletonFrame.SkeletonArrayLength)
        {
            this.skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
        }
        skeletonFrame.CopySkeletonDataTo(this.skeletons);

        // (re)allocate the color buffer only when the frame size changes
        if (this.colorPixelData == null || colorPixelData.Length != colorImageFrame.PixelDataLength)
        {
            this.colorPixelData = new byte[colorImageFrame.PixelDataLength];
        }
        colorImageFrame.CopyPixelDataTo(this.colorPixelData);

        // count users and find the one nearest to the sensor
        var newNearestId = -1;
        var nearestDistance2 = double.MaxValue;
        int nUsers = 0;

        foreach (var skeleton in skeletons)
        {
            if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
            {
                // squared distance — ordering is the same as real distance,
                // so the sqrt can be skipped
                var distance2 = (skeleton.Position.X * skeleton.Position.X) +
                                (skeleton.Position.Y * skeleton.Position.Y) +
                                (skeleton.Position.Z * skeleton.Position.Z);

                if (distance2 < nearestDistance2)
                {
                    newNearestId = skeleton.TrackingId;
                    nearestDistance2 = distance2;
                }
            }
            if (skeleton.TrackingState != SkeletonTrackingState.NotTracked)
            {
                nUsers++;
            }
        }

        this.nearestId = newNearestId;
        this.NumberOfDetectUsers = nUsers;

        // motion detection
        if (detectMotion && motionDetector != null)
        {
            // FIX: the buffer-reuse check compared the array length against
            // BytesPerPixel while the array is allocated with PixelDataLength,
            // so the depth buffer was reallocated on every single frame.
            if (this.depthPixelData == null || depthPixelData.Length != depthImageFrame.PixelDataLength)
            {
                this.depthPixelData = new short[depthImageFrame.PixelDataLength];
            }
            depthImageFrame.CopyPixelDataTo(depthPixelData);

            var coloredPixels = new byte[colorImageFrame.PixelDataLength];
            colorImageFrame.CopyPixelDataTo(coloredPixels);

            var colorBitmap = ImageProcessingAux.CreateBitmapFromPixelData(coloredPixels, colorImageFrame.Width, colorImageFrame.Height);
            var playerBitmap = ImageProcessingAux.ProccessPlayerPixels(coloredPixels, depthPixelData, colorImageFrame.Format);
            motionDetector.ProcessFrame(ref colorBitmap, ref playerBitmap);
        }

        // face tracking (nearest user only)
        if (trackFace && faceTracker != null)
        {
            faceTracker.Track(skeletons, colorImageFrame, depthImageFrame, nearestId);
        }

        // gesture detection — feed each detector the position of its tracked joint
        if (gestureDetectorsValue.Count > 0)
        {
            foreach (Skeleton s in skeletons)
            {
                if (s.TrackingId == nearestId)
                {
                    foreach (GestureDetector gd in gestureDetectorsValue)
                    {
                        gd.Add(s.Joints[gd.TrackedJoint].Position, kinectSensorValue);
                    }
                }
            }
        }

        // posture detection — feed each detector the whole skeleton
        if (postureDetectorsValue.Count > 0)
        {
            foreach (Skeleton s in skeletons)
            {
                if (s.TrackingId == nearestId)
                {
                    foreach (PostureDetector pd in postureDetectorsValue)
                    {
                        pd.Add(s, kinectSensorValue);
                    }
                }
            }
        }
    }
    finally
    {
        // the Kinect frames hold sensor buffers and must always be disposed
        if (colorImageFrame != null)
        {
            colorImageFrame.Dispose();
        }
        if (depthImageFrame != null)
        {
            depthImageFrame.Dispose();
        }
        if (skeletonFrame != null)
        {
            skeletonFrame.Dispose();
        }
    }
}
// On new frame: cache a processed copy (lastFrame) and a public copy (pubFrame),
// run motion detection, publish the motion level, feed the calibration trainer
// or raise the motion alarm, then notify NewFrame subscribers.
private void video_NewFrame(object sender, NewFrameEventArgs e)
{
    try
    {
        // lock
        Monitor.Enter(this);

        // dispose old frames. FIX: the original disposed lastFrame but never
        // pubFrame, leaking one Bitmap per frame.
        if (lastFrame != null)
        {
            lastFrame.Dispose();
        }
        if (pubFrame != null)
        {
            pubFrame.Dispose();
        }

        lastFrame = (Bitmap)e.Frame.Clone();
        pubFrame = (Bitmap)e.Frame.Clone();

        // apply motion detector
        if (motionDetecotor != null)
        {
            motionDetecotor.ProcessFrame(ref lastFrame);

            // publish the current motion level. FIX: guard against no
            // subscribers — the original invoked motionLevelEvent
            // unconditionally (the resulting NullReferenceException was
            // silently swallowed by the catch below).
            if (motionLevelEvent != null)
            {
                MotionLevelArgs a = new MotionLevelArgs();
                CamIdArgs b = new CamIdArgs();
                a.lvl = motionDetecotor.MotionLevel;
                b.cam = cam;
                motionLevelEvent(null, a, b);
            }

            // check motion level
            if (motionDetecotor.calibrating && cam == CameraRig.trainCam)
            {
                // feed the calibration trainer while calibrating
                bubble.train(motionDetecotor.MotionLevel);
            }
            else
            {
                // raise the alarm when armed and the level crosses the threshold
                if (alarmActive && alert && motionDetecotor.MotionLevel >= movementVal && motionAlarm != null)
                {
                    CamIdArgs c = new CamIdArgs();
                    c.cam = cam;
                    LevelArgs l = new LevelArgs();
                    l.lvl = Convert.ToInt32(100 * motionDetecotor.MotionLevel);
                    motionAlarm(null, c, l);
                }
            }
        }

        // image dimension
        width = lastFrame.Width;
        height = lastFrame.Height;
        //#ref5617
    }
    catch (Exception)
    {
        // best-effort: a failure on one frame must not stop the feed
    }
    finally
    {
        // unlock
        Monitor.Exit(this);
    }

    // notify client
    if (NewFrame != null)
    {
        NewFrame(this, new EventArgs());
    }
}