/// <summary>
/// Worker-thread loop that plays back a directory of still jpeg files
/// (m_FilesToPlay) as if they were video frames, pushing each one to the
/// display queue and into the processing chain via OnNewFrame().
/// One-time-use thread: sets m_Stop when the last file has been played.
/// </summary>
void PlayFilesLoop()
{
    const int frameRate = 100;                  // nominal playback rate, frames/sec
    const int frameDelay = 1000 / frameRate;    // ms pause between frames
    int currentFrame = 0;
    m_BitmapsForDisplayQ = new ThreadSafeQueue<Bitmap>(30);
    while (!m_Stop)
    {
        while (playing)
        {
            if (m_Stop) break;
            Thread.Sleep(frameDelay);
            // when processing plate image jpegs (i.e. just the clipped out plate in a very small image)
            // the player can go too fast for the image processing, resulting in Que overruns,
            // so slow down to prevent loss
            if (m_AppData.LPRGettingBehind || m_AppData.MotionDetectionGettingBehind || m_AppData.DVRStoringLPRRecordsGettingBehind)
                Thread.Sleep(250); // slow down a bit
            if (m_FilesToPlay == null) continue;
            if (m_FilesToPlay.Length < 1) continue; // array: Length, not LINQ Count()
            // FIX: bmp was previously declared outside the loop, so a failed load silently
            // replayed the prior file's bitmap (already owned by an earlier FRAME) with the
            // wrong timestamp — and looped forever if the very first file was unreadable.
            // Declare per-iteration and skip files that will not load.
            Bitmap bmp = null;
            try
            {
                bmp = (Bitmap)Bitmap.FromFile(m_FilesToPlay[currentFrame]);
            }
            catch
            {
                // unreadable/locked/corrupt file: fall through with bmp == null and skip it
            }
            if (bmp != null)
            {
                // hand the display its own copy; the original goes into the FRAME below
                Bitmap forDisplayToOwn = new Bitmap(bmp);
                m_BitmapsForDisplayQ.Enqueue(forDisplayToOwn);
                m_ParentPanel.Parent.BeginInvoke((MethodInvoker)delegate { PushBitmapToDisplay(); });
                FRAME frame = new FRAME(m_AppData);
                frame.NotVideoEachFrameIsUniqueSize = true; // stills may all differ in size
                int[,] lum = new int[bmp.Width, bmp.Height];
                getPixelsFromImageInY(bmp, ref lum);
                frame.Luminance = lum;
                // best available capture time for a still image is the file's write time
                frame.TimeStamp = new FileInfo(m_FilesToPlay[currentFrame]).LastWriteTimeUtc;
                frame.Bmp = bmp;
                frame.SourceChannel = m_Channel;
                frame.SourceName = m_AppData.UserSpecifiedCameraName == null ? ("stilljpg") : m_AppData.UserSpecifiedCameraName;
                frame.SetFileName();
                OnNewFrame(frame);
            }
            // advance past this file whether or not it loaded (single end-of-play check)
            currentFrame++;
            if (currentFrame >= m_FilesToPlay.Length)
            {
                OnEndOfFileEvent(); // we are done.
                playing = false;
                m_Stop = true; // this is a one-time use thread
                break;
            }
        }
        Thread.Sleep(1);
    }
}
// /////////////////////////////////////////////////////////
// /////////////////////////////////////////////////////////
// /////////////////////////////////////////////////////////
// Receiving new frames from the lower layers
/// <summary>
/// Receives a new frame from a movie file being played: manufactures the jpeg
/// from the frame's bitmap, stamps identifying metadata, then routes the frame
/// to motion detection (video sources) or straight to the motion-detected
/// consumer push queue (independent-jpeg sources), plus all-frames consumers.
/// </summary>
void MovieFiles_OnNewImage(FRAME frame)
{
    // this is a bitmap - send it and the last jpeg received up the chain
    try
    {
        m_AppData.HealthStatistics[(int)APPLICATION_DATA.HEALTH_STATISTICS.FRAME_GENERATOR.FrameGen_FrameCnt].HitMe++;
        // manufacture a jpeg from the bitmap
        // FIX: MemoryStream was never disposed; using-statement guarantees release
        Image image = frame.Bmp;
        using (MemoryStream stream = new MemoryStream())
        {
            image.Save(stream, System.Drawing.Imaging.ImageFormat.Jpeg);
            frame.Jpeg = stream.ToArray();
        }
        frame.SerialNumber = m_FrameCount;
        frame.GPSPosition = m_CurrentGPSPosition;
        frame.PSSName = m_AppData.ThisComputerName;
        frame.SetFileName();
        m_FrameCount++;
        if (!frame.NotVideoEachFrameIsUniqueSize)
        {
            // normal video: send to motion detection, tracking queue back-pressure
            m_MotionDetectionQ.Enqueue(frame);
            if (m_MotionDetectionQ.Count > m_MotionDetectionQueLevel / 2)
                m_AppData.MotionDetectionGettingBehind = true;
            else
                m_AppData.MotionDetectionGettingBehind = false;
        }
        else
        {
            // skip motion detection because the source is a directory of independent jpegs,
            // each potentially of a different size - pretend we detected motion
            CONSUMER_PUSH p = new CONSUMER_PUSH();
            p.FrameToPush = frame;
            p.ConsumersToPush = m_Channels[frame.SourceChannel].m_MotionDetectedCallBackList;
            m_MotionDetectedConsumerPushQ.Enqueue(p);
        }
        m_AppData.HealthStatistics[(int)APPLICATION_DATA.HEALTH_STATISTICS.FRAME_GENERATOR.FrameGen_MotionDetectionPendingQ].HitMe = m_MotionDetectionQ.Count;
        // send to non-motion-detection consumers
        CONSUMER_PUSH push = new CONSUMER_PUSH();
        push.FrameToPush = frame;
        push.ConsumersToPush = m_Channels[frame.SourceChannel].m_NewImageCallBackList;
        m_AllFramesConsumerPushQ.Enqueue(push);
        m_AppData.HealthStatistics[(int)APPLICATION_DATA.HEALTH_STATISTICS.FRAME_GENERATOR.FrameGen_NonMotionFramePushQ].HitMe
            = m_AllFramesConsumerPushQ.Count;
    }
    catch (Exception ex)
    {
        m_Log.Trace(ex, ErrorLog.LOG_TYPE.FATAL);
    }
}
// get the individual frames and send them to the DVR/LPR processing chains
/// <summary>
/// DirectShow sample-grabber buffer callback, COULD BE FROM FOREIGN THREAD.
/// Extracts a luminance array and a (flipped) bitmap from the raw sample buffer,
/// wraps both in a FRAME stamped with the file time plus the sample offset, and
/// forwards it via OnNewFrame. Serialized by lockSampleGrabberState; the
/// m_SampleGrabberCallBackIsDone flag brackets the in-progress work.
/// </summary>
int ISampleGrabberCB.BufferCB(double SampleTime, IntPtr pBuffer, int BufferLen)
{
    lock (lockSampleGrabberState)
    {
        if (m_Stop) return 0;
        // mark "busy" so shutdown logic can tell a callback is mid-flight
        m_SampleGrabberCallBackIsDone = false;
        IntPtr ipSource = pBuffer;
        int[,] ipDest = new int[m_videoWidth, m_videoHeight];
        bool invert = true;
        // NOTE(review): this check is tautological - ipDest was allocated above with
        // exactly these dimensions, so the branch can never be taken; likely a leftover
        // guard from when ipDest was a reused/shared buffer. Confirm before removing.
        if (m_videoWidth != ipDest.GetLength(0) || m_videoHeight != ipDest.GetLength(1))
        {
            return 0;
        }
        // native helper: pull luminance (Y) values out of the 24bpp sample data
        LPROCR_Wrapper.LPROCR_Lib.extractFromBmpDataToLumArray(ipSource, ipDest, m_stride, m_videoWidth, m_videoHeight, invert);
        // compose a new bitmap
        Bitmap bmp = new Bitmap(m_videoWidth, m_videoHeight, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        // Lock the bitmap's bits.
        Rectangle rect = new Rectangle(0, 0, bmp.Width, bmp.Height);
        System.Drawing.Imaging.BitmapData bmpData = bmp.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite, bmp.PixelFormat);
        // Get the address of the first line.
        IntPtr ptr = bmpData.Scan0;
        // copy out to the new bitamp (raw byte copy of the whole sample buffer)
        bool dontcare = false;
        unsafe { LPROCR_Lib.MemCopyByte((int*)ipSource, ptr, BufferLen, ref dontcare); }
        bmp.UnlockBits(bmpData);
        // what it takes to make it look right, if I had time I would do this
        // in one step in LPROCR_Lib.MemCopyByte
        bmp.RotateFlip(RotateFlipType.Rotate180FlipX);
        FRAME frame = new FRAME(m_AppData);
        frame.Luminance = ipDest;
        // timestamp = file's base time plus the sample's offset into the movie
        frame.TimeStamp = m_FileTimeOfCurrentFile.AddSeconds(SampleTime);
        frame.Bmp = bmp;
        frame.SourceChannel = m_Channel;
        frame.SourceName = m_AppData.UserSpecifiedCameraName == null ? "storedjpeg" : m_AppData.UserSpecifiedCameraName;
        frame.SetFileName();
        OnNewFrame(frame);
        // mark "idle" again for shutdown coordination
        m_SampleGrabberCallBackIsDone = true;
        return 0;
    }
}
/// <summary>
/// Stamps a hand-edited frame with the metadata normally supplied by the LPR
/// engine, so the Analysts Workstation image editor can by-pass the LPREngine
/// and send results directly to storage.
/// </summary>
/// <param name="frame">The frame to complete; mutated in place.</param>
/// <returns>The same frame instance, fully stamped.</returns>
public FRAME CompleteFrameDataToByPassLPR(FRAME frame)
{
    // Identifying metadata so storage files it like an engine-produced result.
    frame.SerialNumber = m_FrameCount;
    frame.GPSPosition = m_CurrentGPSPosition;
    frame.TimeStamp = DateTime.UtcNow;
    frame.PSSName = "AnalystsWorkstation_" + m_AppData.ThisComputerName;
    // File name is derived from the fields assigned above, so set it last.
    frame.SetFileName();
    return frame;
}