//
// add new match events in this method
//
// Scans every configured watch list against every Latin plate reading in this
// frame. Any reading that scores at or above a list's alert threshold is
// logged, packaged into an alert frame, and queued for alert generation.
void ComparePlateNumbersToAllWatchLists(FRAME frame)
{
    foreach (WatchListControl watchList in m_WatchLists)
    {
        foreach (var entry in watchList.WatchEntrys)
        {
            string targetNumber = entry.Number;
            foreach (string plateRead in frame.PlateNumberLatin)
            {
                int matchScore = LPROCR_Lib.scoreMatch(plateRead, targetNumber);
                if (matchScore < watchList.AlertThreshold)
                {
                    continue; // below this list's alert threshold — not a match
                }
                Console.WriteLine("WL found intermediate match");
                BuidlAlertFrame(frame, plateRead, targetNumber, entry.UserComment, watchList, matchScore);
                //
                // Add new match events here
                //
                m_AlertsToBeGeneratedQ.Enqueue(frame);
            }
        }
    }
}
// Copies a bitmap's pixel data into a two-dimensional luminance array via the
// native LPROCR helper.
// bmp: source image (presumably one byte per pixel, matching the 8bpp indexed
//      frames produced elsewhere in this file — TODO confirm).
// Y:   destination array sized [bmp.Width, bmp.Height]; one int per pixel.
unsafe public void getPixelsFromImageInY(Bitmap bmp, ref int[,] Y)
{
    // Lock the bitmap's bits. ReadOnly is sufficient: the source is never modified.
    Rectangle rect = new Rectangle(0, 0, bmp.Width, bmp.Height);
    System.Drawing.Imaging.BitmapData bmpData = bmp.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadOnly, bmp.PixelFormat);

    try
    {
        // Pin the managed destination array so the native copy can write into it directly.
        fixed (int* arrayPtr = Y)
        {
            IntPtr dest = new IntPtr((void*)arrayPtr);
            // Expand one source byte per pixel into one destination int per pixel.
            LPROCR_Lib.MemCopyByteArrayToIntArray(bmpData.Scan0, dest, bmp.Width * bmp.Height, bmp.Width, bmp.Height);
        }
    }
    finally
    {
        // Always unlock the bits, even if the native copy throws (the original
        // left the bitmap locked on failure).
        bmp.UnlockBits(bmpData);
    }
}
// Constructs the LPR engine: wires up the processing queues, loads the native
// OCR library and publishes its plate-size limits app-wide, and creates (but
// does not start here) the worker threads.
public LPREngine(APPLICATION_DATA appData)
{
    try
    {
        m_AppData = appData;
        // Run Stop() during the MIDDLE phase of application shutdown ordering.
        m_AppData.AddOnClosing(Stop, APPLICATION_DATA.CLOSE_ORDER.MIDDLE);
        m_Log = (ErrorLog)m_AppData.Logger;
        m_FrameGen = (FrameGenerator)m_AppData.FrameGenerator;
        m_LPRProcessQ = new ThreadSafeQueue<FRAME>(m_LPRProcessQueSize, "QueueOverruns_LPR_LPRProcessQ", m_AppData); // this queue hold frames that come from the framegenerator and need to be processed by LPR
        m_AppData.LPRGettingBehind = false;
        m_LPRFinalPlateGroupOutputQ = new ThreadSafeQueue<FRAME>(60, "QueueOverruns_LPR_LPRFinalPlateGroupOutputQ", m_AppData); // filtered plate readings, grouped into similar readings, redundant readings removed
        m_LPRPerFrameReadingQ = new ThreadSafeQueue<FRAME>(60, "QueueOverruns_LPR_LPRPerFrameReadingQ", m_AppData); // instantaneous output from LPR for each fram processed
        m_StoredFrameData = new ThreadSafeHashableQueue(30 * 60); // 60 seconds of frames at 30fps
        m_LPRFuntions = new LPROCR_Lib();
        unsafe
        {
            // the plate group processor accumulates per-frame plate readings and
            // consolidates them into a single plate reading where appropriate
            m_LPRFuntions.RegisterPlateGroupCB(OnNewPlateGroupReady);
        }
        // Query the native library for its plate-size limits and publish them
        // so other components (e.g. UI target boxes) can use them.
        int maxW = 0, minW = 0, maxH = 0, minH = 0;
        m_LPRFuntions.GetMinMaxPlateSize(ref minW, ref maxW, ref minH, ref maxH);
        m_AppData.MAX_PLATE_HEIGHT = maxH;
        m_AppData.MIN_PLATE_HEIGHT = minH;
        m_AppData.MAX_PLATE_WIDTH = maxW;
        m_AppData.MIN_PLATE_WIDTH = minW;
        m_processOptions = new LPROCR_Lib.LPR_PROCESS_OPTIONS();
        m_processOptions.roll = 1;
        m_processOptions.rotation = 1;
        // register with the frame grabber to get new bitmaps from the channel sources as they come in
        m_ConsumerID = m_FrameGen.GetNewConsumerID();
        // m_NumSourceChannels = m_FrameGen.GetNumberOfConfiguredChannels();
        // NOTE(review): presumably a service run uses physical capture channels
        // while an interactive run uses virtual ones — confirm against APPLICATION_DATA.
        m_NumSourceChannels = (m_AppData.RunninAsService) ? m_AppData.MAX_PHYSICAL_CHANNELS : m_AppData.MAX_VIRTUAL_CHANNELS;
        // Worker threads are created here; they are not started in this constructor.
        m_LPREngineProcessThread = new Thread(LPREngineProcessLoop);
        PushLPRResultsThread = new Thread(PushLPRResultsLoop);
    }
    catch (Exception ex)
    {
        // Any construction failure is logged as FATAL; note the object may be
        // left partially initialized in that case.
        m_Log.Trace(ex, ErrorLog.LOG_TYPE.FATAL);
    }
}
// Runs the native motion detector over this frame's luminance plane. When
// motion is reported, the frame is pushed to every motion-detection consumer
// registered on the frame's source channel. Health counters are updated for
// frames examined and frames with motion.
void DetectMotion(FRAME frame)
{
    m_AppData.HealthStatistics[(int)APPLICATION_DATA.HEALTH_STATISTICS.MOTION_DETECTION.MotionDetector_FrameCnt].HitMe++;

    int[,] lum = frame.Luminance;
    bool sawMotion = false;
    int nativeError = 0;
    try
    {
        sawMotion = LPROCR_Lib.DetectMotion(frame.SourceChannel, lum, lum.GetLength(0), lum.GetLength(1), ref nativeError);
    }
    catch (Exception ex)
    {
        // Best-effort: a native-side exception is logged but does not abort processing.
        m_Log.Trace(ex, ErrorLog.LOG_TYPE.INFORMATIONAL);
    }

    if (nativeError != 0)
    {
        m_Log.Log("DetectMotion error = " + nativeError.ToString(), ErrorLog.LOG_TYPE.FATAL);
    }

    if (!sawMotion)
    {
        return;
    }

    m_AppData.HealthStatistics[(int)APPLICATION_DATA.HEALTH_STATISTICS.MOTION_DETECTION.MotionDetector_FramesDetected].HitMe++;
    CONSUMER_PUSH notification = new CONSUMER_PUSH();
    notification.FrameToPush = frame;
    notification.ConsumersToPush = m_Channels[frame.SourceChannel].m_MotionDetectedCallBackList;
    m_MotionDetectedConsumerPushQ.Enqueue(notification);
}
// Converts a character-glyph bitmap into a C-style initializer entry — a 2D
// array of -1/+1 values — and appends it to the destination file
// (m_DestinationFile).
// path:     glyph image file; the text before the first '_' (or, failing that,
//           before the extension) becomes the entry name.
// lastChar: true for the final entry, which omits the trailing comma.
void WriteMaskFile(string path, bool lastChar)
{
    FileInfo fi = new FileInfo(path);
    string inputNameOnly = fi.Name;

    // Entry name: prefix before the first '_' if present, else before the extension.
    string intputNameNoExtention = inputNameOnly.Contains("_")
        ? inputNameOnly.Split('_')[0]
        : inputNameOnly.Split('.')[0];

    string outPath = m_DestinationFile;

    int[,] charY;
    // Dispose the source bitmap when done (the original leaked it).
    using (Bitmap bmp = new Bitmap(path))
    {
        charY = new int[bmp.Width, bmp.Height];
        getPixelsFromImageInY(bmp, ref charY);

        // Normalize to the fixed 20x40 glyph size expected by the OCR masks.
        if (bmp.Width != 20 || bmp.Height != 40)
        {
            LPROCR_Lib LPR_C_Lib = new LPROCR_Lib();
            int[,] newScaledArray = new int[20, 40];
            LPR_C_Lib.OCR_prepChar(charY, charY.GetLength(0), charY.GetLength(1), newScaledArray);
            charY = newScaledArray;

            // Render the rescaled glyph, as the original code path did; the
            // bitmap itself is not used afterwards, so dispose it here too.
            Bitmap scaled = new Bitmap(20, 40);
            putPixelsFromImageinYBW(ref scaled, charY);
            scaled.Dispose();
        }
    }

    // Build the whole entry in memory and append it in one write — the original
    // reopened the output file with File.AppendAllText once per data row.
    // The emitted bytes are identical.
    StringBuilder entry = new StringBuilder();
    entry.Append("{\"" + intputNameNoExtention + "\", //name \r\n");
    entry.Append("{ \r\n");

    for (int x = 0; x < charY.GetLength(0); x++)
    {
        entry.Append("{ ");
        for (int y = 0; y < charY.GetLength(1); y++)
        {
            // Threshold luminance into the -1/+1 mask values.
            int val = (charY[x, y] <= 0) ? -1 : 1;
            entry.Append(val.ToString());
            // Comma-separate all but the last value in the row.
            entry.Append(y != charY.GetLength(1) - 1 ? ", " : " ");
        }
        // Comma after every data row except the last.
        entry.Append(x != charY.GetLength(0) - 1 ? " },\r\n" : " }\r\n");
    }

    // Close this array entry; only the final entry omits the trailing comma.
    entry.Append(!lastChar ? " }},\r\n" : " }}\r\n");

    File.AppendAllText(outPath, entry.ToString());
}
// Composes a managed 8bpp bitmap from a raw 2255-device frame (header + pixel
// bytes) and forwards it to consumers, or marks the channel as having no video.
// binfo:      pointer to an unmanaged BitmapData-shaped header for the frame.
// image:      pointer to the raw pixel bytes.
// cindex:     capture-channel index.
// frameIndex: frame sequence number (unused here).
unsafe void handleBitmap(int *binfo, byte *image, int cindex, int frameIndex)
{
    try
    {
        if (m_Stop)
        {
            return;
        }
        // compose a managed bitmap from the bitmap parts being delivered
        System.Drawing.Imaging.BitmapData srcBmpHeader = new System.Drawing.Imaging.BitmapData();
        Marshal.PtrToStructure((IntPtr)binfo, srcBmpHeader); //pDoc->m_buf[idx].lpbmi[frm_idx]->bmiHeader.biSizeImage
        // for some reason stride and width a swapped in position between the 2255
        // definition and the windows bitmpa definition — this field swap is
        // intentional, do not "fix" it without testing against the device.
        int width = srcBmpHeader.Height;
        int height = srcBmpHeader.Stride;
        Rectangle rect = new Rectangle(0, 0, width, height);
        // Bitmap nBmp = new Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        Bitmap nBmp = new Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
        System.Drawing.Imaging.BitmapData bmpData = nBmp.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite, nBmp.PixelFormat);
        // Get the address of the first line.
        // int byteCount = width * height * 3;
        // One byte per pixel for the 8bpp indexed format.
        int byteCount = width * height;
        //byte[] byteArray = new byte[byteCount];
        //Marshal.Copy(new IntPtr(image), byteArray, 0, byteCount);
        //Marshal.Copy(byteArray, 0, bmpData.Scan0, byteCount);
        // The native copy also reports whether the frame looks like the device's
        // "no video" pattern.
        bool detectedNoVideoPresent = false;
        LPROCR_Lib.MemCopyByte((int *)image, bmpData.Scan0, byteCount, ref detectedNoVideoPresent);
        // LPROCR_Lib.MemCopyInt((int*)image, bmpData.Scan0, byteCount );
        nBmp.UnlockBits(bmpData);
        // nBmp.RotateFlip(RotateFlipType.RotateNoneFlipY);
        // if there is no video connected, the 2255 device sends frames at 30fps,
        // but they are solid blue-screen.
        if (!detectedNoVideoPresent)
        {
            SetHaveVideo(cindex);
            SendImageToConsumer(nBmp, S2255Controller.COMPRESSION_MODE.BITMAP, cindex);
        }
        else
        {
            SetDontHaveVideo(cindex);
        }
    }
    catch (Exception ex)
    {
        m_Log.Log("handleBitmap ex: " + ex.Message, ErrorLog.LOG_TYPE.FATAL);
    }
}
// Builds the monitor form: window title, logging, LPR plate-size targets for
// the on-screen guide boxes, per-channel UI state, server-connection event
// wiring, and the polling thread (started as the last step of construction).
public GoNoGoStatusMainForm()
{
    InitializeComponent();
    this.Text = "First Evidence GoNoGo System Monitor, version " + System.Reflection.Assembly.GetExecutingAssembly().GetName().Version.ToString();

    m_AppData = new APPLICATION_DATA();
    m_Log = new ErrorLog(m_AppData);
    m_AppData.Logger = (object)m_Log;

    // get plate min max to draw the min/max target boxes for the user to see
    LPROCR_Lib LPRlib = new LPROCR_Lib();
    LPRlib.GetMinMaxPlateSize(ref minW, ref maxW, ref minH, ref maxH);
    // fudge the min box size so the user will make the plate bigger
    minH += 20;
    minW += 40;

    m_ReceiveDataSingleton = new object();
    jpegQ = new ThreadSafeQueue<JPEG>(30);
    m_PreviousPlateNumber = new string[4];

    // Per-channel state for the four capture channels, initialized identically
    // (the original repeated each assignment four times by hand).
    m_UneditedImages = new UNEDITEDIMAGES[4];
    m_JpegPlayControl = new JPEG_PLAY_CONTROL[4];
    for (int ch = 0; ch < 4; ch++)
    {
        m_UneditedImages[ch] = new UNEDITEDIMAGES();
        m_JpegPlayControl[ch] = new JPEG_PLAY_CONTROL();
    }
    // bool array elements default to false; the explicit per-element
    // assignments in the original were redundant.
    m_FullScreenPB = new bool[4];

    m_SystemStatusLock = new object();
    m_SystemStatus = new SYSTEM_STATUS_STRINGS();

    this.FormClosing += new FormClosingEventHandler(GoNoGoStatusMainForm_FormClosing);
    this.Resize += new EventHandler(GoNoGoStatusMainForm_Resize);

    m_ServerConnection = new RCSClient(m_AppData);
    m_PollingThread = new Thread(PollingLoop);
    m_ServerConnection.MessageEventGenerators.OnRxChannelList += OnReceiveChannels;
    m_ServerConnection.MessageEventGenerators.OnRxJpeg += OnNewJpeg;
    m_ServerConnection.MessageEventGenerators.OnRxHealthStatus += OnRxStats;

    // One click handler per picture box, dispatched by channel index.
    pb_ClickGenericHandler = new pb_ClickDelegate[4];
    pb_ClickGenericHandler[0] = pb_Click0;
    pb_ClickGenericHandler[1] = pb_Click1;
    pb_ClickGenericHandler[2] = pb_Click2;
    pb_ClickGenericHandler[3] = pb_Click3;

    // Queue-level progress bar hosted inside its group box.
    progressBarPlateProcessQueueLevel = new MyProgressBar();
    progressBarPlateProcessQueueLevel.Location = new Point(10, 40);
    progressBarPlateProcessQueueLevel.Size = new Size(groupBoxPlateProcessQueLevel.Size.Width - 20, 30);
    groupBoxPlateProcessQueLevel.Controls.Add(progressBarPlateProcessQueueLevel);

    buttonSaveCurrentImage.Visible = false;
    buttonSaveCurrentImage.Enabled = false;

    m_PollingThread.Start();
}
// get the individual frames and send them to the DVR/LPR processing chains
/// <summary>
/// Buffer callback, COULD BE FROM FOREIGN THREAD. Extracts a luminance plane
/// and a display bitmap from the sample buffer, wraps both in a FRAME stamped
/// with the file time plus the sample offset, and hands it to OnNewFrame.
/// Always returns 0, as DirectShow expects.
/// </summary>
int ISampleGrabberCB.BufferCB(double SampleTime, IntPtr pBuffer, int BufferLen)
{
    lock (lockSampleGrabberState)
    {
        if (m_Stop)
        {
            return (0);
        }
        m_SampleGrabberCallBackIsDone = false;

        IntPtr ipSource = pBuffer;
        // Destination luminance plane, one int per pixel.
        int[,] ipDest = new int[m_videoWidth, m_videoHeight];
        bool invert = true;
        // (The original compared ipDest's dimensions against m_videoWidth/
        // m_videoHeight here — but the array was just allocated from those same
        // values, so the check could never fail and has been removed.)
        LPROCR_Wrapper.LPROCR_Lib.extractFromBmpDataToLumArray(ipSource, ipDest, m_stride, m_videoWidth, m_videoHeight, invert);

        // compose a new bitmap
        Bitmap bmp = new Bitmap(m_videoWidth, m_videoHeight, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        // Lock the bitmap's bits.
        Rectangle rect = new Rectangle(0, 0, bmp.Width, bmp.Height);
        System.Drawing.Imaging.BitmapData bmpData = bmp.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite, bmp.PixelFormat);
        // Get the address of the first line.
        IntPtr ptr = bmpData.Scan0;
        // copy out to the new bitamp
        bool dontcare = false;
        unsafe
        {
            LPROCR_Lib.MemCopyByte((int *)ipSource, ptr, BufferLen, ref dontcare);
        }
        bmp.UnlockBits(bmpData);
        // what it takes to make it look right, if I had time I would do this in
        // one step in LPROCR_Lib.MemCopyByte
        bmp.RotateFlip(RotateFlipType.Rotate180FlipX);

        FRAME frame = new FRAME(m_AppData);
        frame.Luminance = ipDest;
        frame.TimeStamp = m_FileTimeOfCurrentFile.AddSeconds(SampleTime);
        frame.Bmp = bmp;
        frame.SourceChannel = m_Channel;
        frame.SourceName = m_AppData.UserSpecifiedCameraName == null ? "storedjpeg" : m_AppData.UserSpecifiedCameraName;
        frame.SetFileName();
        OnNewFrame(frame);

        m_SampleGrabberCallBackIsDone = true;
        return (0);
    }
}