// (Dead commented-out CreateLogDirectory helper removed: Directory.CreateDirectory
// already creates all intermediate directories in a single call.)

/// <summary>
/// Builds the PLATE field of an event-log line: the native language tag followed by
/// the native-language and Latin alternate readings, '^'-separated.
/// </summary>
string BuildPlateField(FRAME frame)
{
    // PLATE^LATIN^C3C456D2AB:G3C456D2AB:G3G456D2A8
    string nativeReadings = BuildAltReadings(frame.PlateNumberNativeLanguage);
    string latinReadings = BuildAltReadings(frame.PlateNumberLatin);
    return "PLATE," + frame.PlateNativeLanguage + "^" + nativeReadings + "^" + latinReadings;
}
/// <summary>
/// Handler for new frames arriving from the file decoder; hands the frame to the
/// push queue for the consumer-push thread.
/// </summary>
void fileDecoder_OnNewFrame(FRAME frame)
{
    // NOTE(review): the enqueue used to be wrapped in lock(singleton); presumably
    // the queue is internally synchronized — confirm before re-adding the lock.
    FramesToPushQ.Enqueue(frame);
}
/// <summary>
/// Returns an FFT frame for <paramref name="time"/> where each frequency bin holds
/// the loudest recent value, decayed linearly toward zero over 1200 ms. Looks back
/// at up to 10 frames preceding the requested time.
/// Throws if <paramref name="time"/> is at/after the last stored frame.
/// </summary>
public FRAME SlowFalloffValue(int time)
{
    for (int i = 0; i < frames.Count; i++)
    {
        if (frames[i].time > time)
        {
            // Step back to the frame at/just before the requested time.
            i = max(0, i - 1);
            if (i == frames.Count - 1)
            {
                return (frames[i]);
            }
            FRAME res = new FRAME();
            res.values = new float[FREQS];
            // Scan up to 10 frames back; keep, per bin, the loudest decayed value.
            for (int k = i; k >= 0 && k > i - 10; k--)
            {
                FRAME a = frames[k];
                for (int j = 0; j < FREQS; j++)
                {
                    // BUG FIX: "(time - a.time) / 1200" was INTEGER division, which
                    // truncated the decay fraction to whole steps (0, 1, ...), so the
                    // falloff was quantized instead of smooth. Divide as float.
                    res.values[j] = max(res.values[j], lerp(a.values[j], 0, (time - a.time) / 1200f));
                }
            }
            res.time = time;
            return (res);
        }
    }
    throw new Exception("hithere");
}
/// <summary>
/// Loads pre-computed FFT frames from "fft.txt" (one frame per line:
/// "time v0 v1 ... v{FREQS-1}") and normalizes all values to [0,1] by the global
/// maximum. Prints a summary of what was loaded.
/// </summary>
public FFT()
{
    frames = new List<FRAME>();
    float maxvol = 0f;
    using (StreamReader r = new StreamReader("fft.txt"))
    {
        string line;
        while ((line = r.ReadLine()) != null)
        {
            if (line.Length == 0)
            {
                continue;
            }
            string[] p = line.Split(' ');
            FRAME f = new FRAME();
            // FIX: parse with the invariant culture — the file is machine-generated,
            // so "1.5" must parse the same regardless of the OS locale's decimal
            // separator (e.g. comma locales would otherwise misread the data).
            f.time = int.Parse(p[0], System.Globalization.CultureInfo.InvariantCulture);
            f.values = new float[FREQS];
            for (int i = 0; i < FREQS; i++)
            {
                f.values[i] = float.Parse(p[1 + i], System.Globalization.CultureInfo.InvariantCulture);
                maxvol = max(maxvol, f.values[i]);
            }
            frames.Add(f);
        }
    }
    // FIX: guard against an empty or all-zero file — dividing by a zero maxvol
    // would turn every value into NaN (or throw nothing, silently corrupting data).
    if (maxvol > 0f)
    {
        foreach (FRAME f in frames)
        {
            for (int i = 0; i < FREQS; i++)
            {
                f.values[i] /= maxvol;
            }
        }
    }
    Console.WriteLine("{0} audio frames, maxvol {1}", frames.Count, maxvol);
}
/// <summary>
/// Worker loop: drains up to 4 frames per pass from the motion-detection queue and
/// runs DetectMotion on each, sleeping 1 ms between passes, until m_Stop is raised.
/// Exceptions are logged as FATAL and the loop keeps running.
/// </summary>
void MotionDetectionLoop()
{
    FRAME frm = null;
    int count = 0;
    while (!m_Stop)
    {
        try
        {
            count = 4;
            while (count > 0)
            {
                frm = m_MotionDetectionQ.Dequeue();
                if (frm != null)
                {
                    DetectMotion(frm);
                }
                count--;
                // BUG FIX: the original read "if (!m_Stop) break;", which bailed out
                // of the 4-frame batch after a single iteration whenever the loop was
                // NOT stopping (and, conversely, kept batching during shutdown).
                // The intent is the opposite: abandon the batch as soon as a stop is
                // requested.
                if (m_Stop)
                {
                    break;
                }
            }
            Thread.Sleep(1);
        }
        catch (Exception ex)
        {
            m_Log.Trace(ex, ErrorLog.LOG_TYPE.FATAL);
        }
    }
}
/// <summary>
/// Assembles one event-log line for an LPR result and returns the line together
/// with the target file and directory so the writer thread can persist it.
/// </summary>
public EVENT_TO_WRITE WriteLPREvent(FRAME frame)
{
    // Substitute a two-part placeholder when there is no GPS fix.
    if (frame.GPSPosition.Contains("No Position"))
    {
        frame.GPSPosition = "No Position Available, No position Available"; // once for lat and once for lon
    }

    lineFields[(int)FIELDS_ON_BUILDING_STRING.EVENT] = BuildPlateField(frame);
    lineFields[(int)FIELDS_ON_BUILDING_STRING.PSS_NAME] = frame.PSSName;
    lineFields[(int)FIELDS_ON_BUILDING_STRING.TIME_STAMP] = frame.TimeStamp.ToString(m_AppData.TimeFormatStringForFileNames);
    lineFields[(int)FIELDS_ON_BUILDING_STRING.SOURCE_CAMERA_NAME] = frame.SourceName;
    lineFields[(int)FIELDS_ON_BUILDING_STRING.GPS_LOCATION] = frame.GPSPosition;
    lineFields[(int)FIELDS_ON_BUILDING_STRING.JPEGFILE_RELATIVE_PATH] = frame.JpegFileRelativePath;

    EVENT_TO_WRITE eventData = new EVENT_TO_WRITE();
    eventData.line = BuildLine(lineFields) + "\r\n";
    eventData.file = m_PathManager.GetEventLogFilePath(frame.TimeStamp, frame.SourceName);
    eventData.directory = m_PathManager.GetEventLogDirectoryPath(frame.TimeStamp, frame.SourceName);
    return eventData;
}
/// <summary>
/// Worker loop: forwards finished plate-group results and raw per-frame readings
/// from their output queues to all registered event consumers, until m_Stop.
/// </summary>
void PushLPRResultsLoop()
{
    while (!m_Stop)
    {
        Thread.Sleep(1);
        try
        {
            // Final (filtered/grouped) plate results — broadcast to every consumer
            // that attached a handler to this delegate.
            FRAME grouped = m_LPRFinalPlateGroupOutputQ.Dequeue();
            if (grouped != null && OnNewFilteredPlateGroupEvent != null)
            {
                OnNewFilteredPlateGroupEvent(grouped);
            }

            // Raw single-frame readings, same broadcast pattern.
            FRAME raw = m_LPRPerFrameReadingQ.Dequeue();
            if (raw != null && OnNewUnfilteredPlateEvent != null)
            {
                OnNewUnfilteredPlateEvent(raw);
            }
        }
        catch (Exception ex)
        {
            m_Log.Trace(ex, ErrorLog.LOG_TYPE.FATAL);
        }
    }
}
/// <summary>
/// Worker loop: compares each newly read plate against all watch lists, and about
/// once per second (100 iterations x 10 ms sleep) re-reads the watch-list files to
/// pick up user edits.
/// </summary>
void WatchThreadLoop()
{
    int iterations = 0;
    while (!m_Stop)
    {
        Thread.Sleep(10);
        try
        {
            FRAME frame = m_NewLPRResultQ.Dequeue();
            if (frame != null)
            {
                Console.WriteLine("WL comparing new plate to lists");
                ComparePlateNumbersToAllWatchLists(frame);
            }
        }
        catch (Exception ex)
        {
            m_Log.Trace(ex, ErrorLog.LOG_TYPE.FATAL);
        }

        // check watch list files to see if the user has updated a list
        iterations++;
        try
        {
            if (iterations > 100)
            {
                iterations = 0;
                checkLists();
            }
        }
        catch (Exception ex)
        {
            m_Log.Trace(ex, ErrorLog.LOG_TYPE.FATAL);
        }
    }
}
/// <summary>
/// put images into the pre-motion temp buffer area on disk
/// </summary>
/// <param name="frame">frame whose jpeg is staged in the per-channel pre-motion ring buffer</param>
void PendingMotionDetection(FRAME frame)
{
    if (PauseFlag.Pause)
    {
        return;
    }

    var record = m_PreMotionRecords[frame.SourceChannel];
    if (record.MotionDetectedMovingFilesInProcess)
    {
        return;
    }

    // Stage this image on disk under a sub-directory-qualified file name.
    string stagedPath = GetPreMotionFileNameCompletePath(frame);
    FileAccessControl.WriteStream(stagedPath, frame.Jpeg);

    // Track the last X files written before any motion event occurs...
    record.PendingMotionDetectionQ.Enqueue(stagedPath);

    // ...and cap the buffer at the configured pre/post count, deleting the oldest
    // staged file once full.
    if (record.PendingMotionDetectionQ.Count >= m_NumberOfFramesToPrePostBuffer)
    {
        string oldest = record.PendingMotionDetectionQ.Dequeue();
        FileAccessControl.FileDelete(oldest);
    }
}
//
// Compares every Latin reading of a new plate against every entry of every watch
// list; each entry whose match score clears that list's threshold produces an
// alert frame on the alert queue. Add new match events inside the threshold check.
//
void ComparePlateNumbersToAllWatchLists(FRAME frame)
{
    foreach (WatchListControl list in m_WatchLists)
    {
        for (int i = 0; i < list.WatchEntrys.Length; i++)
        {
            string watchNumber = list.WatchEntrys[i].Number;
            foreach (string plate in frame.PlateNumberLatin)
            {
                int score = LPROCR_Lib.scoreMatch(plate, watchNumber);
                if (score < list.AlertThreshold)
                {
                    continue;
                }
                Console.WriteLine("WL found intermediate match");
                BuidlAlertFrame(frame, plate, watchNumber, list.WatchEntrys[i].UserComment, list, score);
                //
                // Add new match events here
                //
                m_AlertsToBeGeneratedQ.Enqueue(frame);
            }
        }
    }
}
/// <summary>
/// Receives a freshly read plate and queues it for the watch-list thread to handle.
/// </summary>
void NewLPRResultsEvent_OnNewPlateEvent(FRAME frame)
{
    Console.WriteLine("WL recevied new plate");
    m_NewLPRResultQ.Enqueue(frame);
}
// Assigns a new frame and property to this card. When setLogic is true, spell/trap
// bookkeeping is applied before the property is stored.
// NOTE(review): Frame is reassigned FIRST, so IsMonster() below reflects the NEW
// frame — yet the branch taken when it returns true clears monster-only stats and
// derives a SPELL/TRAP attribute, which reads like the handling for a non-monster
// frame. Confirm whether the condition is meant to be IsMonster() or !IsMonster().
internal void SetProperty(FRAME frame, PROPERTY property, bool setLogic = true)
{
    this.Frame = frame;
    if (setLogic)
    {
        if (this.IsMonster())
        {
            // Reset monster-only fields (-1 = "not applicable").
            ScaleLeft = ScaleRight = ATK = DEF = Level = Rank = -1;
            IsPendulum = false;
            // Attribute follows the (new) frame: Spell frame -> SPELL, else TRAP.
            Attribute = Frame == FRAME.Spell ? ATTRIBUTE.SPELL : ATTRIBUTE.TRAP;
            Abilities.Clear();
            Property = PROPERTY.Normal;
            Type = TYPE.NONE;
        }
        else
        {
            // A property exclusive to spells or traps forces the matching attribute.
            if (property.IsSpellPropertyOnly())
            {
                Attribute = ATTRIBUTE.SPELL;
                //_Frame = FRAME.Spell;
            }
            else if (property.IsTrapPropertyOnly())
            {
                Attribute = ATTRIBUTE.TRAP;
                //_Frame = FRAME.Trap;
            }
        }
    }
    Property = property;
}
// Returns an interpolated FFT frame for an arbitrary time: walks to the first
// stored frame past `time`, steps back one, smooths both bracketing frames via
// SmoothValue, and lerps every frequency bin by the time progress between them.
// NOTE: the loop index i is mutated mid-loop ("i = max(0, i - 1)"); this is safe
// only because every path inside the branch returns before the next iteration.
// Throws if `time` is at/after the last stored frame ("hithere" looks like a
// placeholder message — presumably callers are expected to stay in range).
public FRAME SmootherValue(int time)
{
    for (int i = 0; i < frames.Count; i++)
    {
        if (frames[i].time > time)
        {
            // Step back to the frame at/just before the requested time.
            i = max(0, i - 1);
            if (i == frames.Count - 1)
            {
                return (frames[i]);
            }
            // Smooth the two bracketing frames (+1 ms to sample just inside each).
            FRAME one = SmoothValue(frames[i].time + 1);
            FRAME two = SmoothValue(frames[i + 1].time + 1);
            FRAME res = new FRAME();
            res.time = time;
            res.values = new float[FREQS];
            // Fractional position of `time` between the two smoothed frames.
            float p = progress(one.time, two.time, time);
            for (int j = 0; j < FREQS; j++)
            {
                res.values[j] = lerp(one.values[j], two.values[j], p);
            }
            return (res);
        }
    }
    throw new Exception("hithere");
}
// Navigates the FRAME control to the page matching the clicked navigation button
// and updates the highlight: the previously selected button gets its normal color
// back and the clicked one gets the "selected" color.
private void Btn_Click(object sender, RoutedEventArgs e)
{
    Button clicked = sender as Button;

    if (clicked == btn_material)
    {
        FRAME.Navigate(page_material);
    }
    else if (clicked == btn_sale)
    {
        FRAME.Navigate(page_sale);
    }
    else if (clicked == btn_test)
    {
        FRAME.Navigate(page_input);
    }

    // Un-highlight the previous selection, then highlight the new one.
    if (pre_B != null)
    {
        pre_B.Background = new SolidColorBrush(Color.FromRgb(254, 203, 137));
    }
    clicked.Background = new SolidColorBrush(Color.FromRgb(185, 172, 146));
    pre_B = clicked;
}
/// <summary>
/// Use this routine to push hand-edited plate results into the storage system as if the plates came from the automated LPR reading chain.
/// </summary>
public void PushHandEditedPlate(FRAME frame)
{
    if (frame == null)
    {
        return;
    }
    // Feed the edited result into the same output queue the automated chain uses.
    m_LPRFinalPlateGroupOutputQ.Enqueue(frame);
}
/// <summary>
/// Private constructor: seeds the card with its defaults and opens the bundled
/// byte database at {app location}\Resources\Data.ld.
/// </summary>
private YgoCard()
{
    DefaultFrame = FRAME.Effect;
    // FIX: was "DatabasePath = DatabasePath = ..." — a duplicated assignment
    // (harmless at runtime but clearly a typo).
    DatabasePath = Utilities.GetLocationPath() + @"\Resources\Data.ld";
    Database = new ByteDatabase(DatabasePath);
    this.Abilities = new List<ABILITY>();
    this.Version = new Version("0.1");
}
// Stamps watch-list match details onto the frame so downstream alert handling can
// report which list and entry matched, and how well. (The misspelled method name
// is kept as-is — callers reference "BuidlAlertFrame".)
void BuidlAlertFrame(FRAME frame, string matchingPlateString, string watchNumber, string watchNumberComment, WatchListControl list, int score)
{
    frame.MatchScore = score;
    frame.BestMatchingString = matchingPlateString;
    frame.WatchListMatchingNumber = watchNumber;
    frame.WatchListMatchingNumberUserComment = watchNumberComment;
    frame.ParentWatchList = (object)list;
}
/// <summary>
/// Returns the newest buffered jpeg for the named channel (or null if none),
/// reporting its timestamp, any pending plate reading, and the resolved channel
/// index through the out parameters. Out params default to " "/0 on failure.
/// </summary>
byte[] GetCurrentJpeg(string channel, out string timeStamp, out string currentPlateReading, out int channelIndex)
{
    timeStamp = " ";
    currentPlateReading = " ";
    channelIndex = 0;
    try
    {
        int c = m_FrameGenerator.GetChannelIndex(channel);
        if (c < 0)
        {
            m_Log.Log("GetCurrentJpeg received bad channel index: " + c.ToString(), ErrorLog.LOG_TYPE.FATAL);
            return null;
        }
        channelIndex = c;
        lock (m_FrameLock)
        {
            if (m_CurrentImageQ[c].Count == 0)
            {
                return null;
            }
            FRAME currentFrame = m_CurrentImageQ[c].Dequeue();
            timeStamp = currentFrame.TimeStamp.ToString(m_AppData.TimeFormatStringForFileNames);
            // is there an LPR result available at this time?
            FRAME lprResultFrame = m_CurrentPlateNumberQ[c].Dequeue();
            if (lprResultFrame != null)
            {
                // Join alternate readings with "^ ": the comma is the event-log
                // field delimiter, so it must not appear inside this value.
                currentPlateReading = string.Join("^ ", lprResultFrame.PlateNumberLatin);
            }
            return currentFrame.Jpeg;
        }
    }
    catch (Exception ex)
    {
        m_Log.Trace(ex, ErrorLog.LOG_TYPE.FATAL);
        return null;
    }
}
// Fans a freshly captured frame out to every registered consumer.
void OnNewFrameFromDevice(FRAME frame)
{
    // Snapshot the delegate so a concurrent unsubscribe cannot race the null check.
    var handler = OnNewFrame;
    if (handler != null)
    {
        handler(frame);
    }
}
// Refreshes the raw and smoothed FFT frames for the given playback time.
public void Update(int time)
{
    // The stored audio ends around 121 s; later times are ignored (presumably to
    // avoid out-of-range lookups in the frame tables — was "// err yeah").
    const int endOfTrackMs = 121000;
    if (time > endOfTrackMs)
    {
        return;
    }
    frame = Value(time);
    smoothframe = SmoothValue(time);
}
/// <summary>
/// Used by the image hand-editor in the Analysts Workstation application to
/// by-pass the LPREngine and send results to storage: stamps current time, serial
/// number, GPS position and station name onto the frame, then derives its file name.
/// </summary>
public FRAME CompleteFrameDataToByPassLPR(FRAME frame)
{
    frame.SerialNumber = m_FrameCount;
    frame.TimeStamp = DateTime.UtcNow;
    frame.GPSPosition = m_CurrentGPSPosition;
    frame.PSSName = "AnalystsWorkstation_" + m_AppData.ThisComputerName;
    // Derive the file name last, after all metadata is in place.
    frame.SetFileName();
    return frame;
}
// Keeps m_CurrentPlateNumberQ[channel] acting as a one-slot buffer: drop whatever
// is there and store the newest plate result for that channel.
void m_LPREngine_OnNewPlateEvent(FRAME frame)
{
    try
    {
        int channel = frame.SourceChannel;
        m_CurrentPlateNumberQ[channel].Dequeue(); // discard the previous single entry, if any
        m_CurrentPlateNumberQ[channel].Enqueue(frame);
    }
    catch (Exception ex)
    {
        m_Log.Trace(ex, ErrorLog.LOG_TYPE.FATAL);
    }
}
// Records `frame` as the best watch-list match so far — but only if its score
// beats the current best — and stamps the match details onto the frame itself.
public void AddFrame(FRAME frame, string matchingPlateString, int newScore, string alertString)
{
    if (newScore <= score)
    {
        return;
    }
    score = newScore;
    bestMatchPlateString = matchingPlateString;
    matchingFrame = frame;
    matchingFrame.BestMatchingString = matchingPlateString;
    matchingFrame.MatchScore = newScore;
    matchingFrame.ParentWatchList = (object)parentWatchList;
    matchingFrame.WatchListMatchingNumber = alertString;
}
// Queues a new LPR record for the DVR writer, and flags "getting behind" whenever
// the queue is more than half full so producers can react.
void OnLPRNewRecord(FRAME frame)
{
    m_NewLPRRecordQ.Enqueue(frame);
    m_AppData.DVRStoringLPRRecordsGettingBehind = m_NewLPRRecordQ.Count > m_LPRRecordQueLen / 2;
}
// Maintains m_CurrentImageQ[channel] as a short buffer of the most recent frames:
// once more than two entries are queued, the oldest is dropped before adding.
void NewImageCallBack(FRAME frame)
{
    try
    {
        int channel = frame.SourceChannel;
        if (m_CurrentImageQ[channel].Count > 2)
        {
            m_CurrentImageQ[channel].Dequeue(); // drop the oldest to keep the buffer small
        }
        m_CurrentImageQ[channel].Enqueue(frame);
    }
    catch (Exception ex)
    {
        m_Log.Trace(ex, ErrorLog.LOG_TYPE.FATAL);
    }
}
// Worker loop: forwards queued frames to consumers via OnNewImage until stopped,
// polling the queue every millisecond.
void PushFramesToConsumersLoop()
{
    while (!m_Stop)
    {
        Thread.Sleep(1);
        FRAME next = m_NewFramesToPushQ.Dequeue();
        if (next == null)
        {
            continue;
        }
        OnNewImage(next);
    }
}
// registered to receive frames where motion was detected from the previous frame
void OnRxNewMotionWasDetectedFrame(FRAME frame)
{
    // Hand the frame to the LPR thread; count a dropped frame if its queue is full.
    bool enqueued = m_LPRProcessQ.Enqueue(frame);
    if (!enqueued)
    {
        m_AppData.HealthStatistics[(int)APPLICATION_DATA.HEALTH_STATISTICS.LPR.LPR_DroppedFrames].HitMe++;
    }
    // Flag "getting behind" whenever the LPR queue is more than half full.
    m_AppData.LPRGettingBehind = m_LPRProcessQ.Count > m_LPRProcessQueSize / 2;

    // Store the frame meta data for later retrieval if the LPR finds plates.
    // No need to keep bitmaps/jpegs in memory: the DVR already wrote the jpeg to
    // disk and the frame meta data carries a reference to that file.
    FRAME newFrame = frame.Clone(false, false, false);
    newFrame.PlateNativeLanguage = "LATIN";
    // Keep knowledge of this frame around for a while so that, if LPR plate-number
    // group processing later finds numbers, the originating frame can be looked up
    // by serial number (only the file url is recorded into eventlog.txt).
    m_StoredFrameData.Add(newFrame.SerialNumber, newFrame);
}
// Receives partial frames from the S2255 capture device. The device delivers jpegs
// and bitmaps separately: a jpeg is simply cached per channel, while a bitmap
// triggers assembly of a complete FRAME (bitmap + last cached jpeg + metadata),
// luminance extraction, and dispatch to motion detection and all other consumers.
void OnReceiveNewImageFromS2255Device(FRAME partialFrame)
{
    if (partialFrame.Jpeg != null)
    {
        // this is a jpeg
        m_LastJPEGReceived[partialFrame.SourceChannel].SetJpeg(partialFrame.Jpeg);
    }
    else
    {
        // this is a bitmap - send it and the last jpeg received up the chain
        m_AppData.HealthStatistics[(int)APPLICATION_DATA.HEALTH_STATISTICS.FRAME_GENERATOR.FrameGen_FrameCnt].HitMe++;
        int chan = partialFrame.SourceChannel;
        FRAME frame = new FRAME(m_AppData);
        // Pair the fresh bitmap with the most recent jpeg for this channel and stamp
        // source name, capture time, frame serial number and GPS position.
        frame.SetNew(partialFrame.Bmp, m_LastJPEGReceived[partialFrame.SourceChannel].GetJpeg(), partialFrame.SourceName, DateTime.UtcNow, m_FrameCount, m_CurrentGPSPosition, m_ConsumerIDs.GetConsumerCount, partialFrame.SourceChannel);
        frame.PSSName = m_AppData.ThisComputerName;
        m_FrameCount++;
        // convert the bitmap format to a luminace 2-D array for image processing
        int[,] luminance = new int[frame.Bmp.Width, frame.Bmp.Height];
        getPixelsFromImageInY(frame.Bmp, ref luminance);
        frame.Luminance = luminance;
        // send to motion detection
        m_MotionDetectionQ.Enqueue(frame);
        m_AppData.HealthStatistics[(int)APPLICATION_DATA.HEALTH_STATISTICS.FRAME_GENERATOR.FrameGen_MotionDetectionPendingQ].HitMe = m_MotionDetectionQ.Count;
        // send to non-motion-detection consumers
        CONSUMER_PUSH push = new CONSUMER_PUSH();
        push.FrameToPush = frame;
        push.ConsumersToPush = m_Channels[chan].m_NewImageCallBackList;
        m_AllFramesConsumerPushQ.Enqueue(push);
        m_AppData.HealthStatistics[(int)APPLICATION_DATA.HEALTH_STATISTICS.FRAME_GENERATOR.FrameGen_NonMotionFramePushQ].HitMe = m_AllFramesConsumerPushQ.Count;
    }
}
// Returns the stored frame for `time`: scanning in order, this yields the FIRST
// frame whose time exceeds `time` (or the last frame if none does, or null when
// the list is empty). NOTE(review): unlike SmootherValue/SlowFalloffValue this
// does NOT step back one frame — confirm the "ceiling" behavior is intentional.
public FRAME Value(int time)
{
    FRAME result = null;
    foreach (FRAME f in frames)
    {
        result = f;
        if (f.time > time)
        {
            break;
        }
    }
    return result;
}
// prints each frame using the class toString() function and the rolling game
public void gameTostring()
{
    // First line: every frame's rolls, concatenated. (Loop variable renamed from
    // "FRAME", which shadowed the type name.)
    foreach (var bowlingFrame in this.FrameGame)
    {
        Console.Write(bowlingFrame.rollsToString());
    }
    Console.WriteLine();

    // Then one line per frame with its 1-based number and description.
    int frameNumber = 0;
    foreach (var bowlingFrame in this.FrameGame)
    {
        frameNumber++;
        Console.WriteLine("Frame " + frameNumber + " : " + bowlingFrame.toString());
    }
}