/// <summary>
/// Builds a Pixelmap from a decoded FITS image: clamps the pixel range, applies
/// pre-processing to a copy of the flat pixel array, renders the 8-bit display
/// bitmap and attaches timestamp/exposure and FITS header cards to the frame state.
/// </summary>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
/// <param name="pixelsFlat">Raw FITS pixels, row-flattened; kept as UnprocessedPixels.</param>
/// <param name="bitPix">FITS BITPIX (bits per pixel) value.</param>
/// <param name="hasUtcTimeStamps">True when the FITS file carries usable UTC timestamps.</param>
/// <param name="exposure">Exposure in seconds, if known. Assumed seconds from the *1000.0 conversion below — TODO confirm.</param>
/// <param name="timestamp">Central exposure time, if known.</param>
/// <param name="fitsImage">The source HDU, stored in FrameState.Tag.</param>
/// <param name="cards">FITS header cards copied into FrameState.AdditionalProperties.</param>
internal static Pixelmap BuildFitsPixelmap(int width, int height, uint[] pixelsFlat, int bitPix, bool hasUtcTimeStamps, double? exposure, DateTime? timestamp, BasicHDU fitsImage, Dictionary<string, string> cards)
{
    byte[] displayBitmapBytes = new byte[width * height];
    // 24bpp BMP pixel data plus BITMAPINFOHEADER (40) + BITMAPFILEHEADER (14) + 1 spare byte
    byte[] rawBitmapBytes = new byte[(width * height * 3) + 40 + 14 + 1];
    TangraCore.PreProcessors.EnsurePixelRange(pixelsFlat, width, height, bitPix, 0);

    // Pre-processing works on a copy so the raw pixels survive as UnprocessedPixels
    uint[] flatPixelsCopy = new uint[pixelsFlat.Length];
    Array.Copy(pixelsFlat, flatPixelsCopy, pixelsFlat.Length);
    TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(pixelsFlat, flatPixelsCopy, width, height, bitPix, 0 /* No normal value for FITS files */, exposure.HasValue ? (float)exposure.Value : 0);

    TangraCore.GetBitmapPixels(width, height, flatPixelsCopy, rawBitmapBytes, displayBitmapBytes, true, (ushort)bitPix, 0);

    Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, width, height);
    Pixelmap rv = new Pixelmap(width, height, bitPix, flatPixelsCopy, displayBitmap, displayBitmapBytes);
    rv.UnprocessedPixels = pixelsFlat;

    if (hasUtcTimeStamps)
    {
        rv.FrameState.CentralExposureTime = timestamp ?? DateTime.MinValue;
        rv.FrameState.ExposureInMilliseconds = exposure.HasValue ? (float)(exposure.Value * 1000.0) : 0;
    }

    rv.FrameState.Tag = fitsImage;
    rv.FrameState.AdditionalProperties = new SafeDictionary<string, object>();
    // Enumerate key/value pairs directly instead of indexing the dictionary per key (single lookup)
    foreach (var card in cards)
    {
        rv.FrameState.AdditionalProperties.Add(card.Key, card.Value);
    }

    return rv;
}
/// <summary>
/// Returns the Pixelmap for this FITS file: pre-processes a copy of the cached flat
/// pixels, renders the 8-bit display bitmap and copies the FITS header cards into
/// the frame state.
/// </summary>
/// <param name="index">Frame index; not used by this implementation (single image).</param>
public Pixelmap GetPixelmap(int index)
{
    byte[] displayBitmapBytes = new byte[m_Width * m_Height];
    // 24bpp BMP pixel data plus BITMAPINFOHEADER (40) + BITMAPFILEHEADER (14) + 1 spare byte
    byte[] rawBitmapBytes = new byte[Pixelmap.GetBitmapBIRGBPixelArraySize(24, m_Width, m_Height) + 40 + 14 + 1];

    // Pre-processing works on a copy so m_FlatPixels survives as UnprocessedPixels
    uint[] flatPixelsCopy = new uint[m_FlatPixels.Length];
    Array.Copy(m_FlatPixels, flatPixelsCopy, m_FlatPixels.Length);
    TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(m_FlatPixels, flatPixelsCopy, m_Width, m_Height, m_Bpp, 0 /* No normal value for FITS files */, m_Exposure);

    TangraCore.GetBitmapPixels(m_Width, m_Height, flatPixelsCopy, rawBitmapBytes, displayBitmapBytes, true, (ushort)m_Bpp, 0);

    Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, m_Width, m_Height);
    Pixelmap rv = new Pixelmap(m_Width, m_Height, m_Bpp, flatPixelsCopy, displayBitmap, displayBitmapBytes);
    rv.UnprocessedPixels = m_FlatPixels;

    if (m_Cards != null && m_Cards.Count > 0)
    {
        rv.FrameState.AdditionalProperties = new SafeDictionary<string, object>();
        // Enumerate key/value pairs directly instead of indexing the dictionary per key (single lookup)
        foreach (var card in m_Cards)
        {
            rv.FrameState.AdditionalProperties.Add(card.Key, card.Value);
        }
    }

    return rv;
}
/// <summary>
/// Renders the OCR engine's learnt digit patterns into their preview picture boxes.
/// Pattern index layout (from the original plotting order): 0..9 = digits '0'..'9',
/// 10 = 8/3 XOR pattern (pic83), 11 = 8/6 XOR pattern (pic86), 12 = 8/9 XOR pattern (pic89).
/// </summary>
/// <param name="ocrEngine">The IOTA-VTI OCR engine holding the learnt patterns.</param>
private void PlotDigitPatterns(IotaVtiOrcManaged ocrEngine)
{
    List<uint[]> patterns = ocrEngine.GetLearntDigitPatterns();
    // All 13 patterns must be present before anything is plotted (same guard as before)
    if (patterns.Count > 12)
    {
        // Target boxes in the same order as the pattern indexes; replaces 13 copy-pasted blocks
        var targets = new[]
        {
            picZero, picOne, picTwo, picThree, picFour,
            picFive, picSix, picSeven, picEight, picNine,
            pic83, pic86, pic89
        };
        for (int i = 0; i < targets.Length; i++)
        {
            Bitmap bmp = Pixelmap.ConstructBitmapFromBitmapPixels(patterns[i], ocrEngine.BlockWidth, ocrEngine.BlockHeight);
            targets[i].Image = bmp;
            targets[i].Update();
        }
    }
}
/// <summary>
/// Shows the OCR block under the clicked pixel in the block preview box.
/// Odd values of m_CurrentIndex select the second video field.
/// </summary>
private void picField_MouseDown(object sender, MouseEventArgs e)
{
    bool isOddField = m_CurrentIndex % 2 == 1;
    uint[] blockPixels = m_Processor.GetBlockAt(e.X, e.Y, isOddField);
    var preview = Pixelmap.ConstructBitmapFromBitmapPixels(blockPixels, m_Processor.BlockWidth, m_Processor.BlockHeight);
    picBlock.Image = preview;
    picBlock.Update();
}
/// <summary>
/// Creates a Pixelmap from a flat pixel array read from the ADV file, building the
/// 8-bit display bitmap by scaling each pixel according to the file's bit depth.
/// </summary>
/// <param name="pixels">Row-flattened pixel values at the file's native bit depth.</param>
public Pixelmap CreatePixelmap(uint[] pixels)
{
    byte[] displayBitmapBytes = new byte[m_AdvFile.Width * m_AdvFile.Height];
    for (int y = 0; y < m_AdvFile.Height; y++)
    {
        for (int x = 0; x < m_AdvFile.Width; x++)
        {
            int index = x + y * m_AdvFile.Width;
            if (m_AdvFile.DataBpp == 8)
            {
                // BUGFIX: was 'm_AdvFile.MaxPixelValue == 8', comparing a maximum pixel
                // VALUE against a bit depth; the parallel AdvImageData overload tests
                // the bit depth (BitsPerPixel == 8) here.
                displayBitmapBytes[index] = (byte)((pixels[index] & 0xFF));
            }
            else if (m_AdvFile.DataBpp == 12)
            {
                displayBitmapBytes[index] = (byte)((pixels[index] >> 4));
            }
            else if (m_AdvFile.DataBpp == 14)
            {
                displayBitmapBytes[index] = (byte)((pixels[index] >> 6));
            }
            else if (m_AdvFile.DataBpp == 16)
            {
                if (m_AdvFile.MaxPixelValue > 0)
                {
                    // Normalise against the known maximum for best display contrast
                    displayBitmapBytes[index] = (byte)((255.0 * pixels[index] / m_AdvFile.MaxPixelValue));
                }
                else
                {
                    displayBitmapBytes[index] = (byte)((pixels[index] >> 8));
                }
            }
            else
            {
                // Generic fall-back: drop the (DataBpp - 8) least significant bits
                displayBitmapBytes[index] = (byte)((pixels[index] >> (m_AdvFile.DataBpp - 8)));
            }
        }
    }
    Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, (int)m_AdvFile.Width, (int)m_AdvFile.Height);
    Pixelmap rv = new Pixelmap((int)m_AdvFile.Width, (int)m_AdvFile.Height, m_AdvFile.DataBpp, pixels, displayBitmap, displayBitmapBytes);
    return rv;
}
/// <summary>
/// Creates a Pixelmap from a 2D ADV image-data buffer, copying the raw values into a
/// flat array and deriving an 8-bit display byte per pixel from the file's bit depth.
/// </summary>
/// <param name="imageData">Decoded ADV image data indexed as [x, y].</param>
public Pixelmap CreatePixelmap(AdvImageData imageData)
{
    uint[] pixels = new uint[Width * Height];
    byte[] displayBitmapBytes = new byte[Width * Height];

    for (int y = 0; y < Height; y++)
    {
        for (int x = 0; x < Width; x++)
        {
            uint raw = imageData.ImageData[x, y];
            pixels[x + y * Width] = raw;

            byte display;
            if (BitsPerPixel == 8)
            {
                display = (byte)(raw & 0xFF);
            }
            else if (BitsPerPixel == 12)
            {
                display = (byte)(raw >> 4);
            }
            else if (BitsPerPixel == 14)
            {
                display = (byte)(raw >> 6);
            }
            else if (BitsPerPixel == 16)
            {
                // Prefer normalising against the explicit 16-bit normalisation value when present
                display = Adv16NormalisationValue.HasValue
                    ? (byte)(255.0 * raw / Adv16NormalisationValue.Value)
                    : (byte)(raw >> 8);
            }
            else
            {
                // Generic fall-back: drop the (BitsPerPixel - 8) least significant bits
                display = (byte)(raw >> (BitsPerPixel - 8));
            }

            displayBitmapBytes[x + y * Width] = display;
        }
    }

    Bitmap displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, (int)Width, (int)Height);
    Pixelmap rv = new Pixelmap((int)Width, (int)Height, BitsPerPixel, pixels, displayBitmap, displayBitmapBytes);
    return rv;
}
/// <summary>
/// Draws the aperture preview: re-centers on the measured object via a PSF fit on the
/// current frame, copies an 85x85 cutout and overlays the aperture, gap and annulus rings.
/// </summary>
private void PlotAperturePreview()
{
    if (m_MeasurementContext.ObjectToMeasure != null)
    {
        float x0 = m_MeasurementContext.ObjectToMeasure.X0;
        float y0 = m_MeasurementContext.ObjectToMeasure.Y0;

        // x0 and y0 were measured on the first frame but we may have moved to a different frame due to positioning to the first frame of integration period
        // so we determine the position of the object on the current frame in order to draw the aperture nicely centered
        var astroImage = m_VideoController.GetCurrentAstroImage(false); // hoisted - was fetched twice
        var fit = new PSFFit((int)x0, (int)y0);
        fit.Fit(astroImage.GetMeasurableAreaPixels((int)x0, (int)y0));
        if (fit.IsSolved)
        {
            x0 = (float)fit.XCenter;
            y0 = (float)fit.YCenter;
        }

        byte[,] bmpPixels = astroImage.GetMeasurableAreaDisplayBitmapPixels((int)x0, (int)y0, 85);
        // The cutout bitmap is only a source for DrawImage, so dispose it when done (was leaked)
        using (Bitmap bmp = Pixelmap.ConstructBitmapFromBitmapPixels(bmpPixels, 85, 85))
        using (Graphics g = Graphics.FromImage(pboxAperturePreview.Image))
        {
            g.DrawImage(bmp, 0, 0);

            // Sub-pixel centre within the 85x85 cutout (42 is the cutout half-size)
            float xCenter = (x0 - (int)x0) + 42;
            float yCenter = (y0 - (int)y0) + 42;

            // Inner aperture, then aperture+gap, then aperture+gap+annulus rings
            float radius = (float)nudAperture.Value;
            g.DrawEllipse(Pens.YellowGreen, xCenter - radius, yCenter - radius, 2 * radius, 2 * radius);
            radius = (float)(nudAperture.Value + nudGap.Value);
            g.DrawEllipse(Pens.YellowGreen, xCenter - radius, yCenter - radius, 2 * radius, 2 * radius);
            radius = (float)(nudAperture.Value + nudGap.Value + nudAnnulus.Value);
            g.DrawEllipse(Pens.YellowGreen, xCenter - radius, yCenter - radius, 2 * radius, 2 * radius);
            g.Save();
        }

        pboxAperturePreview.Invalidate();
    }
}
/// <summary>
/// Produces an integrated (stacked) frame from the native video core, applies 8-bit
/// pre-processing and returns both the processed and original pixels plus a display bitmap.
/// Falls back to a 100x100 "Invalid file." placeholder when no file is loaded.
/// </summary>
/// <param name="startFrameNo">First frame of the integration interval.</param>
/// <param name="framesToIntegrate">Number of frames to stack.</param>
/// <param name="isSlidingIntegration">True for sliding-window integration.</param>
/// <param name="isMedianAveraging">True to use median rather than mean averaging.</param>
/// <param name="pixels">Out: integrated pixels after pre-processing.</param>
/// <param name="originalPixels">Out: integrated pixels before pre-processing.</param>
/// <param name="bitmap">Out: display bitmap built from the processed pixels.</param>
/// <param name="bitmapBytes">Out: 8-bit display bytes, one per pixel.</param>
public static void GetIntegratedFrame(int startFrameNo, int framesToIntegrate, bool isSlidingIntegration, bool isMedianAveraging, out uint[] pixels, out uint[] originalPixels, out Bitmap bitmap, out byte[] bitmapBytes)
{
    if (s_fileInfo != null)
    {
        originalPixels = new uint[s_fileInfo.Width * s_fileInfo.Height];
        pixels = new uint[s_fileInfo.Width * s_fileInfo.Height];
        // 24bpp BMP pixel data plus BITMAPINFOHEADER (40) + BITMAPFILEHEADER (14) + 1 spare byte
        byte[] bitmapPixels = new byte[s_fileInfo.Width * s_fileInfo.Height * 3 + 40 + 14 + 1];
        bitmapBytes = new byte[s_fileInfo.Width * s_fileInfo.Height];
        // Native call fills 'pixels' with the integrated frame
        TangraVideoGetIntegratedFrame(startFrameNo, framesToIntegrate, isSlidingIntegration, isMedianAveraging, pixels, bitmapPixels, bitmapBytes);
        byte[] rawBitmapBytes = new byte[(s_fileInfo.Width * s_fileInfo.Height * 3) + 40 + 14 + 1];
        // Keep a pre-processing snapshot of the integrated pixels
        Array.Copy(pixels, originalPixels, pixels.Length);
        TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(originalPixels, pixels, s_fileInfo.Width, s_fileInfo.Height, 8, 0 /* No normal value for FITS files */, 0 /* No exposure support for 8 bit darks. They must be same exposure */);
        TangraCore.GetBitmapPixels(s_fileInfo.Width, s_fileInfo.Height, pixels, rawBitmapBytes, bitmapBytes, true, 8, 0);
        bitmap = Pixelmap.ConstructBitmapFromBitmapPixels(bitmapBytes, s_fileInfo.Width, s_fileInfo.Height);
    }
    else
    {
        // No open file: return a placeholder image and empty 100x100 buffers
        bitmap = new Bitmap(100, 100);
        using (Graphics g = Graphics.FromImage(bitmap))
        {
            g.Clear(Color.White);
            g.DrawString("Invalid file.", s_ErrorFont, Brushes.Red, 10, 10);
            g.Save();
        }
        bitmapBytes = new byte[100 * 100];
        pixels = new uint[100 * 100];
        originalPixels = new uint[100 * 100];
    }
}
/// <summary>
/// Reads one ADV/AAV frame through the native core and builds its Pixelmap: pixels,
/// display bitmap (with optional split-fields OSD handling for AAV) and frame status
/// (GPS fix, user command, system error, NTP-derived exposure).
/// </summary>
/// <param name="index">Frame index; must be below m_FirstFrame + m_CountFrames.</param>
/// <exception cref="ApplicationException">When index is past the last frame.</exception>
public Pixelmap GetPixelmap(int index)
{
    if (index >= m_FirstFrame + m_CountFrames)
    {
        throw new ApplicationException("Invalid frame position: " + index);
    }
    uint[] pixels = new uint[m_Width * m_Height];
    uint[] unprocessedPixels = new uint[m_Width * m_Height];
    byte[] displayBitmapBytes = new byte[m_Width * m_Height];
    // 24bpp BMP pixel data plus BITMAPINFOHEADER (40) + BITMAPFILEHEADER (14) + 1 spare byte
    byte[] rawBitmapBytes = new byte[(m_Width * m_Height * 3) + 40 + 14 + 1];
    var frameInfo = new AdvFrameInfoNative();
    // 256 x 16-byte slots for the status-channel string buffers
    byte[] gpsFix = new byte[256 * 16];
    byte[] userCommand = new byte[256 * 16];
    byte[] systemError = new byte[256 * 16];
    // The native reader is not re-entrant; serialise access
    lock (m_SyncLock)
    {
        TangraCore.ADVGetFrame(index, pixels, unprocessedPixels, rawBitmapBytes, displayBitmapBytes, frameInfo, gpsFix, userCommand, systemError);
    }
    m_CurrentFrameInfo = new AdvFrameInfo(frameInfo);
    m_CurrentFrameInfo.UserCommandString = AdvFrameInfo.GetStringFromBytes(userCommand);
    m_CurrentFrameInfo.SystemErrorString = AdvFrameInfo.GetStringFromBytes(systemError);
    m_CurrentFrameInfo.GPSFixString = AdvFrameInfo.GetStringFromBytes(gpsFix);
    // AAV split-fields OSD: only when both OSD line bounds are set (product != 0)
    if (m_Engine == "AAV" && m_CurrentFrameInfo.IntegratedFrames > 0 && TangraConfig.Settings.AAV.SplitFieldsOSD && m_OsdFirstLine * m_OsdLastLine != 0)
    {
        TangraCore.BitmapSplitFieldsOSD(rawBitmapBytes, m_OsdFirstLine, m_OsdLastLine);
    }
    // No exposure recorded but an NTP timestamp exists: derive the exposure (in 0.1 ms units)
    // from the NTP end-times of this frame and the next one
    if (frameInfo.HasNtpTimeStamp && m_CurrentFrameInfo.Exposure10thMs == 0 && index + 1 < m_FirstFrame + m_CountFrames)
    {
        lock (m_SyncLock)
        {
            TangraCore.ADVGetFrameStatusChannel(index + 1, frameInfo, gpsFix, userCommand, systemError);
        }
        if (frameInfo.HasNtpTimeStamp)
        {
            m_CurrentFrameInfo.Exposure10thMs = (int)Math.Round(new TimeSpan(frameInfo.EndExposureNtpTimeStamp.Ticks - m_CurrentFrameInfo.EndExposureNtpTimeStamp.Ticks).TotalMilliseconds * 10);
        }
    }
    using (MemoryStream memStr = new MemoryStream(rawBitmapBytes))
    {
        Bitmap displayBitmap;
        if (m_Engine == "AAV" && m_CurrentFrameInfo.IntegratedFrames == 0)
        {
            // This is a VTI Split reference frame. Put some mark on it to mark it as such??
            displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(pixels, m_Width, m_Height);
            for (int i = 0; i < pixels.Length; i++)
            {
                displayBitmapBytes[i] = (byte)pixels[i];
            }
        }
        else
        {
            try
            {
                // rawBitmapBytes holds a complete BMP stream produced by the native core
                displayBitmap = (Bitmap)Bitmap.FromStream(memStr);
            }
            catch (Exception ex)
            {
                // Fall back to a blank bitmap rather than failing frame retrieval
                Trace.WriteLine(ex.GetFullStackTrace());
                displayBitmap = new Bitmap(m_Width, m_Height);
            }
        }
        var rv = new Pixelmap(m_Width, m_Height, m_BitPix, pixels, displayBitmap, displayBitmapBytes);
        rv.SetMaxSignalValue(m_Aav16NormVal);
        rv.FrameState = GetCurrentFrameState(index);
        rv.UnprocessedPixels = unprocessedPixels;
        return(rv);
    }
}
/// <summary>
/// Collects OCR calibration images and diagnostic data into a temp folder, zips it and
/// submits it with a descriptive report to the Tangra error-report web service.
/// Temp artifacts are removed (best-effort) in the finally block.
/// </summary>
/// <param name="videoController">Provides file name, frame range and video type for the report.</param>
/// <param name="errorMessage">Leading error text of the report.</param>
/// <param name="timestampOCR">The OCR engine; supplies frame/OSD dimensions and calibration errors.</param>
/// <param name="images">Named pixel buffers; Item2/Item3 carry the buffer's own width/height.</param>
/// <param name="lastUnmodifiedImage">Optional full unmodified frame (frameWidth x frameHeight).</param>
/// <param name="ocdDebugImage">Optional pre-rendered OCR debug bitmap.</param>
/// <param name="email">Contact email included in the report body.</param>
public static void SendOcrErrorReport(VideoController videoController, string errorMessage, ITimestampOcr timestampOCR, Dictionary<string, Tuple<uint[], int, int>> images, uint[] lastUnmodifiedImage, Bitmap ocdDebugImage, string email)
{
    string tempDir = Path.GetFullPath(Path.GetTempPath() + @"\" + Guid.NewGuid().ToString());
    string tempFile = Path.GetTempFileName();
    try
    {
        Directory.CreateDirectory(tempDir);
        int fieldAreaWidth = timestampOCR.InitializationData.OSDFrame.Width;
        int fieldAreaHeight = timestampOCR.InitializationData.OSDFrame.Height;
        int frameWidth = timestampOCR.InitializationData.FrameWidth;
        int frameHeight = timestampOCR.InitializationData.FrameHeight;
        foreach (string key in images.Keys)
        {
            uint[] pixels = images[key].Item1;
            Bitmap img = null;
            // Infer each buffer's dimensions from its length: OSD field area, full frame,
            // or the explicit width/height carried in the tuple
            if (pixels.Length == fieldAreaWidth * fieldAreaHeight)
            {
                img = Pixelmap.ConstructBitmapFromBitmapPixels(pixels, fieldAreaWidth, fieldAreaHeight);
            }
            else if (pixels.Length == frameWidth * frameHeight)
            {
                img = Pixelmap.ConstructBitmapFromBitmapPixels(pixels, frameWidth, frameHeight);
            }
            else if (pixels.Length == images[key].Item2 * images[key].Item3)
            {
                img = Pixelmap.ConstructBitmapFromBitmapPixels(pixels, images[key].Item2, images[key].Item3);
            }
            if (img != null)
            {
                img.Save(Path.GetFullPath(string.Format(@"{0}\{1}", tempDir, key)), ImageFormat.Bmp);
            }
        }
        if (lastUnmodifiedImage != null)
        {
            Bitmap fullFrame = Pixelmap.ConstructBitmapFromBitmapPixels(lastUnmodifiedImage, frameWidth, frameHeight);
            fullFrame.Save(Path.GetFullPath(string.Format(@"{0}\full-frame.bmp", tempDir)), ImageFormat.Bmp);
        }
        if (ocdDebugImage != null)
        {
            ocdDebugImage.Save(Path.GetFullPath(string.Format(@"{0}\ocr-debug-image.bmp", tempDir)), ImageFormat.Bmp);
        }
        // Zip the collected artifacts and read them back as the attachment payload
        ZipUnzip.Zip(tempDir, tempFile, false);
        byte[] attachment = File.ReadAllBytes(tempFile);
        var binding = new BasicHttpBinding();
        var address = new EndpointAddress("http://www.tangra-observatory.org/TangraErrors/ErrorReports.asmx");
        var client = new TangraService.ServiceSoapClient(binding, address);
        string errorReportBody = errorMessage + "\r\n\r\n" + "OCR OSD Engine: " + timestampOCR.NameAndVersion() + "\r\n" + "OSD Type: " + timestampOCR.OSDType() + "\r\n" + "Frames Range: [" + videoController.VideoFirstFrame + ", " + videoController.VideoLastFrame + "]\r\n" + "File Name: " + videoController.FileName + "\r\n" + "Video File Type:" + videoController.CurrentVideoFileType + "\r\n\r\n" + "Contact Email: " + email + "\r\n\r\n" + frmSystemInfo.GetFullVersionInfo();
        List<string> errorMesages = timestampOCR.GetCalibrationErrors();
        if (errorMesages != null && errorMesages.Count > 0)
        {
            errorReportBody += "\r\n\r\n" + string.Join("\r\n", errorMesages);
        }
        client.ReportErrorWithAttachment(
            errorReportBody,
            string.Format("CalibrationFrames-{0}.zip", Guid.NewGuid().ToString()),
            attachment);
    }
    finally
    {
        // Best-effort cleanup: swallow failures so cleanup never masks the original outcome
        if (Directory.Exists(tempDir))
        {
            try
            {
                Directory.Delete(tempDir, true);
            }
            catch
            { }
        }
        if (File.Exists(tempFile))
        {
            try
            {
                File.Delete(tempFile);
            }
            catch
            { }
        }
    }
}
/// <summary>
/// Reads one frame from the native video core, applies 8-bit pre-processing and returns
/// processed pixels, the pre-processing snapshot and a display bitmap. On failure it tries
/// to salvage a bitmap from the already-filled display bytes, otherwise it renders the
/// exception message onto a blank frame. A 100x100 placeholder is returned when no file is open.
/// </summary>
/// <param name="frameNo">Frame number to read.</param>
/// <param name="pixels">Out: frame pixels after pre-processing.</param>
/// <param name="originalPixels">Out: pixels before pre-processing (stays null when no file is open).</param>
/// <param name="videoFrame">Out: display bitmap (or error/placeholder bitmap).</param>
/// <param name="bitmapBytes">Out: 8-bit display bytes, one per pixel.</param>
public static void GetFrame(int frameNo, out uint[] pixels, out uint[] originalPixels, out Bitmap videoFrame, out byte[] bitmapBytes)
{
    originalPixels = null;
    if (s_fileInfo != null)
    {
        int width = s_fileInfo.Width;
        int height = s_fileInfo.Height;
        pixels = new uint[width * height];
        originalPixels = new uint[width * height];
        // 24bpp BMP pixel data plus BITMAPINFOHEADER (40) + BITMAPFILEHEADER (14) + 1 spare byte
        byte[] bitmapPixels = new byte[width * height * 3 + 40 + 14 + 1];
        bitmapBytes = new byte[width * height];
        int rv = -1;
        try
        {
            // The native reader is not re-entrant; serialise access
            lock (s_SyncRoot)
            {
                rv = TangraVideoGetFrame(frameNo, pixels, bitmapPixels, bitmapBytes);
                if (rv == 0)
                {
                    byte[] rawBitmapBytes = new byte[(width * height * 3) + 40 + 14 + 1];
                    // Keep a pre-processing snapshot of the raw pixels
                    Array.Copy(pixels, originalPixels, pixels.Length);
                    TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(originalPixels, pixels, width, height, 8, 0 /* No normal value for FITS files */, 0 /* No exposure support for 8 bit darks. They must be same exposure */);
                    TangraCore.GetBitmapPixels(width, height, pixels, rawBitmapBytes, bitmapBytes, true, 8, 0);
                    videoFrame = Pixelmap.ConstructBitmapFromBitmapPixels(bitmapBytes, width, height);
                }
                else
                {
                    throw new InvalidOperationException("The core returned an error when trying to get a frame. Error code: " + rv.ToString());
                }
            }
        }
        catch (Exception ex)
        {
            // rv == 0 means the native read succeeded and only post-processing failed:
            // attempt to salvage a display bitmap from the already-filled display bytes
            if (rv == 0)
            {
                try
                {
                    videoFrame = Pixelmap.ConstructBitmapFromBitmapPixels(bitmapBytes, width, height);
                    return;
                }
                catch (Exception ex2)
                {
                    Trace.WriteLine(ex2.GetFullStackTrace());
                }
            }
            // Last resort: render the error message onto a blank frame
            videoFrame = new Bitmap(width, height);
            using (Graphics g = Graphics.FromImage(videoFrame))
            {
                g.Clear(Color.White);
                g.DrawString(ex.Message, s_ErrorFont, Brushes.Red, 10, 10);
                g.Save();
            }
        }
    }
    else
    {
        // No open file: return a placeholder image and empty 100x100 buffers
        videoFrame = new Bitmap(100, 100);
        using (Graphics g = Graphics.FromImage(videoFrame))
        {
            g.Clear(Color.White);
            g.DrawString("Invalid file.", s_ErrorFont, Brushes.Red, 10, 10);
            g.Save();
        }
        bitmapBytes = new byte[100 * 100];
        pixels = new uint[100 * 100];
    }
}
/// <summary>
/// Reads a frame from either the main (streamId 0) or calibration (streamId 1) ADV
/// stream, applies pre-processing and builds the Pixelmap with its display bitmap
/// (using the split-fields OSD BMP path for qualifying AAV files).
/// </summary>
/// <param name="index">Frame index within the chosen stream.</param>
/// <param name="streamId">0 = main stream, 1 = calibration stream.</param>
/// <returns>The frame's Pixelmap, or null when the main stream has no frames.</returns>
/// <exception cref="ArgumentOutOfRangeException">For any other streamId.</exception>
/// <exception cref="ApplicationException">When the returned pixel buffer size is unexpected.</exception>
public Pixelmap GetPixelmap(int index, int streamId)
{
    // NOTE: 'MainSteamInfo' (sic) is the third-party library's member name
    if (m_AdvFile.MainSteamInfo.FrameCount == 0)
    {
        return(null);
    }
    uint[] pixels;
    uint[] unprocessedPixels = new uint[m_Width * m_Height];
    byte[] displayBitmapBytes = new byte[m_Width * m_Height];
    // 24bpp BMP pixel data plus BITMAPINFOHEADER (40) + BITMAPFILEHEADER (14) + 1 spare byte
    byte[] rawBitmapBytes = new byte[(m_Width * m_Height * 3) + 40 + 14 + 1];
    Adv.AdvFrameInfo advFrameInfo;
    // The file reader is not re-entrant; serialise access
    lock (m_SyncLock)
    {
        if (streamId == 0)
        {
            pixels = m_AdvFile.GetMainFramePixels((uint)index, out advFrameInfo);
        }
        else if (streamId == 1)
        {
            pixels = m_AdvFile.GetCalibrationFramePixels((uint)index, out advFrameInfo);
        }
        else
        {
            throw new ArgumentOutOfRangeException("streamId");
        }
        if (unprocessedPixels.Length != pixels.Length)
        {
            throw new ApplicationException("ADV Buffer Error");
        }
        // Snapshot taken before pre-processing mutates 'pixels'
        Array.Copy(pixels, unprocessedPixels, pixels.Length);
    }
    TangraCore.PreProcessors.ApplyPreProcessingPixelsOnly(unprocessedPixels, pixels, m_Width, m_Height, m_BitPix, m_MaxPixelValue, (float)(advFrameInfo.UtcExposureMilliseconds / 1000.0));
    TangraCore.GetBitmapPixels(Width, Height, pixels, rawBitmapBytes, displayBitmapBytes, true, (ushort)BitPix, m_MaxPixelValue);
    Bitmap displayBitmap = null;
    // AAV split-fields OSD: only when both OSD line bounds are set (product != 0)
    if (m_AAVVersion != null && m_IntegratedAAVFrames > 0 && TangraConfig.Settings.AAV.SplitFieldsOSD && m_OsdFirstLine * m_OsdLastLine != 0)
    {
        TangraCore.BitmapSplitFieldsOSD(rawBitmapBytes, m_OsdFirstLine, m_OsdLastLine);
        using (MemoryStream memStr = new MemoryStream(rawBitmapBytes))
        {
            try
            {
                // rawBitmapBytes holds a complete BMP stream after the OSD split
                displayBitmap = (Bitmap)Bitmap.FromStream(memStr);
            }
            catch (Exception ex)
            {
                // Fall back to a blank bitmap rather than failing frame retrieval
                Trace.WriteLine(ex.GetFullStackTrace());
                displayBitmap = new Bitmap(m_Width, m_Height);
            }
        }
    }
    else
    {
        displayBitmap = Pixelmap.ConstructBitmapFromBitmapPixels(displayBitmapBytes, Width, Height);
    }
    Pixelmap rv = new Pixelmap(Width, Height, BitPix, pixels, displayBitmap, displayBitmapBytes);
    rv.SetMaxSignalValue(m_MaxPixelValue);
    rv.FrameState = GetCurrentFrameState(advFrameInfo);
    rv.UnprocessedPixels = unprocessedPixels;
    return(rv);
}
/// <summary>
/// Returns the re-interlaced Pixelmap for a frame. Depending on Mode the fields are
/// swapped, shifted by one field (combining this frame with the next, with a one-frame
/// cache to avoid re-reading), or passed through to the base stream unchanged.
/// </summary>
/// <param name="index">Requested frame; clamped into [FirstFrame, LastFrame].</param>
/// <exception cref="NotSupportedException">For an unrecognised Mode value.</exception>
public Pixelmap GetPixelmap(int index)
{
    // Clamp the request into the valid frame range
    if (index < FirstFrame)
    {
        index = FirstFrame;
    }
    if (index > LastFrame)
    {
        index = LastFrame;
    }
    uint[] pixels;
    uint[] originalPixels;
    Bitmap videoFrame;
    byte[] bitmapBytes;
    if (Mode == ReInterlaceMode.SwapFields)
    {
        TangraVideo.GetFrame(index, out pixels, out originalPixels, out videoFrame, out bitmapBytes);
        // 24bpp BMP pixel data plus BITMAPINFOHEADER (40) + BITMAPFILEHEADER (14) + 1 spare byte
        byte[] bitmapPixels = new byte[Width * Height * 3 + 40 + 14 + 1];
        TangraCore.SwapVideoFields(pixels, originalPixels, Width, Height, bitmapPixels, bitmapBytes);
        videoFrame = Pixelmap.ConstructBitmapFromBitmapPixels(bitmapBytes, Width, Height);
        var rv = new Pixelmap(Width, Height, 8, pixels, videoFrame, bitmapBytes);
        rv.UnprocessedPixels = originalPixels;
        return(rv);
    }
    else if (Mode == ReInterlaceMode.ShiftOneField || Mode == ReInterlaceMode.SwapAndShiftOneField)
    {
        uint[] pixels2;
        uint[] originalPixels2;
        Bitmap videoFrame2;
        byte[] bitmapBytes2;
        // Reuse the buffers cached from the previous call's 'next frame' when it matches
        if (m_LastPrevFrameId == index)
        {
            pixels = m_LastPrevFramePixels;
            originalPixels = m_LastPrevFrameOriginalPixels;
            bitmapBytes = m_LastPrevFrameBitmapBytes;
        }
        else
        {
            TangraVideo.GetFrame(index, out pixels, out originalPixels, out videoFrame, out bitmapBytes);
        }
        // Field shifting combines this frame with the following one
        TangraVideo.GetFrame(index + 1, out pixels2, out originalPixels2, out videoFrame2, out bitmapBytes2);
        // Cache the 'next frame' buffers so a sequential read of index+1 skips one native call
        m_LastPrevFrameId = index + 1;
        m_LastPrevFramePixels = pixels2;
        m_LastPrevFrameOriginalPixels = originalPixels2;
        m_LastPrevFrameBitmapBytes = bitmapBytes2;
        byte[] bitmapPixels = new byte[Width * Height * 3 + 40 + 14 + 1];
        TangraCore.ShiftVideoFields(pixels, originalPixels, pixels2, originalPixels2, Width, Height, m_ShiftMode, bitmapPixels, bitmapBytes);
        videoFrame = Pixelmap.ConstructBitmapFromBitmapPixels(bitmapBytes, Width, Height);
        var rv = new Pixelmap(Width, Height, 8, pixels, videoFrame, bitmapBytes);
        rv.UnprocessedPixels = originalPixels;
        return(rv);
    }
    else if (Mode == ReInterlaceMode.None)
    {
        // Pass-through: no re-interlacing requested
        return(m_BaseStream.GetPixelmap(index));
    }
    else
    {
        throw new NotSupportedException();
    }
}
/// <summary>
/// Renders each learnt digit pattern (digits 0-9 plus the 8-vs-6/9/3 XOR patterns)
/// into its preview picture box, skipping patterns that are null or empty.
/// </summary>
private void PlotDigitPatterns()
{
    // One helper replaces 13 copy-pasted null-check/construct/assign/update blocks
    Action<uint[], PictureBox> plot = (pattern, box) =>
    {
        if (pattern != null && pattern.Length > 0)
        {
            Bitmap bmp = Pixelmap.ConstructBitmapFromBitmapPixels(pattern, m_Processor.BlockWidth, m_Processor.BlockHeight);
            box.Image = bmp;
            box.Update();
        }
    };

    plot(m_Processor.ZeroDigitPattern, picZero);
    plot(m_Processor.OneDigitPattern, picOne);
    plot(m_Processor.TwoDigitPattern, picTwo);
    plot(m_Processor.ThreeDigitPattern, picThree);
    plot(m_Processor.FourDigitPattern, picFour);
    plot(m_Processor.FiveDigitPattern, picFive);
    plot(m_Processor.SixDigitPattern, picSix);
    plot(m_Processor.SevenDigitPattern, picSeven);
    plot(m_Processor.EightDigitPattern, picEight);
    plot(m_Processor.NineDigitPattern, picNine);
    // XOR patterns keep their original box mapping: 6^8 -> pic86, 9^8 -> pic89, 3^8 -> pic83
    plot(m_Processor.SixEightXorPattern, pic86);
    plot(m_Processor.NineEightXorPattern, pic89);
    plot(m_Processor.ThreeEightXorPattern, pic83);
}
/// <summary>
/// Asynchronously parses the whole AAV file and populates the time-analysis data:
/// per-frame deltas between the OCR reference time and the System/SystemFileTime/NTP
/// timestamps (with outlier detection), debug frames carrying embedded images, and
/// system utilisation (CPU/disk/memory) samples. Progress is reported via the callback
/// as (current, total); (0, 0) signals completion.
/// </summary>
/// <param name="progressCallback">Receives (framesProcessed, totalFrames) updates.</param>
public void Initialize(Action<int, int> progressCallback)
{
    Task.Run(() =>
    {
        progressCallback(0, m_Aav.CountFrames);

        // Opened only to read metadata (tag definitions, index, sections); the actual
        // frame bytes are read below through a separate FileStream.
        var aavFile = AdvFile.OpenFile(m_Aav.FileName);
        aavFile.Close();

        // This is the system timestamp retrieved by the system when the frame was saved to disk.
        var SYSTEM_TIME_TAG = aavFile.StatusSection.TagDefinitions.SingleOrDefault(x => x.Name == "SystemTime");
        // This is the system timestamp retrieved by the system when the frame was received.
        var SYSTEM_TIME_FT_TAG = aavFile.StatusSection.TagDefinitions.SingleOrDefault(x => x.Name == "SystemTimeFileTime");
        // This is the OCR-ed time with the full precision (0.1 ms for IOTA-VTI) rather than only the 1 ms from the AAVv1 timestamp
        var OCR_TIME_TAG = aavFile.StatusSection.TagDefinitions.SingleOrDefault(x => x.Name == "OcrTime");
        // This is the NTP timestamp retrieved by the system when the frame was received. It has been corrected for the configured 'Calibration Correction' in OccuRec
        var NTP_TIME_TAG = aavFile.StatusSection.TagDefinitions.SingleOrDefault(x => x.Name == "NTPEndTimestamp");
        var NTP_ERROR_TAG = aavFile.StatusSection.TagDefinitions.SingleOrDefault(x => x.Name == "NTPTimestampError");
        var OCR_DEBUG_1_TAG = aavFile.StatusSection.TagDefinitions.SingleOrDefault(x => x.Name == "StartFrameTimestamp");
        var OCR_DEBUG_2_TAG = aavFile.StatusSection.TagDefinitions.SingleOrDefault(x => x.Name == "EndFrameTimestamp");
        var CPU_USAGE_TAG = aavFile.StatusSection.TagDefinitions.SingleOrDefault(x => x.Name == "CpuUtilisation");
        var DISK_USAGE_TAG = aavFile.StatusSection.TagDefinitions.SingleOrDefault(x => x.Name == "DisksUtilisation");
        var FREE_MEMORY_TAG = aavFile.StatusSection.TagDefinitions.SingleOrDefault(x => x.Name == "FreeMemoryMb");

        // Running extremes accumulated over all non-outlier frames
        float maxDeltaSys = 0;
        float minDeltaSys = 0;
        float maxDeltaSysF = 0;
        float minDeltaSysF = 0;
        float maxDeltaNtp = 0;
        float minDeltaNtp = 0;
        float maxNtpError = 0;
        float minNtpError = 0;
        float maxFreeMemory = 0;
        float minFreeMemory = float.MaxValue;
        float maxDiskUsage = 0;

        using (FileStream file = new FileStream(m_Aav.FileName, FileMode.Open, FileAccess.Read, FileShare.Read))
        using (BinaryReader reader = new BinaryReader(file))
        {
            int i = 0;
            string tagVal;
            // Entries seen since the last utilisation sample; back-filled when one arrives
            List<TimeAnalyserEntry> entriesWithNoUtil = new List<TimeAnalyserEntry>();
            foreach (var idx in aavFile.Index.Index)
            {
                file.Seek(idx.Offset, SeekOrigin.Begin);
                // Each frame record starts with the 0xEE0122FF magic marker
                uint frameDataMagic = reader.ReadUInt32();
                Trace.Assert(frameDataMagic == 0xEE0122FF);
                byte[] data = reader.ReadBytes((int)idx.Length);
                // Read the timestamp and exposure
                // (little-endian 64-bit ms-since-2010 followed by 32-bit exposure in 0.1 ms)
                long frameTimeMsSince2010 = (long)data[0] + (((long)data[1]) << 8) + (((long)data[2]) << 16) + (((long)data[3]) << 24) + (((long)data[4]) << 32) + (((long)data[5]) << 40) + (((long)data[6]) << 48) + (((long)data[7]) << 56);
                int exposure = data[8] + (data[9] << 8) + (data[10] << 16) + (data[11] << 24);
                if (frameTimeMsSince2010 == 0)
                {
                    // First or last AAV frame
                    continue;
                }
                var entry = new TimeAnalyserEntry();
                int dataOffset = 12;
                AdvImageData imageData = null;
                // Image section: 32-bit length prefix followed by the section payload
                int sectionDataLength = data[dataOffset] + (data[dataOffset + 1] << 8) + (data[dataOffset + 2] << 16) + (data[dataOffset + 3] << 24);
                if (sectionDataLength > 2)
                {
                    imageData = (AdvImageData)aavFile.ImageSection.GetDataFromDataBytes(data, null, sectionDataLength, dataOffset + 4);
                }
                dataOffset += sectionDataLength + 4;
                // Status section follows the image section with the same length-prefixed layout
                sectionDataLength = data[dataOffset] + (data[dataOffset + 1] << 8) + (data[dataOffset + 2] << 16) + (data[dataOffset + 3] << 24);
                AdvStatusData statusSection = (AdvStatusData)aavFile.StatusSection.GetDataFromDataBytes(data, null, sectionDataLength, dataOffset + 4);
                entry.FrameNo = i;
                // Exposure recorded in 0.1 ms units
                entry.ExposureMs = (float)(exposure / 10.0);
                // Reference time: prefer the full-precision OCR-ed time; otherwise use the
                // frame timestamp shifted back by half the exposure (mid-exposure time)
                if (OCR_TIME_TAG != null && statusSection.TagValues.TryGetValue(OCR_TIME_TAG, out tagVal))
                {
                    entry.ReferenceTime = new DateTime(long.Parse(tagVal));
                }
                else
                {
                    entry.ReferenceTime = AdvFile.ADV_ZERO_DATE_REF.AddMilliseconds(frameTimeMsSince2010).AddMilliseconds(-1 * entry.ExposureMs / 2);
                }
                long referenceTimeTicks = entry.ReferenceTime.Ticks;
                if (SYSTEM_TIME_TAG != null && statusSection.TagValues.TryGetValue(SYSTEM_TIME_TAG, out tagVal))
                {
                    entry.SystemTime = AdvFile.ADV_ZERO_DATE_REF.AddMilliseconds(long.Parse(tagVal)).AddMilliseconds(m_CorrSystemTimeMs);
                    if (entry.SystemTime.Day + 1 == entry.ReferenceTime.Day)
                    {
                        // Fixing a date-change bug in OccuRec (var 1)
                        entry.ReferenceTime = entry.ReferenceTime.AddDays(-1);
                        referenceTimeTicks = entry.ReferenceTime.Ticks;
                    }
                    if (entry.SystemTime.Day == entry.ReferenceTime.Day && entry.ReferenceTime.Hour - entry.SystemTime.Hour == 23)
                    {
                        // Fixing a date-change bug in OccuRec (var 2)
                        entry.ReferenceTime = entry.ReferenceTime.AddDays(-1);
                        referenceTimeTicks = entry.ReferenceTime.Ticks;
                    }
                    entry.DeltaSystemTimeMs = (float)new TimeSpan(entry.SystemTime.Ticks - referenceTimeTicks).TotalMilliseconds;
                }
                if (SYSTEM_TIME_FT_TAG != null && statusSection.TagValues.TryGetValue(SYSTEM_TIME_FT_TAG, out tagVal))
                {
                    // SystemTimeAsFileTime has a microsecond precision. However the IOTA-VTI reference time precision is only 0.1 ms
                    // so here we are rounding the SystemTimeAsFileTime value to the nearest 0.1 ms for correctness
                    entry.SystemTimeFileTime = new DateTime(1000 * (long.Parse(tagVal) / 1000)).AddMilliseconds(m_CorrSystemTimeMs);
                    entry.DeltaSystemFileTimeMs = (float)new TimeSpan(entry.SystemTimeFileTime.Ticks - referenceTimeTicks).TotalMilliseconds;
                }
                if (NTP_TIME_TAG != null && statusSection.TagValues.TryGetValue(NTP_TIME_TAG, out tagVal))
                {
                    entry.NTPTime = AdvFile.ADV_ZERO_DATE_REF.AddMilliseconds(long.Parse(tagVal)).AddMilliseconds(m_CorrNtpTimeMs);
                    entry.DeltaNTPTimeMs = (float)new TimeSpan(entry.NTPTime.Ticks - referenceTimeTicks).TotalMilliseconds;
                }
                if (NTP_ERROR_TAG != null && statusSection.TagValues.TryGetValue(NTP_ERROR_TAG, out tagVal))
                {
                    entry.NTPErrorMs = 3 * int.Parse(tagVal) / 10.0f; // Value recorded in 0.1MS, converted to MS and then taken as 3-Sigma
                }
                Entries.Add(entry);
                if (imageData != null)
                {
                    // Frames with an embedded image are OCR debug frames; they are always
                    // flagged as outliers and excluded from the min/max statistics below
                    entry.DebugImage = BitmapFilter.ToVideoFields(Pixelmap.ConstructBitmapFromBitmapPixels(imageData.ImageData));
                    entry.IsOutlier = true;
                    if (OCR_DEBUG_1_TAG != null && statusSection.TagValues.TryGetValue(OCR_DEBUG_1_TAG, out tagVal))
                    {
                        entry.OrcField1 = tagVal;
                    }
                    if (OCR_DEBUG_2_TAG != null && statusSection.TagValues.TryGetValue(OCR_DEBUG_2_TAG, out tagVal))
                    {
                        entry.OrcField2 = tagVal;
                    }
                    DebugFrames.Add(entry);
                }
                else
                {
                    // Deltas beyond +/-2000 ms are treated as outliers and excluded from the extremes
                    int maxOutlier = 2000;
                    int minOutlier = -2000;
                    if (entry.DeltaSystemTimeMs > maxDeltaSys)
                    {
                        if (entry.DeltaSystemTimeMs < maxOutlier)
                        {
                            maxDeltaSys = entry.DeltaSystemTimeMs;
                        }
                        else
                        {
                            entry.IsOutlier = true;
                            Trace.WriteLine("SystemTime Outlier: " + entry.DeltaSystemTimeMs);
                        }
                    }
                    if (entry.DeltaSystemTimeMs < minDeltaSys)
                    {
                        if (entry.DeltaSystemTimeMs > minOutlier)
                        {
                            minDeltaSys = entry.DeltaSystemTimeMs;
                        }
                        else
                        {
                            entry.IsOutlier = true;
                            Trace.WriteLine("SystemTime Outlier: " + entry.DeltaSystemTimeMs);
                        }
                    }
                    if (entry.DeltaSystemFileTimeMs > maxDeltaSysF)
                    {
                        if (entry.DeltaSystemFileTimeMs < maxOutlier)
                        {
                            maxDeltaSysF = entry.DeltaSystemFileTimeMs;
                        }
                        else
                        {
                            entry.IsOutlier = true;
                            Trace.WriteLine("SystemFileTime Outlier: " + entry.DeltaSystemFileTimeMs);
                        }
                    }
                    if (entry.DeltaSystemFileTimeMs < minDeltaSysF)
                    {
                        if (entry.DeltaSystemFileTimeMs > minOutlier)
                        {
                            minDeltaSysF = entry.DeltaSystemFileTimeMs;
                        }
                        else
                        {
                            entry.IsOutlier = true;
                            Trace.WriteLine("SystemFileTime Outlier: " + entry.DeltaSystemFileTimeMs);
                        }
                    }
                    if (entry.DeltaNTPTimeMs > maxDeltaNtp)
                    {
                        if (entry.DeltaNTPTimeMs < maxOutlier)
                        {
                            maxDeltaNtp = entry.DeltaNTPTimeMs;
                        }
                        else
                        {
                            entry.IsOutlier = true;
                            //Trace.WriteLine("NTPTime Outlier: " + entry.DeltaNTPTimeMs);
                        }
                    }
                    if (entry.DeltaNTPTimeMs < minDeltaNtp)
                    {
                        if (entry.DeltaNTPTimeMs > minOutlier)
                        {
                            minDeltaNtp = entry.DeltaNTPTimeMs;
                        }
                        else
                        {
                            entry.IsOutlier = true;
                            //Trace.WriteLine("NTPTime Outlier: " + entry.DeltaNTPTimeMs);
                        }
                    }
                    if (entry.NTPErrorMs > maxNtpError)
                    {
                        if (entry.NTPErrorMs < maxOutlier)
                        {
                            maxNtpError = entry.NTPErrorMs;
                        }
                        else
                        {
                            entry.IsOutlier = true;
                            //Trace.WriteLine("NtpError Outlier: " + entry.NTPErrorMs);
                        }
                    }
                    if (-entry.NTPErrorMs < minNtpError)
                    {
                        if (-entry.NTPErrorMs > minOutlier)
                        {
                            minNtpError = -entry.NTPErrorMs;
                        }
                        else
                        {
                            entry.IsOutlier = true;
                            //Trace.WriteLine("NtpError Outlier: " + -entry.NTPErrorMs);
                        }
                    }
                }
                // Utilisation values are only written on some frames; when present, a sample
                // is recorded and back-applied to all entries collected since the last sample
                if (CPU_USAGE_TAG != null && statusSection.TagValues.TryGetValue(CPU_USAGE_TAG, out tagVal))
                {
                    float cpuUsage = float.Parse(tagVal, CultureInfo.InvariantCulture);
                    float diskUsage = 0;
                    float freeMemory = 0;
                    if (DISK_USAGE_TAG != null && statusSection.TagValues.TryGetValue(DISK_USAGE_TAG, out tagVal))
                    {
                        diskUsage = float.Parse(tagVal, CultureInfo.InvariantCulture);
                        if (maxDiskUsage < diskUsage)
                        {
                            maxDiskUsage = diskUsage;
                        }
                    }
                    if (FREE_MEMORY_TAG != null && statusSection.TagValues.TryGetValue(FREE_MEMORY_TAG, out tagVal))
                    {
                        freeMemory = float.Parse(tagVal, CultureInfo.InvariantCulture);
                        if (maxFreeMemory < freeMemory)
                        {
                            maxFreeMemory = freeMemory;
                        }
                        if (minFreeMemory > freeMemory)
                        {
                            minFreeMemory = freeMemory;
                        }
                    }
                    var currUtilisationEntry = new SystemUtilisationEntry() { CpuUtilisation = cpuUsage, DiskUtilisation = diskUsage, FreeMemory = freeMemory };
                    SystemUtilisation.Add(currUtilisationEntry);
                    entriesWithNoUtil.ForEach(x => x.UtilisationEntry = currUtilisationEntry);
                    entriesWithNoUtil.Clear();
                }
                entriesWithNoUtil.Add(entry);
                i++;
                // Throttled progress reporting: once every 100 frames
                if (i % 100 == 0)
                {
                    progressCallback(i - m_Aav.FirstFrame, m_Aav.CountFrames);
                }
            }
        }
        Trace.WriteLine(string.Format("MinDeltaSys: {0:0.0}, MaxDeltaSys: {1:0.0}", minDeltaSys, maxDeltaSys));
        Trace.WriteLine(string.Format("MinDeltaSysF: {0:0.0}, MaxDeltaSysF: {1:0.0}", minDeltaSysF, maxDeltaSysF));
        Trace.WriteLine(string.Format("MinDeltaNtp: {0:0.0}, MaxDeltaNtp: {1:0.0}", minDeltaNtp, maxDeltaNtp));
        // Publish the accumulated extremes
        MinDeltaNTPMs = minDeltaNtp;
        MaxDeltaNTPMs = maxDeltaNtp;
        MinDeltaNTPErrorMs = minNtpError;
        MaxDeltaNTPErrorMs = maxNtpError;
        MinDeltaSystemTimeMs = minDeltaSys;
        MaxDeltaSystemTimeMs = maxDeltaSys;
        MinDeltaSystemFileTimeMs = minDeltaSysF;
        MaxDeltaSystemFileTimeMs = maxDeltaSysF;
        MinFreeMemoryMb = minFreeMemory;
        MaxFreeMemoryMb = maxFreeMemory;
        MaxDiskUsage = maxDiskUsage;
        if (Entries.Count > 0)
        {
            FromDateTime = Entries.First().ReferenceTime;
            ToDateTime = Entries.Last().ReferenceTime;
        }
        // Pull in companion NTP and Meinberg log data when present next to the AAV file
        var logFileName = Path.ChangeExtension(m_Aav.FileName, ".log");
        if (File.Exists(logFileName))
        {
            ExtractNTPLogData(logFileName);
        }
        var meinbergFileName = Path.GetFullPath(Path.GetDirectoryName(m_Aav.FileName) + @"\ntsmadvlog.txt");
        if (File.Exists(meinbergFileName))
        {
            ExtractMeinbergLogData(meinbergFileName);
        }
        // (0, 0) signals completion to the caller
        progressCallback(0, 0);
    });
}
/// <summary>
/// Convenience constructor: builds the <see cref="Bitmap"/> from raw display-bitmap
/// pixel bytes via <c>Pixelmap.ConstructBitmapFromBitmapPixels</c> and delegates to the
/// <see cref="Bitmap"/>-accepting constructor.
/// </summary>
/// <param name="bitmapBytes">Raw pixel bytes, one byte per pixel, laid out as width x height
/// (same format produced by <c>TangraCore.GetBitmapPixels</c> — see <c>GetPixelmap</c> above).</param>
/// <param name="width">Frame width in pixels.</param>
/// <param name="height">Frame height in pixels.</param>
public SingleBitmapFileFrameStream(byte[] bitmapBytes, int width, int height) : this(Pixelmap.ConstructBitmapFromBitmapPixels(bitmapBytes, width, height)) { }