// Builds a single-frame (un-binned) measurement for export from a raw LCMeasurement.
// Photometry: AdjustedReading / TotalBackground for successful readings,
// INVALID_MEASUREMENT_VALUE otherwise. TotalBackground is serialized unsigned,
// so it is cast through int first to restore negative background values.
// Timing: when times are excluded, or were entered manually by the user (no
// embedded timestamps), only the two user-timed anchor frames get a real
// timestamp; every other frame gets DateTime.MinValue.
internal SingleMeasurement(LCMeasurement lcMeasurement, double frameNo, LCFile lcFile, bool dontIncludeTimes)
{
    CurrFrameNo = (int)lcMeasurement.CurrFrameNo;
    TargetNo = lcMeasurement.TargetNo;

    Measurement = lcMeasurement.IsSuccessfulReading
        ? 1.0f * lcMeasurement.AdjustedReading
        : INVALID_MEASUREMENT_VALUE;

    Background = lcMeasurement.IsSuccessfulReading
        // NOTE: Make sure negative backgrounds are sent as negative values (not as the serialized UINTs)
        ? 1.0f * (int)lcMeasurement.TotalBackground
        : INVALID_MEASUREMENT_VALUE;

    string isCorrectedForInstrumentalDelay;

    if (dontIncludeTimes || !lcFile.Footer.ReductionContext.HasEmbeddedTimeStamps /* If the times are entered by the user, only include the times for the frames enterred by the user*/)
    {
        Timestamp = DateTime.MinValue;
        isCorrectedForInstrumentalDelay = null;

        if ((int)frameNo == lcFile.Header.FirstTimedFrameNo)
        {
            Timestamp = lcFile.Header.FirstTimedFrameTime;
        }
        else if ((int)frameNo == lcFile.Header.LastTimedFrameNo)
        {
            Timestamp = lcFile.Header.SecondTimedFrameTime;
        }
    }
    else
    {
        Timestamp = lcFile.GetTimeForFrame(frameNo, out isCorrectedForInstrumentalDelay);
    }

    // NOTE(review): this also reports "corrected" when the header says no
    // correction is required at all -- confirm that is the intended contract.
    IsCorrectedForInstrumentalDelay = lcFile.Header.InstrumentalDelayCorrectionsNotRequired() || !string.IsNullOrEmpty(isCorrectedForInstrumentalDelay);

    IsSuccessful = lcMeasurement.IsSuccessfulReading;
}
// Builds a single-frame (un-binned) measurement for export from a raw LCMeasurement.
// Photometry: AdjustedReading / TotalBackground for successful readings,
// INVALID_MEASUREMENT_VALUE otherwise. TotalBackground is serialized unsigned,
// so it is cast through int first to restore negative background values.
// Timing: when times are excluded, or were entered manually by the user (no
// embedded timestamps), only the two user-timed anchor frames get a real
// timestamp; every other frame gets DateTime.MinValue.
internal SingleMeasurement(LCMeasurement lcMeasurement, double frameNo, LCFile lcFile, bool dontIncludeTimes)
{
    CurrFrameNo = (int)lcMeasurement.CurrFrameNo;
    TargetNo = lcMeasurement.TargetNo;

    Measurement = lcMeasurement.IsSuccessfulReading
        ? 1.0f * lcMeasurement.AdjustedReading
        : INVALID_MEASUREMENT_VALUE;

    Background = lcMeasurement.IsSuccessfulReading
        // NOTE: Make sure negative backgrounds are sent as negative values (not as the serialized UINTs)
        ? 1.0f * (int)lcMeasurement.TotalBackground
        : INVALID_MEASUREMENT_VALUE;

    string isCorrectedForInstrumentalDelay;

    if (dontIncludeTimes || !lcFile.Footer.ReductionContext.HasEmbeddedTimeStamps /* If the times are entered by the user, only include the times for the frames enterred by the user*/)
    {
        Timestamp = DateTime.MinValue;
        isCorrectedForInstrumentalDelay = null;

        if ((int)frameNo == lcFile.Header.FirstTimedFrameNo)
            Timestamp = lcFile.Header.FirstTimedFrameTime;
        else if ((int)frameNo == lcFile.Header.LastTimedFrameNo)
            Timestamp = lcFile.Header.SecondTimedFrameTime;
    }
    else
    {
        Timestamp = lcFile.GetTimeForFrame(frameNo, out isCorrectedForInstrumentalDelay);
    }

    // NOTE(review): this also reports "corrected" when the header says no
    // correction is required at all -- confirm that is the intended contract.
    IsCorrectedForInstrumentalDelay = lcFile.Header.InstrumentalDelayCorrectionsNotRequired() || !string.IsNullOrEmpty(isCorrectedForInstrumentalDelay);

    IsSuccessful = lcMeasurement.IsSuccessfulReading;
}
// Rebuilds the light-curve info panel so it hosts a single, freshly created
// file-info control bound to the given .lc file.
internal void SetupLCFileInfo(LCFile lcFile)
{
    // Drop whatever was shown before so only one info control remains hosted.
    pnlViewLightCurve.Controls.Clear();

    var fileInfoControl = new ucLCFileInfo(lcFile, m_VideoController);
    fileInfoControl.Dock = DockStyle.Fill;
    pnlViewLightCurve.Controls.Add(fileInfoControl);
}
// Uploads the APK through the Qiniu storage backend and verifies the file was
// assigned an ObjectId by the server.
public async Task Qiniu()
{
    var file = new LCFile("avatar", APKFilePath);
    await file.Save();

    TestContext.WriteLine(file.ObjectId);
    Assert.NotNull(file.ObjectId);
}
// Reconstructs an LCFile from its serialized dictionary representation.
// FIX: guard against a null dictionary instead of silently producing an
// uninitialized file (the as-cast previously turned any null/incompatible
// input into a null argument for LCObjectData.Decode).
public static LCFile DecodeFile(IDictionary dict)
{
    if (dict == null)
    {
        throw new ArgumentNullException(nameof(dict));
    }
    LCFile file = new LCFile();
    // NOTE(review): if dict is an IDictionary that is not Dictionary<string, object>,
    // the as-cast still yields null -- confirm all callers pass the generic type.
    LCObjectData objectData = LCObjectData.Decode(dict as Dictionary<string, object>);
    file.Merge(objectData);
    return file;
}
// Fetches a known file by object id and prints its URL plus a 32x32 thumbnail URL.
// NOTE(review): the object id is hard-coded -- this test breaks if that object
// is ever deleted from the backend; consider creating the file in the test setup.
public async Task QueryFile()
{
    LCQuery<LCFile> query = LCFile.GetQuery();
    LCFile file = await query.Get("5e0dbfa0562071008e21c142");
    Assert.NotNull(file.Url);
    TestContext.WriteLine(file.Url);
    TestContext.WriteLine(file.GetThumbnailUrl(32, 32));
}
// Fetches the avatar file (uploaded by an earlier test in this fixture) by its
// object id and prints its URL plus a 32x32 thumbnail URL.
public async Task QueryFile()
{
    LCQuery<LCFile> query = LCFile.GetQuery();
    LCFile file = await query.Get(avatar.ObjectId);
    Assert.NotNull(file.Url);
    TestContext.WriteLine(file.Url);
    TestContext.WriteLine(file.GetThumbnailUrl(32, 32));
}
// Installs the loaded .lc file into the controller: rebuilds the reprocessing
// context and pushes the file and the observer's geo location into the
// light curve form.
internal void SetLcFile(LCFile lcFile)
{
    m_lcFile = lcFile;
    m_Context = new LightCurveContext(lcFile);
    m_LightCurveForm.SetNewLcFile(lcFile);
    m_LightCurveForm.SetGeoLocation(m_VideoController.GeoLocation);
}
// Uploads the avatar image from a local path, reporting progress as the bytes
// stream up, and verifies an ObjectId was assigned.
public async Task SaveFromPath()
{
    avatar = new LCFile("avatar", AvatarFilePath);
    await avatar.Save((count, total) => TestContext.WriteLine($"progress: {count}/{total}"));

    TestContext.WriteLine(avatar.ObjectId);
    Assert.NotNull(avatar.ObjectId);
}
// Uploads the APK through the AWS (international) region and verifies an
// ObjectId was assigned.
// NOTE(review): the app id/key pair is committed in source; these look like
// client-side (publishable) LeanCloud keys, but confirm they are not secrets.
public async Task AWS()
{
    Logger.LogDelegate += Utils.Print;
    LeanCloud.Initialize("UlCpyvLm8aMzQsW6KnP6W3Wt-MdYXbMMI", "PyCTYoNoxCVoKKg394PBeS4r", "https://ulcpyvlm.api.lncldglobal.com");
    LCFile file = new LCFile("avatar", APKFilePath);
    await file.Save();
    TestContext.WriteLine(file.ObjectId);
    Assert.NotNull(file.ObjectId);
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
    if (disposing && (components != null))
    {
        components.Dispose();
    }
    base.Dispose(disposing);
    // Drop references to the (potentially large) light curve data so it can be
    // garbage collected as soon as the form is disposed.
    m_LCFile = null;
    m_SelectedMeasurements = null;
}
// Uploads a file built from an in-memory byte buffer and verifies an ObjectId
// was assigned.
public async Task SaveFromMemory()
{
    const string text = "hello, world";
    byte[] payload = Encoding.UTF8.GetBytes(text);

    var file = new LCFile("text", payload);
    await file.Save();

    TestContext.WriteLine(file.ObjectId);
    Assert.NotNull(file.ObjectId);
}
// Sends text, image, file and binary messages through the conversation and
// waits until the receiving client (m2) has observed the three typed messages.
public async Task Send()
{
    // Complete continuations off the callback thread to avoid re-entrancy.
    TaskCompletionSource<object> tcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
    int count = 0;
    m2.OnMessage = (conv, msg) =>
    {
        WriteLine(msg.Id);
        if (msg is LCIMImageMessage imageMsg)
        {
            WriteLine($"-------- url: {imageMsg.Url}");
            count++;
        }
        else if (msg is LCIMFileMessage fileMsg)
        {
            // BUGFIX: label said "name" but the value printed is the file format.
            WriteLine($"-------- format: {fileMsg.Format}");
            count++;
        }
        else if (msg is LCIMTextMessage textMsg)
        {
            WriteLine($"-------- text: {textMsg.Text}");
            count++;
        }
        if (count >= 3)
        {
            // BUGFIX: use TrySetResult -- OnMessage fires again for the binary
            // message below while count is still >= 3, and a second SetResult
            // would throw InvalidOperationException inside the callback.
            tcs.TrySetResult(null);
        }
    };

    LCIMTextMessage textMessage = new LCIMTextMessage("hello, world");
    await conversation.Send(textMessage);
    Assert.NotNull(textMessage.Id);

    LCFile image = new LCFile("hello", "../../../../../assets/hello.png");
    LCIMImageMessage imageMessage = new LCIMImageMessage(image);
    await conversation.Send(imageMessage);
    Assert.NotNull(imageMessage.Id);

    LCFile file = new LCFile("apk", "../../../../../assets/test.apk");
    LCIMFileMessage fileMessage = new LCIMFileMessage(file);
    await conversation.Send(fileMessage);
    Assert.NotNull(fileMessage.Id);

    LCIMBinaryMessage binaryMessage = new LCIMBinaryMessage(System.Text.Encoding.UTF8.GetBytes("LeanCloud"));
    await conversation.Send(binaryMessage);
    Assert.NotNull(binaryMessage.Id);

    await tcs.Task;
}
// Registers an externally-hosted image by URL, attaches descriptive metadata,
// and verifies an ObjectId was assigned after saving.
public async Task SaveFromUrl()
{
    var file = new LCFile("scene", new Uri("http://img95.699pic.com/photo/50015/9034.jpg_wh300.jpg"));

    // Describe the remote image before saving.
    file.AddMetaData("size", 1024);
    file.AddMetaData("width", 128);
    file.AddMetaData("height", 256);
    file.MimeType = "image/jpg";

    await file.Save();

    TestContext.WriteLine(file.ObjectId);
    Assert.NotNull(file.ObjectId);
}
// Frame "stream" backed by the single averaged frame stored in the .lc footer,
// used when the original video cannot be located.
public SingleBitmapFileFrameStream(LCFile lightCurveFile)
{
    m_lcFile = lightCurveFile;

    // Pre-v4 files may store the averaged frame as raw 32bpp ARGB; detect that
    // by the exact 4-bytes-per-pixel buffer length.
    if (lightCurveFile.LcFileFormatVersion < 4 && m_lcFile.Footer.AveragedFrameBytes.Length == 4 * m_lcFile.Footer.AveragedFrameWidth * m_lcFile.Footer.AveragedFrameHeight)
        m_Pixelmap = Pixelmap.ConstructForLCFile32bppArgbAveragedFrame(m_lcFile.Footer.AveragedFrameBytes, m_lcFile.Footer.AveragedFrameWidth, m_lcFile.Footer.AveragedFrameHeight, m_lcFile.Footer.AveragedFrameBpp);
    else
        m_Pixelmap = Pixelmap.ConstructForLCFileAveragedFrame(m_lcFile.Footer.AveragedFrameBytes, m_lcFile.Footer.AveragedFrameWidth, m_lcFile.Footer.AveragedFrameHeight, m_lcFile.Footer.AveragedFrameBpp);

    m_FirstFrame = (int)m_lcFile.Header.MinFrame;
    m_LastFrame = (int)m_lcFile.Header.MaxFrame;
    m_NumFrames = (int)m_lcFile.Header.MeasuredFrames;
}
// User control that displays summary information about a loaded .lc file.
internal ucLCFileInfo(LCFile lcFile, IVideoController videoController)
{
    InitializeComponent();

    m_lcFile = lcFile;
    m_VideoController = videoController;

    DisplayLCFileInfo();

    // Only analogue video (or derived from it) has fields
    VideoFileFormat videoFileFormat = m_lcFile.Header.GetVideoFileFormat();
    btnShowFields.Visible = videoFileFormat == VideoFileFormat.AVI || videoFileFormat.IsAAV();
    m_ShowingFields = false;
}
// Dialog that plots the PSF fits of up to four measured targets.
internal frmPSFFits(LightCurveContext context, LCFile lcFile, TangraConfig.LightCurvesDisplaySettings displaySettings)
{
    InitializeComponent();

    m_Context = context;
    m_LCFile = lcFile;
    m_DisplaySettings = displaySettings;

    // Pre-allocate one bitmap per target picture box so the paint code can draw
    // into them directly.
    picTarget1PSF.Image = new Bitmap(picTarget1PSF.Width, picTarget1PSF.Height);
    picTarget2PSF.Image = new Bitmap(picTarget2PSF.Width, picTarget2PSF.Height);
    picTarget3PSF.Image = new Bitmap(picTarget3PSF.Width, picTarget3PSF.Height);
    picTarget4PSF.Image = new Bitmap(picTarget4PSF.Width, picTarget4PSF.Height);

    m_TargetBoxes = new PictureBox[] { picTarget1PSF, picTarget2PSF, picTarget3PSF, picTarget4PSF };
}
// Dialog that plots the PSF fits of up to four measured targets.
internal frmPSFFits(LightCurveContext context, LCFile lcFile, TangraConfig.LightCurvesDisplaySettings displaySettings)
{
    InitializeComponent();

    m_Context = context;
    m_LCFile = lcFile;
    m_DisplaySettings = displaySettings;

    // Pre-allocate one bitmap per target picture box so the paint code can draw
    // into them directly.
    picTarget1PSF.Image = new Bitmap(picTarget1PSF.Width, picTarget1PSF.Height);
    picTarget2PSF.Image = new Bitmap(picTarget2PSF.Width, picTarget2PSF.Height);
    picTarget3PSF.Image = new Bitmap(picTarget3PSF.Width, picTarget3PSF.Height);
    picTarget4PSF.Image = new Bitmap(picTarget4PSF.Width, picTarget4PSF.Height);

    m_TargetBoxes = new PictureBox[] { picTarget1PSF, picTarget2PSF, picTarget3PSF, picTarget4PSF};
}
// Dialog that plots the background histograms of up to four measured targets.
internal frmBackgroundHistograms(LightCurveContext context, LCFile lcFile, TangraConfig.LightCurvesDisplaySettings displaySettings)
{
    InitializeComponent();

    m_Context = context;
    m_LCFile = lcFile;
    m_DisplaySettings = displaySettings;

    // Pre-allocate one bitmap per target picture box so the paint code can draw
    // into them directly.
    picTarget1Hist.Image = new Bitmap(picTarget1Hist.Width, picTarget1Hist.Height);
    picTarget2Hist.Image = new Bitmap(picTarget2Hist.Width, picTarget2Hist.Height);
    picTarget3Hist.Image = new Bitmap(picTarget3Hist.Width, picTarget3Hist.Height);
    picTarget4Hist.Image = new Bitmap(picTarget4Hist.Width, picTarget4Hist.Height);

    m_TargetBoxes = new PictureBox[] { picTarget1Hist, picTarget2Hist, picTarget3Hist, picTarget4Hist };
}
// Frame "stream" backed by the single averaged frame stored in the .lc footer,
// used when the original video cannot be located.
public SingleBitmapFileFrameStream(LCFile lightCurveFile)
{
    m_lcFile = lightCurveFile;

    // Pre-v4 files may store the averaged frame as raw 32bpp ARGB; detect that
    // by the exact 4-bytes-per-pixel buffer length.
    if (lightCurveFile.LcFileFormatVersion < 4 && m_lcFile.Footer.AveragedFrameBytes.Length == 4 * m_lcFile.Footer.AveragedFrameWidth * m_lcFile.Footer.AveragedFrameHeight)
    {
        m_Pixelmap = Pixelmap.ConstructForLCFile32bppArgbAveragedFrame(m_lcFile.Footer.AveragedFrameBytes, m_lcFile.Footer.AveragedFrameWidth, m_lcFile.Footer.AveragedFrameHeight, m_lcFile.Footer.AveragedFrameBpp);
    }
    else
    {
        m_Pixelmap = Pixelmap.ConstructForLCFileAveragedFrame(m_lcFile.Footer.AveragedFrameBytes, m_lcFile.Footer.AveragedFrameWidth, m_lcFile.Footer.AveragedFrameHeight, m_lcFile.Footer.AveragedFrameBpp);
    }

    m_FirstFrame = (int)m_lcFile.Header.MinFrame;
    m_LastFrame = (int)m_lcFile.Header.MaxFrame;
    m_NumFrames = (int)m_lcFile.Header.MeasuredFrames;
}
// Builds a FITS-filename -> frame-number index from the first object's readings
// so frame times can be looked up per file; the index is null when frame times
// cannot be determined for this .lc file.
internal LCFITSTimeStampReader(LCFile lcFile)
{
    m_LCFile = lcFile;
    var files = lcFile.Data[0].Select(x => x.CurrFileName).ToArray();
    var frameNos = lcFile.Data[0].Select(x => x.CurrFrameNo).ToArray();
    if (m_LCFile.CanDetermineFrameTimes)
    {
        for (int i = 0; i < files.Length; i++)
        {
            // FIX: ToLowerInvariant -- ToLower() is culture-sensitive and can
            // produce different keys under e.g. the Turkish locale ('I' -> 'ı').
            // FIX: indexer assignment instead of Add() so a duplicated file name
            // in the data does not throw ArgumentException (last entry wins).
            m_FitsIndex[files[i].ToLowerInvariant()] = frameNos[i];
        }
    }
    else
    {
        m_FitsIndex = null;
    }
}
// Captures the reprocessing state (apertures, PSF fit areas, readings) derived
// from a loaded .lc file.
internal LightCurveContext(LCFile lcFile)
{
    if (lcFile.Header.MeasuredFrames > 0)
    {
        for (int i = 0; i < lcFile.Header.ObjectCount; i++)
        {
            m_ReProcessApertures[i] = lcFile.Header.MeasurementApertures[i];
            // Force the PSF fit area to the nearest odd size (2*(n/2)+1).
            m_ReProcessFitAreas[i] = 2 * (lcFile.Header.PsfFitMatrixSizes[i] / 2) + 1;
            AllReadings[i] = lcFile.Data[i];
        }
    }

    MagnitudeConverter = new MagnitudeConverter(lcFile.Header.ReferenceMagnitudes, lcFile.Header.ReferenceIntensity);

    // NOTE(review): titles are zero-based ("Object 0".."Object 3") -- confirm
    // the UI expects zero-based object numbering here.
    for (int i = 0; i < 4; i++)
    {
        m_ObjectTitles[i] = string.Format("Object {0}", i);
    }

    NormMethod = NormalisationMethod.Average4Frame;
}
// Attempts to locate the video that the given .lc file was reduced from.
// Returns a string (single video file path), a string[] (FITS file sequence),
// or null when nothing matching could be found.
private object GetVideoFileMatchingLcFile(LCFile lcFile, string pathToLCFile)
{
    if (lcFile.Header.GetVideoFileFormat() == VideoFileFormat.FITS)
    {
        // For now don't support opening FITS files for LightCurves as it is more complicated
        return(null);
    }

    // 1) The absolute path recorded in the .lc header.
    if (File.Exists(lcFile.Header.PathToVideoFile) && TestWhetherVideoFileMatchesLcHeader(lcFile.Header.PathToVideoFile, lcFile.Header))
    {
        return(lcFile.Header.PathToVideoFile);
    }

    // NOTE(review): this branch is unreachable -- the FITS case already returned
    // null above. Either the early return or this folder probe is stale code.
    if (lcFile.Header.GetVideoFileFormat() == VideoFileFormat.FITS && Directory.Exists(lcFile.Header.PathToVideoFile) && lcFile.Data.Count > 0)
    {
        return(TestWhetherFITSFolderMatchesLcHeader(lcFile.Header.PathToVideoFile, lcFile.Data[0]));
    }

    // 2) A file with the same name located next to the .lc file ...
    string nextGuess = Path.GetFullPath(Path.GetDirectoryName(pathToLCFile) + "\\" + Path.GetFileName(lcFile.Header.PathToVideoFile));
    if (File.Exists(nextGuess) && TestWhetherVideoFileMatchesLcHeader(nextGuess, lcFile.Header))
    {
        return(nextGuess);
    }

    // 3) ... or in the application base directory.
    nextGuess = Path.GetFullPath(AppDomain.CurrentDomain.BaseDirectory + "\\" + Path.GetFileName(lcFile.Header.PathToVideoFile));
    if (File.Exists(nextGuess) && TestWhetherVideoFileMatchesLcHeader(nextGuess, lcFile.Header))
    {
        return(nextGuess);
    }

    return(null);
}
// Builds an export measurement for one bin of binned readings. CurrFrameNo
// carries the bin number rather than a frame number.
// Timing: with user-entered (non-embedded) times only the first and last bins
// get timestamps, derived from the user-timed frames; the bin is stamped with
// the time of its middle frame.
internal SingleMeasurement(frmLightCurve.BinnedValue binnedMeasurement, int targetNo, double binMiddleFrameNo, LCFile lcFile, bool dontIncludeTimes, int totalBins)
{
    CurrFrameNo = (int)binnedMeasurement.BinNo;
    TargetNo = (byte)targetNo;

    Measurement = binnedMeasurement.IsSuccessfulReading
        ? (float)binnedMeasurement.AdjustedValue
        : INVALID_MEASUREMENT_VALUE;

    Background = binnedMeasurement.IsSuccessfulReading
        ? (float)binnedMeasurement.BackgroundValue
        : INVALID_MEASUREMENT_VALUE;

    string isCorrectedForInstrumentalDelay;

    if (dontIncludeTimes || /* If the times are entered by the user, only include the times for the first and last bin derived from the frame times enterred by the user*/ (!lcFile.Footer.ReductionContext.HasEmbeddedTimeStamps && binnedMeasurement.BinNo != 1 && binnedMeasurement.BinNo != totalBins))
    {
        Timestamp = DateTime.MinValue;
        isCorrectedForInstrumentalDelay = null;
    }
    else
    {
        Timestamp = lcFile.GetTimeForFrame(binMiddleFrameNo, out isCorrectedForInstrumentalDelay);
    }

    // NOTE(review): also reports "corrected" when the header says no correction
    // is required at all -- confirm that is the intended contract.
    IsCorrectedForInstrumentalDelay = lcFile.Header.InstrumentalDelayCorrectionsNotRequired() || !string.IsNullOrEmpty(isCorrectedForInstrumentalDelay);

    IsSuccessful = binnedMeasurement.IsSuccessfulReading;
}
// Verifies that a file readable only by its owning anonymous user cannot be
// read by a different anonymous user (expects HTTP 403).
public async Task FileACL()
{
    // Create a file readable only by the first anonymous user.
    LCUser user = await LCUser.LoginAnonymously();
    LCFile file = new LCFile("avatar", AvatarFilePath);
    LCACL acl = new LCACL();
    acl.SetUserReadAccess(user, true);
    file.ACL = acl;
    await file.Save();

    // The owner can read it back.
    LCQuery<LCFile> query = LCFile.GetQuery();
    LCFile avatar = await query.Get(file.ObjectId);
    Assert.NotNull(avatar.ObjectId);

    // A different (new) anonymous user must NOT be able to read it.
    await LCUser.LoginAnonymously();
    try
    {
        await query.Get(file.ObjectId);
        // BUGFIX: the original test passed silently when no exception was
        // thrown, i.e. when the ACL was not enforced at all.
        Assert.Fail("Expected a 403 LCException for a user without read access.");
    }
    catch (LCException e)
    {
        Assert.AreEqual(e.Code, 403);
    }
}
// Creates an IM file message carrying the given LCFile attachment.
public LCIMFileMessage(LCFile file) : base()
{
    File = file;
}
// Remembers the loaded .lc file so the operation can use it later.
internal void SetLCFile(LCFile loadedFile)
{
    m_LoadedLcFile = loadedFile;
}
// Switches the operation into "view light curve" mode for an already-loaded
// .lc file: installs the file into the controller, clears measuring/refining
// state, rebuilds the state machine and re-binds the control panel UI.
internal void EnterViewLightCurveMode(LCFile lcFile, IVideoController videoController, Panel controlPanelHolder)
{
    m_ControlPanelHolder = controlPanelHolder;
    m_VideoController = (VideoController)videoController;
    EnsureControlPanel(m_ControlPanelHolder);

    m_LightCurveController.SetLcFile(lcFile);

    // Viewing mode excludes all measuring/refining/configuring activity.
    m_Measuring = false;
    m_Refining = false;
    m_ViewingLightCurve = true;
    m_Configuring = false;

    m_StateMachine = new LCStateMachine(this, m_VideoController);
    m_StateMachine.ChangeState(LightCurvesState.Viewing);
    m_StateMachine.SelectedMeasuringStar = -1;
    m_StateMachine.SelectedObject = null;

    m_MeasurementInterval = 1;
    m_CurrFrameNo = -1;
    m_CurrFileName = null;

    m_ControlPanel.BeginConfiguration(m_StateMachine, m_VideoController);
    m_ControlPanel.SetupLCFileInfo(m_LightCurveController.LcFile);
    m_ControlPanel.UpdateState();
}
// Creates an IM video message carrying the given LCFile attachment.
public LCIMVideoMessage(LCFile file) : base(file)
{
}
// Dialog showing the complete reduction information for the given .lc file.
internal frmCompleteReductionInfoForm(LCFile lcFile)
{
    InitializeComponent();
    m_lcFile = lcFile;
}
// Re-binds the form to a newly loaded .lc file and forces a full chart redraw.
internal void SetNewLcFile(LCFile lcFile)
{
    m_LCFile = lcFile;
    m_Header = lcFile.Header;
    // Back-reference so header-level helpers can reach the owning file.
    m_Header.LcFile = lcFile;
    m_Footer = lcFile.Footer;
    m_FrameTiming = lcFile.FrameTiming;

    OnNewLCFile();

    m_IsFirstDraw = true;
    pnlChart.Invalidate();
}
// Loads an .lc light curve file, re-attaches (when possible) the video it was
// reduced from -- a single video file, a FITS sequence, or just the averaged
// frame stored in the footer -- and opens the light curve form.
public void OpenLcFile(string fileName)
{
    var fi = new FileInfo(fileName);
    // Rough memory estimate: fixed overhead plus ~20x the packed file size.
    double expectedMemoryMbNeeded = 500 /* For Tangra to operate*/ + 20 * fi.Length / (1024 * 1024) /* For the .lc file to be unpacked and loaded in memory */;
    double availableMemoryMb = CrossPlaform.GetAvailableMemoryInMegabytes();
    if (expectedMemoryMbNeeded > availableMemoryMb)
    {
        if (MessageBox.Show(
                m_MainFormView,
                string.Format("It appears that you may be running in a low memory conditions and opening this file will require at least {0}Gb of free memory. Do you wish to continue?", (Math.Ceiling(expectedMemoryMbNeeded / 512.0) / 2).ToString("0.0")),
                "Warning",
                MessageBoxButtons.YesNo,
                MessageBoxIcon.Warning) == DialogResult.No)
        {
            return;
        }
    }

    m_MainFormView.Cursor = Cursors.WaitCursor;
    LCFile lcFile = null;
    try
    {
        m_MainFormView.Update();

        lcFile = LCFile.Load(fileName, m_VideoController);
        if (lcFile != null)
        {
            ReduceLightCurveOperation operation = (ReduceLightCurveOperation)m_VideoController.SetOperation<ReduceLightCurveOperation>(this, false);
            operation.SetLCFile(lcFile);

            bool flipVertically;
            bool flipHorizontally;
            FramePlayer.TranslateFlipRotate(lcFile.Footer.RotateFlipType, out flipVertically, out flipHorizontally);

            // Either a single video file path (string) or a FITS sequence (string[]).
            object videoFile = GetVideoFileMatchingLcFile(lcFile, fileName);
            if (videoFile is string && !string.IsNullOrEmpty((string)videoFile) && File.Exists((string)videoFile))
            {
                if (m_VideoController.OpenVideoFile((string)videoFile, new TangraOpenFileArgs { FrameRate = lcFile.Header.FramesPerSecond, BitPix = lcFile.Footer.DataBitPix, SerTiming = lcFile.Header.SerTimingType }))
                {
                    TangraContext.Current.CanPlayVideo = false;
                    m_VideoController.UpdateViews();
                }
            }
            else if (videoFile is string[] && ((string[])videoFile).Length > 0)
            {
                var fitsFiles = (string[])videoFile;
                if (m_VideoController.OpenFitsFileSequence(Path.GetDirectoryName(fitsFiles[0]), fitsFiles, new LCFITSTimeStampReader(lcFile), null, 0, flipVertically, flipHorizontally, (int)lcFile.Data[0][0].CurrFrameNo))
                {
                    TangraContext.Current.CanPlayVideo = false;
                    // Restore the saved dynamic display range, if one was saved.
                    if (lcFile.Footer.FitsDynamicFromValue != -1 && lcFile.Footer.FitsDynamicToValue != -1)
                    {
                        m_VideoController.SetDisplayIntensifyMode(DisplayIntensifyMode.Dynamic, lcFile.Footer.FitsDynamicFromValue, lcFile.Footer.FitsDynamicToValue);
                    }
                    m_VideoController.UpdateViews();
                }
            }
            else
            {
                // NOTE: No video file found, just show the saved averaged frame
                bool oldCanProcessLightCurvePixels = TangraContext.Current.CanProcessLightCurvePixels;
                TangraContext.Current.Reset();
                TangraContext.Current.CanProcessLightCurvePixels = oldCanProcessLightCurvePixels;
                if (lcFile.Footer.AveragedFrameBytes != null)
                {
                    if (m_VideoController.SingleBitmapFile(lcFile))
                    {
                        TangraContext.Current.CanPlayVideo = false;
                        m_VideoController.UpdateViews();
                        PSFFit.SetDataRange(lcFile.Footer.DataBitPix, lcFile.Footer.DataAav16NormVal);
                    }
                }
                TangraContext.Current.CanPlayVideo = false;
                TangraContext.Current.CanScrollFrames = false;
                m_VideoController.UpdateViews();
            }

            m_Context = new LightCurveContext(lcFile);

            m_LightCurveForm = new frmLightCurve(this, m_AddinsController, lcFile, fileName);
            m_LightCurveForm.SetGeoLocation(m_VideoController.GeoLocation);
            m_LightCurveForm.Show(m_MainFormView);
            m_LightCurveForm.Update();

            // TODO: Review the VideoController-LightCurveController-ReduceLightCurveOperation relation and how they are initialized
            // TODO: Provide a clean way of initializing the controller/operation state when opening an .lc file!
            operation.EnterViewLightCurveMode(lcFile, m_VideoController, m_VideoController.ControlerPanel);
            RegisterRecentFile(RecentFileType.LightCurve, fileName);

            if (!string.IsNullOrEmpty(m_VideoController.CurrentVideoFileType))
            {
                // Move to the first frame in the light curve
                m_VideoController.MoveToFrame((int)lcFile.Header.MinFrame);
            }

            TangraContext.Current.FileName = Path.GetFileName(fileName);
            // NOTE(review): reads the field m_lcFile rather than the local lcFile
            // just loaded -- verify this is intentional and m_lcFile is current here.
            TangraContext.Current.FileFormat = m_lcFile.Header.SourceInfo;
            m_VideoController.UpdateViews();
        }
    }
    catch (IOException ioex)
    {
        MessageBox.Show(ioex.Message, "Tangra", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        m_MainFormView.Cursor = Cursors.Default;
    }
}
// Dialog that shows zoomed pixel views of up to four measured targets.
internal frmZoomedPixels(LightCurveContext context, LCFile lcFile, TangraConfig.LightCurvesDisplaySettings displaySettings, LightCurveController lightcurveController)
{
    InitializeComponent();

    m_Context = context;
    m_LCFile = lcFile;
    m_DisplaySettings = displaySettings;
    m_LightcurveController = lightcurveController;

    // Saturation threshold depends on the bit depth of the reduced video.
    m_Saturation = TangraConfig.Settings.Photometry.Saturation.GetSaturationForBpp(context.BitPix, context.MaxPixelValue);

    picTarget1Pixels.Image = new Bitmap(picTarget1Pixels.Width, picTarget1Pixels.Height);
    picTarget2Pixels.Image = new Bitmap(picTarget2Pixels.Width, picTarget2Pixels.Height);
    picTarget3Pixels.Image = new Bitmap(picTarget3Pixels.Width, picTarget3Pixels.Height);
    picTarget4Pixels.Image = new Bitmap(picTarget4Pixels.Width, picTarget4Pixels.Height);

    m_AllObjectsPeak = 0;

    // 8-bit reductions come from a single colour channel; higher bit depths are
    // digital video with no channel selection.
    if (lcFile.Footer.ReductionContext.BitPix <= 8)
    {
        lblDisplayBandTitle.Text = "Displayed Band:";
        lblDisplayedBand.Text = lcFile.Footer.ReductionContext.ColourChannel.ToString();
    }
    else
    {
        lblDisplayBandTitle.Text = "Digital Video";
        lblDisplayedBand.Text = "";
    }

    m_TargetBoxes = new PictureBox[] { picTarget1Pixels, picTarget2Pixels, picTarget3Pixels, picTarget4Pixels };

    // Anchor the warning/info icons at the top-left corner of each target box.
    for (int i = 0; i < m_TargetBoxes.Length; i++)
    {
        warningProvider.SetIconAlignment(m_TargetBoxes[i], ErrorIconAlignment.TopLeft);
        warningProvider.SetIconPadding(m_TargetBoxes[i], -17 - 16);
        infoProvider.SetIconAlignment(m_TargetBoxes[i], ErrorIconAlignment.TopLeft);
        infoProvider.SetIconPadding(m_TargetBoxes[i], -17);
    }

    for (int i = 0; i < m_LCFile.Footer.TrackedObjects.Count; i++)
    {
        TrackedObjectConfig cfg = m_LCFile.Footer.TrackedObjects[i];
        // An object is "in a group" when another object shares its GroupId.
        m_ObjectinGroup[i] = cfg.GroupId >= 0 && m_LCFile.Footer.TrackedObjects.Count(x => x.GroupId == cfg.GroupId) > 1;
        switch (cfg.TrackingType)
        {
            case TrackingType.OccultedStar:
                m_ObjectTitles.Add(i, "Occulted");
                break;
            case TrackingType.GuidingStar:
                m_ObjectTitles.Add(i, "Guiding");
                break;
            case TrackingType.ComparisonStar:
                m_ObjectTitles.Add(i, "No Guiding");
                break;
        }
    }
}
// Attempts to locate the video file that the given .lc file was reduced from:
// the absolute path recorded in the header, then a file with the same name next
// to the .lc file, then one in the application base directory. Returns null
// when no candidate matches the .lc header.
private string GetVideoFileMatchingLcFile(LCFile lcFile, string pathToLCFile)
{
    // 1) The absolute path recorded in the .lc header.
    if (File.Exists(lcFile.Header.PathToVideoFile) && TestWhetherVideoFileMatchesLcHeader(lcFile.Header.PathToVideoFile, lcFile.Header))
        return lcFile.Header.PathToVideoFile;

    string videoFileName = Path.GetFileName(lcFile.Header.PathToVideoFile);

    // 2) Same file name, next to the .lc file.
    // FIX: Path.Combine instead of "\\" concatenation so the lookup also works
    // on non-Windows platforms (Mono) and handles separators correctly.
    string nextGuess = Path.GetFullPath(Path.Combine(Path.GetDirectoryName(pathToLCFile), videoFileName));
    if (File.Exists(nextGuess) && TestWhetherVideoFileMatchesLcHeader(nextGuess, lcFile.Header))
        return nextGuess;

    // 3) Same file name, in the application base directory.
    nextGuess = Path.GetFullPath(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, videoFileName));
    if (File.Exists(nextGuess) && TestWhetherVideoFileMatchesLcHeader(nextGuess, lcFile.Header))
        return nextGuess;

    return null;
}
// Serves image pixels straight out of a loaded .lc file.
internal LCFileImagePixelProvider(LCFile lcFile)
{
    m_LCFile = lcFile;
}
// Creates an IM audio message carrying the given LCFile attachment.
public LCIMAudioMessage(LCFile file) : base(file)
{
}
// Creates the light curve form for an .lc file loaded from disk; the window
// title carries the file name.
internal frmLightCurve(LightCurveController controller, AddinsController addinsController, LCFile lcFile, string lcFilePath)
    : this(controller, addinsController)
{
    m_LCFile = lcFile;
    m_LCFilePath = lcFilePath;

    m_Header = lcFile.Header;
    // Back-reference so header-level helpers can reach the owning file.
    m_Header.LcFile = lcFile;
    m_Footer = lcFile.Footer;
    m_FrameTiming = lcFile.FrameTiming;

    Text = "Light Curves - " + Path.GetFileName(lcFilePath);

    OnNewLCFile();
}
// Shows the light curve form for the given file and positions the video on the
// first measured frame. Playback is disabled while viewing a light curve.
internal void DoShowLightCurve(LCFile file)
{
    m_LightCurveController.EnsureLightCurveFormClosed();
    m_VideoController.EnsureLightCurveForm();

    TangraContext.Current.HasVideoLoaded = true;
    TangraContext.Current.CanPlayVideo = false;
    m_VideoController.UpdateViews();

    m_LightCurveController.SetLcFile(file);
    m_VideoController.MoveToFrame((int)m_LightCurveController.LcFile.Header.MinFrame);
}
// Captures the reprocessing state (apertures, PSF fit areas, readings) derived
// from a loaded .lc file.
internal LightCurveContext(LCFile lcFile)
{
    if (lcFile.Header.MeasuredFrames > 0)
    {
        for (int i = 0; i < lcFile.Header.ObjectCount; i++)
        {
            m_ReProcessApertures[i] = lcFile.Header.MeasurementApertures[i];
            // Force the PSF fit area to the nearest odd size (2*(n/2)+1).
            m_ReProcessFitAreas[i] = 2 * (lcFile.Header.PsfFitMatrixSizes[i] / 2) + 1;
            AllReadings[i] = lcFile.Data[i];
        }
    }

    MagnitudeConverter = new MagnitudeConverter(lcFile.Header.ReferenceMagnitudes, lcFile.Header.ReferenceIntensity);

    // NOTE(review): titles are zero-based ("Object 0".."Object 3") -- confirm
    // the UI expects zero-based object numbering here.
    for (int i = 0; i < 4; i++)
        m_ObjectTitles[i] = string.Format("Object {0}", i);

    NormMethod = NormalisationMethod.Average4Frame;
}
// Dialog that shows zoomed pixel views of up to four measured targets.
internal frmZoomedPixels(LightCurveContext context, LCFile lcFile, TangraConfig.LightCurvesDisplaySettings displaySettings, LightCurveController lightcurveController)
{
    InitializeComponent();

    m_Context = context;
    m_LCFile = lcFile;
    m_DisplaySettings = displaySettings;
    m_LightcurveController = lightcurveController;

    // Saturation threshold depends on the bit depth of the reduced video.
    m_Saturation = TangraConfig.Settings.Photometry.Saturation.GetSaturationForBpp(context.BitPix, context.MaxPixelValue);

    picTarget1Pixels.Image = new Bitmap(picTarget1Pixels.Width, picTarget1Pixels.Height);
    picTarget2Pixels.Image = new Bitmap(picTarget2Pixels.Width, picTarget2Pixels.Height);
    picTarget3Pixels.Image = new Bitmap(picTarget3Pixels.Width, picTarget3Pixels.Height);
    picTarget4Pixels.Image = new Bitmap(picTarget4Pixels.Width, picTarget4Pixels.Height);

    m_AllObjectsPeak = 0;

    // 8-bit reductions come from a single colour channel; higher bit depths are
    // digital video with no channel selection.
    if (lcFile.Footer.ReductionContext.BitPix <= 8)
    {
        lblDisplayBandTitle.Text = "Displayed Band:";
        lblDisplayedBand.Text = lcFile.Footer.ReductionContext.ColourChannel.ToString();
    }
    else
    {
        lblDisplayBandTitle.Text = "Digital Video";
        lblDisplayedBand.Text = "";
    }

    m_TargetBoxes = new PictureBox[] { picTarget1Pixels, picTarget2Pixels, picTarget3Pixels, picTarget4Pixels };

    // Anchor the warning/info icons at the top-left corner of each target box.
    for (int i = 0; i < m_TargetBoxes.Length; i++)
    {
        warningProvider.SetIconAlignment(m_TargetBoxes[i], ErrorIconAlignment.TopLeft);
        warningProvider.SetIconPadding(m_TargetBoxes[i], -17 - 16);
        infoProvider.SetIconAlignment(m_TargetBoxes[i], ErrorIconAlignment.TopLeft);
        infoProvider.SetIconPadding(m_TargetBoxes[i], -17);
    }

    for (int i = 0; i < m_LCFile.Footer.TrackedObjects.Count; i++)
    {
        TrackedObjectConfig cfg = m_LCFile.Footer.TrackedObjects[i];
        // An object is "in a group" when another object shares its GroupId.
        m_ObjectinGroup[i] = cfg.GroupId >= 0 && m_LCFile.Footer.TrackedObjects.Count(x => x.GroupId == cfg.GroupId) > 1;
        switch (cfg.TrackingType)
        {
            case TrackingType.OccultedStar:
                m_ObjectTitles.Add(i, "Occulted");
                break;
            case TrackingType.GuidingStar:
                m_ObjectTitles.Add(i, "Guiding");
                break;
            case TrackingType.ComparisonStar:
                m_ObjectTitles.Add(i, "No Guiding");
                break;
        }
    }
}
// Creates an IM image message carrying the given LCFile attachment.
public LCIMImageMessage(LCFile file) : base(file)
{
}