/// <summary>
/// Opens a video file for automated processing, trying the requested video engine first
/// and then falling back to every other available engine.
/// </summary>
/// <param name="fileName">Full path of the video file to open.</param>
/// <param name="videoEngineId">Index (into TangraVideo.EnumVideoEngines()) of the engine to try first.</param>
/// <returns>An opened VideoStream, or null if no engine could open and render the file.</returns>
internal static VideoStream OpenFileForAutomation(string fileName, int videoEngineId)
{
    List<string> allEngines = TangraVideo.EnumVideoEngines().ToList();

    // Guard against a stale/out-of-range engine id; fall back to the first engine.
    if (videoEngineId < 0 || videoEngineId >= allEngines.Count)
        videoEngineId = 0;

    // Build the attempt order: preferred engine first, then the rest.
    // NOTE: the original implementation funnelled this order through a Dictionary<int,string>,
    // whose key enumeration order is unspecified; iterating the ordered list directly
    // guarantees the preferred engine really is attempted first.
    var allEnginesByAttemptOrder = new List<string>(allEngines);
    allEnginesByAttemptOrder.RemoveAt(videoEngineId);
    allEnginesByAttemptOrder.Insert(0, allEngines[videoEngineId]);

    foreach (string engineName in allEnginesByAttemptOrder)
    {
        int engineIdx = allEngines.IndexOf(engineName);
        try
        {
            TangraVideo.SetVideoEngine(engineIdx);
            VideoFileInfo fileInfo = TangraVideo.OpenFile(fileName);
            var rv = new VideoStream(fileInfo, fileName);

            // Try to load the first frame to be sure that it is going to work,
            // before accepting this video engine for rendering
            rv.GetPixelmap(fileInfo.FirstFrame);

            return rv;
        }
        catch (Exception ex)
        {
            // Best-effort: log and move on to the next engine.
            Trace.WriteLine(ex.GetFullStackTrace());
        }
    }

    return null;
}
/// <summary>
/// Opens a video file, trying the user-configured rendering engine first and then
/// falling back to every other available engine. Rejects top-down DIB bitmaps.
/// </summary>
/// <param name="fileName">Full path of the video file to open.</param>
/// <returns>An opened VideoStream using the first engine that can render the file.</returns>
/// <exception cref="InvalidVideoFileException">Thrown when no engine can open the file.</exception>
public static VideoStream OpenFile(string fileName)
{
    List<string> allEngines = TangraVideo.EnumVideoEngines().ToList();

    // Guard against a stale/out-of-range configured index; fall back to the first engine.
    int preferredIndex = TangraConfig.Settings.Generic.AviRenderingEngineIndex;
    if (preferredIndex < 0 || preferredIndex >= allEngines.Count)
        preferredIndex = 0;

    // Build the attempt order: preferred engine first, then the rest.
    // NOTE: the original implementation funnelled this order through a Dictionary<int,string>,
    // whose key enumeration order is unspecified; iterating the ordered list directly
    // guarantees the configured engine really is attempted first.
    var allEnginesByAttemptOrder = new List<string>(allEngines);
    allEnginesByAttemptOrder.RemoveAt(preferredIndex);
    allEnginesByAttemptOrder.Insert(0, allEngines[preferredIndex]);

    Exception lastError = null;

    foreach (string engineName in allEnginesByAttemptOrder)
    {
        int engineIdx = allEngines.IndexOf(engineName);
        try
        {
            TangraVideo.SetVideoEngine(engineIdx);
            VideoFileInfo fileInfo = TangraVideo.OpenFile(fileName);

            // Negative height is the DIB convention for a top-down bitmap, which Tangra cannot render.
            // NOTE(review): the thrown exception is caught below, so the message box may be shown
            // once per attempted engine — confirm whether a single notification is intended.
            if (fileInfo.Height < 0)
            {
                MessageBox.Show("This appears to be a top-down DIB bitmap which is not supported by Tangra. If this video was recorded with ASICAP then please either use a different recording software or a different file format such as SER, FITS or ADV.", "Tangra", MessageBoxButtons.OK, MessageBoxIcon.Error);
                throw new NotSupportedException("Top-down DIB bitmaps are not supported by Tangra.");
            }

            TangraContext.Current.RenderingEngine = engineName;

            var rv = new VideoStream(fileInfo, fileName);

            // Try to load the first frame to be sure that it is going to work,
            // before accepting this video engine for rendering
            rv.GetPixelmap(fileInfo.FirstFrame);

            UsageStats.Instance.ProcessedAviFiles++;
            if (engineName == "VideoForWindows")
                UsageStats.Instance.VideoForWindowsUsed++;
            else if (engineName == "DirectShow")
                UsageStats.Instance.DirectShowUsed++;
            UsageStats.Instance.Save();

            return rv;
        }
        catch (Exception ex)
        {
            // Remember the failure and try the next engine.
            lastError = ex;
        }
    }

    throw new InvalidVideoFileException(lastError != null ? lastError.Message : "None of the rendering engines was able to open the file.");
}
/// <summary>
/// Initializes the control and fills the rendering-engine attempt-order combo box
/// with every engine reported by the native video layer.
/// </summary>
public ucAnalogueVideo8bit()
{
    InitializeComponent();

    var engines = TangraVideo.EnumVideoEngines();
    cbxRenderingEngineAttemptOrder.Items.Clear();
    cbxRenderingEngineAttemptOrder.Items.AddRange(engines);
}
/// <summary>
/// Appends one frame to the AVI file currently being written.
/// </summary>
/// <param name="pixmap">The frame pixels to append.</param>
/// <param name="addedGamma">Gamma applied to the frame by the native layer.</param>
/// <param name="adv16NormalisationValue">Optional 16-bit normalisation value passed through to the native layer.</param>
/// <returns>True on success; false after showing an error dialog when the native call fails.</returns>
public bool AddAviVideoFrame(Pixelmap pixmap, double addedGamma, int? adv16NormalisationValue)
{
    bool added = TangraVideo.AddAviVideoFrame(pixmap, addedGamma, adv16NormalisationValue);
    if (added)
        return true;

    MessageBox.Show(
        "There was an error calling AddAviVideoFrame:\r\n\r\n" + TangraVideo.GetLastAviErrorMessage(),
        "Tangra", MessageBoxButtons.OK, MessageBoxIcon.Error);
    return false;
}
/// <summary>
/// Starts writing a new AVI file via the native video layer.
/// </summary>
/// <param name="fileName">Output AVI path.</param>
/// <param name="width">Frame width in pixels.</param>
/// <param name="height">Frame height in pixels.</param>
/// <param name="bpp">Bits per pixel of the output video.</param>
/// <param name="fps">Frame rate of the output video.</param>
/// <param name="tryCodec">Whether the native layer should attempt to use a codec.</param>
/// <returns>True on success; false after showing an error dialog when the native call fails.</returns>
public bool StartNewAviFile(string fileName, int width, int height, int bpp, double fps, bool tryCodec)
{
    if (!TangraVideo.StartNewAviFile(fileName, width, height, bpp, fps, tryCodec))
    {
        // Fixed copy/paste bug: the message previously referenced AddAviVideoFrame.
        MessageBox.Show(
            "There was an error calling StartNewAviFile:\r\n\r\n" + TangraVideo.GetLastAviErrorMessage(),
            "Tangra", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return false;
    }

    return true;
}
// Generates a simulated AVI video of star fields described by modelConfig.
// Builds a per-pixel simulated dark frame once, then renders and appends every frame,
// reporting progress to the UI after each one. The AVI file is always closed, even on error.
private void GenerateAVIVideo(ModelConfig modelConfig, List <IStar> stars)
{
    TangraVideo.CloseAviFile();

    // NOTE(review): the AVI is started as 8 bpp while the Pixelmaps below are created as
    // 16 bpp with a max signal of 255 * Integration — presumably the native layer normalises
    // on write; confirm against TangraVideo.AddAviVideoFrame.
    TangraVideo.StartNewAviFile(modelConfig.FileName, modelConfig.FrameWidth, modelConfig.FrameHeight, 8, 25, false);

    m_MagnitudeToPeakDict = null;

    try
    {
        //Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, bitPix, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null);
        //AddOnScreenText(bmp, modelConfig, "The simulated video stars from the next frame");
        //TangraVideo.AddAviVideoFrame(pixmap, modelConfig.Gamma, null);

        // Saturation ceiling for integrated pixel values.
        uint maxSignalValue = (uint)(255 * modelConfig.Integration);

        Random rndGen = new Random((int)DateTime.Now.Ticks);

        // Precompute a simulated dark frame shared by all video frames.
        m_SimulatedDarkFrame = new int[modelConfig.FrameWidth, modelConfig.FrameHeight];
        for (int x = 0; x < modelConfig.FrameWidth; x++)
        {
            for (int y = 0; y < modelConfig.FrameHeight; y++)
            {
                if (modelConfig.DarkFrameMean > 0)
                {
                    // ~1% of pixels get a saturated "hot pixel" spike added to the dark mean.
                    double randomPeak = rndGen.Next(0, 100) == 66 ? 255 : 0;
                    double darkPixel = Math.Abs(VideoModelUtils.Random((modelConfig.DarkFrameMean + randomPeak) * modelConfig.Integration, 1));
                    // Clamp the sampled dark value into [0, maxSignalValue].
                    double bgPixel = Math.Min(maxSignalValue, Math.Max(0, darkPixel));
                    m_SimulatedDarkFrame[x, y] = (int)bgPixel;
                }
                else
                {
                    m_SimulatedDarkFrame[x, y] = 0;
                }
            }
        }

        // NOTE(review): "<=" renders TotalFrames + 1 frames (0..TotalFrames inclusive) —
        // confirm whether the extra frame is intentional.
        for (int i = 0; i <= modelConfig.TotalFrames; i++)
        {
            using (Pixelmap pixmap = new Pixelmap(modelConfig.FrameWidth, modelConfig.FrameHeight, 16, new uint[modelConfig.FrameWidth * modelConfig.FrameHeight], null, null))
            {
                pixmap.SetMaxSignalValue(maxSignalValue);

                // Background noise first, then the star images on top.
                VideoModelUtils.GenerateNoise(pixmap, m_SimulatedDarkFrame, modelConfig.NoiseMean * modelConfig.Integration, modelConfig.NoiseStdDev * modelConfig.Integration);
                GenerateFrame(pixmap, stars, modelConfig);

                TangraVideo.AddAviVideoFrame(pixmap, modelConfig.LinearityCoefficient, (int)pixmap.MaxSignalValue);
            }

            // Progress callback: percentage of frames completed.
            InvokeUpdateUI(2, (int)(100.0 * i / modelConfig.TotalFrames), true);
        }
    }
    finally
    {
        TangraVideo.CloseAviFile();
    }
}
/// <summary>
/// Opens a video file, trying the user-configured rendering engine first and then
/// falling back to every other available engine.
/// </summary>
/// <param name="fileName">Full path of the video file to open.</param>
/// <returns>An opened VideoStream using the first engine that can render the file.</returns>
/// <exception cref="InvalidVideoFileException">Thrown when no engine can open the file.</exception>
public static VideoStream OpenFile(string fileName)
{
    List<string> allEngines = TangraVideo.EnumVideoEngines().ToList();

    // Guard against a stale/out-of-range configured index; fall back to the first engine.
    int preferredIndex = TangraConfig.Settings.Generic.AviRenderingEngineIndex;
    if (preferredIndex < 0 || preferredIndex >= allEngines.Count)
        preferredIndex = 0;

    // Build the attempt order: preferred engine first, then the rest.
    // NOTE: the original implementation funnelled this order through a Dictionary<int,string>,
    // whose key enumeration order is unspecified; iterating the ordered list directly
    // guarantees the configured engine really is attempted first.
    var allEnginesByAttemptOrder = new List<string>(allEngines);
    allEnginesByAttemptOrder.RemoveAt(preferredIndex);
    allEnginesByAttemptOrder.Insert(0, allEngines[preferredIndex]);

    Exception lastError = null;

    foreach (string engineName in allEnginesByAttemptOrder)
    {
        int engineIdx = allEngines.IndexOf(engineName);
        try
        {
            TangraVideo.SetVideoEngine(engineIdx);
            VideoFileInfo fileInfo = TangraVideo.OpenFile(fileName);

            TangraContext.Current.RenderingEngine = engineName;

            var rv = new VideoStream(fileInfo, fileName);

            // Try to load the first frame to be sure that it is going to work,
            // before accepting this video engine for rendering
            rv.GetPixelmap(fileInfo.FirstFrame);

            UsageStats.Instance.ProcessedAviFiles++;
            if (engineName == "VideoForWindows")
                UsageStats.Instance.VideoForWindowsUsed++;
            else if (engineName == "DirectShow")
                UsageStats.Instance.DirectShowUsed++;
            UsageStats.Instance.Save();

            return rv;
        }
        catch (Exception ex)
        {
            // Remember the failure and try the next engine.
            lastError = ex;
        }
    }

    throw new InvalidVideoFileException(lastError != null ? lastError.Message : "None of the rendering engines was able to open the file.");
}
/// <summary>
/// Verifies that the installed native libraries (TangraCore and the platform-specific
/// TangraVideo engine) meet the minimum versions declared as assembly attributes,
/// warning the user via a message box when an outdated library is detected.
/// </summary>
private static void CheckUnmanagedLibraries()
{
    // Minimum required native library versions are declared as assembly-level attributes.
    TangraCoreVersionRequiredAttribute minCoreVersionRequired = ((TangraCoreVersionRequiredAttribute)Assembly.GetExecutingAssembly().GetCustomAttributes(typeof(TangraCoreVersionRequiredAttribute), false)[0]);
    TangraVideoVersionRequiredAttribute minVideoVersionRequired = ((TangraVideoVersionRequiredAttribute)Assembly.GetExecutingAssembly().GetCustomAttributes(typeof(TangraVideoVersionRequiredAttribute), false)[0]);
    TangraVideoLinuxVersionRequiredAttribute minVideoLinuxVersionRequired = ((TangraVideoLinuxVersionRequiredAttribute)Assembly.GetExecutingAssembly().GetCustomAttributes(typeof(TangraVideoLinuxVersionRequiredAttribute), false)[0]);
    TangraVideoOSXVersionRequiredAttribute minVideoOSXVersionRequired = ((TangraVideoOSXVersionRequiredAttribute)Assembly.GetExecutingAssembly().GetCustomAttributes(typeof(TangraVideoOSXVersionRequiredAttribute), false)[0]);

    string engineVersion = TangraCore.GetTangraCoreVersion();
    int engineBitness = TangraCore.GetTangraCoreBitness();
    if (engineBitness > 0)
    {
        Trace.WriteLine(string.Format("Tangra Core v{0} ({1} bit)", engineVersion, engineBitness));
    }
    else
    {
        // Fixed: format string previously contained an unbalanced closing parenthesis ("v{0})").
        Trace.WriteLine(string.Format("Tangra Core v{0}", engineVersion));
    }

    // Fixed typo in all user-facing messages below: "desn't" -> "doesn't".
    if (minCoreVersionRequired != null && !minCoreVersionRequired.IsReqiredVersion(engineVersion))
    {
        string fileName = CurrentOS.IsWindows ? "TangraCore.dll" : (CurrentOS.IsMac ? "libTangraCore.dylib" : "libTangraCore.so");
        MessageBox.Show(string.Format("Your installation of Tangra3 doesn't have the latest version of {0}. Please check for updates.", fileName), "Tangra 3", MessageBoxButtons.OK, MessageBoxIcon.Warning);
    }

    engineVersion = TangraVideo.GetVideoEngineVersion();
    Trace.WriteLine(string.Format("Tangra Video Engine v{0}", engineVersion));

    // The video engine library is platform-specific, so each OS has its own minimum version check.
    if (CurrentOS.IsWindows)
    {
        if (minVideoVersionRequired != null && !minVideoVersionRequired.IsReqiredVersion(engineVersion))
        {
            MessageBox.Show("Your installation of Tangra3 doesn't have the latest version of TangraVideo.dll. Please check for updates.", "Tangra 3", MessageBoxButtons.OK, MessageBoxIcon.Warning);
        }
    }
    else if (CurrentOS.IsMac)
    {
        if (minVideoOSXVersionRequired != null && !minVideoOSXVersionRequired.IsReqiredVersion(engineVersion))
        {
            MessageBox.Show("Your installation of Tangra3 doesn't have the latest version of libTangraVideo.dylib. Please check for updates.", "Tangra 3", MessageBoxButtons.OK, MessageBoxIcon.Warning);
        }
    }
    else
    {
        if (minVideoLinuxVersionRequired != null && !minVideoLinuxVersionRequired.IsReqiredVersion(engineVersion))
        {
            MessageBox.Show("Your installation of Tangra3 doesn't have the latest version of libTangraVideo.so. Please check for updates.", "Tangra 3", MessageBoxButtons.OK, MessageBoxIcon.Warning);
        }
    }
}
/// <summary>
/// Background worker entry point that renders a complete simulated AVI video
/// (a 300x200, 8 bpp, 25 fps file): one title frame followed by the configured
/// number of generated frames, reporting progress to the UI after each frame.
/// </summary>
/// <param name="state">A ModelConfig describing the simulation (boxed for the thread-pool signature).</param>
private void GenerateSimulatedVideo(object state)
{
    InvokeUpdateUI(2, 0, true);
    try
    {
        ModelConfig modelConfig = (ModelConfig)state;

        TangraVideo.CloseAviFile();
        TangraVideo.StartNewAviFile(modelConfig.FileName, 300, 200, 8, 25, false);
        try
        {
            // First frame: an on-screen title card announcing the simulation start.
            using (Bitmap bmp = new Bitmap(300, 200, PixelFormat.Format24bppRgb))
            {
                AddOnScreenText(bmp, modelConfig, "The simulated video stars from the next frame");
                // Fixed resource leak: the Pixelmap built from the bitmap was never disposed.
                using (Pixelmap pixmap = Pixelmap.ConstructFromBitmap(bmp, TangraConfig.ColourChannel.Red))
                {
                    TangraVideo.AddAviVideoFrame(pixmap, modelConfig.Gamma, null);
                }
            }

            // Remaining frames: generated simulation frames with progress callbacks.
            for (int i = 1; i <= modelConfig.TotalFrames; i++)
            {
                using (Pixelmap pixmap = GenerateFrame(i * 1.0 / modelConfig.TotalFrames, i, modelConfig))
                {
                    TangraVideo.AddAviVideoFrame(pixmap, modelConfig.Gamma, null);
                }

                InvokeUpdateUI(2, (int)(100.0 * i / modelConfig.TotalFrames), true);
            }
        }
        finally
        {
            TangraVideo.CloseAviFile();
        }
    }
    finally
    {
        InvokeUpdateUI(2, 100, false);
    }
}
/// <summary>
/// Returns the pixels of a single video frame as an 8-bit Pixelmap,
/// clamping the requested frame number into the stream's valid range.
/// </summary>
/// <param name="index">Zero-based frame number; clamped to [FirstFrame, LastFrame].</param>
/// <returns>A Pixelmap carrying the processed pixels, with UnprocessedPixels set to the raw values.</returns>
public Pixelmap GetPixelmap(int index)
{
    // Clamp the requested frame into the available range.
    int frameNo = Math.Min(Math.Max(index, FirstFrame), LastFrame);

    uint[] pixels;
    uint[] rawPixels;
    Bitmap frameBitmap;
    byte[] frameBitmapBytes;
    TangraVideo.GetFrame(frameNo, out pixels, out rawPixels, out frameBitmap, out frameBitmapBytes);

    var pixelmap = new Pixelmap(Width, Height, 8, pixels, frameBitmap, frameBitmapBytes)
    {
        UnprocessedPixels = rawPixels
    };
    return pixelmap;
}
/// <summary>
/// Returns an integrated (stacked) frame starting at the given frame number,
/// clamping the start frame into the stream's valid range.
/// </summary>
/// <param name="startFrameNo">First frame of the integration window; clamped to [FirstFrame, LastFrame].</param>
/// <param name="framesToIntegrate">Number of frames to integrate together.</param>
/// <param name="isSlidingIntegration">Whether to use a sliding integration window.</param>
/// <param name="isMedianAveraging">Whether to average by median instead of mean.</param>
/// <returns>A Pixelmap carrying the integrated pixels, with UnprocessedPixels set to the raw values.</returns>
public Pixelmap GetIntegratedFrame(int startFrameNo, int framesToIntegrate, bool isSlidingIntegration, bool isMedianAveraging)
{
    // Clamp the starting frame into the available range.
    int firstFrameNo = Math.Min(Math.Max(startFrameNo, FirstFrame), LastFrame);

    uint[] pixels;
    uint[] rawPixels;
    Bitmap frameBitmap;
    byte[] frameBitmapBytes;
    TangraVideo.GetIntegratedFrame(firstFrameNo, framesToIntegrate, isSlidingIntegration, isMedianAveraging, out pixels, out rawPixels, out frameBitmap, out frameBitmapBytes);

    var pixelmap = new Pixelmap(Width, Height, 8, pixels, frameBitmap, frameBitmapBytes)
    {
        UnprocessedPixels = rawPixels
    };
    return pixelmap;
}
/// <summary>
/// Releases the underlying native video file handle.
/// </summary>
public void Dispose() => TangraVideo.CloseFile();
// Returns a re-interlaced frame according to the configured Mode:
//  - SwapFields: swaps the odd/even fields of a single frame in place;
//  - ShiftOneField / SwapAndShiftOneField: combines fields from frame `index` and `index + 1`
//    (caching the later frame so sequential playback only decodes each frame once);
//  - None: delegates to the underlying base stream unchanged.
public Pixelmap GetPixelmap(int index)
{
    // Clamp the requested frame into the available range.
    if (index < FirstFrame) { index = FirstFrame; }
    if (index > LastFrame) { index = LastFrame; }

    uint[] pixels;
    uint[] originalPixels;
    Bitmap videoFrame;
    byte[] bitmapBytes;

    if (Mode == ReInterlaceMode.SwapFields)
    {
        TangraVideo.GetFrame(index, out pixels, out originalPixels, out videoFrame, out bitmapBytes);

        // Scratch buffer sized for a 24-bpp frame plus BMP headers (14-byte file header +
        // 40-byte info header + 1). NOTE(review): the bitmap below is rebuilt from
        // bitmapBytes, not bitmapPixels — presumably SwapVideoFields updates bitmapBytes
        // in place and bitmapPixels is native scratch space; confirm against TangraCore.
        byte[] bitmapPixels = new byte[Width * Height * 3 + 40 + 14 + 1];
        TangraCore.SwapVideoFields(pixels, originalPixels, Width, Height, bitmapPixels, bitmapBytes);

        videoFrame = Pixelmap.ConstructBitmapFromBitmapPixels(bitmapBytes, Width, Height);

        var rv = new Pixelmap(Width, Height, 8, pixels, videoFrame, bitmapBytes);
        rv.UnprocessedPixels = originalPixels;
        return(rv);
    }
    else if (Mode == ReInterlaceMode.ShiftOneField || Mode == ReInterlaceMode.SwapAndShiftOneField)
    {
        uint[] pixels2;
        uint[] originalPixels2;
        Bitmap videoFrame2;
        byte[] bitmapBytes2;

        // Reuse the cached decode of this frame if the previous call already fetched it
        // as its "next" frame (typical during sequential playback).
        if (m_LastPrevFrameId == index)
        {
            pixels = m_LastPrevFramePixels;
            originalPixels = m_LastPrevFrameOriginalPixels;
            bitmapBytes = m_LastPrevFrameBitmapBytes;
        }
        else
        {
            TangraVideo.GetFrame(index, out pixels, out originalPixels, out videoFrame, out bitmapBytes);
        }

        // Always fetch the following frame and cache it for the next sequential call.
        TangraVideo.GetFrame(index + 1, out pixels2, out originalPixels2, out videoFrame2, out bitmapBytes2);
        m_LastPrevFrameId = index + 1;
        m_LastPrevFramePixels = pixels2;
        m_LastPrevFrameOriginalPixels = originalPixels2;
        m_LastPrevFrameBitmapBytes = bitmapBytes2;

        // Same scratch-buffer layout as the SwapFields branch; see the note above it.
        byte[] bitmapPixels = new byte[Width * Height * 3 + 40 + 14 + 1];
        TangraCore.ShiftVideoFields(pixels, originalPixels, pixels2, originalPixels2, Width, Height, m_ShiftMode, bitmapPixels, bitmapBytes);

        videoFrame = Pixelmap.ConstructBitmapFromBitmapPixels(bitmapBytes, Width, Height);

        var rv = new Pixelmap(Width, Height, 8, pixels, videoFrame, bitmapBytes);
        rv.UnprocessedPixels = originalPixels;
        return(rv);
    }
    else if (Mode == ReInterlaceMode.None)
    {
        // No re-interlacing requested; pass straight through to the wrapped stream.
        return(m_BaseStream.GetPixelmap(index));
    }
    else
    {
        throw new NotSupportedException();
    }
}
/// <summary>
/// Finalizes and closes the AVI file currently being written by the native video layer.
/// </summary>
public void CloseAviFile() => TangraVideo.CloseAviFile();