/// <summary>
/// Determines crop values automatically by sampling frames across the video.
/// Starts at 10% of the video, then advances by ~6.66% and analyzes 11 frames in total.
/// </summary>
/// <param name="reader">the video reader to sample frames from</param>
/// <returns>the aggregated crop values derived from the sampled frames</returns>
public static CropValues autocrop(IVideoReader reader)
{
    // start at 10% of the video, then advance by 6.66% and analyze 11 frames in total
    int pos = reader.FrameCount / 10;
    int step = reader.FrameCount / 15;
    CropValues[] cropValues = new CropValues[11];
    for (int i = 0; i < 11; i++)
    {
        // dispose each frame bitmap as soon as it has been analyzed
        using (Bitmap b = reader.ReadFrameBitmap(pos))
            cropValues[i] = getAutoCropValues(b);
        pos += step;
    }
    // FIX: the original kept a never-assigned 'error' flag whose failure branch
    // (setting all fields to -1) was unreachable dead code; it has been removed.
    return getFinalAutocropValues(cropValues);
}
/// <summary>
/// Determines crop values automatically by sampling frames across the video.
/// Starts at 25% of the video, then advances by 5% and analyzes 10 frames in total.
/// </summary>
/// <param name="reader">the video reader to sample frames from</param>
/// <returns>the aggregated crop values derived from the sampled frames</returns>
public static CropValues autocrop(IVideoReader reader)
{
    int pos = reader.FrameCount / 4;
    int tenPercent = reader.FrameCount / 20;
    CropValues[] cropValues = new CropValues[10];
    for (int i = 0; i < 10; i++)
    {
        // FIX: dispose each frame bitmap — the original leaked every Bitmap it read
        using (Bitmap b = reader.ReadFrameBitmap(pos))
            cropValues[i] = getAutoCropValues(b);
        pos += tenPercent;
    }
    // FIX: removed the never-assigned 'error' flag whose failure branch
    // (setting all fields to -1) was unreachable dead code.
    return getFinalAutocropValues(cropValues);
}
/// <summary>
/// Runs autocropping on the given reader and optionally adjusts the result
/// for AR signalling using the selected mod16 method.
/// </summary>
/// <param name="cropValues">receives the derived crop values</param>
/// <param name="reader">the video reader to analyze</param>
/// <param name="signalAR">whether AR signalling adjustments should be applied</param>
/// <param name="cropMethod">the mod16 method used to adjust the crop values</param>
/// <param name="mValue">the mod value passed to over-/undercropping</param>
/// <returns>true if autocropping succeeded, false otherwise</returns>
public static bool autocrop(out CropValues cropValues, IVideoReader reader, bool signalAR, mod16Method cropMethod, modValue mValue)
{
    cropValues = Autocrop.autocrop(reader);
    if (signalAR)
    {
        switch (cropMethod)
        {
            case mod16Method.overcrop:
                ScriptServer.overcrop(ref cropValues, mValue);
                break;
            case mod16Method.mod4Horizontal:
                ScriptServer.cropMod4Horizontal(ref cropValues);
                break;
            case mod16Method.undercrop:
                ScriptServer.undercrop(ref cropValues, mValue);
                break;
        }
    }
    // a negative left value is the sentinel for a failed autocrop
    return cropValues.left >= 0;
}
/// <summary>
/// Releases the underlying source file and clears the associated reader.
/// Safe to call when nothing is open (or when already disposed).
/// </summary>
public void Dispose()
{
    if (videoSourceFile == null)
        return;

    videoSourceFile.Dispose();
    videoSourceFile = null;
    videoReader = null;
}
/// <summary>
/// Attempts to open every queued file as an AviSynth script.
/// Obtaining a video reader is the "open" smoke test; the reader itself is unused.
/// </summary>
private void TryToOpenFiles()
{
    foreach (string path in filesToOpen)
    {
        using (AvsFile script = AvsFile.ParseScript(path))
        {
            IVideoReader unused = script.GetVideoReader();
        }
    }
}
/// <summary>
/// Wires up the controller's working directories and the ffmpeg-backed video reader.
/// </summary>
public BarCodeVideoController()
{
    // TODO: Use dependency injection
    var server = HttpContext.Current.Server;
    _appDataPath = server.MapPath("~/App_Data");
    _ffmpegPath = server.MapPath("~/Libs/Ffmpeg");
    _uploadsPath = server.MapPath("~/Uploads");

    // CreateDirectory is a no-op when the directory already exists
    Directory.CreateDirectory(_appDataPath);
    Directory.CreateDirectory(_uploadsPath);

    _videoReader = new VideoReader(_appDataPath, _ffmpegPath);
}
/// <summary>
/// Helper assertion: autocrops the given script and verifies every side
/// matches the expected crop value.
/// </summary>
private void testCropping(string file, int left, int right, int top, int bottom)
{
    using (AvsFile script = AvsFile.ParseScript(file))
    {
        CropValues actual = Autocrop.autocrop(script.GetVideoReader());
        Expect(actual.left, EqualTo(left));
        Expect(actual.right, EqualTo(right));
        Expect(actual.top, EqualTo(top));
        Expect(actual.bottom, EqualTo(bottom));
    }
}
/// <summary>
/// Builds the view model: stores the URL and filters, wires the play/pause
/// commands, resolves the video reader, and prepares the background worker.
/// </summary>
public VideoVideoModel(string videoUrl, IVideoFilter[] filters)
{
    this.VideoUrl = videoUrl;
    _videoFilters = filters;

    _playCommand = new RelayCommand(_ => Play());
    _pauseCommand = new RelayCommand(_ => Stop());
    _videoReader = ModuleLoader.Load<IVideoReader>();

    _backgroundWorker = new BackgroundWorker { WorkerSupportsCancellation = true };
    _backgroundWorker.DoWork += StartBackgroundJob;
}
/// <summary>
/// Initializes the main form with its detection and video-reading collaborators
/// and shows the active detection system in the status bar.
/// </summary>
public MainForm(IImageDetector imageDetector, IVideoReaderThreadManager videoReaderThreadManager, ILogger logger, IVideoReader videoReader)
{
    // Currently the application only starts after Yolo has been initialized;
    // whether that is the right approach is an open question.
    // — Are you suggesting I add a preloader?
    // No idea, I don't know what would be better.
    ImageDetector = imageDetector;
    VideoReaderThreadManager = videoReaderThreadManager;
    Logger = logger;
    VideoReader = videoReader;
    InitializeComponent();
    toolStripStatusLabelYoloInfo.Text = $@"Detection system: {ImageDetector.YoloMetaInfo.DetectionSystem}";
}
/// <summary>
/// sets up encoding
/// </summary>
/// <param name="job">the job to be processed; must be an AviSynthJob</param>
/// <param name="error">output for any errors that might occur during this method</param>
/// <returns>true if the setup has succeeded, false if it has not</returns>
public bool setup(Job job, out string error)
{
    error = "";
    if (job is AviSynthJob)
    {
        this.job = (AviSynthJob)job;
    }
    else
    {
        error = "Job '" + job.Name + "' has been given to the AviSynthProcessor, even though it is not an AviSynthJob.";
        return (false);
    }
    stup.JobName = job.Name;
    try
    {
        // open the AviSynth script and obtain a reader for its video stream
        file = AvsFile.OpenScriptFile(job.Input);
        reader = file.GetVideoReader();
    }
    catch (Exception ex)
    {
        error = ex.Message;
        return (false);
    }
    stup.NbFramesTotal = reader.FrameCount;
    position = 0;
    // the worker threads are only created here; presumably they are started
    // later (e.g. in start()) — TODO confirm against the caller
    try
    {
        processorThread = new Thread(new ThreadStart(process));
    }
    catch (Exception e)
    {
        error = e.Message;
        return (false);
    }
    try
    {
        statusThread = new Thread(new ThreadStart(update));
    }
    catch (Exception e)
    {
        error = e.Message;
        return (false);
    }
    return (true);
}
/// <summary>
/// Reads the frame at the given position, or returns null when no reader is loaded.
/// Holds a read lock so the reader cannot be swapped out mid-read.
/// </summary>
/// <param name="pos">zero-based frame index to read</param>
/// <returns>the frame bitmap, or null if no video is loaded</returns>
private Bitmap getFrame(int pos)
{
    readerWriterLock.AcquireReaderLock(Timeout.Infinite);
    try
    {
        IVideoReader current = VideoReader;
        return current == null ? null : current.ReadFrameBitmap(pos);
    }
    finally
    {
        readerWriterLock.ReleaseReaderLock();
    }
}
/// <summary>
/// Returns the lazily created video reader for this clip.
/// </summary>
/// <returns>the (cached) video reader</returns>
/// <exception cref="Exception">thrown when the file has no video stream</exception>
public IVideoReader GetVideoReader()
{
    if (!this.VideoInfo.HasVideo)
        throw new Exception("Can't get Video Reader, since there is no video stream!");

    // double-checked locking: create the reader lazily, at most once
    if (videoReader == null)
    {
        lock (this)
        {
            if (videoReader == null)
                videoReader = new AvsVideoReader(clip, (int)VideoInfo.Width, (int)VideoInfo.Height);
        }
    }
    return videoReader;
}
/// <summary>
/// Loads a video reader for preview, replacing any previously loaded one.
/// </summary>
/// <param name="reader">the reader supplying the frames</param>
/// <param name="fps">framerate used for playback timing</param>
/// <param name="startPosition">frame to position on after loading</param>
public void LoadVideo(IVideoReader reader, double fps, int startPosition)
{
    // Release any previously loaded video first; after UnloadVideo the internal
    // reader is null, so this should normally be redundant — just to be sure.
    UnloadVideo();

    readerWriterLock.AcquireWriterLock(Timeout.Infinite);
    try
    {
        videoReader = reader;
    }
    finally
    {
        readerWriterLock.ReleaseWriterLock();
    }

    Framerate = fps;
    Position = startPosition;
}
/// <summary>
/// Returns the lazily created video reader, building the underlying
/// DirectShow-based AviSynth source file on first use.
/// </summary>
/// <returns>the (cached) video reader</returns>
/// <exception cref="Exception">thrown when the video stream cannot be read</exception>
public IVideoReader GetVideoReader()
{
    if (!HasVideo || !CanReadVideo)
        throw new Exception("Can't read the video stream");

    // double-checked locking: build source file and reader lazily, at most once
    if (videoSourceFile == null || videoReader == null)
    {
        lock (this)
        {
            if (videoSourceFile == null)
            {
                string inputLine = ScriptServer.GetInputLine(file, PossibleSources.directShow, false, false, false, FPS);
                videoSourceFile = AvsFile.ParseScript(inputLine);
                videoReader = null;
            }
            if (videoReader == null)
                videoReader = videoSourceFile.GetVideoReader();
        }
    }
    return videoReader;
}
/// <summary>
/// sets up encoding
/// </summary>
/// <param name="job">the job to be processed; must be an AviSynthJob</param>
/// <param name="su">status update object that receives progress information</param>
/// <param name="_">unused log item</param>
/// <exception cref="JobRunException">wraps any failure while opening the script or creating the worker threads</exception>
public void setup(Job job, StatusUpdate su, LogItem _)
{
    Debug.Assert(job is AviSynthJob, "Job isn't an AviSynthJob");
    stup = su;
    this.job = (AviSynthJob)job;
    try
    {
        // open the AviSynth script and obtain a reader for its video stream
        file = AvsFile.OpenScriptFile(job.Input);
        reader = file.GetVideoReader();
    }
    catch (Exception ex)
    {
        throw new JobRunException(ex);
    }
    stup.NbFramesTotal = (ulong)reader.FrameCount;
    stup.ClipLength = TimeSpan.FromSeconds((double)stup.NbFramesTotal / file.VideoInfo.FPS);
    stup.Status = "Playing through file...";
    position = 0;
    // the worker threads are only created here; presumably they are started
    // later (e.g. in start()) — TODO confirm against the caller
    try
    {
        processorThread = new Thread(new ThreadStart(process));
    }
    catch (Exception e)
    {
        throw new JobRunException(e);
    }
    try
    {
        statusThread = new Thread(new ThreadStart(update));
    }
    catch (Exception e)
    {
        throw new JobRunException(e);
    }
}
/// <summary>
/// Loads the current script text into the (possibly new) preview player
/// and switches the window into preview mode on success.
/// </summary>
private void previewButton_Click(object sender, System.EventArgs e)
{
    // Reuse the existing player when possible; otherwise create a fresh one.
    if (player == null || player.IsDisposed)
        player = new VideoPlayer();

    bool videoLoaded = player.loadVideo(mainForm, avisynthScript.Text, PREVIEWTYPE.REGULAR, false, true, player.CurrentFrame);
    if (!videoLoaded)
        return;

    player.disableIntroAndCredits();
    reader = player.Reader;
    isPreviewMode = true;
    sendCropValues();
    player.Show();
}
/// <summary>
/// opens a given DGIndex script
/// </summary>
/// <param name="videoInput">the DGIndex script to be opened</param>
/// <param name="textBoxName">the name to display in the input filename box</param>
/// <param name="inlineAvs">whether videoInput is an inline AviSynth script rather than a file name</param>
private void openVideo(string videoInput, string textBoxName, bool inlineAvs)
{
    // reset crop/input state before (re)opening
    this.crop.Checked = false;
    this.input.Filename = "";
    this.originalScript = videoInput;
    this.originalInlineAvs = inlineAvs;
    if (player != null)
        player.Dispose();
    bool videoLoaded = showOriginal();
    enableControls(videoLoaded);
    if (videoLoaded)
    {
        this.input.Filename = textBoxName;
        file = player.File;
        reader = player.Reader;
        this.fpsBox.Value = (decimal)file.Info.FPS;
        if (file.Info.FPS.Equals(25.0)) // disable ivtc for pal sources
            this.tvTypeLabel.Text = "PAL";
        else
            this.tvTypeLabel.Text = "NTSC";
        // initialize resolution controls to the full source size
        horizontalResolution.Maximum = file.Info.Width;
        verticalResolution.Maximum = file.Info.Height;
        horizontalResolution.Value = file.Info.Width;
        verticalResolution.Value = file.Info.Height;
        arChooser.Value = file.Info.DAR;
        // crop limits: never allow cropping away more than half the picture per side
        cropLeft.Maximum = cropRight.Maximum = file.Info.Width / 2;
        cropTop.Maximum = cropBottom.Maximum = file.Info.Height / 2;
        /// Commented out to ensure to keep the source file resolution when opening it
        /// if (resize.Enabled && resize.Checked)
        ///     suggestResolution.Checked = true;
        /// --------------------------------------------------------------------------
        this.showScript();
    }
}
/// <summary>
/// opens a dgindex script
/// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing, taking into account the derived cropping values,
/// is calculated, and finally the avisynth script is written and its name returned
/// </summary>
/// <param name="path">dgindex script</param>
/// <param name="AR">aspect ratio selection to be used; null means auto-detect from the source</param>
/// <param name="horizontalResolution">desired horizontal resolution of the output</param>
/// <param name="signalAR">whether or not ar signalling is to be used for the output
/// (depending on this parameter, resizing changes to match the source AR)</param>
/// <param name="log">log item receiving progress and error messages</param>
/// <param name="avsSettings">avisynth settings (crop method, denoise, resize, template, ...)</param>
/// <param name="autoDeint">whether automatic deinterlacing analysis should be run</param>
/// <param name="settings">the codec settings (used only for x264 level validation)</param>
/// <param name="dar">receives the display aspect ratio derived for the output</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string openVideo(string path, Dar? AR, int horizontalResolution, bool signalAR, LogItem log, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar? dar)
{
    dar = null;
    IMediaFile d2v = new d2vFile(path);
    IVideoReader reader = d2v.GetVideoReader();
    if (reader.FrameCount < 1)
    {
        log.Error("DGDecode reported 0 frames in this file. This is a fatal error. 
Please recreate the DGIndex project");
        return ("");
    }
    //Autocrop
    CropValues final = Autocrop.autocrop(reader);
    if (signalAR)
    {
        if (avsSettings.Mod16Method == mod16Method.overcrop)
        {
            ScriptServer.overcrop(ref final);
        }
        else if (avsSettings.Mod16Method == mod16Method.mod4Horizontal)
        {
            ScriptServer.cropMod4Horizontal(ref final);
        }
        else if (avsSettings.Mod16Method == mod16Method.undercrop)
        {
            ScriptServer.undercrop(ref final);
        }
    }
    // left == -1 is the sentinel for a failed autocrop
    bool error = (final.left == -1);
    if (!error)
    {
        log.LogValue("Autocrop values", final);
    }
    else
    {
        log.Error("Autocrop failed, aborting now");
        return ("");
    }
    decimal customDAR;
    log.LogValue("Auto-detect aspect ratio now", AR == null);
    //Check if AR needs to be autodetected now
    if (AR == null) // it does
    {
        customDAR = d2v.Info.DAR.ar;
        if (customDAR > 0)
        {
            log.LogValue("Aspect ratio", customDAR);
        }
        else
        {
            // no DAR in the source: fall back to ITU 16:9 PAL
            customDAR = Dar.ITU16x9PAL.ar;
            log.Warn(string.Format("No aspect ratio found, defaulting to {0}.", customDAR));
        }
    }
    else
    {
        customDAR = AR.Value.ar;
    }
    // Minimise upsizing: never ask for more width than the cropped source provides
    int sourceHorizontalResolution = (int)d2v.Info.Width - final.right - final.left;
    if (horizontalResolution > sourceHorizontalResolution)
    {
        if (avsSettings.Mod16Method == mod16Method.resize)
        {
            while (horizontalResolution > sourceHorizontalResolution + 16)
            {
                horizontalResolution -= 16;
            }
        }
        else
        {
            horizontalResolution = sourceHorizontalResolution;
        }
    }
    //Suggest a resolution (taken from AvisynthWindow.suggestResolution_CheckedChanged)
    int scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
    log.LogValue("Output resolution", horizontalResolution + "x" + scriptVerticalResolution);
    if (settings != null && settings is x264Settings) // verify that the video corresponds to the chosen avc level, if not, change the resolution until it does fit
    {
        x264Settings xs = (x264Settings)settings;
        if (xs.Level != 15)
        {
            AVCLevels al = new AVCLevels();
            log.LogValue("AVC level", al.getLevels()[xs.Level]);
            int compliantLevel = 15;
            // NOTE(review): the loop condition uses this.al (a field) while the lines above
            // use a fresh local AVCLevels 'al' — verify this shadowing is intentional;
            // 'levelName' below is computed but never used
            while (!this.al.validateAVCLevel(horizontalResolution, scriptVerticalResolution, d2v.Info.FPS, xs, out compliantLevel))
            { // resolution not profile compliant, reduce horizontal resolution by 16, get the new vertical resolution and try again
                string levelName = al.getLevels()[xs.Level];
                horizontalResolution -= 16;
                scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
            }
            log.LogValue("Resolution adjusted for AVC Level", horizontalResolution + "x" + scriptVerticalResolution);
        }
    }
    //Generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";
    inputLine = ScriptServer.GetInputLine(path, false, PossibleSources.d2v, false, false, false, 0);
    log.LogValue("Automatic deinterlacing", autoDeint);
    if (autoDeint)
    {
        // run the source detector and wait for it to finish before reading its result
        string d2vPath = path;
        SourceDetector sd = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis));
        finished = false;
        sd.analyse();
        waitTillAnalyseFinished();
        deinterlaceLines = filters[0].Script;
        log.LogValue("Deinterlacing used", deinterlaceLines);
    }
    inputLine = ScriptServer.GetInputLine(path, interlaced, PossibleSources.d2v, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0);
    cropLine = ScriptServer.GetCropLine(true, final);
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);
    resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize, horizontalResolution, scriptVerticalResolution, (ResizeFilterType)avsSettings.ResizeMethod);
    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
    {
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    }
    log.LogValue("Generated Avisynth script", newScript);
    try
    {
        StreamWriter sw = new StreamWriter(Path.ChangeExtension(path, ".avs"));
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        log.LogValue("Error saving AviSynth script", i, ImageType.Error);
        return ("");
    }
    return (Path.ChangeExtension(path, ".avs"));
}
/// <summary>
/// Creates a thread manager that drives the given video reader.
/// </summary>
/// <param name="videoReader">the reader whose work this manager coordinates</param>
public VideoReaderThreadManager(IVideoReader videoReader)
{
    this.VideoReader = videoReader;
}
/// <summary>
/// Loads a video with the given framerate, starting at the first frame.
/// </summary>
/// <param name="reader">the reader supplying the frames</param>
/// <param name="fps">framerate used for playback timing</param>
public void LoadVideo(IVideoReader reader, double fps)
{
    // delegate to the full overload, starting at position 0
    LoadVideo(reader, fps, 0);
}
/// <summary>
/// Loads a video with default settings, starting at the first frame.
/// </summary>
/// <param name="reader">the reader supplying the frames</param>
public void LoadVideo(IVideoReader reader)
{
    // defaults to 25 fps — presumably a PAL assumption; TODO confirm
    LoadVideo(reader, 25, 0);
}
// NOTE(review): a large commented-out legacy implementation of getAudioStreams()
// (splitting configured audio streams into encodable and mux-only lists) previously
// lived here. It was dead code and has been removed — recover it from version
// control history if it is ever needed again.

/// <summary>
/// opens a dgindex script
/// if the file can be properly opened, auto-cropping is performed, then depending on the AR settings
/// the proper resolution for automatic resizing, taking into account the derived cropping values,
/// is calculated, and finally the avisynth script is written and its name returned
/// </summary>
/// <param name="path">dgindex script</param>
/// <param name="AR">aspect ratio selection to be used; null means auto-detect from the source</param>
/// <param name="horizontalResolution">desired horizontal resolution of the output</param>
/// <param name="signalAR">whether or not ar signalling is to be used for the output
/// (depending on this parameter, resizing changes to match the source AR)</param>
/// <param name="logBuilder">stringbuilder where to append log messages</param>
/// <param name="avsSettings">avisynth settings (crop method, denoise, resize, template, ...)</param>
/// <param name="autoDeint">whether automatic deinterlacing analysis should be run</param>
/// <param name="settings">the codec settings (used only for x264 level validation)</param>
/// <param name="dar">receives the display aspect ratio derived for the output</param>
/// <returns>the name of the AviSynth script created, empty if there was an error</returns>
private string openVideo(string path, Dar? AR, int horizontalResolution, bool signalAR, StringBuilder logBuilder, AviSynthSettings avsSettings, bool autoDeint, VideoCodecSettings settings, out Dar? dar)
{
    dar = null;
    IMediaFile d2v = new d2vFile(path);
    IVideoReader reader = d2v.GetVideoReader();
    if (reader.FrameCount < 1)
    {
        logBuilder.Append("DGDecode reported 0 frames in this file.\r\nThis is a fatal error.\r\n\r\nPlease recreate the DGIndex project");
        return ("");
    }
    //Autocrop
    CropValues final = Autocrop.autocrop(reader);
    if (signalAR)
    {
        // NOTE(review): unlike the LogItem variant of this method, the undercrop
        // case is not handled here — verify whether that is intentional
        if (avsSettings.Mod16Method == mod16Method.overcrop)
        {
            ScriptServer.overcrop(ref final);
        }
        else if (avsSettings.Mod16Method == mod16Method.mod4Horizontal)
        {
            ScriptServer.cropMod4Horizontal(ref final);
        }
    }
    // left == -1 is the sentinel for a failed autocrop
    bool error = (final.left == -1);
    if (!error)
    {
        logBuilder.Append("Autocropping successful. Using the following crop values: left: " + final.left + ", top: " + final.top + ", right: " + final.right + ", bottom: " + final.bottom + ".\r\n");
    }
    else
    {
        logBuilder.Append("Autocropping did not find 3 frames that have matching crop values\r\n" + "Autocrop failed, aborting now");
        return ("");
    }
    decimal customDAR;
    //Check if AR needs to be autodetected now
    if (AR == null) // it does
    {
        logBuilder.Append("Aspect Ratio set to auto-detect later, detecting now. 
");
        customDAR = d2v.Info.DAR.ar;
        if (customDAR > 0)
        {
            logBuilder.AppendFormat("Found aspect ratio of {0}.{1}", customDAR, Environment.NewLine);
        }
        else
        {
            // no DAR in the source: fall back to ITU 16:9 PAL
            customDAR = Dar.ITU16x9PAL.ar;
            logBuilder.AppendFormat("No aspect ratio found, defaulting to {0}.{1}", customDAR, Environment.NewLine);
        }
    }
    else
    {
        customDAR = AR.Value.ar;
    }
    //Suggest a resolution (taken from AvisynthWindow.suggestResolution_CheckedChanged)
    int scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
    if (settings != null && settings is x264Settings) // verify that the video corresponds to the chosen avc level, if not, change the resolution until it does fit
    {
        x264Settings xs = (x264Settings)settings;
        if (xs.Level != 15)
        {
            int compliantLevel = 15;
            while (!this.al.validateAVCLevel(horizontalResolution, scriptVerticalResolution, d2v.Info.FPS, xs, out compliantLevel))
            { // resolution not profile compliant, reduce horizontal resolution by 16, get the new vertical resolution and try again
                AVCLevels al = new AVCLevels();
                string levelName = al.getLevels()[xs.Level];
                logBuilder.Append("Your chosen AVC level " + levelName + " is too strict to allow your chosen resolution of " + horizontalResolution + "*" + scriptVerticalResolution + ". 
Reducing horizontal resolution by 16.\r\n");
                horizontalResolution -= 16;
                scriptVerticalResolution = Resolution.suggestResolution(d2v.Info.Height, d2v.Info.Width, (double)customDAR, final, horizontalResolution, signalAR, mainForm.Settings.AcceptableAspectErrorPercent, out dar);
            }
            logBuilder.Append("Final resolution that is compatible with the chosen AVC Level: " + horizontalResolution + "*" + scriptVerticalResolution + "\r\n");
        }
    }
    //Generate the avs script based on the template
    string inputLine = "#input";
    string deinterlaceLines = "#deinterlace";
    string denoiseLines = "#denoise";
    string cropLine = "#crop";
    string resizeLine = "#resize";
    inputLine = ScriptServer.GetInputLine(path, false, PossibleSources.d2v, false, false, false, 0);
    if (autoDeint)
    {
        // run the source detector and wait for it to finish before reading its result
        logBuilder.AppendLine("Automatic deinterlacing was checked. Running now...");
        string d2vPath = path;
        SourceDetector sd = new SourceDetector(inputLine, d2vPath, false, mainForm.Settings.SourceDetectorSettings, new UpdateSourceDetectionStatus(analyseUpdate), new FinishedAnalysis(finishedAnalysis));
        finished = false;
        sd.analyse();
        waitTillAnalyseFinished();
        deinterlaceLines = filters[0].Script;
        logBuilder.AppendLine("Deinterlacing used: " + deinterlaceLines);
    }
    inputLine = ScriptServer.GetInputLine(path, interlaced, PossibleSources.d2v, avsSettings.ColourCorrect, avsSettings.MPEG2Deblock, false, 0);
    cropLine = ScriptServer.GetCropLine(true, final);
    denoiseLines = ScriptServer.GetDenoiseLines(avsSettings.Denoise, (DenoiseFilterType)avsSettings.DenoiseMethod);
    resizeLine = ScriptServer.GetResizeLine(!signalAR || avsSettings.Mod16Method == mod16Method.resize, horizontalResolution, scriptVerticalResolution, (ResizeFilterType)avsSettings.ResizeMethod);
    string newScript = ScriptServer.CreateScriptFromTemplate(avsSettings.Template, inputLine, cropLine, resizeLine, denoiseLines, deinterlaceLines);
    if (dar.HasValue)
    {
        newScript = string.Format("global MeGUI_darx = {0}\r\nglobal MeGUI_dary = {1}\r\n{2}", dar.Value.X, dar.Value.Y, newScript);
    }
    logBuilder.Append("Avisynth script created:\r\n");
    logBuilder.Append(newScript);
    try
    {
        StreamWriter sw = new StreamWriter(Path.ChangeExtension(path, ".avs"));
        sw.Write(newScript);
        sw.Close();
    }
    catch (IOException i)
    {
        logBuilder.Append("An error ocurred when trying to save the AviSynth script:\r\n" + i.Message);
        return ("");
    }
    return (Path.ChangeExtension(path, ".avs"));
}
/// <summary>
/// Shows the original (uncropped) script in the preview player,
/// creating a new player when the previous one is gone.
/// </summary>
/// <returns>true if the video could be loaded, false otherwise</returns>
private bool showOriginal()
{
    if (player == null || player.IsDisposed)
        player = new VideoPlayer();

    this.isPreviewMode = false;

    bool loaded = player.loadVideo(mainForm, originalScript, PREVIEWTYPE.REGULAR, false, originalInlineAvs, player.CurrentFrame);
    if (!loaded)
    {
        // loading failed: tear the player down completely
        player.Close();
        player = null;
        return false;
    }

    player.Show();
    reader = player.Reader;
    sendCropValues();
    if (mainForm.Settings.AlwaysOnTop)
        player.TopMost = true;
    return true;
}
/// <summary>
/// Returns the lazily created video reader for this clip.
/// </summary>
/// <returns>the (cached) video reader</returns>
/// <exception cref="Exception">thrown when the file has no video stream</exception>
public IVideoReader GetVideoReader()
{
    if (!this.Info.HasVideo)
        throw new Exception("Can't get Video Reader, since there is no video stream!");

    if (videoReader != null)
        return videoReader;

    // double-checked locking: the reader is created lazily, at most once
    lock (this)
    {
        if (videoReader == null)
            videoReader = new AvsVideoReader(clip, (int)Info.Width, (int)Info.Height);
    }
    return videoReader;
}
/// <summary>
/// reloads the video, sets up the proper window size and enables / disables the GUI buttons depending on the
/// preview type set
/// </summary>
/// <returns>true if the video could be opened, false if not</returns>
public bool reloadVideo()
{
    videoPreview.UnloadVideo();
    // dispose the previous media file before opening a new one
    lock (this)
    {
        if (file != null)
        {
            file.Dispose();
        }
    }
    try
    {
        if (bInlineAVS)
        {
            file = AvsFile.ParseScript(strFileName, true);
        }
        else
        {
            file = mainForm.MediaFileFactory.Open(strFileName);
            if (file == null)
            {
                throw new Exception("The video stream cannot be opened");
            }
        }
        reader = file.GetVideoReader();
    }
    catch (AviSynthException e)
    {
        MessageBox.Show("AviSynth script error:\r\n" + e.Message, "AviSynth error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return (false);
    }
    catch (ArgumentException e)
    {
        MessageBox.Show("AviSynth script error:\r\n" + e.Message, "AviSynth error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return (false);
    }
    catch (Exception e)
    {
        MessageBox.Show("The file " + strFileName + " cannot be opened.\r\n" + "Error message: " + e.Message, "Cannot open video input", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return (false);
    }
    if (reader != null && reader.FrameCount > 0)
    {
        this.positionSlider.Minimum = 0;
        this.positionSlider.Maximum = reader.FrameCount - 1;
        this.positionSlider.TickFrequency = this.positionSlider.Maximum / 20;
        SetMaxZoomWidth();
        doInitialAdjustment();
        // keep the current slider position when it is still valid,
        // otherwise jump to the middle of the clip
        int iStart = 0;
        if (positionSlider.Value >= 0 && positionSlider.Value <= reader.FrameCount)
        {
            iStart = positionSlider.Value;
        }
        else
        {
            iStart = reader.FrameCount / 2;
        }
        videoPreview.LoadVideo(reader, file.VideoInfo.FPS, iStart);
        setTitleText();
        return (true);
    }
    return (false);
}
/// <summary>
/// loads the video, sets up the proper window size and enables / disables the GUI buttons depending on the
/// preview type set
/// </summary>
/// <param name="mainForm">the main form, used to access the media file factory</param>
/// <param name="path">path of the video file to be loaded (or an inline script, see inlineAvs)</param>
/// <param name="type">type of window</param>
/// <param name="hasAR">whether the preview has a display aspect ratio attached</param>
/// <param name="inlineAvs">true if path contains not filename but avisynth script to be parsed</param>
/// <param name="startFrame">Select a specific frame to start off with or -1 for middle of video</param>
/// <returns>true if the video could be opened, false if not</returns>
public bool loadVideo(MainForm mainForm, string path, PREVIEWTYPE type, bool hasAR, bool inlineAvs, int startFrame)
{
    lock (this)
    {
        if (file != null)
            file.Dispose();
        if (videoPreview.Image != null)
            videoPreview.Image.Dispose(); // get rid of previous bitmap
    }
    try
    {
        if (inlineAvs)
        {
            file = AvsFile.ParseScript(path);
        }
        else
        {
            file = mainForm.MediaFileFactory.Open(path);
            // FIX: the original used '&&', which dereferenced file.Info when file was
            // null (NullReferenceException) and never threw for a null file; '||'
            // short-circuits so a null file throws before file.Info is touched.
            if (file == null || !(file.Info.HasVideo && file.CanReadVideo))
                throw new ArgumentException("The video stream cannot be opened");
        }
        reader = file.GetVideoReader();
    }
    catch (AviSynthException e)
    {
        MessageBox.Show("AviSynth script error:\r\n" + e.Message, "AviSynth error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return false;
    }
    catch (ArgumentException e)
    {
        MessageBox.Show("AviSynth script error:\r\n" + e.Message, "AviSynth error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return false;
    }
    catch (Exception e)
    {
        MessageBox.Show("The file " + path + " cannot be opened.\r\n Please make sure it's a valid AviSynth script and that AviSynth is " +
            " properly installed.\r\nYou can check the validity of your script and AviSynth installation by opening the file in your favorite media player.\r\n" +
            " If that works, try opening the video in VirtualDub(Mod) as well. If the former works and the latter doesn't, install a YV12 codec.\r\n" +
            "Error message for your reference: " + e.Message, "Cannot open video input", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return false;
    }
    if (reader != null && reader.FrameCount > 0)
    {
        this.positionSlider.Minimum = 0;
        this.positionSlider.Maximum = reader.FrameCount - 1;
        this.positionSlider.Value = startFrame >= 0 ? startFrame : reader.FrameCount / 2;
        this.positionSlider.TickFrequency = this.positionSlider.Maximum / 20;
        this.viewerType = type;
        this.hasAR = hasAR;
        this.videoWindowWidth = (int)file.Info.Width;
        this.videoWindowHeight = (int)file.Info.Height;
        zoomWidth = (int)file.Info.Width;
        doInitialAdjustment();
        adjustSize();
        positionSlider_Scroll(null, null); // makes the image visible
        this.Text = "Current position: " + this.positionSlider.Value + "/" + this.positionSlider.Maximum;
        isRunning = false;
        millisecondsPerFrame = (int)(1000 / file.Info.FPS);
        return true;
    }
    return false;
}
/// <summary>
/// loads the video, sets up the proper window size and enables / disables the GUI buttons depending on the
/// preview type set
/// </summary>
/// <param name="mainForm">the main form, used to access the media file factory</param>
/// <param name="path">path of the video file to be loaded (or an inline script, see inlineAvs)</param>
/// <param name="type">type of window</param>
/// <param name="hasAR">whether the preview has a display aspect ratio attached</param>
/// <param name="inlineAvs">true if path contain not filename but avsynth script to be parsed</param>
/// <param name="startFrame">Select a specific frame to start off with or -1 for middle of video</param>
/// <param name="originalSize">whether the video should be shown at its original size</param>
/// <returns>true if the video could be opened, false if not</returns>
public bool loadVideo(MainForm mainForm, string path, PREVIEWTYPE type, bool hasAR, bool inlineAvs, int startFrame, bool originalSize)
{
    videoPreview.UnloadVideo();
    // remember the parameters so reloadVideo() can reopen the same input later
    bInlineAVS = inlineAvs;
    strFileName = path;
    bOriginalSize = originalSize;
    lock (this)
    {
        if (file != null)
        {
            file.Dispose();
        }
    }
    try
    {
        if (inlineAvs)
        {
            file = AvsFile.ParseScript(path, true);
            // an inline script has no file on disk that could be reloaded
            btnReloadVideo.Enabled = false;
        }
        else
        {
            file = mainForm.MediaFileFactory.Open(path);
            if (file == null)
            {
                throw new Exception("The video stream cannot be opened");
            }
            btnReloadVideo.Enabled = true;
        }
        reader = file.GetVideoReader();
    }
    catch (AviSynthException e)
    {
        MessageBox.Show("AviSynth script error:\r\n" + e.Message, "AviSynth error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return (false);
    }
    catch (ArgumentException e)
    {
        MessageBox.Show("AviSynth script error:\r\n" + e.Message, "AviSynth error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return (false);
    }
    catch (Exception e)
    {
        MessageBox.Show("The file " + path + " cannot be opened.\r\n" + "Error message: " + e.Message, "Cannot open video input", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return (false);
    }
    if (reader != null && reader.FrameCount > 0)
    {
        this.positionSlider.Minimum = 0;
        this.positionSlider.Maximum = reader.FrameCount - 1;
        this.positionSlider.TickFrequency = this.positionSlider.Maximum / 20;
        this.viewerType = type;
        this.hasAR = hasAR;
        zoomMaxWidth = 0;
        SetMaxZoomWidth();
        doInitialAdjustment();
        // start at the requested frame, or at the middle of the clip when none was given
        int iStart = 0;
        if (startFrame >= 0)
        {
            iStart = startFrame;
        }
        else
        {
            iStart = reader.FrameCount / 2;
        }
        videoPreview.LoadVideo(reader, file.VideoInfo.FPS, iStart);
        setTitleText();
        return (true);
    }
    return (false);
}
/// <summary>
/// Returns the pixel mask of string s rendered at (x, y) with the given ASS style.
/// Non-AVS masks fall through to the base implementation; AVS masks are rendered
/// via an AviSynth/TextSub round-trip and cached in the mask database.
/// </summary>
/// <param name="s">the text to render</param>
/// <param name="x">x position of the text</param>
/// <param name="y">y position of the text</param>
/// <param name="style">the [V4+ Styles] style line used for rendering</param>
/// <returns>the rendered mask, or null if the generated AVS script cannot be read</returns>
public StringMask GetMask(string s, int x, int y, string style)
{
    if (!IsAvsMask)
    {
        return (base.GetMask(s, x, y));
    }
    // pure whitespace never produces pixels: answer without rendering
    if (s.Trim() == "")
    {
        return new StringMask { Height = FontHeight, Width = (WhitespaceWidth >= 0) ? WhitespaceWidth : FontWidth, X0 = x, Y0 = y, Points = new List<ASSPoint>() };
    }
    // cache lookup keyed by position, resolution, style and string hash
    using (MaskDataContext db = new MaskDataContext())
    {
        var ma = db.Masks.Where(m => m.X == x && m.Y == y && m.PlayResX == this.PlayResX && m.PlayResY == this.PlayResY && m.Style == this.MaskStyle && m.Str == s.GetHashCode().ToString());
        if (ma.Count() > 0)
        {
            // NOTE(review): BinaryFormatter is unsafe for untrusted data and removed in
            // .NET 9 — consider migrating the cache format; flagged, not changed here
            return (new BinaryFormatter().Deserialize(new MemoryStream(ma.First().Data.ToArray())) as StringMask);
        }
    }
    // generate ass file (using-blocks ensure the writers are closed even on exceptions)
    string assFN = "BaseAnime2_Temp.ass";
    using (StreamWriter assOut = new StreamWriter(new FileStream(assFN, FileMode.Create), Encoding.Unicode))
    {
        assOut.WriteLine("[Script Info]");
        assOut.WriteLine("Synch Point:0");
        assOut.WriteLine("ScriptType: v4.00+");
        assOut.WriteLine("Collisions:Normal");
        assOut.WriteLine("PlayResX:{0}", this.PlayResX);
        assOut.WriteLine("PlayResY:{0}", this.PlayResY);
        assOut.WriteLine("Timer:100.0000");
        assOut.WriteLine("");
        assOut.WriteLine("[V4+ Styles]");
        assOut.WriteLine("Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding");
        assOut.WriteLine(style);
        assOut.WriteLine("");
        assOut.WriteLine("[Events]");
        assOut.WriteLine("Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text");
        assOut.WriteLine("Dialogue: 0,0:00:00.00,0:01:00.00,Default,NTP,0000,0000,0000,,{0}{1}", ASSEffect.pos(x, y), s);
    }
    // generate avs file that renders the subtitle onto a blank clip
    string avsFN = "BaseAnime2_Temp.avs";
    using (StreamWriter avsOut = new StreamWriter(new FileStream(avsFN, FileMode.Create), Encoding.Default))
    {
        avsOut.WriteLine("BlankClip(height={0}, width={1}, length=1000, fps=23.976)", this.PlayResY, this.PlayResX);
        avsOut.WriteLine("ConvertToRGB24()");
        avsOut.WriteLine("TextSub(\"{0}\")", assFN);
    }
    AvsFile avs = AvsFile.OpenScriptFile(avsFN);
    if (!avs.CanReadVideo)
    {
        // FIX: dispose the AvsFile on this early exit (the original leaked it)
        avs.Dispose();
        return (null);
    }
    IVideoReader ivr = avs.GetVideoReader();
    Bitmap bmp = ivr.ReadFrameBitmap(0);
    BitmapData bd = bmp.LockBits(new Rectangle(0, 0, bmp.Width, bmp.Height), ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb);
    List<ASSPoint> result = new List<ASSPoint>();
    unsafe
    {
        // scan the 24bpp frame; any pixel with a non-zero first channel is part of the glyph
        byte* p = (byte*)(void*)bd.Scan0;
        for (int x1 = 0; x1 < bmp.Width; x1++)
        {
            for (int y1 = 0; y1 < bmp.Height; y1++)
            {
                byte* q = p + bd.Stride * y1 + x1 * 3;
                if (q[0] > 0)
                {
                    result.Add(new ASSPoint { Brightness = q[0], X = x1, Y = y1 });
                }
            }
        }
    }
    bmp.UnlockBits(bd);
    bmp.Dispose();
    avs.Dispose();
    if (result.Count == 0)
    {
        return new StringMask { Height = 0, Width = 0, X0 = x, Y0 = y, Points = result };
    }
    // bounding box of all rendered points
    int xmin = 10000;
    int ymin = 10000;
    int xmax = -1;
    int ymax = -1;
    foreach (ASSPoint pt in result)
    {
        if (xmin > pt.X) { xmin = pt.X; }
        if (xmax < pt.X) { xmax = pt.X; }
        if (ymin > pt.Y) { ymin = pt.Y; }
        if (ymax < pt.Y) { ymax = pt.Y; }
    }
    StringMask sm = new StringMask { Height = ymax - ymin + 1, Width = xmax - xmin + 1, X0 = x, Y0 = y, Points = result };
    //sm.CalculateEdgeDistance();
    // store the freshly rendered mask in the cache.
    // FIX: the original wrapped this in try { ... } catch (Exception e) { throw e; },
    // which only destroyed the stack trace; the pointless catch has been removed.
    using (MaskDataContext db = new MaskDataContext())
    {
        MemoryStream ms = new MemoryStream();
        new BinaryFormatter().Serialize(ms, sm);
        ms.Position = 0;
        db.Masks.InsertOnSubmit(new Mask { X = x, Y = y, PlayResX = this.PlayResX, PlayResY = this.PlayResY, Style = this.MaskStyle, Str = s.GetHashCode().ToString(), Data = new Binary(ms.ToArray()) });
        db.SubmitChanges();
    }
    return (sm);
}