/// <summary>
/// Prompts for a local video file with the OpenFileDialog and configures the
/// capture from it, caching FPS and frame size for later processing.
/// </summary>
private void LoadVideo()
{
    if (openFileDialog.ShowDialog() == DialogResult.OK)
    {
        // Release any previously opened capture before replacing it.
        if (cap != null)
        {
            cap.Dispose();
        }
        // create the video capture
        string path = openFileDialog.FileName;
        cap = VideoCapture.FromFile(path);
        // display the input file name
        string inFname = Path.GetFileName(path);
        // lblInput.Text = "Video File: " + inFname;

        // Cache the stream properties used elsewhere for processing/display.
        uFps = (uint)cap.Get(CaptureProperty.Fps);
        uFrameWidth = (uint)cap.Get(CaptureProperty.FrameWidth);
        uFrameHeight = (uint)cap.Get(CaptureProperty.FrameHeight);
        strCapType = "Video";
        strCapFeed = path;
    }
}
/// <summary>
/// Opens the given video file, runs the common capture-processing loop over
/// it, and releases the capture afterwards even if processing throws.
/// </summary>
static void ProcFromVideFile(string filename)
{
    var capture = VideoCapture.FromFile(filename);
    try
    {
        ProcCapture(capture);
    }
    finally
    {
        capture.Dispose();
    }
}
/// <summary>
/// Worker-thread loop that runs AnalyzePhoto over every path in listPhotos,
/// updating the TreatmentPhotos status labels after each photo.
/// </summary>
public void StartAnalyzingPhotos(TreatmentPhotos treatmentPhotos)
{
    var photoCount = listPhotos.Count;
    Utilities.debugmessage("Analyze Photo Thread STARTED!");
    do
    {
        // analyzePhotoStarted gates the work: while it is true this loop
        // busy-spins; presumably AnalyzePhoto (or its worker) clears the
        // flag when done — TODO confirm who resets it.
        if (!analyzePhotoStarted)
        {
            analyzePhotoStarted = true;
            var path = listPhotos[trPhoto];
            nameFile = path;
            // A still image is opened through VideoCapture and its single
            // frame read out. NOTE(review): the previous `cap` is not
            // disposed before being replaced — possible native leak; confirm.
            cap = VideoCapture.FromFile(path);
            Mat photoFrm = new Mat();
            cap.Read(photoFrm);
            AnalyzePhoto(photoFrm);
            trPhoto++;
            GC.Collect();
            // Marshal the label updates onto the UI thread.
            treatmentPhotos.Invoke(new Action(() =>
            {
                treatmentPhotos.lblStatus.Text = "Treatment photo...";
                treatmentPhotos.lblCount.Text = "Photo: " + trPhoto + "/" + photoCount;
            }));
        }
    } while (trPhoto < photoCount);
    Utilities.debugmessage("Analyze Photo Thread FINISHED!");
}
/// <summary>
/// Opens the CropWindow on the first frame of the selected video and, if the
/// dialog is confirmed, copies the chosen crop margins back into the fields.
/// </summary>
private void SelectCropSize_OnClick(object sender, RoutedEventArgs e)
{
    string file = browseFile.Text;
    if (!VerifyFileExists(file))
    {
        return;
    }

    var mat = new Mat();
    // Grab the first frame to give the crop dialog something to preview.
    // BUGFIX: the capture was never disposed; scope it with using.
    using (var capture = VideoCapture.FromFile(file))
    {
        capture.Set(VideoCaptureProperties.PosFrames, 0);
        capture.Read(mat);
    }

    var window = new CropWindow(mat);
    window.cropLeft.Value = CropLeft.Value;
    window.cropTop.Value = CropTop.Value;
    window.cropRight.Value = CropRight.Value;
    window.cropBot.Value = CropBot.Value;

    // BUGFIX: ShowDialog() returns bool?; HasValue is true even when the
    // user cancels, so the old check applied the crop on cancel as well.
    // Only accept the values when the dialog was confirmed.
    if (window.ShowDialog() == true)
    {
        CropLeft.Value = window.cropLeft.Value;
        CropTop.Value = window.cropTop.Value;
        CropRight.Value = window.cropRight.Value;
        CropBot.Value = window.cropBot.Value;
    }
}
/// <summary>
/// Creates the analyzer: opens the video stream (camera when filePath parses
/// as an integer, file/URL otherwise) and starts a session log.
/// </summary>
public FrameAnalyzer(MainWindow mWw, string filePath, Plane pictureBox, CancellationToken TT, bool camM = false)
{
    mw = mWw;
    token = TT;
    // Initialize all reference members.
    int a;
    if (int.TryParse(filePath, out a))
    {
        // NOTE(review): the parsed index `a` is ignored and CaptureDevice.Any
        // is opened instead — confirm whether a specific camera index matters.
        videoStream = VideoCapture.FromCamera(CaptureDevice.Any);
    }
    else
    {
        videoStream = VideoCapture.FromFile(filePath);
    }
    plane = pictureBox;
    lastTime = DateTime.Now;
    cameraMode = camM;
    // Log file name derived from the current date/time; ':' is not valid in
    // Windows file names, so it is replaced with '.'.
    logName = $@"{DateTime.Now.ToShortDateString()} {DateTime.Now.ToLongTimeString()}.txt".Replace(":", ".");
    WriteStateLogs("Начало работы", logName);
    WriteStateLogsInDB(); // Also record the session start in the DB.
}
/// <summary>
/// Opens the demo clip and caches its frame count for playback.
/// Source video: https://drive.google.com/file/d/1rc13wZ9zC03ObG5zB3uccUtsg_rsI8hC/view
/// </summary>
public void Init()
{
    cap = VideoCapture.FromFile("Megaman.mp4");
    // The duration ((double)cap.FrameCount / cap.Fps) computed here before
    // was never used and has been removed; only the frame count is kept.
    FrameCount = cap.FrameCount;
}
/// <summary>
/// Plays "cheerleading_0001.mp4" (next to the executable) at its native FPS,
/// rendering frames through the WPF dispatcher and dropping frames when
/// playback falls behind the wall clock.
/// </summary>
public void player()
{
    string path = AppDomain.CurrentDomain.BaseDirectory;
    string fullpath = System.IO.Path.Combine(path, "cheerleading_0001.mp4");
    // BUGFIX: the capture was never disposed; also removed the unused
    // FrameCount read and the no-op Set calls (setting width/height to their
    // current values).
    using (VideoCapture video = VideoCapture.FromFile(fullpath))
    {
        if (!video.IsOpened())
        {
            MessageBox.Show("not open");
            return;
        }
        double fps = video.Get(CaptureProperty.Fps);

        int count = 0;
        DateTime start = DateTime.Now;
        while (true)
        {
            // BUGFIX: each frame Mat is now disposed, and a failed Read()
            // (end of stream) exits the loop instead of spinning forever.
            using (Mat frame = new Mat())
            {
                if (!video.Read(frame))
                {
                    break;
                }
                if (frame.Width == 0 && frame.Height == 0)
                {
                    break;
                }
                count++;

                TimeSpan playTime = DateTime.Now - start;
                TimeSpan targetTime = TimeSpan.FromSeconds(count / fps);
                if (targetTime < playTime)
                {
                    // Running late: drop this frame to catch up.
                    continue;
                }

                // Synchronous Invoke so the Mat is still valid while the
                // WriteableBitmap is created on the UI thread.
                Dispatcher.Invoke(new Action(delegate ()
                {
                    var a = WriteableBitmapConverter.ToWriteableBitmap(frame, 96, 96, PixelFormats.Bgr24, null);
                    //img_player.Source = a;
                }));

                // Sleep off any remaining time so playback matches the FPS.
                playTime = DateTime.Now - start;
                if (targetTime > playTime)
                {
                    Thread.Sleep(targetTime - playTime);
                }
            }
        }
    }
}
/// <summary>
/// Opens camera selection: toggles between video-file mode and a USB/IP
/// camera stream, showing the first camera frame as a preview.
/// </summary>
private void CameraChanger_Click(object sender, RoutedEventArgs e)
{
    // If processing is running (Start disabled), stop it before switching.
    if (Start.IsEnabled == false)
    {
        Stop_Click(new object(), new RoutedEventArgs());
    }
    if (cameraMode == StreamSrc.Video)
    {
        var f = new CameraSelectWindow().GetCam();
        if (f == "")
        {
            // Dialog cancelled — keep the current mode.
            return;
        }
        try
        {
            VideoCapture v;
            if (f == "USB")
            {
                cameraMode = StreamSrc.USB_cam;
                v = VideoCapture.FromCamera(CaptureDevice.Any);
                filePath = "0";
                camM = true;
            }
            else
            {
                // Anything else is treated as an IP-camera URL.
                cameraMode = StreamSrc.IP_cam;
                filePath = f;
                v = VideoCapture.FromFile(filePath);
                camM = true;
            }
            // Read one frame as a preview; an unreadable stream throws and
            // falls into the catch below.
            var r = new Mat();
            v.Read(r);
            myImage.Source = WriteableBitmapConverter.ToWriteableBitmap(r);
            r.Dispose();
            v.Dispose();
            CameraChanger.Content = "Режим камеры активирован";
            CameraChanger.Background = Brushes.Green;
            labelCurState.Content = "Получение потока с камеры";
        }
        catch
        {
            // Camera unavailable — report and fall back to video mode.
            MessageBox.Show("Камера недоступна");
            cameraMode = StreamSrc.Video;
        }
    }
    else
    {
        // Already in camera mode: switch back to reading a video file.
        labelCurState.Content = "Чтение видео-файла";
        CameraChanger.Content = "Выбор камеры";
        CameraChanger.Background = Brushes.LightGray;
        cameraMode = StreamSrc.Video;
    }
}
/// <summary>
/// Dumps the frames of a test AVI into image\capture as numbered BMP files
/// and prints how long the whole run took.
/// </summary>
static public void exec()
{
    if (!Directory.Exists(@"image\capture"))
    {
        Directory.CreateDirectory(@"image\capture");
    }
    var start = DateTime.Now;
    // BUGFIX: `using` guarantees the native capture and Mat are released even
    // if ImWrite throws (previously Release/Dispose were skipped on error).
    // Dead commented-out code removed.
    using (var videoCapture = VideoCapture.FromFile(@"C:\tagwork\test\knn_t\testdata\2018-08-09 23-18-15.avi"))
    using (var mat = new Mat())
    {
        if (videoCapture.IsOpened())
        {
            // 24500 is only an upper bound; the loop stops at end-of-stream.
            for (int i = 0; i < 24500; i++)
            {
                var ret = videoCapture.Read(mat);
                if (mat.Empty())
                {
                    break;
                }
                Console.WriteLine($"roop:{i} ret:{ret}");
                if (!ret)
                {
                    break;
                }
                Cv2.WaitKey(5);
                Cv2.ImWrite($@"image\capture\1920x1080_{i:00000}.bmp", mat);
            }
        }
    }
    var end = DateTime.Now;
    Console.WriteLine($"{end - start}");
}
/// <summary>
/// Loads the first frame of the current video file into the image box.
/// </summary>
private void GetFirstFrame()
{
    using (var v = VideoCapture.FromFile(filePath))
    using (var r = new Mat())
    {
        v.Read(r);
        // Guard: an unreadable stream yields an empty Mat, which would make
        // the bitmap conversion fail; keep the previous preview instead.
        if (!r.Empty())
        {
            myImage.Source = WriteableBitmapConverter.ToWriteableBitmap(r);
        }
    }
}
/// <summary>
/// Form load handler: opens a local video file and steps through it in an
/// OpenCV window, advancing one frame per key press (Cv2.WaitKey blocks).
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    #region 로컬 파일 읽기
    // Read a local file.
    {
        frame = new Mat();
        cap = VideoCapture.FromFile("D:\\Downloads\\프리스틴.mp4");
        //cap.FrameWidth = frameWidth;
        //cap.FrameHeight = frameHeight;
        //cap.Open(0);
        cap.Read(frame);
        //pictureBoxIpl1.ImageIpl = frame;
        //Console.WriteLine(cap.Fps);
        //int sleepTime = (int)Math.Round(1000 / cap.Fps);
        //Console.WriteLine(sleepTime);
        var window = new Window("capture");
        // When the movie playback reaches end, Mat.data becomes NULL.
        // NOTE(review): this loop never exits, so the camera region below is
        // unreachable; errors (e.g. showing a null-data frame) are only logged.
        while (true)
        {
            try
            {
                Console.WriteLine("READ");
                cap.Read(frame); // same as cvQueryFrame
                window.ShowImage(frame);
                // Blocks until a key is pressed in the OpenCV window.
                Cv2.WaitKey();
            }
            catch (Exception extdf)
            {
                Console.WriteLine(extdf.ToString());
            }
        }
    }
    #endregion
    #region 카메라 영상 읽기
    // Read from a camera (currently disabled).
    {
        //frame = new Mat();
        //for (int i = 0; i < 1; i++)
        //{
        //    cap = VideoCapture.FromCamera(CaptureDevice.Any, 0);
        //    Console.WriteLine(cap.IsOpened());
        //}
        //window = new Window("capture");
    }
    #endregion
}
/// <summary>
/// Scans every configured video: first sums the total frame count for
/// progress reporting, then decodes each video and runs face detection on
/// every FrameRateNum-th frame, saving the detections to disk.
/// </summary>
public void Run()
{
    Console.WriteLine(@"Creating face image...");
    SettingData.TotalFrameNum = 0;
    SettingData.CurrentFrameNum = 0;
    // First pass: total frames across all videos, for global progress.
    foreach (var videoName in SettingData.VideoFileNames)
    {
        using (var video = new VideoCapture(videoName))
        {
            SettingData.TotalFrameNum += video.FrameCount;
        }
    }
    Cv2.NamedWindow("image", WindowMode.FreeRatio);
    // Second pass: decode each video frame by frame.
    foreach (var videoName in SettingData.VideoFileNames)
    {
        using (var video = VideoCapture.FromFile(videoName))
        {
            var frameNum = 0;
            while (true)
            {
                using (var frame = video.RetrieveMat())
                {
                    // Stop at end-of-stream, or abort everything on cancel.
                    if (frame.Empty() || IsExitStatus)
                    {
                        if (IsExitStatus)
                        {
                            Console.WriteLine(@"Cancel");
                            IsExitStatus = false;
                            // goto breaks out of both nested loops at once.
                            goto CANCEL;
                        }
                        break;
                    }
                    frameNum++;
                    SettingData.CurrentFrameNum++;
                    //Detecting every 10 frames because the number of images
                    //increases too much when cutting out all frames
                    if (frameNum % SettingData.FrameRateNum == 0)
                    {
                        DetectAndSaveImg(frame);
                    }
                }
            }
            Console.WriteLine(@"End of video");
        }
    }
    CANCEL:
    Cv2.DestroyAllWindows();
    Cascade.Dispose();
}
/// <summary>
/// Scans image\base01.mp4 for the yellow "start" banner: each frame is
/// resized to 1280x720, a fixed ROI is thresholded for yellow, combined with
/// a bitmap mask, and the resulting histogram is displayed.
/// </summary>
public static void exec()
{
    var fname = "base01.mp4";
    var maskname = "yellow_1bit_inversion.bmp";
    var videoCapture = VideoCapture.FromFile($@"image\{fname}");
    // (Removed the contradictory FrameWidth/FrameHeight Set calls: they are
    // no-ops on a file capture and set 1920x1080 then 1280x720 anyway.)
    var mat = new Mat();
    var mat_resize = new Mat();
    Mat result_hist;
    // BUGFIX: the mask never changes — load it once instead of re-reading the
    // file from disk on every frame.
    var mask = new Mat($@"image\{maskname}", ImreadModes.GrayScale);
    // Yellow pixel range.
    Scalar scalar_low = new Scalar(0, 240, 240);
    Scalar scalar_high = new Scalar(20, 255, 255);
    if (videoCapture.IsOpened())
    {
        for (int i = 0; i < 10000; i++)
        {
            var ret = videoCapture.Read(mat);
            // BUGFIX: stop at end-of-stream instead of resizing an empty Mat
            // (which throws).
            if (!ret || mat.Empty())
            {
                break;
            }
            Cv2.Resize(mat, mat_resize, new Size(1280, 720));
            Stopwatch sw = new Stopwatch();
            sw.Start();
            var rect = new Rect(820, 30, 260, 80);
            // BUGFIX: per-iteration Mats are now disposed (they leaked before).
            using (var startimg = new Mat(mat_resize, rect))
            using (var yellow = new Mat())
            using (var result = new Mat())
            {
                Cv2.InRange(startimg, scalar_low, scalar_high, yellow);
                Cv2.Add(yellow, mask, result);
                result_hist = GetHistogram(result);
                sw.Stop();
                Console.WriteLine($"Elapsed:{sw.ElapsedMilliseconds}");
                Cv2.ImShow("result", result);
                Cv2.ImShow("result_hist", result_hist);
                Cv2.WaitKey(1);
            }
            result_hist.Dispose();
        }
    }
    videoCapture.Dispose();
    mask.Dispose();
    mat.Dispose();
    mat_resize.Dispose();
}
/// <summary>
/// Form load handler: plays a local video, running Haar-cascade face
/// detection on every frame and drawing green boxes around detected faces.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    frame = new Mat();
    cap = VideoCapture.FromFile("D:\\Downloads\\프리스틴.mp4");
    cap.Read(frame);
    OpenCvSharp.Size newsize = new OpenCvSharp.Size(frameWidth, frameHeight);
    frame = frame.Resize(newsize);
    var window = new Window("capture");
    facecascade = new CascadeClassifier("haarcascade_frontalface_default.xml");
    Mat last = null; // NOTE(review): never used.
    // When the movie playback reaches end, Mat.data becomes NULL.
    // NOTE(review): this loop never exits; exceptions (e.g. from a null
    // frame at end-of-stream) are logged and the loop keeps spinning.
    while (true)
    {
        try
        {
            Console.WriteLine("READ");
            cap.Read(frame);
            newsize = new OpenCvSharp.Size(frameWidth, frameHeight);
            // Resize allocates a new Mat every frame and drops the old one,
            // hence the forced garbage collection below.
            frame = frame.Resize(newsize);
            rects = facecascade.DetectMultiScale(frame);
            foreach (Rect rect in rects)
            {
                frame.Rectangle(rect, Scalar.Green, 3);
            }
            //Mat dstmat = new Mat();
            //Cv2.Canny(frame, dstmat, 50, 200);
            //window.ShowImage(dstmat);
            window.ShowImage(frame);
            //System.GC.Collect(0, GCCollectionMode.Forced);
            System.GC.Collect();
            Cv2.WaitKey(10);
        }
        catch (Exception extdf)
        {
            Console.WriteLine(extdf.ToString());
        }
    }
}
/// <summary>
/// Reconnect loop (USB or file/IP source). Retries once per second while the
/// busy box is shown; on the first successful frame read it restarts
/// processing, and if the box is closed without success it falls back to
/// camera selection (CameraChanger_Click).
/// </summary>
public void MakeReconnect()
{
    var f = new CameraSelectWindow();
    f.ShowBusyBox();
    var ff = f.imShown;
    Task.Run(() =>
    {
        var success = false;
        while (ff)
        {
            VideoCapture v;
            if (cameraMode == StreamSrc.USB_cam)
            {
                v = VideoCapture.FromCamera(CaptureDevice.Any);
            }
            else
            {
                v = VideoCapture.FromFile(filePath);
            }
            var r = new Mat();
            v.Read(r);
            // BUGFIX: the capture was re-created on every retry but never
            // disposed, leaking a native handle per attempt. The frame data
            // in `r` is owned by the Mat, so disposing `v` here is safe.
            v.Dispose();
            if (r.Empty())
            {
                // No frame yet — wait before the next attempt.
                Thread.Sleep(1000);
            }
            else
            {
                Application.Current.Dispatcher.BeginInvoke(new Action(() => Restart(f)));
                success = true;
                r.Dispose();
                break;
            }
            r.Dispose();
            // Refresh the loop condition from the UI thread (busy box state).
            Application.Current.Dispatcher.BeginInvoke(new Action(() => ff = f.imShown));
        }
        if (!success)
        {
            Application.Current.Dispatcher.BeginInvoke(new Action(() => PerformAct(CameraChanger_Click)));
        }
    });
}
// Load Camera
/// <summary>
/// Prompts for a camera feed and configures the capture from it. A
/// single-character feed is treated as a numeric device index, anything
/// longer as a file/URL feed; an empty feed cancels.
/// </summary>
private void LoadCamera()
{
    camFeed.ShowDialog();
    strCapFeed = camFeed.getCamFeed();
    if (strCapFeed == "")
    {
        return;
    }

    // A new source was chosen — release the previous capture first.
    if (cap != null)
    {
        cap.Dispose();
    }

    if (strCapFeed.Length == 1)
    {
        int uCamID;
        if (Int32.TryParse(strCapFeed, out uCamID))
        {
            cap = VideoCapture.FromCamera(uCamID);
        }
    }
    else
    {
        cap = VideoCapture.FromFile(strCapFeed);
    }

    if (cap != null)
    {
        uFrameWidth = (uint)cap.Get(CaptureProperty.FrameWidth);
        uFrameHeight = (uint)cap.Get(CaptureProperty.FrameHeight);
        uFps = (uint)cap.Get(CaptureProperty.Fps);
        // Clamp implausible reported frame rates to a sane default.
        if ((uFps < 10) || (uFps > 60))
        {
            uFps = 30;
        }
        strCapType = "Camera";
    }
}
/// <summary>
/// Builds a running-average background model of "Input.mp4" and displays it:
/// each frame is blended into a float accumulator (alpha = 0.0005) which is
/// converted back to 8-bit for display. ESC stops playback.
/// </summary>
public void Run()
{
    //https://drive.google.com/file/d/1rc13wZ9zC03ObG5zB3uccUtsg_rsI8hC/view
    VideoCapture capture = VideoCapture.FromFile("Input.mp4");
    Mat accumulator = new Mat();
    Mat display = new Mat();
    while (true)
    {
        Mat current = new Mat();
        capture.Read(current);
        // End of stream: nothing more to process.
        if (current.Empty())
        {
            break;
        }
        // Seed the float accumulator from the very first frame.
        if (capture.Get(CaptureProperty.PosFrames) == 1)
        {
            current.ConvertTo(accumulator, MatType.CV_32F);
        }
        Cv2.AccumulateWeighted(current, accumulator, 0.0005, null);
        Cv2.ConvertScaleAbs(accumulator, display);
        Cv2.ImShow("output", display);
        // ESC (27) exits playback.
        char key = (char)Cv2.WaitKey(25);
        if (key == 27)
        {
            break;
        }
        current.Release();
    }
    // Release everything once playback is finished.
    capture.Release();
    accumulator.Release();
    display.Release();
    Cv2.DestroyAllWindows();
}
/// <summary>
/// Shows/hides the trim controls when the checkbox toggles and, when a valid
/// file is selected, resets the trim range to [0, video duration].
/// </summary>
private void trimCheckBox_Checked(object sender, RoutedEventArgs e)
{
    trimContainer.Visibility = trimCheckbox.IsChecked == true ? Visibility.Visible : Visibility.Collapsed;
    string file = browseFile.Text;
    if (!VerifyFileExists(file))
    {
        return;
    }
    // BUGFIX: the capture was never disposed, leaking a native handle on
    // every checkbox toggle; scope it with using.
    using (var capture = VideoCapture.FromFile(file))
    {
        // Only changing to 0 doesn't update the field...
        TrimStart.Value = TimeSpan.FromSeconds(1);
        TrimStart.Value = TimeSpan.FromSeconds(0);
        TrimEnd.Value = TimeSpan.FromSeconds(capture.FrameCount / capture.Fps);
    }
}
/// <summary>
/// "Invisibility cloak" demo: grabs a clean background from the first 60
/// frames of Input.mp4, then replaces red-colored pixels of each subsequent
/// frame with the background. ESC exits.
/// </summary>
public void Run()
{
    //https://drive.google.com/file/d/1rc13wZ9zC03ObG5zB3uccUtsg_rsI8hC/view
    VideoCapture cap = VideoCapture.FromFile("Input.mp4");
    Mat background = new Mat();
    // Let the scene settle: only the 60th frame is kept as the background.
    for (int i = 0; i < 60; i++)
    {
        cap.Read(background);
    }
    Cv2.ImShow("background", background);
    //flip(background,background,1);
    while (true)
    {
        Mat frame = new Mat();
        // Capture frame-by-frame
        cap.Read(frame);
        // If the frame is empty, break immediately
        if (frame.Empty())
        {
            break;
        }
        Mat hsv = new Mat();
        //flip(frame,frame,1);
        Cv2.CvtColor(frame, hsv, ColorConversionCodes.BGR2HSV);
        // Red wraps around the hue circle, so two hue ranges are needed.
        Mat mask1 = new Mat(), mask2 = new Mat();
        Cv2.InRange(hsv, new Scalar(0, 120, 70), new Scalar(10, 255, 255), mask1);
        Cv2.InRange(hsv, new Scalar(170, 120, 70), new Scalar(180, 255, 255), mask2);
        mask1 = mask1 + mask2;
        Mat kernel = Mat.Ones(rows: 3, cols: 3, type: MatType.CV_32F);
        Cv2.MorphologyEx(mask1, mask1, MorphTypes.Open, kernel);
        Cv2.MorphologyEx(mask1, mask1, MorphTypes.Dilate, kernel);
        Cv2.BitwiseNot(mask1, mask2);
        Mat res1 = new Mat(), res2 = new Mat(), final_output = new Mat();
        // Non-red parts of the frame + red parts filled from the background.
        Cv2.BitwiseAnd(frame, frame, res1, mask2);
        Cv2.BitwiseAnd(background, background, res2, mask1);
        Cv2.AddWeighted(res1, 1, res2, 1, 0, final_output);
        Cv2.ImShow("Magic !!!", final_output);
        // Display the resulting frame
        //imshow( "Frame", frame );
        // Press ESC on keyboard to exit
        char c = (char)Cv2.WaitKey(25);
        if (c == 27)
        {
            break;
        }
        // Also release all the mats created in the loop to avoid memory leakage.
        frame.Release();
        hsv.Release();
        mask1.Release();
        mask2.Release();
        res1.Release();
        res2.Release();
        final_output.Release();
        // BUGFIX: the morphology kernel was re-created every frame but never
        // released, leaking one small Mat per frame.
        kernel.Release();
    }
    // When everything done, release the video capture object
    cap.Release();
    background.Release();
    // Closes all the frames
    Cv2.DestroyAllWindows();
}
/// <summary>
/// Worker loop that plays the video from frameN, hands frames to the YOLO
/// analysis thread, and paces playback/UI updates to the source FPS.
/// </summary>
public void StartAnalyzingVideo(int frameN, int cc, int coupCoord)
{
    Utilities.debugmessage("Analyze Video Thread STARTED!");
    CoupCount = cc;
    masTrackDcoup = -999; // sentinel; immediately overwritten below
    masTrackDcoup = coupCoord;
    window.Invoke(new Action(() =>
    {
        window.Text = "Train Coup - Processing...";
    }));
    cap = VideoCapture.FromFile(window.tBox_path.Text);
    // Seek to the requested start frame and prime the first frame.
    cap.Set(CaptureProperty.PosFrames, frameN);
    cframe.frameNum = frameN;
    cap.Read(cframe.Frame);
    cframe.frameNum++;
    DateTime timeDelta;
    frameTime = (int)(1000 / cap.Fps); // milliseconds per frame
    TimeSpan maxTime = TimeSpan.FromMilliseconds(cap.FrameCount * frameTime);
    frameCnt = cap.FrameCount;
    do
    {
        timeDelta = DateTime.Now;
        // Spawn the YOLO analysis thread once; `analyzeStarted` gates it.
        if (!analyzeStarted)
        {
            analyzeStarted = true;
            yoloThread = new Thread(AnalyzeVideo);
            yoloThread.IsBackground = true;
            yoloThread.Start();
        }
        window.picBoxSmall.BeginInvoke(new Action(() =>
        {
            // Realtime drawing
            window.picBoxSmall.ImageIpl = cframe.Frame.Resize(new OpenCvSharp.Size(window.picBoxSmall.Width, window.picBoxSmall.Height));
        }));
        // Sleep thread for new frame + collect garbage
        if ((DateTime.Now - timeDelta).Milliseconds < frameTime)
        {
            Thread.Sleep(frameTime - (DateTime.Now - timeDelta).Milliseconds);
            GC.Collect();
            window.frameCnt.Invoke(new Action(() =>
            {
                TimeSpan curTime = TimeSpan.FromMilliseconds(cframe.frameNum * frameTime);
                window.frameCnt.Text = "Frames: " + cframe.frameNum + "/" + cap.FrameCount + "\nTime: " + curTime.ToString(@"hh\:mm\:ss") + " / " + maxTime.ToString(@"hh\:mm\:ss");
            }));
            // Advance to the next frame only while still inside the stream.
            if (cframe.frameNum + 1 < frameCnt)
            {
                cap.Read(cframe.Frame);
                cframe.frameNum++;
            }
        }
    } while (!cframe.Frame.Empty() && PLAY_FLAG);
    // Close Thread
    try
    {
        masTrackDcoup = -999; // reset sentinel for the next run
        cap.Dispose();
        window.Invoke(new Action(() =>
        {
            if (window.isPaused)
            {
                window.Text = "Train Coup Detector - Paused";
            }
            else
            {
                window.Text = "Train Coup Detector - Ready";
            }
        }));
    }
    catch
    {
    }
    Utilities.debugmessage("Analyze Video Thread FINISHED!");
    GC.Collect();
}
/// <summary>
/// Full load-removal pipeline: preprocesses the video (crop/trim/resize),
/// finds the run's start and end frames, collects every enabled load type,
/// filters overlapping/invalid loads, and writes an HTML report.
/// </summary>
public static LoadResults Start(string file, bool partialRun, CropSettings? crop, TrimSettings? trim, LoadType loadTypes, bool resize, Action<ProgressPhase, float> updateProgress)
{
    List<Load> loads = new List<Load>();
    updateProgress.Invoke(ProgressPhase.Phase_1_PreprocessVideo, 0);
    VideoCapture capture = new VideoCapture(file);
    // Re-encode per crop/resize settings, then reopen the processed file.
    // NOTE(review): the first `capture` is not disposed before being
    // replaced — possible native handle leak; confirm.
    string processedFile = CropTrimAndResizeVideo(capture, file, crop, /*trim,*/ resize);
    capture = VideoCapture.FromFile(processedFile);
    updateProgress.Invoke(ProgressPhase.Phase_2_StartingTime, 0);
    int startingFrame = 0;
    // End frame: the trim end (clamped to the video) or the last frame.
    int endingFrame = trim.HasValue ? (int)Math.Min((trim.Value.End * capture.Fps) - 1, capture.FrameCount - 1) : capture.FrameCount - 1;
    if (!partialRun && loadTypes.HasFlag(LoadType.Start))
    {
        // The run must begin on the dark "Start"/"Options" screen.
        var startLoad = Util.CountDarknessFrames(LoadType.Start, capture, trim?.Start ?? 0, (int)(capture.Fps * StartScreenMaxDuration));
        if (startLoad.FrameStart == -1)
        {
            throw new Exception(
                "Start screen not detected, make sure the video starts on the \"Start\"/\"Options\" screen");
        }
        loads.Add(startLoad);
        startingFrame = startLoad.FrameStart;
        if (loadTypes.HasFlag(LoadType.Overworld))
        {
            // Look for the frozen overworld load right after the start screen.
            var startOverworldLoad = Util.CountFrozenFrames(LoadType.Overworld, capture, startLoad.FrameEnd / capture.Fps, (int)capture.Fps / 5, (int)capture.Fps * 20);
            if (startOverworldLoad.HasValue)
            {
                loads.Add(startOverworldLoad.Value);
            }
        }
    }
    updateProgress.Invoke(ProgressPhase.Phase_3_VideoScale, 0);
    // The UI scale is derived from the life-counter graphics.
    float videoScale = LifeCounter.GetLifeCountScale(capture, startingFrame, updateProgress);
    if (float.IsNaN(videoScale))
    {
        throw new Exception("Video Scale couldn't be determined: " + videoScale);
    }
    updateProgress.Invoke(ProgressPhase.Phase_4_EndingTime, 0);
    if (!partialRun)
    {
        var _endingFrame = BossLoads.GetLastFinalBossFrame(capture, videoScale, endingFrame, updateProgress);
        if (!_endingFrame.HasValue)
        {
            throw new Exception(
                "Final hit not detected, make sure the video doesn't end more than 3 minutes after the final hit.");
        }
        endingFrame = _endingFrame.Value;
    }
    updateProgress.Invoke(ProgressPhase.Phase_5_EndSignLoads, 0);
    if (loadTypes.HasFlag(LoadType.EndSign))
    {
        loads.AddRange(EndSignLoads.GetEndSignLoads(capture, videoScale, startingFrame, endingFrame, updateProgress));
    }
    updateProgress.Invoke(ProgressPhase.Phase_6_OverworldLoads, 0);
    if (loadTypes.HasFlag(LoadType.Overworld) || loadTypes.HasFlag(LoadType.BackSign))
    {
        loads.AddRange(OverworldLoads.GetOverworldLoads(capture, videoScale, startingFrame / (float)capture.Fps, endingFrame / (float)capture.Fps, loadTypes, updateProgress));
    }
    updateProgress.Invoke(ProgressPhase.Phase_7_DeathLoads, 0);
    if (loadTypes.HasFlag(LoadType.Death))
    {
        loads.AddRange(DeathLoads.GetDeathLoads(capture, videoScale, startingFrame, endingFrame, updateProgress));
    }
    updateProgress.Invoke(ProgressPhase.Phase_8_BossLoads, 0);
    if (loadTypes.HasFlag(LoadType.Boss))
    {
        loads.AddRange(BossLoads.GetBossLoads(capture, videoScale, startingFrame, endingFrame, updateProgress));
    }
    int phase8Progress = 0; // NOTE(review): never used.
    // Remove backsign loads that aren't preceded by an overworld load (ignore death loads for this)
    var sortedLoads = loads.OrderBy(l => l.FrameStart).ToList();
    List<Load> backsignLoadsToRemove = new List<Load>();
    for (int i = 0; i < sortedLoads.Count; i++)
    {
        if (sortedLoads[i].Type == LoadType.BackSign)
        {
            var bsLoad = sortedLoads[i];
            // Walk backwards looking for the nearest qualifying load.
            for (int j = i - 1; j >= 0; j--)
            {
                var checkLoad = sortedLoads[j];
                // only consider loads more than 3 seconds before the backsign load
                if (checkLoad.FrameStart > bsLoad.FrameStart - capture.Fps * 3.0)
                {
                    continue;
                }
                if (checkLoad.Type == LoadType.Death)
                {
                    continue;
                }
                if (checkLoad.Type == LoadType.Overworld)
                {
                    break;
                }
                else
                {
                    // NOTE(review): this can add the same bsLoad once per
                    // earlier non-overworld load — confirm that duplicates
                    // are harmless for the Remove loop below.
                    backsignLoadsToRemove.Add(bsLoad);
                }
            }
        }
    }
    foreach (var l in backsignLoadsToRemove)
    {
        loads.Remove(l);
    }
    // Remove unnecessary endsign loads (when they overlap with other loads)
    foreach (var load in loads.Where(l => l.Type != LoadType.EndSign).ToList())
    {
        loads.RemoveAll(l => l.Type == LoadType.EndSign && l.Overlaps(load, (int)(capture.Fps * 0.5f)));
    }
    // Remove unnecessary backsign loads (when they overlap with other loads)
    foreach (var load in loads.Where(l => l.Type != LoadType.BackSign).ToList())
    {
        loads.RemoveAll(l => l.Type == LoadType.BackSign && l.Overlaps(load, (int)(capture.Fps * 0.5f)));
    }
    // Remove all loads that start after the last frame
    loads.RemoveAll(l => l.FrameStart > endingFrame);
    updateProgress.Invoke(ProgressPhase.Phase_9_GenerateReport, 0);
    LoadResults results = new LoadResults(loads, (float)capture.Fps, startingFrame, endingFrame);
    results.SaveDebugImages(capture, "debugExport", "file");
    var report = new LoadRemoverReport(Path.GetFileName(file), results, capture);
    var reportPath = Path.ChangeExtension(file, null) + "_report.html";
    report.GenerateHtml(TemplateFile).Save(reportPath);
    updateProgress.Invoke(ProgressPhase.Phase_9_GenerateReport, 1);
    var openReport = MessageBox.Show($"Done! The report file can be found at {Environment.NewLine}{reportPath}{Environment.NewLine}" +
        $"Do you wish to open the report now?", "Report", MessageBoxButton.YesNo, MessageBoxImage.Question);
    if (openReport == MessageBoxResult.Yes)
    {
        // Open report in default application (hopefully the browser)
        var psi = new ProcessStartInfo
        {
            FileName = reportPath,
            UseShellExecute = true
        };
        Process.Start(psi);
    }
    return (results);
}
/// <summary>
/// Processes a speed-camera video: background-subtracts each frame to find
/// car blobs, tracks them across frames, and collects completed Entry
/// records (persisted to the DB with photos unless isDebug).
/// The recording start time is parsed from the file-name prefix before the
/// first underscore; timestamps assume 30 frames per second — TODO confirm.
/// </summary>
public void AnalyzeVideo(string file, bool isDebug)
{
    var dateString = "1900-01-01 12:00:00";
    var fileName = Path.GetFileNameWithoutExtension(file);
    if (fileName.Contains("_"))
    {
        // Insert separators to turn "yyyyMMddHHmmss" into "yyyy-MM-dd HH:mm:ss".
        dateString = fileName.Split("_")[0].Insert(4, "-").Insert(7, "-").Insert(10, " ").Insert(13, ":").Insert(16, ":");
    }
    var currentFrame = 1;
    var startDate = DateTime.Parse(dateString);
    var stopwatch = Stopwatch.StartNew();
    var frameCount = 0;
    var frame = new Mat();
    var realTimeSpeed = new RealTimeSpeed(isDebug);
    var bgSub = BackgroundSubtractorMOG.Create(200, 100);
    var carRecords = new List<CarTracker>();
    var entries = new List<Entry>();
    using (var videoCapture = VideoCapture.FromFile(file))
    {
        frameCount = videoCapture.FrameCount;
        while (videoCapture.IsOpened())
        {
            frame = videoCapture.RetrieveMat();
            // End of stream: RetrieveMat yields a 0-column Mat.
            if (frame.Cols == 0)
            {
                break;
            }
            // Detect blobs and match them against the live trackers.
            var carBlobs = DetectCars(frame, bgSub, carRecords.HasPossibleOverlaps(), isDebug);
            var blobsWithTrackers = MatchBlobsToTrackers(carBlobs, carRecords);
            carRecords.ForEach(c => c.IsUpdated = false);
            foreach (var blob in blobsWithTrackers)
            {
                var closestTracker = carRecords.Where(c => c.Id == blob.Value).FirstOrDefault();
                if (closestTracker == null)
                {
                    // Unmatched blob: start a new tracker. Direction is
                    // inferred from which side of LeftStart it appeared on.
                    closestTracker = new CarTracker
                    {
                        Id = NextTrackerId++,
                        Car = new Entry
                        {
                            Direction = blob.Key.X < Config.LeftStart + 75 ? "R" : "L",
                            DateAdded = startDate.AddSeconds(currentFrame / 30)
                        },
                        Rect = blob.Key,
                        IsUpdated = true
                    };
                    carRecords.Add(closestTracker);
                }
                else
                {
                    closestTracker.IsUpdated = true;
                    closestTracker.LastUpdated = currentFrame;
                    closestTracker.Rect = blob.Key;
                    if (isDebug)
                    {
                        Cv2.Rectangle(frame, blob.Key, Scalar.Purple, 8);
                    }
                }
                closestTracker.ValidateTracker(frame.Height);
            }
            carRecords.ForEach(c => c.UpdateEvents(currentFrame, frame, Config));
            //remove any trackers that have completed but are invalid OR they haven't been updated in 15 frames
            var badRecords = carRecords.Where(c => (!c.IsUpdated && c.Car.Speed > 0 && c.IsInvalid) || (!c.IsUpdated && currentFrame - c.LastUpdated > 15)).ToList();
            foreach (var badRecord in badRecords)
            {
                carRecords.Remove(badRecord);
            }
            // A tracker with a measured speed that got no update this frame
            // is finished: promote its car to an Entry.
            var completedRecords = carRecords.Where(c => !c.IsUpdated && c.Car.Speed > 0).ToList();
            foreach (var completed in completedRecords)
            {
                entries.Add(completed.Car);
                carRecords.Remove(completed);
                realTimeSpeed.StopFrame = 60;
                realTimeSpeed.Speed = (int)Math.Floor(completed.Car.Speed);
            }
            //display progress output
            var fps = Math.Round(currentFrame / stopwatch.Elapsed.TotalSeconds, 2);
            var percentDone = (currentFrame / (double)frameCount) * 100;
            var timeRemaining = TimeSpan.FromSeconds((frameCount - currentFrame) / fps);
            if (isDebug)
            {
                DrawStartEndLines(frame);
                DrawTime(startDate, currentFrame, frame);
                realTimeSpeed.DrawSpeeds(frame);
                Cv2.ImShow("Frame", frame);
                Cv2.WaitKey(1);
            }
            Console.SetCursorPosition(0, Console.CursorTop);
            Console.Write($"\r{currentFrame} of {frameCount} - Time: {startDate.AddSeconds(currentFrame / 30).ToString("HH:mm:ss")} - FPS: {fps.ToString("N2")} - {Math.Round(percentDone, 2).ToString("N2")}% - Remaining: {timeRemaining.ToString(@"hh\:mm\:ss")} - Cars: {entries.Count} ");
            currentFrame++;
        }
    }
    Console.WriteLine("");
    Console.WriteLine($"{entries.Count} cars in {stopwatch.Elapsed.ToString(@"hh\:mm\:ss")}");
    //save all entries to the DB and their photos to disk if debug is false
    if (!isDebug)
    {
        foreach (var entry in entries)
        {
            Console.WriteLine($"{entry.DateAdded.ToString("hh:mm:ss")}: {entry.Direction} - {entry.Speed:N2}");
            Database.EntryInsert(entry);
            if (!entry.PhotoUpdated)
            {
                Database.LogInsert(new Log
                {
                    DateAdded = DateTime.Now,
                    Message = $"Photo for #{entry.Id} has not been updated!",
                    StackTrace = ""
                });
            }
            //save the picture if it exists
            if (entry.Picture != null)
            {
                using (var fileStream = new FileStream(Path.Combine(Config.PhotoFolder, $"{entry.Id}.jpg"), FileMode.Create))
                {
                    fileStream.Write(entry.Picture, 0, entry.Picture.Length);
                }
            }
        }
    }
    return;
}
/// <summary>
/// Creates the capture by opening the configured video file.
/// </summary>
protected override VideoCapture GetVideoCapture() => VideoCapture.FromFile(this.FileName);
// FilePicker for video files (or images; It's not including array of images)
/// <summary>
/// Lets the user pick a video file, shows its first frame as a preview, and
/// prepares the per-file CSV log in the LOGS folder.
/// </summary>
private void btn_filePick_Click(object sender, EventArgs e)
{
    // Stop any running playback/analysis before switching files.
    stopButton_Click(sender, e);
    var filePath = string.Empty;
    using (OpenFileDialog openFileDialog = new OpenFileDialog())
    {
        openFileDialog.InitialDirectory = @"";
        openFileDialog.FilterIndex = 2;
        openFileDialog.Filter = ".avi files (*.avi)|*.avi|All files (*.*)|*.*";
        openFileDialog.RestoreDirectory = true;
        if (openFileDialog.ShowDialog() == DialogResult.OK)
        {
            filePath = openFileDialog.FileName;
        }
        if (filePath != "")
        {
            try
            {
                // Loading first frame of video like preview
                var cap = VideoCapture.FromFile(filePath);
                var img = new Mat();
                cap.Read(img);
                picBox.ImageIpl = img.Resize(new OpenCvSharp.Size(picBox.Width, picBox.Height));
                Utilities.debugmessage("File has " + cap.FrameCount + " frames!");
                frameCnt.Text = "Frames: 0/" + cap.FrameCount;
                cap.Dispose();
                img.Dispose();
                tBox_path.Text = filePath;
                // Creating file in LOGS folder to store data of founded coups
                string logFile = @"LOGS\" + Path.GetFileName(tBox_path.Text) + ".txt";
                if (!Directory.Exists("LOGS"))
                {
                    Directory.CreateDirectory("LOGS");
                }
                // Will store data in CSV mode
                using (StreamWriter sw = new StreamWriter(logFile, false, Encoding.UTF8))
                {
                    sw.WriteLine("frame,x1,y1,x2,y2,time");
                    sw.Close();
                }
            }
            catch
            {
                // Unreadable file: clear the previews and report the failure.
                Utilities.showMsg("This file cannot be loaded", "Error!");
                frameCnt.Text = "Frames: 0/0";
                picBox.ImageIpl = null;
                picBoxSmall.ImageIpl = null;
            }
        }
        filePath = null;
        picBox.clearBBoxes();
        GC.Collect();
    }
}
/// <summary>
/// Validates the selected camera connection (web or IP) by opening it and
/// reading the frame width; updates strCamFeed and the status checkbox.
/// </summary>
private void btnCheck_Click(object sender, EventArgs e)
{
    if (radioWeb.Checked)
    {
        try
        {
            VideoCapture cap;
            cap = VideoCapture.FromCamera(uSelectedCam);
            // A zero frame width means the device opened but delivers nothing.
            int uFrameWidth = (int)cap.Get(CaptureProperty.FrameWidth);
            cap.Dispose();
            if (uFrameWidth == 0)
            {
                MessageBox.Show("ERROR! Web Camera: " + strCamFeed);
                // NOTE(review): unlike the IP branch, the feed string is still
                // set on failure, so the summary below reports success —
                // confirm whether this is intended.
                strCamFeed = uSelectedCam.ToString();
            }
            else
            {
                MessageBox.Show("Success! Web Camera: " + uSelectedCam.ToString());
                strCamFeed = uSelectedCam.ToString();
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show("ERROR! Web Camera: " + uSelectedCam.ToString());
            strCamFeed = "";
        }
    }
    if (radioIP.Checked)
    {
        try
        {
            strCamFeed = get_IPcamUrl();
            VideoCapture cap;
            cap = VideoCapture.FromFile(strCamFeed);
            int uFrameWidth = (int)cap.Get(CaptureProperty.FrameWidth);
            cap.Dispose();
            if (uFrameWidth == 0)
            {
                MessageBox.Show("Error! IP Camera:" + strCamFeed);
                strCamFeed = "";
            }
            else
            {
                MessageBox.Show("Success! IP Camera: " + strCamFeed);
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show("ERROR! IP Camera: " + strCamFeed);
            strCamFeed = "";
        }
    }
    // Summary: an empty feed string means the check failed.
    if (strCamFeed == "")
    {
        chkBoxConnectCheck.Checked = false;
        chkBoxConnectCheck.Text = "Error !";
    }
    else
    {
        chkBoxConnectCheck.Checked = true;
        chkBoxConnectCheck.Text = "Success !";
    }
}
/// <summary>
/// Brand-detection driver: samples one frame per second from the uploaded
/// video, OCRs each sampled frame through the Google Cloud Vision API,
/// highlights the requested brand names, writes the JSON results to disk,
/// and stores an EvaluationResults object in the HTTP session.
/// </summary>
public static void f_main(String ar_path_for_uploading_videos, String ar_working_folder_name, String ar_uploaded_video_name, String[] ar_brand_names, double ar_cost_of_1_second)
{
    //===initializing google API key
    String ls_google_app_credentials_path_and_filename = HostingEnvironment.MapPath("~/CloudVision/google_cloud_credential_for_logo_detection-nowting-bd7886019869.json");
    Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", ls_google_app_credentials_path_and_filename);
    VideoCapture l_capture = VideoCapture.FromFile(Path.Combine(ar_path_for_uploading_videos, ar_working_folder_name, ar_uploaded_video_name));
    // Raw OpenCV property ids: 5 = CAP_PROP_FPS, 1 (below) = CAP_PROP_POS_FRAMES.
    Double l_framerate = l_capture.Get(5);
    Double l_frameid;
    int li_counter = 0;
    Mat l_frame = new Mat();
    Mat l_frame_with_boundarie = new Mat(); // NOTE(review): never used.
    String ls_result_folder_path = Path.Combine(ar_path_for_uploading_videos, ar_working_folder_name);
    Directory.CreateDirectory(ls_result_folder_path);
    List<DataResult> l_list_DataResult = new List<DataResult>();
    while (true)
    {
        l_capture.Read(l_frame);
        if (l_frame.Empty())
        {
            break;
        }
        l_frameid = l_capture.Get(1);
        if (l_frameid % l_framerate != 0)
        {
            continue; //===getting 1 frame per second
        }
        //======================================================================================================
        li_counter++;
        if (li_counter != 1 && li_counter != 2 && li_counter != 3)
        {
            continue;
        }
        //=== temp code to process only the first 3 frames
        //======================================================================================================
        //===find all texts in frame
        AnnotateImageResponse l_response = f_find_brand_by_DetectDocumentText_in(l_frame);
        //===set a rectangle over each corresponding brand-text and save the frame
        foreach (string l_brand_name in ar_brand_names)
        {
            if (String.IsNullOrEmpty(l_brand_name.Trim()))
            {
                continue;
            }
            DataResult l_current_data_result = f_hilight_brand_and_save_frame(l_frame, l_response, l_brand_name, ar_cost_of_1_second, ls_result_folder_path, "pic_" + li_counter.ToString(), li_counter);
            l_list_DataResult.Add(l_current_data_result);
        }
    }
    //===write result into file
    using (StreamWriter l_file = File.AppendText(Path.Combine(ls_result_folder_path, "Results.txt")))
    {
        JsonSerializer serializer = new JsonSerializer();
        serializer.Serialize(l_file, l_list_DataResult);
    }
    string l_domainnamefordownloadingresults = ConfigurationManager.AppSettings["domainnamefordownloadingresults"];
    EvaluationResults l_EvaluationResults = new EvaluationResults();
    l_EvaluationResults.ResultPathURL = l_domainnamefordownloadingresults + "/" + ar_working_folder_name;
    l_EvaluationResults.BrandNames = ar_brand_names;
    l_EvaluationResults.BrandIndexToShow = 0;
    l_EvaluationResults.array_DataResult = l_list_DataResult.ToArray();
    HttpContext.Current.Session["results"] = l_EvaluationResults;
    return;
}
/// <summary>
/// Super-resolution pipeline for a chosen .mp4: extracts the audio with
/// ffmpeg, upscales every frame 2x via ExtractFrame on a worker task while
/// updating previews/progress, then muxes the audio back into the output.
/// NOTE(review): paths are passed to ffmpeg unquoted — files whose paths
/// contain spaces will break the command line; confirm.
/// </summary>
private async void BtnFile_ClickAsync(object sender, EventArgs e)
{
    OpenFileDialog fileDialog = new OpenFileDialog
    {
        Multiselect = false,
        Title = "Please choose a video file:",
        Filter = "Video File (*.mp4)|*.mp4"
    };
    if (fileDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }
    // Disable the button so a second run cannot start mid-way.
    BtnFile.Enabled = false;
    string file = fileDialog.FileName;
    string path = Path.GetDirectoryName(file);
    string temppath = Path.GetTempPath();
    string filename = Path.GetFileName(file);
    string audio = Path.Combine(temppath, "output.mp3");
    string opencv_out = Path.Combine(temppath, "output.mp4");
    string file_output = Path.Combine(path, "[superresolution]" + filename);
    string ffmpegPath = ConfigurationManager.AppSettings["ffmpeg"];
    int index = 1;
    var video_in = VideoCapture.FromFile(file);
    // Step 1: rip the audio track to a temp mp3.
    var ffmpeg_extract = new Process();
    ffmpeg_extract.StartInfo.UseShellExecute = false;
    ffmpeg_extract.StartInfo.RedirectStandardInput = true;
    ffmpeg_extract.StartInfo.RedirectStandardOutput = true;
    ffmpeg_extract.StartInfo.RedirectStandardError = true;
    ffmpeg_extract.StartInfo.CreateNoWindow = true;
    ffmpeg_extract.StartInfo.FileName = ffmpegPath;
    ffmpeg_extract.StartInfo.Arguments = " -i " + file + " -vn -f mp3 " + audio + " -y";
    ffmpeg_extract.Start();
    ffmpeg_extract.WaitForExit();
    // NOTE(review): after WaitForExit() the process has already exited, so
    // this Kill() branch is dead code (same for the mux step below).
    if (!ffmpeg_extract.HasExited)
    {
        ffmpeg_extract.Kill();
    }
    // Step 2: upscale frame by frame into a temp video (2x each dimension).
    FourCC fourCC = FourCC.DIVX;
    double fps = video_in.Fps;
    Size dsize = new Size(video_in.FrameWidth * 2, video_in.FrameHeight * 2);
    VideoWriter video_out = new VideoWriter(opencv_out, fourCC, fps, dsize, true);
    var max = video_in.FrameCount;
    progressBar1.Value = 0;
    progressBar1.Maximum = max;
    TimeSpan interval_start = new TimeSpan(System.DateTime.Now.Ticks);
    while (video_in.IsOpened())
    {
        Mat frame_in = new Mat();
        var status = video_in.Read(frame_in);
        if (!status)
        {
            break;
        }
        // The heavy upscale runs off the UI thread.
        var frame_out = await Task.Run(() => ExtractFrame(frame_in));
        video_out.Write(frame_out);
        // Show before/after previews in the picture boxes.
        MemoryStream ms_in = new MemoryStream(frame_in.ToBytes());
        var image_in = Image.FromStream(ms_in);
        PicVidIn.Image = image_in;
        MemoryStream ms_out = new MemoryStream(frame_out.ToBytes());
        Image image_out = Image.FromStream(ms_out);
        PicVidOut.Image = image_out;
        // Progress + ETA derived from average per-frame time so far.
        var percentage = Math.Round((float)index / max * 100, 2);
        TimeSpan interval_now = new TimeSpan(System.DateTime.Now.Ticks);
        TimeSpan countTime = interval_start.Subtract(interval_now).Duration();
        int second = (int)(countTime.TotalSeconds / ((float)index / max));
        int hour = second / 3600;
        second %= 3600;
        int minute = second / 60;
        second %= 60;
        label2.Text = index + "/" + max + " Frames - " + percentage + "% Approximately " + hour + "H" + minute + "M" + second + "S Left";
        progressBar1.Value++;
        index++;
    }
    video_in.Release();
    video_out.Release();
    // Step 3: mux the original audio back into the upscaled video.
    var ffmpeg_add = new Process();
    ffmpeg_add.StartInfo.UseShellExecute = false;
    ffmpeg_add.StartInfo.RedirectStandardInput = true;
    ffmpeg_add.StartInfo.RedirectStandardOutput = true;
    ffmpeg_add.StartInfo.RedirectStandardError = true;
    ffmpeg_add.StartInfo.CreateNoWindow = true;
    ffmpeg_add.StartInfo.FileName = ffmpegPath;
    ffmpeg_add.StartInfo.Arguments = " -i " + opencv_out + " -i " + audio + " -c copy " + file_output;
    ffmpeg_add.Start();
    ffmpeg_add.WaitForExit();
    if (!ffmpeg_add.HasExited)
    {
        ffmpeg_add.Kill();
    }
    // Clean up the temp intermediates.
    File.Delete(opencv_out);
    File.Delete(audio);
    MessageBox.Show("Video file \"output_" + filename + "\" saved.", "Task Finished");
    BtnFile.Enabled = true;
}