/// <summary>
/// Selects a local video file via the open-file dialog and configures the
/// video capture from it, caching FPS and frame geometry for later use.
/// </summary>
private void LoadVideo()
{
    if (openFileDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    // Release any previously opened capture before creating a new one.
    if (cap != null)
    {
        cap.Dispose();
    }

    // create the video capture
    string path = openFileDialog.FileName;
    cap = VideoCapture.FromFile(path);

    // Cache the capture properties consumed elsewhere by the form.
    uFps = (uint)cap.Get(CaptureProperty.Fps);
    uFrameWidth = (uint)cap.Get(CaptureProperty.FrameWidth);
    uFrameHeight = (uint)cap.Get(CaptureProperty.FrameHeight);

    strCapType = "Video";
    strCapFeed = path;
}
/// <summary>
/// Capture loop (thread entry point): reads frames from the capture,
/// records them to "demo.avi", and pushes each frame to the owner form's
/// picture box until <c>Isopen</c> is cleared.
/// </summary>
/// <param name="o">Unused thread-start argument.</param>
public void Cap_Run(object o)
{
    VideoCapture cap = new VideoCapture(Path);
    VideoWriter output = new VideoWriter();
    // BUG FIX: the width was previously read from FrameHeight, producing a
    // wrongly-sized (usually square) output video.
    int videoW = (int)cap.Get(CaptureProperty.FrameWidth);
    int videoH = (int)cap.Get(CaptureProperty.FrameHeight);
    int fps = (int)cap.Get(CaptureProperty.Fps);
    Size videosize = new Size(videoW, videoH);
    output.Open("demo.avi", FourCC.MJPG, fps, videosize);
    while (true)
    {
        cap.Read(Source);
        output.Write(Source);
        if (Isopen == 0)
        {
            break;
        }
        try
        {
            (this.Owner as MainForm).PicBox_Video.Image = BitmapConverter.ToBitmap(Source);
        }
        catch (Exception e)
        {
            MessageBox.Show(e.Message);
        }
    }
    // Release the writer so the AVI container is properly finalized on disk,
    // and free the capture (neither was released before).
    output.Release();
    cap.Release();
    if (Isopen == 0)
    {
        // NOTE(review): Thread.Abort is obsolete/unsupported on modern .NET;
        // consider cooperative cancellation instead.
        this.th.Abort();
    }
}
/// <summary>
/// Total number of frames reported by the currently opened capture.
/// </summary>
/// <returns>Frame count, truncated to an integer.</returns>
public int getTotalFrameNum()
{
    double frameCount = capture.Get(CaptureProperty.FrameCount);
    return (int)Math.Floor(frameCount);
}
/// <summary>
/// Plays the bundled "cheerleading_0001.mp4" located next to the
/// executable, pacing frame display to the video's native FPS; frames that
/// fall behind schedule are skipped.
/// </summary>
public void player()
{
    string path = AppDomain.CurrentDomain.BaseDirectory;
    string fullpath = System.IO.Path.Combine(path, "cheerleading_0001.mp4");
    VideoCapture video = VideoCapture.FromFile(fullpath);
    if (!video.IsOpened())
    {
        MessageBox.Show("not open");
        return;
    }
    // NOTE(review): setting frame size on a file-backed capture is typically
    // a no-op; kept for behavioral parity.
    video.Set(CaptureProperty.FrameWidth, video.FrameWidth);
    video.Set(CaptureProperty.FrameHeight, video.FrameHeight);
    double fps = video.Get(CaptureProperty.Fps);
    //프레임 진행 (frame progression)
    int count = 0;
    DateTime start = DateTime.Now;
    while (true)
    {
        Mat frame = new Mat();
        if (!video.Read(frame))
        {
            // BUG FIX: a failed read previously left the while(true) loop
            // spinning forever at end of stream.
            frame.Dispose();
            break;
        }
        if (frame.Width == 0 && frame.Height == 0)
        {
            frame.Dispose();
            break;
        }
        count++;
        TimeSpan playTime = DateTime.Now - start;
        TimeSpan targetTime = TimeSpan.FromSeconds(count / fps);
        if (targetTime < playTime)
        {
            // Behind schedule: drop this frame to catch up.
            //Console.WriteLine($"{playTime}, {targetTime}");
            frame.Dispose();
            continue;
        }
        Dispatcher.Invoke(new Action(delegate()
        {
            var a = WriteableBitmapConverter.ToWriteableBitmap(frame, 96, 96, PixelFormats.Bgr24, null);
            //img_player.Source = a;
        }));
        playTime = DateTime.Now - start;
        if (targetTime > playTime)
        {
            // Ahead of schedule: wait until this frame's presentation time.
            Thread.Sleep(targetTime - playTime);
        }
        frame.Dispose();
    }
}
/// <summary>
/// Constructs a pupil finder over a video file: caches the capture's
/// geometry/FPS/frame count, auto-loads previously saved timestamps and
/// pupil locations if their files exist, and initializes working buffers.
/// </summary>
/// <param name="videoFileName">Path of the video to process.</param>
/// <param name="progressBar">UI progress bar updated during processing.</param>
/// <param name="taskbar">Taskbar item used to mirror progress.</param>
/// <param name="setStatus">Callback for status-text updates.</param>
/// <param name="updateFrame">Callback invoked per processed frame.</param>
/// <param name="framesProcessed">Callback invoked when frame processing completes.</param>
public PupilFinder(string videoFileName, System.Windows.Controls.ProgressBar progressBar, System.Windows.Shell.TaskbarItemInfo taskbar,
                   SetStatusDelegate setStatus, FrameProcessedDelegate updateFrame, FramesProcessedDelegate framesProcessed)
{
    this.videoFileName = videoFileName;
    this.progressBar = progressBar;
    SetStatus = setStatus;
    UpdateFrame = updateFrame;
    OnFramesPupilsProcessed = framesProcessed;
    this.taskbar = taskbar;
    videoSource = new VideoCapture(videoFileName);
    width = (int)videoSource.Get(VideoCaptureProperties.FrameWidth);
    height = (int)videoSource.Get(VideoCaptureProperties.FrameHeight);
    fps = (int)videoSource.Get(VideoCaptureProperties.Fps);
    frameCount = (int)videoSource.Get(VideoCaptureProperties.FrameCount);
    // NOTE(review): integer division — duration is whole seconds, and a
    // capture reporting fps == 0 would throw DivideByZeroException here.
    duration = frameCount / fps;

    // try to auto load stuff if they exist
    if (File.Exists(autoTimestampFileName))
    {
        LoadTimestamps(autoTimestampFileName);
        isTimestampParsed = true;
    }
    else
    {
        timeStamps = Num.zeros((frameCount, 4), NPTypeCode.Int32);
    }
    if (File.Exists(autoPupilsFileName))
    {
        LoadPupilLocations(autoPupilsFileName);
    }
    else
    {
        pupilLocations = Num.zeros((frameCount, 4), NPTypeCode.Double);
        pupilLocations *= Num.NaN;      // use NaN to indicate pupil not yet found on this frame
    }
    cvFrame = new Mat();
    for (int i = 0; i < 3; i++)
    {
        colorChannels[i] = new Mat();
    }
    red = colorChannels[0];

    // Default region of interest covers the full frame.
    top = 0;
    left = 0;
    right = width;
    bottom = height;

    // No frame has been processed yet.
    isFrameProcessed = new bool[frameCount];
    for (int i = 0; i < frameCount; i++)
    {
        isFrameProcessed[i] = false;
    }
}
/// <summary>
/// Configures the capture for MJPG at 30 fps and 2048x2048, then logs the
/// size and pixel format the device actually negotiated.
/// </summary>
void Init()
{
    // Request format first, then rate and size; the driver may adjust these.
    capture.Set(CaptureProperty.FourCC, (double)FourCC.MJPG);
    capture.Set(CaptureProperty.Fps, 30);
    capture.Set(CaptureProperty.FrameWidth, 2048);
    capture.Set(CaptureProperty.FrameHeight, 2048);

    // Read back what was actually accepted.
    double w = capture.Get(CaptureProperty.FrameWidth);
    double h = capture.Get(CaptureProperty.FrameHeight);
    double format = capture.Get(CaptureProperty.FourCC);
    Console.WriteLine($"Capture Size: (w:{w},h:{h}) CaptureFormat:{format}");
}
// See dummy reference for documentation
/// <summary>
/// Current capture resolution as (width, height); falls back to the
/// configured defaults when no usable capture exists.
/// </summary>
public (int, int) GetResolution()
{
    // Guard clause: no capture, or one that has already been disposed.
    if (capture == null || capture.IsDisposed)
    {
        return (VideoFeedSettings.ImageWidth, VideoFeedSettings.ImageHeight);
    }

    int width = (int)capture.Get(CaptureProperty.FrameWidth);
    int height = (int)capture.Get(CaptureProperty.FrameHeight);
    return (width, height);
}
/// <summary>
/// Re-encodes <paramref name="cap"/> into <paramref name="path"/> with the
/// given codec, optionally applying effects to the frame ranges they are
/// keyed by, and logging encode progress.
/// </summary>
/// <param name="path">Output path; the extension is appended if missing.</param>
/// <param name="extention">Target extension, with or without a leading dot.</param>
/// <param name="cc">FourCC codec for the output writer.</param>
/// <param name="cap">Source capture; rewound to frame 0 before encoding.</param>
/// <param name="effect">Optional effects keyed by the frame range they apply to.</param>
/// <exception cref="ArgumentNullException">path, extention or cap is null.</exception>
public static void OutputMovie(string path, string extention, FourCC cc, VideoCapture cap,
                               Dictionary <FrameInfo, PrintEffectBase> effect = null)
{
    if (path == null)
    {
        throw new ArgumentNullException(nameof(path));
    }
    if (extention == null)
    {
        throw new ArgumentNullException(nameof(extention));
    }
    if (cap == null)
    {
        throw new ArgumentNullException(nameof(cap));
    }
    if (extention[0] != '.')
    {
        extention = "." + extention;
    }
    if (!path.EndsWith(extention, CurrentCulture))
    {
        path += extention;
    }
    Size size = new Size(cap.Get(FrameWidth), cap.Get(FrameHeight));
    double fps = cap.Get(Fps);
    using VideoWriter vw = new VideoWriter(path, cc, fps, size);
    cap.Set(PosFrames, 0);
    while (true)
    {
        Mat frame = cap.RetrieveMat();
        // BUG FIX: the original do-while applied effects to — and wrote —
        // the trailing empty frame before its Empty() check ran.
        if (frame.Empty())
        {
            break;
        }
        var f = (uint)cap.Get(PosFrames);
        if (effect != null)
        {
            // Apply every effect whose [Begin, End] range covers this frame.
            foreach (var eff in effect)
            {
                if (eff.Key.Begin <= f && f <= eff.Key.End)
                {
                    frame = eff.Value.Processing(frame);
                }
            }
        }
        vw.Write(frame);
        Log.Progress("Outputing Movie", f / cap.Get(FrameCount) * 100);
    }
}
/// <summary>
/// Previews the capture in a "preview" window at ~30 fps. Space pauses,
/// 'j' seeks back 20 frames, 'k' seeks forward 20 frames, ESC exits.
/// </summary>
/// <param name="cap">Capture to preview; its read position is moved.</param>
private static void Show(VideoCapture cap)
{
    while (true)
    {
        Mat frame = cap.RetrieveMat();
        if (frame.Empty())
        {
            break;
        }
        Cv2.ImShow("preview", frame);

        int key = Cv2.WaitKey(33);
        double currentPos = cap.Get(VideoCaptureProperties.PosFrames);
        switch (key)
        {
            case ' ':
                Cv2.WaitKey();      // pause until any key is pressed
                break;
            case 'j':
                cap.Set(VideoCaptureProperties.PosFrames, currentPos - 20);
                break;
            case 'k':
                cap.Set(VideoCaptureProperties.PosFrames, currentPos + 20);
                break;
            case 0x1b:              // ESC
                return;
        }
    }
}
// Load Camera
/// <summary>
/// Prompts for a camera feed and configures the video capture from it.
/// A single-character feed is treated as a camera device index; anything
/// longer is treated as a stream URL / file path.
/// </summary>
private void LoadCamera()
{
    camFeed.ShowDialog();
    strCapFeed = camFeed.getCamFeed();
    if (strCapFeed == "")
    {
        return;
    }

    // Release the previous capture once (deduplicated from the original
    // per-branch Dispose calls) and null it out. BUG FIX: previously, when
    // Int32.TryParse failed, `cap` kept pointing at the disposed object and
    // the property reads below ran against a disposed capture.
    if (cap != null)
    {
        cap.Dispose();
        cap = null;
    }

    if (strCapFeed.Length == 1)
    {
        int uCamID;
        if (Int32.TryParse(strCapFeed, out uCamID))
        {
            cap = VideoCapture.FromCamera(uCamID);
        }
    }
    else
    {
        cap = VideoCapture.FromFile(strCapFeed);
    }

    if (cap != null)
    {
        uFrameWidth = (uint)cap.Get(CaptureProperty.FrameWidth);
        uFrameHeight = (uint)cap.Get(CaptureProperty.FrameHeight);
        uFps = (uint)cap.Get(CaptureProperty.Fps);
        // Cameras often report 0 or bogus FPS; clamp to a sane default.
        if ((uFps < 10) || (uFps > 60))
        {
            uFps = 30;
        }
        strCapType = "Camera";
    }
}
/// <summary>
/// Opens capture device <paramref name="index"/>, requests MJPG at 30 fps
/// and 2048x2048, logs the negotiated size/format, and enables Y-axis
/// frame flipping.
/// </summary>
/// <param name="index">Camera device index to open.</param>
public WindowsCapture(int index) : this()
{
    InnerCapture = new VideoCapture(index);

    // Request format before rate and size; the driver may adjust all of them.
    InnerCapture.Set(CaptureProperty.FourCC, (double)FourCC.MJPG);
    InnerCapture.Set(CaptureProperty.Fps, 30);
    InnerCapture.Set(CaptureProperty.FrameWidth, 2048);
    InnerCapture.Set(CaptureProperty.FrameHeight, 2048);

    // Read back and log what was actually accepted.
    double w = InnerCapture.Get(CaptureProperty.FrameWidth);
    double h = InnerCapture.Get(CaptureProperty.FrameHeight);
    double format = InnerCapture.Get(CaptureProperty.FourCC);
    Logger.Log($"Capture Size: (w:{w},h:{h}) CaptureFormat:{format}");

    flip = true;
    flipMode = FlipMode.Y;
}
/// <summary>
/// Binary-search-style scan for death frames: samples the on-screen life
/// counter every <paramref name="stepSize"/> seconds and, when the count
/// drops, recurses over the surrounding interval with a halved step until
/// single-frame resolution is reached.
/// </summary>
/// <param name="capture">Video to scan; its read position is moved.</param>
/// <param name="deaths">Accumulator for detected death frame indices.</param>
/// <param name="videoScale">Scale factor forwarded to the life counter.</param>
/// <param name="startFrame">First frame (inclusive) of the search range.</param>
/// <param name="endFrame">End frame (exclusive) of the search range.</param>
/// <param name="stepSize">Sampling step in seconds; halved per recursion.</param>
/// <returns>True when a death was pinpointed at single-frame resolution.</returns>
private static bool FindDeathsRecursive(VideoCapture capture, List <int> deaths, float videoScale, int startFrame, int endFrame, float stepSize = 4)
{
    capture.Set(VideoCaptureProperties.PosMsec, 0);
    double fps = capture.Get(VideoCaptureProperties.Fps);
    int stepSizeFrames = (int)(stepSize * capture.Fps);
    int currentLives = -1;  // -1 = not yet initialized from the first sample
    int lastFrame = 0;
    for (int frame = startFrame; frame < endFrame; frame += stepSizeFrames)
    {
        int lifeCount = LifeCounter.GetLifeCount(capture, frame / fps, videoScale);
        if (currentLives == -1)
        {
            currentLives = lifeCount;
        }
        // A reading of 0 lives is treated as a recognition miss and skipped.
        if (lifeCount > 0)
        {
            if (lifeCount < currentLives)
            {
                if (stepSizeFrames <= 1)
                {
                    // Single-frame resolution reached: record the death once.
                    if (!deaths.Contains(lastFrame))
                    {
                        deaths.Add(lastFrame);
                    }
                    // NOTE(review): dbgMat looks like leftover debugging; the
                    // read advances the capture one frame but is never used.
                    Mat dbgMat = new Mat();
                    capture.Read(dbgMat);
                    return(true);
                }
                else
                {
                    // Try first half, then second half
                    if (!FindDeathsRecursive(capture, deaths, videoScale, lastFrame, frame + stepSizeFrames / 2, stepSize / 2))
                    {
                        FindDeathsRecursive(capture, deaths, videoScale, lastFrame + stepSizeFrames / 2, frame, stepSize / 2);
                    }
                }
            }
            currentLives = lifeCount;
        }
        lastFrame = frame;
    }
    return(false);
}
/// <summary>
/// Camera capture loop (worker-thread entry point): opens camera
/// <c>camID</c> at the largest resolution the driver allows and streams
/// frames to the UI until <c>liveCamera</c> is cleared.
/// </summary>
private void CaptureCameraCallback()
{
    Mat frame = new Mat();
    VideoCapture capture = new VideoCapture();
    capture.Open(camID);
    if (!capture.IsOpened())
    {
        // Clean up before bailing out (previously leaked on this path).
        frame.Release();
        capture.Release();
        return;
    }
    // Request an absurdly large size; the driver clamps it to the camera's
    // maximum, which we then read back.
    capture.Set(CaptureProperty.FrameWidth, 10000);
    capture.Set(CaptureProperty.FrameHeight, 10000);
    int w = (int)capture.Get(CaptureProperty.FrameWidth);
    int h = (int)capture.Get(CaptureProperty.FrameHeight);
    while (liveCamera)
    {
        capture.Read(frame);
        frameCount++;
        WriteableBitmap bmp = frame.ToWriteableBitmap(PixelFormats.Bgr24);
        bmp.Freeze();   // allow the UI thread to use the bitmap cross-thread
        updateFrameStats(bmp, w, h);
    }
    frame.Release();
    capture.Release();
    try
    {
        Dispatcher.Invoke(new Action(() => { lblStatus.Content = ""; }));
    }
    catch
    {
        // Best effort: the dispatcher may already be shut down during app exit.
    }
}
/// <summary>
/// Running-average background extraction demo: accumulates every frame of
/// "Input.mp4" with a small weight and displays the evolving average
/// until the video ends or ESC is pressed.
/// </summary>
public void Run()
{
    //https://drive.google.com/file/d/1rc13wZ9zC03ObG5zB3uccUtsg_rsI8hC/view
    VideoCapture cap = VideoCapture.FromFile("Input.mp4");
    Mat avg = new Mat();
    Mat output = new Mat();

    while (true)
    {
        Mat frame = new Mat();
        cap.Read(frame);
        if (frame.Empty())
        {
            break;
        }

        // Seed the float accumulator from the very first frame.
        if (cap.Get(CaptureProperty.PosFrames) == 1)
        {
            frame.ConvertTo(avg, MatType.CV_32F);
        }

        Cv2.AccumulateWeighted(frame, avg, 0.0005, null);
        Cv2.ConvertScaleAbs(avg, output);
        Cv2.ImShow("output", output);

        // ESC exits the preview loop.
        if ((char)Cv2.WaitKey(25) == 27)
        {
            break;
        }
        frame.Release();
    }

    // Release everything and close the display windows.
    cap.Release();
    avg.Release();
    output.Release();
    Cv2.DestroyAllWindows();
}
/// <summary>
/// extract video to images: saves every frame of the video as
/// "Videos\&lt;videoName&gt;_&lt;frameIndex&gt;.jpg".
/// </summary>
/// <param name="fileName">Path of the video to export.</param>
static void ExportToJpg(string fileName)
{
    using (VideoCapture videoCapture = new VideoCapture(fileName))
    {
        var total = Convert.ToInt64(videoCapture.Get(CaptureProperty.FrameCount));
        long start = 1;
        videoCapture.Set(CaptureProperty.PosFrames, start);
        long stop = total;
        // Hoisted out of the loop: the base name never changes per frame.
        string baseName = Path.GetFileNameWithoutExtension(fileName);
        // Dispose the reusable frame buffer when done (previously leaked).
        using (Mat frame = new Mat())
        {
            long current = start;
            while (current < stop && videoCapture.Read(frame))
            {
                frame.SaveImage("Videos\\" + baseName + $"_{current}.jpg");
                current++;
            }
        }
        // Redundant explicit Release() removed: the using block disposes the capture.
    }
}
/// <summary>
/// Finds frames between <paramref name="timeStart"/> and
/// <paramref name="timeEnd"/> (seconds) that show the "binocular" overlay:
/// a half-second coarse scan first, then a per-frame fine scan within one
/// second either side of each coarse hit.
/// </summary>
/// <param name="capture">Capture to scan; its read position is moved.</param>
/// <param name="timeStart">Scan start time in seconds.</param>
/// <param name="timeEnd">Scan end time in seconds.</param>
/// <returns>Sorted list of matching frame indices. NOTE(review): may
/// contain duplicates when coarse hits are less than 2*fps apart.</returns>
public static List <int> FindBinocularFrames(VideoCapture capture, float timeStart, float timeEnd)
{
    int fps = (int)capture.Fps;
    List <int> firstPassFrames = new List <int>();
    List <int> frames = new List <int>();
    // Coarse pass: sample one frame every half second.
    for (float time = timeStart; time < timeEnd; time += 0.5f) // 1 second is too coarse
    {
        capture.Set(VideoCaptureProperties.PosMsec, time * 1000.0f);
        int frame = (int)capture.Get(VideoCaptureProperties.PosFrames);
        Mat mat = new Mat();
        capture.Read(mat);
        bool isBinocular = IsBinocularFrame(mat);
        if (isBinocular)
        {
            firstPassFrames.Add(frame);
        }
    }
    // Fine pass: re-check every individual frame around each coarse hit.
    foreach (var f in firstPassFrames)
    {
        for (int i = f - fps + 1; i < f + fps; i++)
        {
            capture.Set(VideoCaptureProperties.PosFrames, i);
            Mat mat = new Mat();
            capture.Read(mat);
            bool isBinocular = IsBinocularFrame(mat);
            if (isBinocular)
            {
                frames.Add(i);
            }
        }
    }
    frames.Sort();
    return(frames);
}
/// <summary>
/// Opens webcam device 0 into the <c>capture</c> field and logs backend
/// name, channel and open state; on exception logs a fatal message and
/// returns without throwing.
/// </summary>
public void StartWebcam()
{
    Log.Debug("Starting Camera... {0}", "Channel 0");
    frame = new Mat();
    capture = new VideoCapture();
    try
    {
        capture.Open(0);
        Log.Debug("GetBackendName {0}", capture.GetBackendName());
        Log.Debug("Channel {0}", capture.Get(VideoCaptureProperties.Channel));
        Log.Debug("IsOpened {0}", capture.IsOpened());
    }
    catch (Exception e)
    {
        // NOTE(review): Open(0) often returns false rather than throwing when
        // no camera is present — consider also checking IsOpened() explicitly.
        Log.Fatal("Failed to find cam {0} ", e.Message);
        return;
    }
}
/// <summary>
/// predict video: runs the SVM on every frame (converted to grayscale)
/// and collects one prediction score per frame.
/// </summary>
/// <param name="svm">Trained SVM used for per-frame prediction.</param>
/// <param name="fileName">Path of the video file to score.</param>
/// <returns>Predictions in frame order.</returns>
private static List <float> PredictV(SVM svm, string fileName)
{
    var res = new List <float>();
    using (VideoCapture videoCapture = new VideoCapture(fileName))
    {
        var total = Convert.ToInt64(videoCapture.Get(CaptureProperty.FrameCount));
        long start = 1;
        videoCapture.Set(CaptureProperty.PosFrames, start);
        long current = start;
        long stop = total;
        Mat frame = new Mat();
        while (current < stop && videoCapture.Read(frame))
        {
            // NOTE(review): OpenCV decodes frames as BGR, so RGB2GRAY swaps
            // the red/blue channel weights. Kept as-is because the SVM was
            // presumably trained with this same conversion — confirm against
            // the training pipeline before changing to BGR2GRAY.
            Cv2.CvtColor(frame, frame, ColorConversionCodes.RGB2GRAY);
            res.Add(Predict(svm, frame));
            current++;
        }
        frame.Release();
        videoCapture.Release();
    }
    return(res);
}
//private static float minScoreSameFrames = 0.99999f;

/// <summary>
/// Counts the amount of frames where the screen is frozen until it changes.
/// </summary>
/// <param name="type">Load type tagged onto the returned result.</param>
/// <param name="capture">Capture</param>
/// <param name="startTime">Starting time in the video in seconds</param>
/// <param name="minimumDuration">Function will only return after this amount of frames</param>
/// <param name="maxFramesToCount">Maximum amount of frames to count</param>
/// <param name="cropRect">Optional region of interest compared instead of the full frame.</param>
/// <returns>The amount of frames for which the screen has been frozen, or -1 if the screen didn't freeze</returns>
public static Load?CountFrozenFrames(LoadType type, VideoCapture capture, double startTime, int minimumDuration, int maxFramesToCount, Rect?cropRect = null)
{
    capture.Set(VideoCaptureProperties.PosMsec, startTime * 1000.0f);
    int startingFrame = (int)capture.Get(VideoCaptureProperties.PosFrames);
    int sameFrames = 0;
    // Buffer up to maxFramesToCount frames (cropped if requested); slots
    // past the end of the video stay null.
    Mat[] mats = new Mat[maxFramesToCount];
    for (int i = 0; i < maxFramesToCount; i++)
    {
        if (capture.Get(VideoCaptureProperties.PosFrames) < capture.FrameCount)
        {
            mats[i] = new Mat();
            capture.Read(mats[i]);
            if (cropRect.HasValue)
            {
                var rect = cropRect.Value;
                mats[i] = mats[i][rect.Y, rect.Y + rect.Height, rect.X, rect.X + rect.Width];
            }
        }
    }
    /*Dictionary<int, double> scoresMSE = new Dictionary<int, double>();
     * Dictionary<int, double> scoresSSIM = new Dictionary<int, double>();
     * Dictionary<int, double> scoresMaxDiff = new Dictionary<int, double>();*/
    int firstFrame = -1;
    int loadingTime = -1;
    // Scan consecutive pairs for identical frames; a run longer than
    // minimumDuration counts as a load/freeze.
    for (int i = 0; i < maxFramesToCount - 2; i++)
    {
        if (mats[i + 1] == null)
        {
            continue;
        }
        // NOTE(review): matA/matB feed only the commented-out scoring
        // experiments below; the live path uses mats[i]/mats[i+1] directly.
        var matA = mats[i];
        var matB = mats[i + 1];
        //matA = matA.Resize(new Size(matA.Width/2,matA.Height/2));
        //matA = matA.CvtColor(ColorConversionCodes.RGB2GRAY, 1);
        //matB = matB.Resize(new Size(matB.Width/2,matB.Height/2));
        //matB = matB.CvtColor(ColorConversionCodes.RGB2GRAY, 1);
        //var ssim = SSIM.GetMssim(matA, matB);
        //var mse = GetMSE(matA, matB);
        //scoresMSE.Add(i,mse);
        //scoresSSIM.Add(i, ssim.Score);
        //scoresMaxDiff.Add(i,GetMaxDifference(matA, matB));
        //Cv2.ImWrite(Path.Combine("debugExport", $"compare_{startingFrame+i}_{mse:F4}.png"), mats[i]);
        //Cv2.ImWrite(Path.Combine("debugExport", $"compare_{i}_{mse:F4}.png"), mats[i]);
        // If either the next frame or the frame after that is the same
        if (FramesIdentical(mats[i], mats[i + 1]))
        {
            if (firstFrame == -1)
            {
                firstFrame = i;
            }
            sameFrames++;
        }
        else
        {
            // Run ended: accept it if long enough, otherwise reset the counters.
            if (sameFrames > minimumDuration)
            {
                loadingTime = sameFrames;
                break;
            }
            firstFrame = -1;
            sameFrames = 0;
        }
    }
    if (loadingTime >= 0)
    {
        return(new Load(type, startingFrame + firstFrame, startingFrame + firstFrame + loadingTime));
    }
    else
    {
        Debug.WriteLine($"CountFrozenFrames: No frozen frames found, LoadType {type}, Start Time {startTime}, MinimumDuration {minimumDuration}, MaxFramesToCount = {maxFramesToCount}");
        return(null);
    }
}
/// <summary>
/// Builds a human-readable report of capture/stream/writer backends and,
/// except on macOS, probes capture device 0 to report its backend and
/// hardware-acceleration mode.
/// </summary>
/// <returns>Multi-line report text.</returns>
private static String GetCaptureInfo()
{
    String captureText = String.Format("Capture Backends (VideoCapture from device): {0}{1}",
        System.Environment.NewLine,
        GetBackendInfo(CvInvoke.Backends));

    //We don't want to create VideoCapture on Mac OS unless we have requested camera permission
    if (Emgu.Util.Platform.OperationSystem != Platform.OS.MacOS)
    {
        using (VideoCapture cap = new VideoCapture(0, VideoCapture.API.Any,
            new Tuple <CapProp, int>(CapProp.HwAcceleration, (int)VideoAccelerationType.Any)))
        {
            if (cap.IsOpened)
            {
                String backendName = cap.BackendName;
                VideoAccelerationType hwAcceleration = (VideoAccelerationType)cap.Get(CapProp.HwAcceleration);
                captureText += String.Format(
                    "{0}VideoCapture device successfully opened with default backend: {1} (hw acceleration: {2})",
                    System.Environment.NewLine,
                    backendName,
                    hwAcceleration);
            }
            else
            {
                captureText += String.Format(
                    "{0}VideoCapture device failed to opened with default backend: {1}",
                    System.Environment.NewLine,
                    cap.BackendName);
            }
        }
    }

    captureText += String.Format("{0}{0}Stream Backends (VideoCapture from file/Stream): {0}{1}",
        System.Environment.NewLine,
        GetBackendInfo(CvInvoke.StreamBackends));

    captureText += String.Format("{0}{0}VideoWriter backends: {0}{1}{0}",
        Environment.NewLine,
        GetBackendInfo(CvInvoke.WriterBackends));
    captureText += GetVideoWriterFFMPEGInfo();
    //captureText += GetVideoWriterIntelMfxInfo();
    return(captureText);
}
/// <summary>
/// Builds a report of capture/stream/writer backends; probes capture
/// device 0 (except on macOS) and, on Windows, test-creates an
/// FFMPEG-backed VideoWriter to report its hardware-acceleration mode.
/// </summary>
/// <returns>Multi-line report text.</returns>
private static String GetCaptureInfo()
{
    String captureText = String.Format("Capture Backends (VideoCapture from device): {0}{1}",
        System.Environment.NewLine,
        GetBackendInfo(CvInvoke.Backends));

    //We don't want to create VideoCapture on Mac OS unless we have requested camera permission
    if (Emgu.Util.Platform.OperationSystem != Platform.OS.MacOS)
    {
        using (VideoCapture cap = new VideoCapture(0, VideoCapture.API.Any,
            new Tuple <CapProp, int>(CapProp.HwAcceleration, (int)VideoAccelerationType.Any)))
        {
            if (cap.IsOpened)
            {
                String backendName = cap.BackendName;
                VideoAccelerationType hwAcceleration = (VideoAccelerationType)cap.Get(CapProp.HwAcceleration);
                captureText += String.Format(
                    "{0}VideoCapture device successfully opened with default backend: {1} (hw acceleration: {2})",
                    System.Environment.NewLine,
                    backendName,
                    hwAcceleration);
            }
            // NOTE(review): unlike the sibling overload, nothing is appended
            // when the device fails to open.
        }
    }
    captureText += String.Format("{0}{0}Stream Backends (VideoCapture from file/Stream): {0}{1}",
        System.Environment.NewLine,
        GetBackendInfo(CvInvoke.StreamBackends));

    captureText += String.Format("{0}{0}VideoWriter backends: {0}{1}{0}",
        Environment.NewLine,
        GetBackendInfo(CvInvoke.WriterBackends));

    if (Emgu.Util.Platform.OperationSystem == Platform.OS.Windows)
    {
        // Look up the FFMPEG writer backend id, if it is available.
        Emgu.CV.Backend[] backends = CvInvoke.WriterBackends;
        int backend_idx = 0;//any backend;
        String backendName = String.Empty;
        foreach (Emgu.CV.Backend be in backends)
        {
            if (be.Name.Equals("FFMPEG"))
            //if (be.Name.Equals("INTEL_MFX"))
            {
                backend_idx = be.ID;
                backendName = be.Name;
                break;
            }
        }

        if (backend_idx > 0) //FFMPEG backend is available
        {
            // Test-create a writer purely to discover which hardware
            // acceleration mode it ends up using.
            using (VideoWriter writer = new VideoWriter(
                "tmp.avi",
                backend_idx,
                VideoWriter.Fourcc('X', 'V', 'I', 'D'),
                25,
                new Size(640, 480),
                new Tuple <VideoWriter.WriterProperty, int>[]
                {
                    new Tuple <VideoWriter.WriterProperty, int>(VideoWriter.WriterProperty.IsColor, 1),
                    new Tuple <VideoWriter.WriterProperty, int>(VideoWriter.WriterProperty.HwAcceleration, (int)VideoAccelerationType.Any)
                }))
            {
                VideoAccelerationType hwAcceleration =
                    (VideoAccelerationType)writer.Get(VideoWriter.WriterProperty.HwAcceleration);
                captureText += String.Format("{0}VideoWriter successfully created with backend: {1} (hw acceleration: {2})",
                    System.Environment.NewLine,
                    backendName,
                    hwAcceleration);
            }
        }
    }
    return(captureText);
}
/// <summary>
/// Motion-detection demo: reads an RTSP stream, diffs blurred grayscale
/// frames against a reference frame (refreshed every 50 frames), draws
/// bounding boxes around changed regions, previews both the annotated
/// frame and the delta mask, and records annotated frames to
/// C:\temp\capture.avi. ESC exits.
/// </summary>
static void Main(string[] args)
{
    var afWindow = new Window("Annotated Frame");
    var cdWindow = new Window("Contour Delta");
    VideoCapture capture = new VideoCapture("rtsp://10.0.0.104:554/1/h264major");
    int frameIndex = 0;
    Mat lastFrame = new Mat();
    VideoWriter writer = null;
    while (capture.IsOpened())
    {
        Mat frame = new Mat();
        if (!capture.Read(frame))
        {
            break;
        }
        // NOTE(review): dilatedFrame, edges and deltaCopyFrame are declared
        // but never used.
        Mat grayFrame, dilatedFrame, edges, deltaCopyFrame = new Mat();
        Mat deltaFrame = new Mat();
        try
        {
            // Downscale to a third to cut processing cost.
            frame = frame.Resize(new Size(0, 0), 0.33, 0.33);
        }
        catch (Exception e)
        {
            // NOTE(review): resize failures are silently swallowed; the
            // full-size frame is processed instead.
        }
        grayFrame = frame.CvtColor(ColorConversionCodes.BGR2GRAY);
        grayFrame = grayFrame.GaussianBlur(new Size(21, 21), 0);
        if (frameIndex == 0)
        {
            // First frame: position the windows, open the recorder, and use
            // this frame as the diff reference.
            frameIndex++;
            afWindow.Move(0, 0);
            cdWindow.Move(0, grayFrame.Size().Height);
            string fileName = "C:\\temp\\capture.avi";
            string fcc = capture.FourCC;
            double fps = capture.Get(CaptureProperty.Fps);
            Size frameSize = new Size(grayFrame.Size().Width, grayFrame.Size().Height);
            writer = new VideoWriter(fileName, fcc, fps, frameSize);
            Console.Out.WriteLine("Frame Size = " + grayFrame.Size().Width + " x " + grayFrame.Size().Height);
            if (!writer.IsOpened())
            {
                Console.Out.WriteLine("Error Opening Video File For Write");
                return;
            }
            lastFrame = grayFrame;
            continue;
        }
        else if (frameIndex % 50 == 0)
        {
            // Refresh the reference frame every 50 frames.
            frameIndex = 0;
            lastFrame = grayFrame;
        }
        frameIndex++;
        // Threshold the diff and dilate to merge nearby changed regions.
        Cv2.Absdiff(lastFrame, grayFrame, deltaFrame);
        Cv2.Threshold(deltaFrame, deltaFrame, 50, 255, ThresholdTypes.Binary);
        int iterations = 2;
        Cv2.Dilate(deltaFrame, deltaFrame, new Mat(), new Point(), iterations);
        Point[][] contours;
        HierarchyIndex[] hierarchy;
        Cv2.FindContours(deltaFrame, out contours, out hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple, new Point(0, 0));
        var countorsPoly = new Point[contours.Length][];
        List <Rect> boundRect = new List <Rect>();
        List <Point2f> center = new List <Point2f>();
        List <float> radius = new List <float>();
        for (int i = 0; i < contours.Length; i++)
        {
            countorsPoly[i] = Cv2.ApproxPolyDP(contours[i], 3, true);
            // NOTE(review): countorsPoly.Length is the fixed outer-array
            // length — this check was probably meant per element.
            if (countorsPoly.Length != 0)
            {
                boundRect.Insert(i, Cv2.BoundingRect(countorsPoly[i]));
                Cv2.MinEnclosingCircle(countorsPoly[i], out Point2f centerObj, out float radiusObj);
                center.Insert(i, centerObj);
                radius.Insert(i, radiusObj);
            }
        }
        // Draw a bounding box around each detected motion region.
        for (int i = 0; i < contours.Length; i++)
        {
            if (countorsPoly.Length != 0)
            {
                Scalar color = new Scalar(54, 67, 244);
                //Cv2.DrawContours(frame, countorsPoly, i, color, 1, LineTypes.Link8, new HierarchyIndex[] { }, 0, new Point());
                Cv2.Rectangle(frame, boundRect[i].TopLeft, boundRect[i].BottomRight, color, 2, LineTypes.Link8, 0);
                //Cv2.Circle(frame, (int)center[i].X, (int)center[i].Y, (int)radius[i], color, 2, LineTypes.Link8, 0);
            }
        }
        afWindow.ShowImage(frame);
        cdWindow.ShowImage(deltaFrame);
        writer.Write(frame);
        switch (Cv2.WaitKey(1))
        {
            case 27:
                // ESC: release resources and exit.
                capture.Release();
                writer.Release();
                return;
        }
    }
}
/// <summary>
/// Detects brand names (via Google Cloud Vision document-text detection)
/// in roughly one frame per second of an uploaded video, highlights each
/// hit, writes the aggregated results to "Results.txt" as JSON, and stores
/// an EvaluationResults object in the HTTP session.
/// </summary>
/// <param name="ar_path_for_uploading_videos">Root folder that holds uploaded videos.</param>
/// <param name="ar_working_folder_name">Per-upload working subfolder name.</param>
/// <param name="ar_uploaded_video_name">File name of the uploaded video.</param>
/// <param name="ar_brand_names">Brand names to look for in each frame.</param>
/// <param name="ar_cost_of_1_second">Cost of one second of air time, used in valuation.</param>
public static void f_main(String ar_path_for_uploading_videos, String ar_working_folder_name, String ar_uploaded_video_name, String[] ar_brand_names, double ar_cost_of_1_second)
{
    //===initializing google API key
    string ls_google_app_credentials_path_and_filename = HostingEnvironment.MapPath("~/CloudVision/google_cloud_credential_for_logo_detection-nowting-bd7886019869.json");
    Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", ls_google_app_credentials_path_and_filename);
    VideoCapture l_capture = VideoCapture.FromFile(Path.Combine(ar_path_for_uploading_videos, ar_working_folder_name, ar_uploaded_video_name));
    // NOTE(review): magic property id 5 corresponds to CAP_PROP_FPS.
    Double l_framerate = l_capture.Get(5);
    Double l_frameid;
    int li_counter = 0;
    Mat l_frame = new Mat();
    // NOTE(review): l_frame_with_boundarie is never used in this method.
    Mat l_frame_with_boundarie = new Mat();
    String ls_result_folder_path = Path.Combine(ar_path_for_uploading_videos, ar_working_folder_name);
    Directory.CreateDirectory(ls_result_folder_path);
    List <DataResult> l_list_DataResult = new List <DataResult>();
    while (true)
    {
        l_capture.Read(l_frame);
        if (l_frame.Empty())
        {
            break;
        }
        // NOTE(review): magic property id 1 corresponds to CAP_PROP_POS_FRAMES.
        l_frameid = l_capture.Get(1);
        if (l_frameid % l_framerate != 0)
        {
            continue; //===getting 1 frame per second
        }
        //======================================================================================================
        //======================================================================================================
        li_counter++;
        if (li_counter != 1 && li_counter != 2 && li_counter != 3)
        {
            continue;
        }
        //=== temp code to process only the first 3 frames
        //======================================================================================================
        //======================================================================================================
        //===find all texts in frame
        AnnotateImageResponse l_response = f_find_brand_by_DetectDocumentText_in(l_frame);
        //===set a rectangle over each corresponding brand-text and save the frame
        foreach (string l_brand_name in ar_brand_names)
        {
            if (String.IsNullOrEmpty(l_brand_name.Trim()))
            {
                continue;
            }
            DataResult l_current_data_result = f_hilight_brand_and_save_frame(l_frame, l_response, l_brand_name, ar_cost_of_1_second, ls_result_folder_path, "pic_" + li_counter.ToString(), li_counter);
            l_list_DataResult.Add(l_current_data_result);
        }
    }
    //===write result into file
    using (StreamWriter l_file = File.AppendText(Path.Combine(ls_result_folder_path, "Results.txt")))
    {
        JsonSerializer serializer = new JsonSerializer();
        serializer.Serialize(l_file, l_list_DataResult);
    }
    string l_domainnamefordownloadingresults = ConfigurationManager.AppSettings["domainnamefordownloadingresults"];
    EvaluationResults l_EvaluationResults = new EvaluationResults();
    l_EvaluationResults.ResultPathURL = l_domainnamefordownloadingresults + "/" + ar_working_folder_name;
    l_EvaluationResults.BrandNames = ar_brand_names;
    l_EvaluationResults.BrandIndexToShow = 0;
    l_EvaluationResults.array_DataResult = l_list_DataResult.ToArray();
    HttpContext.Current.Session["results"] = l_EvaluationResults;
    return;
}
/// <summary>
/// Console tool for finding which video a screenshot came from.
/// Mode "c" indexes a folder of videos into a cache of 32x32 thumbnails
/// (every 5th frame); mode "s" searches the cache for the thumbnail most
/// similar (by PSNR) to one or more screenshots.
/// </summary>
static void Main(string[] args)
{
    // Prompt: choose mode — c = build index, s = search.
    Console.WriteLine("请选择模式,索引:c,查找:s");
    string mode = Console.ReadLine();
    if (!(mode == "c" || mode == "s"))
    {
        // "Unknown command"
        Console.WriteLine("未知命令");
        return;
    }
    // Prompt: directory holding the cache file.
    Console.WriteLine("请输入缓存文件目录");
    string cache = Console.ReadLine().Replace("\"", " ").Trim();
    cache = Path.Combine(cache, "findVideo.cache");
    Cache ch = new Cache();
    bool useChche = false;
    // In search mode, try to load the previously built index from disk.
    if (File.Exists(cache) && mode == "s")
    {
        try
        {
            ch = (Cache)DeserializeWithBinary(File.ReadAllBytes(cache));
            useChche = true;
        }
        catch (Exception)
        {
            // "Failed to read cache"
            Console.WriteLine("读取缓存错误");
        }
    }
    string pic = "";
    string tarDir = "";
    if (args.Length != 2)
    {
        if (mode == "s")
        {
            // Prompt: path of the screenshot to search for.
            Console.WriteLine("请输入想找的视频的截屏的路径:");
            pic = Console.ReadLine().Replace("\"", " ").Trim();
        }
        if (mode == "c")
        {
            // Prompt: path of the folder holding the videos.
            Console.WriteLine("请存放视频的文件夹路径:");
            tarDir = Console.ReadLine().Replace("\"", " ").Trim();
        }
    }
    else
    {
        pic = args[0];
        tarDir = args[1];
    }
    // All comparisons are done on 32x32 thumbnails.
    var size = new Size(32, 32);
    var sources = new List <Mat>();
    var source_file = new List <string>();
    if (mode == "s")
    {
        if (Directory.Exists(pic))
        {
            // pic is a directory: load every screenshot inside it.
            foreach (var item in Directory.GetFiles(pic))
            {
                source_file.Add(item);
                var s = new Mat(item, ImreadModes.Color);
                var nmt = new Mat();
                Cv2.Resize(s, nmt, size);
                sources.Add(nmt);
            }
        }
        else if (File.Exists(pic))
        {
            // pic is a single screenshot file.
            var source = new Mat(pic, ImreadModes.Color);
            var cmpMat = new Mat();
            Cv2.Resize(source, cmpMat, size);
            sources.Add(cmpMat);
        }
    }
    // Per-screenshot best-match bookkeeping (score, position %, path, time).
    double[] maxD = new double[sources.Count];
    double[] sim_per = new double[sources.Count];
    string[] sim_path = new string[sources.Count];
    string[] sim_time = new string[sources.Count];
    if (mode == "s" && useChche)
    {
        // Compare every cached thumbnail of every indexed video against every
        // screenshot, keeping the highest-PSNR match for each screenshot.
        foreach (var item in ch.files)
        {
            for (int i = 0; i < item.smallMat.Count; i++)
            {
                var small = item.smallMat[i];
                for (int j = 0; j < sources.Count; j++)
                {
                    var cmpMat = sources[j];
                    var d = getPSNR(cmpMat, Mat.FromStream(new MemoryStream(small), ImreadModes.Color));
                    if (d > maxD[j])
                    {
                        maxD[j] = d;
                        double maxI = i;
                        sim_path[j] = item.path;
                        sim_per[j] = maxI / item.smallMat.Count * 100;
                        // Thumbnails were sampled every 5 frames, hence the * 5.
                        sim_time[j] = formatLongToTimeStr(maxI * 5 * 1 / item.fps * 1000);
                    }
                }
            }
        }
        // Print the best match per screenshot.
        for (int i = 0; i < source_file.Count; i++)
        {
            Console.WriteLine("--------------------------");
            Console.WriteLine($"截屏文件:{source_file[i]}");
            Console.WriteLine($"对应视频文件:{sim_path[i]}");
            Console.WriteLine($"相似程度(越大越好):{maxD[i]}");
            Console.WriteLine($"最相似:{sim_per[i]}%,大约在{sim_time[i]}");
        }
    }
    else if (mode == "c")
    {
        // Index mode: store a 32x32 thumbnail of every 5th frame per video.
        foreach (var item in Directory.GetFiles(tarDir))
        {
            var file = new MatFile();
            file.path = item;
            try
            {
                var video = new VideoCapture(item);
                var mat = new Mat();
                var index = 0;
                double frameount = video.Get(VideoCaptureProperties.FrameCount);
                double lastp = 0;
                // "Current file: ..."
                Console.WriteLine($"当前文件:{item}");
                while (video.Read(mat))
                {
                    // Print progress roughly every 10%.
                    var per = index / frameount * 100;
                    if (per - lastp > 10)
                    {
                        Console.WriteLine($"...{per} %");
                        lastp = per;
                    }
                    var small = new Mat();
                    Cv2.Resize(mat, small, size);
                    file.smallMat.Add(small.ToMemoryStream().ToArray());
                    // Skip 4 frames so only every 5th frame is indexed.
                    video.Grab();
                    video.Grab();
                    video.Grab();
                    video.Grab();
                    index += 5;
                }
                file.fps = video.Fps;
                ch.files.Add(file);
            }
            catch (Exception e)
            {
                // NOTE(review): unreadable/non-video files are silently skipped.
            }
        }
        try
        {
            File.WriteAllBytes(cache, SerializeToBinary(ch));
        }
        catch (Exception)
        {
            // "Failed to write cache"
            Console.WriteLine("写入缓存失败");
        }
    }
    else
    {
        // "Build the index before searching."
        Console.WriteLine("搜索时请先建立索引。");
    }
}
/// <summary>
/// On Windows, probes VideoCapture device 0 (any backend, requesting any
/// hardware acceleration) and reports the backend and acceleration mode
/// actually used, a failure message, or "no capture device found" when the
/// probe throws. Returns an empty string on other platforms.
/// </summary>
/// <returns>Report text (empty off Windows).</returns>
private static String GetVideoCaptureInfo()
{
    if (Emgu.Util.Platform.OperationSystem == Platform.OS.Windows)
    {
        String captureText = String.Empty;
        try
        {
            using (VideoCapture cap = new VideoCapture(0, VideoCapture.API.Any,
                new Tuple <CapProp, int>(CapProp.HwAcceleration, (int)VideoAccelerationType.Any)))
            {
                if (cap.IsOpened)
                {
                    VideoAccelerationType hwAcceleration = (VideoAccelerationType)cap.Get(CapProp.HwAcceleration);
                    captureText += String.Format(
                        "{0}VideoCapture device successfully opened with backend: {1} (hw acceleration: {2})",
                        System.Environment.NewLine,
                        cap.BackendName,
                        hwAcceleration);
                }
                else
                {
                    captureText += String.Format(
                        "{0}VideoCapture device failed to opened.",
                        System.Environment.NewLine);
                }
            }
        }
        catch (Emgu.CV.Util.CvException e)
        {
            //System.Console.WriteLine(">>>>>>>>>>>>>>>>>>>>>");
            // A throwing probe is treated as "no device present".
            captureText += String.Format("{0}No capture device found.", System.Environment.NewLine);
        }
        return(captureText);
    }
    return(String.Empty);
}