private void showVideoFrame(double part)
{
    try
    {
        System.Drawing.Bitmap frame = FrameGrabber.GetFrameFromVideo(PathLoad, part);
        if (_intrinsicCamera != null && _distCoefs != null)
        {
            // Undistort the frame using the calibrated camera parameters.
            Image<Rgb, byte> frameBuffer = new Image<Rgb, byte>(frame);
            Image<Rgb, byte> frameResultBuffer = new Image<Rgb, byte>(frame.Size);
            CvInvoke.Undistort(frameBuffer, frameResultBuffer, _intrinsicCamera, _distCoefs);
            frame = frameResultBuffer.ToBitmap();
        }

        // Save the frame as a PNG and display it on the canvas.
        _imgPath = _tmpFolder + _imgName + _imgCounter++ + ".png";
        frame.Save(_imgPath, System.Drawing.Imaging.ImageFormat.Png);

        ImageBrush brush = new ImageBrush();
        brush.ImageSource = new System.Windows.Media.Imaging.BitmapImage(new System.Uri(_imgPath, System.UriKind.Absolute));
        CanvasIMG = brush;
    }
    catch (InvalidVideoFileException)
    {
        InfoText = "An error occurred. The video file has the wrong format.";
    }
    catch (System.ArgumentOutOfRangeException)
    {
        InfoText = "An error occurred. The requested part of the video doesn't exist.";
    }
}
private void button1_Click(object sender, EventArgs e)
{
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        fg = new FrameGrabber(openFileDialog1.FileName);
    }
}
static void Main(string[] args)
{
    // Create a grabber for the first available video input device.
    var cam = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice).First();
    var grabber = new FrameGrabber(cam);
    Console.ReadLine();
}
/// <summary>
/// Browse the video information.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void buttonVideoInfoScan_Click(object sender, EventArgs e)
{
    try
    {
        Microsoft.DirectX.AudioVideoPlayback.Video video = new Video(videoFileName);
        double frameRate = 1 / video.AverageTimePerFrame;

        // Browse the video frame by frame in the picture box control.
        WinStructs.VIDEOBASICINFO videoBasicInfo = FrameGrabber.GetVideoBasicInfo(videoFileName);
        if (videoBasicInfo.totalFrames == 0)
        {
            throw new ArgumentException("Total Frames is 0!");
        }

        int intervalFrameNum = Convert.ToInt32(textBox_IntervalNum.Text.Trim());
        for (int i = 1; i <= videoBasicInfo.totalFrames; i += intervalFrameNum)
        {
            this.pictureBox1.Refresh();
            this.pictureBox1.Image = FrameGrabber.GetFrameFromVideo(videoBasicInfo.fileName, i, Size.Empty, string.Empty);
            this.pictureBox1.Image.Save("D:\\frames\\" + i.ToString() + ".bmp");
        }
    }
    catch (VideoAnalysisProcessSystem.ClassFile.InvalidVideoFileException ex)
    {
        MessageBox.Show(ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    catch (StackOverflowException)
    {
        MessageBox.Show("The target image size is too big", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
private void convertirenimages(object sender, RoutedEventArgs e)
{
    // Convert the video at 'url' into individual PNG images.
    FrameGrabber fg = new FrameGrabber(url);
    string output = "D:\\projet_football";
    if (fg != null)
    {
        System.IO.Directory.CreateDirectory(output);
        foreach (FrameGrabber.Frame f in fg)
        {
            using (f)
            {
                f.Image.Save(System.IO.Path.Combine(output, "frame" + f.FrameIndex + ".png"), System.Drawing.Imaging.ImageFormat.Png);
            }
        }
    }
}
private static void Main(string[] args)
{
    // Create grabber.
    FrameGrabber<DetectedFace[]> grabber = new FrameGrabber<DetectedFace[]>();

    // Create Face API client.
    FaceClient faceClient = new FaceClient(new ApiKeyServiceClientCredentials(ApiKey))
    {
        Endpoint = Endpoint
    };

    // Set up a listener for when we acquire a new frame.
    grabber.NewFrameProvided += (s, e) =>
    {
        Console.WriteLine($"New frame acquired at {e.Frame.Metadata.Timestamp}");
    };

    // Set up Face API call.
    grabber.AnalysisFunction = async frame =>
    {
        Console.WriteLine($"Submitting frame acquired at {frame.Metadata.Timestamp}");

        // Encode image and submit to Face API.
        return (await faceClient.Face.DetectWithStreamAsync(frame.Image.ToMemoryStream(".jpg"))).ToArray();
    };

    // Set up a listener for when we receive a new result from an API call.
    grabber.NewResultAvailable += (s, e) =>
    {
        if (e.TimedOut)
        {
            Console.WriteLine("API call timed out.");
        }
        else if (e.Exception != null)
        {
            // Non-timeout exception while making the API call.
            Console.WriteLine("API call threw an exception.");
        }
        else
        {
            Console.WriteLine($"New result received for frame acquired at {e.Frame.Metadata.Timestamp}. {e.Analysis.Length} faces detected");
        }
    };

    // Tell grabber when to call the API.
    // See also TriggerAnalysisOnPredicate.
    grabber.TriggerAnalysisOnInterval(TimeSpan.FromMilliseconds(3000));

    // Start running in the background.
    grabber.StartProcessingCameraAsync().Wait();

    // Wait for a keypress to stop.
    Console.WriteLine("Press any key to stop...");
    Console.ReadKey();

    // Stop, blocking until done.
    grabber.StopProcessingAsync().Wait();
}
public HomeViewModel(FaceServiceClient faceServiceClient, EmotionServiceClient emotionServiceClient)
{
    _faceServiceClient = faceServiceClient;
    _emotionServiceClient = emotionServiceClient;
    _frameGrabber = new FrameGrabber<CameraResult>();
    Initialize();
}
static void Main(string[] args)
{
    using (var leftFrameGrabber = new FrameGrabber(0, "VID1", "NTSC"))
    {
        leftFrameGrabber.OnFrame += leftFrameGrabber_OnFrame;
        leftFrameGrabber.Start();

        // Keep the grabber alive until the user presses Enter; otherwise the
        // using block disposes it immediately after Start() and no frames arrive.
        Console.ReadLine();
    }
}
private static void Main(string[] args)
{
    MakeRequest();
    Console.ReadLine();
    Console.WriteLine("Ok fine");
    return;

    // NOTE: the early return above makes the grabber demo below unreachable.

    // Create grabber.
    FrameGrabber<Face[]> grabber = new FrameGrabber<Face[]>();

    // Create Face API client.
    FaceServiceClient faceClient = new FaceServiceClient("<subscription key>");

    // Set up a listener for when we acquire a new frame.
    grabber.NewFrameProvided += (s, e) =>
    {
        Console.WriteLine("New frame acquired at {0}", e.Frame.Metadata.Timestamp);
    };

    // Set up Face API call.
    grabber.AnalysisFunction = async frame =>
    {
        Console.WriteLine("Submitting frame acquired at {0}", frame.Metadata.Timestamp);

        // Encode image and submit to Face API.
        return await faceClient.DetectAsync(frame.Image.ToMemoryStream(".jpg"));
    };

    // Set up a listener for when we receive a new result from an API call.
    grabber.NewResultAvailable += (s, e) =>
    {
        if (e.TimedOut)
        {
            Console.WriteLine("API call timed out.");
        }
        else if (e.Exception != null)
        {
            Console.WriteLine("API call threw an exception.");
        }
        else
        {
            Console.WriteLine("New result received for frame acquired at {0}. {1} faces detected",
                e.Frame.Metadata.Timestamp, e.Analysis.Length);
        }
    };

    // Tell grabber when to call the API.
    // See also TriggerAnalysisOnPredicate.
    grabber.TriggerAnalysisOnInterval(TimeSpan.FromMilliseconds(3000));

    // Start running in the background.
    grabber.StartProcessingCameraAsync().Wait();

    // Wait for a keypress to stop.
    Console.WriteLine("Press any key to stop...");
    Console.ReadKey();

    // Stop, blocking until done.
    grabber.StopProcessingAsync().Wait();
}
public VideoFrameAnalyzerService(IEventAggregator eventAggregator, IVisualizationService visualizationService, IOpenCVService openCVService, IFaceService faceService)
{
    _frameGrabber = new FrameGrabber<LiveCameraResult>();
    _eventAggregator = eventAggregator;
    _visualizationService = visualizationService;
    _openCVService = openCVService;
    _faceService = faceService;
    _localFaceDetector = _openCVService.DefaultFrontalFaceDetector();
}
public MainWindow()
{
    InitializeComponent();

    // Create grabber.
    _grabber = new FrameGrabber<LiveCameraResult>();

    // Set up a listener for when the client receives a new frame.
    _grabber.NewFrameProvided += (s, e) =>
    {
        // The callback may occur on a different thread, so we must use the
        // MainWindow.Dispatcher when manipulating the UI.
        this.Dispatcher.BeginInvoke((Action)(() =>
        {
            // Display the image in the left pane.
            LeftImage.Source = e.Frame.Image.ToBitmapSource();
        }));

        // See if auto-stop should be triggered.
        if (Properties.Settings.Default.AutoStopEnabled && (DateTime.Now - _startTime) > Properties.Settings.Default.AutoStopTime)
        {
            _grabber.StopProcessingAsync();
        }
    };

    // Set up a listener for when the client receives a new result from an API call.
    _grabber.NewResultAvailable += (s, e) =>
    {
        this.Dispatcher.BeginInvoke((Action)(() =>
        {
            if (e.TimedOut)
            {
                MessageArea.Text = "API call timed out.";
            }
            else if (e.Exception != null)
            {
                string apiName = "";
                string message = e.Exception.Message;
                var faceEx = e.Exception as FaceAPIException;
                if (faceEx != null)
                {
                    apiName = "Face";
                    message = faceEx.ErrorMessage;
                }
                MessageArea.Text = string.Format("{0} API call failed on frame {1}. Exception: {2}", apiName, e.Frame.Metadata.Index, message);
            }
            else
            {
                _latestResultsToDisplay = e.Analysis;
                RightImage.Source = VisualizeResult(e.Frame);
            }
        }));
    };

    // Create local face detector.
    _localFaceDetector.Load("Data/haarcascade_frontalface_alt2.xml");
}
/// <summary>
/// HomeViewModel constructor. Assigns the API service clients and creates <see cref="FrameGrabber{AnalysisResultType}"/> and <see cref="Recording"/> objects.
/// </summary>
/// <param name="faceServiceClient"><see cref="FaceServiceClient"/> object</param>
/// <param name="speakerIdentification"><see cref="ISpeakerIdentificationServiceClient"/> object</param>
public HomeViewModel(FaceServiceClient faceServiceClient, ISpeakerIdentificationServiceClient speakerIdentification)
{
    _faceServiceClient = faceServiceClient;
    _speakerIdentification = new SpeakerIdentification(speakerIdentification);
    _frameGrabber = new FrameGrabber<CameraResult>();
    _recording = new Recording();
    Initialize();
}
//async void SendDMToMyAunt()
//{
//    string username = "******".ToLower();
//    //long userPK = 123;
//    var message = "Hello my auntie";
//    // remove \r characters, since windows is adding it to new lines
//    message = message.Replace("\r", "");
//    var inbox = await InstaApi.MessagingProcessor.GetDirectInboxAsync(PaginationParameters.MaxPagesToLoad(1));
//    if (inbox.Succeeded)
//    {
//        // Act as Instagram>
//        // search throw ranked recipients:
//        // manipulate instagram by searching like real instagram:
//        await InstaApi.MessagingProcessor.GetRankedRecipientsByUsernameAsync(username.Substring(0, 2));
//        await InstaApi.MessagingProcessor.GetRankedRecipientsByUsernameAsync(username.Substring(0, 4));
//        var rankedRecipients = await InstaApi.MessagingProcessor.GetRankedRecipientsByUsernameAsync(username);
//        if (rankedRecipients.Succeeded)
//        {
//            var threadId = string.Empty;
//            long userPk = -1;
//            if (rankedRecipients.Value?.Threads?.Count > 0)
//            {
//                var byThread = rankedRecipients.Value.Threads.FirstOrDefault(x => x.Users.Count == 1 && x.Users.FirstOrDefault()?.UserName.ToLower() == username);
//                if (byThread != null)
//                    threadId = byThread.ThreadId;
//            }
//            else
//            {
//                var byUser = rankedRecipients.Value.Users.FirstOrDefault(x => x.UserName.ToLower() == username);
//                if (byUser != null)
//                    userPk = byUser.Pk;
//            }
//            // now send message:
//            if (userPk != -1) // via user public key (user id PK) if exists
//            {
//                var dm = await InstaApi.MessagingProcessor.SendDirectTextAsync(userPk.ToString(), null, message);
//            }
//            else if (!string.IsNullOrEmpty(threadId)) // with thread id if exists
//            {
//                var dm = await InstaApi.MessagingProcessor.SendDirectTextAsync(null, threadId, message);
//            }
//            else
//                Console.WriteLine("WHAT THE F**K?! NO THREAD OR PK FOUND");
//        }
//    }
//}

//async Task<InstaDirectInboxThread> GetThread(InstaDirectInboxContainer inbox, long userPK)
//{
//    if (inbox?.Inbox?.Threads?.Count > 0)
//    {
//        var exists = inbox.Inbox.Threads.FirstOrDefault(x => x.Users.Count == 1 && x.Users.FirstOrDefault()?.Pk == userPK);
//        if (exists != null)
//            return exists;
//        var getThreadByParticipants = await Helper.InstaApi.MessagingProcessor.GetThreadByParticipantsAsync(inbox.SeqId, new long[] { userPK });
//        if (getThreadByParticipants.Succeeded)
//            return getThreadByParticipants.Value;
//    }
//    return null;
//}

private async void Button_Click(object sender, RoutedEventArgs e)
{
    FileOpenPicker openPicker = new FileOpenPicker
    {
        ViewMode = PickerViewMode.Thumbnail,
        SuggestedStartLocation = PickerLocationId.PicturesLibrary
    };
    openPicker.FileTypeFilter.Add(".jpg");
    openPicker.FileTypeFilter.Add(".bmp");
    //openPicker.FileTypeFilter.Add(".gif");
    openPicker.FileTypeFilter.Add(".png");
    openPicker.FileTypeFilter.Add(".mp4");

    var file = await openPicker.PickSingleFileAsync();
    if (file == null)
    {
        return;
    }

    var stream = await file.OpenAsync(FileAccessMode.Read);
    var grabber = await FrameGrabber.CreateFromStreamAsync(stream);

    // Extract a single frame at 4.5 seconds and save it as a JPEG.
    var frame = await grabber.ExtractVideoFrameAsync(TimeSpan.FromSeconds(4.5), false);
    int ix = 1;
    var savedFile = await KnownFolders.MusicLibrary.CreateFileAsync(ix + ".jpg", CreationCollisionOption.GenerateUniqueName);
    var oStream = await savedFile.OpenAsync(FileAccessMode.ReadWrite);
    await frame.EncodeAsJpegAsync(oStream);
    oStream.Dispose();

    for (int i = 0; i < 100; i++)
    {
        //var frame2 = await grabber.ExtractNextVideoFrameAsync();
        //var savedFile2 = await KnownFolders.MusicLibrary.CreateFileAsync(ix + ".jpg", CreationCollisionOption.GenerateUniqueName);
        //var oStream2 = await savedFile2.OpenAsync(FileAccessMode.ReadWrite);
        //await frame2.EncodeAsJpegAsync(oStream2);
        //oStream2.Dispose();
        //var img = new Image
        //{
        //    Height = 150,
        //    Width = 150,
        //    Name = ix.ToString()
        //};
        //img.Source
        //LV.Children.Add(img);
    }
}
private async Task InitAsync()
{
    var stream = await file.OpenAsync(FileAccessMode.Read);
    grabber = await FrameGrabber.CreateFromStreamAsync(stream);

    // Scale the decoded frames to a percentage of the original resolution.
    DecodedPixelWidth = (int)Math.Round(videoPropsViewModel.Width * percentageOfResolution / 100);
    DecodedPixelHeight = (int)Math.Round(videoPropsViewModel.Height * percentageOfResolution / 100);
    grabber.DecodePixelWidth = DecodedPixelWidth;
    grabber.DecodePixelHeight = DecodedPixelHeight;
}
async void InitServices()
{
    frameGrabber = await FrameGrabber.CreateAsync(this);
    frameAnalyzer = await FrameAnalyzer.CreateAsync();
    frameAnalyzer.Start();
    frameAnalyzer.OnAnalyzedFrame += FrameAnalyzer_OnAnalyzedFrame;
    speechManager = await SpeechManager.CreateAndStartAsync();
    speechManager.OnPhraseRecognized += SpeechManager_OnPhraseRecognized;
}
public MainWindow()
{
    InitializeComponent();
    _grabber = new FrameGrabber();
    _localFaceDetector = new CascadeClassifier();
    _latestResultsToDisplay = null;
    _facesGuids = new List<Guid>();
    _statisticsWindow = new StatisticsWindow();
    StatisticsData = new StatisticsData();
    InitEvents();
    _localFaceDetector.Load("Data/haarcascade_frontalface_alt2.xml");
}
private async void ExtractFrame(object sender, RoutedEventArgs e)
{
    if (currentFile == null || mediaPlayer.PlaybackSession == null)
    {
        await DisplayErrorMessage("Please open a video file first.");
    }
    else
    {
        try
        {
            var stream = await currentFile.OpenAsync(FileAccessMode.Read);
            bool exactSeek = grabFrameExactSeek.IsOn;
            var frameGrabber = await FrameGrabber.CreateFromStreamAsync(stream);
            var frame = await frameGrabber.ExtractVideoFrameAsync(mediaPlayer.PlaybackSession.Position, exactSeek);

            var filePicker = new FileSavePicker();
            filePicker.SuggestedStartLocation = PickerLocationId.VideosLibrary;
            filePicker.DefaultFileExtension = ".jpg";
            filePicker.FileTypeChoices["Jpeg file"] = new[] { ".jpg" }.ToList();

            var file = await filePicker.PickSaveFileAsync();
            if (file != null)
            {
                var outputStream = await file.OpenAsync(FileAccessMode.ReadWrite);
                await frame.EncodeAsJpegAsync(outputStream);
                outputStream.Dispose();

                bool launched = await Windows.System.Launcher.LaunchFileAsync(file, new LauncherOptions() { DisplayApplicationPicker = false });
                if (!launched)
                {
                    await DisplayErrorMessage("File has been created:\n" + file.Path);
                }
            }
        }
        catch (Exception ex)
        {
            await DisplayErrorMessage(ex.Message);
        }
    }
}
public MainWindow()
{
    InitializeComponent();

    SubscriptionKey = GetSubscriptionKeyFromIsolatedStorage();
    SubscriptionEndpoint = GetSubscriptionEndpointFromIsolatedStorage();
    SubscriptionKeyTextBox.Text = SubscriptionKey;
    SubscriptionEndpointTextBox.Text = SubscriptionEndpoint;

    _grabber = new FrameGrabber<LiveCameraResult>
    {
        AnalysisFunction = IdentifyFaceFunction
    };
    _grabber.TriggerAnalysisOnInterval(new TimeSpan(0, 0, 2));
}
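The constructor above wires `IdentifyFaceFunction` as the grabber's analysis callback, but that method is not shown. A minimal sketch of what such a callback might look like, assuming the `FrameGrabber<LiveCameraResult>` pattern from the other examples here, a `LiveCameraResult` type with a `Faces` property, and a ProjectOxford `FaceServiceClient`; `_faceClient` and `personGroupId` are hypothetical placeholders, not part of the original snippet:

// Hypothetical sketch: detect faces in the frame, then identify them against a
// trained person group. Everything named here that is not in the snippet above
// (_faceClient, personGroupId, LiveCameraResult.Faces) is an assumption.
private async Task<LiveCameraResult> IdentifyFaceFunction(VideoFrame frame)
{
    // Encode the frame as JPEG and run remote face detection.
    var faces = await _faceClient.DetectAsync(frame.Image.ToMemoryStream(".jpg"));

    // Identify the detected faces against the person group, if any were found.
    var faceIds = faces.Select(f => f.FaceId).ToArray();
    if (faceIds.Length > 0)
    {
        await _faceClient.IdentifyAsync(personGroupId, faceIds);
    }

    return new LiveCameraResult { Faces = faces };
}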
private void OnResultAvailable(object sender, FrameGrabber<CameraResult>.NewResultEventArgs e)
{
    var analysisResult = e.Analysis.EmotionScores;
    if (analysisResult == null || analysisResult.Length == 0)
    {
        return;
    }

    string emotion = AnalyseEmotions(analysisResult[0]);
    Application.Current.Dispatcher.Invoke(() =>
    {
        SystemResponse = $"You seem to be {emotion} today.";
    });
}
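`AnalyseEmotions` is not shown above. A minimal sketch of what it might do, assuming the ProjectOxford `EmotionScores` contract and its `ToRankedList()` helper; the parameter type and the lower-casing are assumptions taken from how the result string is used above:

// Hypothetical sketch: return the name of the highest-scoring emotion for one face.
// ToRankedList() orders the emotion/confidence pairs by descending score.
private string AnalyseEmotions(EmotionScores scores)
{
    var best = scores.ToRankedList().First();
    return best.Key.ToLower();   // e.g. "happiness", "neutral", "sadness"
}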
/// <summary>
/// Play the video frame by frame.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void button_FrameStep_Click(object sender, EventArgs e)
{
    try
    {
        for (int i = 0; i < TotalFrameNum; i++)
        {
            pictureBox_GrabFrame.Refresh();
            Application.DoEvents();
            pictureBox_GrabFrame.Image = FrameGrabber.GetFrameFromVideo(openFileDialog1.FileName, i, Size.Empty, VideoFormat);
            Thread.Sleep(500);
        }
    }
    catch (Exception)
    {
        // Ignore frame extraction errors and stop stepping.
    }
}
private void NewFrameHandler(object s, FrameGrabber<LiveCameraResult>.NewFrameEventArgs e)
{
    // Local face detection.
    var rects = _localFaceDetector.DetectMultiScale(e.Frame.Image);

    // Attach faces to frame.
    e.Frame.UserData = rects;

    // The callback may occur on a different thread, so we must use the
    // MainWindow.Dispatcher when manipulating the UI.
    Dispatcher.BeginInvoke((Action)(() =>
    {
        // If we're fusing client-side face detection with remote analysis, show the
        // new frame now with the most recent analysis available.
        RightImage.Source = VisualizeResult(e.Frame);
    }));
}
/// <summary>
/// Show the next frame.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void button_NextFrame_Click(object sender, EventArgs e)
{
    try
    {
        if (FrameNum + 1 <= TotalFrameNum)
        {
            pictureBox_GrabFrame.Image = FrameGrabber.GetFrameFromVideo(openFileDialog1.FileName, FrameNum + 1, Size.Empty, VideoFormat);
            nextFrame = FrameGrabber.GetFrameFromVideo(openFileDialog1.FileName, FrameNum + 1, Size.Empty, VideoFormat);
        }
        else
        {
            MessageBox.Show("There is no next frame.", "Notice");
        }
    }
    catch (Exception)
    {
        // Ignore frame extraction errors.
    }
}
public MainWindow()
{
    InitializeComponent();

    // Create grabber.
    _grabber = new FrameGrabber<LiveCameraResult>();

    // Set up a listener for when the client receives a new frame.
    _grabber.NewFrameProvided += NewFrameHandler;

    // Set up a listener for when the client receives a new result from an API call.
    _grabber.NewResultAvailable += NewResultHandler;

    // Create local face detector.
    _localFaceDetector.Load("Data/haarcascade_frontalface_alt2.xml");

    StopButton.Visibility = Visibility.Collapsed;
}
private void NewResultHandler(object s, FrameGrabber<LiveCameraResult>.NewResultEventArgs e)
{
    Dispatcher.BeginInvoke((Action)(() =>
    {
        if (e.TimedOut)
        {
            //MessageArea.Text = "API call timed out.";
        }
        else if (e.Exception != null)
        {
            string apiName = "";
            string message = e.Exception.Message;
            var faceEx = e.Exception as FaceAPI.FaceAPIException;
            var emotionEx = e.Exception as Common.ClientException;
            var visionEx = e.Exception as VisionAPI.ClientException;
            if (faceEx != null)
            {
                apiName = "Face";
                message = faceEx.ErrorMessage;
            }
            else if (emotionEx != null)
            {
                apiName = "Emotion";
                message = emotionEx.Error.Message;
            }
            else if (visionEx != null)
            {
                apiName = "Computer Vision";
                message = visionEx.Error.Message;
            }
            //MessageArea.Text = string.Format("{0} API call failed on frame {1}. Exception: {2}", apiName, e.Frame.Metadata.Index, message);
        }
        else
        {
            _latestResultsToDisplay = e.Analysis;

            // Display the image and visualization in the right pane.
            RightImage.Source = VisualizeResult(e.Frame);
        }
    }));
}
private async void Button_Click(object sender, RoutedEventArgs e)
{
    FrameGrabber frameGrabber = await FrameGrabber.CreateFromStreamAsync(stream);
    sw.Restart();
    int counter = 0;

    // Seek to the start, then walk the video frame by frame and time the run.
    VideoFrame frame = await frameGrabber.ExtractVideoFrameAsync(TimeSpan.Zero, true);
    do
    {
        frame = await frameGrabber.ExtractNextVideoFrameAsync();
        Debug.WriteLine($"Frame (#{counter++}) from {frame?.Timestamp.TotalSeconds} sec. Elapsed: {sw.Elapsed.TotalSeconds} seconds");
    } while (frame != null);

    Debug.WriteLine($"All ({counter}) in {sw.Elapsed.TotalSeconds} seconds");
}
/// <summary>
/// Capture the current frame as an image.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void button_CaptureImage_Click(object sender, EventArgs e)
{
    try
    {
        if (openFileDialog1.FileName != "")
        {
            if (MyVideo.CurrentPosition >= 0 && MyVideo.CurrentPosition <= MyVideo.Duration)
            {
                double currentTime = MyVideo.CurrentPosition;

                // Check the video format; for asf videos, assume a default frame rate of 30.
                if (VideoFormat == "asf")
                {
                    FrameNum = (int)(currentTime * 30);
                }
                else
                {
                    FrameNum = (int)(MyVideo.CurrentPosition / MyVideo.AverageTimePerFrame);
                }

                // Build the output file name.
                string ImageName = "";
                if (folderBrowserDialog_ImagePath.SelectedPath != "")
                {
                    ImageName = folderBrowserDialog_ImagePath.SelectedPath + DateTime.Now.ToString("yyyy年MM月dd日HH时mm分ss秒") + FrameNum.ToString() + ".bmp";
                }
                else
                {
                    ImageName = textBox_ImagePath.Text + DateTime.Now.ToString("yyyy年MM月dd日HH时mm分ss秒") + FrameNum.ToString() + ".bmp";
                }

                Image tempImage = FrameGrabber.GetFrameFromVideo(openFileDialog1.FileName, FrameNum, Size.Empty, VideoFormat);
                tempImage.Save(ImageName);
                pictureBox_GrabFrame.Image = tempImage;
                currentFrame = tempImage;
            }
        }
    }
    catch (Exception)
    {
        // Ignore capture errors.
    }
}
static void Main(string[] args)
{
    IFaceClient client = new FaceClient(new ApiKeyServiceClientCredentials(SUBSCRIPTION_KEY))
    {
        Endpoint = ENDPOINT
    };

    // Define this in Main so it is closed over the client.
    async Task<DetectedFace[]> Detect(VideoFrame frame)
    {
        return (await client.Face.DetectWithStreamAsync(frame.Image.ToMemoryStream(".jpg"), detectionModel: DetectionModel.Detection03)).ToArray();
    }

    // Create grabber, with analysis type DetectedFace[].
    FrameGrabber<DetectedFace[]> grabber = new FrameGrabber<DetectedFace[]>();

    // Set up our Face API call.
    grabber.AnalysisFunction = Detect;

    // Set up a listener for when we receive a new result from an API call.
    grabber.NewResultAvailable += (s, e) =>
    {
        if (e.Analysis != null)
        {
            Console.WriteLine("New result received for frame acquired at {0}. {1} faces detected", e.Frame.Metadata.Timestamp, e.Analysis.Length);
        }
    };

    // Tell grabber to call the Face API every 3 seconds.
    grabber.TriggerAnalysisOnInterval(TimeSpan.FromMilliseconds(3000));

    // Start running.
    grabber.StartProcessingCameraAsync().Wait();

    // Wait for a keypress to stop.
    Console.WriteLine("Press any key to stop...");
    Console.ReadKey();

    // Stop, blocking until done.
    grabber.StopProcessingAsync().Wait();
}
public static void Run([QueueTrigger("zetron-media", Connection = "AzureWebJobsStorage")] string myQueueItem, TraceWriter log)
{
    var mediaJson = JObject.Parse(myQueueItem);
    log.Info($"Incoming JSON: {mediaJson.ToString()}");

    var client = new HttpClient();
    client.DefaultRequestHeaders.Add("Prediction-Key", ConfigurationManager.AppSettings["VisionAPIKey"]);
    string url = ConfigurationManager.AppSettings["VisionAPIHost"];

    var analysisInterval = TimeSpan.Parse(ConfigurationManager.AppSettings["AnalysisInterval"]);
    var analysisEntryCount = Convert.ToInt32(ConfigurationManager.AppSettings["AnalysisEntryCount"]);

    var connectionString = ConfigurationManager.ConnectionStrings["ZetronDb"].ConnectionString;
    var dbcontext = new ZetronDbContext(connectionString, log);

    var grabber = new FrameGrabber<FrameAnalysisResult>(mediaJson, dbcontext, log);
    var mediaAnalyzer = new MediaAnalyzer(mediaJson, client, grabber, dbcontext, log, analysisInterval, analysisEntryCount, url, (int)mediaJson["mediaId"]);
    mediaAnalyzer.ProcessMedia();

    log.Info($"MediaQueue trigger function completed for media id: {mediaJson["mediaId"]}");
}
public bool loadVideo()
{
    try
    {
        if (FrameGrabber.openVideoStream(PathLoad, out _mediaDet, out _mediaType))
        {
            SliderMax = (int)_mediaDet.StreamLength;
            InfoText = "Video (" + (int)_mediaDet.StreamLength + " sec.) successfully loaded.";
            _videoLoaded = true;
            return true;
        }
        return false;
    }
    catch (System.Exception e)
    {
        // Recover the HRESULT embedded in the exception message and translate it
        // into a readable error message.
        string errorCode = e.Message.Split(':')[1].Substring(3);
        uint errorValue = (uint)int.Parse(errorCode, System.Globalization.NumberStyles.HexNumber);
        InfoText = FrameGrabber.getErrorMsg(errorValue);
        return false;
    }
}
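The catch block above recovers the HRESULT by string-parsing the exception message, which is fragile. A sketch of an alternative, assuming the failure from openVideoStream surfaces as a COMException (whose ErrorCode already carries the HRESULT); the method name is hypothetical and getErrorMsg is the helper already used above:

// Hypothetical variant: read the HRESULT from COMException.ErrorCode instead of
// parsing it out of the message text. Assumes a COMException is what is thrown.
public bool loadVideoUsingComException()
{
    try
    {
        if (FrameGrabber.openVideoStream(PathLoad, out _mediaDet, out _mediaType))
        {
            SliderMax = (int)_mediaDet.StreamLength;
            InfoText = "Video (" + (int)_mediaDet.StreamLength + " sec.) successfully loaded.";
            _videoLoaded = true;
            return true;
        }
        return false;
    }
    catch (System.Runtime.InteropServices.COMException comEx)
    {
        InfoText = FrameGrabber.getErrorMsg((uint)comEx.ErrorCode);
        return false;
    }
}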
public MainWindow()
{
    InitializeComponent();
    FaceClientInit(subscriptionKey);

    // Face API: initialize the endpoint.
    if (Uri.IsWellFormedUriString(faceEndpoint, UriKind.Absolute))
    {
        faceClient.Endpoint = faceEndpoint;
        MessageArea.Text = "Endpoint Initiated";
        log.Info("Endpoint Initiated");
    }
    else
    {
        MessageBox.Show(faceEndpoint, "Invalid URI", MessageBoxButton.OK, MessageBoxImage.Error);
        log.Error($"{faceEndpoint} Invalid URI");
        Environment.Exit(0);
    }

    // Create grabber.
    _grabber = new FrameGrabber<LiveCameraResult>();

    // Set up a listener for when the client receives a new frame.
    _grabber.NewFrameProvided += (s, e) =>
    {
        // The callback may occur on a different thread, so we must use the
        // MainWindow.Dispatcher when manipulating the UI.
        this.Dispatcher.BeginInvoke((Action)(() =>
        {
            // Display the image in the left pane.
            LeftImage.Source = e.Frame.Image.ToBitmapSource();

            // If we're fusing client-side face detection with remote analysis, show the
            // new frame now with the most recent analysis available.
        }));
    };
}
/// <summary>
/// Open the specified video and get the individual frames.
/// Construct a 3D matrix from the 2D matrices returned by the FixFrame function.
/// </summary>
/// <param name="path"></param>
/// <returns></returns>
private Matrix[] LoadClip(string path)
{
    // Grab frames from the video at the specified path.
    FrameGrabber frames = new FrameGrabber(path);

    // Array of 2D matrices built from the frames in the loaded video -- essentially a 3D matrix.
    Matrix[] M;

    // Number of frames in the video.
    int count = frames.FrameCount;

    // Fix the first frame; used for the initialization of M.
    Matrix fixedFrame = FixFrame((Bitmap)frames.GetFrame(0));
    M = new Matrix[count];

    // Call FixFrame on each frame in the video and fill the matrix with these frames.
    for (int i = 0; i < count; i++)
    {
        fixedFrame = FixFrame((Bitmap)frames.GetFrame(i));
        M[i] = fixedFrame;
    }

    return M;
}