private async void MediaControl_SoundLevelChanged(object sender, Object e)
{
    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
    {
        if (Windows.Media.MediaControl.SoundLevel != Windows.Media.SoundLevel.Muted)
        {
            ScenarioReset();
        }
        else
        {
            if (m_bRecording)
            {
                ShowStatusMessage("Stopping Record on invisibility");
                await m_mediaCaptureMgr.StopRecordAsync();
                m_bRecording = false;
                EnableButton(true, "StartStopRecord");
            }
            if (m_bPreviewing)
            {
                ShowStatusMessage("Stopping Preview on invisibility");
                await m_mediaCaptureMgr.StopPreviewAsync();
                m_bPreviewing = false;
                EnableButton(true, "StartPreview");
                previewElement2.Source = null;
            }
        }
    });
}

// Stop the video capture.
private async void StopMediaCaptureSession()
{
    await _mediaCapture.StopRecordAsync();
    _recording = false;
    (App.Current as App).IsRecording = false;
}

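// Hedged sketch (not part of the original snippet): a matching start-side method, assuming the
// same hypothetical fields (_mediaCapture, _recording) and a StorageFile supplied by the caller.
private async Task StartMediaCaptureSessionAsync(StorageFile file)
{
    var profile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto);
    await _mediaCapture.StartRecordToStorageFileAsync(profile, file);
    _recording = true;
    (App.Current as App).IsRecording = true;
}
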
private async void ScenarioClose()
{
    try
    {
        if (m_bRecording)
        {
            ShowStatusMessage("Stopping Record");
            await m_mediaCaptureMgr.StopRecordAsync();
            m_bRecording = false;
        }
        if (m_bPreviewing)
        {
            ShowStatusMessage("Stopping preview");
            await m_mediaCaptureMgr.StopPreviewAsync();
            m_bPreviewing = false;
        }
        if (m_mediaCaptureMgr != null)
        {
            ShowStatusMessage("Stopping Camera");
            previewElement1.Source = null;
            m_mediaCaptureMgr.Dispose();
        }
    }
    catch (Exception e)
    {
        ShowExceptionMessage(e);
    }
}

public async void RecordLimitationExceeded(Windows.Media.Capture.MediaCapture currentCaptureObject)
{
    if (m_bRecording)
    {
        await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
        {
            try
            {
                //ShowStatusMessage("Stopping record on exceeding max record duration");
                await m_mediaCaptureMgr.StopRecordAsync();
                m_bRecording = false;

                // This scenario (real-time processing) has no record button, so track the state
                // with boolean flags instead of updating button content.
                //btnStartStopRecord1.Content = "StartRecord";
                canrecord = true;

                //ShowStatusMessage("Stopped record on exceeding max record duration: " + m_recordStorageFile.Path);

                if (!m_mediaCaptureMgr.MediaCaptureSettings.ConcurrentRecordAndPhotoSupported)
                {
                    // The camera cannot record and take a photo at the same time, so allow
                    // photo capture again now that recording has finished.
                    //btnTakePhoto1.Content = "TakePhoto";
                    cantakephoto = true;
                }
            }
            catch (Exception e)
            {
                ShowExceptionMessage(e);
            }
        });
    }
}

private async void ScenarioClose()
{
    if (m_bRecording)
    {
        ShowStatusMessage("Stopping Record on invisibility");
        await m_mediaCaptureMgr.StopRecordAsync();
        m_bRecording = false;
        EnableButton(true, "StartStopRecord");
    }
    if (m_mediaCaptureMgr != null)
    {
        // Dispose the capture object once, after recording has stopped.
        m_mediaCaptureMgr.Dispose();
    }
}

// Stop the video capture and the preview.
private async void StopMediaCaptureSession()
{
    await mediaCapture.StopRecordAsync();
    recording = false;
    (App.Current as App).IsRecording = false;

    // Stop the preview.
    await mediaCapture.StopPreviewAsync();
}

private async void endRecord(object sender, RoutedEventArgs e)
{
    if (isRecording)
    {
        await audioCapture.StopRecordAsync();
        isRecording = false;
        refreshUI();
        outputTextBlock.Text = "Recording stopped.";
    }
}

public async Task<RecordingToken> StartAsync()
{
    var capture = new MediaCapture();
    var initSettings = new MediaCaptureInitializationSettings();
    initSettings.StreamingCaptureMode = StreamingCaptureMode.Audio;
    await capture.InitializeAsync(initSettings);

    var fileName = DateTimeOffset.Now.TimeOfDay.ToString().Replace(':', '_') + ".wav";
    var file = await ApplicationData.Current.LocalFolder.CreateFileAsync(fileName);

    var profile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Medium);
    await capture.StartRecordToStorageFileAsync(profile, file);

    return new RecordingToken(file.Path, async () =>
    {
        await capture.StopRecordAsync();

        // Dispose the capture device here to avoid an application crash
        // when using FileSavePicker afterwards.
        capture.Dispose();
    });
}

private async void StopRecord_Click(object sender, RoutedEventArgs e)
{
    await captureManager.StopRecordAsync();
}

private async void MediaCaptureOnRecordLimitationExceeded(MediaCapture sender)
{
    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
    {
        await sender.StopRecordAsync();

        var warningMessage = new MessageDialog(
            "The media recording has been stopped because you exceeded the maximum recording length.",
            "Recording Stopped");
        await warningMessage.ShowAsync();
    });
}

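// Hedged sketch (not from the original sample): the handler above is typically registered
// after MediaCapture initialization. The field name _mediaCapture is hypothetical.
private MediaCapture _mediaCapture;

private async Task InitCaptureAsync()
{
    _mediaCapture = new MediaCapture();
    await _mediaCapture.InitializeAsync();
    _mediaCapture.RecordLimitationExceeded += MediaCaptureOnRecordLimitationExceeded;
}
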
//public void CreateProfile()
//{
//    _profile = Windows.Media.MediaProperties.MediaEncodingProfile.CreateMp3(Windows.Media.MediaProperties.AudioEncodingQuality.Auto);
//}

private async void endRecord(object sender, RoutedEventArgs e)
{
    await audioCapture.StopRecordAsync();
}

public void PrintMicrophoneSample()
{
    float volume = 100;

    var capture = new MediaCapture();
    var stream = new InMemoryRandomAccessStream();

    var captureInitSettings = new MediaCaptureInitializationSettings();
    captureInitSettings.StreamingCaptureMode = StreamingCaptureMode.Audio;
    capture.InitializeAsync(captureInitSettings).AsTask().Wait();
    capture.AudioDeviceController.VolumePercent = volume;

    // Record 16 kHz, mono, 16-bit PCM into a WAV container.
    MediaEncodingProfile profile = new MediaEncodingProfile();
    profile.Audio = AudioEncodingProperties.CreatePcm(16000, 1, 16);
    profile.Video = null;
    profile.Container = new ContainerEncodingProperties() { Subtype = MediaEncodingSubtypes.Wave };

    capture.StartRecordToStreamAsync(profile, stream).AsTask().Wait();

    // Record for a short interval before stopping.
    Task.Delay(TimeSpan.FromSeconds(1)).Wait();

    capture.StopRecordAsync().AsTask().Wait();

    // Read the recorded bytes back from the in-memory stream.
    byte[] wav = new byte[stream.Size];
    stream.Seek(0);
    stream.ReadAsync(wav.AsBuffer(), (uint)stream.Size, InputStreamOptions.None).AsTask().Wait();

    // Print a simple statistic of the captured audio bytes.
    int sum = 0;
    for (int i = 0; i < wav.Length; i++)
    {
        sum += wav[i];
    }
    WriteLine((double)wav.Length / sum);
}

private async void StartMediaCaptureRecord_Click(object sender, RoutedEventArgs e)
{
    StartCaptureElementRecord.IsEnabled = false;

    // Skip if no camera is present.
    var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    if (devices.Count == 0)
    {
        return;
    }

    StorageFile destination = await KnownFolders.VideosLibrary.CreateFileAsync(
        "VideoEffectsTestApp.MediaCapture.mp4",
        CreationCollisionOption.ReplaceExisting);

    var capture = new MediaCapture();
    await capture.InitializeAsync(new MediaCaptureInitializationSettings
    {
        StreamingCaptureMode = StreamingCaptureMode.Video
    });

    var definition = await CreateEffectDefinitionAsync(
        (VideoEncodingProperties)capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoRecord));
    await capture.AddEffectAsync(MediaStreamType.VideoRecord, definition.ActivatableClassId, definition.Properties);

    await capture.StartRecordToStorageFileAsync(MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Qvga), destination);
    await Task.Delay(3000);
    await capture.StopRecordAsync();

    StartCaptureElementRecord.IsEnabled = true;
}

/// <summary>
/// This method is called each time the motion sensor forwards data to the host.
///
/// Note that reading is relatively straightforward and follows the general WinRT paradigm of using event args
/// and reading from buffers.
/// </summary>
/// <param name="sender">The HidDevice that raised the event (the one that received the interrupt)</param>
/// <param name="eventArgs">Contains the HidInputReport that caused the interrupt</param>
private async void OnGeneralInterruptEvent(HidDevice sender, HidInputReportReceivedEventArgs eventArgs)
{
    // Retrieve the sensor data.
    HidInputReport inputReport = eventArgs.Report;
    IBuffer buffer = inputReport.Data;
    DataReader dr = DataReader.FromBuffer(buffer);
    byte[] bytes = new byte[inputReport.Data.Length];
    dr.ReadBytes(bytes);

    // Set the video length and delay values.
    TimeSpan length = TimeSpan.FromSeconds(5);            // Video length: 5 seconds
    TimeSpan delay = TimeSpan.FromSeconds(15);            // Pause or delay: 15 seconds
    TimeSpan radioDelay = TimeSpan.FromMilliseconds(250); // Duration of the radio-button highlight

    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
    {
        // Briefly check the radio button to show that an event was fired.
        radio1.IsChecked = true;
    });

    // Create a thread pool timer that unchecks the radio button after the radioDelay interval.
    ThreadPoolTimer RadioButtonTimer = ThreadPoolTimer.CreateTimer(
        async (source) =>
        {
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                // The radio button is unchecked once the duration expires.
                radio1.IsChecked = false;
            });
        }, radioDelay);

    // bytes[1] contains the motion data.
    if ((bytes[1] == 1) && !Capture)
    {
        Capture = true;

        // Create a thread pool timer that stops the video capture after "length" seconds.
        ThreadPoolTimer VideoStopTimer = ThreadPoolTimer.CreateTimer(
            async (source) =>
            {
                await CaptureMgr.StopRecordAsync();
                await rootPage.Dispatcher.RunAsync(
                    CoreDispatcherPriority.Normal,
                    new DispatchedHandler(() =>
                    {
                        rootPage.NotifyUser("Video capture concluded.", NotifyType.StatusMessage);
                    }));
            }, length);

        // Create a thread pool timer that prevents false captures by pausing detection for "delay" seconds.
        ThreadPoolTimer CapturePauseTimer = ThreadPoolTimer.CreateTimer(
            async (source) =>
            {
                Capture = false;
                await rootPage.Dispatcher.RunAsync(
                    CoreDispatcherPriority.Normal,
                    new DispatchedHandler(() =>
                    {
                        rootPage.NotifyUser("Presence sensor enabled.", NotifyType.StatusMessage);
                    }));
            }, delay);

        await rootPage.Dispatcher.RunAsync(
            CoreDispatcherPriority.Normal,
            new DispatchedHandler(() =>
            {
                rootPage.NotifyUser("Video capture started.", NotifyType.StatusMessage);
            }));

        String fileName = VIDEO_FILE_NAME;
        StorageFile = await Windows.Storage.KnownFolders.VideosLibrary.CreateFileAsync(
            fileName, Windows.Storage.CreationCollisionOption.GenerateUniqueName);

        MediaEncodingProfile recordProfile =
            MediaEncodingProfile.CreateMp4(Windows.Media.MediaProperties.VideoEncodingQuality.Auto);
        await CaptureMgr.StartRecordToStorageFileAsync(recordProfile, StorageFile);
    }
}