public void StartRecording(string path, string name)
{
    job = new LiveJob();
    dvs = job.AddDeviceSource(Video, Audio);
    job.ActivateSource(dvs);

    WindowsMediaOutputFormat outputFormat = new WindowsMediaOutputFormat();
    AdvancedVC1VideoProfile profile = new AdvancedVC1VideoProfile();
    profile.Bitrate = new ConstantBitrate(1280, false);
    profile.Size = new System.Drawing.Size(640, 360);
    WmaAudioProfile audioProfile = new WmaAudioProfile();
    outputFormat.AudioProfile = audioProfile;
    outputFormat.VideoProfile = profile;
    job.OutputFormat = outputFormat;

    CurrentVideoPath = Path.Combine(path, name);
    FileArchivePublishFormat fileOut = new FileArchivePublishFormat();
    fileOut.OutputFileName = CurrentVideoPath;
    job.PublishFormats.Add(fileOut);

    job.StartEncoding();
    IsRecording = true;
}
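The snippet above only starts the encode. A minimal teardown sketch, assuming the same job, dvs and IsRecording members, might look like this:

public void StopRecording()
{
    // Stop the encode, detach the device source and release the job.
    // Assumes job, dvs and IsRecording are the members set up in StartRecording above.
    if (job == null || !IsRecording)
    {
        return;
    }

    job.StopEncoding();
    job.RemoveDeviceSource(dvs);
    job.Dispose();
    job = null;
    dvs = null;
    IsRecording = false;
}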
/// <summary>
/// Initializes a new instance of the <see cref="LiveSource"/> class.
/// </summary>
public LiveSource()
{
    try
    {
        // Starts a new job for the preview window.
        this.job = new LiveJob();

        // Acquires the video devices.
        this.videoDevices = EncoderDevices.FindDevices(EncoderDeviceType.Video).ToList();
        var video = this.videoDevices.FirstOrDefault();

        // Checks for a video device.
        if (video != null)
        {
            // Create a new device source. We use the first video device on the system and no audio.
            this.DeviceSource = this.job.AddDeviceSource(video, null);

            // Make this source the active one.
            this.job.ActivateSource(this.DeviceSource);
        }
        else
        {
            this.HasError = true;
        }
    }
    catch (Exception ex)
    {
        Log.Exception(ex);
    }
}
private void StartCam()
{
    var lstVideoDevices = new List<EncoderDevice>();
    var lstAudioDevices = new List<EncoderDevice>();

    foreach (EncoderDevice edv in EncoderDevices.FindDevices(EncoderDeviceType.Video))
    {
        lstVideoDevices.Add(edv);
        //listBox1.Items.Add(edv.Name);
    }

    foreach (EncoderDevice eda in EncoderDevices.FindDevices(EncoderDeviceType.Audio))
    {
        lstAudioDevices.Add(eda);
        //listBox2.Items.Add(eda.Name);
    }

    var video = lstVideoDevices.FirstOrDefault();
    var audio = lstAudioDevices.FirstOrDefault();

    _job = new LiveJob();
    _deviceSource = _job.AddDeviceSource(video, audio);
    _deviceSource.VideoDevice = video;
    _deviceSource.AudioDevice = audio;
    _deviceSource.PreviewWindow = new PreviewWindow(new HandleRef(this, this.Handle));
    _deviceSource.PreviewWindow.SetSize(Size);
    _job.ActivateSource(_deviceSource);
}
private void Broadcast_Click(object sender, EventArgs e)
{
    EncoderDevice video = null;
    EncoderDevice audio = null;
    GetSelectedVideoAndAudioDevices(out video, out audio);
    StopJob();

    if (video == null)
    {
        return;
    }

    _job = new LiveJob();
    _deviceSource = _job.AddDeviceSource(video, audio);
    _job.ActivateSource(_deviceSource);

    // Finds and applies a smooth streaming preset
    _job.ApplyPreset(LivePresets.VC1256kDSL16x9);

    // Creates the publishing format for the job
    PullBroadcastPublishFormat format = new PullBroadcastPublishFormat();
    format.BroadcastPort = 8080;
    format.MaximumNumberOfConnections = 2;

    // Adds the publishing format to the job
    _job.PublishFormats.Add(format);

    // Starts encoding
    _job.StartEncoding();

    toolStripStatusLabel1.Text = "Broadcast started on localhost at port 8080, run WpfShowBroadcast.exe now to see it";
}
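The status message refers to WpfShowBroadcast.exe, which is not shown here. As a rough idea of what a viewer could look like, below is a hedged WPF sketch that points a MediaElement at the broadcast port; the URL scheme and the use of MediaElement are assumptions, not taken from the sample above.

// Hypothetical viewer-side sketch: a WPF window with a MediaElement pointed at the
// broadcast port. Assumes the encoder is running the Broadcast_Click code above on
// the same machine and that port 8080 is reachable.
using System;
using System.Windows;
using System.Windows.Controls;

public class BroadcastViewerWindow : Window
{
    public BroadcastViewerWindow()
    {
        var player = new MediaElement();
        player.LoadedBehavior = MediaState.Manual;
        // The URL scheme and port are assumptions matching the PullBroadcastPublishFormat above.
        player.Source = new Uri("http://localhost:8080");
        Content = player;
        Loaded += (s, e) => player.Play();
    }
}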
public void GetVideo()
{
    var listVideoDevices = new List<EncoderDevice>();
    var listAudioDevices = new List<EncoderDevice>();

    foreach (var edv in EncoderDevices.FindDevices(EncoderDeviceType.Video))
    {
        listVideoDevices.Add(edv);
    }

    foreach (var edv in EncoderDevices.FindDevices(EncoderDeviceType.Audio))
    {
        listAudioDevices.Add(edv);
    }

    var job = new LiveJob();
    var deviceSource = job.AddDeviceSource(listVideoDevices[0], listAudioDevices[0]);
    job.ActivateSource(deviceSource);
    job.ApplyPreset(LivePresets.VC1HighSpeedBroadband4x3);

    PullBroadcastPublishFormat format = new PullBroadcastPublishFormat();
    format.BroadcastPort = 5001;
    format.MaximumNumberOfConnections = 1;
    job.PublishFormats.Add(format);

    job.StartEncoding();
}
public Data()
{
    // Initializes Job and collection objects
    Job = new LiveJob();
    AudioDevices = new ObservableCollection<EncoderDevice>();

    // Set the output directory for the job
    OutputDirectory = string.Format("{0}\\", Environment.GetFolderPath(Environment.SpecialFolder.MyVideos));

    // Set up the live job parameters
    SetupJob();
}
public CatCam()
{
    InitializeComponent();
    Console.Write("here");

    EncoderDevice video = EncoderDevices.FindDevices(EncoderDeviceType.Video).FirstOrDefault();
    EncoderDevice audio = EncoderDevices.FindDevices(EncoderDeviceType.Audio).FirstOrDefault();
    StopJob();

    if (video == null || audio == null)
    {
        return;
    }

    // Starts new job for preview window
    _job = new LiveJob();

    // Checks for a/v devices
    if (video != null && audio != null)
    {
        // Create a new device source. We use the first audio and video devices on the system
        _deviceSource = _job.AddDeviceSource(video, audio);
        _deviceSource.PickBestVideoFormat(new Size(640, 480), 15);

        // Get the properties of the device video
        SourceProperties sp = _deviceSource.SourcePropertiesSnapshot();

        // Resize the preview panel to match the video device resolution set
        previewPanel.Size = new Size(sp.Size.Width, sp.Size.Height);

        // Set the output video resolution to match the preview
        _job.OutputFormat.VideoProfile.Size = new Size(sp.Size.Width, sp.Size.Height);

        // Sets preview window to winform panel hosted by xaml window
        _deviceSource.PreviewWindow = new PreviewWindow(new HandleRef(previewPanel, previewPanel.Handle));

        // Make this source the active one
        _job.ActivateSource(_deviceSource);
        snapButton.Enabled = true;
    }
    else
    {
        // Gives error message as no audio and/or video devices found
        MessageBox.Show("No Video/Audio capture devices have been found.", "Warning");
    }
}
private void SetDeviceSources()
{
    // Locates the first video device with name == _videoSourceName and the first audio device found
    // (required for the constructor but not used in our project), then creates the LiveJob and adds
    // the device source.
    EncoderDevice videoDevice = null;
    EncoderDevice audioDevice = null;
    _deviceSource = null;

    videoDevice = EncoderDevices.FindDevices(EncoderDeviceType.Video).FirstOrDefault(vd => vd.Name == _videoSourceName);
    audioDevice = EncoderDevices.FindDevices(EncoderDeviceType.Audio).FirstOrDefault();

    if (videoDevice != null && audioDevice != null)
    {
        _job = new LiveJob();
        _deviceSource = _job.AddDeviceSource(videoDevice, audioDevice);
    }
}
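SetDeviceSources stops short of previewing or activating the source. A minimal follow-up sketch, assuming a WinForms panel named previewPanel on the hosting form (hypothetical, not part of the snippet above), might look like this:

private void StartPreview()
{
    // Assumes SetDeviceSources() has already populated _job and _deviceSource,
    // and that previewPanel is a WinForms Panel on the hosting form (hypothetical).
    if (_job == null || _deviceSource == null)
    {
        return;
    }

    _deviceSource.PreviewWindow = new PreviewWindow(new HandleRef(previewPanel, previewPanel.Handle));
    _job.ActivateSource(_deviceSource);
}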
public StoryPlugin(IMessageDispatcher remote, IUIThreadDispatcher uiThreadDispatcher)
{
    this._remote = remote;
    this._uiThreadDispatcher = uiThreadDispatcher;

    uiThreadDispatcher.BlockingInvoke(() =>
    {
        story = new StoryPage();
        pluginContainer = new Viewbox();
        pluginContainer.Child = story;
    });

    _remote.RegisterReceiveHandler("story.stopRecording", new MessageHandlerDelegateWrapper(m => endVideoCapture()));
    _remote.RegisterReceiveHandler("story.startRecording", new MessageHandlerDelegateWrapper(m => startVideoCapture(m)));
    _remote.RegisterReceiveHandler("story.saveRecording", new MessageHandlerDelegateWrapper(m => saveRecording(m)));

    foreach (EncoderDevice edv in EncoderDevices.FindDevices(EncoderDeviceType.Video))
    {
        //Debug.WriteLine("found a video device named: " + edv.Name);
        videoDevice = edv;
    }

    foreach (EncoderDevice edv in EncoderDevices.FindDevices(EncoderDeviceType.Audio))
    {
        //Debug.WriteLine("found an audio device named: " + edv.Name);
        if (edv.Name.ToLower().Contains("microphone"))
        {
            audioDevice = edv;
        }
    }

    story.SizeChanged += new System.Windows.SizeChangedEventHandler(story_SizeChanged);

    job = new LiveJob();

    if (!System.IO.Directory.Exists("C:\\Dropbox\\" + System.Windows.Forms.SystemInformation.ComputerName + "\\videos\\"))
    {
        System.IO.Directory.CreateDirectory("C:\\Dropbox\\" + System.Windows.Forms.SystemInformation.ComputerName + "\\videos\\");
    }

    fileOut = new FileArchivePublishFormat();
    fileOut.OutputFileName = "C:\\Dropbox\\" + System.Windows.Forms.SystemInformation.ComputerName + "\\videos\\tempVideo.wmv";
    System.Console.WriteLine(fileOut.OutputFileName);
}
// Starts previewing the image from the selected device
internal void Play(string deviceName, PictureBox p_preview)
{
    this.p_preview = p_preview;

    for (int i = 0; i < capDevices.Count; i++)
    {
        if (capDevices[i].Equals(deviceName))
        {
            selectDeviceIndex = i;
        }
    }

    // Starts new job for preview window
    _job = new LiveJob();

    // Create a new device source. We use the first audio and video devices on the system
    _deviceSource = _job.AddDeviceSource(GetVideoDevice(deviceName), AudioDevices[0]);

    // Sets preview window to winform panel hosted by xaml window
    _deviceSource.PreviewWindow = new PreviewWindow(new HandleRef(p_preview, p_preview.Handle));

    // Make this source the active one
    _job.ActivateSource(_deviceSource);
}
public mainScreen()
{
    InitializeComponent();

    // Initialize a list of string equivalents of audio/visual devices
    initDeviceList();

    // Read the data in
    theSettings = Settings.readFile(audioDeviceLst, vidDeviceLst);

    // Construct the values for the actual devices
    convertStringToDevice(theSettings);

    // Starts new job for preview window
    audVidJob = new LiveJob();

    // Create a new device source. We use the first audio and video devices on the system
    deviceSource = audVidJob.AddDeviceSource(videoDevice, audioDevice);

    // Sets preview window to winform panel hosted by xaml window
    deviceSource.PreviewWindow = new PreviewWindow(new HandleRef(this.panel1, this.panel1.Handle));

    // Make this source the active one
    audVidJob.ActivateSource(deviceSource);

    // Update the face detection values displayed (based on what is read from the file)
    this.updateDetectionGUI();
    this.statusLbl.Text = BoTStatus.Detecting.ToString();

    // Check the number of videos recorded so far by counting the files in the save path
    System.IO.DirectoryInfo dir = new System.IO.DirectoryInfo(theSettings.savePath);
    this.numRecorded = dir.GetFiles().Length;
    this.recordedLbl.Text = numRecorded.ToString();

    // Now that the form and its members have been properly initialized, initialize and start the
    // facial recognition system
    faceRecognizer = new FaceHandler(this);

    // Initialize the speech recognition system but DO NOT START IT
    voiceRecognizer = new VoiceHandler(this);

    faceRecognizer.Start();
}
private string VideoCapture(Collection<EncoderDevice> Vdevices)
{
    // Starts new job for preview window
    LiveJob _job = new LiveJob();

    // Create a new device source. We use the first audio and video devices on the system
    LiveDeviceSource _deviceSource = _job.AddDeviceSource(Vdevices[0], null);

    // Make this source the active one
    _job.ActivateSource(_deviceSource);

    FileArchivePublishFormat fileOut = new FileArchivePublishFormat();

    // Sets file path and name
    string path = Environment.GetFolderPath(Environment.SpecialFolder.MyPictures);
    string output = String.Format(@"{0}\Constellation{1:yyyyMMdd_hhmmss}", path, DateTime.Now);
    fileOut.OutputFileName = string.Format("{0}.wmv", output);

    // Adds the format to the job. You can add additional formats
    // as well such as Publishing streams or broadcasting from a port
    _job.PublishFormats.Add(fileOut);

    // Starts encoding
    _job.StartEncoding();
    Thread.Sleep(3000);
    _job.StopEncoding();
    _job.RemoveDeviceSource(_deviceSource);

    return output;
}
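A possible call site for VideoCapture, sketched under the assumption that the caller enumerates the video devices itself; the variable names below are illustrative, not from the original project:

// Hypothetical caller: enumerate the video devices and record a short clip.
Collection<EncoderDevice> videoDevices = EncoderDevices.FindDevices(EncoderDeviceType.Video);
if (videoDevices.Count > 0)
{
    string basePath = VideoCapture(videoDevices);
    Console.WriteLine("Clip written to " + basePath + ".wmv");
}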
public string StartRecording(string path, string name)
{
    job = new LiveJob();
    dvs = job.AddDeviceSource(null, audioEncoder);
    job.ActivateSource(dvs);

    WindowsMediaOutputFormat outputFormat = new WindowsMediaOutputFormat()
    {
        AudioProfile = new WmaAudioProfile()
    };
    job.OutputFormat = outputFormat;

    string currentRecordingPath = Path.Combine(path, name);
    FileArchivePublishFormat fileOut = new FileArchivePublishFormat();
    fileOut.OutputFileName = currentRecordingPath;
    job.PublishFormats.Add(fileOut);

    job.StartEncoding();
    IsRecording = true;
    return currentRecordingPath;
}
private void btnPreview_Click(object sender, EventArgs e)
{
    EncoderDevice video = null;
    EncoderDevice audio = null;
    GetSelectedVideoAndAudioDevices(out video, out audio);
    StopJob();

    if (video == null)
    {
        return;
    }

    // Starts new job for preview window
    _job = new LiveJob();

    // Checks for a/v devices
    if (video != null && audio != null)
    {
        // Create a new device source. We use the first audio and video devices on the system
        _deviceSource = _job.AddDeviceSource(video, audio);

        // Is it required to show the configuration dialogs?
        // (The "1 == 2" condition permanently skips the dialog branch in this sample.)
        if (1 == 2)
        {
            // Yes
            // VFW video device?
            if (lstVideoDevices.SelectedItem.ToString().EndsWith("(VFW)", StringComparison.OrdinalIgnoreCase))
            {
                // Yes
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VfwFormatDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VfwFormatDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VfwSourceDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VfwSourceDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VfwDisplayDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VfwDisplayDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
            }
            else
            {
                // No
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VideoCapturePinDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VideoCapturePinDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VideoCaptureDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VideoCaptureDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VideoCrossbarDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VideoCrossbarDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VideoPreviewPinDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VideoPreviewPinDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VideoSecondCrossbarDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VideoSecondCrossbarDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
            }
        }
        else
        {
            // No
            // Set up the video resolution and frame rate of the video device.
            // NOTE: Of course, the resolution and frame rate you specify must be supported by the device!
            // NOTE 2: Not all video devices support this call; if unsupported it simply has no effect (no error is raised).
            // NOTE 3: As a workaround, if the .PickBestVideoFormat method doesn't work, you could force the resolution
            // in the statements a few lines below ('panelVideoPreview.Size =' and '_job.OutputFormat.VideoProfile.Size =')
            // to the one you chose (640, 480).
            _deviceSource.PickBestVideoFormat(new Size(640, 480), 15);
        }

        // Get the properties of the device video
        SourceProperties sp = _deviceSource.SourcePropertiesSnapshot();

        // Resize the preview panel to match the video device resolution set
        panelVideoPreview.Size = new Size(sp.Size.Width, sp.Size.Height);

        // Set the output video resolution to match the preview
        _job.OutputFormat.VideoProfile.Size = new Size(sp.Size.Width, sp.Size.Height);

        // Display the video device properties set
        toolStripStatusLabel1.Text = sp.Size.Width.ToString() + "x" + sp.Size.Height.ToString() + " " + sp.FrameRate.ToString() + " fps";

        // Sets preview window to winform panel hosted by xaml window
        _deviceSource.PreviewWindow = new PreviewWindow(new HandleRef(panelVideoPreview, panelVideoPreview.Handle));

        // Make this source the active one
        _job.ActivateSource(_deviceSource);

        btnGrabImage.Enabled = true;
        toolStripStatusLabel1.Text = "Preview activated";
    }
    else
    {
        // Gives error message as no audio and/or video devices found
        MessageBox.Show("No Video/Audio capture devices have been found.", "Warning");
        toolStripStatusLabel1.Text = "No Video/Audio capture devices have been found.";
    }
}
private void mediaPlay()
{
    _mediaJob = new LiveJob();
    _mediaDeviceSource = _mediaJob.AddDeviceSource(video, audio);
    _mediaDeviceSource.PickBestVideoFormat(new Size(640, 480), 15);
    _mediaDeviceSource.PreviewWindow = new PreviewWindow(new System.Runtime.InteropServices.HandleRef(videoPanel, videoPanel.Handle));
    _mediaJob.ActivateSource(_mediaDeviceSource);
}
private void btnPreview_Click(object sender, EventArgs e)
{
    EncoderDevice video = null;
    EncoderDevice audio = null;
    GetSelectedVideoAndAudioDevices(out video, out audio);
    StopJob();

    if (video == null)
    {
        return;
    }

    // Starts new job for preview window
    _job = new LiveJob();

    // Checks for a/v devices
    if (video != null && audio != null)
    {
        // Create a new device source. We use the first audio and video devices on the system
        _deviceSource = _job.AddDeviceSource(video, audio);

        // Is it required to show the configuration dialogs?
        if (checkBoxShowConfigDialog.Checked)
        {
            // Yes
            // VFW video device?
            if (lstVideoDevices.SelectedItem.ToString().EndsWith("(VFW)", StringComparison.OrdinalIgnoreCase))
            {
                // Yes
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VfwFormatDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VfwFormatDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VfwSourceDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VfwSourceDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VfwDisplayDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VfwDisplayDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
            }
            else
            {
                // No
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VideoCapturePinDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VideoCapturePinDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VideoCaptureDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VideoCaptureDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VideoCrossbarDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VideoCrossbarDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VideoPreviewPinDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VideoPreviewPinDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
                if (_deviceSource.IsDialogSupported(ConfigurationDialog.VideoSecondCrossbarDialog))
                {
                    _deviceSource.ShowConfigurationDialog(ConfigurationDialog.VideoSecondCrossbarDialog, new HandleRef(panelVideoPreview, panelVideoPreview.Handle));
                }
            }
        }
        else
        {
            // No
            // Set up the video resolution and frame rate of the video device.
            // NOTE: Of course, the resolution and frame rate you specify must be supported by the device!
            // NOTE 2: Not all video devices support this call; if unsupported it simply has no effect (no error is raised).
            // NOTE 3: As a workaround, if the .PickBestVideoFormat method doesn't work, you could force the resolution
            // in the statements a few lines below ('panelVideoPreview.Size =' and '_job.OutputFormat.VideoProfile.Size =')
            // to the one you chose.
            _deviceSource.PickBestVideoFormat(new Size(Screen.PrimaryScreen.Bounds.Width, Screen.PrimaryScreen.Bounds.Height), 15);
        }

        // Get the properties of the device video
        SourceProperties sp = _deviceSource.SourcePropertiesSnapshot();

        // Resize the preview panel to match the video device resolution set
        panelVideoPreview.Size = new Size(sp.Size.Width, sp.Size.Height);

        // Set the output video resolution to match the preview
        _job.OutputFormat.VideoProfile.Size = new Size(sp.Size.Width, sp.Size.Height);

        // Display the video device properties set
        toolStripStatusLabel1.Text = sp.Size.Width.ToString() + "x" + sp.Size.Height.ToString() + " " + sp.FrameRate.ToString() + " fps";

        // Sets preview window to winform panel hosted by xaml window
        _deviceSource.PreviewWindow = new PreviewWindow(new HandleRef(panelVideoPreview, panelVideoPreview.Handle));

        // Make this source the active one
        _job.ActivateSource(_deviceSource);

        btnStartStopRecording.Enabled = true;
        btnGrabImage.Enabled = true;
        toolStripStatusLabel1.Text = "Preview activated";
    }
    else
    {
        // Gives error message as no audio and/or video devices found
        MessageBox.Show("No Video/Audio capture devices have been found.", "Warning");
        toolStripStatusLabel1.Text = "No Video/Audio capture devices have been found.";
    }
}
public bool StartWebcam()
{
    if (SelectedVideoDevice == null)
        return false;

    LiveJob = null;
    LiveJob = new LiveJob();
    LiveDeviceSource = LiveJob.AddDeviceSource(SelectedVideoDevice, null);

    System.Drawing.Size framesize = new System.Drawing.Size(1280, 960);
    LiveDeviceSource.PickBestVideoFormat(framesize, 30);
    LiveJob.OutputFormat.VideoProfile.Size = framesize;
    LiveJob.OutputFormat.VideoProfile.Bitrate = new ConstantBitrate(2000);

    LiveJob.ActivateSource(LiveDeviceSource);
    isConnected = true;
    return true;
}
public void StartCam()
{
    try
    {
        // Start the video stream
        videojob = new LiveJob();

        // Create a new device source. We use the first audio and video devices on the system
        videosource = videojob.AddDeviceSource(currentcam, null);

        // Sets preview window to winform panel
        videosource.PreviewWindow = new PreviewWindow(new HandleRef(pnlVideoPreview, pnlVideoPreview.Handle));

        // Activate the source and start
        videojob.ActivateSource(videosource);
    }
    catch
    {
        // Any failure while starting the preview is silently ignored here.
    }
}
private void viewVideoBtn_Click(object sender, EventArgs e)
{
    EncoderDevice video = null;
    EncoderDevice audio = null;

    try
    {
        GetSelectedVideoAndAudioDevices(out video, out audio);
        StopJob();

        if (video == null)
        {
            return;
        }

        // Starts new job for preview window
        _job = new LiveJob();

        // Checks for a/v devices
        if (video != null && audio != null)
        {
            // Create a new device source. We use the first audio and video devices on the system
            _deviceSource = _job.AddDeviceSource(video, audio);
            _deviceSource.PickBestVideoFormat(new Size(verticalRes, horizontalRes), framRate);

            // Get the properties of the device video
            SourceProperties sp = _deviceSource.SourcePropertiesSnapshot();

            // Resize the preview panel to match the video device resolution set
            panelVideoPreview.Size = new Size(sp.Size.Width, sp.Size.Height);

            // Set the output video resolution to match the preview
            _job.OutputFormat.VideoProfile.Size = new Size(sp.Size.Width, sp.Size.Height);

            // Display the video device properties set
            //toolStripStatusLabel1.Text = sp.Size.Width.ToString() + "x" + sp.Size.Height.ToString() + " " + sp.FrameRate.ToString() + " fps";

            // Sets preview window to winform panel hosted by xaml window
            _deviceSource.PreviewWindow = new PreviewWindow(new HandleRef(panelVideoPreview, panelVideoPreview.Handle));

            // Make this source the active one
            _job.ActivateSource(_deviceSource);

            //btnStartStopRecording.Enabled = true;
            //btnGrabImage.Enabled = true;
            debugText = "Preview activated";
        }
        else
        {
            // Gives error message as no audio and/or video devices found
            MessageBox.Show("No Video/Audio capture devices have been found.", "Warning");
            debugText = "No Video/Audio capture devices have been found.";
        }
    }
    catch (Exception error)
    {
        debugText = error.Message;
        this.Invoke(new EventHandler(DisplayText));
    }
}
public void StartCapture()
{
    if (!IsCapturing)
    {
        job = new LiveJob();
        dvs = job.AddDeviceSource(SelectedVideoDevice, SelectedAudioDevice);
        job.ActivateSource(dvs);
        job.ApplyPreset(LivePresets.VC1HighSpeedBroadband4x3);

        double epoch = GetUnixEpoch(DateTime.UtcNow);
        string timestamp = epoch.ToString();
        VideoFileName = string.Format(VIDEO_FILE, timestamp);

        FileArchivePublishFormat fileOut = new FileArchivePublishFormat();
        fileOut.OutputFileName = Path.Combine(FileLocationUtility.GetVideoFolderLoctation(), VideoFileName);
        job.PublishFormats.Add(fileOut);

        job.StartEncoding();
        started = DateTime.UtcNow;
        IsCapturing = true;
    }
}
/// <summary>
/// Start the webcam. Create the LiveJob and set the active source (audio and video).
/// </summary>
/// <returns>Returns false if the webcam cannot start</returns>
public bool StartWebcam()
{
    if (SelectedVideoDevice == null || SelectedAudioDevice == null)
        return false;

    LiveJob = null;
    LiveJob = new LiveJob();
    LiveDeviceSource = LiveJob.AddDeviceSource(SelectedVideoDevice, SelectedAudioDevice);
    LiveJob.ActivateSource(LiveDeviceSource);
    return true;
}