/// <summary>
/// Asynchronously initializes the video encoder resource. Call after
/// GetSupportedProfiles() and before any of the encode functions.
/// </summary>
/// <param name="inputFormat">The <code>VideoFrame_Format</code> of the frames to encode.</param>
/// <param name="inputVisibleSize">A <code>Size</code> giving the visible dimensions of the input frames.</param>
/// <param name="outputProfile">A <code>VideoProfile</code> naming the codec profile of the encoded stream.</param>
/// <param name="initialBitrate">Initial bitrate of the encoded output stream.</param>
/// <param name="acceleration">A <code>HardwareAcceleration</code> selecting hardware or software encoding.</param>
/// <param name="messageLoop">Optional MessageLoop to post the command to.</param>
/// <returns>Error code: NotSupported if encoding or the requested profile is unavailable;
/// NoMemory if frame/bitstream buffers cannot be created.</returns>
public Task<PPError> InitializeAsync(VideoFrameFormat inputFormat, PPSize inputVisibleSize, VideoProfile outputProfile, uint initialBitrate, HardwareAcceleration acceleration, MessageLoop messageLoop = null)
{
    // Delegate straight to the core implementation; kept as a separate public
    // entry point so the XML docs live on the awaitable surface.
    return InitializeAsyncCore(inputFormat, inputVisibleSize, outputProfile, initialBitrate, acceleration, messageLoop);
}
/// <summary>
/// Pushes the selected profile's display values into the UI labels and
/// persists the chosen profile id in the application settings.
/// </summary>
/// <param name="profile">Profile whose values are shown; assumed non-null by callers.</param>
private void UpdateProfileValues(VideoProfile profile)
{
    ProfileNameValue.Text = profile.Name;
    ResolutionValue.Text = profile.Resolution;
    MaxBitrateValue.Text = $"{profile.Bitrate}";
    MaxFrameRateValue.Text = $"{profile.Framerate}";

    // Remember the selection so it survives app restarts.
    AgoraSettings.Current.Profile = profile.Id;
}
/// <summary>
/// Returns the string name registered for <paramref name="profile"/>,
/// or "unknown" if the profile was never registered via AddVideoProfile.
/// </summary>
string VideoProfileToString(VideoProfile profile)
{
    // TryGetValue does a single dictionary lookup instead of the
    // ContainsKey + indexer pair (which hashes the key twice).
    return profileToString.TryGetValue(profile, out var name) ? name : "unknown";
}
/// <summary>
/// Updates an existing profile on the given factory via a PUT to
/// "profiles/{id}.json".
/// </summary>
/// <param name="factoryId">Identifier of the factory that owns the profile.</param>
/// <param name="profile">Profile to update; must not be null.</param>
/// <returns>The updated profile as returned by the service.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="profile"/> is null.</exception>
public Task<VideoProfile> UpdateProfile(string factoryId, VideoProfile profile)
{
    if (profile == null)
    {
        // nameof keeps the parameter name refactor-safe.
        throw new ArgumentNullException(nameof(profile));
    }
    return InvokePut<VideoProfile>(factoryId, string.Format("profiles/{0}.json", profile.Id), profile);
}
/// <summary>
/// Creates a new profile on the given factory via a POST to "profiles.json".
/// </summary>
/// <param name="factoryId">Identifier of the factory to create the profile under.</param>
/// <param name="profile">Profile to create; must not be null.</param>
/// <returns>The created profile as returned by the service.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="profile"/> is null.</exception>
public Task<VideoProfile> CreateProfile(string factoryId, VideoProfile profile)
{
    if (profile == null)
    {
        // nameof keeps the parameter name refactor-safe.
        throw new ArgumentNullException(nameof(profile));
    }
    return InvokePost<VideoProfile>(factoryId, "profiles.json", profile);
}
/// <summary>
/// Initializes the video encoder resource. Call after GetSupportedProfiles()
/// and before any of the encode functions. Completion is reported through
/// the OnInitialize callback.
/// </summary>
/// <param name="inputFormat">The <code>VideoFrame_Format</code> of the frames to encode.</param>
/// <param name="inputVisibleSize">A <code>Size</code> giving the visible dimensions of the input frames.</param>
/// <param name="outputProfile">A <code>VideoProfile</code> naming the codec profile of the encoded stream.</param>
/// <param name="initialBitrate">Initial bitrate of the encoded output stream.</param>
/// <param name="acceleration">A <code>HardwareAcceleration</code> selecting hardware or software encoding.</param>
/// <returns>Error code: NotSupported if encoding or the requested profile is unavailable;
/// NoMemory if frame/bitstream buffers cannot be created.</returns>
public PPError Initialize(VideoFrameFormat inputFormat, PPSize inputVisibleSize, VideoProfile outputProfile, uint initialBitrate, HardwareAcceleration acceleration)
{
    // Cast the managed enums down to their PPAPI interop counterparts and
    // hand the call to the native interface.
    var result = PPBVideoEncoder.Initialize(this,
        (PPVideoFrameFormat)inputFormat,
        inputVisibleSize,
        (PPVideoProfile)outputProfile,
        initialBitrate,
        (PPHardwareAcceleration)acceleration,
        new CompletionCallback(OnInitialize));
    return (PPError)result;
}
/// <summary>
/// Wires up the system under test with a real AutoMapper configuration and
/// real YoutubeExplode collaborators (no mocks).
/// </summary>
public YouTubeServiceTests()
{
    // Register the AutoMapper profile directly in the configuration lambda.
    var configuration = new MapperConfiguration(cfg => cfg.AddProfile(new VideoProfile()));
    _mapper = new Mapper(configuration);

    _youtube = new YoutubeClient();
    _downloader = new VideoDownloader(_youtube);
    _converter = new VideoConverter(_mapper);
    _sut = new YouTubeService(_downloader, _converter);
}
/// <summary>
/// Constructs the VideoEncode instance: picks the default codec profile and
/// frame format, builds the profile name tables, probes for an encoder, and
/// subscribes the PPAPI message/initialize handlers.
/// </summary>
/// <param name="handle">Native PPAPI instance handle passed to the base class.</param>
public VideoEncode(IntPtr handle) : base(handle)
{
    // Default codec profile is selected at compile time.
#if USE_VP8_INSTEAD_OF_H264
    videoProfile = VideoProfile.Vp8Any;
#else
    videoProfile = VideoProfile.H264main;
#endif
    frameFormat = VideoFrameFormat.I420;
    InitializeVideoProfiles();
    ProbeEncoder();
    HandleMessage += OnHandleMessage;
    Initialize += OnInitialize;
}
/// <summary>
/// Core implementation of InitializeAsync: bridges the HandleInitialize event
/// (or a blocking call posted to a message loop) onto a Task.
/// </summary>
/// <param name="inputFormat">The <code>VideoFrame_Format</code> of the frames to encode.</param>
/// <param name="inputVisibleSize">Visible dimensions of the input frames.</param>
/// <param name="outputProfile">Codec profile of the encoded output stream.</param>
/// <param name="initialBitrate">Initial bitrate of the encoded output stream.</param>
/// <param name="acceleration">Hardware or software encoder selection.</param>
/// <param name="messageLoop">Optional MessageLoop the command is posted to.</param>
/// <returns>The PPError reported by the encoder, or PPError.Aborted on exception.</returns>
private async Task<PPError> InitializeAsyncCore(VideoFrameFormat inputFormat, PPSize inputVisibleSize, VideoProfile outputProfile, uint initialBitrate, HardwareAcceleration acceleration, MessageLoop messageLoop = null)
{
    // RunContinuationsAsynchronously prevents the awaiting continuation from
    // running inline on the thread that completes the TCS (re-entrancy risk).
    var tcs = new TaskCompletionSource<PPError>(TaskCreationOptions.RunContinuationsAsynchronously);
    EventHandler<PPError> handler = (s, e) => tcs.TrySetResult(e);

    try
    {
        HandleInitialize += handler;

        if (MessageLoop == null && messageLoop == null)
        {
            // No message loop involved: the callback path completes the TCS
            // through the HandleInitialize event.
            Initialize(inputFormat, inputVisibleSize, outputProfile, initialBitrate, acceleration);
        }
        else
        {
            // Post a blocking initialize to the message loop and complete the
            // TCS with its synchronous result.
            Action<PPError> action = new Action<PPError>((e) =>
            {
                var result = (PPError)PPBVideoEncoder.Initialize(this,
                    (PPVideoFrameFormat)inputFormat,
                    inputVisibleSize,
                    (PPVideoProfile)outputProfile,
                    initialBitrate,
                    (PPHardwareAcceleration)acceleration,
                    new BlockUntilComplete());
                tcs.TrySetResult(result);
            });
            InvokeHelper(action, messageLoop);
        }

        return await tcs.Task;
    }
    catch (Exception exc)
    {
        Console.WriteLine(exc.Message);
        // TrySetException instead of SetException: the TCS may already hold a
        // result, and SetException would then throw InvalidOperationException.
        tcs.TrySetException(exc);
        return PPError.Aborted;
    }
    finally
    {
        // Always unsubscribe so the handler does not leak across calls.
        HandleInitialize -= handler;
    }
}
/// <summary>
/// Requests camera access, builds a device capture configuration from the
/// currently selected device/format/profile, creates a local video track
/// from it, and registers the track with the current session.
/// </summary>
/// <param name="trackName">Name assigned to the newly created local video track.</param>
/// <exception cref="InvalidOperationException">Thrown when no capture device is selected.</exception>
public async Task AddVideoTrackFromDeviceAsync(string trackName)
{
    await Utils.RequestMediaAccessAsync(StreamingCaptureMode.Video);

    // Create the source from the device selected in the UI.
    VideoCaptureDeviceInfo deviceInfo = VideoCaptureDevices.SelectedItem;
    if (deviceInfo == null)
    {
        throw new InvalidOperationException("No video capture device selected");
    }
    var deviceConfig = new LocalVideoDeviceInitConfig
    {
        videoDevice = new VideoCaptureDevice { id = deviceInfo.Id },
    };

    // Capture format is optional; fall back to device defaults when none selected.
    VideoCaptureFormatViewModel formatInfo = VideoCaptureFormats.SelectedItem;
    if (formatInfo != null)
    {
        deviceConfig.width = formatInfo.Format.width;
        deviceConfig.height = formatInfo.Format.height;
        deviceConfig.framerate = formatInfo.Format.framerate;
    }

    // Video profiles only apply on devices that advertise support for them.
    if (deviceInfo.SupportsVideoProfiles)
    {
        VideoProfile profile = VideoProfiles.SelectedItem;
        deviceConfig.videoProfileId = profile?.uniqueId;
        deviceConfig.videoProfileKind = SelectedVideoProfileKind;
    }
    var source = await DeviceVideoTrackSource.CreateAsync(deviceConfig); // FIXME - this leaks the source, never disposed

    // Create the track
    var trackConfig = new LocalVideoTrackInitConfig
    {
        trackName = trackName,
    };
    var track = LocalVideoTrack.CreateFromSource(source, trackConfig); // FIXME - this probably leaks the track, never disposed

    SessionModel.Current.AddVideoTrack(track, deviceInfo.DisplayName);
}
/// <summary>
/// Handles messages posted from the page. Expects a dictionary with a
/// "command" entry: "start" configures a track and encoder from the message
/// fields, "stop" ends the current encode; anything else is logged as an error.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="varMessage">Incoming PPAPI Var; must be a dictionary.</param>
private void OnHandleMessage(object sender, Var varMessage)
{
    if (!varMessage.IsDictionary)
    {
        LogToConsole(PPLogLevel.Error, "Invalid message!");
        return;
    }

    var dictMessage = new VarDictionary(varMessage);
    var command = dictMessage.Get("command").AsString();

    switch (command)
    {
        case "start":
        {
            requestedSize = new PPSize(dictMessage.Get("width").AsInt(), dictMessage.Get("height").AsInt());

            var trackVar = dictMessage.Get("track");
            if (!trackVar.IsResource)
            {
                LogToConsole(PPLogLevel.Error, "Given track is not a resource");
                return;
            }

            var resourceTrack = new MediaStreamVideoTrack(trackVar.AsResource());
            if (!resourceTrack.IsEmpty)
            {
                videoTrack = resourceTrack;
                videoTrack.HandleConfigure += OnConfigureTrack;
                videoTrack.HandleFrame += OnTrackFrame;
                videoEncoder = new VideoEncoder(this);
                videoProfile = VideoProfileFromString(dictMessage.Get("profile").AsString());
                ConfigureTrack();
            }
            break;
        }
        case "stop":
            StopEncode();
            Log("stopped");
            break;
        default:
            LogToConsole(PPLogLevel.Error, "Invalid command!");
            break;
    }
}
/// <summary>
/// Registers a bidirectional mapping between a profile enum value and its
/// string name in the two lookup dictionaries.
/// </summary>
/// <param name="profile">Profile enum value.</param>
/// <param name="profile_str">String name associated with the profile.</param>
void AddVideoProfile(VideoProfile profile, string profile_str)
{
    // Dictionary.Add throws on duplicates, so each pair may only be
    // registered once per direction.
    profileToString.Add(profile, profile_str);
    profileFromString.Add(profile_str, profile);
}
/// <summary>
/// Convenience overload: updates the profile under this instance's FactoryId
/// by delegating to the cloud service.
/// </summary>
/// <param name="profile">Profile to update.</param>
/// <returns>The updated profile as returned by the service.</returns>
public Task<VideoProfile> UpdateProfile(VideoProfile profile) =>
    _cloudService.UpdateProfile(FactoryId, profile);