/// <summary>
/// Builds the <see cref="PublishVideoModel"/> describing how this capture-card
/// stream is captured, encoded and transported. Sync group is 0; the capture
/// frame rate intentionally mirrors the encode frame rate (EncFps).
/// </summary>
/// <returns>A fully populated publish model; SourceName and DisplayWindow are
/// left unset (null / IntPtr.Zero) for the publisher to fill in later.</returns>
public override PublishVideoModel GetPublishVideoModel()
{
    // Capture region and color space come straight from this parameter object.
    var captureModel = new VideoCaptureModel()
    {
        VideoColorSpace = VideoColorSpace,
        Left = CapLeft,
        Top = CapTop,
        Right = CapRight,
        Bottom = CapBottom,
        Fps = EncFps,
        CapWinHandle = IntPtr.Zero,
    };

    var encodeModel = new VideoEncodeModel()
    {
        VideoCodeId = VideoCodeId,
        VideoCodeType = VideoCodeType,
        VideoCodeLevel = VideoCodeLevel,
        Width = EncWidth,
        Height = EncHeight,
        Fps = EncFps,
        Bitrate = EncBitrate,
    };

    // FEC / retransmission tuning for the transport layer.
    var transModel = new TransModel()
    {
        CheckRetransSendCount = CheckRetransSendCount,
        CheckSendCount = CheckSendCount,
        DataResendCount = DataResendCount,
        DataRetransSendCount = DataRetransSendCount,
        DataSendCount = DataSendCount,
        DelayTimeWinsize = DelayTimeWinsize,
        FecCheckCount = FecCheckCount,
        FecDataCount = FecDataCount,
    };

    var sendModel = new VideoSendModel()
    {
        CaptureModel = captureModel,
        EncodeModel = encodeModel,
        DisplayFillMode = DisplayFillMode,
        DisplayWindow = IntPtr.Zero, // local-preview window is bound later by the publisher
        SourceType = SourceType.Device,
        SourceName = null,           // assigned by the caller (device name / "DesktopCapture")
        ExtraInfo = null,
    };

    return new PublishVideoModel()
    {
        AvSyncGroupId = 0,
        MediaType = MediaType.VideoCaptureCard,
        StreamType = StreamType.VideoSend,
        VideoSendModel = sendModel,
        VideoTransModel = transModel,
    };
}
/// <summary>
/// Publishes a local media stream of the given type. Dispatches to the matching
/// SDK publish call, registers the resulting stream resource on the participant
/// and, for video streams, binds the local preview video box and raises
/// <see cref="VideoBoxAddedEvent"/>.
/// </summary>
/// <param name="mediaType">Kind of media to publish.</param>
/// <param name="deviceName">Device (or source) name used to look up stream parameters.</param>
/// <returns>The SDK result; StatusCode 0 means success. Unimplemented media types
/// return the default "未实现的媒体类型。" error.</returns>
public async Task<MeetingResult<int>> Publish(MediaType mediaType, string deviceName)
{
    ThrowIfPublishVerify();
    var result = MeetingResult.Error<int>("未实现的媒体类型。");
    switch (mediaType)
    {
        case MediaType.Camera:
            var cameraParam = StreamParameterProviders.GetParameter<PublishCameraStreamParameter>(deviceName);
            result = await PublishVideoAsync(
                mediaType, deviceName, VideoBoxType.Camera, cameraParam,
                cameraParam.GetPublishVideoModel(), _meetingSdkWrapper.PublishCameraVideo);
            break;
        case MediaType.Microphone:
        case MediaType.AudioCaptureCard:
        case MediaType.AudioDoc:
            // All three audio sources share the same mic-publish flow.
            result = await PublishAudioAsync(mediaType, deviceName);
            break;
        case MediaType.VideoDoc:
            var dataCardParam = StreamParameterProviders.GetParameter<PublishDataCardStreamParameter>(deviceName);
            result = await PublishVideoAsync(
                mediaType, deviceName, VideoBoxType.DataCard, dataCardParam,
                dataCardParam.GetPublishVideoModel(), _meetingSdkWrapper.PublishDataCardVideo);
            break;
        case MediaType.VideoCaptureCard:
            var winCapParam = StreamParameterProviders.GetParameter<PublishWinCaptureStreamParameter>(deviceName);
            // Desktop capture publishes under a fixed source name, not the device name.
            result = await PublishVideoAsync(
                mediaType, "DesktopCapture", VideoBoxType.WinCapture, winCapParam,
                winCapParam.GetPublishVideoModel(), _meetingSdkWrapper.PublishWinCaptureVideo);
            break;
        case MediaType.StreamMedia:
        case MediaType.File:
        case MediaType.WhiteBoard:
        case MediaType.RemoteControl:
        case MediaType.MediaTypeMax:
            // Not implemented yet — falls through with the default error result.
            break;
    }
    // Layout is refreshed regardless of publish success, matching prior behavior.
    LayoutChanged(mediaType);
    return result;
}

/// <summary>
/// Shared flow for publishing a video stream: set source name, bind an existing
/// video box as the local preview window, invoke the SDK publish call and, on
/// success, record the stream resource and announce the video box.
/// </summary>
/// <param name="mediaType">Media type being published.</param>
/// <param name="sourceName">Value written to VideoSendModel.SourceName.</param>
/// <param name="videoBoxType">Which video box slot to look up for local preview.</param>
/// <param name="streamParameter">Parameter object stored on the stream resource.</param>
/// <param name="model">Publish model produced by the parameter object.</param>
/// <param name="publishFunc">The concrete SDK publish call (camera/data-card/win-capture).</param>
private async Task<MeetingResult<int>> PublishVideoAsync(
    MediaType mediaType,
    string sourceName,
    VideoBoxType videoBoxType,
    IStreamParameter streamParameter,
    PublishVideoModel model,
    Func<PublishVideoModel, Task<MeetingResult<int>>> publishFunc)
{
    model.VideoSendModel.SourceName = sourceName;
    VideoBox videoBox;
    if (VideoBoxManager.TryGet(Participant.Account, videoBoxType, mediaType, out videoBox))
    {
        model.VideoSendModel.DisplayWindow = videoBox.Handle;
    }
    var publishResult = await publishFunc(model);
    if (publishResult.StatusCode == 0)
    {
        var publishStreamResource = new StreamResource<IStreamParameter>
        {
            MediaType = mediaType,
            ResourceId = publishResult.Result,
            SyncId = model.AvSyncGroupId,
            StreamParameter = streamParameter,
            IsUsed = true
        };
        Participant.Resources.Add(publishStreamResource);
        if (videoBox != null)
        {
            videoBox.AccountResource.ResourceId = publishStreamResource.ResourceId;
            videoBox.AccountResource.MediaType = mediaType;
            _eventAggregator.GetEvent<VideoBoxAddedEvent>().Publish(videoBox);
            //await _meetingSdkWrapper.StartLocalVideoRender(
            //    publishStreamResource.ResourceId,
            //    videoBox.Handle,
            //    (int)videoBox.Host.ActualWidth,
            //    (int)videoBox.Host.ActualHeight);
        }
    }
    return publishResult;
}

/// <summary>
/// Shared flow for publishing an audio (mic) stream: fetch the mic parameter for
/// the device, publish through the SDK and record the stream resource on success.
/// </summary>
private async Task<MeetingResult<int>> PublishAudioAsync(MediaType mediaType, string deviceName)
{
    PublishMicStreamParameter micParam = StreamParameterProviders.GetParameter<PublishMicStreamParameter>(deviceName);
    PublishAudioModel publishModel = micParam.GetPublishAudioModel();
    publishModel.AudioSendModel.SourceName = deviceName;
    MeetingResult<int> publishResult = await _meetingSdkWrapper.PublishMicAudio(publishModel);
    if (publishResult.StatusCode == 0)
    {
        Participant.Resources.Add(new StreamResource<IStreamParameter>
        {
            MediaType = mediaType,
            ResourceId = publishResult.Result,
            SyncId = publishModel.AvSyncGroupId,
            StreamParameter = micParam,
            IsUsed = true
        });
    }
    return publishResult;
}
/// <summary>
/// Converts a managed <see cref="PublishVideoModel"/> into the native
/// MEETINGMANAGE_PublishCameraParam interop struct. Capture and encode
/// parameters are marshalled into unmanaged memory (AllocHGlobal);
/// the caller is presumably responsible for freeing those pointers after the
/// native call completes — TODO confirm against the SDK wrapper.
/// </summary>
/// <param name="model">Managed publish model to convert.</param>
/// <returns>The populated native parameter struct.</returns>
/// <exception cref="Exception">Wraps any conversion failure; the unmanaged
/// buffers allocated so far are freed before rethrowing.</exception>
internal static MEETINGMANAGE_PublishCameraParam ToStruct(this PublishVideoModel model)
{
    MEETINGMANAGE_PublishCameraParam publishCameraParam = new MEETINGMANAGE_PublishCameraParam();
    // Initialize pointers so the catch block can safely free whatever was allocated.
    publishCameraParam.sParam.vsParam.capParam = IntPtr.Zero;
    publishCameraParam.sParam.vsParam.encParam = IntPtr.Zero;
    try
    {
        // 视频采集参数 (video capture parameters)
        MEETINGMANAGEVideoCapParam videoCapParam = new MEETINGMANAGEVideoCapParam()
        {
            left = model.VideoSendModel.CaptureModel.Left,
            right = model.VideoSendModel.CaptureModel.Right,
            top = model.VideoSendModel.CaptureModel.Top,
            bottom = model.VideoSendModel.CaptureModel.Bottom,
            fps = model.VideoSendModel.CaptureModel.Fps,
            capWinHandle = model.VideoSendModel.CaptureModel.CapWinHandle,
            colorSpace = (MEETINGMANAGE_VideoColorSpace)model.VideoSendModel.CaptureModel.VideoColorSpace,
        };
        publishCameraParam.sParam.vsParam.capParam = Marshal.AllocHGlobal(Marshal.SizeOf(videoCapParam));
        Marshal.StructureToPtr(videoCapParam, publishCameraParam.sParam.vsParam.capParam, true);

        publishCameraParam.sParam.vsParam.fillMode =
            (MEETINGMANAGE_DisplayFillMode)model.VideoSendModel.DisplayFillMode;
        publishCameraParam.sParam.vsParam.displayWindow = model.VideoSendModel.DisplayWindow; // 本地预览的窗口句柄 (local preview window handle)

        // 视频编码参数 (video encode parameters)
        MEETINGMANAGEVideoEncParam videoEncParam = new MEETINGMANAGEVideoEncParam()
        {
            bitrate = model.VideoSendModel.EncodeModel.Bitrate,
            fps = model.VideoSendModel.EncodeModel.Fps,
            width = model.VideoSendModel.EncodeModel.Width,
            height = model.VideoSendModel.EncodeModel.Height,
            level = (MEETINGMANAGEVideoCodecLevel)model.VideoSendModel.EncodeModel.VideoCodeLevel,
            codecID = (MEETINGMANAGEVideoCodecID)model.VideoSendModel.EncodeModel.VideoCodeId,
            codecType = (MEETINGMANAGEVideoCodecType)model.VideoSendModel.EncodeModel.VideoCodeType,
        };
        publishCameraParam.sParam.vsParam.encParam = Marshal.AllocHGlobal(Marshal.SizeOf(videoEncParam));
        Marshal.StructureToPtr(videoEncParam, publishCameraParam.sParam.vsParam.encParam, true);

        publishCameraParam.sParam.sourceType = (MEETINGMANAGESourceType)model.VideoSendModel.SourceType;
        publishCameraParam.sParam.sourceName = model.VideoSendModel.SourceName; // 摄像头名称 (camera name)

        // 媒体类型 (media type)
        publishCameraParam.mediaType = (MEETINGMANAGE_MediaType)model.MediaType;
        // 媒体流类型 (stream type)
        publishCameraParam.sType = (MEETINGMANAGE_StreamType)model.StreamType;
        publishCameraParam.avSynGroupID = model.AvSyncGroupId;
        publishCameraParam.sParam.extraInfo = model.VideoSendModel.ExtraInfo;

        // Transport (FEC / retransmission) parameters.
        publishCameraParam.transParam.checkRetransSendCount = model.VideoTransModel.CheckRetransSendCount;
        publishCameraParam.transParam.checkSendCount = model.VideoTransModel.CheckSendCount;
        publishCameraParam.transParam.dataResendCount = model.VideoTransModel.DataResendCount;
        publishCameraParam.transParam.dataRetransSendCount = model.VideoTransModel.DataRetransSendCount;
        publishCameraParam.transParam.dataSendCount = model.VideoTransModel.DataSendCount;
        publishCameraParam.transParam.delayTimeWinsize = model.VideoTransModel.DelayTimeWinsize;
        publishCameraParam.transParam.fecCheckCount = model.VideoTransModel.FecCheckCount;
        publishCameraParam.transParam.fecDataCount = model.VideoTransModel.FecDataCount;
    }
    catch (Exception e)
    {
        // Fix: the original leaked any unmanaged memory allocated before the
        // failure and discarded the inner exception/stack trace.
        if (publishCameraParam.sParam.vsParam.capParam != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(publishCameraParam.sParam.vsParam.capParam);
            publishCameraParam.sParam.vsParam.capParam = IntPtr.Zero;
        }
        if (publishCameraParam.sParam.vsParam.encParam != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(publishCameraParam.sParam.vsParam.encParam);
            publishCameraParam.sParam.vsParam.encParam = IntPtr.Zero;
        }
        throw new Exception($"发布视频流结构转换失败。{e.Message}", e);
    }
    return publishCameraParam;
}