/// <summary>
/// Converts a managed <see cref="PublishAudioModel"/> into the native
/// MEETINGMANAGE_publishMicParam structure consumed by the meeting SDK.
/// </summary>
/// <remarks>
/// Allocates unmanaged memory for the audio capture/encode parameter blocks
/// (<c>capParam</c> / <c>encParam</c>). On success the caller owns those pointers
/// and must release them with <see cref="Marshal.FreeHGlobal"/>; on failure they
/// are freed here before rethrowing.
/// </remarks>
/// <param name="model">Managed audio publish settings to convert.</param>
/// <returns>The populated native structure.</returns>
/// <exception cref="Exception">
/// Thrown when conversion fails; the original failure is preserved as the inner exception.
/// </exception>
internal static MEETINGMANAGE_publishMicParam ToStruct(this PublishAudioModel model)
{
    MEETINGMANAGE_publishMicParam publishMicParam = new MEETINGMANAGE_publishMicParam();
    try
    {
        publishMicParam.mediaType = (MEETINGMANAGE_MediaType)model.MediaType;
        publishMicParam.sType = (MEETINGMANAGE_StreamType)model.StreamType;

        // Transport (FEC / retransmission) tuning values.
        publishMicParam.transParam.checkRetransSendCount = model.TransModel.CheckRetransSendCount;
        publishMicParam.transParam.checkSendCount = model.TransModel.CheckSendCount;
        publishMicParam.transParam.dataResendCount = model.TransModel.DataResendCount;
        publishMicParam.transParam.dataRetransSendCount = model.TransModel.DataRetransSendCount;
        publishMicParam.transParam.dataSendCount = model.TransModel.DataSendCount;
        publishMicParam.transParam.delayTimeWinsize = model.TransModel.DelayTimeWinsize;
        publishMicParam.transParam.fecCheckCount = model.TransModel.FecCheckCount;
        publishMicParam.transParam.fecDataCount = model.TransModel.FecDataCount;

        // Source description.
        publishMicParam.sParam.sourceName = model.AudioSendModel.SourceName;
        publishMicParam.sParam.sourceType = (MEETINGMANAGESourceType)model.AudioSendModel.SourceType;
        publishMicParam.sParam.asParam.isMix = model.AudioSendModel.IsMix;
        publishMicParam.sParam.extraInfo = model.AudioSendModel.ExtraInfo;
        publishMicParam.avSynGroupID = model.AvSyncGroupId;

        MEETINGMANAGEAudioCapParam audioCapParam = new MEETINGMANAGEAudioCapParam()
        {
            bitspersample = model.AudioSendModel.AudioCapModel.BitsPerSample,
            channels = model.AudioSendModel.AudioCapModel.Channels,
            samplerate = model.AudioSendModel.AudioCapModel.SampleRate,
        };
        MEETINGMANAGEAudioEncParam audioEncParam = new MEETINGMANAGEAudioEncParam()
        {
            bitrate = model.AudioSendModel.AudioEncModel.Bitrate,
            bitspersample = model.AudioSendModel.AudioEncModel.BitsPerSample,
            channels = model.AudioSendModel.AudioEncModel.Channels,
            codecID = (MEETINGMANAGEAudioCodecID)model.AudioSendModel.AudioEncModel.AudioCodeId,
            samplerate = model.AudioSendModel.AudioEncModel.SampleRate,
        };

        publishMicParam.sParam.asParam.capParam = Marshal.AllocHGlobal(Marshal.SizeOf(audioCapParam));
        publishMicParam.sParam.asParam.encParam = Marshal.AllocHGlobal(Marshal.SizeOf(audioEncParam));
        // fDeleteOld must be FALSE here: the destination blocks were freshly allocated
        // and hold garbage, so asking the marshaler to destroy the "old" contents first
        // would attempt to free random pointers (undefined behavior).
        Marshal.StructureToPtr(audioCapParam, publishMicParam.sParam.asParam.capParam, false);
        Marshal.StructureToPtr(audioEncParam, publishMicParam.sParam.asParam.encParam, false);
    }
    catch (Exception exception)
    {
        // Don't leak the unmanaged blocks if conversion failed half-way.
        if (publishMicParam.sParam.asParam.capParam != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(publishMicParam.sParam.asParam.capParam);
            publishMicParam.sParam.asParam.capParam = IntPtr.Zero;
        }
        if (publishMicParam.sParam.asParam.encParam != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(publishMicParam.sParam.asParam.encParam);
            publishMicParam.sParam.asParam.encParam = IntPtr.Zero;
        }
        // Preserve the original failure as InnerException instead of flattening it into the message.
        throw new Exception("发布音频流结构转换失败。", exception);
    }
    return publishMicParam;
}
/// <summary>
/// Builds the audio publish model for a microphone send stream from this
/// parameter set's configured capture, encode and transport values.
/// </summary>
/// <returns>A fully populated <see cref="PublishAudioModel"/>.</returns>
public override PublishAudioModel GetPublishAudioModel()
{
    // Capture settings of the local audio device.
    var capModel = new AudioCapModel()
    {
        BitsPerSample = CapBitsPerSample,
        Channels = CapChannels,
        SampleRate = CapSampleRate,
    };

    // Encoder settings applied before the stream is sent.
    var encModel = new AudioEncModel()
    {
        AudioCodeId = AudioCodeId,
        Bitrate = EncBitrate,
        BitsPerSample = EncBitsPerSample,
        Channels = EncChannels,
        SampleRate = EncSampleRate,
    };

    // SourceName is filled in later by the caller (it depends on the chosen device).
    var sendModel = new AudioSendModel()
    {
        AudioCapModel = capModel,
        AudioEncModel = encModel,
        ExtraInfo = null,
        IsMix = IsMix,
        SourceName = null,
        SourceType = SourceType.Device,
    };

    // Transport (FEC / retransmission) tuning values.
    var transModel = new TransModel()
    {
        CheckRetransSendCount = CheckRetransSendCount,
        CheckSendCount = CheckSendCount,
        DataResendCount = DataResendCount,
        DataRetransSendCount = DataRetransSendCount,
        DataSendCount = DataSendCount,
        DelayTimeWinsize = DelayTimeWinsize,
        FecCheckCount = FecCheckCount,
        FecDataCount = FecDataCount,
    };

    return new PublishAudioModel()
    {
        AudioSendModel = sendModel,
        AvSyncGroupId = 0,
        MediaType = MediaType.Microphone,
        StreamType = StreamType.AudioSend,
        TransModel = transModel,
    };
}
/// <summary>
/// Publishes the local stream of the given <paramref name="mediaType"/> captured
/// from <paramref name="deviceName"/> through the meeting SDK, records the
/// resulting stream resource on the participant, and (for video) binds the
/// preview <see cref="VideoBox"/> when one exists.
/// </summary>
/// <param name="mediaType">Kind of media to publish (camera, microphone, data card, …).</param>
/// <param name="deviceName">Capture device used to look up the stream parameters.</param>
/// <returns>
/// The SDK publish result; an error result for media types that are not implemented.
/// </returns>
public async Task<MeetingResult<int>> Publish(MediaType mediaType, string deviceName)
{
    ThrowIfPublishVerify();

    var result = MeetingResult.Error<int>("未实现的媒体类型。");

    switch (mediaType)
    {
        case MediaType.Camera:
        {
            var cameraParam = StreamParameterProviders.GetParameter<PublishCameraStreamParameter>(deviceName);
            PublishVideoModel cameraModel = cameraParam.GetPublishVideoModel();
            cameraModel.VideoSendModel.SourceName = deviceName;
            VideoBox videoBox;
            if (VideoBoxManager.TryGet(Participant.Account, VideoBoxType.Camera, mediaType, out videoBox))
            {
                cameraModel.VideoSendModel.DisplayWindow = videoBox.Handle;
            }
            result = await _meetingSdkWrapper.PublishCameraVideo(cameraModel);
            if (result.StatusCode == 0)
            {
                RegisterPublishedStream(new StreamResource<IStreamParameter>
                {
                    MediaType = mediaType,
                    ResourceId = result.Result,
                    SyncId = cameraModel.AvSyncGroupId,
                    StreamParameter = cameraParam,
                    IsUsed = true
                }, videoBox);
            }
            break;
        }
        case MediaType.Microphone:
        case MediaType.AudioCaptureCard:
        case MediaType.AudioDoc:
        {
            // All audio sources share the exact same publish path
            // (previously three duplicated case bodies).
            PublishMicStreamParameter micParam = StreamParameterProviders.GetParameter<PublishMicStreamParameter>(deviceName);
            PublishAudioModel micModel = micParam.GetPublishAudioModel();
            micModel.AudioSendModel.SourceName = deviceName;
            result = await _meetingSdkWrapper.PublishMicAudio(micModel);
            if (result.StatusCode == 0)
            {
                // Audio streams have no preview box to bind.
                RegisterPublishedStream(new StreamResource<IStreamParameter>
                {
                    MediaType = mediaType,
                    ResourceId = result.Result,
                    SyncId = micModel.AvSyncGroupId,
                    StreamParameter = micParam,
                    IsUsed = true
                }, null);
            }
            break;
        }
        case MediaType.VideoDoc:
        {
            var dataCardParam = StreamParameterProviders.GetParameter<PublishDataCardStreamParameter>(deviceName);
            PublishVideoModel dataCardModel = dataCardParam.GetPublishVideoModel();
            dataCardModel.VideoSendModel.SourceName = deviceName;
            VideoBox videoBox;
            if (VideoBoxManager.TryGet(Participant.Account, VideoBoxType.DataCard, mediaType, out videoBox))
            {
                dataCardModel.VideoSendModel.DisplayWindow = videoBox.Handle;
            }
            result = await _meetingSdkWrapper.PublishDataCardVideo(dataCardModel);
            if (result.StatusCode == 0)
            {
                RegisterPublishedStream(new StreamResource<IStreamParameter>
                {
                    MediaType = mediaType,
                    ResourceId = result.Result,
                    SyncId = dataCardModel.AvSyncGroupId,
                    StreamParameter = dataCardParam,
                    IsUsed = true
                }, videoBox);
            }
            break;
        }
        case MediaType.VideoCaptureCard:
        {
            var winCapParam = StreamParameterProviders.GetParameter<PublishWinCaptureStreamParameter>(deviceName);
            PublishVideoModel winCapModel = winCapParam.GetPublishVideoModel();
            // Desktop capture publishes under a fixed source name, not the device name.
            winCapModel.VideoSendModel.SourceName = "DesktopCapture";
            VideoBox videoBox;
            if (VideoBoxManager.TryGet(Participant.Account, VideoBoxType.WinCapture, mediaType, out videoBox))
            {
                winCapModel.VideoSendModel.DisplayWindow = videoBox.Handle;
            }
            result = await _meetingSdkWrapper.PublishWinCaptureVideo(winCapModel);
            if (result.StatusCode == 0)
            {
                RegisterPublishedStream(new StreamResource<IStreamParameter>
                {
                    MediaType = mediaType,
                    ResourceId = result.Result,
                    SyncId = winCapModel.AvSyncGroupId,
                    StreamParameter = winCapParam,
                    IsUsed = true
                }, videoBox);
            }
            break;
        }
        case MediaType.StreamMedia:
        case MediaType.File:
        case MediaType.WhiteBoard:
        case MediaType.RemoteControl:
        case MediaType.MediaTypeMax:
            // Not implemented: fall through with the default error result.
            break;
    }

    LayoutChanged(mediaType);
    return result;
}

/// <summary>
/// Records a successfully published stream on the participant and, when a
/// preview box was found for it, binds the box to the new resource and
/// announces it via <see cref="VideoBoxAddedEvent"/>.
/// </summary>
/// <param name="resource">The stream resource created from the publish result.</param>
/// <param name="videoBox">Preview box for the stream, or null when none applies (audio).</param>
private void RegisterPublishedStream(StreamResource<IStreamParameter> resource, VideoBox videoBox)
{
    Participant.Resources.Add(resource);
    if (videoBox == null)
    {
        return;
    }
    videoBox.AccountResource.ResourceId = resource.ResourceId;
    videoBox.AccountResource.MediaType = resource.MediaType;
    _eventAggregator.GetEvent<VideoBoxAddedEvent>().Publish(videoBox);
}