/// <summary>
/// Creates a spatial voice session bound to the given gateway connection.
/// </summary>
/// <param name="conn">Gateway that owns and services this session</param>
/// <param name="handle">Session handle assigned by the voice daemon</param>
public VoiceSession(VoiceGateway conn, string handle)
{
    connector = conn;
    m_Handle = handle;
    m_spatial = true;
    knownParticipants = new Dictionary<string, VoiceParticipant>();
}
/// <summary>
/// Constructs a voice session for the supplied gateway; sessions start out
/// spatial with an empty participant table.
/// </summary>
/// <param name="conn">Owning voice gateway</param>
/// <param name="handle">Daemon-assigned handle identifying this session</param>
public VoiceSession(VoiceGateway conn, string handle)
{
    m_Handle = handle;
    connector = conn;
    knownParticipants = new Dictionary<string, VoiceParticipant>();
    m_spatial = true;
}
/// <summary>
/// Set the local speaker volume.
/// </summary>
/// <param name="ConnectorHandle">Handle returned from a successful Connector 'create' request</param>
/// <param name="Value">Audio level, -100 to 100, where 0 is 'normal' speaking volume</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int ConnectorSetLocalSpeakerVolume(string ConnectorHandle, int Value)
{
    string requestXml =
        VoiceGateway.MakeXML("ConnectorHandle", ConnectorHandle) +
        VoiceGateway.MakeXML("Value", Value.ToString());
    return Request("Connector.SetLocalSpeakerVolume.1", requestXml);
}
/// <summary>
/// Mute or unmute the local speaker.
/// </summary>
/// <param name="ConnectorHandle">Handle returned from a successful Connector 'create' request</param>
/// <param name="Mute">true to mute, false to unmute</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int ConnectorMuteLocalSpeaker(string ConnectorHandle, bool Mute)
{
    string requestXml =
        VoiceGateway.MakeXML("ConnectorHandle", ConnectorHandle) +
        VoiceGateway.MakeXML("Value", Mute ? "true" : "false");
    return Request("Connector.MuteLocalSpeaker.1", requestXml);
}
/// <summary>
/// Accept an incoming call.
/// </summary>
/// <param name="SessionHandle">Session handle, e.g. as received from a SessionNewEvent</param>
/// <param name="AudioMedia">Normally "default"</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int SessionConnect(string SessionHandle, string AudioMedia)
{
    string requestXml =
        VoiceGateway.MakeXML("SessionHandle", SessionHandle) +
        VoiceGateway.MakeXML("AudioMedia", AudioMedia);
    return Request("Session.Connect.1", requestXml);
}
/// <summary>
/// Start the audio render process, playing the given file through the selected
/// render device. Should not be issued while the user is on a call.
/// </summary>
/// <param name="SoundFilePath">Fully qualified path to the sound file</param>
/// <param name="Loop">true to play continuously, false to play once</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int SessionRenderAudioStart(string SoundFilePath, bool Loop)
{
    // Daemon expects "1"/"0" (not "true"/"false") for the Loop flag.
    string requestXml =
        VoiceGateway.MakeXML("SoundFilePath", SoundFilePath) +
        VoiceGateway.MakeXML("Loop", Loop ? "1" : "0");
    return Request("Session.RenderAudioStart.1", requestXml);
}
/// <summary>
/// Set how loud a particular participant sounds to the local user only;
/// does not affect how other users hear that participant.
/// </summary>
/// <param name="SessionHandle">Handle from a successful Session 'create' request or a SessionNewEvent</param>
/// <param name="ParticipantURI">URI identifying the participant</param>
/// <param name="Volume">Audio level, -100 to 100, where 0 is 'normal' speaking volume</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int SessionSetParticipantVolumeForMe(string SessionHandle, string ParticipantURI, int Volume)
{
    string requestXml =
        VoiceGateway.MakeXML("SessionHandle", SessionHandle) +
        VoiceGateway.MakeXML("ParticipantURI", ParticipantURI) +
        VoiceGateway.MakeXML("Volume", Volume.ToString());
    return Request("Session.SetParticipantVolumeForMe.1", requestXml);
}
/// <summary>
/// Log a user account in to the voice service. May only be called after
/// Connector initialization has completed successfully.
/// </summary>
/// <param name="ConnectorHandle">Handle returned from a successful Connector 'create' request</param>
/// <param name="AccountName">User's account name</param>
/// <param name="AccountPassword">User's account password</param>
/// <param name="AudioSessionAnswerMode">"AutoAnswer" or "VerifyAnswer"</param>
/// <param name="AccountURI">Normally ""</param>
/// <param name="ParticipantPropertyFrequency">How often the daemon sends participant
/// property events while in a channel: 0 = never, 5 = 10/sec, 10 = 5/sec,
/// 50 = 1/sec, 100 = on participant state change (the default)</param>
/// <param name="EnableBuddiesAndPresence">Normally false</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int AccountLogin(string ConnectorHandle, string AccountName, string AccountPassword, string AudioSessionAnswerMode, string AccountURI, int ParticipantPropertyFrequency, bool EnableBuddiesAndPresence)
{
    StringBuilder body = new StringBuilder();
    body.Append(VoiceGateway.MakeXML("ConnectorHandle", ConnectorHandle))
        .Append(VoiceGateway.MakeXML("AccountName", AccountName))
        .Append(VoiceGateway.MakeXML("AccountPassword", AccountPassword))
        .Append(VoiceGateway.MakeXML("AudioSessionAnswerMode", AudioSessionAnswerMode))
        .Append(VoiceGateway.MakeXML("AccountURI", AccountURI))
        .Append(VoiceGateway.MakeXML("ParticipantPropertyFrequency", ParticipantPropertyFrequency.ToString()))
        .Append(VoiceGateway.MakeXML("EnableBuddiesAndPresence", EnableBuddiesAndPresence ? "true" : "false"));
    return Request("Account.Login.1", body.ToString());
}
/// <summary>
/// Create a session — an 'outbound' call to another user or channel. A session
/// handle is required to control local-user functions within the session (or
/// remote users if the current account has rights to do so). Creating a session
/// currently connects the audio media automatically; Session.Connect is
/// reserved for future use.
/// </summary>
/// <param name="AccountHandle">Handle returned from a successful login</param>
/// <param name="URI">URI of the terminating point of the session (who/what is being called)</param>
/// <param name="Name">Display name of the entity being called (user or channel)</param>
/// <param name="Password">Only needed when the target URI is password protected</param>
/// <param name="JoinAudio">Join the audio media of the session</param>
/// <param name="JoinText">Join the text media of the session</param>
/// <param name="PasswordHashAlgorithm">"ClearText" (assumed when absent) or
/// "SHA1UserName" — the SHA1 hash of password+username, base64 encoded, with
/// the trailing '=' stripped</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int SessionCreate(string AccountHandle, string URI, string Name, string Password, bool JoinAudio, bool JoinText, string PasswordHashAlgorithm)
{
    StringBuilder body = new StringBuilder();
    body.Append(VoiceGateway.MakeXML("AccountHandle", AccountHandle))
        .Append(VoiceGateway.MakeXML("URI", URI))
        .Append(VoiceGateway.MakeXML("Name", Name))
        .Append(VoiceGateway.MakeXML("Password", Password))
        .Append(VoiceGateway.MakeXML("JoinAudio", JoinAudio ? "true" : "false"))
        .Append(VoiceGateway.MakeXML("JoinText", JoinText ? "true" : "false"))
        .Append(VoiceGateway.MakeXML("PasswordHashAlgorithm", PasswordHashAlgorithm));
    return Request("Session.Create.1", body.ToString());
}
/// <summary>
/// Initialize the Connector as a whole. Must complete successfully before any
/// other request is made (typically during application initialization); pair
/// with a shutdown call when the application exits to release resources.
/// </summary>
/// <param name="ClientName">Application name</param>
/// <param name="AccountManagementServer">URL of the account management server</param>
/// <param name="MinimumPort">Lowest port the daemon may use</param>
/// <param name="MaximumPort">Highest port the daemon may use</param>
/// <param name="Logging">Daemon logging settings</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int ConnectorCreate(string ClientName, string AccountManagementServer, ushort MinimumPort, ushort MaximumPort, VoiceLoggingSettings Logging)
{
    StringBuilder body = new StringBuilder();
    body.Append(VoiceGateway.MakeXML("ClientName", ClientName))
        .Append(VoiceGateway.MakeXML("AccountManagementServer", AccountManagementServer))
        .Append(VoiceGateway.MakeXML("MinimumPort", MinimumPort.ToString()))
        .Append(VoiceGateway.MakeXML("MaximumPort", MaximumPort.ToString()));
    // Logging settings travel as a nested <Logging> element.
    body.Append("<Logging>")
        .Append(VoiceGateway.MakeXML("Enabled", Logging.Enabled ? "true" : "false"))
        .Append(VoiceGateway.MakeXML("Folder", Logging.Folder))
        .Append(VoiceGateway.MakeXML("FileNamePrefix", Logging.FileNamePrefix))
        .Append(VoiceGateway.MakeXML("FileNameSuffix", Logging.FileNameSuffix))
        .Append(VoiceGateway.MakeXML("LogLevel", Logging.LogLevel.ToString()))
        .Append("</Logging>");
    return Request("Connector.Create.1", body.ToString());
}
/// <summary>
/// End an established session (hang up / disconnect).
/// </summary>
/// <param name="SessionHandle">Handle from a successful Session 'create' request or a SessionNewEvent</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int SessionTerminate(string SessionHandle)
{
    return Request("Session.Terminate.1", VoiceGateway.MakeXML("SessionHandle", SessionHandle));
}
/// <summary>
/// Set the combined speaking and listening position in 3D space.
/// </summary>
/// <param name="SessionHandle">Handle from a successful Session 'create' request or a SessionNewEvent</param>
/// <param name="SpeakerPosition">Speaking position</param>
/// <param name="ListenerPosition">Listening position</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int SessionSet3DPosition(string SessionHandle, VoicePosition SpeakerPosition, VoicePosition ListenerPosition)
{
    StringBuilder sb = new StringBuilder();
    sb.Append(VoiceGateway.MakeXML("SessionHandle", SessionHandle));
    sb.Append("<SpeakerPosition>");
    AppendVoicePosition(sb, SpeakerPosition);
    sb.Append("</SpeakerPosition>");
    sb.Append("<ListenerPosition>");
    AppendVoicePosition(sb, ListenerPosition);
    sb.Append("</ListenerPosition>");
    return Request("Session.Set3DPosition.1", sb.ToString());
}

/// <summary>
/// Serializes the five vectors of a VoicePosition in the element order the
/// daemon expects: Position, Velocity, AtOrientation, UpOrientation,
/// LeftOrientation.
/// </summary>
private static void AppendVoicePosition(StringBuilder sb, VoicePosition pos)
{
    AppendVector(sb, "Position", pos.Position.X.ToString(), pos.Position.Y.ToString(), pos.Position.Z.ToString());
    AppendVector(sb, "Velocity", pos.Velocity.X.ToString(), pos.Velocity.Y.ToString(), pos.Velocity.Z.ToString());
    AppendVector(sb, "AtOrientation", pos.AtOrientation.X.ToString(), pos.AtOrientation.Y.ToString(), pos.AtOrientation.Z.ToString());
    AppendVector(sb, "UpOrientation", pos.UpOrientation.X.ToString(), pos.UpOrientation.Y.ToString(), pos.UpOrientation.Z.ToString());
    AppendVector(sb, "LeftOrientation", pos.LeftOrientation.X.ToString(), pos.LeftOrientation.Y.ToString(), pos.LeftOrientation.Z.ToString());
}

/// <summary>
/// Writes one &lt;tag&gt;&lt;X&gt;..&lt;Y&gt;..&lt;Z&gt;..&lt;/tag&gt; vector element.
/// Components are passed pre-stringified so the caller controls formatting.
/// NOTE(review): components use the default ToString(), i.e. the current
/// culture — on locales with ',' decimal separators this may produce values
/// the daemon cannot parse; confirm against the daemon's number format.
/// </summary>
private static void AppendVector(StringBuilder sb, string tag, string x, string y, string z)
{
    sb.Append('<').Append(tag).Append('>');
    sb.Append(VoiceGateway.MakeXML("X", x));
    sb.Append(VoiceGateway.MakeXML("Y", y));
    sb.Append(VoiceGateway.MakeXML("Z", z));
    sb.Append("</").Append(tag).Append('>');
}
/// <summary>
/// Select the audio capture (microphone) device.
/// </summary>
/// <param name="CaptureDeviceSpecifier">Device name as returned by the Aux.GetCaptureDevices command</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int AuxSetCaptureDevice(string CaptureDeviceSpecifier)
{
    return Request("Aux.SetCaptureDevice.1", VoiceGateway.MakeXML("CaptureDeviceSpecifier", CaptureDeviceSpecifier));
}
/// <summary>
/// Stop the audio render process started by SessionRenderAudioStart.
/// </summary>
/// <param name="SoundFilePath">The fully qualified path given in the start render command</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int SessionRenderAudioStop(string SoundFilePath)
{
    return Request("Session.RenderAudioStop.1", VoiceGateway.MakeXML("SoundFilePath", SoundFilePath));
}
/// <summary>
/// Log out a user session. Only call with a valid AccountHandle.
/// </summary>
/// <param name="AccountHandle">Handle returned from a successful login request</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int AccountLogout(string AccountHandle)
{
    return Request("Account.Logout.1", VoiceGateway.MakeXML("AccountHandle", AccountHandle));
}
/// <summary>
/// Start the audio capture process, causing AuxAudioProperty events to be
/// raised (usable for a microphone VU meter on the selected capture device).
/// Should not be issued while the user is on a call.
/// </summary>
/// <param name="Duration">Unused by the daemon but required by the protocol</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int AuxCaptureAudioStart(int Duration)
{
    return Request("Aux.CaptureAudioStart.1", VoiceGateway.MakeXML("Duration", Duration.ToString()));
}
/// <summary>
/// Creates the voice gateway on first use, sizes the progress bar to the
/// number of connection stages, hooks client events, and starts the gateway.
/// </summary>
private void Start()
{
    if (null == gateway)
    {
        gateway = new VoiceGateway(this.instance.Client);
    }

    // SessionRunning is the final state, so it doubles as the bar's maximum.
    progressBar1.Maximum = (int)VoiceGateway.ConnectionState.SessionRunning;
    SetProgress(0);

    RegisterClientEvents();
    gateway.Start();
}
/// <summary>
/// Tears down the voice gateway: clears the participant list, stops the
/// gateway, unhooks client events, and resets the progress indicator.
/// Safe to call when the gateway was never started.
/// </summary>
private void Stop()
{
    participants.Clear();

    // Guard against a double Stop (or Stop before Start): the original
    // dereferenced 'gateway' unconditionally and could throw NRE.
    if (gateway != null)
    {
        gateway.Stop();
        UnregisterClientEvents();
        gateway = null;
    }

    session = null;
    SetProgress(VoiceGateway.ConnectionState.None);
    // Removed the explicit GC.Collect(): forcing a full collection here only
    // stalls the UI thread; dropped references are reclaimed normally.
}
/// <summary>
/// Render-device list arrived from the daemon; marshal to the UI thread and
/// populate the speaker list.
/// </summary>
private void vgate_OnAuxGetRenderDevicesResponse(object sender, VoiceGateway.VoiceDevicesEventArgs e)
{
    BeginInvoke(new MethodInvoker(delegate() { LoadSpeakers(e.Devices); }));
}
/// <summary>
/// Reflects the voice connection state on the progress bar: green when fully
/// running (also loads the saved volume), yellow past the halfway mark,
/// red otherwise.
/// </summary>
/// <param name="s">Current gateway connection state</param>
void SetProgress(VoiceGateway.ConnectionState s)
{
    int stage = (int)s;

    if (stage == progressBar1.Maximum)
    {
        progressBar1.ForeColor = Color.Green;
        LoadConfigVolume();
    }
    else
    {
        progressBar1.ForeColor = stage > (progressBar1.Maximum / 2) ? Color.Yellow : Color.Red;
    }

    progressBar1.Value = stage;
}
/// <summary>
/// Set the speaker volume during the audio tuning process. Once an acceptable
/// level is found, issue a Connector set-speaker-volume command to apply that
/// level to voice calls.
/// </summary>
/// <param name="Level">Speaker volume, -100 to 100 inclusive</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int AuxSetSpeakerLevel(int Level)
{
    return Request("Aux.SetSpeakerLevel.1", VoiceGateway.MakeXML("Level", Level.ToString()));
}
/// <summary>
/// Account login response: record the account handle, report the state
/// transition, and kick off parcel-specific voice setup.
/// </summary>
void connector_OnAccountLoginResponse(object sender, VoiceGateway.VoiceAccountEventArgs e)
{
    Logger.Log(string.Format("Account Login {0}", e.Message), Helpers.LogLevel.Info);

    accountHandle = e.AccountHandle;
    ReportConnectionState(ConnectionState.AccountLogin);
    ParcelChanged();
}
/// <summary>
/// Handle creation of the Connector. On success, caches the connector handle
/// and proceeds to account login (STEP 4); on failure, logs the error.
/// </summary>
void connector_OnConnectorCreateResponse(
    object sender,
    VoiceGateway.VoiceConnectorEventArgs e)
{
    // FIX: the original logged "started", cached the handle, and only then
    // bailed on a non-zero status — silently, with a stale handle cached.
    if (e.StatusCode != 0)
    {
        Logger.Log("Voice daemon protocol failed to start " + e.Message, Helpers.LogLevel.Error);
        return;
    }

    Logger.Log("Voice daemon protocol started " + e.Message, Helpers.LogLevel.Info);
    connectionHandle = e.Handle;

    // STEP 4
    AccountLogin(
        connectionHandle,
        voiceUser,
        voicePassword,
        "VerifyAnswer",   // This can also be "AutoAnswer"
        "",               // Default account management server URI
        10,               // Throttle state changes
        true);            // Enable buddies and presence
}
/// <summary>
/// Handle the audio output (render) device query response by caching the
/// device list and the currently selected playback device.
/// </summary>
void connector_OnAuxGetRenderDevicesResponse(object sender, VoiceGateway.VoiceDevicesEventArgs e)
{
    currentPlaybackDevice = e.CurrentDevice;
    outputDevices = e.Devices;
}
/// <summary>
/// Handle the audio input (capture) device query response by caching the
/// device list and the currently selected capture device.
/// </summary>
void connector_OnAuxGetCaptureDevicesResponse(object sender, VoiceGateway.VoiceDevicesEventArgs e)
{
    currentCaptureDevice = e.CurrentDevice;
    inputDevices = e.Devices;
}
/// <summary>
/// Handle miscellaneous request status: anything with a non-zero status code
/// is logged as an error; successes are ignored.
/// </summary>
/// <param name="sender">Originator of the response (included in the log line)</param>
/// <param name="e">Response details from the daemon</param>
void connector_OnVoiceResponse(object sender, VoiceGateway.VoiceResponseEventArgs e)
{
    if (e.StatusCode == 0)
        return;

    // FIX: the original wrote 'e.Message + " on " + sender as string' — 'as'
    // binds to the whole concatenation (a no-op cast), not to 'sender' as
    // presumably intended. Format explicitly instead; sender's ToString()
    // output is preserved, matching the code's actual prior behavior.
    Logger.Log(string.Format("{0} on {1}", e.Message, sender), Helpers.LogLevel.Error);
}
/// <summary>
/// Shows a human-readable status message for each voice connection state.
/// Re-invokes itself on the UI thread when raised from a worker thread.
/// </summary>
/// <param name="state">New gateway connection state</param>
private void vgate_OnVoiceConnectionChange(VoiceGateway.ConnectionState state)
{
    if (InvokeRequired)
    {
        BeginInvoke(new MethodInvoker(() => vgate_OnVoiceConnectionChange(state)));
        return;
    }

    try
    {
        string status = string.Empty;
        switch (state)
        {
            case VoiceGateway.ConnectionState.AccountLogin:
                status = "Logging In...";
                break;
            case VoiceGateway.ConnectionState.ConnectorConnected:
                status = "Connected...";
                break;
            case VoiceGateway.ConnectionState.DaemonConnected:
                status = "Daemon Connected. Starting...";
                break;
            case VoiceGateway.ConnectionState.DaemonStarted:
                status = "Daemon Started. Please wait...";
                break;
            case VoiceGateway.ConnectionState.SessionRunning:
                status = "Session Started & Ready";
                break;
        }
        label18.Text = status;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "METAbolt");
    }
}
/// <summary>
/// Shut down the Connector. Should be called as the application exits to
/// gracefully release resources.
/// </summary>
/// <param name="ConnectorHandle">Handle returned from a successful Connector 'create' request</param>
/// <returns>Result of submitting the request to the daemon</returns>
public int ConnectorInitiateShutdown(string ConnectorHandle)
{
    return Request("Connector.InitiateShutdown.1", VoiceGateway.MakeXML("ConnectorHandle", ConnectorHandle));
}
/// <summary>
/// Render-device list arrived; marshal to the UI thread and fill the
/// speaker-device control, preselecting the current device.
/// </summary>
void gateway_OnAuxGetRenderDevicesResponse(object sender, VoiceGateway.VoiceDevicesEventArgs e)
{
    BeginInvoke(new MethodInvoker(delegate() { LoadSpkrDevices(e.Devices, e.CurrentDevice); }));
}
/// <summary>
/// Connection-state change from the gateway; marshal to the UI thread and
/// update the progress display.
/// </summary>
void gateway_OnVoiceConnectionChange(VoiceGateway.ConnectionState state)
{
    BeginInvoke(new MethodInvoker(delegate() { SetProgress(state); }));
}
/// <summary>
/// Toggles the voice subsystem when the voice checkbox changes: checked
/// creates and starts a VoiceGateway with its event handlers wired up;
/// unchecked mutes, stops, disposes, and unhooks it.
/// </summary>
private void checkBox5_CheckedChanged(object sender, EventArgs e)
{
    // Bail out silently unless the voice daemon and all of its native
    // libraries are present on disk.
    if (!this.CheckVoiceSetupFile("SLVoice.exe")) return;
    if (!this.CheckVoiceSetupFile("alut.dll")) return;
    //if (!this.CheckVoiceSetupFile("openal32.dll")) return;
    if (!this.CheckVoiceSetupFile("ortp.dll")) return;
    if (!this.CheckVoiceSetupFile("vivoxsdk.dll")) return;
    if (!this.CheckVoiceSetupFile("wrap_oal.dll")) return;
    if (checkBox5.Checked)
    {
        // Turning voice ON — refused when the current parcel disallows voice.
        if (!instance.AllowVoice)
        {
            label18.Text = "Voice is disabled on this parcel";
            return;
        }
        try
        {
            // Create a fresh gateway, subscribe to its events, then start
            // the daemon connection.
            vgate = new VoiceGateway(client);
            vgate.OnVoiceConnectionChange += new VoiceGateway.VoiceConnectionChangeCallback(vgate_OnVoiceConnectionChange);
            vgate.OnAuxGetCaptureDevicesResponse += new EventHandler<VoiceGateway.VoiceDevicesEventArgs>(vgate_OnAuxGetCaptureDevicesResponse);
            vgate.OnAuxGetRenderDevicesResponse += new EventHandler<VoiceGateway.VoiceDevicesEventArgs>(vgate_OnAuxGetRenderDevicesResponse);
            vgate.OnSessionCreate += new EventHandler(vgate_OnSessionCreate);
            vgate.Start();
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message, "METAbolt");
        }
    }
    else
    {
        // Turning voice OFF.
        // NOTE(review): this parcel check also blocks teardown on no-voice
        // parcels, which may leave a running gateway behind — confirm intended.
        if (!instance.AllowVoice)
        {
            label18.Text = "Voice is disabled on this parcel";
            return;
        }
        try
        {
            // NOTE(review): if the box is unchecked before voice was ever
            // started, vgate may be null here and this will throw into the
            // catch below — verify callers prevent that.
            vgate.MicMute = true;
            vgate.Stop();
            vgate.Dispose();
            EnableVoice(false);
            cboRender.Items.Clear();
            cboCapture.Items.Clear();
            // Unsubscribing with freshly constructed delegates works because
            // delegate equality compares target + method.
            // NOTE(review): these unsubscriptions run AFTER Dispose(); harmless
            // if Dispose clears the events itself, but verify.
            vgate.OnVoiceConnectionChange -= new VoiceGateway.VoiceConnectionChangeCallback(vgate_OnVoiceConnectionChange);
            vgate.OnAuxGetCaptureDevicesResponse -= new EventHandler<VoiceGateway.VoiceDevicesEventArgs>(vgate_OnAuxGetCaptureDevicesResponse);
            vgate.OnAuxGetRenderDevicesResponse -= new EventHandler<VoiceGateway.VoiceDevicesEventArgs>(vgate_OnAuxGetRenderDevicesResponse);
            vgate.OnSessionCreate -= new EventHandler(vgate_OnSessionCreate);
            // Keep the companion 'Voice ON' checkbox in sync for the user.
            if (!checkBox3.Checked) { checkBox3.Checked = true; }
            checkBox5.ForeColor = Color.Black;
            label18.Text = "Check 'Voice ON' box below. Then on 'Session start' unmute MIC to talk";
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message, "METAbolt");
        }
    }
}