/// <summary>
/// Sets up the stats timer and the local/remote media connections, adds the
/// video and screen-share media tracks, configures video encode parameters,
/// and starts SDP negotiation by creating an offer on the local side.
/// </summary>
private void Button_Click(object sender, RoutedEventArgs e)
{
    // BUGFIX: on repeated clicks the old timer was leaked and its Elapsed
    // handler kept firing alongside the new one. Stop and dispose it first.
    if (mTimer != null)
    {
        mTimer.Stop();
        mTimer.Dispose();
    }
    mTimer = new Timer(1000);
    mTimer.Elapsed += new ElapsedEventHandler(Stats_Timer);
    mTimer.Enabled = true; // Enable it

    mConnLocal = new MediaConnection();
    mConnRemote = new MediaConnection();
    mConnLocal.StartMediaLogging(1000);
    mConnLocal.SetSink(this);
    mConnRemote.SetSink(this);

    // Add video and screen-share tracks on both ends. The numeric argument is
    // presumably a track/label id — TODO confirm against MediaConnection docs.
    var rv = mConnLocal.AddMedia(WmeSessionType.WmeSessionType_Video, WmeDirection.WmeDirection_SendRecv, 0, "");
    rv = mConnRemote.AddMedia(WmeSessionType.WmeSessionType_Video, WmeDirection.WmeDirection_SendRecv, 1000, "");
    rv = mConnLocal.AddMedia(WmeSessionType.WmeSessionType_ScreenShare, WmeDirection.WmeDirection_SendRecv, 2, "");
    rv = mConnRemote.AddMedia(WmeSessionType.WmeSessionType_ScreenShare, WmeDirection.WmeDirection_SendRecv, 2000, "");

    //mConnLocal.GetAudioConfig(1).EnableEC(true);
    //mConnLocal.GetVideoConfig(0).SetSelectedCodec(WmeCodecType.WmeCodecType_AVC);

    // Configure encode parameters for both AVC and SVC codecs.
    // 0x42000c is an H.264 profile-level-id (Baseline) — TODO confirm.
    mConnLocal.GetVideoConfig(0).SetEncodeParams(WmeCodecType.WmeCodecType_AVC, 0x42000c, 27600, 920, 200, 4000);
    mConnLocal.GetVideoConfig(0).SetEncodeParams(WmeCodecType.WmeCodecType_SVC, 0x42000c, 27600, 920, 200, 4000);

    //mConnLocal.GetGlobalConfig().EnableMultiStream(true);
    //mConnRemote.GetGlobalConfig().EnableMultiStream(true);

    // Kick off the offer/answer negotiation.
    mConnLocal.CreateOffer();
}
// private int _packetsSent;

/// <summary>
/// Runs one captured audio buffer through the outbound processing chain
/// (custom input filter, optional DTX filtering, volume scaling), encodes it,
/// and sends the resulting packet over the media connection.
/// </summary>
/// <param name="audioContext">Holds the encoder, DTX filter, and send buffer for this stream.</param>
/// <param name="audioBuffer">Raw 16-bit PCM samples captured from the microphone.</param>
private void PrepareAndSendAudioPacket(AudioContext audioContext, short[] audioBuffer)
{
    // Apply any custom filters.
    ApplyAudioInputFilter(audioBuffer);

    //if (++_packetsSent % 100 == 0)
    //{
    //	DebugHelper.AnalyzeAudioFrame("MediaController_PrepareAndSendAudioPacket", audioBuffer, 0, audioBuffer.Length);
    //}

    // NOTE(review): the EnableDenoise flag gates the DTX filter here — confirm
    // that naming mismatch is intentional. The filter must run BEFORE IsSilent
    // is read below, since it presumably updates that flag as a side effect.
    if (MediaConfig.EnableDenoise)
    {
        audioContext.DtxFilter.Filter(audioBuffer);
    }

    // Check to see if we've succeeded in sending some audio.
    // Latches to true the first time a non-silent frame is seen.
    if (!AudioSentSuccessfully)
    {
        AudioSentSuccessfully = !audioContext.DtxFilter.IsSilent;
    }

    // Set the volume.
    if (MediaConfig.ApplyVolumeFilterToRecordedSound)
    {
        ApplyVolumeFilter(MicrophoneVolume, audioBuffer, 0, audioBuffer.Length);
    }

    // Compress the audio onto the audioEncodeBuffer.
    LastAudioEncoder = audioContext.Encoder.CodecType;
    int length = audioContext.Encoder.Encode(audioBuffer, 0, audioBuffer.Length, audioContext.SendBuffer, audioContext.DtxFilter.IsSilent);

    // Send the packet. The silence flag travels with the packet so the far
    // end can handle DTX frames; processor load is sent for adaptive quality.
    MediaConnection.SendAudioPacket(audioContext.SendBuffer, length, audioContext.Encoder.CodecType, audioContext.DtxFilter.IsSilent, (int)_mediaEnvironment.LocalProcessorLoad);
    Logger.LogAudioFrameTransmitted(audioContext.DtxFilter.IsSilent);
}
/// <summary>
/// Computes the next scheduled run time (either a fixed daily time or a
/// repeating interval, per app settings) and arms the one-shot timer to fire
/// then. On any failure the host Windows Service is stopped.
/// </summary>
public void ScheduleService()
{
    try
    {
        Schedular = new Timer(new TimerCallback(SchedularCallback));
        string connectionstring = ConfigurationManager.ConnectionStrings["ConnStringDb"].ToString();
        MediaConnection.Instance().ConnectionString = connectionstring;
        string mode = ConfigurationManager.AppSettings["Mode"];

        //Set the Default Time.
        DateTime scheduledTime = DateTime.MinValue;

        // BUGFIX: the DAILY branch used a case-sensitive comparison while the
        // INTERVAL branch used ToUpper(); compare both case-insensitively.
        if (string.Equals(mode, "DAILY", StringComparison.OrdinalIgnoreCase))
        {
            //Get the Scheduled Time from AppSettings.
            scheduledTime = DateTime.Parse(ConfigurationManager.AppSettings["ScheduledTime"]);
            if (DateTime.Now > scheduledTime)
            {
                //If Scheduled Time is passed set Schedule for the next day.
                scheduledTime = scheduledTime.AddDays(1);
            }
        }

        if (string.Equals(mode, "INTERVAL", StringComparison.OrdinalIgnoreCase))
        {
            //Get the Interval in Minutes from AppSettings.
            int intervalMinutes = Convert.ToInt32(ConfigurationManager.AppSettings["IntervalMinutes"]);

            //Set the Scheduled Time by adding the Interval to Current Time.
            scheduledTime = DateTime.Now.AddMinutes(intervalMinutes);
            if (DateTime.Now > scheduledTime)
            {
                //If Scheduled Time is passed set Schedule for the next Interval.
                scheduledTime = scheduledTime.AddMinutes(intervalMinutes);
            }
        }

        TimeSpan timeSpan = scheduledTime.Subtract(DateTime.Now);
        string schedule = string.Format("{0} day(s) {1} hour(s) {2} minute(s) {3} seconds(s)", timeSpan.Days, timeSpan.Hours, timeSpan.Minutes, timeSpan.Seconds);

        // NOTE(review): this notification email looks like leftover test code
        // (hard-coded recipient/subject) — confirm it should ship.
        // BUGFIX: wrap the context in using so it is disposed even on failure.
        using (var obj = new DBMContext())
        {
            obj.SendEmail(new EMAIL { Email = "*****@*****.**", Message = "Hi", Message_Code = "", MessageSubject = "Hi", ContactNo = "" });
        }

        //Get the difference in Milliseconds between the Scheduled and Current Time.
        int dueTime = Convert.ToInt32(timeSpan.TotalMilliseconds);

        //Change the Timer's Due Time (one-shot: no periodic repeat).
        Schedular.Change(dueTime, Timeout.Infinite);
    }
    catch (Exception)
    {
        // NOTE(review): the exception is intentionally not rethrown, but it
        // should at least be logged before the service is stopped.
        //Stop the Windows Service.
        using (System.ServiceProcess.ServiceController serviceController = new System.ServiceProcess.ServiceController("SimpleService"))
        {
            serviceController.Stop();
        }
    }
}
/// <summary>
/// Checks whether this Data Connection is in a usable state.
/// </summary>
/// <returns>True if every required component is present and valid; otherwise false.</returns>
public bool Validate()
{
    // The underlying media connection is always required.
    if (MediaConnection == null)
    {
        return false;
    }

    // A generic poller is only mandatory when the connection relies on one.
    if (Poller == null && MediaConnection.UsesGenericPollers)
    {
        return false;
    }

    // Likewise a generic converter.
    if (Converter == null && MediaConnection.UsesGenericConverters)
    {
        return false;
    }

    // Finally, every component that is present must validate itself.
    return MediaConnection.Validate()
        && (Poller == null || Poller.Validate())
        && (Converter == null || Converter.Validate());
}
/// <summary>
/// Accepts incoming data from a previous connection.
/// If this is an input, it will throw.
/// </summary>
/// <param name="core">The core pipeline driving this node.</param>
/// <param name="data">The data being pushed from the previous node</param>
/// <exception cref="System.InvalidOperationException">Thrown when this node is an input.</exception>
public void AcceptIncomingData(StreamlineCore core, DataPacket data)
{
    // BUGFIX: throw the specific InvalidOperationException rather than the
    // bare System.Exception (still caught by any existing catch (Exception)).
    if (!IsOutput)
    {
        throw new System.InvalidOperationException("An input cannot accept data from other program nodes.");
    }

    // Don't accept anything if not enabled; clear so the packet isn't reused.
    if (!Enabled)
    {
        data.Clear();
        return;
    }

    // Check if the data can be written directly
    if (Converter == null && MediaConnection.CanWriteDirect)
    {
        // Send the data directly
        MediaConnection.WriteDirect(data);
    }
    else
    {
        // NOTE(review): if Converter is null and CanWriteDirect is false this
        // dereferences null — confirm that combination is impossible upstream.
        var encodedData = Converter.EncodeData(data);

        // Pass it on to be output
        MediaConnection.Write(encodedData, 0, encodedData.Length);
    }
}
/// <summary>
/// Polls the data connection for any new data.
/// This is specifically for the Polling Mechanism
/// </summary>
/// <param name="core">The core pipeline that receives the polled data.</param>
/// <param name="pollCount">Number of points/bytes requested per poll.</param>
public void Poll(StreamlineCore core, int pollCount)
{
    // Check if we can read directly (no converter needed and the connection
    // supports it) — then skip the buffering path entirely.
    if (Converter == null && MediaConnection.CanReadDirect)
    {
        // Read a data packet directly from the connection
        core.PassDataToNextConnectable(this, MediaConnection.ReadDirect(pollCount));
    }
    else
    {
        // Only grab new points if we went through the decoded data
        // (i.e. the leftover buffer can't satisfy pollCount on every channel).
        if (!_leftoverDecodedData.MinCountOnAllChannels(pollCount))
        {
            // Grab all available bytes, and pass it to the decoder
            var data = MediaConnection.ReadToEnd(pollCount);
            // Accumulate onto any raw bytes the decoder didn't consume last time.
            if (_leftoverInputData != null)
            {
                _leftoverInputData = _leftoverInputData.Concat(data);
            }
            else
            {
                _leftoverInputData = data;
            }

            // DecodeData takes _leftoverInputData by ref — presumably it
            // consumes what it can and leaves the remainder; TODO confirm.
            _leftoverDecodedData.Add(Converter.DecodeData(ref _leftoverInputData));
        }

        // Pass the data on to the next step (pops pollCount points per channel).
        core.PassDataToNextConnectable(this, _leftoverDecodedData.PopSubPacket(pollCount));
    }
}
/// <summary>
/// Connects to the media server.
/// </summary>
/// <param name="roomId">The room on the media server</param>
/// <param name="callback">An optional callback to be called when connection is finished.</param>
/// <remarks>
/// The connection process is complicated, due to the fact that we have to first connect to the control port (4521),
/// and then if that is successful, we then connect to the data port (4522). And of course, both connection attempts are
/// asynchronous. The result is that the logic in a successful connection attempt flows like this:
///   Connect() -> controlClient.Connect() =>
///   HandleControlConnect() -> RegisterClientOnServer() -> controlClient.Send() =>
///   HandleControlData() -> rtpClient.Connect() =>
///   HandleRtpConnect() -> rtpClient.Send() =>
///   HandleRtpData() -> FinalizeConnection() -> connectionCallback()
/// An error anywhere in that flow will result in control being transferred to FinalizeConnection().
/// </remarks>
public virtual void Connect(string roomId, Action<Exception> callback = null)
{
    // When inactive, skip the network entirely and report immediate,
    // error-free completion to the caller.
    if (!_isActive)
    {
        if (callback != null)
        {
            callback(null);
        }
        return;
    }

    MediaConnection.Connect(roomId, callback);
    _roomId = roomId;
}
/// <summary>
/// Transmit loop: waits for an encoded-frame signal, encodes the most recent
/// captured frame, and sends each resulting chunk over the media connection.
/// </summary>
protected virtual void TransmitVideo()
{
    // Video frames will almost always be split up into multiple chunks, each of which will have multiple blocks.
    // The chunks that are ready for transmission are stored in the video codec's encodedChunks buffer.
    while (_videoEncodeResetEvent.WaitOne() && _isActive)
    {
        _videoEncodeResetEvent.Reset();
        if (_lastVideoFrame == null)
        {
            continue;
        }

        // Encode the latest frame, then release our reference so a stale
        // frame is never re-encoded on the next wakeup.
        _videoEncoder.EncodeFrame(_lastVideoFrame, _lastStride);
        _lastVideoFrame = null;

        var buffer = _videoBufferPool.GetNext();
        try
        {
            bool moreChunks = true;
            while (moreChunks)
            {
                buffer.Reset();
                if (_videoEncoder.GetNextChunk(buffer, out moreChunks))
                {
                    MediaConnection.SendVideoPacket(buffer);
                    Logger.LogVideoPacketSent(buffer);
                }
            }
        }
        catch (Exception ex)
        {
            // BUGFIX: was ClientLogger.Debug(ex.ToString) — passing the
            // method group instead of the formatted exception string.
            ClientLogger.Debug(ex.ToString());
        }
        finally
        {
            // Always return the buffer to the pool, even on failure.
            _videoBufferPool.Recycle(buffer);
        }
    }
}
/// <summary>
/// Loaded handler: opens a connection to the local Silverlight media gateway
/// and subscribes to its connection-state changes.
/// </summary>
void CWindowConnection_Loaded(object sender, RoutedEventArgs e)
{
    connection = new MediaConnection("127.0.0.1:4502/SilverlightMediaGateway");
    // Method-group subscription; equivalent to new EventHandler<...>(...).
    connection.ConnectionStateChanged += connection_ConnectionStateChanged;
}
/// <summary>
/// Initializes GUI component values after a successful connection and creates
/// a new MediaStreamSender instance for sending media data through the server.
/// </summary>
/// <param name="sender">The connection window raising the event.</param>
/// <param name="e">Carries the established <see cref="MediaConnection"/>.</param>
void conWindow_ConnectedSuccessfully(object sender, GenericEventArgs<MediaConnection> e)
{
    connection = e.Item;
    rectOffline.Visibility = System.Windows.Visibility.Collapsed;
    lblNickName.Text = conWindow.txtNickName.Text;
    // BUGFIX: user-visible typo — "successfuly" -> "successfully".
    txtChatLog.Text += "Connected successfully.\n";
    connection.Client = this;
    connection.InvokeOnConnection("GetConnectedClients", lblNickName.Text);

    // Start publishing this client's media stream under its nickname.
    streamSender = new MediaStreamSender(connection);
    streamSender.StreamStateChanged += new EventHandler<GenericEventArgs<StreamState>>(streamSender_StreamStateChanged);
    streamSender.Publish(lblNickName.Text);
}