// Shuts the source down: tears down the event queue, the network adapter,
// and every owned stream, then moves the source to the Shutdown state.
// Fixes: the original called _spEventQueue.Shutdown() a second time,
// unconditionally, after the null-checked call — an NRE when the queue was
// never created — and dereferenced an unchecked `as` cast per stream.
public HResult Shutdown()
{
    HResult hr = CheckShutdown();
    if (MFError.Succeeded(hr))
    {
        if (_spEventQueue != null)
        {
            _spEventQueue.Shutdown();
        }
        if (_networkStreamAdapter != null)
        {
            _networkStreamAdapter.Close();
        }
        foreach (var stream in _streams)
        {
            // Skip (rather than crash on) any entry that is not a MediaStream.
            (stream as MediaStream)?.Shutdown();
        }
        _eSourceState = SourceState.SourceState_Shutdown;
        _streams.Clear();
        _networkStreamAdapter = null;
    }
    return hr;
}
// Starts transmitting from an owned audio source: resets the per-source
// sequence counter (creating codec state on first use), notifies the server,
// then raises AudioSourceStarted locally.
public void BeginSending(AudioSource source)
{
    // Guard clauses: a valid, caller-owned source is required.
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    if (source.OwnerId != this.context.CurrentUser.UserId)
    {
        throw new ArgumentException("Can not send audio from a source you don't own", "source");
    }

    lock (this.sources)
    {
        SourceState tracking;
        bool known = this.sources.TryGetValue(source, out tracking);
        if (!known)
        {
            tracking = new SourceState();
            this.sources[source] = tracking;
        }

        if (tracking.Codec == null)
        {
            tracking.Codec = new AudioCodec(source.CodecSettings);
        }

        // Every new transmission restarts its sequence numbering.
        tracking.Sequence = 0;
    }

    // Tell the server we are starting, then raise the local event.
    this.context.Connection.SendAsync(new ClientAudioSourceStateChangeMessage { Starting = true, SourceId = source.Id });
    OnAudioSourceStarted(new AudioSourceEventArgs(source));
}
// Stops the stream: clears pending sample requests and queued samples, then
// queues MEStreamStopped to notify the pipeline.
public HResult Stop()
{
    HResult hr = HResult.S_OK;
    // _spSource guards the stream's shared state (tokens, samples, state flag).
    lock (_spSource)
    {
        hr = CheckShutdown();
        if (MFError.Succeeded(hr))
        {
            if (_eSourceState == SourceState.SourceState_Started)
            {
                _eSourceState = SourceState.SourceState_Stopped;
                // Drop any queued sample requests and pending samples.
                _tokens.Clear();
                _samples.Clear();
                // Inform the client that we've stopped.
                hr = QueueEvent(MediaEventType.MEStreamStopped, Guid.Empty, HResult.S_OK, null);
            }
            else
            {
                // Stop is only valid from the Started state.
                hr = HResult.MF_E_INVALID_STATE_TRANSITION;
            }
        }
        if (MFError.Failed(hr))
        {
            HandleError(hr);
        }
        // NOTE(review): failures are reported asynchronously via HandleError and
        // the method deliberately(?) returns S_OK rather than hr — confirm callers
        // rely on this before changing it.
        return (HResult.S_OK);
    }
}
// Scans sourceCode and returns the colorizer state at the end of the text,
// so an open multi-line comment can carry over to the next chunk.
public virtual SourceState GetStateForText(string sourceCode, SourceState state)
{
    if (sourceCode != null)
    {
        this._state = SourceState.STATE_COLOR_NORMAL;
        Context sourceContext = new Context(null, sourceCode);
        this._scanner.SetSource(sourceContext);
        // If we entered inside a block comment and SkipMultiLineComment runs past
        // the end of the text, the whole chunk stays in the comment state
        // (presumably the comment never terminates — confirm scanner contract).
        if ((SourceState.STATE_COLOR_COMMENT == state) && (this._scanner.SkipMultiLineComment() > sourceCode.Length))
        {
            this._state = SourceState.STATE_COLOR_COMMENT;
            return (this._state);
        }
        this._scanner.GetNextToken();
        JSToken none = JSToken.None;
        // Walk all tokens; remember the last one seen before EOF.
        while (sourceContext.GetToken() != JSToken.EndOfFile)
        {
            none = sourceContext.GetToken();
            this._scanner.GetNextToken();
        }
        // An unterminated comment at the end leaves us in the comment state.
        if (JSToken.UnterminatedComment == none)
        {
            this._state = SourceState.STATE_COLOR_COMMENT;
        }
    }
    // Null input: keep whatever state was last computed.
    return (this._state);
}
// Scans sourceCode and returns the colorizer state at the end of the text,
// so an open multi-line comment can carry over to the next chunk.
public virtual SourceState GetStateForText(string sourceCode, SourceState state)
{
    // Null input: keep whatever state was last computed.
    if (sourceCode == null)
        return this._state;

    this._state = SourceState.STATE_COLOR_NORMAL;
    Context scanContext = new Context(null, sourceCode);
    this._scanner.SetSource(scanContext);

    // An open block comment that never terminates swallows the whole chunk.
    if (state == SourceState.STATE_COLOR_COMMENT && this._scanner.SkipMultiLineComment() > sourceCode.Length)
    {
        this._state = SourceState.STATE_COLOR_COMMENT;
        return this._state;
    }

    this._scanner.GetNextToken();
    JSToken lastToken = JSToken.None;
    // Walk every token; remember the last one seen before EOF.
    for (JSToken current = scanContext.GetToken(); current != JSToken.EndOfFile; current = scanContext.GetToken())
    {
        lastToken = current;
        this._scanner.GetNextToken();
    }

    // An unterminated comment at the end leaves us in the comment state.
    if (lastToken == JSToken.UnterminatedComment)
        this._state = SourceState.STATE_COLOR_COMMENT;

    return this._state;
}
// Handles an incoming server audio-data message: resolves the source, fills in
// frames for any dropped packets, decodes the payload, and raises ReceivedAudio.
internal void OnServerAudioDataMessage(MessageEventArgs <ServerAudioDataMessage> e)
{
    var source = this.manager[e.Message.SourceId];
    // Unknown or ignored sources are dropped silently.
    if (source == null || this.manager.GetIsIgnored(source))
    {
        return;
    }
    int skipped;
    SourceState state;
    lock (this.sources)
    {
        // Lazily create per-source decode state on first sight of this source.
        if (!this.sources.TryGetValue(source, out state))
        {
            this.sources[source] = state = new SourceState();
            state.Codec = new AudioCodec(source.CodecSettings);
        }
        // Number of frames lost between the previous message and this one.
        skipped = e.Message.Sequence - state.Sequence - 1;
        // We can't wait around for the start signal, and the first message
        // in the sequence might be dropped. We'll just assume a new stream
        // if we give a _lower_ sequence than the last one.
        if (skipped < 0)
        {
            skipped = e.Message.Sequence - 1;
        }
        state.Sequence = e.Message.Sequence;
    }
    // Audio from ignored users is also discarded.
    var user = this.context.Users[source.OwnerId];
    if (user == null || this.context.Users.GetIsIgnored(user))
    {
        return;
    }
    int defaultSize = source.CodecSettings.GetBytes(source.CodecSettings.FrameSize);
    byte[][] data = e.Message.Data;
    byte[][] decoded = new byte[data.Length + skipped][];
    // Decode null frames for the lost packets first (presumably the codec's
    // packet-loss concealment path — confirm AudioCodec.Decode(null, n) semantics)...
    for (int i = 0; i < skipped; i++)
    {
        decoded[i] = state.Codec.Decode(null, defaultSize);
    }
    // ...then decode the frames that actually arrived, offset past the gap.
    for (int i = skipped; i < decoded.Length; i++)
    {
        byte[] frame = data[i - skipped];
        decoded[i] = state.Codec.Decode(frame, frame.Length);
    }
    OnReceivedAudio(new ReceivedAudioEventArgs(source, decoded));
}
// Verifies that restoring a stored SourceState rewinds the source so the
// same text can be consumed again.
public void StoreAndRestore()
{
    // Arrange: a source whose first word we will consume twice.
    Source source = new Source("hello world");
    SourceState checkpoint = source.Store();

    // Act + Assert: consuming advances past "hello"...
    ResultAssert.Captures("hello", "hello".Consume(ref source));

    // ...and restoring the checkpoint makes "hello" consumable again.
    source.Restore(checkpoint);
    ResultAssert.Captures("hello", "hello".Consume(ref source));
}
// Order-sensitive hash of (SourceState, Event, TargetState) using the
// conventional 397 multiply-and-xor mix; null components contribute 0.
public override int GetHashCode()
{
    unchecked
    {
        int hash = SourceState is null ? 0 : SourceState.GetHashCode();
        hash = (hash * 397) ^ (Event is null ? 0 : Event.GetHashCode());
        hash = (hash * 397) ^ (TargetState is null ? 0 : TargetState.GetHashCode());
        return hash;
    }
}
// Looks up the room state for (target, key, item); falls back to the
// default value when no entry exists.
StateValue GetRoomStateValue(SourceState state, ItemId itemId)
{
    var stateKey = GetStateKey(state.Target, state.Key, itemId);
    return roomStateRepository.TryGetValue(stateKey, out var stored)
        ? stored
        : StateValue.Default;
}
// Begins opening the network source. Only legal from the initial (Invalid)
// state; transitions to Opening and delegates the connect to the adapter.
public HResult Open(string ip, int port)
{
    // Re-opening an already opening/opened source is a usage error.
    if (_eSourceState != SourceState.SourceState_Invalid)
    {
        Throw(HResult.MF_E_INVALIDREQUEST);
    }

    // If everything is ok now we are waiting for network client to connect.
    // Change state to opening.
    _eSourceState = SourceState.SourceState_Opening;

    return _networkStreamAdapter.Open(ip, port);
}
// Streams suggested-action sets for the given span: waits for the workspace
// to finish loading, then yields the high-priority fixes/refactorings first
// so the user can act on them before the normal-priority set arrives.
public async IAsyncEnumerable <SuggestedActionSet> GetSuggestedActionsAsync(
    ISuggestedActionCategorySet requestedActionCategories,
    SnapshotSpan range,
    [EnumeratorCancellation] CancellationToken cancellationToken)
{
    AssertIsForeground();
    // Hold a reference on the underlying source; bail out if it is already gone.
    using var state = SourceState.TryAddReference();
    if (state is null)
    {
        yield break;
    }
    var workspace = state.Target.Workspace;
    if (workspace is null)
    {
        yield break;
    }
    var selection = TryGetCodeRefactoringSelection(state, range);
    // Suggested actions are only meaningful once the workspace is fully loaded.
    await workspace.Services.GetRequiredService <IWorkspaceStatusService>().WaitUntilFullyLoadedAsync(cancellationToken).ConfigureAwait(false);
    using (Logger.LogBlock(FunctionId.SuggestedActions_GetSuggestedActionsAsync, cancellationToken))
    {
        var document = range.Snapshot.GetOpenDocumentInCurrentContextWithChanges();
        if (document is null)
        {
            yield break;
        }
        // Compute and return the high pri set of fixes and refactorings first so the user
        // can act on them immediately without waiting on the regular set.
        var highPriSet = GetCodeFixesAndRefactoringsAsync(
            state, requestedActionCategories, document, range, selection, _ => null,
            CodeActionRequestPriority.High, cancellationToken).WithCancellation(cancellationToken).ConfigureAwait(false);
        await foreach (var set in highPriSet)
        {
            yield return (set);
        }
        var lowPriSet = GetCodeFixesAndRefactoringsAsync(
            state, requestedActionCategories, document, range, selection, _ => null,
            CodeActionRequestPriority.Normal, cancellationToken).WithCancellation(cancellationToken).ConfigureAwait(false);
        await foreach (var set in lowPriSet)
        {
            yield return (set);
        }
    }
}
// Builds this stream's media type and stream descriptor from the wire-format
// stream description plus its serialized attribute blob, validates the type,
// and leaves the stream in the Stopped state.
private void Initialize(StspStreamDescription pStreamDescription, IBufferPacket attributesBuffer)
{
    //Create the media event queue.
    ThrowIfError(MFExtern.MFCreateEventQueue(out _spEventQueue));
    IMFMediaType mediaType;
    IMFStreamDescriptor spSD;
    IMFMediaTypeHandler spMediaTypeHandler;
    _isVideo = (pStreamDescription.guiMajorType == MFMediaType.Video);
    //Create a media type object.
    ThrowIfError(MFExtern.MFCreateMediaType(out mediaType));
    // The buffer must contain at least cbAttributesSize bytes, and an empty
    // attribute blob is not a valid stream description.
    if (attributesBuffer.GetLength() < pStreamDescription.cbAttributesSize || pStreamDescription.cbAttributesSize == 0)
    {
        //Invalid stream description
        Throw(HResult.MF_E_UNSUPPORTED_FORMAT);
    }
    //Prepare buffer where we will copy attributes to, then initialize media type's attributes
    var pAttributes = Marshal.AllocHGlobal(pStreamDescription.cbAttributesSize);
    try
    {
        // Copy the serialized blob into unmanaged memory so MF can parse it.
        Marshal.Copy(attributesBuffer.TakeBuffer(pStreamDescription.cbAttributesSize), 0, pAttributes, pStreamDescription.cbAttributesSize);
        ThrowIfError(MFExtern.MFInitAttributesFromBlob(mediaType, pAttributes, pStreamDescription.cbAttributesSize));
    }
    finally
    {
        // Always release the unmanaged buffer, even if MF initialization throws.
        Marshal.FreeHGlobal(pAttributes);
    }
    Validation.ValidateInputMediaType(pStreamDescription.guiMajorType, pStreamDescription.guiSubType, mediaType);
    ThrowIfError(mediaType.SetGUID(MF_MT_MAJOR_TYPE, pStreamDescription.guiMajorType));
    ThrowIfError(mediaType.SetGUID(MF_MT_SUBTYPE, pStreamDescription.guiSubType));
    //Now we can create MF stream descriptor.
    ThrowIfError(MFExtern.MFCreateStreamDescriptor(pStreamDescription.dwStreamId, 1, new IMFMediaType[] { mediaType }, out spSD));
    ThrowIfError(spSD.GetMediaTypeHandler(out spMediaTypeHandler));
    //Set current media type
    ThrowIfError(spMediaTypeHandler.SetCurrentMediaType(mediaType));
    _spStreamDescriptor = spSD;
    _id = pStreamDescription.dwStreamId;
    // The stream starts out in the Stopped state (the original comment said
    // "started", which contradicted the code).
    _eSourceState = SourceState.SourceState_Stopped;
}
// AJAX handler: persists a new SourceState (only when a name was supplied)
// and re-renders the partial with the refreshed list of active states.
public IActionResult CreateSourceStateAjax(SourceState sourceState)
{
    if (sourceState.Name != null)
    {
        _db.SourceStates.Add(sourceState);
        _db.SaveChanges();
    }

    var model = new SourceStateViewModel
    {
        Source = new Source(),
        SourceStates = _db.SourceStates.Where(c => c.Active).ToList(),
    };

    return PartialView("PartialViews/SourceStatesPartial", model);
}
// Handles a follow event: records the source, then greets the new follower
// by display name when a profile can be resolved.
protected override async Task OnFollowAsync(FollowEvent ev)
{
    Log.WriteInfo($"SourceType:{ev.Source.Type}, SourceId:{ev.Source.Id}");

    // Remember this source so later events can be correlated with it.
    await SourceState.AddAsync(ev.Source.Type.ToString(), ev.Source.Id);

    // Resolve the follower's display name when an id is available.
    string userName = "";
    if (!string.IsNullOrEmpty(ev.Source.Id))
    {
        var profile = await MessagingClient.GetUserProfileAsync(ev.Source.Id);
        if (profile?.DisplayName != null)
        {
            userName = profile.DisplayName;
        }
    }

    await MessagingClient.ReplyMessageAsync(ev.ReplyToken, $"Hello {userName}! Thank you for following !");
}
/// <summary>
/// Converts the SourceState into a PlaybackState.
/// </summary>
/// <param name="state">The SourceState.</param>
/// <returns>PlaybackState.</returns>
public static PlaybackState ToPlaybackState(this SourceState state) => state switch
{
    SourceState.Paused => PlaybackState.Paused,
    SourceState.Playing => PlaybackState.Playing,
    // Initializing, Stopped, Uninitialized — and any unrecognized value —
    // all map to Stopped, exactly as the original fall-through did.
    _ => PlaybackState.Stopped,
};
// AJAX handler: soft-deletes a SourceState (marks it inactive rather than
// removing the row) and re-renders the partial with the active states.
public IActionResult DeleteSourceStateAjax(int id)
{
    var target = _db.SourceStates.FirstOrDefault(c => c.Id == id);
    if (target != null)
    {
        target.Active = false;
        _db.SaveChanges();
    }

    var model = new SourceStateViewModel()
    {
        Source = new Source(),
        SourceStates = _db.SourceStates.Where(c => c.Active).ToList(),
    };

    return PartialView("PartialViews/SourceStatesPartial", model);
}
// Tokenizes sourceCode into a list of token color infos, honoring a
// multi-line comment state carried over from the previous chunk.
public virtual ITokenEnumerator Colorize(string sourceCode, SourceState state)
{
    TokenColorInfoList list = new TokenColorInfoList();
    this._state = SourceState.STATE_COLOR_NORMAL;
    if (sourceCode.Length > 0)
    {
        Context sourceContext = new Context(null, sourceCode);
        this._scanner.SetSource(sourceContext);
        try
        {
            // If we start inside a block comment, skip to its end — or to the
            // end of the text if it never terminates.
            if (SourceState.STATE_COLOR_COMMENT == state)
            {
                int length = this._scanner.SkipMultiLineComment();
                if (length > sourceCode.Length)
                {
                    // Comment runs past this chunk: stay in the comment state.
                    this._state = SourceState.STATE_COLOR_COMMENT;
                    length = sourceCode.Length;
                }
                list.Add(sourceContext);
                // The entire chunk was comment; nothing more to scan.
                if (length == sourceCode.Length)
                {
                    return (list);
                }
            }
            this._scanner.GetNextToken();
            JSToken none = JSToken.None;
            // Collect every token up to EOF, remembering the last one seen.
            while (sourceContext.GetToken() != JSToken.EndOfFile)
            {
                list.Add(sourceContext);
                none = sourceContext.GetToken();
                this._scanner.GetNextToken();
            }
            // An unterminated comment at the end carries over to the next chunk.
            if (JSToken.UnterminatedComment == none)
            {
                this._state = SourceState.STATE_COLOR_COMMENT;
            }
        }
        catch (ScannerException)
        {
            // Best effort: a scanner failure ends colorization early but is not fatal.
        }
    }
    return (list);
}
// Tokenizes sourceCode into a list of token color infos, honoring a
// multi-line comment state carried over from the previous chunk.
public virtual ITokenEnumerator Colorize(string sourceCode, SourceState state)
{
    TokenColorInfoList tokens = new TokenColorInfoList();
    this._state = SourceState.STATE_COLOR_NORMAL;

    if (sourceCode.Length == 0)
        return tokens;

    Context scanContext = new Context(null, sourceCode);
    this._scanner.SetSource(scanContext);
    try
    {
        // Carry over an open multi-line comment from the previous chunk.
        if (state == SourceState.STATE_COLOR_COMMENT)
        {
            int commentEnd = this._scanner.SkipMultiLineComment();
            if (commentEnd > sourceCode.Length)
            {
                // Comment runs past this chunk: stay in the comment state.
                this._state = SourceState.STATE_COLOR_COMMENT;
                commentEnd = sourceCode.Length;
            }
            tokens.Add(scanContext);
            // The entire chunk was comment; nothing more to scan.
            if (commentEnd == sourceCode.Length)
                return tokens;
        }

        this._scanner.GetNextToken();
        JSToken lastToken = JSToken.None;
        // Collect every token up to EOF, remembering the last one seen.
        while (scanContext.GetToken() != JSToken.EndOfFile)
        {
            tokens.Add(scanContext);
            lastToken = scanContext.GetToken();
            this._scanner.GetNextToken();
        }

        // An unterminated comment at the end carries over to the next chunk.
        if (lastToken == JSToken.UnterminatedComment)
            this._state = SourceState.STATE_COLOR_COMMENT;
    }
    catch (ScannerException)
    {
        // Best effort: return whatever was colorized before the scanner failed.
    }
    return tokens;
}
/// <summary>
/// Initializes the block state.
/// </summary>
public virtual void InitBlockState()
{
    // Pattern side: build the index/state pair and initialize it with an empty
    // inner table plus a value-property table covering IPattern.Text.
    PatternIndex = CreateExistingPatternIndex();
    PatternState = CreatePatternState(PatternIndex);
    IReadOnlyInnerReadOnlyDictionary <string> PatternEmptyInnerTable = CreateInnerTable().ToReadOnly();
    Dictionary <string, ValuePropertyType> PatternValuePropertyTypeTable = new Dictionary <string, ValuePropertyType>();
    PatternValuePropertyTypeTable.Add(nameof(IPattern.Text), ValuePropertyType.String);
    ((IReadOnlyPatternState <IInner>)PatternState).Init(PatternInner, PatternEmptyInnerTable, PatternValuePropertyTypeTable);
    Debug.Assert(PatternState.ToString() != null); // For code coverage.

    // Source side: same construction, keyed on IIdentifier.Text instead.
    SourceIndex = CreateExistingSourceIndex();
    SourceState = CreateSourceState(SourceIndex);
    IReadOnlyInnerReadOnlyDictionary <string> SourceEmptyInnerTable = CreateInnerTable().ToReadOnly();
    Dictionary <string, ValuePropertyType> SourceValuePropertyTypeTable = new Dictionary <string, ValuePropertyType>();
    SourceValuePropertyTypeTable.Add(nameof(IIdentifier.Text), ValuePropertyType.String);
    ((IReadOnlySourceState <IInner>)SourceState).Init(SourceInner, SourceEmptyInnerTable, SourceValuePropertyTypeTable);
    Debug.Assert(SourceState.ToString() != null); // For code coverage.
}
// Dispatches an incoming LINE message event by message type: echoes text,
// stores/echoes media into a per-source blob directory, echoes locations as
// a coordinate string, and replies to stickers with a random sticker.
// Fixes: the FindAsync result was assigned to an unused local, and a line of
// commented-out code was left behind.
protected override async Task OnMessageAsync(MessageEvent ev)
{
    Log.WriteInfo($"SourceType:{ev.Source.Type}, SourceId:{ev.Source.Id}, MessageType:{ev.Message.Type}");

    // The lookup is kept for parity with the original flow, but its result was
    // never used; discard it instead of holding a dead local.
    await SourceState.FindAsync(ev.Source.Type.ToString(), ev.Source.Id);

    // Media uploads are grouped per conversation source.
    var blobDirectoryName = ev.Source.Type + "_" + ev.Source.Id;

    switch (ev.Message.Type)
    {
        case EventMessageType.Text:
            await EchoAsync(ev.ReplyToken, ((TextEventMessage)ev.Message).Text);
            break;

        case EventMessageType.Image:
            await EchoImageAsync(ev.ReplyToken, ev.Message.Id, blobDirectoryName);
            break;

        case EventMessageType.Audio:
        case EventMessageType.Video:
        case EventMessageType.File:
            await UploadMediaContentAsync(ev.ReplyToken, ev.Message.Id, blobDirectoryName, ev.Message.Id);
            break;

        case EventMessageType.Location:
            var location = (LocationEventMessage)ev.Message;
            await EchoAsync(ev.ReplyToken, $"@{location.Latitude},{location.Longitude}");
            break;

        case EventMessageType.Sticker:
            await ReplyRandomStickerAsync(ev.ReplyToken);
            break;
    }
}
// Shuts down the stream: flushes pending work, closes the event queue,
// releases the stream descriptor, and marks the stream as shut down.
public HResult Shutdown()
{
    lock (_spSource)
    {
        HResult hr = CheckShutdown();
        if (MFError.Succeeded(hr))
        {
            // Drop queued samples/tokens before tearing anything down.
            Flush();

            if (_spEventQueue != null)
            {
                _spEventQueue.Shutdown();
            }

            _spStreamDescriptor = null;
            _eSourceState = SourceState.SourceState_Shutdown;
        }
        return hr;
    }
}
// Constructs a media stream bound to its owning network source, with all
// quality/drop bookkeeping reset and empty sample/token queues.
// Fixes: _spSource was assigned twice (a redundant duplicate at the end).
public MediaStream(NetworkSource pSource)
{
    // COM-style refcount starts at one for the creator.
    _cRef = 1;
    _spSource = pSource;
    _eSourceState = SourceState.SourceState_Invalid;
    _fActive = false;
    _flRate = 1.0f;
    _isVideo = false;
    _eDropMode = MFQualityDropMode.None;
    _fDiscontinuity = false;
    _fDropTime = false;
    _fInitDropTime = false;
    _fWaitingForCleanPoint = true;
    _hnsStartDroppingAt = 0;
    _hnsAmountToDrop = 0;
    _samples = new Queue<object>();
    _tokens = new NullableQueue();
}
// Executes a queued Start operation: selects the requested streams, asks the
// network adapter to start, and reports MESourceStarted — with the failure
// HRESULT on error — through the event queue.
private void doStart(CSourceOperation pOp)
{
    Debug.Assert(pOp.Type == SourceOperationType.Operation_Start);
    IMFPresentationDescriptor spPD = pOp.PresentationDescriptor;
    try
    {
        SelectStreams(spPD);
        // Starting -> Started brackets the network start request.
        _eSourceState = SourceState.SourceState_Starting;
        _networkStreamAdapter.SendStartRequest();
        _eSourceState = SourceState.SourceState_Started;
        ThrowIfError(_spEventQueue.QueueEventParamVar(MediaEventType.MESourceStarted, Guid.Empty, HResult.S_OK, pOp.Data));
    }
    catch (Exception ex)
    {
        // Failures surface to the pipeline as a failed MESourceStarted event
        // rather than as a thrown exception.
        _spEventQueue.QueueEventParamVar(MediaEventType.MESourceStarted, Guid.Empty, (HResult)ex.HResult, null);
    }
}
// Transitions the source to a new state and resets the per-state bookkeeping:
// sector tracking, command 70/74/78 timers and counters, and queue info
// (appears to be download/request tracking — confirm against the protocol).
// Redundant transitions to the current state are ignored.
public void SetState(SourceState state)
{
    if (m_State == state)
    {
        return;
    }
    m_State = state;
    switch (m_State)
    {
        case SourceState.Verifying:
            // Full reset, including queue length and fullness.
            m_LastReceivedSector = m_LastRequestedSector = -1;
            m_LastCommand74Sent = m_LastCommand78Sent = DateTime.MinValue;
            m_Command74Sent = m_Command78Sent = 0;
            m_QueueLength = m_QueuePosition = -1;
            m_IsQueueFull = false;
            break;
        case SourceState.Verified:
        case SourceState.Requesting:
            // Keeps queue length/fullness; resets everything else.
            m_LastReceivedSector = m_LastRequestedSector = -1;
            m_LastCommand74Sent = m_LastCommand78Sent = DateTime.MinValue;
            m_Command74Sent = m_Command78Sent = 0;
            m_QueuePosition = -1;
            break;
        case SourceState.Requested:
            // Only sector tracking and command-78 bookkeeping reset here.
            m_LastReceivedSector = m_LastRequestedSector = -1;
            m_LastCommand78Sent = DateTime.MinValue;
            m_Command78Sent = 0;
            break;
        case SourceState.NotNeeded:
            // Widest reset: also clears command-70 bookkeeping.
            m_LastReceivedSector = m_LastRequestedSector = -1;
            m_LastCommand70Sent = m_LastCommand74Sent = m_LastCommand78Sent = DateTime.MinValue;
            m_Command70Sent = m_Command74Sent = m_Command78Sent = 0;
            m_QueueLength = m_QueuePosition = -1;
            m_IsQueueFull = false;
            break;
    }
}
/// <summary>
/// The <see cref="T:System.Windows.Controls.MediaElement" /> calls this method to ask the
/// <see
///     cref="T:System.Windows.Media.MediaStreamSource" />
/// to open the media.
/// </summary>
protected override async void OpenMediaAsync()
{
    // NOTE: async void is forced by the base-class override signature;
    // all awaited failures are caught below and routed to ReportError.
    Debug.WriteLine("TsMediaStreamSource.OpenMediaAsync()");
    ValidateEvent(MediaStreamFsm.MediaEvent.OpenMediaAsyncCalled);
    ThrowIfDisposed();
    lock (_stateLock)
    {
        _isClosed = false;
        _state = SourceState.Open;
        Debug.Assert(null == _closeCompleted, "TsMediaStreamSource.OpenMediaAsync() stream is already playing");
        // Completed later, when the source finishes closing (see CloseAsync).
        _closeCompleted = new TaskCompletionSource<object>();
    }
    // -1 marks "no buffering progress reported yet".
    _bufferingProgress = -1;
    try
    {
        var configuration = await _streamControl.OpenAsync(CancellationToken.None).ConfigureAwait(false);
        Configure(configuration);
    }
    catch (Exception ex)
    {
        Debug.WriteLine("TsMediaStreamSource.OpenMediaAsync() failed: " + ex.Message);
        ReportError("Unable to open stream " + ex.Message);
    }
}
// Sends the end-of-stream sample for the given stream and clears the matching
// per-stream source reference. When both audio and video have ended, moves the
// source toward WaitForClose (unless it is already Closed).
bool SendLastStreamSample(MediaStreamDescription mediaStreamDescription)
{
    _taskScheduler.ThrowIfNotOnThread();
    ReportGetSampleProgress(1);
    // A sample with a null buffer signals end-of-stream to the MediaElement.
    var sample = new MediaStreamSample(mediaStreamDescription, null, 0, 0, 0, NoMediaSampleAttributes);
    Debug.WriteLine("Sample {0} is null", mediaStreamDescription.Type);
    switch (mediaStreamDescription.Type)
    {
        case MediaStreamType.Audio:
            AudioStreamSource = null;
            break;
        case MediaStreamType.Video:
            VideoStreamSource = null;
            break;
        default:
            Debug.Assert(false, "Unknown stream type: " + mediaStreamDescription.Type);
            break;
    }
    var allClosed = null == VideoStreamSource && null == AudioStreamSource;
    if (allClosed)
    {
        Debug.WriteLine("TsMediaStreamSource.SendLastStreamSample() All streams closed");
        lock (_stateLock)
        {
            _isClosed = true;
            // Don't regress from Closed; otherwise wait for CloseMedia to finish.
            if (SourceState.Closed != _state)
                _state = SourceState.WaitForClose;
        }
    }
    ValidateEvent(MediaStreamFsm.MediaEvent.CallingReportSampleCompleted);
    ReportGetSampleCompleted(sample);
    if (allClosed)
        ValidateEvent(MediaStreamFsm.MediaEvent.StreamsClosed);
    return true;
}
// Initializes the colorizer: puts the scanner into authoring mode and starts
// from the normal (non-comment) coloring state.
// NOTE(review): assumes _scanner is created by a field initializer — confirm.
internal JSColorizer()
{
    this._scanner.SetAuthoringMode(true);
    this._state = SourceState.STATE_COLOR_NORMAL;
}
// Records a pending seek under the state lock (an explicit _seekTarget, when
// set, overrides the requested timestamp) and signals the worker to perform
// the Seek operation. No-op once the source is closed.
void StartSeek(TimeSpan seekTimestamp)
{
    lock (_stateLock)
    {
        if (_isClosed)
            return;
        _state = SourceState.Seek;
        _pendingSeekTarget = _seekTarget ?? seekTimestamp;
    }
    RequestOperationAndSignal(Operation.Seek);
}
/// <summary>
/// The <see cref="T:System.Windows.Controls.MediaElement" /> can call this method when going through normal shutdown
/// or as a result of an error. This lets the developer perform any needed cleanup of the
/// <see
///     cref="T:System.Windows.Media.MediaStreamSource" />
/// .
/// </summary>
protected override void CloseMedia()
{
    Debug.WriteLine("TsMediaStreamSource.CloseMedia()");
    ValidateEvent(MediaStreamFsm.MediaEvent.CloseMediaCalled);

    // Mark the source closed under the state lock before scheduling cleanup.
    lock (_stateLock)
    {
        _isClosed = true;
        _state = SourceState.Closed;
    }

    // Run the actual cleanup handler on the source's task scheduler and let
    // the collector track the resulting task.
    var closeTask = Task.Factory.StartNew(CloseMediaHandler, CancellationToken.None, TaskCreationOptions.None, _taskScheduler);

    TaskCollector.Default.Add(closeTask, "TsMediaStreamSource CloseMedia");
}
// Handles an unfollow event: removes the stored state for this source.
protected override async Task OnUnfollowAsync(UnfollowEvent ev)
{
    Log.WriteInfo($"SourceType:{ev.Source.Type}, SourceId:{ev.Source.Id}");
    await SourceState.DeleteAsync(ev.Source.Type.ToString(), ev.Source.Id);
}
// Publishes the new state to all subscribers of the state stream.
private void ChangeState(SourceState newState) => _stateSubject.OnNext(newState);
// Returns a deep clone of the source node held by SourceState.
private protected virtual IIdentifier CloneSource() => SourceState.CloneNode();
// Initiates (or observes) the close of the media source and returns a task
// that completes when the close has finished. A 7-second watchdog cancels the
// wait and forces the close handler if the MediaElement never calls CloseMedia.
public Task CloseAsync()
{
    Debug.WriteLine("TsMediaStreamSource.CloseAsync(): close {0}", null == _closeCompleted ? "<none>" : _closeCompleted.Task.Status.ToString());
    TaskCompletionSource<object> closeCompleted;
    bool closedState;
    lock (_stateLock)
    {
        _isClosed = true;
        closedState = SourceState.Closed == _state;
        if (!closedState)
            _state = SourceState.WaitForClose;
        closeCompleted = _closeCompleted;
        // A close that already finished is treated as absent from here on.
        if (null != closeCompleted && closeCompleted.Task.IsCompleted)
        {
            closeCompleted = null;
            _closeCompleted = null;
        }
    }
    // Nothing is open (or we were already closed): resolve immediately.
    if (0 == _streamOpenFlags || closedState)
    {
        if (null != closeCompleted)
            closeCompleted.TrySetResult(string.Empty);
        return TplTaskExtensions.CompletedTask;
    }
    if (null == closeCompleted)
        return TplTaskExtensions.CompletedTask;
    CheckPending();
    // Watchdog: if the close has not completed in 7s, cancel the wait and
    // force the close handler so callers are not blocked forever.
    var timeout = Task.Delay(7 * 1000)
        .ContinueWith(
            t =>
            {
                if (closeCompleted.TrySetCanceled())
                {
                    Debug.WriteLine("TsMediaStreamSource.CloseAsync() close timeout (remember to set MediaElement.Source to null before removing it from the visual tree)");
                    FireCloseMediaHandler();
                }
            });
    TaskCollector.Default.Add(timeout, "TsMediaStreamSource CloseAsync timeout");
    return closeCompleted.Task;
}
/// <summary>
/// Maps a cross-platform <c>SourceState</c> to the native <c>RTCSourceState</c>
/// via a direct enum cast. Assumes the two enums share numeric values —
/// TODO confirm they stay in sync when either enum changes.
/// </summary>
public static RTCSourceState ToPlatformNative(this SourceState nativePort) => (RTCSourceState)nativePort;
// Handles an incoming server audio-data message: resolves the source, fills in
// frames for any dropped packets, decodes the payload, and raises ReceivedAudio.
internal void OnServerAudioDataMessage(MessageEventArgs<ServerAudioDataMessage> e)
{
    // Unknown or ignored sources are dropped silently.
    var source = this.manager[e.Message.SourceId];
    if (source == null || this.manager.GetIsIgnored(source))
        return;

    int missedFrames;
    SourceState state;
    lock (this.sources)
    {
        // Lazily create per-source decode state on first sight of this source.
        if (!this.sources.TryGetValue(source, out state))
        {
            state = new SourceState();
            state.Codec = new AudioCodec(source.CodecSettings);
            this.sources[source] = state;
        }

        missedFrames = e.Message.Sequence - state.Sequence - 1;

        // We can't wait around for the start signal, and the first message
        // in the sequence might be dropped. We'll just assume a new stream
        // if we give a _lower_ sequence than the last one.
        if (missedFrames < 0)
            missedFrames = e.Message.Sequence - 1;

        state.Sequence = e.Message.Sequence;
    }

    // Audio from ignored users is also discarded.
    var user = this.context.Users[source.OwnerId];
    if (user == null || this.context.Users.GetIsIgnored(user))
        return;

    int defaultSize = source.CodecSettings.GetBytes(source.CodecSettings.FrameSize);

    byte[][] data = e.Message.Data;
    byte[][] decoded = new byte[data.Length + missedFrames][];

    // Decode null frames for anything we missed (presumably the codec's
    // loss-concealment path)...
    for (int i = 0; i < missedFrames; i++)
        decoded[i] = state.Codec.Decode(null, defaultSize);

    // ...then decode the frames that actually arrived, offset past the gap.
    for (int i = missedFrames; i < decoded.Length; i++)
    {
        byte[] frame = data[i - missedFrames];
        decoded[i] = state.Codec.Decode(frame, frame.Length);
    }

    OnReceivedAudio(new ReceivedAudioEventArgs(source, decoded));
}
// Starts transmitting from an owned audio source: resets the per-source
// sequence counter (creating codec state on first use), notifies the server,
// then raises AudioSourceStarted locally.
public void BeginSending(AudioSource source)
{
    if (source == null)
        throw new ArgumentNullException ("source");
    // Only the owner of a source may transmit from it.
    if (source.OwnerId != this.context.CurrentUser.UserId)
        throw new ArgumentException ("Can not send audio from a source you don't own", "source");

    lock (this.sources)
    {
        SourceState state;
        // Lazily create the per-source send state.
        if (!this.sources.TryGetValue (source, out state))
            this.sources[source] = state = new SourceState();

        if (state.Codec == null)
            state.Codec = new AudioCodec (source.CodecSettings);

        // Every new transmission restarts its sequence numbering.
        state.Sequence = 0;
    }

    // Tell the server we are starting, then raise the local event.
    this.context.Connection.SendAsync (new ClientAudioSourceStateChangeMessage { Starting = true, SourceId = source.Id });
    OnAudioSourceStarted (new AudioSourceEventArgs (source));
}
// Transitions the source to a new state and resets the bookkeeping that
// belongs to that state (sector tracking, command 70/74/78 timers and
// counters, queue info). Redundant transitions are ignored.
public void SetState(SourceState state)
{
    if (state == m_State)
        return;

    m_State = state;

    switch (m_State)
    {
        case SourceState.Verifying:
            // Full reset, including queue length and fullness.
            m_LastReceivedSector = -1;
            m_LastRequestedSector = -1;
            m_LastCommand74Sent = DateTime.MinValue;
            m_LastCommand78Sent = DateTime.MinValue;
            m_Command74Sent = 0;
            m_Command78Sent = 0;
            m_QueueLength = -1;
            m_QueuePosition = -1;
            m_IsQueueFull = false;
            break;

        case SourceState.Verified:
        case SourceState.Requesting:
            // Keeps queue length/fullness; resets everything else.
            m_LastReceivedSector = -1;
            m_LastRequestedSector = -1;
            m_LastCommand74Sent = DateTime.MinValue;
            m_LastCommand78Sent = DateTime.MinValue;
            m_Command74Sent = 0;
            m_Command78Sent = 0;
            m_QueuePosition = -1;
            break;

        case SourceState.Requested:
            // Only sector tracking and command-78 bookkeeping reset here.
            m_LastReceivedSector = -1;
            m_LastRequestedSector = -1;
            m_LastCommand78Sent = DateTime.MinValue;
            m_Command78Sent = 0;
            break;

        case SourceState.NotNeeded:
            // Widest reset: also clears command-70 bookkeeping.
            m_LastReceivedSector = -1;
            m_LastRequestedSector = -1;
            m_LastCommand70Sent = DateTime.MinValue;
            m_LastCommand74Sent = DateTime.MinValue;
            m_LastCommand78Sent = DateTime.MinValue;
            m_Command70Sent = 0;
            m_Command74Sent = 0;
            m_Command78Sent = 0;
            m_QueueLength = -1;
            m_QueuePosition = -1;
            m_IsQueueFull = false;
            break;
    }
}
// Combines the four components with XOR.
// NOTE(review): XOR is order-insensitive, so swapping Input/Output (or
// Source/Target states) produces the same hash; a 397 multiply-and-xor mix
// (as used elsewhere in this file) would reduce collisions — but changing it
// alters existing hash values, so confirm nothing persists them first.
public override int GetHashCode()
{
    return (Input.GetHashCode() ^ Output.GetHashCode() ^ SourceState.GetHashCode() ^ TargetState.GetHashCode());
}
// A new network source starts in the Invalid state with no streams attached.
public NetworkSource()
{
    _streams = new List<IMFMediaStream>();
    _eSourceState = SourceState.SourceState_Invalid;
}
// Computes suggested-action sets for the given span and feeds them into the
// editor's collectors, walking collectors from highest to lowest priority.
// High-priority passes may produce low-priority sets; those are buffered and
// emitted once the normal-priority collector is reached. Every collector is
// completed, even when empty, so the UI is never left blocked.
private async Task GetSuggestedActionsWorkerAsync(
    ISuggestedActionCategorySet requestedActionCategories,
    SnapshotSpan range,
    ImmutableArray <ISuggestedActionSetCollector> collectors,
    ArrayBuilder <ISuggestedActionSetCollector> completedCollectors,
    CancellationToken cancellationToken)
{
    AssertIsForeground();
    // Hold a reference on the underlying source; bail out if it is already gone.
    using var state = SourceState.TryAddReference();
    if (state is null)
    {
        return;
    }
    var workspace = state.Target.Workspace;
    if (workspace is null)
    {
        return;
    }
    var selection = TryGetCodeRefactoringSelection(state, range);
    // Suggested actions are only meaningful once the workspace is fully loaded.
    await workspace.Services.GetRequiredService <IWorkspaceStatusService>().WaitUntilFullyLoadedAsync(cancellationToken).ConfigureAwait(false);
    using (Logger.LogBlock(FunctionId.SuggestedActions_GetSuggestedActionsAsync, cancellationToken))
    {
        var document = range.Snapshot.GetOpenDocumentInCurrentContextWithChanges();
        if (document is null)
        {
            return;
        }
        // Keep track of how many actions we've put in the lightbulb at each priority level. We do
        // this as each priority level will both sort and inline actions. However, we don't want to
        // inline actions at each priority if it's going to make the total number of actions too high.
        // This does mean we might inline actions from a higher priority group, and then disable
        // inlining for lower pri groups. However, intuitively, that is what we want. More important
        // items should be pushed higher up, and less important items shouldn't take up that much space.
        var currentActionCount = 0;
        using var _ = ArrayBuilder <SuggestedActionSet> .GetInstance(out var lowPrioritySets);
        // Collectors are in priority order. So just walk them from highest to lowest.
        foreach (var collector in collectors)
        {
            // Map the editor's collector ordering onto our request priority;
            // unknown orderings get null and are skipped (but still completed).
            var priority = collector.Priority switch
            {
                VisualStudio.Utilities.DefaultOrderings.Highest => CodeActionRequestPriority.High,
                VisualStudio.Utilities.DefaultOrderings.Default => CodeActionRequestPriority.Normal,
                VisualStudio.Utilities.DefaultOrderings.Lowest => CodeActionRequestPriority.Lowest,
                _ => (CodeActionRequestPriority?)null,
            };
            if (priority != null)
            {
                var allSets = GetCodeFixesAndRefactoringsAsync(
                    state, requestedActionCategories, document, range, selection, addOperationScope: _ => null,
                    priority.Value, currentActionCount, cancellationToken).WithCancellation(cancellationToken).ConfigureAwait(false);
                await foreach (var set in allSets)
                {
                    if (priority == CodeActionRequestPriority.High && set.Priority == SuggestedActionSetPriority.Low)
                    {
                        // if we're processing the high pri bucket, but we get action sets for lower pri
                        // groups, then keep track of them and add them in later when we get to that group.
                        lowPrioritySets.Add(set);
                    }
                    else
                    {
                        currentActionCount += set.Actions.Count();
                        collector.Add(set);
                    }
                }
                if (priority == CodeActionRequestPriority.Normal)
                {
                    // now, add any low pri items we've been waiting on to the final group.
                    foreach (var set in lowPrioritySets)
                    {
                        currentActionCount += set.Actions.Count();
                        collector.Add(set);
                    }
                }
            }
            // Ensure we always complete the collector even if we didn't add any items to it.
            // This ensures that we unblock the UI from displaying all the results for that
            // priority class.
            collector.Complete();
            completedCollectors.Add(collector);
        }
    }
}
// Initializes the colorizer: puts the scanner into authoring mode and starts
// from the normal (non-comment) coloring state.
// NOTE(review): assumes _scanner is created by a field initializer — confirm.
internal JSColorizer()
{
    this._scanner.SetAuthoringMode(true);
    this._state = SourceState.STATE_COLOR_NORMAL;
}