/// <summary> /// Dispose of the blob stream. /// </summary> protected override void Dispose(bool disposing) { if (!disposing) { return; } _lock.Take(); FlushInternal(); if (_sections != null) { // dispose of the sections collection for (int i = _sections.Count - 1; i >= 0; --i) { if (_sections[i] == null) { continue; } _sections[i].Bytes = null; } _sections.Dispose(); _sections = null; } _lock.Release(); }
/// <summary> /// Dispose of this Extract instance. /// </summary> public override void Dispose() { if (SubExtractSet) { SubExtract.Dispose(); } }
/// <summary> /// Manually submit all unsubmitted changes to rows. /// </summary> public void SubmitChanges() { // ensure initialization if (_initialize) { Initialize(); } // if no updated collections - skip if (_currentCollections.Count == 0) { return; } _lock.Take(); // copy the updated collections ArrayRig <RowCollection> collections = _currentCollections; _currentCollections = new ArrayRig <RowCollection>(collections.Count); _lock.Release(); // iterate all updated collections foreach (RowCollection collection in collections) { // run an update for the collection Update(collection, true); } collections.Dispose(); }
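// SubmitChanges above swaps out the pending collection while holding the lock and then processes the
// snapshot outside the lock, so writers are blocked only for the duration of the swap. A minimal sketch
// of that drain pattern using only BCL types; the PendingQueue type, its members and the processItem
// callback are illustrative names, not part of this library.
using System;
using System.Collections.Generic;

public class PendingQueue<T> {
  private readonly object _sync = new object();
  private List<T> _pending = new List<T>();
  
  // queue an item for the next drain
  public void Add(T item) {
    lock (_sync) _pending.Add(item);
  }
  
  // swap the pending list under the lock, then process the snapshot outside it
  public void Drain(Action<T> processItem) {
    List<T> snapshot;
    lock (_sync) {
      if (_pending.Count == 0) return;
      snapshot = _pending;
      _pending = new List<T>(snapshot.Count);
    }
    foreach (T item in snapshot) processItem(item);
  }
}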
/// <summary> /// Dispose of this Extract instance. /// </summary> public override void Dispose() { if (SubExtractSet) { SubExtract.Dispose(); } ProtocolSearch.Dispose(); }
/// <summary> /// Dispose of this Table instance. /// </summary> public virtual void Dispose() { if (!_initialize) { _keyspace.RemoveTable(this); _keyspace = null; Columns.Dispose(); } }
/// <summary> /// Dispose of this Extract instance. /// </summary> public override void Dispose() { foreach (Extract extract in SubExtracts) { extract.Dispose(); } foreach (Extract extract in ReqExtracts) { extract.Dispose(); } SubExtracts.Dispose(); ReqExtracts.Dispose(); }
/// <summary> /// Disposes of the host, committing any DB changes. /// </summary> public void Dispose() { if (!Disposed) { Disposed = true; // remove the host from url control _session.OnDisposeHost(this); } _lock.Take(); if (_changed && !_committing) { _committing = true; _lock.Release(); _commit.Run(); return; } _newUrls.Dispose(); _oldUrls.Dispose(); _newUrls = null; _oldUrls = null; _commit = null; _lock.Release(); }
/// <summary> /// Parse the specified characters and test against the scheme and it's children. Returns if it's /// requirements are satisfied. /// </summary> public unsafe override bool Next(char[] characters, int start, int end) { // if the result isn't going anywhere - skip if (!_extract.OnSectionSet && _extract.SubExtracts.Count == 0) { // no result method or sub extracts return(false); } // have the results of this parser been satisfied? if (_runSubParsers) { // yes, pass any characters to the sub-parsers foreach (Parse parser in _activeSubParsers) { if (!parser.Next(characters, start, end)) { _activeSubParsers.RemoveQuick(parser); _runSubParsers = _activeSubParsers.Count > 0; } } } // the ultimate result of this parse is whether a result is achieved and // passed to a return method or a sub extract method is similarly successful bool result = false; // the last index to be copied into the current section int lastIndex = start; // are there prefixes and suffixes? if (_prefixesSet && _suffixesSet) { // yes, both prefixes and suffixes // set the current index within the block Index = lastIndex - 1; // loop until the block is complete while (Index <= end) { // are there prefixes or has a match been found? if (Active) { // iterate characters in current block while (++Index < end) { char character = characters[Index]; // if a suffix has been found if (_suffixSearch.Next(character)) { // if current section count + current block is greater than max allowed if (_currentSection.Count + Index - lastIndex > _extract.MaxCharacters) { // overflow - reset section stack Reset(); // decrement the index to start searching for the prefix --Index; // revert to prefix search break; } ++Index; // is the current section able to contain the next block of characters? if (_currentSection.Capacity - _currentSection.Count < Index - lastIndex) { // no, resize _currentSection.SetCapacity(_currentSection.Capacity + Index - lastIndex); } // append the block up to the current index fixed(void *dst = &_currentSection.Array[_currentSection.Count]) { Marshal.Copy(characters, lastIndex, (IntPtr)dst, Index - lastIndex); } // update the current section count _currentSection.Count += Index - lastIndex; // update the last index --Index; lastIndex = Index; // if there are nested requirements if (_reqParsersSet) { // run required nested extracts foreach (Parse reqParser in _reqParsers) { // if required extract wasn't satisfied then not active Active &= reqParser.Next(_currentSection.Array, 0, _currentSection.Count); } } // if any requirements were fulfilled if (Active) { // if there are sub parsers if (_subParsersSet) { // begin running sub parsers for (int i = _subParsers.Count - 1; i >= 0; --i) { if (_subParsers[i].Next(_currentSection.Array, 0, _currentSection.Count)) { _activeSubParsers.Add(_subParsers[i]); } else { _activeSubParsers.RemoveQuick(_subParsers[i]); } } _runSubParsers = _activeSubParsers.Count != 0; } // if section callback is set if (_extract.OnSectionSet) { // run complete section callback _extract.OnSection.Take(); _extract.OnSection.Item.ArgA = _currentSection.ToArray(); _extract.OnSection.Item.Run(); _extract.OnSection.Release(); result = true; } } // pop the last section and check max length if (Sections.Count > 1 && Sections[Sections.Count - 1].Count + _currentSection.Count - 1 < _extract.MaxCharacters) { // get the next session Sections.Remove(Sections.Count - 1); ArrayRig <char> nextSection = Sections[Sections.Count - 1]; // will the next section contain the current sections characters? 
if (nextSection.Capacity - nextSection.Count < _currentSection.Count) { // no, resize nextSection.SetCapacity(nextSection.Capacity + _currentSection.Count); } // append the last section fixed(void *dst = &nextSection.Array[0]) { Marshal.Copy(_currentSection.Array, 0, (IntPtr)dst, _currentSection.Count); } // update the next section count nextSection.Count += _currentSection.Count; // set current section _currentSection = nextSection; // flag for suffix search Active = true; } else { // reset the section stack Reset(); // decrement index --Index; // move back to prefix search break; } } // has the prefix been found? if (_prefixSearch.Next(character)) { // nested prefix has been found // check max length of current section if (_currentSection.Count + Index - lastIndex > _extract.MaxCharacters) { // overflow - reset the sections stack Reset(); } else { // add the current section to the stack Sections.Add(_currentSection); // initialize new nested section _currentSection = new ArrayRig <char>(); } // get the prefix char array char[] prefix = _prefixSearch.Values[0].ToCharArray(); // if the current section won't contain the prefix if (_currentSection.Capacity - _currentSection.Count < prefix.Length) { // resize _currentSection.SetCapacity(_currentSection.Capacity + prefix.Length); } // copy the prefix charcters to the current section fixed(void *dst = &_currentSection.Array[_currentSection.Count]) { Marshal.Copy(prefix, 0, (IntPtr)dst, prefix.Length); } // update the section count _currentSection.Count += prefix.Length; } } // if block is finished if (Index == end - 1) { // check max length if (_currentSection.Count + Index - lastIndex > _extract.MaxCharacters) { // overflow - reset Reset(); } else { // if the current section won't contain the block if (_currentSection.Capacity - _currentSection.Count < Index - lastIndex) { // resize _currentSection.SetCapacity(_currentSection.Capacity + Index - lastIndex); } // copy the block charcters to the current section fixed(void *dst = &_currentSection.Array[_currentSection.Count]) { Marshal.Copy(characters, lastIndex, (IntPtr)dst, Index - lastIndex); } // update the section count _currentSection.Count += Index - lastIndex; lastIndex = Index; } // return the result of the current block return(result); } } // search for a prefix // iterate chacters in current block while (++Index < end) { char character = characters[Index]; // if a prefix has been found if (_prefixSearch.Next(character)) { // get the prefix char array char[] prefix = _prefixSearch.Values[0].ToCharArray(); // is the current section collection large enough for the prefix? 
if (_currentSection.Capacity < prefix.Length) { // no, resize _currentSection.SetCapacity(_currentSection.Capacity + prefix.Length); } // copy the prefix charcters to the current section fixed(void *dst = &_currentSection.Array[_currentSection.Count]) { Marshal.Copy(prefix, 0, (IntPtr)dst, prefix.Length); } // update the section count _currentSection.Count += prefix.Length; // update the last index lastIndex = Index + 1; // catch up the suffix search (for partially overlapping prefixes and suffixes) foreach (char c in prefix) { _suffixSearch.Next(c); } // set to look for suffix Active = true; // go on to find a suffix break; } } // if block is finished if (!Active) { return(result); } } } else if (_prefixesSet) { // no, prefixes only // iterate chacters in current block while (++Index < end) { char character = characters[Index]; // if a prefix has been found if (_prefixSearch.Next(character)) { // get the prefix char[] prefix = _prefixSearch.Values[0].ToCharArray(); // is the parser already active? if (Active) { // yes, a nested prefix has been found // create a new section var newSection = new ArrayRig <char>(_extract.MaxCharacters < prefix.Length ? prefix.Length : _extract.MaxCharacters); // copy the prefix charcters to the new section fixed(void *dst = &newSection.Array[0]) { Marshal.Copy(prefix, 0, (IntPtr)dst, prefix.Length); } // update the new section count newSection.Count += prefix.Length; /////// copy the current block to the current sections // is the current section going to be full? while (_currentSection.Count + Index - lastIndex >= _extract.MaxCharacters) { /////// yes, complete the current section // resize the current section if needed if (_extract.MaxCharacters > _currentSection.Capacity) { _currentSection.SetCapacity(_extract.MaxCharacters); } // are there null characters in the current section? if (_extract.MaxCharacters > _currentSection.Count) { // yes, append the block to the current section fixed(void *dst = &_currentSection.Array[_currentSection.Count]) { Marshal.Copy(characters, lastIndex, (IntPtr)dst, _extract.MaxCharacters - _currentSection.Count); } // update the current section count _currentSection.Count = _extract.MaxCharacters; } // are there requirements? if (_reqParsersSet) { // yes, run the required parsers foreach (Parse parser in _reqParsers) { result |= parser.Next(_currentSection.Array, 0, _currentSection.Count); } } else { // no, the parse was successful result = true; } // have the requirements been fulfilled? if (result) { // are there sub parsers? if (_subParsersSet) { // yes, begin running sub parsers for (int i = _subParsers.Count - 1; i >= 0; --i) { if (_subParsers[i].Next(_currentSection.Array, 0, _currentSection.Count)) { _activeSubParsers.Add(_subParsers[i]); } else { _activeSubParsers.RemoveQuick(_subParsers[i]); } } _runSubParsers = _activeSubParsers.Count != 0; } // is the section callback set? if (_extract.OnSectionSet) { // yes, run complete section callback _extract.OnSection.Take(); _extract.OnSection.Item.ArgA = _currentSection.ToArray(); _extract.OnSection.Item.Run(); _extract.OnSection.Release(); result = true; } } // are there more sections? if (Sections.Count == 0) { // no, exit the iteration _currentSection.Reset(); break; } // yes, get the next section _currentSection.Dispose(); _currentSection = Sections[0]; Sections.Remove(0); } // are there any partial sections apart from the new section? 
if (_currentSection.Count != 0) { ////// yes, copy the current block to all other sections that will not be filled // resize the current section if needed if (_currentSection.Count + Index - lastIndex > _currentSection.Capacity) { _currentSection.SetCapacity(_currentSection.Count + Index - lastIndex); } // append the block to the current section fixed(void *dst = &_currentSection.Array[_currentSection.Count]) { Marshal.Copy(characters, lastIndex, (IntPtr)dst, Index - lastIndex); } // update the section count _currentSection.Count += Index - lastIndex; // iterate all sections foreach (ArrayRig <char> section in Sections) { // append the block to each section fixed(void *dst = &section.Array[section.Count]) { Marshal.Copy(characters, lastIndex, (IntPtr)dst, Index - lastIndex); } // update the section count section.Count += Index - lastIndex; } // add the new section Sections.Add(newSection); } else { // no, the new section is the only section left _currentSection = newSection; } // update the last index lastIndex = Index; } else { // no, start the first section // will the section contain the prefix? if (prefix.Length > _currentSection.Capacity) { // nope, resize _currentSection.SetCapacity(prefix.Length); } // copy the characters to the current section fixed(void *dst = &_currentSection.Array[0]) { Marshal.Copy(prefix, 0, (IntPtr)dst, prefix.Length); } // update the section count _currentSection.Count = prefix.Length; // update the last index lastIndex = Index; } // set parser active Active = true; } } ////// copy the remaining characters to the current sections if any // is there a prefix and is the current section going to be filled by the current block? while (Active && _currentSection.Count + Index - lastIndex >= _extract.MaxCharacters) { /////// yes, complete the current section // resize the current section if needed if (_extract.MaxCharacters > _currentSection.Capacity) { _currentSection.SetCapacity(_extract.MaxCharacters); } // is there space in the current section? if (_extract.MaxCharacters > _currentSection.Count) { // yes, fill the block to the current section fixed(void *dst = &_currentSection.Array[_currentSection.Count]) { Marshal.Copy(characters, lastIndex, (IntPtr)dst, _extract.MaxCharacters - _currentSection.Count); } // update the current section count _currentSection.Count = _extract.MaxCharacters; } // are there requirements? if (_reqParsersSet) { // yes, run the required parsers foreach (Parse parser in _reqParsers) { result |= parser.Next(_currentSection.Array, 0, _currentSection.Count); } } else { // no, the parse was successful result = true; } // have the requirements been fulfilled? if (result) { // are there sub parsers? if (_subParsersSet) { // yes, begin running sub parsers for (int i = _subParsers.Count - 1; i >= 0; --i) { if (_subParsers[i].Next(_currentSection.Array, 0, _currentSection.Count)) { _activeSubParsers.Add(_subParsers[i]); } else { _activeSubParsers.RemoveQuick(_subParsers[i]); } } _runSubParsers = _activeSubParsers.Count != 0; } // is the section callback set? if (_extract.OnSectionSet) { // yes, run complete section callback _extract.OnSection.Take(); _extract.OnSection.Item.ArgA = _currentSection.ToArray(); _extract.OnSection.Item.Run(); _extract.OnSection.Release(); result = true; } } // are there more sections? if (Sections.Count == 0) { // no, exit the iteration _currentSection.Reset(); break; } // yes, get the next section _currentSection.Dispose(); _currentSection = Sections[0]; Sections.Remove(0); } // are there any partial sections left? 
if (_currentSection.Count != 0) { ////// yes, copy the current block to all other sections that will not be filled // resize the current section if needed if (_currentSection.Count + Index - lastIndex > _currentSection.Capacity) { _currentSection.SetCapacity(_currentSection.Count + Index - lastIndex); } // append the block to the current section fixed(void *dst = &_currentSection.Array[_currentSection.Count]) { Marshal.Copy(characters, lastIndex, (IntPtr)dst, Index - lastIndex); } // update the section count _currentSection.Count += Index - lastIndex; // iterate all sections foreach (ArrayRig <char> section in Sections) { // append the block to each section fixed(void *dst = &section.Array[section.Count]) { Marshal.Copy(characters, lastIndex, (IntPtr)dst, Index - lastIndex); } // update the section count section.Count += Index - lastIndex; } } } else if (_suffixesSet) { // no, suffixes only // iterate characters in current block while (++Index < end) { char character = characters[Index]; // does the current character complete a suffix? if (_suffixSearch.Next(character)) { // yes, push current character block // if current section count + current block is greater than max allowed if (_currentSection.Count + Index - lastIndex > _extract.MaxCharacters) { // overflow - move the current section // shift characters from the current section fixed(void *dst = &_currentSection.Array[0]) { Marshal.Copy(_currentSection.Array, Index - lastIndex, (IntPtr)dst, _currentSection.Count - (Index - lastIndex)); } // set the new count _currentSection.Count = _currentSection.Count - (Index - lastIndex); } // resize the current section if needed if (_currentSection.Count + Index - lastIndex > _currentSection.Capacity) { _currentSection.SetCapacity(_currentSection.Count + Index - lastIndex); } // append the block up to the current index fixed(void *dst = &_currentSection.Array[_currentSection.Count]) { Marshal.Copy(characters, lastIndex, (IntPtr)dst, Index - lastIndex); } // update the current section count _currentSection.Count += Index - lastIndex; // update the last index lastIndex = Index; // are there requirements? if (_reqParsersSet) { // yes, run the required parsers foreach (Parse parser in _reqParsers) { result |= parser.Next(_currentSection.Array, 0, _currentSection.Count); } } else { result = true; } // have the requirements been fulfilled? if (result) { // are there sub parsers? if (_subParsersSet) { // yes, begin running sub parsers for (int i = _subParsers.Count - 1; i >= 0; --i) { if (_subParsers[i].Next(_currentSection.Array, 0, _currentSection.Count)) { _activeSubParsers.Add(_subParsers[i]); } else { _activeSubParsers.RemoveQuick(_subParsers[i]); } } _runSubParsers = _activeSubParsers.Count != 0; } // is the section callback set? 
if (_extract.OnSectionSet) { // yes, run complete section callback _extract.OnSection.Take(); _extract.OnSection.Item.ArgA = _currentSection.ToArray(); _extract.OnSection.Item.Run(); _extract.OnSection.Release(); result = true; } } } } // copy the current character block // if current section count + current block is greater than max allowed if (_currentSection.Count + Index - lastIndex > _extract.MaxCharacters) { // overflow - move the current section // shift characters in the current section fixed(void *dst = &_currentSection.Array[0]) { Marshal.Copy(_currentSection.Array, Index - lastIndex, (IntPtr)dst, _currentSection.Count - (Index - lastIndex)); } // set the new count _currentSection.Count = _currentSection.Count - (Index - lastIndex); } // resize the current section if needed if (_currentSection.Count + Index - lastIndex > _currentSection.Capacity) { _currentSection.SetCapacity(_currentSection.Count + Index - lastIndex); } // append the block up to the current index fixed(void *dst = &_currentSection.Array[_currentSection.Count]) { Marshal.Copy(characters, lastIndex, (IntPtr)dst, Index - lastIndex); } // update the current section count _currentSection.Count += Index - lastIndex; } else { // no prefixes or suffixes // are there requirements? if (_reqParsersSet) { // yes, check requirements foreach (Parse parser in _reqParsers) { result |= parser.Next(characters, start, end); } } else { // no, the result is positive result = true; } // is the parser becoming active? if (result && !Active) { // yes, update state Active = true; // run sub parsers if (!_runSubParsers) { _runSubParsers = true; // iterate sub parsers for (int i = _subParsers.Count - 1; i >= 0; --i) { if (_subParsers[i].Next(_currentSection.Array, 0, _currentSection.Count)) { _activeSubParsers.Add(_subParsers[i]); } else { _activeSubParsers.RemoveQuick(_subParsers[i]); } } } } } // return the result return(result); }
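// Next above extracts sections incrementally between prefix and suffix matches, capped at
// _extract.MaxCharacters, with nested sections and required/sub parsers. A much simplified,
// non-incremental sketch of the core idea only (single prefix/suffix strings, no nesting, no
// nested parsers); SectionExtractor, ExtractSections and their parameters are illustrative names.
using System;
using System.Collections.Generic;

public static class SectionExtractor {
  // returns every block of text starting at a prefix and ending before the next suffix,
  // skipping blocks longer than maxCharacters (mirroring the overflow reset in the parser above)
  public static List<string> ExtractSections(string text, string prefix, string suffix, int maxCharacters) {
    var sections = new List<string>();
    int index = 0;
    while (true) {
      int start = text.IndexOf(prefix, index, StringComparison.Ordinal);
      if (start < 0) break;
      int endIndex = text.IndexOf(suffix, start + prefix.Length, StringComparison.Ordinal);
      if (endIndex < 0) break;
      // enforce the section length cap
      if (endIndex - start <= maxCharacters) sections.Add(text.Substring(start, endIndex - start));
      index = endIndex + suffix.Length;
    }
    return sections;
  }
}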
/// <summary> /// Execute the current collection of statements. /// The batch lock is taken if not already held by the caller and is released during execution. /// </summary> protected RowSet ExecuteBatch(bool takeLock = true) { // is the lock to be used? yes, take it if (takeLock) { _batchLock.Take(); } // reset the timer _batchTimer.Reset(BatchInterval); _batchTimer.Run = false; // create a new batch statement BatchStatement batchStatement = new BatchStatement(); batchStatement.SetReadTimeoutMillis(BatchTimeout); // add all current statements for (int i = 0; i < _batch.Count; ++i) { batchStatement.Add(_batch[i]); } // set the batch page size batchStatement.SetPageSize(_batchPageSize); // reset the batch _batch.Reset(); _batchReturnsResults = false; // copy the collection of callbacks ArrayRig <IAction> callbacks = new ArrayRig <IAction>(_onExecuted); _onExecuted.Clear(); // release the batch lock _batchLock.Release(); RowSet rowSet; try { // execute the batch statement rowSet = _session.Execute(batchStatement); } catch (Exception ex) { var builder = StringBuilderCache.Get(); builder.Append("An exception occurred executing a batch statement '"); var obj = typeof(BatchStatement).GetProperty("Queries", System.Reflection.BindingFlags.Instance | System.Reflection.BindingFlags.DeclaredOnly | System.Reflection.BindingFlags.NonPublic) .GetValue(batchStatement); if (obj == null) { builder.Append("Unknown"); } else { var statements = (List <Statement>)obj; bool first = true; foreach (var statement in statements) { if (statement.OutgoingPayload == null) { continue; } foreach (var query in statement.OutgoingPayload.Keys) { if (first) { first = false; } else { builder.Append(", "); } builder.Append(query); } } } builder.Append("'. "); Log.Error(builder.ToString(), ex); return(null); } // run callback with results if (_onExecutedResults != null) { _onExecutedResults.ArgA = rowSet; _onExecutedResults.Run(); _onExecutedResults = null; } // run callbacks foreach (IAction callback in callbacks) { callback.Run(); } callbacks.Dispose(); return(rowSet); }
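// ExecuteBatch above packs the queued statements into a DataStax driver BatchStatement and runs it on
// the shared session. A minimal usage sketch of the driver calls it relies on (BatchStatement.Add,
// SetPageSize, SetReadTimeoutMillis, ISession.Execute); the contact point, keyspace, table and values
// are placeholders, not taken from this codebase.
using System;
using Cassandra;

public static class BatchExample {
  public static void Run() {
    var cluster = Cluster.Builder().AddContactPoint("127.0.0.1").Build();
    ISession session = cluster.Connect("example_keyspace");
    
    // accumulate statements into a single logged batch
    var batch = new BatchStatement();
    batch.Add(new SimpleStatement("INSERT INTO items (id, name) VALUES (?, ?)", Guid.NewGuid(), "first"));
    batch.Add(new SimpleStatement("INSERT INTO items (id, name) VALUES (?, ?)", Guid.NewGuid(), "second"));
    
    // the same per-statement options ExecuteBatch sets before running
    batch.SetPageSize(500);
    batch.SetReadTimeoutMillis(12000);
    
    // execute the batch and enumerate any returned rows
    RowSet rows = session.Execute(batch);
    foreach (Row row in rows) Console.WriteLine(row.GetValue<string>("name"));
  }
}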
//----------------------------------// /// <summary> /// Update the structure of the element table. /// </summary> protected void Update() { _update = false; _rows.Dispose(); _rows = new ArrayRig <Row>(); // update the row elements foreach (var child in Children) { if (child.Tag == Tag.TableRow) { var row = new Row(this); row.RowElement = child; if (child.Children == null) { continue; } foreach (var cell in child.Children) { if (cell.Tag == Tag.TableCell || cell.Tag == Tag.TableHeadCell) { row.Cells.Add(cell); if (row.Cells.Count > _columnCount) { _columnCount = row.Cells.Count; } } } if (row.Cells.Count > 0 && row.Cells[0] != null && row.Cells[0].Tag == Tag.TableHeadCell) { Header = row; } else { _rows.Add(row); } } else if (child.Tag == Tag.ColumnGroup) { var row = new Row(this); row.RowElement = child; if (child.Children == null) { continue; } foreach (var cell in child.Children) { if (cell.Tag == Tag.Column) { row.Cells.Add(cell); if (row.Cells.Count > _columnCount) { _columnCount = row.Cells.Count; } } } ColumnGroup = row; } } }
/// <summary> /// Load all referenced assemblies that could include a Singleton. /// </summary> private static void LoadAssemblies() { // load assemblies that reference the Efz.Common assembly ignoring the .Net assemblies ArrayRig <System.Reflection.Assembly> loadedAssemblies = new ArrayRig <System.Reflection.Assembly>(); loadedAssemblies.Add(System.Reflection.Assembly.GetCallingAssembly()); ArrayRig <System.Reflection.Assembly> nextAssemblies = new ArrayRig <System.Reflection.Assembly>(AppDomain.CurrentDomain.GetAssemblies()); // while there are more assemblies to load while (nextAssemblies.Count != 0) { // iterate the next assemblies to load for (int i = nextAssemblies.Count - 1; i >= 0; --i) { var assembly = nextAssemblies[i]; // remove the assembly nextAssemblies.Remove(i); // is it a system assembly? if ((assembly.FullName[6] == Chars.Stop || assembly.FullName[6] == Chars.Comma) && assembly.FullName.StartsWith("System", StringComparison.Ordinal) || assembly.FullName.StartsWith("mscorlib", StringComparison.Ordinal)) { // yes, skip loading referenced assemblies continue; } // get the referenced assemblies var referenced = assembly.GetReferencedAssemblies(); // iterate the referenced assemblies foreach (var reference in referenced) { // is the referenced assembly a system assembly? if (reference.Name.StartsWith("System", StringComparison.Ordinal) || reference.Name.Equals("mscorlib")) { // yes, skip it continue; } // does the current loaded assemblies contain the referenced assembly? bool found = false; foreach (var a in loadedAssemblies) { if (a.FullName == reference.FullName) { found = true; break; } } // was the assembly already loaded? yes, skip if (found) { continue; } // ensure each reference is loaded System.Reflection.Assembly next; try { next = AppDomain.CurrentDomain.Load(reference); } catch { // gracefully ignore continue; } loadedAssemblies.Add(next); nextAssemblies.Add(next); } } } loadedAssemblies.Dispose(); nextAssemblies.Dispose(); }
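// LoadAssemblies above walks the referenced-assembly graph, skipping framework assemblies and anything
// already seen. A compact sketch of the same breadth-first walk using only BCL collections; the
// AssemblyWalker name, LoadReferencedAssemblies and the IsFramework predicate are illustrative, and the
// skip rules here are simplified compared to the character checks used above.
using System;
using System.Collections.Generic;
using System.Reflection;

public static class AssemblyWalker {
  private static bool IsFramework(string name) {
    return name.StartsWith("System", StringComparison.Ordinal) ||
           name.StartsWith("mscorlib", StringComparison.Ordinal);
  }
  
  public static List<Assembly> LoadReferencedAssemblies() {
    var seen = new HashSet<string>();
    var loaded = new List<Assembly>();
    var queue = new Queue<Assembly>(AppDomain.CurrentDomain.GetAssemblies());
    
    while (queue.Count != 0) {
      Assembly assembly = queue.Dequeue();
      if (!seen.Add(assembly.FullName)) continue;
      loaded.Add(assembly);
      if (IsFramework(assembly.FullName)) continue;
      
      // queue each non-framework referenced assembly that hasn't been seen, ignoring load failures
      foreach (AssemblyName reference in assembly.GetReferencedAssemblies()) {
        if (IsFramework(reference.Name) || seen.Contains(reference.FullName)) continue;
        try { queue.Enqueue(Assembly.Load(reference)); }
        catch { /* gracefully ignore assemblies that cannot be loaded */ }
      }
    }
    return loaded;
  }
}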
/// <summary> /// Main thread method. /// </summary> public void Run() { _running = true; Interlocked.Increment(ref _handleCount); while (_running) { //#if !DEBUG try { //#endif // main thread loop while (_running) { // iterate needles and get the next task _nextIndex = Needles.Count; while (--_nextIndex >= 0) { // is there a waiting task? if (Needles[_nextIndex].Next(out _next)) { // yes, has the context been changed? if (_index != _nextIndex) { // yes, update _index = _nextIndex; // set the synchronization context to allow async operations to be joined // back to the same threads assigned to the needle SynchronizationContext.SetSynchronizationContext(Needles[_index].Context); } //Log.D("Running : "+Needles[_nextIndex].Name+" === " + _next); // try update the thread-local time values Time.Update(); // run the task _next.Run(); _iteration = 0; _wait = false; break; } } if (_wait) { // determine the thread action based on the current iteration count ++_iteration; Time.Update(); if (_iteration % 5 == 0) { Thread.Sleep(1); } else if (_iteration > HandleCount) { _iteration = 0; Thread.Sleep(HandleCount); } else { Thread.Sleep(0); } } else { _wait = true; } } //#if !DEBUG } catch (Exception ex) { Log.Error("Unhandled exception", ex); if (!_wait) { _next.Stop(); _wait = true; } continue; } //#endif } // remove from the handles collection Handles.TakeItem().Remove(this); Handles.Release(); // remove this from the handles map _handleMap.TakeItem().Remove(Id); _handleMap.Release(); // clear this handles needle collection Needles.Dispose(); }
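// The idle branch of Run above backs off progressively as consecutive empty polls accumulate. A minimal
// sketch of that graduated spin/sleep strategy in isolation; the IdleBackoff name and the thresholds
// chosen here are illustrative, not the exact values used above.
using System.Threading;

public sealed class IdleBackoff {
  private int _emptyPolls;
  
  // call after a poll that found work to reset the backoff
  public void Reset() { _emptyPolls = 0; }
  
  // call after a poll that found nothing; yields first, then sleeps longer as idling continues
  public void Wait() {
    ++_emptyPolls;
    if (_emptyPolls < 10) Thread.Sleep(0);        // give up the timeslice only
    else if (_emptyPolls < 100) Thread.Sleep(1);  // brief sleep
    else { _emptyPolls = 0; Thread.Sleep(15); }   // longer sleep, then start over
  }
}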
/// <summary> /// Try add the specified bytes to the request. /// Returns the number of bytes read from the buffer. /// </summary> internal unsafe int TryAdd(byte[] buffer, int index, int count) { // is the request complete? if (_complete) { return(0); } // has the header been completely read? if (_section == Section.Body) { // does the current buffer contain the required bytes? if (_contentLength <= count) { // yes, write the remaining content length to the stream _stream.Write(buffer, index, _contentLength); count = _contentLength; _contentLength = 0; // the request is complete _complete = true; // move back to the start of the content body - this flushes the byte stream _stream.Position = _bodyIndex; // TODO : Auto decompression return(count); } // no, write the buffer _stream.Write(buffer, index, count); // decrement the remaining content length _contentLength -= count; return(count); } // persist the starting index of the stream long startIndex = _stream.WriteEnd; // write the buffer to the request _stream.Write(buffer, index, count); // return to the start of the buffer _stream.Position = startIndex; char c; // while characters can be read from the stream while ((c = _stream.ReadChar()) != Chars.Null) { // should the character be skipped? yes, continue reading if (c == Chars.CarriageReturn) { continue; } // determine the state of the request method switch (_section) { case Section.Method: // read until a space is encountered if (c == Chars.Space) { // try determine the type of web request if (_chars.EndsWith(Chars.G, Chars.E, Chars.T)) { Method = HttpMethod.Get; } else if (_chars.EndsWith(Chars.P, Chars.O, Chars.S, Chars.T)) { Method = HttpMethod.Post; } else if (_chars.EndsWith(Chars.P, Chars.U, Chars.T, Chars.Space)) { Method = HttpMethod.Put; } else if (_chars.EndsWith(Chars.D, Chars.E, Chars.L, Chars.E, Chars.T, Chars.E)) { Method = HttpMethod.Delete; } else if (_chars.EndsWith(Chars.U, Chars.P, Chars.D, Chars.A, Chars.T, Chars.E)) { Method = HttpMethod.Update; } _section = Section.Path; _chars.Reset(); } else { // add the current character to the collection _chars.Add(c); } break; case Section.Path: // yes, does the current character equal a new line? if (c == Chars.NewLine) { // yes, derive the request path and the http version index = _chars.Count; bool versionFound = false; // while the indices haven't been explored while (--index >= 0) { // does the current character equal a space? if (_chars[index] == Chars.Space) { // no, derive the request version Headers[HttpRequestHeader.HttpVersion] = new string(_chars.Array, index + 1, _chars.Count - index - 1); versionFound = true; break; } } // yes, has the version been derived? yes, derive the request path if (versionFound) { RequestPath = new string(_chars.Array, 0, index); } else { RequestPath = "/"; } _section = Section.HeaderKey; _chars.Reset(); } else { // add the current character to the collection _chars.Add(c); } break; case Section.HeaderKey: // is the current character a separator between header names and values? 
if (c == Chars.Colon) { // get the name of the current header _headerKey = new string(_chars.Array, 0, _chars.Count); // read the header value _section = Section.HeaderValue; // clear the characters _chars.Reset(); } else if (c == Chars.NewLine && _chars.Count == 0) { // yes, the request header has been completed _chars.Dispose(); // determine whether to continue the request switch (Method) { case HttpMethod.Put: case HttpMethod.Post: case HttpMethod.Update: // set the start of the body index _bodyIndex = (int)_stream.Position; // get the content length - was it able to be parsed? if (!int.TryParse(Headers[HttpRequestHeader.ContentLength], out _contentLength)) { // the content length wasn't able to be parsed Log.Warning("Invalid content length parameter."); // set the content length to '0'. _contentLength = 0; _complete = true; return(count); } // the request contains a body // should this request claim the remaining bytes written to the stream? if (_stream.WriteEnd - _stream.Position >= _contentLength) { // no, get the excess number of bytes from the request count = (int)(_stream.WriteEnd - startIndex); _contentLength = 0; _complete = true; } else { // decrement the content length _contentLength -= (int)(_stream.WriteEnd - _stream.Position); // flag the header as complete _section = Section.Body; // move the stream to the end of the written section _stream.Position = _stream.WriteEnd; } // return the number of bytes that contribute to the request return(count); default: // the request doesn't contain a body, end reading the request _complete = true; // return the number of bytes read from the buffer return((int)(_stream.Position - startIndex)); } } else { // add the current character to the collection _chars.Add(c); } break; case Section.HeaderValue: // is the current character a new line? if (c == Chars.NewLine) { // add the header to the collection Headers[_headerKey] = new string(_chars.Array, 0, _chars.Count); _chars.Reset(); _section = Section.HeaderKey; } else if (c != Chars.Space || _chars.Count != 0) { // add the current character to the collection _chars.Add(c); } break; } } // return the number of bytes read from the buffer return(count); }
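// TryAdd above consumes only the bytes that belong to the current request and returns how many it took,
// so leftover bytes can start the next pipelined request. A sketch of a receive loop built on that
// contract; the public HttpRequest type, its Complete property, the createRequest factory and the
// onRequest callback are assumptions for illustration - the real method is internal and the completion
// flag is a private field.
using System;
using System.Net.Sockets;

public static class RequestReader {
  public static void ReadRequests(Socket socket, Func<HttpRequest> createRequest, Action<HttpRequest> onRequest) {
    var buffer = new byte[8192];
    HttpRequest request = createRequest();
    
    int received;
    while ((received = socket.Receive(buffer)) > 0) {
      int offset = 0;
      // feed the buffer until every received byte has been claimed by some request
      while (offset < received) {
        offset += request.TryAdd(buffer, offset, received - offset);
        if (request.Complete) {
          onRequest(request);
          // any remaining bytes belong to the next pipelined request
          request = createRequest();
        }
      }
    }
  }
}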