/// <summary>
/// Handles a parsed element arriving on the XML stream and dispatches it
/// to the matching event (login, route, stream/auth errors, message,
/// presence, IQ).
/// </summary>
/// <param name="sender">The stream parser that produced the element.</param>
/// <param name="e">The parsed stream element.</param>
public override void StreamParserOnStreamElement(object sender, Node e)
{
    base.StreamParserOnStreamElement(sender, e);

    // Pattern-matching switch replaces the original is/as chain: each
    // branch tests the type once instead of an `is` check plus a
    // redundant `as` cast.
    switch (e)
    {
        case Handshake _:
            this.m_Authenticated = true;
            OnLogin?.Invoke(this);
            if (this.KeepAlive)
            {
                this.CreateKeepAliveTimer();
            }
            break;

        case Route route:
            OnRoute?.Invoke(this, route);
            break;

        case Protocol.XmppStreamError streamErr:
            switch (streamErr.Condition)
            {
                // Auth errors are important for the users here, so catch
                // auth errors in a separate event here.
                case Protocol.XmppStreamErrorCondition.NotAuthorized:
                    // Authentication Error
                    OnAuthError?.Invoke(this, e as Element);
                    break;
                default:
                    OnStreamError?.Invoke(this, e as Element);
                    break;
            }
            break;

        case Message message:
            OnMessage?.Invoke(this, message);
            break;

        case Presence presence:
            OnPresence?.Invoke(this, presence);
            break;

        case IQ iq:
            OnIq?.Invoke(this, iq);
            break;
    }
}
/// <summary>
/// Re-tokenizes a raw attribute value and resolves character and entity
/// references into their final character form.
/// </summary>
/// <param name="buf">Source byte buffer.</param>
/// <param name="offset">Offset of the attribute value within <paramref name="buf"/>.</param>
/// <param name="length">Length of the attribute value in bytes.</param>
/// <returns>The normalized value, or null for a zero-length input.</returns>
private string NormalizeAttributeValue(byte[] buf, int offset, int length)
{
    if (length == 0)
    {
        return null;
    }

    string result = null;

    // Work on a private copy so the tokenizer never touches the caller's buffer.
    var slice = new byte[length];
    Buffer.BlockCopy(buf, offset, slice, 0, length);

    var aggregate = new BufferAggregate();
    aggregate.Write(slice);

    var bytes = aggregate.GetBuffer();
    var pos = 0;
    var token = new ContentToken();

    try
    {
        while (pos < bytes.Length)
        {
            var tok = this.m_enc.tokenizeAttributeValue(bytes, pos, bytes.Length, token);

            switch (tok)
            {
                case TOK.ATTRIBUTE_VALUE_S:
                case TOK.DATA_CHARS:
                case TOK.DATA_NEWLINE:
                    // Plain character data: decode it as-is.
                    result += utf.GetString(bytes, pos, token.TokenEnd - pos);
                    break;

                case TOK.CHAR_REF:
                case TOK.MAGIC_ENTITY_REF:
                    // Reference already resolved by the tokenizer to one char.
                    result += new string(new char[] { token.RefChar1 });
                    break;

                case TOK.CHAR_PAIR_REF:
                    // Reference resolving to two UTF-16 code units.
                    result += new string(new char[] { token.RefChar1, token.RefChar2 });
                    break;

                case TOK.ENTITY_REF:
#if CF
                    throw new util.NotImplementedException("Token type not implemented: " + tok);
#else
                    throw new NotImplementedException("Token type not implemented: " + tok);
#endif
            }

            pos = token.TokenEnd;
        }
    }
    catch (PartialTokenException)
    {
        // Incomplete token at the end of the value; keep what was decoded.
    }
    catch (ExtensibleTokenException)
    {
        // ignored;
    }
    catch (Exception ex)
    {
        OnStreamError?.Invoke(this, ex);
    }
    finally
    {
        aggregate.Clear(pos);
    }

    return result;
}
/// <summary>
/// Put bytes into the parser.
/// </summary>
/// <param name="buf">The bytes to put into the parse stream</param>
/// <param name="offset">Offset into buf to start at</param>
/// <param name="length">Number of bytes to write</param>
/// <remarks>
/// Accumulates bytes across calls: anything not consumed by the tokenizer
/// in this call stays in m_buf and is re-tokenized on the next Push.
/// </remarks>
public void Push(byte[] buf, int offset, int length)
{
    // or assert, really, but this is a little nicer.
    if (length == 0)
    {
        return;
    }

    // No locking is required.  Read() won't get called again
    // until this method returns.

    // TODO: only do this copy if we have a partial token at the
    // end of parsing.
    var copy = new byte[length];
    Buffer.BlockCopy(buf, offset, copy, 0, length);
    this.m_buf.Write(copy);

    var b = this.m_buf.GetBuffer();
    var off = 0;
    var tok = TOK.END_TAG;
    var ct = new ContentToken();

    try
    {
        while (off < b.Length)
        {
            // Inside a CDATA section a dedicated tokenizer mode is used.
            if (this.m_cdata)
            {
                tok = this.m_enc.tokenizeCdataSection(b, off, b.Length, ct);
            }
            else
            {
                tok = this.m_enc.tokenizeContent(b, off, b.Length, ct);
            }

            switch (tok)
            {
                case TOK.EMPTY_ELEMENT_NO_ATTS:
                case TOK.EMPTY_ELEMENT_WITH_ATTS:
                    // Self-closing element: open and close in one step.
                    this.StartTag(b, off, ct, tok);
                    this.EndTag(b, off, ct, tok);
                    break;
                case TOK.START_TAG_NO_ATTS:
                case TOK.START_TAG_WITH_ATTS:
                    this.StartTag(b, off, ct, tok);
                    break;
                case TOK.END_TAG:
                    this.EndTag(b, off, ct, tok);
                    break;
                case TOK.DATA_CHARS:
                case TOK.DATA_NEWLINE:
                    this.AddText(utf.GetString(b, off, ct.TokenEnd - off));
                    break;
                case TOK.CHAR_REF:
                case TOK.MAGIC_ENTITY_REF:
                    // Reference already resolved by the tokenizer to one char.
                    this.AddText(new string(new char[] { ct.RefChar1 }));
                    break;
                case TOK.CHAR_PAIR_REF:
                    // Reference resolving to two UTF-16 code units.
                    this.AddText(new string(new char[] { ct.RefChar1, ct.RefChar2 }));
                    break;
                case TOK.COMMENT:
                    if (this.current != null)
                    {
                        // Strip the comment delimiters:
                        // <!--  4 chars at the front
                        //  -->  3 chars at the back (4 + 3 = 7 below)
                        var start = off + 4 * this.m_enc.MinBytesPerChar;
                        var end = ct.TokenEnd - off - 7 * this.m_enc.MinBytesPerChar;
                        var text = utf.GetString(b, start, end);
                        this.current.AddChild(new Comment(text));
                    }
                    break;
                case TOK.CDATA_SECT_OPEN:
                    this.m_cdata = true;
                    break;
                case TOK.CDATA_SECT_CLOSE:
                    this.m_cdata = false;
                    break;
                case TOK.XML_DECL:
                    // thou shalt use UTF8, and XML version 1.
                    // i shall ignore evidence to the contrary...
                    // TODO: Throw an exception if these assumptions are
                    // wrong
                    break;
                case TOK.ENTITY_REF:
                case TOK.PI:
#if CF
                    throw new util.NotImplementedException("Token type not implemented: " + tok);
#else
                    throw new NotImplementedException("Token type not implemented: " + tok);
#endif
            }

            off = ct.TokenEnd;
        }
    }
    catch (PartialTokenException)
    {
        // ignored; token is incomplete — wait for more bytes.
    }
    catch (ExtensibleTokenException)
    {
        // ignored;
    }
    catch (Exception ex)
    {
        OnStreamError?.Invoke(this, ex);
    }
    finally
    {
        // NOTE(review): presumably discards the `off` bytes consumed so far,
        // keeping any partial token tail for the next Push — confirm against
        // the BufferAggregate implementation.
        this.m_buf.Clear(off);
    }
}
/// <summary>
/// Write bytes into the parser.
/// </summary>
/// <param name="buf">The bytes to put into the parse stream</param>
/// <param name="offset">Offset into buf to start at</param>
/// <param name="length">Number of bytes to write</param>
/// <exception cref="System.NotImplementedException">Token type not implemented: + tok</exception>
public void Write(byte[] buf, int offset, int length)
{
    // A zero-length write is a no-op.
    if (length == 0)
    {
        return;
    }

    // No locking is required: Read() won't get called again until this
    // method returns.

    // TODO: only do this copy if we have a partial token at the end of
    // parsing.
    var chunk = new byte[length];
    Buffer.BlockCopy(buf, offset, chunk, 0, length);
    bufferAggregate.Write(chunk);

    byte[] bytes = bufferAggregate.GetBuffer();
    int pos = 0;
    var token = new ContentToken();

    try
    {
        while (pos < bytes.Length)
        {
            Tokens kind = isCData
                ? utf8Encoding.TokenizeCdataSection(bytes, pos, bytes.Length, token)
                : utf8Encoding.TokenizeContent(bytes, pos, bytes.Length, token);

            switch (kind)
            {
                case Tokens.PartialToken:
                case Tokens.PartialChar:
                case Tokens.ExtensibleToken:
                    // Incomplete input: stop now; the unconsumed tail stays
                    // buffered (see finally) until the next Write.
                    return;

                case Tokens.EmptyElementNoAtts:
                case Tokens.EmptyElementWithAtts:
                    // Self-closing element: open and close in one step.
                    StartTag(bytes, pos, token, kind);
                    EndTag(bytes, pos, token, kind);
                    break;

                case Tokens.StartTagNoAtts:
                case Tokens.StartTagWithAtts:
                    StartTag(bytes, pos, token, kind);
                    break;

                case Tokens.EndTag:
                    EndTag(bytes, pos, token, kind);
                    break;

                case Tokens.DataChars:
                case Tokens.DataNewline:
                    AddText(utf.GetString(bytes, pos, token.TokenEnd - pos));
                    break;

                case Tokens.CharReference:
                case Tokens.MagicEntityReference:
                    // Reference already resolved by the tokenizer to one char.
                    AddText(new string(new[] { token.RefChar1 }));
                    break;

                case Tokens.CharPairReference:
                    // Reference resolving to two UTF-16 code units.
                    AddText(new string(new[] { token.RefChar1, token.RefChar2 }));
                    break;

                case Tokens.Comment:
                    if (current != null)
                    {
                        // Strip the delimiters: "<!--" is 4 chars in front,
                        // "-->" is 3 at the back (4 + 3 = 7 below).
                        int start = pos + 4 * utf8Encoding.MinBytesPerChar;
                        int end = token.TokenEnd - pos - 7 * utf8Encoding.MinBytesPerChar;
                        string text = utf.GetString(bytes, start, end);
                        current.Add(text);
                    }
                    break;

                case Tokens.CdataSectOpen:
                    isCData = true;
                    break;

                case Tokens.CdataSectClose:
                    CloseCDataSection();
                    isCData = false;
                    break;

                case Tokens.XmlDeclaration:
                    // thou shalt use UTF8, and XML version 1.
                    // i shall ignore evidence to the contrary...
                    // TODO: Throw an exception if these assumptions are wrong.
                    break;

                case Tokens.EntityReference:
                case Tokens.ProcessingInstruction:
                    throw new NotImplementedException("Token type not implemented: " + kind);
            }

            pos = token.TokenEnd;
        }
    }
    catch (Exception ex)
    {
        OnStreamError?.Invoke(ex);
    }
    finally
    {
        // Drop the consumed prefix; any partial token tail remains buffered.
        bufferAggregate.RemoveFirst(pos);
    }
}
/// <summary>
/// Normalizes a raw attribute value: re-tokenizes the given byte range and
/// resolves character and entity references into their character form.
/// </summary>
/// <param name="buf">Source byte buffer.</param>
/// <param name="offset">Offset of the attribute value within <paramref name="buf"/>.</param>
/// <param name="length">Length of the attribute value in bytes.</param>
/// <returns>
/// The normalized value; <see cref="string.Empty"/> for a zero-length input,
/// or null when the value is truncated mid-token (or produced no tokens).
/// </returns>
private string NormalizeAttributeValue(byte[] buf, int offset, int length)
{
    if (length == 0)
    {
        return string.Empty;
    }

    // Lazily created so that a value which never appends anything still
    // returns null, matching the previous string-concatenation behavior,
    // while avoiding O(n^2) `string +=` in the loop.
    System.Text.StringBuilder val = null;

    var buffer = new ByteBuffer();
    var copy = new byte[length];
    Buffer.BlockCopy(buf, offset, copy, 0, length);
    buffer.Write(copy);

    byte[] b = buffer.GetBuffer();
    int off = 0;
    var ct = new ContentToken();

    try
    {
        while (off < b.Length)
        {
            Tokens tok = utf8Encoding.TokenizeAttributeValue(b, off, b.Length, ct);
            switch (tok)
            {
                case Tokens.PartialToken:
                case Tokens.PartialChar:
                case Tokens.ExtensibleToken:
                    // Value is truncated mid-token; report failure as null.
                    return null;

                case Tokens.AttributeValueS:
                case Tokens.DataChars:
                case Tokens.DataNewline:
                    // Plain character data: decode it as-is.
                    val = (val ?? new System.Text.StringBuilder()).Append(utf.GetString(b, off, ct.TokenEnd - off));
                    break;

                case Tokens.CharReference:
                case Tokens.MagicEntityReference:
                    // Reference already resolved by the tokenizer to one char.
                    val = (val ?? new System.Text.StringBuilder()).Append(ct.RefChar1);
                    break;

                case Tokens.CharPairReference:
                    // Reference resolving to two UTF-16 code units.
                    val = (val ?? new System.Text.StringBuilder()).Append(ct.RefChar1).Append(ct.RefChar2);
                    break;

                case Tokens.EntityReference:
                    throw new NotImplementedException("Token type not implemented: " + tok);
            }

            off = ct.TokenEnd;
        }
    }
    catch (Exception ex)
    {
        OnStreamError?.Invoke(ex);
    }
    finally
    {
        buffer.RemoveFirst(off);
    }

    return val?.ToString();
}