            public int HandleMatch(IProcessorState processor, int bufferLength, ref int currentBufferPosition, int token, Stream target)
            {
                bool flag;

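                //When the "include" flag has been explicitly turned off, echo the matched token verbatim instead of expanding it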
                if (processor.Config.Flags.TryGetValue("include", out flag) && !flag)
                {
                    byte[] tokenValue = Tokens[token];
                    target.Write(tokenValue, 0, tokenValue.Length);
                    return tokenValue.Length;
                }

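                //Collect the bytes of the include path until the end token is matched, refilling the buffer whenever fewer bytes remain than the end token needs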
                List<byte> pathBytes = new List<byte>();

                while (!_endTokenMatcher.GetOperation(processor.CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
                {
                    pathBytes.Add(processor.CurrentBuffer[currentBufferPosition++]);
                    if (bufferLength - currentBufferPosition < _endTokenMatcher.MinLength)
                    {
                        processor.AdvanceBuffer(currentBufferPosition);
                        bufferLength          = processor.CurrentBufferLength;
                        currentBufferPosition = 0;

                        if (bufferLength == 0)
                        {
                            break;
                        }
                    }
                }

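                //Decode the collected bytes into the location of the file to include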
                byte[] pathBytesArray = pathBytes.ToArray();
                string sourceLocation = processor.Encoding.GetString(pathBytesArray).Trim();

                const int pageSize = 65536;

                //Start off with a 64K buffer, we'll keep adding chunks to this
                byte[] composite = new byte[pageSize];
                int    totalLength;

                using (Stream data = _source.SourceStreamOpener(sourceLocation))
                {
                    int index = composite.Length - pageSize;
                    int nRead = data.Read(composite, index, pageSize);

                    //As long as we're reading whole pages, keep allocating more space ahead
                    while (nRead == pageSize)
                    {
                        byte[] newBuffer = new byte[composite.Length + pageSize];
                        Buffer.BlockCopy(composite, 0, newBuffer, 0, composite.Length);
                        composite = newBuffer;
                        //Advance the read offset to the start of the newly allocated page before reading again
                        index     = composite.Length - pageSize;
                        nRead     = data.Read(composite, index, pageSize);
                    }

                    totalLength = composite.Length - (pageSize - nRead);
                }

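                //Detect the real encoding (and any BOM) of the included payload so it can be converted to the processor's encoding if they differ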
                byte[]   bom;
                Encoding realEncoding = EncodingUtil.Detect(composite, totalLength, out bom);

                if (!Equals(realEncoding, processor.Encoding))
                {
                    //Re-encode the payload, skipping its BOM; the converted bytes no longer carry a BOM
                    composite   = Encoding.Convert(realEncoding, processor.Encoding, composite, bom.Length, totalLength - bom.Length);
                    totalLength = composite.Length;
                    bom         = Array.Empty<byte>();
                }

                //Write the included content without its BOM (the target already has one from the outer file)
                target.Write(composite, bom.Length, totalLength - bom.Length);
                return composite.Length;
            }
Example #2
        public ProcessorState(Stream source, Stream target, int bufferSize, int flushThreshold, EngineConfig config, IReadOnlyList<IOperationProvider> operationProviders)
        {
            bool sizedToStream = false;

            //Buffer has to be at least as large as the largest BOM we could expect
            if (bufferSize < 4)
            {
                bufferSize = 4;
            }
            else
            {
                try
                {
                    if (source.Length < bufferSize)
                    {
                        sizedToStream = true;
                        bufferSize    = (int)source.Length;
                    }
                }
                catch
                {
                    //The stream may not support getting the Length property (a NetworkStream, for instance, throws a NotSupportedException);
                    //  suppress any error from accessing the property and continue with the specified buffer size
                }
            }

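            //Capture the streams and configuration, then prime the working buffer with a first read from the source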
            _source             = source;
            _target             = target;
            Config              = config;
            _flushThreshold     = flushThreshold;
            CurrentBuffer       = new byte[bufferSize];
            CurrentBufferLength = source.Read(CurrentBuffer, 0, CurrentBuffer.Length);

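            //Detect the source encoding from the first chunk and pass its BOM straight through to the target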
            byte[]   bom;
            Encoding encoding = EncodingUtil.Detect(CurrentBuffer, CurrentBufferLength, out bom);

            Encoding = encoding;
            CurrentBufferPosition = bom.Length;
            target.Write(bom, 0, bom.Length);

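            //Operation tries are cached per operation-provider set and per encoding so they only have to be built once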
            Dictionary<Encoding, Trie> byEncoding;

            if (!TrieLookup.TryGetValue(operationProviders, out byEncoding))
            {
                TrieLookup[operationProviders] = byEncoding = new Dictionary<Encoding, Trie>();
            }

            if (!byEncoding.TryGetValue(encoding, out _trie))
            {
                List<IOperation> operations = new List<IOperation>(operationProviders.Count);

                for (int i = 0; i < operationProviders.Count; ++i)
                {
                    IOperation op = operationProviders[i].GetOperation(encoding, this);
                    if (op != null)
                    {
                        operations.Add(op);
                    }
                }

                byEncoding[encoding] = _trie = Trie.Create(operations);
            }

            if (bufferSize < _trie.MaxLength && !sizedToStream)
            {
                //Grow the buffer so the longest token the trie can match always fits, carrying over the
                //  unread bytes that follow the BOM and topping the buffer up from the source
                byte[] tmp = new byte[_trie.MaxLength];
                int preserved = CurrentBufferLength - CurrentBufferPosition;
                Buffer.BlockCopy(CurrentBuffer, CurrentBufferPosition, tmp, 0, preserved);
                int nRead = _source.Read(tmp, preserved, tmp.Length - preserved);
                CurrentBuffer         = tmp;
                CurrentBufferLength   = preserved + nRead;
                CurrentBufferPosition = 0;
            }
        }