/// <summary>
/// Serializes the given headers using delta compression against the remote
/// reference set and returns the encoded header block.
/// </summary>
/// <param name="headers">Headers to compress; every entry must have a non-null key and value.</param>
/// <returns>The serialized header block bytes.</returns>
/// <exception cref="InvalidHeaderException">Thrown when a header has a null key or value.</exception>
public byte[] Compress(HeadersList headers)
{
    var toSend = new SizedHeadersList();
    // Start from a copy of the remote reference set; entries that are still
    // wanted are removed from this list, so whatever remains must be evicted.
    var toDelete = new SizedHeadersList(_remoteRefSet);

    ClearStream(_serializerStream, (int)_serializerStream.Position);

    foreach (var header in headers)
    {
        if (header.Key == null || header.Value == null)
        {
            throw new InvalidHeaderException(header);
        }

        if (!_remoteRefSet.Contains(header))
        {
            // Not in the reference set yet: must be sent.
            toSend.Add(header);
        }
        else
        {
            // Peer already has it: keep it, so don't delete.
            toDelete.Remove(header);
        }
    }

    foreach (var header in toDelete)
    {
        // Anything left in toDelete must leave the remote reference set;
        // sending it as an indexed representation toggles it off.
        CompressIndexed(header);
        _remoteRefSet.Remove(header); // Update our copy of the remote state.
    }

    foreach (var header in toSend)
    {
        if (_remoteHeaderTable.Contains(header))
        {
            // Already in the remote header table: an index reference suffices.
            CompressIndexed(header);
        }
        else
        {
            // Unknown header: send it literally with incremental indexing.
            CompressHeader(header, new Indexation(IndexationType.Incremental));
        }
        _remoteRefSet.Add(header); // Update our copy of the remote state.
    }

    _serializerStream.Flush();
    var result = new byte[_serializerStream.Position];
    var streamBuffer = _serializerStream.GetBuffer();
    Buffer.BlockCopy(streamBuffer, 0, result, 0, (int)_serializerStream.Position);
    return result;
}
/// <summary>
/// Parses a serialized header block and reconstructs the full header set,
/// updating the local reference set in the process.
/// </summary>
/// <param name="serializedHeaders">The compressed header block.</param>
/// <returns>The decompressed headers.</returns>
/// <exception cref="CompressionError">Wraps any exception raised while parsing.</exception>
public HeadersList Decompress(byte[] serializedHeaders)
{
    try
    {
        var workingSet = new SizedHeadersList(_localRefSet);
        _currentOffset = 0;

        // '<' rather than '!=' so that a parser that ever advances past the
        // end of the buffer terminates instead of looping forever.
        while (_currentOffset < serializedHeaders.Length)
        {
            var entry = ParseHeader(serializedHeaders);
            var header = new KeyValuePair<string, string>(entry.Item1, entry.Item2);

            if (entry.Item3 == IndexationType.Indexed)
            {
                // An indexed representation toggles the header's presence
                // in the reference set.
                if (workingSet.Contains(header))
                {
                    workingSet.RemoveAll(h => h.Equals(header));
                }
                else
                {
                    workingSet.Add(header);
                }
            }
            else
            {
                workingSet.Add(header);
            }
        }

        _localRefSet = new SizedHeadersList(workingSet);

        // Evict every reference-set entry no longer present in the header
        // table. A single predicate RemoveAll replaces the original indexed
        // backwards loop: that loop called RemoveAll (which can delete
        // several equal entries at once, including ones below the current
        // index), so a duplicated header could leave the index pointing past
        // the shrunken list and throw on the next iteration.
        _localRefSet.RemoveAll(h => !_localHeaderTable.Contains(h));

        return workingSet;
    }
    catch (Exception e)
    {
        throw new CompressionError(e);
    }
}