/// <summary>
/// Verifies that SetContentLength appends a Content-Length header whose value
/// equals the byte count of the requested range (end - start + 1).
/// </summary>
public void SetContentLength_ContentLengthSet_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var entity = MockRepository.GenerateMock<IEntity>();

    // Uppercase 'L' suffix: lowercase 'l' is easily mistaken for the digit '1'.
    var startRange = 0L;
    var endRange = 499L;
    var rangeItem = new RangeItem { StartRange = startRange, EndRange = endRange };
    var contentLength = endRange - startRange + 1;

    httpResponseHeaderHelper.Expect(x => x.AppendHeader(httpResponse, SingleByteRangeResponse.HTTP_HEADER_CONTENT_LENGTH, contentLength.ToString()));

    //Act
    var singleByteRangeResponse = new SingleByteRangeResponse(httpResponseHeaderHelper, rangeItem);
    singleByteRangeResponse.SetContentLength(httpResponse, entity);

    //Assert
    httpResponseHeaderHelper.VerifyAllExpectations();
    httpResponse.VerifyAllExpectations();
}
/// <summary>
/// When the entity's stored compression matches the requested compression,
/// SendHeaders must set the content encoding, append a Content-Length for the
/// range, and copy the entity's content type onto the response.
/// </summary>
public void SendHeaders_EntityCompressionMatchesRequestedCompression_Void()
{
    //Arrange
    var response = MockRepository.GenerateMock<HttpResponseBase>();
    var headerHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var entity = MockRepository.GenerateMock<IEntity>();

    var requestedCompression = ResponseCompressionType.None;
    var range = new RangeItem { StartRange = 0L, EndRange = 499L };
    var expectedContentLength = range.EndRange - range.StartRange + 1;

    entity.Stub(x => x.ContentLength).Return(1000L);
    entity.Stub(x => x.CompressionType).Return(ResponseCompressionType.None);

    headerHelper.Expect(x => x.SetContentEncoding(response, requestedCompression));
    headerHelper.Expect(x => x.AppendHeader(response, EntityResponseSinglePart.HttpHeaderContentLength, expectedContentLength.ToString()));
    response.Expect(x => x.ContentType = entity.ContentType);

    //Act
    var entityResponse = new EntityResponseSinglePart(headerHelper, range);
    entityResponse.SendHeaders(response, requestedCompression, entity);

    //Assert
    headerHelper.VerifyAllExpectations();
    response.VerifyAllExpectations();
}
/// <summary>
/// Verifies that SetOtherHeaders sets the response content type from the entity
/// and appends a Content-Range header of the form "bytes start-end/total".
/// </summary>
public void SetOtherHeaders_OtherHeadersSet_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var entity = MockRepository.GenerateMock<IEntity>();

    var contentType = "image/gif";
    // Uppercase 'L' suffix: lowercase 'l' is easily mistaken for the digit '1'.
    var contentLength = 1000L;
    var startRange = 0L;
    var endRange = 499L;
    var rangeItem = new RangeItem { StartRange = startRange, EndRange = endRange };

    entity.Stub(x => x.ContentType).Return(contentType);
    entity.Stub(x => x.ContentLength).Return(contentLength);

    httpResponse.Expect(x => x.ContentType = contentType);
    httpResponseHeaderHelper.Expect(x => x.AppendHeader(httpResponse, SingleByteRangeResponse.HTTP_HEADER_CONTENT_RANGE, SingleByteRangeResponse.BYTES + " " + startRange + "-" + endRange + "/" + contentLength));

    //Act
    var singleByteRangeResponse = new SingleByteRangeResponse(httpResponseHeaderHelper, rangeItem);
    singleByteRangeResponse.SetOtherHeaders(httpResponse, entity);

    //Assert
    httpResponseHeaderHelper.VerifyAllExpectations();
    httpResponse.VerifyAllExpectations();
}
/// <summary>
/// When GZip compression is requested, SendHeaders must set the GZip content
/// encoding and wrap the response's output filter in a GZipStream.
/// </summary>
public void SendHeaders_EntityCompressionNoneRequestedCompressionGzip_Void()
{
    //Arrange
    var response = MockRepository.GenerateMock<HttpResponseBase>();
    var headerHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var entity = MockRepository.GenerateMock<IEntity>();

    var requestedCompression = ResponseCompressionType.GZip;
    var range = new RangeItem { StartRange = 0L, EndRange = 499L };

    // Give the mocked Filter property real get/set behaviour so the implementation
    // can replace it with a compressing stream.
    response.Stub(x => x.Filter).PropertyBehavior();
    response.Filter = new MemoryStream();

    headerHelper.Expect(x => x.SetContentEncoding(response, requestedCompression));
    response.Expect(x => x.ContentType = entity.ContentType);

    //Act
    var entityResponse = new EntityResponseSinglePart(headerHelper, range);
    entityResponse.SendHeaders(response, requestedCompression, entity);

    //Assert
    headerHelper.VerifyAllExpectations();
    response.VerifyAllExpectations();
    Assert.IsInstanceOf(typeof(GZipStream), response.Filter);
}
/// <summary>
/// A HEAD request must produce headers only: SendBody must not transmit any
/// bytes of the requested range.
/// </summary>
public void SendBody_HeadRequest_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var transmitEntityStrategy = MockRepository.GenerateMock<ITransmitEntityStrategy>();

    var requestHttpMethod = HttpMethod.Head;
    // Uppercase 'L' suffix: lowercase 'l' is easily mistaken for the digit '1'.
    var startRange = 0L;
    var endRange = 499L;
    var rangeItem = new RangeItem { StartRange = startRange, EndRange = endRange };
    var bytesToRead = endRange - startRange + 1;

    //Act
    var singleByteRangeResponse = new EntityResponseSinglePart(httpResponseHeaderHelper, rangeItem);
    singleByteRangeResponse.SendBody(requestHttpMethod, httpResponse, transmitEntityStrategy);

    //Assert
    transmitEntityStrategy.AssertWasNotCalled(x => x.Transmit(httpResponse, startRange, bytesToRead));
}
/// <summary>
/// Verifies that for a multipart response the Content-Length equals the sum of
/// each part's multipart header, each part's byte count, and the final footer.
/// </summary>
public void SetContentLength_ContentLengthSet_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var entity = MockRepository.GenerateMock<IEntity>();

    var contentType = "image/gif";
    // Uppercase 'L' suffix: lowercase 'l' is easily mistaken for the digit '1'.
    var contentLength = 1000L;

    var firstStartRange = 0L;
    var firstEndRange = 499L;
    var firstRangeItem = new RangeItem { StartRange = firstStartRange, EndRange = firstEndRange };

    var secondStartRange = 500L;
    var secondEndRange = 999L;
    var secondRangeItem = new RangeItem { StartRange = secondStartRange, EndRange = secondEndRange };

    var rangeItems = new[] { firstRangeItem, secondRangeItem };

    // Expected multipart part headers and footer, mirroring the implementation's format.
    var firstHeader = "\r\n--" + MultipleByteRangeResponse.MULTIPART_BOUNDARY + "\r\n"
        + MultipleByteRangeResponse.HTTP_HEADER_CONTENT_TYPE + ": " + contentType + "\r\n"
        + MultipleByteRangeResponse.HTTP_HEADER_CONTENT_RANGE + ": " + MultipleByteRangeResponse.BYTES + " "
        + firstStartRange + "-" + firstEndRange + "/" + contentLength + "\r\n"
        + "\r\n";
    var secondHeader = "\r\n--" + MultipleByteRangeResponse.MULTIPART_BOUNDARY + "\r\n"
        + MultipleByteRangeResponse.HTTP_HEADER_CONTENT_TYPE + ": " + contentType + "\r\n"
        + MultipleByteRangeResponse.HTTP_HEADER_CONTENT_RANGE + ": " + MultipleByteRangeResponse.BYTES + " "
        + secondStartRange + "-" + secondEndRange + "/" + contentLength + "\r\n"
        + "\r\n";
    var footer = "\r\n--" + MultipleByteRangeResponse.MULTIPART_BOUNDARY + "--\r\n";

    var responseContentLength = firstHeader.Length + firstEndRange - firstStartRange + 1 + secondHeader.Length + secondEndRange - secondStartRange + 1 + footer.Length;

    entity.Stub(x => x.ContentType).Return(contentType);
    entity.Stub(x => x.ContentLength).Return(contentLength);

    // NOTE(review): this multipart test references SingleByteRangeResponse's
    // Content-Length constant — presumably the header name is identical; confirm
    // whether MultipleByteRangeResponse exposes its own constant that should be used.
    httpResponseHeaderHelper.Expect(x => x.AppendHeader(httpResponse, SingleByteRangeResponse.HTTP_HEADER_CONTENT_LENGTH, responseContentLength.ToString()));

    //Act
    var multipleByteRangeResponse = new MultipleByteRangeResponse(httpResponseHeaderHelper, rangeItems);
    multipleByteRangeResponse.SetContentLength(httpResponse, entity);

    //Assert
    httpResponseHeaderHelper.VerifyAllExpectations();
    httpResponse.VerifyAllExpectations();
}
/// <summary>
/// Builds a RangeKeyItem from a raw range bucket. The bucket's key is copied
/// into Key, Text and Value; when the key contains exactly one delimiter, it is
/// split so Value receives the first part and Text the second.
/// </summary>
public RangeKeyItem(RangeItem item)
{
    From = item.From;
    To = item.To;
    FromAsString = item.FromAsString;
    ToAsString = item.ToAsString;
    DocCount = item.DocCount;
    Aggregations = SearchAggregationParser.Parse(item.Aggregations);

    // Default all three to the raw key; the split below may refine Value/Text.
    Key = Text = Value = item.Key;

    var parts = Key.Split(new[] { FilterField.DefaultDelimiter }, StringSplitOptions.RemoveEmptyEntries);
    if (parts.Length == 2)
    {
        Value = parts[0];
        Text = parts[1];
    }
}
/// <summary>
/// SendBody must throw for HTTP methods other than HEAD or GET, and must not
/// transmit any part of the entity.
/// </summary>
public void SendBody_NotAHeadOrGetRequest_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var transmitEntityStrategy = MockRepository.GenerateMock<ITransmitEntityStrategy>();

    var requestHttpMethod = HttpMethod.Options;
    // Uppercase 'L' suffix: lowercase 'l' is easily mistaken for the digit '1'.
    var startRange = 0L;
    var endRange = 499L;
    var rangeItem = new RangeItem { StartRange = startRange, EndRange = endRange };
    var bytesToRead = endRange - startRange + 1;

    //Act
    var singleByteRangeResponse = new EntityResponseSinglePart(httpResponseHeaderHelper, rangeItem);
    var ex = Assert.Throws<Exception>(() => singleByteRangeResponse.SendBody(requestHttpMethod, httpResponse, transmitEntityStrategy));

    //Assert
    // Previously `ex` and `bytesToRead` were computed but never used; assert on both.
    Assert.IsNotNull(ex);
    transmitEntityStrategy.AssertWasNotCalled(x => x.Transmit(httpResponse, startRange, bytesToRead));
}
/// <summary>
/// For a multipart response with GZip requested while the entity is stored
/// uncompressed, SendHeaders must set the GZip content encoding, use the
/// multipart content type, and wrap the response filter in a GZipStream.
/// </summary>
public void SendHeaders_EntityCompressionNoneRequestedCompressionGzip_Void()
{
    //Arrange
    var response = MockRepository.GenerateMock<HttpResponseBase>();
    var headerHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var entity = MockRepository.GenerateMock<IEntity>();

    var requestedCompression = ResponseCompressionType.GZip;

    var firstRange = new RangeItem { StartRange = 0L, EndRange = 499L };
    var secondRange = new RangeItem { StartRange = 500L, EndRange = 999L };
    var ranges = new[] { firstRange, secondRange };

    entity.Stub(x => x.CompressionType).Return(ResponseCompressionType.None);

    // Give the mocked Filter property real get/set behaviour so the implementation
    // can replace it with a compressing stream.
    response.Stub(x => x.Filter).PropertyBehavior();
    response.Filter = new MemoryStream();

    headerHelper.Expect(x => x.SetContentEncoding(response, requestedCompression));
    response.Expect(x => x.ContentType = EntityResponseMultiPart.MultipartContenttype);

    //Act
    var multiPartEntityResponse = new EntityResponseMultiPart(headerHelper, ranges);
    multiPartEntityResponse.SendHeaders(response, requestedCompression, entity);

    //Assert
    headerHelper.VerifyAllExpectations();
    response.VerifyAllExpectations();
    Assert.IsInstanceOf(typeof(GZipStream), response.Filter);
}
/// <summary>
/// Reads one range-aggregation bucket from the JSON stream and materialises it
/// as a RangeItem, including any nested sub-aggregations.
/// </summary>
/// <param name="reader">Reader positioned on the first property name of the bucket object.</param>
/// <param name="serializer">Serializer passed through to nested-aggregation parsing.</param>
/// <param name="key">Optional bucket key; overridden if a "key" property is present in the stream.</param>
/// <returns>The populated bucket.</returns>
public IAggregation GetRangeAggregation(JsonReader reader, JsonSerializer serializer, string key = null)
{
    string fromAsString = null, toAsString = null;
    long? docCount = null;
    double? toDouble = null, fromDouble = null;

    var readExpectedProperty = true;
    while (readExpectedProperty)
    {
        switch (reader.Value as string)
        {
            case "from":
                reader.Read();
                // FIX: Json.NET tokenises whole-number bounds as Int64, not Double.
                // The original only accepted typeof(double), silently dropping
                // integral "from" values; accept both numeric token types.
                if (reader.ValueType == typeof(double))
                    fromDouble = (double)reader.Value;
                else if (reader.ValueType == typeof(long))
                    fromDouble = (long)reader.Value;
                reader.Read();
                break;
            case "to":
                reader.Read();
                if (reader.ValueType == typeof(double))
                    toDouble = (double)reader.Value;
                else if (reader.ValueType == typeof(long))
                    toDouble = (long)reader.Value;
                reader.Read();
                break;
            case "key":
                reader.Read();
                key = reader.Value as string;
                reader.Read();
                break;
            case "from_as_string":
                reader.Read();
                fromAsString = reader.Value as string;
                reader.Read();
                break;
            case "to_as_string":
                reader.Read();
                toAsString = reader.Value as string;
                reader.Read();
                break;
            case "doc_count":
                reader.Read();
                docCount = (reader.Value as long?).GetValueOrDefault(0);
                reader.Read();
                break;
            default:
                // Unrecognised property: stop scanning and let nested-aggregation
                // parsing consume the remainder of the object.
                readExpectedProperty = false;
                break;
        }
    }

    var bucket = new RangeItem
    {
        Key = key,
        From = fromDouble,
        To = toDouble,
        DocCount = docCount.GetValueOrDefault(),
        FromAsString = fromAsString,
        ToAsString = toAsString
    };
    bucket.Aggregations = this.GetNestedAggregations(reader, serializer);
    return bucket;
}
/// <summary>
/// Loads (or creates) the SHIPPING settings record for the given key, refreshes
/// the shipping rule list, and rebuilds the cost-range table from each rule's
/// "shiprange" text box (one "low-high=cost" entry per line).
/// </summary>
/// <param name="shippingkey">GUID key identifying the shipping settings record.</param>
private void PopulateData(String shippingkey)
{
    var modCtrl = new NBrightBuyController();
    Info = modCtrl.GetByGuidKey(PortalSettings.Current.PortalId, -1, "SHIPPING", shippingkey);
    if (Info == null)
    {
        // No stored record yet: start from an empty one bound to this portal/key.
        Info = new NBrightInfo(true);
        Info.GUIDKey = shippingkey;
        Info.TypeCode = "SHIPPING";
        Info.ModuleId = -1;
        Info.PortalId = PortalSettings.Current.PortalId;
    }
    _shippingList = GetRuleList();

    // build range Data
    _rangeData = new List<RangeItem>();
    foreach (var i in _shippingList)
    {
        var rangeList = i.GetXmlProperty("genxml/textbox/shiprange");
        var rl = rangeList.Split(new string[] { "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
        foreach (var s in rl)
        {
            // Each line is expected to be "low-high=cost".
            var ri = s.Split('=');
            // Use the array's Length property rather than the LINQ Count() extension.
            if (ri.Length == 2 && Utils.IsNumeric(ri[1]))
            {
                var riV = ri[0].Split('-');
                if (riV.Length == 2 && Utils.IsNumeric(riV[0]) && Utils.IsNumeric(riV[1]))
                {
                    var rItem = new RangeItem();
                    rItem.RefCsv = "," + i.GetXmlProperty("genxml/textbox/shipref") + ",";
                    // Values are stored with '.' decimal separators, so parse with en-US.
                    rItem.RangeLow = Convert.ToDouble(riV[0], CultureInfo.GetCultureInfo("en-US"));
                    rItem.Cost = Convert.ToDouble(ri[1], CultureInfo.GetCultureInfo("en-US"));
                    rItem.RangeHigh = Convert.ToDouble(riV[1], CultureInfo.GetCultureInfo("en-US"));
                    _rangeData.Add(rItem);
                }
            }
        }
    }
}
// Upload a local file, or delete file(s) on the server side.
// parameters:
//      strStyle    when it contains "delete", the file(s) referred to by strFilePath are to be deleted
// return:
//      -2  timestamp mismatch
//      -1  general error
//      0   succeeded
//      other   number of files and directories successfully deleted
public int WriteFile(
    string strRootPath,
    string strFilePath,
    string strRanges,
    long lTotalLength,
    byte[] baSource,
    string strStyle,
    byte[] baInputTimestamp,
    out byte[] baOutputTimestamp,
    out string strError)
{
    baOutputTimestamp = null;
    strError = "";

    if (String.IsNullOrEmpty(strFilePath) == true)
    {
        strError = "strFilePath 参数值不能为空";
        return (-1);
    }

    if (lTotalLength < 0)
    {
        strError = "lTotalLength 参数值不能为负数";
        return (-1);
    }

    if (strStyle == null)
    {
        strStyle = "";
    }

    // 2017/12/16
    // The caller may ship the payload gzip-compressed; inflate it before any range handling.
    if (StringUtil.IsInList("gzip", strStyle) && baSource != null && baSource.Length > 0)
    {
        baSource = ByteArray.DecompressGzip(baSource);
    }

    bool bDelete = StringUtil.IsInList("delete", strStyle) == true;
    if (bDelete == true)
    {
        // Delete mode: strFilePath is treated as a glob pattern within its directory.
        int nDeleteCount = 0;
        string strDirectory = Path.GetDirectoryName(strFilePath);
        string strPattern = Path.GetFileName(strFilePath);
        DirectoryInfo di = new DirectoryInfo(strDirectory);
        FileSystemInfo[] sis = di.GetFileSystemInfos(strPattern);
        foreach (FileSystemInfo si in sis)
        {
            // Safety check: files and directories must not escape the specified root directory
            if (PathUtil.IsChildOrEqual(si.FullName, strRootPath) == false)
            {
                continue;
            }

            if (si is DirectoryInfo)
            {
                // Delete a directory
                _physicalFileCache.ClearAll();
                PathUtil.DeleteDirectory(si.FullName);
                nDeleteCount++;
                continue;
            }

            if (si is FileInfo)
            {
                // Delete a file, along with its ".new" and range auxiliary files
                //if (File.Exists(si.FullName) == true)
                //    File.Delete(si.FullName);
                _physicalFileCache.FileDeleteIfExists(si.FullName);

                string strNewFilePath1 = GetNewFileName(si.FullName);
                //if (File.Exists(strNewFilePath1) == true)
                //    File.Delete(strNewFilePath1);
                _physicalFileCache.FileDeleteIfExists(strNewFilePath1);

                string strRangeFileName = GetRangeFileName(si.FullName);
                //if (File.Exists(strRangeFileName) == true)
                //    File.Delete(strRangeFileName);
                _physicalFileCache.FileDeleteIfExists(strRangeFileName);
                nDeleteCount++;
            }
        }

        return (nDeleteCount);
    }

#if NO
    if (bDelete == true && Directory.Exists(strFilePath) == true)
    {
        // Delete a directory
        PathUtil.DeleteDirectory(strFilePath);
        return (0);
    }

    string strNewFilePath = GetNewFileName(strFilePath);
    if (bDelete == true && File.Exists(strFilePath) == true)
    {
        // Delete a file
        if (File.Exists(strFilePath) == true)
        {
            File.Delete(strFilePath);
        }
        if (File.Exists(strNewFilePath) == true)
        {
            File.Delete(strNewFilePath);
        }
        string strRangeFileName = GetRangeFileName(strFilePath);
        if (File.Exists(strRangeFileName) == true)
        {
            File.Delete(strRangeFileName);
        }
        return (0);
    }
#endif

    if (bDelete == false && baSource == null)
    {
        strError = "baSource 参数值不能为 null";
        return (-1);
    }

    string strNewFilePath = GetNewFileName(strFilePath);

    // Make sure every sub-directory along the file path has been created
    PathUtil.TryCreateDir(Path.GetDirectoryName(strFilePath));

    //*************************************************
    // Check the timestamp, when the target file exists
    if (File.Exists(strFilePath) == true || File.Exists(strNewFilePath) == true)
    {
        if (StringUtil.IsInList("ignorechecktimestamp", strStyle) == false)
        {
            // The ".new" temp file (partial upload in progress) takes precedence for the timestamp.
            if (File.Exists(strNewFilePath) == true)
            {
                baOutputTimestamp = FileUtil.GetFileTimestamp(strNewFilePath);
            }
            else
            {
                baOutputTimestamp = FileUtil.GetFileTimestamp(strFilePath);
            }

            if (ByteArray.Compare(baOutputTimestamp, baInputTimestamp) != 0)
            {
                strError = "时间戳不匹配";
                return (-2);
            }
        }
    }
    else
    {
        if (bDelete == true)
        {
            string strRangeFileName = GetRangeFileName(strFilePath);
            //if (File.Exists(strRangeFileName) == true)
            //    File.Delete(strRangeFileName);
            _physicalFileCache.FileDeleteIfExists(strRangeFileName);
            return (0);
        }

        // Create an empty file
        _physicalFileCache.ClearItems(strFilePath);
        using (FileStream s = File.Create(strFilePath))
        {
        }

        baOutputTimestamp = FileUtil.GetFileTimestamp(strFilePath);
    }

#if NO
    // Delete the file
    if (bDelete == true)
    {
        if (File.Exists(strFilePath) == true)
        {
            File.Delete(strFilePath);
        }
        if (File.Exists(strNewFilePath) == true)
        {
            File.Delete(strNewFilePath);
        }
        string strRangeFileName = GetRangeFileName(strFilePath);
        if (File.Exists(strRangeFileName) == true)
        {
            File.Delete(strRangeFileName);
        }
        return (0);
    }
#endif

    //**************************************************
    long lCurrentLength = 0;
    {
        if (baSource.Length == 0)
        {
            if (strRanges != "")
            {
                strError = "当 baSource 参数的长度为 0 时,strRanges 的值却为 '" + strRanges + "',不匹配,此时 strRanges 的值应为空字符串";
                return (-1);
            }

            // Capture the size that will be written into metadata
            FileInfo fi = new FileInfo(strFilePath);
            lCurrentLength = fi.Length; // TODO:?
            fi = null;
        }
    }

    //******************************************
    // Write the data
    if (string.IsNullOrEmpty(strRanges) == true)
    {
        // No explicit ranges: treat the payload as covering the whole file.
        if (lTotalLength > 0)
        {
            strRanges = "0-" + Convert.ToString(lTotalLength - 1);
        }
        else
        {
            strRanges = "";
        }
    }

    string strRealRanges = strRanges;

    // Check whether the ranges sent this time constitute the complete file.
    bool bIsComplete = false;
    if (lTotalLength == 0)
    {
        bIsComplete = true;
    }
    else
    {
        // -1   error
        // 0    some parts are still uncovered
        // 1    fully covered by this call
        int nState = RangeList.MergeContentRangeString(strRanges, "", lTotalLength, out strRealRanges, out strError);
        if (nState == -1)
        {
            strError = "MergeContentRangeString() error 1 : " + strError + " (strRanges='" + strRanges + "' lTotalLength=" + lTotalLength.ToString() + ")";
            return (-1);
        }

        if (nState == 1)
        {
            bIsComplete = true;
        }
    }

    if (bIsComplete == true)
    {
        if (baSource.Length != lTotalLength)
        {
            strError = "范围 '" + strRanges + "' 与数据字节数组长度 '" + baSource.Length.ToString() + "' 不符合";
            return (-1);
        }
    }

    RangeList rangeList = new RangeList(strRealRanges);

#if NO
    // Start writing the data
    Stream target = null;
    if (bIsComplete == true)
    {
        target = File.Create(strFilePath); // sent in one shot; write straight to the target file
    }
    else
    {
        target = File.Open(strNewFilePath, FileMode.OpenOrCreate);
    }

    try
    {
        int nStartOfBuffer = 0;
        for (int i = 0; i < rangeList.Count; i++)
        {
            RangeItem range = (RangeItem)rangeList[i];
            // int nStartOfTarget = (int)range.lStart;
            int nLength = (int)range.lLength;
            if (nLength == 0)
            {
                continue;
            }

            Debug.Assert(range.lStart >= 0, "");

            // Move the target stream's pointer to the specified position
            target.Seek(range.lStart, SeekOrigin.Begin);
            target.Write(baSource, nStartOfBuffer, nLength);
            nStartOfBuffer += nLength;
        }
    }
    finally
    {
        target.Close();
    }
#endif

    // Start writing the data
    StreamItem target = null;
    if (bIsComplete == true)
    {
        target = _physicalFileCache.GetStream(strFilePath, FileMode.Create, FileAccess.Write, false); // sent in one shot; write straight to the target file
    }
    else
    {
        target = _physicalFileCache.GetStream(strNewFilePath, FileMode.OpenOrCreate, FileAccess.Write);
    }

    try
    {
        int nStartOfBuffer = 0;
        for (int i = 0; i < rangeList.Count; i++)
        {
            RangeItem range = rangeList[i];
            // int nStartOfTarget = (int)range.lStart;
            int nLength = (int)range.lLength;
            if (nLength == 0)
            {
                continue;
            }

            Debug.Assert(range.lStart >= 0, "");

            // 2019/6/21
            // TODO: during the testing phase, writes that leave a gap (sparse append) are temporarily not allowed
            if (range.lStart > target.FileStream.Length)
            {
                strError = "不允许隔空写入";
                return (-1);
            }

            // Move the target stream's pointer to the specified position
            target.FileStream.FastSeek(range.lStart);
            target.FileStream.Write(baSource, nStartOfBuffer, nLength);
            nStartOfBuffer += nLength;
        }
    }
    finally
    {
        // 2019/6/21 added
        var filepath = target.FilePath;
        _physicalFileCache.ReturnStream(target);
        File.SetLastWriteTime(filepath, DateTime.Now);
    }

    {
        string strRangeFileName = GetRangeFileName(strFilePath);

        // If everything was written in one shot, the following must happen:
        // 1. the timestamp is computed from the target file
        // 2. the length written to metadata is the target file's total length
        // 3. if temporary auxiliary files exist, delete them
        // 4. set the target file's LastWriteTime
        if (bIsComplete == true)
        {
            // baOutputTimestamp = CreateTimestampForCfg(strFilePath);
            lCurrentLength = lTotalLength;

            // Delete the auxiliary files
            //if (File.Exists(strNewFilePath) == true)
            //    File.Delete(strNewFilePath);
            _physicalFileCache.FileDeleteIfExists(strNewFilePath);

            //if (File.Exists(strRangeFileName) == true)
            //    File.Delete(strRangeFileName);
            _physicalFileCache.FileDeleteIfExists(strRangeFileName);
            goto END1;
        }

        //****************************************
        // Handle the auxiliary files
        bool bEndWrite = false; // whether this is the final write operation
        string strResultRange = "";
        if (strRanges == "" || strRanges == null)
        {
            bEndWrite = true;
        }
        else
        {
            string strOldRanges = "";
            if (IsFirstRange(strRanges, lTotalLength, out bEndWrite) == false)
            {
                // Merge this call's ranges with the ranges accumulated from earlier calls.
                if (File.Exists(strRangeFileName) == true)
                {
                    string strText = FileUtil.File2StringE(strRangeFileName);
                    string strOldTotalLength = "";
                    StringUtil.ParseTwoPart(strText, "|", out strOldRanges, out strOldTotalLength);
                }

                // return
                // -1   error
                // 0    some parts are still uncovered
                // 1    fully covered by this call
                int nState1 = RangeList.MergeContentRangeString(strRanges, strOldRanges, lTotalLength, out strResultRange, out strError);
                if (nState1 == -1)
                {
                    strError = "MergeContentRangeString() error 2 : " + strError + " (strRanges='" + strRanges + "' strOldRanges='" + strOldRanges + "' ) lTotalLength=" + lTotalLength.ToString() + "";
                    return (-1);
                }

                if (nState1 == 1)
                {
                    bEndWrite = true;
                }
            }
            else
            {
                strResultRange = strRanges;
            }
        }

        // If the file is now full, the following must happen:
        // 1. truncate the temporary file to the maximum length
        // 2. copy the temporary file to the target file
        // 3. delete the "new" and "range" auxiliary files
        // 4. the timestamp is computed from the target file
        // 5. the metadata length is the target file's total length
        // 6. set the target file's LastWriteTime
        if (bEndWrite == true)
        {
            _physicalFileCache.ClearItems(strNewFilePath);
            using (Stream s = new FileStream(strNewFilePath, FileMode.OpenOrCreate))
            {
                s.SetLength(lTotalLength);
            }

            // TODO: moving (renaming) the file would be better
            //File.Delete(strFilePath);
            //File.Move(strNewFilePath, strFilePath);
            this._physicalFileCache.FileDelete(strFilePath);
            this._physicalFileCache.FileMove(strNewFilePath, strFilePath, true);

            //if (File.Exists(strRangeFileName) == true)
            //    File.Delete(strRangeFileName);
            _physicalFileCache.FileDeleteIfExists(strRangeFileName);

            baOutputTimestamp = FileUtil.GetFileTimestamp(strFilePath);
            lCurrentLength = lTotalLength;
            bIsComplete = true;
        }
        else
        {
            // If the file is not yet full, the following must happen:
            // 1. write the current merged range to the "range" auxiliary file
            // 2. the timestamp is computed from the temporary file
            // 3. the metadata length is -1, i.e. unknown
            FileUtil.String2File(strResultRange + "|" + lTotalLength.ToString(), strRangeFileName);
            lCurrentLength = -1;
            baOutputTimestamp = FileUtil.GetFileTimestamp(strNewFilePath);
        }
    }

END1:
    if (bIsComplete == true)
    {
        // After a multi-round upload finishes, the file's last-modified time must be set separately at the end
        string strLastWriteTime = StringUtil.GetStyleParam(strStyle, "last_write_time");
        // parameters:
        //      baTimeStamp an 8-byte value holding the ticks of the file's last write time. Should be GMT time
        FileUtil.SetFileLastWriteTimeByTimestamp(strFilePath, ByteArray.GetTimeStampByteArray(strLastWriteTime));
        baOutputTimestamp = FileUtil.GetFileTimestamp(strFilePath);

        // Automatically expand a compressed file once the upload completes
        if (StringUtil.IsInList("extractzip", strStyle) == true)
        {
            try
            {
                ReadOptions option = new ReadOptions();
                option.Encoding = Encoding.UTF8;
                _physicalFileCache.ClearItems(strFilePath);
                using (ZipFile zip = ZipFile.Read(strFilePath, option))
                {
                    foreach (ZipEntry e in zip)
                    {
                        string strTargetDir = Path.GetDirectoryName(strFilePath);
                        e.Extract(strTargetDir, ExtractExistingFileAction.OverwriteSilently);

                        // 2017/4/8 fix up the file's last-modified time
                        string strFullPath = Path.Combine(strTargetDir, e.FileName);
                        if ((e.Attributes & FileAttributes.Directory) == 0)
                        {
                            if (e.LastModified != File.GetLastWriteTime(strFullPath))
                            {
                                // The times may differ, possibly because of daylight-saving-time issues
                                File.SetLastWriteTime(strFullPath, e.LastModified);
                            }

                            Debug.Assert(e.LastModified == File.GetLastWriteTime(strFullPath));
                        }
                    }
                }

                File.Delete(strFilePath);
            }
            catch (Exception ex)
            {
                strError = ExceptionUtil.GetAutoText(ex);
                return (-1);
            }
        }
    }

    return (0);
}
// Write a record to the sql database.
// Writes baContent or streamContent into the target positions of the image field given by range.
// Note: a record in sql may be an Xml-body record or an object-resource record.
// parameters:
//      connection  connection object; must not be null
//      strID       record ID; must not be null or an empty string
//      strRanges   target ranges; multiple ranges are separated by commas
//      nTotalLength    total length of the record content
//                  SQL Server currently only supports int, so nTotalLength is declared int even though the outer interface uses long
//      baContent   content byte array; may be null
//      streamContent   content stream; may be null
//      strStyle    style
//                  ignorechecktimestamp    skip the timestamp check
//      baInputTimestamp    incoming timestamp; may be null
//      baOutputTimestamp   out parameter; the returned timestamp
//      bFull       out parameter; whether the record is now full
//      strError    out parameter; returns the error message
// return:
//      -1  general error
//      -2  timestamp mismatch
//      0   succeeded
// Note: whichever of baContent and streamContent has a value is the one that is used
private int WriteSqlRecord(SqlConnection connection,
    string strID,
    string strRanges,
    int nTotalLength,
    byte[] baSource,
    Stream streamSource,
    string strMetadata,
    string strStyle,
    byte[] baInputTimestamp,
    out byte[] baOutputTimestamp,
    out bool bFull,
    out string strError)
{
    baOutputTimestamp = null;
    strError = "";
    bFull = false;
    int nRet = 0;

    //-------------------------------------------
    // Routine validation of the input parameters
    //-------------------------------------------
    // return:
    //      -1  error
    //      0   ok
    nRet = this.CheckConnection(connection, out strError);
    if (nRet == -1)
    {
        strError = "WriteSqlRecord()调用错误," + strError;
        return -1;
    }
    Debug.Assert(nRet == 0, "");

    if (strID == null || strID == "")
    {
        strError = "WriteSqlRecord()调用错误,strID参数不能为null或空字符串。";
        return -1;
    }
    if (nTotalLength < 0)
    {
        strError = "WriteSqlRecord()调用错误,nTotalLength参数值不能为'" + Convert.ToString(nTotalLength) + "',必须大于等于0。";
        return -1;
    }
    if (baSource == null && streamSource == null)
    {
        strError = "WriteSqlRecord()调用错误,baSource参数与streamSource参数不能同时为null。";
        return -1;
    }
    if (baSource != null && streamSource != null)
    {
        strError = "WriteSqlRecord()调用错误,baSource参数与streamSource参数只能有一个被赋值。";
        return -1;
    }

    if (strStyle == null)
        strStyle = "";
    if (strRanges == null)
        strRanges = "";
    if (strMetadata == null)
        strMetadata = "";

    //-------------------------------------------
    // Start doing the work
    //-------------------------------------------

    ////////////////////////////////////////////////////
    // Check whether the record exists and the timestamp matches,
    // and obtain the length, range and textPtr
    /////////////////////////////////////////////////////
    // Result columns: [0]=TEXTPTR [1]=DataLength [2]=range [3]=dptimestamp [4]=metadata
    string strCommand = "use " + this.m_strSqlDbName + " "
        + " SELECT TEXTPTR(newdata),"
        + " DataLength(newdata),"
        + " range,"
        + " dptimestamp,"
        + " metadata "
        + " FROM records "
        + " WHERE id=@id";
    strCommand += " use master " + "\n";

    SqlCommand command = new SqlCommand(strCommand, connection);
    SqlParameter idParam = command.Parameters.Add("@id", SqlDbType.NVarChar);
    idParam.Value = strID;

    byte[] textPtr = null;
    string strOldMetadata = "";
    string strCurrentRange = "";
    int nCurrentLength = 0;
    string strOutputTimestamp = "";

    SqlDataReader dr = command.ExecuteReader(CommandBehavior.SingleResult);
    try
    {
        // 1. Error if the record does not exist
        if (dr == null || dr.HasRows == false)
        {
            strError = "记录'" + strID + "'在库中不存在,是不可能的情况";
            return -1;
        }

        dr.Read();

        // 2. Error if textPtr is null
        if (dr[0] is System.DBNull)
        {
            strError = "TextPtr不可能为null";
            return -1;
        }
        textPtr = (byte[])dr[0];

        // 3. The timestamp can never be null; a mismatching timestamp is an error.
        // FIX: the null check previously tested dr[4] (the metadata column);
        // the timestamp is column 3 (dptimestamp), as read by GetString(3) below.
        if ((dr[3] is System.DBNull))
        {
            strError = "时间戳不可能为null";
            return -1;
        }

        // When strStyle contains ignorechecktimestamp, the timestamp is not checked
        strOutputTimestamp = dr.GetString(3);
        baOutputTimestamp = ByteArray.GetTimeStampByteArray(strOutputTimestamp);
        if (StringUtil.IsInList("ignorechecktimestamp", strStyle) == false)
        {
            if (ByteArray.Compare(baInputTimestamp, baOutputTimestamp) != 0)
            {
                strError = "时间戳不匹配";
                return -2;
            }
        }

        // 4. Error if metadata is null
        if ((dr[4] is System.DBNull))
        {
            strError = "Metadata不可能为null";
            return -1;
        }
        strOldMetadata = dr.GetString(4);

        // 5. Error if range is null
        if ((dr[2] is System.DBNull))
        {
            strError = "range此时也不可能为null";
            return -1;
        }
        strCurrentRange = dr.GetString(2);

        // 6. Fetch the length
        nCurrentLength = dr.GetInt32(1);
    }
    finally
    {
        dr.Close();
    }

    bFull = false;
    bool bDeleted = false;

    long nSourceTotalLength = 0;
    if (baSource != null)
        nSourceTotalLength = baSource.Length;
    else
        nSourceTotalLength = streamSource.Length;

    // Write the data according to the ranges
    RangeList rangeList = null;
    if (strRanges == "")
    {
        // No explicit ranges: a single range covering the whole source
        RangeItem rangeItem = new RangeItem();
        rangeItem.lStart = 0;
        rangeItem.lLength = nSourceTotalLength;
        rangeList = new RangeList();
        rangeList.Add(rangeItem);
    }
    else
    {
        rangeList = new RangeList(strRanges);
    }

    int nStartOfBuffer = 0; // position within the source buffer
    int nState = 0;
    for (int i = 0; i < rangeList.Count; i++)
    {
        bool bCanDeleteDuoYu = false; // by default the surplus length cannot be deleted
        RangeItem range = (RangeItem)rangeList[i];
        int nStartOfTarget = (int)range.lStart;   // position within the image field
        int nNeedReadLength = (int)range.lLength; // number of bytes to read from the buffer

        if (rangeList.Count == 1 && nNeedReadLength == 0)
        {
            bFull = true;
            break;
        }

        string strThisEnd = Convert.ToString(nStartOfTarget + nNeedReadLength - 1);
        string strThisRange = Convert.ToString(nStartOfTarget) + "-" + strThisEnd;

        string strNewRange;
        nState = RangeList.MergContentRangeString(strThisRange, strCurrentRange, nTotalLength, out strNewRange);
        if (nState == -1)
        {
            strError = "MergContentRangeString() error";
            return -1;
        }

        if (nState == 1) // the range is now full
        {
            bFull = true;
            string strFullEnd = "";
            int nPosition = strNewRange.IndexOf('-');
            if (nPosition >= 0)
                strFullEnd = strNewRange.Substring(nPosition + 1);

            // When this is the last range, its end equals the overall end, and no delete has been done yet
            if (i == rangeList.Count - 1 && (strFullEnd == strThisEnd) && bDeleted == false)
            {
                bCanDeleteDuoYu = true;
                bDeleted = true;
            }
        }

        strCurrentRange = strNewRange;

        // return:
        //      -1  error
        //      0   ok
        nRet = this.WriteImage(connection,
            textPtr,
            ref nCurrentLength, // the current image length keeps changing
            bCanDeleteDuoYu,
            strID,
            "newdata",
            nStartOfTarget,
            baSource,
            streamSource,
            nStartOfBuffer,
            nNeedReadLength,
            out strError);
        if (nRet == -1)
            return -1;

        nStartOfBuffer += nNeedReadLength;
    }

    if (bFull == true)
    {
        if (bDeleted == false)
        {
            // When the record is fully covered, delete the surplus bytes
            // return:
            //      -1  error
            //      0   ok
            nRet = this.DeleteDuoYuImage(connection,
                strID,
                "newdata",
                nTotalLength,
                out strError);
            if (nRet == -1)
                return -1;
        }

        strCurrentRange = "";
        nCurrentLength = nTotalLength;
    }
    else
    {
        nCurrentLength = -1;
    }

    // Finally, update range, metadata and dptimestamp.
    // Compute the merged metadata.
    string strResultMetadata;
    // return:
    //      -1  error
    //      0   ok
    nRet = DatabaseUtil.MergeMetadata(strOldMetadata,
        strMetadata,
        nCurrentLength,
        out strResultMetadata,
        out strError);
    if (nRet == -1)
        return -1;

    // Generate a fresh timestamp and store it in the database
    strOutputTimestamp = this.CreateTimestampForDb();

    strCommand = "use " + this.m_strSqlDbName + "\n"
        + " UPDATE records "
        + " SET dptimestamp=@dptimestamp,"
        + " range=@range,"
        + " metadata=@metadata "
        + " WHERE id=@id";
    strCommand += " use master " + "\n";

    command = new SqlCommand(strCommand, connection);
    idParam = command.Parameters.Add("@id", SqlDbType.NVarChar);
    idParam.Value = strID;
    SqlParameter dptimestampParam = command.Parameters.Add("@dptimestamp", SqlDbType.NVarChar, 100);
    dptimestampParam.Value = strOutputTimestamp;
    SqlParameter rangeParam = command.Parameters.Add("@range", SqlDbType.NVarChar, 4000);
    rangeParam.Value = strCurrentRange;
    SqlParameter metadataParam = command.Parameters.Add("@metadata", SqlDbType.NVarChar, 4000);
    metadataParam.Value = strResultMetadata;

    int nCount = command.ExecuteNonQuery();
    if (nCount == 0)
    {
        strError = "没有更新到记录号为'" + strID + "'的时间戳,range,metadata";
        return -1;
    }

    baOutputTimestamp = ByteArray.GetTimeStampByteArray(strOutputTimestamp);//Encoding.UTF8.GetBytes(strOutputTimestamp);
    return 0;
}
/// <summary>
/// Verifies that for a multipart byte-range response, SetOtherHeaders sets the
/// multipart content type on the response.
/// </summary>
public void SetOtherHeaders_OtherHeadersSet_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var entity = MockRepository.GenerateMock<IEntity>();

    // Uppercase 'L' suffix: lowercase 'l' is easily mistaken for the digit '1'.
    var firstStartRange = 0L;
    var firstEndRange = 499L;
    var firstRangeItem = new RangeItem { StartRange = firstStartRange, EndRange = firstEndRange };

    var secondStartRange = 500L;
    var secondEndRange = 999L;
    var secondRangeItem = new RangeItem { StartRange = secondStartRange, EndRange = secondEndRange };

    var rangeItems = new[] { firstRangeItem, secondRangeItem };

    httpResponse.Expect(x => x.ContentType = MultipleByteRangeResponse.MULTIPART_CONTENTTYPE);

    //Act
    var multipleByteRangeResponse = new MultipleByteRangeResponse(httpResponseHeaderHelper, rangeItems);
    multipleByteRangeResponse.SetOtherHeaders(httpResponse, entity);

    //Assert
    httpResponseHeaderHelper.VerifyAllExpectations();
    httpResponse.VerifyAllExpectations();
}
/// <summary>
/// Exercises SendBody with an HTTP method that is neither HEAD nor GET.
/// NOTE(review): unlike the single-part equivalent (which wraps the call in
/// Assert.Throws&lt;Exception&gt;), this test performs no assertion at all —
/// confirm whether the multipart response is expected to throw here and add
/// the corresponding assert.
/// </summary>
public void SendBody_NotAHeadOrGetRequest_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var transmitEntityStrategy = MockRepository.GenerateMock<ITransmitEntityStrategy>();

    var requestHttpMethod = HttpMethod.Options;
    // Uppercase 'L' suffix: lowercase 'l' is easily mistaken for the digit '1'.
    var firstStartRange = 0L;
    var firstEndRange = 499L;
    var firstRangeItem = new RangeItem { StartRange = firstStartRange, EndRange = firstEndRange };

    var secondStartRange = 500L;
    var secondEndRange = 999L;
    var secondRangeItem = new RangeItem { StartRange = secondStartRange, EndRange = secondEndRange };

    var rangeItems = new[] { firstRangeItem, secondRangeItem };

    //Act
    var multipleByteRangeResponse = new MultipleByteRangeResponse(httpResponseHeaderHelper, rangeItems);
    multipleByteRangeResponse.SendBody(requestHttpMethod, httpResponse, transmitEntityStrategy);
}
/// <summary>
/// A HEAD request must produce no body: none of the multipart part headers, none of
/// the range transmissions, and no footer may be written to the response output.
/// </summary>
public void SendBody_HeadRequest_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var transmitEntityStrategy = MockRepository.GenerateMock<ITransmitEntityStrategy>();
    var entity = MockRepository.GenerateMock<IEntity>();
    var output = MockRepository.GenerateMock<TextWriter>();

    var requestHttpMethod = HttpMethod.Head;
    var contentType = "image/gif";

    // Uppercase 'L' suffixes: a lowercase 'l' is easily misread as the digit '1'.
    var contentLength = 1000L;

    var firstStartRange = 0L;
    var firstEndRange = 499L;
    var firstBytesToRead = firstEndRange - firstStartRange + 1;
    var firstRangeItem = new RangeItem { StartRange = firstStartRange, EndRange = firstEndRange };

    var secondStartRange = 500L;
    var secondEndRange = 999L;
    var secondBytesToRead = secondEndRange - secondStartRange + 1;
    var secondRangeItem = new RangeItem { StartRange = secondStartRange, EndRange = secondEndRange };

    var rangeItems = new[] { firstRangeItem, secondRangeItem };

    // The multipart part headers and footer exactly as the implementation builds them.
    var firstHeader = "\r\n--" + MultipleByteRangeResponse.MULTIPART_BOUNDARY + "\r\n"
        + MultipleByteRangeResponse.HTTP_HEADER_CONTENT_TYPE + ": " + contentType + "\r\n"
        + MultipleByteRangeResponse.HTTP_HEADER_CONTENT_RANGE + ": " + MultipleByteRangeResponse.BYTES + " "
        + firstStartRange + "-" + firstEndRange + "/" + contentLength + "\r\n"
        + "\r\n";
    var secondHeader = "\r\n--" + MultipleByteRangeResponse.MULTIPART_BOUNDARY + "\r\n"
        + MultipleByteRangeResponse.HTTP_HEADER_CONTENT_TYPE + ": " + contentType + "\r\n"
        + MultipleByteRangeResponse.HTTP_HEADER_CONTENT_RANGE + ": " + MultipleByteRangeResponse.BYTES + " "
        + secondStartRange + "-" + secondEndRange + "/" + contentLength + "\r\n"
        + "\r\n";
    var footer = "\r\n--" + MultipleByteRangeResponse.MULTIPART_BOUNDARY + "--\r\n";

    transmitEntityStrategy.Stub(x => x.Entity).Return(entity);
    entity.Stub(x => x.ContentType).Return(contentType);
    entity.Stub(x => x.ContentLength).Return(contentLength);
    httpResponse.Stub(x => x.Output).Return(output);

    //Act
    var multipleByteRangeResponse = new MultipleByteRangeResponse(httpResponseHeaderHelper, rangeItems);
    multipleByteRangeResponse.SendBody(requestHttpMethod, httpResponse, transmitEntityStrategy);

    //Assert
    output.AssertWasNotCalled(x => x.Write(firstHeader));
    transmitEntityStrategy.AssertWasNotCalled(x => x.Transmit(httpResponse, firstStartRange, firstBytesToRead));
    output.AssertWasNotCalled(x => x.Write(secondHeader));
    transmitEntityStrategy.AssertWasNotCalled(x => x.Transmit(httpResponse, secondStartRange, secondBytesToRead));
    output.AssertWasNotCalled(x => x.Write(footer));
}
// Enumerates the records of one day's operation (or access) log file as
// OperLogItem objects, yield-returning them one at a time. When AutoCache is
// enabled, records fetched from the server are written to a local cache file,
// and an existing complete cache file is read instead of the server.
public IEnumerator GetEnumerator()
{
    string strError = "";
    int nRet = 0;
    long lRet = 0;

    // Date must be exactly 8 characters ("yyyyMMdd").
    if (this.Date.Length != 8)
    {
        throw new ArgumentException("FileName 成员值的长度应该是 8 字符");
    }

    // Only one log type may be selected at a time.
    if ((this.LogType & LogType.AccessLog) != 0 && (this.LogType & LogType.OperLog) != 0)
    {
        throw new ArgumentException("OperLogItemLoader 的 LogType 只能使用一种类型");
    }

    if (this.Stop != null && this.Estimate != null)
    {
        this.Stop.SetMessage("正在装入日志文件 " + this.Date + " 中的记录。"
            + "剩余时间 " + ProgressEstimate.Format(Estimate.Estimate(lProgressValue))
            + " 已经过时间 " + ProgressEstimate.Format(Estimate.delta_passed));
    }

    string strXml = "";
    long lAttachmentTotalLength = 0;
    byte[] attachment_data = null;
    long lFileSize = 0;

    // 2021/3/17
    // If this is today's log file its size can still grow, so probe the
    // server-side size on every call.
    if (lServerFileSize == -1 || IsToday(this.Date))
    {
        long _lServerFileSize = 0;
        string strStyle = "level-" + Level.ToString();
        if ((this.LogType & LogType.AccessLog) != 0)
        {
            strStyle += ",accessLog";
        }

        // Get the size of the log file on the server side.
        lRet = this.Channel.GetOperLog(
            //this.Stop,
            this.Date + ".log", // 2021/3/18 appended ".log"
            -1, // lIndex,
            -1, // lHint,
            strStyle,
            "", // strFilter
            out strXml,
            out _lServerFileSize,
            0, // lAttachmentFragmentStart,
            0, // nAttachmentFramengLength,
            out attachment_data,
            out lAttachmentTotalLength,
            out strError);
        // 2015/11/25
        if (lRet == -1)
        {
            //throw new ChannelException(this.Channel.ErrorCode, strError);
            throw new Exception(strError);
        }

        this.lServerFileSize = _lServerFileSize;

        if (lRet == 0)
        {
            yield break;
        }

        // 2015/11/25
        if (_lServerFileSize == -1)
        {
            yield break; // this log type has not been enabled yet
        }
    }

    Stream stream = null;
    bool bCacheFileExist = false;
    bool bRemoveCacheFile = false; // whether to auto-delete a local cache file that was not fully written
    bool bAutoCache = this.AutoCache;

    if (bAutoCache == true)
    {
        string strFileName = this.Date;
        if ((this.LogType & LogType.AccessLog) != 0)
        {
            strFileName = this.Date + ".a"; // access logs cache under a ".a" suffix
        }
        nRet = PrepareCacheFile(
            this.CacheDir,
            strFileName, // this.FileName,
            lServerFileSize,
            out bCacheFileExist,
            out stream,
            out strError);
        if (nRet == -1)
        {
            throw new Exception(strError);
        }
        // A freshly created (incomplete) cache file must be removed unless we
        // finish writing it — see the finally block below.
        if (bCacheFileExist == false && stream != null)
        {
            bRemoveCacheFile = true;
        }
    }

    try
    {
        if (bCacheFileExist == true)
        {
            lFileSize = stream.Length;
        }
        else
        {
            lFileSize = lServerFileSize;
        }

        // stop.SetProgressRange(0, lTotalSize);

        if (String.IsNullOrEmpty(Range) == true)
        {
            Range = "0-" + (long.MaxValue - 1).ToString(); // "0-9999999999";
        }

        RangeList rl = new RangeList(Range);
        for (int i = 0; i < rl.Count; i++)
        {
            RangeItem ri = (RangeItem)rl[i];

            // Allow open-ended ranges such as "100-" to be used.
            if (ri.lLength == -1)
            {
                ri.lLength = long.MaxValue - ri.lStart;
            }

            OperLogInfo[] records = null;
            long lStartRecords = 0;
            long lHint = -1;
            long lHintNext = -1;
            for (long lIndex = ri.lStart; lIndex < ri.lStart + ri.lLength; lIndex++)
            {
                // Application.DoEvents();

                if (this.Stop != null && this.Stop.State != 0)
                {
                    strError = "用户中断";
                    throw new InterruptException(strError); // yield break; ?
                }

                // The first record of a range has no position hint; subsequent
                // records reuse the position left by the previous iteration.
                if (lIndex == ri.lStart)
                {
                    lHint = -1;
                }
                else
                {
                    lHint = lHintNext;
                }

                if (bCacheFileExist == true)
                {
                    // Read the record from the local cache file.
                    if (lHint == -1)
                    {
                        // return:
                        //      -1  error
                        //      0   succeeded
                        //      1   reached or passed the end of the file
                        nRet = LocationRecord(stream, lIndex, out strError);
                        if (nRet == -1)
                        {
                            throw new Exception(strError);
                        }
                    }
                    else
                    {
                        // Locate the record directly from the hint.
                        if (lHint == stream.Length)
                        {
                            break;
                        }
                        if (lHint > stream.Length)
                        {
                            strError = "lHint参数值不正确";
                            throw new Exception(strError);
                        }
                        if (stream.Position != lHint)
                        {
                            stream.Seek(lHint, SeekOrigin.Begin);
                        }
                    }

                    nRet = ReadCachedEnventLog(
                        stream,
                        out strXml,
                        out lAttachmentTotalLength,
                        out strError);
                    if (nRet == -1)
                    {
                        throw new Exception(strError);
                    }
                    lHintNext = stream.Position;
                }
                else
                {
                    // Fetch a batch of records from the server when the current
                    // batch is exhausted (or on the first iteration).
                    if (records == null || lIndex /*- ri.lStart*/ >= lStartRecords + records.Length)
                    {
                        int nCount = -1;
                        if (ri.lLength >= Int32.MaxValue)
                        {
                            nCount = -1; // 500; // -1;
                        }
                        else
                        {
                            nCount = (int)ri.lLength; // Math.Min(500, (int)ri.lLength);
                        }

                        string strStyle = "level-" + Level.ToString();
                        if ((this.LogType & LogType.AccessLog) != 0)
                        {
                            strStyle += ",accessLog";
                        }

                        // 2017/10/9
                        if (this.ReplicationLevel == true)
                        {
                            strStyle += ",supervisor"; // NOTE: the current account must also hold the "replication" right to actually obtain the password fields inside log records
                        }

                        if (string.IsNullOrEmpty(this.ServerVersion) == false && StringUtil.CompareVersion(this.ServerVersion, "3.17") >= 0)
                        {
                            strStyle += ",wait";
                        }

                        REDO:
                        // Fetch the log records.
                        // return:
                        //      -1  error
                        //      0   file not found
                        //      1   succeed
                        //      2   out of range; this call had no effect
                        lRet = this.Channel.GetOperLogs(
                            //this.Stop,
                            this.Date + ".log",
                            lIndex,
                            lHint,
                            nCount,
                            strStyle,
                            this.Filter, // strFilter
                            out records,
                            out strError);
                        if (lRet == -1)
                        {
#if NO
                            DialogResult result = MessageBox.Show(owner, "获取日志信息 (" + this.FileName + " " + lIndex.ToString() + ") 的操作发生错误: " + strError + "\r\n\r\n是否重试操作?\r\n\r\n(是: 重试; 否: 跳过本次操作,继续后面的操作; 放弃: 停止全部操作)", "OperLogItemLoader", MessageBoxButtons.YesNoCancel, MessageBoxIcon.Question, MessageBoxDefaultButton.Button1);
                            if (result == DialogResult.Yes)
                            {
                                goto REDO;
                            }
                            if (result == DialogResult.Cancel)
                            {
                                throw new Exception(strError);
                            }
                            else
                            {
                                // TODO: 是否要在listview中装入一条表示出错的行?
                                lHintNext = -1;
                                continue;
                            }
#endif
                            bool isStopped = (this.Stop != null && this.Stop.State != 0);
                            if (isStopped)
                            {
                                throw new InterruptException(strError);
                            }
                            /*
                             * if (this.Prompt != null)
                             * {
                             *     MessagePromptEventArgs e = new MessagePromptEventArgs();
                             *     e.MessageText = "获取 " + this._logType.ToString() + " 日志信息 (" + this.Date + " " + lIndex.ToString() + ") 的操作发生错误: " + strError;
                             *     e.Actions = "yes,no,cancel";
                             *     this.Prompt(this, e);
                             *     if (e.ResultAction == "cancel")
                             *         throw new InterruptException(strError);
                             *     else if (e.ResultAction == "yes")
                             *     {
                             *         if (this.Stop != null)
                             *             this.Stop.Continue();
                             *         goto REDO;
                             *     }
                             *     else
                             *     {
                             *         lHintNext = -1;
                             *         continue;
                             *     }
                             * }
                             * else
                             *     throw new ChannelException(this.Channel.ErrorCode, strError);
                             */
                            throw new Exception(strError);
                        }
                        if (lRet == 0)
                        {
                            yield break;
                        }
                        if (lRet == 2)
                        {
                            break;
                        }

                        // starting index represented by the records array
                        lStartRecords = lIndex /* - ri.lStart*/;
                    }

                    OperLogInfo info = records[lIndex - lStartRecords];
                    strXml = info.Xml;
                    lHintNext = info.HintNext;
                    lAttachmentTotalLength = info.AttachmentLength;

                    // Append the record to the local cache log file.
                    if (stream != null)
                    {
                        try
                        {
                            WriteCachedEnventLog(
                                stream,
                                strXml,
                                lAttachmentTotalLength);
                        }
                        catch (Exception ex)
                        {
                            strError = "写入本地缓存文件的时候出错: " + ex.Message;
                            throw new Exception(strError);
                        }
                    }
                }

#if NO
                // 2011/12/30
                // 日志记录可能动态地增加了,超过了原先为ProgressBar设置的范围
                if (lFizeTotalSize < (int)lHintNext)
                {
                    lFizeTotalSize = lHintNext;
                    stop.SetProgressRange(0, lFizeTotalSize);
                }
#endif

                if (lHintNext >= 0)
                {
                    // Adjust the progress range if it has been outgrown.
                    if (lProgressValue + lHintNext >= lSize) // > 2017/12/4 changed to >=
                    {
                        lSize = lProgressValue + lHintNext;
                        if (this.Stop != null)
                        {
                            this.Stop.SetProgressRange(0, lSize);
                        }
                        if (this.Estimate != null)
                        {
                            Estimate.SetRange(0, lSize);
                        }
                    }
                    this.Stop?.SetProgressValue(lProgressValue + lHintNext);
                }

                // Refresh the progress message every 100 records.
                if (lIndex % 100 == 0)
                {
                    if (this.Stop != null && this.Estimate != null)
                    {
                        Estimate.Text = "剩余时间 " + ProgressEstimate.Format(Estimate.Estimate(lProgressValue + lHintNext)) + " 已经过时间 " + ProgressEstimate.Format(Estimate.delta_passed);
                        this.Stop.SetMessage("正在装入日志文件 " + this.Date + " 中的记录 " + lIndex.ToString() + " 。" + Estimate.Text);
                    }
                }

                {
                    OperLogItem item = new OperLogItem
                    {
                        Xml = strXml,
                        Index = lIndex,
                        Date = this.Date.Substring(0, 8),
                        AttachmentLength = lAttachmentTotalLength
                    };
                    yield return (item);
                }
            }
        }

        // Create the metadata file for the newly written local cache log file.
        if (bCacheFileExist == false && stream != null)
        {
            string strFileName = this.Date;
            if ((this.LogType & LogType.AccessLog) != 0)
            {
                strFileName = this.Date + ".a";
            }
            nRet = CreateCacheMetadataFile(
                this.CacheDir,
                strFileName, // this.FileName,
                lServerFileSize,
                out strError);
            if (nRet == -1)
            {
                throw new Exception(strError);
            }
        }

        bRemoveCacheFile = false; // cache file completed — do not delete it
    }
    finally
    {
        if (stream != null)
        {
            stream.Close();
        }

        // Remove a cache file that was started but not completed.
        if (bRemoveCacheFile == true)
        {
            string strFileName = this.Date;
            if ((this.LogType & LogType.AccessLog) != 0)
            {
                strFileName = this.Date + ".a";
            }
            string strError1 = "";
            nRet = DeleteCacheFile(
                this.CacheDir,
                strFileName, // this.FileName,
                out strError1);
            if (nRet == -1)
            {
                // MessageBox.Show(owner, strError1);
                //if (this.Prompt != null)
                //{
                //    MessagePromptEventArgs e = new MessagePromptEventArgs();
                //    e.MessageText = strError1;
                //    e.Actions = "ok";
                //    this.Prompt(this, e);
                //}
            }
        }
    }

    lProgressValue += lFileSize;
}
/// <summary>
/// SendBody must throw for a request method that is neither HEAD nor GET (OPTIONS here).
/// </summary>
public void SendBody_NotAHeadOrGetRequest_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var transmitEntityStrategy = MockRepository.GenerateMock<ITransmitEntityStrategy>();

    var requestHttpMethod = HttpMethod.Options;

    var firstStartRange = 0L;
    var firstEndRange = 499L;
    var firstRangeItem = new RangeItem { StartRange = firstStartRange, EndRange = firstEndRange };

    var secondStartRange = 500L;
    var secondEndRange = 999L;
    var secondRangeItem = new RangeItem { StartRange = secondStartRange, EndRange = secondEndRange };

    var rangeItems = new[] { firstRangeItem, secondRangeItem };

    //Act / Assert
    // The exception instance itself is not inspected, so it is not captured.
    var multiPartEntityResponse = new EntityResponseMultiPart(httpResponseHeaderHelper, rangeItems);
    Assert.Throws<Exception>(() => multiPartEntityResponse.SendBody(requestHttpMethod, httpResponse, transmitEntityStrategy));
}
/// <summary>
/// A HEAD request must produce no body: no multipart part headers, no transmitted
/// byte ranges, and no closing footer may reach the response output.
/// </summary>
public void SendBody_HeadRequest_Void()
{
    //Arrange
    var response = MockRepository.GenerateMock<HttpResponseBase>();
    var headerHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var transmitStrategy = MockRepository.GenerateMock<ITransmitEntityStrategy>();
    var stubEntity = MockRepository.GenerateMock<IEntity>();
    var responseWriter = MockRepository.GenerateMock<TextWriter>();

    var headMethod = HttpMethod.Head;
    var entityContentType = "image/gif";
    var entityContentLength = 1000L;

    var rangeOneStart = 0L;
    var rangeOneEnd = 499L;
    var rangeOneLength = rangeOneEnd - rangeOneStart + 1;
    var rangeTwoStart = 500L;
    var rangeTwoEnd = 999L;
    var rangeTwoLength = rangeTwoEnd - rangeTwoStart + 1;

    var ranges = new[]
    {
        new RangeItem { StartRange = rangeOneStart, EndRange = rangeOneEnd },
        new RangeItem { StartRange = rangeTwoStart, EndRange = rangeTwoEnd }
    };

    // The multipart part headers and footer exactly as the implementation builds them.
    var rangeOneHeader = "\r\n--" + EntityResponseMultiPart.MultipartBoundary + "\r\n"
        + EntityResponseMultiPart.HttpHeaderContentType + ": " + entityContentType + "\r\n"
        + EntityResponseMultiPart.HttpHeaderContentRange + ": " + EntityResponseMultiPart.Bytes + " "
        + rangeOneStart + "-" + rangeOneEnd + "/" + entityContentLength + "\r\n"
        + "\r\n";
    var rangeTwoHeader = "\r\n--" + EntityResponseMultiPart.MultipartBoundary + "\r\n"
        + EntityResponseMultiPart.HttpHeaderContentType + ": " + entityContentType + "\r\n"
        + EntityResponseMultiPart.HttpHeaderContentRange + ": " + EntityResponseMultiPart.Bytes + " "
        + rangeTwoStart + "-" + rangeTwoEnd + "/" + entityContentLength + "\r\n"
        + "\r\n";
    var multipartFooter = "\r\n--" + EntityResponseMultiPart.MultipartBoundary + "--\r\n";

    transmitStrategy.Stub(x => x.Entity).Return(stubEntity);
    stubEntity.Stub(x => x.ContentType).Return(entityContentType);
    stubEntity.Stub(x => x.ContentLength).Return(entityContentLength);
    response.Stub(x => x.Output).Return(responseWriter);

    //Act
    var multiPartEntityResponse = new EntityResponseMultiPart(headerHelper, ranges);
    multiPartEntityResponse.SendBody(headMethod, response, transmitStrategy);

    //Assert
    responseWriter.AssertWasNotCalled(x => x.Write(rangeOneHeader));
    transmitStrategy.AssertWasNotCalled(x => x.Transmit(response, rangeOneStart, rangeOneLength));
    responseWriter.AssertWasNotCalled(x => x.Write(rangeTwoHeader));
    transmitStrategy.AssertWasNotCalled(x => x.Transmit(response, rangeTwoStart, rangeTwoLength));
    responseWriter.AssertWasNotCalled(x => x.Write(multipartFooter));
}
/// <summary>
/// SendHeaders must throw when the entity is stored Deflate-compressed but the
/// request asked for GZip (the multipart response cannot transcode).
/// </summary>
public void SendHeaders_EntityCompressionDeflateRequestedCompressionGzip_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var entity = MockRepository.GenerateMock<IEntity>();

    var responseCompressionType = ResponseCompressionType.GZip;
    var entityCompressionType = ResponseCompressionType.Deflate;

    var firstStartRange = 0L;
    var firstEndRange = 499L;
    var firstRangeItem = new RangeItem { StartRange = firstStartRange, EndRange = firstEndRange };

    var secondStartRange = 500L;
    var secondEndRange = 999L;
    var secondRangeItem = new RangeItem { StartRange = secondStartRange, EndRange = secondEndRange };

    var rangeItems = new[] { firstRangeItem, secondRangeItem };

    entity.Stub(x => x.CompressionType).Return(entityCompressionType);

    //Act / Assert
    // The exception instance itself is not inspected, so it is not captured.
    var multiPartEntityResponse = new EntityResponseMultiPart(httpResponseHeaderHelper, rangeItems);
    Assert.Throws<Exception>(() => multiPartEntityResponse.SendHeaders(httpResponse, responseCompressionType, entity));
}
/// <summary>
/// When the entity's stored compression matches the requested compression,
/// SendHeaders must set the content encoding, append the combined multipart
/// content length (part headers + range bytes + footer), and set the multipart
/// content type.
/// </summary>
public void SendHeaders_EntityCompressionMatchesRequestedCompression_Void()
{
    //Arrange
    var response = MockRepository.GenerateMock<HttpResponseBase>();
    var headerHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var stubEntity = MockRepository.GenerateMock<IEntity>();

    var requestedCompression = ResponseCompressionType.None;
    var storedCompression = ResponseCompressionType.None;
    var entityLength = 1000L;
    var entityType = "image/gif";

    var rangeOneStart = 0L;
    var rangeOneEnd = 499L;
    var rangeTwoStart = 500L;
    var rangeTwoEnd = 999L;

    var ranges = new[]
    {
        new RangeItem { StartRange = rangeOneStart, EndRange = rangeOneEnd },
        new RangeItem { StartRange = rangeTwoStart, EndRange = rangeTwoEnd }
    };

    // Per-part multipart headers and the closing footer, exactly as the
    // implementation builds them.
    var rangeOneHeader = "\r\n--" + EntityResponseMultiPart.MultipartBoundary + "\r\n"
        + EntityResponseMultiPart.HttpHeaderContentType + ": " + entityType + "\r\n"
        + EntityResponseMultiPart.HttpHeaderContentRange + ": " + EntityResponseMultiPart.Bytes + " "
        + rangeOneStart + "-" + rangeOneEnd + "/" + entityLength + "\r\n"
        + "\r\n";
    var rangeTwoHeader = "\r\n--" + EntityResponseMultiPart.MultipartBoundary + "\r\n"
        + EntityResponseMultiPart.HttpHeaderContentType + ": " + entityType + "\r\n"
        + EntityResponseMultiPart.HttpHeaderContentRange + ": " + EntityResponseMultiPart.Bytes + " "
        + rangeTwoStart + "-" + rangeTwoEnd + "/" + entityLength + "\r\n"
        + "\r\n";
    var multipartFooter = "\r\n--" + EntityResponseMultiPart.MultipartBoundary + "--\r\n";

    // Total body length = each part's header plus its byte count, plus the footer.
    var expectedContentLength = rangeOneHeader.Length + rangeOneEnd - rangeOneStart + 1
        + rangeTwoHeader.Length + rangeTwoEnd - rangeTwoStart + 1
        + multipartFooter.Length;

    stubEntity.Stub(x => x.ContentType).Return(entityType);
    stubEntity.Stub(x => x.ContentLength).Return(entityLength);
    stubEntity.Stub(x => x.CompressionType).Return(storedCompression);

    headerHelper.Expect(x => x.SetContentEncoding(response, requestedCompression));
    headerHelper.Expect(x => x.AppendHeader(response, EntityResponseMultiPart.HttpHeaderContentLength, expectedContentLength.ToString()));
    response.Expect(x => x.ContentType = EntityResponseMultiPart.MultipartContenttype);

    //Act
    var multiPartEntityResponse = new EntityResponseMultiPart(headerHelper, ranges);
    multiPartEntityResponse.SendHeaders(response, requestedCompression, stubEntity);

    //Assert
    headerHelper.VerifyAllExpectations();
    response.VerifyAllExpectations();
}
/// <summary>
/// SendHeaders must throw when the entity is stored Deflate-compressed but the
/// request asked for GZip (the single-part response cannot transcode).
/// </summary>
public void SendHeaders_EntityCompressionDeflateRequestedCompressionGzip_Void()
{
    //Arrange
    var httpResponse = MockRepository.GenerateMock<HttpResponseBase>();
    var httpResponseHeaderHelper = MockRepository.GenerateMock<IHttpResponseHeaderHelper>();
    var entity = MockRepository.GenerateMock<IEntity>();

    var entityCompressionType = ResponseCompressionType.Deflate;
    var responseCompressionType = ResponseCompressionType.GZip;

    // Uppercase 'L' suffix: a lowercase 'l' is easily misread as the digit '1'.
    var startRange = 0L;
    var endRange = 499L;
    var rangeItem = new RangeItem { StartRange = startRange, EndRange = endRange };

    entity.Stub(x => x.CompressionType).Return(entityCompressionType);

    //Act / Assert
    // The exception instance itself is not inspected, so it is not captured.
    var singlePartEntityResponse = new EntityResponseSinglePart(httpResponseHeaderHelper, rangeItem);
    Assert.Throws<Exception>(() => singlePartEntityResponse.SendHeaders(httpResponse, responseCompressionType, entity));
}
/// <summary>
/// Reads one range-aggregation bucket from the JSON stream and returns it as a
/// populated RangeItem, including any nested aggregations.
/// Expects <paramref name="reader"/> to be positioned on a property name of the
/// bucket object; consumes the recognized properties and stops on the first
/// unrecognized one.
/// </summary>
public IAggregation GetRangeAggregation(JsonReader reader, JsonSerializer serializer, string key = null)
{
    // Bucket fields harvested from the JSON properties.
    string lowerBoundText = null;
    string upperBoundText = null;
    long? bucketCount = null;
    double? upperBound = null;
    double? lowerBound = null;

    var done = false;
    while (!done)
    {
        var propertyName = reader.Value as string;
        switch (propertyName)
        {
            case "from":
                // Advance onto the value, capture it if numeric, then move past it.
                reader.Read();
                if (reader.ValueType == typeof(double))
                {
                    lowerBound = (double)reader.Value;
                }
                reader.Read();
                break;
            case "to":
                reader.Read();
                if (reader.ValueType == typeof(double))
                {
                    upperBound = (double)reader.Value;
                }
                reader.Read();
                break;
            case "key":
                // An explicit key in the payload overrides the caller-supplied one.
                reader.Read();
                key = reader.Value as string;
                reader.Read();
                break;
            case "from_as_string":
                reader.Read();
                lowerBoundText = reader.Value as string;
                reader.Read();
                break;
            case "to_as_string":
                reader.Read();
                upperBoundText = reader.Value as string;
                reader.Read();
                break;
            case "doc_count":
                reader.Read();
                bucketCount = (reader.Value as long?).GetValueOrDefault(0);
                reader.Read();
                break;
            default:
                // First unrecognized property ends the expected-property scan.
                done = true;
                break;
        }
    }

    var bucket = new RangeItem
    {
        Key = key,
        From = lowerBound,
        To = upperBound,
        DocCount = bucketCount.GetValueOrDefault(),
        FromAsString = lowerBoundText,
        ToAsString = upperBoundText
    };
    bucket.Aggregations = this.GetNestedAggregations(reader, serializer);
    return bucket;
}