/// <summary>
/// Resolves the lazily-constructed compressor registered under the given
/// compression type's name.
/// </summary>
/// <param name="compressionType">Type whose name is the registry key.</param>
/// <returns>The shared compressor instance for that type.</returns>
/// <exception cref="InvalidOperationException">No compressor is registered for the type.</exception>
public ICompressor CreateCompressor(CompressionType compressionType)
{
    Lazy<ICompressor> lazyCompressor;

    // Guard clause: fail fast on unregistered types, then return the cached value.
    if (!s_Dictionary.TryGetValue(compressionType.ToString(), out lazyCompressor))
    {
        throw new InvalidOperationException(string.Format(CultureInfo.CurrentCulture, "Unknown Compressor Type: '{0}'", compressionType.ToString()));
    }

    return lazyCompressor.Value;
}
/// <summary>
/// Decompress a file using the specified compression type
/// </summary>
/// <param name="compressionType">Algorithm the input file was compressed with (Deflate, GZip or MiniLZO).</param>
/// <param name="inputFilePath">Path of the compressed input file.</param>
/// <param name="outputFilePath">Path the decompressed output is written to.</param>
/// <exception cref="NotSupportedException">Thrown when the compression type has no decompressor.</exception>
/// <exception cref="InvalidOperationException">Thrown when decompression fails; the original failure is the inner exception.</exception>
/// <example>
/// <code title="Example" description="" lang="CS">
/// Compression compression = new Compression();
///
/// StringBuilder sb = new StringBuilder();
/// for (int i = 0; i &lt; 1024; i++)
///     sb.AppendLine("This is a test");
///
/// File.WriteAllText("input.txt", sb.ToString());
///
/// compression.CompressFile(CompressionType.MiniLZO, "input.txt", "compressed.txt");
/// compression.DecompressFile(CompressionType.MiniLZO, "compressed.txt", "decompressed.txt");</code>
/// </example>
public void DecompressFile(CompressionType compressionType, string inputFilePath, string outputFilePath)
{
    try
    {
        switch (compressionType)
        {
            case CompressionType.Deflate:
                DeflateDecompressFile(inputFilePath, outputFilePath);
                break;
            case CompressionType.GZip:
                GZipDecompressFile(inputFilePath, outputFilePath);
                break;
            case CompressionType.MiniLZO:
                MiniLZODecompressFile(inputFilePath, outputFilePath);
                break;
            default:
                throw new NotSupportedException(compressionType.ToString() + " is not supported");
        }
    }
    catch (Exception ex) when (!(ex is NotSupportedException))
    {
        // Fix: previously this threw the reserved base type Exception (CA2201) and
        // also re-wrapped the NotSupportedException from the default branch above.
        // The exception filter lets NotSupportedException propagate unchanged.
        throw new InvalidOperationException("Could not decompress, check input. Error: " + ex.Message, ex);
    }
}
/// <summary>
/// Writes a one-line description of an archive entry to the export stream and
/// records extension/compression-type and root-folder statistics in the shared maps.
/// </summary>
/// <param name="swExport">Open writer receiving one line per exported entry.</param>
/// <param name="file">The archive entry to describe.</param>
/// <returns>The pending WriteLineAsync task, or a completed task when the entry has no name.</returns>
private static Task ExportFileAsync(StreamWriter swExport, MS2File file)
{
    string fileName = file.Name;
    if (String.IsNullOrWhiteSpace(fileName))
    {
        // Nameless entries cannot be reported meaningfully; skip them with a warning.
        Logger.Warning($"File number \"{file.Id}\" has no name and will be ignored.");
        return (Task.CompletedTask);
    }

    uint id = file.Header.Id;
    CompressionType typeId = file.CompressionType;

    // Track which compression types were observed for each file extension.
    // NOTE(review): the HashSet passed as addValue is allocated even when the key
    // already exists and only the update lambda runs — harmless, but wasteful.
    FileTypes.AddOrUpdate(Path.GetExtension(fileName), new HashSet<string>() { typeId.ToString() }, (_, v) => { v.Add(typeId.ToString()); return (v); });

    string rootDirectory = PathEx.GetRootDirectory(fileName);
    if (!String.IsNullOrEmpty(rootDirectory))
    {
        if (String.IsNullOrEmpty(file.InfoHeader.RootFolderId))
        {
            Logger.Warning($"Root folder id is empty but it has a root folder ({rootDirectory})!");
        }

        // Track every root-folder id seen under each root directory name.
        RootFolderIds.AddOrUpdate(rootDirectory, new HashSet<string>() { file.InfoHeader.RootFolderId }, (_, v) => { v.Add(file.InfoHeader.RootFolderId); return (v); });
    }

    int propCount = file.InfoHeader.Properties.Count;
    string info = String.Join(",", file.InfoHeader.Properties);
    // Example line: "000123 - Type:Zlib; Properties:2; Info=a,b" (id zero-padded to 6 digits).
    return (swExport.WriteLineAsync($"{id:d6} - Type:{typeId}; Properties:{propCount}; Info={info}"));
}
/// <summary>
/// Reads a compressed block from the reader and returns its decompressed bytes.
/// </summary>
/// <param name="reader">Source of the raw (possibly compressed) bytes.</param>
/// <param name="ciblockSize">Compressed block size, in bytes.</param>
/// <param name="uiblockSize">Uncompressed block size, in bytes.</param>
/// <param name="compression">Compression scheme used for the block.</param>
/// <returns>The decompressed data (or the raw bytes when uncompressed).</returns>
/// <exception cref="DecompressionException">The compression scheme is not supported.</exception>
private byte[] ReadCompressedData(UnityDataReader reader, int ciblockSize, int uiblockSize, CompressionType compression)
{
    switch (compression)
    {
        case CompressionType.None:
            return reader.ReadBytes(ciblockSize);

        case CompressionType.Lz4:
        case CompressionType.Lz4hc:
            // LZ4 and LZ4HC share the same decoder.
            byte[] rawBlock = reader.ReadBytes(ciblockSize);
            return LZ4Codec.Decode(rawBlock, 0, ciblockSize, uiblockSize);

        default:
            throw new DecompressionException("Unsupported compression type: " + compression.ToString());
    }
}
/// <summary>
/// Gets compression method
/// </summary>
/// <param name="optionsCompressType">Gzip/Snappy/None</param>
/// <returns>CompressionMethod</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown for any unmapped compression type.</exception>
public static CompressionMethod GetCompressionMethod(CompressionType optionsCompressType)
{
    switch (optionsCompressType)
    {
        case CompressionType.Gzip:
            return (CompressionMethod.Gzip);
        case CompressionType.Snappy:
            return (CompressionMethod.Snappy);
        case CompressionType.None:
            return (CompressionMethod.None);
        default:
            // Fix: the enum value was previously passed as the paramName argument of
            // ArgumentOutOfRangeException; use the (paramName, actualValue, message)
            // overload so the exception reports the parameter and value correctly.
            throw new ArgumentOutOfRangeException(nameof(optionsCompressType), optionsCompressType, "Unsupported compression type.");
    }
}
/// <summary>
/// Sets the output cache parameters and also the client side caching parameters
/// </summary>
/// <param name="context">Current request context whose Response.Cache is configured.</param>
/// <param name="fileName">The name of the file that has been saved to disk</param>
/// <param name="fileset">The Base64 encoded string supplied in the query string for the handler</param>
/// <param name="compressionType">Compression applied to the response; folded into the ETag.</param>
private void SetCaching(HttpContext context, string fileName, string fileset, CompressionType compressionType)
{
    //This ensures OutputCaching is set for this handler and also controls
    //client side caching on the browser side. Default is 10 days.
    var duration = TimeSpan.FromDays(10);
    var cache = context.Response.Cache;
    cache.SetCacheability(HttpCacheability.Public);
    // NOTE(review): expiry and last-modified use xUserTime.LocalTime() rather than
    // UTC — presumably intentional; confirm against xUserTime's configuration.
    cache.SetExpires(xUserTime.LocalTime().Add(duration));
    cache.SetMaxAge(duration);
    cache.SetValidUntilExpires(true);
    cache.SetLastModified(xUserTime.LocalTime());
    // ETag is derived from the requested file set plus the compression type, so the
    // same set compressed differently yields distinct validators.
    cache.SetETag("\"" + (fileset + compressionType.ToString()) + "\"");
    //set server OutputCache to vary by our params
    // in any case, cache already varies by pathInfo (build-in) so for path formats, we do not need anything
    // just add params for querystring format, just in case...
    cache.VaryByParams["t"] = true;
    cache.VaryByParams["s"] = true;
    cache.VaryByParams["cdv"] = true;
    //ensure the cache is different based on the encoding specified per browser
    cache.VaryByContentEncodings["gzip"] = true;
    cache.VaryByContentEncodings["deflate"] = true;
    //don't allow varying by wildcard
    cache.SetOmitVaryStar(true);
    //ensure client browser maintains strict caching rules
    cache.AppendCacheExtension("must-revalidate, proxy-revalidate");
    //This is the only way to set the max-age cachability header in ASP.Net!
    //FieldInfo maxAgeField = cache.GetType().GetField("_maxAge", BindingFlags.Instance | BindingFlags.NonPublic);
    //maxAgeField.SetValue(cache, duration);

    //make this output cache dependent on the file if there is one.
    if (!string.IsNullOrEmpty(fileName))
    {
        context.Response.AddFileDependency(fileName);
    }
}
/// <summary>
/// Returns the compression implementation for the requested type, creating the
/// internal engine on first use.
/// </summary>
/// <param name="typeOfEngine">Which engine to return.</param>
/// <returns>The matching ICompression instance.</returns>
/// <exception cref="UnsupportedCompressionTypeException">The type has no engine.</exception>
public ICompression GetCompressionEngine(CompressionType typeOfEngine)
{
    switch (typeOfEngine)
    {
        case CompressionType.NoCompression:
            return m_NoCompression;

        case CompressionType.Internal:
            // Lazily instantiate the internal engine the first time it is requested.
            return m_QuickLZ ?? (m_QuickLZ = new InternalCompression());

        default:
            throw new UnsupportedCompressionTypeException("Type is " + typeOfEngine.ToString());
    }
}
/// <summary>
/// Replaces the response content with a compressed copy and rewrites the
/// Content-Encoding / Content-Type headers accordingly.
/// </summary>
/// <param name="context">The executed action context whose response is rewritten in place.</param>
/// <param name="compressionType">Compression algorithm; its lowercase name becomes the Content-Encoding value.</param>
/// <returns>The same context, with compressed content.</returns>
public static HttpActionExecutedContext GetCompressedHttpActionExecutedContext(HttpActionExecutedContext context, CompressionType compressionType)
{
    var content = new byte[0];

    if (context.Response.Content != null)
    {
        // NOTE(review): blocking on .Result can deadlock under a synchronization
        // context; an async pipeline would be preferable, but the synchronous
        // signature is preserved for compatibility with existing callers.
        var bytes = context.Response.Content.ReadAsByteArrayAsync().Result;
        if (bytes != null)
        {
            content = CompressionHelper.Compress(bytes, compressionType, CompressionLevel.Fastest);
        }
    }

    context.Response.Content = new ByteArrayContent(content);
    context.Response.Content.Headers.Remove("Content-Type");
    // Fix: use the invariant culture for lowercasing so the header value is stable
    // regardless of server culture (under tr-TR, "GZip".ToLower() yields "gzıp").
    context.Response.Content.Headers.Add("Content-encoding", compressionType.ToString().ToLowerInvariant());
    context.Response.Content.Headers.Add("Content-Type", "application/json");
    return (context);
}
/// <summary>
/// Sets the output cache parameters and also the client side caching parameters
/// </summary>
/// <param name="context">Current request context.</param>
/// <param name="fileName">The name of the file that has been saved to disk</param>
/// <param name="fileset">The Base64 encoded string supplied in the query string for the handler</param>
/// <param name="compressionType">Compression applied; folded into the ETag hash.</param>
/// <param name="page">The outputcache page - ensures server side output cache is stored</param>
private void SetCaching(HttpContextBase context, string fileName, string fileset, CompressionType compressionType, OutputCachedPage page)
{
    // Execute the WebForms page so server-side output caching takes effect.
    page.ProcessRequest(HttpContext.Current);

    // The cache already varies by pathInfo (built-in); these params cover the
    // querystring request format.
    string etag = (fileset + compressionType.ToString()).GenerateHash();
    string[] varyByParams = { "t", "s", "cdv" };
    context.SetClientCachingResponse(etag, ClientDependencySettings.Instance.OutputCacheDays, varyByParams);

    // Invalidate the output cache whenever the on-disk composite file changes.
    if (!string.IsNullOrEmpty(fileName))
    {
        context.Response.AddFileDependency(fileName);
    }
}
/// <summary>
/// Call this from Application_BeginRequest of an HttpApplication (or derived class)
/// to compress the Response stream sent to the client.
/// </summary>
/// <param name="httpApplication">Application whose current request/response are filtered.</param>
/// <param name="compressFilePathExtension">Pipe-separated list of request extensions eligible for compression; defaults to "html|htm|css|js".</param>
/// <param name="filterWhiteSpace">Whether to also strip whitespace/newline/tab sequences from the output; defaults to false.</param>
public static void CompressResponse(this HttpApplication httpApplication, string compressFilePathExtension = "html|htm|css|js", bool filterWhiteSpace = false)
{
    HttpRequest request = httpApplication.Request;
    HttpResponse response = httpApplication.Response;
    string currentExecutionFilePathExtension = httpApplication.Request.CurrentExecutionFilePathExtension;

    // Compress extensionless requests, or those whose extension is in the allow-list.
    if (string.IsNullOrWhiteSpace(currentExecutionFilePathExtension) || (!string.IsNullOrWhiteSpace(compressFilePathExtension) && Regex.IsMatch(currentExecutionFilePathExtension, @"\.(" + compressFilePathExtension + ")", RegexOptions.IgnoreCase)))
    {
        try
        {
            // Pick the best compression the client advertises (GZip/Deflate/None).
            CompressionType compressionType = request.SupportCompression();
            if (compressionType != CompressionType.None)
            {
                // Fix: lowercase with the invariant culture so the Content-Encoding
                // value is correct under any server culture (e.g. Turkish-I casing).
                response.AppendHeader("Content-Encoding", compressionType.ToString().ToLowerInvariant());
                switch (compressionType)
                {
                    case CompressionType.GZip:
                        response.Filter = new GZipStream(response.Filter, CompressionMode.Compress, true);
                        break;
                    case CompressionType.Deflate:
                        response.Filter = new DeflateStream(response.Filter, CompressionMode.Compress, true);
                        break;
                    case CompressionType.None:
                    default:
                        break;
                }
            }

            if (filterWhiteSpace)
            {
                response.Filter = new YuYuFilter(response.Filter, request.CurrentExecutionFilePathExtension);
            }
        }
        catch
        {
            // Deliberate best-effort: compression is an optimization; a failure here
            // must never break the response itself.
        }
    }
}
/// <summary>
/// Human-readable string version of the object.
/// </summary>
/// <returns>String.</returns>
public override string ToString()
{
    // Pre-compute the two nullable fields so the concatenation below stays flat.
    string presharedKeyText = PresharedKey != null ? WatsonCommon.ByteArrayToHex(PresharedKey) : "null";
    string expirationText = Expiration != null ? Expiration.Value.ToString(_DateTimeFormat) : "null";

    string ret =
        "---" + Environment.NewLine
        + " Preshared key : " + presharedKeyText + Environment.NewLine
        + " Status : " + Status.ToString() + Environment.NewLine
        + " SyncRequest : " + SyncRequest.ToString() + Environment.NewLine
        + " SyncResponse : " + SyncResponse.ToString() + Environment.NewLine
        + " ExpirationUtc : " + expirationText + Environment.NewLine
        + " Conversation GUID : " + ConversationGuid + Environment.NewLine
        + " Compression : " + Compression.ToString() + Environment.NewLine;

    // Optional sections, only emitted when the corresponding member is present.
    if (Metadata != null)
    {
        ret += " Metadata : " + Metadata.Count + " entries" + Environment.NewLine;
    }

    if (DataStream != null)
    {
        ret += " DataStream : present, " + ContentLength + " bytes" + Environment.NewLine;
    }

    return (ret);
}
/// <summary>
/// Decompress a stream using the specified compression type
/// </summary>
/// <param name="compressionType">Algorithm the input stream was compressed with (Deflate, GZip or MiniLZO).</param>
/// <param name="input">The compressed input stream.</param>
/// <returns>A stream yielding the decompressed data.</returns>
/// <exception cref="NotSupportedException">Thrown when the compression type has no decompressor.</exception>
/// <exception cref="InvalidOperationException">Thrown when decompression fails; the original failure is the inner exception.</exception>
/// <example>
/// <code title="Example" description="" lang="CS">
/// Compression compression = new Compression();
///
/// StringBuilder sb = new StringBuilder();
/// for (int i = 0; i &lt; 1024; i++)
///     sb.AppendLine("This is a test");
///
/// File.WriteAllText("input.txt", sb.ToString());
///
/// using (FileStream inputStream = new FileStream("input.txt", FileMode.Open, FileAccess.Read))
/// {
///     Stream compressedStream = compression.CompressStream(CompressionType.MiniLZO, inputStream);
///     Console.WriteLine("{0} bytes compressed to {1} bytes with MiniLZO", inputStream.Length, compressedStream.Length);
///
///     Stream decompressedStream = compression.DecompressStream(CompressionType.MiniLZO, compressedStream);
/// }</code>
/// </example>
public Stream DecompressStream(CompressionType compressionType, Stream input)
{
    try
    {
        switch (compressionType)
        {
            case CompressionType.Deflate:
                return (DeflateDecompressStream(input));
            case CompressionType.GZip:
                return (GZipDecompressStream(input));
            case CompressionType.MiniLZO:
                return (MiniLZODecompressStream(input));
            default:
                throw new NotSupportedException(compressionType.ToString() + " is not supported");
        }
    }
    catch (Exception ex) when (!(ex is NotSupportedException))
    {
        // Fix: previously this threw the reserved base type Exception (CA2201) and
        // also re-wrapped the NotSupportedException from the default branch above.
        // The exception filter lets NotSupportedException propagate unchanged.
        throw new InvalidOperationException("Could not decompress, check input. Error: " + ex.Message, ex);
    }
}
/// <summary>
/// Compresses the contents of a memory stream with the requested algorithm.
/// </summary>
/// <param name="compressionType">Algorithm to apply (Deflate, GZip or MiniLZO).</param>
/// <param name="input">Stream holding the data to compress.</param>
/// <returns>A new memory stream containing the compressed data.</returns>
/// <exception cref="NotSupportedException">Thrown when the compression type has no compressor.</exception>
/// <exception cref="InvalidOperationException">Thrown when compression fails; the original failure is the inner exception.</exception>
private MemoryStream CompressMemoryStream(CompressionType compressionType, MemoryStream input)
{
    try
    {
        switch (compressionType)
        {
            case CompressionType.Deflate:
                return (DeflateCompressMemoryStream(CompressionMode.Compress, input));
            case CompressionType.GZip:
                return (GZipCompressMemoryStream(CompressionMode.Compress, input));
            case CompressionType.MiniLZO:
                return (MiniLZOCompressMemoryStream(input));
            default:
                throw new NotSupportedException(compressionType.ToString() + " is not supported");
        }
    }
    catch (Exception ex) when (!(ex is NotSupportedException))
    {
        // Fix: previously this threw the reserved base type Exception (CA2201) and
        // also re-wrapped the NotSupportedException from the default branch above.
        // The exception filter lets NotSupportedException propagate unchanged.
        throw new InvalidOperationException("Could not compress, check input. Error: " + ex.Message, ex);
    }
}
/// <summary>
/// Sets the output cache parameters and also the client side caching parameters
/// </summary>
/// <param name="context">Current request context.</param>
/// <param name="fileName">The name of the file that has been saved to disk</param>
/// <param name="fileset">The Base64 encoded string supplied in the query string for the handler</param>
/// <param name="compressionType">Compression applied; folded into the ETag hash.</param>
/// <param name="page">The outputcache page - ensures server side output cache is stored</param>
private void SetCaching(HttpContextBase context, string fileName, string fileset, CompressionType compressionType, OutputCachedPage page)
{
    // Execute the WebForms page so server-side output caching takes effect.
    page.ProcessRequest(HttpContext.Current);

    // The cache already varies by pathInfo (built-in); these params cover the
    // querystring request format.
    string etag = (fileset + compressionType.ToString()).GenerateHash();
    string[] varyByParams = { "t", "s", "cdv" };
    const int browserCacheDays = 10;
    context.SetClientCachingResponse(etag, browserCacheDays, varyByParams);

    // Invalidate the output cache whenever the on-disk composite file changes.
    if (!string.IsNullOrEmpty(fileName))
    {
        context.Response.AddFileDependency(fileName);
    }
}
/// <summary>
/// Build the Message object from data that awaits in a NetworkStream or SslStream.
/// </summary>
/// <returns>True if successful.</returns>
internal async Task<bool> BuildFromStream()
{
    try
    {
        #region Read-Headers

        // Read one byte at a time until the header demarcation (two consecutive
        // newlines) appears; everything before it is the JSON-serialized header.
        byte[] buffer = new byte[0];
        byte[] end = AppendBytes(Encoding.UTF8.GetBytes(Environment.NewLine), Encoding.UTF8.GetBytes(Environment.NewLine));
        while (true)
        {
            byte[] data = await ReadFromStreamAsync(_DataStream, 1);
            if (data != null && data.Length == 1)
            {
                buffer = AppendBytes(buffer, data);
                if (buffer.Length >= 4)
                {
                    // NOTE(review): the 4-byte terminator check assumes a 2-byte
                    // Environment.NewLine (CRLF, i.e. Windows); on platforms where
                    // NewLine is "\n" the terminator is only 2 bytes — confirm.
                    byte[] endCheck = buffer.Skip(buffer.Length - 4).Take(4).ToArray();
                    if (endCheck.SequenceEqual(end))
                    {
                        _Logger?.Invoke(_Header + "ReadHeaders found header demarcation");
                        break;
                    }
                }
            }
        }

        // Deserialize the header and copy its fields onto this message instance.
        WatsonMessage msg = SerializationHelper.DeserializeJson<WatsonMessage>(Encoding.UTF8.GetString(buffer));
        ContentLength = msg.ContentLength;
        PresharedKey = msg.PresharedKey;
        Status = msg.Status;
        Metadata = msg.Metadata;
        SyncRequest = msg.SyncRequest;
        SyncResponse = msg.SyncResponse;
        Expiration = msg.Expiration;
        ConversationGuid = msg.ConversationGuid;
        Compression = msg.Compression;
        _Logger?.Invoke(_Header + "BuildFromStream header processing complete" + Environment.NewLine + Encoding.UTF8.GetString(buffer).Trim());

        #endregion

        #region Setup-Stream

        // Wrap the data stream in the matching decompressor so subsequent reads
        // yield the payload transparently.
        if (Compression == CompressionType.None)
        {
            // do nothing
        }
        else
        {
            if (Compression == CompressionType.Deflate)
            {
                _DataStream = new DeflateStream(_DataStream, CompressionMode.Decompress, true);
            }
            else if (Compression == CompressionType.Gzip)
            {
                _DataStream = new GZipStream(_DataStream, CompressionMode.Decompress, true);
            }
            else
            {
                throw new InvalidOperationException("Unknown compression type: " + Compression.ToString());
            }
        }

        #endregion

        return (true);
    }
    catch (IOException)
    {
        // Stream-level failures are treated as a peer disconnect, not an error.
        _Logger?.Invoke(_Header + "BuildStream IOexception, disconnect assumed");
        return (false);
    }
    catch (SocketException)
    {
        _Logger?.Invoke(_Header + "BuildStream SocketException, disconnect assumed");
        return (false);
    }
    catch (ObjectDisposedException)
    {
        _Logger?.Invoke(_Header + "BuildStream ObjectDisposedException, disconnect assumed");
        return (false);
    }
    catch (Exception e)
    {
        // Unexpected failures are logged with full detail but still reported as
        // an unsuccessful build rather than thrown to the caller.
        _Logger?.Invoke(_Header + "BuildStream exception: " + Environment.NewLine + SerializationHelper.SerializeJson(e, true) + Environment.NewLine);
        return (false);
    }
}
/// <summary>
/// Reads a file and decompresses it with the given compression format.
/// </summary>
/// <param name="file">Path of the file to read.</param>
/// <param name="cmp">Compression format the file is stored in.</param>
/// <returns>The decompressed bytes.</returns>
/// <exception cref="ArgumentOutOfRangeException">Unknown compression format.</exception>
public static byte[] Decompress(string file, CompressionType cmp)
{
    try
    {
        switch (cmp)
        {
            case CompressionType.Uncompressed:
                return File.ReadAllBytes(file);
            case CompressionType.Kosinski:
                return Kosinski.Decompress(file);
            case CompressionType.KosinskiM:
                return ModuledKosinski.Decompress(file, LevelData.littleendian ? Endianness.LittleEndian : Endianness.BigEndian);
            case CompressionType.Nemesis:
                return Nemesis.Decompress(file);
            case CompressionType.Enigma:
                return Enigma.Decompress(file, LevelData.littleendian ? Endianness.LittleEndian : Endianness.BigEndian);
            case CompressionType.SZDD:
                return SZDDComp.SZDDComp.Decompress(file);
            case CompressionType.Comper:
                return Comper.Decompress(file);
            default:
                throw new ArgumentOutOfRangeException("cmp", "Invalid compression type " + cmp + "!");
        }
    }
    catch
    {
        // Log which file/format failed, then let the original exception propagate.
        LevelData.Log("Unable to read file \"" + file + "\" with compression " + cmp.ToString() + ":");
        throw;
    }
}
/// <summary>
/// Establish a RDP connection with RDP Server. Drives the full client-side
/// connection sequence: X.224 negotiation, MCS/basic settings exchange, channel
/// connection, security commencement, licensing, capabilities exchange and
/// connection finalization, asserting on each expected server PDU.
/// </summary>
/// <param name="requestedProtocols">Flags indicate supported security protocols</param>
/// <param name="SVCNames">Array of static virtual channels' name</param>
/// <param name="highestCompressionTypeSupported">Indicate the highest compression type supported</param>
/// <param name="isReconnect">Whether this is in a reconnection sequence</param>
/// <param name="autoLogon">Whether auto logon using username and password in client info PDU</param>
/// <param name="supportEGFX">Set the support of RDPEGFX</param>
/// <param name="supportAutoDetect">Set the support of auto-detect</param>
/// <param name="supportHeartbeatPDU">Set the support of Heartbeat PDU</param>
/// <param name="supportMultitransportReliable">Set the support of reliable multitransport</param>
/// <param name="supportMultitransportLossy">Set the support of lossy multitransport</param>
/// <param name="supportAutoReconnect">Set the support of auto-reconnect</param>
/// <param name="supportFastPathInput">Set the support of fast-path input</param>
/// <param name="supportFastPathOutput">Set the support of fast-path output</param>
/// <param name="supportSurfaceCommands">Set the support of surface commands</param>
/// <param name="supportSVCCompression">Set the support of static virtual channel data compression</param>
/// <param name="supportRemoteFXCodec">Set the support of RemoteFX codecs</param>
public void EstablishRDPConnection(requestedProtocols_Values requestedProtocols,
    string[] SVCNames,
    CompressionType highestCompressionTypeSupported,
    bool isReconnect = false,
    bool autoLogon = false,
    bool supportEGFX = false,
    bool supportAutoDetect = false,
    bool supportHeartbeatPDU = false,
    bool supportMultitransportReliable = false,
    bool supportMultitransportLossy = false,
    bool supportAutoReconnect = false,
    bool supportFastPathInput = false,
    bool supportFastPathOutput = false,
    bool supportSurfaceCommands = false,
    bool supportSVCCompression = false,
    bool supportRemoteFXCodec = false)
{
    #region logging
    // Build a human-readable "|"-separated list of the requested protocol flags.
    string requestProtocolString = "PROTOCOL_RDP_FLAG";
    if (requestedProtocols.HasFlag(requestedProtocols_Values.PROTOCOL_SSL_FLAG))
    {
        requestProtocolString = requestProtocolString + "|PROTOCOL_SSL_FLAG";
    }
    if (requestedProtocols.HasFlag(requestedProtocols_Values.PROTOCOL_HYBRID_FLAG))
    {
        requestProtocolString = requestProtocolString + "|PROTOCOL_HYBRID_FLAG";
    }
    if (requestedProtocols.HasFlag(requestedProtocols_Values.PROTOCOL_HYBRID_EX))
    {
        requestProtocolString = requestProtocolString + "|PROTOCOL_HYBRID_EX";
    }
    // Comma-separated list of static virtual channel names (trailing comma kept,
    // matching the original log format).
    string svcNameString = "";
    if (SVCNames != null && SVCNames.Length > 0)
    {
        foreach (string svcName in SVCNames)
        {
            svcNameString = svcNameString + svcName + ",";
        }
    }
    this.Site.Log.Add(LogEntryKind.Comment, @"EstablishRDPConnection: request Protocols = {0}, Name of static virtual channels = {1}, Highest compression type supported = {2}, Is Reconnect = {3} RDPEGFX supported = {4}, AutoDetect supported = {5}, HeartbeatPDU supported = {6}, Reliable Multitransport supported = {7}, Lossy Multitransport supported = {8}, AutoReconnect supported = {9}, FastPathInput supported= {10}, FastPathOutput supported = {11}, SurfaceCommands supported = {12}, SVCCompression supported = {13}, RemoteFXCodec supported = {14}.", requestProtocolString, svcNameString, highestCompressionTypeSupported.ToString(), isReconnect, supportEGFX, supportAutoDetect, supportHeartbeatPDU, supportMultitransportReliable, supportMultitransportLossy, supportAutoReconnect, supportFastPathInput, supportFastPathOutput, supportSurfaceCommands, supportSVCCompression, supportRemoteFXCodec);
    #endregion logging

    #region Connection Initiation
    SendClientX224ConnectionRequest(NegativeType.None, requestedProtocols);
    Server_X_224_Connection_Confirm_Pdu connectionConfirmPdu = ExpectPacket<Server_X_224_Connection_Confirm_Pdu>(pduWaitTimeSpan);
    if (connectionConfirmPdu == null)
    {
        // No confirm arrived in time: check briefly for an explicit negotiate
        // failure so the assertion message can include the failure code.
        TimeSpan waitTime = new TimeSpan(0, 0, 1);
        Server_X_224_Negotiate_Failure_Pdu failurePdu = ExpectPacket<Server_X_224_Negotiate_Failure_Pdu>(waitTime);
        if (failurePdu != null)
        {
            Site.Assert.Fail("Received a Server X224 Connection confirm with RDP_NEG_FAILURE structure, failureCode is {0}.", failurePdu.rdpNegFailure.failureCode);
        }
        Site.Assert.Fail("Expecting a Server X224 Connection Confirm PDU.");
    }
    if (supportEGFX)
    {
        // Verify support of EGFX on Server
        Site.Assert.IsTrue(connectionConfirmPdu.rdpNegData != null && connectionConfirmPdu.rdpNegData.flags.HasFlag(RDP_NEG_RSP_flags_Values.DYNVC_GFX_PROTOCOL_SUPPORTED), "The RDP Server should support RDPEGFX.");
    }
    #endregion Connection Initiation

    #region Basic Setting Exchange
    SendClientMCSConnectInitialPDU(NegativeType.None, SVCNames, supportEGFX, supportAutoDetect, supportHeartbeatPDU, supportMultitransportReliable, supportMultitransportLossy, false);
    Server_MCS_Connect_Response_Pdu_with_GCC_Conference_Create_Response connectResponsePdu = ExpectPacket<Server_MCS_Connect_Response_Pdu_with_GCC_Conference_Create_Response>(pduWaitTimeSpan);
    Site.Assert.IsNotNull(connectResponsePdu, "Expecting a Server MCS Connect Response PDU with GCC Conference Create Response.");
    // Record which multitransport variants the server advertises for later phases.
    if (connectResponsePdu.mcsCrsp.gccPdu.serverMultitransportChannelData != null)
    {
        if (connectResponsePdu.mcsCrsp.gccPdu.serverMultitransportChannelData.flags.HasFlag(MULTITRANSPORT_TYPE_FLAGS.TRANSPORTTYPE_UDPFECR))
        {
            this.serverSupportUDPFECR = true;
        }
        if (connectResponsePdu.mcsCrsp.gccPdu.serverMultitransportChannelData.flags.HasFlag(MULTITRANSPORT_TYPE_FLAGS.TRANSPORTTYPE_UDPFECL))
        {
            this.serverSupportUDPFECL = true;
        }
        if (connectResponsePdu.mcsCrsp.gccPdu.serverMultitransportChannelData.flags.HasFlag(MULTITRANSPORT_TYPE_FLAGS.TRANSPORTTYPE_UDP_PREFERRED))
        {
            this.serverSupportUDPPrefferred = true;
        }
    }
    #endregion Basic Setting Exchange

    #region Channel Connection
    SendClientMCSErectDomainRequest(NegativeType.None);
    SendClientMCSAttachUserRequest(NegativeType.None);
    Server_MCS_Attach_User_Confirm_Pdu userConfirmPdu = ExpectPacket<Server_MCS_Attach_User_Confirm_Pdu>(pduWaitTimeSpan);
    Site.Assert.IsNotNull(userConfirmPdu, "Expecting a Server MCS Attach User Confirm PDU.");
    ChannelJoinRequestAndConfirm();
    #endregion

    #region RDP Security Commencement
    // The security exchange PDU is only needed when standard RDP security was
    // selected (not TLS/CredSSP).
    if (rdpbcgrClientStack.Context.ServerSelectedProtocol == (uint)selectedProtocols_Values.PROTOCOL_RDP_FLAG)
    {
        SendClientSecurityExchangePDU(NegativeType.None);
    }
    #endregion

    #region Secure Setting Exchange
    SendClientInfoPDU(NegativeType.None, highestCompressionTypeSupported, isReconnect, autoLogon);
    #endregion

    #region Licensing
    Server_License_Error_Pdu_Valid_Client licenseErrorPdu = ExpectPacket<Server_License_Error_Pdu_Valid_Client>(pduWaitTimeSpan);
    Site.Assert.IsNotNull(licenseErrorPdu, "Expecting a Server License Error PDU.");
    #endregion

    #region Capabilities Exchange
    Server_Demand_Active_Pdu demandActivePdu = ExpectPacket<Server_Demand_Active_Pdu>(pduWaitTimeSpan);
    Site.Assert.IsNotNull(demandActivePdu, "Expecting a Server Demand Active PDU.");
    SendClientConfirmActivePDU(NegativeType.None, supportAutoReconnect, supportFastPathInput, supportFastPathOutput, supportSurfaceCommands, supportSVCCompression, supportRemoteFXCodec);
    #endregion

    #region Connection Finalization
    SendClientSynchronizePDU();
    Server_Synchronize_Pdu syncPdu = ExpectPacket<Server_Synchronize_Pdu>(pduWaitTimeSpan);
    Site.Assert.IsNotNull(syncPdu, "Expecting a Server Synchronize PDU.");
    Server_Control_Pdu_Cooperate CoopControlPdu = ExpectPacket<Server_Control_Pdu_Cooperate>(pduWaitTimeSpan);
    Site.Assert.IsNotNull(CoopControlPdu, "Expecting a Server Control PDU - Cooperate.");
    SendClientControlCooperatePDU();
    SendClientControlRequestPDU();
    Server_Control_Pdu_Granted_Control grantedControlPdu = ExpectPacket<Server_Control_Pdu_Granted_Control>(pduWaitTimeSpan);
    Site.Assert.IsNotNull(grantedControlPdu, "Expecting a Server Control PDU - Granted Control.");
    if (IsBitmapCacheHostSupport)
    {
        SendClientPersistentKeyListPDU();
    }
    SendClientFontListPDU();
    Server_Font_Map_Pdu fontMapPdu = ExpectPacket<Server_Font_Map_Pdu>(pduWaitTimeSpan);
    Site.Assert.IsNotNull(fontMapPdu, "Expecting a Server Font Map PDU.");
    #endregion
}
/// <summary>
/// Sets the output cache parameters and also the client side caching parameters
/// </summary>
/// <param name="context">Current request context whose Response.Cache is configured.</param>
/// <param name="fileName">The name of the file that has been saved to disk</param>
/// <param name="fileset">The Base64 encoded string supplied in the query string for the handler</param>
/// <param name="compressionType">Compression applied to the response; folded into the ETag hash.</param>
private void SetCaching(HttpContextBase context, string fileName, string fileset, CompressionType compressionType)
{
    if (string.IsNullOrEmpty(fileName))
    {
        // No composite file was produced: log and skip all caching setup.
        ClientDependencySettings.Instance.Logger.Error("ClientDependency handler path is null", new Exception());
        return;
    }

    //This ensures OutputCaching is set for this handler and also controls
    //client side caching on the browser side. Default is 10 days.
    var duration = TimeSpan.FromDays(10);
    var cache = context.Response.Cache;
    cache.SetCacheability(HttpCacheability.Public);
    cache.SetExpires(DateTime.Now.Add(duration));
    cache.SetMaxAge(duration);
    cache.SetValidUntilExpires(true);
    cache.SetLastModified(DateTime.Now);

    // NOTE(review): MD5 via FormsAuthentication is obsolete for security purposes,
    // but here it only derives a cache-validator ETag, not a secret.
    cache.SetETag("\"" + FormsAuthentication.HashPasswordForStoringInConfigFile(fileset + compressionType.ToString(), "MD5") + "\"");

    //set server OutputCache to vary by our params
    /* // proper way to do it is to have
     * cache.SetVaryByCustom("cdparms");
     *
     * // then have this in global.asax
     * public override string GetVaryByCustomString(HttpContext context, string arg)
     * {
     *     if (arg == "cdparms")
     *     {
     *         if (string.IsNullOrEmpty(context.Request.PathInfo))
     *         {
     *             // querystring format
     *             return context.Request["s"] + "+" + context.Request["t"] + "+" + (context.Request["v"] ?? "0");
     *         }
     *         else
     *         {
     *             // path format
     *             return context.Request.PathInfo.Replace('/', '');
     *         }
     *     }
     * }
     *
     * // that way, there would be one cache entry for both querystring and path formats.
     * // but, it requires a global.asax and I can't find a way to do without it.
     */
    // in any case, cache already varies by pathInfo (build-in) so for path formats, we do not need anything
    // just add params for querystring format, just in case...
    cache.VaryByParams["t"] = true;
    cache.VaryByParams["s"] = true;
    cache.VaryByParams["cdv"] = true;

    //ensure the cache is different based on the encoding specified per browser
    cache.VaryByContentEncodings["gzip"] = true;
    cache.VaryByContentEncodings["deflate"] = true;

    //don't allow varying by wildcard
    cache.SetOmitVaryStar(true);

    //ensure client browser maintains strict caching rules
    cache.AppendCacheExtension("must-revalidate, proxy-revalidate");

    //This is the only way to set the max-age cachability header in ASP.Net!
    //FieldInfo maxAgeField = cache.GetType().GetField("_maxAge", BindingFlags.Instance | BindingFlags.NonPublic);
    //maxAgeField.SetValue(cache, duration);

    //make this output cache dependent on the file if there is one.
    // NOTE(review): redundant — the guard clause at the top already returned when
    // fileName was empty; kept as-is for safety.
    if (!string.IsNullOrEmpty(fileName))
    {
        context.Response.AddFileDependency(fileName);
    }
}
/// <summary>
/// Sets the output cache parameters and also the client side caching parameters
/// </summary>
/// <param name="context">Current request context whose Response.Cache is configured.</param>
/// <param name="fileName">The name of the file that has been saved to disk</param>
/// <param name="fileset">The Base64 encoded string supplied in the query string for the handler</param>
/// <param name="compressionType">Compression applied to the response; folded into the ETag hash.</param>
private void SetCaching(HttpContextBase context, string fileName, string fileset, CompressionType compressionType)
{
    if (string.IsNullOrEmpty(fileName))
    {
        // No composite file was produced: log and skip all caching setup.
        ClientDependencySettings.Instance.Logger.Error("ClientDependency handler path is null", new Exception());
        return;
    }

    //This ensures OutputCaching is set for this handler and also controls
    //client side caching on the browser side. Default is 10 days.
    var duration = TimeSpan.FromDays(10);
    var cache = context.Response.Cache;
    cache.SetCacheability(HttpCacheability.Public);
    cache.SetExpires(DateTime.Now.Add(duration));
    cache.SetMaxAge(duration);
    cache.SetValidUntilExpires(true);
    cache.SetLastModified(DateTime.Now);

    // NOTE(review): MD5 via FormsAuthentication is obsolete for security purposes,
    // but here it only derives a cache-validator ETag, not a secret.
    cache.SetETag("\"" + FormsAuthentication.HashPasswordForStoringInConfigFile(fileset + compressionType.ToString(), "MD5") + "\"");

    //set server OutputCache to vary by our params
    /* // proper way to do it is to have
     * cache.SetVaryByCustom("cdparms");
     *
     * // then have this in global.asax
     * public override string GetVaryByCustomString(HttpContext context, string arg)
     * {
     *     if (arg == "cdparms")
     *     {
     *         if (string.IsNullOrEmpty(context.Request.PathInfo))
     *         {
     *             // querystring format
     *             return context.Request["s"] + "+" + context.Request["t"] + "+" + (context.Request["v"] ?? "0");
     *         }
     *         else
     *         {
     *             // path format
     *             return context.Request.PathInfo.Replace('/', '');
     *         }
     *     }
     * }
     *
     * // that way, there would be one cache entry for both querystring and path formats.
     * // but, it requires a global.asax and I can't find a way to do without it.
     */
    // in any case, cache already varies by pathInfo (build-in) so for path formats, we do not need anything
    // just add params for querystring format, just in case...
    cache.VaryByParams["t"] = true;
    cache.VaryByParams["s"] = true;
    cache.VaryByParams["cdv"] = true;

    //ensure the cache is different based on the encoding specified per browser
    cache.VaryByContentEncodings["gzip"] = true;
    cache.VaryByContentEncodings["deflate"] = true;

    //don't allow varying by wildcard
    cache.SetOmitVaryStar(true);

    //ensure client browser maintains strict caching rules
    cache.AppendCacheExtension("must-revalidate, proxy-revalidate");

    //This is the only way to set the max-age cachability header in ASP.Net!
    //FieldInfo maxAgeField = cache.GetType().GetField("_maxAge", BindingFlags.Instance | BindingFlags.NonPublic);
    //maxAgeField.SetValue(cache, duration);

    //make this output cache dependent on the file if there is one.
    // NOTE(review): redundant — the guard clause at the top already returned when
    // fileName was empty; kept as-is for safety.
    if (!string.IsNullOrEmpty(fileName))
        context.Response.AddFileDependency(fileName);
}
/// <summary> /// Compresses the data using the specified compression type /// </summary> /// <param name="Data">Data to compress</param> /// <param name="CompressionType">Compression type</param> /// <returns>The compressed data</returns> public static byte[] Compress(this byte[] Data, CompressionType CompressionType = CompressionType.Deflate) { return(IoC.Manager.Bootstrapper.Resolve <Manager>().Compress(Data, CompressionType.ToString())); }
/// <summary> /// Returns the cache folder for composite files for the current compression supported /// </summary> /// <returns></returns> public string GetCurrentCompositeFolder(CompressionType type) { return Path.Combine(CurrentCacheFolder, type.ToString()); }
public static ImageInfo SetImage(Image newImage, CompressionType compression) { int width = newImage.Width; int height = newImage.Height; byte[] header = null; byte[] data = null; if (compression == CompressionType.BGRA) { Bitmap image = new Bitmap(newImage); MemoryStream mspixels = new MemoryStream(); for (int i = 0; i < height; i++) { for (int j = 0; j < width; j++) { Color pixel = image.GetPixel(j, i); mspixels.WriteByte(pixel.B); mspixels.WriteByte(pixel.G); mspixels.WriteByte(pixel.R); mspixels.WriteByte(pixel.A); } } data = mspixels.ToArray(); MemoryStream msx = new MemoryStream(); BinaryWriter bw = new BinaryWriter(msx); bw.Write((int)0); bw.Write((int)0); bw.Write((int)0); bw.Write((int)1); bw.Write((int)0x15); bw.Write((short)width); bw.Write((short)height); bw.Write((int)0x15); bw.Write((int)0); bw.Flush(); header = msx.ToArray(); bw.Close(); } else { DXTCompression dxt = DXTCompression.DXT1; if (compression == CompressionType.DXT3) { dxt = DXTCompression.DXT1; } else if (compression == CompressionType.DXT5) { dxt = DXTCompression.DXT5; } data = ImageUtil.CompressImage(newImage, dxt); MemoryStream msx = new MemoryStream(); BinaryWriter bw = new BinaryWriter(msx); bw.Write((int)0); bw.Write((int)0); bw.Write((int)0); bw.Write((int)1); bw.Write(Encoding.ASCII.GetBytes(compression.ToString())); bw.Write((short)width); bw.Write((short)height); bw.Write((int)0x15); bw.Write((int)0); bw.Flush(); header = msx.ToArray(); bw.Close(); } return(new ImageInfo(header, data)); }
/// <summary> /// Returns the cache folder for composite files for the current compression supported /// </summary> /// <returns></returns> public string GetCurrentCompositeFolder(ICacheBuster cacheBuster, CompressionType type) { return(Path.Combine(CurrentCacheFolder, cacheBuster.GetValue(), type.ToString())); }
/// <summary>
/// Compresses <paramref name="file"/> to <paramref name="destination"/> using the given
/// compression type. NOTE: every concrete format case is currently commented out, so this
/// overload always throws after creating the destination directory — presumably the cases
/// are disabled pending the compression libraries being available; confirm before re-enabling.
/// </summary>
/// <param name="file">Raw bytes to compress</param>
/// <param name="destination">Output file path; its directory is created if missing</param>
/// <param name="cmp">Requested compression format</param>
/// <exception cref="ArgumentOutOfRangeException">Always thrown while all cases are commented out</exception>
public static void Compress(byte[] file, string destination, CompressionType cmp)
{
    try
    {
        // Ensure the destination directory exists before any write.
        string dir = Path.GetDirectoryName(destination);
        if (!string.IsNullOrWhiteSpace(dir))
        {
            Directory.CreateDirectory(dir);
        }
        switch (cmp)
        {
            /*
             * case CompressionType.Uncompressed:
             *     File.WriteAllBytes(destination, file);
             *     break;
             * case CompressionType.Kosinski:
             *     using (MemoryStream input = new MemoryStream(file))
             *     using (FileStream output = File.Create(destination))
             *     using (PaddedStream paddedOutput = new PaddedStream(output, 2, PaddedStreamMode.Write))
             *         Kosinski.Compress(input, paddedOutput);
             *     break;
             * case CompressionType.KosinskiM:
             *     using (MemoryStream input = new MemoryStream(file))
             *     using (FileStream output = File.Create(destination))
             *     using (PaddedStream paddedOutput = new PaddedStream(output, 2, PaddedStreamMode.Write))
             *         ModuledKosinski.Compress(input, paddedOutput, LevelData.littleendian ? Endianness.LittleEndian : Endianness.BigEndian);
             *     break;
             * case CompressionType.Nemesis:
             *     Nemesis.Compress(file, destination);
             *     break;
             * case CompressionType.Enigma:
             *     Enigma.Compress(file, destination, LevelData.littleendian ? Endianness.LittleEndian : Endianness.BigEndian);
             *     break;
             * case CompressionType.SZDD:
             *     SZDDComp.SZDDComp.Compress(file, destination);
             *     break;
             * case CompressionType.Comper:
             *     Comper.Compress(file, destination);
             *     break;
             * case CompressionType.KosinskiPlus:
             *     using (MemoryStream input = new MemoryStream(file))
             *     using (FileStream output = File.Create(destination))
             *     using (PaddedStream paddedOutput = new PaddedStream(output, 2, PaddedStreamMode.Write))
             *         KosinskiPlus.Compress(input, paddedOutput);
             *     break;
             * case CompressionType.KosinskiPlusM:
             *     using (MemoryStream input = new MemoryStream(file))
             *     using (FileStream output = File.Create(destination))
             *     using (PaddedStream paddedOutput = new PaddedStream(output, 2, PaddedStreamMode.Write))
             *         ModuledKosinskiPlus.Compress(input, paddedOutput);
             *     break;
             */
            default:
                throw new ArgumentOutOfRangeException("cmp", "Invalid compression type " + cmp + "!");
        }
    }
    catch
    {
        // Log which file/format failed, then rethrow preserving the stack trace.
        LevelData.Log("Unable to write file \"" + destination + "\" with compression " + cmp.ToString() + ":");
        throw;
    }
}
/// <summary> /// Returns the cache folder for composite files for the current compression supported /// </summary> /// <returns></returns> public string GetCurrentCompositeFolder(CompressionType type) { return(Path.Combine(CurrentCacheFolder, type.ToString())); }
/// <summary> /// Compresses the data using the specified compression type /// </summary> /// <param name="Data">Data to compress</param> /// <param name="CompressionType">Compression type</param> /// <returns>The compressed data</returns> public static byte[] Compress(this byte[] Data, CompressionType CompressionType = CompressionType.Deflate) { return IoC.Manager.Bootstrapper.Resolve<Manager>().Compress(Data, CompressionType.ToString()); }
public static void Compress(byte[] file, string destination, CompressionType cmp) { try { switch (cmp) { case CompressionType.Uncompressed: File.WriteAllBytes(destination, file); break; case CompressionType.Kosinski: using (MemoryStream input = new MemoryStream(file)) { using (FileStream output = File.Create(destination)) { using (PaddedStream paddedOutput = new PaddedStream(output, 2, PaddedStreamMode.Write)) { Kosinski.Compress(input, paddedOutput); } } } break; case CompressionType.KosinskiM: using (MemoryStream input = new MemoryStream(file)) { using (FileStream output = File.Create(destination)) { using (PaddedStream paddedOutput = new PaddedStream(output, 2, PaddedStreamMode.Write)) { ModuledKosinski.Compress(input, paddedOutput, LevelData.littleendian ? Endianness.LittleEndian : Endianness.BigEndian); } } } break; case CompressionType.Nemesis: Nemesis.Compress(file, destination); break; case CompressionType.Enigma: Enigma.Compress(file, destination, LevelData.littleendian ? Endianness.LittleEndian : Endianness.BigEndian); break; case CompressionType.SZDD: SZDDComp.SZDDComp.Compress(file, destination); break; case CompressionType.Comper: Comper.Compress(file, destination); break; default: throw new ArgumentOutOfRangeException("cmp", "Invalid compression type " + cmp + "!"); } } catch { LevelData.Log("Unable to write file \"" + destination + "\" with compression " + cmp.ToString() + ":"); throw; } }
void OnEnable() { string version = CaronteSharp.Caronte.GetNativeDllVersion(); string versionTypeName; if (CarVersionChecker.IsFreeVersion()) { versionTypeName = " FREE"; versionType_ = VersionType.Free; } else if (CarVersionChecker.IsPremiumVersion()) { if (CarVersionChecker.IsEvaluationVersion()) { versionTypeName = " PREMIUM TRIAL"; versionType_ = VersionType.Premium | VersionType.Evaluation; } else { versionTypeName = " PREMIUM"; versionType_ = VersionType.Premium; } } else // PRO VERSION { if (CarVersionChecker.IsEvaluationVersion()) { versionTypeName = " PRO TRIAL"; versionType_ = VersionType.Pro | VersionType.Evaluation; } else { versionTypeName = " PRO"; versionType_ = VersionType.Pro; } } if (CarVersionChecker.DoVersionExpires()) { expirationDateTime_ = CarVersionChecker.GetExpirationDateDateInSeconds(); } if (CarVersionChecker.IsAdvanceCompressionVersion()) { compressionType_ = CompressionType.Advanced; } else { compressionType_ = CompressionType.Normal; } companyIcon_ = CarEditorResource.LoadEditorTexture(CarVersionChecker.CompanyIconName); versionString_ = "Version: " + version + versionTypeName + " \n(Compression type: " + compressionType_.ToString() + ")"; }
/// <summary>
/// Handler entry point: resolves the requested fileset ("s") and dependency type ("t"),
/// serves the composite file from the on-disk map when one exists, otherwise combines,
/// compresses and persists the source files (under a lock) before writing the response.
/// </summary>
/// <param name="context">Current HTTP context carrying the "s" and "t" request values</param>
/// <exception cref="ArgumentException">Thrown when the type cannot be parsed or the fileset is missing</exception>
void IHttpHandler.ProcessRequest(HttpContext context)
{
    // NOTE(review): 'response' is never used below (context.Response is referenced
    // directly instead) — candidate for removal.
    HttpResponse response = context.Response;
    string fileset = context.Server.UrlDecode(context.Request["s"]);
    ClientDependencyType type;
    try
    {
        type = (ClientDependencyType)Enum.Parse(typeof(ClientDependencyType), context.Request["t"], true);
    }
    catch
    {
        throw new ArgumentException("Could not parse the type set in the request");
    }
    if (string.IsNullOrEmpty(fileset))
    {
        throw new ArgumentException("Must specify a fileset in the request");
    }
    string compositeFileName = "";
    byte[] outputBytes = null;
    //get the map to the composite file for this file set, if it exists.
    CompositeFileMap map = CompositeFileXmlMapper.Instance.GetCompositeFile(fileset);
    if (map != null && map.HasFileBytes)
    {
        // Fast path: composite file already built and mapped — serve it from disk.
        ProcessFromFile(context, map, out compositeFileName, out outputBytes);
    }
    else
    {
        bool fromFile = false;
        lock (m_Lock)
        {
            //check again... (double-checked locking: another request may have built
            //the composite file while this one waited on the lock)
            if (map == null || !map.HasFileBytes)
            {
                //need to do the combining, etc... and save the file map
                //get the file list (fileset is Base64; entries are ';'-separated)
                string[] strFiles = DecodeFrom64(fileset).Split(';');
                //combine files and get the definition types of them (internal vs external resources)
                List<CompositeFileDefinition> fDefs;
                byte[] fileBytes = CompositeFileProcessor.CombineFiles(strFiles, context, type, out fDefs);
                //compress data using whatever encoding the client accepts
                CompressionType cType = GetCompression(context);
                outputBytes = CompositeFileProcessor.CompressBytes(cType, fileBytes);
                SetContentEncodingHeaders(context, cType);
                //save combined file
                compositeFileName = CompositeFileProcessor.SaveCompositeFile(outputBytes, type);
                if (!string.IsNullOrEmpty(compositeFileName))
                {
                    //Update the XML file map (only local files are tracked as dependencies)
                    CompositeFileXmlMapper.Instance.CreateMap(fileset, cType.ToString(),
                        fDefs
                        .Where(f => f.IsLocalFile)
                        .Select(x => new FileInfo(context.Server.MapPath(x.Uri))).ToList(), compositeFileName);
                }
            }
            else
            {
                //files are there now, process from file.
                //(set a flag and read outside the lock to keep the critical section short)
                fromFile = true;
            }
        }
        if (fromFile)
        {
            ProcessFromFile(context, map, out compositeFileName, out outputBytes);
        }
    }
    SetCaching(context, compositeFileName);
    context.Response.ContentType = type == ClientDependencyType.Javascript ? "text/javascript" : "text/css";
    context.Response.OutputStream.Write(outputBytes, 0, outputBytes.Length);
}