/// <summary>
/// Decodes a Base64 string produced by the gzip helpers and decompresses it
/// back into the original text (which was encoded as UTF-16 / Encoding.Unicode).
/// </summary>
/// <param name="value">Base64-encoded gzip payload.</param>
/// <returns>The decompressed string.</returns>
private static async Task<string> FromGZipStringAsync(string value)
{
    // Decode the Base64 envelope into the raw gzip bytes.
    var compressed = Convert.FromBase64String(value);
    await using var source = new MemoryStream(compressed);
    await using var inflated = new MemoryStream();
    await using var gunzip = new GZipStream(source, CompressionMode.Decompress);

    // Inflate everything into the in-memory sink.
    await gunzip.CopyToAsync(inflated).ConfigureAwait(false);
    await gunzip.FlushAsync().ConfigureAwait(false);

    // The companion compressor wrote the text as UTF-16.
    return Encoding.Unicode.GetString(inflated.ToArray());
}
/// <summary>
/// Compresses a byte array with gzip.
/// </summary>
/// <param name="data">The raw bytes to compress.</param>
/// <returns>The gzip-compressed bytes.</returns>
private static async Task<byte[]> CompressAsync(byte[] data)
{
    using (var buffer = new MemoryStream())
    {
        using (var gzip = new GZipStream(buffer, CompressionMode.Compress))
        {
            await gzip.WriteAsync(data, 0, data.Length);
            await gzip.FlushAsync();
        }
        // The GZipStream must be closed first so the gzip trailer is written;
        // MemoryStream.ToArray is documented to remain valid after dispose.
        return buffer.ToArray();
    }
}
/// <summary>
/// Compresses a string with gzip and returns the result Base64-encoded.
/// The text is encoded as UTF-16 (Encoding.Unicode) to match FromGZipStringAsync.
/// </summary>
/// <param name="value">The text to compress.</param>
/// <param name="level">Compression level; defaults to Fastest.</param>
/// <returns>Base64-encoded gzip payload.</returns>
private static async Task<string> ToGZipStringAsync(string value, CompressionLevel level = CompressionLevel.Fastest)
{
    var bytes = Encoding.Unicode.GetBytes(value);
    await using var input = new MemoryStream(bytes);
    await using var output = new MemoryStream();
    // BUG FIX: the GZipStream must be fully disposed BEFORE output.ToArray() is
    // called — FlushAsync does not emit the gzip trailer (CRC32 + length), so
    // the original code returned a truncated, undecodable payload.
    // leaveOpen: true keeps `output` readable after the gzip writer closes.
    await using (var stream = new GZipStream(output, level, leaveOpen: true))
    {
        await input.CopyToAsync(stream);
    }
    return Convert.ToBase64String(output.ToArray());
}
/// <summary>
/// Serializes an <see cref="IEnumerable{T}"/> to JSON and writes it, gzip-compressed, to a file.
/// </summary>
/// <typeparam name="T">Element type of the sequence.</typeparam>
/// <param name="input">The sequence to serialize.</param>
/// <param name="fileName">Destination file path.</param>
public static async Task SerializeGzipToFile<T>(this IEnumerable<T> input, string fileName)
{
    // Declaration order matters: `using var` disposes in reverse, so the gzip
    // writer closes (emitting the trailer) before the file handle is released.
    using var buffer = new MemoryStream();
    using var textWriter = new StreamWriter(buffer);
    using var jsonWriter = new JsonTextWriter(textWriter);
    using var outputFile = File.Create(fileName);
    using var gzip = new GZipStream(outputFile, CompressionLevel.Optimal);

    // Serialize into the in-memory buffer first...
    JsonSerializer.Create(JsonSerializerSettings).Serialize(jsonWriter, input);
    await jsonWriter.FlushAsync();

    // ...then rewind and stream the JSON through the gzip writer into the file.
    buffer.Seek(0, SeekOrigin.Begin);
    await buffer.CopyToAsync(gzip);
    await gzip.FlushAsync();
}
/// <summary>
/// Compresses <paramref name="data"/> with gzip into <paramref name="compressedData"/>.
/// The destination stream's position is rewound to 0 on return, so it must be seekable.
/// </summary>
/// <param name="data">Source stream, read from its current position.</param>
/// <param name="compressedData">Seekable destination stream receiving the gzip bytes.</param>
public static async Task Compress(Stream data, Stream compressedData)
{
    using (var auxStream = new MemoryStream())
    {
        // BUG FIX: the GZipStream must be disposed BEFORE the buffered bytes are
        // copied out — FlushAsync does not emit the gzip trailer (CRC32 + length),
        // so the original code wrote a truncated gzip payload to compressedData.
        // leaveOpen: true keeps auxStream usable after the gzip writer closes.
        using (var zipStream = new GZipStream(auxStream, CompressionMode.Compress, leaveOpen: true))
        {
            await data.CopyToAsync(zipStream);
        }
        auxStream.Position = 0;
        await auxStream.CopyToAsync(compressedData);
        await compressedData.FlushAsync();
    }
    compressedData.Position = 0;
}
/// <summary>
/// Saves the plans to a file.
/// </summary>
/// <param name="plans">The plans.</param>
public static async Task SavePlansAsync(IList<Plan> plans)
{
    // All plans are assumed to belong to the same character — only the first
    // entry is inspected. TODO(review): confirm callers guarantee this.
    Character character = (Character)plans.First().Character;

    // Prompt the user to pick a file name
    using (SaveFileDialog sfdSave = new SaveFileDialog())
    {
        sfdSave.FileName = $"{character.Name} - Plans Backup";
        sfdSave.Title = @"Save to File";
        sfdSave.Filter = @"EVEMon Plans Backup Format (*.epb)|*.epb";
        sfdSave.FilterIndex = (int)PlanFormat.Emp;
        // Nothing to do if the user backs out of the dialog.
        if (sfdSave.ShowDialog() == DialogResult.Cancel)
        {
            return;
        }
        try
        {
            string content = PlanIOHelper.ExportAsXML(plans);
            // Moves to the final file
            await FileHelper.OverwriteOrWarnTheUserAsync(
                sfdSave.FileName,
                async fs =>
                {
                    // Emp is actually compressed xml
                    Stream stream = new GZipStream(fs, CompressionMode.Compress);
                    // Disposing the StreamWriter also disposes `stream`, which is
                    // what finally writes the gzip trailer into `fs`.
                    using (StreamWriter writer = new StreamWriter(stream, Encoding.UTF8))
                    {
                        await writer.WriteAsync(content);
                        await writer.FlushAsync();
                        await stream.FlushAsync();
                        await fs.FlushAsync();
                    }
                    // Returning true tells the helper the write succeeded.
                    return (true);
                });
        }
        catch (IOException err)
        {
            // Log and inform the user; other exception types propagate.
            ExceptionHandler.LogException(err, false);
            MessageBox.Show($"There was an error writing out the file:\n\n{err.Message}",
                @"Save Failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }
}
/// <summary>
/// Writes this content gzip-compressed to <paramref name="stream"/>.
/// An empty payload is written when the inner content is null.
/// </summary>
protected override async Task SerializeToStreamAsync(Stream stream, TransportContext context)
{
    // leaveOpen (third argument, true) keeps the caller's output stream alive
    // after the gzip writer is disposed; disposal emits the gzip trailer.
    using (GZipStream gzip = new GZipStream(stream, CompressionMode.Compress, true))
    {
        if (content == null)
        {
            // No inner content — emit an empty (but valid) gzip body.
            await (new System.Net.Http.StringContent(string.Empty)).CopyToAsync(gzip);
        }
        else
        {
            await content.CopyToAsync(gzip);
        }
        await gzip.FlushAsync();
    }
}
/// <summary>
/// Compresses a string into a gzip-encoded <see cref="MemoryStream"/>,
/// rewound to position 0 and ready for reading. Caller disposes the stream.
/// </summary>
/// <param name="s">The text to compress (written with StreamWriter's default UTF-8).</param>
public static async Task<MemoryStream> GZipAsync(string s)
{
    var result = new MemoryStream();
    // leaveOpen: true — the result stream must outlive the gzip writer.
    // Disposing the StreamWriter flushes it and closes the GZipStream,
    // which writes the gzip trailer.
    using (var gzip = new GZipStream(result, CompressionLevel.Optimal, leaveOpen: true))
    using (var textWriter = new StreamWriter(gzip))
    {
        await textWriter.WriteAsync(s);
        await gzip.FlushAsync();
        await result.FlushAsync();
    }
    result.Position = 0;
    return result;
}
/// <summary>
/// Gzip-compresses the given <see cref="StringContent"/> and wraps the result in a
/// <see cref="StreamContent"/> carrying the appropriate Content-Type and
/// Content-Encoding: gzip headers.
/// </summary>
private async Task<StreamContent> CompressAsync(StringContent content)
{
    var buffer = new MemoryStream();
    // leaveOpen (true) — the buffer must outlive the gzip writer so it can
    // back the StreamContent returned below; disposing the writer emits the trailer.
    using (var gzip = new GZipStream(buffer, CompressionMode.Compress, true))
    {
        await content.CopyToAsync(gzip);
        await gzip.FlushAsync();
    }
    buffer.Position = 0;

    var result = new StreamContent(buffer);
    result.Headers.ContentType = new MediaTypeHeaderValue(_contentType.MimeType());
    result.Headers.Add("Content-Encoding", "gzip");
    return result;
}
/// <summary>
/// Decompresses the specified bytes using <see cref="GZipStream"/> and the
/// <see cref="CompressionSettings"/> that have been used to originally compress the bytes.
/// </summary>
/// <param name="gzippedBytes">The gzipped <c>byte[]</c> array that you want to decompress.</param>
/// <param name="compressionSettings">The <see cref="CompressionSettings"/> that have been used to compress the bytes.</param>
/// <returns>The decompressed <c>bytes[]</c>; <c>null</c> for null input, empty for empty input.</returns>
public async Task<byte[]> Decompress(byte[] gzippedBytes, CompressionSettings compressionSettings)
{
    // Preserve the null/empty contracts of the compressor.
    if (ReferenceEquals(gzippedBytes, null))
    {
        return null;
    }
    if (gzippedBytes.Length == 0)
    {
        return Array.Empty<byte>();
    }

    await using MemoryStream source = new MemoryStream(gzippedBytes);
    // Pre-size the sink at 1.5x the compressed length as a rough growth guess.
    await using MemoryStream sink = new MemoryStream(gzippedBytes.Length / 2 * 3);
    await using GZipStream gunzip = new GZipStream(source, CompressionMode.Decompress);

    // Never copy with a buffer smaller than 4 KiB, whatever the settings say.
    int copyBufferSize = Math.Max(4096, compressionSettings.bufferSize);
    await gunzip.CopyToAsync(sink, copyBufferSize).ConfigureAwait(false);
    await gunzip.FlushAsync().ConfigureAwait(false);
    return sink.ToArray();
}
/// <summary>
/// Compresses the specified bytes using <see cref="GZipStream"/> and the provided <see cref="CompressionSettings"/>.
/// </summary>
/// <returns>The gzipped <c>byte[]</c> array; <c>null</c> for null input, empty for empty input.</returns>
/// <param name="bytes">The <c>byte[]</c> array to compress.</param>
/// <param name="compressionSettings">The desired <see cref="CompressionSettings"/>.</param>
public async Task<byte[]> Compress(byte[] bytes, CompressionSettings compressionSettings)
{
    if (ReferenceEquals(bytes, null))
    {
        return null;
    }
    if (bytes.Length == 0)
    {
        return Array.Empty<byte>();
    }

    await using MemoryStream input = new MemoryStream(bytes);
    // Pre-size the output at ~75% of the input as a rough compression-ratio guess.
    await using MemoryStream output = new MemoryStream(bytes.Length / 4 * 3);
    // BUG FIX: the GZipStream must be fully disposed BEFORE output.ToArray() is
    // called — FlushAsync does not emit the gzip trailer (CRC32 + length), so
    // the original code returned a truncated gzip payload that the companion
    // Decompress method would reject. leaveOpen: true keeps `output` readable.
    await using (GZipStream compressionStream = new GZipStream(output, compressionSettings.compressionLevel, leaveOpen: true))
    {
        // Never copy with a buffer smaller than 4 KiB, whatever the settings say.
        await input.CopyToAsync(compressionStream, Math.Max(4096, compressionSettings.bufferSize)).ConfigureAwait(false);
    }
    return output.ToArray();
}
/// <summary>
/// Smuggler import smoke test: a one-document dummy dump is gzipped in memory
/// and imported with a deliberately slow transform script, then the resulting
/// document count is verified.
/// </summary>
public async Task ShouldWork()
{
    // One dummy document in the dump.
    var dummyDump = CreateDummyDump(1);
    using (var ctx = JsonOperationContext.ShortTermSingleUse())
    using (var bjro = ctx.ReadObject(dummyDump, "dump"))
    await using (var ms = new MemoryStream())
    await using (var zipStream = new GZipStream(ms, CompressionMode.Compress))
    {
        await bjro.WriteJsonToAsync(zipStream);
        // NOTE(review): FlushAsync does not emit the gzip trailer (only Dispose
        // does), so the import below reads a technically incomplete gzip stream —
        // presumably tolerated by the smuggler's decoder. Verify.
        await zipStream.FlushAsync();
        ms.Position = 0;
        using (var store = GetDocumentStore())
        {
            var operation = await store.Smuggler.ImportAsync(new DatabaseSmugglerImportOptions
            {
                OperateOnTypes = DatabaseItemType.Documents | DatabaseItemType.Identities | DatabaseItemType.CompareExchange,
                MaxStepsForTransformScript = int.MaxValue,
                // Busy-wait transform script slows the import down by ~1s per document.
                TransformScript = @" function sleep(milliseconds) { var date = Date.now(); var currentDate = null; do { currentDate = Date.now(); } while (currentDate - date < milliseconds); } sleep(1000); ",
            }, ms);
            await operation.WaitForCompletionAsync();
            // Exactly the one dummy document should have been imported.
            var stats = store.Maintenance.Send(new GetStatisticsOperation());
            Assert.Equal(1, stats.CountOfDocuments);
        }
    }
}
/// <summary>
/// Compresses a stream into a gzip-encoded <see cref="MemoryStream"/>,
/// rewound to position 0 and ready for reading. Caller disposes the stream.
/// </summary>
/// <param name="input">Source stream; rewound first when seekable.</param>
public static async Task<MemoryStream> GZipAsync(Stream input)
{
    var result = new MemoryStream();
    // Rewind seekable inputs so the whole stream gets compressed.
    if (input.CanSeek)
    {
        input.Position = 0;
    }
    // leaveOpen: true — the result stream must outlive the gzip writer;
    // disposing the writer emits the gzip trailer.
    using (var gzip = new GZipStream(result, CompressionLevel.Optimal, leaveOpen: true))
    {
        await input.CopyToAsync(gzip);
        await gzip.FlushAsync();
        await result.FlushAsync();
    }
    result.Position = 0;
    return result;
}
/// <inheritdoc />
protected override async Task CompressInternalAsync(Stream source, Stream target, CancellationToken token = default(CancellationToken))
{
    // Bail out early if cancellation was requested before any work started.
    token.ThrowIfCancellationRequested();
    // Rewind seekable sources so the whole stream gets compressed.
    if (source.CanSeek)
    {
        source.Position = 0;
    }
    GZipStream gZipStream = null;
    try
    {
        // Third argument (true) leaves `target` open after the gzip stream is disposed.
        // CompressionLevel here is the project's own type, mapped to the BCL enum.
        gZipStream = new GZipStream(target, CompressionLevel.ToSystemCompressionLevel(), true);
        await source.CopyToAsync(gZipStream, Constants.BUFFER_16_KB, token);
        await gZipStream.FlushAsync(token);
        await target.FlushAsync(token);
    }
    finally
    {
        // NOTE(review): the gzip trailer is only written when this dispose runs,
        // i.e. after target.FlushAsync above — callers presumably flush/close
        // `target` afterwards; confirm the trailer bytes are not lost.
        ObjectHelper.Dispose(ref gZipStream);
    }
}
/// <summary>
/// Compress and remove indentation for json data
/// </summary>
/// <param name="input">Source JSON stream; rewound first when seekable.</param>
/// <returns>A gzip-encoded <see cref="MemoryStream"/> rewound to position 0.</returns>
public static async Task<MemoryStream> GZipAndMinifyAsync(Stream input)
{
    var result = new MemoryStream();
    // Rewind seekable inputs so the whole stream gets parsed.
    if (input.CanSeek)
    {
        input.Position = 0;
    }
    // Parse the JSON first; re-serializing it below drops the indentation.
    var json = await LoadJsonAsync(input);
    // leaveOpen: true — the result stream must outlive the gzip writer;
    // disposing the writer emits the gzip trailer.
    using (var gzip = new GZipStream(result, CompressionLevel.Optimal, leaveOpen: true))
    {
        await WriteJsonAsync(json, gzip);
        await gzip.FlushAsync();
        await result.FlushAsync();
    }
    result.Position = 0;
    return result;
}
/// <summary>
/// Synchronizes one unsynced public track: downloads its GPX trace from the
/// OSM API, gzips it, and stores the result in the database. Errors on a
/// single track are recorded on that track; everything else is only logged.
/// </summary>
/// <param name="stoppingToken">NOTE(review): currently unused — cancellation is not observed.</param>
private async Task RunAsync(CancellationToken stoppingToken)
{
    try
    {
        // NOTE(review): a new HttpClient per invocation — consider reuse or IHttpClientFactory.
        using var httpClient = new HttpClient();
        httpClient.DefaultRequestHeaders.Add("User-Agent", "BikeDataProject");
        var clientFactory = new ClientsFactory(null, httpClient, _configuration["OSM_API"]);

        // get client credentials.
        var userName = await File.ReadAllTextAsync(_configuration["OSM_USER_ID"]);
        var userPass = await File.ReadAllTextAsync(_configuration["OSM_USER_PASS"]);
        var client = clientFactory.CreateBasicAuthClient(userName, userPass);

        // get unsynced track.
        var unSyncedTrack = await _db.GetUnSyncedPublicTrack();
        if (unSyncedTrack == null)
        {
            // Nothing to synchronize this round.
            return;
        }

        // sync gpx track.
        try
        {
            var track = await client.GetTraceData(unSyncedTrack.OsmTrackId);
            using (var memoryStream = new MemoryStream())
            using (var gzipStream = new GZipStream(memoryStream, CompressionLevel.Optimal))
            {
                await track.Stream.CopyToAsync(gzipStream);
                await gzipStream.FlushAsync();
                // Explicit DisposeAsync writes the gzip trailer before ToArray() reads the buffer.
                await gzipStream.DisposeAsync();
                unSyncedTrack.GpxFile = memoryStream.ToArray();
            }
            unSyncedTrack.GpxContentType = track.ContentType.MediaType;
            unSyncedTrack.SyncState = TrackSyncStateEnum.GpxSynced;
            _db.Tracks.Update(unSyncedTrack);
            await _db.SaveChangesAsync();
            _logger.LogInformation($"Synchronized GPX for public track: {unSyncedTrack.OsmTrackId}");
        }
        catch (Exception e)
        {
            // Mark the track as failed so it is not retried in a tight loop.
            unSyncedTrack.SyncState = TrackSyncStateEnum.Error;
            _db.Tracks.Update(unSyncedTrack);
            await _db.SaveChangesAsync();
            _logger.LogWarning(e, $"Failed to get public track: {unSyncedTrack.OsmTrackId}");
        }
    }
    catch (Exception e)
    {
        // Last-resort guard: never let an exception escape the worker loop.
        _logger.LogError(e, $"Unhandled exception while synchronizing GPX track.");
    }
}
/// <summary>
/// FlushAsync on a disposed GZipStream must surface ObjectDisposedException.
/// </summary>
public async Task FlushAsyncFailsAfterDispose()
{
    var backing = new MemoryStream();
    var gzip = new GZipStream(backing, CompressionMode.Compress);
    gzip.Dispose();

    await Assert.ThrowsAsync<ObjectDisposedException>(async () =>
    {
        await gzip.FlushAsync();
    });
}
/// <summary>
/// Neither the synchronous nor the asynchronous flush should throw on a
/// live compression stream.
/// </summary>
public async Task Flush()
{
    var backing = new MemoryStream();
    var gzip = new GZipStream(backing, CompressionMode.Compress);

    gzip.Flush();
    await gzip.FlushAsync(); // Just ensuring Flush doesn't throw
}
/// <summary>
/// Dumps the guild member list as a tab-separated text attachment; when the
/// dump exceeds the attachment size limit it is gzipped first, and rejected
/// with a message if still too large. Only one check may run at a time.
/// </summary>
public async Task Members(CommandContext ctx)
{
    // Non-blocking acquire (0ms wait): refuse to run concurrently with another check.
    if (!await CheckLock.WaitAsync(0).ConfigureAwait(false))
    {
        await ctx.RespondAsync("Another check is already in progress").ConfigureAwait(false);
        return;
    }
    try
    {
        await ctx.ReactWithAsync(Config.Reactions.PleaseWait).ConfigureAwait(false);
        var members = GetMembers(ctx.Client);
        using (var compressedResult = new MemoryStream())
        {
            using (var memoryStream = new MemoryStream())
            {
                // UTF-8 without BOM; leaveOpen (true) keeps memoryStream usable after the writer.
                using (var writer = new StreamWriter(memoryStream, new UTF8Encoding(false), 4096, true))
                {
                    // One tab-separated line per member: name, nickname, join date, roles.
                    foreach (var member in members)
                    {
                        await writer.WriteLineAsync($"{member.Username}\t{member.Nickname}\t{member.JoinedAt:O}\t{(string.Join(',', member.Roles.Select(r => r.Name)))}").ConfigureAwait(false);
                    }
                    await writer.FlushAsync().ConfigureAwait(false);
                }
                memoryStream.Seek(0, SeekOrigin.Begin);
                // Small enough to attach uncompressed — send and stop here.
                if (memoryStream.Length <= Config.AttachmentSizeLimit)
                {
                    await ctx.RespondWithFileAsync("names.txt", memoryStream).ConfigureAwait(false);
                    return;
                }
                // Too large: gzip into compressedResult; disposing the GZipStream
                // (leaveOpen: true) writes the trailer before the length check below.
                using (var gzip = new GZipStream(compressedResult, CompressionLevel.Optimal, true))
                {
                    await memoryStream.CopyToAsync(gzip).ConfigureAwait(false);
                    await gzip.FlushAsync().ConfigureAwait(false);
                }
            }
            compressedResult.Seek(0, SeekOrigin.Begin);
            if (compressedResult.Length <= Config.AttachmentSizeLimit)
            {
                await ctx.RespondWithFileAsync("names.txt.gz", compressedResult).ConfigureAwait(false);
            }
            else
            {
                await ctx.RespondAsync($"Dump is too large: {compressedResult.Length} bytes").ConfigureAwait(false);
            }
        }
    }
    catch (Exception e)
    {
        Config.Log.Warn(e, "Failed to dump guild members");
        await ctx.ReactWithAsync(Config.Reactions.Failure, "Failed to dump guild members").ConfigureAwait(false);
    }
    finally
    {
        // Always release the lock and clear the "please wait" reaction.
        CheckLock.Release(1);
        await ctx.RemoveReactionAsync(Config.Reactions.PleaseWait).ConfigureAwait(false);
    }
}