/// <summary>
/// Round-trip test helper: writes ORIGINAL_FILES_PATH into a new archive, compares the result
/// against a known-good reference archive, then extracts it and verifies the extracted files.
/// </summary>
/// <param name="compressionType">Compression to use when writing the archive.</param>
/// <param name="archive">File name of the archive to create under SCRATCH2_FILES_PATH.</param>
/// <param name="archiveToVerifyAgainst">Reference archive name under TEST_ARCHIVES_PATH.</param>
/// <param name="encoding">Entry-name encoding; falls back to Encoding.Default when null.</param>
protected void Write(CompressionType compressionType, string archive, string archiveToVerifyAgainst, Encoding encoding = null)
{
    using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
    {
        // LeaveStreamOpen so disposing the writer does not close the file stream;
        // the outer using remains responsible for it.
        WriterOptions writerOptions = new WriterOptions(compressionType)
        {
            LeaveStreamOpen = true,
        };
        writerOptions.ArchiveEncoding.Default = encoding ?? Encoding.Default;
        // NOTE(review): `type` is not the compressionType parameter — presumably an
        // ArchiveType field on the enclosing test class; confirm against the base class.
        using (var writer = WriterFactory.Open(stream, type, writerOptions))
        {
            writer.WriteAll(ORIGINAL_FILES_PATH, "*", SearchOption.AllDirectories);
        }
    }
    // Compare the freshly written archive with the reference archive.
    CompareArchivesByPath(Path.Combine(SCRATCH2_FILES_PATH, archive), Path.Combine(TEST_ARCHIVES_PATH, archiveToVerifyAgainst));
    using (Stream stream = File.OpenRead(Path.Combine(SCRATCH2_FILES_PATH, archive)))
    {
        ReaderOptions readerOptions = new ReaderOptions();
        readerOptions.ArchiveEncoding.Default = encoding ?? Encoding.Default;
        // NonDisposingStream keeps the outer using responsible for closing the file.
        using (var reader = ReaderFactory.Open(new NonDisposingStream(stream), readerOptions))
        {
            reader.WriteAllToDirectory(SCRATCH_FILES_PATH, new ExtractionOptions() { ExtractFullPath = true });
        }
    }
    VerifyFiles();
}
/// <summary>
/// Creates a Parquet outputter whose writer closes a row group
/// every <paramref name="RowGroupSize"/> rows.
/// </summary>
public ParquetOutputter(int RowGroupSize)
{
    writeroptions = new WriterOptions
    {
        RowGroupsSize = RowGroupSize,
    };
}
/// <summary>
/// Compresses a file or a directory into a zip archive.
/// </summary>
/// <param name="filePath">Path of the file or directory to compress.</param>
/// <param name="zipPath">Destination archive path (with .zip extension).</param>
/// <param name="filterExtenList">File extensions to exclude from the archive.</param>
public static void CompressionFile(string filePath, string zipPath, List<string> filterExtenList = null)
{
    try
    {
        using var zip = File.Create(zipPath);
        var option = new WriterOptions(CompressionType.Deflate)
        {
            ArchiveEncoding = new ArchiveEncoding() { Default = Encoding.UTF8 }
        };
        using var zipWriter = WriterFactory.Open(zip, ArchiveType.Zip, option);

        if (Directory.Exists(filePath))
        {
            // Directory: add everything recursively, skipping filtered extensions.
            zipWriter.WriteAll(
                filePath,
                "*",
                (path) => filterExtenList == null
                          || !filterExtenList.Any(d => Path.GetExtension(path).Contains(d, StringComparison.OrdinalIgnoreCase)),
                SearchOption.AllDirectories);
        }
        else if (File.Exists(filePath))
        {
            // Single file: store it under its own file name.
            zipWriter.Write(Path.GetFileName(filePath), filePath);
        }
    }
    catch (Exception ex)
    {
        LogHelper.Error($"{filePath}:{ex.Message}");
        throw;
    }
}
/// <summary>
/// Builds the Application Insights telemetry pipeline: sets the instrumentation key,
/// optionally wires up QuickPulse live metrics and (on full framework) performance
/// counter collection, then creates the telemetry client this writer logs through.
/// </summary>
/// <param name="instrumentationKey">Application Insights instrumentation key.</param>
/// <param name="options">Writer options controlling QuickPulse and perf-counter collection.</param>
public ApplicationInsightsWriter(string instrumentationKey, WriterOptions options)
{
    TelemetryConfiguration.Active.InstrumentationKey = instrumentationKey;
    TelemetryProcessorChainBuilder builder = TelemetryConfiguration.Active.TelemetryProcessorChainBuilder;
    // Captured by the lambda below; registered with the QuickPulse module after Build().
    QuickPulseTelemetryProcessor quickPulseProcessor = null;
    // add our own telemetry processor that can override session based variables
    //builder.Use(next => new LogMagicTelemetryProcessor(next));
    // optionally enable QuickPulse
    /*
     * - Free and is not counted towards the bill.
     * - The latency is 1 second compared to a few minutes.
     * - Retention is while the data is on the chart, not 90 days.
     * - Data is only streamed while you are in Live Metrics view.
     */
    if (options.EnableQuickPulse)
    {
        builder.Use((next) =>
        {
            quickPulseProcessor = new QuickPulseTelemetryProcessor(next);
            return (quickPulseProcessor);
        });
    }
    builder.Build();
    _telemetryClient = new TelemetryClient(TelemetryConfiguration.Active);
    _telemetryClient.InstrumentationKey = instrumentationKey;
    _context = new InsightsContext(_telemetryClient, options);
    if (options.EnableQuickPulse)
    {
        // The module must be initialized AFTER Build() so the processor instance exists.
        var quickPulse = new QuickPulseTelemetryModule();
        quickPulse.Initialize(TelemetryConfiguration.Active);
        quickPulse.RegisterTelemetryProcessor(quickPulseProcessor);
    }
#if NETFULL
    // see https://github.com/Microsoft/ApplicationInsights-dotnet-server/blob/develop/Src/PerformanceCollector/Perf.Shared/PerformanceCollectorModule.cs
    if (options.CollectPerformanceCounters)
    {
        //optionally enable performance counters collection
        var pcm = new PerformanceCollectorModule();
        //custom counters can be easily added here if required
        pcm.Counters.Add(new PerformanceCounterCollectionRequest(@"\.NET CLR Memory(LogMagic.Console)\# GC Handles", "GC Handles"));
        pcm.Initialize(TelemetryConfiguration.Active);
    }
#endif
    TelemetryConfiguration.Active.TelemetryInitializers.Add(new OperationTelemetryInitialiser());
    _options = options;
}
/// <summary>
/// Copy-constructor: derives zip writer options from generic writer options,
/// preserving zip-specific settings when the source is already zip-flavored.
/// </summary>
internal ZipWriterOptions(WriterOptions options)
    : base(options.CompressionType)
{
    LeaveStreamOpen = options.LeaveStreamOpen;
    if (options is ZipWriterOptions zipOptions)
    {
        UseZip64 = zipOptions.UseZip64;
    }
}
/// <summary>
/// Serializes <paramref name="original"/> to an in-memory Parquet file
/// (no compression) and immediately reads it back.
/// </summary>
public static DataSet WriteRead(DataSet original, WriterOptions writerOptions = null)
{
    var buffer = new MemoryStream();
    ParquetWriter.Write(original, buffer, CompressionMethod.None, null, writerOptions);
    buffer.Position = 0;
    return ParquetReader.Read(buffer);
}
/// <summary>
/// Initializes the Thrift file metadata skeleton for a new Parquet file.
/// </summary>
public FileMetadataBuilder(WriterOptions writerOptions)
{
    _writerOptions = writerOptions;
    _meta = new Thrift.FileMetaData
    {
        Created_by = CreatedBy,
        Version = 1,
        Row_groups = new List<Thrift.RowGroup>()
    };
}
/// <summary>
/// Validates that the input file exists and configures the record size
/// limit on the writer options.
/// </summary>
protected override StageResult Init()
{
    if (!InputFile.CheckExistsAndReportError(L))
    {
        return StageResult.INPUT_ERROR;
    }

    WriterOptions.Add("RecordLimitSize", RecordLimitSize);
    return StageResult.SUCCESS;
}
/// <summary>
/// Zips this folder to the file system.
/// </summary>
/// <param name="fullPathToZipFile">Destination path of the archive file.</param>
/// <param name="archiveType">Type of the archive.</param>
/// <param name="writerOptions">Optional writer options.</param>
/// <param name="searchPattern">File search pattern to include.</param>
/// <param name="searchOption">Whether to recurse into subdirectories.</param>
public ZipFileObject ZipFolderToFileSystem(string fullPathToZipFile, ArchiveType archiveType, WriterOptions writerOptions = null, string searchPattern = "*", SearchOption searchOption = SearchOption.AllDirectories)
{
    var archiveObject = new ZipFileObject(fullPathToZipFile, archiveType, null, writerOptions);
    archiveObject.AddFromDirectory(FullName, searchPattern, searchOption);
    return archiveObject;
}
/// <summary>
/// Runs the base initialization, then marks the writer output as having
/// a header record.
/// </summary>
protected override StageResult Init()
{
    if (!Success(base.Init(), out StageResult baseResult))
    {
        return baseResult;
    }

    WriterOptions.Add("HasHeaderRecord", true);
    return StageResult.SUCCESS;
}
/// <summary>
/// With Size=0 triggers, every write should close its chunk and flush to the OS
/// immediately. Writes until one fully succeeds (tolerating injected write faults),
/// then polls the file until a reader observes every buffered record.
/// </summary>
public void SizeTriggersTest()
{
    RunTest(TestCloseChunk).Wait();

    async Task TestCloseChunk(string fname)
    {
        // Size = 0 forces chunk close / OS flush after every record.
        var opt = new WriterOptions()
        {
            CloseChunk = new Triggers() { Size = 0 },
            FlushToOS = new Triggers() { Size = 0 },
            DisposeFlushToDisk = false,
        };
        using (var writer = new Writer(fname, opt))
        {
            long written = 0;
            // Keep writing until a write completes without throwing.
            // Injected faults may still buffer or drop individual records.
            while (true)
            {
                try
                {
                    await writer.WriteAsync(new Event<long>(new DateTime(written + 1, DateTimeKind.Utc), written + 1));
                    ++written;
                    break;
                }
                catch (TimeSeriesWriteException e)
                {
                    Assert.IsInstanceOfType(e.InnerException, typeof(InjectedWriteException));
                    if (e.Result == TimeSeriesWriteResult.RecordsBuffered)
                    {
                        // Buffered records will still reach the file; count them.
                        ++written;
                    }
                    else
                    {
                        Assert.AreEqual(TimeSeriesWriteResult.RecordsDropped, e.Result);
                    }
                }
            }
            Assert.IsTrue(written > 0);
            // Poll until the reader sees all written records, then verify
            // the observed range is exactly [1, written].
            while (true)
            {
                ReadStats stats = await ReadAllAfter(fname, 0, 1, FileState.Expanding);
                if (stats.Total >= written)
                {
                    Assert.AreEqual(1, stats.First);
                    Assert.AreEqual(written, stats.Last);
                    Assert.AreEqual(written, stats.Total);
                    break;
                }
                await Task.Delay(TimeSpan.FromMilliseconds(1));
            }
        }
    }
}
/// <summary>
/// Adds Azure Application Insights writer
/// </summary>
/// <param name="configuration">Configuration reference</param>
/// <param name="instrumentationKey">Instrumentation key</param>
/// <param name="options">Writer options; defaults are used when null</param>
public static ILogConfiguration AzureApplicationInsights(this IWriterConfiguration configuration, string instrumentationKey, WriterOptions options)
{
    options = options ?? new WriterOptions();
    return configuration.Custom(new ApplicationInsightsWriter(instrumentationKey, options));
}
/// <summary>
/// Serializes <paramref name="original"/> to an in-memory Parquet file
/// (no compression), flushes, and reads it back.
/// </summary>
public static DataSet WriteReadOpt(DataSet original, WriterOptions writerOptions = null)
{
    var buffer = new MemoryStream();
    ParquetWriter.Write(original, buffer, CompressionMethod.None, null, writerOptions);
    buffer.Flush();
    buffer.Position = 0;
    return ParquetReader.Read(buffer);
}
/// <summary>
/// Saves the archive to <paramref name="fileName"/> via a progress dialog,
/// disabling this window until the dialog closes.
/// </summary>
private void SaveTo(ZipArchive archive, string fileName, WriterOptions writerOptions)
{
    IsEnabled = false;
    var dialog = new ProgressDialog { Title = "Busy" };
    // Re-enable this window once the compression dialog goes away.
    dialog.Closed += delegate { IsEnabled = true; };
    dialog.StartCompress(archive, fileName, writerOptions);
}
/// <summary>
/// Create an ORC file writer. This is the public interface for creating
/// writers going forward; new options will only be added to this method.
/// Every configured option is forwarded individually to the WriterImpl constructor.
/// </summary>
/// <param name="path">Filename to write to.</param>
/// <param name="stream">Destination stream the writer emits ORC data to.</param>
/// <param name="opts">The options.</param>
/// <returns>A new ORC file writer.</returns>
public static Writer createWriter(string path, Stream stream, WriterOptions opts)
{
    return (new WriterImpl(stream, path, opts, opts._inspector, opts.schema, opts.stripeSizeValue, opts.compressValue, opts.bufferSizeValue, opts.rowIndexStrideValue, opts.memoryManagerValue, opts.blockPaddingValue, opts.versionValue, opts._callback, opts._encodingStrategy, opts.compressionStrategy, opts._paddingTolerance, opts.blockSizeValue, opts._bloomFilterColumns, opts._bloomFilterFpp));
}
/// <summary>
/// Compresses a single file into an archive named "test.zip" inside the given directory.
/// </summary>
/// <param name="SourceFileName">Path of the file to compress.</param>
/// <param name="FileName">Directory in which test.zip is created.</param>
public void CompressZipFile(string SourceFileName, string FileName)
{
    WriterOptions options = new WriterOptions(CompressionType.Deflate);
    // UTF-8 entry names so non-ASCII file names survive the round trip.
    options.ArchiveEncoding.Default = Encoding.UTF8;

    // Use Path.Combine instead of concatenating "\\" so the path is valid
    // on non-Windows platforms as well (behavior on Windows is unchanged).
    string zipPath = Path.Combine(FileName, "test.zip");
    using (var zip = File.OpenWrite(zipPath))
    using (var zipWriter = WriterFactory.Open(zip, ArchiveType.Zip, options))
    {
        zipWriter.Write(Path.GetFileName(SourceFileName), SourceFileName);
    }
}
/// <summary>
/// Adds Azure Application Insights writer
/// </summary>
/// <param name="configuration">Configuration reference</param>
/// <param name="instrumentationKey">Instrumentation key</param>
/// <param name="traceExceptions">Forwarded to WriterOptions.TraceExceptions</param>
/// <param name="flushOnWrite">When true, flush will be forced on every write</param>
/// <returns>The configuration, for fluent chaining</returns>
public static ILogConfiguration AzureApplicationInsights(this IWriterConfiguration configuration, string instrumentationKey, bool traceExceptions = true, bool flushOnWrite = false)
{
    var writerOptions = new WriterOptions
    {
        FlushOnWrite = flushOnWrite,
        TraceExceptions = traceExceptions
    };
    return configuration.Custom(new ApplicationInsightsWriter(instrumentationKey, writerOptions));
}
/// <summary>
/// Creates an archive writer with its service dependencies and the archive
/// type/options it will write with.
/// </summary>
public ArchiveWriter(
    IFileService fileService,
    IPathService pathService,
    IDirectoryService directoryService,
    ArchiveType archiveType,
    WriterOptions options)
{
    _options = options;
    _archiveType = archiveType;
    _fileService = fileService;
    _pathService = pathService;
    _directoryService = directoryService;
}
/// <summary>
/// Shows this dialog with an indeterminate progress bar and saves the archive
/// on a background thread; the dialog closes itself (via the Dispatcher, since
/// Close must run on the UI thread) when the save completes.
/// </summary>
/// <param name="archive">Archive to save.</param>
/// <param name="fileName">Destination file path.</param>
/// <param name="writerOptions">Options passed through to SaveTo.</param>
public void StartCompress(ZipArchive archive, string fileName, WriterOptions writerOptions)
{
    Message.Text = "Compressing...";
    this.Show();
    progress.IsIndeterminate = true;
    var th = new Thread(delegate ()
    {
        // Runs off the UI thread; marshal Close back via the Dispatcher.
        archive.SaveTo(fileName, writerOptions);
        Dispatcher.Invoke(Close);
    });
    th.Start();
}
/// <summary>
/// Copy-constructor: derives gzip writer options from generic writer options,
/// carrying over the gzip-specific compression level when available.
/// </summary>
internal GZipWriterOptions(WriterOptions options)
    : base(options.CompressionType)
{
    LeaveStreamOpen = options.LeaveStreamOpen;
    ArchiveEncoding = options.ArchiveEncoding;
    if (options is GZipWriterOptions gzipOptions)
    {
        CompressionLevel = gzipOptions.CompressionLevel;
    }
}
/// <summary>
/// Compress (LZMA) an unencrypted FAES File.
/// </summary>
/// <param name="unencryptedFile">Unencrypted FAES File</param>
/// <returns>Path of the unencrypted, LZMA compressed file</returns>
public string CompressFAESFile(FAES_File unencryptedFile)
{
    FileAES_IntUtilities.CreateEncryptionFilePath(unencryptedFile, "LZMA", out string tempRawPath, out _, out string tempOutputPath);

    var writerOptions = new WriterOptions(CompressionType.LZMA);
    using (Stream outputStream = File.OpenWrite(tempOutputPath))
    using (var archiveWriter = WriterFactory.Open(outputStream, ArchiveType.Zip, writerOptions))
    {
        archiveWriter.WriteAll(tempRawPath, "*", SearchOption.AllDirectories);
    }

    return tempOutputPath;
}
/// <summary>
/// Deflate-compresses the stream's contents into an in-memory zip archive
/// containing a single entry named <c>ZipEntryName</c>.
/// </summary>
/// <param name="sourceStream">Stream whose contents are compressed.</param>
/// <returns>The raw bytes of the resulting zip archive.</returns>
public static ReadOnlyMemory<byte> Compress(this MemoryStream sourceStream)
{
    var zipResultStream = new MemoryStream();
    var options = new WriterOptions(CompressionType.Deflate);
    // using block instead of a manual Dispose() so the writer is always
    // finalized (archive footer flushed) even if Write throws.
    using (var zipWriter = WriterFactory.Open(zipResultStream, ArchiveType.Zip, options))
    {
        zipWriter.Write(ZipEntryName, sourceStream);
    }
    // MemoryStream.ToArray is valid even after the writer closed the stream.
    return zipResultStream.ToArray();
}
/// <summary>
/// Copy-constructor: derives zip writer options from generic writer options,
/// preserving zip-specific settings when the source is already zip-flavored.
/// </summary>
internal ZipWriterOptions(WriterOptions options)
    : base(options.CompressionType)
{
    LeaveStreamOpen = options.LeaveStreamOpen;
    ArchiveEncoding = options.ArchiveEncoding;
    if (options is ZipWriterOptions zipOptions)
    {
        UseZip64 = zipOptions.UseZip64;
        DeflateCompressionLevel = zipOptions.DeflateCompressionLevel;
        ArchiveComment = zipOptions.ArchiveComment;
    }
}
/// <summary>
/// Configures the active Application Insights pipeline (inserting the LogMagic
/// telemetry processor) and creates the telemetry client this writer logs through.
/// </summary>
/// <param name="instrumentationKey">Application Insights instrumentation key.</param>
/// <param name="options">Writer options stored for later use.</param>
public ApplicationInsightsWriter(string instrumentationKey, WriterOptions options)
{
    TelemetryConfiguration.Active.InstrumentationKey = instrumentationKey;

    TelemetryProcessorChainBuilder chainBuilder = TelemetryConfiguration.Active.TelemetryProcessorChainBuilder;
    chainBuilder.Use(next => new LogMagicTelemetryProcessor(next));
    chainBuilder.Build();

    _telemetryClient = new TelemetryClient(TelemetryConfiguration.Active);
    _telemetryClient.InstrumentationKey = instrumentationKey;
    _context = new InsightsContext(_telemetryClient, options);
    _options = options;
}
/// <summary>
/// Writes all existing and new non-directory entries into a tar archive
/// on <paramref name="stream"/>.
/// </summary>
protected override void SaveTo(Stream stream, WriterOptions options, IEnumerable<TarArchiveEntry> oldEntries, IEnumerable<TarArchiveEntry> newEntries)
{
    using (var writer = new TarWriter(stream, options))
    {
        var fileEntries = oldEntries.Concat(newEntries).Where(entry => !entry.IsDirectory);
        foreach (var entry in fileEntries)
        {
            using (var entryStream = entry.OpenEntryStream())
            {
                writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
            }
        }
    }
}
/// <summary>
/// Packs a folder under the root storage directory into a gzip-compressed
/// tar archive, also under the root storage directory.
/// </summary>
public void CompressFolder(string source, string dest)
{
    var sourcePath = Path.Combine(RootStorageDirectory, source);
    var destPath = Path.Combine(RootStorageDirectory, dest);

    var options = new WriterOptions(CompressionType.GZip)
    {
        LeaveStreamOpen = true
    };
    using (Stream output = File.OpenWrite(destPath))
    using (var writer = WriterFactory.Open(output, ArchiveType.Tar, options))
    {
        writer.WriteAll(sourcePath, "*", SearchOption.AllDirectories);
    }
}
/// <summary>
/// Compresses a set of files into a single in-memory ZIP archive.
/// </summary>
/// <param name="filePaths">Paths of the files to add; each is stored under its own file name.</param>
/// <returns>A MemoryStream containing the archive, positioned at the beginning.</returns>
private MemoryStream CompressionZIP(List<string> filePaths)
{
    WriterOptions options = new WriterOptions(CompressionType.Deflate);
    options.ArchiveEncoding.Default = Encoding.UTF8;
    // FIX for the "downloaded file is 0 bytes" TODO: keep the MemoryStream
    // alive after the writer is disposed; without this the writer closes the
    // stream and the caller gets an unusable result.
    options.LeaveStreamOpen = true;

    var zipStream = new MemoryStream();
    using (var zipWriter = WriterFactory.Open(zipStream, ArchiveType.Zip, options))
    {
        foreach (var filePath in filePaths)
        {
            zipWriter.Write(Path.GetFileName(filePath), filePath);
        }
    }

    // Rewind so consumers read the archive from the start instead of EOF.
    zipStream.Position = 0;
    return zipStream;
}
/// <summary>
/// Reading with a negative offset must raise a ParquetException.
/// </summary>
public void Read_from_negative_offset_fails()
{
    DataSet ds = DataSetGenerator.Generate(15);

    var writerOptions = new WriterOptions { RowGroupsSize = 5 };
    var readerOptions = new ReaderOptions { Offset = -4, Count = 2 };

    var ms = new MemoryStream();
    ParquetWriter.Write(ds, ms, CompressionMethod.None, null, writerOptions);
    ms.Position = 0;

    Assert.Throws<ParquetException>(() => ParquetReader.Read(ms, null, readerOptions));
}
/// <summary>
/// Archives the resolved source directory into the resolved output file
/// using deflate compression.
/// </summary>
/// <param name="pipeline">Pipeline context used to resolve Output and Source.</param>
protected override void ExecuteInternal(Pipeline pipeline)
{
    var output = Output.Resolve(pipeline, this);
    var source = Source.Resolve(pipeline, this);
    var outputDir = Path.GetDirectoryName(output);

    // Create the directory BEFORE deleting: File.Delete throws
    // DirectoryNotFoundException when the containing directory does not
    // exist, whereas deleting a nonexistent file in an existing directory
    // is a no-op.
    Directory.CreateDirectory(outputDir);
    File.Delete(output);

    using (var archive = ArchiveFactory.Create(ArchiveType))
    {
        archive.AddAllFromDirectory(source, searchOption: SearchOption.AllDirectories);
        var options = new WriterOptions(CompressionType.Deflate);
        archive.SaveTo(output, options);
    }
}
/// <summary>
/// Creates a Parquet column writer bound to its output stream, metadata
/// builder, schema element and compression/format settings, and wires up
/// the plain, RLE and dictionary value writers.
/// </summary>
public ColumnWriter(Stream output, ThriftStream thriftStream, FileMetadataBuilder builder, SchemaElement schema, CompressionMethod compressionMethod, ParquetOptions formatOptions, WriterOptions writerOptions)
{
    _output = output;
    _thriftStream = thriftStream;
    _meta = builder;
    _schema = schema;
    _compressionMethod = compressionMethod;
    _formatOptions = formatOptions;
    _writerOptions = writerOptions;

    // Value writers: the dictionary writer encodes its indexes through the RLE writer.
    _plainWriter = new PlainValuesWriter(formatOptions);
    _rleWriter = new RunLengthBitPackingHybridValuesWriter();
    _dicWriter = new PlainDictionaryValuesWriter(_rleWriter);
}
/// <summary>
/// Create an ORC file writer. This is the public interface for creating
/// writers going forward; new options will only be added to this method.
/// Every configured option is forwarded individually to the WriterImpl constructor.
/// </summary>
/// <param name="path">Filename to write to.</param>
/// <param name="stream">Destination stream the writer emits ORC data to.</param>
/// <param name="opts">The options.</param>
/// <returns>A new ORC file writer.</returns>
public static Writer createWriter(string path, Stream stream, WriterOptions opts)
{
    return new WriterImpl(stream, path, opts, opts._inspector, opts.schema, opts.stripeSizeValue, opts.compressValue, opts.bufferSizeValue, opts.rowIndexStrideValue, opts.memoryManagerValue, opts.blockPaddingValue, opts.versionValue, opts._callback, opts._encodingStrategy, opts.compressionStrategy, opts._paddingTolerance, opts.blockSizeValue, opts._bloomFilterColumns, opts._bloomFilterFpp);
}