/// <summary>
/// Disposes every stream this instance owns and clears the references so a
/// repeated call is a harmless no-op, then lets the base class finish its
/// own managed cleanup.
/// </summary>
protected override void DisposeManagedResources()
{
    // Null-conditional dispose: each stream is released at most once,
    // and the field is cleared regardless of its prior state.
    StreamOutput?.Dispose();
    StreamOutput = null;

    StreamError?.Dispose();
    StreamError = null;

    StreamWarning?.Dispose();
    StreamWarning = null;

    StreamVerbose?.Dispose();
    StreamVerbose = null;

    StreamDebug?.Dispose();
    StreamDebug = null;

    base.DisposeManagedResources();
}
/// <summary>
/// Downloads the compressed blob backing this input into memory, inflates
/// it, and writes the uncompressed contents into the cache directory under
/// <paramref name="fileName"/>.
/// </summary>
/// <param name="fileName">Cache-directory file to create with the uncompressed contents.</param>
private void InflateStream(string fileName)
{
    using (var deflatedStream = new MemoryStream())
    {
        // get the deflated blob
        _blob.DownloadToStream(deflatedStream);
#if FULLDEBUG
        Trace.WriteLine($"GET {_name} RETREIVED {deflatedStream.Length} bytes");
#endif
        // seek back to beginning so the decompressor reads from the start
        deflatedStream.Seek(0, SeekOrigin.Begin);

        // open output file for uncompressed contents
        using (var fileStream = new StreamOutput(CacheDirectory.CreateOutput(fileName)))
        using (var decompressor = new DeflateStream(deflatedStream, CompressionMode.Decompress))
        {
            var buffer = new byte[65535];
            int bytesRead;
            // BUGFIX: the original looped "while (nRead == 65535)", but
            // Stream.Read may legally return fewer bytes than requested even
            // before end-of-stream, which could silently truncate the output.
            // Read returns 0 only at true EOF, so loop until then.
            while ((bytesRead = decompressor.Read(buffer, 0, buffer.Length)) > 0)
            {
                fileStream.Write(buffer, 0, bytesRead);
            }
        }
    }
}
/// <summary>
/// Renders a notebook cell output as HTML, dispatching on the concrete
/// output type.
/// </summary>
/// <param name="cell">The cell output to convert.</param>
/// <returns>The HTML representation of the cell output.</returns>
/// <exception cref="NotSupportedException">Thrown for output types with no converter.</exception>
public static string ToHtml(this BaseCellOutput cell)
{
    return cell switch
    {
        // execute_result outputs are also DataOutput, so this arm covers both.
        DataOutput dataCell => convertDataToHtml(dataCell),
        ErrorOutput errCell => convertErrorToHtml(errCell),
        StreamOutput streamCell => convertStreamToHtml(streamCell),
        _ => throw new NotSupportedException(),
    };
    // BUGFIX: the original text ended without closing the method body
    // (unbalanced braces); the terminating brace is restored here.
}
/// <summary>
/// Opens an index input backed by a blob described by <paramref name="blobMeta"/>,
/// downloading the blob into the local cache first when the cached copy is
/// missing, has a different length, or is more than one second older than the
/// blob's last-modified time. All work happens under a per-file named mutex.
/// </summary>
public FastAzureIndexInput(AzureDirectory azuredirectory, BlobMeta blobMeta)
{
    this._name = blobMeta.Name;
    // Serialize cache access for this file name across threads/processes.
    this._fileMutex = BlobMutexManager.GrabMutex(this._name);
    this._fileMutex.WaitOne();
    try
    {
        this._azureDirectory = azuredirectory;
        this._blobContainer = azuredirectory.BlobContainer;
        this._blob = blobMeta.Blob;
        string name = this._name;
        // flag == true means the cached copy must be (re)downloaded.
        bool flag = false;
        if (!this.CacheDirectory.FileExists(name))
        {
            flag = true;
        }
        else
        {
            long num = this.CacheDirectory.FileLength(name);
            long result1 = blobMeta.Length;
            DateTime dateTime = blobMeta.LastModified;
            if (num != result1)
            {
                // Size mismatch: cached copy is stale.
                flag = true;
            }
            else
            {
                long ticks = this.CacheDirectory.FileModified(name);
                // NOTE(review): FileModified apparently encodes its value with an
                // offset of ticks1970 (the 1970 epoch in ticks); values above it
                // are rebased before being interpreted — confirm against the
                // CacheDirectory implementation.
                if (ticks > FastAzureIndexInput.ticks1970)
                {
                    ticks -= FastAzureIndexInput.ticks1970;
                }
                DateTime universalTime = new DateTime(ticks, DateTimeKind.Local).ToUniversalTime();
                // Refresh only when the blob is more than one second newer than
                // the cache (tolerates small clock skew).
                if (universalTime != dateTime && dateTime.Subtract(universalTime).TotalSeconds > 1.0)
                {
                    flag = true;
                }
            }
        }
        if (flag)
        {
            // Download the blob into the cache, then open the cached copy.
            StreamOutput cachedOutputAsStream = this._azureDirectory.CreateCachedOutputAsStream(name);
            this._blob.ParallelDownloadBlob((Stream)cachedOutputAsStream);
            cachedOutputAsStream.Flush();
            cachedOutputAsStream.Close();
            this._indexInput = this.CacheDirectory.OpenInput(name);
        }
        else
        {
            // Cached copy is current; open it directly.
            this._indexInput = this.CacheDirectory.OpenInput(name);
        }
    }
    finally
    {
        this._fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Copies the named blob into <paramref name="directory"/>, inflating it
/// first when the file is stored compressed; otherwise streams it straight
/// into the output file.
/// </summary>
/// <param name="directory">Destination Lucene directory.</param>
/// <param name="fileName">Name of the file/blob to sync.</param>
/// <param name="CompressBlobs">Whether blob compression is enabled for this store.</param>
public void SyncFile(Lucene.Net.Store.Directory directory, string fileName, bool CompressBlobs)
{
    Trace.WriteLine($"INFO Syncing file {fileName} for {_rootFolderName}");
    if (ShouldCompressFile(fileName, CompressBlobs))
    {
        using (var deflatedStream = new MemoryStream())
        {
            // get the deflated blob
            blob.DownloadTo(deflatedStream);
#if FULLDEBUG
            // BUGFIX: this trace ran BEFORE the download in the original, so it
            // always reported 0 bytes; it now logs the real downloaded size.
            Trace.WriteLine($"GET {fileName} RETREIVED {deflatedStream.Length} bytes");
#endif
            // seek back to beginning before decompressing
            deflatedStream.Seek(0, SeekOrigin.Begin);

            // open output file for uncompressed contents
            using (var fileStream = new StreamOutput(directory.CreateOutput(fileName)))
            using (var decompressor = new DeflateStream(deflatedStream, CompressionMode.Decompress))
            {
                var buffer = new byte[65535];
                int bytesRead;
                // BUGFIX: loop until Read returns 0 (true EOF). Stream.Read may
                // return fewer bytes than requested before end-of-stream, so the
                // original "while (nRead == 65535)" could truncate the output.
                while ((bytesRead = decompressor.Read(buffer, 0, buffer.Length)) > 0)
                {
                    fileStream.Write(buffer, 0, bytesRead);
                }
            }
        }
    }
    else
    {
        using (var fileStream = new StreamOutput(directory.CreateOutput(fileName)))
        {
            // get the blob directly into the output file
            blob.DownloadTo(fileStream);
            fileStream.Flush();
#if FULLDEBUG
            Trace.WriteLine($"GET {fileName} RETREIVED {fileStream.Length} bytes");
#endif
        }
    }
}
/// <summary>
/// Opens an index input over an Azure blob, downloading it into the local
/// cache first when the cached copy is missing, differs in length, or is
/// more than one second older than the blob. Length/last-modified are taken
/// from the blob's "CachedLength"/"CachedLastModified" metadata when present,
/// falling back to the blob properties. All work happens under a per-file mutex.
/// </summary>
public AzureIndexInput(AzureDirectory azuredirectory, ICloudBlob blob)
{
    // Blob name = last URI segment.
    _name = blob.Uri.Segments[blob.Uri.Segments.Length - 1];
    _azureDirectory = azuredirectory ?? throw new ArgumentNullException(nameof(azuredirectory));
#if FULLDEBUG
    Trace.WriteLine($"opening {_name} ");
#endif
    // Serialize cache access for this file across threads.
    _fileMutex = SyncMutexManager.GrabMutex(_azureDirectory, _name);
    _fileMutex.WaitOne();
    try
    {
        _blobContainer = azuredirectory.BlobContainer;
        _blob = blob;
        var fileName = _name;
        // fFileNeeded == true means the cached copy must be (re)downloaded.
        var fFileNeeded = false;
        if (!CacheDirectory.FileExists(fileName))
        {
            fFileNeeded = true;
        }
        else
        {
            var cachedLength = CacheDirectory.FileLength(fileName);
            // Prefer the length recorded in blob metadata over the raw property.
            // NOTE(review): if the metadata value is present but unparseable,
            // TryParse zeroes blobLength here — presumably never happens in
            // practice, but worth confirming.
            var hasMetadataValue = blob.Metadata.TryGetValue("CachedLength", out var blobLengthMetadata);
            var blobLength = blob.Properties.Length;
            if (hasMetadataValue)
            {
                long.TryParse(blobLengthMetadata, out blobLength);
            }
            // Prefer the last-modified ticks recorded in blob metadata.
            var blobLastModifiedUtc = blob.Properties.LastModified.Value.UtcDateTime;
            if (blob.Metadata.TryGetValue("CachedLastModified", out var blobLastModifiedMetadata))
            {
                if (long.TryParse(blobLastModifiedMetadata, out var longLastModified))
                {
                    blobLastModifiedUtc = new DateTime(longLastModified).ToUniversalTime();
                }
            }
            if (cachedLength != blobLength)
            {
                // Size mismatch: cached copy is stale.
                fFileNeeded = true;
            }
            else
            {
                // cachedLastModifiedUTC was not ouputting with a date (just time) and the time was always off
                var unixDate = CacheDirectory.FileModified(fileName);
                var start = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
                var cachedLastModifiedUtc = start.AddMilliseconds(unixDate).ToUniversalTime();
                if (cachedLastModifiedUtc != blobLastModifiedUtc)
                {
                    // Refresh only when the blob is more than one second newer
                    // than the cache (tolerates small clock skew).
                    var timeSpan = blobLastModifiedUtc.Subtract(cachedLastModifiedUtc);
                    if (timeSpan.TotalSeconds > 1)
                    {
                        fFileNeeded = true;
                    }
                    else
                    {
#if FULLDEBUG
                        Trace.WriteLine(timeSpan.TotalSeconds);
#endif
                        // file not needed
                    }
                }
            }
        }
        // if the file does not exist
        // or if it exists and it is older then the lastmodified time in the blobproperties (which always comes from the blob storage)
        if (fFileNeeded)
        {
            if (_azureDirectory.ShouldCompressFile(_name))
            {
                // Compressed blob: download + inflate into the cache.
                InflateStream(fileName);
            }
            else
            {
                using (var fileStream = new StreamOutput(CacheDirectory.CreateOutput(fileName)))
                {
                    // get the blob
                    _blob.DownloadToStream(fileStream);
                    fileStream.Flush();
#if FULLDEBUG
                    Trace.WriteLine($"GET {_name} RETREIVED {fileStream.Length} bytes");
#endif
                }
            }
            // and open it as an input
            _indexInput = CacheDirectory.OpenInput(fileName);
        }
        else
        {
#if FULLDEBUG
            Trace.WriteLine($"Using cached file for {_name}");
#endif
            // open the file in read only mode
            _indexInput = CacheDirectory.OpenInput(fileName);
        }
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Builds the commission-stream summary for one organization member: each
/// stream is a dictionary of totals keyed by <c>TimeStatus</c>, first
/// aggregated from time entries, then with the Dispersed / PendingPayout
/// buckets overwritten from the payout-intent records (an intent with a
/// non-null InvoiceTransferId counts as dispersed; null means still pending).
/// </summary>
/// <param name="person">The person/organization pair the summary is for.</param>
/// <param name="entries">Time entries to aggregate per stream and status.</param>
/// <param name="organizationPayoutIntents">Payout intents paid at the organization level.</param>
/// <param name="individualPayouts">Payout intents paid to individuals.</param>
/// <returns>A completed task carrying the populated <c>StreamOutput</c>.</returns>
private Task<StreamOutput> GetStreamOutput(
    IOrganizationPerson person,
    List<TimeEntry> entries,
    List<OrganizationPayoutIntent> organizationPayoutIntents,
    List<IndividualPayoutIntent> individualPayouts)
{
    // Aggregate each stream's time entries (filtered to this person + org)
    // into a status -> total dictionary. Each stream uses its own owner-id
    // field and its own Total*Stream amount.
    var streamOutput = new StreamOutput
    {
        ProviderAgencyStream = entries.Where(x => x.ProviderAgencyOwnerId == person.PersonId && x.ProviderOrganizationId == person.OrganizationId)
            .GroupBy(x => x.Status).ToDictionary(x => x.Key, x => x.Sum(y => y.TotalAgencyStream)),
        MarketingAgencyStream = entries.Where(x => x.MarketingAgencyOwnerId == person.PersonId && x.MarketingOrganizationId == person.OrganizationId)
            .GroupBy(x => x.Status).ToDictionary(x => x.Key, x => x.Sum(y => y.TotalMarketingAgencyStream)),
        RecruitingAgencyStream = entries.Where(x => x.RecruitingAgencyOwnerId == person.PersonId && x.RecruitingOrganizationId == person.OrganizationId)
            .GroupBy(x => x.Status).ToDictionary(x => x.Key, x => x.Sum(y => y.TotalRecruitingAgencyStream)),
        MarketerStream = entries.Where(x => x.MarketerId == person.PersonId && x.MarketingOrganizationId == person.OrganizationId)
            .GroupBy(x => x.Status).ToDictionary(x => x.Key, x => x.Sum(y => y.TotalMarketerStream)),
        RecruiterStream = entries.Where(x => x.RecruiterId == person.PersonId && x.RecruitingOrganizationId == person.OrganizationId)
            .GroupBy(x => x.Status).ToDictionary(x => x.Key, x => x.Sum(y => y.TotalRecruiterStream)),
        ContractorStream = entries.Where(x => x.ContractorId == person.PersonId && x.ProviderOrganizationId == person.OrganizationId)
            .GroupBy(x => x.Status).ToDictionary(x => x.Key, x => x.Sum(y => y.TotalContractorStream)),
        ProjectManagerStream = entries.Where(x => x.ProjectManagerId == person.PersonId && x.ProviderOrganizationId == person.OrganizationId)
            .GroupBy(x => x.Status).ToDictionary(x => x.Key, x => x.Sum(y => y.TotalProjectManagerStream)),
        AccountManagerStream = entries.Where(x => x.AccountManagerId == person.PersonId && x.ProviderOrganizationId == person.OrganizationId)
            .GroupBy(x => x.Status).ToDictionary(x => x.Key, x => x.Sum(y => y.TotalAccountManagerStream))
    };

    // Agency (organization-level) streams: payout intents come from
    // organizationPayoutIntents. Note these indexer writes REPLACE any
    // Dispersed/PendingPayout totals aggregated from entries above.
    streamOutput.ProviderAgencyStream[TimeStatus.Dispersed] = organizationPayoutIntents
        .Where(x => x.Type == CommissionType.ProviderAgencyStream && x.InvoiceTransferId != null)
        .Sum(x => x.Amount);
    streamOutput.ProviderAgencyStream[TimeStatus.PendingPayout] = organizationPayoutIntents
        .Where(x => x.Type == CommissionType.ProviderAgencyStream && x.InvoiceTransferId == null)
        .Sum(x => x.Amount);

    // Individual streams: payout intents come from individualPayouts.
    streamOutput.AccountManagerStream[TimeStatus.Dispersed] = individualPayouts
        .Where(x => x.Type == CommissionType.AccountManagerStream && x.InvoiceTransferId != null)
        .Sum(x => x.Amount);
    streamOutput.AccountManagerStream[TimeStatus.PendingPayout] = individualPayouts
        .Where(x => x.Type == CommissionType.AccountManagerStream && x.InvoiceTransferId == null)
        .Sum(x => x.Amount);
    streamOutput.ProjectManagerStream[TimeStatus.Dispersed] = individualPayouts
        .Where(x => x.Type == CommissionType.ProjectManagerStream && x.InvoiceTransferId != null)
        .Sum(x => x.Amount);
    streamOutput.ProjectManagerStream[TimeStatus.PendingPayout] = individualPayouts
        .Where(x => x.Type == CommissionType.ProjectManagerStream && x.InvoiceTransferId == null)
        .Sum(x => x.Amount);
    streamOutput.ContractorStream[TimeStatus.PendingPayout] = individualPayouts
        .Where(x => x.Type == CommissionType.ContractorStream && x.InvoiceTransferId == null)
        .Sum(x => x.Amount);
    streamOutput.ContractorStream[TimeStatus.Dispersed] = individualPayouts
        .Where(x => x.Type == CommissionType.ContractorStream && x.InvoiceTransferId != null)
        .Sum(x => x.Amount);

    streamOutput.RecruitingAgencyStream[TimeStatus.Dispersed] = organizationPayoutIntents
        .Where(x => x.Type == CommissionType.RecruitingAgencyStream && x.InvoiceTransferId != null)
        .Sum(x => x.Amount);
    streamOutput.RecruitingAgencyStream[TimeStatus.PendingPayout] = organizationPayoutIntents
        .Where(x => x.Type == CommissionType.RecruitingAgencyStream && x.InvoiceTransferId == null)
        .Sum(x => x.Amount);
    streamOutput.RecruiterStream[TimeStatus.Dispersed] = individualPayouts
        .Where(x => x.Type == CommissionType.RecruiterStream && x.InvoiceTransferId != null)
        .Sum(x => x.Amount);
    streamOutput.RecruiterStream[TimeStatus.PendingPayout] = individualPayouts
        .Where(x => x.Type == CommissionType.RecruiterStream && x.InvoiceTransferId == null)
        .Sum(x => x.Amount);
    streamOutput.MarketingAgencyStream[TimeStatus.Dispersed] = organizationPayoutIntents
        .Where(x => x.Type == CommissionType.MarketingAgencyStream && x.InvoiceTransferId != null)
        .Sum(x => x.Amount);
    streamOutput.MarketingAgencyStream[TimeStatus.PendingPayout] = organizationPayoutIntents
        .Where(x => x.Type == CommissionType.MarketingAgencyStream && x.InvoiceTransferId == null)
        .Sum(x => x.Amount);
    streamOutput.MarketerStream[TimeStatus.Dispersed] = individualPayouts
        .Where(x => x.Type == CommissionType.MarketerStream && x.InvoiceTransferId != null)
        .Sum(x => x.Amount);
    streamOutput.MarketerStream[TimeStatus.PendingPayout] = individualPayouts
        .Where(x => x.Type == CommissionType.MarketerStream && x.InvoiceTransferId == null)
        .Sum(x => x.Amount);

    // Method does no awaiting; wrap the synchronous result in a completed task.
    return(Task.FromResult(streamOutput));
}
/// <summary>
/// Opens an index input over an Azure blob, refreshing the local cache copy
/// when it is missing, differs in length, or is more than one second older
/// than the blob, and reports the resolved blob metadata via
/// <paramref name="meta"/>. Length/last-modified prefer the blob's
/// "CachedLength"/"CachedLastModified" metadata entries when present and
/// parseable, falling back to the blob properties.
/// </summary>
public FastAzureIndexInput(AzureDirectory azuredirectory, ICloudBlob blob, out BlobMeta meta)
{
    meta = new BlobMeta();
    // Blob name = last URI segment.
    this._name = blob.Uri.Segments[blob.Uri.Segments.Length - 1];
    // Serialize cache access for this file name.
    this._fileMutex = BlobMutexManager.GrabMutex(this._name);
    this._fileMutex.WaitOne();
    try
    {
        this._azureDirectory = azuredirectory;
        this._blobContainer = azuredirectory.BlobContainer;
        this._blob = blob;
        string name = this._name;

        // Resolve the authoritative blob length and last-modified time.
        // BUGFIX: the original indexed blob.Metadata["CachedLength"] /
        // ["CachedLastModified"] directly, which throws KeyNotFoundException
        // when the key is absent, and let a failed TryParse clobber the
        // property-derived default with 0. Use TryGetValue plus a temporary
        // so the fallback survives missing/unparseable metadata.
        long blobLength = blob.Properties.Length;
        if (blob.Metadata.TryGetValue("CachedLength", out var cachedLengthText)
            && long.TryParse(cachedLengthText, out var cachedLength))
        {
            blobLength = cachedLength;
        }
        DateTime blobLastModified = blob.Properties.LastModified.Value.UtcDateTime;
        if (blob.Metadata.TryGetValue("CachedLastModified", out var cachedModifiedText)
            && long.TryParse(cachedModifiedText, out var cachedModifiedTicks))
        {
            // NOTE(review): values above the 1970-epoch tick offset are rebased
            // before interpretation, mirroring the sibling constructor's
            // handling of CacheDirectory.FileModified — confirm the encoding.
            if (cachedModifiedTicks > FastAzureIndexInput.ticks1970)
            {
                cachedModifiedTicks -= FastAzureIndexInput.ticks1970;
            }
            blobLastModified = new DateTime(cachedModifiedTicks).ToUniversalTime();
        }

        // Decide whether the cached copy must be (re)downloaded.
        bool refreshNeeded = false;
        if (!this.CacheDirectory.FileExists(name))
        {
            refreshNeeded = true;
        }
        else
        {
            long cachedFileLength = this.CacheDirectory.FileLength(name);
            if (cachedFileLength != blobLength)
            {
                // Size mismatch: cached copy is stale.
                refreshNeeded = true;
            }
            else
            {
                long ticks = this.CacheDirectory.FileModified(name);
                if (ticks > FastAzureIndexInput.ticks1970)
                {
                    ticks -= FastAzureIndexInput.ticks1970;
                }
                DateTime cachedLastModifiedUtc = new DateTime(ticks, DateTimeKind.Local).ToUniversalTime();
                // Refresh only when the blob is more than one second newer than
                // the cache (tolerates small clock skew).
                if (cachedLastModifiedUtc != blobLastModified
                    && blobLastModified.Subtract(cachedLastModifiedUtc).TotalSeconds > 1.0)
                {
                    refreshNeeded = true;
                }
            }
        }

        // BUGFIX: populate meta on every path. The original only filled these
        // fields in the cache-hit branch, so a first-time download handed the
        // caller an empty BlobMeta with HasData == true.
        meta.Name = this._name;
        meta.LastModified = blobLastModified;
        meta.Length = blobLength;
        meta.Blob = blob;

        if (refreshNeeded)
        {
            // Download the blob into the cache before opening it.
            StreamOutput cachedOutputAsStream = this._azureDirectory.CreateCachedOutputAsStream(name);
            this._blob.ParallelDownloadBlob((Stream)cachedOutputAsStream);
            cachedOutputAsStream.Flush();
            cachedOutputAsStream.Close();
        }
        this._indexInput = this.CacheDirectory.OpenInput(name);
        meta.HasData = true;
    }
    finally
    {
        this._fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Copies the named blob into <paramref name="directory"/>, inflating it
/// first when the file is stored compressed; otherwise streams it straight
/// into the output file. Errors are logged rather than thrown.
/// </summary>
/// <param name="directory">Destination Lucene directory.</param>
/// <param name="fileName">Name of the file/blob to sync.</param>
/// <param name="CompressBlobs">Whether blob compression is enabled for this store.</param>
/// <returns>True when the file was synced successfully; false on any failure.</returns>
public bool SyncFile(Lucene.Net.Store.Directory directory, string fileName, bool CompressBlobs)
{
    var success = false;
    try
    {
        var blob = _blobContainer.GetBlobClient(_rootFolderName + fileName);
        _loggingService.Log(new LogEntry(LogLevel.Info, null, $"Syncing file {fileName} for {_rootFolderName}"));
        if (ShouldCompressFile(fileName, CompressBlobs))
        {
            // BUGFIX: only buffer into memory when we actually need to inflate.
            // The original created this MemoryStream and downloaded the blob
            // into it unconditionally, then downloaded the SAME blob a second
            // time in the uncompressed branch below.
            using (var deflatedStream = new MemoryStream())
            {
                // get the deflated blob
                blob.DownloadTo(deflatedStream);
#if FULLDEBUG
                _loggingService.Log(new LogEntry(LogLevel.Info, null, $"GET {fileName} RETREIVED {deflatedStream.Length} bytes"));
#endif
                // seek back to beginning before decompressing
                deflatedStream.Seek(0, SeekOrigin.Begin);

                // open output file for uncompressed contents
                using (var fileStream = new StreamOutput(directory.CreateOutput(fileName)))
                using (var decompressor = new DeflateStream(deflatedStream, CompressionMode.Decompress))
                {
                    var buffer = new byte[65535];
                    int bytesRead;
                    // BUGFIX: loop until Read returns 0 (true EOF). Stream.Read
                    // may return fewer bytes than requested before end-of-stream,
                    // so "while (nRead == 65535)" could truncate the output.
                    while ((bytesRead = decompressor.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        fileStream.Write(buffer, 0, bytesRead);
                    }
                }
            }
        }
        else
        {
            using (var fileStream = new StreamOutput(directory.CreateOutput(fileName)))
            {
                // get the blob directly into the output file
                blob.DownloadTo(fileStream);
                fileStream.Flush();
#if FULLDEBUG
                _loggingService.Log(new LogEntry(LogLevel.Info, null, $"GET {fileName} RETREIVED {fileStream.Length} bytes"));
#endif
            }
        }
        success = true;
    }
    catch (Exception e)
    {
        _loggingService.Log(new LogEntry(LogLevel.Error, e, $"GET {fileName} RETREIVED failed"));
    }
    return(success);
}