/// <summary>
/// Opens an index input backed by a locally cached copy of the blob described by
/// <paramref name="blobMeta"/>, downloading it into the cache directory first when the
/// cached file is missing, has a different length, or is more than one second older
/// than the blob.
/// </summary>
/// <param name="azuredirectory">Owning directory; supplies the blob container and local cache.</param>
/// <param name="blobMeta">Pre-fetched blob metadata (name, length, last-modified, blob reference).</param>
public FastAzureIndexInput(AzureDirectory azuredirectory, BlobMeta blobMeta)
{
    this._name = blobMeta.Name;
    // Serialize all access to this cached file across threads/processes.
    this._fileMutex = BlobMutexManager.GrabMutex(this._name);
    this._fileMutex.WaitOne();
    try
    {
        this._azureDirectory = azuredirectory;
        this._blobContainer = azuredirectory.BlobContainer;
        this._blob = blobMeta.Blob;
        string name = this._name;

        bool downloadNeeded = false;
        if (!this.CacheDirectory.FileExists(name))
        {
            downloadNeeded = true;
        }
        else
        {
            long cachedLength = this.CacheDirectory.FileLength(name);
            long blobLength = blobMeta.Length;
            DateTime blobLastModifiedUtc = blobMeta.LastModified;
            if (cachedLength != blobLength)
            {
                downloadNeeded = true;
            }
            else
            {
                long ticks = this.CacheDirectory.FileModified(name);
                // NOTE(review): original heuristic — FileModified appears to return a value
                // that may or may not already be epoch-relative; confirm units with the
                // CacheDirectory implementation.
                if (ticks > FastAzureIndexInput.ticks1970)
                {
                    ticks -= FastAzureIndexInput.ticks1970;
                }
                DateTime cachedUtc = new DateTime(ticks, DateTimeKind.Local).ToUniversalTime();
                // Only re-download when the blob is more than one second newer than the cache.
                if (cachedUtc != blobLastModifiedUtc
                    && blobLastModifiedUtc.Subtract(cachedUtc).TotalSeconds > 1.0)
                {
                    downloadNeeded = true;
                }
            }
        }

        if (downloadNeeded)
        {
            // using guarantees the cache stream is released even if the download throws;
            // the original only called Close() on the success path and leaked on failure.
            using (StreamOutput cachedOutputAsStream = this._azureDirectory.CreateCachedOutputAsStream(name))
            {
                this._blob.ParallelDownloadBlob((Stream)cachedOutputAsStream);
                cachedOutputAsStream.Flush();
            }
        }

        // Both branches previously ended with the same call — open the (now fresh) cached file.
        this._indexInput = this.CacheDirectory.OpenInput(name);
    }
    finally
    {
        this._fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Downloads the named blob into <paramref name="directory"/>, inflating it first when
/// <see cref="ShouldCompressFile"/> says the blob was stored deflated.
/// </summary>
/// <param name="directory">Target Lucene directory that receives the file.</param>
/// <param name="fileName">Name of the file/blob to sync.</param>
/// <param name="CompressBlobs">Whether blob compression is enabled for this directory.</param>
public void SyncFile(Lucene.Net.Store.Directory directory, string fileName, bool CompressBlobs)
{
    Trace.WriteLine($"INFO Syncing file {fileName} for {_rootFolderName}");
    if (ShouldCompressFile(fileName, CompressBlobs))
    {
        // Buffer the deflated blob in memory, then decompress into the target file.
        using (var deflatedStream = new MemoryStream())
        {
            // get the deflated blob
            blob.DownloadTo(deflatedStream);
#if FULLDEBUG
            // Trace AFTER the download; the original logged before it and always reported 0 bytes.
            Trace.WriteLine($"GET {fileName} RETREIVED {deflatedStream.Length} bytes");
#endif
            // seek back to beginning before decompressing
            deflatedStream.Seek(0, SeekOrigin.Begin);

            // open output file for uncompressed contents
            using (var fileStream = new StreamOutput(directory.CreateOutput(fileName)))
            using (var decompressor = new DeflateStream(deflatedStream, CompressionMode.Decompress))
            {
                var bytes = new byte[65535];
                int nRead;
                do
                {
                    nRead = decompressor.Read(bytes, 0, 65535);
                    if (nRead > 0)
                    {
                        fileStream.Write(bytes, 0, nRead);
                    }
                    // Loop until Read returns 0 (EOF). The original stopped when a read
                    // returned fewer than 65535 bytes, but DeflateStream may legally
                    // return a partial read before the end of the stream.
                } while (nRead > 0);
            }
        }
    }
    else
    {
        // Uncompressed: stream the blob straight into the target file.
        using (var fileStream = new StreamOutput(directory.CreateOutput(fileName)))
        {
            // get the blob
            blob.DownloadTo(fileStream);
            fileStream.Flush();
#if FULLDEBUG
            Trace.WriteLine($"GET {fileName} RETREIVED {fileStream.Length} bytes");
#endif
        }
    }
}
/// <summary>
/// Opens an index input for <paramref name="blob"/>, serving it from the local cache
/// when the cached copy matches the blob's length and last-modified time (within one
/// second), otherwise re-downloading it — inflating first when the directory says the
/// blob was stored compressed.
/// </summary>
/// <param name="azuredirectory">Owning directory; supplies the container and cache.</param>
/// <param name="blob">Cloud blob to open. The file name is the last URI segment.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="azuredirectory"/> is null.</exception>
public AzureIndexInput(AzureDirectory azuredirectory, ICloudBlob blob)
{
    _name = blob.Uri.Segments[blob.Uri.Segments.Length - 1];
    _azureDirectory = azuredirectory ?? throw new ArgumentNullException(nameof(azuredirectory));
#if FULLDEBUG
    Trace.WriteLine($"opening {_name} ");
#endif
    // Serialize access to the cached file for this directory/name pair.
    _fileMutex = SyncMutexManager.GrabMutex(_azureDirectory, _name);
    _fileMutex.WaitOne();
    try
    {
        _blobContainer = azuredirectory.BlobContainer;
        _blob = blob;
        var fileName = _name;

        var fFileNeeded = false;
        if (!CacheDirectory.FileExists(fileName))
        {
            fFileNeeded = true;
        }
        else
        {
            var cachedLength = CacheDirectory.FileLength(fileName);

            // Prefer the uncompressed length recorded in blob metadata; fall back to the
            // blob's physical length (which is the deflated size for compressed blobs).
            var hasMetadataValue = blob.Metadata.TryGetValue("CachedLength", out var blobLengthMetadata);
            var blobLength = blob.Properties.Length;
            if (hasMetadataValue)
            {
                long.TryParse(blobLengthMetadata, out blobLength);
            }

            // Same idea for last-modified: metadata ticks win over blob properties.
            var blobLastModifiedUtc = blob.Properties.LastModified.Value.UtcDateTime;
            if (blob.Metadata.TryGetValue("CachedLastModified", out var blobLastModifiedMetadata))
            {
                if (long.TryParse(blobLastModifiedMetadata, out var longLastModified))
                {
                    blobLastModifiedUtc = new DateTime(longLastModified).ToUniversalTime();
                }
            }

            if (cachedLength != blobLength)
            {
                fFileNeeded = true;
            }
            else
            {
                // cachedLastModifiedUTC was not outputting with a date (just time) and the time was always off
                var unixDate = CacheDirectory.FileModified(fileName);
                var start = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
                var cachedLastModifiedUtc = start.AddMilliseconds(unixDate).ToUniversalTime();
                if (cachedLastModifiedUtc != blobLastModifiedUtc)
                {
                    // Tolerate up to one second of clock skew before re-downloading.
                    var timeSpan = blobLastModifiedUtc.Subtract(cachedLastModifiedUtc);
                    if (timeSpan.TotalSeconds > 1)
                    {
                        fFileNeeded = true;
                    }
                    else
                    {
#if FULLDEBUG
                        Trace.WriteLine(timeSpan.TotalSeconds);
#endif
                        // file not needed
                    }
                }
            }
        }

        // if the file does not exist
        // or if it exists and it is older then the lastmodified time in the blobproperties
        // (which always comes from the blob storage)
        if (fFileNeeded)
        {
            if (_azureDirectory.ShouldCompressFile(_name))
            {
                InflateStream(fileName);
            }
            else
            {
                using (var fileStream = new StreamOutput(CacheDirectory.CreateOutput(fileName)))
                {
                    // get the blob
                    _blob.DownloadToStream(fileStream);
                    fileStream.Flush();
#if FULLDEBUG
                    Trace.WriteLine($"GET {_name} RETREIVED {fileStream.Length} bytes");
#endif
                }
            }
        }
        else
        {
#if FULLDEBUG
            Trace.WriteLine($"Using cached file for {_name}");
#endif
        }

        // Both branches previously ended with the same call — open the cached file read-only.
        _indexInput = CacheDirectory.OpenInput(fileName);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Opens an index input for <paramref name="blob"/>, re-downloading the cached copy when
/// it is missing or stale, and returns the blob's resolved metadata (name, length,
/// last-modified, blob reference) via <paramref name="meta"/>.
/// </summary>
/// <param name="azuredirectory">Owning directory; supplies the blob container and local cache.</param>
/// <param name="blob">Cloud blob to open. The file name is the last URI segment.</param>
/// <param name="meta">Receives the resolved blob metadata; <c>HasData</c> is set on success.</param>
public FastAzureIndexInput(AzureDirectory azuredirectory, ICloudBlob blob, out BlobMeta meta)
{
    meta = new BlobMeta();
    this._name = blob.Uri.Segments[blob.Uri.Segments.Length - 1];
    this._fileMutex = BlobMutexManager.GrabMutex(this._name);
    this._fileMutex.WaitOne();
    try
    {
        this._azureDirectory = azuredirectory;
        this._blobContainer = azuredirectory.BlobContainer;
        this._blob = blob;
        string name = this._name;

        // Resolve length/last-modified up front so meta is populated on EVERY path.
        // (The original only filled meta when the cached file already existed, yet
        // still reported HasData = true.)
        // Use TryGetValue: the raw indexer throws KeyNotFoundException when the
        // metadata key is absent, and a failed TryParse must not clobber the
        // fallback value from blob.Properties.
        long blobLength = blob.Properties.Length;
        if (blob.Metadata.TryGetValue("CachedLength", out string cachedLengthText)
            && long.TryParse(cachedLengthText, out long cachedLengthValue))
        {
            blobLength = cachedLengthValue;
        }

        DateTime blobLastModifiedUtc = blob.Properties.LastModified.Value.UtcDateTime;
        if (blob.Metadata.TryGetValue("CachedLastModified", out string cachedModifiedText)
            && long.TryParse(cachedModifiedText, out long cachedModifiedTicks))
        {
            // NOTE(review): original heuristic — normalize values that look epoch-offset;
            // confirm how CachedLastModified is written.
            if (cachedModifiedTicks > FastAzureIndexInput.ticks1970)
            {
                cachedModifiedTicks -= FastAzureIndexInput.ticks1970;
            }
            blobLastModifiedUtc = new DateTime(cachedModifiedTicks).ToUniversalTime();
        }

        bool downloadNeeded = false;
        if (!this.CacheDirectory.FileExists(name))
        {
            downloadNeeded = true;
        }
        else if (this.CacheDirectory.FileLength(name) != blobLength)
        {
            downloadNeeded = true;
        }
        else
        {
            long ticks = this.CacheDirectory.FileModified(name);
            if (ticks > FastAzureIndexInput.ticks1970)
            {
                ticks -= FastAzureIndexInput.ticks1970;
            }
            DateTime cachedUtc = new DateTime(ticks, DateTimeKind.Local).ToUniversalTime();
            // Only re-download when the blob is more than one second newer than the cache.
            if (cachedUtc != blobLastModifiedUtc
                && blobLastModifiedUtc.Subtract(cachedUtc).TotalSeconds > 1.0)
            {
                downloadNeeded = true;
            }
        }

        meta.Name = this._name;
        meta.LastModified = blobLastModifiedUtc;
        meta.Length = blobLength;
        meta.Blob = blob;

        if (downloadNeeded)
        {
            // using releases the cache stream even if the download throws.
            using (StreamOutput cachedOutputAsStream = this._azureDirectory.CreateCachedOutputAsStream(name))
            {
                this._blob.ParallelDownloadBlob((Stream)cachedOutputAsStream);
                cachedOutputAsStream.Flush();
            }
        }

        this._indexInput = this.CacheDirectory.OpenInput(name);
        meta.HasData = true;
    }
    finally
    {
        this._fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Downloads the named blob into <paramref name="directory"/>, inflating it when
/// <see cref="ShouldCompressFile"/> says it was stored deflated.
/// </summary>
/// <param name="directory">Target Lucene directory that receives the file.</param>
/// <param name="fileName">Name of the file/blob to sync.</param>
/// <param name="CompressBlobs">Whether blob compression is enabled for this directory.</param>
/// <returns><c>true</c> when the sync succeeded; <c>false</c> when any exception occurred (logged).</returns>
public bool SyncFile(Lucene.Net.Store.Directory directory, string fileName, bool CompressBlobs)
{
    var success = false;
    try
    {
        var blob = _blobContainer.GetBlobClient(_rootFolderName + fileName);
        _loggingService.Log(new LogEntry(LogLevel.Info, null, $"Syncing file {fileName} for {_rootFolderName}"));

        if (ShouldCompressFile(fileName, CompressBlobs))
        {
            // Compressed path: buffer the deflated blob in memory, then decompress
            // into the target file.
            using (var deflatedStream = new MemoryStream())
            {
                // get the deflated blob
                blob.DownloadTo(deflatedStream);
#if FULLDEBUG
                _loggingService.Log(new LogEntry(LogLevel.Info, null, $"GET {fileName} RETREIVED {deflatedStream.Length} bytes"));
#endif
                // seek back to beginning before decompressing
                deflatedStream.Seek(0, SeekOrigin.Begin);

                using (var fileStream = new StreamOutput(directory.CreateOutput(fileName)))
                using (var decompressor = new DeflateStream(deflatedStream, CompressionMode.Decompress))
                {
                    var bytes = new byte[65535];
                    int nRead;
                    do
                    {
                        nRead = decompressor.Read(bytes, 0, 65535);
                        if (nRead > 0)
                        {
                            fileStream.Write(bytes, 0, nRead);
                        }
                        // Loop until Read returns 0 (EOF); DeflateStream may legally
                        // return fewer than 65535 bytes before the end of the stream,
                        // so testing nRead == 65535 truncated output on partial reads.
                    } while (nRead > 0);
                }
            }
        }
        else
        {
            // Uncompressed path: stream the blob directly into the target file.
            // (The original downloaded the blob twice here — once into a throwaway
            // MemoryStream and again into the file.)
            using (var fileStream = new StreamOutput(directory.CreateOutput(fileName)))
            {
                blob.DownloadTo(fileStream);
                fileStream.Flush();
#if FULLDEBUG
                _loggingService.Log(new LogEntry(LogLevel.Info, null, $"GET {fileName} RETREIVED {fileStream.Length} bytes"));
#endif
            }
        }
        success = true;
    }
    catch (Exception e)
    {
        _loggingService.Log(new LogEntry(LogLevel.Error, e, $"GET {fileName} RETREIVED failed"));
    }
    return (success);
}