public override void Close()
{
    _fileMutex.WaitOne();
    try
    {
        string fileName = _name;

        // make sure it's all written out
        _indexOutput.Flush();

        long originalLength = _indexOutput.Length();
        _indexOutput.Close();

        Stream blobStream;

        // optionally put a compressor around the blob stream
        if (_azureDirectory.ShouldCompressFile(_name))
        {
            blobStream = CompressStream(fileName, originalLength);
        }
        else
        {
            blobStream = new StreamInput(CacheDirectory.OpenInput(fileName));
        }

        try
        {
            // push the blobStream up to the cloud
            _blob.UploadFromStream(blobStream);

            // set the metadata with the original index file properties
            _blob.Metadata["CachedLength"] = originalLength.ToString();
            _blob.Metadata["CachedLastModified"] = CacheDirectory.FileModified(fileName).ToString();
            _blob.SetMetadata();

#if FULLDEBUG
            Trace.WriteLine($"PUT {blobStream.Length} bytes to {_name} in cloud");
#endif
        }
        finally
        {
            blobStream.Dispose();
        }

#if FULLDEBUG
        Trace.WriteLine($"CLOSED WRITESTREAM {_name}");
#endif

        // clean up
        _indexOutput = null;
        _blobContainer = null;
        _blob = null;
        GC.SuppressFinalize(this);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
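CompressStream is called above but not shown here. A minimal sketch of what it might look like, assuming the cached file is deflated into an in-memory stream before upload; the use of DeflateStream, the buffer size, and the in-memory approach are assumptions rather than the library's confirmed implementation, and the method relies on the same class members (CacheDirectory) used above.

// Hypothetical sketch only; requires System.IO and System.IO.Compression.
private Stream CompressStream(string fileName, long originalLength)
{
    // deflate the locally cached file into memory; assumes the file fits comfortably in RAM
    var deflatedStream = new MemoryStream();
    using (var indexInput = CacheDirectory.OpenInput(fileName))
    using (var compressor = new DeflateStream(deflatedStream, CompressionMode.Compress, true))
    {
        var buffer = new byte[16384];
        long remaining = originalLength;
        while (remaining > 0)
        {
            int chunk = (int)Math.Min(buffer.Length, remaining);
            indexInput.ReadBytes(buffer, 0, chunk);
            compressor.Write(buffer, 0, chunk);
            remaining -= chunk;
        }
    }

    // rewind so the caller can upload the compressed bytes from the start
    deflatedStream.Seek(0, SeekOrigin.Begin);
    return deflatedStream;
}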
public AzureIndexInput(AzureDirectory azuredirectory, ICloudBlob blob)
{
    if (azuredirectory == null)
    {
        throw new ArgumentNullException(nameof(azuredirectory));
    }

    _name = blob.Uri.Segments[blob.Uri.Segments.Length - 1];
    _azureDirectory = azuredirectory;
#if FULLDEBUG
    Debug.WriteLine(String.Format("opening {0} ", _name));
#endif
    _fileMutex = SyncMutexManager.GrabMutex(_azureDirectory, _name);
    _fileMutex.WaitOne();
    try
    {
        _blobContainer = azuredirectory.BlobContainer;
        _blob = blob;

        var fileName = _name;

        var fFileNeeded = false;
        if (!CacheDirectory.FileExists(fileName))
        {
            fFileNeeded = true;
        }
        else
        {
            long cachedLength = CacheDirectory.FileLength(fileName);

            string blobLengthMetadata;
            bool hasMetadataValue = blob.Metadata.TryGetValue("CachedLength", out blobLengthMetadata);
            long blobLength = blob.Properties.Length;
            if (hasMetadataValue)
            {
                long.TryParse(blobLengthMetadata, out blobLength);
            }

            string blobLastModifiedMetadata;
            long longLastModified = 0;
            DateTime blobLastModifiedUTC = blob.Properties.LastModified.Value.UtcDateTime;
            if (blob.Metadata.TryGetValue("CachedLastModified", out blobLastModifiedMetadata))
            {
                if (long.TryParse(blobLastModifiedMetadata, out longLastModified))
                {
                    blobLastModifiedUTC = new DateTime(longLastModified).ToUniversalTime();
                }
            }

            if (cachedLength != blobLength)
            {
                fFileNeeded = true;
            }
            else
            {
                // cachedLastModifiedUTC was not outputting with a date (just time) and the time was always off
                long unixDate = CacheDirectory.FileModified(fileName);
                DateTime start = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
                var cachedLastModifiedUTC = start.AddMilliseconds(unixDate).ToUniversalTime();

                if (cachedLastModifiedUTC != blobLastModifiedUTC)
                {
                    var timeSpan = blobLastModifiedUTC.Subtract(cachedLastModifiedUTC);
                    if (timeSpan.TotalSeconds > 1)
                    {
                        fFileNeeded = true;
                    }
                    else
                    {
#if FULLDEBUG
                        Debug.WriteLine(timeSpan.TotalSeconds);
#endif
                        // file not needed
                    }
                }
            }
        }

        // if the file does not exist,
        // or if it exists and it is older than the last-modified time in the blob properties (which always comes from blob storage)
        if (fFileNeeded)
        {
            if (_azureDirectory.ShouldCompressFile(_name))
            {
                InflateStream(fileName);
            }
            else
            {
                using (var fileStream = _azureDirectory.CreateCachedOutputAsStream(fileName))
                {
                    // get the blob
                    _blob.DownloadToStream(fileStream);

                    fileStream.Flush();
                    Debug.WriteLine(string.Format("GET {0} RETRIEVED {1} bytes", _name, fileStream.Length));
                }
            }

            // and open it as an input
            _indexInput = CacheDirectory.OpenInput(fileName);
        }
        else
        {
#if FULLDEBUG
            Debug.WriteLine(String.Format("Using cached file for {0}", _name));
#endif

            // open the file in read-only mode
            _indexInput = CacheDirectory.OpenInput(fileName);
        }
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
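InflateStream is the download-side counterpart to CompressStream and is likewise not shown above. A minimal sketch under the assumption that the blob was deflated on upload: it pulls the compressed blob into memory, then decompresses it into the local cache file. The DeflateStream usage and in-memory buffering are assumptions, and the sketch reuses the same members (_blob, _azureDirectory) that appear in the constructor.

// Hypothetical sketch only; requires System.IO and System.IO.Compression.
private void InflateStream(string fileName)
{
    using (var deflatedStream = new MemoryStream())
    {
        // download the compressed blob into memory
        _blob.DownloadToStream(deflatedStream);
        deflatedStream.Seek(0, SeekOrigin.Begin);

        // inflate it into the local cache directory so it can be opened as an IndexInput
        using (var decompressor = new DeflateStream(deflatedStream, CompressionMode.Decompress))
        using (var fileStream = _azureDirectory.CreateCachedOutputAsStream(fileName))
        {
            decompressor.CopyTo(fileStream);
            fileStream.Flush();
        }
    }
}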