public AzureIndexInput(AzureDirectory azureDirectory, ICloudBlob blob)
    : base(blob.Name)
{
    _name = blob.Uri.Segments[blob.Uri.Segments.Length - 1];
    _fileMutex = BlobMutexManager.GrabMutex(_name);
    _fileMutex.WaitOne();
    try
    {
        _azureDirectory = azureDirectory;
        _blobContainer = azureDirectory.BlobContainer;
        _blob = blob;
        string fileName = _name;

        StreamOutput fileStream = _azureDirectory.CreateCachedOutputAsStream(fileName);

        // download the blob into the local cache
        _blob.DownloadToStream(fileStream);
        fileStream.Flush();
        Debug.WriteLine("GET {0} RETRIEVED {1} bytes", _name, fileStream.Length);
        fileStream.Close();

        // and open the cached copy as our input
        _indexInput = CacheDirectory.OpenInput(fileName, IOContext.DEFAULT);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
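This legacy ICloudBlob overload re-downloads the blob on every open, serializing the work through a per-file mutex from BlobMutexManager.GrabMutex. A minimal sketch of that guard pattern, assuming a plain named System.Threading.Mutex keyed on the file name (the real manager may name or scope its mutexes differently), might look like this:

using System;
using System.Threading;

static class FileMutexGuard
{
    // Illustrative only: runs a cache download under a per-file named mutex so that
    // concurrent opens of the same segment do not download the blob twice.
    public static void Run(string fileName, Action downloadToCache)
    {
        using (var mutex = new Mutex(false, "AzureDirectory_" + fileName)) // hypothetical mutex name
        {
            mutex.WaitOne();
            try
            {
                downloadToCache();
            }
            finally
            {
                mutex.ReleaseMutex();
            }
        }
    }
}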
public AzureIndexInput(AzureDirectory azureDirectory, string name, BlobClient blob)
    : base(name)
{
    _name = name;
    _azureDirectory = azureDirectory;
#if FULLDEBUG
    Debug.WriteLine($"{_azureDirectory.Name} opening {name}");
#endif
    _fileMutex = BlobMutexManager.GrabMutex(name);
    _fileMutex.WaitOne();
    try
    {
        _blobContainer = azureDirectory.BlobContainer;
        _blob = blob;

        bool fileNeeded = false;
        if (!CacheDirectory.FileExists(name))
        {
            fileNeeded = true;
        }
        else
        {
            long cachedLength = CacheDirectory.FileLength(name);
            var properties = blob.GetProperties();
            long blobLength = properties.Value?.ContentLength ?? 0;
            if (cachedLength != blobLength)
            {
                fileNeeded = true;
            }
        }

        // download if the file is missing from the local cache,
        // or if its cached length differs from the blob's length in storage (which is the source of truth)
        if (fileNeeded)
        {
            using (StreamOutput fileStream = _azureDirectory.CreateCachedOutputAsStream(name))
            {
                // get the blob
                _blob.DownloadTo(fileStream);
                fileStream.Flush();
                Debug.WriteLine($"{_azureDirectory.Name} GET {_name} RETRIEVED {fileStream.Length} bytes");
            }
        }
#if FULLDEBUG
        Debug.WriteLine($"{_azureDirectory.Name} Using cached file for {name}");
#endif
        // open the cached copy as our input; it remains valid until a new file replaces it
        _indexInput = CacheDirectory.OpenInput(name, IOContext.DEFAULT);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
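The BlobClient overload above only re-downloads when the cached length differs from the blob's ContentLength. A minimal sketch of that same check against a plain file on disk, assuming the Azure.Storage.Blobs v12 client (IsCacheStale is an illustrative helper name, not part of AzureDirectory):

using System.IO;
using Azure.Storage.Blobs;

static class CacheCheck
{
    // Returns true when the cached copy is missing or its length no longer matches the blob.
    public static bool IsCacheStale(string cachedPath, BlobClient blob)
    {
        if (!File.Exists(cachedPath))
        {
            return true; // nothing cached yet, must download
        }

        long cachedLength = new FileInfo(cachedPath).Length;
        long blobLength = blob.GetProperties().Value.ContentLength;

        // the blob in storage is the source of truth; re-download on any mismatch
        return cachedLength != blobLength;
    }
}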
public AzureIndexInput(AzureDirectory azureDirectory, CloudBlob blob)
{
    _name = blob.Uri.Segments[blob.Uri.Segments.Length - 1];
#if FULLDEBUG
    Debug.WriteLine(string.Format("opening {0}", _name));
#endif
    _fileMutex = BlobMutexManager.GrabMutex(_name);
    _fileMutex.WaitOne();
    try
    {
        _azureDirectory = azureDirectory;
        _blobContainer = azureDirectory.BlobContainer;
        _blob = blob;
        string fileName = _name;

        bool fFileNeeded = false;
        if (!CacheDirectory.FileExists(fileName))
        {
            fFileNeeded = true;
        }
        else
        {
            long cachedLength = CacheDirectory.FileLength(fileName);

            // the CachedLength metadata, when present, records the uncompressed length;
            // otherwise fall back to the blob's own length
            long blobLength = blob.Properties.Length;
            long metadataLength = 0;
            if (long.TryParse(blob.Metadata["CachedLength"], out metadataLength))
            {
                blobLength = metadataLength;
            }

            long longLastModified = 0;
            DateTime blobLastModifiedUTC = blob.Properties.LastModifiedUtc;
            if (long.TryParse(blob.Metadata["CachedLastModified"], out longLastModified))
            {
                blobLastModifiedUTC = new DateTime(longLastModified).ToUniversalTime();
            }

            if (cachedLength != blobLength)
            {
                fFileNeeded = true;
            }
            else
            {
                // the timestamps occasionally differ by a tick or so, so treat files with the
                // same length whose modification times are within one second as up to date
                DateTime cachedLastModifiedUTC = new DateTime(CacheDirectory.FileModified(fileName), DateTimeKind.Local).ToUniversalTime();
                if (cachedLastModifiedUTC != blobLastModifiedUTC)
                {
                    TimeSpan timeSpan = blobLastModifiedUTC.Subtract(cachedLastModifiedUTC);
                    if (timeSpan.TotalSeconds > 1)
                    {
                        fFileNeeded = true;
                    }
                    else
                    {
#if FULLDEBUG
                        Debug.WriteLine(timeSpan.TotalSeconds);
#endif
                        // within tolerance: the cached file is still usable
                    }
                }
            }
        }

        // download if the file is missing from the local cache,
        // or if it is stale relative to the blob in storage (which is the source of truth)
        if (fFileNeeded)
        {
#if COMPRESSBLOBS
            if (_azureDirectory.ShouldCompressFile(_name))
            {
                // pull the compressed blob fresh into a local memory buffer
                MemoryStream deflatedStream = new MemoryStream();
                _blob.DownloadToStream(deflatedStream);
                Debug.WriteLine(string.Format("GET {0} RETRIEVED {1} bytes", _name, deflatedStream.Length));

                // seek back to the beginning
                deflatedStream.Seek(0, SeekOrigin.Begin);

                // open the output file for the uncompressed contents
                StreamOutput fileStream = _azureDirectory.CreateCachedOutputAsStream(fileName);

                // decompress into the local cache file
                DeflateStream decompressor = new DeflateStream(deflatedStream, CompressionMode.Decompress);
                byte[] bytes = new byte[65535];
                int nRead = 0;
                do
                {
                    nRead = decompressor.Read(bytes, 0, 65535);
                    if (nRead > 0)
                    {
                        fileStream.Write(bytes, 0, nRead);
                    }
                } while (nRead == 65535);
                decompressor.Close(); // this also closes the underlying deflatedStream
                fileStream.Close();
            }
            else
#endif
            {
                StreamOutput fileStream = _azureDirectory.CreateCachedOutputAsStream(fileName);

                // get the blob
                _blob.DownloadToStream(fileStream);
                fileStream.Flush();
                Debug.WriteLine(string.Format("GET {0} RETRIEVED {1} bytes", _name, fileStream.Length));
                fileStream.Close();
            }

            // and open the cached copy as our input
            _indexInput = CacheDirectory.OpenInput(fileName);
        }
        else
        {
#if FULLDEBUG
            Debug.WriteLine(string.Format("Using cached file for {0}", _name));
#endif
            // open the cached file in read-only mode
            _indexInput = CacheDirectory.OpenInput(fileName);
        }
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
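In the COMPRESSBLOBS branch above, the manual read loop stops as soon as Read returns fewer than 65535 bytes, which a DeflateStream does not strictly guarantee to mean end of stream. A hedged alternative sketch using Stream.CopyTo (not how the library itself is written, just an equivalent way to drain the stream) is:

using System.IO;
using System.IO.Compression;

static class BlobInflater
{
    // Inflates a DEFLATE-compressed blob stream into the local cache stream.
    // CopyTo keeps reading until the decompressor reports end of stream, so it does not
    // depend on Read returning exactly 65535 bytes per call.
    public static void Inflate(Stream deflatedBlob, Stream cachedFile)
    {
        deflatedBlob.Seek(0, SeekOrigin.Begin);
        using (var decompressor = new DeflateStream(deflatedBlob, CompressionMode.Decompress))
        {
            decompressor.CopyTo(cachedFile);
        }
        cachedFile.Flush();
    }
}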