/// <summary>
/// HTTP handler entry point: parses the requested extent and zoom level from
/// the query string, prepares the OSM tile cache, renders the tiles and
/// streams the mosaic back to the client as a PNG.
/// </summary>
/// <param name="context">The current HTTP request/response context.</param>
public void ProcessRequest(HttpContext context)
{
    RuntimeManager.BindLicense(ProductCode.EngineOrDesktop);

    // The query-string values are machine-generated ("xmin,ymin,xmax,ymax"
    // with '.' decimal separators), so parse them culture-invariantly.
    // The previous Convert.ToDouble/double.Parse calls used the server
    // culture and failed on locales that use ',' as the decimal separator.
    extent = context.Request.QueryString["Extent"]
        .Split(new[] { ',' })
        .Select(s => double.Parse(s, System.Globalization.CultureInfo.InvariantCulture))
        .ToList();
    level = double.Parse(context.Request.QueryString["level"],
        System.Globalization.CultureInfo.InvariantCulture);

    _config = ConfigHelper.GetConfig(EnumBruTileLayer.OSM);
    _tileSource = _config.CreateTileSource();
    _tileProvider = (WebTileProvider)_tileSource.Provider;
    _cacheDir = CacheSettings.GetCacheFolder();
    _fileCache = CacheDirectory.GetFileCache(_cacheDir, _config, EnumBruTileLayer.OSM);

    Draw();

    // Render the mosaic and stream it out; both the bitmap and the buffer
    // are disposed deterministically.
    using (Bitmap map = mosaicImage())
    using (MemoryStream mem = new MemoryStream())
    {
        map.Save(mem, ImageFormat.Png);
        mem.Seek(0, SeekOrigin.Begin);
        context.Response.ContentType = "image/png";
        mem.CopyTo(context.Response.OutputStream, 4096);
        context.Response.Flush();
    }
}
/// <summary>
/// Opens a named input against the cache directory, marking and checking
/// dirty state first, and logging the access.
/// </summary>
public override IndexInput OpenInput(string name)
{
    SetDirty();
    CheckDirty();

    var entry = new LogEntry(LogLevel.Info, null, $"Opening input for {_oldIndexFolderName}");
    LoggingService.Log(entry);

    return CacheDirectory.OpenInput(name);
}
/// <summary>
/// Click handler: reads the bounding-box coordinates from the four text
/// boxes, builds the tile source/cache for the selected map type and starts
/// the download when any tiles fall inside the extent.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // Validate the selection BEFORE mutating shared state: the original code
    // appended to 'extent' first, so an early return on a missing selection
    // left coordinates behind, and every click grew the list by four values.
    var tile = comboBox1.SelectedItem;
    if (tile == null)
    {
        MessageBox.Show("请选择至少一种地图");
        return;
    }

    // Rebuild the extent (xmin, ymin, xmax, ymax) from scratch on each click.
    // User-entered text is parsed with the current culture on purpose.
    extent.Clear();
    extent.Add(double.Parse(textBox3.Text));
    extent.Add(double.Parse(textBox4.Text));
    extent.Add(double.Parse(textBox5.Text));
    extent.Add(double.Parse(textBox2.Text));

    EnumBruTileLayer enumBruTileLayer =
        (EnumBruTileLayer)Enum.Parse(typeof(EnumBruTileLayer), tile.ToString());
    IConfig config = ConfigHelper.GetConfig(enumBruTileLayer);
    string cacheDir = CacheSettings.GetCacheFolder();
    tileSource = config.CreateTileSource();
    schema = tileSource.Schema;
    tileProvider = (WebTileProvider)tileSource.Provider;
    fileCache = CacheDirectory.GetFileCache(cacheDir, config, enumBruTileLayer);

    _tiles = GetTile();

    // Any() avoids materialising the whole sequence just to count it.
    if (_tiles.Any())
    {
        DownloadTiles();
    }
}
/// <summary>
/// Flushes and closes the cached index output, then copies the finished file
/// from the cache directory into the primary directory, mirroring the cache
/// file's last-write time onto the primary copy. Runs under the per-file mutex.
/// </summary>
/// <param name="disposing">
/// Standard dispose flag. NOTE(review): not consulted, and base.Dispose is not
/// called — confirm this is intentional.
/// </param>
protected override void Dispose(bool disposing)
{
    _fileMutex.WaitOne();
    try
    {
        string fileName = _name;

        // make sure it's all written out
        _indexOutput.Flush();
        long originalLength = _indexOutput.Length;
        _indexOutput.Dispose();

        Stream blobStream;
        //blobStream = new StreamInput(CacheDirectory.OpenInput(fileName,IOContext.DEFAULT));
        try
        {
            // Read the whole cached file into memory...
            var cacheInput = CacheDirectory.OpenInput(fileName, IOContext.DEFAULT);
            byte[] cacheInputBytes = new byte[cacheInput.Length];
            cacheInput.ReadBytes(cacheInputBytes, 0, (int)cacheInputBytes.Length);

            // ...and write it out verbatim to the primary directory.
            using (var primaryOutput = PrimaryDirectory.CreateOutput(fileName, IOContext.DEFAULT))
            {
                primaryOutput.WriteBytes(cacheInputBytes, (int)cacheInputBytes.Length);
                primaryOutput.Flush();
            }
            cacheInput.Dispose();

            // push the blobStream up to the cloud
            //_blob.UploadFromStream(blobStream);

            // set the metadata with the original index file properties
            //_blob.Metadata["CachedLength"] = originalLength.ToString();

            // Mirror the cache file's timestamp onto the primary copy so later
            // cache-vs-primary freshness comparisons line up.
            var cachedFilePath = Path.Combine(_syncDirectory.CacheDirectoryPath, fileName);
            var lastModified = File.GetLastWriteTimeUtc(cachedFilePath);
            var primaryFilePath = Path.Combine(_syncDirectory.PrimaryDirectoryPath, fileName);
            File.SetLastWriteTime(primaryFilePath, lastModified);

            Debug.WriteLine(string.Format("PUT {1} bytes to {0} in primary directory", _name, cacheInputBytes.Length));
        }
        finally
        {
            //blobStream.Dispose();
        }
#if FULLDEBUG
        Debug.WriteLine(string.Format("CLOSED WRITESTREAM {0}", _name));
#endif
        // clean up
        _indexOutput = null;
        GC.SuppressFinalize(this);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Initialises settings for the given cache version, wiring up the default
/// file searches, bundle factories and a lazily-created manifest cache.
/// </summary>
/// <param name="cacheVersion">Version string stored as <c>Version</c>.</param>
public CassetteSettings(string cacheVersion)
{
    Version = cacheVersion;
    DefaultFileSearches = CreateDefaultFileSearches();
    BundleFactories = CreateBundleFactories();

    // The manifest cache is only created on first access.
    cassetteManifestCache = new Lazy<ICassetteManifestCache>(() =>
    {
        var manifestFile = CacheDirectory.GetFile("cassette.xml");
        return new CassetteManifestCache(manifestFile);
    });
}
/// <summary>
/// Creates the directory layout (root, package, cache, bin — in that order),
/// seeds the sources file with its default contents, and returns this
/// instance for chaining.
/// </summary>
public MooDir Initialize()
{
    // Create the root first, then each sub-directory, preserving order.
    foreach (var dir in new[] { Path, PackageDirectory, CacheDirectory, BinDirectory })
    {
        dir.AsDir().Create();
    }

    SourcesFilePath.AsFile().Initialize(DefaultSourcesListText);
    return this;
}
/// <summary>
/// Persists an ETag value for a cached file, stored in a companion file named
/// "name.etag" inside the cache directory.
/// </summary>
/// <param name="name">Cached file the ETag belongs to.</param>
/// <param name="eTag">ETag string to store.</param>
public void WriteCachedFileETag(string name, string eTag)
{
    using (var output = CacheDirectory.CreateOutput($"{name}.etag"))
    {
        output.WriteString(eTag);
    }
}
/// <summary>
/// Combines all settings into a hash code.
/// </summary>
/// <remarks>
/// BUG FIX: '+' binds tighter than '??', so the original expression parsed as
/// <c>ListenEndPoint?.GetHashCode() ?? (0 + AllowedEncryption.GetHashCode() + CacheDirectory.GetHashCode())</c>,
/// silently dropping the encryption and cache-directory hashes whenever
/// ListenEndPoint was non-null. Parenthesising restores the intended sum.
/// </remarks>
public override int GetHashCode()
{
    return MaximumConnections +
           MaximumDownloadSpeed +
           MaximumUploadSpeed +
           MaximumHalfOpenConnections +
           (ListenEndPoint?.GetHashCode() ?? 0) +
           AllowedEncryption.GetHashCode() +
           CacheDirectory.GetHashCode();
}
/// <summary>
/// Opens an index input backed by an Azure blob: ensures a local cached copy
/// exists (downloading it when missing, or when its length differs from the
/// blob's), then opens that cached file for reading. The per-file mutex
/// serialises the check-and-download against concurrent openers.
/// </summary>
public AzureIndexInput(AzureDirectory azureDirectory, string name, BlobClient blob) : base(name)
{
    this._name = name;
    this._azureDirectory = azureDirectory;
#if FULLDEBUG
    Debug.WriteLine($"{_azureDirectory.Name} opening {name} ");
#endif
    _fileMutex = BlobMutexManager.GrabMutex(name);
    _fileMutex.WaitOne();
    try
    {
        _blobContainer = azureDirectory.BlobContainer;
        _blob = blob;

        bool fileNeeded = false;
        if (!CacheDirectory.FileExists(name))
        {
            // No local copy at all — must download.
            fileNeeded = true;
        }
        else
        {
            // Local copy exists — re-download only when its length differs
            // from the blob's. NOTE(review): length is the only staleness
            // check here; a same-size content change would be missed.
            long cachedLength = CacheDirectory.FileLength(name);
            var properties = blob.GetProperties();
            long blobLength = properties.Value?.ContentLength ?? 0;
            if (cachedLength != blobLength)
            {
                fileNeeded = true;
            }
        }

        // if the file does not exist
        // or if it exists and it is older then the lastmodified time in the blobproperties (which always comes from the blob storage)
        if (fileNeeded)
        {
            using (StreamOutput fileStream = _azureDirectory.CreateCachedOutputAsStream(name))
            {
                // get the blob
                _blob.DownloadTo(fileStream);
                fileStream.Flush();
                Debug.WriteLine($"{_azureDirectory.Name} GET {_name} RETREIVED {fileStream.Length} bytes");
            }
        }
#if FULLDEBUG
        Debug.WriteLine($"{_azureDirectory.Name} Using cached file for {name}");
#endif
        // and open it as our input, this is now available forevers until new file comes along
        _indexInput = CacheDirectory.OpenInput(name, IOContext.DEFAULT);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Reads the ETag previously cached for <paramref name="name"/>, or an empty
/// string when no ".etag" companion file exists.
/// </summary>
/// <param name="name">Cached file whose ETag companion is read.</param>
/// <returns>The stored ETag string, or <see cref="String.Empty"/>.</returns>
public string ReadCachedFileETag(string name)
{
    var fileName = name + ".etag";

    // BUG FIX: the existence check previously tested "name" rather than the
    // ".etag" companion file actually opened below, so it could attempt to
    // open a non-existent etag file whenever the data file existed without one.
    if (CacheDirectory.FileExists(fileName) == false)
    {
        return String.Empty;
    }

    using (var input = CacheDirectory.OpenInput(fileName))
    {
        return input.ReadString();
    }
}
/// <summary>
/// Flushes and closes the cached index output, then (if the cached file
/// exists) copies it to the master directory and touches both copies so their
/// modification times stay roughly in sync. Runs under the per-file mutex.
/// </summary>
public override void Close()
{
    _fileMutex.WaitOne();
    try
    {
        string fileName = _name;

        // make sure it's all written out
        if (_indexOutput != null)
        {
            _indexOutput.Flush();
            _indexOutput.Close();
        }

        if (CacheDirectory.FileExists(fileName))
        {
            //open stream to read cache file
            using (var cacheStream = new StreamInput(CacheDirectory.OpenInput(fileName)))
            // push the blobStream up to the master
            using (var masterStream = new StreamOutput(MasterDirectory.CreateOutput(fileName)))
            {
                cacheStream.CopyTo(masterStream);
                masterStream.Flush();
                Trace.WriteLine(string.Format("PUT {1} bytes to {0} in cloud", _name, cacheStream.Length));
            }

            //sync the last file write times - at least get them close.
            //TODO: The alternative would be to force both directory instances to be FSDirectory,
            // or try casting the master directory to FSDirectory to get the raw FileInfo and manually
            // set the lastmodified time - this should work though
            MasterDirectory.TouchFile(fileName);
            CacheDirectory.TouchFile(fileName);
#if FULLDEBUG
            Debug.WriteLine(string.Format("CLOSED WRITESTREAM {0}", _name));
#endif
        }

        // clean up
        _indexOutput = null;
        GC.SuppressFinalize(this);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Creates an index output that writes to the local cache directory; the
/// per-file mutex is held while the underlying output is created.
/// </summary>
public CompositeIndexOutput(SyncDirectory syncDirectory, string name)
{
    _name = name;
    _syncDirectory = syncDirectory;

    _fileMutex = SyncMutexManager.GrabMutex(_name);
    _fileMutex.WaitOne();
    try
    {
        // Work against a local cache copy of the file.
        _indexOutput = CacheDirectory.CreateOutput(_name, IOContext.DEFAULT);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Main batch task: parses the extent, rebuilds the OSM tile cache, renders
/// the tile mosaic and saves it (plus the traffic CSV) into a timestamped
/// output folder under <paramref name="basePath"/>.
/// </summary>
public void DoMainTask(DateTime time, string _extent, string basePath, string tileDir, string csv)
{
    this.basePath = basePath;
    this.csv = csv;

    // The extent string is machine-generated ("xmin,ymin,xmax,ymax" with '.'
    // decimals), so parse it culture-invariantly rather than with the OS locale.
    extent = _extent.Split(new[] { ',' })
        .Select(s => double.Parse(s, System.Globalization.CultureInfo.InvariantCulture))
        .ToList();

    _config = ConfigHelper.GetConfig(EnumBruTileLayer.OSM);
    _tileSource = _config.CreateTileSource();
    _tileProvider = (WebTileProvider)_tileSource.Provider;
    _cacheDir = CacheSettings.GetCacheFolder(tileDir);

    // Retry until the cache directory can be deleted (e.g. while a previous
    // run still holds file locks), logging each failure.
    // NOTE(review): this spins with no delay and never gives up — consider a
    // bounded retry with a short sleep.
    while (true)
    {
        try
        {
            DeleteDirectory(_cacheDir);
            break;
        }
        catch (Exception ex)
        {
            LogManager.LogPath = AppDomain.CurrentDomain.BaseDirectory + "\\log\\";
            LogManager.WriteLog("error", ex.Message);
        }
    }

    _fileCache = CacheDirectory.GetFileCache(_cacheDir, _config, EnumBruTileLayer.OSM);
    Draw();

    // Dispose the bitmap deterministically; the original leaked the GDI+ handle.
    using (var map = mosaicImage())
    {
        DirectoryInfo directory = new DirectoryInfo(
            basePath + time.ToString("yyyy") + "\\" + time.ToString("yyyyMMdd") + "\\" + time.ToString("yyyyMMddHHmm"));
        if (!directory.Exists)
        {
            directory.Create();
        }
        map.Save(directory.FullName + "\\traffic.png");
        CreateTrafficCsv(directory);
    }
}
/// <summary>
/// Creates an index output that writes to the local cache, recording the blob
/// and container the finished file is destined for. The per-file mutex is
/// held while the cache output is created.
/// </summary>
public AzureIndexOutput(AzureDirectory azureDirectory, string name, CloudBlockBlob blob)
{
    _name = name;
    _fileMutex = BlobMutexManager.GrabMutex(_name);
    _fileMutex.WaitOne();
    try
    {
        _azureDirectory = azureDirectory;
        _blobContainer = _azureDirectory.BlobContainer;
        _blob = blob;

        // Work against a local cache copy of the file.
        _indexOutput = CacheDirectory.CreateOutput(_name, IOContext.DEFAULT);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Ensures the cache directory and default cache file exist, returning the
/// full path of the default cache file.
/// </summary>
/// <param name="path">The directory in which the cache files are stored.</param>
/// <returns>Path of the default "cache" file.</returns>
public static string SetupWrite(string path)
{
    // Make sure the containing directory exists.
    if (!Directory.Exists(path))
    {
        Directory.CreateDirectory(path);
    }

    // Build the full path of the default cache file.
    string filePath = CacheDirectory.AppendFileToDirectory(path, "cache", CacheFileType.txt);

    // Create the file if missing, releasing the handle immediately so later
    // writers are not blocked.
    if (!File.Exists(filePath))
    {
        using (File.Create(filePath)) { }
    }

    return filePath;
}
/// <summary>
/// Removes the named file from the SharePoint folder, then deletes any local
/// cached copy along with its ".etag" companion file.
/// </summary>
/// <param name="name">The name.</param>
public override void DeleteFile(string name)
{
    lock (m_syncRoot)
    {
        var fileUrl = SPUtility.ConcatUrls(m_folder.Value.ServerRelativeUrl, name);
        m_web.GetFile(fileUrl).Delete();
    }

    // Drop the cached copy and its etag companion, if present.
    foreach (var cached in new[] { name, name + ".etag" })
    {
        if (CacheDirectory.FileExists(cached))
        {
            CacheDirectory.DeleteFile(cached);
        }
    }
}
/// <summary>
/// Reassembles a downloaded file from its queued segments: writes each
/// completed part at its byte offset into a single output file, deletes the
/// part files, then raises FileCompleted (and QueueCompleted once every file
/// in the queue has been assembled).
/// </summary>
/// <param name="FileId">Identifier of the queued file whose segments are joined.</param>
private void AssembleFile(int FileId)
{
    var segments = queue.Where(q => q.FileID == FileId).OrderBy(q => q.Number);
    string filename = segments.First().Filename;

    // Any failed segment marks the whole file as broken; the upper-cased
    // name signals this in the completed directory.
    bool broken = segments.Any(s => s.Status == JobStatus.Failed);
    if (broken)
    {
        filename = filename.ToUpper();
    }

    string path = Path.Combine(CompletedDirectory.FullName, filename);
    using (FileStream file = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None))
    {
        foreach (var segment in segments)
        {
            if (segment.Status == JobStatus.Complete)
            {
                using (FileStream part = new FileStream(segment.CacheLocation, FileMode.Open, FileAccess.Read, FileShare.None))
                {
                    // A non-negative offset places the part exactly;
                    // otherwise it is appended at the current position.
                    if (segment.ByteOffset > -1)
                    {
                        file.Position = segment.ByteOffset;
                    }
                    part.CopyTo(file);
                }
                File.Delete(segment.CacheLocation);
            }
        }
    }

    assembled[FileId] = true;
    FileCompleted(this, new FileCompletedEventArgs(path, broken));

    // When the last outstanding file is done, drop the whole cache directory.
    if (assembled.All(f => f.Value))
    {
        CacheDirectory.Delete(true);
        QueueCompleted(this, System.EventArgs.Empty);
    }
}
/// <summary>
/// Creates an index output that writes to the local cache directory; the
/// finished file is synced to the master directory when the output closes.
/// </summary>
/// <exception cref="ArgumentNullException">When <paramref name="syncDirectory"/> is null.</exception>
public SyncIndexOutput(SyncDirectory syncDirectory, string name)
{
    if (syncDirectory == null)
    {
        throw new ArgumentNullException(nameof(syncDirectory));
    }

    _name = name;
    _syncDirectory = syncDirectory;

    _fileMutex = SyncMutexManager.GrabMutex(_syncDirectory, _name);
    _fileMutex.WaitOne();
    try
    {
        // Work against a local cache copy of the file.
        _indexOutput = CacheDirectory.CreateOutput(_name);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Disposes the cached Lucene directory and, when this instance owns them,
/// the SharePoint site and web objects.
/// </summary>
/// <param name="disposing">
/// Standard dispose flag. NOTE(review): not consulted here — confirm intentional.
/// </param>
protected override void Dispose(bool disposing)
{
    // When this instance does not own the site/web, skip ALL cleanup.
    // NOTE(review): this also skips CacheDirectory.Dispose() — confirm the
    // cache directory is owned/disposed elsewhere in that case.
    if (m_disposeOfSiteAndWeb == false)
    {
        return;
    }

    CacheDirectory.Dispose();

    lock (m_syncRoot)
    {
        if (m_site != null)
        {
            m_site.Dispose();
            m_site = null;
        }
        if (m_web != null)
        {
            m_web.Dispose();
            m_web = null;
        }
    }
}
/// <summary>
/// Deletes the cache directory and everything beneath it.
/// </summary>
public void Clear()
{
    CacheDirectory.Delete(true);
}
/// <summary>
/// Wipes the cache directory tree and re-initialises an empty cache directory.
/// </summary>
public void ClearCache()
{
    // Recursive delete, then recreate so the cache path exists again.
    CacheDirectory.Delete(true);
    InitChildDirectory(CACHE_DIR);
}
/// <summary>
/// Opens an index input, first making sure the local cache holds an
/// up-to-date copy of the primary directory's file (compared by length, then
/// by last-write time with a one-second tolerance). Stale or missing cache
/// entries are refreshed by copying bytes from the primary directory.
/// Runs entirely under the per-file mutex.
/// </summary>
public CompositeIndexInput(SyncDirectory azuredirectory, string name, string resourceDescription) : base(resourceDescription)
{
    _name = name;
#if FULLDEBUG
    Debug.WriteLine(String.Format("opening {0} ", _name));
#endif
    _fileMutex = SyncMutexManager.GrabMutex(_name);
    _fileMutex.WaitOne();
    try
    {
        _syncDirectory = azuredirectory;
        var fileName = _name;

        var fFileNeeded = false;
        if (!CacheDirectory.FileExists(fileName))
        {
            // No local copy at all — must refresh from the primary directory.
            fFileNeeded = true;
        }
        else
        {
            long cachedLength = CacheDirectory.FileLength(fileName);
            long primaryLength = PrimaryDirectory.FileLength(fileName);
            if (cachedLength != primaryLength)
            {
                fFileNeeded = true;
            }
            else
            {
                // cachedLastModifiedUTC was not ouputting with a date (just time) and the time was always off
                var cachedFilePath = Path.Combine(_syncDirectory.CacheDirectoryPath, fileName);
                var primaryFilePath = Path.Combine(_syncDirectory.PrimaryDirectoryPath, fileName);
                var cachedLastModified = File.GetLastWriteTimeUtc(cachedFilePath);
                var primaryLastModified = File.GetLastWriteTimeUtc(primaryFilePath);
                if (cachedLastModified != primaryLastModified)
                {
                    var timeSpan = primaryLastModified.Subtract(cachedLastModified);
                    // Tolerate up to one second of timestamp skew between copies.
                    if (timeSpan.TotalSeconds > 1)
                    {
                        fFileNeeded = true;
                    }
                    else
                    {
#if FULLDEBUG
                        Debug.WriteLine(timeSpan.TotalSeconds);
#endif
                        // file not needed
                    }
                }
            }
        }

        // if the file does not exist
        // or if it exists and it is older then the lastmodified time in the blobproperties (which always comes from the blob storage)
        if (fFileNeeded)
        {
            // Refresh the cache: read the primary copy fully into memory,
            // then write it out to the cache directory.
            var primaryInput = PrimaryDirectory.OpenInput(fileName, IOContext.DEFAULT);
            byte[] primaryInputBytes = new byte[primaryInput.Length];
            primaryInput.ReadBytes(primaryInputBytes, 0, (int)primaryInputBytes.Length);
            using (var cachedOutput = CacheDirectory.CreateOutput(fileName, IOContext.DEFAULT))
            {
                cachedOutput.WriteBytes(primaryInputBytes, (int)primaryInputBytes.Length);
                cachedOutput.Flush();
            }
            primaryInput.Dispose();

            // Mirror the primary file's timestamp onto the cache copy so the
            // next staleness check passes without a re-copy.
            var cachedFilePath = Path.Combine(_syncDirectory.CacheDirectoryPath, fileName);
            var primaryFilePath = Path.Combine(_syncDirectory.PrimaryDirectoryPath, fileName);
            var primaryLastModified = File.GetLastWriteTimeUtc(primaryFilePath);
            File.SetLastWriteTimeUtc(cachedFilePath, primaryLastModified);

            //using (var fileStream = _azureDirectory.CreateCachedOutputAsStream(fileName))
            //{
            //    // get the blob
            //    _blob.DownloadToStream(fileStream);
            //    fileStream.Flush();
            //    Debug.WriteLine(string.Format("GET {0} RETREIVED {1} bytes", _name, fileStream.Length));
            //}

            // and open it as an input
            _indexInput = CacheDirectory.OpenInput(fileName, IOContext.DEFAULT);
        }
        else
        {
#if FULLDEBUG
            Debug.WriteLine(String.Format("Using cached file for {0}", _name));
#endif
            // open the file in read only mode
            _indexInput = CacheDirectory.OpenInput(fileName, IOContext.DEFAULT);
        }
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Draws the BruTile layer for the geography draw phase: resolves the active
/// map, locates/builds the tile file cache and delegates the actual tile
/// drawing to BruTileHelper. The annotation phase is a no-op.
/// </summary>
public void Draw(esriDrawPhase drawPhase, IDisplay display, ITrackCancel trackCancel)
{
    switch (drawPhase)
    {
        case esriDrawPhase.esriDPGeography:
            if (Valid)
            {
                if (Visible)
                {
                    try
                    {
                        var clipEnvelope = display.ClipEnvelope;

                        // when loading from a file the active map doesn't exist yet
                        // so just deal with it here.
                        if (_map == null)
                        {
                            var mxdoc = (IMxDocument)_application.Document;
                            _map = mxdoc.FocusMap;
                        }
                        Debug.WriteLine("Draw event");
                        var activeView = _map as IActiveView;
                        Logger.Debug("Layer name: " + Name);
                        if (activeView != null)
                        {
                            //_envelope = activeView.Extent;
                            //_envelope = clipEnvelope;
                            Logger.Debug("Draw extent: xmin:" + clipEnvelope.XMin +
                                         ", ymin:" + clipEnvelope.YMin +
                                         ", xmax:" + clipEnvelope.XMax +
                                         ", ymax:" + clipEnvelope.YMax
                                         );
                            if (SpatialReference != null)
                            {
                                Logger.Debug("Layer spatial reference: " + SpatialReference.FactoryCode);
                            }
                            if (_map.SpatialReference != null)
                            {
                                Logger.Debug("Map spatial reference: " + _map.SpatialReference.FactoryCode);
                            }

                            var bruTileHelper = new BruTileHelper(_tileTimeOut);

                            // Convert the 0-100 transparency percentage to a 0-255 alpha value.
                            _displayFilter.Transparency = (short)(255 - ((_transparency * 255) / 100));
                            if (display.Filter == null)
                            {
                                display.Filter = _displayFilter;
                            }

                            // Prefer the config-based cache when a config is present.
                            FileCache fileCache;
                            if (_config != null)
                            {
                                fileCache = CacheDirectory.GetFileCache(_cacheDir, _config, _enumBruTileLayer);
                            }
                            else
                            {
                                fileCache = CacheDirectory.GetFileCache(_cacheDir, _tileSource, _enumBruTileLayer);
                            }

                            // NOTE(review): this dereferences _config without a null
                            // check even though the branch above allows _config == null —
                            // confirm Giscloud layers always carry a config.
                            if (_enumBruTileLayer == EnumBruTileLayer.Giscloud && _config.CreateTileSource().Schema.Format == "jpg")
                            {
                                // potential black borders: need to add a check for clip....
                                bruTileHelper.ClipTilesEnvelope = _envelope;
                            }
                            bruTileHelper.Draw(_application.StatusBar.ProgressBar,
                                               activeView,
                                               fileCache,
                                               trackCancel,
                                               SpatialReference,
                                               ref _currentLevel,
                                               _tileSource,
                                               display,
                                               _auth);
                        }
                    }
                    catch (Exception ex)
                    {
                        // Surface draw-time failures to the user instead of crashing the host.
                        var mbox = new ExceptionMessageBox(ex);
                        mbox.Show(null);
                    }
                } // isVisible
            } // isValid
            break;
        case esriDrawPhase.esriDPAnnotation:
            break;
    }
}
/// <summary>
/// Removes the cache directory tree and recreates it empty.
/// </summary>
public void ClearCache()
{
    // Recursively remove everything, then recreate an empty directory.
    var cache = CacheDirectory;
    cache.Delete(true);
    cache.Create();
}
/// <summary>
/// Opens an index input, first syncing the local cache copy from the master
/// directory when it is missing or stale (compared by length, then by
/// last-modified time with a two-second tolerance). Runs under the per-file
/// mutex shared with SyncIndexOutput.
/// </summary>
/// <exception cref="ArgumentNullException">When <paramref name="directory"/> is null.</exception>
public SyncIndexInput(SyncDirectory directory, string name)
{
    if (directory == null)
    {
        throw new ArgumentNullException(nameof(directory));
    }

    _name = name;
    _syncDirectory = directory;
#if FULLDEBUG
    Trace.WriteLine(String.Format("opening {0} ", _name));
#endif
    _fileMutex = SyncMutexManager.GrabMutex(_syncDirectory, _name);
    _fileMutex.WaitOne();
    try
    {
        var fileName = _name;

        var fFileNeeded = false;
        if (!CacheDirectory.FileExists(fileName))
        {
            // No local copy at all — must sync from master.
            fFileNeeded = true;
        }
        else
        {
            long cachedLength = CacheDirectory.FileLength(fileName);
            long masterLength = MasterDirectory.FileLength(fileName);
            if (cachedLength != masterLength)
            {
                fFileNeeded = true;
            }
            else
            {
                long cacheDate = CacheDirectory.FileModified(fileName);
                long masterDate = MasterDirectory.FileModified(fileName);

                //we need to compare to the second instead of by ticks because when we call
                //TouchFile in SyncIndexOutput this won't set the files to be identical
                // FileModified values are treated as milliseconds since the Unix epoch.
                DateTime start = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
                var cachedLastModifiedUTC = start.AddMilliseconds(cacheDate).ToUniversalTime();
                var masterLastModifiedUTC = start.AddMilliseconds(masterDate).ToUniversalTime();

                if (cachedLastModifiedUTC != masterLastModifiedUTC)
                {
                    var timeSpan = masterLastModifiedUTC.Subtract(cachedLastModifiedUTC);

                    //NOTE: This heavily depends on TouchFile in SyncIndexOutput which sets both the
                    //master and slave files to be 'Now', in theory this operation shouldn't
                    //make the file times any bigger than 1 second
                    if (timeSpan.TotalSeconds > 2)
                    {
                        fFileNeeded = true;
                    }
                    else
                    {
#if FULLDEBUG
                        Debug.WriteLine(timeSpan.TotalSeconds);
#endif
                        // file not needed
                    }
                }
            }
        }

        // if the file does not exist
        // or if it exists and it is older then the lastmodified time in the blobproperties (which always comes from the blob storage)
        if (fFileNeeded)
        {
            //get the master file stream
            using (var masterStream = new StreamInput(MasterDirectory.OpenInput(fileName)))
            using (var cacheStream = _syncDirectory.CreateCachedOutputAsStream(fileName))
            {
                //copy this to the cached file stream
                masterStream.CopyTo(cacheStream);
                cacheStream.Flush();
                Debug.WriteLine(string.Format("GET {0} RETREIVED {1} bytes", _name, cacheStream.Length));
            }

            // and open it as an input
            _indexInput = CacheDirectory.OpenInput(fileName);
        }
        else
        {
#if FULLDEBUG
            Debug.WriteLine(String.Format("Using cached file for {0}", _name));
#endif
            // open the file in read only mode
            _indexInput = CacheDirectory.OpenInput(fileName);
        }
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
/// <summary>
/// Opens the named cached file as a readable stream wrapper.
/// </summary>
internal StreamInput OpenCachedInputAsStream(string name)
{
    var input = CacheDirectory.OpenInput(name);
    return new StreamInput(input);
}
/// <summary>
/// Creates the named cached file and returns it as a writable stream wrapper.
/// </summary>
internal StreamOutput CreateCachedOutputAsStream(string name)
{
    var output = CacheDirectory.CreateOutput(name);
    return new StreamOutput(output);
}