Example #1
0
		// Add an URL to the cache.   There are times where double work is performed.
		//   e.g. if we get multiple simultaneous requests for the same URL
		//    both might have gotten not found requests and proceeded to create
		//    new entries.  This is a waste as we end up only using one.   But this 
		//    shouldn't occur very often and the inefficiency should be slight and 
		//    temporary.
		//	 In this case, the passed files are deleted and the old ones are used
		// Adds a URL's file list to the cache and returns the cached list.
		// Concurrent requests for the same URL may each build a file set
		// before either is cached; only the first to reach this method wins,
		// and the loser's freshly created files are deleted. That duplicated
		// work should be rare and the waste temporary.
		// NOTE(review): locks on `this`, which is an anti-pattern (any caller
		// can contend on the instance). Kept as-is because other methods of
		// this class may also lock `this`; migrate the whole class to a
		// private lock object in one pass. Also, Timestamp uses DateTime.Now —
		// presumably local time is intended; confirm before switching to Utc.
		public IList Add(string url, IList files)
		{
			CacheEntry entry = null;
			lock (this)
			{
				entry = (CacheEntry) urls[url];
				if (entry != null)
				{
					// Entry already exists: refresh it and discard the
					// duplicate files the caller just created.
					Console.WriteLine("debug: Double execution of url {0}", url);
					entry.Timestamp = DateTime.Now;
					foreach (string file in files)
					{
						try
						{
							File.Delete(file);
						}
						catch (Exception e)
						{
							// Best-effort cleanup: log and continue with the rest.
							Console.WriteLine("An Exception occurred while clearing file cache :" +e.ToString());
						}
					}
				}
				else
				{
					// First time we see this URL: cache the supplied files.
					entry = new CacheEntry(url, files);
					urls.Add(url, entry);
				}
			}
			return entry.Files;
		}
Example #2
0
		// Removes the entry's URL from the cache index and best-effort deletes
		// its backing files. Returns the number of files actually deleted;
		// individual delete failures are logged and skipped.
		// NOTE(review): does no locking itself — presumably the caller holds
		// the cache lock; verify at call sites.
		private int DeleteUrlEntry(CacheEntry ce)
		{
			int deleted = 0;
			urls.Remove(ce.Url);
			foreach (string path in ce.Files)
			{
				try
				{
					File.Delete(path);
					deleted++;
				}
				catch (Exception e)
				{
					// Leave the file behind; it does not count toward the total.
					Console.WriteLine("An Exception occurred while clearing file cache :" +e.ToString());
				}
			}
			return deleted;
		}