Example #1
0
		/// <summary>
		/// Tracks the current browser "visit" for this request: identifies the visitor via the
		/// DsiGuid cookie and/or the authenticated UsrK, finds any matching Visit row from the
		/// last 30 minutes, creates a new Visit (or merges duplicates) as needed, and stores the
		/// resulting Visit in HttpContext.Current.Items["CurrentVisit"] for downstream code.
		/// Every step is wrapped in its own try/catch so a failure in one stage cannot abort the
		/// request; failures are logged via SpottedException with the context gathered so far.
		/// </summary>
		void VisitPostAuthenticateRequest()
		{
			// Skip visit tracking for chat/service endpoints and WebResource.axd.
			// NOTE(review): the first check is case-insensitive (ToLower + StartsWith) but the
			// two Equals checks are case-sensitive — "/support/dbchatserver.aspx" would NOT be
			// excluded. Confirm whether that asymmetry is intentional.
			if (!HttpContext.Current.Request.Url.LocalPath.ToLower().StartsWith("/webservices/controls/chatclient/service.asmx") && !HttpContext.Current.Request.Url.LocalPath.Equals("/support/DbChatServer.aspx") && !HttpContext.Current.Request.Url.LocalPath.Equals("/WebResource.axd"))
			{
				Guid guid = Guid.Empty;                        // visitor Guid from the DsiGuid cookie (Guid.Empty = none)
				int usrK = 0;                                  // authenticated user key (0 = anonymous)
				Visit currentVisit = null;                     // the Visit row this request belongs to
				VisitSet vs = null;                            // candidate previous visits from the last 30 minutes
				string userAgent = "";
				bool noLoggedInPreviousVisitExists = true;     // false if this Guid ever had a logged-in visit (anti-false-positive for crawler detection)
				bool browserIsCrawler = false;
				string ipAddress = "";

				try
				{

					#region Get ipAddress
					try
					{
						// NOTE(review): REMOTE_HOST (not REMOTE_ADDR) — with reverse DNS disabled
						// these are usually the same, but confirm against the server config.
						ipAddress = Utilities.TruncateIp(HttpContext.Current.Request.ServerVariables["REMOTE_HOST"]);
					}
					catch (Exception ex)
					{
						SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception getting IpAddress!..."), ex));
					}
					#endregion

					#region Get UserAgent
					try
					{
						if (HttpContext.Current.Request.UserAgent != null)
						{
							userAgent = HttpContext.Current.Request.UserAgent;
						}
					}
					catch (Exception ex)
					{
						SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception getting UserAgent!... IpAddress={0}", ipAddress), ex));
					}
					#endregion

					#region Get browserIsCrawler
					try
					{
						// Crawler flag comes from the ASP.NET browser-capabilities database.
						if (HttpContext.Current.Request.Browser != null)
						{
							browserIsCrawler = HttpContext.Current.Request.Browser.Crawler;
						}
					}
					catch (Exception ex)
					{
						SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception getting BrowserIsCrawler!... UserAgent={0}, IpAddress={1}", userAgent, ipAddress), ex));
					}
					#endregion

					#region Get UsrK
					try
					{
						//The user is authenticated - lets find the UsrK...
						if (HttpContext.Current.User.Identity.IsAuthenticated)
						{
							// Identity.Name is expected to be "<UsrK>&<...>" — the key is the
							// first '&'-delimited segment.
							string usrStr = HttpContext.Current.User.Identity.Name;
							string[] split = usrStr.Split('&');
							usrK = int.Parse(split[0]);
						}
					}
					catch (Exception ex)
					{
						//We might get a corrupt cookie from the client... (not likely because IsAuthenticated should check for this!)
						SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception getting UsrK!... BrowserIsCrawler={0}, UserAgent={1}, IpAddress={2}", browserIsCrawler, userAgent, ipAddress), ex));
					} 
					
					#endregion

					#region Get Guid
					try
					{
						if (HttpContext.Current.Request.Cookies["DsiGuid"] != null)
						{
							//We have a Guid from the client...
							// Guid(string) throws on malformed input — handled by the catch below.
							guid = new Guid(HttpContext.Current.Request.Cookies["DsiGuid"].Value);
						}
					}
					catch (Exception ex)
					{
						//We might get a corrupt cookie from the client...
						SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception getting guid!... BrowserIsCrawler={0}, UserAgent={1}, UsrK={2}, IpAddress={3}", browserIsCrawler, userAgent, usrK, ipAddress), ex));
					}
					#endregion

					#region Get previous Visit(s)
					// Three lookup strategies, in priority order:
					//   1. Guid present            -> match Guid + UsrK in last 30 min
					//   2. no Guid, anonymous      -> fall back to IP address in last 30 min
					//   3. no Guid, but logged in  -> fall back to UsrK in last 30 min
					if (!guid.Equals(Guid.Empty))
					{
						try
						{
							//OK so we've got a Guid, lets see if this page request is part of a previous visit by looking for 
							//visits in the last 30 minutes with this Guid / UsrK combination...
							Query q = new Query();
							q.QueryCondition = new And(
								new Q(Visit.Columns.UsrK, usrK),
								new Q(Visit.Columns.Guid, guid),
								new Q(Visit.Columns.DateTimeLast, QueryOperator.GreaterThan, DateTime.Now.AddMinutes(-30)));
							// Busiest visit first, so the merge branch below keeps the row with the most hits.
							q.OrderBy = new OrderBy(Visit.Columns.Hits, OrderBy.OrderDirection.Descending);
							vs = new VisitSet(q);
						}
						catch (Exception ex)
						{
							SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception getting previous visit(s) [PART 1]... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
						}

						try
						{
							if (browserIsCrawler && (vs == null || vs.Count == 0))
							{
								// if not, still see if we can match the Guid to a previous LOGGED IN visit to reduce chance of detecting a bot when it's not..
								var vs2 = new VisitSet(new Query
								{
									QueryCondition = new And(
										new Q(Visit.Columns.UsrK, QueryOperator.GreaterThan, 0),
										new Q(Visit.Columns.Guid, guid),
										new Q(Visit.Columns.DateTimeLast, QueryOperator.GreaterThan, DateTime.Now.AddMonths(-1))),
									TopRecords = 1
								});
								if (vs2.Count > 0)
								{
									noLoggedInPreviousVisitExists = false;
								}
							}
						}
						catch (Exception ex)
						{
							SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception getting previous visit(s) [PART 2]... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
						}
					}
					else if (usrK == 0)
					{
						try
						{
							//We don't have a Guid from the cookie, and they're not logged in. Either it's their first page 
							//request or they have cookies disabled. Lets see if this IP adress has hit the site in the last 
							//30 minutes...
							Query q = new Query();
							q.QueryCondition = new And(
								new Q(Visit.Columns.UsrK, 0),
								new Q(Visit.Columns.IpAddress, Utilities.TruncateIp(HttpContext.Current.Request.ServerVariables["REMOTE_HOST"])),
								new Q(Visit.Columns.DateTimeLast, QueryOperator.GreaterThan, DateTime.Now.AddMinutes(-30)));
							q.OrderBy = new OrderBy(Visit.Columns.Hits, OrderBy.OrderDirection.Descending);
							vs = new VisitSet(q);
						}
						catch (Exception ex)
						{
							SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception getting previous visit(s) [PART 3]... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
						}
					}
					else
					{
						try
						{
							//This should never happen, but we've found it happening when the Guid doesn't get set to the 
							//cookie properly when it's done just before a redirect? Lets see if this UsrK has hit the site 
							//in the last 30 minutes...
							Query q = new Query();
							q.QueryCondition = new And(
								new Q(Visit.Columns.UsrK, usrK),
								new Q(Visit.Columns.DateTimeLast, QueryOperator.GreaterThan, DateTime.Now.AddMinutes(-30)));
							q.OrderBy = new OrderBy(Visit.Columns.Hits, OrderBy.OrderDirection.Descending);
							vs = new VisitSet(q);
						}
						catch (Exception ex)
						{
							SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception getting previous visit(s) [PART 4]... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
						}
					}
					#endregion

					#region Create / merge visit
					if (vs == null || vs.Count == 0)
					{
						try
						{
							//If we didn't find any visits, lets create one. I wish there was a way we could avoid duplicates!
							// NOTE(review): read-then-insert here is a race — concurrent first requests
							// can create duplicate Visits; the merge branch below compensates after the fact.
							Visit v = new Visit();
							if (guid.Equals(Guid.Empty))
							{
								// No cookie Guid: mint a fresh one and push it back to the client.
								guid = Guid.NewGuid();
								v.Guid = guid;
								v.IsNewGuid = true;
								try
								{
									Cambro.Web.Helpers.SetCookie("DsiGuid", guid.ToString(), true);
								}
								catch (Exception ex)
								{
									SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception setting cookie while creating new visit... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
								}
							}
							else
							{
								v.Guid = guid;
								v.IsNewGuid = false;
							}
							v.UsrK = usrK;
							v.Pages = 0;
							v.Photos = 0;
							v.DateTimeStart = DateTime.Now;
							v.DateTimeLast = DateTime.Now;
							v.IpAddress = ipAddress;
							v.CountryK = Bobs.IpCountry.ClientCountryK();
							if (userAgent.Length > 0)
							{
								// Only count as a crawler when: capabilities say crawler, UA isn't Opera
								// (historically misdetected), the visitor is anonymous, and this Guid has
								// never had a logged-in visit.
								v.IsCrawler = browserIsCrawler && !userAgent.StartsWith("Opera") && usrK == 0 && noLoggedInPreviousVisitExists;
								v.UserAgent = userAgent.TruncateWithDots(400);
							}
							if (usrK > 0 && browserIsCrawler)
							{
								// Logged-in "crawler" is almost certainly a capabilities misdetection — log it.
								SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(
									string.Format("False-positive crawler? UsrK={0}, UserAgent={1}", usrK, userAgent)));
							}
							// NOTE(review): Update() on a freshly-constructed Visit appears to perform
							// the INSERT — confirm against the Visit ORM implementation.
							v.Update();
							currentVisit = v;
						}
						catch (Exception ex)
						{
							SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception creating new visit... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
						}
					}
					else if (vs.Count == 1)
					{
						try
						{
							//If we found just one visit, then great!
							currentVisit = vs[0];
						}
						catch (Exception ex)
						{
							SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception getting single visit from visit set... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
						}
					}
					else// if (vs.Count > 1)
					{
						try
						{
							//If we've found more than one visit from the last half hour, we should merge them together. This 
							//isn't great, and I wish we could avoid duplicates when we create them - good use for a stored 
							//procedure?
							// vs is ordered by Hits descending, so vs[0] (the busiest visit) survives
							// and absorbs the page/photo counts of the duplicates, which are deleted.
							int mergeWithK = vs[0].K;
							int pages = 0;
							int photos = 0;
							for (int i = 1; i < vs.Count; i++)
							{
								pages += vs[i].Pages;
								photos += vs[i].Photos;
								vs[i].Delete();
							}
							Visit.Increment(mergeWithK, pages, photos);
							currentVisit = vs[0];
						}
						catch (Exception ex)
						{
							SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception merging visits... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
						}
					}
					#endregion

					if (currentVisit != null)
					{
						#region Update Guid from Visit if needed
						try
						{
							if (guid.Equals(Guid.Empty))
							{
								//If we don't have a guid, lets create a new one and set it in the cookie. 
								// (Reached via the UsrK-only lookup path, where the visit was found
								// without a cookie Guid — reuse the Guid already stored on the visit.)
								guid = currentVisit.Guid;
								Cambro.Web.Helpers.SetCookie("DsiGuid", guid.ToString(), true);
							}
						}
						catch (Exception ex)
						{
							SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception setting guid at end of visit code... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
						}
						#endregion

						#region Store current visit in HttpContext.Current.Items
						try
						{
							// Per-request stash; downstream code reads Items["CurrentVisit"].
							HttpContext.Current.Items["CurrentVisit"] = currentVisit;
						}
						catch (Exception ex)
						{
							SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Exception setting current visit in HttpContext.Current.Items... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
						}
						#endregion
					}
					else
					{
						SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("Got to end of visit code but didn't have a visit!... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress)));
					}
				}
				catch (Exception ex)
				{
					SpottedException.TryToSaveExceptionAndChildExceptions(new Exception(string.Format("General exception in visit code... UsrK={0}, DsiGuid={1}, UserAgent={2}, BrowserIsCrawler={3}, IpAddress={4}", usrK, guid, userAgent, browserIsCrawler, ipAddress), ex));
				}
			}
		}
		/// <summary>
		/// Convenience overload: extracts request details (URL, client IP, cookie and form data
		/// as XML) from the given HttpContext, plus the user key and visit Guid when available,
		/// then delegates to the primary TryToSaveExceptionAndChildExceptions overload.
		/// </summary>
		/// <param name="exception">The exception to persist (including its inner exceptions).</param>
		/// <param name="currentHttpContext">May be null; when null all request fields default to "".</param>
		/// <param name="currentUsr">May be null; when null the user key is passed as null.</param>
		/// <param name="currentVisit">May be null; when null the browser Guid is passed as null.</param>
		/// <returns>Whatever the delegated overload returns (the saved SpottedException).</returns>
		public static SpottedException TryToSaveExceptionAndChildExceptions(Exception exception, HttpContext currentHttpContext, Usr currentUsr, Visit currentVisit, string currentFilter, string masterPath, string pagePath, int objectFilterK, Model.Entities.ObjectType? objectFilterType)
		{
			// Request-derived context; left empty when there is no HTTP context.
			string requestUrl = "";
			string clientIp = "";
			string cookiesAsXml = "";
			string formDataAsXml = "";

			if (currentHttpContext != null)
			{
				var request = currentHttpContext.Request;
				requestUrl = request.Url.ToString();
				clientIp = request.UserHostAddress;
				cookiesAsXml = Utilities.GetCookieDataAsXml(request.Cookies);
				formDataAsXml = Utilities.GetPostDataAsXml(request.Form);
			}

			// Optional identifiers: null signals "not known" to the primary overload.
			int? usrK = currentUsr != null ? (int?)currentUsr.K : null;
			Guid? browserGuid = currentVisit != null ? (Guid?)currentVisit.Guid : null;

			return TryToSaveExceptionAndChildExceptions(exception, requestUrl, currentFilter, masterPath, pagePath, objectFilterK, objectFilterType, cookiesAsXml, formDataAsXml, usrK, browserGuid, clientIp);
		}