private void getTablesInDatabase(Resource parentResource, object userState)
{
    database = new Database(parentResource.Url);
    database.GetTablesInDatabaseFailed += (o, e) =>
    {
        OnGetChildResourcesFailed(e);
    };
    database.GetTablesInDatabaseCompleted += (o, e) =>
    {
        OnGetChildResourcesCompleted(new GetChildResourcesCompletedEventArgs()
        {
            ChildResources = e.ChildResources,
            UserState = e.UserState
        });
    };
    database.GetTables(userState);
}
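// For reference, a minimal sketch of the event-args shapes the wiring above relies on.
// These definitions are hypothetical, inferred only from the properties used here
// (ChildResources and UserState); the actual classes are defined elsewhere in the project
// and may differ, so the sketch is left commented out.
//
// public class GetChildResourcesCompletedEventArgs : EventArgs
// {
//     public IEnumerable<Resource> ChildResources { get; set; }
//     public object UserState { get; set; }
// }
//
// public class GetCatalogCompletedEventArgs : EventArgs
// {
//     public IEnumerable<Resource> ChildResources { get; set; }
//     public object UserState { get; set; }
// }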
private void DownloadStringCompleted(object sender, DownloadStringCompletedEventArgs e)
{
    if (e.Cancelled)
        return;

    if (e.Error != null)
    {
        bool redownloadAttempted = WebClientFactory.RedownloadAttempted.Contains(webClient);
        if (Utils.IsMessageLimitExceededException(e.Error) && !redownloadAttempted)
        {
            // Re-issue the request so it can be served from cache. This avoids the error
            // caused by setting AllowReadStreamBuffering=false, which was itself a
            // workaround for Silverlight 4's handling of gzipped content.
            WebClientFactory.RedownloadStringAsync(webClient, finalUrl, e.UserState);
        }
        else
        {
            if (redownloadAttempted)
                WebClientFactory.RedownloadAttempted.Remove(webClient);
            OnGetCatalogFailed(new ExceptionEventArgs(e.Error, e.UserState));
        }
        return;
    }

    if (string.IsNullOrEmpty(e.Result))
    {
        OnGetCatalogFailed(new ExceptionEventArgs(new Exception(Resources.Strings.ExceptionEmptyResponse), e.UserState));
        return;
    }

    string json = e.Result;
    Exception exception = Utils.CheckJsonForException(json);
    if (exception != null)
    {
        OnGetCatalogFailed(new ExceptionEventArgs(exception, e.UserState));
        return;
    }

    DatabaseCatalog catalog = null;
    try
    {
        byte[] bytes = Encoding.Unicode.GetBytes(json);
        using (System.IO.MemoryStream memoryStream = new System.IO.MemoryStream(bytes))
        {
            DataContractJsonSerializer dataContractJsonSerializer = new DataContractJsonSerializer(typeof(DatabaseCatalog));
            catalog = dataContractJsonSerializer.ReadObject(memoryStream) as DatabaseCatalog;
            memoryStream.Close();
        }
    }
    catch (Exception ex)
    {
        OnGetCatalogFailed(new ExceptionEventArgs(ex, e.UserState));
        return;
    }

    if (catalog == null)
    {
        OnGetCatalogFailed(new ExceptionEventArgs(new Exception(Resources.Strings.ExceptionUnableToDeserializeCatalog), e.UserState));
        return;
    }

    List<Resource> childResources = new List<Resource>();
    int totalDatabasesCount = catalog.Databases == null ? 0 : catalog.Databases.Count;
    if (catalog.Databases != null)
    {
        int databaseCount = 0;
        foreach (string databaseName in catalog.Databases)
        {
            Resource databaseResource = new Resource()
            {
                DisplayName = databaseName,
                Url = string.Format("{0}/{1}", Uri, databaseName),
                ResourceType = ResourceType.Database,
            };

            if (!FilterForSpatialContent)
            {
                childResources.Add(databaseResource);
            }
            else
            {
                Database db = new Database(databaseResource.Url) { FilterForSpatialContent = true };
                db.GetTablesInDatabaseFailed += (o, args) =>
                {
                    // Remove the database
                    childResources.Remove(args.UserState as Resource);
                    databaseCount++;
                    if (databaseCount >= totalDatabasesCount)
                    {
                        // All databases processed; raise the event
                        OnGetCatalogFailed(args);
                    }
                };
                db.GetTablesInDatabaseCompleted += (o, args) =>
                {
                    databaseCount++;
                    bool hasAtleastOneSpatialTable = args.ChildResources.Count() > 0;
                    if (!hasAtleastOneSpatialTable)
                    {
                        // Remove the database
                        childResources.Remove(args.UserState as Resource);
                    }
                    if (databaseCount >= totalDatabasesCount)
                    {
                        // All databases processed; raise the event
                        OnGetCatalogRequestCompleted(new GetCatalogCompletedEventArgs()
                        {
                            ChildResources = childResources,
                            UserState = e.UserState
                        });
                    }
                };

                // Add the database prior to validation to preserve catalog order. It will be
                // removed if validation fails.
                Resource child = new Resource()
                {
                    DisplayName = databaseName,
                    Url = string.Format("{0}/{1}", Uri, databaseName),
                    ResourceType = ResourceType.Database,
                };
                childResources.Add(child);
                db.GetTables(child);
            }
        }
    }

    if (!FilterForSpatialContent || totalDatabasesCount == 0)
    {
        OnGetCatalogRequestCompleted(new GetCatalogCompletedEventArgs()
        {
            ChildResources = childResources,
            UserState = e.UserState
        });
    }
}
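// A minimal sketch of the data contract the deserialization above assumes. The real
// DatabaseCatalog type is defined elsewhere in this project; this hypothetical version
// reflects only what the handler reads from it (a collection of database names), and the
// serialized member name is an assumption. It is left commented out so it does not shadow
// the actual type used by typeof(DatabaseCatalog).
//
// [DataContract]
// public class DatabaseCatalog
// {
//     // DataContractJsonSerializer maps this to a JSON array of strings,
//     // e.g. { "Databases": ["Sales", "Inventory"] }
//     [DataMember]
//     public List<string> Databases { get; set; }
// }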