/// <summary>
/// Loads all categories into <c>models</c>, preferring the local cache while the
/// server connection is unavailable; otherwise runs the query (directly through the
/// dispatcher when one is supplied, else through the next middleware) and persists
/// the fresh result to local storage.
/// </summary>
/// <param name="dispatcher">Optional dispatcher to run the query through; may be null.</param>
/// <param name="next">Next middleware delegate, used when no dispatcher is given.</param>
/// <param name="listAll">The list-all-categories query to execute.</param>
private async Task LoadAllAsync(HttpQueryDispatcher dispatcher, HttpQueryDispatcher.Next next, ListAllCategory listAll)
{
    models.Clear();

    // Offline: serve from the local cache when it has anything; null means
    // nothing cached yet, so fall through and query anyway.
    if (!serverConnection.IsAvailable)
    {
        var cached = await localStorage.LoadAsync();
        if (cached != null)
        {
            models.AddRange(cached);
            return;
        }
    }

    if (dispatcher == null)
    {
        // No dispatcher supplied — delegate to the next middleware in the chain.
        models.AddRange((List<CategoryModel>)await next(listAll));
    }
    else
    {
        models.AddRange(await dispatcher.QueryAsync(listAll));
    }

    // Refresh the cache with whatever the query produced.
    await localStorage.SaveAsync(models);
}
/// <summary>
/// Loads all categories into <c>models</c> with debug logging. When the server is
/// unavailable, serves from the local cache if it exists. With a dispatcher, the
/// query is re-dispatched and its result intentionally discarded here — the
/// re-dispatch re-enters this middleware, and the dispatcher-less branch does the
/// populating and caching. Without a dispatcher, the next middleware runs, and the
/// result is stored both in <c>models</c> and in local storage.
/// </summary>
/// <param name="dispatcher">Optional dispatcher to re-dispatch the query through; may be null.</param>
/// <param name="next">Next middleware delegate, used when no dispatcher is given.</param>
/// <param name="listAll">The list-all-categories query to execute.</param>
private async Task LoadAllAsync(HttpQueryDispatcher dispatcher, HttpQueryDispatcher.Next next, ListAllCategory listAll)
{
    models.Clear();

    // Offline: serve from the local cache when it has anything; null means
    // nothing cached yet, so fall through and query anyway.
    if (!serverConnection.IsAvailable)
    {
        // Plain literal — the original used an interpolated string with no holes.
        log.Debug("Using local storage.");
        var items = await localStorage.LoadAsync();
        if (items != null)
        {
            models.AddRange(items);
            return;
        }
    }

    if (dispatcher != null)
    {
        // Fixed typo in log message: "brach" -> "branch".
        log.Debug("Using dispatcher to run the query. Skipping the query result as other branch will process it.");
        await dispatcher.QueryAsync(listAll);
    }
    else
    {
        log.Debug("Using next middleware to run the query.");
        models.AddRange((List<CategoryModel>)await next(listAll));
        log.Debug("Storing to local storage.");
        await localStorage.SaveAsync(models);
    }
}
/// <summary>
/// Loads all categories into <c>models</c>: serves the local cache while the
/// network is offline (if anything is cached), otherwise runs the query through
/// the next middleware and persists the result to local storage.
/// </summary>
/// <param name="listAll">The list-all-categories query to execute.</param>
/// <param name="next">Next middleware delegate that actually runs the query.</param>
private async Task LoadAllAsync(ListAllCategory listAll, HttpQueryDispatcher.Next next)
{
    models.Clear();

    // Offline path: a null cache means nothing stored yet, so fall through
    // and run the query anyway.
    if (!network.IsOnline)
    {
        var cached = await localStorage.LoadAsync();
        if (cached != null)
        {
            models.AddRange(cached);
            return;
        }
    }

    var fresh = (List<CategoryModel>)await next(listAll);
    models.AddRange(fresh);

    // Refresh the cache with the query result.
    await localStorage.SaveAsync(models);
}