/// <summary>
/// Entry point for a deep merge: prefetches any value holders already queued on the
/// handle, runs the recursive merge, and then drains deferred work (value holders to
/// prefetch and runnables to invoke) until no new work is produced.
/// </summary>
/// <param name="obj">The object (or object graph root) to merge.</param>
/// <param name="handle">Carries the pending value holders and pending runnables produced during the merge.</param>
protected void MergeDeepStart(Object obj, MergeHandle handle)
{
    if (handle.PendingValueHolders.Count > 0)
    {
        // Initialize value holders queued before this merge starts, then reset the queue.
        IList<Object> pendingValueHolders = handle.PendingValueHolders;
        PrefetchHelper.Prefetch(pendingValueHolders);
        pendingValueHolders.Clear();
    }
    MergeDeepIntern(obj, handle);
    while (true)
    {
        IList<IBackgroundWorkerDelegate> pendingRunnables = handle.PendingRunnables;
        IList<Object> pendingValueHolders = handle.PendingValueHolders;
        if (pendingValueHolders.Count == 0 && pendingRunnables.Count == 0)
        {
            // No deferred work remains - the merge is fully resolved.
            return;
        }
        if (pendingValueHolders.Count > 0)
        {
            PrefetchHelper.Prefetch(pendingValueHolders);
            pendingValueHolders.Clear();
        }
        if (pendingRunnables.Count > 0)
        {
            // Work on a snapshot because invoking a runnable may queue new runnables.
            IList<IBackgroundWorkerDelegate> pendingRunnablesClone = new List<IBackgroundWorkerDelegate>(pendingRunnables);
            // FIX: clear the live queue before invoking the snapshot. The original code never
            // cleared it, so the loop's exit condition (Count == 0) could never become true for
            // runnables and the same delegates would be re-invoked on every iteration.
            pendingRunnables.Clear();
            for (int a = 0, size = pendingRunnablesClone.Count; a < size; a++)
            {
                pendingRunnablesClone[a].Invoke();
            }
        }
    }
}
/// <summary>
/// Verifies that asynchronously enumerating a prefetch query materializes exactly the
/// expected columns for each invoice and for its designated employee, and that the
/// prefetch task executor ran the expected number of times.
/// </summary>
public async Task PrefetchSingleAsyncTest()
{
    Require.ProviderIsNot(StorageProvider.Firebird);

    // Collect every invoice key in an isolated session/transaction first.
    List<Key> keys;
    using (var session = Domain.OpenSession())
    using (var tx = session.OpenTransaction())
    {
        keys = session.Query.All<Invoice>().Select(o => o.Key).ToList();
    }

    using (var session = Domain.OpenSession())
    using (var tx = session.OpenTransaction())
    {
        var invoiceType = Domain.Model.Types[typeof(Invoice)];
        var employeeType = Domain.Model.Types[typeof(Employee)];
        var employeeField = invoiceType.Fields["DesignatedEmployee"];
        var invoicesField = employeeType.Fields["Invoices"];
        var invoices = session.Query.Many<Invoice>(keys)
            .Prefetch(o => o.DesignatedEmployee.Invoices)
            .AsAsync();

        var processed = 0;
        foreach (var invoice in await invoices)
        {
            // Enumeration order must match the key order the query was built from.
            Assert.AreEqual(keys[processed], invoice.Key);
            processed++;

            // Only default-loaded fields plus the prefetched employee reference may be loaded.
            PrefetchTestHelper.AssertOnlySpecifiedColumnsAreLoaded(invoice.Key, invoiceType, session,
                field => PrefetchHelper.IsFieldToBeLoadedByDefault(field)
                    || field.Equals(employeeField)
                    || (field.Parent != null && field.Parent.Equals(employeeField)));

            var state = session.EntityStateCache[invoice.Key, true];
            PrefetchTestHelper.AssertOnlySpecifiedColumnsAreLoaded(
                state.Entity.GetFieldValue<Employee>(employeeField).Key,
                employeeType,
                session,
                field => PrefetchHelper.IsFieldToBeLoadedByDefault(field) || field.Equals(invoicesField));
        }

        Assert.AreEqual(keys.Count, processed);
        Assert.AreEqual(12, session.Handler.PrefetchTaskExecutionCount);
    }
}
/// <summary>
/// Fetches an <see cref="EntityState"/> by scheduling a prefetch of the fields loaded
/// by default for the key's type and synchronously executing the pending prefetch tasks.
/// </summary>
/// <param name="key">The key.</param>
/// <returns>The fetched <see cref="EntityState"/>, or <see langword="null"/> when the state cannot be looked up.</returns>
public override EntityState FetchEntityState(Key key)
{
    var entityType = key.TypeReference.Type;
    var descriptors = PrefetchHelper.GetCachedDescriptorsForFieldsLoadedByDefault(Session.Domain, entityType);
    prefetchManager.Prefetch(key, entityType, descriptors);
    prefetchManager.ExecuteTasks(true);
    if (LookupState(key, out var state))
    {
        return state;
    }
    return null;
}
/// <summary>
/// Exercises PrefetchHelper.AppendToNewlineSeparatedFile against three scenarios:
/// a file that does not exist yet, a file without a trailing newline, and a file
/// that already ends with a newline.
/// </summary>
public void AppendToNewlineSeparatedFileTests()
{
    string repoPath = Path.Combine("mock:", "GVFS", "UnitTests", "Repo");
    MockFileSystem fileSystem = new MockFileSystem(new MockDirectory(repoPath, null, null));
    string testFilePath = Path.Combine(repoPath, "appendTests");

    // Appending to a missing file creates it with the content plus a trailing newline.
    PrefetchHelper.AppendToNewlineSeparatedFile(fileSystem, testFilePath, "expected content line 1");
    fileSystem.ReadAllText(testFilePath).ShouldEqual("expected content line 1\n");

    // A file lacking a trailing newline gets one inserted before the appended line.
    fileSystem.WriteAllText(testFilePath, "existing content");
    PrefetchHelper.AppendToNewlineSeparatedFile(fileSystem, testFilePath, "expected line 2");
    fileSystem.ReadAllText(testFilePath).ShouldEqual("existing content\nexpected line 2\n");

    // A file already ending in a newline must not end up with a blank line in between.
    fileSystem.WriteAllText(testFilePath, "existing content\n");
    PrefetchHelper.AppendToNewlineSeparatedFile(fileSystem, testFilePath, "expected line 2");
    fileSystem.ReadAllText(testFilePath).ShouldEqual("existing content\nexpected line 2\n");
}
/// <summary>
/// Verifies that a prefetch over the "Relation" and "Relations" members initializes
/// the corresponding value holders on every entity returned by the service.
/// </summary>
public void Test_Prefetch()
{
    // NOTE(review): removed an unused local ('Object obj = new UsableObservableCollection<Object>()')
    // that was never referenced anywhere in the test.
    IList<TestEntity> testEntities = HelloWorldService.GetAllTestEntities();
    Assert.AssertNotEquals(0, testEntities.Count);

    IPrefetchHandle prefetch = PrefetchHelper.CreatePrefetch()
        .Add(typeof(TestEntity), "Relation")
        .Add(typeof(TestEntity), "Relations")
        .Build();
    prefetch.Prefetch(testEntities);

    IEntityMetaData metaData = EntityMetaDataProvider.GetMetaData(typeof(TestEntity));
    int indexOfRelation = metaData.GetIndexByRelationName("Relation");
    int indexOfRelations = metaData.GetIndexByRelationName("Relations");
    foreach (TestEntity testEntity in testEntities)
    {
        // Both relation value holders must be initialized after the batched prefetch.
        Assert.AssertTrue(((IObjRefContainer)testEntity).Is__Initialized(indexOfRelation));
        Assert.AssertTrue(((IObjRefContainer)testEntity).Is__Initialized(indexOfRelations));
    }
}
/// <summary>
/// Returns the subset of the given business objects whose configured filter member
/// equals <c>FilterValue</c>. When no filter value is set, the input list is returned
/// unchanged.
/// </summary>
/// <param name="bOsToFilter">The business objects to filter.</param>
/// <returns>The filtered list (or the original list when no filter value is configured).</returns>
public virtual IList<FilterType> Filter(IList<FilterType> bOsToFilter)
{
    if (FilterValue == null)
    {
        // Nothing to filter on - hand the caller's list back untouched.
        return bOsToFilter;
    }
    // Initialize the filter member's value holders in one batch so that reading them
    // inside the loop below does not trigger a lazy load per element.
    IPrefetchHandle prefetchHandle = PrefetchHelper.CreatePrefetch().Add(EntityType, FilterMember).Build();
    prefetchHandle.Prefetch(bOsToFilter);

    IList<FilterType> matches = new List<FilterType>(bOsToFilter.Count);
    foreach (FilterType candidate in bOsToFilter)
    {
        Object memberValue = typeInfoItem.GetValue(candidate);
        if (Object.Equals(memberValue, FilterValue))
        {
            matches.Add(candidate);
        }
    }
    return matches;
}
/// <summary>
/// Applies the given CUD result to the incremental merge's state cache: clones every
/// change container, materializes (or reuses) a working entity for each CREATE, applies
/// primitive and relation update items to the working entities, and finally registers
/// newly created entities and their changed relations with the state cache.
/// </summary>
/// <param name="cudResult">The source change containers together with their original entity refs.</param>
/// <param name="checkBaseState">Forwarded to ApplyRelationUpdateItem; presumably toggles validation
/// against the base state - TODO confirm against that method.</param>
/// <param name="incrementalState">Holds the state cache and the entity/objRef-to-state maps being built.</param>
/// <returns>A new <see cref="CUDResult"/> of the cloned change containers and the cache-backed entities.</returns>
protected ICUDResult ApplyIntern(ICUDResult cudResult, bool checkBaseState, IncrementalMergeState incrementalState)
{
    ICache stateCache = incrementalState.GetStateCache();
    IList<IChangeContainer> allChanges = cudResult.AllChanges;
    IList<Object> originalRefs = cudResult.GetOriginalRefs();
    IList<Object> allObjects = GetAllExistingObjectsFromCache(stateCache, allChanges);
    List<Object> hardRefs = new List<Object>();
    hardRefs.Add(allObjects); // add list as item intended. adding each item of the source is NOT needed
    List<IObjRef> toFetchFromCache = new List<IObjRef>();
    List<DirectValueHolderRef> toPrefetch = new List<DirectValueHolderRef>();
    List<IBackgroundWorkerDelegate> runnables = new List<IBackgroundWorkerDelegate>();
    IEntityFactory entityFactory = this.EntityFactory;
    IdentityHashMap<IObjRef, StateEntry> newObjRefToStateEntryMap = new IdentityHashMap<IObjRef, StateEntry>();
    IdentityHashMap<IChangeContainer, IChangeContainer> alreadyClonedMap = new IdentityHashMap<IChangeContainer, IChangeContainer>();
    List<IChangeContainer> newAllChanges = new List<IChangeContainer>(allChanges.Count);
    // Pass 1: clone each change container by kind and make sure every change has a StateEntry.
    // CREATEs additionally get a freshly instantiated working entity with a DirectObjRef.
    for (int a = 0, size = allChanges.Count; a < size; a++)
    {
        IChangeContainer changeContainer = allChanges[a];
        Object originalEntity = originalRefs[a];
        StateEntry stateEntry = incrementalState.entityToStateMap.Get(originalEntity);
        IChangeContainer newChangeContainer;
        if (changeContainer is CreateContainer)
        {
            newChangeContainer = new CreateContainer();
        }
        else if (changeContainer is UpdateContainer)
        {
            newChangeContainer = new UpdateContainer();
        }
        else
        {
            newChangeContainer = new DeleteContainer();
        }
        newAllChanges.Add(newChangeContainer);
        alreadyClonedMap.Put(changeContainer, newChangeContainer);
        if (!(changeContainer is CreateContainer))
        {
            // UPDATE/DELETE: the working entity already exists in the state cache;
            // register a StateEntry for it if none is known yet.
            Object stateCacheEntity2 = allObjects[a];
            stateEntry = incrementalState.entityToStateMap.Get(stateCacheEntity2);
            if (stateEntry == null)
            {
                stateEntry = new StateEntry(stateCacheEntity2, changeContainer.Reference, incrementalState.entityToStateMap.Count + 1);
                incrementalState.entityToStateMap.Put(stateCacheEntity2, stateEntry);
                incrementalState.objRefToStateMap.Put(stateEntry.objRef, stateEntry);
            }
            // delete & update do not need further handling
            continue;
        }
        // CREATE: build a new working entity (or reuse the one from a prior pass).
        Type realType = changeContainer.Reference.RealType;
        Object stateCacheEntity;
        if (stateEntry == null)
        {
            stateCacheEntity = entityFactory.CreateEntity(realType);
            DirectObjRef directObjRef = new DirectObjRef(realType, stateCacheEntity);
            // Remember which CREATE container produced this entity.
            directObjRef.CreateContainerIndex = a;
            stateEntry = new StateEntry(stateCacheEntity, directObjRef, incrementalState.entityToStateMap.Count + 1);
            incrementalState.entityToStateMap.Put(stateCacheEntity, stateEntry);
            incrementalState.objRefToStateMap.Put(stateEntry.objRef, stateEntry);
            newObjRefToStateEntryMap.Put(changeContainer.Reference, stateEntry);
        }
        else
        {
            stateCacheEntity = stateEntry.entity;
        }
        allObjects[a] = stateCacheEntity;
    }
    // Expose the clone state to downstream code via the thread-local while applying changes.
    cloneStateTL.Value = new CloneState(newObjRefToStateEntryMap, incrementalState);
    try
    {
        // Pass 2 (reverse order): apply primitives and relations to the working entities;
        // DELETEs are only flagged via IDataObject.ToBeDeleted.
        for (int a = allChanges.Count; a-- > 0;)
        {
            IChangeContainer changeContainer = allChanges[a];
            IObjRefContainer entity = (IObjRefContainer)allObjects[a];
            changeContainer = FillClonedChangeContainer(changeContainer, alreadyClonedMap);
            IPrimitiveUpdateItem[] puis;
            IRelationUpdateItem[] ruis;
            if (changeContainer is CreateContainer)
            {
                CreateContainer createContainer = (CreateContainer)changeContainer;
                puis = createContainer.Primitives;
                ruis = createContainer.Relations;
            }
            else if (changeContainer is UpdateContainer)
            {
                UpdateContainer updateContainer = (UpdateContainer)changeContainer;
                puis = updateContainer.Primitives;
                ruis = updateContainer.Relations;
            }
            else
            {
                ((IDataObject)entity).ToBeDeleted = true;
                continue;
            }
            IEntityMetaData metaData = ((IEntityMetaDataHolder)entity).Get__EntityMetaData();
            ApplyPrimitiveUpdateItems(entity, puis, metaData);
            if (ruis != null)
            {
                bool isUpdate = changeContainer is UpdateContainer;
                foreach (IRelationUpdateItem rui in ruis)
                {
                    // May enqueue deferred work into toPrefetch/toFetchFromCache/runnables.
                    ApplyRelationUpdateItem(entity, rui, isUpdate, metaData, toPrefetch, toFetchFromCache, checkBaseState, runnables);
                }
            }
        }
        // Drain deferred work until applying it produces no further work. Runnables are
        // snapshotted because invoking them may enqueue new entries.
        while (toPrefetch.Count > 0 || toFetchFromCache.Count > 0 || runnables.Count > 0)
        {
            if (toPrefetch.Count > 0)
            {
                PrefetchHelper.Prefetch(toPrefetch);
                toPrefetch.Clear();
            }
            if (toFetchFromCache.Count > 0)
            {
                IList<Object> fetchedObjects = stateCache.GetObjects(toFetchFromCache, CacheDirective.None);
                hardRefs.Add(fetchedObjects); // add list as item intended. adding each item of the source is NOT needed
                toFetchFromCache.Clear();
            }
            IBackgroundWorkerDelegate[] runnableArray = runnables.ToArray();
            runnables.Clear();
            foreach (IBackgroundWorkerDelegate runnable in runnableArray)
            {
                runnable();
            }
        }
        // Pass 3 (reverse order): collect the newly created entities and a value-holder ref
        // for every relation touched by a CREATE or UPDATE.
        List<Object> newObjects = new List<Object>(allObjects.Count);
        List<DirectValueHolderRef> changedRelationRefs = new List<DirectValueHolderRef>();
        for (int a = allObjects.Count; a-- > 0;)
        {
            IChangeContainer newChange = newAllChanges[a];
            IRelationUpdateItem[] ruis = null;
            Object entity = allObjects[a];
            if (newChange is CreateContainer)
            {
                newObjects.Add(entity);
                ruis = ((CreateContainer)newChange).Relations;
            }
            else if (newChange is UpdateContainer)
            {
                ruis = ((UpdateContainer)newChange).Relations;
            }
            if (ruis == null)
            {
                continue;
            }
            IEntityMetaData metaData = EntityMetaDataProvider.GetMetaData(entity.GetType());
            foreach (IRelationUpdateItem rui in ruis)
            {
                Member member = metaData.GetMemberByName(rui.MemberName);
                changedRelationRefs.Add(new DirectValueHolderRef((IObjRefContainer)entity, (RelationMember)member));
            }
        }
        if (newObjects.Count > 0)
        {
            // Register the created entities with the (writable) state cache.
            ((IWritableCache)stateCache).Put(newObjects);
        }
        if (changedRelationRefs.Count > 0)
        {
            // Initialize the value holders of all changed relations in one batch.
            PrefetchHelper.Prefetch(changedRelationRefs);
        }
        return(new CUDResult(newAllChanges, allObjects));
    }
    finally
    {
        // Always clear the thread-local clone state, even on failure.
        cloneStateTL.Value = null;
    }
}
/// <summary>
/// Lifecycle hook run after startup: builds and caches the prefetch plan for the
/// "Relations" member of TestEntity.
/// </summary>
public virtual void AfterStarted()
{
    var prefetchConfig = PrefetchHelper.CreatePrefetch().Add(typeof(TestEntity), "Relations");
    prefetch = prefetchConfig.Build();
}
/// <summary>
/// Runs the fast-fetch operation end to end: validates the git installation and the
/// command-line arguments, resolves the target commit-ish, sets up JSON tracing and
/// logging, loads the folder list, and performs the prefetch (with a console status
/// spinner unless verbose output or output redirection is active).
/// </summary>
/// <returns>ExitSuccess when the prefetch completed without failures; otherwise ExitFailure.</returns>
private int ExecuteWithExitCode()
{
    // CmdParser doesn't strip quotes, and Path.Combine will throw
    this.GitBinPath = this.GitBinPath.Replace("\"", string.Empty);
    if (!GVFSPlatform.Instance.GitInstallation.GitExists(this.GitBinPath))
    {
        Console.WriteLine(
            "Could not find git.exe {0}",
            !string.IsNullOrWhiteSpace(this.GitBinPath) ? "at " + this.GitBinPath : "on %PATH%");
        return(ExitFailure);
    }
    // A commit sha and a branch name are mutually exclusive targets.
    if (this.Commit != null && this.Branch != null)
    {
        Console.WriteLine("Cannot specify both a commit sha and a branch name.");
        return(ExitFailure);
    }
    // Default any non-positive thread counts from the machine's processor count
    // (downloads additionally capped at MaxDefaultDownloadThreads).
    this.SearchThreadCount = this.SearchThreadCount > 0 ? this.SearchThreadCount : Environment.ProcessorCount;
    this.DownloadThreadCount = this.DownloadThreadCount > 0 ? this.DownloadThreadCount : Math.Min(Environment.ProcessorCount, MaxDefaultDownloadThreads);
    this.IndexThreadCount = this.IndexThreadCount > 0 ? this.IndexThreadCount : Environment.ProcessorCount;
    this.CheckoutThreadCount = this.CheckoutThreadCount > 0 ? this.CheckoutThreadCount : Environment.ProcessorCount;
    this.GitBinPath = !string.IsNullOrWhiteSpace(this.GitBinPath) ? this.GitBinPath : GVFSPlatform.Instance.GitInstallation.GetInstalledGitBinPath();
    GitEnlistment enlistment = GitEnlistment.CreateFromCurrentDirectory(this.GitBinPath);
    if (enlistment == null)
    {
        Console.WriteLine("Must be run within a git repo");
        return(ExitFailure);
    }
    // Resolve the fetch target: explicit commit, explicit branch, or the current branch.
    string commitish = this.Commit ?? this.Branch;
    if (string.IsNullOrWhiteSpace(commitish))
    {
        GitProcess.Result result = new GitProcess(enlistment).GetCurrentBranchName();
        if (result.HasErrors || string.IsNullOrWhiteSpace(result.Output))
        {
            Console.WriteLine("Could not retrieve current branch name: " + result.Errors);
            return(ExitFailure);
        }
        commitish = result.Output.Trim();
    }
    // An invalid ParentActivityId is reported but not fatal; Guid.Empty is used instead.
    Guid parentActivityId = Guid.Empty;
    if (!string.IsNullOrWhiteSpace(this.ParentActivityId) && !Guid.TryParse(this.ParentActivityId, out parentActivityId))
    {
        Console.WriteLine("The ParentActivityId provided (" + this.ParentActivityId + ") is not a valid GUID.");
    }
    using (JsonTracer tracer = new JsonTracer("Microsoft.Git.FastFetch", parentActivityId, "FastFetch", disableTelemetry: true))
    {
        // Verbose mode traces informational events to the console; otherwise only errors.
        if (this.Verbose)
        {
            tracer.AddDiagnosticConsoleEventListener(EventLevel.Informational, Keywords.Any);
        }
        else
        {
            tracer.AddPrettyConsoleEventListener(EventLevel.Error, Keywords.Any);
        }
        string fastfetchLogFile = Enlistment.GetNewLogFileName(enlistment.FastFetchLogRoot, "fastfetch");
        tracer.AddLogFileEventListener(fastfetchLogFile, EventLevel.Informational, Keywords.Any);
        CacheServerInfo cacheServer = new CacheServerInfo(this.GetRemoteUrl(enlistment), null);
        tracer.WriteStartEvent(
            enlistment.EnlistmentRoot,
            enlistment.RepoUrl,
            cacheServer.Url,
            new EventMetadata
            {
                { "TargetCommitish", commitish },
                { "Checkout", this.Checkout },
            });
        RetryConfig retryConfig = new RetryConfig(this.MaxAttempts, TimeSpan.FromMinutes(RetryConfig.FetchAndCloneTimeoutMinutes));
        PrefetchHelper fetchHelper = this.GetFetchHelper(tracer, enlistment, cacheServer, retryConfig);
        string error;
        if (!PrefetchHelper.TryLoadFolderList(enlistment, this.FolderList, this.FolderListFile, fetchHelper.FolderList, out error))
        {
            tracer.RelatedError(error);
            Console.WriteLine(error);
            return(ExitFailure);
        }
        bool isSuccess;
        try
        {
            // The actual fetch, wrapped so it can run either directly (verbose)
            // or under the console status spinner. FetchExceptions are traced and
            // converted to a failure result here.
            Func<bool> doPrefetch = () =>
            {
                try
                {
                    // No explicit commit means the target is a branch name.
                    bool isBranch = this.Commit == null;
                    fetchHelper.Prefetch(commitish, isBranch);
                    return(!fetchHelper.HasFailures);
                }
                catch (PrefetchHelper.FetchException e)
                {
                    tracer.RelatedError(e.Message);
                    return(false);
                }
            };
            if (this.Verbose)
            {
                isSuccess = doPrefetch();
            }
            else
            {
                isSuccess = ConsoleHelper.ShowStatusWhileRunning(
                    doPrefetch,
                    "Fetching",
                    output: Console.Out,
                    showSpinner: !Console.IsOutputRedirected,
                    gvfsLogEnlistmentRoot: null);
                Console.WriteLine();
                Console.WriteLine("See the full log at " + fastfetchLogFile);
            }
            // Re-check for failures recorded by the helper after the run completes.
            isSuccess &= !fetchHelper.HasFailures;
        }
        catch (AggregateException e)
        {
            // Trace every inner exception of a parallel failure individually.
            isSuccess = false;
            foreach (Exception ex in e.Flatten().InnerExceptions)
            {
                tracer.RelatedError(ex.ToString());
            }
        }
        catch (Exception e)
        {
            isSuccess = false;
            tracer.RelatedError(e.ToString());
        }
        EventMetadata stopMetadata = new EventMetadata();
        stopMetadata.Add("Success", isSuccess);
        tracer.Stop(stopMetadata);
        return(isSuccess ? ExitSuccess : ExitFailure);
    }
}