/// <summary>
/// Initializes a projected file entry with its name, size, folder flag, and SHA-1 id.
/// </summary>
/// <param name="name">Entry name as projected into the virtual file system.</param>
/// <param name="size">Entry size in bytes (meaning for folders is caller-defined — confirm at call sites).</param>
/// <param name="isFolder">True when the entry represents a folder.</param>
/// <param name="sha">SHA-1 identity of the backing git object.</param>
public ProjectedFileInfo(string name, long size, bool isFolder, Sha1Id sha)
{
    this.Sha = sha;
    this.IsFolder = isFolder;
    this.Size = size;
    this.Name = name;
}
/// <summary>
/// Looks up the cached size for the given SHA in the local SQLite blob-sizes database.
/// </summary>
/// <param name="sha">SHA-1 id of the blob whose size is requested.</param>
/// <param name="length">Receives the blob size in bytes when found; -1 otherwise.</param>
/// <returns>True when a size row exists for the SHA; false when no row matches.</returns>
/// <exception cref="BlobSizesException">Wraps any exception thrown while querying the database.</exception>
public virtual bool TryGetSize(Sha1Id sha, out long length)
{
    length = -1;

    try
    {
        // Reuse the preallocated buffer and bound parameter of the prepared query.
        sha.ToBuffer(this.shaBuffer);
        this.shaParam.Value = this.shaBuffer;

        using (SqliteDataReader sizeReader = this.querySizeCommand.ExecuteReader())
        {
            if (!sizeReader.Read())
            {
                // No row for this SHA: size is not cached locally.
                return false;
            }

            length = sizeReader.GetInt64(0);
            return true;
        }
    }
    catch (Exception e)
    {
        // Surface all database failures to callers under a single exception type.
        throw new BlobSizesException(e);
    }
}
/// <summary>
/// Verifies that Sha1Id.TryParse rejects SHA strings containing lower-case hex digits.
/// </summary>
public void TryParseFailsForLowerCaseShas()
{
    string[] lowerCaseShas =
    {
        "abcdef7890123456789012345678901234567890",
        new string('a', 40),
    };

    foreach (string lowerCaseSha in lowerCaseShas)
    {
        Sha1Id parsed;
        string parseError;
        Sha1Id.TryParse(lowerCaseSha, out parsed, out parseError).ShouldBeFalse();
    }
}
/// <summary>
/// Attempts to resolve this entry's blob size using only local sources, in order:
/// 1) the caller-supplied <paramref name="availableSizes"/> dictionary,
/// 2) the local blob-sizes database via <paramref name="blobSizesConnection"/>,
/// 3) the local git object store via <paramref name="gitObjects"/>.
/// </summary>
/// <param name="tracer">Tracer used to report warnings when the size database throws.</param>
/// <param name="gitObjects">Local git object store, used as the last local fallback.</param>
/// <param name="blobSizesConnection">Connection to the local blob-sizes database.</param>
/// <param name="availableSizes">Optional pre-fetched map of SHA string -> size; may be null.</param>
/// <param name="missingSha">
/// On failure (or database fallthrough) the SHA string that still needs a size; null when the
/// size came from <paramref name="availableSizes"/> or the database without error.
/// </param>
/// <returns>True when the size was found locally and this.Size was populated; false otherwise.</returns>
public bool TryPopulateSizeLocally(
    ITracer tracer,
    GVFSGitObjects gitObjects,
    BlobSizes.BlobSizesConnection blobSizesConnection,
    Dictionary<string, long> availableSizes,
    out string missingSha)
{
    missingSha = null;
    long blobLength = 0;

    // Rebuild the Sha1Id from the three stored byte segments of this entry.
    Sha1Id sha1Id = new Sha1Id(this.shaBytes1through8, this.shaBytes9Through16, this.shaBytes17Through20);

    // Lazily computed: only convert the SHA to a string when a lookup actually needs it.
    string shaString = null;

    // Source 1: sizes the caller already has in memory.
    if (availableSizes != null)
    {
        shaString = this.ConvertShaToString();
        if (availableSizes.TryGetValue(shaString, out blobLength))
        {
            this.Size = blobLength;
            return true;
        }
    }

    // Source 2: the local blob-sizes database. A database failure is logged (not rethrown)
    // so that the git-object fallback below still gets a chance to run.
    try
    {
        if (blobSizesConnection.TryGetSize(sha1Id, out blobLength))
        {
            this.Size = blobLength;
            return true;
        }
    }
    catch (BlobSizesException e)
    {
        EventMetadata metadata = CreateEventMetadata(e);
        missingSha = this.ConvertShaToString();
        metadata.Add(nameof(missingSha), missingSha);
        tracer.RelatedWarning(metadata, $"{nameof(this.TryPopulateSizeLocally)}: Exception while trying to get file size", Keywords.Telemetry);
    }

    // If the catch block did not already set it, reuse the string computed for source 1
    // (when available) rather than converting again.
    if (missingSha == null)
    {
        missingSha = (shaString == null) ? this.ConvertShaToString() : shaString;
    }

    // Source 3: the local git object store.
    if (gitObjects.TryGetBlobSizeLocally(missingSha, out blobLength))
    {
        this.Size = blobLength;

        // There is no flush for this value because it's already local, so there's little loss if it doesn't get persisted
        // But it's faster to wait for some remote call to batch this value into a different flush
        blobSizesConnection.BlobSizesDatabase.AddSize(sha1Id, blobLength);
        return true;
    }

    return false;
}
/// <summary>
/// Verifies that Sha1Id.TryParse rejects null, too-short, and non-hex SHA strings.
/// </summary>
public void TryParseFailsForInvalidShas()
{
    string[] invalidShas =
    {
        null,
        "0",
        "abcdef",
        new string('H', 40),
    };

    foreach (string invalidSha in invalidShas)
    {
        Sha1Id parsed;
        string parseError;
        Sha1Id.TryParse(invalidSha, out parsed, out parseError).ShouldBeFalse();
    }
}
/// <summary>
/// Verifies that Sha1Id.TryParse accepts upper-case SHA strings and round-trips them via ToString.
/// </summary>
public void TryParseSucceedsForUpperCaseShas()
{
    string[] upperCaseShas =
    {
        "ABCDEF7890123456789012345678901234567890",
        new string('A', 40),
    };

    foreach (string upperCaseSha in upperCaseShas)
    {
        Sha1Id parsed;
        string parseError;
        Sha1Id.TryParse(upperCaseSha, out parsed, out parseError).ShouldBeTrue();
        parsed.ToString().ShouldEqual(upperCaseSha);
    }
}
/// <summary>
/// Migrates cached blob sizes from the legacy ESENT PersistentDictionary store into the
/// new BlobSizes (SQLite-backed) store as part of an on-disk format upgrade.
/// Missing source folder is a no-op; a corrupt/old ESENT store is logged and skipped,
/// never fatal to the upgrade.
/// </summary>
/// <param name="tracer">Tracer for progress and warning messages.</param>
/// <param name="enlistmentRoot">Root of the enlistment containing the legacy .gvfs folder.</param>
/// <param name="newBlobSizesRoot">Destination root for the new blob-sizes database.</param>
private void MigrateBlobSizes(ITracer tracer, string enlistmentRoot, string newBlobSizesRoot)
{
    string esentBlobSizeFolder = Path.Combine(enlistmentRoot, GVFSPlatform.Instance.Constants.DotGVFSRoot, BlobSizesName);
    PhysicalFileSystem fileSystem = new PhysicalFileSystem();

    // Nothing to migrate if the legacy store was never created.
    if (!fileSystem.DirectoryExists(esentBlobSizeFolder))
    {
        tracer.RelatedInfo("Copied no ESENT blob size entries. {0} does not exist", esentBlobSizeFolder);
        return;
    }

    try
    {
        using (PersistentDictionary<string, long> oldBlobSizes = new PersistentDictionary<string, long>(esentBlobSizeFolder))
        using (BlobSizes newBlobSizes = new BlobSizes(newBlobSizesRoot, fileSystem, tracer))
        {
            newBlobSizes.Initialize();

            int copiedCount = 0;
            int totalCount = oldBlobSizes.Count;
            foreach (KeyValuePair<string, long> kvp in oldBlobSizes)
            {
                Sha1Id sha1;
                string error;
                if (Sha1Id.TryParse(kvp.Key, out sha1, out error))
                {
                    newBlobSizes.AddSize(sha1, kvp.Value);

                    // Progress log every 5000 copied entries (post-increment: fires on the
                    // 1st, 5001st, ... copied entry).
                    if (copiedCount++ % 5000 == 0)
                    {
                        tracer.RelatedInfo("Copied {0}/{1} ESENT blob size entries", copiedCount, totalCount);
                    }
                }
                else
                {
                    // Unparseable key: skip the entry rather than failing the whole migration.
                    tracer.RelatedWarning($"Corrupt entry ({kvp.Key}) found in BlobSizes, skipping. Error: {error}");
                }
            }

            newBlobSizes.Flush();
            newBlobSizes.Shutdown();
            tracer.RelatedInfo("Upgrade complete: Copied {0}/{1} ESENT blob size entries", copiedCount, totalCount);
        }
    }
    catch (EsentException ex)
    {
        // An unreadable ESENT store only costs the cached sizes; the upgrade continues.
        tracer.RelatedWarning("BlobSizes appears to be from an older version of GVFS and corrupted, skipping upgrade of blob sizes: " + ex.Message);
    }
}
/// <summary>
/// Breadth-first walk of the commit graph (following parents) to decide whether
/// <paramref name="ancestorId"/> is reachable from <paramref name="commit"/>.
/// Note: returns true when the starting commit itself has the ancestor id.
/// </summary>
/// <param name="commit">Commit to start the walk from.</param>
/// <param name="requestContext">Request context passed through to GetParents.</param>
/// <param name="ancestorId">Object id being searched for among the ancestors.</param>
/// <returns>True when the ancestor id is reachable; false otherwise.</returns>
public static bool IsDescendantOf(this TfsGitCommit commit, TeamFoundationRequestContext requestContext, Sha1Id ancestorId)
{
    HashSet<Sha1Id> seen = new HashSet<Sha1Id>();
    Queue<TfsGitCommit> pending = new Queue<TfsGitCommit>();
    pending.Enqueue(commit);

    while (pending.Count > 0)
    {
        TfsGitCommit candidate = pending.Dequeue();

        // Skip commits already examined; merge histories revisit nodes.
        if (!seen.Add(candidate.ObjectId))
        {
            continue;
        }

        if (candidate.ObjectId.Equals(ancestorId))
        {
            return true;
        }

        foreach (TfsGitCommit parent in candidate.GetParents(requestContext))
        {
            pending.Enqueue(parent);
        }
    }

    return false;
}
/// <summary>
/// Pairs a blob's SHA-1 id with its size in bytes.
/// </summary>
/// <param name="sha">SHA-1 id of the blob.</param>
/// <param name="size">Blob size in bytes.</param>
public BlobSize(Sha1Id sha, long size)
{
    this.Size = size;
    this.Sha = sha;
}
/// <summary>
/// Queues a (sha, size) pair for later persistence into the blob-sizes store.
/// </summary>
/// <param name="sha">SHA-1 id of the blob.</param>
/// <param name="size">Blob size in bytes.</param>
public virtual void AddSize(Sha1Id sha, long size)
{
    BlobSize pendingEntry = new BlobSize(sha, size);
    this.queuedSizes.Enqueue(pendingEntry);
}
/// <summary>
/// Converts the Sha1Id to a hex string truncated/limited per the byte-array
/// ToHexString(length) overload.
/// </summary>
/// <param name="id">SHA-1 id to convert.</param>
/// <param name="length">Length argument forwarded to the byte-array ToHexString overload.</param>
/// <returns>Hex-string representation of the id.</returns>
public static string ToHexString(this Sha1Id id, int length) => id.ToByteArray().ToHexString(length);
/// <summary>
/// Converts the full Sha1Id to its hex-string representation.
/// </summary>
/// <param name="id">SHA-1 id to convert.</param>
/// <returns>Hex-string representation of the id.</returns>
public static string ToHexString(this Sha1Id id) => id.ToByteArray().ToHexString();
/// <summary>
/// Walks the commit graph breadth-first through parent links, reporting whether
/// <paramref name="ancestorId"/> appears anywhere in the ancestry (the starting
/// commit itself included).
/// </summary>
/// <param name="commit">Commit whose ancestry is walked.</param>
/// <param name="requestContext">Request context passed through to GetParents.</param>
/// <param name="ancestorId">Object id being searched for.</param>
/// <returns>True when the id is reachable from the commit; false otherwise.</returns>
public static bool IsDescendantOf(this TfsGitCommit commit, TeamFoundationRequestContext requestContext, Sha1Id ancestorId)
{
    var frontier = new Queue<TfsGitCommit>();
    var examined = new HashSet<Sha1Id>();
    frontier.Enqueue(commit);

    while (frontier.Count > 0)
    {
        TfsGitCommit next = frontier.Dequeue();

        // Only process each object id once; shared history makes revisits common.
        if (examined.Add(next.ObjectId))
        {
            if (next.ObjectId.Equals(ancestorId))
            {
                return true;
            }

            foreach (var parent in next.GetParents(requestContext))
            {
                frontier.Enqueue(parent);
            }
        }
    }

    return false;
}
/// <summary>
/// Not supported by this implementation; always throws.
/// </summary>
/// <param name="sha">Unused.</param>
/// <param name="length">Never assigned; the method always throws.</param>
/// <exception cref="NotSupportedException">Always thrown.</exception>
public override bool TryGetSize(Sha1Id sha, out long length) =>
    throw new NotSupportedException("TryGetSize has not been implemented yet.");
/// <summary>
/// Not supported by this implementation; always throws.
/// </summary>
/// <param name="sha">Unused.</param>
/// <param name="length">Unused.</param>
/// <exception cref="NotSupportedException">Always thrown.</exception>
public override void AddSize(Sha1Id sha, long length)
{
    // Fix: the message previously named "SaveValue" (copy-paste from another stub);
    // it must report this method's own name, matching the sibling TryGetSize stub.
    throw new NotSupportedException("AddSize has not been implemented yet.");
}
/// <summary>
/// Reinitializes this entry for reuse: stores the new name, marks the size as
/// unknown (InvalidSize), and splits the raw SHA bytes into the three stored segments.
/// </summary>
/// <param name="name">New entry name.</param>
/// <param name="shaBytes">Raw SHA-1 bytes to split via Sha1Id.ShaBufferToParts.</param>
public void ResetData(LazyUTF8String name, byte[] shaBytes)
{
    // Size is unknown until repopulated by a size lookup.
    this.Size = InvalidSize;
    this.Name = name;
    Sha1Id.ShaBufferToParts(
        shaBytes,
        out this.shaBytes1through8,
        out this.shaBytes9Through16,
        out this.shaBytes17Through20);
}