/// <summary>
/// Build a map of pack file name to open PackFile for every valid pack in
/// the old list, so a directory rescan can reuse already-open packs rather
/// than reopening them. Corrupted packs are closed and excluded.
/// </summary>
private static IDictionary<string, PackFile> ReuseMap(ObjectDirectory.PackList old)
{
	IDictionary<string, PackFile> reusable = new Dictionary<string, PackFile>();
	foreach (PackFile pack in old.packs)
	{
		if (pack.Invalid())
		{
			// A corrupted pack cannot be safely used again. Close it now
			// and leave it out of the reuse map.
			//
			pack.Close();
			continue;
		}
		PackFile existing = reusable.Put(pack.GetPackFile().GetName(), pack);
		if (existing != null)
		{
			// This should never occur: all packs came from one directory,
			// so two with the same name should be impossible. If it does
			// happen, restore the first instance (readers are likely to
			// be actively using it) and close the duplicate, since we
			// promised to close any PackFile we did not reuse.
			//
			reusable.Put(existing.GetPackFile().GetName(), existing);
			pack.Close();
		}
	}
	return reusable;
}
/// <summary>
/// Rescan the pack directory, serialized on the pack list monitor so only
/// one thread scans at a time, and publish the result via compare-and-set.
/// </summary>
private ObjectDirectory.PackList ScanPacks(ObjectDirectory.PackList original)
{
	lock (packList)
	{
		ObjectDirectory.PackList current;
		ObjectDirectory.PackList scanned;
		do
		{
			current = packList.Get();
			if (current != original)
			{
				// Another thread completed the scan while we were blocked
				// on the monitor above; its result is fresh enough.
				//
				return current;
			}
			scanned = ScanPacksImpl(current);
			if (scanned == current)
			{
				return scanned;
			}
		}
		while (!packList.CompareAndSet(current, scanned));
		return scanned;
	}
}
/// <summary>
/// Publish a newly created pack at the front of the pack list, unless a
/// concurrent directory scan has already picked it up.
/// </summary>
private void InsertPack(PackFile pf)
{
	ObjectDirectory.PackList current;
	ObjectDirectory.PackList updated;
	do
	{
		current = packList.Get();
		// Skip the insert when the pack is already present (a concurrent
		// thread's scan may have discovered it first). The list is kept
		// in PackFile.SORT order, so once the new pack would sort before
		// the entry under inspection no later entry can match.
		//
		PackFile[] existing = current.packs;
		string name = pf.GetPackFile().GetName();
		foreach (PackFile p in existing)
		{
			if (PackFile.SORT.Compare(pf, p) < 0)
			{
				break;
			}
			if (name.Equals(p.GetPackFile().GetName()))
			{
				return;
			}
		}
		PackFile[] grown = new PackFile[1 + existing.Length];
		grown[0] = pf;
		System.Array.Copy(existing, 0, grown, 1, existing.Length);
		updated = new ObjectDirectory.PackList(current.snapshot, grown);
	}
	while (!packList.CompareAndSet(current, updated));
}
/// <summary>
/// Determine the inflated size of an object by searching every known pack.
/// </summary>
/// <param name="curs">window cursor used to access pack data.</param>
/// <param name="objectId">identity of the object to size.</param>
/// <returns>the object size, or -1 if no pack contains the object.</returns>
/// <exception cref="System.IO.IOException"></exception>
internal override long GetObjectSize1(WindowCursor curs, AnyObjectId objectId)
{
	ObjectDirectory.PackList pList = packList.Get();
	for (; ;)
	{
		bool packsChanged = false;
		foreach (PackFile p in pList.packs)
		{
			try
			{
				long sz = p.GetObjectSize(curs, objectId);
				if (0 <= sz)
				{
					return sz;
				}
			}
			catch (PackMismatchException)
			{
				// Pack was modified; refresh the entire pack list and
				// restart the search from the beginning.
				//
				pList = ScanPacks(pList);
				packsChanged = true;
				break;
			}
			catch (IOException)
			{
				// Assume the pack is corrupted and drop it from the list.
				//
				RemovePack(p);
			}
		}
		if (!packsChanged)
		{
			// Every pack was consulted without finding the object.
			return -1;
		}
	}
}
/// <summary>
/// Scan the pack directory and build a PackList reflecting its current
/// contents, reusing already-open PackFile instances from the old list
/// where possible. Returns the old list itself when nothing changed.
/// </summary>
private ObjectDirectory.PackList ScanPacksImpl(ObjectDirectory.PackList old)
{
	// Map of pack name -> still-valid open PackFile from the prior list.
	IDictionary<string, PackFile> forReuse = ReuseMap(old);
	// Capture directory state BEFORE listing, so any concurrent change
	// made after this point is caught by a later IsModified() check.
	FileSnapshot snapshot = FileSnapshot.Save(packDirectory);
	ICollection<string> names = ListPackDirectory();
	IList<PackFile> list = new AList<PackFile>(names.Count >> 2);
	bool foundNew = false;
	foreach (string indexName in names)
	{
		// Must match "pack-[0-9a-f]{40}.idx" to be an index.
		//
		if (indexName.Length != 49 || !indexName.EndsWith(".idx"))
		{
			continue;
		}
		string @base = Sharpen.Runtime.Substring(indexName, 0, indexName.Length - 4);
		string packName = @base + ".pack";
		if (!names.Contains(packName))
		{
			// Sometimes C Git's HTTP fetch transport leaves a
			// .idx file behind and does not download the .pack.
			// We have to skip over such useless indexes.
			//
			continue;
		}
		PackFile oldPack = Sharpen.Collections.Remove(forReuse, packName);
		if (oldPack != null)
		{
			// Pack already open from the prior list; keep that instance.
			list.AddItem(oldPack);
			continue;
		}
		FilePath packFile = new FilePath(packDirectory, packName);
		FilePath idxFile = new FilePath(packDirectory, indexName);
		list.AddItem(new PackFile(idxFile, packFile));
		foundNew = true;
	}
	// If we did not discover any new files, the modification time was not
	// changed, and we did not remove any files, then the set of files is
	// the same as the set we were given. Instead of building a new object
	// return the same collection.
	//
	if (!foundNew && forReuse.IsEmpty() && snapshot.Equals(old.snapshot))
	{
		old.snapshot.SetClean(snapshot);
		return (old);
	}
	// Anything left in the reuse map vanished from disk; close it now.
	foreach (PackFile p in forReuse.Values)
	{
		p.Close();
	}
	if (list.IsEmpty())
	{
		return (new ObjectDirectory.PackList(snapshot, NO_PACKS.packs));
	}
	PackFile[] r = Sharpen.Collections.ToArray(list, new PackFile[list.Count]);
	// Keep packs in search order: packs most likely to hit come first.
	Arrays.Sort(r, PackFile.SORT);
	return (new ObjectDirectory.PackList(snapshot, r));
}
/// <summary>
/// Rescan the pack directory if it appears to have been modified; report
/// whether the rescan actually produced a different pack list.
/// </summary>
internal override bool TryAgain1()
{
	ObjectDirectory.PackList old = packList.Get();
	if (!old.snapshot.IsModified(packDirectory))
	{
		// Directory unchanged since the last scan; nothing new to find.
		return false;
	}
	return old != ScanPacks(old);
}
/// <returns>
/// unmodifiable collection of all known pack files local to this
/// directory. Most recent packs are presented first. Packs most
/// likely to contain more recent objects appear before packs
/// containing objects referenced by commits further back in the
/// history of the repository.
/// </returns>
public virtual ICollection<PackFile> GetPacks()
{
	ObjectDirectory.PackList list = packList.Get();
	// NO_PACKS marks a never-scanned state; populate lazily on first use.
	if (list == NO_PACKS)
	{
		list = ScanPacks(list);
	}
	return Sharpen.Collections.UnmodifiableCollection(Arrays.AsList(list.packs));
}
/// <summary>
/// Open an object, consulting the loose-object cache first and then every
/// known pack file.
/// </summary>
/// <param name="curs">window cursor used to access pack data.</param>
/// <param name="objectId">identity of the object to open.</param>
/// <returns>a loader for the object, or null if it is not present here.</returns>
/// <exception cref="System.IO.IOException"></exception>
internal override ObjectLoader OpenObject1(WindowCursor curs, AnyObjectId objectId)
{
	if (unpackedObjectCache.IsUnpacked(objectId))
	{
		ObjectLoader ldr = OpenObject2(curs, objectId.Name, objectId);
		if (ldr != null)
		{
			return ldr;
		}
		// The cache was stale; the loose object is gone. Drop the entry
		// and fall through to the pack search.
		unpackedObjectCache.Remove(objectId);
	}
	ObjectDirectory.PackList pList = packList.Get();
	for (; ;)
	{
		bool packsChanged = false;
		foreach (PackFile p in pList.packs)
		{
			try
			{
				ObjectLoader ldr = p.Get(curs, objectId);
				if (ldr != null)
				{
					return ldr;
				}
			}
			catch (PackMismatchException)
			{
				// Pack was modified; refresh the entire pack list and
				// restart the search from the beginning.
				//
				pList = ScanPacks(pList);
				packsChanged = true;
				break;
			}
			catch (IOException)
			{
				// Assume the pack is corrupted and drop it from the list.
				//
				RemovePack(p);
			}
		}
		if (!packsChanged)
		{
			// Every pack was consulted without finding the object.
			return null;
		}
	}
}
/// <summary>
/// Atomically remove a pack from the published list, then close it once
/// new readers can no longer reach it.
/// </summary>
private void RemovePack(PackFile deadPack)
{
	ObjectDirectory.PackList current;
	ObjectDirectory.PackList updated;
	do
	{
		current = packList.Get();
		PackFile[] packs = current.packs;
		int idx = IndexOf(packs, deadPack);
		if (idx < 0)
		{
			// Another thread already removed it; nothing to publish.
			break;
		}
		// Copy the array, skipping the dead entry.
		PackFile[] remaining = new PackFile[packs.Length - 1];
		System.Array.Copy(packs, 0, remaining, 0, idx);
		System.Array.Copy(packs, idx + 1, remaining, idx, remaining.Length - idx);
		updated = new ObjectDirectory.PackList(current.snapshot, remaining);
	}
	while (!packList.CompareAndSet(current, updated));
	deadPack.Close();
}
/// <summary>
/// Release all resources held by this database: the loose-object cache,
/// every open pack file, and any loaded alternate databases.
/// </summary>
public override void Close()
{
	unpackedObjectCache.Clear();
	// Swap in the empty list first, so no new reader can pick up a pack
	// we are about to close.
	ObjectDirectory.PackList packs = packList.Get();
	packList.Set(NO_PACKS);
	foreach (PackFile p in packs.packs)
	{
		p.Close();
	}
	// Fully close all loaded alternates and clear the alternate list.
	FileObjectDatabase.AlternateHandle[] alt = alternates.Get();
	if (alt != null)
	{
		alternates.Set(null);
		foreach (FileObjectDatabase.AlternateHandle handle in alt)
		{
			handle.Close();
		}
	}
}
/// <summary>
/// Offer the packer every representation of the object found in the local
/// packs, then let each alternate database contribute as well.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
internal override void SelectObjectRepresentation(PackWriter packer, ObjectToPack otp, WindowCursor curs)
{
	ObjectDirectory.PackList pList = packList.Get();
	bool retry = true;
	while (retry)
	{
		retry = false;
		foreach (PackFile p in pList.packs)
		{
			try
			{
				LocalObjectRepresentation rep = p.Representation(curs, otp);
				if (rep != null)
				{
					packer.Select(otp, rep);
				}
			}
			catch (PackMismatchException)
			{
				// Pack was modified; refresh the entire pack list and
				// restart the search from the beginning.
				//
				pList = ScanPacks(pList);
				retry = true;
				break;
			}
			catch (IOException)
			{
				// Assume the pack is corrupted and drop it from the list.
				//
				RemovePack(p);
			}
		}
	}
	foreach (FileObjectDatabase.AlternateHandle h in MyAlternates())
	{
		h.db.SelectObjectRepresentation(packer, otp, curs);
	}
}
/// <summary>
/// Atomically remove deadPack from the published pack list (compare-and-
/// set retry loop), then close it once new readers can no longer reach it.
/// </summary>
private void RemovePack(PackFile deadPack)
{
	ObjectDirectory.PackList o;
	ObjectDirectory.PackList n;
	do
	{
		o = packList.Get();
		PackFile[] oldList = o.packs;
		int j = IndexOf(oldList, deadPack);
		if (j < 0)
		{
			// Another thread already removed it; nothing to publish.
			break;
		}
		// Copy the array, skipping the dead entry at index j.
		PackFile[] newList = new PackFile[oldList.Length - 1];
		System.Array.Copy(oldList, 0, newList, 0, j);
		System.Array.Copy(oldList, j + 1, newList, j, newList.Length - j);
		n = new ObjectDirectory.PackList(o.snapshot, newList);
	}
	while (!packList.CompareAndSet(o, n));
	deadPack.Close();
}
/// <summary>
/// Publish a newly written pack at the front of the pack list, unless a
/// concurrent scan already inserted it (compare-and-set retry loop).
/// </summary>
private void InsertPack(PackFile pf)
{
	ObjectDirectory.PackList o;
	ObjectDirectory.PackList n;
	do
	{
		o = packList.Get();
		// If the pack in question is already present in the list
		// (picked up by a concurrent thread that did a scan?) we
		// do not want to insert it a second time.
		//
		PackFile[] oldList = o.packs;
		string name = pf.GetPackFile().GetName();
		foreach (PackFile p in oldList)
		{
			// The list is kept in PackFile.SORT order, so once the new
			// pack would sort before this entry no later entry can match.
			if (PackFile.SORT.Compare(pf, p) < 0)
			{
				break;
			}
			if (name.Equals(p.GetPackFile().GetName()))
			{
				return;
			}
		}
		// Prepend the new pack so it is consulted first by searches.
		PackFile[] newList = new PackFile[1 + oldList.Length];
		newList[0] = pf;
		System.Array.Copy(oldList, 0, newList, 1, oldList.Length);
		n = new ObjectDirectory.PackList(o.snapshot, newList);
	}
	while (!packList.CompareAndSet(o, n));
}
/// <summary>
/// Collect object ids matching the abbreviated id, searching the packs,
/// the loose object directory, and then any alternates. Stops early once
/// more than RESOLVE_ABBREV_LIMIT candidates have been gathered.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
internal override void Resolve(ICollection<ObjectId> matches, AbbreviatedObjectId id)
{
	// Go through the packs once. If we didn't find any resolutions
	// scan for new packs and check once more.
	//
	int oldSize = matches.Count;
	ObjectDirectory.PackList pList = packList.Get();
	for (; ;)
	{
		foreach (PackFile p in pList.packs)
		{
			try
			{
				p.Resolve(matches, id, RESOLVE_ABBREV_LIMIT);
			}
			catch (IOException)
			{
				// Assume the pack is corrupted.
				//
				RemovePack(p);
			}
			if (matches.Count > RESOLVE_ABBREV_LIMIT)
			{
				// Already too ambiguous; no point searching further.
				return;
			}
		}
		if (matches.Count == oldSize)
		{
			// No pack produced a hit; rescan once in case new packs
			// appeared, then retry with the refreshed list.
			ObjectDirectory.PackList nList = ScanPacks(pList);
			if (nList == pList || nList.packs.Length == 0)
			{
				break;
			}
			pList = nList;
			continue;
		}
		break;
	}
	// Scan the loose object fan-out directory named by the first two hex
	// digits of the abbreviation.
	string fanOut = Sharpen.Runtime.Substring(id.Name, 0, 2);
	string[] entries = new FilePath(GetDirectory(), fanOut).List();
	if (entries != null)
	{
		foreach (string e in entries)
		{
			// A loose object file name is the remaining 38 hex digits.
			if (e.Length != Constants.OBJECT_ID_STRING_LENGTH - 2)
			{
				continue;
			}
			try
			{
				ObjectId entId = ObjectId.FromString(fanOut + e);
				if (id.PrefixCompare(entId) == 0)
				{
					matches.AddItem(entId);
				}
			}
			catch (ArgumentException)
			{
				// Not a valid object name; ignore this file.
				continue;
			}
			if (matches.Count > RESOLVE_ABBREV_LIMIT)
			{
				return;
			}
		}
	}
	// Finally ask each alternate database to contribute candidates.
	foreach (FileObjectDatabase.AlternateHandle alt in MyAlternates())
	{
		alt.db.Resolve(matches, id);
		if (matches.Count > RESOLVE_ABBREV_LIMIT)
		{
			return;
		}
	}
}