/// <summary>Returns a key that is immediately after the packed representation of this tuple</summary> /// <remarks>This is the equivalent of manually packing the tuple and incrementing the resulting slice</remarks> public static Slice Increment([NotNull] this IFdbTuple tuple) { if (tuple == null) { throw new ArgumentNullException("tuple"); } return FdbKey.Increment(tuple.ToSlice()); }
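// Usage sketch (hypothetical values, not from the original source): Increment is typically used to derive an exclusive upper bound right after a packed tuple, e.g. when building a manual range read.
var location = FdbTuple.Create("users", 42);
Slice begin = location.ToSlice();     // packed form of ("users", 42)
Slice end = location.Increment();     // key immediately following the packed form
// keys in [begin, end) can then be scanned with a range query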
public static IFdbTuple OfSizeAtMost(this IFdbTuple tuple, int size) { if (tuple == null || tuple.Count > size) { ThrowInvalidTupleSize(tuple, size, 1); } return tuple; }
public static IFdbTuple Append<T1, T2, T3, T4>([NotNull] this IFdbTuple tuple, T1 value1, T2 value2, T3 value3, T4 value4) { if (tuple == null) { throw new ArgumentNullException("tuple"); } return new FdbJoinedTuple(tuple, FdbTuple.Create<T1, T2, T3, T4>(value1, value2, value3, value4)); }
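// Usage sketch (hypothetical values): Append does not repack the head; it wraps it, together with the new values, in an FdbJoinedTuple.
var head = FdbTuple.Create("app", "logs");
var joined = head.Append(2024, 1, 15, "error");   // ("app", "logs", 2024, 1, 15, "error")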
/// <summary>Returns the typed value of the first item in this tuple</summary> /// <typeparam name="T">Expected type of the first item</typeparam> /// <returns>Value of the first item, adapted into type <typeparamref name="T"/>.</returns> public static T First<T>([NotNull] this IFdbTuple tuple) { if (tuple == null) { throw new ArgumentNullException("tuple"); } return tuple.Get<T>(0); }
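// Usage sketch (hypothetical values): read the first element of a tuple with an expected type.
var t = FdbTuple.Create("users", 42, true);
string category = t.First<string>();   // "users"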
public static IFdbTuple Substring([NotNull] this IFdbTuple tuple, int offset) { if (tuple == null) { throw new ArgumentNullException("tuple"); } return tuple[offset, null]; }
public static async Task Dir(string[] path, IFdbTuple extras, DirectoryBrowseOptions options, IFdbDatabase db, TextWriter log, CancellationToken ct) { if (log == null) log = Console.Out; log.WriteLine("# Listing {0}:", String.Join("/", path)); var parent = await TryOpenCurrentDirectoryAsync(path, db, ct); if (parent == null) { log.WriteLine(" Directory not found."); return; } if (parent.Layer.IsPresent) { log.WriteLine("# Layer: {0}", parent.Layer.ToAsciiOrHexaString()); } var folders = await Fdb.Directory.BrowseAsync(db, parent, ct); if (folders != null && folders.Count > 0) { foreach (var kvp in folders) { var name = kvp.Key; var subfolder = kvp.Value; if (subfolder != null) { if ((options & DirectoryBrowseOptions.ShowCount) != 0) { if (!(subfolder is FdbDirectoryPartition)) { long count = await Fdb.System.EstimateCountAsync(db, subfolder.ToRange(), ct); log.WriteLine(" {0,-12} {1,-12} {3,9:N0} {2}", FdbKey.Dump(subfolder.Copy().Key), subfolder.Layer.IsNullOrEmpty ? "-" : ("<" + subfolder.Layer.ToUnicode() + ">"), name, count); } else { log.WriteLine(" {0,-12} {1,-12} {3,9} {2}", FdbKey.Dump(subfolder.Copy().Key), subfolder.Layer.IsNullOrEmpty ? "-" : ("<" + subfolder.Layer.ToUnicode() + ">"), name, "-"); } } else { log.WriteLine(" {0,-12} {1,-12} {2}", FdbKey.Dump(subfolder.Copy().Key), subfolder.Layer.IsNullOrEmpty ? "-" : ("<" + subfolder.Layer.ToUnicode() + ">"), name); } } else { log.WriteLine(" WARNING: {0} seems to be missing!", name); } } log.WriteLine(" {0} sub-directorie(s).", folders.Count); } else { //TODO: test if it contains data? log.WriteLine(" No sub-directories."); } }
/// <summary>Returns a Key Selector pair that defines the range of all items contained under this tuple</summary> public static FdbKeySelectorPair ToSelectorPair([NotNull] this IFdbTuple tuple) { if (tuple == null) { throw new ArgumentNullException("tuple"); } return FdbKeySelectorPair.StartsWith(tuple.ToSlice()); }
/// <summary>Returns a typed version of a tuple of size 3</summary> /// <typeparam name="T1">Expected type of the first element</typeparam> /// <typeparam name="T2">Expected type of the second element</typeparam> /// <typeparam name="T3">Expected type of the third element</typeparam> /// <param name="tuple">Tuple that must be of size 3</param> /// <returns>Equivalent tuple, with its elements converted to the specified types</returns> public static FdbTuple<T1, T2, T3> As<T1, T2, T3>([NotNull] this IFdbTuple tuple) { tuple.OfSize(3); return new FdbTuple<T1, T2, T3>(tuple.Get<T1>(0), tuple.Get<T2>(1), tuple.Get<T3>(2)); }
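// Usage sketch (hypothetical values): view a 3-element dynamic tuple through its typed counterpart; OfSize(3) throws if the arity differs. Item1..Item3 are assumed here to be the fields of FdbTuple<T1, T2, T3>.
var typed = FdbTuple.Create("users", 42, true).As<string, int, bool>();
// typed.Item1 == "users", typed.Item2 == 42, typed.Item3 == true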
public static IFdbDynamicSubspace CreateDynamic([NotNull] IFdbTuple tuple, IFdbKeyEncoding encoding = null) { if (tuple == null) { throw new ArgumentNullException("tuple"); } var encoder = (encoding ?? TypeSystem.Default).GetDynamicEncoder(); return new FdbDynamicSubspace(tuple.ToSlice(), true, encoder); }
protected override FdbDirectoryLayer GetLayerForPath(IFdbTuple relativeLocation) { if (relativeLocation.Count == 0) { // Forward all actions on the Partition itself (empty path) to its parent's DL return this.ParentDirectoryLayer; } else { // For everything else, use the Partition's DL return this.DirectoryLayer; } }
public static FdbMemoizedTuple Memoize(this IFdbTuple tuple) { if (tuple == null) { return null; } var memoized = tuple as FdbMemoizedTuple ?? new FdbMemoizedTuple(tuple.ToArray(), tuple.ToSlice()); return memoized; }
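// Usage sketch (hypothetical values): memoizing caches both the item array and the packed slice, so packing the same tuple repeatedly does not redo the encoding work; calling Memoize() on an already memoized tuple returns it unchanged.
var cached = FdbTuple.Create("users", 42).Memoize();
Slice packed = cached.ToSlice();   // expected to be served from the cached packed representation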
public T FromTuple(IFdbTuple tuple) { if (tuple == null) { throw new ArgumentNullException("tuple"); } var key = new T(); key.FromTuple(tuple); return key; }
/// <summary>Returns a typed version of a tuple of size 5</summary> /// <typeparam name="T1">Expected type of the first element</typeparam> /// <typeparam name="T2">Expected type of the second element</typeparam> /// <typeparam name="T3">Expected type of the third element</typeparam> /// <typeparam name="T4">Expected type of the fourth element</typeparam> /// <typeparam name="T5">Expected type of the fifth element</typeparam> /// <param name="tuple">Tuple that must be of size 5</param> /// <returns>Equivalent tuple, with its elements converted to the specified types</returns> public static FdbTuple<T1, T2, T3, T4, T5> As<T1, T2, T3, T4, T5>([NotNull] this IFdbTuple tuple) { tuple.OfSize(5); return new FdbTuple<T1, T2, T3, T4, T5>(tuple.Get<T1>(0), tuple.Get<T2>(1), tuple.Get<T3>(2), tuple.Get<T4>(3), tuple.Get<T5>(4)); }
/// <summary>Convert a tuple into a key of this subspace</summary> /// <param name="tuple">Tuple that will be packed and appended to the subspace prefix</param> public Slice Pack([NotNull] IFdbTuple tuple) { if (tuple == null) { throw new ArgumentNullException("tuple"); } var writer = this.Subspace.GetWriter(); this.Encoder.PackKey(ref writer, tuple); return writer.ToSlice(); }
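// Usage sketch (assumes `subspace` is an IFdbDynamicSubspace obtained elsewhere, exposing this helper through its Keys property as done elsewhere in this code): the tuple is packed and appended to the subspace prefix.
Slice key = subspace.Keys.Pack(FdbTuple.Create("users", 42));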
public FdbPrefixedTuple Concat([NotNull] IFdbTuple tuple) { if (tuple == null) { throw new ArgumentNullException("tuple"); } if (tuple.Count == 0) { return this; } return new FdbPrefixedTuple(m_prefix, m_items.Concat(tuple)); }
public T FromTuple(IFdbTuple tuple) { if (tuple == null) { throw new ArgumentNullException("tuple"); } if (tuple.Count != 1) { throw new ArgumentException("Tuple must have only one item", "tuple"); } return tuple.Get<T>(0); }
public IFdbDynamicSubspace this[IFdbTuple tuple] { [ContractAnnotation("null => halt; notnull => notnull")] get { if (tuple == null) { throw new ArgumentNullException("tuple"); } //TODO: find a way to limit the number of copies of the packed tuple? return new FdbDynamicSubspace(this.Subspace.Keys.Pack(tuple), false, this.Encoder); } }
/// <summary>Returns a tuple with only the first (or last) items of this tuple</summary> /// <param name="tuple">Tuple to truncate</param> /// <param name="count">Number of items to keep. If positive, items will be taken from the start of the tuple. If negative, items will be taken from the end of the tuple</param> /// <returns>New tuple of size |<paramref name="count"/>|.</returns> /// <example> /// (a, b, c).Truncate(2) => (a, b) /// (a, b, c).Truncate(-2) => (b, c) /// </example> public static IFdbTuple Truncate([NotNull] this IFdbTuple tuple, int count) { tuple.OfSizeAtLeast(Math.Abs(count)); if (count < 0) { int offset = tuple.Count + count; return Substring(tuple, offset, -count); } else { return Substring(tuple, 0, count); } }
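// Usage sketch mirroring the examples in the summary above: positive counts keep the head of the tuple, negative counts keep the tail.
var t = FdbTuple.Create("a", "b", "c");
var headPart = t.Truncate(2);    // ("a", "b")
var tailPart = t.Truncate(-2);   // ("b", "c")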
/// <summary>Remove all fields of a hashset</summary> /// <param name="id"></param> public void Delete(IFdbTransaction trans, IFdbTuple id) { if (trans == null) { throw new ArgumentNullException("trans"); } if (id == null) { throw new ArgumentNullException("id"); } // remove all fields of the hash trans.ClearRange(FdbKeyRange.StartsWith(GetKey(id))); }
/// <summary>Test if the end of the current tuple is equal to another tuple</summary> /// <param name="left">Larger tuple</param> /// <param name="right">Smaller tuple</param> /// <returns>True if the end of <paramref name="left"/> is equal to <paramref name="right"/> or if both tuples are identical</returns> public static bool EndsWith([NotNull] this IFdbTuple left, [NotNull] IFdbTuple right) { if (left == null) { throw new ArgumentNullException("left"); } if (right == null) { throw new ArgumentNullException("right"); } //REVIEW: move this on IFdbTuple interface ? return FdbTuple.EndsWith(left, right); }
internal static void ThrowInvalidTupleSize(IFdbTuple tuple, int expected, int test) { if (tuple == null) { throw new ArgumentNullException("tuple"); } switch (test) { case 1: throw new InvalidOperationException(String.Format("This operation requires a tuple of size {0} or less, but this tuple has {1} elements", expected, tuple.Count)); case -1: throw new InvalidOperationException(String.Format("This operation requires a tuple of size {0} or more, but this tuple has {1} elements", expected, tuple.Count)); default: throw new InvalidOperationException(String.Format("This operation requires a tuple of size {0}, but this tuple has {1} elements", expected, tuple.Count)); } }
public static object[] ToArray([NotNull] this IFdbTuple tuple) { if (tuple == null) { throw new ArgumentNullException("tuple"); } var items = new object[tuple.Count]; if (items.Length > 0) { tuple.CopyTo(items, 0); } return items; }
public static FdbSubspace Partition(this IFdbSubspace subspace, [NotNull] IFdbTuple tuple) { if (tuple == null) { throw new ArgumentNullException("tuple"); } if (tuple.Count == 0) { return new FdbSubspace(subspace.ToFoundationDbKey()); } else { return new FdbSubspace(FdbTuple.PackWithPrefix(subspace.ToFoundationDbKey(), tuple)); } }
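// Usage sketch (assumes `subspace` is an IFdbSubspace obtained elsewhere): the child subspace prefix is the parent prefix followed by the packed tuple.
var child = subspace.Partition(FdbTuple.Create("users", 42));
// child prefix == subspace.ToFoundationDbKey() + pack(("users", 42))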
public static async Task ChangeDirectoryLayer(string[] path, string layer, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { var dir = await BasicCommands.TryOpenCurrentDirectoryAsync(path, db, ct); if (dir == null) { log.WriteLine("# Directory {0} does not exist anymore", String.Join("/", path)); } else { dir = await db.ReadWriteAsync((tr) => dir.ChangeLayerAsync(tr, Slice.FromString(layer)), ct); log.WriteLine("# Directory {0} layer changed to {1}", String.Join("/", path), dir.Layer.ToAsciiOrHexaString()); } }
internal FdbDirectorySubspace(IFdbTuple location, IFdbTuple relativeLocation, Slice prefix, FdbDirectoryLayer directoryLayer, Slice layer, IDynamicKeyEncoder encoder) : base(prefix, encoder) { Contract.Requires(location != null && relativeLocation != null && prefix != null && directoryLayer != null); if (layer.IsNull) layer = Slice.Empty; this.DirectoryLayer = directoryLayer; this.Location = location; this.RelativeLocation = relativeLocation; this.Layer = layer; this.Path = location.ToArray<string>(); Contract.Ensures(this.DirectoryLayer != null && this.Location != null && this.RelativeLocation != null && this.Path != null); Contract.Ensures(this.RelativeLocation.Count <= this.Location.Count && this.Location.EndsWith(this.RelativeLocation)); }
public bool Equals(IFdbTuple other) { if (object.ReferenceEquals(other, null)) { return false; } var memoized = other as FdbMemoizedTuple; if (!object.ReferenceEquals(memoized, null)) { return m_packed.Equals(memoized.m_packed); } return FdbTuple.Equals(this, other, SimilarValueComparer.Default); }
public FdbJoinedTuple(IFdbTuple head, IFdbTuple tail) { if (head == null) { throw new ArgumentNullException("head"); } if (tail == null) { throw new ArgumentNullException("tail"); } this.Head = head; this.Tail = tail; m_split = head.Count; m_count = m_split + tail.Count; }
/// <summary>Transform a tuple of N elements into a list of N singletons</summary> /// <param name="tuple">Tuple that contains any number of elements</param> /// <returns>Sequence of tuples that each contain a single element</returns> /// <example>(123, ABC, false,).Explode() => [ (123,), (ABC,), (false,) ]</example> public static IEnumerable<IFdbTuple> Explode([NotNull] this IFdbTuple tuple) { if (tuple == null) { throw new ArgumentNullException("tuple"); } int p = 0; int n = tuple.Count; while (p < n) { yield return tuple[p, p + 1]; ++p; } }
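// Usage sketch mirroring the example above: each element of the tuple becomes a singleton tuple.
foreach (var singleton in FdbTuple.Create(123, "ABC", false).Explode())
{
	Console.WriteLine(singleton);   // prints each singleton, e.g. (123,), ("ABC",), (false,)
}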
public void SetValue(IFdbTransaction trans, IFdbTuple id, string field, Slice value) { if (trans == null) { throw new ArgumentNullException("trans"); } if (id == null) { throw new ArgumentNullException("id"); } if (string.IsNullOrEmpty(field)) { throw new ArgumentNullException("field"); } trans.Set(GetFieldKey(id, field), value); }
/// <summary>Remove a field of a hashset</summary> /// <param name="trans"></param> /// <param name="id"></param> /// <param name="field"></param> public void DeleteValue(IFdbTransaction trans, IFdbTuple id, string field) { if (trans == null) { throw new ArgumentNullException("trans"); } if (id == null) { throw new ArgumentNullException("id"); } if (string.IsNullOrEmpty(field)) { throw new ArgumentNullException("field"); } trans.Clear(GetFieldKey(id, field)); }
/// <summary>Return the value of a specific field of a hashset</summary> /// <param name="trans">Transaction that will be used for this request</param> /// <param name="id">Unique identifier of the hashset</param> /// <param name="field">Name of the field to read</param> /// <returns>Value of the corresponding field, or Slice.Nil if the hashset does not exist or does not have a field with this name</returns> public Task<Slice> GetValueAsync(IFdbReadOnlyTransaction trans, IFdbTuple id, string field) { if (trans == null) { throw new ArgumentNullException("trans"); } if (id == null) { throw new ArgumentNullException("id"); } if (string.IsNullOrEmpty(field)) { throw new ArgumentNullException("field"); } return trans.GetAsync(GetFieldKey(id, field)); }
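// Usage sketch (hypothetical identifiers; assumes `hashset` is an instance of this collection and `db` an IFdbDatabase): read one field inside a read-only retry loop, using db.ReadAsync as done elsewhere in this code.
Slice email = await db.ReadAsync(tr => hashset.GetValueAsync(tr, FdbTuple.Create("user", 123), "email"), ct);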
/// <summary>Remove a directory and all its data</summary> public static async Task RemoveDirectory(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { if (log == null) log = Console.Out; string layer = extras.Count > 0 ? extras.Get<string>(0) : null; var folder = await db.Directory.TryOpenAsync(path, cancellationToken: ct); if (folder == null) { log.WriteLine("# Directory {0} does not exist", string.Join("/", path)); return; } // are there any subdirectories ? var subDirs = await folder.TryListAsync(db, ct); if (subDirs != null && subDirs.Count > 0) { //TODO: "-r" flag ? log.WriteLine("# Cannot remove {0} because it still contains {1} sub-directorie(s)", string.Join("/", path), subDirs.Count); return; } //TODO: ask for confirmation? log.WriteLine("# Deleting directory {0} ...", String.Join("/", path)); await folder.RemoveAsync(db, ct); log.WriteLine("# Gone!"); }
/// <summary>Convert a relative path in this Directory Layer, into an absolute path from the root of partition of the database</summary> internal IFdbTuple PartitionSubPath(IFdbTuple path = null) { // If the DL is the root, the path is already absolute // If the DL is used by a partition, then the path of the partition will be prepended to the path return path == null ? this.Location : this.Location.Concat(path); }
/// <summary>Returns the key prefix of a HashSet: (subspace, id, )</summary> /// <param name="id"></param> /// <returns></returns> protected virtual Slice GetKey(IFdbTuple id) { //REVIEW: should the id be encoded as an embedded tuple or not? return this.Subspace.Keys.Pack(id); }
protected virtual string ParseFieldKey(IFdbTuple key) { return key.Last<string>(); }
/// <summary>Remove one or more fields of a hashset</summary> /// <param name="trans"></param> /// <param name="id"></param> /// <param name="fields"></param> public void Delete(IFdbTransaction trans, IFdbTuple id, params string[] fields) { if (trans == null) throw new ArgumentNullException("trans"); if (id == null) throw new ArgumentNullException("id"); if (fields == null) throw new ArgumentNullException("fields"); foreach (var field in fields) { if (string.IsNullOrEmpty(field)) throw new ArgumentException("Field cannot have an empty name", "fields"); trans.Clear(GetFieldKey(id, field)); } }
/// <summary>Display a tree of a directory's children</summary> public static async Task Tree(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { if (log == null) log = Console.Out; log.WriteLine("# Tree of {0}:", String.Join("/", path)); FdbDirectorySubspace root = null; if (path.Length > 0) root = await db.Directory.TryOpenAsync(path, cancellationToken: ct); await TreeDirectoryWalk(root, new List<bool>(), db, log, ct); log.WriteLine("# done"); }
/// <summary>Counts the number of keys inside a directory</summary> public static async Task Count(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { // look if there is something under there var folder = (await TryOpenCurrentDirectoryAsync(path, db, ct)) as FdbDirectorySubspace; if (folder == null) { log.WriteLine("# Directory {0} does not exist", String.Join("/", path)); return; } var copy = folder.Copy(); log.WriteLine("# Counting keys under {0} ...", FdbKey.Dump(copy.Key)); var progress = new Progress<FdbTuple<long, Slice>>((state) => { log.Write("\r# Found {0:N0} keys...", state.Item1); }); long count = await Fdb.System.EstimateCountAsync(db, copy.ToRange(), progress, ct); log.WriteLine("\r# Found {0:N0} keys in {1}", count, folder.FullName); }
/// <summary>Returns a new Directory Subspace given its node subspace, path and layer id</summary> private FdbDirectorySubspace ContentsOfNode(FdbSubspace node, IFdbTuple relativePath, Slice layer) { Contract.Requires(node != null); var path = this.Location.Concat(relativePath); var prefix = this.NodeSubspace.UnpackSingle<Slice>(node.Key); if (layer == FdbDirectoryPartition.LayerId) { return new FdbDirectoryPartition(path, relativePath, prefix, this); } else { return new FdbDirectorySubspace(path, relativePath, prefix, this, layer); } }
public static async Task ShowDirectoryLayer(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { var dir = await BasicCommands.TryOpenCurrentDirectoryAsync(path, db, ct); if (dir == null) { log.WriteLine("# Directory {0} does not exist anymore", String.Join("/", path)); } else { if (dir.Layer == FdbDirectoryPartition.LayerId) log.WriteLine("# Directory {0} is a partition", String.Join("/", path)); else if (dir.Layer.IsPresent) log.WriteLine("# Directory {0} has layer {1}", String.Join("/", path), dir.Layer.ToAsciiOrHexaString()); else log.WriteLine("# Directory {0} does not have a layer defined", String.Join("/", path)); } }
/// <summary>Move/Rename a directory</summary> public static async Task MoveDirectory(string[] srcPath, string[] dstPath, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { var folder = await db.Directory.TryOpenAsync(srcPath, cancellationToken: ct); if (folder == null) { log.WriteLine("# Source directory {0} does not exist!", string.Join("/", srcPath)); return; } folder = await db.Directory.TryOpenAsync(dstPath, cancellationToken: ct); if (folder != null) { log.WriteLine("# Destination directory {0} already exists!", string.Join("/", dstPath)); return; } await db.Directory.MoveAsync(srcPath, dstPath, ct); log.WriteLine("Moved {0} to {1}", string.Join("/", srcPath), string.Join("/", dstPath)); }
/// <summary>Remove an existing node from its parents</summary> /// <returns>True if the parent node was found, otherwise false</returns> private async Task<bool> RemoveFromParent(IFdbTransaction tr, IFdbTuple path) { Contract.Requires(tr != null && path != null); var parent = await FindAsync(tr, path.Substring(0, path.Count - 1)).ConfigureAwait(false); if (parent.Exists) { if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Removing path {0} from its parent folder at {1}", path, parent.Subspace.Key); tr.Clear(GetSubDirKey(parent.Subspace, path.Get<string>(-1))); return true; } return false; }
public static async Task Map(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { // we want to merge the map of shards, with the map of directories from the Directory Layer, and count for each directory how many shards intersect var folder = await TryOpenCurrentDirectoryAsync(path, db, ct); if (folder == null) { log.WriteLine("# Directory not found"); return; } var span = folder.DirectoryLayer.ContentSubspace.ToRange(); // note: this may break in future versions of the DL! Maybe we need a custom API to get a flat list of all directories in a DL that span a specific range ? var shards = await Fdb.System.GetChunksAsync(db, span, ct); int totalShards = shards.Count; log.WriteLine("Found {0} shard(s) in partition /{1}", totalShards, folder.DirectoryLayer.FullName); log.WriteLine("Listing all directories..."); var map = new Dictionary<string, int>(StringComparer.Ordinal); Action<string[], int> account = (p, c) => { for (int i = 1; i <= p.Length; i++) { var s = "/" + String.Join("/", p, 0, i); int x; map[s] = map.TryGetValue(s, out x) ? (x + c) : c; } }; var work = new Stack<IFdbDirectory>(); work.Push(folder); var dirs = new List<IFdbDirectory>(); int n = 0; while(work.Count > 0) { var cur = work.Pop(); // skip sub partitions var names = await cur.ListAsync(db, ct); foreach(var name in names) { var sub = await cur.TryOpenAsync(db, name, ct); if (sub != null) { var p = sub.FullName; if (sub is FdbDirectoryPartition) { log.WriteLine("\r! Skipping partition {0} ", sub.Name); n = 0; continue; } log.Write("\r/{0}{1}", p, p.Length > n ? String.Empty : new string(' ', n - p.Length)); n = p.Length; work.Push(sub); dirs.Add(sub); } } } log.Write("\r" + new string(' ', n + 2)); log.WriteLine("\r> Found {0} sub-directories", dirs.Count); log.WriteLine(); log.WriteLine("Estimating size of each directory..."); int foundShards = 0; n = 0; int max = 0; IFdbDirectory bigBad = null; foreach (var dir in dirs) { log.Write("\r> {0}{1}", dir.Name, dir.Name.Length > n ? 
String.Empty : new string(' ', n - dir.Name.Length)); n = dir.Name.Length; var p = dir.Path.ToArray(); var key = ((FdbSubspace)dir).Key; // verify that the subspace has at least one key inside var bounds = await db.ReadAsync(async (tr) => { var kvs = await Task.WhenAll( tr.GetRange(FdbKeyRange.StartsWith(key)).FirstOrDefaultAsync(), tr.GetRange(FdbKeyRange.StartsWith(key)).LastOrDefaultAsync() ); return new { Min = kvs[0].Key, Max = kvs[1].Key }; }, ct); if (bounds.Min.HasValue) { // folder is not empty shards = await Fdb.System.GetChunksAsync(db, FdbKeyRange.StartsWith(key), ct); //TODO: we still need to check if the first and last shard really intersect the subspace // we need to check if the shards actually contain data //Console.WriteLine("/{0} under {1} with {2} shard(s)", string.Join("/", p), FdbKey.Dump(key), shards.Count); foundShards += shards.Count; account(p, shards.Count); if (shards.Count > max) { max = shards.Count; bigBad = dir; } } else { account(p, 0); } } log.Write("\r" + new string(' ', n + 2)); log.WriteLine("\rFound a total of {0} shard(s) in {1} folder(s)", foundShards, dirs.Count); log.WriteLine(); log.WriteLine("Shards %Total Path"); foreach(var kvp in map.OrderBy(x => x.Key)) { log.WriteLine("{0,6} {1,-20} {2}", kvp.Value, RobustHistogram.FormatHistoBar((double)kvp.Value / foundShards, 20), kvp.Key); } log.WriteLine(); if (bigBad != null) { log.WriteLine("Biggest folder is /{0} with {1} shards ({2:N1}% total, {3:N1}% subtree)", bigBad.FullName, max, 100.0 * max / totalShards, 100.0 * max / foundShards); log.WriteLine(); } }
/// <summary>Finds a node subspace, given its path, by walking the tree from the root.</summary> /// <returns>Node if it was found, or null</returns> private async Task<Node> FindAsync(IFdbReadOnlyTransaction tr, IFdbTuple path) { Contract.Requires(tr != null && path != null); // look for the node by traversing from the root down. Stop when crossing a partition... var n = this.RootNode; int i = 0; Slice layer = Slice.Nil; while (i < path.Count) { if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Looking for child {0} under node {1}...", path.Get<string>(i), n.Key); n = NodeWithPrefix(await tr.GetAsync(GetSubDirKey(n, path.Get<string>(i))).ConfigureAwait(false)); if (n == null) { return new Node(null, path.Substring(0, i + 1), path, Slice.Empty); } if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Reading Layer value for subfolder {0} found at {1}", path, n.Key); layer = await tr.GetAsync(n.Pack(LayerSuffix)).ConfigureAwait(false); if (layer == FdbDirectoryPartition.LayerId) { // stop when reaching a partition return new Node(n, path.Substring(0, i + 1), path, FdbDirectoryPartition.LayerId); } ++i; } return new Node(n, path, path, layer); }
protected virtual IFdbTuple ToRelativePath(IFdbTuple location) { return location == null ? this.RelativeLocation : this.RelativeLocation.Concat(location); }
public void Set(IFdbTransaction trans, IFdbTuple id, IEnumerable<KeyValuePair<string, Slice>> fields) { if (trans == null) throw new ArgumentNullException("trans"); if (id == null) throw new ArgumentNullException("id"); if (fields == null) throw new ArgumentNullException("fields"); foreach (var field in fields) { if (string.IsNullOrEmpty(field.Key)) throw new ArgumentException("Field cannot have an empty name", "fields"); trans.Set(GetFieldKey(id, field.Key), field.Value); } }
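// Usage sketch (hypothetical identifiers; assumes `hashset` is an instance of this collection and `db` an IFdbDatabase): write several fields of one hashset in a single transaction, following the BeginTransaction pattern seen elsewhere in this code.
using (var tr = db.BeginTransaction(ct))
{
	hashset.Set(tr, FdbTuple.Create("user", 123), new[]
	{
		new KeyValuePair<string, Slice>("email", Slice.FromString("bob@example.com")),
		new KeyValuePair<string, Slice>("name", Slice.FromString("Bob")),
	});
	await tr.CommitAsync();
}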
/// <summary>Find the DCs, machines and processes in the cluster</summary> public static async Task Topology(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { var coords = await Fdb.System.GetCoordinatorsAsync(db, ct); log.WriteLine("[Cluster] {0}", coords.Id); var servers = await db.QueryAsync(tr => tr .WithReadAccessToSystemKeys() .GetRange(FdbKeyRange.StartsWith(Fdb.System.ServerList)) .Select(kvp => new { // Offsets Size Type Name Description // 0 2 Word Version? 0100 (1.0 ?) // 2 4 DWord ??? 0x00 0x20 0xA2 0x00 // 6 2 Word FDBMagic 0xDB 0x0F "FDB" // 8 16 Guid NodeId Unique Process ID // 24 16 Guid Machine "machine_id" field in foundationdb.conf (ends with 8x0 if manually specified) // 40 16 Guid DataCenter "datacenter_id" field in foundationdb.conf (ends with 8x0 if manually specified) // 56 4 ??? ?? 4 x 0 // 60 12 x24 ARRAY[] ?? array of 12x the same 24-byte struct defined below // ...0 4 DWord IPAddress 01 00 00 7F => 127.0.0.1 // ...4 4 DWord Port 94 11 00 00 -> 4500 // ...8 4 DWord ?? randomish, changes every reboot // ..12 4 DWord ?? randomish, changes every reboot // ..16 4 DWord Size? small L-E integer, usually between 0x20 and 0x40... // ..20 4 DWord ?? randmoish, changes every reboot ProcessId = kvp.Value.Substring(8, 16).ToHexaString(), MachineId = kvp.Value.Substring(24, 16).ToHexaString(), DataCenterId = kvp.Value.Substring(40, 16).ToHexaString(), Parts = Enumerable.Range(0, 12).Select(i => { int p = 60 + 24 * i; return new { Address = new IPAddress(kvp.Value.Substring(p, 4).GetBytes().Reverse().ToArray()), Port = kvp.Value.Substring(p + 4, 4).ToInt32(), Unknown1 = kvp.Value.Substring(p + 8, 4).ToInt32(), Unknown2 = kvp.Value.Substring(p + 12, 4).ToInt32(), Unknown3 = kvp.Value.Substring(p + 16, 4).ToInt32(), Unknown4 = kvp.Value.Substring(p + 20, 4).ToInt32(), }; }).ToList(), Raw = kvp.Value, }), ct ); var numNodes = servers.Select(s => s.ProcessId).Distinct().Count(); var numMachines = servers.Select(s => s.MachineId).Distinct().Count(); var numDCs = servers.Select(s => s.DataCenterId).Distinct().Count(); var dcs = servers.GroupBy(x => x.DataCenterId).ToArray(); for (int dcIndex = 0; dcIndex < dcs.Length;dcIndex++) { var dc = dcs[dcIndex]; bool lastDc = dcIndex == dcs.Length - 1; string dcId = dc.Key.EndsWith("0000000000000000") ? dc.Key.Substring(0, 16) : dc.Key; log.WriteLine((lastDc ? "`- " : "|- ") + "[DataCenter] {0} (#{1})", dcId, dcIndex); var machines = dc.GroupBy(x => x.MachineId).ToArray(); string dcPrefix = lastDc ? " " : "| "; for (int machineIndex = 0; machineIndex < machines.Length; machineIndex++) { var machine = machines[machineIndex]; var lastMachine = machineIndex == machines.Length - 1; string machineId = machine.Key.EndsWith("0000000000000000") ? machine.Key.Substring(0, 16) : machine.Key; log.WriteLine(dcPrefix + (lastMachine ? "`- " : "|- ") + "[Machine] {0}, {1}", machine.First().Parts[0].Address, machineId); var procs = machine.ToArray(); string machinePrefix = dcPrefix + (lastMachine ? " " : "| "); for (int procIndex = 0; procIndex < procs.Length; procIndex++) { var proc = procs[procIndex]; bool lastProc = procIndex == procs.Length - 1; log.WriteLine(machinePrefix + (lastProc ? 
"`- " : "|- ") + "[Process] {0}:{1}, {2}", proc.Parts[0].Address, proc.Parts[0].Port, proc.ProcessId); //foreach (var part in proc.Parts) //{ // log.WriteLine(machinePrefix + "| -> {0}, {1}, {2:X8}, {3:X8}, {4}, {5:X8}", part.Address, part.Port, part.Unknown1, part.Unknown2, part.Unknown3, part.Unknown4); //} } } } log.WriteLine(); log.WriteLine("Found {0} process(es) on {1} machine(s) in {2} datacenter(s)", numNodes, numMachines, numDCs); log.WriteLine(); }
public static async Task Shards(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { var ranges = await Fdb.System.GetChunksAsync(db, FdbKey.MinValue, FdbKey.MaxValue, ct); Console.WriteLine("Found {0} shards in the whole cluster", ranges.Count); // look if there is something under there var folder = (await TryOpenCurrentDirectoryAsync(path, db, ct)) as FdbDirectorySubspace; if (folder != null) { var r = FdbKeyRange.StartsWith(folder.Copy().Key); Console.WriteLine("Searching for shards that intersect with /{0} ...", String.Join("/", path)); ranges = await Fdb.System.GetChunksAsync(db, r, ct); Console.WriteLine("Found {0} ranges intersecting {1}:", ranges.Count, r); var last = Slice.Empty; foreach (var range in ranges) { Console.Write("> " + FdbKey.Dump(range.Begin) + " ..."); long count = await Fdb.System.EstimateCountAsync(db, range, ct); Console.WriteLine(" {0:N0}", count); last = range.End; //TODO: we can probably get more details on this shard looking in the system keyspace (where it is, how many replicas, ...) } Console.WriteLine("> ... " + FdbKey.Dump(last)); } //Console.WriteLine("Found " + ranges.Count + " shards in the cluster"); //TODO: shards that intersect the current directory }
/// <summary>Return the names of all fields of a hashset</summary> /// <param name="trans">Transaction that will be used for this request</param> /// <param name="id">Unique identifier of the hashset</param> /// <returns>List of all field names. If the list is empty, the hashset does not exist</returns> public Task<List<string>> GetKeys(IFdbReadOnlyTransaction trans, IFdbTuple id, CancellationToken cancellationToken = default(CancellationToken)) { //note: As of Beta2, FDB does not have a fdb_get_range that only returns the keys. That means that we also have to read the values from the db, just to get the names of the fields :( //TODO: find a way to optimize this ? if (trans == null) throw new ArgumentNullException("trans"); if (id == null) throw new ArgumentNullException("id"); var prefix = GetKey(id); return trans .GetRange(FdbKeyRange.StartsWith(prefix)) .Select((kvp) => ParseFieldKey(FdbTuple.Unpack(kvp.Key))) .ToListAsync(cancellationToken); }
public static async Task Sampling(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { double ratio = 0.1d; bool auto = true; if (extras.Count > 0) { double x = extras.Get<double>(0); if (x > 0 && x <= 1) ratio = x; auto = false; } var folder = await TryOpenCurrentDirectoryAsync(path, db, ct); FdbKeyRange span; if (folder is FdbDirectorySubspace) { span = FdbKeyRange.StartsWith((folder as FdbDirectorySubspace).Copy()); log.WriteLine("Reading list of shards for /{0} under {1} ...", String.Join("/", path), FdbKey.Dump(span.Begin)); } else { log.WriteLine("Reading list of shards for the whole cluster ..."); span = FdbKeyRange.All; } // dump keyServers var ranges = await Fdb.System.GetChunksAsync(db, span, ct); log.WriteLine("> Found {0:N0} shard(s)", ranges.Count); // take a sample var samples = new List<FdbKeyRange>(); if (ranges.Count <= 32) { // small enough to scan it all samples.AddRange(ranges); log.WriteLine("Sampling all {0:N0} shards ...", samples.Count); } else { // need to take a random subset var rnd = new Random(); int sz = Math.Max((int)Math.Ceiling(ratio * ranges.Count), 1); if (auto) { if (sz > 100) sz = 100; //SAFETY if (sz < 32) sz = Math.Max(sz, Math.Min(32, ranges.Count)); } var population = new List<FdbKeyRange>(ranges); for (int i = 0; i < sz; i++) { int p = rnd.Next(population.Count); samples.Add(population[p]); population.RemoveAt(p); } log.WriteLine("Sampling " + samples.Count + " out of " + ranges.Count + " shards (" + (100.0 * samples.Count / ranges.Count).ToString("N1") + "%) ..."); } log.WriteLine(); const string FORMAT_STRING = "{0,9} ║{1,10}{6,6} {2,-29} ║{3,10}{7,7} {4,-37} ║{5,10}"; const string SCALE_KEY = "....--------========########M"; const string SCALE_VAL = "....--------========########@@@@@@@@M"; log.WriteLine(FORMAT_STRING, "Count", "Keys", SCALE_KEY, "Values", SCALE_VAL, "Total", "med.", "med."); var rangeOptions = new FdbRangeOptions { Mode = FdbStreamingMode.WantAll }; samples = samples.OrderBy(x => x.Begin).ToList(); long globalSize = 0; long globalCount = 0; int workers = 8; // Math.Max(4, Environment.ProcessorCount); var sw = Stopwatch.StartNew(); var tasks = new List<Task>(); int n = samples.Count; while (samples.Count > 0) { while (tasks.Count < workers && samples.Count > 0) { var range = samples[0]; samples.RemoveAt(0); tasks.Add(Task.Run(async () => { var kk = new RobustHistogram(RobustHistogram.TimeScale.Ticks); var vv = new RobustHistogram(RobustHistogram.TimeScale.Ticks); #region Method 1: get_range everything... 
using (var tr = db.BeginTransaction(ct)) { long keySize = 0; long valueSize = 0; long count = 0; int iter = 0; var beginSelector = FdbKeySelector.FirstGreaterOrEqual(range.Begin); var endSelector = FdbKeySelector.FirstGreaterOrEqual(range.End); while (true) { FdbRangeChunk data = default(FdbRangeChunk); FdbException error = null; try { data = await tr.Snapshot.GetRangeAsync( beginSelector, endSelector, rangeOptions, iter ).ConfigureAwait(false); } catch (FdbException e) { error = e; } if (error != null) { await tr.OnErrorAsync(error.Code).ConfigureAwait(false); continue; } if (data.Count == 0) break; count += data.Count; foreach (var kvp in data.Chunk) { keySize += kvp.Key.Count; valueSize += kvp.Value.Count; kk.Add(TimeSpan.FromTicks(kvp.Key.Count)); vv.Add(TimeSpan.FromTicks(kvp.Value.Count)); } if (!data.HasMore) break; beginSelector = FdbKeySelector.FirstGreaterThan(data.Last.Key); ++iter; } long totalSize = keySize + valueSize; Interlocked.Add(ref globalSize, totalSize); Interlocked.Add(ref globalCount, count); lock (log) { log.WriteLine(FORMAT_STRING, count.ToString("N0"), FormatSize(keySize), kk.GetDistribution(begin: 1, end: 12000, fold: 2), FormatSize(valueSize), vv.GetDistribution(begin: 1, end: 120000, fold: 2), FormatSize(totalSize), FormatSize((int)Math.Ceiling(kk.Median)), FormatSize((int)Math.Ceiling(vv.Median))); } } #endregion #region Method 2: estimate the count using key selectors... //long counter = await Fdb.System.EstimateCountAsync(db, range, ct); //Console.WriteLine("COUNT = " + counter.ToString("N0")); #endregion }, ct)); } var done = await Task.WhenAny(tasks); tasks.Remove(done); } await Task.WhenAll(tasks); sw.Stop(); log.WriteLine(); if (n != ranges.Count) { log.WriteLine("Sampled " + FormatSize(globalSize) + " (" + globalSize.ToString("N0") + " bytes) and " + globalCount.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N1") + " sec"); log.WriteLine("> Estimated total size is " + FormatSize(globalSize * ranges.Count / n)); } else { log.WriteLine("Found " + FormatSize(globalSize) + " (" + globalSize.ToString("N0") + " bytes) and " + globalCount.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N1") + " sec"); // compare to the whole cluster ranges = await Fdb.System.GetChunksAsync(db, FdbKey.MinValue, FdbKey.MaxValue, ct); log.WriteLine("> This directory contains ~{0:N2}% of all data", (100.0 * n / ranges.Count)); } log.WriteLine(); }
/// <summary>Returns the key of a specific field of a HashSet: (subspace, id, field, )</summary> /// <param name="id"></param> /// <param name="field"></param> /// <returns></returns> protected virtual Slice GetFieldKey(IFdbTuple id, string field) { //REVIEW: should the id be encoded as an embedded tuple or not? return this.Subspace.Keys.Pack(id.Append(field)); }
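// Illustrative key layout (hypothetical values): for id = ("user", 123) and field "email",
//   GetKey(id)              -> pack(subspace, "user", 123)
//   GetFieldKey(id, field)  -> pack(subspace, "user", 123, "email")
// which is why clearing FdbKeyRange.StartsWith(GetKey(id)) removes every field of the hashset at once.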
/// <summary>Creates a new directory</summary> public static async Task CreateDirectory(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct) { if (log == null) log = Console.Out; string layer = extras.Count > 0 ? extras.Get<string>(0) : null; log.WriteLine("# Creating directory {0} with layer '{1}'", String.Join("/", path), layer); var folder = await db.Directory.TryOpenAsync(path, cancellationToken: ct); if (folder != null) { log.WriteLine("- Directory {0} already exists!", string.Join("/", path)); return; } folder = await db.Directory.TryCreateAsync(path, Slice.FromString(layer), cancellationToken: ct); log.WriteLine("- Created under {0} [{1}]", FdbKey.Dump(folder.Key), folder.Key.ToHexaString(' ')); // look if there is already stuff under there var stuff = await db.ReadAsync((tr) => tr.GetRange(folder.ToRange()).FirstOrDefaultAsync(), cancellationToken: ct); if (stuff.Key.IsPresent) { log.WriteLine("CAUTION: There is already some data under {0} !", String.Join("/", path)); log.WriteLine(" {0} = {1}", FdbKey.Dump(stuff.Key), stuff.Value.ToAsciiOrHexaString()); } }
/// <summary>Shows the first few keys of a directory</summary> public static async Task Show(string[] path, IFdbTuple extras, bool reverse, IFdbDatabase db, TextWriter log, CancellationToken ct) { int count = 20; if (extras.Count > 0) { int x = extras.Get<int>(0); if (x > 0) count = x; } // look if there is something under there var folder = await db.Directory.TryOpenAsync(path, cancellationToken: ct); if (folder != null) { log.WriteLine("# Content of {0} [{1}]", FdbKey.Dump(folder.Key), folder.Key.ToHexaString(' ')); var keys = await db.QueryAsync((tr) => { var query = tr.GetRange(folder.ToRange()); return reverse ? query.Reverse().Take(count) : query.Take(count + 1); }, cancellationToken: ct); if (keys.Count > 0) { if (reverse) keys.Reverse(); foreach (var key in keys.Take(count)) { log.WriteLine("...{0} = {1}", FdbKey.Dump(folder.Extract(key.Key)), key.Value.ToAsciiOrHexaString()); } if (!reverse && keys.Count == count + 1) { log.WriteLine("... more"); } } else { log.WriteLine(" no content found"); } } }
/// <summary>Return the DirectoryLayer instance that should be called for the given path</summary> /// <param name="relativeLocation">Location relative to this directory subspace</param> protected virtual FdbDirectoryLayer GetLayerForPath(IFdbTuple relativeLocation) { // for regular directories, always returns its DL. return this.DirectoryLayer; }