        public static T[] ToArray<T>([NotNull] this IFdbTuple tuple)
        {
            if (tuple == null)
            {
                throw new ArgumentNullException("tuple");
            }

            var items = new T[tuple.Count];
            for (int i = 0; i < items.Length; i++)
            {
                items[i] = tuple.Get<T>(i);
            }
            return items;
        }
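A brief usage sketch of ToArray<T> (the tuple values below are illustrative, and FdbTuple.Create is assumed to be this library's tuple factory):

            // Hypothetical usage: materialize every element of a tuple into a typed array.
            IFdbTuple tuple = FdbTuple.Create("users", "by_id", "42"); // FdbTuple.Create assumed
            string[] parts = tuple.ToArray<string>(); // { "users", "by_id", "42" }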
Example #2
            /// <summary>Compare a single item in both tuples</summary>
            /// <param name="x">First tuple</param>
            /// <param name="y">Second tuple</param>
            /// <returns>Returns a positive value if x is greater than y, a negative value if x is less than y and 0 if x is equal to y.</returns>
            public int Compare(IFdbTuple x, IFdbTuple y)
            {
                if (y == null)
                {
                    return x == null ? 0 : +1;
                }
                if (x == null)
                {
                    return -1;
                }

                int nx = x.Count;
                int ny = y.Count;

                if (ny == 0 || nx == 0)
                {
                    return nx - ny;
                }

                int p = this.Offset;

                return this.Comparer.Compare(x.Get<T1>(p), y.Get<T1>(p));
            }
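This Compare method appears to belong to a comparer that orders tuples by the single item at this.Offset using this.Comparer. A hedged usage sketch (the type name and constructor below are hypothetical; only the ordering rules come from the code above):

                // var byFirstItem = new FdbTupleItemComparer<string>(offset: 0, comparer: StringComparer.Ordinal); // hypothetical type
                // tuples.Sort(byFirstItem);
                // Ordering: null tuples first, then empty tuples, then by the item at the given offset.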
		internal async Task<FdbDirectorySubspace> MoveInternalAsync(IFdbTransaction trans, IFdbTuple oldPath, IFdbTuple newPath, bool throwOnError)
		{
			Contract.Requires(trans != null && oldPath != null && newPath != null);

			if (oldPath.Count == 0)
			{
				throw new InvalidOperationException("The root directory may not be moved.");
			}
			if (newPath.Count == 0)
			{
				throw new InvalidOperationException("The root directory cannot be overwritten.");
			}
			if (newPath.StartsWith(oldPath))
			{
				throw new InvalidOperationException(string.Format("The destination directory({0}) cannot be a subdirectory of the source directory({1}).", newPath, oldPath));
			}

			await CheckWriteVersionAsync(trans).ConfigureAwait(false);

			var oldNode = await FindAsync(trans, oldPath).ConfigureAwait(false);
			if (!oldNode.Exists)
			{
				if (throwOnError) throw new InvalidOperationException(string.Format("The source directory '{0}' does not exist.", oldPath));
				return null;
			}

			var newNode = await FindAsync(trans, newPath).ConfigureAwait(false);

			// we have already checked that old and new are under this partition path, but one of them (or both?) could be under a sub-partition..
			if (oldNode.IsInPartition(false) || newNode.IsInPartition(false))
			{
				if (!oldNode.IsInPartition(false) || !newNode.IsInPartition(false) || !FdbTuple.Equals(oldNode.Path, newNode.Path))
				{
					throw new InvalidOperationException("Cannot move between partitions.");
				}
				// both nodes are in the same sub-partition, delegate to it
				return await GetPartitionForNode(newNode).DirectoryLayer.MoveInternalAsync(trans, oldNode.PartitionSubPath, newNode.PartitionSubPath, throwOnError).ConfigureAwait(false);
			}

			if (newNode.Exists)
			{
				if (throwOnError) throw new InvalidOperationException(string.Format("The destination directory '{0}' already exists. Remove it first.", newPath));
				return null;
			}

			var parentNode = await FindAsync(trans, newPath.Substring(0, newPath.Count - 1)).ConfigureAwait(false);
			if (!parentNode.Exists)
			{
				if (throwOnError) throw new InvalidOperationException(string.Format("The parent of the destination directory '{0}' does not exist. Create it first.", newPath));
				return null;
			}

			trans.Set(GetSubDirKey(parentNode.Subspace, newPath.Get<string>(-1)), this.NodeSubspace.UnpackSingle<Slice>(oldNode.Subspace.Key));
			await RemoveFromParent(trans, oldPath).ConfigureAwait(false);

			return ContentsOfNode(oldNode.Subspace, newPath, oldNode.Layer);
		}
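A hedged sketch of how this internal method is usually reached from application code (MoveAsync is assumed to be the public wrapper and db.ReadWriteAsync the transactional helper; exact overloads may differ):

			// await db.ReadWriteAsync(tr => directoryLayer.MoveAsync(tr, new[] { "app", "old" }, new[] { "app", "new" }), ct);
			// The StartsWith check above rejects moving a directory under one of its own subdirectories.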
Example #4
 public byte GetBufferId()
 {
     return _key.Get<byte>(1);
 }
		/// <summary>Creates a new directory</summary>
		public static async Task CreateDirectory(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct)
		{
			if (log == null) log = Console.Out;

			string layer = extras.Count > 0 ? extras.Get<string>(0) : null;

			log.WriteLine("# Creating directory {0} with layer '{1}'", String.Join("/", path), layer);

			var folder = await db.Directory.TryOpenAsync(path, cancellationToken: ct);
			if (folder != null)
			{
				log.WriteLine("- Directory {0} already exists!", string.Join("/", path));
				return;
			}

			folder = await db.Directory.TryCreateAsync(path, Slice.FromString(layer), cancellationToken: ct);
			log.WriteLine("- Created under {0} [{1}]", FdbKey.Dump(folder.Key), folder.Key.ToHexaString(' '));

			// look if there is already stuff under there
			var stuff = await db.ReadAsync((tr) => tr.GetRange(folder.ToRange()).FirstOrDefaultAsync(), cancellationToken: ct);
			if (stuff.Key.IsPresent)
			{
				log.WriteLine("CAUTION: There is already some data under {0} !");
				log.WriteLine("  {0} = {1}", FdbKey.Dump(stuff.Key), stuff.Value.ToAsciiOrHexaString());
			}
		}
		/// <summary>Shows the first few keys of a directory</summary>
		public static async Task Show(string[] path, IFdbTuple extras, bool reverse, IFdbDatabase db, TextWriter log, CancellationToken ct)
		{
			int count = 20;
			if (extras.Count > 0)
			{
				int x = extras.Get<int>(0);
				if (x > 0) count = x;
			}

			// look if there is something under there
			var folder = await db.Directory.TryOpenAsync(path, cancellationToken: ct);
			if (folder != null)
			{
				log.WriteLine("# Content of {0} [{1}]", FdbKey.Dump(folder.Key), folder.Key.ToHexaString(' '));
				var keys = await db.QueryAsync((tr) =>
					{
						var query = tr.GetRange(folder.ToRange());
						return reverse
							? query.Reverse().Take(count)
							: query.Take(count + 1);
					}, cancellationToken: ct);
				if (keys.Count > 0)
				{
					if (reverse) keys.Reverse();
					foreach (var key in keys.Take(count))
					{
						log.WriteLine("...{0} = {1}", FdbKey.Dump(folder.Extract(key.Key)), key.Value.ToAsciiOrHexaString());
					}
					if (!reverse && keys.Count == count + 1)
					{
						log.WriteLine("... more");
					}
				}
				else
				{
					log.WriteLine("  no content found");
				}
			}
		}
		/// <summary>Finds a node subspace, given its path, by walking the tree from the root.</summary>
		/// <returns>Node if it was found, or null</returns>
		private async Task<Node> FindAsync(IFdbReadOnlyTransaction tr, IFdbTuple path)
		{
			Contract.Requires(tr != null && path != null);

			// look for the node by traversing from the root down. Stop when crossing a partition...

			var n = this.RootNode;
			int i = 0;
			Slice layer = Slice.Nil;
			while (i < path.Count)
			{
				if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Looking for child {0} under node {1}...", path.Get<string>(i), n.Key);
				n = NodeWithPrefix(await tr.GetAsync(GetSubDirKey(n, path.Get<string>(i))).ConfigureAwait(false));
				if (n == null)
				{
					return new Node(null, path.Substring(0, i + 1), path, Slice.Empty);
				}

				if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Reading Layer value for subfolder {0} found at {1}", path, n.Key);
				layer = await tr.GetAsync(n.Pack(LayerSuffix)).ConfigureAwait(false);
				if (layer == FdbDirectoryPartition.LayerId)
				{ // stop when reaching a partition
					return new Node(n, path.Substring(0, i + 1), path, FdbDirectoryPartition.LayerId);
				}

				++i;
			}
			return new Node(n, path, path, layer);
		}
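To make the traversal concrete, a hedged walk-through for the path ("app", "logs", "2024") (the key shape is assumed from GetSubDirKey; only the control flow comes from the code above):

			// step 0: read GetSubDirKey(RootNode, "app")   -> prefix of the "app" node
			// step 1: read GetSubDirKey(appNode, "logs")   -> prefix of the "app/logs" node
			// step 2: read GetSubDirKey(logsNode, "2024")  -> prefix of the leaf node
			// Any missing read returns a partial Node (Exists == false); a node whose Layer value
			// equals FdbDirectoryPartition.LayerId stops the walk at that partition boundary.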
 public R Get<R>(int index)
 {
     return m_items.Get<R>(index);
 }
 /// <summary>Execute a lambda Action with the content of this tuple</summary>
 /// <param name="tuple">Tuple of size 5</param>
 /// <param name="lambda">Action that will be passed the content of this tuple as parameters</param>
 /// <exception cref="InvalidOperationException">If <paramref name="tuple"/> does not have the expected size</exception>
 public static void With<T1, T2, T3, T4, T5>([NotNull] this IFdbTuple tuple, [NotNull] Action<T1, T2, T3, T4, T5> lambda)
 {
     OfSize(tuple, 5);
     lambda(tuple.Get<T1>(0), tuple.Get<T2>(1), tuple.Get<T3>(2), tuple.Get<T4>(3), tuple.Get<T5>(4));
 }
 /// <summary>Execute a lambda Action with the content of this tuple</summary>
 /// <param name="tuple">Tuple of size 3</param>
 /// <param name="lambda">Action that will be passed the content of this tuple as parameters</param>
 /// <exception cref="InvalidOperationException">If <paramref name="tuple"/> does not have the expected size</exception>
 public static void With<T1, T2, T3>([NotNull] this IFdbTuple tuple, [NotNull] Action<T1, T2, T3> lambda)
 {
     OfSize(tuple, 3);
     lambda(tuple.Get<T1>(0), tuple.Get<T2>(1), tuple.Get<T3>(2));
 }
 /// <summary>Execute a lambda Action with the content of this tuple</summary>
 /// <param name="tuple">Tuple of size 1</param>
 /// <param name="lambda">Action that will be passed the content of this tuple as parameters</param>
 /// <exception cref="InvalidOperationException">If <paramref name="tuple"/> does not have the expected size</exception>
 public static void With<T1>([NotNull] this IFdbTuple tuple, [NotNull] Action<T1> lambda)
 {
     OfSize(tuple, 1);
     lambda(tuple.Get<T1>(0));
 }
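A short usage sketch of these With overloads (FdbTuple.Create is assumed to be the tuple factory; the values are illustrative):

 // Unpack a 3-element tuple directly into lambda parameters:
 FdbTuple.Create("alice", 42, true).With((string name, int age, bool active) =>
 {
     Console.WriteLine("{0} is {1} (active: {2})", name, age, active);
 });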
 /// <summary>Returns a typed version of a tuple of size 1</summary>
 /// <typeparam name="T1">Expected type of the single element</typeparam>
 /// <param name="tuple">Tuple that must be of size 1</param>
 /// <returns>Equivalent tuple, with its element converted to the specified type</returns>
 public static FdbTuple<T1> As<T1>([NotNull] this IFdbTuple tuple)
 {
     tuple.OfSize(1);
     return new FdbTuple<T1>(tuple.Get<T1>(0));
 }
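Usage sketch for As<T1> (the unpacking call below is illustrative; only the size-1 requirement comes from the code above):

 // var typed = subspace.Unpack(key).As<int>(); // throws if the decoded tuple does not have exactly 1 element
 // int id = typed.Item1;                       // strongly-typed access instead of Get<int>(0)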
Example #13
 public string GetStreamName()
 {
     return _key.Get<string>(1);
 }
Example #14
 public byte GetCellarId()
 {
     return _key.Get<byte>(1);
 }
		/// <summary>Remove an existing node from its parent</summary>
		/// <returns>True if the parent node was found, otherwise false</returns>
		private async Task<bool> RemoveFromParent(IFdbTransaction tr, IFdbTuple path)
		{
			Contract.Requires(tr != null && path != null);

			var parent = await FindAsync(tr, path.Substring(0, path.Count - 1)).ConfigureAwait(false);
			if (parent.Exists)
			{
				tr.Clear(GetSubDirKey(parent.Subspace, path.Get<string>(-1)));
				return true;
			}
			return false;
		}
 /// <summary>Execute a lambda Action with the content of this tuple</summary>
 /// <param name="tuple">Tuple of size 8</param>
 /// <param name="lambda">Action that will be passed the content of this tuple as parameters</param>
 /// <exception cref="InvalidOperationException">If <paramref name="tuple"/> does not have the expected size</exception>
 public static void With<T1, T2, T3, T4, T5, T6, T7, T8>([NotNull] this IFdbTuple tuple, [NotNull] Action<T1, T2, T3, T4, T5, T6, T7, T8> lambda)
 {
     OfSize(tuple, 8);
     lambda(tuple.Get<T1>(0), tuple.Get<T2>(1), tuple.Get<T3>(2), tuple.Get<T4>(3), tuple.Get<T5>(4), tuple.Get<T6>(5), tuple.Get<T7>(6), tuple.Get<T8>(7));
 }
 /// <summary>Execute a lambda Function with the content of this tuple</summary>
 /// <param name="tuple">Tuple of size 1</param>
 /// <param name="lambda">Function that will be passed the content of this tuple as parameters</param>
 /// <returns>Result of calling <paramref name="lambda"/> with the items of this tuple</returns>
 /// <exception cref="InvalidOperationException">If <paramref name="tuple"/> does not have the expected size</exception>
 public static TResult With<T1, TResult>([NotNull] this IFdbTuple tuple, [NotNull] Func<T1, TResult> lambda)
 {
     OfSize(tuple, 1);
     return lambda(tuple.Get<T1>(0));
 }
		/// <summary>Convert a tuple representing a path into a string array</summary>
		/// <param name="path">Tuple that should only contain strings</param>
		/// <returns>Array of strings</returns>
		public static string[] ParsePath(IFdbTuple path)
		{
			if (path == null) throw new ArgumentNullException("path");
			var tmp = new string[path.Count];
			for (int i = 0; i < tmp.Length; i++)
			{
				tmp[i] = path.Get<string>(i);
			}
			return tmp;
		}
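A quick usage sketch of ParsePath (FdbTuple.Create is assumed to be the tuple factory; values are illustrative):

			// string[] segments = ParsePath(FdbTuple.Create("app", "logs", "2024"));
			// segments -> { "app", "logs", "2024" }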
 /// <summary>Execute a lambda Function with the content of this tuple</summary>
 /// <param name="tuple">Tuple of size 4</param>
 /// <param name="lambda">Function that will be passed the content of this tuple as parameters</param>
 /// <returns>Result of calling <paramref name="lambda"/> with the items of this tuple</returns>
 /// <exception cref="InvalidOperationException">If <paramref name="tuple"/> does not have the expected size</exception>
 public static TResult With<T1, T2, T3, T4, TResult>([NotNull] this IFdbTuple tuple, [NotNull] Func<T1, T2, T3, T4, TResult> lambda)
 {
     OfSize(tuple, 4);
     return lambda(tuple.Get<T1>(0), tuple.Get<T2>(1), tuple.Get<T3>(2), tuple.Get<T4>(3));
 }
		/// <summary>Remove an existing node from its parent</summary>
		/// <returns>True if the parent node was found, otherwise false</returns>
		private async Task<bool> RemoveFromParent(IFdbTransaction tr, IFdbTuple path)
		{
			Contract.Requires(tr != null && path != null);

			var parent = await FindAsync(tr, path.Substring(0, path.Count - 1)).ConfigureAwait(false);
			if (parent.Exists)
			{
				if (FdbDirectoryLayer.AnnotateTransactions) tr.Annotate("Removing path {0} from its parent folder at {1}", path, parent.Subspace.Key);
				tr.Clear(GetSubDirKey(parent.Subspace, path.Get<string>(-1)));
				return true;
			}
			return false;
		}
 /// <summary>Execute a lambda Function with the content of this tuple</summary>
 /// <param name="tuple">Tuple of size 8</param>
 /// <param name="lambda">Function that will be passed the content of this tuple as parameters</param>
 /// <returns>Result of calling <paramref name="lambda"/> with the items of this tuple</returns>
 /// <exception cref="InvalidOperationException">If <paramref name="tuple"/> does not have the expected size</exception>
 public static TResult With<T1, T2, T3, T4, T5, T6, T7, T8, TResult>([NotNull] this IFdbTuple tuple, [NotNull] Func<T1, T2, T3, T4, T5, T6, T7, T8, TResult> lambda)
 {
     OfSize(tuple, 8);
     return lambda(tuple.Get<T1>(0), tuple.Get<T2>(1), tuple.Get<T3>(2), tuple.Get<T4>(3), tuple.Get<T5>(4), tuple.Get<T6>(5), tuple.Get<T7>(6), tuple.Get<T8>(7));
 }
		/// <summary>Remove a directory and all its data</summary>
		public static async Task RemoveDirectory(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct)
		{
			if (log == null) log = Console.Out;

			string layer = extras.Count > 0 ? extras.Get<string>(0) : null;

			var folder = await db.Directory.TryOpenAsync(path, cancellationToken: ct);
			if (folder == null)
			{
				log.WriteLine("# Directory {0} does not exist", string.Join("/", path));
				return;
			}

			// are there any subdirectories?
			var subDirs = await folder.TryListAsync(db, ct);
			if (subDirs != null && subDirs.Count > 0)
			{
				//TODO: "-r" flag ?
				log.WriteLine("# Cannot remove {0} because it still contains {1} sub-directorie(s)", string.Join("/", path), subDirs.Count);
			}

			//TODO: ask for confirmation?

			log.WriteLine("# Deleting directory {0} ...", String.Join("/", path));
			await folder.RemoveAsync(db, ct);
			log.WriteLine("# Gone!");
		}
		internal static IFdbTuple VerifyPath(IFdbTuple path, string argName = null)
		{
			Contract.Requires(path != null);

			// The path should not contain any null strings
			if (path == null) throw new ArgumentNullException(argName ?? "path");
			int count = path.Count;
			for (int i = 0; i < count; i++)
			{
				if (path.Get<string>(i) == null)
				{
					throw new ArgumentException("The path of a directory cannot contain null elements", argName ?? "path");
				}
			}
			return path;
		}
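A minimal sketch of what VerifyPath accepts and rejects (FdbTuple.Create is assumed to be the tuple factory):

			// VerifyPath(FdbTuple.Create("app", "logs"));          // ok, the path is returned unchanged
			// VerifyPath(FdbTuple.Create("app", default(string)));  // throws ArgumentException: null path element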
		public static async Task Sampling(string[] path, IFdbTuple extras, IFdbDatabase db, TextWriter log, CancellationToken ct)
		{
			double ratio = 0.1d;
			bool auto = true;
			if (extras.Count > 0)
			{
				double x = extras.Get<double>(0);
				if (x > 0 && x <= 1) ratio = x;
				auto = false;
			}

			var folder = await TryOpenCurrentDirectoryAsync(path, db, ct);
			FdbKeyRange span;
			if (folder is FdbDirectorySubspace)
			{
				span = FdbKeyRange.StartsWith((folder as FdbDirectorySubspace).Copy());
				log.WriteLine("Reading list of shards for /{0} under {1} ...", String.Join("/", path), FdbKey.Dump(span.Begin));
			}
			else
			{
				log.WriteLine("Reading list of shards for the whole cluster ...");
				span = FdbKeyRange.All;
			}

			// dump keyServers
			var ranges = await Fdb.System.GetChunksAsync(db, span, ct);
			log.WriteLine("> Found {0:N0} shard(s)", ranges.Count);

			// take a sample
			var samples = new List<FdbKeyRange>();

			if (ranges.Count <= 32)
			{ // small enough to scan it all
				samples.AddRange(ranges);
				log.WriteLine("Sampling all {0:N0} shards ...", samples.Count);
			}
			else
			{ // need to take a random subset
				var rnd = new Random();
				int sz = Math.Max((int)Math.Ceiling(ratio * ranges.Count), 1);
				if (auto)
				{
					if (sz > 100) sz = 100; //SAFETY
					if (sz < 32) sz = Math.Max(sz, Math.Min(32, ranges.Count));
				}

				var population = new List<FdbKeyRange>(ranges);
				for (int i = 0; i < sz; i++)
				{
					int p = rnd.Next(population.Count);
					samples.Add(population[p]);
					population.RemoveAt(p);
				}
				log.WriteLine("Sampling " + samples.Count + " out of " + ranges.Count + " shards (" + (100.0 * samples.Count / ranges.Count).ToString("N1") + "%) ...");
			}

			log.WriteLine();
			const string FORMAT_STRING = "{0,9} ║{1,10}{6,6} {2,-29} ║{3,10}{7,7} {4,-37} ║{5,10}";
			const string SCALE_KEY = "....--------========########M";
			const string SCALE_VAL = "....--------========########@@@@@@@@M";
			log.WriteLine(FORMAT_STRING, "Count", "Keys", SCALE_KEY, "Values", SCALE_VAL, "Total", "med.", "med.");

			var rangeOptions = new FdbRangeOptions { Mode = FdbStreamingMode.WantAll };

			samples = samples.OrderBy(x => x.Begin).ToList();

			long globalSize = 0;
			long globalCount = 0;
			int workers = 8; // Math.Max(4, Environment.ProcessorCount);

			var sw = Stopwatch.StartNew();
			var tasks = new List<Task>();
			int n = samples.Count;
			while (samples.Count > 0)
			{
				while (tasks.Count < workers && samples.Count > 0)
				{
					var range = samples[0];
					samples.RemoveAt(0);
					tasks.Add(Task.Run(async () =>
					{
						var kk = new RobustHistogram(RobustHistogram.TimeScale.Ticks);
						var vv = new RobustHistogram(RobustHistogram.TimeScale.Ticks);

						#region Method 1: get_range everything...

						using (var tr = db.BeginTransaction(ct))
						{
							long keySize = 0;
							long valueSize = 0;
							long count = 0;

							int iter = 0;
							var beginSelector = FdbKeySelector.FirstGreaterOrEqual(range.Begin);
							var endSelector = FdbKeySelector.FirstGreaterOrEqual(range.End);
							while (true)
							{
								FdbRangeChunk data = default(FdbRangeChunk);
								FdbException error = null;
								try
								{
									data = await tr.Snapshot.GetRangeAsync(
										beginSelector,
										endSelector,
										rangeOptions,
										iter
									).ConfigureAwait(false);
								}
								catch (FdbException e)
								{
									error = e;
								}

								if (error != null)
								{
									await tr.OnErrorAsync(error.Code).ConfigureAwait(false);
									continue;
								}

								if (data.Count == 0) break;

								count += data.Count;
								foreach (var kvp in data.Chunk)
								{
									keySize += kvp.Key.Count;
									valueSize += kvp.Value.Count;

									kk.Add(TimeSpan.FromTicks(kvp.Key.Count));
									vv.Add(TimeSpan.FromTicks(kvp.Value.Count));
								}

								if (!data.HasMore) break;

								beginSelector = FdbKeySelector.FirstGreaterThan(data.Last.Key);
								++iter;
							}

							long totalSize = keySize + valueSize;
							Interlocked.Add(ref globalSize, totalSize);
							Interlocked.Add(ref globalCount, count);

							lock (log)
							{
								log.WriteLine(FORMAT_STRING, count.ToString("N0"), FormatSize(keySize), kk.GetDistribution(begin: 1, end: 12000, fold: 2), FormatSize(valueSize), vv.GetDistribution(begin: 1, end: 120000, fold: 2), FormatSize(totalSize), FormatSize((int)Math.Ceiling(kk.Median)), FormatSize((int)Math.Ceiling(vv.Median)));
							}
						}
						#endregion

						#region Method 2: estimate the count using key selectors...

						//long counter = await Fdb.System.EstimateCountAsync(db, range, ct);
						//Console.WriteLine("COUNT = " + counter.ToString("N0"));

						#endregion
					}, ct));
				}

				var done = await Task.WhenAny(tasks);
				tasks.Remove(done);
			}

			await Task.WhenAll(tasks);
			sw.Stop();

			log.WriteLine();
			if (n != ranges.Count)
			{
				log.WriteLine("Sampled " + FormatSize(globalSize) + " (" + globalSize.ToString("N0") + " bytes) and " + globalCount.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N1") + " sec");
				log.WriteLine("> Estimated total size is " + FormatSize(globalSize * ranges.Count / n));
			}
			else
			{
				log.WriteLine("Found " + FormatSize(globalSize) + " (" + globalSize.ToString("N0") + " bytes) and " + globalCount.ToString("N0") + " keys in " + sw.Elapsed.TotalSeconds.ToString("N1") + " sec");
				// compare to the whole cluster
				ranges = await Fdb.System.GetChunksAsync(db, FdbKey.MinValue, FdbKey.MaxValue, ct);
				log.WriteLine("> This directory contains ~{0:N2}% of all data", (100.0 * n / ranges.Count));
			}
			log.WriteLine();
		}
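A hedged walk-through of the sample-size arithmetic above, assuming 1,000 shards and the default ratio of 0.1:

			// sz = max(ceil(0.1 * 1000), 1) = 100   -> not above the SAFETY cap of 100
			// sz >= 32                              -> the minimum clamp does not apply
			// => 100 of the 1,000 shards (10.0%) are scanned, by at most 8 concurrent workers.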
		internal async Task<FdbDirectorySubspace> CreateOrOpenInternalAsync(IFdbReadOnlyTransaction readTrans, IFdbTransaction trans, IFdbTuple path, Slice layer, Slice prefix, bool allowCreate, bool allowOpen, bool throwOnError)
		{
			Contract.Requires(readTrans != null || trans != null, "Need at least one transaction");
			Contract.Requires(path != null, "Path must be specified");
			Contract.Requires(readTrans == null || trans == null || object.ReferenceEquals(readTrans, trans), "The write transaction should be the same as the read transaction");

			if (path.Count == 0)
			{ // Root directory contains node metadata and so may not be opened.
				throw new InvalidOperationException("The root directory may not be opened.");
			}

			// to open an existing directory, we only need the read transaction
			// if none was specified, we can use the writeable transaction
			if (readTrans == null) readTrans = trans;

			await CheckReadVersionAsync(readTrans).ConfigureAwait(false);

			if (prefix.HasValue && this.Path.Count > 0)
				throw new InvalidOperationException("Cannot specify a prefix in a partition.");

			var existingNode = await FindAsync(readTrans, path).ConfigureAwait(false);

			if (existingNode.Exists)
			{
				if (existingNode.IsInPartition(false))
				{
					var subpath = existingNode.PartitionSubPath;
					var dl = GetPartitionForNode(existingNode).DirectoryLayer;
					return await dl.CreateOrOpenInternalAsync(readTrans, trans, subpath, layer, prefix, allowCreate, allowOpen, throwOnError).ConfigureAwait(false);
				}

				if (!allowOpen)
				{
					if (throwOnError) throw new InvalidOperationException(string.Format("The directory {0} already exists.", path));
					return null;
				}

				if (layer.IsPresent && layer != existingNode.Layer)
				{
					throw new InvalidOperationException(String.Format("The directory {0} was created with incompatible layer {1} instead of expected {2}.", path, layer.ToAsciiOrHexaString(), existingNode.Layer.ToAsciiOrHexaString()));
				}
				return ContentsOfNode(existingNode.Subspace, path, existingNode.Layer);
			}

			if (!allowCreate)
			{
				if (throwOnError) throw new InvalidOperationException(string.Format("The directory {0} does not exist.", path));
				return null;
			}

			// from there, we actually do need a writeable transaction
			if (trans == null) throw new InvalidOperationException("A writeable transaction is needed to create a new directory");

			await CheckWriteVersionAsync(trans).ConfigureAwait(false);

			if (prefix == null)
			{ // automatically allocate a new prefix inside the ContentSubspace
				long id = await this.Allocator.AllocateAsync(trans).ConfigureAwait(false);
				prefix = this.ContentSubspace.Pack(id);

				// ensure that there is no data already present under this prefix
				if (await trans.GetRange(FdbKeyRange.StartsWith(prefix)).AnyAsync().ConfigureAwait(false))
				{
					throw new InvalidOperationException(String.Format("The database has keys stored at the prefix chosen by the automatic prefix allocator: {0}", prefix.ToAsciiOrHexaString()));
				}

				// ensure that the prefix has not already been allocated
				if (!(await IsPrefixFree(trans.Snapshot, prefix).ConfigureAwait(false)))
				{
					throw new InvalidOperationException("The directory layer has manually allocated prefixes that conflict with the automatic prefix allocator.");
				}
			}
			else
			{
				// ensure that the prefix has not already been allocated
				if (!(await IsPrefixFree(trans, prefix).ConfigureAwait(false)))
				{
					throw new InvalidOperationException("The given prefix is already in use.");
				}
			}

			// we need to recursively create any missing parents
			FdbSubspace parentNode;
			if (path.Count > 1)
			{
				var parentSubspace = await CreateOrOpenInternalAsync(readTrans, trans, path.Substring(0, path.Count - 1), Slice.Nil, Slice.Nil, true, true, true).ConfigureAwait(false);
				parentNode = NodeWithPrefix(parentSubspace.Key);
			}
			else
			{
				parentNode = this.RootNode;
			}
			if (parentNode == null) throw new InvalidOperationException(string.Format("The parent directory of {0} doesn't exist.", path));

			// initialize the metadata for this new directory
			var node = NodeWithPrefix(prefix);
			trans.Set(GetSubDirKey(parentNode, path.Get<string>(-1)), prefix);
			SetLayer(trans, node, layer);

			return ContentsOfNode(node, path, layer);
		}
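For context, a hedged sketch of the public helpers that funnel into this method (TryOpenAsync and TryCreateAsync appear in the command helpers above; the path and layer values are illustrative):

			// var folder = await db.Directory.TryOpenAsync(new[] { "app", "logs" }, cancellationToken: ct);
			// if (folder == null)
			// {
			//     folder = await db.Directory.TryCreateAsync(new[] { "app", "logs" }, Slice.FromString("my-layer"), cancellationToken: ct);
			// }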
Example #27
 public long GetChunkStartPos()
 {
     return _key.Get<long>(1);
 }