// Copy-and-extend constructor: clones the given set's elements and adds one more.
internal UnorderedSet(UnorderedSet<T> set, T element)
{
    elements = new HashSet<T>(set.elements) { element };
}
public static UnorderedSet<T> operator |(UnorderedSet<T> left, UnorderedSet<T> right)
{
    var result = new UnorderedSet<T>();
    result.UnionExclusivelyWith(left);
    result.UnionExclusivelyWith(right);
    return result;
}
// Adds every element of `other` to this set, throwing if any element is already
// present; the `|` operator above therefore requires disjoint operands.
public void UnionExclusivelyWith(UnorderedSet<T> other)
{
    foreach (T item in other)
    {
        if (this.Contains(item))
            throw new InvalidOperationException($"Element '{item}' is present in both sets.");
        this.Add(item);
    }
}
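// Usage sketch for the exclusive union above. The collection-initializer syntax
// is an assumption (it requires UnorderedSet<T> to implement IEnumerable<T> and
// expose Add, which the surrounding code implies but does not show):
//
//   var a = new UnorderedSet<int> { 1, 2 };
//   var b = new UnorderedSet<int> { 3 };
//   var c = a | b;   // { 1, 2, 3 }
//   var d = a | c;   // throws: 1 and 2 appear in both operands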
//============================================================
//------------------------------------------------------------
// Copies a single file, skipping it when an extension filter is supplied and
// the file's extension is not in it; a null filter copies everything, matching
// the null guard in CopyFolderTo below.
public static void CopyFileTo(this string filePath, string destFilePath, UnorderedSet<string> includedExtensions)
{
    filePath.IsExistingFilePath().Assert();
    destFilePath.IsExistingPath().Not().Assert();
    if (includedExtensions != null && includedExtensions.Contains(filePath.GetExtension()).Not())
        return;
    filePath.CopyFileTo(destFilePath);
}
// Builds a catalog mapping each custom-attribute type to the set of types that
// carry it, scanning every type in the kuma assemblies once.
static AttributedTypeCache()
{
    foreach (var type in Meta.kumaAsms.AllTypes())
    {
        foreach (var attr in type.GetCustomAttributes())
        {
            if (!catalog.TryGetValue(attr.GetType(), out UnorderedSet<TypeKey> types))
            {
                types = new UnorderedSet<TypeKey>(4);
                catalog.Add(attr.GetType(), types);
            }
            types.AddIfNone(type);
        }
    }
}
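// Hypothetical lookup against the catalog built above; the `catalog` field's
// declaration is not shown here, so Dictionary-style TryGetValue semantics and
// the attribute type used are illustrative assumptions:
//
//   if (catalog.TryGetValue(typeof(ObsoleteAttribute), out var hits))
//       foreach (TypeKey key in hits) { /* every type carrying the attribute */ }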
// Two sets are equal when each contains every element of the other
// (the same check HashSet<T>.SetEquals performs, spelled out here).
public bool Equals(UnorderedSet<T> other)
{
    if (other is null)
        return false;
    foreach (T element in elements)
    {
        if (!other.elements.Contains(element))
            return false;
    }
    foreach (T element in other.elements)
    {
        if (!elements.Contains(element))
            return false;
    }
    return true;
}
// Recursively copies a folder tree. Folders whose names appear in
// excludedFolderNames are skipped entirely; files are copied only when
// includedExtensions is null (no filter) or contains their extension.
public static void CopyFolderTo(this string folderPath, string destFolder, UnorderedSet<string> includedExtensions, UnorderedSet<string> excludedFolderNames)
{
    folderPath.IsExistingFolderPath().Assert();
    destFolder.IsExistingPath().Not().Assert();
    if ((excludedFolderNames?.Contains(folderPath.GetFolderName())).IsTrue())
        return;
    destFolder.MakeDir();
    foreach (string folder in folderPath.GetChildFolderPaths())
        folder.CopyFolderTo(destFolder.CombinePath(folder.GetFolderName()), includedExtensions, excludedFolderNames);
    foreach (string file in folderPath.GetChildFilePaths())
    {
        if (includedExtensions is null || includedExtensions.Contains(file.GetExtension()))
            file.CopyFileTo(destFolder.CombinePath(file.GetFileName()), includedExtensions);
    }
}
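// Usage sketch for the copy helpers above; the paths and set contents are
// illustrative assumptions, not taken from the original source:
//
//   var exts    = new UnorderedSet<string> { ".cs", ".csproj" };
//   var skipped = new UnorderedSet<string> { "bin", "obj" };
//   @"C:\src\MyProj".CopyFolderTo(@"C:\backup\MyProj", exts, skipped);
//   // Passing null for includedExtensions copies every file.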
// Computes gradients of the target tensors with respect to the source tensors
// by replaying the recorded tape in reverse (reverse-mode autodiff).
public Tensor[] ComputeGradient(long[] target_tensor_ids,
    long[] source_tensor_ids,
    UnorderedMap<long, TapeTensor> sources_that_are_targets,
    Tensor[] output_gradients)
{
    var result = new List<Tensor>(source_tensor_ids.Length);
    var sources_set = new UnorderedSet<long>(source_tensor_ids);
    var gradients_size = new UnorderedMap<long, long>();

    var state = PrepareBackprop(target_tensor_ids, tensor_tape_, op_tape_, sources_set, persistent_);
    var op_stack = InitialStack(state.op_tape, state.op_missing_tensor);
    var gradients = InitialGradients(target_tensor_ids, sources_that_are_targets, output_gradients, tensor_tape_, state.op_tape);

    while (!op_stack.empty())
    {
        var op = op_stack.Dequeue();
        if (!state.op_tape.find(op, out var trace))
            continue;

        // Console.WriteLine($"ComputeGradient: {state.op_tape[op].op_type}");
        state.op_tape.erase(op);

        var out_gradients = new List<Tensor>(trace.output_tensor_info.Length);
        var unneeded_gradients = new List<long>();
        for (int i = 0; i < trace.input_tensor_id.Length; i++)
        {
            var in_tensor_id = trace.input_tensor_id[i];
            // Inputs that are neither on the tape nor watched need no gradient.
            if (!tensor_tape_.find(in_tensor_id) && !sources_set.find(in_tensor_id))
                unneeded_gradients.Add(i);
        }

        bool any_gradient_nonzero = false;
        var zero_indices = new List<int>();
        for (int i = 0; i < trace.output_tensor_info.Length; ++i)
        {
            var id = trace.output_tensor_info[i].GetID();
            if (!gradients.find(id, out var grad_it))
            {
                if (FunctionsAcceptingNoneForIndicesMap().find(trace.op_type, out var func_name_it) &&
                    func_name_it.find(i))
                {
                    // This op accepts None for this output index; pass null through.
                    out_gradients.Add(null);
                }
                else
                {
                    // Otherwise remember the index so it can be filled with zeros below.
                    out_gradients.Add(null);
                    zero_indices.Add(i);
                }
            }
            else
            {
                any_gradient_nonzero = true;
                var new_gradients = grad_it.Count == 1
                    ? grad_it[0]
                    : gen_math_ops.add_n(grad_it.ToArray()); // vspace.AggregateGradients

                if (!sources_set.find(id))
                    gradients.Remove(id);
                else
                {
                    grad_it.Clear();
                    grad_it.Add(new_gradients);
                    // vspace.MarkAsResult(new_gradients);
                }
                out_gradients.Add(new_gradients);
            }
        }

        Tensor[] in_gradients;
        if (any_gradient_nonzero)
        {
            foreach (var i in zero_indices)
                out_gradients[i] = trace.output_tensor_info[i].ZerosLike();

            in_gradients = CallBackwardFunction(trace.backward_function, unneeded_gradients, out_gradients);

            if (in_gradients.Count() != trace.input_tensor_id.Count())
                throw new RuntimeError($"Recorded operation '{trace.op_type}' returned too few gradients. Expected {trace.input_tensor_id.Length} but received {in_gradients.Count()}");

            if (!persistent_)
            {
                // trace.backward_function_deleter(trace.backward_function);
            }
        }
        else
        {
            // No nonzero output gradients: every input gradient stays null.
            in_gradients = new Tensor[trace.input_tensor_id.Length];
        }

        for (int i = 0; i < in_gradients.Length; ++i)
        {
            var id = trace.input_tensor_id[i];
            if (in_gradients[i] != null)
            {
                var unaggregated_grads = gradients[id];
                unaggregated_grads.Add(in_gradients[i]);
                if (unaggregated_grads.Count > kMinAggregateCount)
                {
                    if (!gradients_size.find(id, out var size))
                    {
                        size = (long)unaggregated_grads[0].size;
                        gradients_size.emplace(id, size);
                    }
                    if (unaggregated_grads.Count * size * 4 > kMinAggregateBytes)
                        throw new NotImplementedException("");
                }
            }

            if (!state.tensor_usage_counts.find(id))
                continue;
            state.tensor_usage_counts[id]--;
            if (state.tensor_usage_counts[id] > 0)
                continue;

            if (!tensor_tape_.find(id, out var tape_it))
            {
                if (gradients.find(id, out var grad_it))
                {
                    // foreach (var g in grad_it)
                    //     DeleteGradient(g);
                    gradients.erase(id);
                }
                continue;
            }

            var op_id = tape_it;
            if (op_id == -1)
                continue;

            // Once every tensor an op is waiting on has been consumed,
            // the op becomes ready and is pushed onto the stack.
            if (state.op_missing_tensor.find(op_id, out var missing_it))
            {
                state.op_missing_tensor[op_id]--;
                if (state.op_missing_tensor[op_id] == 0)
                    op_stack.Enqueue(op_id);
            }
        }
    }

    if (state.op_tape.Count > 0)
        throw new RuntimeError("Invalid tape state.");

    var used_gradient_ids = new List<long>(source_tensor_ids.Length);
    foreach (var id in source_tensor_ids)
    {
        if (!gradients.find(id, out var grad_it))
            result.Add(null);
        else
        {
            if (grad_it.Count > 1)
            {
                // Aggregate any remaining unsummed contributions for this source.
                var grad = gen_math_ops.add_n(grad_it.ToArray());
                grad_it.Clear();
                grad_it.Add(grad);
            }
            result.Add(grad_it[0]);
            used_gradient_ids.Add(id);
        }
    }

    /*foreach (var grad_pair in gradients)
    {
        if (!used_gradient_ids.Contains(grad_pair.Key))
        {
            foreach (var g in grad_pair.Value)
            {
                vspace.DeleteGradient(g);
            }
        }
    }*/

    return result.ToArray();
}
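// Minimal sketch of the accumulation pattern ComputeGradient uses above: each
// tensor id collects a list of partial gradients, and the partials are summed
// the next time the tensor is consumed (the add_n call). Doubles and BCL
// collections stand in for Tensor/UnorderedMap; this is illustrative only and
// assumes System.Collections.Generic is in scope.
internal static class ReverseModeAccumulationSketch
{
    // grads[id] holds the not-yet-aggregated contributions for tensor id.
    private static readonly Dictionary<long, List<double>> grads =
        new Dictionary<long, List<double>>();

    internal static void Accumulate(long id, double partial)
    {
        if (!grads.TryGetValue(id, out var list))
            grads[id] = list = new List<double>();
        list.Add(partial);
    }

    internal static double Aggregate(long id)
    {
        var list = grads[id];
        // Mirrors the fast path (single contribution) vs. add_n aggregation.
        if (list.Count == 1)
            return list[0];
        double sum = 0;
        foreach (var g in list)
            sum += g;
        list.Clear();
        list.Add(sum);
        return sum;
    }
}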
// Walks backward from the target tensors to determine which ops participate in
// the gradient computation and how many uses of each tensor remain outstanding.
public BackpropInitialState PrepareBackprop(Tensor[] target, TensorTape tensor_tape, OpTape op_tape, UnorderedSet<Tensor> sources_set, bool persistent_tape)
{
    BackpropInitialState result = new BackpropInitialState();
    var tensor_stack = new Queue<Tensor>(target);
    while (tensor_stack.Count > 0)
    {
        var tensor_id = tensor_stack.Dequeue();
        if (!tensor_tape.find(tensor_id, out var op_id))
            continue;
        if (op_id == -1 ||
            !op_tape.find(op_id, out var op_it) ||
            result.op_tape.find(op_id, out var result_op_it))
            continue;

        result.op_tape.emplace(op_id, op_it);
        foreach (var it in op_it.input_tensor_id)
        {
            if (result.tensor_usage_counts.find(it))
                result.tensor_usage_counts[it]++;
            else
            {
                result.tensor_usage_counts[it] = 1;
                if (tensor_tape.find(it))
                    tensor_stack.Enqueue(it);
            }
        }

        if (!persistent_tape)
            op_tape.Remove(op_id);
    }

    // An op is "missing" one entry per output tensor that is still needed.
    foreach (var pair in result.tensor_usage_counts)
    {
        if (tensor_tape.find(pair.Key, out var it) && it != -1)
            result.op_missing_tensor[it] += 1;
    }

    if (!persistent_tape)
    {
        // Call destructors for all unneeded gradient functions and
        // clear the op_tape. We can clear the tape because ownership of
        // backward functions that will be used for gradient computation
        // has been transferred to `result`.
        /*for (const auto& op_pair : *op_tape)
        {
            op_pair.second.backward_function_deleter(
                op_pair.second.backward_function);
        }*/
        op_tape.Clear();
    }

    return result;
}
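// Worked example of the bookkeeping above, for a two-op chain y = f(x), z = g(y)
// with target z (the ids and op names are illustrative):
//   - result.op_tape holds g and then f after the backward walk from z.
//   - tensor_usage_counts: y -> 1 (read by g), x -> 1 (read by f).
//   - op_missing_tensor: f -> 1, because f's backward function cannot run until
//     the gradient flowing into its output y has been produced by g.
// ComputeGradient then drains ops as their missing-tensor counts reach zero.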