public void Initialize()
{
    this.transferObjects = new();
    this.resolver = new DependencyResolver();
    this.resolver.Bind<ITypeMapping>().ToSingleton<TypeMapping>();
    this.resolver.Bind<List<ITransferObject>>().To(this.transferObjects);
    this.writer = this.resolver.Create<ModelWriter>();
}
public writing_model_synchronously_by_content_type()
{
    theWriter = new ModelWriter(new IMessageSerializer[]
    {
        new FakeWriter("blue"),
        new FakeWriter("red"),
        new FakeWriter("green"),
    });
}
public static void SaveWithModel(string path, CartesianModel model, ProfilerRecord[] profilerRecord)
{
    var xdoc = new XDocument();
    xdoc.Add(new XElement("ProfilingResults",
        ModelWriter.ToXElement(model),
        new XElement("Records", profilerRecord.Select(ToXElement))));
    xdoc.Save(path);
}
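// A minimal usage sketch for SaveWithModel above; the output path, the
// parameterless CartesianModel constructor, and the empty record array are
// illustrative assumptions, not taken from the source.
var model = new CartesianModel();
var records = new ProfilerRecord[0];
SaveWithModel("ProfilingResults.xml", model, records);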
private void applyStaticPublishingRules(Type messageType, string[] supported, List<MessageRoute> list, ModelWriter modelWriter)
{
    foreach (var channel in _subscribers.AllKnown().Where(x => x.ShouldSendMessage(messageType)))
    {
        // Prefer the first supported content type that is not the JSON default
        var contentType = supported.FirstOrDefault(x => x != "application/json") ?? "application/json";
        if (contentType.IsNotEmpty())
        {
            list.Add(new MessageRoute(messageType, modelWriter, channel, contentType));
        }
    }
}
void OnDestroy()
{
    // Persist the trained model before tearing down native resources
    ModelWriter.Save("Assets/3DBall.nn", m_SirenBuilder.BuildMLAgentsModel(256, 3, 8, 2, 6));
    m_input.Dispose();
    m_target.Dispose();
    m_lr.Dispose();
    foreach (var param in m_parameters)
    {
        param.Value.Dispose();
    }
    m_worker.Dispose();
    DisposePlot();
}
public static void SaveToXml(string path, OneBlockModelSettings model)
{
    var xdoc = new XDocument();
    var xelem = ToXElement(model, "oneblock");
    xelem.Add(
        new XElement("Conductivity", model.Conductivity),
        new XElement("AnomalySizeX", model.AnomalySizeX),
        new XElement("AnomalySizeY", model.AnomalySizeY),
        new XElement("AnomalySizeZ", model.AnomalySizeZ),
        new XElement("AnomalyStartDepth", model.AnomalyStartDepth));
    ModelWriter.SaveBackground(xelem, model.Section1D);
    xdoc.Add(xelem);
    xdoc.Save(path);
}
NNModel LoadOnnxModel(byte[] rawModel)
{
    // Convert the raw ONNX bytes into Barracuda's internal model format
    var converter = new ONNXModelConverter(true);
    var onnxModel = converter.Convert(rawModel);

    // Serialize the converted model into an NNModelData asset
    NNModelData assetData = ScriptableObject.CreateInstance<NNModelData>();
    using (var memoryStream = new MemoryStream())
    using (var writer = new BinaryWriter(memoryStream))
    {
        ModelWriter.Save(writer, onnxModel);
        assetData.Value = memoryStream.ToArray();
    }
    assetData.name = "Data";
    assetData.hideFlags = HideFlags.HideInHierarchy;

    var asset = ScriptableObject.CreateInstance<NNModel>();
    asset.modelData = assetData;
    return asset;
}
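// A usage sketch for LoadOnnxModel above; the file path is an illustrative
// assumption, not taken from the source.
var rawModel = File.ReadAllBytes("Assets/Models/3DBall.onnx");
NNModel asset = LoadOnnxModel(rawModel);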
private async Task applyDynamicSubscriptions(Type messageType, ModelWriter modelWriter, List<MessageRoute> list)
{
    var subscriptions = await _subscriptions.GetSubscribersFor(messageType);
    if (subscriptions.Any())
    {
        var published = new PublishedMessage(messageType, modelWriter, _channels);
        foreach (var subscription in subscriptions)
        {
            if (MessageRoute.TryToRoute(published, subscription, out MessageRoute route, out PublisherSubscriberMismatch mismatch))
            {
                route.Writer = modelWriter[route.ContentType];
                route.Channel = _channels.GetOrBuildChannel(route.Destination);
                list.Add(route);
            }
            else
            {
                _logger.SubscriptionMismatch(mismatch);
            }
        }
    }
}
// Encode a CRF model from the training corpus
public bool Learn(EncoderOptions args)
{
    // Validate options
    if (args.MinDifference <= 0.0)
    {
        return false;
    }
    if (args.CostFactor < 0.0)
    {
        return false;
    }
    if (args.ThreadsNum <= 0)
    {
        return false;
    }

    var modelWriter = new ModelWriter(args.ThreadsNum, args.CostFactor, args.HugeLexMemLoad, args.RetrainModelFileName);
    if (modelWriter.Open(args.TemplateFileName, args.TrainingCorpusFileName) == false)
    {
        return false;
    }

    // Load training data, generate lexical features, and drop rare features
    var xList = modelWriter.ReadAllRecords();
    modelWriter.Shrink(xList, args.MinFeatureFreq);

    if (!modelWriter.SaveModelMetaData(args.ModelFileName))
    {
        return false;
    }
    if (!modelWriter.BuildFeatureSetIntoIndex(args.ModelFileName, args.SlotUsageRateThreshold, args.DebugLevel))
    {
        return false;
    }
    if (xList.Length == 0)
    {
        return false;
    }

    // L1 regularization uses orthant-wise optimization
    var orthant = args.RegType == REG_TYPE.L1;

    // A false return from runCRF signals warnings, not a fatal failure
    runCRF(xList, modelWriter, orthant, args);

    modelWriter.SaveFeatureWeight(args.ModelFileName, args.BVQ);
    return true;
}
private static void ShowEvaluation(int recordNum, ModelWriter feature_index, LBFGS lbfgs, int termNum, int itr,
    int[,] merr, int[] yfreq, double diff, DateTime startDT, long nonzero_feature_num, EncoderOptions args)
{
    var ts = DateTime.Now - startDT;
    if (args.DebugLevel > 1)
    {
        // For each tag, report its frequency, total error rate, and the tags it is most often confused with
        for (var i = 0; i < feature_index.y_.Count; i++)
        {
            var total_merr = 0;
            var sdict = new SortedDictionary<double, List<string>>();
            for (var j = 0; j < feature_index.y_.Count; j++)
            {
                total_merr += merr[i, j];
                var v = (double)merr[i, j] / (double)yfreq[i];
                if (v > 0.0001)
                {
                    if (sdict.ContainsKey(v) == false)
                    {
                        sdict.Add(v, new List<string>());
                    }
                    sdict[v].Add(feature_index.y_[j]);
                }
            }
            var vet = (double)total_merr / (double)yfreq[i];
            vet = vet * 100.0F;

            Console.ForegroundColor = ConsoleColor.Green;
            Console.Write("{0} ", feature_index.y_[i]);
            Console.ResetColor();
            Console.Write("[FR={0}, TE=", yfreq[i]);
            Console.ForegroundColor = ConsoleColor.Yellow;
            Console.Write("{0:0.00}%", vet);
            Console.ResetColor();
            Console.WriteLine("]");

            var n = 0;
            foreach (var pair in sdict.Reverse())
            {
                for (int index = 0; index < pair.Value.Count; index++)
                {
                    var item = pair.Value[index];
                    n += item.Length + 1 + 7;
                    if (n > 80)
                    {
                        // Only show data that fits on one line; anything past that is not shown
                        break;
                    }
                    Console.Write("{0}:", item);
                    Console.ForegroundColor = ConsoleColor.Red;
                    Console.Write("{0:0.00}% ", pair.Key * 100);
                    Console.ResetColor();
                }
                if (n > 80)
                {
                    break;
                }
            }
            Console.WriteLine();
        }
    }
    var act_feature_rate = (double)(nonzero_feature_num) / (double)(feature_index.feature_size()) * 100.0;
    //Logger.WriteLine("iter={0} terr={1:0.00000} serr={2:0.00000} diff={3:0.000000} fsize={4}({5:0.00}% act)", itr, 1.0 * lbfgs.err / termNum, 1.0 * lbfgs.zeroone / recordNum, diff, feature_index.feature_size(), act_feature_rate);
    //Logger.WriteLine("Time span: {0}, Aver. time span per iter: {1}", ts, new TimeSpan(0, 0, (int)(ts.TotalSeconds / (itr + 1))));
}
bool runCRF(EncoderTagger[] x, ModelWriter modelWriter, bool orthant, EncoderOptions args)
{
    var old_obj = double.MaxValue;
    var converge = 0;
    var lbfgs = new LBFGS(args.ThreadsNum);
    lbfgs.expected = new double[modelWriter.feature_size() + 1];

    var processList = new List<CRFEncoderThread>();
    var parallelOption = new ParallelOptions();
    parallelOption.MaxDegreeOfParallelism = args.ThreadsNum;

    // Initialize encoding threads
    for (var i = 0; i < args.ThreadsNum; i++)
    {
        var thread = new CRFEncoderThread();
        thread.start_i = i;
        thread.thread_num = args.ThreadsNum;
        thread.x = x;
        thread.lbfgs = lbfgs;
        thread.Init();
        processList.Add(thread);
    }

    // Count terms and tally result-tag frequency
    var termNum = 0;
    var yfreq = new int[modelWriter.y_.Count];
    for (int index = 0; index < x.Length; index++)
    {
        var tagger = x[index];
        termNum += tagger.word_num;
        for (var j = 0; j < tagger.word_num; j++)
        {
            yfreq[tagger.answer_[j]]++;
        }
    }

    // Iterative training
    var startDT = DateTime.Now;
    var dMinErrRecord = 1.0;
    for (var itr = 0; itr < args.MaxIteration; ++itr)
    {
        // Clear result container
        lbfgs.obj = 0.0f;
        lbfgs.err = 0;
        lbfgs.zeroone = 0;
        Array.Clear(lbfgs.expected, 0, lbfgs.expected.Length);

        var threadList = new List<Thread>();
        for (var i = 0; i < args.ThreadsNum; i++)
        {
            var thread = new Thread(processList[i].Run);
            thread.Start();
            threadList.Add(thread);
        }

        // Merge per-thread objective, error counts, and the confusion matrix
        var merr = new int[modelWriter.y_.Count, modelWriter.y_.Count];
        for (var i = 0; i < args.ThreadsNum; ++i)
        {
            threadList[i].Join();
            lbfgs.obj += processList[i].obj;
            lbfgs.err += processList[i].err;
            lbfgs.zeroone += processList[i].zeroone;
            for (var j = 0; j < modelWriter.y_.Count; j++)
            {
                for (var k = 0; k < modelWriter.y_.Count; k++)
                {
                    merr[j, k] += processList[i].merr[j, k];
                }
            }
        }

        long num_nonzero = 0;
        var fsize = modelWriter.feature_size();
        var alpha = modelWriter.alpha_;
        if (orthant == true)
        {
            // L1 regularization: add |alpha| / C to the objective and count non-zero weights
            Parallel.For<double>(1, fsize + 1, parallelOption, () => 0,
                (k, loop, subtotal) =>
                {
                    subtotal += Math.Abs(alpha[k] / modelWriter.cost_factor_);
                    if (alpha[k] != 0.0)
                    {
                        Interlocked.Increment(ref num_nonzero);
                    }
                    return subtotal;
                },
                (subtotal) =>
                {
                    // Lock-free accumulator: retry with CompareExchange until the add wins
                    double initialValue;
                    double newValue;
                    do
                    {
                        initialValue = lbfgs.obj;           // read current value
                        newValue = initialValue + subtotal; // calculate new value
                    }
                    while (initialValue != Interlocked.CompareExchange(ref lbfgs.obj, newValue, initialValue));
                });
        }
        else
        {
            // L2 regularization: add alpha^2 / (2C) to the objective and alpha / C to the gradient
            num_nonzero = fsize;
            Parallel.For<double>(1, fsize + 1, parallelOption, () => 0,
                (k, loop, subtotal) =>
                {
                    subtotal += (alpha[k] * alpha[k] / (2.0 * modelWriter.cost_factor_));
                    lbfgs.expected[k] += (alpha[k] / modelWriter.cost_factor_);
                    return subtotal;
                },
                (subtotal) =>
                {
                    // Lock-free accumulator: retry with CompareExchange until the add wins
                    double initialValue;
                    double newValue;
                    do
                    {
                        initialValue = lbfgs.obj;           // read current value
                        newValue = initialValue + subtotal; // calculate new value
                    }
                    while (initialValue != Interlocked.CompareExchange(ref lbfgs.obj, newValue, initialValue));
                });
        }

        // Show this iteration's result
        var diff = (itr == 0 ? 1.0f : Math.Abs(old_obj - lbfgs.obj) / old_obj);
        old_obj = lbfgs.obj;
        ShowEvaluation(x.Length, modelWriter, lbfgs, termNum, itr, merr, yfreq, diff, startDT, num_nonzero, args);

        // Converged once the relative objective change stays below the threshold
        if (diff < args.MinDifference)
        {
            converge++;
        }
        else
        {
            converge = 0;
        }
        if (itr > args.MaxIteration || converge == 3)
        {
            break; // 3 is ad-hoc
        }

        if (args.DebugLevel > 0 && (double)lbfgs.zeroone / (double)x.Length < dMinErrRecord)
        {
            var cc = Console.ForegroundColor;
            Console.ForegroundColor = ConsoleColor.Red;
            Console.Write("[Debug Mode] ");
            Console.ForegroundColor = cc;

            // Save the current best feature weights into a file
            dMinErrRecord = (double)lbfgs.zeroone / (double)x.Length;
            modelWriter.SaveFeatureWeight("feature_weight_tmp", false);
        }

        var iret = lbfgs.optimize(alpha, modelWriter.cost_factor_, orthant);
        if (iret <= 0)
        {
            return false;
        }
    }
    return true;
}
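// The regularization branches above fold per-thread subtotals into lbfgs.obj
// without a lock, retrying Interlocked.CompareExchange until the addition
// lands on an unchanged value. A minimal standalone sketch of that pattern
// (the AddToTotal helper name is ours, not the library's):
static void AddToTotal(ref double total, double value)
{
    double initial, computed;
    do
    {
        initial = total;             // snapshot the current total
        computed = initial + value;  // compute the desired new total
    }
    while (initial != Interlocked.CompareExchange(ref total, computed, initial));
}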
Bus ReadBus(string content)
{
    var mw = new ModelWriter();
    return mw.ReadObject<Bus>(content);
}
public MessageRoute(Type messageType, ModelWriter writer, Uri destination, string contentType)
    : this(messageType, destination, contentType)
{
    Writer = writer[contentType];
}
public MessageRoute(Type messageType, ModelWriter writer, IChannel channel, string contentType)
    : this(messageType, channel.Uri, contentType)
{
    Writer = writer[contentType];
    Channel = channel;
}
public async Task<GeneratedCodeWorkspace> ExecuteAsync(CodeModel codeModel, Configuration configuration)
{
    Directory.CreateDirectory(configuration.OutputFolder);
    var project = GeneratedCodeWorkspace.Create(configuration.OutputFolder, configuration.SharedSourceFolder);
    var sourceInputModel = new SourceInputModel(await project.GetCompilationAsync());
    var context = new BuildContext(codeModel, configuration, sourceInputModel);

    var modelWriter = new ModelWriter();
    var clientWriter = new ClientWriter();
    var restClientWriter = new RestClientWriter();
    var serializeWriter = new SerializationWriter();
    var headerModelModelWriter = new ResponseHeaderGroupWriter();

    // Generate a model class and its serialization partial for every schema type
    foreach (var model in context.Library.Models)
    {
        var codeWriter = new CodeWriter();
        modelWriter.WriteModel(codeWriter, model);

        var serializerCodeWriter = new CodeWriter();
        serializeWriter.WriteSerialization(serializerCodeWriter, model);

        var name = model.Type.Name;
        project.AddGeneratedFile($"Models/{name}.cs", codeWriter.ToString());
        project.AddGeneratedFile($"Models/{name}.Serialization.cs", serializerCodeWriter.ToString());
    }

    foreach (var client in context.Library.RestClients)
    {
        var restCodeWriter = new CodeWriter();
        restClientWriter.WriteClient(restCodeWriter, client);
        project.AddGeneratedFile($"{client.Type.Name}.cs", restCodeWriter.ToString());
    }

    foreach (ResponseHeaderGroupType responseHeaderModel in context.Library.HeaderModels)
    {
        var headerModelCodeWriter = new CodeWriter();
        headerModelModelWriter.WriteHeaderModel(headerModelCodeWriter, responseHeaderModel);
        project.AddGeneratedFile($"{responseHeaderModel.Type.Name}.cs", headerModelCodeWriter.ToString());
    }

    foreach (var client in context.Library.Clients)
    {
        var codeWriter = new CodeWriter();
        clientWriter.WriteClient(codeWriter, client, context.Configuration);
        project.AddGeneratedFile($"{client.Type.Name}.cs", codeWriter.ToString());
    }

    foreach (var operation in context.Library.LongRunningOperations)
    {
        var codeWriter = new CodeWriter();
        LongRunningOperationWriter.Write(codeWriter, operation);
        project.AddGeneratedFile($"{operation.Type.Name}.cs", codeWriter.ToString());
    }

    // Azure ARM libraries also get aggregate management client and options types
    if (context.Configuration.AzureArm)
    {
        var codeWriter = new CodeWriter();
        ManagementClientWriter.WriteClientOptions(codeWriter, context);
        project.AddGeneratedFile($"{context.Configuration.LibraryName}ManagementClientOptions.cs", codeWriter.ToString());

        var clientCodeWriter = new CodeWriter();
        ManagementClientWriter.WriteAggregateClient(clientCodeWriter, context);
        project.AddGeneratedFile($"{context.Configuration.LibraryName}ManagementClient.cs", clientCodeWriter.ToString());
    }

    return project;
}
public void Initialize()
{
    this.resolver = new DependencyResolver();
    this.resolver.Bind<ITypeMapping>().ToSingleton<TypeMapping>();
    this.writer = this.resolver.Create<ModelWriter>();
}
public AngularModelWriter(TypeScriptModelWriter modelWriter) { this.modelWriter = modelWriter; }
public override void Render(float deltaTime)
{
    if (level != null)
    {
        ImGui.SetNextItemOpen(true);
        if (ImGui.TreeNode("Mesh mode"))
        {
            int meshMode = (int)settings.mode;
            if (ImGui.Combo("Mesh Mode", ref meshMode, enumNameMap, enumNameMap.Length))
            {
                settings.mode = (ModelWriter.WriterLevelMode)meshMode;
            }
            ImGui.TreePop();
        }
        ImGui.SetNextItemOpen(true);
        if (ImGui.TreeNode("Objects to include"))
        {
            ImGui.Checkbox("Include Ties", ref settings.writeTies);
            ImGui.Checkbox("Include Shrubs", ref settings.writeShrubs);
            ImGui.Checkbox("Include Mobies", ref settings.writeMobies);
            if (level.terrainChunks.Count == 0)
            {
                ImGui.Checkbox("Include Terrain", ref settings.chunksSelected[0]);
            }
            ImGui.Checkbox("Include MTL File", ref settings.exportMtlFile);
            ImGui.TreePop();
        }
        if (level.terrainChunks.Count != 0)
        {
            ImGui.SetNextItemOpen(true);
            if (ImGui.TreeNode("Chunk config"))
            {
                ImGui.TextDisabled("Use CTRL key to select multiple");
                for (int i = 0; i < level.terrainChunks.Count; i++)
                {
                    if (ImGui.Selectable("Chunk " + i, settings.chunksSelected[i]))
                    {
                        settings.chunksSelected[i] = !settings.chunksSelected[i];
                    }
                }
                ImGui.TreePop();
            }
        }
        if (ImGui.Button("Perform export"))
        {
            var res = CrossFileDialog.SaveFile("Level.obj", "*.obj");
            if (res.Length > 0)
            {
                ModelWriter.WriteObj(res, level, settings);
                isOpen = false;
            }
        }
    }
    else
    {
        ImGui.Text("Export unavailable, no level found!");
    }
}
public ReflectionWriter(ModelWriter modelWriter) { this.modelWriter = modelWriter; }
public MessageRoute(Type messageType, ModelWriter writer, ISubscriber subscriber, string contentType)
    : this(messageType, subscriber.Uri, contentType)
{
    Writer = writer[contentType];
    Subscriber = subscriber;
}
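// The MessageRoute constructors above resolve a concrete serializer through
// ModelWriter's string indexer. A minimal sketch of what such an indexer
// might look like; the _serializers field and the exception choice are our
// assumptions, not the library's actual implementation:
public IMessageSerializer this[string contentType]
{
    get
    {
        // _serializers maps content types like "application/json" to serializers
        if (_serializers.TryGetValue(contentType, out var serializer))
        {
            return serializer;
        }
        throw new ArgumentOutOfRangeException(nameof(contentType),
            $"No serializer registered for content type '{contentType}'");
    }
}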
// Encode a CRF model from the training corpus
public bool Learn(EncoderArgs args)
{
    if (args.min_diff <= 0.0)
    {
        Logger.WriteLine(Logger.Level.err, "eta must be > 0.0");
        return false;
    }
    if (args.C < 0.0)
    {
        Logger.WriteLine(Logger.Level.err, "C must be >= 0.0");
        return false;
    }
    if (args.threads_num <= 0)
    {
        Logger.WriteLine(Logger.Level.err, "thread must be > 0");
        return false;
    }
    if (args.hugeLexMemLoad > 0)
    {
        Logger.WriteLine("Build feature lexical dictionary in huge mode[shrink when mem used rate:{0}%]", args.hugeLexMemLoad);
    }

    Logger.WriteLine("Open and check training corpus and templates...");
    var modelWriter = new ModelWriter(args.threads_num, args.C, args.hugeLexMemLoad, args.strRetrainModelFileName);
    if (modelWriter.Open(args.strTemplateFileName, args.strTrainingCorpus) == false)
    {
        Logger.WriteLine("Open training corpus or template file failed.");
        return false;
    }

    Logger.WriteLine("Load training data and generate lexical features: ");
    var xList = modelWriter.ReadAllRecords();
    Logger.WriteLine("");

    Logger.WriteLine("Shrinking feature set [frequency is less than {0}]...", args.min_feature_freq);
    modelWriter.Shrink(xList, args.min_feature_freq);

    Logger.WriteLine("Saving model meta data...");
    if (!modelWriter.SaveModelMetaData(args.strEncodedModelFileName))
    {
        Logger.WriteLine(Logger.Level.err, "Failed!");
        return false;
    }
    else
    {
        Logger.WriteLine("Success");
    }

    Logger.WriteLine("Indexing feature set with {0} maximum slot usage rate threshold...", args.slot_usage_rate_threshold);
    if (!modelWriter.BuildFeatureSetIntoIndex(args.strEncodedModelFileName, args.slot_usage_rate_threshold, args.debugLevel))
    {
        Logger.WriteLine(Logger.Level.err, "Failed!");
        return false;
    }
    else
    {
        Logger.WriteLine("Success");
    }

    Logger.WriteLine("Sentences size: " + xList.Length);
    Logger.WriteLine("Features size: " + modelWriter.feature_size());
    Logger.WriteLine("Thread(s): " + args.threads_num);
    Logger.WriteLine("Regularization type: " + args.regType.ToString());
    Logger.WriteLine("Freq: " + args.min_feature_freq);
    Logger.WriteLine("eta: " + args.min_diff);
    Logger.WriteLine("C: " + args.C);
    Logger.WriteLine("Vector quantization: " + args.bVQ);

    if (xList.Length == 0)
    {
        Logger.WriteLine(Logger.Level.err, "No sentence for training.");
        return false;
    }

    // L1 regularization uses orthant-wise optimization
    var orthant = false;
    if (args.regType == REG_TYPE.L1)
    {
        orthant = true;
    }

    if (runCRF(xList, modelWriter, orthant, args) == false)
    {
        Logger.WriteLine(Logger.Level.warn, "Some warnings are raised during encoding...");
    }

    Logger.WriteLine("Saving model feature's weight...");
    modelWriter.SaveFeatureWeight(args.strEncodedModelFileName, args.bVQ);
    return true;
}
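// A hypothetical invocation of Learn above. Every value is an illustrative
// assumption about sensible defaults, and the object-initializer style
// presumes these members are publicly settable; the encoder host type is
// also assumed, not taken from the source.
var args = new EncoderArgs
{
    strTemplateFileName = "template.txt",
    strTrainingCorpus = "train.txt",
    strEncodedModelFileName = "model.bin",
    min_diff = 0.0001,
    C = 1.0,
    threads_num = 4,
    min_feature_freq = 2,
    regType = REG_TYPE.L1,
};
var encoder = new Encoder(); // hypothetical host type for Learn
if (!encoder.Learn(args))
{
    Logger.WriteLine(Logger.Level.err, "Training failed.");
}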
public PublishedMessage(Type messageType, ModelWriter modelWriter, IChannelGraph channels)
    : this(messageType)
{
    ContentTypes = modelWriter.ContentTypes;
    Transports = channels.ValidTransports;
}
public NNModel GetModelForBehaviorName(string behaviorName)
{
    if (m_CachedModels.ContainsKey(behaviorName))
    {
        return m_CachedModels[behaviorName];
    }

    // Resolve the override asset path from an explicit override or the override directory
    string assetPath = null;
    if (m_BehaviorNameOverrides.ContainsKey(behaviorName))
    {
        assetPath = m_BehaviorNameOverrides[behaviorName];
    }
    else if (!string.IsNullOrEmpty(m_BehaviorNameOverrideDirectory))
    {
        assetPath = Path.Combine(m_BehaviorNameOverrideDirectory, $"{behaviorName}.{m_OverrideExtension}");
    }
    if (string.IsNullOrEmpty(assetPath))
    {
        Debug.Log($"No override for BehaviorName {behaviorName}, and no directory set.");
        return null;
    }

    byte[] rawModel = null;
    try
    {
        rawModel = File.ReadAllBytes(assetPath);
    }
    catch (IOException)
    {
        Debug.Log($"Couldn't load file {assetPath} at full path {Path.GetFullPath(assetPath)}", this);
        // Cache the null so we don't repeatedly try to load a missing file
        m_CachedModels[behaviorName] = null;
        return null;
    }

    NNModel asset;
    var isOnnx = m_OverrideExtension.Equals("onnx");
    if (isOnnx)
    {
        // Convert the ONNX bytes and serialize them into an NNModelData asset
        var converter = new ONNXModelConverter(true);
        var onnxModel = converter.Convert(rawModel);

        NNModelData assetData = ScriptableObject.CreateInstance<NNModelData>();
        using (var memoryStream = new MemoryStream())
        using (var writer = new BinaryWriter(memoryStream))
        {
            ModelWriter.Save(writer, onnxModel);
            assetData.Value = memoryStream.ToArray();
        }
        assetData.name = "Data";
        assetData.hideFlags = HideFlags.HideInHierarchy;

        asset = ScriptableObject.CreateInstance<NNModel>();
        asset.modelData = assetData;
    }
    else
    {
        // Note - this approach doesn't work for onnx files. Need to replace with
        // the equivalent of ONNXModelImporter.OnImportAsset()
        asset = ScriptableObject.CreateInstance<NNModel>();
        asset.modelData = ScriptableObject.CreateInstance<NNModelData>();
        asset.modelData.Value = rawModel;
    }

    asset.name = "Override - " + Path.GetFileName(assetPath);
    m_CachedModels[behaviorName] = asset;
    return asset;
}