// Handles the Confirm button: builds a transfer from the current UI/session state,
// validates it, and persists it; resets the form on success.
private void ConfirmButton_Click(object sender, RoutedEventArgs e)
{
    StoreModel targetStore = (StoreModel)StoreList.SelectedItem;
    if (targetStore == null)
    {
        MessageBox.Show("Select Store Please");
        return;
    }

    // Populate the transfer from logged-in staff/store context and the entered amount.
    Transform = new TransformModel();
    Transform.Staff = PublicVariables.Staff;
    Transform.Store = PublicVariables.Store;
    Transform.Date = DateTime.Now;
    Transform.TotalMoney = (decimal)TransformValue.Value;
    Transform.ToStore = targetStore;

    GlobalConfig.TransformValidator = new TransformValidator();
    ValidationResult validation = GlobalConfig.TransformValidator.Validate(Transform);
    if (!validation.IsValid)
    {
        // Surface only the first validation failure to the user.
        MessageBox.Show(validation.Errors[0].ErrorMessage);
        return;
    }

    // NOTE(review): "AddTransformToTheDatase" is the upstream API's (misspelled) name.
    GlobalConfig.Connection.AddTransformToTheDatase(Transform);
    SetInitialValues();
}
/// <summary>
/// Builds the <see cref="OperationEntity"/> for a single OpenAPI operation,
/// collecting its URI parameters, paths, responses, request bodies and security.
/// </summary>
/// <param name="transformModel">Context carrying the OpenAPI document, operation and ids.</param>
/// <returns>The populated operation entity.</returns>
public static OperationEntity Transform(TransformModel transformModel)
{
    var componentGroupId = transformModel.ComponentGroupId;
    var allUriParameters = TransformUriParameters(transformModel.Operation.Value, componentGroupId);
    var requiredQueryUriParameters = allUriParameters.Where(p => p.IsRequired && p.In == "query").ToList();
    // NOTE(review): the optional-query-parameter split was removed because its only
    // consumer (OptionalParameters) is disabled below; recompute with
    // allUriParameters.Where(p => !p.IsRequired && p.In == "query").ToList() when re-enabling.

    return (new OperationEntity
    {
        Id = transformModel.OperationId,
        Name = transformModel.OperationName,
        Service = transformModel.ServiceName,
        GroupName = transformModel.OperationGroupName,
        Summary = transformModel.Operation.Value.Summary,
        Description = transformModel.Operation.Value.Description,
        ApiVersion = transformModel.OpenApiDoc.Info.Version,
        IsDeprecated = transformModel.Operation.Value.Deprecated,
        HttpVerb = transformModel.Operation.Key.ToString().ToUpper(),
        Servers = TransformHelper.GetServerEnities(transformModel.OpenApiDoc.Servers),
        Paths = TransformPaths(transformModel.OpenApiPath, transformModel.Operation.Value, requiredQueryUriParameters),
        // remove this for now
        // OptionalParameters = TransformOptionalParameters(optionalQueryUriParameters),
        RequestParameters = allUriParameters,
        Responses = TransformResponses(transformModel, transformModel.Operation.Value, componentGroupId),
        RequestBodies = TransformRequestBody(transformModel.Operation.Value, componentGroupId),
        // Operation-level security overrides the document-level requirements when present.
        Securities = TransformSecurity(transformModel.Operation.Value.Security.Count != 0 ? transformModel.Operation.Value.Security : transformModel.OpenApiDoc.SecurityRequirements),
        SeeAlsos = TransformExternalDocs(transformModel.Operation.Value),
        // for internal
        IsFunctionOrAction = IsFunctionOrAction(transformModel.Operation.Value),
        GroupedPaths = GetGroupedPaths(transformModel.OpenApiPath, transformModel.Operation.Value),
        InternalOpeartionId = transformModel.Operation.Value.OperationId.ToLower()
    });
}
/// <summary>
/// Initializes the command from parsed arguments: validates the ONNX output path,
/// resolves the model name, and captures the drop lists and predictor options.
/// </summary>
public SaveOnnxCommand(IHostEnvironment env, Arguments args)
    : base(env, args, LoadName)
{
    Host.CheckValue(args, nameof(args));
    Host.CheckNonWhiteSpace(args.Onnx, nameof(args.Onnx));
    Utils.CheckOptionalUserDirectory(args.Onnx, nameof(args.Onnx));

    _outputModelPath = args.Onnx;
    _outputJsonModelPath = string.IsNullOrWhiteSpace(args.Json) ? null : args.Json;

    // Prefer an explicit model name; otherwise fall back to the output file name.
    if (args.Name != null)
    {
        Host.CheckNonWhiteSpace(args.Name, nameof(args.Name));
        _name = args.Name;
    }
    else
    {
        _name = Path.GetFileNameWithoutExtension(_outputModelPath);
    }

    _loadPredictor = args.LoadPredictor;
    // Explicit array arguments win over the comma-separated string forms.
    _inputsToDrop = CreateDropMap(args.InputsToDropArray ?? args.InputsToDrop?.Split(','));
    _outputsToDrop = CreateDropMap(args.OutputsToDropArray ?? args.OutputsToDrop?.Split(','));
    _domain = args.Domain;
    _model = args.Model;
}
// Initializes the view model: default transform/color state, command wiring,
// the available variation list, and change-notification hooks for bound properties.
// NOTE(review): the SetActionFor registrations appear order/coverage sensitive —
// confirm before reordering.
public ComponTransformViewModel()
{
    _transformModel = new TransformModel();
    _flameColorMode = FlameColorMode.Color;
    Command = new RelayCommand(CommandHandler);
    // Neutral gray gradient/color defaults, gradient position centered.
    GradientModel = new GradientModel(Colors.Gray, Colors.Gray);
    ColorPosition = .5;
    FColor = Colors.Gray;
    // All variation parameters start hidden until a variation that uses them is selected.
    Parameter1Visibility = Visibility.Collapsed;
    Parameter2Visibility = Visibility.Collapsed;
    Parameter3Visibility = Visibility.Collapsed;
    Parameter4Visibility = Visibility.Collapsed;
    Variations = VariationFactoryModel.StaticVariationFactory.VariationNames;
    VariationSelected = "Linear";
    // Wire property-change callbacks for parameter, value and variation groups.
    BindStorage.SetActionFor(ActionValueChanged, _propParameters);
    BindStorage.SetActionFor(ActionValueChanged, _propValues2);
    BindStorage.SetActionFor(ActionTransformValueChanged, _propValues);
    BindStorage.SetActionFor(ActionVariationChanged, _propVariation);
    SetCoefficients();
}
/// <summary>
/// Initializes the command from parsed arguments: validates the ONNX output path,
/// resolves the model name, captures drop lists, and rejects mutually exclusive
/// <c>Model</c>/<c>PredictiveModel</c> inputs.
/// </summary>
public SaveOnnxCommand(IHostEnvironment env, Arguments args)
    : base(env, args, LoadName)
{
    Host.CheckValue(args, nameof(args));
    Host.CheckNonWhiteSpace(args.Onnx, nameof(args.Onnx));
    Utils.CheckOptionalUserDirectory(args.Onnx, nameof(args.Onnx));

    _outputModelPath = args.Onnx;
    _outputJsonModelPath = string.IsNullOrWhiteSpace(args.Json) ? null : args.Json;

    // Prefer an explicit model name; otherwise fall back to the output file name.
    if (args.Name != null)
    {
        Host.CheckNonWhiteSpace(args.Name, nameof(args.Name));
        _name = args.Name;
    }
    else
    {
        _name = Path.GetFileNameWithoutExtension(_outputModelPath);
    }

    _loadPredictor = args.LoadPredictor;
    // Explicit array arguments win over the comma-separated string forms.
    _inputsToDrop = CreateDropMap(args.InputsToDropArray ?? args.InputsToDrop?.Split(','));
    _outputsToDrop = CreateDropMap(args.OutputsToDropArray ?? args.OutputsToDrop?.Split(','));
    _domain = args.Domain;

    // Only one model source may be supplied.
    if (args.Model != null && args.PredictiveModel != null)
    {
        throw env.Except(nameof(args.Model) + " and " + nameof(args.PredictiveModel) +
            " cannot be specified at the same time when calling ONNX converter. Please check the content of " + nameof(args) + ".");
    }

    _model = args.Model;
    _predictiveModel = args.PredictiveModel;
}
// NOTE(review): this looks like an unfinished stub — it extracts the panel's view
// model and creates an empty TransformModel, but uses neither and returns nothing.
// Confirm intent before removing or completing.
private void GetTransform(PanelTransform panelTransform)
{
    // Direct cast: throws InvalidCastException if DataContext is not a PanelTransformViewModel.
    var dc = (PanelTransformViewModel)panelTransform.DataContext;
    var tm = new TransformModel { };
}
/// <summary>
/// Writes the KC house-price model to <paramref name="stream"/>, lazily building
/// and caching the model on first use (no synchronization — not thread-safe).
/// </summary>
public static void WriteKcHousePriceModel(string dataPath, Stream stream)
{
    s_housePriceModel = s_housePriceModel ?? CreateKcHousePricePredictorModel(dataPath);
    s_housePriceModel.Save(s_environment, stream);
}
// Applies a single text edit described by the transform: replaces NumDelete
// characters at Position with the Insert text (nulls treated as empty / zero).
void ApplyTransform(TransformModel transform)
{
    using (var edit = textView.TextBuffer.CreateEdit())
    {
        var deleteCount = Math.Max(0, transform.NumDelete);
        var insertText = transform.Insert ?? string.Empty;
        edit.Replace(transform.Position, deleteCount, insertText);
        edit.Apply();
    }
}
// Returns an array holding a Copy() of every transformation in the input list.
private static TransformModel[] CopyTransformations(IReadOnlyList<TransformModel> transformationModels)
{
    var copies = new TransformModel[transformationModels.Count];
    var index = 0;
    foreach (var model in transformationModels)
    {
        copies[index++] = model.Copy();
    }
    return copies;
}
/// <summary>
/// Builds the <see cref="OperationGroupEntity"/> for one OpenAPI tag (operation group).
/// </summary>
/// <param name="transformModel">Context carrying the OpenAPI document, tag and ids.</param>
public static OperationGroupEntity Transform(TransformModel transformModel)
{
    var entity = new OperationGroupEntity
    {
        Id = transformModel.OperationGroupId,
        // Info may be absent on partial documents, hence the null-conditional access.
        ApiVersion = transformModel.OpenApiDoc.Info?.Version,
        Name = transformModel.OperationGroupName,
        Service = transformModel.ServiceName,
        Summary = transformModel.OpenApiTag.Description
    };
    return entity;
}
/// <summary>
/// Verifies the user's original password against the specified breach-proof password.
/// </summary>
/// <param name="originalPassword">The original user's password.</param>
/// <param name="breachProofPassword">The previously created breach-proof password to compare against.</param>
/// <param name="prove">When true, additionally verifies the service's transformation proof.</param>
/// <returns>Returns true if password is valid, otherwise false.</returns>
public async Task<bool> VerifyBreachProofPasswordAsync(string originalPassword, BreachProofPassword breachProofPassword, bool prove = false)
{
    if (string.IsNullOrEmpty(originalPassword))
    {
        throw new ArgumentNullException(nameof(originalPassword));
    }
    if (breachProofPassword == null)
    {
        throw new ArgumentNullException(nameof(breachProofPassword));
    }

    // Blind the password locally so the service never sees it in the clear.
    var blindingResult = this.pythiaCrypto.Blind(originalPassword);

    var transformModel = new TransformModel
    {
        BlindedPassword = blindingResult.BlindedPassword,
        Salt = breachProofPassword.Salt,
        Version = breachProofPassword.Version,
        IncludeProof = prove
    };

    var tokenContext = new TokenContext("pythia", "transform");
    var token = await this.tokenProvider.GetTokenAsync(tokenContext).ConfigureAwait(false);

    // Ask the Pythia service to transform the blinded password.
    var result = await this.client.TransformPasswordAsync(
        transformModel, token.ToString()).ConfigureAwait(false);

    if (prove)
    {
        // Verify the service's transformation proof with the proof key matching
        // the stored password's version.
        var proofKey = this.proofKeys[breachProofPassword.Version];
        var proofParams = new PythiaProofParams
        {
            TransformedPassword = result.TransformedPassword,
            TransformationPublicKey = proofKey,
            BlindedPassword = blindingResult.BlindedPassword,
            Tweak = breachProofPassword.Salt,
            ProofValueC = result.Proof.ValueC,
            ProofValueU = result.Proof.ValueU
        };
        if (!this.pythiaCrypto.Verify(proofParams))
        {
            throw new PythiaProofIsNotValidException();
        }
    }

    // Deblind the service's answer and compare with the stored deblinded password.
    var deblindedPassword = this.pythiaCrypto.Deblind(
        result.TransformedPassword, blindingResult.BlindingSecret);

    return (deblindedPassword.SequenceEqual(breachProofPassword.DeblindedPassword));
}
/// <summary>
/// Creates a new breach-proof password for the specified user's password.
/// </summary>
/// <param name="password">The user's password.</param>
/// <returns>The breach-proof password (deblinded password, salt, and proof-key version).</returns>
public async Task<BreachProofPassword> CreateBreachProofPasswordAsync(string password)
{
    if (string.IsNullOrEmpty(password))
    {
        throw new ArgumentNullException(nameof(password));
    }

    // Blind the password locally so the service never sees it in the clear.
    var blindingResult = this.pythiaCrypto.Blind(password);

    // Always use the newest proof key for newly created passwords.
    var currentVersion = this.proofKeys.Keys.Max();
    var currentProofKey = this.proofKeys[currentVersion];
    var salt = this.pythiaCrypto.GenerateSalt();

    var transformModel = new TransformModel
    {
        BlindedPassword = blindingResult.BlindedPassword,
        Salt = salt,
        Version = currentVersion,
        IncludeProof = true
    };

    var tokenContext = new TokenContext("pythia", "transform");
    var token = await this.tokenProvider.GetTokenAsync(tokenContext).ConfigureAwait(false);

    // Ask the Pythia service to transform the blinded password (with proof).
    var result = await this.client.TransformPasswordAsync(
        transformModel, token.ToString()).ConfigureAwait(false);

    // Verify the service's transformation proof before trusting the result.
    var proofParams = new PythiaProofParams
    {
        TransformedPassword = result.TransformedPassword,
        TransformationPublicKey = currentProofKey,
        BlindedPassword = blindingResult.BlindedPassword,
        Tweak = salt,
        ProofValueC = result.Proof.ValueC,
        ProofValueU = result.Proof.ValueU
    };
    if (!this.pythiaCrypto.Verify(proofParams))
    {
        throw new PythiaProofIsNotValidException();
    }

    // Deblind the service's answer; this is what gets stored and later compared.
    var deblindedPassword = this.pythiaCrypto.Deblind(
        result.TransformedPassword, blindingResult.BlindingSecret);

    return (new BreachProofPassword
    {
        DeblindedPassword = deblindedPassword,
        Salt = salt,
        Version = currentVersion
    });
}
// Converts a Unity Transform into the serializable TransformModel, capturing both
// the world-space pose (position/rotation/lossyScale) and the local pose.
// Note the Quaternion component order: (w, x, y, z).
protected TransformModel GetTransformModel(Transform transform)
{
    var position = new Vector3d(transform.position.x, transform.position.y, transform.position.z);
    var rotation = new MathDescription.Quaternion(transform.rotation.w, transform.rotation.x, transform.rotation.y, transform.rotation.z);
    var scale = new Vector3d(transform.lossyScale.x, transform.lossyScale.y, transform.lossyScale.z);
    var localPosition = new Vector3d(transform.localPosition.x, transform.localPosition.y, transform.localPosition.z);
    var localRotation = new MathDescription.Quaternion(transform.localRotation.w, transform.localRotation.x, transform.localRotation.y, transform.localRotation.z);
    var localScale = new Vector3d(transform.localScale.x, transform.localScale.y, transform.localScale.z);

    return new TransformModel(position, rotation, scale, localPosition, localRotation, localScale);
}
/// <summary>
/// Builds the <see cref="ComponentGroupEntity"/> for one component group.
/// </summary>
/// <param name="transformModel">Context carrying the OpenAPI document and group ids.</param>
public static ComponentGroupEntity Transform(TransformModel transformModel)
{
    // Fix: this local was previously assigned but never used; read the version
    // through it instead of dereferencing transformModel.OpenApiDoc a second time.
    var openApiDocument = transformModel.OpenApiDoc;
    var componentGroup = new ComponentGroupEntity
    {
        Id = transformModel.ComponentGroupId,
        Name = transformModel.ComponentGroupName,
        Service = transformModel.ServiceName,
        ApiVersion = openApiDocument.Info.Version
    };
    return componentGroup;
}
// Binds an ensemble of predictor pipelines to a single input schema: resolves the
// required input columns, binds each sub-model's predictor as a row mapper,
// locates each score column, and builds each sub-model's transform pipeline as a
// row-to-row mapper over the same input schema.
public BoundBase(SchemaBindablePipelineEnsembleBase parent, RoleMappedSchema schema)
{
    Parent = parent;
    InputRoleMappedSchema = schema;
    // Output schema: a single score column of the ensemble's score type/kind.
    Schema = Schema.Create(new ScoreMapperSchema(Parent.ScoreType, Parent._scoreColumnKind));

    // Every declared input column must exist in the incoming schema.
    _inputColIndices = new HashSet<int>();
    for (int i = 0; i < Parent._inputCols.Length; i++)
    {
        var name = Parent._inputCols[i];
        if (!InputRoleMappedSchema.Schema.TryGetColumnIndex(name, out int col))
        {
            throw Parent.Host.Except("Schema does not contain required input column '{0}'", name);
        }
        _inputColIndices.Add(col);
    }

    Mappers = new ISchemaBoundRowMapper[Parent.PredictorModels.Length];
    BoundPipelines = new IRowToRowMapper[Parent.PredictorModels.Length];
    ScoreCols = new int[Parent.PredictorModels.Length];
    for (int i = 0; i < Mappers.Length; i++)
    {
        // Get the RoleMappedSchema to pass to the predictor.
        // (An empty data view carries the schema without materializing any rows.)
        var emptyDv = new EmptyDataView(Parent.Host, schema.Schema);
        Parent.PredictorModels[i].PrepareData(Parent.Host, emptyDv, out RoleMappedData rmd, out IPredictor predictor);

        // Get the predictor as a bindable mapper, and bind it to the RoleMappedSchema found above.
        var bindable = ScoreUtils.GetSchemaBindableMapper(Parent.Host, Parent.PredictorModels[i].Predictor);
        Mappers[i] = bindable.Bind(Parent.Host, rmd.Schema) as ISchemaBoundRowMapper;
        if (Mappers[i] == null)
        {
            throw Parent.Host.Except("Predictor {0} is not a row to row mapper", i);
        }

        // Make sure there is a score column, and remember its index.
        if (!Mappers[i].Schema.TryGetColumnIndex(MetadataUtils.Const.ScoreValueKind.Score, out ScoreCols[i]))
        {
            throw Parent.Host.Except("Predictor {0} does not contain a score column", i);
        }

        // Get the pipeline: apply the sub-model's transform model over the input
        // schema and require the result to be a row-to-row mapper.
        var dv = new EmptyDataView(Parent.Host, schema.Schema);
        var tm = new TransformModel(Parent.Host, dv, dv);
        var pipeline = Parent.PredictorModels[i].TransformModel.Apply(Parent.Host, tm);
        BoundPipelines[i] = pipeline.AsRowToRowMapper(Parent.Host);
        if (BoundPipelines[i] == null)
        {
            throw Parent.Host.Except("Transform pipeline {0} contains transforms that do not implement IRowToRowMapper", i);
        }
    }
}
/// <summary>
/// Maps OpenAPI response links to <see cref="ResponseLinkEntity"/> items.
/// </summary>
/// <param name="transformModel">Transform context (currently unused; kept for interface stability).</param>
/// <param name="openApiLinks">The response's links; may be null when a response defines none.</param>
/// <returns>The link entities, or an empty list when there are no links.</returns>
public static IList<ResponseLinkEntity> GetResponseLinks(TransformModel transformModel, IDictionary<string, OpenApiLink> openApiLinks)
{
    var links = new List<ResponseLinkEntity>();
    // Fix: a response without links yields a null dictionary; enumerating it
    // would throw NullReferenceException.
    if (openApiLinks == null)
    {
        return links;
    }
    foreach (var openApiLink in openApiLinks)
    {
        links.Add(new ResponseLinkEntity
        {
            Key = openApiLink.Key,
            OperationId = openApiLink.Value.OperationId
        });
    }
    return links;
}
/// <summary>
/// Builds the <see cref="ComponentEntity"/> for one OpenAPI component schema,
/// including its flattened properties and an example payload.
/// </summary>
/// <param name="transformModel">Context carrying the OpenAPI document, schema and ids.</param>
public static ComponentEntity Transform(TransformModel transformModel)
{
    var schema = transformModel.OpenApiSchema;
    var properties = TransformHelper.GetPropertiesFromSchema(schema, transformModel.ComponentGroupId);

    var component = new ComponentEntity
    {
        Id = transformModel.ComponentId,
        Name = transformModel.ComponentName,
        Service = transformModel.ServiceName,
        GroupName = transformModel.ComponentGroupName,
        ApiVersion = transformModel.OpenApiDoc.Info.Version,
        // Fall back to the schema title when no description is provided.
        Description = schema.Description ?? schema.Title,
        PropertyItems = properties.ToList(),
        Example = GetComponentExample(schema)
    };
    return component;
}
// Verifies that TransformResponses produces the expected response entities
// (matched by name and full JSON payload) when a server id is supplied.
public void OperationEntity_GetResponse_With_ServerId()
{
    // Arrange: load the sample document and grab its first operation.
    var openApiDocument = LoadOpenApiDocument("../../samples/GetResponse2.yaml");
    var operation = openApiDocument.Paths.Values.First().Operations.Values.First();
    var transformModel = new TransformModel
    {
        OpenApiDoc = openApiDocument
    };
    var expects = LoadExpectedJsonObject<List<ResponseEntity>>("../../expects/Responses2.json");

    // Act
    var responses = RestOperationTransformer.TransformResponses(transformModel, operation, "mockServerId");

    // Assert
    Assert.NotNull(responses);
    // Fix: xUnit's Assert.Equal takes the expected value first; the arguments were reversed.
    Assert.Equal(expects.Count, responses.Count);
    foreach (var expect in expects)
    {
        var foundResponse = responses.SingleOrDefault(p => p.Name == expect.Name);
        Assert.NotNull(foundResponse);
        Assert.Equal(JsonUtility.ToJsonString(expect), JsonUtility.ToJsonString(foundResponse));
    }
}
/// <summary>
/// Transforms an operation's OpenAPI responses into <see cref="ResponseEntity"/> items,
/// resolving each response's bodies and links.
/// </summary>
/// <param name="transformModel">Transform context, forwarded to the link transformer.</param>
/// <param name="openApiOperation">The operation whose responses are transformed.</param>
/// <param name="componentGroupId">Component group id used when resolving body schemas.</param>
/// <returns>One entity per response; empty when the operation declares none.</returns>
public static IList<ResponseEntity> TransformResponses(TransformModel transformModel, OpenApiOperation openApiOperation, string componentGroupId)
{
    var responseEntities = new List<ResponseEntity>();
    if (openApiOperation.Responses?.Count > 0)
    {
        foreach (var openApiResponse in openApiOperation.Responses)
        {
            var bodies = GetResponseMediaTypeAndBodies(openApiResponse.Value.Reference, openApiResponse.Value.Content, componentGroupId);
            // Fix: a response may define no links; guard before handing the
            // dictionary to GetResponseLinks, which enumerates it.
            var links = openApiResponse.Value.Links != null
                ? GetResponseLinks(transformModel, openApiResponse.Value.Links)
                : new List<ResponseLinkEntity>();
            var responseEntity = new ResponseEntity
            {
                Name = TransformHelper.GetStatusCodeString(openApiResponse.Key),
                StatusCode = openApiResponse.Key,
                Description = openApiResponse.Value.Description,
                // Empty collections are normalized to null in the output entity.
                ResponseMediaTypeAndBodies = bodies.Count > 0 ? bodies : null,
                ResponseLinks = links.Count > 0 ? links : null,
                ResponseHeaders = null // todo
            };
            responseEntities.Add(responseEntity);
        }
    }
    return responseEntities;
}
/// <summary>
/// Loads the file at <paramref name="path"/> according to <paramref name="kind"/>
/// and assigns it to the named graph input variable on <paramref name="runner"/>.
/// </summary>
public void SetInputFromPath(GraphRunner runner, string varName, string path, TlcModule.DataKind kind)
{
    _host.CheckUserArg(runner != null, nameof(runner), "Provide a GraphRunner instance.");
    _host.CheckUserArg(!string.IsNullOrWhiteSpace(varName), nameof(varName), "Specify a graph variable name.");
    _host.CheckUserArg(!string.IsNullOrWhiteSpace(path), nameof(path), "Specify a valid file path.");

    switch (kind)
    {
        case TlcModule.DataKind.FileHandle:
            var fileHandle = new SimpleFileHandle(_host, path, false, false);
            runner.SetInput(varName, fileHandle);
            break;

        case TlcModule.DataKind.DataView:
            // Data views are read via the binary loader.
            IDataView dataView = new BinaryLoader(_host, new BinaryLoader.Arguments(), path);
            runner.SetInput(varName, dataView);
            break;

        case TlcModule.DataKind.PredictorModel:
            PredictorModel predictorModel;
            using (var stream = File.OpenRead(path))
            {
                predictorModel = new PredictorModel(_host, stream);
            }
            runner.SetInput(varName, predictorModel);
            break;

        case TlcModule.DataKind.TransformModel:
            TransformModel transformModel;
            using (var stream = File.OpenRead(path))
            {
                transformModel = new TransformModel(_host, stream);
            }
            runner.SetInput(varName, transformModel);
            break;

        default:
            throw _host.Except("Port type {0} not supported", kind);
    }
}
// Verifies that every component schema in the sample document transforms into the
// expected ComponentEntity (matched by id and full JSON payload).
public void ComponentEntity_GetComponent()
{
    // Arrange
    var openApiDocument = LoadOpenApiDocument("../../samples/GetComponent.yaml");
    // Fix: removed an unused "first operation" lookup that contributed nothing to the test.
    var model = new TransformModel
    {
        OpenApiDoc = openApiDocument,
        ServiceName = "mockServerName",
        ComponentGroupName = "mockComponentGroup"
    };

    // Act: transform each component schema in the document.
    var components = new List<ComponentEntity>();
    var schemas = openApiDocument.Components?.Schemas;
    if (schemas != null)
    {
        // Fix: the loop previously re-evaluated Components?.Schemas inside the
        // foreach; the null-conditional there was redundant (and misleading)
        // after the outer null check.
        foreach (var schema in schemas)
        {
            model.ComponentId = Utility.GetId(model.ServiceName, model.ComponentGroupName, schema.Key);
            model.ComponentName = schema.Key;
            model.OpenApiSchema = schema.Value;
            components.Add(RestComponentTransformer.Transform(model));
        }
    }

    // Assert
    var expects = LoadExpectedJsonObject<ComponentGroupEntity>("../../expects/Components.json");
    // Fix: expected value goes first in xUnit's Assert.Equal, and List.Count is
    // the property, not the LINQ Count() extension.
    Assert.Equal(expects.Components.Count, components.Count);
    foreach (var expect in expects.Components)
    {
        var foundComponent = components.SingleOrDefault(p => p.Id == expect.Id);
        Assert.NotNull(foundComponent);
        Assert.Equal(JsonUtility.ToJsonString(expect), JsonUtility.ToJsonString(foundComponent));
    }
}
// Converts an animated R3 mesh into its serializable export form: bone bind poses,
// derived mesh geometry, the object's transform, and its materials.
// (Method name is lowercase by existing convention; callers depend on it.)
public AnimatedExportObjectModel convert(ExportableModel r3AnimatedMesh)
{
    // Bind poses keyed by bone name.
    Dictionary<string, BoneBindPose> bindPoses = GetBonesBindPoseTransforms(r3AnimatedMesh)
        .ToDictionary(
            pair => pair.Key,
            pair => new BoneBindPose(
                pair.Key,
                pair.Value.position,
                pair.Value.rotation,
                pair.Value.scale));

    MeshGeometry geometry = DeriveMeshGeometryData(GetMesh(r3AnimatedMesh), GetBonesTransforms(r3AnimatedMesh));

    var result = new AnimatedExportObjectModel();
    result.Name = r3AnimatedMesh.gameObject.name;
    result.bindBonePoses = bindPoses;
    result.meshGeometry = geometry;
    result.transform = GetTransformModel(r3AnimatedMesh.transform);
    result.materials = GetMaterials(r3AnimatedMesh);
    return result;
}
// Hub method: relays the received transform to every connected client except the
// caller (Clients.Others excludes the sender).
public void SendTransform(TransformModel transformModel)
{
    Clients.Others.SendTransform(transformModel);
}
// Test setup: give each test a fresh TransformModel so state cannot leak between tests.
public void Init()
{
    instance = new TransformModel();
}
// Deserializes a prediction pipeline from the stream into a fresh MLContext,
// registering component assemblies first so model loading can resolve them.
internal PredictionModel(Stream stream)
{
    _env = new MLContext();
    AssemblyRegistration.RegisterAssemblies(_env);
    PredictorModel = new TransformModel(_env, stream);
}
// Runs an experiment graph described by graphStr against cdata native data views.
// Flow: parse args/graph JSON -> register a host (thread limit negotiated between
// the caller's args and the native environment block) -> feed graph inputs (from
// files or the supplied native views) -> run all nodes -> write graph outputs
// (to files, or back to native via SendViewToNative). Native views are disposed
// in the finally block so unmanaged pointers are released before returning.
private static void RunGraphCore(EnvironmentBlock* penv, IHostEnvironment env, string graphStr, int cdata, DataSourceBlock** ppdata)
{
    Contracts.AssertValue(env);
    var args = new RunGraphArgs();
    string err = null;
    // Keep only the first parse error reported.
    if (!CmdParser.ParseArguments(env, graphStr, args, e => err = err ?? e))
    {
        throw env.Except(err);
    }

    // Thread cap: the smaller of the requested parallelism and the native limit;
    // a non-positive native limit means "defer to args.parallel".
    int? maxThreadsAllowed = Math.Min(args.parallel > 0 ? args.parallel.Value : penv->maxThreadsAllowed, penv->maxThreadsAllowed);
    maxThreadsAllowed = penv->maxThreadsAllowed > 0 ? maxThreadsAllowed : args.parallel;
    var host = env.Register("RunGraph", args.randomSeed, null, maxThreadsAllowed);

    JObject graph;
    try
    {
        graph = JObject.Parse(args.graph);
    }
    catch (JsonReaderException ex)
    {
        throw host.Except(ex, "Failed to parse experiment graph: {0}", ex.Message);
    }

    var runner = new GraphRunner(host, graph["nodes"] as JArray);

    var dvNative = new IDataView[cdata];
    try
    {
        // Wrap each native data block in a managed IDataView.
        for (int i = 0; i < cdata; i++)
        {
            dvNative[i] = new NativeDataView(host, ppdata[i]);
        }

        // Setting inputs.
        var jInputs = graph["inputs"] as JObject;
        if (graph["inputs"] != null && jInputs == null)
        {
            throw host.Except("Unexpected value for 'inputs': {0}", graph["inputs"]);
        }
        int iDv = 0; // index of the next unconsumed native view
        if (jInputs != null)
        {
            foreach (var kvp in jInputs)
            {
                var pathValue = kvp.Value as JValue;
                if (pathValue == null)
                {
                    throw host.Except("Invalid value for input: {0}", kvp.Value);
                }
                var path = pathValue.Value<string>();
                var varName = kvp.Key;
                var type = runner.GetPortDataKind(varName);
                switch (type)
                {
                    case TlcModule.DataKind.FileHandle:
                        var fh = new SimpleFileHandle(host, path, false, false);
                        runner.SetInput(varName, fh);
                        break;
                    case TlcModule.DataKind.DataView:
                        IDataView dv;
                        if (!string.IsNullOrWhiteSpace(path))
                        {
                            // File-backed input: .txt goes through the text loader,
                            // anything else through the binary loader.
                            var extension = Path.GetExtension(path);
                            if (extension == ".txt")
                            {
                                dv = TextLoader.ReadFile(host, new TextLoader.Arguments(), new MultiFileSource(path));
                            }
                            else
                            {
                                dv = new BinaryLoader(host, new BinaryLoader.Arguments(), path);
                            }
                        }
                        else
                        {
                            // No path: consume the next native view, cached with
                            // every column prefetched.
                            Contracts.Assert(iDv < dvNative.Length);
                            // prefetch all columns
                            dv = dvNative[iDv++];
                            var prefetch = new int[dv.Schema.ColumnCount];
                            for (int i = 0; i < prefetch.Length; i++)
                            {
                                prefetch[i] = i;
                            }
                            dv = new CacheDataView(host, dv, prefetch);
                        }
                        runner.SetInput(varName, dv);
                        break;
                    case TlcModule.DataKind.PredictorModel:
                        PredictorModel pm;
                        if (!string.IsNullOrWhiteSpace(path))
                        {
                            using (var fs = File.OpenRead(path))
                                pm = new PredictorModel(host, fs);
                        }
                        else
                        {
                            throw host.Except("Model must be loaded from a file");
                        }
                        runner.SetInput(varName, pm);
                        break;
                    case TlcModule.DataKind.TransformModel:
                        TransformModel tm;
                        if (!string.IsNullOrWhiteSpace(path))
                        {
                            using (var fs = File.OpenRead(path))
                                tm = new TransformModel(host, fs);
                        }
                        else
                        {
                            throw host.Except("Model must be loaded from a file");
                        }
                        runner.SetInput(varName, tm);
                        break;
                    default:
                        throw host.Except("Port type {0} not supported", type);
                }
            }
        }

        runner.RunAll();

        // Reading outputs.
        using (var ch = host.Start("Reading outputs"))
        {
            var jOutputs = graph["outputs"] as JObject;
            if (jOutputs != null)
            {
                foreach (var kvp in jOutputs)
                {
                    var pathValue = kvp.Value as JValue;
                    if (pathValue == null)
                    {
                        throw host.Except("Invalid value for input: {0}", kvp.Value);
                    }
                    var path = pathValue.Value<string>();
                    var varName = kvp.Key;
                    var type = runner.GetPortDataKind(varName);
                    switch (type)
                    {
                        case TlcModule.DataKind.FileHandle:
                            var fh = runner.GetOutput<IFileHandle>(varName);
                            throw host.ExceptNotSupp("File handle outputs not yet supported.");
                        case TlcModule.DataKind.DataView:
                            var idv = runner.GetOutput<IDataView>(varName);
                            if (!string.IsNullOrWhiteSpace(path))
                            {
                                SaveIdvToFile(idv, path, host);
                            }
                            else
                            {
                                // No path: marshal the view back to the native caller.
                                var infos = ProcessColumns(ref idv, args.maxSlots, host);
                                SendViewToNative(ch, penv, idv, infos);
                            }
                            break;
                        case TlcModule.DataKind.PredictorModel:
                            var pm = runner.GetOutput<IPredictorModel>(varName);
                            if (!string.IsNullOrWhiteSpace(path))
                            {
                                SavePredictorModelToFile(pm, path, host);
                            }
                            else
                            {
                                throw host.Except("Returning in-memory models is not supported");
                            }
                            break;
                        case TlcModule.DataKind.TransformModel:
                            var tm = runner.GetOutput<ITransformModel>(varName);
                            if (!string.IsNullOrWhiteSpace(path))
                            {
                                using (var fs = File.OpenWrite(path))
                                    tm.Save(host, fs);
                            }
                            else
                            {
                                throw host.Except("Returning in-memory models is not supported");
                            }
                            break;
                        case TlcModule.DataKind.Array:
                            var objArray = runner.GetOutput<object[]>(varName);
                            if (objArray is IPredictorModel[])
                            {
                                var modelArray = (IPredictorModel[])objArray;
                                // Save each model separately
                                // (path is a format string; the element index fills {0}).
                                for (var i = 0; i < modelArray.Length; i++)
                                {
                                    var modelPath = string.Format(CultureInfo.InvariantCulture, path, i);
                                    SavePredictorModelToFile(modelArray[i], modelPath, host);
                                }
                            }
                            else
                            {
                                throw host.Except("DataKind.Array type {0} not supported", objArray.First().GetType());
                            }
                            break;
                        default:
                            throw host.Except("Port type {0} not supported", type);
                    }
                }
            }
        }
    }
    finally
    {
        // The raw data view is disposable so it lets go of unmanaged raw pointers before we return.
        for (int i = 0; i < dvNative.Length; i++)
        {
            var view = dvNative[i];
            if (view == null)
            {
                continue;
            }
            host.Assert(view is IDisposable);
            var disp = (IDisposable)dvNative[i];
            disp.Dispose();
        }
    }
}
// Deserializes a prediction pipeline from the stream into a fresh ConsoleEnvironment.
internal PredictionModel(Stream stream)
{
    _env = new ConsoleEnvironment();
    _predictorModel = new TransformModel(_env, stream);
}
// Deserializes a prediction pipeline from the stream into a fresh MLContext.
internal PredictionModel(Stream stream)
{
    _env = new MLContext();
    PredictorModel = new TransformModel(_env, stream);
}