public string registrirajUporabnika(Contracts.Data.Uporabnik uporabnik)
{
    // Registration fails if a user with this username already exists.
    Uporabnik u = udao.Read(uporabnik.Upime);
    if (u != null)
        return "false";

    return udao.Create(uporabnik).ToString();
}
public static void CompareContractWithSavedEntity(Contracts.Curve contract)
{
    int id = int.Parse(contract.Identifiers.First(x => x.IsNexusId).Identifier);
    var savedEntity = new DbSetRepository<MDM.Curve>(new MappingContext()).FindOne<MDM.Curve>(id);
    CompareContractWithEntityDetails(contract, savedEntity);
}
public void Initialize(Contracts.Projections.RecipeView view)
{
    RecipeId = view.Id.Id;
    Title = view.Title;
    Instructions = view.Instructions;
    RecipeStatus = view.RecipeStatus;
}
public async Task Delete(Contracts.Entities.Item e)
{
    e.ItemProperties = null;
    Uow.MarkDeleted(e);
    await Task.FromResult(0);
    // ItemProperty rows are removed by cascade delete, so no explicit SQL is needed:
    // Uow.ExecuteSqlCommand("delete from ItemProperty where ItemID = " + e.ItemID);
}
public async Task Store(Contracts.Entities.Item item)
{
    // Search for the item and update it if found.
    int id = item.ItemID;
    var itemInDb = await Context.Items
        .Where(m => m.ItemID == id)
        .Include(d => d.ItemProperties)
        .FirstOrDefaultAsync();
    var optValues = item.ItemProperties ?? new List<Contracts.Entities.ItemProperty>();

    if (itemInDb == default(Contracts.Entities.Item))
    {
        // Not in the database yet: add as new.
        Uow.MarkNew(item);
    }
    else
    {
        // Sync with the existing entity.
        Uow.SetValue(itemInDb, item);

        // Sync the item properties (add and delete).
        Sync<Contracts.Entities.ItemProperty>(
            itemInDb.ItemProperties,
            optValues,
            (x, y) => x.ItemID > 0 && x.ItemID == y.ItemID && x.Name == y.Name);
    }
}
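// The Sync<T> helper used above is not part of this snippet. A minimal sketch of what
// such a collection-synchronization helper might look like, assuming Uow exposes
// MarkNew/MarkDeleted as in Store()/Delete() above and System.Linq is available;
// this is an illustration, not the actual implementation:
private void Sync<T>(ICollection<T> tracked, ICollection<T> incoming, Func<T, T, bool> match)
    where T : class
{
    // Items present only in "tracked" are deleted.
    foreach (var existing in tracked.ToList())
    {
        if (!incoming.Any(candidate => match(existing, candidate)))
            Uow.MarkDeleted(existing);
    }

    // Items present only in "incoming" are added.
    foreach (var candidate in incoming)
    {
        if (!tracked.Any(existing => match(existing, candidate)))
            Uow.MarkNew(candidate);
    }
}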
/// <summary>
/// Determines whether the specified <see cref="T:Tridion.ContentDelivery.Taxonomies.Keyword" /> is equal to <paramref name="otherKeyword" />.
/// </summary>
/// <param name="keyword"><see cref="T:Tridion.ContentDelivery.Taxonomies.Keyword" /></param>
/// <param name="otherKeyword"><see cref="T:Tridion.ContentDelivery.Taxonomies.Keyword" /></param>
/// <returns><c>true</c> if the <see cref="T:TcmCDService.ContentDelivery.TcmUri" /> values are equal, otherwise <c>false</c>.</returns>
internal static Boolean IsEqual(this Keyword keyword, Contracts.Keyword otherKeyword)
{
    if (keyword != null && otherKeyword != null)
        return (TcmUri)keyword.KeywordUri == (TcmUri)otherKeyword.Uri;

    return false;
}
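// Because IsEqual is an extension method with explicit null checks, it is safe to call
// even when either side is null. A small illustration (the null locals are stand-ins;
// how real Keyword instances are obtained is outside this snippet):
Keyword deliveryKeyword = null;
Contracts.Keyword contractKeyword = null;
bool sameItem = deliveryKeyword.IsEqual(contractKeyword);   // false: either side null => false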
public override void AddAgent(Contracts.Agents.AgentBase agent)
{
    if (_queue.Count == 0)
        _currentServedTime = 0;

    _queue.Add(agent);
}
public static void ConfirmEntitySaved(int id, Contracts.Curve contract)
{
    var savedEntity = new DbSetRepository<MDM.Curve>(new MappingContext()).FindOne<MDM.Curve>(id);
    contract.Identifiers.Add(new NexusId() { IsNexusId = true, Identifier = id.ToString() });
    CompareContractWithEntityDetails(contract, savedEntity);
}
public void SetOptions(Contracts.Options options)
{
    EmailAddress = options.EmailAddress;
    if (!string.IsNullOrEmpty(options.CalenderUrl))
        CalenderUrl = options.CalenderUrl;
    else
        CalenderUrl = OptionTasks.GoogleDavBaseUrl;
}
public void SetOptions(Contracts.Options options)
{
    CalenderUrl = options.CalenderUrl;
    UserName = options.UserName;
    UseAccountPassword = options.UseAccountPassword;
    Password = options.Password;
    EmailAddress = options.EmailAddress;
}
/// <summary>
/// Checks whether a response completion code indicates success; used by all Contracts-level functions.
/// </summary>
/// <param name="responseCompletionCode">Completion code returned in a response.</param>
/// <returns><c>true</c> if the code is <see cref="Contracts.CompletionCode.Success"/>, otherwise <c>false</c>.</returns>
public static bool CheckCompletionCode(Contracts.CompletionCode responseCompletionCode)
{
    return responseCompletionCode == Contracts.CompletionCode.Success;
}
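// A typical call-site guard, sketched under the assumption that service calls return a
// response object carrying a completionCode field; "chassisClient" and "GetBladeInfo"
// below are hypothetical names, not part of this snippet:
var response = chassisClient.GetBladeInfo(bladeId);
if (!CheckCompletionCode(response.completionCode))
{
    // Bail out early on any non-success code.
    throw new InvalidOperationException("Blade info request failed: " + response.completionCode);
}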
public async Task Delete(Contracts.Entities.Machine m)
{
    m.MachineCommands = null;
    m.MachineInitCommands = null;
    Uow.MarkDeleted(m);
    await Task.FromResult(0);
    // Child rows are removed by cascade delete, so no explicit SQL is needed:
    // Uow.ExecuteSqlCommand("delete from MachineCommand where MachineID = " + m.MachineID);
    // Uow.ExecuteSqlCommand("delete from MachineInitCommand where MachineID = " + m.MachineID);
}
public ServiceModel(ulong id, string name, Contracts.Services.Metadata.IServiceManagerMetadata serviceMetadata, Dictionary<string, object> settings)
{
    Id = id;
    _name = name;
    _managerCode = serviceMetadata.Code;
    _typeName = serviceMetadata.FancyName;
    _settings = settings;
}
public Message(DateTimeOffset timestamp, bool isOutbound, string subject, string content, Contracts.IProviderDescription provider)
{
    Timestamp = timestamp;
    Direction = isOutbound ? MessageDirection.Outbound : MessageDirection.Inbound;
    Subject = subject;
    Content = content;
    Provider = provider;
    MessageType = MessageType.Email;
}
public MapLocationViewModel(Contracts.Models.GeoLocation location, string label)
{
    Location = new Geopoint(new BasicGeoposition
    {
        Latitude = location.Latitude,
        Longitude = location.Longtitude
    });
    Label = label;
}
public void Send(Contracts.MessageContract contract)
{
    BrokeredMessage msg = new BrokeredMessage(contract);
    //ServiceBusEnvironment.SystemConnectivity.Mode = ConnectivityMode.Http;
    System.Net.WebRequest.DefaultWebProxy.Credentials = System.Net.CredentialCache.DefaultCredentials;
    _Client.Send(msg);
}
public static void Compare(Contracts.PartyRole contract, MDM.PartyRole entity)
{
    PartyRoleDetails detailsToCompare = entity.Details[0];
    if (contract.Nexus != null)
    {
        detailsToCompare = entity.Details.First(details => details.Validity.Start == contract.Nexus.StartDate);
    }

    Assert.AreEqual(contract.Details.Name, detailsToCompare.Name);
}
public static void Compare(Contracts.Curve contract, MDM.Curve entity)
{
    Assert.AreEqual(contract.Details.Name, entity.Name);
    Assert.AreEqual(contract.Details.Commodity.NexusId().Value, entity.Commodity.Id);
    Assert.AreEqual(contract.Details.Originator.NexusId().Value, entity.Originator.Id);
    Assert.AreEqual(contract.Details.Location.NexusId().Value, entity.Location.Id);
    Assert.AreEqual(contract.Details.CommodityUnit, entity.CommodityUnit);
    Assert.AreEqual(contract.Details.CurveType, entity.Type);
    Assert.AreEqual(contract.Details.DefaultSpread, entity.DefaultSpread);
    Assert.AreEqual(contract.Details.Currency, entity.Currency);
}
public IOptionsDisplayControl Create(Contracts.Options options)
{
    if (!_displayAllProfilesAsGeneric)
    {
        if (options.DisplayType == OptionsDisplayType.Google
            || options.ServerAdapterType == ServerAdapterType.WebDavHttpClientBasedWithGoogleOAuth
            || options.ServerAdapterType == ServerAdapterType.GoogleTaskApi)
        {
            return new GoogleOptionsDisplayControl(_session, _profileDataDirectoryFactory, _fixInvalidSettings);
        }
    }

    return new OptionsDisplayControl(_session, _profileDataDirectoryFactory, _fixInvalidSettings);
}
public override bool CanProcessContract(Contracts.Contract contract)
{
    // Clean house: if an agent holds this mentality alongside others, drop the others.
    foreach (Agent agent in AgentList.Instance.Agencies)
    {
        if (agent.Mentality.Contains(this) && agent.Mentality.Count > 1)
        {
            agent.Mentality.RemoveAll(am => am != this);
        }
    }

    // This mentality never processes contracts itself.
    return false;
}
private TabPage AddTabPage(Contracts.Options options)
{
    var optionsControl = _optionsDisplayControlFactory.Create(options);
    var tabPage = new TabPage(options.Name);
    _tabControl.TabPages.Add(tabPage);

    optionsControl.DeletionRequested += delegate { _tabControl.TabPages.Remove(tabPage); };

    optionsControl.HeaderChanged += delegate(object sender, HeaderEventArgs e)
    {
        tabPage.Text = e.Name;
        switch (e.FolderItemType)
        {
            case OlItemType.olAppointmentItem:
                tabPage.ImageKey = e.IsInactive ? "AppointmentDisabled" : "Appointment";
                break;
            case OlItemType.olTaskItem:
                tabPage.ImageKey = e.IsInactive ? "TaskDisabled" : "Task";
                break;
            case OlItemType.olContactItem:
                tabPage.ImageKey = e.IsInactive ? "ContactDisabled" : "Contact";
                break;
            default:
                tabPage.ImageKey = null;
                break;
        }
    };

    optionsControl.CopyRequested += delegate
    {
        var newOptions = optionsControl.Options;
        newOptions.Name += " (Copy)";
        newOptions.Id = Guid.NewGuid();
        var newPage = AddTabPage(newOptions);
        _tabControl.SelectedTab = newPage;
    };

    optionsControl.Options = options;
    tabPage.Controls.Add(optionsControl.UiControl);
    optionsControl.UiControl.Dock = DockStyle.Fill;
    return tabPage;
}
public override void Handle(Contracts.AssesmentCompleted @event)
{
    if (@event.Score == 0)
    {
        Pubnub.publish<string>(
            "Achievements",
            new Achievement
            {
                DateEarnt = DateTime.Now,
                Name = "Thicko",
                Description = "Duhh!! You have earnt the Thicko achievement for getting no answers correct!"
            },
            result => Trace.WriteLine(result));
    }
}
public override void AddAgent(Contracts.Agents.AgentBase agent)
{
    if (agent is VehicleAgentBase)
    {
        _vehicle = (VehicleAgentBase)agent;
        _input_time_ms = (int)(TimeSpan.FromMinutes(1).TotalMilliseconds / _vehicle.InputFactor);
        _canInput = true;
    }
    else
    {
        _queue.Add(agent);
    }
}
public static Contracts.SayWhat[] ViewToContractAdapter(Contracts.AddInViews.SayWhat[] view)
{
    if (view == null)
        return null;

    Contracts.SayWhat[] result = new Contracts.SayWhat[view.Length];
    for (int i = 0; i < view.Length; i++)
    {
        result[i] = Contracts.AddInSideAdapters.SayWhatAddInAdapter.ViewToContractAdapter(view[i]);
    }
    return result;
}
public static Contracts.AddInViews.SayWhat[] ContractToViewAdapter(Contracts.SayWhat[] contract)
{
    if (contract == null)
        return null;

    Contracts.AddInViews.SayWhat[] result = new Contracts.AddInViews.SayWhat[contract.Length];
    for (int i = 0; i < contract.Length; i++)
    {
        result[i] = Contracts.AddInSideAdapters.SayWhatAddInAdapter.ContractToViewAdapter(contract[i]);
    }
    return result;
}
/// <summary>
/// Maps the Contracts BladeBootType to the Ipmi BootType.
/// Used in the GetNextBoot and SetNextBoot APIs associated with blades.
/// </summary>
/// <param name="bootType">Contracts-level blade boot type.</param>
/// <returns>The corresponding Ipmi boot type, or <c>Unknown</c> if no mapping exists.</returns>
internal static Ipmi.BootType GetIpmiBootType(Contracts.BladeBootType bootType)
{
    switch (bootType)
    {
        case Contracts.BladeBootType.NoOverride:
            return Ipmi.BootType.NoOverride;
        case Contracts.BladeBootType.ForcePxe:
            return Ipmi.BootType.ForcePxe;
        case Contracts.BladeBootType.ForceDefaultHdd:
            return Ipmi.BootType.ForceDefaultHdd;
        case Contracts.BladeBootType.ForceIntoBiosSetup:
            return Ipmi.BootType.ForceIntoBiosSetup;
        case Contracts.BladeBootType.ForceFloppyOrRemovable:
            return Ipmi.BootType.ForceFloppyOrRemovable;
        default:
            return Ipmi.BootType.Unknown;
    }
}
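// GetNextBoot presumably needs the same table read in the opposite direction when
// reporting the current setting back through the Contracts layer. A hedged sketch,
// assuming the two enums mirror each other and Contracts.BladeBootType also has an
// Unknown member (an assumption, not confirmed by this snippet):
internal static Contracts.BladeBootType GetContractsBootType(Ipmi.BootType bootType)
{
    switch (bootType)
    {
        case Ipmi.BootType.NoOverride: return Contracts.BladeBootType.NoOverride;
        case Ipmi.BootType.ForcePxe: return Contracts.BladeBootType.ForcePxe;
        case Ipmi.BootType.ForceDefaultHdd: return Contracts.BladeBootType.ForceDefaultHdd;
        case Ipmi.BootType.ForceIntoBiosSetup: return Contracts.BladeBootType.ForceIntoBiosSetup;
        case Ipmi.BootType.ForceFloppyOrRemovable: return Contracts.BladeBootType.ForceFloppyOrRemovable;
        default: return Contracts.BladeBootType.Unknown;
    }
}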
/// <summary>
/// Helper to implement the relation between Event and Show in the contract layer.
/// </summary>
/// <param name="_event">Data-access event to convert.</param>
/// <param name="_show">Contract-level show to attach as the event's details.</param>
/// <returns>The contract-level event.</returns>
private static Contracts.Event CreateEventForShow(DataAccess.Event _event, Contracts.Show _show)
{
    var result = new Contracts.Event()
    {
        Date = _event.Date,
        EventID = _event.ID,
        ListPrice = _event.ListPrice,
        ShowDetails = _show,
        StartTime = _event.StartTime,
        State = (EventState)Enum.Parse(typeof(EventState), _event.State),
        TheaterID = _event.TheaterID
    };
    return result;
}
internal static Contracts.ISpeaker ViewToContractAdapter(Contracts.AddInViews.ISpeaker view)
{
    if (view == null)
        return null;

    if (view.GetType().Equals(typeof(ISpeakerContractToViewAddInAdapter)))
    {
        return ((ISpeakerContractToViewAddInAdapter)view).GetSourceContract();
    }
    return new ISpeakerViewToContractAddInAdapter(view);
}
internal static Contracts.AddInViews.ISpeaker ContractToViewAdapter(Contracts.ISpeaker contract)
{
    if (contract == null)
        return null;

    if (System.Runtime.Remoting.RemotingServices.IsObjectOutOfAppDomain(contract) != true
        && contract.GetType().Equals(typeof(ISpeakerViewToContractAddInAdapter)))
    {
        return ((ISpeakerViewToContractAddInAdapter)contract).GetSourceView();
    }
    return new ISpeakerContractToViewAddInAdapter(contract);
}
public static bool EditOptions(
    NameSpace session,
    Contracts.Options[] options,
    out Contracts.Options[] changedOptions,
    Func<Guid, string> profileDataDirectoryFactory,
    bool fixInvalidSettings,
    bool displayAllProfilesAsGeneric)
{
    var form = new OptionsForm(session, profileDataDirectoryFactory, fixInvalidSettings, displayAllProfilesAsGeneric);
    form.OptionsList = options;
    var shouldSave = form.ShowDialog() == DialogResult.OK;
    changedOptions = form.OptionsList;
    return shouldSave;
}
/// <summary>
/// Possible returns:
///
/// Finite value: no infinite value in the sliding window and at least one non-NaN value.
/// NaN value: only NaN values in the sliding window, or both +Infinity and -Infinity present.
/// Infinite value: one infinite value in the sliding window (its sign is not relevant).
/// </summary>
internal static Single ComputeMovingAverageUniform(FixedSizeQueue<Single> others, Single input, int lag,
    Single lastDropped, ref Single currentSum, ref bool initUniformMovingAverage, ref int nbNanValues)
{
    if (initUniformMovingAverage)
    {
        initUniformMovingAverage = false;
        return ComputeMovingAverageUniformInitialisation(others, input, lag, lastDropped, ref currentSum, ref nbNanValues);
    }

    if (Single.IsNaN(lastDropped))
    {
        --nbNanValues;
    }
    else if (!FloatUtils.IsFinite(lastDropped))
    {
        // One infinite value left the window;
        // we need to recompute everything as we don't know how many infinite values are in the sliding window.
        return ComputeMovingAverageUniformInitialisation(others, input, lag, lastDropped, ref currentSum, ref nbNanValues);
    }
    else
    {
        currentSum -= lastDropped;
    }

    // lastDropped is finite or NaN.
    Contracts.Assert(FloatUtils.IsFinite(lastDropped) || Single.IsNaN(lastDropped));

    var newValue = lag == 0 ? input : others[others.Count - lag];
    if (!Single.IsNaN(newValue) && !FloatUtils.IsFinite(newValue))
    {
        // One infinite value entered the window;
        // we need to recompute everything as we don't know how many infinite values are in the sliding window.
        return ComputeMovingAverageUniformInitialisation(others, input, lag, lastDropped, ref currentSum, ref nbNanValues);
    }

    // lastDropped is finite and newValue is finite or NaN.
    Contracts.Assert(FloatUtils.IsFinite(newValue) || Single.IsNaN(newValue));

    if (!Single.IsNaN(currentSum) && !FloatUtils.IsFinite(currentSum))
    {
        if (Single.IsNaN(newValue))
        {
            ++nbNanValues;
            return currentSum;
        }
        return FloatUtils.IsFinite(newValue) ? currentSum : currentSum + newValue;
    }

    // lastDropped is finite, newValue is finite or NaN, currentSum is finite or NaN.
    Contracts.Assert(FloatUtils.IsFinite(currentSum) || Single.IsNaN(currentSum));

    if (Single.IsNaN(newValue))
    {
        ++nbNanValues;
        int nb = (lag == 0 ? others.Count + 1 : others.Count - lag + 1) - nbNanValues;
        return nb == 0 ? Single.NaN : currentSum / nb;
    }
    else
    {
        int nb = lag == 0 ? others.Count + 1 - nbNanValues : others.Count + 1 - nbNanValues - lag;
        currentSum += input;
        return nb == 0 ? Single.NaN : currentSum / nb;
    }
}
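// A minimal, self-contained sketch of the semantics documented above (not the incremental
// implementation): average the values of a window, skipping NaNs, while any infinity
// flows through IEEE arithmetic and dominates the result.
static float WindowAverage(float[] window)
{
    float sum = 0f;
    int count = 0;
    foreach (var v in window)
    {
        if (float.IsNaN(v))
            continue;              // NaNs are excluded from the average
        sum += v;                  // an infinity poisons the sum, matching the "infinite value" case
        count++;
    }
    return count == 0 ? float.NaN : sum / count;
}

// WindowAverage(new[] { 1f, float.NaN, 3f })                              -> 2
// WindowAverage(new[] { float.NaN, float.NaN })                           -> NaN
// WindowAverage(new[] { 1f, float.PositiveInfinity })                     -> +Infinity
// WindowAverage(new[] { float.PositiveInfinity, float.NegativeInfinity }) -> NaN (inf + -inf)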
/// <inheritdoc/>
protected override void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, IRandom rand,
    IdToIdxLookup idToIdx, int numThreads, DualsTableBase duals, Float[] biasReg, Float[] invariants, Float lambdaNInv,
    VBuffer<Float>[] weights, Float[] biasUnreg, VBuffer<Float>[] l1IntermediateWeights, Float[] l1IntermediateBias, Float[] featureNormSquared)
{
    Contracts.AssertValueOrNull(progress);
    Contracts.Assert(_args.L1Threshold.HasValue);
    Contracts.AssertValueOrNull(idToIdx);
    Contracts.AssertValueOrNull(invariants);
    Contracts.AssertValueOrNull(featureNormSquared);
    int numClasses = Utils.Size(weights);
    Contracts.Assert(Utils.Size(biasReg) == numClasses);
    Contracts.Assert(Utils.Size(biasUnreg) == numClasses);

    int maxUpdateTrials = 2 * numThreads;
    var l1Threshold = _args.L1Threshold.Value;
    bool l1ThresholdZero = l1Threshold == 0;
    var lr = _args.BiasLearningRate * _args.L2Const.Value;

    var pch = progress != null ? progress.StartProgressChannel("Dual update") : null;
    using (pch)
    using (var cursor = _args.Shuffle ? cursorFactory.Create(rand) : cursorFactory.Create())
    {
        long rowCount = 0;
        if (pch != null)
            pch.SetHeader(new ProgressHeader("examples"), e => e.SetProgress(0, rowCount));

        Func<UInt128, long> getIndexFromId = GetIndexFromIdGetter(idToIdx, biasReg.Length);
        while (cursor.MoveNext())
        {
            long idx = getIndexFromId(cursor.Id);
            long dualIndexInitPos = idx * numClasses;
            var features = cursor.Features;
            var label = (int)cursor.Label;
            Float invariant;
            Float normSquared;
            if (invariants != null)
            {
                invariant = invariants[idx];
                Contracts.AssertValue(featureNormSquared);
                normSquared = featureNormSquared[idx];
            }
            else
            {
                normSquared = VectorUtils.NormSquared(features);
                if (_args.BiasLearningRate == 0)
                    normSquared += 1;

                invariant = _loss.ComputeDualUpdateInvariant(2 * normSquared * lambdaNInv * GetInstanceWeight(cursor));
            }

            // The output for the label class using current weights and bias.
            var labelOutput = WDot(ref features, ref weights[label], biasReg[label] + biasUnreg[label]);
            var instanceWeight = GetInstanceWeight(cursor);

            // This will be the new dual variable corresponding to the label class.
            Float labelDual = 0;

            // This will be used to update the weights and regularized bias corresponding to the label class.
            Float labelPrimalUpdate = 0;

            // This will be used to update the unregularized bias corresponding to the label class.
            Float labelAdjustment = 0;

            // Iterate through all classes.
            for (int iClass = 0; iClass < numClasses; iClass++)
            {
                // Skip the dual/weights/bias update for the label class; it is taken care of at the end.
                if (iClass == label)
                    continue;

                // Loop trials for compare-and-swap updates of the duals.
                // In general, concurrent update conflicts on the same dual variable are rare
                // if the data is shuffled.
                for (int numTrials = 0; numTrials < maxUpdateTrials; numTrials++)
                {
                    long dualIndex = iClass + dualIndexInitPos;
                    var dual = duals[dualIndex];
                    var output = labelOutput + labelPrimalUpdate * normSquared
                        - WDot(ref features, ref weights[iClass], biasReg[iClass] + biasUnreg[iClass]);
                    var dualUpdate = _loss.DualUpdate(output, 1, dual, invariant, numThreads);

                    // The successive over-relaxation approach to adjust the sum of dual variables (biasReg) to zero.
                    // Reference for details: http://stat.rutgers.edu/home/tzhang/papers/ml02_dual.pdf, pp. 16-17.
                    var adjustment = l1ThresholdZero ? lr * biasReg[iClass] : lr * l1IntermediateBias[iClass];
                    dualUpdate -= adjustment;
                    bool success = false;
                    duals.ApplyAt(dualIndex, (long index, ref Float value) =>
                        success = Interlocked.CompareExchange(ref value, dual + dualUpdate, dual) == dual);

                    if (success)
                    {
                        // Note: dualConstraint[iClass] = lambdaNInv * (sum of duals[iClass])
                        var primalUpdate = dualUpdate * lambdaNInv * instanceWeight;
                        labelDual -= dual + dualUpdate;
                        labelPrimalUpdate += primalUpdate;
                        biasUnreg[iClass] += adjustment * lambdaNInv * instanceWeight;
                        labelAdjustment -= adjustment;

                        if (l1ThresholdZero)
                        {
                            VectorUtils.AddMult(ref features, weights[iClass].Values, -primalUpdate);
                            biasReg[iClass] -= primalUpdate;
                        }
                        else
                        {
                            // Iterative shrinkage-thresholding (aka soft-thresholding):
                            // update v = denseWeights as if there were no L1.
                            // Thresholding: if |v[j]| < threshold, turn off weights[j];
                            // if not, shrink: w[j] = v[j] - sign(v[j]) * threshold.
                            l1IntermediateBias[iClass] -= primalUpdate;
                            if (_args.BiasLearningRate == 0)
                            {
                                biasReg[iClass] = Math.Abs(l1IntermediateBias[iClass]) - l1Threshold > 0.0
                                    ? l1IntermediateBias[iClass] - Math.Sign(l1IntermediateBias[iClass]) * l1Threshold
                                    : 0;
                            }

                            if (features.IsDense)
                                CpuMathUtils.SdcaL1UpdateDense(-primalUpdate, features.Length, features.Values, l1Threshold, l1IntermediateWeights[iClass].Values, weights[iClass].Values);
                            else if (features.Count > 0)
                                CpuMathUtils.SdcaL1UpdateSparse(-primalUpdate, features.Length, features.Values, features.Indices, features.Count, l1Threshold, l1IntermediateWeights[iClass].Values, weights[iClass].Values);
                        }

                        break;
                    }
                }
            }

            // Update the label class with the accumulated weights and dual variable.
            duals[label + dualIndexInitPos] = labelDual;
            biasUnreg[label] += labelAdjustment * lambdaNInv * instanceWeight;
            if (l1ThresholdZero)
            {
                VectorUtils.AddMult(ref features, weights[label].Values, labelPrimalUpdate);
                biasReg[label] += labelPrimalUpdate;
            }
            else
            {
                l1IntermediateBias[label] += labelPrimalUpdate;
                var intermediateBias = l1IntermediateBias[label];
                biasReg[label] = Math.Abs(intermediateBias) - l1Threshold > 0.0
                    ? intermediateBias - Math.Sign(intermediateBias) * l1Threshold
                    : 0;

                if (features.IsDense)
                    CpuMathUtils.SdcaL1UpdateDense(labelPrimalUpdate, features.Length, features.Values, l1Threshold, l1IntermediateWeights[label].Values, weights[label].Values);
                else if (features.Count > 0)
                    CpuMathUtils.SdcaL1UpdateSparse(labelPrimalUpdate, features.Length, features.Values, features.Indices, features.Count, l1Threshold, l1IntermediateWeights[label].Values, weights[label].Values);
            }

            rowCount++;
        }
    }
}
protected override DataViewType GetColumnTypeCore(int iinfo)
{
    Contracts.Assert(0 <= iinfo && iinfo < Infos.Length);
    return NumberDataViewType.Single;
}
private static Float PowerOfTwo(int exp)
{
    Contracts.Assert(0 <= exp && exp < ExpInf);
    return FloatUtils.GetPowerOfTwoSingle(exp);
}
private static Session LoadTFSession(IHostEnvironment env, string exportDirSavedModel)
{
    Contracts.Check(env != null, nameof(env));
    env.CheckValue(exportDirSavedModel, nameof(exportDirSavedModel));
    return Session.LoadFromSavedModel(exportDirSavedModel);
}
public override IEnumerable<TestResult> ComputeTests(double[] scores)
{
    Object testLock = new Object();

    double[] weights = Dataset.SampleWeights;
    double totalL1Error = 0.0;
    double totalL2Error = 0.0;

    // Have each thread take as big a chunk as possible, to minimize repeat computations in the sparse array.
    int chunkSize = 1 + Dataset.NumDocs / BlockingThreadPool.NumThreads;
    // REVIEW: This partitioning doesn't look optimal.
    // It probably makes sense to investigate better ways of splitting the data.
    var actions = new Action[(int)Math.Ceiling(1.0 * Dataset.NumDocs / chunkSize)];
    var actionIndex = 0;
    for (int documentStart = 0; documentStart < Dataset.NumDocs; documentStart += chunkSize)
    {
        var startDoc = documentStart;
        var endDoc = Math.Min(documentStart + chunkSize - 1, Dataset.NumDocs - 1);
        actions[actionIndex++] = () =>
        {
            double l1Error = 0.0;
            double l2Error = 0.0;
            for (int i = startDoc; i <= endDoc; i++)
            {
                double error = _labels[i] - scores[i];
                double weight = (weights != null) ? weights[i] : 1.0;
                l1Error += weight * Math.Abs(error);
                l2Error += weight * error * error;
            }
            lock (testLock)
            {
                totalL1Error += l1Error;
                totalL2Error += l2Error;
            }
        };
    }

    Parallel.Invoke(new ParallelOptions() { MaxDegreeOfParallelism = BlockingThreadPool.NumThreads }, actions);

    List<TestResult> result = new List<TestResult>();
    Contracts.Assert(_resultType == null || _resultType == 1 || _resultType == 2);
    switch (_resultType)
    {
        case 1:
            result.Add(new TestResult("L1", totalL1Error, Dataset.NumDocs, true, TestResult.ValueOperator.Average));
            break;
        case 2:
            result.Add(new TestResult("L2", totalL2Error, Dataset.NumDocs, true, TestResult.ValueOperator.SqrtAverage));
            break;
        default:
            result.Add(new TestResult("L1", totalL1Error, Dataset.NumDocs, true, TestResult.ValueOperator.Average));
            result.Add(new TestResult("L2", totalL2Error, Dataset.NumDocs, true, TestResult.ValueOperator.SqrtAverage));
            break;
    }
    return result;
}
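// The chunk-then-merge-under-a-lock pattern above generalizes. A minimal self-contained
// sketch of the same idea (names are illustrative; uses System.Collections.Concurrent's
// Partitioner and System.Threading.Tasks.Parallel):
static (double l1, double l2) ChunkedErrors(double[] labels, double[] scores, int threads)
{
    object gate = new object();
    double totalL1 = 0, totalL2 = 0;
    int chunk = 1 + labels.Length / threads;

    Parallel.ForEach(Partitioner.Create(0, labels.Length, chunk), range =>
    {
        double l1 = 0, l2 = 0;                          // thread-local partial sums
        for (int i = range.Item1; i < range.Item2; i++)
        {
            double error = labels[i] - scores[i];
            l1 += Math.Abs(error);
            l2 += error * error;
        }
        lock (gate) { totalL1 += l1; totalL2 += l2; }   // one short merge per chunk
    });

    return (totalL1, totalL2);
}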
/// <summary>
/// Constructor for the column role.
/// </summary>
/// <param name="value">The value for the role. Must be non-empty.</param>
public ColumnRole(string value)
{
    Contracts.CheckNonEmpty(value, nameof(value));
    Value = value;
}
private protected override void CheckLabels(RoleMappedData data)
{
    Contracts.AssertValue(data);
    data.CheckBinaryLabel();
}
internal RankingPredictionTransformer(IHostEnvironment env, ModelLoadContext ctx)
    : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(RankingPredictionTransformer<TModel>)), ctx)
{
    Scorer = GetGenericScorer();
}
internal RankingPredictionTransformer(IHostEnvironment env, TModel model, DataViewSchema inputSchema, string featureColumn)
    : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(RankingPredictionTransformer<TModel>)), model, inputSchema, featureColumn)
{
    Scorer = GetGenericScorer();
}
internal MulticlassPredictionTransformer(IHostEnvironment env, ModelLoadContext ctx)
    : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(MulticlassPredictionTransformer<TModel>)), ctx)
{
    InitializationLogic(ctx, out _trainLabelColumn, out _scoreColumn, out _predictedLabelColumn);
}
internal BinaryPredictionTransformer(IHostEnvironment env, ModelLoadContext ctx)
    : base(Contracts.CheckRef(env, nameof(env)).Register(nameof(BinaryPredictionTransformer<TModel>)), ctx)
{
    InitializationLogic(ctx, out Threshold, out ThresholdColumn);
}
public static CommonOutputs.MacroOutput<Output> OneVersusAll(
    IHostEnvironment env,
    Arguments input,
    EntryPointNode node)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(input, nameof(input));
    env.Assert(input.Nodes.Count > 0);

    var numClasses = GetNumberOfClasses(env, input, out var label);
    var predModelVars = new Var<PredictorModel>[numClasses];

    // This will be the final resulting list of nodes that is returned from the macro.
    var macroNodes = new List<EntryPointNode>();

    // Instantiate the subgraph for each label value.
    for (int k = 0; k < numClasses; k++)
    {
        var result = ProcessClass(env, k, label, input, node);
        predModelVars[k] = result.Item2;
        macroNodes.AddRange(result.Item1);
    }

    // Use the OVA model combiner to combine these models into one.
    // It takes in an array of binary predictor models and produces a single multiclass predictor model.
    var macroExperiment = new Experiment(env);
    var combinerNode = new Legacy.Models.OvaModelCombiner
    {
        ModelArray = new ArrayVar<PredictorModel>(predModelVars),
        TrainingData = new Var<IDataView> { VarName = node.GetInputVariable(nameof(input.TrainingData)).VariableName },
        Caching = (Legacy.Models.CachingOptions)input.Caching,
        FeatureColumn = input.FeatureColumn,
        NormalizeFeatures = (Legacy.Models.NormalizeOption)input.NormalizeFeatures,
        LabelColumn = input.LabelColumn,
        UseProbabilities = input.UseProbabilities
    };

    // Get the output model variable.
    if (!node.OutputMap.TryGetValue(nameof(Output.PredictorModel), out var outVariableName))
        throw new Exception("Cannot find OVA model output.");

    // Map the macro's output back to the OVA combiner (so the combiner will set the value on our output variable).
    var combinerOutput = new Legacy.Models.OvaModelCombiner.Output
    {
        PredictorModel = new Var<PredictorModel> { VarName = outVariableName }
    };

    // Add to the experiment (must be done AFTER we assign a variable name to the output).
    macroExperiment.Add(combinerNode, combinerOutput);

    // Add the nodes to the main experiment.
    var nodes = macroExperiment.GetNodes();
    var expNodes = EntryPointNode.ValidateNodes(env, node.Context, nodes);
    macroNodes.AddRange(expNodes);

    return new CommonOutputs.MacroOutput<Output>() { Nodes = macroNodes };
}
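// Conceptually, the combined OVA model predicts by scoring every binary "class k vs. rest"
// classifier and taking the argmax. A minimal sketch of that idea, independent of the
// entry-point plumbing above (names are illustrative):
static int PredictOneVersusAll(Func<float[], float>[] binaryScorers, float[] features)
{
    int best = -1;
    float bestScore = float.NegativeInfinity;
    for (int k = 0; k < binaryScorers.Length; k++)
    {
        float score = binaryScorers[k](features);   // confidence that the example is class k
        if (score > bestScore)
        {
            bestScore = score;
            best = k;
        }
    }
    return best;
}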
private BoundColumn MakeColumn(Schema inputSchema, int iinfo)
{
    Contracts.AssertValue(inputSchema);
    Contracts.Assert(0 <= iinfo && iinfo < _parent._columns.Length);

    ColumnType itemType = null;
    int[] sources = new int[_parent._columns[iinfo].Inputs.Count];
    // Go through the columns and establish the following:
    // - indices of input columns in the input schema. Throw if they are not there.
    // - output type. Throw if the types of the inputs are not the same.
    // - how many slots are there in the output vector (or variable). Denoted by totalSize.
    // - total size of the CategoricalSlotRanges metadata, if present. Denoted by catCount.
    // - whether the column is normalized.
    //   It is true when ALL inputs are normalized (and of numeric type).
    // - whether the column has slot names.
    //   It is true if ANY input is a scalar, or has slot names.
    // - whether the column has categorical slot ranges.
    //   It is true if ANY input has this metadata.
    int totalSize = 0;
    int catCount = 0;
    bool isNormalized = true;
    bool hasSlotNames = false;
    bool hasCategoricals = false;

    for (int i = 0; i < _parent._columns[iinfo].Inputs.Count; i++)
    {
        var (srcName, srcAlias) = _parent._columns[iinfo].Inputs[i];
        if (!inputSchema.TryGetColumnIndex(srcName, out int srcCol))
            throw _host.ExceptSchemaMismatch(nameof(inputSchema), "input", srcName);
        sources[i] = srcCol;

        var curType = inputSchema.GetColumnType(srcCol);
        if (itemType == null)
        {
            itemType = curType.ItemType;
            totalSize = curType.ValueCount;
        }
        else if (curType.ItemType.Equals(itemType))
        {
            // If any one input is variable length, then the output is variable length.
            if (totalSize == 0 || curType.ValueCount == 0)
                totalSize = 0;
            else
                totalSize += curType.ValueCount;
        }
        else
        {
            throw _host.ExceptSchemaMismatch(nameof(inputSchema), "input", srcName, itemType.ToString(), curType.ToString());
        }

        if (isNormalized && !inputSchema.IsNormalized(srcCol))
            isNormalized = false;

        if (MetadataUtils.TryGetCategoricalFeatureIndices(inputSchema, srcCol, out int[] typeCat))
public EntryStream(Repository.Entry entry)
{
    Contracts.CheckValue(entry, nameof(entry));
    Contracts.CheckValue(entry.Stream, nameof(entry.Stream));
    _entry = entry;
}
/// <summary>
/// Constructor given a schema, and mapping pairs of roles to columns in the schema.
/// This skips null or empty column names. It will also skip column names that are not
/// found in the schema if <paramref name="opt"/> is true.
/// </summary>
/// <param name="schema">The schema over which roles are defined</param>
/// <param name="opt">Whether to consider the specified column names "optional". If <c>false</c>, any non-empty
/// column name that does not appear in <paramref name="schema"/> will result in an exception being thrown,
/// but if <c>true</c> such values will be ignored</param>
/// <param name="roles">The column role to column name mappings</param>
public RoleMappedSchema(Schema schema, bool opt = false, params KeyValuePair<ColumnRole, string>[] roles)
    : this(Contracts.CheckRef(schema, nameof(schema)), Contracts.CheckRef(roles, nameof(roles)), opt)
{
}
// REVIEW: It would be nice to support propagation of select metadata.
public static IDataView Create<TSrc, TDst>(IHostEnvironment env, string name, IDataView input,
    string src, string dst, DataViewType typeSrc, DataViewType typeDst, ValueMapper<TSrc, TDst> mapper,
    ValueGetter<VBuffer<ReadOnlyMemory<char>>> keyValueGetter = null,
    ValueGetter<VBuffer<ReadOnlyMemory<char>>> slotNamesGetter = null)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckNonEmpty(name, nameof(name));
    env.CheckValue(input, nameof(input));
    env.CheckNonEmpty(src, nameof(src));
    env.CheckNonEmpty(dst, nameof(dst));
    env.CheckValue(typeSrc, nameof(typeSrc));
    env.CheckValue(typeDst, nameof(typeDst));
    env.CheckValue(mapper, nameof(mapper));
    env.Check(keyValueGetter == null || typeDst.GetItemType() is KeyDataViewType);
    env.Check(slotNamesGetter == null || typeDst.IsKnownSizeVector());

    if (typeSrc.RawType != typeof(TSrc))
    {
        throw env.ExceptParam(nameof(mapper),
            "The source column type '{0}' doesn't match the input type of the mapper", typeSrc);
    }
    if (typeDst.RawType != typeof(TDst))
    {
        throw env.ExceptParam(nameof(mapper),
            "The destination column type '{0}' doesn't match the output type of the mapper", typeDst);
    }

    bool tmp = input.Schema.TryGetColumnIndex(src, out int colSrc);
    if (!tmp)
        throw env.ExceptParam(nameof(src), "The input data doesn't have a column named '{0}'", src);
    var typeOrig = input.Schema[colSrc].Type;

    // REVIEW: Ideally this should support vector-type conversion. It currently doesn't.
    bool ident;
    Delegate conv;
    if (typeOrig.SameSizeAndItemType(typeSrc))
    {
        ident = true;
        conv = null;
    }
    else if (!Conversions.Instance.TryGetStandardConversion(typeOrig, typeSrc, out conv, out ident))
    {
        throw env.ExceptParam(nameof(mapper),
            "The type of column '{0}', '{1}', cannot be converted to the input type of the mapper '{2}'",
            src, typeOrig, typeSrc);
    }

    var col = new Column(src, dst);
    IDataView impl;
    if (ident)
    {
        impl = new Impl<TSrc, TDst, TDst>(env, name, input, col, typeDst, mapper,
            keyValueGetter: keyValueGetter, slotNamesGetter: slotNamesGetter);
    }
    else
    {
        Func<IHostEnvironment, string, IDataView, Column, DataViewType, ValueMapper<int, int>,
            ValueMapper<int, int>, ValueGetter<VBuffer<ReadOnlyMemory<char>>>,
            ValueGetter<VBuffer<ReadOnlyMemory<char>>>, Impl<int, int, int>> del = CreateImpl<int, int, int>;
        var meth = del.GetMethodInfo().GetGenericMethodDefinition()
            .MakeGenericMethod(typeOrig.RawType, typeof(TSrc), typeof(TDst));
        impl = (IDataView)meth.Invoke(null, new object[] { env, name, input, col, typeDst, conv, mapper, keyValueGetter, slotNamesGetter });
    }

    return new OpaqueDataView(impl);
}
/// <summary>
/// Constructor given a schema, and mapping pairs of roles to columns in the schema.
/// This skips null or empty column names. It will also skip column names that are not
/// found in the schema if <paramref name="opt"/> is true.
/// </summary>
/// <param name="schema">The schema over which roles are defined</param>
/// <param name="roles">The column role to column name mappings</param>
/// <param name="opt">Whether to consider the specified column names "optional". If <c>false</c>, any non-empty
/// column name that does not appear in <paramref name="schema"/> will result in an exception being thrown,
/// but if <c>true</c> such values will be ignored</param>
public RoleMappedSchema(Schema schema, IEnumerable<KeyValuePair<ColumnRole, string>> roles, bool opt = false)
    : this(Contracts.CheckRef(schema, nameof(schema)), MapFromNames(schema, Contracts.CheckRef(roles, nameof(roles)), opt))
{
}
// The purpose of this is to catch (at compile time) invocations of ToFloat
// that are not appropriate. Note that the return type is void.
public static void ToFloat(this Single dbl)
{
    Contracts.Assert(false, "Bad use of ToFloat");
    throw Contracts.Except();
}
/// <summary>
/// Constructor given a data view, and mapping pairs of roles to columns in the data view's schema.
/// This skips null or empty column names. It will also skip column names that are not
/// found in the schema if <paramref name="opt"/> is true.
/// </summary>
/// <param name="data">The data view over which roles are defined</param>
/// <param name="roles">The column role to column name mappings</param>
/// <param name="opt">Whether to consider the specified column names "optional". If <c>false</c>, any non-empty
/// column name that does not appear in <paramref name="data"/>'s schema will result in an exception being thrown,
/// but if <c>true</c> such values will be ignored</param>
public RoleMappedData(IDataView data, IEnumerable<KeyValuePair<RoleMappedSchema.ColumnRole, string>> roles, bool opt = false)
    : this(Contracts.CheckRef(data, nameof(data)), new RoleMappedSchema(data.Schema, Contracts.CheckRef(roles, nameof(roles)), opt))
{
}
/// <summary>
/// Apply a softmax on an array of Floats. Note that src and dst may be the same array.
/// </summary>
public static void ApplySoftMax(Float[] src, Float[] dst)
{
    Contracts.Assert(src.Length == dst.Length);
    ApplySoftMax(src, dst, 0, src.Length);
}
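// The range-based overload forwarded to above is not shown here. A minimal sketch of the
// usual max-shifted (numerically stable) softmax it presumably implements; this is an
// illustration, not the actual overload:
static void SoftMax(float[] src, float[] dst, int begin, int count)
{
    // Subtract the max before exponentiating so large inputs don't overflow.
    float max = float.NegativeInfinity;
    for (int i = begin; i < begin + count; i++)
        max = Math.Max(max, src[i]);

    float sum = 0;
    for (int i = begin; i < begin + count; i++)
    {
        dst[i] = (float)Math.Exp(src[i] - max);
        sum += dst[i];
    }

    // Normalize so the range sums to 1. Writing into dst first is what allows src == dst.
    for (int i = begin; i < begin + count; i++)
        dst[i] /= sum;
}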
public MatrixFactorizationReconciler(Func<IHostEnvironment, string, string, string, IEstimator<ITransformer>> factory,
    Scalar<float> label, Key<T> matColumnIndex, Key<T> matRowIndex)
    : base(MakeInputs(Contracts.CheckRef(label, nameof(label)),
          Contracts.CheckRef(matColumnIndex, nameof(matColumnIndex)),
          Contracts.CheckRef(matRowIndex, nameof(matRowIndex))),
      new string[] { FixedOutputName })
{
    Contracts.AssertValue(factory);
    _factory = factory;
    Output = new Impl(this);
    Outputs = new PipelineColumn[] { Output };
}
internal bool TryUnparse(StringBuilder sb)
{
    Contracts.AssertValue(sb);
    return TryUnparseCore(sb);
}
public FlockIndexer(OneHotFeatureFlock flock)
{
    Contracts.AssertValue(flock);
    _flock = flock;
    _indexer = _flock.Bins.GetIndexer();
}
/// <inheritdoc/>
protected override bool CheckConvergence(IProgressChannel pch, int iter, FloatLabelCursor.Factory cursorFactory,
    DualsTableBase duals, IdToIdxLookup idToIdx, VBuffer<Float>[] weights, VBuffer<Float>[] bestWeights,
    Float[] biasUnreg, Float[] bestBiasUnreg, Float[] biasReg, Float[] bestBiasReg, long count,
    Double[] metrics, ref Double bestPrimalLoss, ref int bestIter)
{
    Contracts.AssertValue(weights);
    Contracts.AssertValue(duals);
    int numClasses = weights.Length;
    Contracts.Assert(duals.Length >= numClasses * count);
    Contracts.AssertValueOrNull(idToIdx);
    Contracts.Assert(Utils.Size(weights) == numClasses);
    Contracts.Assert(Utils.Size(biasReg) == numClasses);
    Contracts.Assert(Utils.Size(biasUnreg) == numClasses);
    Contracts.Assert(Utils.Size(metrics) == 6);
    var reportedValues = new Double?[metrics.Length + 1];
    reportedValues[metrics.Length] = iter;

    var lossSum = new CompensatedSum();
    var dualLossSum = new CompensatedSum();
    int numFeatures = weights[0].Length;

    using (var cursor = cursorFactory.Create())
    {
        long row = 0;
        Func<UInt128, long, long> getIndexFromIdAndRow = GetIndexFromIdAndRowGetter(idToIdx, biasReg.Length);
        // Iterate through the data to compute the loss function.
        while (cursor.MoveNext())
        {
            var instanceWeight = GetInstanceWeight(cursor);
            var features = cursor.Features;
            var label = (int)cursor.Label;
            var labelOutput = WDot(ref features, ref weights[label], biasReg[label] + biasUnreg[label]);
            Double subLoss = 0;
            Double subDualLoss = 0;
            long idx = getIndexFromIdAndRow(cursor.Id, row);
            long dualIndex = idx * numClasses;
            for (int iClass = 0; iClass < numClasses; iClass++)
            {
                if (iClass == label)
                {
                    dualIndex++;
                    continue;
                }

                var currentClassOutput = WDot(ref features, ref weights[iClass], biasReg[iClass] + biasUnreg[iClass]);
                subLoss += _loss.Loss(labelOutput - currentClassOutput, 1);
                Contracts.Assert(dualIndex == iClass + idx * numClasses);
                var dual = duals[dualIndex++];
                subDualLoss += _loss.DualLoss(1, dual);
            }

            lossSum.Add(subLoss * instanceWeight);
            dualLossSum.Add(subDualLoss * instanceWeight);

            row++;
        }
        Host.Assert(idToIdx == null || row * numClasses == duals.Length);
    }

    Contracts.Assert(_args.L2Const.HasValue);
    Contracts.Assert(_args.L1Threshold.HasValue);
    Double l2Const = _args.L2Const.Value;
    Double l1Threshold = _args.L1Threshold.Value;

    Double weightsL1Norm = 0;
    Double weightsL2NormSquared = 0;
    Double biasRegularizationAdjustment = 0;
    for (int iClass = 0; iClass < numClasses; iClass++)
    {
        weightsL1Norm += VectorUtils.L1Norm(ref weights[iClass]) + Math.Abs(biasReg[iClass]);
        weightsL2NormSquared += VectorUtils.NormSquared(weights[iClass]) + biasReg[iClass] * biasReg[iClass];
        biasRegularizationAdjustment += biasReg[iClass] * biasUnreg[iClass];
    }

    Double l1Regularizer = _args.L1Threshold.Value * l2Const * weightsL1Norm;
    var l2Regularizer = l2Const * weightsL2NormSquared * 0.5;
    var newLoss = lossSum.Sum / count + l2Regularizer + l1Regularizer;
    var newDualLoss = dualLossSum.Sum / count - l2Regularizer - l2Const * biasRegularizationAdjustment;
    var dualityGap = newLoss - newDualLoss;

    metrics[(int)MetricKind.Loss] = newLoss;
    metrics[(int)MetricKind.DualLoss] = newDualLoss;
    metrics[(int)MetricKind.DualityGap] = dualityGap;
    metrics[(int)MetricKind.BiasUnreg] = biasUnreg[0];
    metrics[(int)MetricKind.BiasReg] = biasReg[0];
    metrics[(int)MetricKind.L1Sparsity] = _args.L1Threshold == 0
        ? 1
        : weights.Sum(weight => weight.Values.Count(w => w != 0)) / (numClasses * numFeatures);

    bool converged = dualityGap / newLoss < _args.ConvergenceTolerance;

    if (metrics[(int)MetricKind.Loss] < bestPrimalLoss)
    {
        // Maintain a copy of the weights and bias with the best primal loss thus far.
        // This is some extra work and uses extra memory, but it seems worth doing.
        // REVIEW: Sparsify bestWeights?
        for (int iClass = 0; iClass < numClasses; iClass++)
        {
            weights[iClass].CopyTo(ref bestWeights[iClass]);
            bestBiasReg[iClass] = biasReg[iClass];
            bestBiasUnreg[iClass] = biasUnreg[iClass];
        }
        bestPrimalLoss = metrics[(int)MetricKind.Loss];
        bestIter = iter;
    }

    for (int i = 0; i < metrics.Length; i++)
        reportedValues[i] = metrics[i];
    if (pch != null)
        pch.Checkpoint(reportedValues);

    return converged;
}
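// Note on the convergence test above, sketched as the standard SDCA optimality
// certificate: weak duality gives P(w) >= D(alpha) for the primal objective P and
// dual objective D, so the duality gap bounds the primal suboptimality,
//
//     P(w) - P(w*) <= P(w) - D(alpha) = dualityGap,
//
// and training is declared converged once the relative gap falls below the tolerance:
//
//     (P(w) - D(alpha)) / P(w) < ConvergenceTolerance.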
public ImageResizerTransform(IHostEnvironment env, params ColumnInfo[] columns)
    : base(Contracts.CheckRef(env, nameof(env)).Register(RegistrationName), GetColumnPairs(columns))
{
    _columns = columns.ToArray();
}
/// <summary>
/// Given a repository, returns the save context for saving the data loader model.
/// </summary>
public static ModelSaveContext GetDataModelSavingContext(RepositoryWriter rep)
{
    Contracts.CheckValue(rep, nameof(rep));
    return new ModelSaveContext(rep, DirDataLoaderModel, ModelLoadContext.ModelStreamName);
}
public string GetPathOrNull(int index)
{
    Contracts.Check(index == 0);
    return null;
}
private static ILegacyDataLoader ApplyTransformsCore(IHost host, ILegacyDataLoader srcLoader,
    KeyValuePair<string, string>[] tagData, Func<IHostEnvironment, int, IDataView, IDataView> createTransform)
{
    Contracts.AssertValue(host, "host");
    host.AssertValue(srcLoader, "srcLoader");
    host.AssertNonEmpty(tagData);
    host.AssertValue(createTransform, "createTransform");

    // If the loader is a composite, we need to start with its underlying pipeline end.
    var exes = new List<TransformEx>();
    var composite = srcLoader as LegacyCompositeDataLoader;
    IDataView srcView;
    ILegacyDataLoader pipeStart;
    if (composite != null)
    {
        srcView = composite.View;
        exes.AddRange(composite._transforms);
        pipeStart = composite._loader;
    }
    else
        srcView = pipeStart = srcLoader;

    IDataView view = srcView;
    using (var ch = host.Start("Transforms"))
    {
        int count = Utils.Size(tagData);
        var newlyCreated = new List<TransformEx>();
        for (int i = 0; i < count; i++)
        {
            // REVIEW: this might cause silent automatic tag conflicts if the pipeline is short-circuited.
            // Maybe it's better to allow empty tags?
            var tag = tagData[i].Key;
            if (string.IsNullOrEmpty(tag))
                tag = GenerateTag(exes.Count);

            var newDataView = createTransform(host, i, view);

            // Append the newly created transforms to the exes list.
            // If the newTransform is a 'no-op' transform, i.e. equal to the original view,
            // the exes array will not be modified: there's no reason to record details of a no-op transform,
            // especially since this would overwrite the useful details of the upstream transform.
            newlyCreated.Clear();
            IDataView curDataView = newDataView;
            while (true)
            {
                var cur = curDataView as IDataTransform;
                if (cur == null)
                {
                    // We reached all the way back to the pipe start. The exes accumulated so far are irrelevant.
                    ch.Check(curDataView == pipeStart,
                        "The transform has corrupted the chain (chain no longer starts with the same loader).");
                    exes.Clear();
                    break;
                }

                int index = exes.FindLastIndex(x => x.Transform == cur);
                if (index >= 0)
                {
                    // We found a transform in exes to attach to.
                    if (index < exes.Count - 1)
                    {
                        // The transform short-circuited some of the existing ones; remove them.
                        exes.RemoveRange(index + 1, exes.Count - index - 1);
                    }
                    break;
                }

                newlyCreated.Add(new TransformEx(tag, tagData[i].Value, cur));
                curDataView = cur.Source;
            }

            newlyCreated.Reverse();
            exes.AddRange(newlyCreated);

            view = newDataView;
        }
    }

    return view == srcView ? srcLoader : new LegacyCompositeDataLoader(host, exes.ToArray());
}
/// <summary>
/// Efficient computation of natural-based pessimistic DCG@3 for a given query.
/// </summary>
/// <param name="scores">vector of scores</param>
/// <param name="labels">vector of labels</param>
/// <param name="begin">index of the first document in the query</param>
/// <param name="end">index of the first document in the next query</param>
public static unsafe double DCG3(double[] scores, short[] labels, int begin, int end)
{
    if (begin >= end)
        throw Contracts.ExceptParam(nameof(begin));

    double maxScore1 = double.NegativeInfinity;
    double maxScore2 = double.NegativeInfinity;
    double maxScore3 = double.NegativeInfinity;
    int maxLabel1 = -1;
    int maxLabel2 = -1;
    int maxLabel3 = -1;

    fixed (double* pScores = scores)
    fixed (short* pLabels = labels)
    {
        for (int d = begin; d < end; ++d)
        {
            double score = pScores[d];
            short label = pLabels[d];
            // Check whether the current document should be in the top 3.
            // Ties are broken pessimistically: on equal scores, the lower label ranks higher.
            if (score > maxScore3 || (score == maxScore3 && label < maxLabel3))
            {
                if (score > maxScore2 || (score == maxScore2 && label < maxLabel2))
                {
                    maxScore3 = maxScore2;
                    maxLabel3 = maxLabel2;
                    if (score > maxScore1 || (score == maxScore1 && label < maxLabel1))
                    {
                        maxScore2 = maxScore1;
                        maxLabel2 = maxLabel1;
                        maxScore1 = score;
                        maxLabel1 = label;
                    }
                    else
                    {
                        maxScore2 = score;
                        maxLabel2 = label;
                    }
                }
                else
                {
                    maxScore3 = score;
                    maxLabel3 = label;
                }
            }
        }
    }

    // Calculate the DCG.
    double dcg = LabelMap[maxLabel1] * DiscountMap[0];
    if (maxScore2 > double.NegativeInfinity)
        dcg += LabelMap[maxLabel2] * DiscountMap[1];
    if (maxScore3 > double.NegativeInfinity)
        dcg += LabelMap[maxLabel3] * DiscountMap[2];

    return dcg;
}
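// For intuition, a self-contained toy version of DCG@3 using the conventional gain
// 2^label - 1 and discount 1/log2(rank + 1); the actual LabelMap/DiscountMap tables
// above may differ, so this is an illustration, not the production computation.
// Requires System.Linq.
static double ToyDcg3(double[] scores, short[] labels)
{
    // Rank by descending score; break score ties pessimistically (lower label first),
    // matching the tie-breaking in DCG3 above.
    var order = Enumerable.Range(0, scores.Length)
        .OrderByDescending(i => scores[i])
        .ThenBy(i => labels[i])
        .Take(3)
        .ToArray();

    double dcg = 0;
    for (int rank = 0; rank < order.Length; rank++)
        dcg += (Math.Pow(2, labels[order[rank]]) - 1) / Math.Log(rank + 2, 2);
    return dcg;
}

// ToyDcg3(new[] { 0.9, 0.8, 0.7 }, new short[] { 2, 0, 1 })
//   = 3/log2(2) + 0/log2(3) + 1/log2(4) = 3 + 0 + 0.5 = 3.5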