private void OnAddLanguage(object data)
{
	// Adds a per-language settings override for the language picked from the menu.
	SystemLanguage language = (SystemLanguage)data;
	LocalisedTextMeshPro localisedText = (LocalisedTextMeshPro)target;

	Undo.RecordObject(localisedText, "Added language override");

	// Snapshot the text mesh's current settings for the new override.
	TextMeshProSettings currentSettings = TextMeshProSettings.FromTextMesh(localisedText.TextMesh);

	LocalisedTextMeshPro.LanguageSettingsOverride newOverride = new LocalisedTextMeshPro.LanguageSettingsOverride()
	{
		_language = language,
		_settings = currentSettings,
	};

	bool hasExistingOverrides = localisedText._languageSettingsOverrides != null && localisedText._languageSettingsOverrides.Length > 0;

	if (hasExistingOverrides)
	{
		ArrayUtils.Add(ref localisedText._languageSettingsOverrides, newOverride);
	}
	else
	{
		// First override: also capture the current settings as the defaults.
		localisedText._defaultSettings = currentSettings;
		localisedText._languageSettingsOverrides = new LocalisedTextMeshPro.LanguageSettingsOverride[] { newOverride };
	}

	// Jump straight into editing the language that was just added.
	SwitchToEditingLanguage(language);
}
public static PointDouble operator +(PointDouble p1, PointDouble p2)
{
	// Element-wise vector addition; the result inherits the left operand's dimensionality.
	// NOTE(review): if ArrayUtils.Add mutates its first argument in place (as some
	// implementations do), p1.Values would be modified by this operator — confirm.
	PointDouble sum = new PointDouble(p1.Dimensions);
	sum.Values = ArrayUtils.Add(p1.Values, p2.Values);
	return (sum);
}
/// <summary>
/// Traverses all connected synapses of the column and calculates the span that the
/// synapses cover across the input bits, then averages the spans across all dimensions.
/// </summary>
/// <param name="column">Column whose proximal dendrite is inspected.</param>
/// <param name="htmConfig">Configuration providing the input module topology.</param>
/// <returns>Average span across all dimensions, or 0 when no synapses are connected.</returns>
public static double CalcAvgSpanOfConnectedSynapses(Column column, HtmConfig htmConfig)
{
	// Gets synapses connected to input bits (from the column's pool), as sparse indices.
	int[] connected = column.ProximalDendrite.GetConnectedSynapsesSparse();
	if (connected == null || connected.Length == 0)
	{
		return (0);
	}

	var topology = htmConfig.InputModuleTopology;
	int dimCount = topology.Dimensions.Length;

	// Per-dimension bounding box of all connected input coordinates.
	int[] maxCoord = new int[dimCount];
	int[] minCoord = new int[dimCount];
	ArrayUtils.FillArray(maxCoord, -1);
	ArrayUtils.FillArray(minCoord, ArrayUtils.Max(topology.Dimensions));

	for (int i = 0; i < connected.Length; i++)
	{
		// Compute the multi-dimensional coordinates once per synapse
		// (the original computed them twice — once for max, once for min).
		int[] coords = AbstractFlatMatrix.ComputeCoordinates(dimCount, topology.DimensionMultiplies, topology.IsMajorOrdering, connected[i]);
		maxCoord = ArrayUtils.MaxBetween(maxCoord, coords);
		minCoord = ArrayUtils.MinBetween(minCoord, coords);
	}

	// Span per dimension is (max - min + 1); return the mean over all dimensions.
	return (ArrayUtils.Average(ArrayUtils.Add(ArrayUtils.Subtract(maxCoord, minCoord), 1)));
}
private bool DrawAddBackgroundLogicButton()
{
	// Build the popup entries: a placeholder first, then one entry per logic type.
	Type[] logicTypes = SystemUtils.GetAllSubTypes(typeof(BranchingBackgroundLogic));
	string[] optionNames = new string[logicTypes.Length + 1];
	optionNames[0] = "(Add Background Logic)";

	for (int i = 0; i < logicTypes.Length; i++)
	{
		optionNames[i + 1] = logicTypes[i].Name;
	}

	int selected = EditorGUILayout.Popup(string.Empty, 0, optionNames);

	if (selected != 0)
	{
		// Instantiate the chosen type and append it to the event's background logic array.
		Type chosenType = logicTypes[selected - 1];
		BranchingBackgroundLogic newLogic = Activator.CreateInstance(chosenType) as BranchingBackgroundLogic;

		EventStartBranchingState evnt = GetEditableObject() as EventStartBranchingState;
		ArrayUtils.Add(ref evnt._backgroundLogic, newLogic);
		GetTimelineEditor().GetParent().OnAddedNewXmlNode(newLogic);

		return (true);
	}

	return (false);
}
public double[] OverlapsForRelativeAreas(int n, int w, int[] initPosition, int initRadius, int[] dPosition, int dRadius, int num, bool verbose)
{
	// Configure the encoder with the requested output width and active-bit count.
	SetUp();
	builder.N(n);
	builder.W(w);
	InitCe();

	double[] overlaps = new double[num];

	// Baseline encoding at the starting position/radius; each step is compared to it.
	int[] outputA = Encode(ce, initPosition, initRadius);

	for (int i = 0; i < num; i++)
	{
		int step = i + 1;

		// Offset the position by step * dPosition from the ORIGINAL position
		// (not cumulatively); when no delta is given, stay at the initial position.
		int[] newPosition;
		if (dPosition == null)
		{
			newPosition = initPosition;
		}
		else
		{
			newPosition = Arrays.CopyOf(initPosition, initPosition.Length);
			newPosition = ArrayUtils.Add(newPosition, ArrayUtils.Multiply(dPosition, step));
		}

		int newRadius = initRadius + step * dRadius;
		int[] outputB = Encode(ce, newPosition, newRadius);
		overlaps[i] = Overlap(outputA, outputB);
	}

	return (overlaps);
}
/// <summary>
/// Evaluates the tangent vector of a NURBS curve at parameter t, using the quotient
/// rule on the rational basis: tangent = (A'B - AB') / B^2, where A is the weighted
/// control-point sum and B is the weight sum.
/// </summary>
public static Vector TangentAt(this Curve curve, double t)
{
	if (!curve.IsNurbForm)
	{
		curve.CreateNurbForm();
	}

	int dims = curve.Dimensions;

	// Accumulators for the weighted point sums and their derivatives.
	double[] weightedPoints = new double[dims];
	double[] weightedPointDerivs = new double[dims];
	double weightSum = 0;
	double weightDerivSum = 0;

	// Control points are packed as (dims + 1)-wide homogeneous entries.
	int controlPointCount = curve.ControlPointVector.Length / (dims + 1);

	for (int i = 0; i < controlPointCount; i++)
	{
		double basis = curve.BasisFunction(i, curve.Degree, t);
		double basisDeriv = curve.DerivativeFunction(i, curve.Degree, t);
		double weight = curve.Weights[i];

		double[] point = CollectionUtils.SubArray <double>(curve.ControlPointVector, i * (dims + 1), dims);

		weightedPoints = ArrayUtils.Add(weightedPoints, ArrayUtils.Multiply(point, basis * weight));
		weightedPointDerivs = ArrayUtils.Add(weightedPointDerivs, ArrayUtils.Multiply(point, basisDeriv * weight));
		weightSum += basis * weight;
		weightDerivSum += basisDeriv * weight;
	}

	// Quotient rule numerator: A'B - AB'.
	double[] numerator = ArrayUtils.Sub(ArrayUtils.Multiply(weightedPointDerivs, weightSum), ArrayUtils.Multiply(weightedPoints, weightDerivSum));

	return (new Vector(ArrayUtils.Divide(numerator, Math.Pow(weightSum, 2))));
}
// Appends a mixer to this track's bound children.
// NOTE(review): when _boundTracks is null the add is silently dropped. Elsewhere in
// this codebase ArrayUtils.Add(ref ...) is called on arrays that may be null and is
// relied on to allocate (e.g. SaveData.Get<T>), so confirm whether this null guard
// is intentional or whether a null array should be allocated here instead.
public void AddChildTrack(IParentBindableTrackMixer boundTrack) { if (_boundTracks != null) { ArrayUtils.Add(ref _boundTracks, boundTrack); } }
public void AttachRenderTexture(RenderTexture texture, FramebufferAttachment?attachmentType = null)
{
	Rendering.CheckGLErrors($"At the start of '{nameof(Framebuffer)}.{nameof(AttachRenderTexture)}'.");

	Bind(this);

	// Use the caller's attachment point, or claim the next default one.
	var attachment = attachmentType ?? nextDefaultAttachment++;
	GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, attachment, TextureTarget.Texture2D, texture.Id, 0);

	// Remember the texture and which attachment it occupies.
	renderTextures.Add(texture);
	textureToAttachment[texture] = attachment;

	// Only attachments that map onto a valid draw buffer are added to the draw list.
	var drawBuffer = (DrawBuffersEnum)attachment;
	if (Enum.IsDefined(typeof(DrawBuffersEnum), drawBuffer))
	{
		ArrayUtils.Add(ref drawBuffers, drawBuffer);
	}

	// Track the largest attached texture extents.
	maxTextureWidth = Math.Max(maxTextureWidth, texture.Width);
	maxTextureHeight = Math.Max(maxTextureHeight, texture.Height);

	Rendering.CheckFramebufferStatus();
	Rendering.CheckGLErrors($"At the end of '{nameof(Framebuffer)}.{nameof(AttachRenderTexture)}'.");
}
private bool DrawAddBackgroundLogicButton()
{
	// Build the popup entries: a placeholder first, then one entry per logic type.
	Type[] logicTypes = SystemUtils.GetAllSubTypes(typeof(ConditionalStateBackgroundLogic));
	string[] optionNames = new string[logicTypes.Length + 1];
	optionNames[0] = "(Add State Background Logic)";

	for (int i = 0; i < logicTypes.Length; i++)
	{
		optionNames[i + 1] = logicTypes[i].Name;
	}

	int selected = EditorGUILayout.Popup(string.Empty, 0, optionNames);

	if (selected != 0)
	{
		// Instantiate the chosen type and append it to the state's background logic.
		Type chosenType = logicTypes[selected - 1];
		ConditionalStateBackgroundLogic newLogic = Activator.CreateInstance(chosenType) as ConditionalStateBackgroundLogic;

		ConditionalState conditionalState = (ConditionalState)GetEditableObject();
		ArrayUtils.Add(ref conditionalState._backgroundLogic, newLogic);

		StateMachineEditor editor = (StateMachineEditor)GetEditor();
		editor.OnAddedNewObjectToTimeline(newLogic);

		return (true);
	}

	return (false);
}
private void OnAddNode(ReorderableList list)
{
	Path path = (Path)target;

	// Create a child GameObject to host the new node component.
	GameObject nodeObject = new GameObject("PathNode " + list.count);
	nodeObject.transform.parent = path.transform;

	PathNodeData nodeData = new PathNodeData();
	nodeData._node = nodeObject.AddComponent <PathNode>();

	// Seed width/up from the last existing node, falling back to defaults for the first node.
	bool hasExistingNodes = path._nodes != null && path._nodes.Length > 0;
	if (hasExistingNodes)
	{
		nodeData._width = path._nodes[path._nodes.Length - 1]._width;
		nodeData._up = path._nodes[path._nodes.Length - 1]._up;
	}
	else
	{
		nodeData._width = 0.5f;
		nodeData._up = Vector3.up;
	}

	ArrayUtils.Add(ref path._nodes, nodeData);
	path.RefreshNodes(path._nodes);

	// Keep a snapshot of the node array for later change detection.
	_oldArray = (PathNodeData[])path._nodes.Clone();

	OnAddedNode(nodeData._node);
}
// Evaluates the curve position at parameter t directly from the homogeneous control
// points, skipping any NURB-form validation ("unsafe" relative to safer entry points).
// The main loop accumulates the rational basis sum (weighted points / weight sum),
// skipping basis functions that evaluate to zero.
// NOTE(review): the early-out branches hard-code lengths 3 and 4, which assumes a
// 3-dimensional curve with a homogeneous weight component (Dimensions == 3) even
// though the loop below uses curve.Dimensions generically — confirm curves of other
// dimensionality never reach this method.
internal static double[] UnsafePointAt(this Curve curve, double t) { double[] sumNwP = new double[curve.Dimensions]; double sumNw = 0; if (t == 0) { return(CollectionUtils.SubArray <double>(curve.ControlPointVector, 0, 3)); } else if (t >= curve.Knots[curve.Knots.Length - 1]) { return(CollectionUtils.SubArray <double>(curve.ControlPointVector, curve.ControlPointVector.Length - 4, 3)); } for (int i = 0; i < curve.ControlPointVector.Length / (curve.Dimensions + 1); i++) { double Nt = curve.BasisFunction(i, curve.Order - 1, t); if (Nt == 0) { continue; } sumNwP = ArrayUtils.Add(sumNwP, ArrayUtils.Multiply(curve.ControlPointVector, Nt * curve.Weights[i], i * (curve.Dimensions + 1), curve.Dimensions)); sumNw += Nt * curve.Weights[i]; } return(ArrayUtils.Divide(sumNwP, sumNw)); }
// Extends the stored sample series until it holds a value at index position.Values[0],
// stepping each new point forward by drift _a(...) * TimeDelta plus diffusion _b(...)
// scaled by the matching Wiener-process increment (Euler–Maruyama style update).
// NOTE(review): Xt is read from _values[position.Values[0] - 1] inside the loop — i.e.
// the TARGET index minus one, not the most recently appended point. When the series
// is more than one element short of the target this indexes past _values.Count and
// will throw; confirm whether _values[_values.Count - 1] was intended, or whether
// callers only ever request one step ahead.
// NOTE(review): only the first coordinate of 'position' is used as the target index.
public void AddValuesTill(Point <int> position) { if (position.Values[0] == 0) { return; } while (_values.Count <= position.Values[0]) { double[] newDelta = new double[_dimensions]; double[] Xt = _values[position.Values[0] - 1].Values; for (int i = 0; i < _dimensions; i++) { newDelta[i] = _a(Xt, position.Values[0])[i] * TimeDelta + _b(Xt, TimeDelta)[i] * _wienerProcessDeltas[i].ValueAt(position).Values[0]; } var newPoint = new Point <double>(_dimensions) { Values = ArrayUtils.Add(newDelta, Xt) }; _values.Add(newPoint); } }
public void AddKinematicsSphere(GPBody body, float radius, int groupId)
{
	// The new body's id is its index in the kinematics array, i.e. the length before the add.
	// NOTE(review): groupId is currently unused in this method — confirm whether that is intended.
	var newBodyId = kinematicsBodies.Length;
	var sphereCollider = new GPSphereCollider(newBodyId, radius);

	ArrayUtils.Add(ref kinematicsBodies, body);
	ArrayUtils.Add(ref kinematicsSphereColliders, sphereCollider);
}
public void AddPointJoint(GPBody body, Vector3 point, Matrix4x4 matrix, float elasticity)
{
	// The joint references the body by its index in the world's body array.
	var bodyIndex = Array.IndexOf(bodies, body);
	Assert.IsTrue(bodyIndex >= 0, "Add body to world first");

	// The joint's matrix slot is the index the matrix is about to occupy.
	var newJoint = new GPPointJoint(bodyIndex, matrices.Length, point, elasticity);

	ArrayUtils.Add(ref pointJoints, newJoint);
	ArrayUtils.Add(ref matrices, matrix);
}
// Particle-swarm style repulsion: relocates this particle to the candidate position in
// [min, max] that is least crowded by 'otherPositions'.
// Candidates form a uniform grid of otherPositions.Count * 4 points. Each candidate's
// weight is the sum of gaussian falloffs e^(-d^2 / stepSize^2) from every existing
// position; the particle jumps to the minimum-weight candidate, adopts it as its best
// position, and has its velocity direction randomised (+/-1) keeping the magnitude.
// NOTE(review): the inline "Get rid of duplicates" comment does not match the code —
// positions.Length is simply re-read and no de-duplication occurs; confirm intent.
public override void pushAwayFrom(List <double> otherPositions, MersenneTwister rng) { // If min and max are the same, nothing to do if (this.max == this.min) { return; } // How many potential other positions to evaluate? int numPositions = otherPositions.Count * 4; if (numPositions == 0) { return; } // Assign a weight to each potential position based on how close it is // to other particles. stepSize = (double)(this.max - this.min) / numPositions; double[] positions = ArrayUtils.Arrange(this.min, this.max + stepSize.Value, stepSize.Value); // Get rid of duplicates. numPositions = positions.Length; double[] weights = new double[numPositions]; // Assign a weight to each potential position, based on a gaussian falloff // from each existing variable. The weight of a variable to each potential // position is given as: // e ^ -(dist^2/stepSize^2) double maxDistanceSq = -1 * Math.Pow(stepSize.Value, 2); foreach (var pos in otherPositions) { var distances = ArrayUtils.Sub(positions, pos);// pos - positions; var varWeights = distances.Select(d => Math.Exp(Math.Pow(d, 2) / maxDistanceSq)).ToArray(); //var varWeights = Math.Exp(Math.Pow(distances, 2) / maxDistanceSq); weights = ArrayUtils.Add(weights, varWeights); } // Put this particle at the position with smallest weight. int positionIdx = ArrayUtils.Argmin(weights); this._position = positions[positionIdx]; // Set its best position to this. this._bestPosition = this.getPosition(); // Give it a random direction. this._velocity *= rng.Choice(new[] { 1, -1 }); }
// Adds an (entity, linkId) pair to the source's link list.
// Returns false when an identical pair is already present, true when the link was added.
public bool AddLink(RSEntityId inEntity, string inLinkId)
{
	// Reject duplicates: the same pair must not be linked twice.
	bool alreadyLinked = IndexOf(inEntity, inLinkId) >= 0;
	if (alreadyLinked)
	{
		return (false);
	}

	ArrayUtils.Add(ref m_Source.Links, new RSEntityLinkData(inEntity, inLinkId));
	return (true);
}
protected override void OnAddedNode(PathNode node)
{
	BezierPath path = (BezierPath)target;

	// Every path node gets a matching pair of tangent control points.
	BezierPath.NodeControlPoints controlPoint = new BezierPath.NodeControlPoints();
	controlPoint._startTangent = Vector3.forward;
	controlPoint._endTangent = -Vector3.forward;

	ArrayUtils.Add(ref path._controlPoints, controlPoint);

	// Sanity check: the arrays must stay in lock-step. The original threw a bare
	// Exception with no message, which made the failure impossible to diagnose.
	if (path._controlPoints.Length != path._nodes.Length)
	{
		throw new System.Exception("BezierPath control point count (" + path._controlPoints.Length + ") does not match node count (" + path._nodes.Length + ")");
	}
}
// Returns the existing save data block of the given type, creating and appending a new
// one when none exists yet.
public SaveDataBlock GetByType(Type type)
{
	//Find existing blocks (guard against a save file with no blocks yet — the original
	//threw NullReferenceException here; the generic Get<T>() already guards the same way)
	if (_dataBlocks != null)
	{
		foreach (SaveDataBlock data in _dataBlocks)
		{
			if (data.GetType() == type)
			{
				return (data);
			}
		}
	}

	//None found in save file, add new block and return it
	SaveDataBlock newData = Activator.CreateInstance(type) as SaveDataBlock;
	ArrayUtils.Add(ref _dataBlocks, newData);
	return (newData);
}
public void AttachRenderbuffer(Renderbuffer renderbuffer, FramebufferAttachment?attachmentType = null)
{
	Bind(this);

	// Use the caller's attachment point, or claim the next default one.
	var attachment = attachmentType ?? nextDefaultAttachment++;

	// BUG FIX: the final argument of glFramebufferRenderbuffer is the renderbuffer
	// object's name. The original passed this framebuffer's own Id; the sibling
	// AttachRenderTexture correctly passes texture.Id for the analogous call.
	GL.FramebufferRenderbuffer(FramebufferTarget.Framebuffer, attachment, RenderbufferTarget.Renderbuffer, renderbuffer.Id);

	Rendering.CheckFramebufferStatus();

	ArrayUtils.Add(ref renderbuffers, renderbuffer);

	// Only attachments that map onto a valid draw buffer are added to the draw list.
	var drawBuffersEnum = (DrawBuffersEnum)attachment;
	if (Enum.IsDefined(typeof(DrawBuffersEnum), drawBuffersEnum))
	{
		ArrayUtils.Add(ref drawBuffers, drawBuffersEnum);
	}
}
public void TestAdd()
{
	// Scalar add: every element is incremented by the scalar.
	int[] ia = { 1, 1, 1, 1 };
	int[] expected = { 2, 2, 2, 2 };
	Assert.IsTrue(Arrays.AreEqual(expected, ArrayUtils.Add(ia, 1)));

	// Array add: element-wise sum. Note the expected {4,4,4,4}: the scalar add above
	// mutated ia in place to {2,2,2,2}, so ia + ia == {4,4,4,4}.
	expected = new int[] { 4, 4, 4, 4 };
	Assert.IsTrue(Arrays.AreEqual(expected, ArrayUtils.Add(ia, ia)));

	///////// double version //////////
	double[] da = { 1.0, 1.0, 1.0, 1.0 };
	double[] d_expected = { 2.0, 2.0, 2.0, 2.0 };
	Assert.IsTrue(Arrays.AreEqual(d_expected, ArrayUtils.Add(da, 1.0)));

	// Same in-place behaviour for the double overload: da is {2,2,2,2} by now.
	d_expected = new double[] { 4.0, 4.0, 4.0, 4.0 };
	Assert.IsTrue(Arrays.AreEqual(d_expected, ArrayUtils.Add(da, da)));
}
public void StartCameraShot(CinematicCameraShot shot, Extrapolation extrapolation, float blendTime = -1.0f, InterpolationType blendType = InterpolationType.InOutCubic)
{
	bool instantCut = blendTime <= 0.0f;

	if (instantCut)
	{
		// Hard cut: show the new shot at full weight and discard any in-progress blends.
		_currentShot._weight = 1.0f;
		_blendingShots = new ShotInfo[0];
	}
	else if (_currentShot._shot != null)
	{
		// Push the outgoing shot onto the blend list so it can fade while the new shot fades in.
		ArrayUtils.Add(ref _blendingShots, _currentShot);
		_currentShot._weight = 0.0f;
		_currentShot._blendType = blendType;
		_currentShotBlendSpeed = 1.0f / blendTime;
	}

	// Make the requested shot current and restart its local time.
	_currentShot._shot = shot;
	_currentShot._extrapolation = extrapolation;
	_currentShot._time = 0.0f;
}
public void StopCameraShot(CinematicCameraShot shot, float blendTime = -1.0f, InterpolationType blendType = InterpolationType.Linear)
{
	// Only act when the shot being stopped is the one currently playing.
	bool isCurrentShot = _currentShot._shot != null && _currentShot._shot == shot;
	if (!isCurrentShot)
	{
		return;
	}

	if (blendTime <= 0.0f)
	{
		// Hard stop: discard any in-progress blends.
		_currentShot._weight = 1.0f;
		_blendingShots = new ShotInfo[0];
	}
	else
	{
		// Move the current shot onto the blend list so it can fade out over blendTime.
		ArrayUtils.Add(ref _blendingShots, _currentShot);
		_currentShot._weight = 0.0f;
		_currentShot._blendType = blendType;
		_currentShotBlendSpeed = 1.0f / blendTime;
	}

	// Clear the current slot.
	_currentShot = new ShotInfo();
}
public void StopAll(float blendTime = -1.0f, InterpolationType blendType = InterpolationType.InOutCubic)
{
	// Nothing playing — nothing to stop.
	if (_currentShot._shot == null)
	{
		return;
	}

	if (blendTime <= 0.0f)
	{
		// Hard stop: discard any in-progress blends.
		_currentShot._weight = 1.0f;
		_blendingShots = new ShotInfo[0];
	}
	else
	{
		// Move the current shot onto the blend list so it can fade out over blendTime.
		ArrayUtils.Add(ref _blendingShots, _currentShot);
		_currentShot._weight = 0.0f;
		_currentShot._blendType = blendType;
		_currentShotBlendSpeed = 1.0f / blendTime;
	}

	// Clear the current slot.
	_currentShot = new ShotInfo();
}
// Draws the "Add Branch" button; returns true when a branch was added this frame.
private bool DrawAddBranchButton()
{
	// Track the click in a flag so GUILayout.EndHorizontal() always runs — the
	// original returned from inside the layout group, leaving it unbalanced, which
	// makes Unity log "Mismatched LayoutGroup" errors on the click frame.
	bool addedBranch = false;

	EditorGUILayout.BeginHorizontal(GUILayout.Width(20.0f));
	{
		if (GUILayout.Button("Add Branch"))
		{
			EventStartBranchingState evnt = GetEditableObject() as EventStartBranchingState;
			Branch newBranch = new Branch();
			ArrayUtils.Add(ref evnt._branches, newBranch);
			GetTimelineEditor().GetParent().OnAddedNewXmlNode(newBranch);
			addedBranch = true;
		}

		GUILayout.FlexibleSpace();
	}
	GUILayout.EndHorizontal();

	return (addedBranch);
}
public T Get <T>() where T : SaveDataBlock, new()
{
	//Return the existing block of this type if the save data already holds one
	if (_dataBlocks != null)
	{
		foreach (SaveDataBlock block in _dataBlocks)
		{
			if (block.GetType() == typeof(T))
			{
				return (block as T);
			}
		}
	}

	//Otherwise create a fresh block, append it to the save data and return it
	T newBlock = new T();
	ArrayUtils.Add(ref _dataBlocks, newBlock);
	return (newBlock);
}
// Bakes all navmeshes for the given map and stores them on the map asset.
// In the editor (inEditor == true): each baked navmesh is serialized to a
// "<asset>_<navmesh>.bytes" file next to the map asset, imported via AssetDatabase,
// and the resulting TextAsset is appended to data.Asset.NavMeshes.
// At runtime (inEditor == false): the baked navmeshes are handed directly to the asset
// via InitNavMeshes.
// NOTE(review): the serialization buffer is a fixed 20 MB — baking a navmesh larger
// than that will fail; confirm the limit is acceptable for all maps.
public static void BakeNavMeshes(MapData data, Boolean inEditor) { FPMathUtils.LoadLookupTables(); data.Asset.NavMeshes = new TextAsset[0]; var navmeshes = BakeNavMeshesLoop(data).ToArray(); if (inEditor) { #if UNITY_EDITOR var pathOnDisk = PathUtils.Combine('/', Path.GetDirectoryName(Application.dataPath), Path.GetDirectoryName(AssetDatabase.GetAssetPath(data.Asset))); var assetDir = Path.GetDirectoryName(AssetDatabase.GetAssetPath(data.Asset)); var assetPath = PathUtils.Combine('/', assetDir, Path.GetFileNameWithoutExtension(AssetDatabase.GetAssetPath(data.Asset))); foreach (var navmesh in navmeshes) { var navmeshFileOnDisk = data.Asset.name + "_" + navmesh.Name + ".bytes"; var navmeshAssetPath = assetPath + "_" + navmesh.Name + ".bytes"; // serialize (max 20 megabytes for now) var bytestream = new ByteStream(new Byte[1024 * 1024 * 20]); navmesh.Serialize(bytestream, true); // write File.WriteAllBytes(PathUtils.Combine('/', pathOnDisk, navmeshFileOnDisk), bytestream.ToArray()); // import asset AssetDatabase.ImportAsset(navmeshAssetPath, ImportAssetOptions.ForceUpdate); // add assets to navmehs ArrayUtils.Add(ref data.Asset.NavMeshes, AssetDatabase.LoadAssetAtPath <TextAsset>(navmeshAssetPath)); } #endif } else { data.Asset.InitNavMeshes(navmeshes); } }
// Draws the "Add New Condition" button; returns true when a condition was added this frame.
private bool DrawAddConditionButton()
{
	// Track the click in a flag so GUILayout.EndHorizontal() always runs — the
	// original returned from inside the layout group, leaving it unbalanced, which
	// makes Unity log "Mismatched LayoutGroup" errors on the click frame.
	bool addedCondition = false;

	EditorGUILayout.BeginHorizontal(GUILayout.Width(20.0f));
	{
		if (GUILayout.Button("Add New Condition"))
		{
			ConditionalState conditionalState = (ConditionalState)GetEditableObject();
			ConditionalStateBranch newCondition = new ConditionalStateBranch();
			ArrayUtils.Add(ref conditionalState._branches, newCondition);

			StateMachineEditor editor = (StateMachineEditor)GetEditor();
			editor.OnAddedNewObjectToTimeline(newCondition);

			addedCondition = true;
		}

		GUILayout.FlexibleSpace();
	}
	GUILayout.EndHorizontal();

	return (addedCondition);
}
// Extracts up to 'numberOfRecords' records from pre-read raw data (T[][]) into named
// NDArrays, one per configured index mapping.
// For each named mapping, pairs of (begin, end) coordinate arrays define sub-blocks of
// the feature space; per-mapping shapes/lengths are precomputed, then each record is
// copied element-by-element into a [records, 1, featureShape...] NDArray using local
// strides to walk each sub-block.
// Returns null when readData is null (no more data), mirroring the reader contract.
// NOTE(review): per-name section offsets (_sectionOffsets) shift the flat read index
// into each record — confirm offsets are in element units of T, as the addition to
// beginFlatIndex implies.
public override Dictionary <string, INDArray> ExtractDirectFrom(object readData, int numberOfRecords, IComputationHandler handler) { // read data being null means no more data could be read so we will just pass that along if (readData == null) { return(null); } T[][] rawRecords = (T[][])readData; int numberOfRecordsToExtract = Math.Min(rawRecords.Length, numberOfRecords); _logger.Debug($"Extracting {numberOfRecordsToExtract} records from reader {Reader} (requested: {numberOfRecords})..."); Dictionary <string, INDArray> namedArrays = new Dictionary <string, INDArray>(); foreach (string name in _indexMappings.Keys) { long[][] mappings = _indexMappings[name]; long[][] perMappingShape = new long[mappings.Length / 2][]; long[] perMappingLength = new long[mappings.Length / 2]; long[] featureShape = new long[mappings[0].Length]; for (int i = 0; i < mappings.Length; i += 2) { int halfIndex = i / 2; perMappingShape[halfIndex] = new long[mappings[0].Length]; for (int y = 0; y < featureShape.Length; y++) { perMappingShape[halfIndex][y] = mappings[i + 1][y] - mappings[i][y]; featureShape[y] += perMappingShape[halfIndex][y]; } perMappingLength[i / 2] = ArrayUtils.Product(perMappingShape[halfIndex]); } long[] shape = new long[featureShape.Length + 2]; shape[0] = numberOfRecordsToExtract; shape[1] = 1; Array.Copy(featureShape, 0, shape, 2, featureShape.Length); INDArray array = handler.NDArray(shape); long[] globalBufferIndices = new long[shape.Length]; long sectionOffset = _sectionOffsets.ContainsKey(name) ? 
_sectionOffsets[name] : 0L; for (int r = 0; r < numberOfRecordsToExtract; r++) { T[] record = rawRecords[r]; globalBufferIndices[0] = r; //BatchTimeFeatures indexing globalBufferIndices[1] = 0; for (int i = 0; i < mappings.Length; i += 2) { long[] beginShape = mappings[i]; long[] localShape = perMappingShape[i / 2]; long[] localStrides = NDArrayUtils.GetStrides(localShape); long[] localBufferIndices = new long[mappings[i].Length]; long length = perMappingLength[i / 2]; long beginFlatIndex = ArrayUtils.Product(beginShape); for (int y = 0; y < length; y++) { localBufferIndices = NDArrayUtils.GetIndices(y, localShape, localStrides, localBufferIndices); localBufferIndices = ArrayUtils.Add(beginShape, localBufferIndices, localBufferIndices); Array.Copy(localBufferIndices, 0, globalBufferIndices, 2, localBufferIndices.Length); array.SetValue(record[beginFlatIndex + y + sectionOffset], globalBufferIndices); } } } namedArrays.Add(name, array); } _logger.Debug($"Done extracting {numberOfRecordsToExtract} records from reader {Reader} (requested: {numberOfRecords})."); return(namedArrays); }
// Presumably appends 'item' to the wrapped native collection.
// NOTE(review): ArrayUtils.Add(NativeArrayHandle) is called with a single argument
// here, unlike every other Add usage in this codebase which takes (array, value) or
// (ref array, value). The semantics — whether it grows the handle and returns the slot
// that SetValue then writes 'item' into — cannot be determined from this file; verify
// against the ArrayUtils implementation.
public void Add(T item) { SetValue(ArrayUtils.Add(NativeArrayHandle), item); }
// SDR classifier core. Overall flow (see XML docs below for the public contract):
//   1. Track the offset between recordNum and the internal learn iteration.
//   2. Inference: for each configured step count, sum per-bit history votes over the
//      active input bits, normalise to a probability distribution (uniform when all
//      votes are zero), and record it alongside the rolling actual bucket values.
//   3. Learning: update the rolling average actual value for the input bucket (scalars
//      use an exponential moving average with _actValueAlpha; categories overwrite),
//      then for each step count find the pattern from nSteps iterations ago in the
//      history and store the bucket in each active bit's BitHistory.
// NOTE(review): patterns recovered from history may arrive as JArray (deserialized
// state) or int[] — both paths are handled below; confirm no other encodings occur.
/// <summary> /// Process one input sample. /// This method is called by outer loop code outside the nupic-engine. We /// use this instead of the nupic engine compute() because our inputs and /// outputs aren't fixed size vectors of reals. /// </summary> /// <typeparam name="T"></typeparam> /// <param name="recordNum">Record number of this input pattern. Record numbers should /// normally increase sequentially by 1 each time unless there /// are missing records in the dataset. Knowing this information /// insures that we don't get confused by missing records.</param> /// <param name="classification">Map of the classification information: /// bucketIdx: index of the encoder bucket /// actValue: actual value going into the encoder</param> /// <param name="patternNZ">list of the active indices from the output below</param> /// <param name="learn">if true, learn this sample</param> /// <param name="infer">if true, perform inference</param> /// <returns>dict containing inference results, there is one entry for each /// step in steps, where the key is the number of steps, and /// the value is an array containing the relative likelihood for /// each bucketIdx starting from bucketIdx 0. /// /// There is also an entry containing the average actual value to /// use for each bucket. The key is 'actualValues'. /// /// for example: /// { /// 1 : [0.1, 0.3, 0.2, 0.7], /// 4 : [0.2, 0.4, 0.3, 0.5], /// 'actualValues': [1.5, 3,5, 5,5, 7.6], /// } /// </returns> public Classification <T> Compute <T>(int recordNum, IDictionary <string, object> classification, int[] patternNZ, bool learn, bool infer) { Classification <T> retVal = new Classification <T>(); //List<T> actualValues = this.actualValues.Select(av => av == null ? 
default(T) : (T)av).ToList(); // Save the offset between recordNum and learnIteration if this is the first // compute if (_recordNumMinusLearnIteration == -1) { _recordNumMinusLearnIteration = recordNum - _learnIteration; } // Update the learn iteration _learnIteration = recordNum - _recordNumMinusLearnIteration; if (Verbosity >= 1) { Console.WriteLine(String.Format("\n{0}: compute ", g_debugPrefix)); Console.WriteLine(" recordNum: " + recordNum); Console.WriteLine(" learnIteration: " + _learnIteration); Console.WriteLine(String.Format(" patternNZ({0}): {1}", patternNZ.Length, Arrays.ToString(patternNZ))); Console.WriteLine(" classificationIn: " + classification); } _patternNzHistory.Append(new Tuple(_learnIteration, patternNZ)); //------------------------------------------------------------------------ // Inference: // For each active bit in the activationPattern, get the classification // votes // // Return value dict. For buckets which we don't have an actual value // for yet, just plug in any valid actual value. It doesn't matter what // we use because that bucket won't have non-zero likelihood anyways. if (infer) { // NOTE: If doing 0-step prediction, we shouldn't use any knowledge // of the classification input during inference. object defaultValue = null; if (Steps[0] == 0) { defaultValue = 0; } else { defaultValue = classification.GetOrDefault("actValue", null); } T[] actValues = new T[this._actualValues.Count]; for (int i = 0; i < _actualValues.Count; i++) { //if (EqualityComparer<T>.Default.Equals(actualValues[i], default(T))) //actualValues[i] == default(T)) if (_actualValues[i] == null) { actValues[i] = defaultValue != null?TypeConverter.Convert <T>(defaultValue) : default(T); //(T) (defaultValue ?? default(T)); } else { actValues[i] = (T)_actualValues[i]; } //actValues[i] = actualValues[i].CompareTo(default(T)) == 0 ? defaultValue : actualValues[i]; } retVal.SetActualValues(actValues); // For each n-step prediction... 
foreach (int nSteps in Steps.ToArray()) { // Accumulate bucket index votes and actValues into these arrays double[] sumVotes = new double[_maxBucketIdx + 1]; double[] bitVotes = new double[_maxBucketIdx + 1]; foreach (int bit in patternNZ) { Tuple key = new Tuple(bit, nSteps); BitHistory history = _activeBitHistory.GetOrDefault(key, null); if (history == null) { continue; } history.Infer(_learnIteration, bitVotes); sumVotes = ArrayUtils.Add(sumVotes, bitVotes); } // Return the votes for each bucket, normalized double total = ArrayUtils.Sum(sumVotes); if (total > 0) { sumVotes = ArrayUtils.Divide(sumVotes, total); } else { // If all buckets have zero probability then simply make all of the // buckets equally likely. There is no actual prediction for this // timestep so any of the possible predictions are just as good. if (sumVotes.Length > 0) { Arrays.Fill(sumVotes, 1.0 / (double)sumVotes.Length); } } retVal.SetStats(nSteps, sumVotes); } } // ------------------------------------------------------------------------ // Learning: // For each active bit in the activationPattern, store the classification // info. If the bucketIdx is None, we can't learn. This can happen when the // field is missing in a specific record. if (learn && classification.GetOrDefault("bucketIdx", null) != null) { // Get classification info int bucketIdx = (int)(classification["bucketIdx"]); object actValue = classification["actValue"]; // Update maxBucketIndex _maxBucketIdx = Math.Max(_maxBucketIdx, bucketIdx); // Update rolling average of actual values if it's a scalar. If it's // not, it must be a category, in which case each bucket only ever // sees one category so we don't need a running average. 
while (_maxBucketIdx > _actualValues.Count - 1) { _actualValues.Add(null); } if (_actualValues[bucketIdx] == null) { _actualValues[bucketIdx] = TypeConverter.Convert <T>(actValue); } else { if (typeof(double).IsAssignableFrom(actValue.GetType())) { Double val = ((1.0 - _actValueAlpha) * (TypeConverter.Convert <double>(_actualValues[bucketIdx])) + _actValueAlpha * (TypeConverter.Convert <double>(actValue))); _actualValues[bucketIdx] = TypeConverter.Convert <T>(val); } else { _actualValues[bucketIdx] = TypeConverter.Convert <T>(actValue); } } // Train each pattern that we have in our history that aligns with the // steps we have in steps int nSteps = -1; int iteration = 0; int[] learnPatternNZ = null; foreach (int n in Steps.ToArray()) { nSteps = n; // Do we have the pattern that should be assigned to this classification // in our pattern history? If not, skip it bool found = false; foreach (Tuple t in _patternNzHistory) { iteration = TypeConverter.Convert <int>(t.Get(0)); var tuplePos1 = t.Get(1); if (tuplePos1 is JArray) { JArray arr = (JArray)tuplePos1; learnPatternNZ = arr.Values <int>().ToArray(); } else { learnPatternNZ = (int[])t.Get(1); } if (iteration == _learnIteration - nSteps) { found = true; break; } iteration++; } if (!found) { continue; } // Store classification info for each active bit from the pattern // that we got nSteps time steps ago. 
foreach (int bit in learnPatternNZ) { // Get the history structure for this bit and step Tuple key = new Tuple(bit, nSteps); BitHistory history = _activeBitHistory.GetOrDefault(key, null); if (history == null) { _activeBitHistory.Add(key, history = new BitHistory(this, bit, nSteps)); } history.Store(_learnIteration, bucketIdx); } } } if (infer && Verbosity >= 1) { Console.WriteLine(" inference: combined bucket likelihoods:"); Console.WriteLine(" actual bucket values: " + Arrays.ToString((T[])retVal.GetActualValues())); foreach (int key in retVal.StepSet()) { if (retVal.GetActualValue(key) == null) { continue; } Object[] actual = new Object[] { (T)retVal.GetActualValue(key) }; Console.WriteLine(String.Format(" {0} steps: {1}", key, PFormatArray(actual))); int bestBucketIdx = retVal.GetMostProbableBucketIndex(key); Console.WriteLine(String.Format(" most likely bucket idx: {0}, value: {1} ", bestBucketIdx, retVal.GetActualValue(bestBucketIdx))); } } return(retVal); }