/// <summary>
///   Reverts a set of projected data into its original form. Complete reverse
///   transformation is only possible if all components are present, and, if the
///   data has been standardized, the original standard deviations and means of
///   the original matrix are known.
/// </summary>
/// <param name="data">The PCA-transformed data.</param>
/// <returns>The data mapped back into the original feature space.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="data"/> is null.</exception>
public virtual double[][] Revert(double[][] data)
{
    if (data == null)
        throw new ArgumentNullException(nameof(data));

    int rows = data.Rows();
    int cols = data.Columns();
    int components = NumberOfOutputs;
    double[][] reversion = Jagged.Zeros(rows, components);

    // Revert the data (reversion = data * eigenVectors.Transpose())
    for (int i = 0; i < components; i++)
        for (int j = 0; j < rows; j++)
            for (int k = 0; k < cols; k++)
                reversion[j][i] += data[j][k] * ComponentVectors[k][i];

    // If the data has been standardized or centered,
    // we need to revert those operations as well.
    if (this.Method == PrincipalComponentMethod.Standardize ||
        this.Method == PrincipalComponentMethod.CorrelationMatrix)
    {
        // Multiply each column back by its original standard deviation.
        reversion.Multiply(StandardDeviations, dimension: 0, result: reversion);
    }

    // Re-add the original column means.
    reversion.Add(Means, dimension: 0, result: reversion);

    return reversion;
}
/// <summary>
/// Instances the configured number of laser devices for every packed scene in
/// <c>laserDevicePSList</c> (one pooled list per device type, parked under the
/// matching child container), then instances the single nuclear device the same way.
/// </summary>
private void CreateLaserDeviceInstances()
{
    int childIndex = 0;
    Spatial lastDevice = null;

    // One pool of instances per packed scene; each pool lives under its own child container.
    foreach (PackedScene packedScene in laserDevicePSList)
    {
        var pool = new Array <Spatial>();
        Spatial parent = GetChild <Spatial>(childIndex++);

        for (int i = 0; i < laserDeviceAmount; i++)
        {
            lastDevice = packedScene.Instance() as Spatial;
            lastDevice.Translation = inactiveTranslation;
            lastDevice.Name = this.CreateUniqueNodeName(lastDevice);
            lastDevice.Call(this.GetMethodSetManager(), this);
            parent.CallDeferred(this.GetGDMethodAddChild(), lastDevice);
            pool.Add(lastDevice);
        }

        // The device type is queried from the last instance produced by this scene.
        string deviceType = this.Call <string>(lastDevice, this.GetMethodGetNodeType());
        availableLaserDeviceMap.Add(deviceType, pool);
    }

    // The nuclear device is a one-off, parked under the next child container.
    nuclearDevice = nuclearDevicePS.Instance() as Spatial;
    nuclearDevice.Translation = inactiveTranslation;
    nuclearDevice.Name = this.CreateUniqueNodeName(nuclearDevice);
    nuclearDevice.Call(this.GetMethodSetManager(), this);
    Spatial nuclearParent = GetChild <Spatial>(childIndex++);
    nuclearParent.CallDeferred(this.GetGDMethodAddChild(), nuclearDevice);
}
/// <summary>
/// Appends <paramref name="t"/> to <paramref name="self"/> via the array Add
/// extension and returns the resulting array.
/// </summary>
public static T[] Push <T>(this T[] self, T t) => self.Add(t);
/// <summary>Appends an xyz coordinate triple to the vertex list.</summary>
public void AddVertex(float[] xyz) => vpl.Add(xyz);
/// <summary>
/// Static lookup table of Vivo 50 ventilator alarms. Codes 0-54 are
/// function-failure alarms, 64-74 are high-priority alarms and 80-94 are
/// medium-priority alarms; the code ranges contain deliberate gaps.
/// </summary>
static Vivo50Alarms()
{
    // NOTE(review): the array is pre-sized to 160 entries and then populated via
    // an Add() extension; confirm that this extension fills slots in place
    // rather than returning a new array (whose result would be discarded here).
    Alarms = new Alarm[160];

    // Function-failure alarms (codes 0-21).
    Alarms.Add(new Alarm(0, "ALARM_TXT_NO_ALARM", AlarmType.None));
    Alarms.Add(new Alarm(1, "ALARM_RAM", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(2, "ALARM_MAIN_PRESSURE_SENSOR", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(3, "ALARM_BACKUP_PRESSURE_SENSOR", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(4, "ALARM_PILOT_PRESSURE_SENSOR", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(5, "ALARM_EXP_PRESSURE_SENSOR", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(6, "ALARM_OXY_PRESSURE_SENSOR", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(7, "ALARM_FLOW_SENSOR", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(8, "ALARM_EXP_FLOW_SENSOR", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(9, "ALARM_SER_COMM_TO_UI", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(10, "ALARM_FAN_HIGH_CURRENT", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(11, "ALARM_FAN_SHUTDOWN", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(12, "ALARM_UI_FAN_SHUTDOWN", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(13, "ALARM_RTC_FAIL", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(14, "ALARM_BEEPER_FAIL", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(15, "ALARM_UI_BEEPER_FAIL", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(16, "ALARM_BEEPER_VOLUME_FAIL", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(17, "ALARM_CPU_VOLT_FAIL", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(18, "ALARM_PSU_VOLT_FAIL", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(19, "ALARM_SETTINGS_CORRUPT_IN_RAM", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(20, "ALARM_SETTINGS_CORRUPT_IN_FLASH", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(21, "ALARM_ADC_SPI_COMM", AlarmType.FunctionFail));

    // Function-failure alarms (codes 30-40): sensor/temperature/hardware faults.
    Alarms.Add(new Alarm(30, "ALARM_PRESS_SENSOR_PG1_PG2_EQUAL", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(31, "ALARM_PRESSURE_SENSOR_TEMP_HIGH", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(32, "ALARM_PRESSURE_SENSOR_TEMP_LOW", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(33, "ALARM_BLDC_HIGH_TEMP", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(34, "ALARM_BLDC_LOW_TEMP", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(35, "ALARM_MAINS_POWER_SUPPLY_TEMP_HIGH", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(36, "ALARM_MAINS_POWER_SUPPLY_TEMP_LOW", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(37, "ALARM_SENSOR_CALIBRATION", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(38, "ALARM_TEMP_COMPENSATION", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(39, "ALARM_FAN_MOTOR_ERROR", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(40, "ALARM_OTHER_HW_ERROR", AlarmType.FunctionFail));

    // Function-failure alarms (codes 50-54): user-interface unit faults.
    Alarms.Add(new Alarm(50, "ALARM_UI_RAM", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(51, "ALARM_UNUSED", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(52, "ALARM_UI_KEYS", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(53, "ALARM_UI_DISPLAY", AlarmType.FunctionFail));
    Alarms.Add(new Alarm(54, "ALARM_UI_HIGH_PRESSURE", AlarmType.FunctionFail));

    // High-priority alarms (codes 64-74).
    Alarms.Add(new Alarm(64, "ALARM_TXT_POWER_FAIL", AlarmType.High));
    Alarms.Add(new Alarm(65, "ALARM_TXT_LOW_PRESSURE", AlarmType.High));
    Alarms.Add(new Alarm(66, "ALARM_TXT_HIGH_PRESSURE", AlarmType.High));
    Alarms.Add(new Alarm(67, "ALARM_TXT_LOW_MINUTE_VOLUME", AlarmType.High));
    Alarms.Add(new Alarm(68, "ALARM_TXT_LOW_TIDAL_VOLUME", AlarmType.High));
    Alarms.Add(new Alarm(69, "ALARM_TXT_LOW_BREATH_RATE", AlarmType.High));
    Alarms.Add(new Alarm(70, "ALARM_TXT_APNEA", AlarmType.High));
    Alarms.Add(new Alarm(71, "ALARM_TXT_LOW_FIO2", AlarmType.High));
    Alarms.Add(new Alarm(72, "ALARM_TXT_LOW_PULSE", AlarmType.High));
    Alarms.Add(new Alarm(73, "ALARM_TXT_HIGH_LEAKAGE", AlarmType.High));
    Alarms.Add(new Alarm(74, "ALARM_TXT_LOW_SPO2", AlarmType.High));

    // Medium-priority alarms (codes 80-94).
    Alarms.Add(new Alarm(80, "ALARM_TXT_HIGH_MINUTE_VOLUME", AlarmType.Medium));
    Alarms.Add(new Alarm(81, "ALARM_TXT_HIGH_TIDAL_VOLUME", AlarmType.Medium));
    Alarms.Add(new Alarm(82, "ALARM_TXT_PAT_HIGH_TEMP", AlarmType.Medium));
    Alarms.Add(new Alarm(83, "ALARM_TXT_LOW_PEEP", AlarmType.Medium));
    Alarms.Add(new Alarm(84, "ALARM_TXT_HIGH_PEEP", AlarmType.Medium));
    Alarms.Add(new Alarm(85, "ALARM_TXT_HIGH_PULSE", AlarmType.Medium));
    Alarms.Add(new Alarm(86, "ALARM_TXT_HIGH_FIO2", AlarmType.Medium));
    Alarms.Add(new Alarm(87, "ALARM_TXT_5VISO_FAIL", AlarmType.Medium));
    Alarms.Add(new Alarm(88, "ALARM_TXT_HIGH_SPO2", AlarmType.Medium));
    Alarms.Add(new Alarm(89, "ALARM_TXT_LOW_INT_BAT", AlarmType.Medium));
    Alarms.Add(new Alarm(90, "ALARM_TXT_LOW_ALARM_TXT_ACK", AlarmType.Medium));
    Alarms.Add(new Alarm(91, "ALARM_TXT_HIGH_BREATH_RATE", AlarmType.Medium));
    Alarms.Add(new Alarm(92, "ALARM_TXT_LOW_LEAKAGE", AlarmType.Medium));
    Alarms.Add(new Alarm(93, "ALARM_TXT_LEDS_TEST", AlarmType.Medium));
    Alarms.Add(new Alarm(94, "ALARM_TXT_PRESSURE_COMP_LOST", AlarmType.Medium));
}
/// <summary>
/// Adds a word vector to a raw float vector by delegating to the
/// float-array Add extension with <c>word2.Vector</c>.
/// </summary>
public static float[] Add(this float[] word1, WordVector word2) => word1.Add(word2.Vector);
/// <summary>
/// Appends <paramref name="element"/> at <paramref name="position"/> (advancing
/// it via the Add extension) and then returns <paramref name="returnConstant"/>
/// unchanged, allowing the call to be used inside expressions.
/// </summary>
public static TReturnConstant AddAndReturnConstant <TElement, TReturnConstant>(this TElement[] array, ref long position, TElement element, TReturnConstant returnConstant)
{
    array.Add(ref position, element);
    return returnConstant;
}
/// <summary>
///   Elementwise addition of a and b, storing in result.
/// </summary>
///
/// <param name="a">The first vector to add.</param>
/// <param name="b">The second vector to add.</param>
/// <param name="result">An array to store the result; overwritten in place.</param>
///
public void Add(double[] a, double[] b, double[] result)
{
    // The sum is written into 'result'; this wrapper itself returns nothing.
    a.Add(b, result: result);
}
/// <summary>Adds the scalar <paramref name="b"/> to array <paramref name="a"/> via the array extension.</summary>
public sealed override Array <Real> Add(Array <Real> a, Real b)
{
    return a.Add(b);
}
/// <summary>Adds array <paramref name="b"/> to array <paramref name="a"/> via the array extension.</summary>
// Modifier order normalized to 'sealed override' for consistency with the sibling Add overload.
public sealed override Array <Real> Add(Array <Real> a, Array <Real> b) => a.Add(b);
/// <summary>
/// Based on the dependents lists in the XML file, resolve all dependents by reading more
/// package definition files. Resolution is depth-first and recursive; already-resolved
/// packages are skipped.
/// </summary>
/// <param name="current">Package definition currently being resolved.</param>
/// <param name="authenticated">Accumulates every definition resolved so far (also serves as the visited set).</param>
/// <param name="candidatePackageDefinitions">All definitions a dependent may be resolved against.</param>
public static void ResolveDependencies(
    PackageDefinition current,
    Array<PackageDefinition> authenticated,
    Array<PackageDefinition> candidatePackageDefinitions)
{
    // Already resolved (same name, and same version when one is specified)? Nothing to do.
    var matchingPackages = authenticated.Where(item => item.Name == current.Name);
    if (null != current.Version)
    {
        matchingPackages = matchingPackages.Where(item => item.Version == current.Version);
    }
    if (matchingPackages.FirstOrDefault() != null)
    {
        return;
    }

    if (!current.SupportedPlatforms.Includes(OSUtilities.CurrentOS))
    {
        throw new Exception("Package {0} is not supported on {1}", current.FullName, OSUtilities.CurrentOS.ToString());
    }

    // Mark resolved before recursing so dependency cycles terminate.
    authenticated.Add(current);

    foreach (var dependent in current.Dependents)
    {
        // Dependent tuple: Item1 = package name, Item2 = optional version.
        var depName = dependent.Item1;
        var depVersion = dependent.Item2;
        var candidates = candidatePackageDefinitions.Where(item => item.Name == depName);
        if (depVersion != null)
        {
            candidates = candidates.Where(item => item.Version == depVersion);
        }
        var candidateCount = candidates.Count();
        if (0 == candidateCount)
        {
            // No match at all: report which repositories were searched.
            var message = new System.Text.StringBuilder();
            message.AppendFormat("Unable to find a candidate package with name '{0}'", depName);
            if (null != depVersion)
            {
                message.AppendFormat(" and version {0}", depVersion);
            }
            message.AppendLine();
            var packageRepos = new StringArray();
            Graph.Instance.PackageRepositories.ToList().ForEach(item => packageRepos.AddUnique(item));
            message.AppendLine("Searched in the package repositories:");
            message.AppendLine(packageRepos.ToString("\n"));
            throw new Exception(message.ToString());
        }
        if (candidateCount > 1)
        {
            // Ambiguous match: list every definition file that clashes.
            var message = new System.Text.StringBuilder();
            message.AppendFormat("There are {0} identical candidate packages with name '{1}'", candidateCount, depName);
            if (null != depVersion)
            {
                message.AppendFormat(" and version {0}", depVersion);
            }
            message.AppendLine(" from the following package definition files:");
            foreach (var candidate in candidates)
            {
                message.AppendFormat(candidate.XMLFilename);
                message.AppendLine();
            }
            var packageRepos = new StringArray();
            Graph.Instance.PackageRepositories.ToList().ForEach(item => packageRepos.AddUnique(item));
            message.AppendLine("Found in the package repositories:");
            message.AppendLine(packageRepos.ToString("\n"));
            throw new Exception(message.ToString());
        }
        // Exactly one candidate: recurse into its own dependents.
        ResolveDependencies(candidates.First(), authenticated, candidatePackageDefinitions);
    }
}
/// <summary>
/// Recursively log this package's dependency tree depth-first, indenting
/// four spaces per nesting level; each visited package is printed only once.
/// </summary>
/// <param name="depth">Current indentation depth (4 spaces per level).</param>
/// <param name="visitedPackages">Packages already printed; prevents repeats and cycles.</param>
/// <param name="packageFormatting">Log format string taking the name column and the repository column.</param>
private void ShowDependencies(
    int depth,
    Array<PackageDefinition> visitedPackages,
    string packageFormatting)
{
    visitedPackages.Add(this);
    foreach (var dependent in this.Dependents)
    {
        // Dependent tuple: Item1 = name, Item2 = version; Item3 is a nullable bool
        // rendered as a trailing '*' — presumably a "default dependency" marker; confirm.
        var dep = Graph.Instance.Packages.Where(item => item.Name == dependent.Item1 && item.Version == dependent.Item2).First();
        if (visitedPackages.Contains(dep))
        {
            continue;
        }
        var formattedName = System.String.Format("{0}{1}{2}",
            new string(' ', depth * 4),
            dep.FullName,
            dependent.Item3.GetValueOrDefault(false) ? "*" : System.String.Empty);
        // Prefer the first declared repository; otherwise report the on-disk location.
        var repo = (dep.PackageRepositories.Count > 0)
            ? dep.PackageRepositories[0]
            : "Found in " + System.IO.Path.GetDirectoryName(dep.GetPackageDirectory());
        Log.MessageAll(packageFormatting, formattedName, repo);
        if (dep.Dependents.Count > 0)
        {
            dep.ShowDependencies(depth + 1, visitedPackages, packageFormatting);
        }
    }
}
// Collects the names of all instance variables that qualify as constant
// identifiers (per Symbol.is_const_id). Non-recursive.
// author: Brian
internal Array get_constants_at()
{
    var constants = new Array();

    foreach (var entry in instance_vars)
    {
        if (!Symbol.is_const_id(entry.Key))
        {
            continue;
        }
        constants.Add(new String(entry.Key));
    }

    return constants;
}
/// <summary>
/// Match every command-line argument against the regular expression of
/// <paramref name="realArg"/>, collecting the capture groups of each
/// successful match as one StringArray per argument.
/// </summary>
Evaluate(
    IRegExCommandLineArgument realArg)
{
    // Regex-style arguments are long-form only.
    if (null != realArg.ShortName)
    {
        throw new Exception("The command line argument '{0}' does not support short names", realArg.GetType().ToString());
    }
    var reg = new System.Text.RegularExpressions.Regex(realArg.LongName);
    var results = new Array<StringArray>();
    foreach (var arg in Arguments)
    {
        var matches = reg.Match(arg);
        if (!matches.Success)
        {
            continue;
        }
        var thisResult = new StringArray();
        foreach (var group in matches.Groups)
        {
            // Skip the group whose text equals the whole argument
            // (presumably filtering out group 0, the full match — confirm).
            if (group.ToString() == arg)
            {
                continue;
            }
            thisResult.Add(group.ToString());
        }
        results.Add(thisResult);
    }
    return results;
}
/// <summary>
/// Snaps <paramref name="ipoint_0"/> to the nearest intersection vertex of the
/// cached polyline/polygon features whose boundary lies within
/// <paramref name="double_0"/> of the point. Returns true and updates the point
/// in place when a snap target was hit.
/// </summary>
/// <param name="igeometry_0">Unused here; kept for interface compatibility.</param>
/// <param name="ipoint_0">The point to snap; mutated on success.</param>
/// <param name="double_0">Hit-test search tolerance.</param>
public bool Snap(IGeometry igeometry_0, IPoint ipoint_0, double double_0)
{
    bool flag;
    int i;
    IHitTest shape;
    IGeometry geometry;
    if (!EditorLicenseProviderCheck.Check())
    {
        flag = false;
    }
    else if (this.ifeatureCache_0.Count != 0)
    {
        bool flag1 = false;
        double num = 0;
        int num1 = 0;
        int num2 = 0;
        bool flag2 = true;
        IPoint pointClass = new ESRI.ArcGIS.Geometry.Point();
        object value = Missing.Value;
        IArray arrayClass = new Array();
        // Gather every cached polyline/polygon whose boundary is within tolerance.
        for (i = 0; i < this.ifeatureCache_0.Count; i++)
        {
            shape = (IHitTest)this.ifeatureCache_0.Feature[i].Shape;
            if ((shape is IPolyline || shape is IPolygon) && shape.HitTest(ipoint_0, double_0, esriGeometryHitPartType.esriGeometryPartBoundary, pointClass, ref num, ref num1, ref num2, ref flag2))
            {
                arrayClass.Add(shape);
            }
        }
        // Intersect every pair of gathered shapes and accumulate the point results.
        IPointCollection multipointClass = new Multipoint();
        for (i = 0; i < arrayClass.Count; i++)
        {
            ITopologicalOperator2 element = (ITopologicalOperator2)arrayClass.Element[i];
            for (int j = 0; j < arrayClass.Count; j++)
            {
                if (i != j)
                {
                    if (((IGeometry)arrayClass.Element[i]).GeometryType != ((IGeometry)arrayClass.Element[j]).GeometryType)
                    {
                        geometry = element.IntersectMultidimension((IGeometry)arrayClass.Element[j]);
                        if (geometry != null)
                        {
                            IGeometryCollection geometryCollection = geometry as IGeometryCollection;
                            if (geometryCollection != null)
                            {
                                for (int k = 0; k < geometryCollection.GeometryCount; k++)
                                {
                                    geometry = geometryCollection.Geometry[k];
                                    if (geometry is IPointCollection)
                                    {
                                        multipointClass.AddPointCollection((IPointCollection)geometry);
                                    }
                                    // BUGFIX: was 'geometry is IPointCollection' again, making this
                                    // branch unreachable; the cast below shows IPoint was intended.
                                    else if (geometry is IPoint)
                                    {
                                        multipointClass.AddPoint((IPoint)geometry, ref value, ref value);
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        geometry = element.Intersect((IGeometry)arrayClass.Element[j], esriGeometryDimension.esriGeometry0Dimension);
                        if (geometry != null)
                        {
                            if (geometry is IPointCollection)
                            {
                                multipointClass.AddPointCollection((IPointCollection)geometry);
                            }
                            // BUGFIX: same unreachable duplicate condition as above.
                            else if (geometry is IPoint)
                            {
                                multipointClass.AddPoint((IPoint)geometry, ref value, ref value);
                            }
                        }
                    }
                }
            }
        }
        // Snap to the nearest accumulated intersection vertex, if any is in range.
        shape = (IHitTest)multipointClass;
        if (shape.HitTest(ipoint_0, double_0, esriGeometryHitPartType.esriGeometryPartVertex, pointClass, ref num, ref num1, ref num2, ref flag2))
        {
            ipoint_0.PutCoords(pointClass.X, pointClass.Y);
            flag1 = true;
        }
        flag = flag1;
    }
    else
    {
        flag = false;
    }
    return flag;
}
/// <summary>
/// Subtraction implemented as addition with a negated scale factor:
/// delegates to <c>a.Add(b, alpha: -1)</c>.
/// </summary>
// Modifier order normalized to 'sealed override' for consistency with the Add overloads.
public sealed override Array <Real> Sub(Array <Real> a, Array <Real> b) => a.Add(b, alpha: -1);
/// <summary>
/// Executes the build: runs pre-build steps, executes every module rank in
/// reverse graph order (multithreaded via a limited-concurrency task scheduler
/// when more than one thread is configured, otherwise serially), then runs
/// post-build steps. The first module failure cancels the remaining work and
/// is rethrown after all tasks settle.
/// </summary>
Run()
{
    Log.Detail("Running build");
    // TODO: should the rank collections be sorted, so that modules with fewest dependencies are first?
    var graph = Graph.Instance;
    var metaDataType = graph.BuildModeMetaData.GetType();
    var useEvaluation = CheckIfModulesNeedRebuilding(metaDataType);
    var explainRebuild = CommandLineProcessor.Evaluate(new Options.ExplainBuildReason());
    var immediateOutput = CommandLineProcessor.Evaluate(new Options.ImmediateOutput());
    ExecutePreBuild(metaDataType);
    // Ensure the build root directory exists before any module writes to it.
    if (!System.IO.Directory.Exists(graph.BuildRoot))
    {
        System.IO.Directory.CreateDirectory(graph.BuildRoot);
    }
    var threadCount = CommandLineProcessor.Evaluate(new Options.MultiThreaded());
    if (0 == threadCount)
    {
        threadCount = System.Environment.ProcessorCount;
    }
    System.Exception abortException = null;
    if (threadCount > 1)
    {
        var cancellationSource = new System.Threading.CancellationTokenSource();
        var cancellationToken = cancellationSource.Token;
        // LongRunning is absolutely necessary in order to achieve paralleism
        var creationOpts = System.Threading.Tasks.TaskCreationOptions.LongRunning;
        var continuationOpts = System.Threading.Tasks.TaskContinuationOptions.LongRunning;
        var scheduler = new LimitedConcurrencyLevelTaskScheduler(threadCount);
        var factory = new System.Threading.Tasks.TaskFactory(
            cancellationToken,
            creationOpts,
            continuationOpts,
            scheduler);
        var tasks = new Array<System.Threading.Tasks.Task>();
        foreach (var rank in graph.Reverse())
        {
            foreach (var module in rank)
            {
                var context = new ExecutionContext(useEvaluation, explainRebuild, immediateOutput);
                var task = factory.StartNew(() =>
                {
                    if (cancellationToken.IsCancellationRequested)
                    {
                        return;
                    }
                    // Block until every dependency and requirement of this module has finished.
                    var depTasks = new Array<System.Threading.Tasks.Task>();
                    foreach (var dep in module.Dependents)
                    {
                        if (null == dep.ExecutionTask)
                        {
                            continue;
                        }
                        depTasks.Add(dep.ExecutionTask);
                    }
                    foreach (var dep in module.Requirements)
                    {
                        if (null == dep.ExecutionTask)
                        {
                            continue;
                        }
                        depTasks.Add(dep.ExecutionTask);
                    }
                    System.Threading.Tasks.Task.WaitAll(depTasks.ToArray());
                    if (cancellationToken.IsCancellationRequested)
                    {
                        return;
                    }
                    try
                    {
                        (module as IModuleExecution).Execute(context);
                    }
                    catch (Exception ex)
                    {
                        // NOTE(review): abortException is written from worker threads without
                        // synchronization; a later failure may overwrite an earlier one.
                        abortException = ex;
                        cancellationSource.Cancel();
                    }
                    finally
                    {
                        // Flush any captured stdout/stderr from the module execution.
                        if (context.OutputStringBuilder != null && context.OutputStringBuilder.Length > 0)
                        {
                            Log.Info(context.OutputStringBuilder.ToString());
                        }
                        if (context.ErrorStringBuilder != null && context.ErrorStringBuilder.Length > 0)
                        {
                            Log.Info(context.ErrorStringBuilder.ToString());
                        }
                    }
                });
                tasks.Add(task);
                module.ExecutionTask = task;
            }
        }
        try
        {
            System.Threading.Tasks.Task.WaitAll(tasks.ToArray());
        }
        catch (System.AggregateException exception)
        {
            // Cancellation of still-pending tasks is expected after a failure; anything else is fatal.
            if (!(exception.InnerException is System.Threading.Tasks.TaskCanceledException))
            {
                throw new Exception(exception, "Error during threaded build");
            }
        }
    }
    else
    {
        // Single-threaded path: execute modules in order, stopping at the first failure.
        foreach (var rank in graph.Reverse())
        {
            if (null != abortException)
            {
                break;
            }
            foreach (IModuleExecution module in rank)
            {
                var context = new ExecutionContext(useEvaluation, explainRebuild, immediateOutput);
                try
                {
                    module.Execute(context);
                }
                catch (Exception ex)
                {
                    abortException = ex;
                    break;
                }
                finally
                {
                    if (context.OutputStringBuilder != null && context.OutputStringBuilder.Length > 0)
                    {
                        Log.Info(context.OutputStringBuilder.ToString());
                    }
                    if (context.ErrorStringBuilder != null && context.ErrorStringBuilder.Length > 0)
                    {
                        Log.Info(context.ErrorStringBuilder.ToString());
                    }
                }
            }
        }
    }
    if (null != abortException)
    {
        throw new Exception(abortException, "Error during {0}threaded build", (threadCount > 1) ? string.Empty : "non-");
    }
    ExecutePostBuild(metaDataType);
}
/// <summary>
/// Per-frame lava-block streaming: when an "EndPositions" marker enters the
/// spawn zone around the referring node, instances that block's next lava block
/// and merges its "LavaBurn" surface points into the running lava surface;
/// blocks and surfaces far behind the referrer are freed.
/// </summary>
public override void _Process(float delta)
{
    float spawnZoneExtends = 640;
    Vector2 globalPosition = ((Node2D)Referer).GlobalPosition;
    Rect2 spawnZone = new Rect2(globalPosition, spawnZoneExtends, spawnZoneExtends);
    foreach (Node n in GetTree().GetNodesInGroup("EndPositions"))
    {
        Position2D pos = (Position2D)n;
        LavaBlock lb = (LavaBlock)pos.GetParent();
        if (spawnZone.HasPoint(pos.GlobalPosition) && !lb.loadedNext)
        {
            // Pick one of the block's possible successors at random and spawn it at the end marker.
            PackedScene nextToSpawn = GD.Load <PackedScene>("tests/thibault/lava_blocks/" + lb.nexts[(int)(GD.Randi() % lb.nexts.Count)] + ".tscn");
            LavaBlock nextBlock = (LavaBlock)nextToSpawn.Instance();
            nextBlock.camera = lb.camera;
            nextBlock.GlobalPosition = pos.GlobalPosition;
            Referer.GetParent().AddChild(nextBlock);
            lb.loadedNext = true;
            // Collect the burn surfaces belonging to the freshly spawned block.
            Array <LavaSurfaceBurn> nextLavas = new Array <LavaSurfaceBurn>();
            foreach (Node n2 in GetTree().GetNodesInGroup("LavaBurn"))
            {
                LavaSurfaceBurn l = (LavaSurfaceBurn)n2;
                if (l.GetParent() == nextBlock)
                {
                    nextLavas.Add(l);
                }
            }
            for (int i = 0; i < nextLavas.Count; i++)
            {
                // Start a new running surface whenever the previous one was interrupted
                // or we already appended one surface this block.
                if (i != 0 || lavaSurface.isInterrupted)
                {
                    surfaces.Add(lavaSurface);
                    lavaSurface = (LavaSurfaceBurn)lavaPacked.Instance();
                    GetTree().CurrentScene.AddChild(lavaSurface);
                }
                LavaSurfaceBurn nextLava = nextLavas[i];
                // Transfer the template surface's points into the running surface (world space).
                Vector2 offset = nextLava.GlobalPosition;
                foreach (Vector2 p in nextLava.Points)
                {
                    lavaSurface.AddPoint(p + offset);
                }
                lavaSurface.isInterrupted = nextLava.isInterrupted;
                nextLava.QueueFree();
            }
        }
        else if ((pos.GlobalPosition.x - globalPosition.x) < -spawnZoneExtends)
        {
            // The block is far behind the referrer: free it and any surfaces fully behind too.
            lb.QueueFree();
            while (surfaces.Count > 0)
            {
                LavaSurfaceBurn ls = surfaces[0];
                float maxX = 0;
                for (int i = 0; i < ls.Points.Length; i++)
                {
                    maxX = Mathf.Max(maxX, ls.GetPointPosition(i).x);
                }
                if (maxX - globalPosition.x < -spawnZoneExtends)
                {
                    surfaces.RemoveAt(0);
                    ls.QueueFree();
                }
                else
                {
                    break;
                }
            }
        }
    }
}
/// <summary>Registers a movable with the tracked collection.</summary>
public void RegisterMovable(Movable movable) => movables.Add(movable);
/// <summary>
/// Starts dragging a spline point once the cursor has moved beyond a small
/// screen-size-relative threshold. Depending on the grabbed point, this
/// prepends to the spline, appends to it, or creates a new branch at the point;
/// all created objects are registered for editor undo.
/// Returns true when a drag actually began.
/// </summary>
private bool BeginDragSplinePoint()
{
    Vector3 position;
    if (GetPositionOnDragPlane(ScreenPointToRay(MousePosition), out position))
    {
        Vector3 offset = position - m_dragSpline.GetPointPosition(m_dragPointIndex);
        // Threshold is a fraction of the handle size so behavior is zoom-independent.
        const float s = 0.1f;
        if (offset.magnitude > HandleSize(m_beginDragPosition) * s)
        {
            JunctionBase junction = m_dragSpline.GetJunction(m_dragPointIndex);
            if (m_dragPointIndex == 0 && junction == null)
            {
                // Dragging the free first point: grow the spline backwards.
                m_dragSpline.Prepend(position);
                GameObject splinePoint = m_dragSpline.GetPoint(0);
                RegisterCreatedObjectUndo(splinePoint, "BH.S3.Prepend");
                SetTransformParentUndo(splinePoint.transform, splinePoint.transform.parent, "BH.S3.Prepend");
            }
            else if (m_dragPointIndex == m_dragSpline.PointsCount - 1 && junction == null)
            {
                // Dragging the free last point: grow the spline forwards.
                m_dragSpline.Append(position);
                m_dragPointIndex = m_dragSpline.PointsCount - 1;
                RegisterCreatedObjectUndo(m_dragSpline.GetPoint(m_dragPointIndex), "BH.S3.Append");
            }
            else
            {
                // Interior point (or already-junctioned endpoint): create a branch.
                Vector3 dir;
                if (m_dragSpline.CurveCount == m_dragPointIndex)
                {
                    dir = m_dragSpline.GetDirection(1.0f);
                }
                else
                {
                    dir = m_dragSpline.GetDirection(0, m_dragPointIndex);
                }
                // Branch direction depends on whether the drag goes along or against the spline tangent.
                bool isOut = Mathf.Sign(Vector3.Dot(offset.normalized, dir)) >= 0;
                int connectionIndex = m_dragSpline.CreateBranch(m_dragPointIndex, isOut);
                junction = m_dragSpline.GetJunction(m_dragPointIndex);
                m_dragSpline = junction.GetSpline(connectionIndex);
                RegisterCreatedObjectUndo(m_dragSpline.gameObject, "BH.S3.Branch");
                if (junction.ConnectionsCount == 2)
                {
                    // Junction created by this branch: register it for undo as well.
                    m_newJunction = junction;
                    RegisterCreatedObjectUndo(junction.gameObject, "BH.S3.Branch");
                }
                m_splines = m_splines.Add(m_dragSpline);
                SplineRenderer splineRenderer = m_dragSpline.GetComponent <SplineRenderer>();
                m_splineRenderers = m_splineRenderers.Add(splineRenderer);
                if (splineRenderer != null)
                {
                    splineRenderer.IsSelected = true;
                }
                m_dragPointIndex = isOut ? 1 : 0;
            }
            return(true);
        }
    }
    return(false);
}
/// <summary>Adds vector <paramref name="v"/> to the wrapped vector via the array extension.</summary>
public double[] Add(double[] v) => _v.Add(v);
/// <summary>
/// Returns (building and caching on first use) the injection metadata for
/// <paramref name="type"/>: primitive flag and default value, the members
/// decorated with [Inject], and the constructor to use for instantiation.
/// </summary>
public static InjectTypeMetadata Get(Type type)
{
    return(cached.GetOrAdd(type, (t) =>
    {
        InjectTypeMetadata typeInfo;
        typeInfo = new InjectTypeMetadata();
        typeInfo.type = type;
        // Primitives and strings are leaf values: record a default and skip member scanning.
        if (type.IsPrimitive)
        {
            typeInfo.IsPrimitive = true;
            typeInfo.defaultValue = Activator.CreateInstance(type);
        }
        else if (type == typeof(string))
        {
            typeInfo.IsPrimitive = true;
            typeInfo.defaultValue = null;
        }
        if (typeInfo.IsPrimitive)
        {
            return typeInfo;
        }
        BindingFlags bindingFlags = BindingFlags.CreateInstance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static | BindingFlags.SetField | BindingFlags.SetProperty | BindingFlags.InvokeMethod;
        var members = type.GetMembers(bindingFlags);
        List <IBuilderMember> builderMembers = null;
        // Collect a builder for every member carrying [Inject].
        foreach (var member in members)
        {
            var injAttr = member.GetCustomAttribute <InjectAttribute>(false);
            if (injAttr == null)
            {
                continue;
            }
            IBuilderMember builderMember = null;
            //if (injectionMember != null)
            //    throw new Exception(string.Format("class {0}, {1} use multi", t.FullName, typeof(InjecAttribute).Name));
            switch (member.MemberType)
            {
            case MemberTypes.Field:
                var fInfo = (FieldInfo)member;
                builderMember = new FieldBuilder(fInfo, InjectorUtils.FromAttributeProvider(fInfo, fInfo.FieldType));
                break;
            case MemberTypes.Property:
                var pInfo = (PropertyInfo)member;
                builderMember = new PropertyBuilder(pInfo, InjectorUtils.FromAttributeProvider(pInfo, pInfo.PropertyType));
                break;
            case MemberTypes.Method:
                var mInfo = (MethodInfo)member;
                builderMember = new MethodBuilder(mInfo, InjectorUtils.FromMethod(mInfo));
                break;
            case MemberTypes.Constructor:
                // At most one constructor may carry [Inject].
                var cInfo = (ConstructorInfo)member;
                if (ValidateInjectConstructor(cInfo))
                {
                    if (typeInfo.constructorBuilder != null)
                    {
                        throw new Exception(string.Format("class {0}, constructor only use one attribute [{1}]", type.FullName, typeof(InjectAttribute).Name));
                    }
                    typeInfo.constructorBuilder = new ConstructorBuilder(cInfo, InjectorUtils.FromMethod(cInfo));
                }
                continue;
            default:
                continue;
            }
            if (builderMember != null)
            {
                if (builderMembers == null)
                {
                    builderMembers = new List <IBuilderMember>();
                }
                builderMembers.Add(builderMember);
            }
        }
        if (builderMembers != null)
        {
            typeInfo.builderMembers = builderMembers.ToArray();
        }
        // No [Inject] constructor found: fall back to a valid parameterless
        // constructor first, then to any valid constructor.
        if (typeInfo.constructorBuilder == null)
        {
            var cInfos = members.Where(o => o.MemberType == MemberTypes.Constructor).Select(o => (ConstructorInfo)o);
            foreach (var cInfo in cInfos)
            {
                if (!ValidateInjectConstructor(cInfo))
                {
                    continue;
                }
                if (cInfo.GetParameters().Length != 0)
                {
                    continue;
                }
                typeInfo.constructorBuilder = new ConstructorBuilder(cInfo, InjectorUtils.FromMethod(cInfo));
                break;
            }
            if (typeInfo.constructorBuilder == null)
            {
                foreach (var cInfo in cInfos)
                {
                    if (!ValidateInjectConstructor(cInfo))
                    {
                        continue;
                    }
                    typeInfo.constructorBuilder = new ConstructorBuilder(cInfo, InjectorUtils.FromMethod(cInfo));
                    break;
                }
            }
        }
        UpdateTransparentProxy(typeInfo);
        return typeInfo;
    }));
}
/// <summary>
/// Rebuilds the jelly as an atomW x atomH grid of JellyAtoms covering
/// <c>rect</c>: frees existing children, instances the grid, links each atom to
/// its already-created neighbours with rest-distance constraints, and records
/// the border atoms (top row, right column, bottom row, left column) in
/// <c>edgeBodies</c>.
/// </summary>
// 'async' removed: the method contains no awaits (the ToSignal call below is
// commented out), so async void only produced a CS1998 warning. Call sites are unchanged.
private void UpdateRect()
{
    edgeBodies = new Array <JellyAtom>();
    if (jellyAtomPacked == null)
    {
        return;
    }
    // Spacing so the grid exactly spans the rect (0 along an axis with a single atom).
    Vector2 atomSeparation = new Vector2(
        atomW > 1 ? rect.Size.x / (atomW - 1) : 0,
        atomH > 1 ? rect.Size.y / (atomH - 1) : 0);
    Vector2 origin = rect.Position;
    // Remove any previously created atoms.
    foreach (Node n in GetChildren())
    {
        n.QueueFree();
    }
    mapAtoms = new Dictionary <Vector2, JellyAtom>();
    for (int j = atomH - 1; j >= 0; j--)
    {
        for (int i = atomW - 1; i >= 0; i--)
        {
            JellyAtom jellyAtom = (JellyAtom)jellyAtomPacked.Instance();
            Vector2 gridPos = new Vector2(i, j);
            jellyAtom.Position = origin + atomSeparation * gridPos;
            jellyAtom.jelly = this;
            jellyAtom.GravityScale = gravityScale;
            AddChild(jellyAtom);
            mapAtoms.Add(gridPos, jellyAtom);
            // Iterating from high to low indices means these neighbours already exist in mapAtoms.
            Vector2[] neighbours = new Vector2[] { new Vector2(i + 1, j), new Vector2(i, j + 1), new Vector2(i + 1, j + 1), new Vector2(i - 1, j + 1) };
            if (i == 3 && j == 3)
            {
                // Attach a RemoteTransform2D so "CameraOffset" nodes follow this specific atom.
                foreach (Node n in GetTree().GetNodesInGroup("CameraOffset"))
                {
                    Node2D cameraOffset = (Node2D)n;
                    RemoteTransform2D rt = new RemoteTransform2D();
                    // NOTE(review): the original assigned UpdateRotation = false twice; one of the
                    // two was possibly meant to be UpdatePosition — confirm intended behavior.
                    rt.UpdateRotation = false;
                    rt.RemotePath = cameraOffset.GetPath();
                    jellyAtom.AddChild(rt);
                }
            }
            foreach (Vector2 neighbour in neighbours)
            {
                JellyAtom neighbourBody;
                if (!mapAtoms.TryGetValue(neighbour, out neighbourBody))
                {
                    continue;
                }
                // Link both atoms with the rest distance between them.
                float dist = (neighbourBody.Position - jellyAtom.Position).Length();
                neighbourBody.AddNeighbour(new Neighbour(jellyAtom, dist));
                jellyAtom.AddNeighbour(new Neighbour(neighbourBody, dist));
            }
            //await ToSignal(GetTree(), "idle_frame");
        }
    }
    // Collect border atoms clockwise: top row, right column, bottom row, left column.
    for (int i = 0; i < atomW; i++)
    {
        edgeBodies.Add(mapAtoms[new Vector2(i, 0)]);
    }
    for (int j = 1; j < atomH; j++)
    {
        edgeBodies.Add(mapAtoms[new Vector2(atomW - 1, j)]);
    }
    for (int i = atomW - 2; i >= 0; i--)
    {
        edgeBodies.Add(mapAtoms[new Vector2(i, atomH - 1)]);
    }
    for (int j = atomH - 2; j >= 1; j--)
    {
        edgeBodies.Add(mapAtoms[new Vector2(0, j)]);
    }
}
/// <summary>
/// Recursively lists "category-path,label" entries for every toolbox step
/// under the category path given by <paramref name="nodes"/>, descending into
/// sub-categories except for a handful of special virtual folders.
/// </summary>
/// <param name="userContext">User context passed through to the flow edit service.</param>
/// <param name="flowSessionId">Flow editing session identifier.</param>
/// <param name="nodes">Category path segments, joined with '/'.</param>
private List <string> GetStepsInSubCategory(AbstractUserContext userContext, string flowSessionId, string[] nodes)
{
    List <string> stepInfos = new List <string>();

    // e.g. ["A","B","C"] -> "A/B/C". Built once; it is the same for every step
    // (the original rebuilt this string inside the per-step loop).
    string categoryString = string.Join("/", nodes);

    FlowStepToolboxInformation[] flowStepToolboxInformation = FlowEditService.Instance.GetToolboxStepsInformation(userContext, flowSessionId, nodes);
    foreach (FlowStepToolboxInformation toolboxInformation in flowStepToolboxInformation)
    {
        stepInfos.Add($"{categoryString},{toolboxInformation.Label}");
    }

    // Recurse into sub-categories, skipping the special virtual folders.
    string[] toolboxCategories = FlowEditService.Instance.GetToolboxCategories(userContext, flowSessionId, nodes);
    foreach (string toolboxCategory in toolboxCategories)
    {
        if (!string.Equals(toolboxCategory, "[Root Folder]") &&
            !string.Equals(toolboxCategory, "[Current Folder]") &&
            !string.Equals(toolboxCategory, ".Net Libraries") &&
            !string.Equals(toolboxCategory, "User Defined Types"))
        {
            stepInfos.AddRange(GetStepsInSubCategory(userContext, flowSessionId, nodes.Add(toolboxCategory)));
        }
    }
    return(stepInfos);
}
/// <summary>
/// Scaled subtraction: computes a - alpha * b (into <paramref name="result"/>
/// when supplied) by delegating to Add with the scale factor negated.
/// </summary>
public static Array <Real> Sub(this Array <Real> a, Real b, Real alpha = 1, Array <Real> result = null)
{
    return a.Add(b, -alpha, result);
}
/// <summary>
/// Learns a model that can map the given inputs to the given outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
/// <param name="weights">The weight of importance for each input-output pair.</param>
/// <returns>
/// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
/// </returns>
///
public Pipeline Learn(double[][] x, int[] y, double[] weights = null)
{
    Init(x, y);

    // Create the Gram (Kernel) Matrix
    var K = kernel.ToJagged(x);

    // Compute entire data set measures
    base.Means = Measures.Mean(K, dimension: 0);
    base.StandardDeviations = Measures.StandardDeviation(K, Means);

    // Initialize the kernel analogous scatter matrices
    //int dimension = x.Columns();
    double[][] Sb = Jagged.Zeros(NumberOfSamples, NumberOfSamples);
    double[][] Sw = Jagged.Zeros(NumberOfSamples, NumberOfSamples);

    // For each class
    for (int c = 0; c < Classes.Count; c++)
    {
        var idx = Matrix.Find(y, y_i => y_i == c);

        // Get the Kernel matrix class subset
        double[][] Kc = K.Get(idx);
        int count = Kc.Rows();

        // Get the Kernel matrix class mean
        double[] mean = Measures.Mean(Kc, dimension: 0);

        // Construct the Kernel equivalent of the Within-Class Scatter matrix
        double[][] Swi = Measures.Scatter(Kc, dimension: 0, means: mean);
        Swi.Divide((double)count, result: Swi);
        Sw.Add(Swi, result: Sw); // Sw = Sw + Swi

        // Construct the Kernel equivalent of the Between-Class Scatter matrix
        double[] d = mean.Subtract(base.Means);
        double[][] Sbi = Jagged.Outer(d, d);
        Sbi.Multiply((double)NumberOfSamples, result: Sbi);
        Sb.Add(Sbi, result: Sb); // Sb = Sb + Sbi

        // Store additional information
        base.ClassScatter[c] = Swi;
        base.ClassCount[c] = count;
        base.ClassMeans[c] = mean;
        base.ClassStandardDeviations[c] = Measures.StandardDeviation(Kc, mean);
    }

    // Add regularization to avoid singularity
    Sw.AddToDiagonal(regularization, result: Sw);

    // Compute the generalized eigenvalue decomposition
    var gevd = new JaggedGeneralizedEigenvalueDecomposition(Sb, Sw, sort: true);

    if (gevd.IsSingular) // check validity of the results
    {
        throw new SingularMatrixException("One of the matrices is singular. Please retry " +
            "the method with a higher regularization constant.");
    }

    // Get the eigenvalues and corresponding eigenvectors
    double[] evals = gevd.RealEigenvalues;
    double[][] eigs = gevd.Eigenvectors;

    // Eliminate unwanted components: the retained count is capped in turn by the
    // threshold, the configured number of inputs, and the number of outputs.
    int nonzero = x.Columns();
    if (Threshold > 0)
    {
        nonzero = Math.Min(gevd.Rank, GetNonzeroEigenvalues(evals, Threshold));
    }
    if (NumberOfInputs != 0)
    {
        nonzero = Math.Min(nonzero, NumberOfInputs);
    }
    if (NumberOfOutputs != 0)
    {
        nonzero = Math.Min(nonzero, NumberOfOutputs);
    }
    eigs = eigs.Get(null, 0, nonzero);
    evals = evals.Get(0, nonzero);

    // Store information
    this.input = x;
    base.Eigenvalues = evals;
    base.DiscriminantVectors = eigs.Transpose();
    base.ScatterBetweenClass = Sb;
    base.ScatterWithinClass = Sw;
    base.NumberOfOutputs = evals.Length;

    // Compute feature space means for later classification
    for (int c = 0; c < Classes.Count; c++)
    {
        ProjectionMeans[c] = ClassMeans[c].Dot(eigs);
    }

    // Computes additional information about the analysis and creates the
    // object-oriented structure to hold the discriminants found.
    CreateDiscriminants();

    this.Classifier = CreateClassifier();

    return(Classifier);
}
/// <summary>Appends a player name to the player list.</summary>
public void addToPlayerList(String player) => playerList.Add(player);
/// <summary>
/// Builds a curl mesh and pre-allocates every vertex/shadow/scan-line array
/// and native float buffer sized from the split count, so no allocation is
/// needed while animating.
/// </summary>
/// <param name="maxCurlSplits">
/// Maximum number curl can be divided into. The bigger the value the smoother
/// curl will be, with the cost of having more polygons for drawing.
/// </param>
public CurlMesh(int maxCurlSplits)
{
    // There really is no use for 0 splits.
    mMaxCurlSplits = maxCurlSplits < 1 ? 1 : maxCurlSplits;

    mArrScanLines = new Array <Double>(maxCurlSplits + 2);
    mArrOutputVertices = new Array <Vertex>(7);
    mArrRotatedVertices = new Array <Vertex>(4);
    mArrIntersections = new Array <Vertex>(2);
    mArrTempVertices = new Array <Vertex>(7 + 4);
    for (int i = 0; i < 7 + 4; ++i)
    {
        mArrTempVertices.Add(new Vertex());
    }

    if (DRAW_SHADOW)
    {
        mArrSelfShadowVertices = new Array <ShadowVertex>(
            (mMaxCurlSplits + 2) * 2);
        mArrDropShadowVertices = new Array <ShadowVertex>(
            (mMaxCurlSplits + 2) * 2);
        mArrTempShadowVertices = new Array <ShadowVertex>(
            (mMaxCurlSplits + 2) * 2);
        for (int i = 0; i < (mMaxCurlSplits + 2) * 2; ++i)
        {
            mArrTempShadowVertices.Add(new ShadowVertex());
        }
    }

    // Rectangle consists of 4 vertices. Index 0 = top-left, index 1 =
    // bottom-left, index 2 = top-right and index 3 = bottom-right.
    for (int i = 0; i < 4; ++i)
    {
        mRectangle[i] = new Vertex();
    }
    // Set up shadow penumbra direction to each vertex. We do fake 'self
    // shadow' calculations based on this information.
    mRectangle[0].mPenumbraX = mRectangle[1].mPenumbraX = mRectangle[1].mPenumbraY = mRectangle[3].mPenumbraY = -1;
    mRectangle[0].mPenumbraY = mRectangle[2].mPenumbraX = mRectangle[2].mPenumbraY = mRectangle[3].mPenumbraX = 1;

    if (DRAW_CURL_POSITION)
    {
        // 2 points per line, 2 floats per point, 4 bytes per float.
        mCurlPositionLinesCount = 3;
        ByteBuffer hvbb = ByteBuffer
            .AllocateDirect(mCurlPositionLinesCount * 2 * 2 * 4);
        hvbb.Order(ByteOrder.NativeOrder());
        mBufCurlPositionLines = hvbb.AsFloatBuffer();
        mBufCurlPositionLines.Position(0);
    }

    // There are 4 vertices from bounding rect, max 2 from adding split line
    // to two corners and curl consists of max mMaxCurlSplits lines each
    // outputting 2 vertices.
    int maxVerticesCount = 4 + 2 + (2 * mMaxCurlSplits);
    // Position buffer: 3 floats (xyz) per vertex, 4 bytes per float.
    ByteBuffer vbb = ByteBuffer.AllocateDirect(maxVerticesCount * 3 * 4);
    vbb.Order(ByteOrder.NativeOrder());
    mBufVertices = vbb.AsFloatBuffer();
    mBufVertices.Position(0);

    if (DRAW_TEXTURE)
    {
        // Texture coordinates: 2 floats (uv) per vertex.
        ByteBuffer tbb = ByteBuffer
            .AllocateDirect(maxVerticesCount * 2 * 4);
        tbb.Order(ByteOrder.NativeOrder());
        mBufTexCoords = tbb.AsFloatBuffer();
        mBufTexCoords.Position(0);
    }

    // Color buffer: 4 floats (rgba) per vertex.
    ByteBuffer cbb = ByteBuffer.AllocateDirect(maxVerticesCount * 4 * 4);
    cbb.Order(ByteOrder.NativeOrder());
    mBufColors = cbb.AsFloatBuffer();
    mBufColors.Position(0);

    if (DRAW_SHADOW)
    {
        int maxShadowVerticesCount = (mMaxCurlSplits + 2) * 2 * 2;
        ByteBuffer scbb = ByteBuffer
            .AllocateDirect(maxShadowVerticesCount * 4 * 4);
        scbb.Order(ByteOrder.NativeOrder());
        mBufShadowColors = scbb.AsFloatBuffer();
        mBufShadowColors.Position(0);
        ByteBuffer sibb = ByteBuffer
            .AllocateDirect(maxShadowVerticesCount * 3 * 4);
        sibb.Order(ByteOrder.NativeOrder());
        mBufShadowVertices = sibb.AsFloatBuffer();
        mBufShadowVertices.Position(0);
        mDropShadowCount = mSelfShadowCount = 0;
    }
}
/// <summary>
/// Appends a polygon, given as an array of point indices, to the stored list.
/// NOTE(review): `vil` is presumably a vertex index list — confirm against the class.
/// </summary>
/// <param name="pts">Point indices making up the polygon.</param>
public void AddPoly(int[] pts) => vil.Add(pts);
/// <summary>
/// Extends the caller-supplied eager-load expressions with the
/// <c>Endereco</c> navigation property so it is always included.
/// </summary>
/// <param name="includes">Eager-load expressions already requested.</param>
/// <param name="filter">Filter context (unused here).</param>
/// <returns>The include list with <c>Endereco</c> appended.</returns>
protected override Expression <Func <Condominio, object> >[] DataAgregation(Expression <Func <Condominio, object> >[] includes, FilterBase filter)
{
    var withEndereco = includes.Add(c => c.Endereco);
    return withEndereco;
}
/// <summary>
/// Appends <paramref name="item"/> to <paramref name="array"/>, replacing the
/// reference with a new array one element longer.
/// </summary>
/// <typeparam name="T">Element type of the array.</typeparam>
/// <param name="array">Array to grow; updated in place via <c>ref</c>. A null
/// reference is treated as an empty array.</param>
/// <param name="item">Value stored in the new final slot.</param>
static void Add <T> (ref T [] array, T item)
{
    // System.Array.Resize allocates the grown array and copies the old
    // contents in one stdlib call, replacing the project-specific Add
    // extension previously used here. Fully qualified because this file
    // also uses a project type named Array.
    int count = array == null ? 0 : array.Length;
    System.Array.Resize(ref array, count + 1);
    array[count] = item;
}
/// <summary>
/// Deducts currency worth <paramref name="totalUnitsToDeduct"/> units from the
/// buying player's inventory, spending large denominations first, and refunds
/// any overshoot as single-value "gear-rusty" items.
/// </summary>
/// <param name="buyingPlayer">Player whose inventory is charged.</param>
/// <param name="totalUnitsToDeduct">Total currency units to remove.</param>
public void DeductFromPlayer(IPlayer buyingPlayer, int totalUnitsToDeduct)
{
    // Currency slots grouped by per-item value, sorted ascending by value.
    SortedDictionary <int, List <ItemSlot> > moneys = new SortedDictionary <int, List <ItemSlot> >();

    // Collect every non-creative slot that holds a currency item.
    buyingPlayer.Entity.WalkInventory((invslot) =>
    {
        if (invslot is ItemSlotCreative)
        {
            return (true);
        }
        if (invslot.Itemstack == null || invslot.Itemstack.Collectible.Attributes == null)
        {
            return (true);
        }
        int pieceValue = CurrencyValuePerItem(invslot);
        if (pieceValue != 0)
        {
            List <ItemSlot> slots = null;
            if (!moneys.TryGetValue(pieceValue, out slots))
            {
                slots = new List <ItemSlot>();
            }
            slots.Add(invslot);
            moneys[pieceValue] = slots;
        }
        return (true);
    });

    // First pass: largest denominations first (Reverse() flips the ascending
    // SortedDictionary order), removing only whole multiples of each piece
    // value so we never overshoot.
    foreach (var val in moneys.Reverse())
    {
        int pieceValue = val.Key;
        foreach (ItemSlot slot in val.Value)
        {
            int removeUnits = Math.Min(pieceValue * slot.StackSize, totalUnitsToDeduct);
            // Round down to a whole number of pieces.
            removeUnits = (removeUnits / pieceValue) * pieceValue;
            slot.Itemstack.StackSize -= removeUnits / pieceValue;
            if (slot.StackSize <= 0)
            {
                slot.Itemstack = null;
            }
            slot.MarkDirty();
            totalUnitsToDeduct -= removeUnits;
        }
        if (totalUnitsToDeduct <= 0)
        {
            break;
        }
    }

    // Maybe didn't have small moneys? Take a bigger piece....
    // Second pass: smallest denominations first; Math.Max forces at least one
    // piece to be taken even if it overshoots the remaining amount.
    // NOTE(review): slots emptied in the first pass (Itemstack set to null)
    // are revisited here and would dereference a null Itemstack — confirm
    // whether totalUnitsToDeduct > 0 guarantees no slot was emptied.
    if (totalUnitsToDeduct > 0)
    {
        foreach (var val in moneys)
        {
            int pieceValue = val.Key;
            foreach (ItemSlot slot in val.Value)
            {
                int removeUnits = Math.Max(pieceValue, Math.Min(pieceValue * slot.StackSize, totalUnitsToDeduct));
                removeUnits = (removeUnits / pieceValue) * pieceValue;
                slot.Itemstack.StackSize -= removeUnits / pieceValue;
                if (slot.StackSize <= 0)
                {
                    slot.Itemstack = null;
                }
                slot.MarkDirty();
                totalUnitsToDeduct -= removeUnits;
            }
            if (totalUnitsToDeduct <= 0)
            {
                break;
            }
        }
    }

    // ...and return single value gears
    // (a negative remainder means the second pass overshot; refund it).
    if (totalUnitsToDeduct < 0)
    {
        GiveOrDropToPlayer(buyingPlayer, new ItemStack(Api.World.GetItem(new AssetLocation("gear-rusty"))), -totalUnitsToDeduct);
    }
}
/// <summary>
/// Returns the date (at midnight) of the specified weekday within the same
/// Monday-to-Sunday week as the given date.
/// </summary>
/// <param name="dateTime">Reference date.</param>
/// <param name="Week">Weekday to resolve within that week.</param>
/// <returns>The requested weekday of the same ISO week, at 00:00:00.</returns>
public static DateTime DateMonday(this DateTime dateTime, DayOfWeek Week)
{
    // Map DayOfWeek (Sunday == 0) onto ISO-style numbering where
    // Monday == 1 ... Sunday == 7, matching the original switch table.
    int current = dateTime.DayOfWeek == DayOfWeek.Sunday ? 7 : (int)dateTime.DayOfWeek;
    int target = Week == DayOfWeek.Sunday ? 7 : (int)Week;

    // Moving by the signed difference lands on the requested weekday of the
    // same Monday-started week; .Date truncates to midnight (the original
    // achieved this via a "yyyy-MM-dd 00:00:00" string round-trip).
    return dateTime.AddDays(target - current).Date;
}
/// <summary>
/// Reads one node (and, recursively, any children) from the binary stream,
/// dispatching on the leading <see cref="BinaryTag"/> byte.
/// </summary>
/// <param name="aReader">Reader positioned at the start of a serialized node.</param>
/// <returns>The reconstructed node.</returns>
/// <exception cref="Exception">Thrown when the tag byte is not recognized.</exception>
public static ANode Deserialize(System.IO.BinaryReader aReader)
{
    BinaryTag tag = (BinaryTag)aReader.ReadByte();
    switch (tag)
    {
        case BinaryTag.Array:
        {
            // Element count precedes the elements themselves.
            Array node = new Array();
            int remaining = aReader.ReadInt32();
            while (remaining-- > 0)
            {
                node.Add(Deserialize(aReader));
            }
            return node;
        }
        case BinaryTag.Class:
        {
            // Each member is stored as a name followed by its value node.
            Class node = new Class();
            int remaining = aReader.ReadInt32();
            while (remaining-- > 0)
            {
                string memberName = aReader.ReadString();
                node.Add(memberName, Deserialize(aReader));
            }
            return node;
        }
        case BinaryTag.Value:
            return new Data(aReader.ReadString());
        case BinaryTag.IntValue:
            return new Data(aReader.ReadInt32());
        case BinaryTag.DoubleValue:
            return new Data(aReader.ReadDouble());
        case BinaryTag.BoolValue:
            return new Data(aReader.ReadBoolean());
        case BinaryTag.FloatValue:
            return new Data(aReader.ReadSingle());
        default:
            throw new Exception("Error deserializing Unknown tag: " + tag);
    }
}
/// <summary>
/// Reverts a set of projected data into it's original form. Complete reverse
/// transformation is not always possible and is not even guaranteed to exist.
/// </summary>
///
/// <remarks>
///   <para>
///     This method works using a closed-form MDS approach as suggested by
///     Kwok and Tsang. It is currently a direct implementation of the algorithm
///     without any kind of optimization.
///   </para>
///   <para>
///     Reference:
///     - http://cmp.felk.cvut.cz/cmp/software/stprtool/manual/kernels/preimage/list/rbfpreimg3.html
///   </para>
/// </remarks>
///
/// <param name="data">The kpca-transformed data.</param>
/// <param name="neighbors">The number of nearest neighbors to use while constructing the pre-image.</param>
///
public double[][] Revert(double[][] data, int neighbors = 10)
{
    if (data == null)
    {
        throw new ArgumentNullException("data");
    }
    if (sourceCentered == null)
    {
        throw new InvalidOperationException("The analysis must have been computed first.");
    }
    if (neighbors < 2)
    {
        throw new ArgumentOutOfRangeException("neighbors", "At least two neighbors are necessary.");
    }

    // Verify if the current kernel supports
    // distance calculation in feature space.
    var distance = kernel as IReverseDistance;
    if (distance == null)
    {
        throw new NotSupportedException(
            "Current kernel does not support distance calculation in feature space.");
    }

    int rows = data.Rows();

    var result = this.result;

    double[][] reversion = Jagged.Zeros(rows, sourceCentered.Columns());

    // number of neighbors cannot exceed the number of training vectors.
    int nn = System.Math.Min(neighbors, sourceCentered.Rows());

    // For each point to be reversed
    for (int p = 0; p < rows; p++)
    {
        // 1. Get the point in feature space
        double[] y = data.GetRow(p);

        // 2. Select nn nearest neighbors of the feature space
        double[][] X = sourceCentered;
        double[] d2 = new double[result.GetLength(0)];
        int[] inx = new int[result.GetLength(0)];

        // 2.1 Calculate distances
        for (int i = 0; i < X.GetLength(0); i++)
        {
            inx[i] = i;
            d2[i] = distance.ReverseDistance(y, result.GetRow(i).First(y.Length));

            // NaN distances are pushed to the end of the sort below so they
            // are never selected as neighbors.
            if (Double.IsNaN(d2[i]))
            {
                d2[i] = Double.PositiveInfinity;
            }
        }

        // 2.2 Order them
        Array.Sort(d2, inx);

        // 2.3 Select nn neighbors (stop early at the first infinite
        //     distance, i.e. an originally-NaN entry).
        int def = 0;
        for (int i = 0; i < d2.Length && i < nn; i++, def++)
        {
            if (Double.IsInfinity(d2[i]))
            {
                break;
            }
        }

        inx = inx.First(def);
        X = X.Get(inx).Transpose(); // X is in input space
        d2 = d2.First(def);         // distances in input space

        // 3. Perform SVD
        // [U,L,V] = svd(X*H);

        // TODO: If X has more columns than rows, the SV decomposition should be
        // computed on the transpose of X and the left and right vectors should
        // be swapped. This should be fixed after more unit tests are elaborated.
        var svd = new JaggedSingularValueDecomposition(X,
            computeLeftSingularVectors: true,
            computeRightSingularVectors: true,
            autoTranspose: false);

        double[][] U = svd.LeftSingularVectors;
        double[][] L = Jagged.Diagonal(def, svd.Diagonal);
        double[][] V = svd.RightSingularVectors;

        // 4. Compute projections
        // Z = L*V';
        double[][] Z = Matrix.DotWithTransposed(L, V);

        // 5. Calculate distances
        // d02 = sum(Z.^2)';
        double[] d02 = Matrix.Sum(Elementwise.Pow(Z, 2), 0);

        // 6. Get the pre-image using
        // z = -0.5*inv(Z')*(d2-d02)
        double[][] inv = Matrix.PseudoInverse(Z.Transpose());
        double[] w = (-0.5).Multiply(inv).Dot(d2.Subtract(d02));
        double[] z = w.First(U.Columns());

        // 8. Project the pre-image on the original basis using
        // x = U*z + sum(X,2)/nn;
        double[] x = (U.Dot(z)).Add(Matrix.Sum(X.Transpose(), 0).Multiply(1.0 / nn));

        // 9. Store the computed pre-image.
        for (int i = 0; i < reversion.Columns(); i++)
        {
            reversion[p][i] = x[i];
        }
    }

    // if the data has been standardized or centered,
    // we need to revert those operations as well
    if (this.Method == PrincipalComponentMethod.Standardize)
    {
        // multiply by standard deviation and add the mean
        reversion.Multiply(StandardDeviations, dimension: 0, result: reversion)
            .Add(Means, dimension: 0, result: reversion);
    }
    else if (this.Method == PrincipalComponentMethod.Center)
    {
        // only add the mean
        reversion.Add(Means, dimension: 0, result: reversion);
    }

    return (reversion);
}
// Scans all known package repositories, reads every package definition file
// found, resolves the dependency graph starting from the master package, and
// resolves duplicate package versions via command-line specifiers or master
// defaults before publishing the final set to the Graph singleton.
// NOTE(review): the access modifier / return type of this declaration was cut
// off at the chunk boundary — confirm against the full file.
IdentifyAllPackages(
    bool allowDuplicates = false,
    bool enforceBamAssemblyVersions = true)
{
    // Seed the work queue with the globally-known repositories, skipping
    // duplicates.
    var packageRepos = new System.Collections.Generic.Queue<string>();
    foreach (var repo in Graph.Instance.PackageRepositories)
    {
        if (packageRepos.Contains(repo))
        {
            continue;
        }
        packageRepos.Enqueue(repo);
    }

    // The master package's own repository list is appended as well.
    var masterDefinitionFile = GetMasterPackage(enforceBamAssemblyVersions: enforceBamAssemblyVersions);
    foreach (var repo in masterDefinitionFile.PackageRepositories)
    {
        if (packageRepos.Contains(repo))
        {
            continue;
        }
        packageRepos.Enqueue(repo);
    }

    // read the definition files of any package found in the package roots
    var candidatePackageDefinitions = new Array<PackageDefinition>();
    candidatePackageDefinitions.Add(masterDefinitionFile);
    while (packageRepos.Count > 0)
    {
        var repo = packageRepos.Dequeue();
        if (!System.IO.Directory.Exists(repo))
        {
            throw new Exception("Package repository directory {0} does not exist", repo);
        }

        // Every directory named BamSubFolder anywhere under the repository
        // marks a candidate package.
        var candidatePackageDirs = System.IO.Directory.GetDirectories(repo, BamSubFolder, System.IO.SearchOption.AllDirectories);

        Graph.Instance.PackageRepositories.Add(repo);

        foreach (var bamDir in candidatePackageDirs)
        {
            var packageDir = System.IO.Path.GetDirectoryName(bamDir);
            var packageDefinitionPath = GetPackageDefinitionPathname(packageDir);

            // ignore any duplicates (can be found due to nested repositories)
            if (null != candidatePackageDefinitions.Where(item => item.XMLFilename == packageDefinitionPath).FirstOrDefault())
            {
                continue;
            }

            var definitionFile = new PackageDefinition(packageDefinitionPath, !Graph.Instance.ForceDefinitionFileUpdate);
            definitionFile.Read(true, enforceBamAssemblyVersions);
            candidatePackageDefinitions.Add(definitionFile);

            // A definition file may reference further repositories; enqueue
            // any not already known so they get scanned too.
            foreach (var newRepo in definitionFile.PackageRepositories)
            {
                if (Graph.Instance.PackageRepositories.Contains(newRepo))
                {
                    continue;
                }
                packageRepos.Enqueue(newRepo);
            }
        }
    }

    // defaults come from
    // - the master definition file
    // - command line args (these trump the mdf)
    // and only requires resolving when referenced
    var packageDefinitions = new Array<PackageDefinition>();
    PackageDefinition.ResolveDependencies(masterDefinitionFile, packageDefinitions, candidatePackageDefinitions);

    // now resolve any duplicate names using defaults
    // unless duplicates are allowed
    var duplicatePackageNames = packageDefinitions.GroupBy(item => item.Name).Where(item => item.Count() > 1).Select(item => item.Key);
    if ((duplicatePackageNames.Count() > 0) && !allowDuplicates)
    {
        var versionSpeciferArgs = new Options.PackageDefaultVersion();
        var packageVersionSpecifiers = CommandLineProcessor.Evaluate(versionSpeciferArgs);
        var toRemove = new Array<PackageDefinition>();
        foreach (var dupName in duplicatePackageNames)
        {
            var duplicates = packageDefinitions.Where(item => item.Name == dupName);
            PackageDefinition resolvedDuplicate = null;

            // command line specifications take precedence to resolve a duplicate
            foreach (var specifier in packageVersionSpecifiers)
            {
                if (!specifier.Contains(dupName))
                {
                    continue;
                }
                foreach (var dupPackage in duplicates)
                {
                    if (specifier[1] == dupPackage.Version)
                    {
                        resolvedDuplicate = dupPackage;
                        break;
                    }
                }
                if (resolvedDuplicate != null)
                {
                    break;
                }

                // A specifier matched the package name but none of its
                // available versions: report them all.
                var noMatchMessage = new System.Text.StringBuilder();
                noMatchMessage.AppendFormat("Command line version specified, {0}, could not resolve to one of the available versions of package {1}:", specifier[1], duplicates.First().Name);
                noMatchMessage.AppendLine();
                foreach (var dup in duplicates)
                {
                    noMatchMessage.AppendFormat("\t{0}", dup.Version);
                    noMatchMessage.AppendLine();
                }
                throw new Exception(noMatchMessage.ToString());
            }
            if (resolvedDuplicate != null)
            {
                // Keep only the resolved version; drop the rest.
                toRemove.AddRange(packageDefinitions.Where(item => (item.Name == dupName) && (item != resolvedDuplicate)));
                continue;
            }

            // now look at the master dependency file, for any 'default' specifications
            var masterDependency = masterDefinitionFile.Dependents.Where(item => item.Item1 == dupName && item.Item3.HasValue && item.Item3.Value).FirstOrDefault();
            if (null != masterDependency)
            {
                toRemove.AddRange(packageDefinitions.Where(item => (item.Name == dupName) && (item.Version != masterDependency.Item2)));
                continue;
            }

            // No command-line nor master default could disambiguate: fail
            // with the list of available versions.
            var resolveErrorMessage = new System.Text.StringBuilder();
            resolveErrorMessage.AppendFormat("Unable to resolve to a single version of package {0}. Use --{0}.version=<version> to resolve. Available versions of the package are:", duplicates.First().Name);
            resolveErrorMessage.AppendLine();
            foreach (var dup in duplicates)
            {
                resolveErrorMessage.AppendFormat("\t{0}", dup.Version);
                resolveErrorMessage.AppendLine();
            }
            throw new Exception(resolveErrorMessage.ToString());
        }

        packageDefinitions.RemoveAll(toRemove);
    }

    Graph.Instance.SetPackageDefinitions(packageDefinitions);
}