/// <summary>
/// Evaluates the <see cref="SpatialDimension"/>-dimensional
/// coordinate of the point with parameter value t.
/// </summary>
/// <param name="t">Segment parameter in [-1, 1]; -1 yields Start, +1 yields End.</param>
/// <returns>The interpolated point coordinates.</returns>
public double[] GetPointOnSegment(double t) {
    Debug.Assert(t.Abs() <= 1.0, "Point out of range");
    int dim = SpatialDimension;
    double[] point = new double[dim];
    for (int i = 0; i < dim; i++) {
        // midpoint of the segment plus half the chord, scaled by t
        point[i] = 0.5 * ((End[i] - Start[i]) * t + Start[i] + End[i]);
    }
    return point;
}
/// <summary>
/// Maps absolute gradient start/end points inside <paramref name="targetSize"/>
/// to relative stop offsets, handling vertical, horizontal and diagonal axes.
/// </summary>
private static IEnumerable <GradientStop> ScaleGradientStops(IEnumerable <GradientStop> gradientStops, Point startPoint, Point endPoint, Size targetSize) {
    // Vertical gradient: offsets are relative Y positions within the target height.
    if (startPoint.X == endPoint.X) {
        return startPoint.Y < endPoint.Y
            ? ScaleGradientStops(gradientStops, startPoint.Y / targetSize.Height, endPoint.Y / targetSize.Height)
            : ScaleGradientStops(gradientStops, 1 - startPoint.Y / targetSize.Height, 1 - endPoint.Y / targetSize.Height);
    }

    // Horizontal gradient: offsets are relative X positions within the target width.
    if (startPoint.Y == endPoint.Y) {
        return startPoint.X < endPoint.X
            ? ScaleGradientStops(gradientStops, startPoint.X / targetSize.Width, endPoint.X / targetSize.Width)
            : ScaleGradientStops(gradientStops, 1 - startPoint.X / targetSize.Width, 1 - endPoint.X / targetSize.Width);
    }

    // Diagonal gradient: work in the coordinate system of the rotated gradient image.
    Point axis = endPoint - startPoint;
    double axisLength = axis.GetLength();
    double sin = axis.Y / axisLength;
    double cos = axis.X / axisLength;

    // Size of the axis-aligned image that covers the rotated target rectangle.
    double generatedImageWidth = cos.Abs() * targetSize.Width + sin.Abs() * targetSize.Height;
    double generatedImageHeight = sin.Abs() * targetSize.Width + cos.Abs() * targetSize.Height;

    // Unit square -> generated gradient image rectangle:
    // center the unit square at the origin, scale to the image size,
    // rotate by the gradient angle, then move to the target rectangle center.
    Matrix transform =
        Matrix.TranslationMatrix(-0.5, -0.5) *
        Matrix.ScalingMatrix(generatedImageWidth, generatedImageHeight) *
        new Matrix(cos, sin, -sin, cos, 0, 0) *
        Matrix.TranslationMatrix(targetSize.Width / 2, targetSize.Height / 2);

    Matrix inverse = transform.Inverse;
    Point relativeStart = startPoint * inverse;
    Point relativeEnd = endPoint * inverse;
    return ScaleGradientStops(gradientStops, relativeStart.X, relativeEnd.X);
}
/// <summary>
/// Advances the damped-spring animation of <c>percentOffset</c> one step
/// towards <paramref name="targetValue"/>.
/// </summary>
/// <param name="targetValue">Value the offset is attracted to.</param>
/// <returns>True while still animating; false once snapped to the target.</returns>
private bool Animate(double targetValue) {
    double remaining = targetValue - percentOffset;
    bool stillMoving = remaining.Abs() > MinimalValueDelta || velocity.Abs() > MinimalVelocityDelta;
    if (!stillMoving) {
        // Close enough: snap to the target and stop.
        percentOffset = targetValue;
        velocity = 0;
        return false;
    }

    // Damped spring step pulling the offset towards the target.
    velocity *= 1 - Dampening;
    velocity += remaining * AttractionFator;
    // Clamp the speed to the terminal velocity.
    if (velocity.Abs() > TerminalVelocity) {
        velocity *= TerminalVelocity / velocity.Abs();
    }
    percentOffset += velocity;
    return true;
}
// Verifies that Polygon.ScaleFromPoint scales extents by |scale| and moves the
// centroid to the expected position relative to the reference point.
public static void ScaleFromPoint_Scales_Shape(double scale, double expectedCentroid_x, double expectedCentroid_y) {
    // Bow-tie shape (traced as non-crossing segments), centered at (4, 3.5).
    var vertices = new List <CartesianCoordinate>() {
        new CartesianCoordinate(4, 3.5),
        new CartesianCoordinate(3, 5),
        new CartesianCoordinate(3, 2),
        new CartesianCoordinate(4, 3.5),
        new CartesianCoordinate(5, 2),
        new CartesianCoordinate(5, 5),
        new CartesianCoordinate(4, 3.5),
    };
    Polygon polygon = new Polygon(vertices);

    // Sanity-check the original shape position
    CartesianCoordinate originalCentroid = polygon.Centroid;
    Assert.AreEqual(4, originalCentroid.X);
    Assert.AreEqual(3.5, originalCentroid.Y);

    var referencePoint = new CartesianCoordinate(2, 1);
    Polygon polygonScaled = polygon.ScaleFromPoint(scale, referencePoint) as Polygon;

    // Check shape scale: original extents are 2 wide by 3 tall
    PointExtents extents = polygonScaled.Extents();
    Assert.AreEqual(2 * scale.Abs(), extents.Width);
    Assert.AreEqual(3 * scale.Abs(), extents.Height);

    // Check scaled centroid position
    CartesianCoordinate scaledCentroid = polygonScaled.Centroid;
    Assert.AreEqual(expectedCentroid_x, scaledCentroid.X);
    Assert.AreEqual(expectedCentroid_y, scaledCentroid.Y);
}
// Converts a corner-drag delta into model-space dx/dy, constrained to the
// model's target aspect ratio (the smaller delta drives the other axis).
private void GetCornerD(DragDeltaEventArgs e, out double dx, out double dy, double invert) {
    double multiplier = GetScaleMultiplier();
    dx = e.HorizontalChange * Viewbox.Scale.Width * multiplier;
    dy = e.VerticalChange * Viewbox.Scale.Height * multiplier;

    bool horizontalDominates = dx.Abs() > dy.Abs();
    if (horizontalDominates) {
        // Derive dx from dy so the aspect ratio TargetWidth:TargetHeight is kept.
        dx = invert.Sign() * dy * _model.TargetWidth / _model.TargetHeight;
    } else {
        // Derive dy from dx for the same reason.
        dy = invert.Sign() * dx * _model.TargetHeight / _model.TargetWidth;
    }
}
// Reset this spring to its initial state:
// re-reads stiffness and per-length mass from the element's family, re-binds
// the end particles from the element's nodes, then lumps stiffness and mass
// at the two end particles.
public void Reset() {
    double mass = 0;
    if (Element?.Geometry != null) {
        if (Element.Family != null) {
            Stiffness = Element.Family.GetAxialStiffness();
            // per-length mass = cross-section area * material density
            mass = Element.Family.GetArea();
            mass *= Element.Family.GetPrimaryMaterial()?.Density ?? 0;
        } else {
            Stiffness = 0;
        }
    }
    // NOTE(review): the guard above uses 'Element?.', but the statements below
    // dereference Element (and, for RestLength, both nodes) unconditionally —
    // a null Element or node would throw here. Confirm whether that can occur.
    if (Element.StartNode != null) {
        StartParticle = Element.StartNode.GetData<Particle>();
    }
    if (Element.EndNode != null) {
        EndParticle = Element.EndNode.GetData<Particle>();
    }
    RestLength = Element.EndNode.Position.DistanceTo(Element.StartNode.Position);
    mass *= RestLength; // total mass = per-length mass * rest length

    // Lump stiffness at nodes:
    Vector dir = EndParticle.Position - StartParticle.Position;
    double length = dir.Magnitude();
    dir /= length; // normalized direction between the two particles
    double extension = length - RestLength;
    double T = extension * Stiffness;   // axial force from current extension
    double Ke = Stiffness / RestLength; // elastic stiffness term
    double Kg = T.Abs() / length;       // geometric stiffness term
    double K = 0.5 * (Ke + Kg);
    Vector Kv = dir.Abs() * K; // Hmmm... do we need to worry about signs?
    StartParticle.LumpedK += Kv;
    EndParticle.LumpedK += Kv;
    // Lump mass at nodes:
    StartParticle.Mass += mass / 2;
    EndParticle.Mass += mass / 2;
}
// Computes gradient start/end points for the given angle (degrees):
// a direction vector stretched so its larger component spans [0, 1],
// shifted into the non-negative quadrant.
private static void GetPoints(double angle, out Point startPoint, out Point endPoint) {
    // degrees -> radians, then the unit direction vector
    double radians = Math.PI * angle / 180;
    double x = Math.Cos(radians);
    double y = Math.Sin(radians);

    // Stretch so the larger component reaches magnitude 1.
    double stretch = 1 / x.Abs().Max(y.Abs());
    x *= stretch;
    y *= stretch;

    // Shift so both points land at non-negative coordinates.
    Point shift = new Point(x.Min(0), y.Min(0));
    startPoint = -shift;
    endPoint = new Point(x, y) - shift;
}
/// <summary>
/// Performs one damped-spring animation step pulling
/// <paramref name="currentValue"/> towards <paramref name="targetValue"/>.
/// </summary>
/// <param name="currentValue">Current animated value.</param>
/// <param name="currentVelocity">Velocity from the previous step.</param>
/// <param name="targetValue">Value being animated towards.</param>
/// <param name="attractionFator">Spring attraction factor (&gt; 0).</param>
/// <param name="dampening">Velocity damping per step, in (0, 1).</param>
/// <param name="terminalVelocity">Maximum allowed speed (&gt; 0).</param>
/// <param name="minValueDelta">Distance below which the animation snaps to the target.</param>
/// <param name="minVelocityDelta">Speed below which the animation snaps to the target.</param>
/// <param name="newValue">Resulting value after this step.</param>
/// <param name="newVelocity">Resulting velocity after this step.</param>
/// <returns>True while still animating; false once snapped to the target.</returns>
public bool Animate(
    double currentValue, double currentVelocity, double targetValue,
    double attractionFator, double dampening,
    double terminalVelocity, double minValueDelta, double minVelocityDelta,
    out double newValue, out double newVelocity) {
    Debug.Assert(currentValue.IsValid());
    Debug.Assert(currentVelocity.IsValid());
    Debug.Assert(targetValue.IsValid());

    Debug.Assert(dampening.IsValid());
    Debug.Assert(dampening > 0 && dampening < 1);

    Debug.Assert(attractionFator.IsValid());
    Debug.Assert(attractionFator > 0);

    Debug.Assert(terminalVelocity.IsValid());
    Debug.Assert(terminalVelocity > 0);

    Debug.Assert(minValueDelta > 0);
    Debug.Assert(minVelocityDelta > 0);

    double diff = targetValue - currentValue;

    if (diff.Abs() > minValueDelta || currentVelocity.Abs() > minVelocityDelta) {
        newVelocity = currentVelocity * (1 - dampening);
        newVelocity += diff * attractionFator;

        // BUG FIX: clamp the *updated* velocity. The previous code compared and
        // scaled by currentVelocity, so a step could exceed terminalVelocity
        // (and a fast-but-legal step got scaled wrongly). The instance-based
        // Animate(double) in this file clamps the post-update velocity too.
        if (newVelocity.Abs() > terminalVelocity) {
            newVelocity *= terminalVelocity / newVelocity.Abs();
        }

        newValue = currentValue + newVelocity;
        return true;
    } else {
        // Close enough: snap to the target and stop.
        newValue = targetValue;
        newVelocity = 0;
        return false;
    }
}
/// <summary>
/// Constructs an Lch color from its Lab representation.
/// </summary>
/// <param name="Lab">Source Lab color.</param>
/// <param name="Observer">Observer angle; defaults to two degrees.</param>
/// <param name="Illuminant">Reference illuminant; defaults to <see cref="Illuminant.Default"/>.</param>
public Lch(Lab Lab, ObserverAngle Observer = ObserverAngle.Two, Illuminant Illuminant = Illuminant.Default)
    : this(0, 0, 0, Observer, Illuminant) {
    // Hue: angle of the (A, B) vector, mapped from radians to degrees;
    // non-positive angles are wrapped into the upper half of [0, 360].
    double hue = Math.Atan2(Lab.B, Lab.A);
    if (hue > 0d) {
        hue = (hue / Math.PI) * 180d;
    } else {
        hue = 360d - (hue.Abs() / Math.PI) * 180d;
    }

    // Lightness carries over; chroma is the Euclidean length of (A, B).
    L = Lab.L;
    C = Math.Sqrt(Math.Pow(Lab.A, 2d) + Math.Pow(Lab.B, 2d));
    H = hue;
}
// Builds (and caches) the frozen Pen used for stroking, or returns null when
// there is no visible stroke.
private Pen GetPen() {
    double strokeThickness = StrokeThickness;
    // No brush, or a NaN/zero thickness -> nothing to draw.
    if (Stroke == null || strokeThickness.IsNaNValue() || strokeThickness.IsEqualOrCloseToZero()) {
        return null;
    }

    // Reuse the cached pen when one has already been built.
    if (strokePen != null) {
        return strokePen;
    }

    strokePen = new Pen {
        Thickness = strokeThickness.Abs(),
        Brush = Stroke,
        StartLineCap = StrokeStartLineCap,
        EndLineCap = StrokeEndLineCap,
        DashCap = StrokeDashCap,
        LineJoin = StrokeLineJoin,
        MiterLimit = StrokeMiterLimit
    };

    if (StrokeDashArray != null || !StrokeDashOffset.IsEqualOrCloseToZero()) {
        strokePen.DashStyle = new DashStyle(StrokeDashArray, StrokeDashOffset);
    }

    // Freeze the pen so it can be reused safely and efficiently.
    strokePen.Freeze();
    return strokePen;
}
// Distance for a given height and angle (degrees); angles of 90 or more
// are scaled linearly, smaller angles use the sine relation.
static double DistanceByHeightAndAngle(double heigth, double angle) {
    double a = angle.Abs();
    if (a >= 90) {
        return heigth * a / 90;
    }
    return heigth / Math.Sin(a.Radians());
}
// Rounds d so that roughly 'digits' significant digits remain: the digits
// already used before the decimal point are subtracted from the budget.
public static double AutoRound2(this double d, int digits) {
    // Number of digits in front of the decimal point (order of magnitude + 1).
    var integerDigits = Math.Log10(d.Abs()).Floor() + 1;
    // Spend the remaining budget on decimal places, never going negative.
    return d.Round((digits - integerDigits).Max(0));
}
// Threshold check on |value|:
// non-negative treshold -> magnitude must reach it;
// negative treshold     -> magnitude must stay below its absolute value.
static bool IsTresholdAbsOk(double value, double treshold) {
    if (treshold >= 0) {
        return value.Abs() >= treshold;
    }
    return value.Abs() < -treshold;
}
/// <summary>
/// Usually used after <see cref="MergeLogically(GridCommons, GridCommons)"/>; this method finds element boundaries
/// which intersect geometrically, but not logically and inserts a <see cref="CellFaceTag"/> which connects those cells.
/// </summary>
/// <param name="g">Input grid; the reference is nulled locally, a sealed clone is returned.</param>
/// <param name="upsampling">Resolution of the brute-force quadrature rule used to sample edges for geometric overlap.</param>
/// <returns>A clone of the input grid with additional <see cref="CellFaceTag"/>s at sealed boundaries.</returns>
public static GridCommons Seal(GridCommons g, int upsampling = 4) {
    GridCommons R = g.CloneAs();
    g = null;
    GridData gdat = new GridData(R);
    int D = gdat.SpatialDimension;
    int J = gdat.Cells.NoOfLocalUpdatedCells;
    if (R.CellPartitioning.MpiSize > 1) {
        throw new NotSupportedException("Not supported in MPI-parallel mode.");
    }
    //NodeSet[] TestNodes = gdat.Edges.EdgeRefElements.Select(KrefEdge => KrefEdge.GetSubdivisionTree(upsampling).GlobalVertice).ToArray();
    NodeSet[] TestNodes = gdat.Edges.EdgeRefElements.Select(KrefEdge => KrefEdge.GetBruteForceQuadRule(upsampling, 1).Nodes).ToArray();
    // Its better to use vertices in the interior of the element; if we use vertices at the corners, we might get
    // intersection of edges that just share one point.

    // Define all edges that will be tested (set to boundary edges)
    // ============================================================
    int[] UnknownEdges = gdat.BoundaryEdges.ItemEnum.ToArray();
    int L = UnknownEdges.Sum(iEdg => TestNodes[gdat.Edges.GetRefElementIndex(iEdg)].NoOfNodes);

    // Transform nodes on edges (that should be tested) to global coordinates
    // ======================================================================
    MultidimensionalArray TestNodesGlobal = MultidimensionalArray.Create(L, D);
    MultidimensionalArray NormalsGlobal = MultidimensionalArray.Create(L, D);
    int[] NodeToEdge = new int[L]; // pointer l -> Edge index, where l is the first index into 'TestNodesGlobal' & 'NormalsGlobal'

    int[,] E2C = gdat.Edges.CellIndices;

    int cnt = 0;
    foreach (int iEdg in UnknownEdges) {
        int iKref = gdat.Edges.GetRefElementIndex(iEdg);
        NodeSet Ns = TestNodes[iKref];
        int K = Ns.NoOfNodes;

        int[] I0 = new int[] { cnt, 0 };
        int[] IE = new int[] { cnt + K - 1, D - 1 };

        // global coordinates of the test nodes on this edge
        MultidimensionalArray TN = gdat.GlobalNodes.GetValue_EdgeSV(Ns, iEdg, 1);
        TestNodesGlobal.SetSubArray(TN.ExtractSubArrayShallow(0, -1, -1), I0, IE);

        // outward normals at the test nodes
        MultidimensionalArray N1 = gdat.Edges.NormalsCache.GetNormals_Edge(Ns, iEdg, 1);
        NormalsGlobal.SetSubArray(N1.ExtractSubArrayShallow(0, -1, -1), I0, IE);

        for (int i = cnt; i < cnt + K; i++) {
            NodeToEdge[i] = iEdg;
        }

        cnt += K;
    }

    // binary tree to speed up point localization
    int[] pl_Permutation = new int[L];
    PointLocalization pl = new PointLocalization(TestNodesGlobal, 0.01, pl_Permutation);
    Debug.Assert(!object.ReferenceEquals(pl.Points, TestNodesGlobal));

    // compare search edges to all other nodes
    // =======================================

    // mapping: cell --> Neighbour cell index, face index
    // 1st index: Cell index;
    // 2nd index: enumeration
    List<Tuple<int, int>>[] FoundPairings = new List<Tuple<int, int>>[gdat.Cells.NoOfCells];

    int[][] C2E = gdat.Cells.Cells2Edges;
    byte[,] E2F = gdat.Edges.FaceIndices;

    int cnt2 = 0;
    for (int iEdgC = 0; iEdgC < UnknownEdges.Length; iEdgC++) // loop over edges that may get sealed
    {
        int iEdg = UnknownEdges[iEdgC];
        int iKref = gdat.Edges.GetRefElementIndex(iEdg);
        NodeSet Ns = TestNodes[iKref];
        int K = Ns.NoOfNodes;

        int jCell1 = E2C[iEdg, 0];
        Debug.Assert(E2C[iEdg, 1] < 0);
        int iFace1 = E2F[iEdg, 0];
        Debug.Assert(E2F[iEdg, 1] == byte.MaxValue);

        int[] I0 = new int[] { cnt2, 0 };
        int[] IE = new int[] { cnt2 + K - 1, D - 1 };
        MultidimensionalArray TN = TestNodesGlobal.ExtractSubArrayShallow(I0, IE);
        //MultidimensionalArray N1 = NormalsGlobal.ExtractSubArrayShallow(I0, IE);

        // find bounding box for edge
        BoundingBox bbEdge = new BoundingBox(TN);
        if (bbEdge.h_min / bbEdge.h_max < 1.0e-5) {
            // very thin bounding box, thicken slightly
            double delta = bbEdge.h_max * 1.0e-5;
            for (int d = 0; d < D; d++) {
                bbEdge.Min[d] -= delta;
                bbEdge.Max[d] += delta;
            }
        }
        bbEdge.ExtendByFactor(0.01);

        // determine binary code for bounding box
        int bbEdgeSigBits;
        GeomBinTreeBranchCode bbEdgeCode = GeomBinTreeBranchCode.Combine(
            GeomBinTreeBranchCode.CreateFormPoint(pl.PointsBB, bbEdge.Min),
            GeomBinTreeBranchCode.CreateFormPoint(pl.PointsBB, bbEdge.Max),
            out bbEdgeSigBits);

        // determine all points in bounding box
        int iP0, Len;
        pl.GetPointsInBranch(bbEdgeCode, bbEdgeSigBits, out iP0, out Len);

        // determine all edged which potentially overlap with edge 'iEdg'
        HashSet<int> PotOvrlap = new HashSet<int>(); // a set of edge indices
        for (int n = 0; n < Len; n++) {
            int l = iP0 + n;
            int iPt = pl_Permutation[l];
            Debug.Assert(GenericBlas.L2DistPow2(pl.Points.GetRow(l), TestNodesGlobal.GetRow(iPt)) <= 0);

            int iOvlpEdge = NodeToEdge[iPt];
            if (iOvlpEdge != iEdg) {
                PotOvrlap.Add(iOvlpEdge);
            }
        }
        //int[] PotOvrlap = UnknownEdges.CloneAs();

        // determine actually overlapping boundary edges:
        foreach (int iOvrlapEdge in PotOvrlap) {
            int jCell2 = E2C[iOvrlapEdge, 0];
            Debug.Assert(E2C[iOvrlapEdge, 1] < 0);
            if (jCell2 == jCell1) {
                continue;
            }
            int iFace2 = E2F[iOvrlapEdge, 0];
            Debug.Assert(E2F[iOvrlapEdge, 1] == byte.MaxValue);

            // skip pairings that are already recorded
            int AllreadyFound = FoundPairings[jCell1] == null ?
                0 : FoundPairings[jCell1].Where(tp => tp.Item1 == jCell2 && tp.Item2 == iFace1).Count();
            if (AllreadyFound > 1) {
                throw new ApplicationException("Error in algorithmus.");
            }
            if (AllreadyFound > 0) {
                continue;
            }

            var Kref_j2 = gdat.Cells.GetRefElement(jCell2);
            double h = Kref_j2.GetMaxDiameter();

            // transform this edge's test nodes into the local coordinates of cell jCell2
            MultidimensionalArray LocVtx_j2 = MultidimensionalArray.Create(K, D);
            bool[] NewtonConvervence = new bool[K];
            gdat.TransformGlobal2Local(TN, LocVtx_j2, jCell2, NewtonConvervence);

            for (int k = 0; k < K; k++) // loop over all transformed points
            {
                if (!NewtonConvervence[k]) {
                    continue;
                }

                double[] pt = LocVtx_j2.GetRow(k);
                double[] ptClose = new double[D];
                double dist = Kref_j2.ClosestPoint(pt, ptClose);

                // point must lie (almost) on the reference element of jCell2 ...
                if (dist > h * 1.0e-8) {
                    continue;
                }

                // ... and (almost) on the candidate face plane
                AffineManifold Face = Kref_j2.GetFacePlane(iFace2);
                double FaceDist = Face.PointDistance(pt);
                if (FaceDist.Abs() > 1.0e-8 * h) {
                    continue;
                }

                NodeSet Ns2 = new NodeSet(Kref_j2, pt);
                MultidimensionalArray Normals2 = MultidimensionalArray.Create(1, D);
                gdat.Edges.GetNormalsForCell(Ns2, jCell2, iFace2, Normals2);

                double[] N1d = NormalsGlobal.GetRow(cnt2 + k);
                double[] N2d = Normals2.GetRow(0);

                //Check if face normals points exactly in the opposite direction, 2 ways:
                // 1) calculate angle between both normals -> bad choice because of Math.Acos
                // 2) inner product of two opposite vectors is -1
                //if (Math.Abs(Math.Abs(Math.Acos(GenericBlas.InnerProd(N1d, N2d))) - Math.PI) > 1.0e-8)
                if (Math.Abs(GenericBlas.InnerProd(N1d, N2d) + 1.0) > 1.0e-8) {
                    continue;
                }

                // if we reach this point, jCell1 should match with jCell2/iFace2
                if (FoundPairings[jCell1] == null) {
                    FoundPairings[jCell1] = new List<Tuple<int, int>>();
                }
                if (FoundPairings[jCell2] == null) {
                    FoundPairings[jCell2] = new List<Tuple<int, int>>();
                }
                FoundPairings[jCell1].Add(new Tuple<int, int>(jCell2, iFace1));
                FoundPairings[jCell2].Add(new Tuple<int, int>(jCell1, iFace2));
                break; // no need to test jCell1 vs. jCell2 anymore
            }
        }

        cnt2 += K;
    }

    // add the newly found pairings to the grid
    for (int j = 0; j < J; j++) {
        var fp = FoundPairings[j];
        if (fp != null) {
            foreach (var t in fp) {
                ArrayTools.AddToArray(new CellFaceTag() {
                    EdgeTag = 0,
                    FaceIndex = t.Item2,
                    ConformalNeighborship = false,
                    NeighCell_GlobalID = gdat.CurrentGlobalIdPermutation.Values[t.Item1]
                }, ref R.Cells[j].CellFaceTags);
            }
        }
    }

    return (R);
}
// Checks the Abs() extension against the expected magnitude.
public void DoAbs(double value, double expectedValue) {
    var actual = value.Abs();
    Assert.Equal(expectedValue, actual);
}
// Computes the next step size of an iteration sequence: one percent of count,
// amplified by count^|power| and halved once per loop, then signed.
public static int IteratonSequenceNextStep(int count, double power = 1, int loop = 0) {
    double step = count / 100.0 * Math.Pow(count, power.Abs()) / Math.Pow(2, loop.Abs());
    // Ceiling, then re-apply the sign of power and the (zero-aware) sign of loop.
    return step.Ceiling() * power.Sign() * ((double)loop).SignUp();
}
// Forwards to the Abs() extension on double.
public static double Abs_Double(double value) => value.Abs();
// Geometric local solve for cell 'jCell': evaluates the level-set Phi on the
// edges towards all accepted neighbours, then projects a distance-like value
// (closest accepted edge-node distance plus its Phi value, signed by '_sign')
// onto the geometric level-set basis of the cell.
// 'Min'/'Max' return the extremal Phi values seen on accepted neighbour edges,
// widened by the cell diameter on the side given by '_sign'.
public bool LocalSolve(int jCell, BitArray AcceptedMask, SinglePhaseField Phi, double _sign, out double Min, out double Max) {
    Debug.Assert(_sign.Abs() == 1);

    // find all accepted neighbors
    // ===========================
    var NeighCells = this.GridDat.GetCellNeighboursViaEdges(jCell);

    int iKref = this.GridDat.Cells.GetRefElementIndex(jCell);

    int NN = NeighCells.Length;
    var NeighCellsK = new Tuple<int, int, int>[NN];
    int NNK = 0;
    for (int nn = 0; nn < NN; nn++) {
        if (AcceptedMask[NeighCells[nn].Item1] == true) {
            NeighCellsK[NNK] = NeighCells[nn];
            NNK++;
        }
    }

    // evaluate accepted neighbors
    // ============================

    Min = double.MaxValue;
    Max = double.MinValue;

    var TrafoIdx = this.GridDat.Edges.Edge2CellTrafoIndex;
    int K = this.PhiEvalBuffer[0].GetLength(1); // Nodes per edge

    for (int nnk = 0; nnk < NNK; nnk++) // loop over accepted neighbours
    {
        int jNC = NeighCellsK[nnk].Item1;
        int iEdg = NeighCellsK[nnk].Item2;
        int InOrOut = NeighCellsK[nnk].Item3;
        int iTrafo = TrafoIdx[iEdg, InOrOut];

        // Phi on the shared edge, and global coordinates of the edge nodes
        this.PhiEvalBuffer[nnk].Clear();
        Phi.Evaluate(jNC, 1, this.EdgeNodes.GetVolumeNodeSet(this.GridDat, iTrafo), this.PhiEvalBuffer[nnk]);

        this.GridDat.TransformLocal2Global(this.EdgeNodes.GetVolumeNodeSet(this.GridDat, iTrafo), this.CellNodesGlobal[nnk], jNC);

        Max = Math.Max(Max, PhiEvalBuffer[nnk].Max());
        Min = Math.Min(Min, PhiEvalBuffer[nnk].Min());
    }

    var _QuadNodesGlobal = this.QuadNodesGlobal[iKref];
    this.GridDat.TransformLocal2Global(this.DaRuleS[iKref].Nodes, _QuadNodesGlobal, jCell);

    // widen the bounds by the cell diameter on the side determined by '_sign'
    if (_sign > 0) {
        Max += this.GridDat.Cells.h_max[jCell];
    } else {
        Min -= this.GridDat.Cells.h_max[jCell];
    }

    // perform projection of geometric reinit
    // ======================================

    // temp storage
    double[] Y = new double[2];
    double[] X = new double[2];
    double[] X1 = new double[2];
    double[] X2 = new double[2];

    // basis values at cell quadrature nodes
    var BasisValues = this.LevelSetBasis_Geometric.CellEval(this.DaRuleS[iKref].Nodes, jCell, 1).ExtractSubArrayShallow(0, -1, -1);
    int NoOfQn = BasisValues.GetLength(0); // number of quadrature nodes

    // result at quadrature nodes
    var PhiAtQuadNodes = MultidimensionalArray.Create(NoOfQn);

    for (int iQn = NoOfQn - 1; iQn >= 0; iQn--) // loop over all quadrature nodes
    {
        Y[0] = _QuadNodesGlobal[iQn, 0];
        Y[1] = _QuadNodesGlobal[iQn, 1];

        // closest & second-closest accepted edge node over all neighbours
        double _dist_min1 = double.MaxValue, _phi_min1 = double.NaN;
        int _nnk_Min1 = int.MinValue, _k_min1 = int.MinValue;
        double _dist_min2 = double.MaxValue, _phi_min2 = double.NaN;
        int _nnk_Min2 = int.MinValue, _k_min2 = int.MinValue;

        // find closest point: brute force approach
        for (int nnk = 0; nnk < NNK; nnk++) // loop over all edges with known values
        {
            // per-edge closest & second-closest node
            double dist_min1 = double.MaxValue, phi_min1 = double.NaN;
            int nnk_Min1 = int.MinValue, k_min1 = int.MinValue;
            double dist_min2 = double.MaxValue, phi_min2 = double.NaN;
            int nnk_Min2 = int.MinValue, k_min2 = int.MinValue;

            for (int k = 0; k < K; k++) // loop over all nodes on this edge
            {
                X[0] = this.CellNodesGlobal[nnk][k, 0];
                X[1] = this.CellNodesGlobal[nnk][k, 1];
                double phi = this.PhiEvalBuffer[nnk][0, k];
                phi *= _sign; // work with the positive-side distance
                double dist = GenericBlas.L2Dist(X, Y) + phi;

                bool NoBlock = true;
                if (dist < dist_min1) {
                    // new closest node; previous closest becomes second-closest
                    nnk_Min2 = nnk_Min1;
                    k_min2 = k_min1;
                    dist_min2 = dist_min1;
                    phi_min2 = phi_min1;

                    dist_min1 = dist;
                    nnk_Min1 = nnk;
                    k_min1 = k;
                    phi_min1 = phi;
                    NoBlock = false;
                }

                if (dist >= dist_min1 && dist < dist_min2 && NoBlock) {
                    dist_min2 = dist;
                    nnk_Min2 = nnk;
                    k_min2 = k;
                    phi_min2 = phi;
                }
            }

            if (dist_min1 < _dist_min1) {
                // this edge holds the global closest node so far; take over both candidates
                _dist_min1 = dist_min1;
                _k_min1 = k_min1;
                _nnk_Min1 = nnk_Min1;
                _phi_min1 = phi_min1;

                _dist_min2 = dist_min2;
                _k_min2 = k_min2;
                _nnk_Min2 = nnk_Min2;
                _phi_min2 = phi_min2;
            }
        }

        {
            Debug.Assert(_nnk_Min1 == _nnk_Min2);
            Debug.Assert(_k_min1 != _k_min2);

            double PhiMin1 = this.PhiEvalBuffer[_nnk_Min1][0, _k_min1];
            double PhiMin2 = this.PhiEvalBuffer[_nnk_Min2][0, _k_min2];

            X1[0] = this.CellNodesGlobal[_nnk_Min1][_k_min1, 0];
            X1[1] = this.CellNodesGlobal[_nnk_Min1][_k_min1, 1];

            X2[0] = this.CellNodesGlobal[_nnk_Min2][_k_min2, 0];
            X2[1] = this.CellNodesGlobal[_nnk_Min2][_k_min2, 1];
        }

        // restore the sign and pre-multiply with the quadrature weight
        _dist_min1 *= _sign;

        PhiAtQuadNodes[iQn] = _dist_min1 * this.DaRuleS[iKref].Weights[iQn];
    }

    // finalize projection & return
    // ============================

    if (this.GridDat.Cells.IsCellAffineLinear(jCell)) {
        int N = this.LevelSetBasis_Geometric.GetLength(jCell);
        int N2 = Phi.Basis.GetLength(jCell);

        MultidimensionalArray Phi_1 = MultidimensionalArray.Create(N);
        double scale = this.GridDat.Cells.JacobiDet[jCell];
        Phi_1.Multiply(scale, BasisValues, PhiAtQuadNodes, 0.0, "m", "km", "k");
        for (int n = 0; n < N; n++) {
            Phi.Coordinates[jCell, n] = Phi_1[n];
        }
        // zero out the higher modes of Phi's (possibly richer) basis
        for (int n = N; n < N2; n++) {
            Phi.Coordinates[jCell, n] = 0;
        }
    } else {
        throw new NotImplementedException("not implemented for curved cells");
    }

    return (true);
}
/// <summary>
/// Quadrangulate this mesh by merging adjacent tris into quads.
/// Candidate pairs are ranked by the 'squareness' of the quad that merging
/// them would produce; the most square results are merged first.
/// </summary>
public void Quadrangulate() {
    // Candidate pairs of adjacent tris, keyed by resultant face 'squareness'
    // (SortedList enumerates keys in ascending order, i.e. most square first).
    var sortedPairs = new SortedList <double, Pair <MeshFace, MeshFace> >(Count);

    // Find adjacent pairs of tris and sort by resultant face 'squareness'
    for (int i = 0; i < Count - 1; i++) {
        MeshFace faceA = this[i];
        if (faceA.IsTri) {
            for (int j = i + 1; j < Count; j++) {
                MeshFace faceB = this[j];
                if (faceB.IsTri) {
                    double squareness = faceA.SharedEdgeSquareness(faceB);
                    // NaN means no shared edge; the 0.8 cut-off rejects pairs
                    // that would produce an overly skewed quad.
                    if (!squareness.IsNaN() && squareness.Abs() < 0.8) {
                        sortedPairs.AddSafe(squareness, Pair.Create(faceA, faceB));
                    }
                }
            }
        }
    }

    // Walk the pairs from most to least square and join those whose faces
    // have not already been consumed by an earlier merge:
    // (FIX: the old comment said "Reverse through pairs" although the loop
    // runs forward — forward is correct for ascending squareness keys.)
    for (int i = 0; i < sortedPairs.Count; i++) {
        var pair = sortedPairs.Values[i];
        if (Contains(pair.First.GUID) && Contains(pair.Second.GUID)) {
            Remove(pair.First);
            Remove(pair.Second);
            Add(pair.First.MergeWith(pair.Second));
        }
    }
    // (Deprecated v0.1/v0.2 implementations removed; see version control history.)
}
// Returns the magnitude of val via the Abs() extension.
public static double AbsoluteValue(double val) => val.Abs();
// Clusters the given normals by direction: normals whose (possibly negated)
// average directions agree within 'delta' are merged. Uses a union-find
// structure (m_indexArray with path compression) over kd-tree neighbourhood
// queries; cluster sizes are kept in 'sa' and direction sums in 'SumArray'.
public NormalsClustering(V3d[] normalArray, double delta) {
    var count = normalArray.Length;
    Alloc(count);
    var ca = m_indexArray;     // union-find parent links
    var sa = new int[count].Set(1); // cluster sizes, all singletons initially
    var suma = SumArray;       // per-cluster sum of member normals

    var kdTree = normalArray.CreateRkdTreeDistDotProduct(0);
    var query = kdTree.CreateClosestToPointQuery(delta, 0);

    for (int i = 0; i < count; i++) {
        // find the representative of i's cluster (with path compression)
        int ci = ca[i];
        if (ca[ci] != ci) {
            do {
                ci = ca[ci];
            } while (ca[ci] != ci);
            ca[i] = ci;
        }
        int si = sa[ci];

        V3d avgNormali = suma[ci].Normalized;
        // query both orientations: a normal and its negation count as aligned
        kdTree.GetClosest(query, avgNormali);
        kdTree.GetClosest(query, avgNormali.Negated);
        foreach (var id in query.List) {
            int j = (int)id.Index;
            // find the representative of j's cluster (with path compression)
            int cj = ca[j];
            if (ca[cj] != cj) {
                do {
                    cj = ca[cj];
                } while (ca[cj] != cj);
                ca[j] = cj;
            }
            if (ci == cj) {
                continue; // already in the same cluster
            }
            int sj = sa[cj];

            V3d avgNormalj = suma[cj].Normalized;
            double avgDot = avgNormali.Dot(avgNormalj);
            // reject merges whose average directions differ too much
            if (avgDot.Abs() < 1.0 - 2.0 * delta) {
                continue;
            }
            // merge sums, flipping the other cluster if it points the other way
            V3d sum = suma[ci] + (avgDot > 0 ? suma[cj] : suma[cj].Negated);

            // union by size: attach the smaller cluster to the larger one
            if (si < sj) {
                ca[ci] = cj;
                ca[i] = cj;
                ci = cj;
            } else {
                ca[cj] = ci;
                ca[j] = ci;
            }
            si += sj;
            sa[ci] = si;
            suma[ci] = sum;
        }
        query.Clear();
    }
    Init();
}
// Corrects the mass of the phasefield 'phi' towards the reference mass from
// 'Qnts_old': a secant/Newton update of a constant shift 'correction' applied
// in the signed-distance representation, combined with a backtracking
// line-search on the step size. Gives up (goto Failed) once the step becomes
// numerically zero.
private void MassCorrection() {
    double[] Qnts_old = ComputeBenchmarkQuantities();

    CorrectionLevSet.Clear();
    CorrectionLevSet.Acc(1.0, phi);
    this.CorrectionLsTrk.UpdateTracker(0.0);

    double[] Qnts = ComputeBenchmarkQuantities();

    double massDiff = Qnts_old[0] - Qnts[0];

    // we assume the current phasefield is close to the equilibrium tangenshyperbolicus form
    SinglePhaseField phiNew = new SinglePhaseField(phi.Basis);
    GridData GridDat = (GridData)(phi.GridDat);
    double mass_uc = Qnts[0];

    int i = 0;
    while (massDiff.Abs() > 1e-6) {
        // calculated for a cone, one could include the shape e.g. by using the circularity
        // correction guess
        double correction = Math.Sign(massDiff) * 1e-10;//-Qnts[1] / (4 * Qnts[0] * Math.PI) * massDiff * 1e-5;

        // take the correction guess and calculate a forward difference to approximate the derivative
        phiNew.ProjectField(
            (ScalarFunctionEx) delegate(int j0, int Len, NodeSet NS, MultidimensionalArray result) { // ScalarFunction2
                Debug.Assert(result.Dimension == 2);
                Debug.Assert(Len == result.GetLength(0));
                int K = result.GetLength(1); // number of nodes

                // evaluate Phi
                // -----------------------------
                phi.Evaluate(j0, Len, NS, result);

                // compute the pointwise values of the new level set
                // -----------------------------

                // invert tanh to a signed-distance field, then re-apply tanh with the shift
                result.ApplyAll(x => 0.5 * Math.Log(Math.Max(1 + x, 1e-10) / Math.Max(1 - x, 1e-10)) * Math.Sqrt(2) * this.Control.cahn);
                result.ApplyAll(x => Math.Tanh((x + correction) / (Math.Sqrt(2) * this.Control.cahn)));
            }
        );

        // update LsTracker
        CorrectionLevSet.Clear();
        CorrectionLevSet.Acc(1.0, phiNew);
        this.CorrectionLsTrk.UpdateTracker(0.0);
        Qnts = ComputeBenchmarkQuantities();

        // secant update: scale the guess by massDiff / (finite-difference derivative)
        correction = -(massDiff) / ((Qnts_old[0] - Qnts[0] - massDiff) / (correction));

        double initial = massDiff;
        bool finished = false;
        int k = 0;

        //while (massDiff.Abs() - initial.Abs() >= 0.0 && step > 1e-12)
        while (!finished) {
            // backtracking line-search: halve the step until the mass error shrinks
            double step = Math.Pow(0.5, k);
            // compute and project
            // step one calculate distance field phiDist = 0.5 * log(Max(1+c, eps)/Max(1-c, eps)) * sqrt(2) * Cahn
            // step two project the new phasefield phiNew = tanh((cDist + correction)/(sqrt(2) * Cahn))
            // ===================
            phiNew.ProjectField(
                (ScalarFunctionEx) delegate(int j0, int Len, NodeSet NS, MultidimensionalArray result) { // ScalarFunction2
                    Debug.Assert(result.Dimension == 2);
                    Debug.Assert(Len == result.GetLength(0));
                    int K = result.GetLength(1); // number of nodes

                    // evaluate Phi
                    // -----------------------------
                    phi.Evaluate(j0, Len, NS, result);

                    // compute the pointwise values of the new level set
                    // -----------------------------
                    result.ApplyAll(x => 0.5 * Math.Log(Math.Max(1 + x, 1e-10) / Math.Max(1 - x, 1e-10)) * Math.Sqrt(2) * this.Control.cahn);
                    result.ApplyAll(x => Math.Tanh((x + correction * step) / (Math.Sqrt(2) * this.Control.cahn)));
                }
            );

            // update LsTracker
            CorrectionLevSet.Clear();
            CorrectionLevSet.Acc(1.0, phiNew);
            this.CorrectionLsTrk.UpdateTracker(0.0);
            Qnts = ComputeBenchmarkQuantities();
            massDiff = Qnts_old[0] - Qnts[0];
            if (massDiff.Abs() < (1 - 1e-4 * step) * initial.Abs()) {
                // sufficient decrease: accept phiNew as the new phasefield
                finished = true;

                // update field
                phi.Clear();
                phi.Acc(1.0, phiNew);

                // update LsTracker
                CorrectionLevSet.Clear();
                CorrectionLevSet.Acc(1.0, phi);
                this.CorrectionLsTrk.UpdateTracker(0.0);
                Console.WriteLine($"" +
                    $"converged with stepsize: {step}, correction: {correction}\n" +
                    $" dM: {massDiff}");
                if (k > 0) {
                    Console.WriteLine($"Finished Linesearch in {k} iterations");
                }
            } else if (Math.Abs(correction * step) < 1e-15) {
                // step has become numerically zero: give up and restore phi's tracker state
                // reset LsTracker
                CorrectionLevSet.Clear();
                CorrectionLevSet.Acc(1.0, phi);
                this.CorrectionLsTrk.UpdateTracker(0.0);
                Qnts = ComputeBenchmarkQuantities();
                massDiff = Qnts_old[0] - Qnts[0];
                Console.WriteLine($" Linesearch failed after {k} iterations");
                goto Failed;
            } else {
                k++;
            }
        }
        i++;
    }
Failed:
    Console.WriteLine($"Performed Mass Correction in {i} iteratins: \n" +
        $"\told mass: {Qnts_old[0]:N4}\n" +
        $"\tuncorrected mass: {mass_uc:N4}\n" +
        $"\tcorrected mass: {Qnts[0]:N4}");
}
/// <summary>
/// Solution of the extension problem on a single cell in the far-region
/// </summary>
/// <param name="Phi">Input;</param>
/// <param name="GradPhi">Input;</param>
/// <param name="ExtProperty">Input/Output: the property field being extended; the coordinates of cell <paramref name="jCell"/> are overwritten.</param>
/// <param name="ExtPropertyMin">Input/Output: lower threshold.</param>
/// <param name="ExtPropertyMax">Input/Output: upper threshold.</param>
/// <param name="jCell">Cell on which the local extension problem is solved.</param>
/// <param name="Accepted">Mask of cells with already-accepted values.</param>
/// <param name="signMod">Sign of the level-set region (+1 or -1).</param>
public void ExtVelSolve_Far(SinglePhaseField Phi, VectorField <SinglePhaseField> GradPhi, ConventionalDGField ExtProperty, ref double ExtPropertyMin, ref double ExtPropertyMax, int jCell, CellMask Accepted, double signMod) {
    GridData gDat = (GridData)(Phi.GridDat);

    Debug.Assert(signMod.Abs() == 1.0);
    Debug.Assert(ExtPropertyMin <= ExtPropertyMax);

    // define cell- and edge-mask for re-compute
    // =========================================

    CellMask cM = new CellMask(gDat, Chunk.GetSingleElementChunk(jCell));

    int[] Edges = gDat.iLogicalCells.Cells2Edges[jCell].CloneAs();
    for (int i = 0; i < Edges.Length; i++) {
        // Cells2Edges stores signed one-based indices; recover the edge index
        Edges[i] = Math.Abs(Edges[i]) - 1;
    }
    EdgeMask eM = new EdgeMask(gDat, FromIndEnum(Edges, gDat.iLogicalEdges.Count)); // won't scale.

    // solve the linear extension problem for 'jCell', eventually increase
    // diffusion until we are satisfied with the solution
    // ===================================================================

    bool MaximumPrincipleFulfilled = false;
    double DiffusionCoeff = 0; // initially: try without diffusion
    double mini = double.NaN, maxi = double.NaN;
    int count = 0;
    while (MaximumPrincipleFulfilled == false) // until we are satisfied with the solution
    {
        count++;

        // compute operator in 'jCell'
        // ---------------------------

        int N = ExtProperty.Basis.GetLength(jCell);
        int i0G = ExtProperty.Mapping.GlobalUniqueCoordinateIndex(0, jCell, 0);
        int i0L = ExtProperty.Mapping.GlobalUniqueCoordinateIndex(0, jCell, 0);

        // clear this cell's rows of the cached matrix/affine vector before re-assembly
        for (int n = 0; n < N; n++) {
            this.m_ExtvelMatrix.ClearRow(i0G + n);
            this.m_ExtvelAffine[i0L + n] = 0;
        }

        double penaltyBase = ((double)(ExtProperty.Basis.Degree + 1)).Pow2();
        var Flux = new ExtensionVelocityForm(Accepted.GetBitMask(), signMod, penaltyBase, gDat.Cells.h_min, jCell, DiffusionCoeff);
        var op = (Flux).Operator(DegreeOfNonlinearity: 2);

        // increase diffusion coefficient for next cycle
        if (DiffusionCoeff == 0) {
            DiffusionCoeff = 1.0e-3; // should this be minus or plus?
        } else {
            DiffusionCoeff *= 10;
        }

        op.ComputeMatrixEx(ExtProperty.Mapping,
            ArrayTools.Cat <DGField>(new DGField[] { ExtProperty, Phi }, GradPhi),
            ExtProperty.Mapping,
            this.m_ExtvelMatrix, this.m_ExtvelAffine,
            OnlyAffine: false,
            volQuadScheme: (new CellQuadratureScheme(true, cM)),
            edgeQuadScheme: (new EdgeQuadratureScheme(true, eM)),
            ParameterMPIExchange: false);

        // extract operator matrix and RHS
        // -------------------------------

        // the matrix must only have entries in the block-diagonal!

        MultidimensionalArray Mtx = MultidimensionalArray.Create(N, N);
        //MultidimensionalArray rhs = MultidimensionalArray.Create(N);
        double[] rhs = new double[N];

        for (int n = 0; n < N; n++) {
#if DEBUG
            int Lr;
            int[] row_cols = null;
            double[] row_vals = null;
            Lr = this.m_ExtvelMatrix.GetRow(i0G + n, ref row_cols, ref row_vals);
            for (int lr = 0; lr < Lr; lr++) {
                int ColIndex = row_cols[lr];
                double Value = row_vals[lr];
                Debug.Assert((ColIndex >= i0G && ColIndex < i0G + N) || (Value == 0.0), "Matrix is expected to be block-diagonal.");
            }
#endif
            for (int m = 0; m < N; m++) {
                Mtx[n, m] = this.m_ExtvelMatrix[i0G + n, i0G + m];
            }
            rhs[n] = -this.m_ExtvelAffine[i0L + n];
        }

        // Solve the system, i.e. the local extension-velocity equation
        // ------------------------------------------------------------

        double[] ep = new double[N];
        Mtx.Solve(ep, rhs);

        for (int n = 0; n < N; n++) {
            ExtProperty.Coordinates[jCell, n] = ep[n];
        }

        // detect violation of maximum principle
        // -------------------------------------

        ExtProperty.GetExtremalValuesInCell(out mini, out maxi, jCell);

        // define relaxed bounds...
        double compExtPropertyMin = ExtPropertyMin - (1.0e-8 + ExtPropertyMax - ExtPropertyMin) * 0.2;
        double compExtPropertyMax = ExtPropertyMax + (1.0e-8 + ExtPropertyMax - ExtPropertyMin) * 0.2;

        // test if extension velocity solution is within bounds
        MaximumPrincipleFulfilled = (mini >= compExtPropertyMin) && (maxi <= compExtPropertyMax);

        if (count > 5) {
            break; // give up after a few diffusion increases
        }
    }
    if (count > 4) {
        Console.WriteLine(" ExtVel, cell #{0}, Diff coeff {1}, extremal p. holds? {2} (min/max soll = {3:e4}/{4:e4}, ist = {5:e4}/{6:e4})", jCell, DiffusionCoeff, MaximumPrincipleFulfilled, ExtPropertyMin, ExtPropertyMax, mini, maxi);
    }

    // record maxima and minima
    // ========================
    ExtPropertyMax = Math.Max(ExtPropertyMax, maxi);
    ExtPropertyMin = Math.Min(ExtPropertyMin, mini);
}
/// <summary>
/// Probes the Jacobian determinant of the reference-to-physical
/// transformation of cell <paramref name="cl"/> at a set of quadrature nodes.
/// </summary>
/// <param name="cl">the cell whose transformation is tested</param>
/// <param name="PositiveJacobianFlag">
/// true if the Jacobian determinant is positive at at least one test node
/// </param>
/// <param name="NegativeJacobianFlag">
/// true if the Jacobian determinant is non-positive at at least one test node
/// </param>
/// <param name="Linear">
/// true if the determinant is (numerically) constant over all test nodes,
/// i.e. the transformation appears affine-linear
/// </param>
static void JacobianTest(Cell cl, out bool PositiveJacobianFlag, out bool NegativeJacobianFlag, out bool Linear) {
    RefElement Kref = cl.Type.GetRefElement();
    int D = 3; // for OpenFOAM, we assume 3D

    // get nodes within ref element, to test the Jacobian;
    // the degree is inflated (deg * 3, rule order 2*deg) so that the
    // determinant — itself a polynomial — is sampled densely enough
    int deg = Kref.GetInterpolationDegree(cl.Type);
    if (deg > 1) {
        deg--;
    }
    deg *= 3;
    NodeSet TestNodes = Kref.GetQuadratureRule(2 * deg).Nodes;

    // evaluate derivatives of nodal polynomials for transformation
    PolynomialList[] Deriv = Kref.GetInterpolationPolynomials1stDeriv(cl.Type);
    MultidimensionalArray[] DerivEval = new MultidimensionalArray[D];
    for (int d = 0; d < D; d++) {
        DerivEval[d] = Deriv[d].Values.GetValues(TestNodes);
    }

    // evaluate Jacobian matrix at every test node
    MultidimensionalArray Jacobi = MultidimensionalArray.Create(TestNodes.NoOfNodes, D, D); // temporary storage for Jacobian matrix
    Debug.Assert(cl.TransformationParams.Dimension == 2);
    Debug.Assert(cl.TransformationParams.GetLength(1) == 3);
    for (int d1 = 0; d1 < D; d1++) {
        Debug.Assert(cl.TransformationParams.GetLength(0) == Deriv[d1].Count);
        MultidimensionalArray JacobiCol = Jacobi.ExtractSubArrayShallow(-1, -1, d1);
        // index contraction: Jacobi[k, n, d1] = sum over interpolation nodes
        // of derivative values times transformation parameters
        // (index letters: k = node, n = interpolation point, d = direction)
        JacobiCol.Multiply(1.0, DerivEval[d1], cl.TransformationParams, 0.0, "kd", "kn", "nd");
    }

    // do the tests: classify the sign of det(J) at each node and track the
    // spread of |det(J)| to detect non-linearity
    NegativeJacobianFlag = false;
    PositiveJacobianFlag = false;
    double MinAbsJacobiDet = double.MaxValue;
    double MaxAbsJacobiDet = 0.0;
    for (int n = 0; n < TestNodes.NoOfNodes; n++) {
        double detJac = Jacobi.ExtractSubArrayShallow(n, -1, -1).Determinant();
        // NOTE(review): detJac == 0 sets only the negative flag, never the
        // positive one — presumably intended to flag degenerate cells as bad
        if (detJac <= 0) {
            NegativeJacobianFlag = true;
        }
        if (detJac > 0) {
            PositiveJacobianFlag = true;
        }
        MinAbsJacobiDet = Math.Min(MinAbsJacobiDet, detJac.Abs());
        MaxAbsJacobiDet = Math.Max(MaxAbsJacobiDet, detJac.Abs());
    }

    // relative spread below 1e-8 => determinant constant => affine-linear map
    // NOTE(review): divides by MinAbsJacobiDet; for a fully degenerate cell
    // (min = 0) this yields Inf/NaN and 'Linear' becomes false — confirm
    // that is the intended classification
    if ((MaxAbsJacobiDet - MinAbsJacobiDet) / MinAbsJacobiDet <= 1.0e-8) {
        Linear = true;
    } else {
        Linear = false;
    }
}
/// <summary>
/// Uses a safe-guarded Newton method (Newton-Raphson with bisection
/// fall-back, cf. "rtsafe" in Numerical Recipes) to find all roots of the
/// level-set function restricted to <paramref name="segment"/>.
/// </summary>
/// <param name="segment">the line segment on which roots are sought</param>
/// <param name="levelSet">
/// the level-set function; must be a polynomial <see cref="LevelSet"/>,
/// otherwise a <see cref="NotImplementedException"/> is thrown
/// </param>
/// <param name="cell">local cell index</param>
/// <param name="iKref">reference-element index</param>
/// <returns>
/// parameter values (in the segment coordinate [-1, 1]) of all roots found
/// </returns>
public double[] GetRoots(LineSegment segment, ILevelSet levelSet, int cell, int iKref) {
    LevelSet levelSetField = levelSet as LevelSet;
    if (levelSetField == null) {
        throw new NotImplementedException("Method currently only works for polynomial level sets");
    }

    // restrict the DG representation of the level set to the segment:
    // accumulate the projected 1D polynomial coefficients weighted by the
    // DG coordinates of this cell
    int maxNoOfCoefficientsPerDimension = levelSetField.Basis.Degree + 1;
    int noOfPolynomials = segment.ProjectedPolynomialCoefficients.GetLength(1);
    double[] coefficients = new double[maxNoOfCoefficientsPerDimension];
    for (int i = 0; i < noOfPolynomials; i++) {
        double dgCoefficient = levelSetField.Coordinates[cell, i];
        for (int j = 0; j < maxNoOfCoefficientsPerDimension; j++) {
            coefficients[j] += dgCoefficient * segment.ProjectedPolynomialCoefficients[iKref, i, j];
        }
    }

    double[] roots;
    unsafe {
        // pointer to the HIGHEST-order coefficient; Eval(...) evaluates the
        // polynomial relative to this anchor
        fixed(double *pCoeff = &coefficients[coefficients.Length - 1]) {
            // bracketing phase: subdivide [-1, 1] into NO_OF_BRACKETS
            // intervals and keep those with a sign change (or an endpoint
            // that already is a root)
            double xLow = -1.0;
            double xHigh = 1.0;
            int NO_OF_BRACKETS = 8;
            List<double> lowerBounds = new List<double>();
            List<double> upperBounds = new List<double>();
            double dx2 = 2.0 / NO_OF_BRACKETS;
            double x = xLow;
            double fOld = Eval(x, pCoeff, coefficients.Length);
            for (int i = 0; i < NO_OF_BRACKETS; i++) {
                x += dx2;
                double fNew = Eval(x, pCoeff, coefficients.Length);

                if (i == 0 && fOld.Abs() < Tolerance) {
                    // the left segment endpoint itself is a root
                    lowerBounds.Add(x - dx2);
                    upperBounds.Add(x);
                    fOld = fNew;
                    continue;
                } else {
                    if (fNew.Abs() < Tolerance) {
                        // a subdivision point itself is a root; skip one
                        // interval ahead so the root is not bracketed twice
                        lowerBounds.Add(x - dx2);
                        upperBounds.Add(x);
                        x += dx2;
                        fOld = Eval(x, pCoeff, coefficients.Length);
                        continue;
                    }
                }

                if (fNew * fOld <= 0.0) {
                    lowerBounds.Add(x - dx2);
                    upperBounds.Add(x);
                }

                fOld = fNew;
            }

            // actual Newton-Raphson, one safeguarded iteration per bracket
            int MAX_ITERATIONS = 50;
            roots = new double[lowerBounds.Count];
            for (int j = 0; j < lowerBounds.Count; j++) {
                xLow = lowerBounds[j];
                xHigh = upperBounds[j];

                double fLeft = Eval(xLow, pCoeff, coefficients.Length);
                double fRight = Eval(xHigh, pCoeff, coefficients.Length);

                if (fLeft.Abs() < Tolerance) {
                    // BUG FIX: was 'break', which aborted the whole bracket
                    // loop and left all remaining entries of 'roots' at 0.0
                    // (reporting spurious roots at the segment midpoint)
                    roots[j] = xLow;
                    continue;
                }
                if (fRight.Abs() < Tolerance) {
                    // BUG FIX: was 'break' (see above)
                    roots[j] = xHigh;
                    continue;
                }
                if (fLeft.Sign() == fRight.Sign()) {
                    throw new Exception("Bracketing failed: no sign change in bracket");
                }

                // orient the bracket such that f(xLow) < 0 < f(xHigh)
                if (fLeft > 0.0) {
                    xLow = upperBounds[j];
                    xHigh = lowerBounds[j];
                }

                double root = 0.5 * (xLow + xHigh);
                double f;
                double df;
                Eval(root, pCoeff, coefficients.Length, out f, out df);
                double dxOld = (xHigh - xLow).Abs();
                double dx = dxOld;
                int i = 0;
                while (true) {
                    if (i > MAX_ITERATIONS) {
                        throw new Exception("Max iterations exceeded");
                    }

                    // a > 0 <=> the Newton step would leave [xLow, xHigh]
                    double a = ((root - xHigh) * df - f) * ((root - xLow) * df - f);
                    if (a > 0.0 || 2.0 * Math.Abs(f) > Math.Abs(dxOld * df)) {
                        // Newton out of range or converging too slowly -> bisect
                        dxOld = dx;
                        dx = 0.5 * (xHigh - xLow);
                        root = xLow + dx;
                    } else {
                        // take the Newton step
                        dxOld = dx;
                        dx = -f / df;
                        root += dx;
                    }

                    Eval(root, pCoeff, coefficients.Length, out f, out df);
                    if (Math.Abs(f) <= Tolerance) {
                        roots[j] = root;
                        break;
                    }

                    // shrink the bracket around the sign change
                    if (f < 0.0) {
                        xLow = root;
                    } else {
                        xHigh = root;
                    }

                    i++;
                }
            }
        }
    }

    return(roots);
}
/// <summary>
/// Computes the intersection of the line through <paramref name="S1"/>--<paramref name="S2"/>
/// with the line through <paramref name="E1"/>--<paramref name="E2"/> (2D only).
/// </summary>
/// <param name="S1">first point of the first line</param>
/// <param name="S2">second point of the first line</param>
/// <param name="E1">first point of the second line</param>
/// <param name="E2">second point of the second line</param>
/// <param name="alpha1">
/// coordinate of <paramref name="I"/> on the line <paramref name="S1"/>--<paramref name="S2"/>
/// (0 at <paramref name="S1"/>, 1 at <paramref name="S2"/>)
/// </param>
/// <param name="alpha2">
/// coordinate of <paramref name="I"/> on the line <paramref name="E1"/>--<paramref name="E2"/>
/// (0 at <paramref name="E1"/>, 1 at <paramref name="E2"/>)
/// </param>
/// <param name="I">the intersection point</param>
/// <returns>false if the two lines are parallel, true otherwise</returns>
internal static bool ComputeIntersection(Vector S1, Vector S2, Vector E1, Vector E2, out double alpha1, out double alpha2, out Vector I) {
    // only implemented for two spatial dimensions
    if (S1.Dim != 2 || S2.Dim != 2 || E1.Dim != 2 || E2.Dim != 2) {
        throw new ArgumentException("spatial dimension mismatch.");
    }

    Vector dirS = S2 - S1;
    Vector dirE = E2 - E1;
    if (dirS.Abs() <= 0) {
        throw new ArgumentException();
    }
    if (dirE.Abs() <= 0) {
        throw new ArgumentException();
    }

    var lineS = AffineManifold.FromPoints(S1, S2);
    var lineE = AffineManifold.FromPoints(E1, E2);

    Vector nrmS = lineS.Normal;
    nrmS.Normalize();
    Vector nrmE = lineE.Normal;
    nrmE.Normalize();

    // dot product of unit normals: magnitude 1 means the lines are parallel
    double cosOfNormals = nrmS * nrmE;
    if (cosOfNormals.Abs() >= 1.0) {
        alpha1 = double.PositiveInfinity;
        alpha2 = double.PositiveInfinity;
        I = new Vector(double.PositiveInfinity, double.PositiveInfinity);
        return(false);
    }

    I = AffineManifold.Intersect2D(lineS, lineE);

    // vectors from each endpoint to the intersection; pick the one with the
    // larger magnitude for each line, remembering whether it starts at the
    // second endpoint ("flipped")
    Vector fromS2 = I - S2;
    Vector fromS1 = I - S1;
    bool flippedS = fromS2.AbsSquare() > fromS1.AbsSquare();
    Vector offS = flippedS ? fromS2 : fromS1;

    Vector fromE2 = I - E2;
    Vector fromE1 = I - E1;
    bool flippedE = fromE2.AbsSquare() > fromE1.AbsSquare();
    Vector offE = flippedE ? fromE2 : fromE1;

    // sanity: the offset vectors must be (anti-)parallel to the line directions
    Debug.Assert((dirS.AngleTo(offS).Abs() <= 1.0e-5) || ((dirS.AngleTo(offS).Abs() - Math.PI).Abs() <= 1.0e-5));
    Debug.Assert((dirE.AngleTo(offE).Abs() <= 1.0e-5) || ((dirE.AngleTo(offE).Abs() - Math.PI).Abs() <= 1.0e-5));

    // project the offsets onto the line directions; if measured from the
    // second endpoint, shift by one to express the coordinate relative to
    // the first endpoint
    alpha1 = (dirS * offS) / dirS.AbsSquare();
    alpha2 = (dirE * offE) / dirE.AbsSquare();
    if (flippedS) {
        alpha1 = 1 + alpha1;
    }
    if (flippedE) {
        alpha2 = 1 + alpha2;
    }

    return(true);
}
/// <summary>
/// Verifies that the <c>Abs</c> extension method on <see cref="double"/>
/// yields exactly the same result as <see cref="Math.Abs(double)"/>.
/// </summary>
public void Abs_ShouldBehaveAsMathAbs_IfDouble(double value)
{
    // Arrange / Act
    double expected = Math.Abs(value);
    double actual = value.Abs();

    // Assert
    Assert.Equal(expected, actual);
}
/// <summary>
/// Rounds the magnitude of <paramref name="x"/> to an integer and then
/// re-applies the sign of <paramref name="x"/>.
/// </summary>
/// <param name="x">the value to round</param>
/// <returns>the rounded magnitude, carrying the sign of <paramref name="x"/></returns>
public static int SignedRoundToInt(this double x)
{
    var sign = x.Sign();
    var roundedMagnitude = x.Abs().RoundToInt();
    return sign * roundedMagnitude;
}
/// <summary>
/// Converts the raw trade fields to their typed form and raises the
/// <c>NewMyTrade</c> event via <c>SafeInvoke</c>.
/// </summary>
internal void OnAddMyTrade(string portfolio, string securityId, string orderId, double price, double amount, DateTime time, string tradeNo) {
    // convert in the same left-to-right order as the original call
    var typedOrderId = orderId.To<long>();
    var typedPrice = price.ToDecimal();
    var volume = amount.Abs().ToDecimal(); // volume is reported unsigned
    var typedTradeNo = tradeNo.To<long>();

    NewMyTrade.SafeInvoke(portfolio, securityId, typedOrderId, typedPrice, volume, time, typedTradeNo);
}
/// <summary>
/// Sign of <paramref name="v"/>: returns 0 for values whose magnitude is
/// below <c>Epsilon</c>, otherwise <see cref="Math.Sign(double)"/>.
/// </summary>
public static int Sign(this double v)
{
    // treat near-zero values as exactly zero
    if (v.Abs() < Epsilon)
    {
        return 0;
    }

    return Math.Sign(v);
}