public IEnumerable<IChunkRulePair<CellBoundaryQuadRule>> GetQuadRuleSet(ExecutionMask mask, int order) {
    using (var tr = new FuncTrace()) {
        if (mask == null || mask is CellMask == false) {
            // the null check must come first; otherwise the MaskType access below dereferences null
            throw new ArgumentException("Cell mask required", "mask");
        }
        if (mask.MaskType != MaskType.Geometrical) {
            throw new ArgumentException("Expecting a geometrical mask.");
        }

        Stopwatch totalTimer = new Stopwatch();
        Stopwatch projectionTimer = new Stopwatch(); // note: never started below, so the projection percentage is always reported as 0
        Stopwatch optimizationTimer = new Stopwatch();

        totalTimer.Start();
        if (order != currentOrder) {
            cache.Clear();
            SwitchOrder(order);
        }

        var result = new List<ChunkRulePair<CellBoundaryQuadRule>>(mask.NoOfItemsLocally);
        foreach (Chunk chunk in mask) {
            for (int i = 0; i < chunk.Len; i++) {
                int cell = chunk.i0 + i;

                if (cache.ContainsKey(cell)) {
                    result.Add(new ChunkRulePair<CellBoundaryQuadRule>(
                        Chunk.GetSingleElementChunk(cell), cache[cell]));
                    continue;
                }

                optimizationTimer.Start();
                CellBoundaryQuadRule optimizedRule = GetOptimizedRule(cell, order);
                optimizationTimer.Stop();

                cache.Add(cell, optimizedRule);
                result.Add(new ChunkRulePair<CellBoundaryQuadRule>(
                    Chunk.GetSingleElementChunk(cell), optimizedRule));
            }
        }
        totalTimer.Stop();

        double totalTicks = (double)totalTimer.ElapsedTicks;
        double percentageProjection = Math.Round(projectionTimer.ElapsedTicks / totalTicks * 100, 2);
        double percentageOptimization = Math.Round(optimizationTimer.ElapsedTicks / totalTicks * 100, 2);

        tr.Info("Percentage spent on projection to surface: "
            + percentageProjection.ToString(NumberFormatInfo.InvariantInfo) + "%");
        tr.Info("Percentage spent on optimization: "
            + percentageOptimization.ToString(NumberFormatInfo.InvariantInfo) + "%");

        return result;
    }
}
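// The method above caches one quadrature rule per cell and discards the cache whenever the
// requested order changes. The following is a minimal, self-contained sketch of that pattern,
// independent of the BoSSS types used above (the rule type and the makeRule factory are
// hypothetical placeholders, not library API):
public class PerCellRuleCache<TRule> {
    readonly Dictionary<int, TRule> cache = new Dictionary<int, TRule>();
    readonly Func<int, int, TRule> makeRule; // (cell, order) -> rule; assumed to be expensive
    int currentOrder = -1;

    public PerCellRuleCache(Func<int, int, TRule> makeRule) {
        this.makeRule = makeRule;
    }

    public TRule GetRule(int cell, int order) {
        if (order != currentOrder) {
            // a different order invalidates all cached rules
            cache.Clear();
            currentOrder = order;
        }
        if (!cache.TryGetValue(cell, out TRule rule)) {
            rule = makeRule(cell, order);
            cache.Add(cell, rule);
        }
        return rule;
    }
}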
protected override void CreateEquationsAndSolvers(GridUpdateDataVaultBase L) { using (FuncTrace tr = new FuncTrace()) { // assemble system, create matrix // ------------------------------ var volQrSch = new CellQuadratureScheme(true, CellMask.GetFullMask(this.GridData)); var edgQrSch = new EdgeQuadratureScheme(true, EdgeMask.GetFullMask(this.GridData)); double D = this.GridData.SpatialDimension; double penalty_base = (T.Basis.Degree + 1) * (T.Basis.Degree + D) / D; double penalty_factor = base.Control.penalty_poisson; { // equation assembly // ----------------- tr.Info("creating sparse system..."); Console.WriteLine("creating sparse system for {0} DOF's ...", T.Mapping.Ntotal); Stopwatch stw = new Stopwatch(); stw.Start(); SpatialOperator LapaceIp = new SpatialOperator(1, 1, QuadOrderFunc.SumOfMaxDegrees(), "T", "T"); var flux = new ipFlux(penalty_base * base.Control.penalty_poisson, this.GridData.Cells.cj, base.Control); LapaceIp.EquationComponents["T"].Add(flux); LapaceIp.Commit(); #if DEBUG var RefLaplaceMtx = new MsrMatrix(T.Mapping); #endif LaplaceMtx = new BlockMsrMatrix(T.Mapping); LaplaceAffine = new double[T.Mapping.LocalLength]; LapaceIp.ComputeMatrixEx(T.Mapping, null, T.Mapping, LaplaceMtx, LaplaceAffine, volQuadScheme: volQrSch, edgeQuadScheme: edgQrSch); #if DEBUG LaplaceAffine.ClearEntries(); LapaceIp.ComputeMatrixEx(T.Mapping, null, T.Mapping, RefLaplaceMtx, LaplaceAffine, volQuadScheme: volQrSch, edgeQuadScheme: edgQrSch); MsrMatrix ErrMtx = RefLaplaceMtx.CloneAs(); ErrMtx.Acc(-1.0, LaplaceMtx); double err = ErrMtx.InfNorm(); double infNrm = LaplaceMtx.InfNorm(); Console.WriteLine("Matrix comparison error: " + err + ", matrix norm is: " + infNrm); Assert.Less(err, infNrm * 1e-10, "MsrMatrix2 comparison failed."); #endif //int q = LaplaceMtx._GetTotalNoOfNonZeros(); //tr.Info("finished: Number of non-zeros: " + q); stw.Stop(); Console.WriteLine("done {0} sec.", stw.Elapsed.TotalSeconds); //double condNo = LaplaceMtx.condest(BatchmodeConnector.Flavor.Octave); //Console.WriteLine("condition number: {0:0.####E-00} ",condNo); } } }
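// The interior-penalty flux above scales its penalty with the factor (p + 1)(p + D) / D,
// multiplied by a user-supplied safety factor. A small standalone illustration of that
// scaling (the numbers are examples, not taken from a concrete run):
static double SipPenalty(int degree, double spatialDimension, double penaltyFactor) {
    double penaltyBase = (degree + 1) * (degree + spatialDimension) / spatialDimension;
    return penaltyBase * penaltyFactor;
}
// e.g. degree 2 in 2D with a safety factor of 1.3:
// SipPenalty(2, 2.0, 1.3) == (3 * 4 / 2) * 1.3 == 7.8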
/// <summary>
/// Loads a BoSSS grid from a grid file; the file type (see <see cref="ImporterTypes"/>) is determined by the file extension.
/// </summary>
public static GridCommons Import(string fileName) {
    using (var tr = new FuncTrace()) {
        ImporterTypes importerType = GetImporterType(fileName);
        tr.Info(string.Format("Loading {0} file '{1}'...", importerType.ToString(), fileName));

        IGridImporter importer;
        using (new BlockTrace("Import", tr)) {
            switch (importerType) {
                case ImporterTypes.Gambit:
                    GambitNeutral gn = new GambitNeutral(fileName);
                    if (gn.BoSSSConversionNeccessary()) {
                        gn = gn.ToLinearElements();
                    }
                    importer = gn;
                    break;

                case ImporterTypes.CGNS:
                    importer = new Cgns(fileName);
                    break;

                case ImporterTypes.Gmsh:
                    importer = new Gmsh(fileName);
                    break;

                default:
                    throw new NotImplementedException();
            }
        }

        tr.Info("Converting to BoSSS grid ...");
        GridCommons grid;
        using (new BlockTrace("Conversion", tr)) {
            grid = importer.GenerateBoSSSGrid();
        }
        return grid;
    }
}
/// <summary> /// Loads the given <paramref name="sessionId"/> from the given /// <paramref name="database"/>. /// </summary> /// <param name="sessionId"></param> /// <param name="database"></param> /// <returns></returns> public SessionInfo LoadSession(Guid sessionId, IDatabaseInfo database) { using (var tr = new FuncTrace()) { tr.Info("Loading session " + sessionId); using (Stream s = fsDriver.GetSessionInfoStream(false, sessionId)) { SessionInfo loadedSession = (SessionInfo)Driver.Deserialize(s, typeof(SessionInfo)); loadedSession.Database = database; loadedSession.WriteTime = Utils.GetSessionFileWriteTime(loadedSession); s.Close(); return(loadedSession); } } }
/// <summary> /// Loads the grid info object for the given /// <paramref name="gridGuid"/> from the given /// <paramref name="database"/> /// </summary> /// <param name="gridGuid"></param> /// <param name="database"></param> /// <returns></returns> public IGridInfo LoadGridInfo(Guid gridGuid, IDatabaseInfo database) { using (var tr = new FuncTrace()) { tr.Info("Loading grid " + gridGuid); IGrid grid = null; grid = DeserializeGrid(gridGuid); /* * if (MyRank == 0) * { * grid = DeserializeGrid(gridGuid); * } * * grid = grid.MPIBroadcast(0); */ grid.GridSerializationHandler.Database = database; grid.WriteTime = Utils.GetGridFileWriteTime(grid); return(grid); } }
/// <summary>
/// Opens a file stream below the base path, either for reading or for writing.
/// </summary>
/// <param name="create">
/// true: creates a new file for writing;
/// false: opens a file for reading;
/// </param>
/// <param name="RelPath">
/// relative path within the base paths.
/// </param>
/// <param name="ForceOverride">
/// when opening a stream for writing (<paramref name="create"/>=true), this argument
/// toggles how existing files are treated:
/// false: an exception is thrown if the file already exists;
/// true: an existing file is overwritten.
/// </param>
/// <returns>
/// a file stream
/// </returns>
private Stream OpenFile(bool create, string RelPath, bool ForceOverride) {
    using (var tr = new FuncTrace()) {
        tr.Info("opening file '" + RelPath + "', create='" + create + "'");

        if (create) {
            // create new file
            string fullpath = Path.Combine(BasePath, RelPath);
            if (ForceOverride == true) {
                // overwrites an existing file
                FileStream fs = new FileStream(fullpath, FileMode.Create); //, FileAccess.ReadWrite, FileShare.Read);
                return fs;
            } else {
                // throws an exception if the file already exists
                FileStream fs = new FileStream(fullpath, FileMode.CreateNew); //, FileAccess.ReadWrite, FileShare.Read);
                return fs;
            }
        } else {
            // try to open the file for reading
            FileNotFoundException exc = null;
            try {
                FileStream fs = new FileStream(Path.Combine(BasePath, RelPath), FileMode.Open, FileAccess.Read, FileShare.Read);
                return fs;
            } catch (FileNotFoundException fnf) {
                exc = fnf;
            }
            throw exc;
        }
    }
}
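// The create/ForceOverride flags above map directly onto System.IO.FileMode. A minimal
// standalone sketch of that mapping (the helper name is hypothetical, not a BoSSS API):
static FileMode PickFileMode(bool create, bool forceOverride) {
    if (!create) {
        return FileMode.Open;           // read an existing file; throws FileNotFoundException otherwise
    }
    return forceOverride
        ? FileMode.Create               // create, or truncate and overwrite an existing file
        : FileMode.CreateNew;           // create; throws IOException if the file already exists
}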
/// <summary> /// loads a single <see cref="TimestepInfo"/>-object from the database. /// </summary> public TimestepInfo LoadTimestepInfo(Guid timestepGuid, ISessionInfo session, IDatabaseInfo database) { using (var tr = new FuncTrace()) { tr.Info("Loading time-step " + timestepGuid); TimestepInfo tsi = null; if (MyRank == 0) { using (Stream s = fsDriver.GetTimestepStream(false, timestepGuid)) { tsi = (TimestepInfo)Driver.Deserialize(s, typeof(TimestepInfo)); tsi.Session = session; s.Close(); } tsi.ID = timestepGuid; } tsi = tsi.MPIBroadcast(0); tsi.Database = database; tsi.WriteTime = Utils.GetTimestepFileWriteTime(tsi); return(tsi); } }
/// <summary> /// see <see cref="Device.CreateMatrix(MsrMatrix,MatrixType)"/>; /// </summary> public override MatrixBase CreateMatrix(MsrMatrix M, MatrixType matType) { using (var f = new FuncTrace()) { f.Info("desired matrix type: " + matType); switch (matType) { case MatrixType.CCBCSR: return(new CudaCCBCSRMatrix(M, this.Env)); case MatrixType.CSR: return(new CudaCSRMatrix(M, this.Env)); case MatrixType.ELLPACK: return(new CudaELLPACKmodMatrix(M, this.Env)); case MatrixType.Auto: case MatrixType.ELLPACKcache: return(new CudaELLPACKcacheMatrix(M, this.Env)); default: throw new NotImplementedException(); } } }
/// <summary> /// Uses a moment-fitting basis of order <paramref name="order"/> for /// determining optimal weights of a quadrature rule for a sub-region /// of each edge of each cell in the given <paramref name="mask"/>. /// </summary> /// <param name="mask"> /// Cells for which rules shall be created /// </param> /// <param name="order"> /// Desired order of the moment-fitting system. Assuming that /// <see cref="edgeSurfaceRuleFactory"/> integrates the basis /// polynomials exactly over the zero iso-contour (which it usually /// doesn't), the resulting quadrature rules will be exact up to this /// order. /// </param> /// <returns> /// A set of quadrature rules /// </returns> private CellBoundaryQuadRule[] GetOptimizedRules(CellMask mask, int order) { using (var tr = new FuncTrace()) { int maxLambdaDegree = order + 1; int noOfLambdas = GetNumberOfLambdas(); int noOfFaces = LevelSetData.GridDat.Grid.RefElements[0].NoOfFaces; int D = LevelSetData.GridDat.SpatialDimension; int noOfNodesPerEdge = baseRule.NumbersOfNodesPerFace[0]; CellBoundaryQuadRule[] optimizedRules = new CellBoundaryQuadRule[mask.NoOfItemsLocally]; Debug.Assert( baseRule.NumbersOfNodesPerFace.Distinct().Count() == 1, "Assumption violated: Number of nodes varies from edge to edge."); Debug.Assert(noOfLambdas < noOfNodesPerEdge, "Not enough integration points"); LambdaEdgeBoundaryQuadrature cellBoundaryQuadrature = new LambdaEdgeBoundaryQuadrature(this, CoFaceQuadRuleFactory, maxLambdaDegree, mask); cellBoundaryQuadrature.Execute(); double[, ,] boundaryResults = cellBoundaryQuadrature.Results; LambdaLevelSetSurfaceQuadrature surfaceQuadrature = new LambdaLevelSetSurfaceQuadrature(this, edgeSurfaceRuleFactory, maxLambdaDegree, mask); surfaceQuadrature.Execute(); double[, ,] surfaceResults = surfaceQuadrature.Results; int noOfRhs = 0; int[,] rhsIndexMap = new int[mask.NoOfItemsLocally, noOfFaces]; foreach (Chunk chunk in mask) { MultidimensionalArray levelSetValues = LevelSetData.GetLevSetValues(signTestRule.Nodes, chunk.i0, chunk.Len); for (int i = 0; i < chunk.Len; i++) { int cell = i + chunk.i0; int iSubGrid = localCellIndex2SubgridIndex[cell]; optimizedRules[iSubGrid] = new CellBoundaryQuadRule() { Nodes = baseRule.Nodes, Weights = MultidimensionalArray.Create(baseRule.NoOfNodes), NumbersOfNodesPerFace = baseRule.NumbersOfNodesPerFace.CloneAs() }; int noOfProcessedNodes = 0; for (int e = 0; e < noOfFaces; e++) { int noOfNodesOnEdge = baseRule.NumbersOfNodesPerFace[e]; bool faceIsCut = false; for (int k = 0; k < noOfLambdas; k++) { faceIsCut |= surfaceResults[iSubGrid, e, k].Abs() > 1e-9; } //edgeIsCut = tracker.EdgeIsCut[cell, e]; if (!faceIsCut) { // Sign is checked in multiple points to avoid // some weird edge cases. 
Sign with most // occurrences on the test nodes wins int numNeg = 0; int numPos = 0; int offset = signTestRule.NumbersOfNodesPerFace.Take(e).Sum(); for (int j = 0; j < signTestRule.NumbersOfNodesPerFace[e]; j++) { double val = levelSetValues[i, offset + j]; if (val < 0.0) { numNeg++; } else if (val > 0.0) { numPos++; } } int sign = numPos - numNeg; if (sign == 0) { throw new Exception(String.Format( "Could not determine sign of face {0} of cell {1}", e, cell)); } switch (jumpType) { case JumpTypes.Heaviside: if (sign > 0) { CopyWeights(baseRule, optimizedRules[iSubGrid], e, 1.0); } break; case JumpTypes.OneMinusHeaviside: if (sign < 0) { CopyWeights(baseRule, optimizedRules[iSubGrid], e, -1.0); } break; case JumpTypes.Sign: CopyWeights(baseRule, optimizedRules[iSubGrid], e, levelSetValues[i, e].Sign()); break; default: throw new NotImplementedException(); } rhsIndexMap[iSubGrid, e] = -1; noOfProcessedNodes += noOfNodesOnEdge; continue; } rhsIndexMap[iSubGrid, e] = noOfRhs; noOfRhs++; noOfProcessedNodes += noOfNodesOnEdge; } } } // Leading dimension of B (rhs); required by DGELSY int LDB = Math.Max(noOfLambdas, noOfNodesPerEdge); double[] rhs = new double[LDB * noOfRhs]; foreach (Chunk chunk in mask) { for (int i = 0; i < chunk.Len; i++) { int cell = i + chunk.i0; int iSubGrid = localCellIndex2SubgridIndex[cell]; for (int e = 0; e < noOfFaces; e++) { int rhsIndex = rhsIndexMap[iSubGrid, e]; if (rhsIndex < 0) { continue; } switch (jumpType) { case JumpTypes.Heaviside: for (int k = 0; k < noOfLambdas; k++) { rhs[rhsIndex * LDB + k] = boundaryResults[iSubGrid, e, k] - surfaceResults[iSubGrid, e, k]; } break; case JumpTypes.OneMinusHeaviside: for (int k = 0; k < noOfLambdas; k++) { rhs[rhsIndex * LDB + k] = boundaryResults[iSubGrid, e, k] + surfaceResults[iSubGrid, e, k]; } break; case JumpTypes.Sign: for (int k = 0; k < noOfLambdas; k++) { rhs[rhsIndex * LDB + k] = boundaryResults[iSubGrid, e, k] - 2.0 * surfaceResults[iSubGrid, e, k]; } break; default: throw new NotImplementedException(); } } } } if (rhs.Length > 0) { LAPACK.F77_LAPACK.DGELSY(noOfLambdas, noOfNodesPerEdge, basisValuesEdge.Storage, rhs, noOfRhs, 1e-12); } foreach (Chunk chunk in mask) { for (int i = 0; i < chunk.Len; i++) { int cell = i + chunk.i0; int iSubGrid = localCellIndex2SubgridIndex[cell]; int noOfProcessedNodes = 0; for (int e = 0; e < noOfFaces; e++) { int noOfNodesOnEdge = baseRule.NumbersOfNodesPerFace[e]; int rhsIndex = rhsIndexMap[iSubGrid, e]; if (rhsIndex < 0) { noOfProcessedNodes += noOfNodesOnEdge; continue; } for (int j = 0; j < noOfNodesOnEdge; j++) { optimizedRules[iSubGrid].Weights[noOfProcessedNodes + j] = rhs[rhsIndex * LDB + j]; } noOfProcessedNodes += noOfNodesOnEdge; } double max = optimizedRules[iSubGrid].Weights.Max(d => d.Abs()); if (max > 2.0 * RefElement.Volume) { tr.Info(String.Format( "Warning: Abnormally large integration weight detected" + " for level set edge volume integral in cell {0}" + " (|w| = {1}). This may indicate a loss of" + " integration accuracy.", cell, max)); } } } return(optimizedRules); } }
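// The two loops above assemble, per cut face, a moment-fitting system roughly of the form
//     A * w = b,   A[k, j] = lambda_k(node_j),   b[k] = target integral of lambda_k,
// and hand it to LAPACK's DGELSY. The following self-contained sketch shows the same idea for
// a square 1D system with monomial moments on [-1, 1], solved with plain Gaussian elimination
// instead of DGELSY (all names here are illustrative, not library API):
static double[] MomentFitWeights1D(double[] nodes) {
    int n = nodes.Length;                       // number of nodes == number of moments
    double[,] A = new double[n, n];
    double[] b = new double[n];
    for (int k = 0; k < n; k++) {
        // exact integral of x^k over [-1, 1]
        b[k] = (k % 2 == 0) ? 2.0 / (k + 1) : 0.0;
        for (int j = 0; j < n; j++) {
            A[k, j] = Math.Pow(nodes[j], k);    // moment matrix: k-th monomial at node j
        }
    }
    // naive Gaussian elimination without pivoting (sufficient for this small, well-posed example)
    for (int p = 0; p < n; p++) {
        for (int r = p + 1; r < n; r++) {
            double f = A[r, p] / A[p, p];
            for (int c = p; c < n; c++) A[r, c] -= f * A[p, c];
            b[r] -= f * b[p];
        }
    }
    double[] w = new double[n];
    for (int r = n - 1; r >= 0; r--) {
        double s = b[r];
        for (int c = r + 1; c < n; c++) s -= A[r, c] * w[c];
        w[r] = s / A[r, r];
    }
    return w;   // e.g. nodes {-0.5, 0.0, 0.5} yield weights {4/3, -2/3, 4/3}
}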
/// <summary> /// Constructor /// </summary> /// <param name="__ProblemMapping"> /// Mapping of original problem, equal to <see cref="ProblemMapping"/>. /// </param> /// <param name="__aggGrdB"> /// Sequence of aggregation grid DG basis objects, correlates to original mapping /// </param> /// <param name="DgDegrees"></param> public MultigridMapping(UnsetteledCoordinateMapping __ProblemMapping, AggregationGridBasis[] __aggGrdB, int[] DgDegrees) { using (var tr = new FuncTrace()) { // check args // =========== tr.Info("Entering multigrid mapping " + __aggGrdB[0] + " Basis length " + __aggGrdB.Length); if (__aggGrdB.Length != __ProblemMapping.BasisS.Count) { throw new ArgumentException("Mismatch between number of multigrid basis objects and number of variables in problem mapping."); } var mgGrid = __aggGrdB[0].AggGrid; for (int iVar = 0; iVar < __aggGrdB.Length; iVar++) { if (__ProblemMapping.BasisS[iVar] is BoSSS.Foundation.XDG.XDGBasis) { if (!(__aggGrdB[iVar] is XdgAggregationBasis)) { throw new ArgumentException(); } } else if (__ProblemMapping.BasisS[iVar] is BoSSS.Foundation.Basis) { //if((__aggGrdB[iVar] is XdgAggregationBasis)) // throw new ArgumentException(); } if (!object.ReferenceEquals(mgGrid, __aggGrdB[iVar].AggGrid)) { throw new ArgumentException("Basis object must all be defined on the same grid."); } } // find basis with maximum degree // ============================== this.ProblemMapping = __ProblemMapping; //this.MaxBasis = ProblemMapping.BasisS.ElementAtMax(basis => basis.Degree); this.m_DgDegree = DgDegrees.CloneAs(); //if(!this.MaxBasis.IsSubBasis(__aggGrdB.DGBasis)) { // throw new ArgumentException("Basis on aggregation grid is insufficient;"); //} if (m_DgDegree.Length != __ProblemMapping.BasisS.Count()) { throw new ArgumentException("Wrong number of DG degrees."); } for (int i = 0; i < m_DgDegree.Length; i++) { if (m_DgDegree[i] < 0) { throw new ArgumentException("DG degree must be greater of equal to 0."); } if (m_DgDegree[i] > __ProblemMapping.BasisS[i].Degree) { throw new ArgumentException("DG degree on sub-level can not exceed DG degree of the original problem."); } } // create basis for this level // =========================== this.AggBasis = __aggGrdB; // min/max length // ============== { int Smin = 0; int Smax = 0; int Nofields = this.m_DgDegree.Length; for (int ifld = 0; ifld < Nofields; ifld++) { Smin += this.AggBasis[ifld].GetMinimalLength(this.m_DgDegree[ifld]); Smax += this.AggBasis[ifld].GetMaximalLength(this.m_DgDegree[ifld]); } this.MinimalLength = Smin; this.MaximalLength = Smax; } // offsets // ======= if (this.MinimalLength != this.MaximalLength) { int JAGGloc = this.AggGrid.iLogicalCells.NoOfLocalUpdatedCells; int JAGGtot = this.AggGrid.iLogicalCells.Count; int[] __i0Tmp = new int[JAGGtot]; HashSet <int> BlockLen = new HashSet <int>(); int LL = 0; for (int jag = 0; jag < JAGGloc; jag++) { int S = 0; for (int i = 0; i < m_DgDegree.Length; i++) { S += this.AggBasis[i].GetLength(jag, m_DgDegree[i]); } if (jag < JAGGloc - 1) { __i0Tmp[jag + 1] = __i0Tmp[jag] + S; } else { LL = __i0Tmp[jag] + S; } BlockLen.Add(S); } Partitioning = new Partitioning(LL); int i0Part = Partitioning.i0; m_i0 = new int[JAGGtot + 1]; for (int jag = 0; jag < JAGGloc; jag++) { m_i0[jag] = __i0Tmp[jag]; // store local i0 index __i0Tmp[jag] += i0Part; // convert to global index } m_i0[JAGGloc] = LL; __i0Tmp.MPIExchange(this.AggGrid); // compute global cell i0's in the external range m_i0_ExtGlob = new int[JAGGtot - JAGGloc]; Array.Copy(__i0Tmp, JAGGloc, m_i0_ExtGlob, 0, JAGGtot - 
JAGGloc); // compute local cell i0's in the external range (very confusing) for (int jag = JAGGloc; jag < JAGGtot; jag++) { int S = 0; for (int i = 0; i < m_DgDegree.Length; i++) { S += this.AggBasis[i].GetLength(jag, m_DgDegree[i]); } m_i0[jag + 1] = this.m_i0[jag] + S; BlockLen.Add(S); } // build look-up for block types m_Len2SublockType = new Dictionary <int, int>(); m_Subblk_i0 = new int[BlockLen.Count][]; m_SubblkLen = new int[BlockLen.Count][]; int type = 0; foreach (int S in BlockLen) { m_Subblk_i0[type] = new int[] { 0 }; m_SubblkLen[type] = new int[] { S }; m_Len2SublockType.Add(S, type); type++; } #if DEBUG for (int jag = JAGGloc; jag < JAGGtot; jag++) { Debug.Assert(Partitioning.IsInLocalRange(m_i0_ExtGlob[jag - JAGGloc]) == false); } #endif } else { m_Subblk_i0 = new int[][] { new int[] { 0 } }; m_SubblkLen = new int[][] { new int[] { this.MaximalLength } }; Partitioning = new Partitioning(this.AggGrid.iLogicalCells.NoOfLocalUpdatedCells * this.MaximalLength); } Debug.Assert(Partitioning != null); Debug.Assert(Partitioning.LocalLength == this.LocalLength); } }
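// The constructor above turns variable per-cell block sizes into cumulative offsets (i0
// indices) plus the total local length. Stripped of the MPI exchange and the
// aggregation-basis specifics, the core is an ordinary exclusive prefix sum:
static int[] ComputeCellOffsets(int[] blockLengths, out int localLength) {
    int J = blockLengths.Length;
    int[] i0 = new int[J + 1];          // i0[j] = first index of cell j, i0[J] = total length
    for (int j = 0; j < J; j++) {
        i0[j + 1] = i0[j] + blockLengths[j];
    }
    localLength = i0[J];
    return i0;
}
// e.g. block lengths {4, 6, 4} give offsets {0, 4, 10, 14} and a local length of 14.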
protected CellBoundaryQuadRule GetOptimizedRule(int cell, int order) { using (var tr = new FuncTrace()) { int numberOfPhis = GetNumberOfPhis(); int noOfEdges = LevelSetData.GridDat.Grid.RefElements[0].NoOfFaces; int D = LevelSetData.GridDat.SpatialDimension; PhiQuadrature quadrature = new PhiQuadrature(LevelSetData, this, order, cell); quadrature.Execute(); double[,] quadResults = quadrature.Results; CellBoundaryQuadRule optimizedRule = new CellBoundaryQuadRule() { NumbersOfNodesPerFace = baseRule.NumbersOfNodesPerFace.CloneAs() }; for (int e = 0; e < noOfEdges; e++) { bool edgeIsCut = false; for (int i = 0; i < numberOfPhis; i++) { edgeIsCut |= quadResults[e, i].Abs() > 1e-9; } if (!edgeIsCut) { optimizedRule.NumbersOfNodesPerFace[e] = 0; } } optimizedRule.Weights = MultidimensionalArray.Create( optimizedRule.NumbersOfNodesPerFace.Sum()); optimizedRule.Nodes = new NodeSet( this.RefElement, optimizedRule.Weights.Length, LevelSetData.GridDat.SpatialDimension); //optimizedRule.Nodes.LockForever(); if (optimizedRule.NoOfNodes == 0) { var emptyRule = CellBoundaryQuadRule.CreateEmpty( RefElement, 1, LevelSetData.GridDat.SpatialDimension, noOfEdges); emptyRule.Nodes.LockForever(); return(emptyRule); } for (int e = 0; e < noOfEdges; e++) { int noOfNodesOnEdge = optimizedRule.NumbersOfNodesPerFace[e]; if (noOfNodesOnEdge == 0) { continue; } CopyNodes(baseRule, optimizedRule, e); } optimizedRule.Nodes.LockForever(); if (LevelSetData.GridDat.Cells.Cells2Edges[cell].Length != noOfEdges) { throw new NotImplementedException("Hanging nodes not yet supported"); } int noOfProcessedNodes = 0; for (int e = 0; e < noOfEdges; e++) { int noOfNodesOnEdge = optimizedRule.NumbersOfNodesPerFace[e]; if (noOfNodesOnEdge == 0) { continue; } MultidimensionalArray normals = EvaluateRefNormalsOnEdge(this.LevelSetData, cell, optimizedRule, e); MultidimensionalArray metrics = GetMetricTermsOnEdge(this.LevelSetData, levelSetIndex, optimizedRule, cell, e); //lh = tracker.GridDat.NSC.LockNodeSetFamily(tracker.GridDat.NSC.CreateContainer( // optimizedRule.Nodes.ExtractSubArrayShallow( // new int[] { noOfProcessedNodes, 0 }, // new int[] { noOfProcessedNodes + noOfNodesOnEdge - 1, optimizedRule.SpatialDim - 1 }), // 0, // 0.0)); NodeSet irgendwelcheNodes = new NodeSet(this.RefElement, optimizedRule.Nodes.ExtractSubArrayShallow( new int[] { noOfProcessedNodes, 0 }, new int[] { noOfProcessedNodes + noOfNodesOnEdge - 1, optimizedRule.SpatialDim - 1 })); MultidimensionalArray phiValues = EvaluatePhis(irgendwelcheNodes, cell, e); double[] matrix = new double[numberOfPhis * noOfNodesOnEdge]; // Additional space required by Fortran routine double[] rhs = new double[Math.Max(noOfNodesOnEdge, numberOfPhis)]; for (int j = 0; j < numberOfPhis; j++) { rhs[j] = quadResults[e, j]; for (int i = 0; i < noOfNodesOnEdge; i++) { // Beware of Fortran order! 
int index = i * numberOfPhis + j; for (int d = 0; d < D; d++) { matrix[index] += phiValues[i, j, d] * normals[i, d]; } } } LAPACK.F77_LAPACK.DGELSY(numberOfPhis, noOfNodesOnEdge, matrix, rhs, 1, 1e-12); int edge = Math.Abs(LevelSetData.GridDat.Cells.Cells2Edges[cell][e]) - 1; double maxWeight = 0.0; for (int i = 0; i < noOfNodesOnEdge; i++) { optimizedRule.Weights[noOfProcessedNodes + i] = rhs[i] / metrics[i]; //optimizedRule.Weights[noOfProcessedNodes + i] = rhs[i]; maxWeight = Math.Max(optimizedRule.Weights[noOfProcessedNodes + i].Abs(), maxWeight); } noOfProcessedNodes += noOfNodesOnEdge; if (maxWeight > 4.0 * LevelSetData.GridDat.Edges.h_max_Edge[edge]) { tr.Info(String.Format( "Warning: Abnormally large integration weight detected" + " for level set edge surface integral on edge {0} of cell {1} " + " (|w| = {2}). This may indicate a loss of" + " integration accuracy.", e, cell, maxWeight)); } } return(optimizedRule); } }
/// <summary> /// Obtaining the time integrated spatial discretization of the reinitialization equation in a narrow band around the zero level set, based on a Godunov's numerical Hamiltonian calculation /// </summary> /// <param name="LS"> The level set function </param> /// <param name="Restriction"> The narrow band around the zero level set </param> /// <param name="NumberOfTimesteps"> /// maximum number of pseudo-timesteps /// </param> /// <param name="thickness"> /// The smoothing width of the signum function. /// This is the main stabilization parameter for re-initialization. /// It should be set to approximately 3 cells. /// </param> /// <param name="TimestepSize"> /// size of the pseudo-timestep /// </param> public void ReInitialize(LevelSet LS, SubGrid Restriction, double thickness, double TimestepSize, int NumberOfTimesteps) { using (var tr = new FuncTrace()) { // log parameters: tr.Info("thickness: " + thickness.ToString(NumberFormatInfo.InvariantInfo)); tr.Info("TimestepSize: " + TimestepSize.ToString(NumberFormatInfo.InvariantInfo)); tr.Info("NumberOfTimesteps: " + NumberOfTimesteps); ExplicitEuler TimeIntegrator; SpatialOperator SO; Func <int[], int[], int[], int> QuadratureOrder = QuadOrderFunc.NonLinear(3); if (m_ctx.SpatialDimension == 2) { SO = new SpatialOperator(1, 5, 1, QuadratureOrder, new string[] { "LS", "LSCGV", "LSDG[0]", "LSUG[0]", "LSDG[1]", "LSUG[1]", "Result" }); SO.EquationComponents["Result"].Add(new GodunovHamiltonian(m_ctx, thickness)); SO.Commit(); TimeIntegrator = new RungeKutta(m_Scheme, SO, new CoordinateMapping(LS), new CoordinateMapping(LSCGV, LSDG[0], LSUG[0], LSDG[1], LSUG[1]), sgrd: Restriction); } else { SO = new SpatialOperator(1, 7, 1, QuadratureOrder, new string[] { "LS", "LSCGV", "LSDG[0]", "LSUG[0]", "LSDG[1]", "LSUG[1]", "LSDG[2]", "LSUG[2]", "Result" }); SO.EquationComponents["Result"].Add(new GodunovHamiltonian(m_ctx, thickness)); SO.Commit(); TimeIntegrator = new RungeKutta(m_Scheme, SO, new CoordinateMapping(LS), new CoordinateMapping(LSCGV, LSDG[0], LSUG[0], LSDG[1], LSUG[1], LSDG[2], LSUG[2]), sgrd: Restriction); } // Calculating the gradients in each sub-stage of a Runge-Kutta integration procedure ExplicitEuler.ChangeRateCallback EvalGradients = delegate(double t1, double t2) { LSUG.Clear(); CalculateLevelSetGradient(LS, LSUG, "Upwind", Restriction); LSDG.Clear(); CalculateLevelSetGradient(LS, LSDG, "Downwind", Restriction); LSCG.Clear(); CalculateLevelSetGradient(LS, LSCG, "Central", Restriction); LSCGV.Clear(); var VolMask = (Restriction != null) ? Restriction.VolumeMask : null; LSCGV.ProjectAbs(1.0, VolMask, LSCG.ToArray()); }; TimeIntegrator.OnBeforeComputeChangeRate += EvalGradients; { EvalGradients(0, 0); var GodunovResi = new SinglePhaseField(LS.Basis, "Residual"); SO.Evaluate(1.0, 0.0, LS.Mapping, TimeIntegrator.ParameterMapping.Fields, GodunovResi.Mapping, Restriction); //Tecplot.Tecplot.PlotFields(ArrayTools.Cat<DGField>( LSUG, LSDG, LS, GodunovResi), "Residual", 0, 3); } // pseudo-timestepping // =================== double factor = 1.0; double time = 0; LevelSet prevLevSet = new LevelSet(LS.Basis, "prevLevSet"); CellMask RestrictionMask = (Restriction == null) ? 
null : Restriction.VolumeMask; for (int i = 0; (i < NumberOfTimesteps); i++) { tr.Info("Level set reinitialization pseudo-timestepping, timestep " + i); // backup old Levelset // ------------------- prevLevSet.Clear(); prevLevSet.Acc(1.0, LS, RestrictionMask); // time integration // ---------------- double dt = TimestepSize * factor; tr.Info("dt = " + dt.ToString(NumberFormatInfo.InvariantInfo) + " (factor = " + factor.ToString(NumberFormatInfo.InvariantInfo) + ")"); TimeIntegrator.Perform(dt); time += dt; // change norm // ------ prevLevSet.Acc(-1.0, LS, RestrictionMask); double ChangeNorm = prevLevSet.L2Norm(RestrictionMask); Console.WriteLine("Reinit: PseudoTime: {0} - Changenorm: {1}", i, ChangeNorm); //Tecplot.Tecplot.PlotFields(new SinglePhaseField[] { LS }, m_ctx, "Reinit-" + i, "Reinit-" + i, i, 3); } //*/ } }
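// ReInitialize above runs a fixed number of pseudo-timesteps and monitors the L2 norm of the
// change between consecutive iterates. The control flow, reduced to its essentials and with
// the level-set update hidden behind a delegate, is sketched below; unlike the method above,
// the sketch additionally stops early once the change drops below a tolerance (purely
// illustrative, not BoSSS API):
static int PseudoTimestep(double[] phi, Action<double[], double> performTimestep,
                          double dt, int maxSteps, double changeTol) {
    double[] previous = new double[phi.Length];
    for (int step = 0; step < maxSteps; step++) {
        Array.Copy(phi, previous, phi.Length);  // backup of the old level set
        performTimestep(phi, dt);               // one explicit pseudo-timestep

        double changeNorm = 0.0;                // || phi_new - phi_old ||_2
        for (int i = 0; i < phi.Length; i++) {
            double d = phi[i] - previous[i];
            changeNorm += d * d;
        }
        changeNorm = Math.Sqrt(changeNorm);
        Console.WriteLine("Reinit: PseudoTime: {0} - Changenorm: {1}", step, changeNorm);

        if (changeNorm < changeTol) {
            return step + 1;                    // converged to a (discrete) steady state
        }
    }
    return maxSteps;
}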
/// <summary> /// Uses a moment-fitting basis of order <paramref name="order"/> for /// determining optimal weights of a quadrature rule for a sub-region /// of each cell in the given <paramref name="mask"/>. /// </summary> /// <param name="mask"> /// Cells for which rules shall be created /// </param> /// <param name="nodes">Integration nodes</param> /// <param name="quadResults"> /// Integration results for each function in the /// <see cref="lambdaBasis"/> /// </param> /// <param name="order"> /// Desired order of the moment-fitting system. Assuming that /// <see cref="surfaceRuleFactory"/> integrates the basis /// polynomials exactly over the zero iso-contour (which it usually /// doesn't), the resulting quadrature rules will be exact up to this /// order. /// </param> /// <returns> /// A set of quadrature rules /// </returns> private QuadRule[] GetOptimizedRules(CellMask mask, NodeSet nodes, double[,] quadResults, int order) { using (var tr = new FuncTrace()) { int maxLambdaDegree = order + 1; int noOfLambdas = GetNumberOfLambdas(maxLambdaDegree); int noOfNodes = nodes.GetLength(0); QuadRule[] optimizedRules = new QuadRule[mask.NoOfItemsLocally]; if (mask.NoOfItemsLocally == 0) { return(optimizedRules); } // Leading dimension of B (rhs); required by DGELSY int LDB = Math.Max(noOfLambdas, noOfNodes); double[] rhs = new double[LDB * mask.NoOfItemsLocally]; Basis basis = new Basis(LevelSetData.GridDat, order); MultidimensionalArray basisValues = basis.Evaluate(nodes); int n = 0; foreach (Chunk chunk in mask) { foreach (int cell in chunk.Elements) { int iSubGrid = localCellIndex2SubgridIndex[cell]; for (int k = 0; k < noOfLambdas; k++) { rhs[n * LDB + k] += quadResults[iSubGrid, k]; } n++; } } double[] matrix; if (basisValues.IsContinious) { matrix = basisValues.Storage; } else { matrix = new double[basisValues.Length]; basisValues.CopyTo(matrix, true, 0); } LAPACK.F77_LAPACK.DGELSY(noOfLambdas, noOfNodes, matrix, rhs, mask.NoOfItemsLocally, RCOND); n = 0; foreach (Chunk chunk in mask) { foreach (int cell in chunk.Elements) { optimizedRules[n] = new QuadRule() { Nodes = nodes, Weights = MultidimensionalArray.Create(noOfNodes), OrderOfPrecision = order }; for (int j = 0; j < noOfNodes; j++) { optimizedRules[n].Weights[j] = rhs[n * LDB + j]; } double max = optimizedRules[n].Weights.Max(d => d.Abs()); if (max > 2.0 * RefElement.Volume) { tr.Info(String.Format( "Warning: Abnormally large integration weight detected" + " for level set volume integral in cell {0}" + " (|w| = {1}). This may indicate a loss of" + " integration accuracy.", cell, max)); } n++; } } return(optimizedRules); } }
/// <summary> /// computes a global time-step length ("delta t") according to the /// Courant-Friedrichs-Lax - criterion, based on a velocity /// vector (<paramref name="velvect"/>) and the cell size /// this.<see cref="Cells"/>.<see cref="CellData.h_min"/>; /// </summary> /// <param name="velvect"> /// components of a velocity vector /// </param> /// <param name="max"> /// an upper maximum for the return value; This is useful if the velocity /// defined by <paramref name="velvect"/> is 0 or very small everywhere; /// </param> /// <param name="cm"> /// optional restriction of domain. /// </param> /// <returns> /// the minimum (over all cells j in all processes) of <see cref="CellData.h_min"/>[j] /// over v, where v is the Euclidean norm of a vector build from /// <paramref name="velvect"/>; /// This vector is evaluated at cell center and all cell vertices. /// The return value is the same on all processes; /// </returns> static public double ComputeCFLTime <T>(this IGridData __gdat, IEnumerable <T> velvect, double max, CellMask cm = null) where T : DGField // { using (var tr = new FuncTrace()) { GridData gdat = (GridData)__gdat; ilPSP.MPICollectiveWatchDog.Watch(MPI.Wrappers.csMPI.Raw._COMM.WORLD); T[] _velvect = velvect.ToArray(); if (cm == null) { cm = CellMask.GetFullMask(gdat); } int D = gdat.SpatialDimension; var KrefS = gdat.Grid.RefElements; // find cfl number on this processor // --------------------------------- var m_CFL_EvalPoints = new NodeSet[KrefS.Length]; for (int i = 0; i < KrefS.Length; i++) { var Kref = KrefS[i]; int N = Kref.NoOfVertices + 1; MultidimensionalArray vert = MultidimensionalArray.Create(N, D); vert.SetSubArray(Kref.Vertices, new int[] { 0, 0 }, new int[] { N - 2, D - 1 }); m_CFL_EvalPoints[i] = new NodeSet(Kref, vert); } // evaluators an memory for result int VecMax = 1000; DGField[] evalers = new DGField[_velvect.Length]; MultidimensionalArray[] fieldValues = new MultidimensionalArray[_velvect.Length]; for (int i = 0; i < _velvect.Length; i++) { evalers[i] = _velvect[i]; fieldValues[i] = MultidimensionalArray.Create(VecMax, m_CFL_EvalPoints[0].NoOfNodes); } var h_min = gdat.Cells.h_min; int K = _velvect.Length; double cflhere = max; //for (int j = 0; j < J; j += VectorSize) { foreach (Chunk chk in cm) { int VectorSize = VecMax; for (int j = chk.i0; j < chk.JE; j += VectorSize) { if (j + VectorSize > chk.JE + 1) { VectorSize = chk.JE - j; } VectorSize = gdat.Cells.GetNoOfSimilarConsecutiveCells(CellInfo.RefElementIndex_Mask, j, VectorSize); int iKref = gdat.Cells.GetRefElementIndex(j); int N = m_CFL_EvalPoints[iKref].GetLength(0); if (fieldValues[0].GetLength(0) != VectorSize) { for (int i = 0; i < _velvect.Length; i++) { fieldValues[i].Allocate(VectorSize, N); } } for (int k = 0; k < K; k++) { evalers[k].Evaluate(j, VectorSize, m_CFL_EvalPoints[iKref], fieldValues[k], 0, 0.0); } // loop over cells ... for (int jj = j; jj < j + VectorSize; jj++) { // loop over nodes ... for (int n = 0; n < N; n++) { double velabs = 0; // loop over velocity components ... 
for (int k = 0; k < K; k++) { double v = fieldValues[k][jj - j, n]; velabs += v * v; } velabs = Math.Sqrt(velabs); double cfl = h_min[jj] / velabs; cflhere = Math.Min(cfl, cflhere); } } } } // find the minimum over all processes via MPI and return // ------------------------------------------------------ double cfltotal; unsafe { csMPI.Raw.Allreduce((IntPtr)(&cflhere), (IntPtr)(&cfltotal), 1, csMPI.Raw._DATATYPE.DOUBLE, csMPI.Raw._OP.MIN, csMPI.Raw._COMM.WORLD); } tr.Info("computed CFL timestep: " + cfltotal); return(cfltotal); } }
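// ComputeCFLTime evaluates, per cell and evaluation point, h_min / |v| and takes the minimum
// over all points, cells and MPI ranks, capped by the user-supplied maximum. A serial,
// single-rank sketch of that reduction (no MPI, hypothetical helper name):
static double LocalCflTime(double[] hMin, double[][,] velocityAtNodes, double max) {
    int D = velocityAtNodes.Length;             // number of velocity components
    double cfl = max;                           // upper bound if the velocity is ~0 everywhere
    for (int j = 0; j < hMin.Length; j++) {     // loop over cells
        int N = velocityAtNodes[0].GetLength(1);
        for (int n = 0; n < N; n++) {           // loop over evaluation points of cell j
            double velAbs = 0.0;
            for (int d = 0; d < D; d++) {       // Euclidean norm of the velocity vector
                double v = velocityAtNodes[d][j, n];
                velAbs += v * v;
            }
            velAbs = Math.Sqrt(velAbs);
            cfl = Math.Min(cfl, hMin[j] / velAbs);
        }
    }
    return cfl;   // in the parallel version, this value is MPI_MIN-reduced over all ranks
}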
/// <summary> /// Finds the settings directory (%USERPROFILE%/.BoSSS); /// If not existent (1st startup), the directory and a dummy config is created. /// </summary> public static string GetBoSSSUserSettingsPath() { using (var tr = new FuncTrace()) { string UserProfile = System.Environment.GetEnvironmentVariable("USERPROFILE") ?? System.Environment.GetEnvironmentVariable("HOME"); if (UserProfile == null) { return(""); } string settingsPath = Path.Combine(UserProfile, ".BoSSS"); if (!Directory.Exists(settingsPath)) { try { Console.WriteLine("Creating: " + settingsPath); Directory.CreateDirectory(settingsPath); } catch (IOException e) { tr.Info(String.Format( "Creating user settings path failed with message '{0}';" + " proceeding without user settings", e.Message)); return(""); } } string etcpath = Path.Combine(settingsPath, "etc"); if (!Directory.Exists(etcpath)) { try { Console.WriteLine("Creating: " + etcpath); Directory.CreateDirectory(etcpath); } catch (IOException e) { tr.Info(String.Format( "Creating user settings 'etc' sub-path failed with message '{0}';" + " proceeding without user settings", e.Message)); return(""); } } string batchpath = Path.Combine(settingsPath, "batch"); if (!Directory.Exists(batchpath)) { try { Console.WriteLine("Creating: " + batchpath); Directory.CreateDirectory(batchpath); } catch (IOException e) { tr.Info(String.Format( "Creating user settings 'batch' sub-path failed with message '{0}';" + " proceeding without user settings", e.Message)); return(""); } } string dbeConfigFilePath = Path.Combine(etcpath, "DBE.xml"); if (!File.Exists(dbeConfigFilePath)) { try { XmlDocument doc = new XmlDocument(); doc.LoadXml(Properties.Resources.DBE_empty); doc.Save(dbeConfigFilePath); } catch (IOException e) { tr.Info(String.Format( "Creating default DBE.xml failed with message '{0}';" + " proceeding without DBE.xml", e.Message)); return(""); } } return(settingsPath); } }
/// <summary> /// Computes L2 norms between DG fields on different grid resolutions, i.e. for a /// convergence study, where the solution on the finest grid is assumed to be exact. /// </summary> /// <param name="FieldsToCompare"> /// Identification (<see cref="DGField.Identification"/>) of the fields which should be compared. /// </param> /// <param name="timestepS"> /// A collection of solutions on different grid resolutions. /// </param> /// <param name="GridRes"> /// On exit, the resolution of the different grids. /// </param> /// <param name="L2Errors"> /// On exit, the L2 error /// (for each field specified in <paramref name="FieldsToCompare"/>) /// in comparison to the solution on the finest grid. /// </param> /// <param name="__DOFs"> /// On exit, the number of degrees-of-freedom /// (for each field specified in <paramref name="FieldsToCompare"/>). /// </param> /// <param name="timestepIds"> /// on exit, the timestep id which correlate with the resolutions <paramref name="GridRes"/> /// (remarks: <paramref name="timestepIds"/> may be re-sorted internally according to grid resolution). /// </param> public static void ComputeErrors(IEnumerable <string> FieldsToCompare, IEnumerable <ITimestepInfo> timestepS, out double[] GridRes, out Dictionary <string, int[]> __DOFs, out Dictionary <string, double[]> L2Errors, out Guid[] timestepIds) { using (var tr = new FuncTrace()) { if (FieldsToCompare == null || FieldsToCompare.Count() <= 0) { throw new ArgumentException("empty list of field names."); } if (timestepS == null || timestepS.Count() < 1) { throw new ArgumentException("requiring at least two different solutions."); } // load the DG-Fields List <IEnumerable <DGField> > fields = new List <IEnumerable <DGField> >(); int i = 1; foreach (var timestep in timestepS) { //Console.WriteLine("Loading timestep {0} of {1}, ({2})...", i, timestepS.Count(), timestep.ID); fields.Add(timestep.Fields); i++; //Console.WriteLine("done (Grid has {0} cells).", fields.Last().First().GridDat.CellPartitioning.TotalLength); } // sort according to grid resolution { var s = fields.OrderBy(f => f.First().GridDat.CellPartitioning.TotalLength).ToArray(); var orgfields = fields.ToArray(); fields.Clear(); fields.AddRange(s); s = null; // filter equal grids: while (fields.Count >= 2 && (fields[fields.Count - 1].First().GridDat.CellPartitioning.TotalLength == fields[fields.Count - 2].First().GridDat.CellPartitioning.TotalLength)) { fields.RemoveAt(fields.Count - 2); } // extract timestep Id's timestepIds = new Guid[fields.Count]; for (int z = 0; z < timestepIds.Length; z++) { int idx = orgfields.IndexOf(fields[z], (f1, f2) => object.ReferenceEquals(f1, f2)); timestepIds[z] = timestepS.ElementAt(idx).ID; } } // Grids and coarse-to-fine -- mappings. 
GridData[] gDataS = fields.Select(fc => (GridData)(fc.First().GridDat)).ToArray(); int[][] Fine2CoarseMapS = new int[gDataS.Length - 1][]; // 1st index: level; 2n index: cell index on finest level for (int iLevel = 0; iLevel < Fine2CoarseMapS.Length; iLevel++) { ComputeFine2CoarseMap(gDataS.Last(), gDataS[iLevel], out Fine2CoarseMapS[iLevel]); } // extrapolate to fine grid Dictionary <string, List <DGField> > injectedFields = new Dictionary <string, List <DGField> >(); Dictionary <string, List <int> > DOFs = new Dictionary <string, List <int> >(); foreach (string Identification in FieldsToCompare) { List <DGField> fields_Identification = new List <DGField>(); // fields for different resolutions List <int> dofs_Idenitification = new List <int>(); DGField finestSolution = fields.Last().Single(f => f.Identification == Identification); for (int iLevel = 0; iLevel < gDataS.Length - 1; iLevel++) { //Console.WriteLine("Injecting '{0}' from level {1} to finest grid...", Identification, iLevel); tr.Info(string.Format("Injecting '{0}' from level {1} to finest grid...", Identification, iLevel)); DGField coarseSolution = fields[iLevel].Single(f => f.Identification == Identification); if (finestSolution.GetType() != coarseSolution.GetType()) { throw new NotSupportedException(); } if (coarseSolution.Basis.Degree != finestSolution.Basis.Degree) { throw new NotSupportedException(); } if (finestSolution is XDGField) { XDGField _coarseSolution = (XDGField)coarseSolution; XDGField _finestSolution = (XDGField)finestSolution; XDGField injectedSolution = new XDGField(_finestSolution.Basis, Identification + "-inj-" + iLevel); InjectXDGField(Fine2CoarseMapS[iLevel], injectedSolution, _coarseSolution); fields_Identification.Add(injectedSolution); dofs_Idenitification.Add(coarseSolution.Mapping.GetTotalNoOfDOFs()); } else if (finestSolution is SinglePhaseField) { SinglePhaseField _coarseSolution = (SinglePhaseField)coarseSolution; SinglePhaseField _finestSolution = (SinglePhaseField)finestSolution; SinglePhaseField injectedSolution = new SinglePhaseField(_finestSolution.Basis, Identification + "-inj-" + iLevel); InjectDGField(Fine2CoarseMapS[iLevel], injectedSolution, _coarseSolution); fields_Identification.Add(injectedSolution); dofs_Idenitification.Add(coarseSolution.Mapping.GetTotalNoOfDOFs()); } else { throw new NotSupportedException("DG field type '" + finestSolution.GetType().FullName + "' not supported, Identification is '" + finestSolution.Identification + "'"); } tr.Info(string.Format("done.")); //Console.WriteLine("done."); } fields_Identification.Add(finestSolution); injectedFields.Add(Identification, fields_Identification); DOFs.Add(Identification, dofs_Idenitification); } __DOFs = new Dictionary <string, int[]>(); foreach (var kv in DOFs) { __DOFs.Add(kv.Key, kv.Value.ToArray()); } // compute the errors L2Errors = new Dictionary <string, double[]>(); foreach (string Identification in FieldsToCompare) { double[] L2Error = new double[gDataS.Length - 1]; for (int iLevel = 0; iLevel < gDataS.Length - 1; iLevel++) { //Console.WriteLine("Computing L2 error of '{0}' on level {1} ...", Identification, iLevel); tr.Info(string.Format("Computing L2 error of '{0}' on level {1} ...", Identification, iLevel)); DGField Error = injectedFields[Identification].Last().CloneAs(); DGField injSol = injectedFields[Identification].ElementAt(iLevel); Error.Acc(-1.0, injSol); L2Error[iLevel] = Error.L2Norm(); //Console.WriteLine("done (Error is {0:0.####E-00}).", L2Error[iLevel]); tr.Info(string.Format("done (Error is 
{0:0.####E-00}).", L2Error[iLevel])); } L2Errors.Add(Identification, L2Error); } GridRes = gDataS.Take(gDataS.Length - 1).Select(gd => gd.Cells.h_minGlobal).ToArray(); } }
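// ComputeErrors returns grid resolutions and the corresponding L2 errors against the finest
// solution; a typical follow-up is to estimate the experimental order of convergence from a
// log-log fit of error versus mesh size. A small sketch of that post-processing step
// (a usage suggestion, not part of the method above):
static double EstimatedConvergenceOrder(double[] gridRes, double[] l2Errors) {
    // least-squares slope of log(error) over log(h)
    int n = gridRes.Length;
    double sx = 0, sy = 0, sxx = 0, sxy = 0;
    for (int i = 0; i < n; i++) {
        double x = Math.Log(gridRes[i]);
        double y = Math.Log(l2Errors[i]);
        sx += x; sy += y; sxx += x * x; sxy += x * y;
    }
    return (n * sxy - sx * sy) / (n * sxx - sx * sx);
}
// e.g. errors {1e-2, 2.5e-3, 6.25e-4} on resolutions {0.4, 0.2, 0.1} give a slope of ~2,
// i.e. second-order convergence.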
/// <summary>
/// Loads a BoSSS grid from a grid file; the file type (see <see cref="ImporterTypes"/>) is determined by the file extension.
/// </summary>
public static GridCommons Import(string fileName) {
    using (var tr = new FuncTrace()) {
        int myrank;
        int size;
        csMPI.Raw.Comm_Rank(csMPI.Raw._COMM.WORLD, out myrank);
        csMPI.Raw.Comm_Size(csMPI.Raw._COMM.WORLD, out size);

        ImporterTypes importerType = default(ImporterTypes);
        if (myrank == 0) {
            importerType = GetImporterType(fileName);
        }
        importerType = importerType.MPIBroadcast(0);

        IGridImporter importer;
        {
            tr.Info(string.Format("Loading {0} file '{1}'...", importerType.ToString(), fileName));
            using (new BlockTrace("Import", tr)) {
                switch (importerType) {
                    case ImporterTypes.Gambit:
                        if (size > 1) {
                            throw new NotSupportedException("Not supported in parallel mode");
                        }
                        GambitNeutral gn = new GambitNeutral(fileName);
                        if (gn.BoSSSConversionNeccessary()) {
                            gn = gn.ToLinearElements();
                        }
                        importer = gn;
                        break;

                    case ImporterTypes.CGNS:
                        if (size > 1) {
                            throw new NotSupportedException("Not supported in parallel mode");
                        }
                        importer = new Cgns(fileName);
                        break;

                    case ImporterTypes.Gmsh:
                        importer = new Gmsh(fileName);
                        break;

                    default:
                        throw new NotImplementedException();
                }
            }
            tr.Info("Converting to BoSSS grid ...");
        }

        GridCommons grid;
        using (new BlockTrace("Conversion", tr)) {
            grid = importer.GenerateBoSSSGrid();
        }
        return grid;
    }
}
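// Only rank 0 inspects the file name above; the result is then broadcast so that all ranks
// agree on the importer type. The dispatch itself is a plain extension switch, sketched here
// with a hypothetical helper (the real GetImporterType may recognize additional endings):
static ImporterTypes GuessImporterType(string fileName) {
    switch (Path.GetExtension(fileName).ToLowerInvariant()) {
        case ".neu":  return ImporterTypes.Gambit;  // Gambit neutral file
        case ".cgns": return ImporterTypes.CGNS;
        case ".msh":  return ImporterTypes.Gmsh;
        default:
            throw new NotSupportedException("Unknown grid file extension: " + fileName);
    }
}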
static internal void ComputeMassMatrixBlocks( IEnumerable <SpeciesId> _SpeciesIds, out Dictionary <SpeciesId, MassMatrixBlockContainer> Result, Basis b, XDGSpaceMetrics homie) { using (var tracer = new FuncTrace()) { if (b is XDGBasis) { throw new ArgumentException(); } var ctx = homie.GridDat; Result = new Dictionary <SpeciesId, MassMatrixBlockContainer>(); var schemeHelper = homie.XQuadSchemeHelper; int Nnx = b.Length; int quadorder = homie.CutCellQuadOrder; // define domains and allocate memory // ================================== foreach (var Species in _SpeciesIds) // loop over species... // interation dom { var _IntegrationDomain = homie.LevelSetRegions.GetSpeciesMask(Species).Intersect(homie.LevelSetRegions.GetCutCellMask()); // domain for mass-matrix blocks (include agglomeration targets) var _BlockDomain = _IntegrationDomain; //.Union(Agg.GetAgglomerator(Species).AggInfo.AllAffectedCells); // alloc mem for blocks var _MassMatrixBlocksSpc = MultidimensionalArray.Create(_BlockDomain.NoOfItemsLocally, Nnx, Nnx); // Subgrid index to cell index int[] _jSub2jCell = _BlockDomain.ItemEnum.ToArray(); // cell to subgrid index //Dictionary<int, int> _jCell2jSub; //if (Agg.GetAgglomerator(Species).AggInfo.AgglomerationPairs.Length > 0) { // _jCell2jSub = new Dictionary<int, int>(); // for (int i = 0; i < _jSub2jCell.Length; i++) { // _jCell2jSub.Add(_jSub2jCell[i], i); // } //} else { // _jCell2jSub = null; //} Result.Add(Species, new MassMatrixBlockContainer() { IntegrationDomain = _IntegrationDomain, MassMatrixBlocks = _MassMatrixBlocksSpc, //jCell2jSub = _jCell2jSub, jSub2jCell = _jSub2jCell }); } // compute blocks // ============== foreach (var Species in _SpeciesIds) { // get quad scheme CellQuadratureScheme scheme = schemeHelper.GetVolumeQuadScheme(Species, IntegrationDomain: Result[Species].IntegrationDomain); // result storage var MassMatrixBlocksSpc = Result[Species].MassMatrixBlocks; tracer.Info("mass matrix quad order: " + quadorder); // compute the products of the basis functions: int BlockCnt = -1; int[] BlockCell = Result[Species].jSub2jCell; CellMask speciesCells = homie.LevelSetRegions.GetSpeciesMask(Species); CellQuadrature.GetQuadrature( new int[] { Nnx, Nnx }, ctx, scheme.Compile(ctx, quadorder), delegate(int i0, int Length, QuadRule QR, MultidimensionalArray EvalResult) { // Del_Evaluate // ~~~~~~~~~~~~~ var BasisVal = b.CellEval(QR.Nodes, i0, Length); EvalResult.Multiply(1.0, BasisVal, BasisVal, 0.0, "ikmn", "ikm", "ikn"); }, delegate(int i0, int Length, MultidimensionalArray ResultsOfIntegration) { // Del_SaveIntegrationResults // ~~~~~~~~~~~~~~~~~~~~~~~~~~ for (int i = 0; i < Length; i++) { int jCell = i0 + i; BlockCnt++; // insert ID block in agglomeration target cells (if necessary): // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - var Block = MassMatrixBlocksSpc.ExtractSubArrayShallow(BlockCnt, -1, -1); while (BlockCell[BlockCnt] < jCell) { // agglomeration source/target cell that is not cut // mass matrix is identity (full) or zero (void) Block.Clear(); if (speciesCells.Contains(BlockCell[BlockCnt])) { // cell is full for (int nn = 0; nn < Nnx; nn++) { Block[nn, nn] = 1.0; } } BlockCnt++; Block = MassMatrixBlocksSpc.ExtractSubArrayShallow(BlockCnt, -1, -1); } // store computed block // - - - - - - - - - - - Debug.Assert(BlockCell[BlockCnt] == jCell); MassMatrixBlocksSpc.ExtractSubArrayShallow(BlockCnt, -1, -1) .Set(ResultsOfIntegration.ExtractSubArrayShallow(i, -1, -1)); #if DEBUG for (int n = 0; n < Nnx; n++) { for (int m = 0; m < Nnx; m++) { 
Debug.Assert(Block[n, m] == Block[m, n]); } } #endif } }).Execute(); // ------------------------------------ quadrature end. BlockCnt++; while (BlockCnt < MassMatrixBlocksSpc.GetLength(0)) { // agglomeration source/target cell that is not cut // mass matrix is identity (full) or zero (void) var Block = MassMatrixBlocksSpc.ExtractSubArrayShallow(BlockCnt, -1, -1); Block.Clear(); if (speciesCells.Contains(BlockCell[BlockCnt])) { // cell is full for (int nn = 0; nn < Nnx; nn++) { Block[nn, nn] = 1.0; } } BlockCnt++; } /* * // test mass matrix for positive definiteness * { * int JSUB = MassMatrixBlocksSpc.GetLength(0); * SubGrid Idom = null; * * int failCount = 0; * var PosDefiniteTest = new FullMatrix(Nnx, Nnx); * * for (int jsub = 0; jsub < JSUB; jsub++) { * PosDefiniteTest.Clear(); * PosDefiniteTest.Acc(MassMatrixBlocksSpc.ExtractSubArrayShallow(jsub, -1, -1), 1.0); * * try { * PosDefiniteTest.Clear(); * PosDefiniteTest.Acc(MassMatrixBlocksSpc.ExtractSubArrayShallow(jsub, -1, -1), 1.0); * PosDefiniteTest.InvertSymmetrical(); * * //PosDefiniteTest.Clear(); * //PosDefiniteTest.AccEye(1.0); * //PosDefiniteTest.Acc(MassMatrixBlocksSpc.ExtractSubArrayShallow(jsub, -1, -1), -1.0); * //PosDefiniteTest.InvertSymmetrical(); * } catch (ArithmeticException ae) { * if (Idom == null) * Idom = new SubGrid(scheme.Domain); * * int jCell = Idom.SubgridIndex2LocalCellIndex[jsub]; * long Gid = Tracker.GridDat.Cells.GetCell(jCell).GlobalID; * * * double volFrac = Tracker.GetSpeciesVolume(jCell, Species)/ctx.Cells.GetCellVolume(jCell); * * var errString = string.Format("Indefinite mass matrix in cell: globalId = {0}, local index = {1}, species {2}; \n cell volume fraction: {3};\n [{4}]", Gid, jCell, Tracker.GetSpeciesName(Species), volFrac, ae.Message); * tracer.Logger.Error(errString); * //Console.WriteLine(errString); * failCount++; * } * } * * if (failCount > 0) { * var errString = string.Format("Indefinite mass matrix in {0} of {1} cut cells", failCount, JSUB); * tracer.Logger.Error(errString); * Console.WriteLine(errString); * } else { * Console.WriteLine("No indefinite mass matrix blocks"); * } * * } * // */ // backup before agglomeration (required if we wanna treat e.g. velocity in DG and pressure in XDG) //MultidimensionalArray[] massMatrixBlocksB4Agglom = new MultidimensionalArray[Result[Species].jSub2jCell.Length]; //Result[Species].MassMatrixBlocks_B4Agglom = massMatrixBlocksB4Agglom; //var _jCell2jSub = Result[Species].jCell2jSub; //int J = ctx.Cells.NoOfLocalUpdatedCells; //foreach (var pair in Agg.GetAgglomerator(Species).AggInfo.AgglomerationPairs) { // foreach (int jCell in new int[] { pair.jCellSource, pair.jCellTarget }) { // create a backup of source and target cell // if (jCell >= J) // continue; // int jSub = _jCell2jSub[jCell]; // if (massMatrixBlocksB4Agglom[jSub] == null) { // massMatrixBlocksB4Agglom[jSub] = MassMatrixBlocksSpc.ExtractSubArrayShallow(jSub, -1, -1).CloneAs(); // } // } //} // agglomeration //Agg.GetAgglomerator(Species).ManipulateMassMatrixBlocks(MassMatrixBlocksSpc, b, Result[Species].jSub2jCell, Result[Species].jCell2jSub); //throw new NotImplementedException("todo"); } } }
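// Each block computed above is the cut-cell mass matrix M[m, n] = ∫ φ_m φ_n over the part of
// the cell occupied by one species; uncut full cells get the identity, void cells get zero
// (orthonormal basis assumed). The quadrature kernel, reduced to a single cell and written
// without the BoSSS vectorization (illustrative only):
static double[,] MassMatrixBlock(double[,] basisValues /* [node, mode] */, double[] weights) {
    int K = weights.Length;                 // number of quadrature nodes
    int N = basisValues.GetLength(1);       // number of basis functions
    double[,] M = new double[N, N];
    for (int k = 0; k < K; k++) {
        for (int m = 0; m < N; m++) {
            for (int n = 0; n < N; n++) {
                // M[m, n] += w_k * φ_m(x_k) * φ_n(x_k)
                M[m, n] += weights[k] * basisValues[k, m] * basisValues[k, n];
            }
        }
    }
    return M;   // symmetric by construction, as asserted in the DEBUG block above
}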
/// <summary> /// /// </summary> protected override double RunSolverOneStep(int TimestepNo, double phystime, double dt) { using (var tr = new FuncTrace()) { tr.Info("Performing time iteration No. " + TimestepNo); // Set dt and SIMPLEStatus switch (Control.Algorithm) { case SolutionAlgorithms.Steady_SIMPLE: { dt = 0.0; break; } case SolutionAlgorithms.Unsteady_SIMPLE: { dt = SolverConf.dt; SIMPLEStatus.NextTimestep(); break; } } // some console-output if (base.MPIRank == 0) { switch (Control.Algorithm) { case SolutionAlgorithms.Steady_SIMPLE: Console.WriteLine("Starting steady calculation...\n"); Console.WriteLine("Starting SIMPLE-Iterations...\n"); break; case SolutionAlgorithms.Unsteady_SIMPLE: Console.WriteLine("Starting time step #" + TimestepNo + "...\n"); Console.WriteLine("Starting SIMPLE-Iterations...\n"); break; default: throw new NotImplementedException(); } } do { // do one SIMPLE iteration SIMPLEStatus.NextSIMPLEIteration(); SIMPLEStep.OverallIteration(ref SIMPLEStatus, dt, ResLogger); TerminationKey = WorkingSet.CheckForNanOrInf(Control); if (TerminationKey) { m_Logger.Warn("Found Nan in some field."); if (base.MPIRank == 0) { Console.WriteLine("ERROR: Found Nan in some field."); } Console.ReadKey(); } if ((Control.PhysicsMode == PhysicsMode.LowMach) && (SolverConf.Control.As <LowMachSIMPLEControl>().EdgeTagsNusselt != null)) { CalculateNusselt(SIMPLEStatus.Timestep, base.GridData, WorkingSet.Temperature.Current, Control); } // save to database if (SIMPLEStatus.SaveStep) { SaveToDatabase(SIMPLEStatus.Timestep, phystime); } // calculate errors int QuadDegreePressure = 20; int QuadDegreeVel = 20; ResLogger.ComputeL2Error(WorkingSet.Pressure, Control.AnalyticPressure, QuadDegreePressure, "p_ana"); ResLogger.ComputeL2Error(WorkingSet.Velocity.Current[0], Control.AnalyticVelocityX, QuadDegreeVel, "u_ana"); ResLogger.ComputeL2Error(WorkingSet.Velocity.Current[1], Control.AnalyticVelocityY, QuadDegreeVel, "v_ana"); if (SolverConf.SpatialDimension == 3) { ResLogger.ComputeL2Error(WorkingSet.Velocity.Current[2], Control.AnalyticVelocityZ, QuadDegreeVel, "w_ana"); } switch (Control.PhysicsMode) { case PhysicsMode.Incompressible: break; case PhysicsMode.LowMach: LowMachSIMPLEControl lowMachConf = Control as LowMachSIMPLEControl; ResLogger.ComputeL2Error(WorkingSet.Temperature.Current, lowMachConf.AnalyticTemperature, QuadDegreeVel, "T_ana"); ResLogger.ComputeL2Error(WorkingSet.Rho, lowMachConf.AnalyticDensity, QuadDegreeVel, "Rho_ana"); break; case PhysicsMode.Multiphase: MultiphaseSIMPLEControl multiphaseConf = Control as MultiphaseSIMPLEControl; ResLogger.ComputeL2Error(WorkingSet.Phi.Current, multiphaseConf.AnalyticLevelSet, QuadDegreeVel, "Phi_ana"); ResLogger.ComputeL2Error(WorkingSet.Rho, multiphaseConf.AnalyticDensity, QuadDegreeVel, "Rho_ana"); break; default: throw new NotImplementedException(); } // terminate SIMPLE in case of divergence if (ResLogger.Residuals["L2Norm p'"] > 1.0E+10) { TerminationKey = true; } // push residual logger to next iteration switch (Control.Algorithm) { case SolutionAlgorithms.Steady_SIMPLE: ResLogger.NextIteration(false); break; case SolutionAlgorithms.Unsteady_SIMPLE: ResLogger.NextIteration(true); break; default: throw new NotImplementedException(); } }while (!SIMPLEStatus.IsConverged && !SIMPLEStatus.TerminateSIMPLE && !TerminationKey); // determine cause for end of SIMPLE iterations if (SIMPLEStatus.IsConverged) { tr.Info("Solution converged."); } else if (SIMPLEStatus.TerminateSIMPLE) { if (SIMPLEStatus.SIMPLEStepNo == Control.MaxNoSIMPLEsteps) 
{ SIMPLEStatus.CntMaxNoSIMPLEsteps++; m_Logger.Warn("MaxNoSIMPLEsteps are reached."); } else { m_Logger.Warn("Unknown reason for terminating SIMPLE iterations - should not happen."); } } else { m_Logger.Error("Solution diverged."); } // save the new timestep switch (Control.Algorithm) { case SolutionAlgorithms.Steady_SIMPLE: break; case SolutionAlgorithms.Unsteady_SIMPLE: WorkingSet.Push(Control); ResLogger.NextTimestep(false); break; default: throw new NotImplementedException(); } // some console-output if (SIMPLEStatus.IsConverged) { Console.WriteLine("\nINFO: Done SIMPLE-Iterations - Solution converged.\n"); } else if (SIMPLEStatus.SIMPLEStepNo == Control.MaxNoSIMPLEsteps) { Console.WriteLine("\nWARNING: Done SIMPLE-Iterations - Maximum number of SIMPLE steps was reached.\n"); } else { Console.WriteLine("\nERROR: Calculation was terminated - Solution diverged.\n"); } switch (Control.Algorithm) { case SolutionAlgorithms.Steady_SIMPLE: Console.WriteLine("Done steady calculation."); break; case SolutionAlgorithms.Unsteady_SIMPLE: Console.WriteLine("Done time step #" + TimestepNo + ".\n"); break; default: throw new NotImplementedException(); } //LogEnergyOrrSommerfeld(TimestepNo, phystime, dt); if (Control.EdgeTagsDragAndLift != null) { CalculateDragAndLift(phystime); } //Log temperature history tall cavity //LogTemperature(phystime, this.WorkingSet.Temperature.Current); return(dt); } }
static void ComputeErrors(Func <ConventionalDGField, ConventionalDGField, double> distFunc, IEnumerable <string> FieldsToCompare, IEnumerable <ITimestepInfo> timestepS, out double[] GridRes, out Dictionary <string, int[]> __DOFs, out Dictionary <string, double[]> Errors, out Guid[] timestepIds) { using (var tr = new FuncTrace()) { if (FieldsToCompare == null || FieldsToCompare.Count() <= 0) { throw new ArgumentException("empty list of field names."); } if (timestepS == null || timestepS.Count() < 1) { throw new ArgumentException("requiring at least two different solutions."); } // load the DG-Fields List <IEnumerable <DGField> > fields = new List <IEnumerable <DGField> >(); // 1st index: grid / 2nd index: enumeration int i = 1; foreach (var timestep in timestepS) { //Console.WriteLine("Loading timestep {0} of {1}, ({2})...", i, timestepS.Count(), timestep.ID); fields.Add(timestep.Fields); i++; //Console.WriteLine("done (Grid has {0} cells).", fields.Last().First().GridDat.CellPartitioning.TotalLength); } // sort according to grid resolution { var s = fields.OrderBy(f => f.First().GridDat.CellPartitioning.TotalLength).ToArray(); var orgfields = fields.ToArray(); fields.Clear(); fields.AddRange(s); s = null; // filter equal grids: while (fields.Count >= 2 && (fields[fields.Count - 1].First().GridDat.CellPartitioning.TotalLength == fields[fields.Count - 2].First().GridDat.CellPartitioning.TotalLength)) { fields.RemoveAt(fields.Count - 2); } // extract timestep Id's timestepIds = new Guid[fields.Count]; for (int z = 0; z < timestepIds.Length; z++) { int idx = orgfields.IndexOf(fields[z], (f1, f2) => object.ReferenceEquals(f1, f2)); timestepIds[z] = timestepS.ElementAt(idx).ID; } } // grids and resolution GridData[] gDataS = fields.Select(fc => GridHelper.ExtractGridData(fc.First().GridDat)).ToArray(); GridRes = gDataS.Take(gDataS.Length - 1).Select(gd => gd.Cells.h_minGlobal).ToArray(); // compute the errors Errors = new Dictionary <string, double[]>(); __DOFs = new Dictionary <string, int[]>(); foreach (string Identification in FieldsToCompare) { double[] L2Error = new double[gDataS.Length - 1]; int[] dof = new int[gDataS.Length - 1]; for (int iLevel = 0; iLevel < gDataS.Length - 1; iLevel++) { //Console.WriteLine("Computing L2 error of '{0}' on level {1} ...", Identification, iLevel); tr.Info(string.Format("Computing L2 error of '{0}' on level {1} ...", Identification, iLevel)); ConventionalDGField fine = (ConventionalDGField)(fields.Last().Single(fi => fi.Identification == Identification)); ConventionalDGField coarse = (ConventionalDGField)(fields.ElementAt(iLevel).Single(fi => fi.Identification == Identification)); L2Error[iLevel] = distFunc(coarse, fine); dof[iLevel] = coarse.Mapping.TotalLength; //Console.WriteLine("done (Error is {0:0.####E-00}).", L2Error[iLevel]); tr.Info(string.Format("done (Error is {0:0.####E-00}).", L2Error[iLevel])); } Errors.Add(Identification, L2Error); __DOFs.Add(Identification, dof); } } }