/// <summary>Creates a type-inference engine bound to the given compilation.</summary>
/// <param name="compilation">Compilation to infer against; must not be null.</param>
public TypeInference(ICompilation compilation)
{
	// Fail fast so later members can rely on a non-null compilation.
	if (compilation == null)
		throw new ArgumentNullException(nameof(compilation));
	this.compilation = compilation;
	// Reuse the compilation-wide Conversions cache rather than building a new one.
	this.conversions = Conversions.Get(compilation);
}
/// <summary>Creates a type-inference engine for a resolve context.</summary>
/// <param name="context">Resolve context; must not be null.</param>
/// <param name="conversions">Optional Conversions instance; resolved from the context when omitted.</param>
public TypeInference(ITypeResolveContext context, Conversions conversions = null)
{
	if (context == null)
		throw new ArgumentNullException(nameof(context));
	this.context = context;
	// Fall back to the context's shared Conversions instance when none was supplied.
	this.conversions = conversions ?? Conversions.Get(context);
}
/// <summary>Internal fast-path constructor: callers guarantee non-null arguments.</summary>
internal TypeInference(ICompilation compilation, Conversions conversions)
{
	// Debug-only checks instead of throws - this overload is reached only by trusted internal code.
	Debug.Assert(compilation != null);
	Debug.Assert(conversions != null);
	this.conversions = conversions;
	this.compilation = compilation;
}
/// <summary>Creates a resolver rooted at the compilation's main assembly.</summary>
/// <param name="compilation">Compilation to resolve in; must not be null.</param>
public CSharpResolver(ICompilation compilation)
{
	if (compilation == null)
		throw new ArgumentNullException(nameof(compilation));
	this.compilation = compilation;
	this.conversions = Conversions.Get(compilation);
	// Start at the root context of the main assembly; callers narrow it later.
	this.context = new CSharpTypeResolveContext(compilation.MainAssembly);
}
/// <summary>Creates a resolver for an existing resolve context.</summary>
/// <param name="context">Context carrying the compilation and (optionally) a current type; must not be null.</param>
public CSharpResolver(CSharpTypeResolveContext context)
{
	if (context == null)
		throw new ArgumentNullException(nameof(context));
	this.context = context;
	this.compilation = context.Compilation;
	this.conversions = Conversions.Get(this.compilation);
	// Seed the type-definition cache only when the context already carries a current type.
	var currentTypeDef = context.CurrentTypeDefinition;
	if (currentTypeDef != null)
		currentTypeDefinitionCache = new TypeDefinitionCache(currentTypeDef);
}
/// <summary>
/// Rewrites captured-variable references in <paramref name="node"/> using the given
/// display-class variable map, reporting problems into <paramref name="diagnostics"/>.
/// </summary>
internal static BoundNode Rewrite(
	ParameterSymbol targetMethodThisParameter,
	Conversions conversions,
	ImmutableDictionary<string, DisplayClassVariable> displayClassVariables,
	BoundNode node,
	DiagnosticBag diagnostics)
{
	// Factory entry point: build a rewriter for this scope and run it over the tree.
	return new CapturedVariableRewriter(targetMethodThisParameter, conversions, displayClassVariables, diagnostics).Visit(node);
}
/// <summary>Plain field capture; argument validation is the caller's responsibility.</summary>
private CapturedVariableRewriter(
	ParameterSymbol targetMethodThisParameter,
	Conversions conversions,
	ImmutableDictionary<string, DisplayClassVariable> displayClassVariables,
	DiagnosticBag diagnostics)
{
	_diagnostics = diagnostics;
	_displayClassVariables = displayClassVariables;
	_conversions = conversions;
	_targetMethodThisParameter = targetMethodThisParameter;
}
/// <summary>
/// Copy constructor: transfers every piece of resolver state verbatim.
/// No validation on purpose - only trusted internal callers reach this overload.
/// </summary>
private CSharpResolver(ICompilation compilation, Conversions conversions, CSharpTypeResolveContext context, bool checkForOverflow, bool isWithinLambdaExpression, TypeDefinitionCache currentTypeDefinitionCache, ImmutableStack<IVariable> localVariableStack, ObjectInitializerContext objectInitializerStack)
{
	this.compilation = compilation;
	this.conversions = conversions;
	this.context = context;
	this.checkForOverflow = checkForOverflow;
	this.isWithinLambdaExpression = isWithinLambdaExpression;
	this.currentTypeDefinitionCache = currentTypeDefinitionCache;
	this.localVariableStack = localVariableStack;
	this.objectInitializerStack = objectInitializerStack;
}
/// <summary>Creates an overload-resolution instance for the given argument list.</summary>
/// <param name="context">Type resolve context; must not be null.</param>
/// <param name="arguments">Resolved arguments; must not be null.</param>
/// <param name="argumentNames">Optional per-argument names; a null array is padded with nulls.</param>
/// <param name="typeArguments">Explicit type arguments; an empty array counts as "none given".</param>
/// <exception cref="ArgumentException">When argumentNames has a different length than arguments.</exception>
public OverloadResolution(ITypeResolveContext context, ResolveResult[] arguments, string[] argumentNames = null, IType[] typeArguments = null)
{
	if (context == null)
		throw new ArgumentNullException(nameof(context));
	if (arguments == null)
		throw new ArgumentNullException(nameof(arguments));
	if (argumentNames == null)
		argumentNames = new string[arguments.Length];
	else if (argumentNames.Length != arguments.Length)
		// Fixed message: the parameter is 'argumentNames', not 'argumentsNames'.
		throw new ArgumentException("argumentNames.Length must be equal to arguments.Length", nameof(argumentNames));
	this.context = context;
	this.arguments = arguments;
	this.argumentNames = argumentNames;
	// keep explicitlyGivenTypeArguments==null when no type arguments were specified
	if (typeArguments != null && typeArguments.Length > 0)
		this.explicitlyGivenTypeArguments = typeArguments;
	this.conversions = new Conversions(context);
}
/// <summary>Creates an overload-resolution instance for the given argument list.</summary>
/// <param name="compilation">Compilation to resolve in; must not be null.</param>
/// <param name="arguments">Resolved arguments; must not be null.</param>
/// <param name="argumentNames">Optional per-argument names; a null array is padded with nulls.</param>
/// <param name="typeArguments">Explicit type arguments; an empty array counts as "none given".</param>
/// <param name="conversions">Optional Conversions instance; resolved from the compilation when omitted.</param>
/// <exception cref="ArgumentException">When argumentNames has a different length than arguments.</exception>
public OverloadResolution(ICompilation compilation, ResolveResult[] arguments, string[] argumentNames = null, IType[] typeArguments = null, Conversions conversions = null)
{
	if (compilation == null)
		throw new ArgumentNullException(nameof(compilation));
	if (arguments == null)
		throw new ArgumentNullException(nameof(arguments));
	if (argumentNames == null)
		argumentNames = new string[arguments.Length];
	else if (argumentNames.Length != arguments.Length)
		// Fixed message: the parameter is 'argumentNames', not 'argumentsNames'.
		throw new ArgumentException("argumentNames.Length must be equal to arguments.Length", nameof(argumentNames));
	this.compilation = compilation;
	this.arguments = arguments;
	this.argumentNames = argumentNames;
	// keep explicitlyGivenTypeArguments==null when no type arguments were specified
	if (typeArguments != null && typeArguments.Length > 0)
		this.explicitlyGivenTypeArguments = typeArguments;
	this.conversions = conversions ?? Conversions.Get(compilation);
	this.AllowExpandingParams = true;
}
/// <summary>
/// Renders a DXF XDATA value as a display string. Scalar variants (String, Double,
/// Decimal, Short, Integer) are converted via VB's Conversions.ToString. Variant
/// type 8201 - presumably 8192 (array flag) + 9 (Object), i.e. an object array;
/// TODO confirm - is rendered as a space-separated list of its elements (the
/// leading space is dropped via Strings.Mid's 1-based index 2). Any other variant
/// type yields "Unbekannt" (German for "unknown"; runtime string left untouched).
/// </summary>
public static string BkDXFXData_ValueToString(object vvarXDataValue) { switch (Information.VarType(RuntimeHelpers.GetObjectValue(vvarXDataValue))) { case VariantType.String: return(Conversions.ToString(vvarXDataValue)); case VariantType.Double: return(Conversions.ToString(vvarXDataValue)); case VariantType.Decimal: return(Conversions.ToString(vvarXDataValue)); case (VariantType)8201: { int num = Information.LBound((Array)vvarXDataValue); int num2 = Information.UBound((Array)vvarXDataValue); string dstrValue = default(string); for (int dlngIdx = num; dlngIdx <= num2; dlngIdx = checked (dlngIdx + 1)) { dstrValue = dstrValue + " " + Conversions.ToString(NewLateBinding.LateIndexGet(vvarXDataValue, new object[1] { dlngIdx }, null)); } return(Strings.Mid(dstrValue, 2)); } case VariantType.Short: return(Conversions.ToString(vvarXDataValue)); case VariantType.Integer: return(Conversions.ToString(vvarXDataValue)); default: return("Unbekannt"); } }
/// <summary>
/// Builds a validated wallet balance from its wire contract.
/// </summary>
/// <param name="contract">Raw contract; must be non-null with address, asset id and block set.</param>
/// <param name="assetAccuracy">Decimal precision used to convert the raw balance to coins.</param>
/// <exception cref="ResultValidationException">On any missing/invalid field or unparsable balance.</exception>
public WalletBalance(WalletBalanceContract contract, int assetAccuracy)
{
	// Validate the raw contract before copying anything onto this instance.
	if (contract == null)
	{
		throw new ResultValidationException("Wallet not found");
	}
	if (string.IsNullOrWhiteSpace(contract.Address))
	{
		throw new ResultValidationException("Address is required", contract.Address);
	}
	if (string.IsNullOrWhiteSpace(contract.AssetId))
	{
		throw new ResultValidationException("Asset ID is required", contract.AssetId);
	}
	if (contract.Block == 0)
	{
		throw new ResultValidationException("Block is required", contract.Block);
	}

	Address = contract.Address;
	AssetId = contract.AssetId;
	Block = contract.Block;
	// A missing compromised flag is treated as "not compromised".
	IsAddressCompromised = contract.IsAddressCompromised ?? false;

	try
	{
		Balance = Conversions.CoinsFromContract(contract.Balance, assetAccuracy);
		if (Balance <= 0)
		{
			throw new ResultValidationException("Balance should be positive number", contract.Balance);
		}
	}
	catch (ConversionException ex)
	{
		// Wrap parse failures so callers see a uniform validation error.
		throw new ResultValidationException("Failed to parse balance", contract.Balance, ex);
	}
}
/// <summary>
/// Computes the "next code" for a table: runs <paramref name="sql"/>; when it returns
/// no rows the code is "1", otherwise <paramref name="selectmax"/> is evaluated as a
/// scalar. Errors are shown in a message box; the connection is always closed.
/// NOTE(review): <paramref name="CODE"/> is passed by value, so the computed value
/// never reaches the caller - it should presumably be a 'ref' parameter (left as-is
/// here to preserve the signature; confirm against call sites).
/// NOTE(review): a MySqlDataReader is opened on the connection before the adapter's
/// Fill on the same connection - many connector versions reject a second active
/// command while a reader is open; verify against the MySql.Data version in use.
/// </summary>
public static void ClaculNewCode(string sql, string selectmax, string CODE) { try { if (conn.State == ConnectionState.Closed) { conn.Open(); } MySqlDataAdapter mySqlDataAdapter = new MySqlDataAdapter(sql, conn); MySqlCommand mySqlCommand = new MySqlCommand(sql, conn); MySqlDataReader mySqlDataReader = mySqlCommand.ExecuteReader(); DataTable dataTable = new DataTable(); mySqlDataAdapter.Fill(dataTable); int num = checked (dataTable.Rows.Count - 1); if (num == -1) { CODE = "1"; } else { mySqlCommand.CommandText = selectmax; CODE = Conversions.ToString(mySqlCommand.ExecuteScalar()); } mySqlDataReader.Close(); mySqlDataReader.Dispose(); } catch (Exception ex) { ProjectData.SetProjectError(ex); Exception ex2 = ex; MessageBox.Show(ex2.Message.ToString()); ProjectData.ClearProjectError(); } finally { conn.Close(); } }
/// <summary>
/// Assigns every mini-unit row shown in dgvMiniUnit to every CHECKED booking row in
/// dgvAllBookings (cell 0 is the checkbox), skipping unit/booking pairs listed in
/// shouldNotAssignList ("unitId|bookingId" keys), and writes an activity-log entry
/// per assignment. After each unit's inner loop, one Excel pending-sync record is
/// queued.
/// NOTE(review): mbRow is read AFTER the inner loop, so the sync record references
/// only the LAST checked booking of that pass (or the default value when no row was
/// checked) - confirm this is intended rather than one sync record per assignment.
/// </summary>
private void AssignUnitsToBooking() { string muIDmbID; int railListID = 0; var bus = new MinibookingMiniUnitAssignmentService(); VW_UnitSourceMiniUnit muRow; var mbRow = default(VW_BookingMinibooking); for (int i = 0, loopTo = dgvMiniUnit.Rows.Count - 1; i <= loopTo; i++) { muRow = (VW_UnitSourceMiniUnit)dgvMiniUnit.Rows[i].DataBoundItem; railListID = (int)muRow.RecordKey; for (int s = 0, loopTo1 = dgvAllBookings.Rows.Count - 1; s <= loopTo1; s++) { if (Conversions.ToBoolean(Operators.ConditionalCompareObjectEqual(dgvAllBookings.Rows[s].Cells[0].Value, true, false))) { mbRow = (VW_BookingMinibooking)dgvAllBookings.Rows[s].DataBoundItem; muIDmbID = muRow.MiniUnitId.ToString() + "|" + mbRow.MiniBookingId.ToString(); if (!shouldNotAssignList.Contains(muIDmbID)) { bus.Insert(mbRow.MiniBookingId, muRow.MiniUnitId); EmployeeActivityLogService.Insert(My.MyProject.Forms.FrmLoginWindow._userID, "UnitId: " + muRow.MiniUnitId.ToString() + " | MbId: " + mbRow.MiniBookingId.ToString(), "ASSIGN", "UNIT", "Unit Source " + muRow.MiniunitNumber + " assigned to booking " + mbRow.BookingNumber); } } } var cep = new CeresExcelPendingSync(); cep.RecordId = mbRow.MiniBookingId; cep.EmployeeId = My.MyProject.Forms.FrmLoginWindow._userID; cep.SyncType = "UNIT ASSIGNMENT"; cep.ExtraInfo = ""; cep.CurrentStatus = "QUEUE"; cep.QtyOfAttempts = 0; cep.SyncMessage = "ADDED TO QUEUE"; cep.CreatedAt = DateAndTime.Now; CeresExcService.AddOrUpdate(cep); } }
/// <summary>
/// Builds and shows the "rptKreditne" Crystal report: loads company data from the
/// KORISNIK table, loads the .rpt from the application's Izvjestaji folder, binds
/// the S_OD_REKAP_KREDITNE dataset, fills the report parameters, then activates a
/// "Pregled" (preview) work item and closes the parent form.
/// NOTE(review): str5 (MBKORISNIK), str8 (KONTAKTTELEFON) and str4 (KONTAKTFAX) are
/// fetched but never passed to the report, and BROJRACUNA is always set to the
/// empty string str3 - confirm whether these are intentionally unused.
/// </summary>
private void UltraButton2_Click(object sender, EventArgs e) { this.ParentForm.DialogResult = System.Windows.Forms.DialogResult.OK; KORISNIKDataSet dataSet = new KORISNIKDataSet(); new KORISNIKDataAdapter().Fill(dataSet); string str6 = Conversions.ToString(dataSet.KORISNIK.Rows[0]["korisnik1naziv"]); string str5 = Conversions.ToString(dataSet.KORISNIK.Rows[0]["MBKORISNIK"]); string str = Conversions.ToString(dataSet.KORISNIK.Rows[0]["korisnik1adresa"]); string str2 = Conversions.ToString(dataSet.KORISNIK.Rows[0]["korisnik1mjesto"]); string str8 = Conversions.ToString(dataSet.KORISNIK.Rows[0]["KONTAKTTELEFON"]); string str4 = Conversions.ToString(dataSet.KORISNIK.Rows[0]["KONTAKTFAX"]); string str3 = ""; ReportDocument document = new ReportDocument(); document.Load(System.Windows.Forms.Application.StartupPath + @"\Izvjestaji\rptKreditne.rpt"); if (this.optKreditor.Checked) { document.ReportDefinition.Sections[4].SectionFormat.EnableNewPageAfter = true; } document.SetDataSource(this.S_OD_REKAP_KREDITNEDataSet1); document.SetParameterValue("OBRACUN", this.obracun.Text + " -isplata plaće za " + DB.MjesecNazivPlatna(this.mjesec) + "/" + Conversions.ToString(this.godina) + "."); document.SetParameterValue("BROJRACUNA", str3); document.SetParameterValue("ADRESA2", str2); document.SetParameterValue("ADRESA", str); document.SetParameterValue("ustanova", str6); ExtendedWindowWorkspace workspace = new ExtendedWindowWorkspace(); PreviewReportWorkItem item = this.Controller.WorkItem.Items.Get <PreviewReportWorkItem>("Pregled"); if (item == null) { item = this.Controller.WorkItem.Items.AddNew <PreviewReportWorkItem>("Pregled"); } item.Izvjestaj = document; item.Activate(); item.Show(item.Workspaces["main"]); this.ParentForm.Close(); }
/// <summary>
/// Endless shield-regeneration loop, polled once per second (0x3e8 = 1000 ms).
/// Counts consecutive ticks during which the hero is not laser-attacking; after 5
/// such ticks it regenerates the shield by shdStep per tick (clamped to max),
/// sending an "0|A|SHD|current|max" packet and updating the local hero state each
/// time. Any attack resets the counter; a full shield also resets it.
/// NOTE(review): the method never returns - it is presumably run on a dedicated
/// thread; confirm at the call site.
/// </summary>
public static void monitorSHD() { while (true) { if (!Module1.mainHero.getLaserAttacking()) { shdCount++; } else { shdCount = 0; } if (shdCount >= 5) { if (Module1.mainHero.getShield() < Module1.mainHero.getMaxShield()) { int num = Module1.mainHero.getMaxShield() - Module1.mainHero.getShield(); if (num < shdStep) { int num2 = Module1.mainHero.getMaxShield(); initConnection.sendPacket("0|A|SHD|" + Conversions.ToString(num2) + "|" + Conversions.ToString(Module1.mainHero.getMaxShield())); Module1.mainHero.setShield(num2, Module1.mainHero.getMaxShield()); } else { int num3 = Module1.mainHero.getShield() + shdStep; initConnection.sendPacket("0|A|SHD|" + Conversions.ToString(num3) + "|" + Conversions.ToString(Module1.mainHero.getMaxShield())); Module1.mainHero.setShield(num3, Module1.mainHero.getMaxShield()); } } else { shdCount = 0; } } Thread.Sleep(0x3e8); } }
/// <summary>
/// Captures a collation request. Names of the form "&lt;prefix&gt;&lt;lcid&gt;[_&lt;options&gt;]"
/// (prefix = CultureInfoCollationPrefix) encode a culture-specific collation whose
/// LCID and CompareOptions are parsed out; everything else keeps the default
/// ordinal-ignore-case comparison.
/// </summary>
public SQLiteCollationNeededEventArgs(SQLiteDatabase database, string collationName)
{
	if (database == null)
	{
		throw new ArgumentNullException(nameof(database));
	}
	if (collationName == null)
	{
		throw new ArgumentNullException(nameof(collationName));
	}
	Database = database;
	CollationName = collationName;
	CollationOptions = CompareOptions.OrdinalIgnoreCase; // default is case insensitive
	if (CollationName.Length > 2 && CollationName.StartsWith(CultureInfoCollationPrefix, StringComparison.OrdinalIgnoreCase))
	{
		// Split "<prefix><lcid>[_<options>]" at the first '_' after the prefix.
		int separator = CollationName.IndexOf('_', CultureInfoCollationPrefix.Length);
		string lcidText;
		if (separator < 0)
		{
			lcidText = CollationName.Substring(CultureInfoCollationPrefix.Length);
		}
		else
		{
			lcidText = CollationName.Substring(CultureInfoCollationPrefix.Length, separator - CultureInfoCollationPrefix.Length);
			// Anything after '_' is an optional CompareOptions override.
			if (Conversions.TryChangeType(CollationName.Substring(separator + 1), out CompareOptions parsedOptions))
			{
				CollationOptions = parsedOptions;
			}
		}
		if (int.TryParse(lcidText, out int lcid))
		{
			CollationCulture = CultureInfo.GetCultureInfo(lcid); // don't handle exception on purpose, we want the user to be aware of that issue
		}
	}
}
/// <summary>
/// Thread-safe setter for a control's Text property via VB late binding: when the
/// target control reports InvokeRequired, the call is marshaled onto the UI thread
/// by late-calling Invoke with a delegate back to this method (copy-back flags mark
/// which arguments Invoke may have rewritten); otherwise "Text" is late-set
/// directly. The target is typed 'object' - presumably always a WinForms control;
/// TODO confirm.
/// </summary>
public void method_8(string string_1, object object_0) { object[] objArr2; if (Conversions.ToBoolean(NewLateBinding.LateGet(object_0, null, "InvokeRequired", new object[0], null, null, null))) { object[] objArr1 = new object[] { new QuestForm.GDelegate7(method_8), string_1, RuntimeHelpers.GetObjectValue(object_0) }; objArr2 = objArr1; bool[] flagArr = new bool[] { false, true, true }; NewLateBinding.LateCall(object_0, null, "Invoke", objArr2, null, null, flagArr, true); if (flagArr[1]) string_1 = (string)Conversions.ChangeType(RuntimeHelpers.GetObjectValue(objArr2[1]), typeof(string)); if (!flagArr[2]) return; object_0 = RuntimeHelpers.GetObjectValue(objArr2[2]); return; } objArr2 = new object[] { string_1 }; NewLateBinding.LateSet(object_0, null, "Text", objArr2, null, null); }
/// <summary>
/// Converts an address string to the corresponding value.
/// </summary>
/// <param name="context">Type descriptor context; may be null when the framework probes the converter.</param>
/// <param name="culture">Globalization info.</param>
/// <param name="value">The value being converted.</param>
/// <returns>The converted value.</returns>
public override Object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, Object value)
{
	DataType dataType = null;
	Boolean isHex = false;

	// Fix: the framework legitimately calls ConvertFrom with a null context or a null
	// context.Instance; the original dereferenced both unconditionally (NRE risk).
	if (context?.Instance is AddressItem addressItem)
	{
		dataType = addressItem.DataType;
		isHex = addressItem.IsValueHex;
	}

	// Only strings with a known data type can be parsed here; everything else defers to the base converter.
	String valueString = value as String;
	if (dataType == (DataType)null || valueString == null)
	{
		return base.ConvertFrom(context, culture, value);
	}

	if (!(isHex ? SyntaxChecker.CanParseHex(dataType, valueString) : SyntaxChecker.CanParseValue(dataType, valueString)))
	{
		return base.ConvertFrom(context, culture, value);
	}

	return isHex
		? Conversions.ParseHexStringAsPrimitive(dataType, valueString)
		: Conversions.ParsePrimitiveStringAsPrimitive(dataType, valueString);
}
/// <summary>
/// Loads every row of the Localidad table into LocalidadEN entities.
/// </summary>
/// <returns>All localities (possibly an empty list).</returns>
public static List<LocalidadEN> CargarLocalidad()
{
	var localidades = new List<LocalidadEN>();
	using (var cnn = new SqlConnection(ConfigurationManager.ConnectionStrings["Mercader"].ToString()))
	{
		cnn.Open();
		string consulta = "SELECT CodLoc,Provincia_CodProvincia,Descripcion,CodigoPostal FROM Localidad";
		// Fix: command and reader were never disposed.
		using (var cmd = new SqlCommand(consulta, cnn))
		using (var lector = cmd.ExecuteReader())
		{
			while (lector.Read())
			{
				var localidad = new LocalidadEN();
				localidad.CodLoc = Conversions.ToInteger(lector[0]);
				localidad.CodProvincia = Conversions.ToInteger(lector[1]);
				localidad.Descripcion = Conversions.ToString(lector[2]);
				localidad.CodigoPostal = Conversions.ToString(lector[3]);
				localidades.Add(localidad);
			}
		}
		return localidades;
	}
}
/// <summary>
/// Loads every audit-log event (Bitacora table) into BitacoraEN entities.
/// </summary>
/// <returns>List(Of BitacoraEN)</returns>
/// <history>Federico Fontan - Diploma 2016</history>
public static List<BitacoraEN> CargarBitacora()
{
	var listaBitacora = new List<BitacoraEN>();
	using (var cnn = new SqlConnection(ConfigurationManager.ConnectionStrings["Mercader"].ToString()))
	{
		cnn.Open();
		string consultaCarga = "SELECT CodBit,Fecha,Descripcion,Criticidad,Usuario " + "FROM Bitacora";
		// Fix: command and reader were never disposed.
		using (var cmd = new SqlCommand(consultaCarga, cnn))
		using (var lector = cmd.ExecuteReader())
		{
			while (lector.Read())
			{
				var evento = new BitacoraEN();
				evento.CodBit = Conversions.ToInteger(lector[0]);
				evento.Fecha = Conversions.ToDate(lector[1]);
				evento.Descripcion = Conversions.ToString(lector[2]);
				evento.Criticidad = Conversions.ToString(lector[3]);
				evento.Usuario = Conversions.ToString(lector[4]);
				listaBitacora.Add(evento);
			}
		}
	}
	return listaBitacora;
}
/// <summary>
/// Form-load wiring: binds the currency manager to S_OS_STANJE_LOKACIJA, fills the
/// per-asset location state (only when an OS row exists, keyed by its "invbroj")
/// and the locations list, binds both grids, then hides internal columns and sets
/// Croatian header captions. The PositionChanged handler is attached AFTER the
/// initial fills (m_cmDisable gates it) and fired once manually to sync the UI.
/// NOTE(review): order matters here - the handler hookup and the manual first call
/// depend on the grids already being bound; do not reorder.
/// </summary>
private void IznosiNabave_Load(object sender, EventArgs e) { this.m_cm = (CurrencyManager)this.BindingContext[this.ds.S_OS_STANJE_LOKACIJA]; InfraCustom.PostaviSelectAllSvimEditKontrolama(this); if (this.OSController.DataSet.OS.Rows.Count > 0) { this.daStanje.Fill(this.ds, Conversions.ToLong(this.OSController.DataSet.OS.Rows[0]["invbroj"])); } this.dalok.Fill(this.dslokacije); this.IzbaciLokacije(); this.UltraGrid1.DataSource = this.ds.S_OS_STANJE_LOKACIJA; this.UltraGrid2.DataSource = this.dslokacije.LOKACIJE; this.m_cmDisable = false; this.m_cm.PositionChanged += new EventHandler(this.m_cm_PositionChanged); this.m_cm_PositionChanged(null, null); this.UltraGrid1.Text = ""; this.UltraGrid2.Text = ""; this.UltraGrid2.DisplayLayout.Bands[0].Columns[2].Hidden = true; this.UltraGrid2.DisplayLayout.Bands[0].Columns[0].Header.Caption = "Šif.lokacije"; this.UltraGrid2.DisplayLayout.Bands[0].Columns[1].Header.Caption = "Lokacija"; this.UltraGrid1.DisplayLayout.Bands[0].Columns[2].Hidden = true; this.UltraGrid1.DisplayLayout.Bands[0].Columns[3].Hidden = true; }
/// <summary>
/// Replaces the local portal list with <paramref name="portals"/> and announces
/// each portal to the server with a "0|p|id|1|0|x|y" packet.
/// </summary>
public static void setAllPortals(ArrayList portals)
{
	removeAllPortals();
	// foreach disposes the enumerator exactly like the original try/finally did.
	foreach (Portal portal in portals)
	{
		curPortals.Add(portal);
		initConnection.sendPacket("0|p|" + Conversions.ToString(portal.getID()) + "|1|0|" + Conversions.ToString(portal.getPosX()) + "|" + Conversions.ToString(portal.getPosY()));
	}
}
/// <summary>
/// Resets a user's password, refusing to touch deactivated accounts.
/// </summary>
/// <param name="Usuario">User whose Usuario/Contraseña fields drive the update.</param>
/// <exception cref="WarningException">When the account exists but is inactive (Activo=0).</exception>
public static void ResetearContraseña(UsuarioEN Usuario)
{
	using (var cnn = new SqlConnection(ConfigurationManager.ConnectionStrings["Mercader"].ToString()))
	{
		cnn.Open();
		string consultaExiste = "SELECT COUNT(*) FROM Usuario WHERE Usuario=@Param1 AND Activo=0";
		// Fix: SqlCommand instances were never disposed.
		using (var cmdExiste = new SqlCommand(consultaExiste, cnn))
		{
			cmdExiste.Parameters.AddWithValue("@Param1", Usuario.Usuario);
			int resultado = Conversions.ToInteger(cmdExiste.ExecuteScalar());
			if (resultado > 0)
			{
				throw new WarningException(Datos.My.Resources.ArchivoIdioma.UsuarioDadoBaja);
			}
		}
		string consulta = "UPDATE Usuario SET Contraseña=@Param1 WHERE Usuario=@Param2";
		using (var cmd = new SqlCommand(consulta, cnn))
		{
			cmd.Parameters.AddWithValue("@Param1", Usuario.Contraseña);
			cmd.Parameters.AddWithValue("@Param2", Usuario.Usuario);
			cmd.ExecuteNonQuery();
		}
	}
}
/// <summary>
/// Thread-safe setter for a control's "enabled" property via VB late binding:
/// mirror of method_8 but for a boolean. When InvokeRequired is true the call is
/// marshaled through Invoke with copy-back flags; otherwise "enabled" is late-set
/// directly on the target object (presumably a WinForms control - TODO confirm).
/// </summary>
public void method_7(bool bool_0, object object_0) { object[] objArr2; if (Conversions.ToBoolean(NewLateBinding.LateGet(object_0, null, "InvokeRequired", new object[0], null, null, null))) { object[] objArr1 = new object[] { new QuestForm.GDelegate6(method_7), bool_0, RuntimeHelpers.GetObjectValue(object_0) }; objArr2 = objArr1; bool[] flagArr = new bool[] { false, true, true }; NewLateBinding.LateCall(object_0, null, "Invoke", objArr2, null, null, flagArr, true); if (flagArr[1]) bool_0 = (bool)Conversions.ChangeType(RuntimeHelpers.GetObjectValue(objArr2[1]), typeof(bool)); if (!flagArr[2]) return; object_0 = RuntimeHelpers.GetObjectValue(objArr2[2]); return; } objArr2 = new object[] { bool_0 }; NewLateBinding.LateSet(object_0, null, "enabled", objArr2, null, null); }
// Token: 0x06000188 RID: 392 RVA: 0x000141DC File Offset: 0x000123DC
// NOTE(review): obfuscated (identifiers are Unicode escape sequences, strings are
// decrypted at runtime). From the visible calls only: sets the window title from
// decrypted strings, sizes a progress control to this.SZ, ensures a download folder
// under this.osk.Folder exists, derives the target name from this.FN, picks a
// non-existing temp file name in a loop, opens it as an append FileStream (this.FS),
// and updates four ListView sub-items (name, size, delta since this.os, total)
// before enabling the control. Left byte-identical on purpose - renaming anything
// here would break the obfuscation-mapped references.
private void \u0002(object \u0002, EventArgs \u0003) { this.Text = Conversions.ToString(Operators.ConcatenateObject(Operators.ConcatenateObject(\u000E\u2000.\u0002(this.osk.L), \u0006\u2001.\u0002(-1829139039)), global::\u000E.\u0003(this.FN))); this.gng9dgyujfqxgme5bxe9aw5zemz7ep94\u200A\u2009\u2000\u2005\u0002().Maximum = this.SZ; this.folder = this.osk.Folder + \u0006\u2001.\u0002(-1829139017); if (!Directory.Exists(this.folder)) { Directory.CreateDirectory(this.folder); } this.folder += new FileInfo(global::\u000E.\u0003(this.FN)).Name; this.gng9dgyujfqxgme5bxe9aw5zemz7ep94\u200A\u2009\u2000\u2005\u0002().Items[0].SubItems[1].Text = new FileInfo(global::\u000E.\u0003(this.FN)).Name; this.gng9dgyujfqxgme5bxe9aw5zemz7ep94\u200A\u2009\u2000\u2005\u0002().Items[1].SubItems[1].Text = global::\u000E.\u0002((long)this.SZ); do { this.tmp = Interaction.Environ(\u0006\u2001.\u0002(-1829138643)) + \u0006\u2001.\u0002(-1829139066) + global::\u000E.\u0002(10); }while (File.Exists(this.tmp)); this.FS = new FileStream(this.tmp, FileMode.Append); this.gng9dgyujfqxgme5bxe9aw5zemz7ep94\u200A\u2009\u2000\u2005\u0002().Items[2].SubItems[1].Text = global::\u000E.\u0002((long)(checked (this.gng9dgyujfqxgme5bxe9aw5zemz7ep94\u200A\u2009\u2000\u2005\u0002().Value - this.os))); this.os = this.gng9dgyujfqxgme5bxe9aw5zemz7ep94\u200A\u2009\u2000\u2005\u0002().Value; this.gng9dgyujfqxgme5bxe9aw5zemz7ep94\u200A\u2009\u2000\u2005\u0002().Items[3].SubItems[1].Text = global::\u000E.\u0002((long)this.gng9dgyujfqxgme5bxe9aw5zemz7ep94\u200A\u2009\u2000\u2005\u0002().Value); this.gng9dgyujfqxgme5bxe9aw5zemz7ep94\u200A\u2009\u2000\u2005\u0002().\u0005(); this.gng9dgyujfqxgme5bxe9aw5zemz7ep94\u200A\u2009\u2000\u2005\u0002().Enabled = true; }
/// <summary>
/// Recursive debug dump: indents this node with ". " per nesting level, prints
/// "&lt;ID:id, Value:value&gt;" (value in invariant culture), then appends each child on
/// its own CRLF-separated line one level deeper.
/// </summary>
private string ToSuperString(int Level)
{
	// Build the per-level indentation prefix.
	string result = "";
	for (int i = 0; i < Level; i++)
	{
		result += ". ";
	}
	result += "<ID:" + Conversions.ToString(this.ID) + ", Value:" + this.Value.ToString(CultureInfo.InvariantCulture) + ">";
	// Children are rendered one nesting level deeper, each on a new line.
	foreach (var child in this.Children)
	{
		result = result + "\r\n" + child.ToSuperString(checked (Level + 1));
	}
	return result;
}
/// <summary>
/// Handles an incoming LAN chat packet. Wire format: playerName|colorIndex|message.
/// Malformed packets and out-of-palette color indices are silently dropped.
/// </summary>
private void Client_HandleChatMessage(string data)
{
	string[] parts = data.Split(ProgramConstants.LAN_DATA_SEPARATOR);
	if (parts.Length < 3)
		return;

	// -1 is the parse-failure sentinel, rejected by the range check below.
	int colorIndex = Conversions.IntFromString(parts[1], -1);
	if (colorIndex < 0 || colorIndex >= chatColors.Length)
		return;

	lbChatMessages.AddMessage(new ChatMessage(parts[0], chatColors[colorIndex].XNAColor, DateTime.Now, parts[2]));
	sndMessageSound.Play();
}
/// <summary>
/// Samples the terrain height (in Unity units) at a lat/lon coordinate by locating
/// the covering tile at the given zoom and querying its height data.
/// </summary>
/// <param name="lat">Latitude in degrees.</param>
/// <param name="lon">Longitude in degrees.</param>
/// <param name="zoom">Zoom level; truncated to an int for tile lookup.</param>
public float getAltitudeHeightLevel(double lat, double lon, float zoom)
{
	// Resolve the Unity tile that covers this coordinate.
	var coordinate = new Mapbox.Utils.Vector2d(lat, lon);
	UnwrappedTileId tileId = TileCover.CoordinateToTileId(coordinate, (int)zoom);
	UnityTile tile = _mapManager.MapVisualizer.GetUnityTileFromUnwrappedTileId(tileId);

	// Work in meters, because the tile rect is also expressed in meters.
	Vector2d pointMeters = Conversions.LatLonToMeters(coordinate);
	// Tile origin = center minus half the size.
	Vector2d tileOrigin = tile.Rect.Center - new Mapbox.Utils.Vector2d(tile.Rect.Size.x / 2, tile.Rect.Size.y / 2);
	Vector2d offset = pointMeters - tileOrigin;

	// Normalize the offset into the [0,1] range the height query expects.
	float dx = (float)(offset.x / tile.Rect.Size.x);
	float dy = (float)(offset.y / tile.Rect.Size.y);
	return tile.QueryHeightData(dx, dy);
}
// Token: 0x06000331 RID: 817 RVA: 0x0002020C File Offset: 0x0001E40C
/// <summary>
/// Persists webcam settings on close (the ini file is rewritten from scratch via
/// the SS accumulator and this.sev), then best-effort notifies the remote side that
/// the stream is ending; send failures during shutdown are deliberately swallowed
/// using the VB error-object protocol.
/// </summary>
private void Cam_FormClosing(object sender, FormClosingEventArgs e)
{
	string iniPath = Application.StartupPath + "\\Bin\\Webcam.ini";
	if (File.Exists(iniPath))
	{
		File.Delete(iniPath);
	}
	this.SS(Conversions.ToString(this.ComboBox2.SelectedIndex));
	this.SS(Conversions.ToString(this.CheckBox1.Checked));
	File.WriteAllText(iniPath, Conversions.ToString(this.sev));
	try
	{
		if (this.sk.CN)
		{
			this.sk.Send("@");
		}
		this.sk.CN = false;
	}
	catch (Exception ex)
	{
		// Best-effort shutdown: record and clear the VB error object, nothing else.
		ProjectData.SetProjectError(ex);
		ProjectData.ClearProjectError();
	}
}
/// <summary>
/// Returns the CodUsu of an ACTIVE user, throwing when no active account matches.
/// </summary>
/// <param name="Usuario">Login name to look up.</param>
/// <returns>The user's CodUsu.</returns>
/// <exception cref="WarningException">When no active (Activo=1) account exists for the name.</exception>
public static int ObtenerIDUsuario(string Usuario)
{
	using (var cnn = new SqlConnection(ConfigurationManager.ConnectionStrings["Mercader"].ToString()))
	{
		cnn.Open();
		string consultaExiste = "SELECT COUNT(*) FROM Usuario WHERE Usuario=@Param1 AND Activo=1";
		// Fix: SqlCommand instances were never disposed.
		using (var cmdExiste = new SqlCommand(consultaExiste, cnn))
		{
			cmdExiste.Parameters.AddWithValue("@Param1", Usuario);
			int resultado = Conversions.ToInteger(cmdExiste.ExecuteScalar());
			if (resultado == 0)
			{
				throw new WarningException(Datos.My.Resources.ArchivoIdioma.UsuarioDadoBaja);
			}
		}
		string consultaUsuario = "SELECT CodUsu FROM Usuario WHERE Usuario=@Param1 AND Activo=1";
		using (var cmd = new SqlCommand(consultaUsuario, cnn))
		{
			cmd.Parameters.AddWithValue("@Param1", Usuario);
			return Conversions.ToInteger(cmd.ExecuteScalar());
		}
	}
}
/// <summary>
/// Blends two colors per channel: channel = blend*a + base*(1-a), where
/// a = opacity/100, truncating toward zero via VB's Conversion.Fix.
/// </summary>
/// <param name="blendColor">Color weighted by the opacity.</param>
/// <param name="baseColor">Color weighted by (100 - opacity).</param>
/// <param name="opacity">Blend weight in percent, 0-100.</param>
/// <returns>The mixed color.</returns>
public static Color OpacityMix(Color blendColor, Color baseColor, int opacity)
{
	float alpha = opacity / (float)100;
	int mixedR = Conversions.ToInteger(Conversion.Fix(blendColor.R * alpha + baseColor.R * (1 - alpha)));
	int mixedG = Conversions.ToInteger(Conversion.Fix(blendColor.G * alpha + baseColor.G * (1 - alpha)));
	int mixedB = Conversions.ToInteger(Conversion.Fix(blendColor.B * alpha + baseColor.B * (1 - alpha)));
	return CreateColorFromRGB(mixedR, mixedG, mixedB);
}
/// <summary>
/// Fills grid column <paramref name="c"/>'s combo list with a "|"-terminated list
/// built from the <paramref name="variable"/> column of the query result.
/// </summary>
/// <param name="c">Grid column index receiving the combo list.</param>
/// <param name="strsql">Query whose rows populate the list.</param>
/// <param name="variable">Column name read from each row.</param>
/// <param name="dt">Receives the freshly filled DataTable.</param>
private void llena_combos_fg(int c, string strsql, string variable, ref DataTable dt)
{
	dt = new DataTable();
	string str = "";
	Module1.llena_tablas(ref dt, strsql, ref this.cnn);
	// Fix: the original decompiled body wrapped this loop in a try/finally whose
	// finally tested an UNASSIGNED 'IEnumerator enumerator' local (a VB foreach
	// lowering artifact that is not even valid C#). foreach already disposes its
	// enumerator, so the broken finally is simply dropped.
	foreach (DataRow row in dt.Rows)
	{
		str = Conversions.ToString(Microsoft.VisualBasic.CompilerServices.Operators.ConcatenateObject(Microsoft.VisualBasic.CompilerServices.Operators.ConcatenateObject((object)str, row[variable]), (object)"|"));
	}
	this.fg.Cols[c].ComboList = str;
}
/// <summary>
/// Replaces user provided keywords with their associated value.
/// </summary>
/// <param name="assembly">The assembly script; null yields an empty string.</param>
/// <returns>The assembly script with all keywords replaced with their values.</returns>
private String ResolveKeywords(String assembly)
{
	if (assembly == null)
	{
		return String.Empty;
	}

	// Clear out any whitespace that may cause issues in the assembly script
	assembly = assembly.Replace("\t", String.Empty);

	// Substitute "<keyword>" tokens (case-insensitively) with the hex form of their
	// bound values: instance keywords first, then the globally registered ones.
	foreach (var entry in this.Keywords)
	{
		assembly = assembly.Replace("<" + entry.Key + ">", Conversions.ToHex(entry.Value, formatAsAddress: false, includePrefix: true) as String, StringComparison.OrdinalIgnoreCase);
	}

	foreach (var globalEntry in MemoryCore.globalKeywords.Value.ToArray())
	{
		assembly = assembly.Replace("<" + globalEntry.Key + ">", Conversions.ToHex(globalEntry.Value, formatAsAddress: false, includePrefix: true) as String, StringComparison.OrdinalIgnoreCase);
	}

	return assembly;
}
/// <summary>
/// Validates an order-note number: throws when the note exists but is inactive,
/// otherwise returns how many notes carry that number (0 = not found).
/// </summary>
/// <param name="NroNota">Note number to check.</param>
/// <returns>Count of notes with that number.</returns>
/// <exception cref="WarningException">When the note exists with Activo=0.</exception>
public static int ValidarNotaPedido(string NroNota)
{
	using (var cnn = new SqlConnection(ConfigurationManager.ConnectionStrings["Mercader"].ToString()))
	{
		cnn.Open();
		string consultaActivo = "SELECT COUNT(*) FROM Nota_Pedido WHERE Nro_Nota=@Param1 AND Activo=0";
		// Fix: SqlCommand instances were never disposed.
		using (var cmdActivo = new SqlCommand(consultaActivo, cnn))
		{
			cmdActivo.Parameters.AddWithValue("@Param1", NroNota);
			int activo = Conversions.ToInteger(cmdActivo.ExecuteScalar());
			if (activo > 0)
			{
				throw new WarningException(Datos.My.Resources.ArchivoIdioma.NotaPedidoDadaBaja);
			}
		}
		string consultaValidar = "SELECT COUNT(*) FROM Nota_Pedido WHERE Nro_Nota=@Param1";
		using (var cmd = new SqlCommand(consultaValidar, cnn))
		{
			cmd.Parameters.AddWithValue("@Param1", NroNota);
			return Conversions.ToInteger(cmd.ExecuteScalar());
		}
	}
}
/// <summary>
/// Tests whether an entity exists in mobjDictEntities, dispatching on the variant
/// type of the key: a Double is looked up directly as "K"+id; a String is treated
/// as a hex object handle, validated and converted to a double id first (invalid
/// hex falls through to false); a Short/Integer is treated as a positional index
/// and range-checked against the dictionary size. An empty dictionary or any other
/// variant type returns false (the default of the VB-style 'Exists' local).
/// </summary>
public bool Exists(object vvarValue) { if (mobjDictEntities.Count > 0) { switch (Information.VarType(RuntimeHelpers.GetObjectValue(vvarValue))) { case VariantType.Double: { double ddblObjectID = Conversions.ToDouble(vvarValue); return(mobjDictEntities.ContainsKey("K" + Conversions.ToString(ddblObjectID))); } case VariantType.String: { string vstrHexNum = Conversions.ToString(vvarValue); int nrlngErrNum = 0; string nrstrErrMsg = ""; if (hwpDxf_Functions.BkDXF_ValidHexNum(vstrHexNum, ref nrlngErrNum, ref nrstrErrMsg)) { double ddblObjectID = hwpDxf_Functions.BkDXF_HexToDbl(Conversions.ToString(vvarValue)); return(mobjDictEntities.ContainsKey("K" + Conversions.ToString(ddblObjectID))); } break; } case VariantType.Short: case VariantType.Integer: { int dlngIndex = Conversions.ToInteger(vvarValue); return((dlngIndex >= 0) & (dlngIndex < mobjDictEntities.Count)); } } } bool Exists = default(bool); return(Exists); }
/// <summary>
/// Form-load initialization: fills the combo boxes with their value ranges, sets
/// defaults, and disables the grade-dependent controls until needed.
/// </summary>
private void TcCGB_Frm_Load(object sender, EventArgs e)
{
	// Counts 1..30 (kept as short so the boxed item type matches the original).
	for (short count = 1; count <= 30; count += 1)
	{
		this.ComboBox1.Items.Add(count);
	}
	this.ComboBox1.Text = Conversions.ToString(1);

	// Values 2800..5400 in steps of 100.
	for (short val = 2800; val <= 5400; val += 100)
	{
		this.ComboBox2.Items.Add(val);
	}
	this.ComboBox2.Text = Conversions.ToString(2900);

	this.ComboBox3.Text = "0.000";

	// Concrete grades C20..C80 in steps of 5; both grade combos share the same list.
	for (short grade = 20; grade <= 80; grade += 5)
	{
		this.ComboBox4.Items.Add("C" + Conversions.ToString((int)grade));
		this.ComboBox5.Items.Add("C" + Conversions.ToString((int)grade));
	}
	this.ComboBox4.Text = "C30";
	this.ComboBox5.Text = "C30";

	this.CheckBox1.Checked = false;
	this.DataGridView1.Columns[3].Visible = false;
	this.DataGridView1.Columns[4].Visible = false;
	this.ComboBox4.Enabled = false;
	this.ComboBox5.Enabled = false;
	this.Button2.Enabled = false;
	this.Button3.Enabled = false;
}
/// <summary>
/// Triangulates the point set read from text (scaled per settings), constrained by
/// user-selected boundary polylines.
/// </summary>
public void tri2()
{
	Editor ed = Autodesk.AutoCAD.ApplicationServices.Application.DocumentManager.MdiActiveDocument.Editor;
	PointSet ps = PreProcess.getPointFromText(mydb.tcSetting.Scale);
	// NOTE(review): 'ed' and 'ids' are not read below; the calls are kept in case
	// they have side effects - confirm and drop if not.
	ObjectId[] ids = PreProcess.getAllLines();

	// Collect user-selected boundary polylines as triangulation constraints.
	// (Removed: dead 'text3' pluralization string and commented-out debug writes.)
	List<Constraint> boundary = new List<Constraint>();
	ObjectId[] boundaryIds = CommandLineQuerries.GetObjectIDs(CommandLineQuerries.EntityType.PLINES, "Select boundaries (polylines)", false);
	if (boundaryIds != null)
	{
		List<Edge> edges = Conversions.ToCeometricEdgeList(boundaryIds);
		for (int j = 0; j < edges.Count; j++)
		{
			boundary.Add(new Constraint(edges[j], Constraint.ConstraintType.Boundary));
		}
	}

	Triangulate tri = new Triangulate();
	// No interior constraints; only the boundary list is honored.
	tri.TriangulateInternal(ps, new List<Constraint>(), boundary, null);
}
/// <summary>
/// Parses an AcDbDimStyleTable section from the pre-read DXF code/value
/// dictionaries. Expects code 100 with value "AcDbDimStyleTable" at rlngIdx, then
/// code 71 (whose value is returned in rlngUnknown71), then consumes consecutive
/// code-340 handle entries into robjDictUnknown340 keyed "K0","K1",...
/// rlngIdx is advanced past everything consumed. On a structural mismatch,
/// nrstrErrMsg receives a German error message whose line number is derived from
/// the code/value pair index (idx*2+n maps dictionary slots back to file lines);
/// the messages are runtime strings and deliberately left in German.
/// </summary>
/// <returns>true on success; false when the section header did not match.</returns>
private bool InternReadDimStyleTable(ref int rlngIdx, ref int rlngUnknown71, ref Dictionary <object, object> robjDictUnknown340, ref string nrstrErrMsg) { nrstrErrMsg = null; robjDictUnknown340.Clear(); checked { bool dblnError = default(bool); if (Operators.ConditionalCompareObjectNotEqual(mobjDictReadCodes[rlngIdx], 100, TextCompare: false)) { nrstrErrMsg = "Ungültiger Gruppencode für Objektname in Zeile " + Conversions.ToString(rlngIdx * 2 + 1) + "."; dblnError = true; } else if (Operators.ConditionalCompareObjectNotEqual(mobjDictReadValues[rlngIdx], "AcDbDimStyleTable", TextCompare: false)) { nrstrErrMsg = "Ungültiger Objektname in Zeile " + Conversions.ToString(rlngIdx * 2 + 2) + "."; dblnError = true; } else if (Operators.ConditionalCompareObjectNotEqual(mobjDictReadCodes[rlngIdx + 1], 71, TextCompare: false)) { nrstrErrMsg = "Ungültiger Gruppencode (?) in Zeile " + Conversions.ToString(rlngIdx * 2 + 3) + "."; dblnError = true; } else { rlngUnknown71 = Conversions.ToInteger(mobjDictReadValues[rlngIdx + 1]); rlngIdx += 2; int dlngCount = default(int); while (Operators.ConditionalCompareObjectEqual(mobjDictReadCodes[rlngIdx], 340, TextCompare: false)) { robjDictUnknown340.Add("K" + Conversions.ToString(dlngCount), RuntimeHelpers.GetObjectValue(mobjDictReadValues[rlngIdx])); dlngCount++; rlngIdx++; } } return(!dblnError); } }
/// <summary>
/// Hit-tests clicks against two 105x30 pseudo-buttons drawn inside the selected
/// item's bounds: button 1 sits 10px left of the item's right edge, button 2 a
/// further 115px left (x = right-230). A button only responds when its ExtraData
/// text is non-empty, and the matching Button1Click/Button2Click event is raised
/// with the item's ExtraData. All rectangle arithmetic is checked (VB overflow
/// semantics preserved from the original).
/// </summary>
private void ListViewEx_MouseClick(object sender, MouseEventArgs e) { if (base.SelectedItems.Count > 0) { ListViewItem item = base.SelectedItems[0]; ExtraData tag = (ExtraData)item.Tag; if (Conversions.ToBoolean(Operators.AndObject(Operators.CompareString(tag.ButtonText1, "", false) != 0, this.isInRect(e.X, e.Y, new Rectangle(checked (checked (checked (item.Bounds.X + item.Bounds.Width) - 105) - 10), checked (item.Bounds.Y + 5), 105, 30))))) { EventHandler <FileEventArgs> eventHandler = this.Button1Click; if (eventHandler != null) { eventHandler(this, new FileEventArgs(tag)); } } else if (Conversions.ToBoolean(Operators.AndObject(Operators.CompareString(tag.ButtonText2, "", false) != 0, this.isInRect(e.X, e.Y, new Rectangle(checked (checked (item.Bounds.X + item.Bounds.Width) - 230), checked (item.Bounds.Y + 5), 105, 30))))) { EventHandler <FileEventArgs> eventHandler1 = this.Button2Click; if (eventHandler1 != null) { eventHandler1(this, new FileEventArgs(tag)); } } } }
//Created July 2016 by Chien Si Harriman. Note the area tolerance checks are based on percentage tolerances and not absolute tolerances.
//
// Tries to find, in the vendor test file, the surface(s) that correspond to one surface
// of the standard file, and records the outcome in ssSummary (FoundMatch, matched ids,
// areas). The match is narrowed in phases; every phase logs its result:
//   1. AdjacencyId signature + surfaceType  -> possiblesList1
//   2. Tilt / azimuth within tolerance      -> possiblesList2
//   3a. exactly one candidate: polyloop coordinates, then (for regular, non-horizontal
//       surfaces) insertion-point coordinates
//   3b. several candidates: containment, edge validity, and summed-area checks (a
//       standard surface may be split into several test surfaces)
// On the first unrecoverable failure the method sets ssSummary.FoundMatch = false and returns.
//
// surface:                       the standard-file surface to match.
// TestSurfaces:                  all candidate surfaces from the vendor test file.
// ssSummary (ref):               per-surface report that is filled in here.
// standardLengthUnits/testLengthUnits, standardAreaUnits/testAreaUnits:
//                                unit enums; NOTE(review): unused in this body — the
//                                pre-computed conversion factors below are used instead.
// testlengthConversion/standardlengthConversion:   multipliers applied to test/standard
//                                lengths to bring both files into common units.
// testareaConversion/standardareaConversion:       same idea for areas.
private void GetSurfaceMatches(SurfaceDefinitions surface, List<SurfaceDefinitions> TestSurfaces, ref DetailedSurfaceSummary ssSummary, Conversions.lengthUnitEnum standardLengthUnits, Conversions.lengthUnitEnum testLengthUnits, double testlengthConversion, double standardlengthConversion, Conversions.areaUnitEnum standardAreaUnits, Conversions.areaUnitEnum testAreaUnits, double testareaConversion, double standardareaConversion)
{
    try
    {
        List<SurfaceDefinitions> possiblesList1 = new List<SurfaceDefinitions>();
        List<SurfaceDefinitions> possiblesList2 = new List<SurfaceDefinitions>();
        bool adjSpaceIdMatch = false;
        bool isLowTiltObject = false;       // standard surface is (near-)horizontal facing up
        bool isHighTiltObject = false;      // standard surface is (near-)horizontal facing down
        bool interiorWallFlipped = false;   // interior wall matched with adjacency ids reversed
        bool issurfaceRegular = false;
        bool istestSurfaceRegular = false;  // NOTE(review): never used below
        //try to find a surface in the test file that has the same:
        //adjacent space Id signature
        //surfaceType
        //free list is 1
        //list 2 is not used
        ssSummary.ID = surface.SurfaceId;
        ssSummary.AreaUnits = "SquareFeet"; //TODO, try to remove this hardcoding.
        ssSummary.TotalSurfaceArea = GetSurfaceArea(surface, standardareaConversion);
        #region
        logger.Info("SURFACE ID: " + surface.SurfaceId);
        logger.Info("START SUBTEST: AdjacencyId check.");
        // Phase 1: adjacency-id + surfaceType filtering into possiblesList1.
        for (int ts = 0; ts < TestSurfaces.Count; ts++)
        {
            SurfaceDefinitions testSurface = TestSurfaces[ts];
            //has to have the same number of Adjacent Space Ids
            if (testSurface.AdjSpaceId.Count == surface.AdjSpaceId.Count)
            {
                //an exception for a shading device
                if (surface.AdjSpaceId.Count == 0)
                {
                    adjSpaceIdMatch = true; //must be set to true for if statements below to work.
                    logger.Info("SHADING EXCEPTION: " + surface.SurfaceId + " is a shading device. No AdjacencyId checks performed.");
                }
                //has to have the same order of adjacent space id strings to qualify. This method assumes the strings are identical
                if (surface.AdjSpaceId.Count == 1)
                {
                    if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[0])
                    {
                        adjSpaceIdMatch = true;
                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH SUCCESS");
                    }
                    else
                    {
                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH FAILED");
                    }
                }
                if (surface.AdjSpaceId.Count == 2)
                {
                    // Ceiling vs InteriorFloor describe the same boundary seen from opposite
                    // sides, so their adjacency ids are expected in reversed order.
                    if (surface.SurfaceType == "Ceiling" && testSurface.SurfaceType == "InteriorFloor")
                    {
                        if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[1] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[0])
                        {
                            adjSpaceIdMatch = true;
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH SUCCESS");
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH FAILED");
                        }
                    }
                    else if (surface.SurfaceType == "InteriorFloor" && testSurface.SurfaceType == "Ceiling")
                    {
                        if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[1] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[0])
                        {
                            adjSpaceIdMatch = true;
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH SUCCESS");
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH FAILED");
                        }
                    }
                    else if (surface.SurfaceType == "InteriorWall" && testSurface.SurfaceType == "InteriorWall")
                    {
                        // Interior walls may be declared from either side; a reversed id pair is
                        // accepted but remembered (interiorWallFlipped) for the azimuth test later.
                        if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[0] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[1])
                        {
                            adjSpaceIdMatch = true;
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH SUCCESS");
                        }
                        else if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[1] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[0])
                        {
                            adjSpaceIdMatch = true;
                            interiorWallFlipped = true;
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH SUCCESS");
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH FAILED");
                        }
                    }
                    else
                    {
                        if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[0] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[1])
                        {
                            adjSpaceIdMatch = true;
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH SUCCESS");
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH FAILED");
                        }
                    }
                }
            }
            else
            {
                if (surface.SurfaceType == "SlabOnGrade") //slab on grade for some reason we see sometimes with two adjacent space ids, depending on the vendor
                {
                    if (testSurface.AdjSpaceId.Count == 2 && surface.AdjSpaceId.Count == 2)
                    {
                        if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[0] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[1])
                        {
                            adjSpaceIdMatch = true;
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH SUCCESS");
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH FAILED");
                        }
                    }
                    else if (surface.AdjSpaceId.Count == 2)
                    {
                        // Standard lists the space twice, test lists it once.
                        if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[0] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[0])
                        {
                            adjSpaceIdMatch = true;
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH SUCCESS");
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH FAILED");
                        }
                    }
                    else
                    {
                        if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[0])
                        {
                            adjSpaceIdMatch = true;
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH SUCCESS");
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " AdjacencyID MATCH FAILED");
                        }
                    }
                }
                else
                {
                    logger.Info("TEST SURFACE: AdjacencyID MATCH FAILED. Surfaces should have the same number of Adjacent Space Ids, with the one allowance we give for Slab on Grade.");
                }
            }
            if (adjSpaceIdMatch)
            {
                logger.Info("END SUBTEST: AdjacencyId check.");
                logger.Info("START SUBTEST: surfaceType check.");
                // surfaceType must also agree; (near-)horizontal surfaces additionally accept the
                // Ceiling <-> InteriorFloor pairing because vendors describe the boundary from
                // opposite sides.
                if (!IsHighTiltSurface(surface) && !IsLowTiltSurface(surface))
                {
                    if (surface.SurfaceType == testSurface.SurfaceType)
                    {
                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " surfaceType MATCH SUCCESS");
                        possiblesList1.Add(testSurface);
                    }
                    else
                    {
                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " surfaceType MATCH FAILED");
                    }
                }
                else
                {
                    if (IsLowTiltSurface(surface)) isLowTiltObject = true;
                    if (IsHighTiltSurface(surface)) isHighTiltObject = true;
                    if (IsLowTiltSurface(testSurface) && isHighTiltObject)
                    {
                        if (testSurface.SurfaceType == "Ceiling" && surface.SurfaceType == "InteriorFloor")
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " surfaceType MATCH SUCCESS");
                            possiblesList1.Add(testSurface);
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " surfaceType MATCH FAILED");
                        }
                    }
                    else if (IsHighTiltSurface(testSurface) && isHighTiltObject)
                    {
                        if (surface.SurfaceType == testSurface.SurfaceType)
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " surfaceType MATCH SUCCESS");
                            possiblesList1.Add(testSurface);
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " surfaceType MATCH FAILED");
                        }
                    }
                    else if (IsLowTiltSurface(testSurface) && isLowTiltObject)
                    {
                        if (surface.SurfaceType == testSurface.SurfaceType)
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " surfaceType MATCH SUCCESS");
                            possiblesList1.Add(testSurface);
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " surfaceType MATCH FAILED");
                        }
                    }
                    else if (IsHighTiltSurface(testSurface) && isLowTiltObject)
                    {
                        if (testSurface.SurfaceType == "InteriorFloor" && surface.SurfaceType == "Ceiling")
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " surfaceType MATCH SUCCESS");
                            possiblesList1.Add(testSurface);
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " surfaceType MATCH FAILED");
                        }
                    }
                }
                adjSpaceIdMatch = false; //must reset it to make sure it will find other valid surfaces for only the proper adjacency.
            }
        }
        logger.Info("END SUBTEST: surfaceType check.");
        // #reporting
        if (possiblesList1.Count == 1)
        {
            logger.Info("TEST SUMMARY: Based on a comparison of the surface Type and Adjacent SpaceIds, there is " + possiblesList1.Count.ToString() + " surface in the test file that is a possible match for " + surface.SurfaceId + " of the Standard File.");
        }
        else if (possiblesList1.Count > 1)
        {
            logger.Info("TEST SUMMARY: Based on a comparison of the surface Type and Adjacent SpaceIds, there are " + possiblesList1.Count.ToString() + " surfaces in the test file that are possible matches for " + surface.SurfaceId + " of the Standard File.");
        }
        else
        {
            logger.Error("TEST SUMMARY: In the vendor test file, no matches could be found for this surface that have the same AdjacentSpaceId(s) and SurfaceType.");
            ssSummary.FoundMatch = false;
            return;
        }
        #endregion
        //there is at least one surface that matches the above criteria
        //now checking for tilt and azimuth criteria, as these have to match
        //TODO: consider removing, minor clean up
        // Phase 2: tilt/azimuth filtering of possiblesList1 into possiblesList2.
        if (possiblesList1.Count > 0)
        {
            logger.Info("START SUBTEST: Azimuth and Tilt check.");
            foreach (SurfaceDefinitions testSurface in possiblesList1)
            {
                double tiltDifference = 0;
                double azimuthDifference = Math.Abs(testSurface.Azimuth - surface.Azimuth);
                #region
                if (isLowTiltObject)
                {
                    if (IsLowTiltSurface(testSurface)) //they are the same, both have small tilts
                    {
                        tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt);
                    }
                    else //they are 180 degrees different, and the test surface is a high tilt while the standard is low tilt
                    {
                        if (testSurface.SurfaceType == "InteriorFloor")
                        {
                            tiltDifference = Math.Abs(Math.Abs(testSurface.Tilt - 180) - surface.Tilt);
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH FAILED");
                            logger.Info("PROGRAMMER'S NOTE: Expecting test surface type to be Interior Floor");
                            logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                            logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching.");
                            continue;
                        }
                    }
                    //no azimuth tests for horizontal surfaces
                    if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance)
                    {
                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH FAILED");
                        logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                        logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching.");
                        continue;
                    }
                    //if the is within tolerance
                    else
                    {
                        //if the surface is horizontal, just add to the free List because we don't check for azimuth in this case
                        if (surface.Tilt == 0)
                        {
                            possiblesList2.Add(testSurface);
                            if (tiltDifference == 0)
                            {
                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH SUCCESS:PERFECT");
                                logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                                logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                            }
                            else
                            {
                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH SUCCESS");
                                logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                                logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                            }
                        }
                        else
                        {
                            logger.Info("START SUBTEST: azimuth checks.");
                            //check the azimuth
                            if (azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance)
                            {
                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH FAILED");
                                logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                                logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching.");
                                continue;
                            }
                            //if the tilt and azimuth is within tolerance
                            else
                            {
                                //add to the free List
                                possiblesList2.Add(testSurface);
                                if (tiltDifference == 0 && azimuthDifference == 0)
                                {
                                    logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH SUCCESS:PERFECT");
                                    logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                                    logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                                }
                                else
                                {
                                    logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH SUCCESS");
                                    logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                                    logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                                }
                            }
                        }
                    }
                }
                else if (isHighTiltObject)
                {
                    if (IsHighTiltSurface(testSurface)) //both high tilt interior surfaces
                    {
                        tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt);
                    }
                    else //standard is high tilt, test is low tilt
                    {
                        if (testSurface.SurfaceType == "Ceiling")
                        {
                            tiltDifference = Math.Abs(Math.Abs(testSurface.Tilt - 180) - surface.Tilt);
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH FAILED");
                            // NOTE(review): "PROGRAMMER' NOTE" below looks like a typo for "PROGRAMMER'S NOTE"
                            // (left untouched here because it is a runtime log string).
                            logger.Info("PROGRAMMER' NOTE: Expected surfaceType to be Ceiling.");
                            logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                            logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching.");
                            continue;
                        }
                    }
                    //no azimuth tests
                    if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance) //azimuth no longer matters for these surfaces
                    {
                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH FAILED");
                        logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                        logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching.");
                        continue;
                    }
                    //if the tilt and azimuth is within tolerance
                    else
                    {
                        //if the surface is horizontal, just add to the free List because we don't check for azimuth in this case
                        if (surface.Tilt == 180)
                        {
                            possiblesList2.Add(testSurface);
                            if (tiltDifference == 0)
                            {
                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH SUCCESS:PERFECT");
                                logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                                logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                            }
                            else
                            {
                                // NOTE(review): message says "PERFECT" on the non-zero-difference branch;
                                // the parallel branches above log "Tilt MATCH SUCCESS" here — likely a
                                // copy/paste slip in the log text (behavior otherwise identical).
                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt MATCH PERFECT");
                                logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                                logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                            }
                        }
                        else
                        {
                            //check the azimuth
                            if (azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance)
                            {
                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH FAILED");
                                logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                                logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching.");
                                continue;
                            }
                            //if the tilt and azimuth is within tolerance
                            else
                            {
                                //add to the free List
                                possiblesList2.Add(testSurface);
                                if (tiltDifference == 0 && azimuthDifference == 0)
                                {
                                    logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH SUCCESS:PERFECT");
                                    logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                                    logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                                }
                                else
                                {
                                    logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Azimuth MATCH SUCCESS");
                                    logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                                    logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                                }
                            }
                        }
                    }
                }
                #endregion
                //the surface is neither a ceiling nor a floor, it is just something regular
                else
                {
                    azimuthDifference = Math.Abs(testSurface.Azimuth - surface.Azimuth);
                    if (interiorWallFlipped) //both high tilt interior surfaces
                    {
                        azimuthDifference = Math.Abs(Math.Abs(testSurface.Azimuth - surface.Azimuth) - 180); //180 is needed because they should be separated by 180
                    }
                    tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt);
                    //if the tilt and azimuth is outside of tolerance
                    if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance || azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance)
                    {
                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt Or Azimuth MATCH FAILED");
                        logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                        logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " has been removed as a candidate for matching.");
                        continue;
                    }
                    //if the tilt and azimuth is within tolerance
                    else
                    {
                        //add to the free List
                        possiblesList2.Add(testSurface);
                        if (tiltDifference == 0 && azimuthDifference == 0)
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt And Azimuth MATCH SUCCESS:PERFECT");
                            logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                            logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                        }
                        else
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Tilt And Azimuth MATCH SUCCESS");
                            logger.Debug("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")");
                            logger.Debug("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")");
                        }
                    }
                }
                logger.Info("END SUBTEST: Azimuth and Tilt check.");
            }
        }
        //
        //report to the user that no matches could be found
        else
        {
            logger.Error("TEST SUMMARY: In the test file, surfaces could be found that match the standard file's AdjacentSpaceId and SurfaceType, but of these matches, none could be identified that also have a tilt or azimuth that exactly matches the standard file's, or is within the allowable tolerance.");
            ssSummary.FoundMatch = false;
            return;
        }
        //clear the first list
        possiblesList1.Clear();
        //start to loop through the new refined list
        //generally want to look at the polyLoop coordinates
        //list 2 is analyzed
        //list 1 is free
        ; // NOTE(review): stray empty statement in the original, kept verbatim
        // Phase 3: geometry checks on the surviving candidates.
        if (possiblesList2.Count > 0)
        {
            //simple method from this point forward is just to simply start doing a polyloop check
            //check the standard surface PolyLoop and the test Surface(s) polyloop(s)
            //check the absolute coordinates of the testSurface(s) polyloop(s)
            if (possiblesList2.Count == 1)
            {
                logger.Info("PROGRAMMER'S INFO: Only one Surface Candidate remaining.");
                //meaning there is only one candidate still available
                //go on to test the polyLoop coordinates and the insertion point
                possiblesList1.Add(possiblesList2[0]);
                //this should mean theoretically that you can do a one for one comparison and do the simplistic check
                possiblesList2.Clear();
                //polyLoop absolute coordinates
                //list 1 is analyzed
                //list 2 is free
                logger.Info("START SUBTEST: PolyLoop coordinate checks.");
                #region
                if (possiblesList1.Count > 0)
                {
                    foreach (SurfaceDefinitions testSurface in possiblesList1)
                    {
                        //check the polyLoop coordinates
                        // Every standard-file vertex must find a counterpart in the test surface;
                        // the first miss disqualifies the candidate.
                        bool coordsMatch = false;
                        foreach (Vector.MemorySafe_CartCoord standardPolyLoopCoord in surface.PlCoords)
                        {
                            coordsMatch = GetPolyLoopCoordMatch(standardPolyLoopCoord, testSurface, surface.SurfaceId, testlengthConversion, standardlengthConversion);
                            if (coordsMatch)
                            {
                                continue;
                            }
                            else
                            {
                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " polyloop coordinate MATCH FAILED. It has been removed from the candidate list.");
                                break;
                            }
                        }
                        if (coordsMatch)
                        {
                            logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " polyloop coordinate MATCH SUCCESS.");
                            possiblesList2.Add(testSurface);
                        }
                    }
                }
                else
                {
                    // NOTE(review): several log strings below contain "file;s" — apparent typo for
                    // "file's"; kept verbatim because they are runtime strings.
                    logger.Error("TEST SUMMARY: In the test file, no surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, and Surface Area. Failed when attempting to match its polyloop coordinates.");
                    ssSummary.FoundMatch = false;
                    return;
                }
                logger.Info("END SUBTEST: PolyLoop coordinate checks.");
                #endregion
                possiblesList1.Clear();
                issurfaceRegular = IsSurfaceRegular(surface);
                //Insertion point tests.
                if (!isHighTiltObject && !isLowTiltObject && issurfaceRegular) //no point in doing these checks if thing is not square and regular
                #region
                {
                    logger.Info("PROGRAMMER'S NOTE: Standard Surface is square or rectangle non-horizontal. Assumption that test surface candidate should also be same type of shape.");
                    logger.Info("START SUBTEST: Insertion Point Coordinate check.");
                    if (possiblesList2.Count > 0)
                    {
                        //check the insertion point coordinate
                        foreach (SurfaceDefinitions testSurface in possiblesList2)
                        {
                            //now match the differences
                            double insPtXDiff = Math.Abs((testSurface.InsertionPoint.X * testlengthConversion) - (surface.InsertionPoint.X * standardlengthConversion));
                            double insPtYDiff = Math.Abs((testSurface.InsertionPoint.Y * testlengthConversion) - (surface.InsertionPoint.Y * standardlengthConversion));
                            double insPtZDiff = Math.Abs((testSurface.InsertionPoint.Z * testlengthConversion) - (surface.InsertionPoint.Z * standardlengthConversion));
                            //TODO: this interior flipped algorithm could be improved vastly. How to tell if in lower left has not been solved.
                            if (interiorWallFlipped)
                            {
                                logger.Info("PROGRAMMER'S NOTE: The azimuths are flipped. Adjusting the insertion point test to factor this into account..");
                                //find the complimenting insertion point
                                // A flipped wall's insertion point should coincide with one of the test
                                // surface's polyloop vertices instead of its own insertion point.
                                for (int pt = 0; pt < testSurface.PlCoords.Count; pt++)
                                {
                                    if (Math.Abs((surface.InsertionPoint.Z * standardlengthConversion) - (testSurface.PlCoords[pt].Z * testlengthConversion)) < DOEgbXMLBasics.Tolerances.SurfaceInsPtZTolerance)
                                    {
                                        if (Math.Abs((surface.InsertionPoint.X * standardlengthConversion) - testSurface.PlCoords[pt].X * testlengthConversion) < DOEgbXMLBasics.Tolerances.SurfaceInsPtXTolerance)
                                        {
                                            if (Math.Abs((surface.InsertionPoint.Y * standardlengthConversion) - testSurface.PlCoords[pt].Y * testlengthConversion) < DOEgbXMLBasics.Tolerances.SurfaceInsPtYTolerance)
                                            {
                                                //a match
                                                insPtXDiff = Math.Abs((testSurface.PlCoords[pt].X * testlengthConversion) - (surface.InsertionPoint.X * standardlengthConversion));
                                                insPtYDiff = Math.Abs((testSurface.PlCoords[pt].Y * testlengthConversion) - (surface.InsertionPoint.Y * standardlengthConversion));
                                                insPtZDiff = Math.Abs((testSurface.PlCoords[pt].Z * testlengthConversion) - (surface.InsertionPoint.Z * standardlengthConversion));
                                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + "Insertion Point MATCH SUCCESS.");
                                                possiblesList1.Add(testSurface);
                                                break;
                                            }
                                            else
                                            {
                                                //didn't find a candidate
                                                logger.Info("TEST SURFACE: " + testSurface.SurfaceId + "Insertion Point did not Match.");
                                            }
                                        }
                                    }
                                }
                            }
                            else
                            {
                                if (insPtXDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtXTolerance || insPtYDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtYTolerance || insPtZDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtZTolerance)
                                {
                                    logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Insertion Point MATCH FAILED. It has been removed as a candidate.");
                                    continue;
                                }
                                else
                                {
                                    //possible match
                                    if (insPtXDiff == 0 && insPtYDiff == 0 && insPtZDiff == 0)
                                    {
                                        //perfect match
                                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Insertion Point MATCH SUCCESS:PERFECT");
                                        possiblesList1.Add(testSurface);
                                    }
                                    else
                                    {
                                        logger.Info("TEST SURFACE: " + testSurface.SurfaceId + " Insertion Point MATCH SUCCESS");
                                        possiblesList1.Add(testSurface);
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        logger.Error("TEST SUMMARY: In the test file, no surfaces could be found that match standard file's Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, and PolyLoop Coordinates. Failed when matching PolyLoop coordinates.");
                        ssSummary.FoundMatch = false;
                        return;
                    }
                    possiblesList2.Clear();
                    logger.Info("END SUBTEST: Insertion point coordinate check.");
                    if (possiblesList1.Count == 1)
                    {
                        logger.Info("TEST SUMMARY: MATCH SUCCESS for Standard file surface id: " + surface.SurfaceId + " in the test file. Only one match was found to be within all the tolerances allowed.");
                        List<string> testFileSurfIds = new List<string>();
                        foreach (SurfaceDefinitions surf in possiblesList1)
                        {
                            testFileSurfIds.Add(surf.SurfaceId);
                        }
                        globalMatchObject.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds);
                        ssSummary.FoundMatch = true;
                        ssSummary.TestSurfaceIDs = testFileSurfIds;
                        ssSummary.TotalTestSurfaceArea = GetSurfaceArea(possiblesList1[0], testareaConversion);
                        return;
                    }
                    else if (possiblesList1.Count == 0)
                    {
                        logger.Error("TEST SUMMARY: No surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, PolyLoop Coordinates, and Insertion Point. Failed when attempting to match the insertion point coordinates.");
                        ssSummary.FoundMatch = false;
                        return;
                    }
                    else if (possiblesList1.Count > 1)
                    {
                        logger.Error("TEST SUMMARY: Advanced Surface Test found more than one match for Standard file surface id: " + surface.SurfaceId + " in the test file. It was not possible to determine only one unique surface.");
                        ssSummary.FoundMatch = false;
                        return;
                    }
                }
                #endregion
                else
                {
                    if (possiblesList2.Count == 1) //we do not check insertion points for horizontal surfaces.
                    {
                        possiblesList1 = possiblesList2; //this is just to keep the below code consistent with convention.
                        logger.Info("TEST FILE SUCCESS: for Standard file surface id: " + surface.SurfaceId + " in the test file. Only one match was found to be within all the tolerances allowed.");
                        List<string> testFileSurfIds = new List<string>();
                        foreach (SurfaceDefinitions surf in possiblesList1)
                        {
                            testFileSurfIds.Add(surf.SurfaceId);
                        }
                        globalMatchObject.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds);
                        ssSummary.FoundMatch = true;
                        ssSummary.TestSurfaceIDs = testFileSurfIds;
                        ssSummary.TotalTestSurfaceArea = GetSurfaceArea(possiblesList1[0], testareaConversion);
                        return;
                    }
                    else if (possiblesList2.Count == 0)
                    {
                        logger.Error("TEST FILE FAILURE: No surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, PolyLoop Coordinates, and Insertion Point. Failed when attempting to match the insertion point coordinates.");
                        ssSummary.FoundMatch = false;
                        return;
                    }
                    else if (possiblesList2.Count > 1)
                    {
                        // NOTE(review): "FAILRE" below is an apparent typo for "FAILURE" (runtime string, kept verbatim).
                        logger.Error("TEST FILE FAILRE: Advanced Surface Test found more than one match for Standard file surface id: " + surface.SurfaceId + " in the test file. It was not possible to determine only one unique surface.");
                        ssSummary.FoundMatch = false;
                        return;
                    }
                }
            }
            //more than one candidate still exists even after the adjacency test, surfaceType test, and tilt and azimuth tests, so filter through
            #region
            else
            {
                //check to see if the remaining area sums matches the standard surface area,
                //and that the edges of the test surfaces do not overlap, etc.
                //first try to find if the standard file has a regular rectangular or square profile
                logger.Debug("More than one surface remains in the test subset.");
                // NOTE(review): "bondary" below looks like a typo for "boundary" (runtime string, kept verbatim).
                logger.Info("PROGRAMMER'S NOTE: Performing advanced surface bondary tests.");
                //checks to see if the testSurface vertices all lie within the standard surface polygonal boundary
                foreach (SurfaceDefinitions testSurface in possiblesList2)
                {
                    logger.Info("Testing test surface " + testSurface.SurfaceId);
                    if (DoesSurfaceContainSurface(surface, testSurface, testlengthConversion, standardlengthConversion))
                    {
                        possiblesList1.Add(testSurface);
                    }
                }
                //now we check to see which of the remaining surfaces and their edges form a coherent surface.
                //do their edges overlap? is the polygon self-intersecting?
                //add the standard surface to the existing possibles list (at index 0, removed again below)
                possiblesList1.Insert(0, surface);
                var edgeDict = FindMatchingEdges(possiblesList1);
                if (EdgesAreValid(edgeDict))
                {
                    //finally, we see if the total area of the remaining surfaces equals the area of the standard surface. If all this above has passed, it should not be an issue.
                    double standardArea = ssSummary.TotalSurfaceArea;
                    double testArea = 0;
                    //remove the zero index surface, because this is the standard surface
                    possiblesList1.RemoveAt(0);
                    //these are the remaining candidates
                    foreach (var ts in possiblesList1)
                    {
                        testArea += GetSurfaceArea(ts, testareaConversion);
                    }
                    // Percentage-based area tolerance (see header comment).
                    if (Math.Abs(standardArea - testArea) / standardArea < DOEgbXMLBasics.Tolerances.AreaPercentageTolerance)
                    {
                        // NOTE(review): "gemoetry" below looks like a typo for "geometry" (runtime string, kept verbatim).
                        logger.Info("TEST FILE SUCCESS: for Standard file surface id: " + surface.SurfaceId + " in the test file. The wall candidates remaining meet the allowable gemoetry constraints.");
                        List<string> testFileSurfIds = new List<string>();
                        foreach (SurfaceDefinitions surf in possiblesList1)
                        {
                            testFileSurfIds.Add(surf.SurfaceId);
                        }
                        // NOTE(review): unlike the single-match branches, this success path does not add
                        // to globalMatchObject.MatchedSurfaceIds — confirm whether that is intentional.
                        ssSummary.FoundMatch = true;
                        ssSummary.TestSurfaceIDs = testFileSurfIds;
                        ssSummary.TotalTestSurfaceArea = testArea;
                    }
                    else
                    {
                        logger.Info("TEST FILE FAILURE: for Standard file surface id: " + surface.SurfaceId + " in the test file. The wall candidates remaining did not pass the area test.");
                    }
                }
                else
                {
                    //problem
                    logger.Info("TEST FILE FAILURE: for Standard file surface id: " + surface.SurfaceId + " in the test file. The wall candidates remaining do not meet the allowable geometry constraints.");
                    List<string> testFileSurfIds = new List<string>();
                    foreach (SurfaceDefinitions surf in possiblesList1)
                    {
                        testFileSurfIds.Add(surf.SurfaceId);
                    }
                    globalMatchObject.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds);
                    ssSummary.FoundMatch = false;
                    ssSummary.TestSurfaceIDs = testFileSurfIds;
                }
            }
            #endregion
        }
    }
    catch (Exception e)
    {
        // Last-resort guard: log and bail out, leaving ssSummary in whatever state it reached.
        logger.Fatal(e.ToString());
        return;
    }
}
//Created July 2016, refactored by Chien Si Harriman. The tolerance checks are based on percentage tolerances, not absolute.
/// <summary>
/// Compares the Volume element of every Space in the standard file against the Space with the same id
/// in the test file. Documents arrive in (test, standard) pairs: even indexes are test files, odd
/// indexes are standard files. Per-space results are written into <paramref name="cr"/>.SpacesReport.
/// </summary>
/// <param name="gbXMLDocs">Paired gbXML documents; even index = test file, odd index = standard file.</param>
/// <param name="gbXMLnsm">Namespace managers, parallel to <paramref name="gbXMLDocs"/>.</param>
/// <param name="cr">Campus report accumulating per-space summaries.</param>
/// <param name="standardUnits">Declared standard-file volume units (reporting only).</param>
/// <param name="testUnits">Declared test-file volume units (reporting only).</param>
/// <param name="testvolConversion">Multiplier converting test-file volumes to cubic feet.</param>
/// <param name="standardvolConversion">Multiplier converting standard-file volumes to cubic feet.</param>
/// <param name="testDetails">Per-test overrides, including thin-walled (centerline-geometry) space volumes.</param>
/// <param name="tolerance">Allowed fractional difference (e.g. 0.01 for 1%).</param>
/// <returns>true when every standard space has a test-file volume within tolerance; false otherwise.</returns>
public static bool TestSpaceVolumes(List<XmlDocument> gbXMLDocs, List<XmlNamespaceManager> gbXMLnsm, ref CampusReport cr, Conversions.volumeUnitEnum standardUnits, Conversions.volumeUnitEnum testUnits, double testvolConversion, double standardvolConversion, DOEgbXMLTestDetail testDetails, double tolerance)
{
    string spaceId = String.Empty;
    Dictionary<string, double> standardFileVolumeDict = new Dictionary<string, double>();
    Dictionary<string, double> testFileVolumeDict = new Dictionary<string, double>();
    bool thinWalled = false;
    try
    {
        //check to see if the test file comes from OpenStudio or Bentley (non-thick wall, or non-centerline geometry)
        XmlNamespaceManager gbXMLnstw = gbXMLnsm[0];
        XmlNode productName = gbXMLDocs[0].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:DocumentHistory/gbXMLv5:ProgramInfo/gbXMLv5:ProductName", gbXMLnstw);
        if (productName.InnerText.ToLower().Replace(" ", String.Empty).Trim() == "openstudio") //TODO: consider a different test
        {
            thinWalled = true;
        }
        for (int i = 0; i < gbXMLDocs.Count; i++)
        {
            XmlDocument gbXMLTestFile = gbXMLDocs[i];
            XmlNamespaceManager gbXMLns = gbXMLnsm[i];
            XmlNodeList spaceNodes = gbXMLDocs[i].SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space/gbXMLv5:Volume", gbXMLnsm[i]);
            //make lists of the volumes in each project, keyed by the parent Space's id attribute
            foreach (XmlNode spaceNode in spaceNodes)
            {
                string volume = spaceNode.InnerText;
                if (i % 2 != 0)
                {
                    //odd index: standard file
                    for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++)
                    {
                        if (spaceNode.ParentNode.Attributes[n].Name == "id")
                        {
                            spaceId = spaceNode.ParentNode.Attributes[n].Value;
                            if (!thinWalled)
                            {
                                //convert the declared units to cubic feet
                                standardFileVolumeDict.Add(spaceId, (Convert.ToDouble(volume) * standardvolConversion));
                            }
                            else
                            {
                                if (testDetails.ThinWalledSpecs.Count > 0)
                                {
                                    //thin-walled tools model to the centerline, so use the administrator-entered volume
                                    var twSpec = testDetails.ThinWalledSpecs.Find(x => x.SpaceName == spaceId);
                                    if (twSpec != null)
                                    {
                                        standardFileVolumeDict.Add(spaceId, twSpec.Volume);
                                    }
                                    else
                                    {
                                        //FIX: twSpec was previously dereferenced without a null check; when the space
                                        //name has no thin-walled spec, fall back to the converted file value
                                        standardFileVolumeDict.Add(spaceId, (Convert.ToDouble(volume) * standardvolConversion));
                                    }
                                }
                                else
                                {
                                    //no thin-walled overrides available; use the converted file value
                                    standardFileVolumeDict.Add(spaceId, (Convert.ToDouble(volume) * standardvolConversion));
                                }
                            }
                            //FIX: stop scanning attributes once the id is recorded (was missing on the
                            //non-thin-walled paths; the test-file branch below already breaks)
                            break;
                        }
                    }
                }
                else
                {
                    //even index: test file
                    for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++)
                    {
                        if (spaceNode.ParentNode.Attributes[n].Name == "id")
                        {
                            spaceId = spaceNode.ParentNode.Attributes[n].Value;
                            double convertedValue = Convert.ToDouble(volume) * testvolConversion;
                            testFileVolumeDict.Add(spaceId, convertedValue);
                            break;
                        }
                    }
                }
            }
        }
        var standardKeys = standardFileVolumeDict.Keys;
        foreach (string key in standardKeys)
        {
            logger.Info("SPACE ID:" + key);
            //important, we don't make a new report unless one has already been created
            DetailedSpaceSummary ds = new DetailedSpaceSummary();
            var result = cr.SpacesReport.Find(x => x.ID == key);
            if (result != null)
            {
                //reuse the existing summary (Find on an empty list simply returns null)
                ds = result;
            }
            else
            {
                ds.ID = key;
                //FIX: newly created summaries were never added to the report, so the
                //failure scan at the end of this method could miss unmatched spaces
                cr.SpacesReport.Add(ds);
            }
            ds.VolumeUnits = "Cubic Feet";
            if (testFileVolumeDict.ContainsKey(key))
            {
                double standardFileVolume = standardFileVolumeDict[key];
                double testFileVolume = testFileVolumeDict[key];
                ds.TotalVolume = standardFileVolume;
                ds.TotalTestVolume = testFileVolume;
                //percentage difference, relative to the standard file's volume
                double pctdifference = Math.Abs(testFileVolume - standardFileVolume) / standardFileVolume;
                if (pctdifference == 0)
                {
                    logger.Info("TEST FILE SUCCESS:PERFECT : " + key + ". Success finding matching space volume.");
                    ds.FoundMatch = true;
                }
                else if (pctdifference <= tolerance)
                {
                    logger.Info("TEST FILE SUCCESS: " + key + ". Success finding matching space volume.");
                    ds.FoundMatch = true;
                }
                else
                {
                    //at the point of failure, the test will return with details about which volume failed.
                    logger.Info("TEST FILE FAILURE: " + key + ". Failure to find a volume match.");
                    ds.FoundMatch = false;
                    return false;
                }
            }
            else
            {
                logger.Info("TEST FILE FAILURE: " + key + ". Failure to find a volume match.");
                //at the point of failure, the test will return with details about which volume failed.
                logger.Info("PROGRAMMER's NOTE: Test File and Standard File space names could not be matched. SpaceId: " + key + " could not be found in the test file.");
                ds.FoundMatch = false;
            }
        }
        var failures = cr.SpacesReport.FindAll(x => x.FoundMatch == false);
        return (failures.Count > 0) ? false : true;
    }
    catch (Exception e)
    {
        logger.Debug(e.ToString());
        logger.Fatal(" Failed to complete the Spaces Volume Test. See exceptions noted.");
        return false;
    }
    //FIX: removed trailing logger.Fatal/return statements — both the try and catch
    //blocks return, so those statements were unreachable (compiler warning CS0162)
}
//Tolerance checks depend upon percentage tolerances
/// <summary>
/// Compares the Volume of each Space in the standard file against the same-id Space in the test file,
/// recording results into the supplied <see cref="DOEgbXMLReportingObj"/>. Documents arrive in
/// (test, standard) pairs: even indexes are test files, odd indexes are standard files.
/// </summary>
/// <param name="gbXMLDocs">Paired gbXML documents; even index = test file, odd index = standard file.</param>
/// <param name="gbXMLnsm">Namespace managers, parallel to <paramref name="gbXMLDocs"/>.</param>
/// <param name="report">Report object; per-space messages and pass/fail flags are appended to it.</param>
/// <param name="standardUnits">Declared standard-file volume units (also used as the report's unit label).</param>
/// <param name="testUnits">Declared test-file volume units.</param>
/// <param name="testvolConversion">Multiplier converting test-file volumes to the standard unit.</param>
/// <param name="standardvolConversion">Multiplier converting standard-file volumes to the standard unit.</param>
/// <param name="testDetails">Per-test overrides, including thin-walled (centerline-geometry) space volumes.</param>
/// <returns>The populated report; passOrFail is false when any space cannot be matched.</returns>
public static DOEgbXMLReportingObj TestSpaceVolumes(List<XmlDocument> gbXMLDocs, List<XmlNamespaceManager> gbXMLnsm, DOEgbXMLReportingObj report, Conversions.volumeUnitEnum standardUnits, Conversions.volumeUnitEnum testUnits, double testvolConversion, double standardvolConversion, DOEgbXMLTestDetail testDetails)
{
    report.passOrFail = true;
    string spaceId = "";
    report.unit = standardUnits.ToString();
    Dictionary<string, double> standardFileVolumeDict = new Dictionary<string, double>();
    Dictionary<string, double> testFileVolumeDict = new Dictionary<string, double>();
    bool thinWalled = false;
    try
    {
        //check to see if the test file comes from OpenStudio or Bentley (non-thick wall, or non-centerline geometry)
        XmlNamespaceManager gbXMLnstw = gbXMLnsm[0];
        XmlNode productName = gbXMLDocs[0].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:DocumentHistory/gbXMLv5:ProgramInfo/gbXMLv5:ProductName", gbXMLnstw);
        if (productName.InnerText.ToLower().Replace(" ", String.Empty).Trim() == "openstudio") //TODO: consider a different test.
        {
            thinWalled = true;
        }
        for (int i = 0; i < gbXMLDocs.Count; i++)
        {
            XmlDocument gbXMLTestFile = gbXMLDocs[i];
            XmlNamespaceManager gbXMLns = gbXMLnsm[i];
            XmlNodeList spaceNodes = gbXMLDocs[i].SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space/gbXMLv5:Volume", gbXMLnsm[i]);
            //make lists of the volumes in each project, keyed by the parent Space's id attribute
            foreach (XmlNode spaceNode in spaceNodes)
            {
                string volume = spaceNode.InnerText;
                //FIX: look the "id" attribute up by name instead of assuming it is Attributes[0],
                //consistent with the sibling space tests in this file
                XmlAttribute idAttribute = spaceNode.ParentNode.Attributes["id"];
                if (idAttribute == null)
                {
                    //a Space element without an id cannot be matched between files
                    continue;
                }
                spaceId = idAttribute.Value;
                if (i % 2 != 0)
                {
                    //odd index: standard file
                    if (!thinWalled)
                    {
                        //no conversion necessary
                        standardFileVolumeDict.Add(spaceId, (Convert.ToDouble(volume) * standardvolConversion));
                    }
                    else
                    {
                        if (testDetails.ThinWalledSpecs.Count > 0)
                        {
                            var twSpec = testDetails.ThinWalledSpecs.Find(x => x.SpaceName == spaceId);
                            if (twSpec != null)
                            {
                                standardFileVolumeDict.Add(spaceId, twSpec.Volume);
                            }
                            else
                            {
                                //FIX: twSpec was previously dereferenced without a null check; when the
                                //space name has no thin-walled spec, fall back to the converted file value
                                standardFileVolumeDict.Add(spaceId, (Convert.ToDouble(volume) * standardvolConversion));
                            }
                        }
                        else
                        {
                            //no conversion necessary
                            standardFileVolumeDict.Add(spaceId, (Convert.ToDouble(volume) * standardvolConversion));
                        }
                    }
                }
                else
                {
                    //even index: test file
                    double convertedValue = Convert.ToDouble(volume) * testvolConversion;
                    testFileVolumeDict.Add(spaceId, convertedValue);
                }
            }
        }
        var standardKeys = standardFileVolumeDict.Keys;
        foreach (string key in standardKeys)
        {
            if (testFileVolumeDict.ContainsKey(key))
            {
                double standardFileVolume = standardFileVolumeDict[key];
                double testFileVolume = testFileVolumeDict[key];
                report.standResult.Add(standardFileVolume.ToString("#.000"));
                report.testResult.Add(testFileVolume.ToString("#.000"));
                report.idList.Add(key);
                //percentage difference, relative to the standard file's volume
                double pctdifference = Math.Abs(testFileVolume - standardFileVolume) / standardFileVolume;
                if (pctdifference == 0)
                {
                    report.MessageList.Add("For Space Id: " + key + ". Success finding matching space volume. The Standard and Test Files both have identical volumes: " + testFileVolume.ToString("#.000") + " " + report.unit + "for Space Id: " + key);
                    report.TestPassedDict.Add(key, true);
                }
                else if (pctdifference <= report.tolerance)
                {
                    report.MessageList.Add("For Space Id: " + key + ". Success finding matching space volume. The Standard Files space volume of " + standardFileVolume.ToString("#.000") + " " + report.unit + "and the Test File space volume: " + testFileVolume.ToString("#.000") + " are within the allowed tolerance of" + report.tolerance.ToString() + " " + report.unit + ".");
                    report.TestPassedDict.Add(key, true);
                }
                else
                {
                    //at the point of failure, the test will return with details about which volume failed.
                    report.MessageList.Add("For Space Id: " + key + ". Failure to find a volume match. The Volume in the Test File equal to: " + testFileVolume.ToString("#.000") + " " + report.unit + " was not within the allowed tolerance. SpaceId: " + key + " in the Standard file has a volume: " + standardFileVolume.ToString("#.000") + " .");
                    report.TestPassedDict.Add(key, false);
                }
            }
            else
            {
                report.standResult.Add("Space Id: " + key);
                report.testResult.Add("Could not be matched");
                report.idList.Add("");
                //at the point of failure, the test will return with details about which volume failed.
                report.MessageList.Add("Test File and Standard File space names could not be matched. SpaceId: " + key + " could not be found in the test file.");
                report.passOrFail = false;
                return report;
            }
        }
        return report;
    }
    catch (Exception e)
    {
        report.MessageList.Add(e.ToString());
        report.longMsg = " Failed to complete the " + report.testType + ". See exceptions noted.";
        report.passOrFail = false;
        return report;
    }
    //FIX: removed the trailing longMsg/passOrFail/return statements — both the try and
    //catch blocks return, so those statements were unreachable (compiler warning CS0162)
}
//created July 2016 by Chien Si Harriman, Independent Contractor
/// <summary>
/// Compares the Area of every Space in the standard file against the same-id Space in the test file.
/// Documents arrive in (test, standard) pairs: even indexes are test files, odd indexes are standard
/// files. Per-space results are written into <paramref name="cr"/>.SpacesReport.
/// </summary>
/// <param name="gbXMLDocs">Paired gbXML documents; even index = test file, odd index = standard file.</param>
/// <param name="gbXMLnsm">Namespace managers, parallel to <paramref name="gbXMLDocs"/>.</param>
/// <param name="cr">Campus report accumulating per-space summaries.</param>
/// <param name="standardUnits">Declared standard-file area units (used in log messages).</param>
/// <param name="testUnits">Declared test-file area units.</param>
/// <param name="testareaConversion">Multiplier converting test-file areas to square feet.</param>
/// <param name="standardareaConversion">Multiplier converting standard-file areas to square feet.</param>
/// <param name="testDetails">Per-test overrides, including thin-walled (centerline-geometry) floor areas.</param>
/// <param name="tolerance">Allowed difference. NOTE(review): compared against the ABSOLUTE area
/// difference below, unlike the volume test which uses a percentage — confirm which is intended.</param>
/// <returns>false as soon as a space id cannot be matched or an exception occurs; true otherwise.</returns>
public static bool TestSpaceAreas(List<XmlDocument> gbXMLDocs, List<XmlNamespaceManager> gbXMLnsm, ref CampusReport cr, Conversions.areaUnitEnum standardUnits, Conversions.areaUnitEnum testUnits, double testareaConversion, double standardareaConversion, DOEgbXMLTestDetail testDetails, double tolerance)
{
    string spaceId = "";
    Dictionary<string, double> standardFileAreaDict = new Dictionary<string, double>();
    Dictionary<string, double> testFileAreaDict = new Dictionary<string, double>();
    bool thinWalled = false;
    try
    {
        //check to see if the test file comes from OpenStudio or Bentley (non-thick wall, or non-centerline geometry)
        XmlNamespaceManager gbXMLnstw = gbXMLnsm[0];
        XmlNode productName = gbXMLDocs[0].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:DocumentHistory/gbXMLv5:ProgramInfo/gbXMLv5:ProductName", gbXMLnstw);
        if (productName.InnerText.ToLower().Replace(" ", String.Empty).Trim() == "openstudio") //TODO: consider a different test
        {
            thinWalled = true;
        }
        for (int i = 0; i < gbXMLDocs.Count; i++)
        {
            XmlDocument gbXMLTestFile = gbXMLDocs[i];
            XmlNamespaceManager gbXMLns = gbXMLnsm[i];
            XmlNodeList spaceNodes = gbXMLDocs[i].SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space/gbXMLv5:Area", gbXMLnsm[i]);
            //make lists of the areas in each project, keyed by the parent Space's id attribute
            foreach (XmlNode spaceNode in spaceNodes)
            {
                string area = spaceNode.InnerText;
                if (i % 2 != 0)
                {
                    //odd index: standard file
                    for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++)
                    {
                        if (spaceNode.ParentNode.Attributes[n].Name == "id")
                        {
                            spaceId = spaceNode.ParentNode.Attributes[n].Value;
                            if (!thinWalled)
                            {
                                //no conversion necessary
                                standardFileAreaDict.Add(spaceId, Convert.ToDouble(area) * standardareaConversion);
                            }
                            else
                            {
                                if (testDetails.ThinWalledSpecs.Count > 0)
                                {
                                    //it is critical that space names match for these tests
                                    var thinwalleddef = testDetails.ThinWalledSpecs.Find(x => x.SpaceName == spaceId);
                                    if (thinwalleddef != null)
                                    {
                                        standardFileAreaDict.Add(spaceId, thinwalleddef.FloorArea);
                                    }
                                    else
                                    {
                                        //FIX: thinwalleddef was previously dereferenced without a null check;
                                        //fall back to the converted file value when no spec matches this name
                                        standardFileAreaDict.Add(spaceId, Convert.ToDouble(area) * standardareaConversion);
                                    }
                                }
                                else
                                {
                                    //no conversion necessary
                                    standardFileAreaDict.Add(spaceId, Convert.ToDouble(area) * standardareaConversion);
                                }
                            }
                            break;
                        }
                    }
                }
                else
                {
                    //even index: test file
                    for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++)
                    {
                        if (spaceNode.ParentNode.Attributes[n].Name == "id")
                        {
                            spaceId = spaceNode.ParentNode.Attributes[n].Value;
                            double convertedArea = Convert.ToDouble(area) * testareaConversion;
                            testFileAreaDict.Add(spaceId, convertedArea);
                            break;
                        }
                    }
                }
            }
        }
        var standardKeys = standardFileAreaDict.Keys;
        foreach (string key in standardKeys)
        {
            logger.Info("SPACE ID: " + key);
            //important, we don't make a new report unless one has already been created
            DetailedSpaceSummary ds = new DetailedSpaceSummary();
            var res = cr.SpacesReport.Find(x => x.ID == key);
            if (res != null)
            {
                //FIX: reuse the existing summary; previously the instance obtained from Find was
                //re-added at the end of the loop, producing duplicate entries in SpacesReport
                ds = res;
            }
            else
            {
                ds.ID = key;
                //add the new summary up front (reference type: later mutations remain visible)
                cr.SpacesReport.Add(ds);
            }
            ds.AreaUnits = "SquareFeet";
            if (testFileAreaDict.ContainsKey(key))
            {
                double testFileSpaceArea = testFileAreaDict[key];
                double standardFileSpaceArea = standardFileAreaDict[key];
                ds.TotalSurfaceArea = standardFileSpaceArea;
                ds.TotalTestSurfaceArea = testFileSpaceArea;
                //NOTE(review): absolute difference compared to 'tolerance', whereas the volume test
                //uses a percentage — confirm this asymmetry is intentional
                double difference = Math.Abs(testFileSpaceArea - standardFileSpaceArea);
                if (difference == 0)
                {
                    logger.Info("TEST FILE SUCCESS:PERFECT : Found matching space area with an area = " + testFileSpaceArea.ToString("#.000") + " " + standardUnits + ". ");
                    ds.FoundMatch = true;
                }
                else if (difference < tolerance)
                {
                    logger.Info("TEST FILE SUCCESS: Found matching space area with an area = " + testFileSpaceArea.ToString("#.000") + " " + standardUnits + ".");
                    ds.FoundMatch = true;
                }
                else
                {
                    //NOTE(review): unlike the unmatched-id branch below, an out-of-tolerance area does
                    //not return false here and the method can still return true — confirm intended
                    logger.Info("TEST FILE FAILURE: " + key + ". Failure to find an space area match.");
                    ds.FoundMatch = false;
                }
            }
            else
            {
                //failure to match spaceIds
                logger.Info("TEST FILE FAILURE: " + key + " Could not be matched");
                logger.Info("Test File and Standard File space names could not be matched. SpaceId: " + key + " could not be found in the test file.");
                ds.FoundMatch = false;
                return false;
            }
        }
    }
    catch (Exception e)
    {
        logger.Debug(e.ToString());
        logger.Fatal("Failed to complete the Space Area Test. See exceptions noted.");
        return false;
    }
    return true;
}
/// <summary>
/// Gets whether the lambda body is valid for the given parameter types and return type.
/// </summary>
/// <param name="parameterTypes">The parameter types to test the lambda body against.</param>
/// <param name="returnType">The return type to test the lambda body against.</param>
/// <param name="conversions">The conversions instance used to compute the resulting conversion.</param>
/// <returns>
/// Produces a <see cref="Conversion.AnonymousFunctionConversion"/> if the lambda is valid;
/// otherwise returns <see cref="Conversion.None"/>.
/// </returns>
public abstract Conversion IsValid(IType[] parameterTypes, IType returnType, Conversions conversions);
/// <summary>
/// Creates a CellHandling that carries the collaborators used for comparing and converting cell values.
/// </summary>
/// <param name="equivalence">The equivalence checker stored on <c>Equivalence</c>.</param>
/// <param name="conversions">The conversions instance stored on <c>Conversions</c>.</param>
public CellHandling(EquivalenceChecker equivalence, Conversions conversions)
{
    this.Equivalence = equivalence;
    this.Conversions = conversions;
}
//Created July 2016, Chien Si Harriman
//note we made a change to this method where the tolerance is now based on a percentage to the standard, and is not absolute.
/// <summary>
/// Compares the Building Area of each (test, standard) document pair: even indexes are test files,
/// odd indexes are standard files. The pair's result is logged and written into cr.BuildingSummary.
/// </summary>
/// <param name="gbXMLDocs">Paired gbXML documents; even index = test file, odd index = standard file.</param>
/// <param name="gbXMLnsm">Namespace managers, parallel to <paramref name="gbXMLDocs"/>.</param>
/// <param name="cr">Campus report; the Standard/Test BuildingSummary entries receive the areas.</param>
/// <param name="standardUnits">Declared standard-file area units.</param>
/// <param name="testUnits">Declared test-file area units.</param>
/// <param name="testareaConversion">Multiplier converting test-file areas to square feet.</param>
/// <param name="standardareaConversion">Multiplier converting standard-file areas to square feet.</param>
/// <param name="tolerance">Allowed fractional difference relative to the standard area.</param>
/// <param name="t">Per-test details, including thin-walled building-area overrides.</param>
/// <returns>true when every pair's areas agree within tolerance; false on mismatch or error.</returns>
public static bool GetBuildingArea(List<XmlDocument> gbXMLDocs, List<XmlNamespaceManager> gbXMLnsm, ref CampusReport cr, Conversions.areaUnitEnum standardUnits, Conversions.areaUnitEnum testUnits, double testareaConversion, double standardareaConversion, double tolerance, DOEgbXMLTestDetail t)
{
    //FIX: sized to the actual number of documents instead of a hard-coded 50-slot array
    //("assuming that this will be plenty large for now"), which silently capped the input
    string[] resultsArray = new string[gbXMLDocs.Count];
    bool thinWalled = false;
    for (int i = 0; i < gbXMLDocs.Count; i++)
    {
        try
        {
            //NOTE(review): reads ProductName from gbXMLDocs[0] but with gbXMLnsm[i] — presumably
            //gbXMLDocs[i] (or gbXMLnsm[0]) was intended; confirm before changing
            XmlNode productName = gbXMLDocs[0].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:DocumentHistory/gbXMLv5:ProgramInfo/gbXMLv5:ProductName", gbXMLnsm[i]);
            if (productName.InnerText.ToLower().Replace(" ", String.Empty).Trim() == "openstudio") //TODO: Consider a different test.
            {
                thinWalled = true;
            }
            XmlDocument gbXMLTestFile = gbXMLDocs[i];
            XmlNamespaceManager gbXMLns = gbXMLnsm[i];
            var node = gbXMLDocs[i].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Area", gbXMLnsm[i]);
            string area = node.InnerText;
            //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result
            resultsArray[i] = area;
            if (i % 2 != 0)
            {
                //odd index: the standard file of the pair; the test file is the preceding even index
                if (testareaConversion != 1)
                {
                    logger.Info("PROGRAMMER'S NOTE: Converted the test file building area units.");
                }
                //apply the conversion factor on the test file always, regardless.
                double standardArea = Convert.ToDouble(resultsArray[i]) * standardareaConversion;
                double testArea = Convert.ToDouble(resultsArray[(i - 1)]) * testareaConversion;
                if (thinWalled)
                {
                    //no conversion necessary, it was already accounted for when it was entered by the administrator.
                    //NOTE(review): the numerator uses thinWalledExpectedBuildingArea while the denominator uses
                    //thinWalledAltBuildingArea — confirm the mixed Expected/Alt values are intentional
                    if (Math.Abs(testArea - t.thinWalledExpectedBuildingArea) / t.thinWalledAltBuildingArea < tolerance)
                    {
                        testArea = t.thinWalledAltBuildingArea;
                    }
                    else
                    {
                        logger.Info("TEST FILE FAILURE: The test file's Building Area is not within the allowable tolerance of " + tolerance.ToString() + ".");
                        return false;
                    }
                }
                //FIX: pass the doubles themselves — the previous code passed x.ToString(), which is a
                //string, so the "{0:#,0.00}" numeric format specifier was silently ignored
                logger.Info("Standard Building Area: " + String.Format("{0:#,0.00}", standardArea));
                logger.Info("Test Building Area: " + String.Format("{0:#,0.00}", testArea));
                cr.BuildingSummary.Find(x => x.FileType == "Standard").BuildingArea = new Area(standardArea, "Square Feet");
                cr.BuildingSummary.Find(x => x.FileType == "Test").BuildingArea = new Area(testArea, "Square Feet");
                //percentage difference, relative to the standard file's area
                double difference = Math.Abs(standardArea - testArea) / standardArea;
                if (difference == 0)
                {
                    logger.Info("TEST FILE SUCCESS:PERFECT: The test file's Building Area matches the standard file Building Area exactly.");
                }
                else if (difference <= tolerance)
                {
                    logger.Info("TEST FILE SUCCESS: The test file's Building Area is within the allowable tolerance of = " + tolerance.ToString());
                }
                else
                {
                    logger.Info("TEST FILE FAILURE: The test file's Building Area is not within the allowable tolerance of " + tolerance.ToString() + ".");
                    return false;
                }
            }
            else
            {
                //even index: test file — nothing to compare yet; its area was stored above
                continue;
            }
        }
        catch (Exception e)
        {
            logger.Debug(e.ToString());
            logger.Fatal(" Failed to locate Building Area in the XML file.");
            return false;
        }
    }
    return true;
}
/// <summary>
/// Test fixture setup: builds a fresh compilation over mscorlib and creates the
/// Conversions instance under test from it.
/// </summary>
public void SetUp()
{
    var mscorlibCompilation = new SimpleCompilation(CecilLoaderTests.Mscorlib);
    compilation = mscorlibCompilation;
    conversions = new Conversions(compilation);
}
/// <summary>
/// Reads the unit attributes (lengthUnit/areaUnit/volumeUnit) from the given nodes and computes the
/// conversion factors that bring the declared units to US-IP units (Feet, SquareFeet, CubicFeet),
/// returning both the parsed unit enums and the factors through the ref parameters.
/// </summary>
/// <param name="nodes">Nodes whose attributes declare the file's units.</param>
/// <param name="c">Conversions helper used to look up the factors.</param>
/// <param name="lengthConv">Receives the length conversion factor to Feet.</param>
/// <param name="areaConv">Receives the area conversion factor to SquareFeet.</param>
/// <param name="volConv">Receives the volume conversion factor to CubicFeet.</param>
/// <param name="lengthEnum">Receives the declared length unit.</param>
/// <param name="areaEnum">Receives the declared area unit.</param>
/// <param name="volEnum">Receives the declared volume unit.</param>
public void StandardizeToUSIP(XmlNodeList nodes, Conversions c, ref double lengthConv, ref double areaConv, ref double volConv, ref Conversions.lengthUnitEnum lengthEnum, ref Conversions.areaUnitEnum areaEnum, ref Conversions.volumeUnitEnum volEnum)
{
    foreach (XmlNode node in nodes)
    {
        foreach (XmlAttribute attribute in node.Attributes)
        {
            string declaredUnit = attribute.Value;
            switch (attribute.Name)
            {
                case "volumeUnit":
                    volEnum = (Conversions.volumeUnitEnum)System.Enum.Parse(typeof(Conversions.volumeUnitEnum), declaredUnit, true);
                    //we know the test files are in cubic feet
                    volConv = c.GetVolumeUnitConversion(volEnum, Conversions.volumeUnitEnum.CubicFeet);
                    if (volConv == -999)
                    {
                        //-999 is the library's "no conversion available" sentinel
                        //return with an error message stating contact system administrator with a code
                    }
                    break;
                case "areaUnit":
                    areaEnum = (Conversions.areaUnitEnum)System.Enum.Parse(typeof(Conversions.areaUnitEnum), declaredUnit, true);
                    areaConv = c.GetAreaConversion(areaEnum, Conversions.areaUnitEnum.SquareFeet);
                    if (areaConv == -999)
                    {
                        //return with an error message stating contact system administrator with a code
                    }
                    break;
                case "lengthUnit":
                    lengthEnum = (Conversions.lengthUnitEnum)System.Enum.Parse(typeof(Conversions.lengthUnitEnum), declaredUnit, true);
                    lengthConv = c.GetLengthConversion(lengthEnum, Conversions.lengthUnitEnum.Feet);
                    if (lengthConv == -999)
                    {
                        //return with an error message stating contact system administrator with a code
                    }
                    break;
            }
        }
    }
}
/// <summary>
/// Validates whether <paramref name="typeArgument"/> satisfies all constraints declared on
/// <paramref name="typeParameter"/>: the reference-type ('class'), value-type ('struct') and
/// default-constructor ('new()') constraints, plus convertibility to every direct base-type
/// constraint (after applying <paramref name="substitution"/>, when one is supplied).
/// </summary>
/// <param name="typeParameter">The type parameter whose constraints are checked.</param>
/// <param name="typeArgument">The candidate type argument.</param>
/// <param name="substitution">Optional substitution applied to each constraint type; may be null.</param>
/// <param name="conversions">Conversions instance used for the constraint-convertibility check.</param>
/// <returns>true if every constraint is satisfied; otherwise false.</returns>
internal static bool ValidateConstraints(ITypeParameter typeParameter, IType typeArgument, TypeVisitor substitution, Conversions conversions)
{
    //void, null and pointer types can never be used as type arguments
    if (typeArgument.Kind == TypeKind.Void || typeArgument.Kind == TypeKind.Null || typeArgument.Kind == TypeKind.Pointer)
        return false;
    //'class' constraint: the argument must be known to be a reference type
    if (typeParameter.HasReferenceTypeConstraint && typeArgument.IsReferenceType != true)
        return false;
    //'struct' constraint: the argument must be a non-nullable value type
    if (typeParameter.HasValueTypeConstraint && !NullableType.IsNonNullableValueType(typeArgument))
        return false;
    //'new()' constraint: a non-abstract type with an accessible parameterless constructor
    if (typeParameter.HasDefaultConstructorConstraint)
    {
        ITypeDefinition definition = typeArgument.GetDefinition();
        if (definition != null && definition.IsAbstract)
            return false;
        bool hasPublicParameterlessCtor = typeArgument.GetConstructors(
            m => m.Parameters.Count == 0 && m.Accessibility == Accessibility.Public,
            GetMemberOptions.IgnoreInheritedMembers | GetMemberOptions.ReturnMemberDefinitions
        ).Any();
        if (!hasPublicParameterlessCtor)
            return false;
    }
    //the argument must be convertible to every (substituted) constraint type
    foreach (IType rawConstraint in typeParameter.DirectBaseTypes)
    {
        IType constraint = (substitution != null) ? rawConstraint.AcceptVisitor(substitution) : rawConstraint;
        if (!conversions.IsConstraintConvertible(typeArgument, constraint))
            return false;
    }
    return true;
}
// Builds a substitution over the given class/method type arguments that shares the
// Conversions instance of the overload resolution it was created for, so constraint
// checks performed during substitution agree with that resolution's conversion rules.
public ConstraintValidatingSubstitution(IList<IType> classTypeArguments, IList<IType> methodTypeArguments, OverloadResolution overloadResolution)
    : base(classTypeArguments, methodTypeArguments)
{
    // NOTE(review): reads OverloadResolution's 'conversions' field directly (no null check on
    // overloadResolution) — presumably callers always pass a non-null resolution; confirm.
    this.conversions = overloadResolution.conversions;
}
/// <summary>
/// Checks lambda validity by testing whether the previously inferred return type has an
/// implicit conversion to the requested return type; parameter types play no role here.
/// </summary>
public override Conversion IsValid(IType[] parameterTypes, IType returnType, Conversions conversions)
{
    var returnConversion = conversions.ImplicitConversion(inferredReturnType, returnType);
    return returnConversion;
}
private DOEgbXMLReportingObj GetPossibleSurfaceMatches(SurfaceDefinitions surface, List<SurfaceDefinitions> TestSurfaces, DOEgbXMLReportingObj report, Conversions.lengthUnitEnum standardLengthUnits, Conversions.lengthUnitEnum testLengthUnits, double testlengthConversion, double standardlengthConversion, Conversions.areaUnitEnum standardAreaUnits, Conversions.areaUnitEnum testAreaUnits, double testareaConversion, double standardareaConversion) { //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML //added Mar 14 2013 report.testSummary = "This test tries to match each Surface element in the standard file with an equivalent in your test file"; report.testSummary += " To be as flexible about what constitutes a \"Good Match\", this test finds a pool of possible candidate "; report.testSummary += "surfaces in your test file and then begins to eliminate them as they fail different tests."; report.testSummary += " At the end, there should be only one surface candidate remaining that constitutes a good match. "; report.testSummary += "You can see the result of this filtering process by reviewing the mini-report that is provided for you below."; report.testSummary += "</br>"; //this summary is text that describes to a lay user what this test does, and how it works functionally. 
The user should have some familiarity with the basic knowledge of gbXML //added March 14 2013 report.testSummary += " The search routine first tries to find all surfaces that have the same SurfaceType and adjacentSpaceIds."; report.testSummary += " Everytime there is a match found in the test file, meeting these criteria, a message will appear in the "; report.testSummary += "mini-report, indicating that a match has been found."; report.testSummary += " There may be more than one match in your test file."; report.testSummary += " If there are no matches found for SurfaceType and AdjacencyId, this message will be printed (and the test will end as failed):"; report.testSummary += " In the test file, no matches could be found in the standard file that have the same AdjacentSpaceId(s) and SurfaceType."; report.testSummary += "</br>"; report.testSummary += " If this set of tests is successful, the routine next tries to remove those surfaces that do not meet"; report.testSummary += " the tilt and azimuth tolerances. Let's pretend for example that the tilt and azimuth for the standard surface"; report.testSummary += " in question are both 90 degrees. 
If the tilt and azimuth test tolerance are 1 degree, then the search"; report.testSummary += " routine will only keep those walls that have 89<=tilt<=91 && <=89azimuth<=91 && match the SurfaceType and"; report.testSummary += " adjacency relationships."; report.testSummary += " The mini-report will let you know which surfaces pass the tilt and azimuth test and which do not."; report.testSummary += "</br>"; report.testSummary += " Next the search routine takes any of the remaining surface candidates that have passed all the tests so far, "; report.testSummary += "and tries to determine if the Surface Areas defined by the polyLoops match to within a pre-defined % tolerance."; report.testSummary += "</br>"; report.testSummary += " the final tests are to physically test the coordinates of the polyloop and insertion point to make sure"; report.testSummary += " that a match for the standard surface can be found."; report.testSummary += " You should see additional messages telling you which surface in your test file matches, or doesn't match"; report.testSummary += " the standard surface being searched against. 
If there is no match, the mini-report tells you."; report.testSummary += " By making the tests this way, it is hoped that you can see exactly why your test file is failing against"; report.testSummary += " the standard file's surface definitions."; try { report.MessageList.Add("Standard Surface Id: " + surface.SurfaceId); report.MessageList.Add("</br>"); //initialize the return list //alternate between these two to filter out bad matches List<SurfaceDefinitions> possiblesList1 = new List<SurfaceDefinitions>(); List<SurfaceDefinitions> possiblesList2 = new List<SurfaceDefinitions>(); bool adjSpaceIdMatch = false; bool isLowTiltObject = false; bool isHighTiltObject = false; bool interiorWallFlipped = false; bool issurfaceRegular = false; bool istestSurfaceRegular = false; //try to find a surface in the test file that has the same: //adjacent space Id signature //surfaceType //free list is 1 //list 2 is not used for(int ts = 0; ts<TestSurfaces.Count;ts++) { SurfaceDefinitions testSurface = TestSurfaces[ts]; //has to have the same number of Adjacent Space Ids if (testSurface.AdjSpaceId.Count == surface.AdjSpaceId.Count) { //an exception for a shading device if (surface.AdjSpaceId.Count == 0) { adjSpaceIdMatch = true; } //has to have the same order of adjacent space id strings to qualify. 
This method assumes the strings are identical if(surface.AdjSpaceId.Count == 1) { if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[0]) { adjSpaceIdMatch = true; } } if(surface.AdjSpaceId.Count == 2) { if(surface.SurfaceType == "Ceiling" && testSurface.SurfaceType == "InteriorFloor") { if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[1] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[0]) { adjSpaceIdMatch = true; } } else if (surface.SurfaceType == "InteriorFloor" && testSurface.SurfaceType == "Ceiling") { if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[1] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[0]) { adjSpaceIdMatch = true; } } else if (surface.SurfaceType == "InteriorWall" && testSurface.SurfaceType == "InteriorWall") { if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[0] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[1]) { adjSpaceIdMatch = true; } if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[1] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[0]) { adjSpaceIdMatch = true; interiorWallFlipped = true; } } else { if (surface.AdjSpaceId[0] == testSurface.AdjSpaceId[0] && surface.AdjSpaceId[1] == testSurface.AdjSpaceId[1]) { adjSpaceIdMatch = true; } } } //if adjacent space Ids match and the surface types match, note this if (adjSpaceIdMatch) { if(!IsHighTiltSurface(surface) && !IsLowTiltSurface(surface)) { if(surface.SurfaceType == testSurface.SurfaceType) { report.MessageList.Add("AdjancentSpaceId(s) and surfaceType Match."); report.MessageList.Add("Surface id: " + testSurface.SurfaceId + " is a candidate."); report.MessageList.Add("</br>"); possiblesList1.Add(testSurface); } } else { if(IsLowTiltSurface(surface)) isLowTiltObject = true; if (IsHighTiltSurface(surface)) isHighTiltObject = true; if(surface.SurfaceType == testSurface.SurfaceType) { report.MessageList.Add("AdjancentSpaceId(s) and surfaceType Match."); report.MessageList.Add("Surface id: " + testSurface.SurfaceId + " is a candidate."); 
report.MessageList.Add("</br>"); possiblesList1.Add(testSurface); } } } } } if (possiblesList1.Count == 1) { report.MessageList.Add("Based on a comparison of the surface Type and Adjacent SpaceIds, there is " + possiblesList1.Count.ToString() + " surface in the test file that is a possible match for " + surface.SurfaceId + " of the Standard File."); report.MessageList.Add("<br/>"); } else if (possiblesList1.Count > 1) { report.MessageList.Add("Based on a comparison of the surface Type and Adjacent SpaceIds, there are " + possiblesList1.Count.ToString() + " surface in the test file that are possible matches for " + surface.SurfaceId + " of the Standard File."); report.MessageList.Add("<br/>"); } else { report.longMsg = "In the test file, no matches could be found in the standard file that have the same AdjacentSpaceId(s) and SurfaceType."; report.passOrFail = false; return report; } //begin to filter back this list //tilt //azimuth //list 1 is analyzed //list 2 is free if (possiblesList1.Count > 0) { foreach (SurfaceDefinitions testSurface in possiblesList1) { double tiltDifference = 0; double azimuthDifference = Math.Abs(testSurface.Azimuth - surface.Azimuth); if(isLowTiltObject) { if(IsLowTiltSurface(testSurface)) //they are the same, both have small tils { tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt); } else //they are 180 degrees different, and the test surface is a high tilt while the standard is low tilt { if (testSurface.SurfaceType == "InteriorFloor") { tiltDifference = Math.Abs(Math.Abs(testSurface.Tilt - 180) - surface.Tilt); } else { report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); 
report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); report.MessageList.Add("</br>"); continue; } } //no azimuth tests if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance) //azimuth no longer matters for these surfaces { if(surface.Tilt != 0) { if(azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance) { report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); report.MessageList.Add("</br>"); continue; } } else { report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); report.MessageList.Add("</br>"); continue; } } //if the tilt and azimuth is within tolerance else { //add to the free List if (surface.Tilt == 0) { possiblesList2.Add(testSurface); if (tiltDifference == 0) { report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " matches the standard surface tilt and azimuth exactly."); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + 
surface.Tilt.ToString()); report.MessageList.Add("</br>"); } else { report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " is within the azimuth and tilt tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance + " and " + DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance + ", respectively. It matches the standard file surface within the allowable tolerance."); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); report.MessageList.Add("</br>"); } } else { //check the azimuth if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance || azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance) { report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); report.MessageList.Add("</br>"); continue; } //if the tilt and azimuth is within tolerance else { //add to the free List possiblesList2.Add(testSurface); if (tiltDifference == 0 && azimuthDifference == 0) { report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " matches the standard surface tilt and azimuth exactly."); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); report.MessageList.Add("</br>"); } else { 
report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " is within the azimuth and tilt tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance + " and " + DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance + ", respectively. It matches the standard file surface within the allowable tolerance."); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); report.MessageList.Add("</br>"); } } } } } else if (isHighTiltObject) { if(IsHighTiltSurface(testSurface)) //both high tilt interior surfaces { tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt); } else //standard is high tilt, test is low tilt { if(testSurface.SurfaceType == "Ceiling") { tiltDifference = Math.Abs(Math.Abs(testSurface.Tilt - 180) - surface.Tilt); } else { report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString() + ")"); report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); report.MessageList.Add("</br>"); continue; } } //no azimuth tests if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance) //azimuth no longer matters for these surfaces { report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + 
surface.Tilt.ToString()); report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); report.MessageList.Add("</br>"); continue; } //if the tilt and azimuth is within tolerance else { //add to the free List possiblesList2.Add(testSurface); if (tiltDifference == 0) { report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " matches the standard surface tilt and azimuth exactly."); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); report.MessageList.Add("</br>"); } else { report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " is within the azimuth and tilt tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance + " and " + DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance + ", respectively. 
It matches the standard file surface within the allowable tolerance."); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); report.MessageList.Add("</br>"); } } } else { azimuthDifference = Math.Abs(testSurface.Azimuth - surface.Azimuth); if (interiorWallFlipped) //both high tilt interior surfaces { azimuthDifference = Math.Abs(Math.Abs(testSurface.Azimuth - surface.Azimuth) - 180); //180 is needed because they should be separated by 180 } tiltDifference = Math.Abs(testSurface.Tilt - surface.Tilt); //if the tilt and azimuth is outside of tolerance if (tiltDifference > DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance || azimuthDifference > DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance) { report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " azimuth and tilt match FAILED: "); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); report.MessageList.Add(testSurface.SurfaceId + " has been removed as a candidate for matching."); report.MessageList.Add("</br>"); continue; } //if the tilt and azimuth is within tolerance else { //add to the free List possiblesList2.Add(testSurface); if (tiltDifference == 0 && azimuthDifference == 0) { report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " matches the standard surface tilt and azimuth exactly."); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + 
surface.Tilt.ToString()); report.MessageList.Add("</br>"); } else { report.MessageList.Add("Test surface with id: " + testSurface.SurfaceId + " is within the azimuth and tilt tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceAzimuthTolerance + " and " + DOEgbXMLBasics.Tolerances.SurfaceTiltTolerance + ", respectively. It matches the standard file surface within the allowable tolerance."); report.MessageList.Add("Test file surface (Azimuth, Tilt): (" + testSurface.Azimuth.ToString() + "," + testSurface.Tilt.ToString() + ")"); report.MessageList.Add("Standard file surface (Azimuth, Tilt): (" + surface.Azimuth.ToString() + "," + surface.Tilt.ToString()); report.MessageList.Add("</br>"); } } } } } // report to the user that no matches could be found else { report.longMsg = "In the test file, surfaces could be found that match the standard file's AdjacentSpaceId and SurfaceType, but of these matches, none could be identified that also have a tilt or azimuth that exactly matches the standard file's, or is within the allowable tolerance."; report.passOrFail = false; return report; } //clear the first list possiblesList1.Clear(); //start to loop through the new refined list //generally want to look at the polyLoop coordinates //list 2 is analyzed //list 1 is free report.MessageList.Add("Starting Surface Area Match tests......"); report.MessageList.Add("</br>"); if (possiblesList2.Count > 0) { //simple method from this point forward is just to simply start doing a polyloop check //check the standard surface PolyLoop and the test Surface(s) polyloop(s) //check the absolute coordinates of the testSurface(s) polyloop(s) if (possiblesList2.Count == 1) { report.MessageList.Add("Only one Surface Candidate remaining from the original test pool."); report.MessageList.Add("<br/>"); //meaning there is only one candidate still available //go on to test the polyLoop coordinates and the insertion point possiblesList1.Add(possiblesList2[0]); } //more than one candidate still exists even 
after the adjacency test, surfaceType test, and tilt and azimuth tests, so filter through else { //The user should be able to determine, based on output which surfaces are left for consideration //Option 1: (easiest) find the one best candidate //do so based on an area match, matching the area of the test surface with the area of the test surface //(without regard for absolute polyloop coordinates) //We find the area using area formulas for both regular polygons and irregular polygons //first we check for the type of surface that it is (regular polygon or not), and we then take it from there //in the case of a rectangular polygon, we only count rectangles or squares as regular, everything else is //assumed to be irregular, though this does not fit the classic definition of a classic polygon. //The language is just semantics //first try to find if the standard file has a regular rectangular or square profile report.MessageList.Add("Checking if the surface is a square or rectangle."); issurfaceRegular = IsSurfaceRegular(surface); foreach (SurfaceDefinitions regSurface in possiblesList2) { //ensures likewise that all the test surface candidates are regular, //TODO: if they are not, then the entire set is assumed to be irregular (this could be improved) istestSurfaceRegular = IsSurfaceRegular(regSurface); if (istestSurfaceRegular == false) break; } if (issurfaceRegular && istestSurfaceRegular) { //we take a shortcut and use the width and height as a way to simplify the area checking scheme //we assume that the width and height are properly entered in this simplified case report.MessageList.Add("Rectangle or Square = TRUE"); report.MessageList.Add("Comparisons of the Width and Height values will be used as a proxy for surface Area."); foreach (SurfaceDefinitions testsurface in possiblesList2) { //it first analyzes the test file to see if slivers are present. If they are, it will fail the test //if slivers are not allowed for the test. 
This is the first time we check for slivers //TODO: consider removing or giving a feature to allow this to be overridded. if (testsurface.Width <= DOEgbXMLBasics.Tolerances.SliverDimensionTolerance || testsurface.Height <= DOEgbXMLBasics.Tolerances.SliverDimensionTolerance) { if (!DOEgbXMLBasics.SliversAllowed) { report.MessageList.Add("This test does not allow slivers less than " + DOEgbXMLBasics.Tolerances.SliverDimensionTolerance + " ft. A sliver has been detected. Test surface id: " + testsurface.SurfaceId + " is a sliver."); report.passOrFail = false; return report; } } //otherwise, if the sliver test passes double widthDiff = Math.Abs((testlengthConversion * testsurface.Width) - surface.Width * standardlengthConversion); if(widthDiff > DOEgbXMLBasics.Tolerances.SurfaceWidthTolerance) { widthDiff = Math.Abs((testlengthConversion * testsurface.Height) - surface.Width * standardlengthConversion); if(widthDiff < DOEgbXMLBasics.Tolerances.SurfaceWidthTolerance) { //we will swap them double heightDiff = Math.Abs((testlengthConversion * testsurface.Width) - surface.Height * standardlengthConversion); if (heightDiff > DOEgbXMLBasics.Tolerances.SurfaceHeightTolerance) { report.MessageList.Add("Test file's Surface id: " + testsurface.SurfaceId + " width and height do not both match the standard file surface id: " + surface.SurfaceId + ". 
This surface has been removed as a candidate."); continue; } else { //this surface is a candidate possiblesList1.Add(testsurface); if (widthDiff == 0 && heightDiff == 0) { report.MessageList.Add("Test file surface with id: " + testsurface.SurfaceId + " have has the width and height swapped, but the width and height exactly match the standard file."); //go ahead and now check the polyLoop coordinates, and then the insertion point } else { report.MessageList.Add("Test file surface with id: " + testsurface.SurfaceId + " have been swapped, but are within the width and height tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceWidthTolerance + standardLengthUnits + " and " + DOEgbXMLBasics.Tolerances.SurfaceHeightTolerance + standardLengthUnits + ", respectively."); //go ahead and now check the polyloop coordinates, and then the insertion point } } } } else { //we won't swap them double heightDiff = Math.Abs((testlengthConversion * testsurface.Height) - surface.Height * standardlengthConversion); if (widthDiff > DOEgbXMLBasics.Tolerances.SurfaceWidthTolerance || heightDiff > DOEgbXMLBasics.Tolerances.SurfaceHeightTolerance) { report.MessageList.Add("Test file's Surface id: " + testsurface.SurfaceId + " width and height do not both match the standard file surface id: " + surface.SurfaceId + ". 
This surface has been removed as a candidate."); continue; } else { //this surface is a candidate possiblesList1.Add(testsurface); if (widthDiff == 0 && heightDiff == 0) { report.MessageList.Add("Test file surface with id: " + testsurface.SurfaceId + " matches the width and height exactly of the standard file."); //go ahead and now check the polyLoop coordinates, and then the insertion point } else { report.MessageList.Add("Test file surface with id: " + testsurface.SurfaceId + " is within the width and height tolerances of " + DOEgbXMLBasics.Tolerances.SurfaceWidthTolerance + standardLengthUnits + " and " + DOEgbXMLBasics.Tolerances.SurfaceHeightTolerance + standardLengthUnits + ", respectively."); //go ahead and now check the polyloop coordinates, and then the insertion point } } } } } //It is not "regular". Find the one surface with the area that most closely matches, and then check its polyloops //1. get the polyloop area of the standard file's surface polyloops //2. get the area of the test file surface candidates using the polyloop coordinates else { report.MessageList.Add("The surface is not a square or rectangle."); report.MessageList.Add("PolyLoop coordinates will be used to calculate the area."); //there are two basic cases, one where we get the area using greens theorem when the surface is parallel //to one of the axes of the project global reference frame //and the second where the surface is not parallel to one of the axes of the global reference frame //Surface normal Parallel to global reference frame X Axis if (Math.Abs(surface.PlRHRVector.X) == 1 && surface.PlRHRVector.Y == 0 && surface.PlRHRVector.Z == 0) { List<Vector.MemorySafe_CartCoord> coordList = new List<Vector.MemorySafe_CartCoord>(); foreach (Vector.MemorySafe_CartCoord coord in surface.PlCoords) { //only take the Y and Z coordinates and throw out the X because we can assume that they are all the same //create new Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(0, 
coord.Y, coord.Z); coordList.Add(c2); } double area = GetAreaFrom2DPolyLoop(coordList); if (area == -999) { report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); } double testSurfacesArea = 0; foreach (SurfaceDefinitions testSurface in possiblesList2) { if (Math.Abs(testSurface.PlRHRVector.X) == 1 && testSurface.PlRHRVector.Y == 0 && testSurface.PlRHRVector.Z == 0) { List<Vector.MemorySafe_CartCoord> testCoordList = new List<Vector.MemorySafe_CartCoord>(); foreach (Vector.MemorySafe_CartCoord coord in testSurface.PlCoords) { Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(0, coord.Y, coord.Z); testCoordList.Add(c2); } testSurfacesArea = GetAreaFrom2DPolyLoop(testCoordList); if (testSurfacesArea == -999) { report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); } //convert from the test units to the standard units double difference = Math.Abs((area*standardareaConversion) - (testSurfacesArea * testareaConversion)); if (difference < area * DOEgbXMLBasics.Tolerances.SurfaceAreaPercentageTolerance) { possiblesList1.Add(testSurface); if (difference == 0) { //then it perfectly matches, go on to check the poly loop coordinates //then check the insertion point report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " exactly."); } else { report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " within the allowable area percentage tolerance."); } } else { 
report.MessageList.Add("The test surface cannot find a match for its surface area as defined in the polyLoop coordinates"); //don't return here, it will be returned below } } else { //do nothing, it will be handled by the more general case and then translated to a 2-D surface } } } //Surface normal Parallel to global reference frame y Axis else if (surface.PlRHRVector.X == 0 && Math.Abs(surface.PlRHRVector.Y) == 1 && surface.PlRHRVector.Z == 0) { List<Vector.MemorySafe_CartCoord> coordList = new List<Vector.MemorySafe_CartCoord>(); foreach (Vector.MemorySafe_CartCoord coord in surface.PlCoords) { //only take the X and Z coordinates and throw out the Y because we can assume that they are all the same Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, 0, coord.Z); coordList.Add(c2); } double area = GetAreaFrom2DPolyLoop(coordList); if (area == -999) { report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); } double testSurfacesArea = 0; foreach (SurfaceDefinitions testSurface in possiblesList2) { if (Math.Abs(testSurface.PlRHRVector.X) == 0 && Math.Abs(testSurface.PlRHRVector.Y) == 1 && testSurface.PlRHRVector.Z == 0) { List<Vector.MemorySafe_CartCoord> testCoordList = new List<Vector.MemorySafe_CartCoord>(); foreach (Vector.MemorySafe_CartCoord coord in testSurface.PlCoords) { Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, 0, coord.Z); testCoordList.Add(c2); } testSurfacesArea = GetAreaFrom2DPolyLoop(testCoordList); if (testSurfacesArea == -999) { report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); } //convert the testSurfaceArea double difference = 
Math.Abs((area*standardareaConversion) - (testSurfacesArea * testareaConversion)); if (difference < area * DOEgbXMLBasics.Tolerances.SurfaceAreaPercentageTolerance) { possiblesList1.Add(testSurface); if (difference == 0) { //then it perfectly matches, go on to check the poly loop coordinates //then check the insertion point report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " exactly."); } else { report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " within the allowable area percentage tolerance."); } } else { report.MessageList.Add("The test surface cannot find a match for its surface area as defined in the polyLoop coordinates"); //don't return here, it will be returned below } } else { //do nothing, it will be handled by the more general code below and translated to 2D } } } else if (surface.PlRHRVector.X == 0 && surface.PlRHRVector.Y == 0 && Math.Abs(surface.PlRHRVector.Z) == 1) { List<Vector.MemorySafe_CartCoord> coordList = new List<Vector.MemorySafe_CartCoord>(); foreach (Vector.MemorySafe_CartCoord coord in surface.PlCoords) { //only take the X and Y coordinates and throw out the Z because we can assume that they are all the same Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, coord.Y, 0); coordList.Add(c2); } double area = GetAreaFrom2DPolyLoop(coordList); if (area == -999) { report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); } double testSurfacesArea = 0; foreach (SurfaceDefinitions testSurface in possiblesList2) { if (Math.Abs(testSurface.PlRHRVector.X) == 0 && 
testSurface.PlRHRVector.Y == 0 && Math.Abs(testSurface.PlRHRVector.Z) == 1) { List<Vector.MemorySafe_CartCoord> testCoordList = new List<Vector.MemorySafe_CartCoord>(); foreach (Vector.MemorySafe_CartCoord coord in testSurface.PlCoords) { Vector.MemorySafe_CartCoord c2 = new Vector.MemorySafe_CartCoord(coord.X, coord.Y, 0); testCoordList.Add(c2); } testSurfacesArea = GetAreaFrom2DPolyLoop(testCoordList); if (testSurfacesArea == -999) { report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); } //provide area conversion double difference = Math.Abs((area*standardareaConversion) - (testSurfacesArea * testareaConversion)); if (difference < area * DOEgbXMLBasics.Tolerances.SurfaceAreaPercentageTolerance) { possiblesList1.Add(testSurface); if (difference == 0) { //then it perfectly matches, go on to check the poly loop coordinates //then check the insertion point report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " exactly."); } else { report.MessageList.Add("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " within the allowable area percentage tolerance."); } } else { report.MessageList.Add("The test surface cannot find a match for its surface area as defined in the polyLoop coordinates"); //don't return here, it will be returned below } } else { //do nothing. The code below will handle the more general case where it is not aligned with reference frame axes } } } //the surface is not aligned with one of the reference frame axes, which requires a bit more work to determine the right answer. 
else { report.MessageList.Add("The standard surface is not aligned along an axis, and will be rotated into a new coordinate frame"); //New Z Axis for this plane is the normal vector, does not need to be created //Get New Y Axis which is the surface Normal Vector cross the original global reference X unit vector (all unit vectors please Vector.CartVect globalReferenceX = new Vector.CartVect(); globalReferenceX.X = 1; globalReferenceX.Y = 0; globalReferenceX.Z = 0; Vector.MemorySafe_CartVect localY = Vector.UnitVector(Vector.CrossProductMSRetMSNV(surface.PlRHRVector, globalReferenceX)); //new X axis is the localY cross the surface normal vector Vector.MemorySafe_CartVect localX = Vector.UnitVector(Vector.CrossProduct(localY, surface.PlRHRVector)); //convert the polyloop coordinates to a local 2-D reference frame //using a trick employed by video game programmers found here http://stackoverflow.com/questions/1023948/rotate-normal-vector-onto-axis-plane List<Vector.MemorySafe_CartCoord> translatedCoordinates = new List<Vector.MemorySafe_CartCoord>(); Vector.MemorySafe_CartCoord newOrigin = new Vector.MemorySafe_CartCoord(0,0,0); translatedCoordinates.Add(newOrigin); for (int j = 1; j < surface.PlCoords.Count; j++) { //randomly assigns the first polyLoop coordinate as the origin Vector.MemorySafe_CartCoord origin = surface.PlCoords[0]; //captures the components of a vector drawn from the new origin to the Vector.CartVect distance = new Vector.CartVect(); distance.X = surface.PlCoords[j].X - origin.X; distance.Y = surface.PlCoords[j].Y - origin.Y; distance.Z = surface.PlCoords[j].Z - origin.Z; //x coordinate is distance vector dot the new local X axis double tX = distance.X * localX.X + distance.Y * localX.Y + distance.Z * localX.Z; //y coordinate is distance vector dot the new local Y axis double tY = distance.X * localY.X + distance.Y * localY.Y + distance.Z * localY.Z; double tZ = 0; Vector.MemorySafe_CartCoord translatedPt = new Vector.MemorySafe_CartCoord(tX,tY,tZ); 
translatedCoordinates.Add(translatedPt); } double area = GetAreaFrom2DPolyLoop(translatedCoordinates); if (area == -999) { report.MessageList.Add("The coordinates of the standard file polyloop has been incorrectly defined."); report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); report.MessageList.Add("Test may be inaccurate and requires gbXML.org support"); } //get the area of the test candidates using the polyloop coordinates foreach (SurfaceDefinitions testSurface in possiblesList2) { Vector.CartVect testglobalReferenceX = new Vector.CartVect(); globalReferenceX.X = 1; globalReferenceX.Y = 0; globalReferenceX.Z = 0; Vector.MemorySafe_CartVect testlocalY = Vector.UnitVector(Vector.CrossProductMSRetMSNV(surface.PlRHRVector, testglobalReferenceX)); //new X axis is the localY cross the surface normal vector Vector.MemorySafe_CartVect testlocalX = Vector.UnitVector(Vector.CrossProduct(testlocalY, surface.PlRHRVector)); //convert the polyloop coordinates to a local 2-D reference frame //using a trick employed by video game programmers found here http://stackoverflow.com/questions/1023948/rotate-normal-vector-onto-axis-plane List<Vector.MemorySafe_CartCoord> testtranslatedCoordinates = new List<Vector.MemorySafe_CartCoord>(); Vector.MemorySafe_CartCoord newOriginTest = new Vector.MemorySafe_CartCoord(0,0,0); testtranslatedCoordinates.Add(newOriginTest); for (int j = 1; j < surface.PlCoords.Count; j++) { //randomly assigns the first polyLoop coordinate as the origin Vector.MemorySafe_CartCoord origin = testSurface.PlCoords[0]; //captures the components of a vector drawn from the new origin to the Vector.CartVect distance = new Vector.CartVect(); distance.X = testSurface.PlCoords[j].X - origin.X; distance.Y = testSurface.PlCoords[j].Y - origin.Y; distance.Z = testSurface.PlCoords[j].Z - origin.Z; //x coordinate is distance vector dot the new local X axis double tX = distance.X * localX.X + distance.Y * localX.Y + distance.Z * localX.Z; 
//y coordinate is distance vector dot the new local Y axis double tY = distance.X * localY.X + distance.Y * localY.Y + distance.Z * localY.Z; double tZ = 0; Vector.MemorySafe_CartCoord translatedPt = new Vector.MemorySafe_CartCoord(tX,tY,tZ); testtranslatedCoordinates.Add(translatedPt); } double testarea = GetAreaFrom2DPolyLoop(translatedCoordinates); if (testarea == -999) { report.MessageList.Add("The coordinates of the test file polyloop has been incorrectly defined."); report.MessageList.Add("The coordinates should be 2D and could not be translated to 2D"); } //convert to the standard units double difference = Math.Abs((area*standardareaConversion) - (testarea * testareaConversion)); if (difference < area * DOEgbXMLBasics.Tolerances.SurfaceAreaPercentageTolerance) { possiblesList1.Add(testSurface); //within reason if (difference == 0) { report.MessageList.Add ("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " exactly."); } else { report.MessageList.Add ("The test surface: " + testSurface.SurfaceId + " polyloop surface area matches the polyLoop surface area of the standard surface: " + surface.SurfaceId + " within the allowable area percentage tolerance."); } } else { //not within reason, so the test will fail //don't return yet, it will be returned below when possiblesList1 is found empty } } } } } possiblesList2.Clear(); //polyLoop absolute coordinates //list 1 is analyzed //list 2 is free report.MessageList.Add("</br>"); report.MessageList.Add("Starting PolyLoop coordinate comparisons......."); report.MessageList.Add("</br>"); if (possiblesList1.Count > 0) { foreach (SurfaceDefinitions testSurface in possiblesList1) { //check the polyLoop coordinates foreach (Vector.MemorySafe_CartCoord standardPolyLoopCoord in surface.PlCoords) { report = GetPolyLoopCoordMatch(standardPolyLoopCoord, testSurface, report, surface.SurfaceId, testlengthConversion, 
standardlengthConversion); if (report.passOrFail) { continue; } else { report.MessageList.Add("Could not find a coordinate match in the test surface polyloop."); break; } } if (report.passOrFail) { possiblesList2.Add(testSurface); } } } else { report.longMsg = "In the test file, no surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, and Surface Area. Failed when attempting to match the surface area."; report.passOrFail = false; return report; } possiblesList1.Clear(); report.MessageList.Add("</br>"); if(!isHighTiltObject && !isLowTiltObject && issurfaceRegular) //no point in doing this if thing is not square and regular { report.MessageList.Add("Starting Insertion Point Coordinate comparisons......."); report.MessageList.Add("</br>"); if (possiblesList2.Count > 0) { //check the insertion point coordinate foreach (SurfaceDefinitions testSurface in possiblesList2) { //now match the differences double insPtXDiff = Math.Abs((testSurface.InsertionPoint.X * testlengthConversion) - (surface.InsertionPoint.X*standardlengthConversion)); double insPtYDiff = Math.Abs((testSurface.InsertionPoint.Y * testlengthConversion) - (surface.InsertionPoint.Y*standardlengthConversion)); double insPtZDiff = Math.Abs((testSurface.InsertionPoint.Z * testlengthConversion) - (surface.InsertionPoint.Z*standardlengthConversion)); if(interiorWallFlipped) { report.MessageList.Add("The azimuths are flipped. 
Looking to see if the test surface has properly defined the insertion point it has."); report.MessageList.Add("</br>"); //find the complimenting insertion point for(int pt = 0; pt<testSurface.PlCoords.Count; pt++) { if(Math.Abs((surface.InsertionPoint.Z*standardlengthConversion) - (testSurface.PlCoords[pt].Z * testlengthConversion)) < DOEgbXMLBasics.Tolerances.SurfaceInsPtZTolerance) { //this is a potential candidate if(Math.Abs((surface.InsertionPoint.X*standardlengthConversion) - testSurface.PlCoords[pt].X * testlengthConversion) < DOEgbXMLBasics.Tolerances.SurfaceInsPtXTolerance) { if(Math.Abs((surface.InsertionPoint.Y*standardlengthConversion) - testSurface.PlCoords[pt].Y * testlengthConversion) < DOEgbXMLBasics.Tolerances.SurfaceInsPtYTolerance) { //a match insPtXDiff = Math.Abs((testSurface.PlCoords[pt].X * testlengthConversion) - (surface.InsertionPoint.X*standardlengthConversion)); insPtYDiff = Math.Abs((testSurface.PlCoords[pt].Y * testlengthConversion) - (surface.InsertionPoint.Y*standardlengthConversion)); insPtZDiff = Math.Abs((testSurface.PlCoords[pt].Z * testlengthConversion) - (surface.InsertionPoint.Z*standardlengthConversion)); } else { //didn't find a candidate } } else { if (Math.Abs((surface.InsertionPoint.Y*standardlengthConversion) - testSurface.PlCoords[pt].Y * testlengthConversion) < DOEgbXMLBasics.Tolerances.SurfaceInsPtYTolerance) { //didn't find a candidate } else { //didn't find a candidate } } } } } if (insPtXDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtXTolerance || insPtYDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtYTolerance || insPtZDiff > DOEgbXMLBasics.Tolerances.SurfaceInsPtZTolerance) { report.MessageList.Add("Test file's Surface id: " + testSurface.SurfaceId + " insertion point coordinates do not both match the standard file surface id: " + surface.SurfaceId + ". 
It has been removed as a candidate."); continue; } else { //possible match possiblesList1.Add(testSurface); if (insPtXDiff == 0 && insPtYDiff == 0 && insPtZDiff == 0) { //perfect match report.MessageList.Add("Test file's Surface with id: " + testSurface.SurfaceId + " matches the insertion point in the standard file exactly."); } else { //perfect match report.MessageList.Add(" Test file's Surface with id: " + testSurface.SurfaceId + " has an insertion point that is within the allowable tolerances of X:" + DOEgbXMLBasics.Tolerances.SurfaceInsPtXTolerance + " ft, Y:" + DOEgbXMLBasics.Tolerances.SurfaceInsPtYTolerance + "ft, Z:" + DOEgbXMLBasics.Tolerances.SurfaceInsPtZTolerance + "ft."); } } } } else { report.longMsg = "In the test file, no surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, and PolyLoop Coordinates. Failed when matching PolyLoop coordinates."; report.passOrFail = false; return report; } if (possiblesList1.Count == 1) { report.longMsg = "Advanced Surface Test found a match for Standard file surface id: " + surface.SurfaceId + " in the test file. Only one match was found to be within all the tolerances allowed. Surface id: " + possiblesList2[0].SurfaceId + "."; report.passOrFail = true; List<string> testFileSurfIds = new List<string>(); foreach (SurfaceDefinitions surf in possiblesList1) { testFileSurfIds.Add(surf.SurfaceId); } globalMatchObject.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds); return report; } else if (possiblesList1.Count == 0) { report.longMsg = "In the test file, no surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, PolyLoop Coordinates, and Insertion Point. 
Failed when attempting to match the insertion point coordinates."; report.passOrFail = false; return report; } else if (possiblesList1.Count > 1) { report.longMsg = "Advanced Surface Test found more than one match for Standard file surface id: " + surface.SurfaceId + " in the test file. It was not possible to determine only one unique surface."; report.passOrFail = false; //List<string> testFileSurfIds = new List<string>(); //foreach (SurfaceDefinitions surf in possiblesList1) { testFileSurfIds.Add(surf.SurfaceId); } //report.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds); return report; } return report; } else { //we do not conduct insertion point tests for horizontal surfaces if (possiblesList2.Count == 1) { report.longMsg = "Advanced Surface Test found a match for Standard file surface id: " + surface.SurfaceId + " in the test file. Only one match was found to be within all the tolerances allowed. Surface id: " + possiblesList2[0].SurfaceId + "."; report.passOrFail = true; List<string> testFileSurfIds = new List<string>(); foreach (SurfaceDefinitions surf in possiblesList2) { testFileSurfIds.Add(surf.SurfaceId); } globalMatchObject.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds); return report; } else if (possiblesList2.Count == 0) { report.longMsg = "In the test file, no surfaces could be found that match standard file;s Surface Id: " + surface.SurfaceId + " AdjacentSpaceId(s), SurfaceType, Tilt, Azimuth, Surface Area, PolyLoop Coordinates, and Insertion Point. Failed when attempting to match the insertion point coordinates."; report.passOrFail = false; return report; } else if (possiblesList2.Count > 1) { report.longMsg = "Advanced Surface Test found more than one match for Standard file surface id: " + surface.SurfaceId + " in the test file. 
It was not possible to determine only one unique surface."; report.passOrFail = false; //List<string> testFileSurfIds = new List<string>(); //foreach (SurfaceDefinitions surf in possiblesList1) { testFileSurfIds.Add(surf.SurfaceId); } //report.MatchedSurfaceIds.Add(surface.SurfaceId, testFileSurfIds); return report; } return report; } } return report; } catch (Exception e) { report.longMsg = (e.ToString()); return report; } }
/// <summary>
/// Legacy storey z-height comparison between the two gbXML documents. Reads every
/// Campus/Building/BuildingStorey "Level"/"Name" child pair from each document, builds
/// per-file dictionaries, and records a per-level pass/fail entry on the report.
/// </summary>
/// <param name="gbXMLDocs">The gbXML documents to compare; index parity decides which dictionary a document feeds (see NOTE below).</param>
/// <param name="gbXMLnsm">Namespace managers matched one-to-one with <paramref name="gbXMLDocs"/>.</param>
/// <param name="report">Reporting object accumulating messages, per-level results, and the overall pass/fail flag.</param>
/// <param name="standardLength">Length unit of the standard file; used only for the report's unit label.</param>
/// <param name="testLength">Length unit of the test file; only its name is captured (testUOM) and it is otherwise unused here.</param>
/// <param name="testlengthConversion">Multiplier applied to test-file level values.</param>
/// <param name="standardlengthConversion">Multiplier applied in the fallback closest-level comparison.</param>
/// <returns>The same <paramref name="report"/> instance, populated with results.</returns>
public static DOEgbXMLReportingObj GetStoryHeights(List<XmlDocument> gbXMLDocs, List<XmlNamespaceManager> gbXMLnsm, DOEgbXMLReportingObj report, Conversions.lengthUnitEnum standardLength, Conversions.lengthUnitEnum testLength, double testlengthConversion, double standardlengthConversion)
{
    //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML
    //added March 14 2013
    report.testSummary = "This test compares Z-coordinates in each one of the levels of the standard and test file. It does so by";
    report.testSummary += " gathering the Z-coordinate of a Building Storey element's PolyLoop in the gbXML files.";
    report.testSummary += " The z-heights should match exactly. If this test has failed, then one of the z-heights in your file does";
    report.testSummary += " not match the standard file. There is no tolerance for error in this test. If any of the z-heights do not match, ";
    report.testSummary += " most likely, the test file has not been constructed as per the instructions provided by the";
    report.testSummary += " gbXML Test Case Manual.";
    report.testSummary += " In some instances, it is not required that the z-heights match. If you notice that this test has failed";
    report.testSummary += " but your file overall has still passed, then this is as designed. Refer to the pass/fail";
    report.testSummary += " summary sheet for more information.";
    report.unit = standardLength.ToString();
    string testUOM = testLength.ToString(); // NOTE(review): captured but never used in this method
    //small dictionaries I make to keep track of the story level names and heights
    //standard file
    Dictionary<string, double> standardStoryHeight = new Dictionary<string, double>();
    //Test File
    Dictionary<string, double> testStoryHeight = new Dictionary<string, double>();
    string key = null;
    string val = null;
    string standLevel = "";
    for (int i = 0; i < gbXMLDocs.Count; i++)
    {
        try
        {
            //assuming that this will be plenty large for now
            XmlDocument gbXMLTestFile = gbXMLDocs[i];
            XmlNamespaceManager gbXMLns = gbXMLnsm[i];
            XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey", gbXMLns);
            int nodecount = nodes.Count;
            foreach (XmlNode node in nodes)
            {
                XmlNodeList childNodes = node.ChildNodes;
                foreach (XmlNode childNode in childNodes)
                {
                    // NOTE(review): here "Level" feeds the dictionary KEY and "Name" the VALUE,
                    // and Convert.ToDouble below parses the Name text. The newer bool overload of
                    // GetStoryHeights does the opposite (Name->key, Level->value) and also applies
                    // standardlengthConversion to standard values. Confirm which mapping is intended
                    // before relying on the Standard/Test labels in the messages below.
                    if (childNode.Name.ToString() == "Level") { key = childNode.InnerText; }
                    else if (childNode.Name.ToString() == "Name") { val = childNode.InnerText; }
                    else { continue; }
                    // NOTE(review): odd/even parity here labels document index 1 as "test" and
                    // index 0 as "standard" — inverted relative to the newer overload. TODO confirm
                    // the document order used by callers.
                    if (i % 2 != 0)
                    {
                        if (key != null && val != null)
                        {
                            double testlevelvalue = Convert.ToDouble(val) * testlengthConversion;
                            testStoryHeight.Add(key, testlevelvalue);
                            key = null;
                            val = null;
                        }
                        else { }
                    }
                    else
                    {
                        if (key != null && val != null)
                        {
                            double standardlevelvalue = Convert.ToDouble(val);
                            standardStoryHeight.Add(key, standardlevelvalue);
                            key = null;
                            val = null;
                        }
                        else { }
                    }
                }
            }
            //reporting — only runs after both documents have been harvested
            if (i % 2 != 0)
            {
                if (standardStoryHeight.Count == 0)
                {
                    report.longMsg = "Test cannot be completed. Standard File Level Count returns Zero.";
                    report.passOrFail = false;
                    return report;
                }
                else if (testStoryHeight.Count == 0)
                {
                    report.longMsg = "Test cannot be completed. Test File Level Count returns Zero.";
                    report.passOrFail = false;
                    return report;
                }
                else
                {
                    //set pass to true
                    report.passOrFail = true;
                    int count = 0;
                    foreach (KeyValuePair<string, double> standardPair2 in standardStoryHeight)
                    {
                        count++;
                        double difference;
                        StoryHeightMin = 10000;
                        string equivLevel = "";
                        if (testStoryHeight.ContainsKey(standardPair2.Key))
                        {
                            double matchkeydiff = Math.Abs(standardPair2.Value - testStoryHeight[standardPair2.Key]);
                            if (matchkeydiff == 0)
                            {
                                report.MessageList.Add("Matched Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key + report.unit + " Exactly");
                                report.TestPassedDict.Add(standardPair2.Value.ToString(), true);
                                continue;
                            }
                            else if (matchkeydiff < report.tolerance)
                            {
                                report.MessageList.Add("Matched Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key + report.unit + " within allowable tolerance.");
                                report.TestPassedDict.Add(standardPair2.Value.ToString(), true);
                                continue;
                            }
                            else
                            {
                                report.MessageList.Add("Did NOT Match Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key + report.unit + " within allowable tolerance.");
                                // BUGFIX: this branch previously recorded "true" even though the level
                                // did NOT match within tolerance (copy-paste from the branches above).
                                report.TestPassedDict.Add(standardPair2.Value.ToString(), false);
                                continue;
                            }
                        }
                        // fallback: no identically-keyed level in the test file — find the closest one
                        foreach (KeyValuePair<string, double> testPair in testStoryHeight)
                        {
                            //setup standard result and test result
                            report.standResult.Add(standardPair2.Key);
                            report.testResult.Add(testPair.Key);
                            report.idList.Add(Convert.ToString(count));
                            difference = Math.Abs(Convert.ToDouble(standardPair2.Key) * standardlengthConversion - Convert.ToDouble(testPair.Key) * testlengthConversion);
                            //store all levels and the difference between them
                            if (StoryHeightMin > difference)
                            {
                                StoryHeightMin = difference;
                                standLevel = standardPair2.Value.ToString();
                                // BUGFIX: equivLevel was never assigned, so the failure message below
                                // always reported an empty "closest level" name.
                                equivLevel = testPair.Key;
                            }
                        }
                        if (StoryHeightMin < report.tolerance)
                        {
                            report.MessageList.Add("Matched Standard File's " + standardPair2.Value + " @ " + standardPair2.Key + report.unit + " within the Tolerance allowed");
                            report.TestPassedDict.Add(standLevel, true);
                        }
                        else
                        {
                            report.MessageList.Add("Standard File's " + standardPair2.Value + " equivalent was not found in the test file. The closest level in the test file was found at " + equivLevel + " in the test file. The difference in heights was " + StoryHeightMin.ToString() + report.unit);
                            report.TestPassedDict.Add(standLevel, false);
                        }
                    }
                    return report;
                }
            }
        }
        catch (Exception e)
        {
            report.longMsg = e.ToString();
            report.MessageList.Add(" Failed to locate Building " + report.testType + " in the XML file.");
            report.passOrFail = false;
            return report;
        }
    }
    // should be unreachable when both documents are present; treated as a hard failure
    report.longMsg = "Fatal " + report.testType + " Test Failure";
    report.passOrFail = false;
    return report;
}
/// <summary>
/// Entry point of the validator: loads the user-supplied gbXML document and the standard
/// file registered for <paramref name="testToRun"/>, normalizes both to US-IP units, then
/// runs the full battery of comparison tests (building area, space counts, storey counts and
/// heights, RHR checks, space areas/volumes, surface counts, detailed surface and opening
/// checks), accumulating results into <paramref name="gbr"/>'s CampusReport and the instance's
/// HTML "table" field.
/// </summary>
/// <param name="xmldoc">Reader over the test (user) gbXML file.</param>
/// <param name="testToRun">Key selecting the test case; indexes filepaths and the test-detail/criteria tables.</param>
/// <param name="gbr">Receives the view menu JSON and the CampusReport that the individual tests populate.</param>
/// <param name="username">NOTE(review): accepted but never used in this method.</param>
public void StartTest(XmlReader xmldoc, string testToRun, ref gbXMLReport gbr, string username = "******")
{
    log4net.Config.XmlConfigurator.Configure();
    TestToRun = testToRun;
    globalMatchObject = new gbXMLMatches();
    globalMatchObject.Init();
    //first create a list of lists that is indexed identically to the drop down list the user selects
    TestDetail = new DOEgbXMLTestDetail();
    //then populate the list of lists. All indexing is done "by hand" in InitializeTestResultStrings()
    TestDetail.InitializeTestResultStrings();
    //create report list reportlist will store all the test result
    ReportList = new List<DOEgbXMLReportingObj>();
    //Load an XML File for the test at hand
    gbXMLTestFile = new XmlDocument();
    gbXMLTestFile.Load(xmldoc);
    gbXMLStandardFile = new XmlDocument();
    gbXMLStandardFile.Load(filepaths[TestToRun]);
    if (!TestFileIsAvailable())
    {
        //TODO: update browser json with something to indicate there is a problem
        return;
    }
    //Define the namespace
    XmlNamespaceManager gbXMLns1 = new XmlNamespaceManager(gbXMLTestFile.NameTable);
    gbXMLns1.AddNamespace("gbXMLv5", "http://www.gbxml.org/schema");
    XmlNamespaceManager gbXMLns2 = new XmlNamespaceManager(gbXMLStandardFile.NameTable);
    gbXMLns2.AddNamespace("gbXMLv5", "http://www.gbxml.org/schema");
    // document order convention used throughout: index 0 = test file, index 1 = standard file
    List<XmlDocument> gbXMLdocs = new List<XmlDocument>();
    gbXMLdocs.Add(gbXMLTestFile);
    gbXMLdocs.Add(gbXMLStandardFile);
    List<XmlNamespaceManager> gbXMLnsm = new List<XmlNamespaceManager>();
    gbXMLnsm.Add(gbXMLns1);
    gbXMLnsm.Add(gbXMLns2);
    //standardizing all tests on US-IP
    Conversions c = new Conversions();
    Conversions.volumeUnitEnum testVol = Conversions.volumeUnitEnum.CubicFeet;
    Conversions.volumeUnitEnum validatorVol = Conversions.volumeUnitEnum.CubicFeet;
    Conversions.areaUnitEnum testArea = Conversions.areaUnitEnum.SquareFeet;
    Conversions.areaUnitEnum validatorArea = Conversions.areaUnitEnum.SquareFeet;
    Conversions.lengthUnitEnum testLength = Conversions.lengthUnitEnum.Feet;
    Conversions.lengthUnitEnum validatorLength = Conversions.lengthUnitEnum.Feet;
    Conversions.lengthUnitEnum standardLength = Conversions.lengthUnitEnum.Feet;
    Conversions.areaUnitEnum standardArea = Conversions.areaUnitEnum.SquareFeet;
    Conversions.volumeUnitEnum standardVol = Conversions.volumeUnitEnum.CubicFeet;
    //standardize all units to feet, square feet, and cubic feet
    double testlengthConversion = 1;
    double testareaConversion = 1;
    double testvolumeConversion = 1;
    double standardlengthConversion = 1;
    double standardareaConversion = 1;
    double standardvolConversion = 1;
    bool mustBePlanar = false; // NOTE(review): only referenced inside commented-out code below
    for(int ns=0; ns< gbXMLnsm.Count; ns++)
    {
        if (ns == 0)
        {
            XmlNodeList nodes = gbXMLdocs[ns].SelectNodes("/gbXMLv5:gbXML", gbXMLnsm[ns]);
            StandardizeToUSIP(nodes, c, ref testlengthConversion, ref testareaConversion, ref testvolumeConversion, ref testLength, ref testArea, ref testVol);
        }
        else
        {
            XmlNodeList nodes = gbXMLdocs[ns].SelectNodes("/gbXMLv5:gbXML", gbXMLnsm[ns]);
            StandardizeToUSIP(nodes, c, ref standardlengthConversion, ref standardareaConversion, ref standardvolConversion, ref standardLength, ref standardArea, ref standardVol);
        }
    }
    //TODO: Add a summary of the Unit of Measures stuff above to the final result
    //Create a Log file that logs the success or failure of each test.
    //Eventually maybe I want to create a little HTML factory
    output = "";
    log = "";
    table += "<div class='container'>" + "<h3>" + "Test Sections" + "</h3>";
    table += "<table class='table table-bordered'>";
    table += "<tr class='info'>" + "<td>" + "Test Section Name" + "</td>" + "<td>" + "Standard Result" + "</td>" + "<td>" + "Test File Result" + "</td>" + "<td>" + "Tolerances" + "</td>" + "<td>" + "Pass/Fail" + "</td>" + "</tr>";
    string units; // NOTE(review): never assigned; all uses below are commented out
    DOEgbXMLReportingObj report = new DOEgbXMLReportingObj();
    CampusReport camprep = new CampusReport();
    //this string I can manipulate to produce the final test output
    string menujson = JsonConvert.SerializeXmlNode(gbXMLStandardFile);
    //prepare json for view
    menujson = MakeViewJson(menujson);
    gbr.menu = menujson;
    gbr.CampusReport = camprep;
    report.standResult = new List<string>();
    report.testResult = new List<string>();
    report.idList = new List<string>();
    report.MessageList = new List<string>();
    report.TestPassedDict = new Dictionary<string, bool>();
    //Set up the Global Pass/Fail criteria for the test case file
    TestCriteria = new DOEgbXMLTestCriteriaObject();
    TestCriteria.InitializeTestCriteriaWithTestName(testToRun);
    //needed specially for area and volume tests
    DOEgbXMLTestDetail afile = TestDetail.TestDetailList.Find(x => x.testName == TestToRun);
    BuildingSummary bs = new BuildingSummary();
    bs.FileType = "Standard";
    camprep.BuildingSummary.Add(bs);
    BuildingSummary bst = new BuildingSummary();
    bst.FileType = "Test";
    camprep.BuildingSummary.Add(bst);
    //Test 2 execute
    //report.tolerance = DOEgbXMLBasics.Tolerances.AreaTolerance;
    //report.testType = TestType.Building_Area;
    //units = DOEgbXMLBasics.MeasurementUnits.sqft.ToString();
    //report = GetBuildingArea(gbXMLdocs, gbXMLnsm, report, validatorArea, testArea, testareaConversion, standardareaConversion);
    logger.Info("START: BUILDING AREA TEST");
    var baresult = GetBuildingArea(gbXMLdocs, gbXMLnsm, ref camprep, validatorArea, testArea, testareaConversion, standardareaConversion, DOEgbXMLBasics.Tolerances.AreaPercentageTolerance, afile );
    if(!baresult)
    {
        camprep.BuildingSummary.Find(x => x.FileType == "Standard").PassedAllTests = false;
    }
    logger.Info("END: BUILDING AREA TEST");
    //GetBuildingArea(gbXMLdocs,gbXMLnsm,)
    //AddToOutPut("Building Area Test Passed: ", report, true);
    //Test 3 execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.SpaceCountTolerance;
    //report.testType = TestType.Space_Count;
    //units = DOEgbXMLBasics.MeasurementUnits.spaces.ToString();
    //report = GetBuildingSpaceCount(gbXMLdocs, gbXMLnsm, report, units);
    logger.Info("START: BUILDING SPACE COUNT TEST");
    SpacesSummary ssm = new SpacesSummary();
    ssm.FileType = "Standard";
    camprep.SpacesSummary.Add(ssm);
    SpacesSummary ssmt = new SpacesSummary();
    ssmt.FileType = "Test";
    camprep.SpacesSummary.Add(ssmt);
    var spctResult = GetBuildingSpaceCount(gbXMLdocs, gbXMLnsm, "", DOEgbXMLBasics.Tolerances.SpaceCountTolerance, ref camprep);
    if(!spctResult)
    {
        camprep.BuildingSummary.Find(x => x.FileType == "Standard").PassedAllTests = false;
        camprep.SpacesSummary.Find(x => x.FileType == "Standard").PassedAllTests = false;
    }
    logger.Info("END: BUILDING SPACE COUNT TEST");
    //AddToOutPut("Building Space Quantity Count Test Passed: ", report, true);
    // Building Stories Tests....
    ////Test 4 execute
    //report.Clear();
    report.tolerance = DOEgbXMLBasics.Tolerances.LevelCountTolerance;
    //report.testType = TestType.Building_Story_Count;
    //units = DOEgbXMLBasics.MeasurementUnits.levels.ToString();
    //report = GetBuildingStoryCount(gbXMLdocs, gbXMLnsm, report, units);
    logger.Info("START: Building Storey Count Test");
    var blstctresult = GetBuildingStoryCount(gbXMLdocs, gbXMLnsm, ref camprep, DOEgbXMLBasics.Tolerances.LevelCountTolerance);
    if(!blstctresult)
    {
        camprep.BuildingSummary.Find(x => x.FileType == "Standard").PassedAllTests = false;
        //TODO: Need a Building Story Summary Field
    }
    logger.Info("END: Building Storey Count Test");
    //AddToOutPut("Building Story Count Test Passed: ", report, true);
    //Test 5 execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.LevelHeightTolerance;
    //report.testType = TestType.Building_Story_Z_Height;
    //units = DOEgbXMLBasics.MeasurementUnits.ft.ToString();
    // NOTE(review): this legacy GetStoryHeights call runs in addition to the newer overload
    // just below, so the storey-height work is done twice and this call mutates "report".
    // Looks like a leftover from the report->CampusReport migration — confirm before removing.
    report = GetStoryHeights(gbXMLdocs, gbXMLnsm, report, validatorLength, testLength, testlengthConversion, standardlengthConversion);
    logger.Info("START: Building Storey Height Test");
    var storyHeightsres = GetStoryHeights(gbXMLdocs, gbXMLnsm, ref camprep, validatorLength, testLength, testlengthConversion, standardlengthConversion, DOEgbXMLBasics.Tolerances.LevelHeightTolerance);
    if (!storyHeightsres)
    {
        camprep.BuildingSummary.Find(x => x.FileType == "Standard").PassedAllTests = false;
        //TODO: Need a Building Story Summary Field
    }
    logger.Info("END: Building Storey Height Test");
    //AddToOutPut("Building Story Z-Height Test: ", report, true);
    //Test 6 execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.VectorAngleTolerance;
    //report.testType = TestType.Building_Story_PolyLoop_RHR;
    //units = "degrees";
    //report = TestBuildingStoryRHR(gbXMLdocs, gbXMLnsm, report, units);
    logger.Info("START: Building Story Right Hand Rule Test.");
    var blstRHResult = TestBuildingStoryRHR(gbXMLdocs, gbXMLnsm, ref camprep);
    if(!blstRHResult)
    {
        //this method has no bearing on the overall pass or fail tests.
        //camprep.BuildingSummary.Find(x => x.FileType == "Standard").PassedAllTests = false;
        //TODO: Need a Building Story Summary Field
    }
    logger.Info("END: Building Story Right Hand Rule Test.");
    //AddToOutPut("Building Story PolyLoop Right Hand Rule Test Result:", report, true);
    //String spShellGeometrySurfaceNum = TestShellGeomSurfaceNum(gbXMLTestFile, gbXMLns);
    //Space Tests .............................................................
    //Test 7 execute
    //only needs to test the test file
    //report.Clear();
    //report.testType = TestType.SpaceId_Match_Test;
    logger.Info("START: UNIQUE SPACE ID TEST");
    var spaceIDresults = UniqueSpaceIdTest(gbXMLdocs, gbXMLnsm, ref camprep);
    if(!spaceIDresults)
    {
        camprep.SpacesSummary.Find(x => x.FileType == "Standard").PassedAllTests = false;
    }
    logger.Info("END: UNIQUE SPACE ID TEST");
    //AddToOutPut("SpaceId Match Test: ", report, true);
    //Test 8 execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.SpaceAreaTolerance;
    //report.testType = TestType.Space_Area;
    //units = DOEgbXMLBasics.MeasurementUnits.sqft.ToString();
    logger.Info("START: SPACE AREAS TEST");
    //report = TestSpaceAreas(gbXMLdocs, gbXMLnsm, report, validatorArea, testArea, testareaConversion,standardareaConversion,afile);
    var result = TestSpaceAreas(gbXMLdocs, gbXMLnsm, ref camprep, validatorArea, testArea, testareaConversion, standardareaConversion, afile, DOEgbXMLBasics.Tolerances.SpaceAreaTolerance);
    if(!result)
    {
        camprep.SpacesSummary.Find(x => x.FileType == "Standard").PassedAllTests = false;
        //gbxml detailed compliance results?
    }
    logger.Info("END: SPACE AREAS TEST");
    //AddToOutPut("Space Areas Test: ", report, true);
    //Test 9 execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.VolumeTolerance;
    //report.testType = TestType.Space_Volume;
    //units = DOEgbXMLBasics.MeasurementUnits.cubicft.ToString();
    logger.Info("START: SPACE VOLUMES TEST");
    //report = TestSpaceVolumes(gbXMLdocs, gbXMLnsm, report, validatorVol, testVol, testvolumeConversion,standardvolConversion,afile);
    var volresult = TestSpaceVolumes(gbXMLdocs, gbXMLnsm, ref camprep, validatorVol, testVol, testvolumeConversion, standardvolConversion, afile, DOEgbXMLBasics.Tolerances.VolumePercentageTolerance);
    logger.Info("END: SPACE VOLUMES TEST");
    if (!volresult)
    {
        camprep.SpacesSummary.Find(x => x.FileType == "Standard").PassedAllTests = false;
        //gbxml detailed compliance results?
    }
    //AddToOutPut("Space Volumes Test: ", report, true);
    //Test 10 Execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.VectorAngleTolerance;
    //report.testType = TestType.Shell_Geom_RHR;
    //units = "degrees";
    //report = TestShellGeomPLRHR(gbXMLdocs, gbXMLnsm, report, units);
    //AddToOutPut("Shell Geometry RHR Test: ",report);
    //Surface Element tests
    //deprecating all counts tests as criteria for passing and failing. Use this now only to indicate counts in the surfaces summary
    ////Test 11 Execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.SurfaceCountTolerance;
    //report.testType = TestType.Total_Surface_Count;
    //units = "";
    //report = GetSurfaceCount(gbXMLdocs, gbXMLnsm, report, units);
    //AddToOutPut("Surface Count Test Result: ", report, true);
    ////Surface Element tests
    ////Test 12 Execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.ExteriorWallCountTolerance;
    //report.testType = TestType.Exterior_Wall_Surface_Count;
    //units = "";
    SurfaceSummary ss = new SurfaceSummary();
    ss.FileType = "Standard";
    camprep.SurfacesSummary.Add(ss);
    SurfaceSummary sst = new SurfaceSummary();
    sst.FileType = "Test";
    camprep.SurfacesSummary.Add(sst);
    //initialization of summaries complete
    logger.Info("START: EXTERIOR WALL COUNT");
    //report = GetEWSurfaceCount(gbXMLdocs, gbXMLnsm, report, units);
    var ewctresult = GetEWSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep);
    logger.Info("END: EXTERIOR WALL COUNT");
    if (!ewctresult)
    {
        //do nothing, it has no consequence for now
        //gbxml detailed compliance results?
    }
    //AddToOutPut("Exterior Wall Surface Count Test Result: ", report, true);
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.SurfaceCountTolerance;
    //report.testType = TestType.Underground_Surface_Count;
    //units = "";
    //report = GetUGSurfaceCount(gbXMLdocs, gbXMLnsm, report, units);
    logger.Info("START: UNDERGROUND WALL COUNT");
    var ugwctresult = GetUGSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep);
    if (!ugwctresult)
    {
        //do nothing, it has no consequence for now
        //gbxml detailed compliance results?
    }
    logger.Info("END: UNDERGROUND WALL COUNT");
    //AddToOutPut("Underground Wall Count Test Result: ", report, true);
    logger.Info("START: SLABONGRADE COUNT");
    var sogctresult = GetSOGSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep);
    if (!sogctresult)
    {
        //do nothing, it has no consequence for now
        //gbxml detailed compliance results?
    }
    logger.Info("END: SLABONGRADE WALL COUNT");
    ////Surface Element tests
    ////Test 13 Execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.InteriorWallCountTolerance;
    //report.testType = TestType.Interior_Wall_Surface_Count;
    //units = "";
    logger.Info("START: INTERIOR WALL COUNT");
    //report = GetIWSurfaceCount(gbXMLdocs, gbXMLnsm, report, units);
    var iwctresult = GetIWSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep);
    if (!iwctresult)
    {
        //do nothing, it has no consequence for now
        //gbxml detailed compliance results?
    }
    logger.Info("END: INTERIOR WALL COUNT");
    //AddToOutPut("Interior Wall Surface Count Test Result: ", report, true);
    ////Surface Element tests
    ////Test 13 Execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.InteriorFloorCountTolerance;
    //report.testType = TestType.Interior_Floor_Surface_Count;
    //units = "";
    //report = GetIFSurfaceCount(gbXMLdocs, gbXMLnsm, report, units);
    logger.Info("START: INTERIOR FLOOR/CEILING COUNT");
    var ifctresult = GetIFSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep);
    if (!ifctresult)
    {
        //do nothing, it has no consequence for now
        //gbxml detailed compliance results?
    }
    logger.Info("END: INTERIOR FLOOR/CEILING COUNT");
    //AddToOutPut("Interior Floor Surface Count Test Result: ", report, true);
    ////Surface Element tests
    ////Test 14 Execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.InteriorWallCountTolerance;
    //report.testType = TestType.Roof_Surface_Count;
    //units = "";
    //report = GetRoofSurfaceCount(gbXMLdocs, gbXMLnsm, report, units);
    logger.Info("START: ROOF COUNT");
    var irctresult = GetRoofSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep);
    if (!irctresult)
    {
        //do nothing, it has no consequence for now
        //gbxml detailed compliance results?
    }
    logger.Info("END: ROOF COUNT");
    //AddToOutPut("Roof Surface Count Test Result: ", report, true);
    ////Surface Element tests
    ////Test 15 Execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.InteriorWallCountTolerance;
    //report.testType = TestType.Shading_Surface_Count;
    //units = "";
    //report = GetShadeSurfaceCount(gbXMLdocs, gbXMLnsm, report, units);
    logger.Info("START: SHADING DEVICE COUNT");
    var shadectresult = GetShadeSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep);
    if (!shadectresult)
    {
        //do nothing, it has no consequence for now
        //gbxml detailed compliance results?
    }
    //AddToOutPut("Shading Surface Count Test Result: ", report, true);
    logger.Info("END: SHADING DEVICE COUNT");
    ////Test 16 Execute
    //report.Clear();
    //report.tolerance = DOEgbXMLBasics.Tolerances.AirWallCountTolerance;
    //report.testType = TestType.Air_Surface_Count;
    //units = "";
    //report = GetAirSurfaceCount(gbXMLdocs, gbXMLnsm, report, units);
    logger.Info("START: AIR SURFACE COUNT");
    var asctresult = GetAirSurfaceCount(gbXMLdocs, gbXMLnsm, ref camprep);
    if (!asctresult)
    {
        //do nothing, it has no consequence for now
        //gbxml detailed compliance results?
    }
    logger.Info("END: AIR SURFACE COUNT");
    //AddToOutPut("Air Surface Count Test Result: ", report, true);
    #region surface detailed test
    //Jan 31-2012: We may not want to perform these if the surface counts fail, but for now, we will include these tests
    //Detailed Surface Checks
    //Store Surface Element Information
    List<SurfaceDefinitions> TestSurfaces = new List<SurfaceDefinitions>();
    XmlDocument TestFile = gbXMLdocs[0];
    XmlNamespaceManager TestNSM = gbXMLnsm[0];
    List<SurfaceDefinitions> StandardSurfaces = new List<SurfaceDefinitions>();
    XmlDocument StandardFile = gbXMLdocs[1];
    XmlNamespaceManager StandardNSM = gbXMLnsm[1];
    TestSurfaces = GetFileSurfaceDefs(TestFile, TestNSM);
    StandardSurfaces = GetFileSurfaceDefs(StandardFile, StandardNSM);
    string TestSurfaceTable = " <div class='container'><table class='table table-bordered'>";
    TestSurfaceTable += "<tr class='info'>" + "<td>" + "Test Section Name" + "</td>" + "<td>" + "Stand Surface ID" + "</td>" + "<td>" + "Test Surface ID" + "</td>" + "<td>" + "Stand Surface Tilt" + "</td>" + "<td>" + "Test Surface Tilt" + "</td>" + "<td>" + "Stand Surface Azimuth" + "</td>" + "<td>" + "Test Surface Azimuth" + "</td>" + "<td>" + "Stand Surface Height" + "</td>" + "<td>" + "Test Surface Height" + "</td>" + "<td>" + "Stand Surface Width" + "</td>" + "<td>" + "Test Surface Width" + "</td>" + "<td>" + "Pass/Fail" + "</td>" + "</tr>";
    //Test Surfaces Planar Test
    //all polyloops must be such that the surface defined by the coordinates is planar
    //report.Clear();
    //report.testType = TestType.Surface_Planar_Test;
    logger.Info("START: SURFACE PLANARITY TEST");
    //report = TestSurfacePlanarTest(TestSurfaces, report);
    var planarityResult = TestSurfacePlanarTest(TestSurfaces,ref camprep);
    if (!planarityResult)
    {
        camprep.SurfacesSummary.Find(x => x.FileType == "Standard").PassedAllTests = false;
        //gbxml detailed compliance results?
    }
    logger.Info("END: SURFACE PLANARITY TEST");
    //if (!report.passOrFail && mustBePlanar)
    //{
    //    AddToOutPut("Test File Planar Surface Check: ", report, true);
    //    report.Clear();
    //}
    //only run detailed surface checks if the surfaces are planar
    if(planarityResult)
    {
        //<For each surface in the Standard File, try to find a match for this surface in the test file>
        //Written Jan 31, 2013 by Chien Si Harriman, Senior Product Manager, Carmel Software Corporation
        //Execute Tests
        // globalMatchObject.MatchedSurfaceIds = new Dictionary<string, List<string>>();
        int i = 1;
        foreach (SurfaceDefinitions surface in StandardSurfaces)
        {
            report.Clear();
            DetailedSurfaceSummary ssSummary = new DetailedSurfaceSummary();
            //multiple tolerances used
            report.testType = TestType.Detailed_Surface_Checks;
            report.subTestIndex = i;
            // NOTE(review): debug leftover — this branch has no effect and looks like a
            // breakpoint anchor for two specific surface ids; candidate for removal.
            if (surface.SurfaceId == "su-zone_5_Srf_7" || surface.SurfaceId == "su-zone_0_Srf_0")
            {
                var d = 1;
            }
            logger.Info("START: DETAILED SURFACE TEST");
            GetSurfaceMatches(surface, TestSurfaces, ref ssSummary, validatorLength, testLength, testlengthConversion, standardlengthConversion, validatorArea, testArea, testareaConversion, standardareaConversion);
            logger.Info("END: DETAILED SURFACE TEST");
            camprep.SurfacesReport.Add(ssSummary);
        }
        #endregion
        #region opending detailed test
        //openings detailed tests
        List<OpeningDefinitions> TestOpenings = new List<OpeningDefinitions>();
        // NOTE(review): testFile/testNSM/standardFile/standardNSM below are declared but the
        // GetFileOpeningDefs calls use the capitalized TestFile/TestNSM/StandardFile/StandardNSM
        // locals from the surface region instead (same documents, so behavior is unaffected).
        XmlDocument testFile = gbXMLdocs[0];
        XmlNamespaceManager testNSM = gbXMLnsm[0];
        List<OpeningDefinitions> StandardOpenings = new List<OpeningDefinitions>();
        XmlDocument standardFile = gbXMLdocs[1];
        XmlNamespaceManager standardNSM = gbXMLnsm[1];
        TestOpenings = GetFileOpeningDefs(TestFile, TestNSM);
        StandardOpenings = GetFileOpeningDefs(StandardFile, StandardNSM);
        string TestOpeningTable = "";
        report.Clear();
        report.testType = TestType.Opening_Planar_Test;
        report = TestOpeningPlanarTest(TestOpenings, report);
        if (!report.passOrFail)
        {
            AddToOutPut("Test File Planar Opening Check: ", report, true);
            report.Clear();
        }
        //only run detailed opening checks if the opening are planar
        else
        {
            TestOpeningTable = "<div class='container'><table class='table table-bordered'>";
            TestOpeningTable += "<tr class='info'>" + "<td>" + "Test Section Name" + "</td>" + "<td>" + "Standard Opening Id" + "</td>" + "<td>" + "Test Opening Id" + "</td>" + "<td>" + "Standard Parent Surface Id" + "</td>" + "<td>" + "Test Parent Surface Id" + "</td>" + "<td>" + "Standard Parent Azimuth" + "</td>" + "<td>" + "Test Parent Azimuth" + "</td>" + "<td>" + "Standard Parent Tilt" + "</td>" + "<td>" + "Test Parent Tilt" + "</td>" + "<td>" + "Standard Surface Area" + "</td>" + "<td>" + "Test Surface Area" + "</td>" + "<td>" + "Pass/Fail" + "</td>" + "</tr>";
            globalMatchObject.MatchedOpeningIds = new Dictionary<string, List<string>>();
            int j = 1;
            //if no openings remove the table.
            if (StandardOpenings.Count < 1)
                TestOpeningTable = "";
            //compare the openings
            foreach (OpeningDefinitions opening in StandardOpenings)
            {
                report.Clear();
                report.testType = TestType.Detailed_Opening_Checks;
                report.subTestIndex = j;
                report = GetPossibleOpeningMatches(opening, TestOpenings, report);
                AddToOutPut("Test 17 for Opening number " + j + " Result: ", report, false);
                foreach (OpeningDefinitions to in TestOpenings)
                {
                    if (globalMatchObject.MatchedOpeningIds.ContainsKey(opening.OpeningId))
                    {
                        foreach (string id in globalMatchObject.MatchedOpeningIds[opening.OpeningId])
                        {
                            if (to.OpeningId == id)
                            {
                                if (report.passOrFail)
                                    TestOpeningTable += "<tr class='success'>" + "<td>" + "<a href='TestDetailPage.aspx?type=" + (int)report.testType + "&subtype=" + report.subTestIndex + "' target='_blank'>" + "Detailed Opening Checks " + report.subTestIndex + "</a>" + "</td>" + "<td>" + opening.OpeningId + "</td>" + "<td>" + to.OpeningId + "</td>" + "<td>" + opening.ParentSurfaceId + "</td>" + "<td>" + to.ParentSurfaceId + "</td>" + "<td>" + String.Format("{0:#,0.00}", opening.ParentAzimuth) + "</td>" + "<td>" + String.Format("{0:#,0.00}", to.ParentAzimuth) + "</td>" + "<td>" + String.Format("{0:#,0.00}", opening.ParentTilt) + "</td>" + "<td>" + String.Format("{0:#,0.00}", to.ParentTilt) + "</td>" + "<td>" + String.Format("{0:#,0.00}", opening.surfaceArea) + "</td>" + "<td>" + String.Format("{0:#,0.00}", to.surfaceArea) + "</td>" + "<td>" + "Pass" + "</td>" + "</tr>";
                            }
                        }
                    }
                }
                //if didn't find match means it failed the test
                if (!report.passOrFail)
                    TestOpeningTable += "<tr class='error'>" + "<td>" + "<a href='TestDetailPage.aspx?type=" + (int)report.testType + "&subtype=" + report.subTestIndex + "' target='_blank'>" + "Detailed Opening Checks " + report.subTestIndex + "</a>" + "</td>" + "<td>" + opening.OpeningId + "</td>" + "<td>" + "---" + "</td>" + "<td>" + opening.ParentSurfaceId + "</td>" + "<td>" + "---" + "</td>" + "<td>" + String.Format("{0:#,0.00}", opening.ParentAzimuth) + "</td>" + "<td>" + "---" + "</td>" + "<td>" + String.Format("{0:#,0.00}", opening.ParentTilt) + "</td>" + "<td>" + "---" + "</td>" + "<td>" + String.Format("{0:#,0.00}", opening.surfaceArea) + "</td>" + "<td>" + "---" + "</td>" + "<td>" + "Fail" + "</td>" + "</tr>";
                j += 1;
            }
        }
        TestOpeningTable += "</table></div><br/>";
        #endregion
        //close table
        table += "</table></div><br/>";
        //add TestSurfaceTable
        table += TestSurfaceTable + TestOpeningTable;
    }
    //CreateSummaryTable();
    ((FileAppender)LogManager.GetCurrentLoggers()[0].Logger.Repository.GetAppenders()[0]).Close();
}
//Created July 2016, Chien Si Harriman
/// <summary>
/// Storey z-height comparison between the standard and test gbXML files (CampusReport era).
/// Harvests BuildingStorey Name (dictionary key) and Level (height value) pairs from both
/// documents, then checks each standard-file level against the test file: first by identical
/// Name, then by closest height. Results are written to the logger only.
/// </summary>
/// <param name="gbXMLDocs">Documents to compare; index 0 is treated as the test file, odd indices as the standard file.</param>
/// <param name="gbXMLnsm">Namespace managers matched one-to-one with <paramref name="gbXMLDocs"/>.</param>
/// <param name="cr">Campus report (currently not written to by this method).</param>
/// <param name="standardLength">Length unit of the standard file (not used in this method body).</param>
/// <param name="testLength">Length unit of the test file (not used in this method body).</param>
/// <param name="testlengthConversion">Multiplier converting test-file levels to the common unit.</param>
/// <param name="standardlengthConversion">Multiplier converting standard-file levels to the common unit.</param>
/// <param name="tolerance">Maximum allowed height difference for a match.</param>
/// <returns>false on a Name-matched level outside tolerance, empty level sets, or any exception; otherwise true.</returns>
public static bool GetStoryHeights(List<XmlDocument> gbXMLDocs, List<XmlNamespaceManager> gbXMLnsm, ref CampusReport cr, Conversions.lengthUnitEnum standardLength, Conversions.lengthUnitEnum testLength, double testlengthConversion, double standardlengthConversion, double tolerance)
{
    //small dictionaries I make to keep track of the story level names and heights
    //standard file
    Dictionary<string, double> standardStoryHeight = new Dictionary<string, double>();
    //Test File
    Dictionary<string, double> testStoryHeight = new Dictionary<string, double>();
    string key = null;
    string val = null;
    string standLevel = "";
    for (int i = 0; i < gbXMLDocs.Count; i++)
    {
        try
        {
            //assuming that this will be plenty large for now
            XmlDocument gbXMLTestFile = gbXMLDocs[i];
            XmlNamespaceManager gbXMLns = gbXMLnsm[i];
            XmlNodeList nodes = gbXMLTestFile.SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:BuildingStorey", gbXMLns);
            int nodecount = nodes.Count;
            foreach (XmlNode node in nodes)
            {
                XmlNodeList childNodes = node.ChildNodes;
                foreach (XmlNode childNode in childNodes)
                {
                    // Level carries the z-height; Name is the storey's identifier.
                    if (childNode.Name.ToString() == "Level") { val = childNode.InnerText; }
                    else if (childNode.Name.ToString() == "Name") { key = childNode.InnerText; }
                    else { continue; }
                    // odd index = standard file, even index = test file (caller passes [test, standard])
                    if (i % 2 != 0)
                    {
                        if (key != null && val != null)
                        {
                            double standardlevelvalue = Convert.ToDouble(val) * standardlengthConversion;
                            standardStoryHeight.Add(key, standardlevelvalue);
                            key = null;
                            val = null;
                        }
                        else { }
                    }
                    else
                    {
                        if (key != null && val != null)
                        {
                            double testlevelvalue = Convert.ToDouble(val) * testlengthConversion;
                            testStoryHeight.Add(key, testlevelvalue);
                            key = null;
                            val = null;
                        }
                        else { }
                    }
                }
            }
            //reporting — only after the standard file (odd index) has been harvested
            if (i % 2 != 0)
            {
                if (standardStoryHeight.Count == 0)
                {
                    logger.Info("TEST FILE FAILURE: Test cannot be completed. Standard File Level Count returns Zero.");
                    return false;
                }
                else if (testStoryHeight.Count == 0)
                {
                    logger.Info("TEST FILE FAILURE: Test cannot be completed. Test File Level Count returns Zero.");
                    return false;
                }
                else
                {
                    //set pass to true
                    int count = 0;
                    foreach (KeyValuePair<string, double> standardPair2 in standardStoryHeight)
                    {
                        count++;
                        double difference;
                        StoryHeightMin = 10000;
                        string equivLevel = "";
                        if (testStoryHeight.ContainsKey(standardPair2.Key))
                        {
                            double matchkeydiff = Math.Abs(standardPair2.Value - testStoryHeight[standardPair2.Key]);
                            if (matchkeydiff == 0)
                            {
                                logger.Info("TEST FILE SUCCESS: Matched Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key + " Exactly");
                                continue;
                            }
                            else if (matchkeydiff < tolerance)
                            {
                                logger.Info("TEST FILE SUCCESS: Matched Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key + " within allowable tolerance.");
                                continue;
                            }
                            else
                            {
                                logger.Info("TEST FILE FAILURE: Did NOT Match Standard File's " + standardPair2.Value + " with Test File's " + testStoryHeight[standardPair2.Key] + " @ " + standardPair2.Key + " within allowable tolerance.");
                                return false;
                            }
                        }
                        // fallback: no identically named storey in the test file — find the closest height.
                        // NOTE(review): Convert.ToDouble(…Key) assumes storey Names are numeric text;
                        // a non-numeric Name lands in the catch below and fails the whole test. Confirm
                        // this is the intended contract for the test-case files.
                        foreach (KeyValuePair<string, double> testPair in testStoryHeight)
                        {
                            difference = Math.Abs(Convert.ToDouble(standardPair2.Key) * standardlengthConversion - Convert.ToDouble(testPair.Key) * testlengthConversion);
                            //store all levels and the difference between them
                            if (StoryHeightMin > difference)
                            {
                                StoryHeightMin = difference;
                                standLevel = standardPair2.Value.ToString();
                                // BUGFIX: equivLevel was never assigned, so the failure log below
                                // always reported an empty "closest level" name.
                                equivLevel = testPair.Key;
                            }
                        }
                        if (StoryHeightMin < tolerance)
                        {
                            logger.Info("TEST FILE SUCCESS: Matched Standard File's " + standardPair2.Value + " @ " + standardPair2.Key + " within the Tolerance allowed");
                        }
                        else
                        {
                            // NOTE(review): this branch only logs and does not return false, so a
                            // standard level with no equivalent within tolerance does not fail the
                            // test overall — confirm whether that is intended.
                            logger.Info("Standard File's " + standardPair2.Value + " equivalent was not found in the test file. The closest level in the test file was found at " + equivLevel + " in the test file. The difference in heights was " + StoryHeightMin.ToString());
                        }
                    }
                }
            }
        }
        catch (Exception e)
        {
            logger.Info(e.ToString());
            logger.Info(" Failed to complete Building Story Level test in the XML file.");
            return false;
        }
    }
    return true;
}
// Builds the TypedConstant for a 'params' parameter from the tail of the
// constructor argument list.
//
// Three outcomes, checked in order:
//   1. No remaining arguments -> an empty array constant of the parameter type.
//   2. Exactly one remaining argument that is already an array matching the
//      parameter type (by identity, or by a valid built-in implicit reference
//      conversion) -> that argument is passed through unchanged.
//   3. Otherwise -> the remaining arguments are collected into a new array
//      constant of the parameter type.
private static TypedConstant GetParamArrayArgument(ParameterSymbol parameter, ImmutableArray<TypedConstant> constructorArgsArray, int argumentsCount, int argsConsumedCount, Conversions conversions)
{
    Debug.Assert(argsConsumedCount <= argumentsCount);

    int remaining = argumentsCount - argsConsumedCount;
    if (remaining == 0)
    {
        return new TypedConstant(parameter.Type, ImmutableArray<TypedConstant>.Empty);
    }

    if (remaining == 1)
    {
        TypedConstant sole = constructorArgsArray[argsConsumedCount];
        if (sole.Kind == TypedConstantKind.Array)
        {
            var soleType = (TypeSymbol)sole.Type;

            // Identity match: skip conversion classification entirely.
            if (soleType == parameter.Type)
            {
                return sole;
            }

            // Argument and parameter were already bound, so use-site diagnostics
            // are deliberately ignored here.
            HashSet<DiagnosticInfo> useSiteDiagnostics = null;
            Conversion conversion = conversions.ClassifyConversion(soleType, parameter.Type, ref useSiteDiagnostics, builtinOnly: true);

            // This can legitimately fail even after successful overload resolution:
            // e.g. an int[] passed to 'params object[]' is treated as a single
            // element of the object[], not as the array itself.
            if (conversion.IsValid && conversion.Kind == ConversionKind.ImplicitReference)
            {
                return sole;
            }
        }
    }

    Debug.Assert(!constructorArgsArray.IsDefault);
    Debug.Assert(argsConsumedCount <= constructorArgsArray.Length);

    var elements = new TypedConstant[remaining];
    for (int offset = 0; offset < remaining; offset++)
    {
        elements[offset] = constructorArgsArray[argsConsumedCount + offset];
    }

    return new TypedConstant(parameter.Type, elements.AsImmutableOrNull());
}
// Compares per-space floor areas between the standard and test gbXML files.
//
// The input lists alternate: odd indices of gbXMLDocs populate the standard-file
// dictionary, even indices the test-file dictionary (see the `i % 2 != 0` branch
// below). Areas are read from /gbXML/Campus/Building/Space/Area and keyed by the
// parent Space's "id" attribute; testareaConversion / standardareaConversion
// rescale the raw values, and report.unit is taken from standardUnits.
//
// "Thin-walled" handling: if the first document's ProgramInfo/ProductName
// normalizes to "openstudio", standard areas are taken from
// testDetails.ThinWalledSpecs (matched by SpaceName == space id, when the list
// is non-empty) instead of the converted XML value.
// NOTE(review): ThinWalledSpecs.Find can return null for an unmatched space
// name, which would throw inside the try block and surface as a test failure
// via the catch — confirm that is intended.
//
// For every standard-file space id, the comparison passes when the test file
// contains the same id and the absolute area difference is zero or strictly
// less than report.tolerance; an id missing from the test file fails the whole
// test immediately. Results accumulate in report.standResult / testResult /
// idList / MessageList / TestPassedDict and the populated report is returned.
// NOTE(review): duplicate space ids in either file will throw on
// Dictionary.Add and be reported through the catch block.
public static DOEgbXMLReportingObj TestSpaceAreas(List<XmlDocument> gbXMLDocs, List<XmlNamespaceManager> gbXMLnsm, DOEgbXMLReportingObj report, Conversions.areaUnitEnum standardUnits, Conversions.areaUnitEnum testUnits, double testareaConversion,double standardareaConversion,DOEgbXMLTestDetail testDetails) { //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML //added Feb 13 2013 report.testSummary = "This test compares the square footage of spaces in the test and standard files. It does this by searching"; report.testSummary += "for a unique Space id in both the test and standard files, and finding a match. Once a match is found, it then"; report.testSummary += " finds the square footage reported for the Space area, and compares them to ensure they are the same or"; report.testSummary += " within tolerance. For example, if the standard file has a Space with id = \"Space-1\" with an area of"; report.testSummary += "250 square feet, then this test searches through the test file for a Space with the identical id."; report.testSummary += " Once this space has been located, the test then compares the Area to 250 square feet. "; report.testSummary += "If they are identical, the test is done, and passes. We have built a tolerance in this test, meaning the"; report.testSummary += " areas do not need to match perfectly in the standard file and test file. As long as your test file's value"; report.testSummary += " for Space Area is +/- this tolerance, the test will pass. 
Using the previous example, if the allowable"; report.testSummary += " tolerance is 1% (1% of 250 is 2.5 sf), then the test file may have a space area ranging from 247.5 to 252.5"; report.testSummary += " square feet, and the test will still delcare \"Pass\"."; report.unit = standardUnits.ToString(); report.passOrFail = true; string spaceId = ""; //assuming that this will be plenty large for now Dictionary<string, double> standardFileAreaDict = new Dictionary<string, double>(); Dictionary<string, double> testFileAreaDict = new Dictionary<string, double>(); bool thinWalled = false; try { //check to see if the test file comes from OpenStudio or Bentley (non-thick wall, or non-centerline geometry) XmlNamespaceManager gbXMLnstw = gbXMLnsm[0]; XmlNode productName = gbXMLDocs[0].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:DocumentHistory/gbXMLv5:ProgramInfo/gbXMLv5:ProductName",gbXMLnstw); if (productName.InnerText.ToLower().Replace(" ", String.Empty).Trim() == "openstudio")//TODO: consider a different test { thinWalled = true; } for (int i = 0; i < gbXMLDocs.Count; i++) { XmlDocument gbXMLTestFile = gbXMLDocs[i]; XmlNamespaceManager gbXMLns = gbXMLnsm[i]; XmlNodeList spaceNodes = gbXMLDocs[i].SelectNodes("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Space/gbXMLv5:Area", gbXMLnsm[i]); //make lists of the areas in each project foreach (XmlNode spaceNode in spaceNodes) { string area = spaceNode.InnerText; if (i % 2 != 0) { for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++) { if (spaceNode.ParentNode.Attributes[n].Name == "id") { spaceId = spaceNode.ParentNode.Attributes[n].Value; if(!thinWalled) { //no conversion necessary standardFileAreaDict.Add(spaceId, Convert.ToDouble(area)*standardareaConversion); } else { if(testDetails.ThinWalledSpecs.Count > 0) { var thinwalleddef = testDetails.ThinWalledSpecs.Find(x => x.SpaceName == spaceId); //it is critical that space names match for these tests. 
standardFileAreaDict.Add(spaceId, thinwalleddef.FloorArea); } else { //no conversion necessary standardFileAreaDict.Add(spaceId, Convert.ToDouble(area) * standardareaConversion); } } break; } } } else { for (int n = 0; n < spaceNode.ParentNode.Attributes.Count; n++) { if (spaceNode.ParentNode.Attributes[n].Name == "id") { spaceId = spaceNode.ParentNode.Attributes[n].Value; double convertedArea = Convert.ToDouble(area) * testareaConversion; testFileAreaDict.Add(spaceId, convertedArea); break; } } } } } var standardKeys = standardFileAreaDict.Keys; foreach (string key in standardKeys) { if (testFileAreaDict.ContainsKey(key)) { double testFileSpaceArea = testFileAreaDict[key]; double standardFileSpaceArea = standardFileAreaDict[key]; report.standResult.Add(standardFileSpaceArea.ToString("#.000")); report.testResult.Add(testFileSpaceArea.ToString("#.000")); report.idList.Add(key); double difference = Math.Abs(testFileSpaceArea - standardFileSpaceArea); if (difference == 0) { report.MessageList.Add("For Space Id: " + key + ". Success finding matching space area. The Standard File and the Test File both have a space with an area = " + testFileSpaceArea.ToString("#.000") + " " + report.unit + ". "); report.TestPassedDict.Add(key, true); } else if (difference < report.tolerance) { report.MessageList.Add("For Space Id: " + key + ". Success finding matching space area. The Standard File space area of " + standardFileSpaceArea.ToString("#.000") + " and the Test File space area of " + testFileSpaceArea.ToString("#.000") + " " + report.unit + " is within the allowable tolerance of " + report.tolerance.ToString() + " " + report.unit); report.TestPassedDict.Add(key, true); } else { report.MessageList.Add("For space Id: " + key + ". Failure to find an space area match. THe area equal to = " + standardFileSpaceArea.ToString("#.000") + " " + report.unit + " in the Standard File could not be found in the Test File. 
"); report.TestPassedDict.Add(key, false); } } else { report.standResult.Add("---"); report.testResult.Add("Could not be matched"); report.idList.Add(key); //failure to match spaceIds report.MessageList.Add("Test File and Standard File space names could not be matched. SpaceId: " + key + " could not be found in the test file."); report.passOrFail = false; return report; } } return report; } catch (Exception e) { report.MessageList.Add(e.ToString()); report.longMsg = "Failed to complete the " + report.testType + ". See exceptions noted."; report.passOrFail = false; return report; } report.longMsg = "Fatal " + report.testType + " Test Failure"; report.passOrFail = false; return report; }
//this method relies on an absolute difference for tolerance tests.
// Compares the /gbXML/Campus/Building/Area value of the standard file (odd
// indices of gbXMLDocs) against the test file's value (stored at the
// immediately preceding even index), after applying the supplied unit
// conversion factors. Returns as soon as the first standard/test pair has
// been judged, with report.standResult/testResult, longMsg and passOrFail
// populated; any exception (e.g. a missing Area node) is reported on the
// returned object rather than thrown.
public static DOEgbXMLReportingObj GetBuildingArea(List<XmlDocument> gbXMLDocs, List<XmlNamespaceManager> gbXMLnsm, DOEgbXMLReportingObj report, Conversions.areaUnitEnum standardUnits, Conversions.areaUnitEnum testUnits, double testareaConversion, double standardareaConversion)
{
    //this summary is text that describes to a lay user what this test does, and how it works functionally. The user should have some familiarity with the basic knowledge of gbXML
    //added Feb 13 2013
    report.testSummary = "This test compares the values stored in the Building Area node of the standard and test gbXML files.";
    report.testSummary += " This Building area is the sum total of the areas of all spaces created in gbXML.";
    report.testSummary += " For example, if a small building has five spaces of area = 100 square feet each, then the sum of that area is";
    report.testSummary += " 5 x 100 = 500 square feet. The building area value would be 500 square feet.";
    report.testSummary += " We have built a tolerance in this test, meaning the building areas do not need to match perfectly in the";
    report.testSummary += " standard file and test file. As long as your test file's value for Building Area is +/- this tolerance, the";
    report.testSummary += " test will pass. Using the previous example, if the allowable tolerance is 1% (1% of 500 is 5 sf), then the test file may have a building area ranging from 495 to 505 square feet, and will still be declared to pass this test.";
    report.unit = standardUnits.ToString();
    string testUOM = testUnits.ToString();
    // BUG FIX: one slot per document (previously a fixed string[50], which
    // overflows with an IndexOutOfRangeException for more than 50 documents).
    string[] resultsArray = new string[gbXMLDocs.Count];
    for (int i = 0; i < gbXMLDocs.Count; i++)
    {
        try
        {
            var node = gbXMLDocs[i].SelectSingleNode("/gbXMLv5:gbXML/gbXMLv5:Campus/gbXMLv5:Building/gbXMLv5:Area", gbXMLnsm[i]);
            //need to test for accuracy of result if accurate then pass, if not, how much inaccuracy and return this result
            resultsArray[i] = node.InnerText;
            // Odd index = standard file; its test-file counterpart was stored at i - 1.
            if (i % 2 != 0)
            {
                //setup standard result and test result
                if (testareaConversion != 1)
                {
                    report.MessageList.Add("Converted the test file from " + testUOM + " to " + report.unit + ".");
                }
                //apply the conversion factor on the test file always, regardless.
                double standardArea = Convert.ToDouble(resultsArray[i]) * standardareaConversion;
                double testArea = Convert.ToDouble(resultsArray[(i - 1)]) * testareaConversion;
                // BUG FIX: the doubles were previously converted with ToString() before
                // being passed to String.Format, so the "{0:#,0.00}" numeric format was
                // silently ignored; pass the numeric values so the format applies.
                report.standResult.Add(String.Format("{0:#,0.00}", standardArea));
                report.testResult.Add(String.Format("{0:#,0.00}", testArea));
                report.idList.Add("");
                double difference = standardArea - testArea;
                if (Math.Abs(difference) == 0)
                {
                    // BUG FIX: added the missing space before "matches".
                    report.longMsg = "The test file's " + report.testType + " matches the standard file Building Area exactly.";
                    report.passOrFail = true;
                    return report;
                }
                else if (Math.Abs(difference) <= report.tolerance)
                {
                    report.longMsg = "The test file's " + report.testType + " is within the allowable tolerance of = " + report.tolerance.ToString() + " " + report.unit;
                    report.passOrFail = true;
                    return report;
                }
                else
                {
                    // BUG FIX: added the missing ". " between the unit and the next sentence.
                    report.longMsg = "The test file's " + report.testType + " is not within the allowable tolerance of " + report.tolerance.ToString() + " " + report.unit + ". The difference between the standard and test file is " + difference.ToString() + ".";
                    report.passOrFail = false;
                    return report;
                }
            }
            else
            {
                // Even index: value recorded above; judged when its standard pair arrives.
                continue;
            }
        }
        catch (Exception e)
        {
            report.MessageList.Add(e.ToString());
            report.longMsg = " Failed to locate " + report.testType + " in the XML file.";
            report.passOrFail = false;
            return report;
        }
    }
    // Reached only when no standard/test pair produced a verdict (e.g. empty input).
    report.longMsg = "Fatal " + report.testType + " Test Failure";
    report.passOrFail = false;
    return report;
}
// Caches the compilation's object type, read off the shared Conversions instance.
// NOTE(review): the class name suggests the cached type is the erasure target for
// 'dynamic'; the rest of the class is not visible in this chunk — confirm there.
public DynamicErasure(Conversions conversions) { this.objectType = conversions.objectType; }