/// <summary>
/// Runs <paramref name="transactional"/> inside a unit of work and guarantees the
/// transaction is rolled back afterwards (nothing it does is ever committed).
/// </summary>
/// <param name="level">Isolation level used when a new transaction must be started.</param>
/// <param name="transactional">The delegate to execute.</param>
/// <param name="nestingOptions">How to nest into / reuse an ambient unit of work.</param>
public static void AutoRollbackTransaction(IsolationLevel level, Proc transactional, UnitOfWorkNestingOptions nestingOptions)
{
    using (UnitOfWork.Start(nestingOptions))
    {
        // If we are already in a transaction, don't start a new one
        if (UnitOfWork.Current.IsInActiveTransaction)
        {
            transactional();
        }
        else
        {
            RhinoTransaction tx = UnitOfWork.Current.BeginTransaction(level);
            try
            {
                transactional();
                tx.Rollback(); // intentional: the success path also rolls back (auto-rollback semantics)
            }
            catch
            {
                // roll back on failure too, then let the original exception propagate
                tx.Rollback();
                throw;
            }
            finally
            {
                tx.Dispose();
            }
        }
    }
}
/// <summary>
/// Executes the named stored procedure and returns its first result table as a
/// list of name/value dictionaries.
/// </summary>
/// <param name="ProcName">Stored procedure name.</param>
/// <param name="parms">Name/value pairs: [name0, value0, name1, value1, ...].</param>
/// <returns>The first result table converted via ToSimpleTable().</returns>
/// <exception cref="Exception">Wraps any failure with diagnostic context (identity, RSA hint, stack).</exception>
public IEnumerable<Dictionary<string, object>> ProcCall(string ProcName, params object[] parms)
{
    try
    {
        // Lazily resolve the connection string on first use.
        if (Proc.ConnectionString == null)
        {
            Proc.ConnectionString = ConfigurationManager.ConnectionStrings["iTRAACv2ConnectionString"].ConnectionString;
            Proc.ConnectionString += WebDash.Web.Properties.Settings.Default.iTRAACv2ConnectionString;
        }

        using (Proc Proc1 = new Proc(ProcName))
        {
            // parms arrive flattened as name/value pairs
            if (parms != null)
                for (int i = 0; i < parms.Length; i += 2)
                    Proc1[parms[i].ToString()] = parms[i + 1];

            return (Proc1.ExecuteDataSet(false).Table0.ToSimpleTable());
        }
    }
    catch (Exception ex)
    {
        // FIX: pass `ex` as the InnerException so the original exception type and
        // stack trace survive for upstream handlers/logging (previously they were
        // only flattened into the message string).
        throw new Exception(
            "** Exception occurred***\r\rRunning under account: " + WindowsIdentity.GetCurrent().Name +
            "\r\rMessage:\r" + ex.Message +
            ((ex.Message.Contains("RSA")) ?
                "\r\rTip: If you get the error \"The RSA key container could not be opened.\"," +
                "\rthen you need to run the \"RSAKey_Manager.cmd\" on the web server." : "") +
            "\r\rStack: \r" + ex.StackTrace,
            ex);
    }
}
// Handles the "Return Form" button: posts the return to the TaxForm_Return sproc
// and reflects the outcome in the UI (popup on miss, history-grid row otherwise).
private void btnReturnForm_Click(object sender, RoutedEventArgs e)
{
    using (Proc TaxForm_Return = new Proc("TaxForm_Return"))
    {
        TaxForm_Return["@OrderNumber"] = txtOrderNumber.Text;
        TaxForm_Return["@UserGUID"] = User.Current.GUID;
        // bail if the execute itself failed (errors surfaced via App.ShowSystemMessage)
        if (!TaxForm_Return.ExecuteNonQuery(App.ShowSystemMessage, "", false)) return;

        bool Success = (bool)TaxForm_Return["@Success"];

        //if we simply didn't get a hit on the supplied OrderNumber, then show that via generic message popup
        if (!Success && (TaxForm_Return["@CustomerName"].ToString() == ""))
            App.ShowSystemMessage("Tax Form #: " + txtOrderNumber.Text + " - " + TaxForm_Return["@Message"].ToString());

        // otherwise add a corresponding row to the returns grid as an aesthetically pleasing list of recent history
        else
        {
            ReturnForms.Add(
                TaxForm_Return["@TaxFormGUID"].ToString(),
                TaxForm_Return["@OrderNumber"].ToString(),
                TaxForm_Return["@CustomerName"].ToString(),
                TaxForm_Return["@Message"].ToString(),
                Success);

            // reset the input for rapid-fire scanning of the next form
            txtOrderNumber.Clear();
            txtOrderNumber.Focus();

            //if return committed successfully and autofile is checked, open the taxform edit screen to encourage the user to close out with a full file while we're they're at it
            if (Success && chkAutoFile.IsChecked.Value && OpenTaxForm != null)
                OpenTaxForm(TaxForm_Return["@TaxFormGUID"].ToString());
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="RepeatButton"/> class.
/// </summary>
public RepeatButton()
{
    // Bind delegates to the base RepeatButton's StartTimer/StopTimer via the
    // Dynamic<> reflection helper so multitouch contacts can drive the same
    // repeat-interval behavior as mouse input.
    // NOTE(review): presumably these are nonpublic members — confirm against Dynamic<>.
    startTimerMethod = Dynamic<System.Windows.Controls.Primitives.RepeatButton>.Instance.Procedure.Explicit.CreateDelegate("StartTimer");
    stopTimerMethod = Dynamic<System.Windows.Controls.Primitives.RepeatButton>.Instance.Procedure.Explicit.CreateDelegate("StopTimer");

    // Start repeating when a touch contact lands, stop when it lifts.
    AddHandler(MultitouchScreen.NewContactEvent, (NewContactEventHandler)OnNewContact);
    AddHandler(MultitouchScreen.ContactRemovedEvent, (ContactEventHandler)OnContactRemoved);
}
/// <summary>
/// Enqueues a work item at the tail of the queue and wakes one waiting consumer.
/// </summary>
public void QueueProc(Proc proc)
{
    lock (_procList)
    {
        _procList.AddLast(proc);
        // Wake a single thread blocked in Monitor.Wait(_procList); must happen
        // inside the lock on the same object.
        Monitor.Pulse(_procList);
    }
}
// Private ctor: populates office-level settings from the "Settings_s" stored procedure.
private Settings()
{
    using (Proc Settings_s = new Proc("Settings_s"))
    {
        Settings_s.ExecuteDataTable();
        // Single-row result: Row0 carries the settings values.
        OfficeCode = Settings_s.Row0["OfficeCode"].ToString();
        AdminPassword = Settings_s.Row0["AdminPassword"].ToString();
    }
}
// Static ctor: pre-loads the remark-type lookup tables into the shared model
// cache via the "RemarkTypes_s" stored procedure.
static RemarkModel()
{
    // ReSharper disable InconsistentNaming
    using (var RemarkTypes_s = new Proc("RemarkTypes_s"))
    // ReSharper restore InconsistentNaming
    {
        ModelBase.CacheTables(RemarkTypes_s);
    }
}
// Builds a CLR array of the requested element type and size, populated from the
// Ruby array's backing elements.
public override object Call3(Class last_class, object recv, Frame caller, Proc block, object typeref, object size, object array)
{
    // Resolve the CLR element type, then allocate the target array.
    System.Type elementType = TypeOf.Convert(typeref, caller);
    System.Array result = System.Array.CreateInstance(elementType, (int)size);

    // Copy the Ruby array's elements into the CLR array, slot by slot.
    System.Collections.ArrayList inits = ((Ruby.Array)array).value;
    int index = 0;
    foreach (object init in inits)
    {
        result.SetValue(init, index);
        index++;
    }

    return result;
}
// Static ctor: layers settings in order — global DB, then local DB, then
// IsolatedStorage app settings (later layers augment the same collection).
static Settings()
{
    //load global DB settings...
    using (Proc ControlCentral_s = new Proc("ControlCentral_s"))
        _DBSettings = ControlCentral_s.ExecuteNameValueCollection();

    //load local db settings...adding them to the existing NameValueCollection
    using (Proc LocalSettings_s = new Proc("LocalSettings_s"))
        LocalSettings_s.ExecuteNameValueCollection(_DBSettings);

    //load IsolatedStorage based AppSettings (e.g. matrix printer mappings)
    LoadAppSettings();
}
// Materializes any IEnumerable receiver into a Ruby Array.
public override object Call0(Class last_class, object recv, Frame caller, Proc block)
{
    System.Collections.IEnumerable source = (System.Collections.IEnumerable)recv;

    // Buffer every element, then hand the buffer to the Ruby Array factory.
    System.Collections.ArrayList buffer = new System.Collections.ArrayList();
    foreach (object item in source)
        buffer.Add(item);

    return Array.CreateUsing(buffer);
}
/// <summary>
/// Times a delegate using the high-resolution performance counter.
/// </summary>
/// <param name="performance">The code to measure.</param>
/// <returns>
/// Elapsed counter ticks divided by <c>_frequency</c> (seconds, assuming
/// <c>_frequency</c> is the counter's ticks-per-second — confirm at its declaration).
/// </returns>
public static double PerformanceCounter(Proc performance)
{
    // `out` arguments are definitely assigned by the callee, so the previous
    // redundant `= 0` initializers were removed.
    long start, end;

    QueryPerformanceCounter(out start);
    performance();
    QueryPerformanceCounter(out end);

    return (double)(end - start) / _frequency;
}
/// <summary>
/// Looks up an entity row from the shared dataset cache, fetching and caching it
/// via the given stored procedure on a miss.
/// </summary>
/// <param name="GUID">Row key (RowGUID) of the entity.</param>
/// <param name="ProcName">Stored procedure that loads the entity's table(s).</param>
/// <param name="TableName">
/// In: the entity's primary table name. Out: the comma-delimited list of all table
/// names the proc returned (kicked back out to the calling context).
/// </param>
/// <returns>The cached row, or the freshly loaded row after a fetch.</returns>
static public DataRowView TableCache(string GUID, string ProcName, ref string TableName)
{
    //future: if we get to the point where we want to get more parms back from a proc
    //        then we could tweak TableCache() to return it to the outer context

    //save the initial singular incoming table name so we can lookup on this cleanly versus the potential list of output tables
    string MainTableName = TableName.ToLower();

    DataRowView row = RowLookup(MainTableName, GUID);
    if (row != null) return (row);

    using (Proc Entity_s = new Proc(ProcName))
    {
        Entity_s["@GUID"] = GUID;
        DataSet ds = Entity_s.ExecuteDataSet();
        TableName = Entity_s["@TableNames"].ToString(); //this kicks the potential comma LIST of tables back out to the calling context
        string[] tableNames = TableName.Split(',');

        // FIX: the original tracked newness in a single bool that was overwritten on
        // every loop pass, so only the LAST table decided whether PK/sort metadata
        // was applied to ALL tables. Track each newly-seen cache table individually.
        var newTableNames = new System.Collections.Generic.List<string>();
        for (int i = 0; i < tableNames.Length; i++)
        {
            string tableName = tableNames[i].Trim().ToLower();
            ds.Tables[i].TableName = tableName;
            if (!dsCache.Tables.Contains(tableName)) newTableNames.Add(tableName);
        }

        dsCache.Merge(ds, true);

        //too bad Dataset.merge doesn't copy over all this metadata
        foreach (string tableName in newTableNames)
        {
            DataTable t = dsCache.Tables[tableName];
            t.PrimaryKey = new DataColumn[] { t.Columns["RowGUID"] };
            t.DefaultView.Sort = "RowGUID";
        }
    }

    return (RowLookup(MainTableName, GUID));
}
/// <summary>
/// Runs <paramref name="procediment"/> while capturing unhandled exceptions from
/// other threads, then fails the test if any were observed.
/// </summary>
public void Run(Proc procediment)
{
    ExceptionHandler eh = new ExceptionHandler();
    AppDomain.CurrentDomain.UnhandledException += eh.OnThreadException;
    try
    {
        procediment.Invoke();
    }
    finally
    {
        // FIX: unsubscribe even when the delegate throws; previously a throwing run
        // left the handler attached to the AppDomain for the rest of the session,
        // leaking it and skewing later runs' counts.
        AppDomain.CurrentDomain.UnhandledException -= eh.OnThreadException;
    }

    if (eh.Count > 0)
        Assert.Fail("Engine Validator concurrent issues. Concurrent issues count {0}", eh.Count);
}
/// <summary>
/// Initializes a new instance of the <see cref="TouchCanvas"/> class.
/// </summary>
public TouchCanvas()
{
    // One stylus-point collection per active contact id.
    points = new Dictionary<int, StylusPointCollection>();

    // Bind a delegate to InkCanvas.RaiseGestureOrStrokeCollected via the Dynamic<>
    // reflection helper so touch strokes can feed the gesture pipeline.
    // NOTE(review): presumably a nonpublic member — confirm against Dynamic<>.
    raiseGestureOrStrokeCollectedMethod = Dynamic<InkCanvas>.Instance.Procedure.Explicit<InkCanvasStrokeCollectedEventArgs, bool>.CreateDelegate("RaiseGestureOrStrokeCollected");

    // Track contact lifecycle: down / move / up.
    AddHandler(MultitouchScreen.NewContactEvent, (NewContactEventHandler)OnNewContact);
    AddHandler(MultitouchScreen.ContactMovedEvent, (ContactEventHandler)OnContactMoved);
    AddHandler(MultitouchScreen.ContactRemovedEvent, (ContactEventHandler)OnContactRemoved);

    StylusPlugIns.Add(new TestPlugin());

    // Collect ink and recognize gestures; only scratch-out is enabled.
    EditingMode = InkCanvasEditingMode.InkAndGesture;
    SetEnabledGestures(new[] { ApplicationGesture.ScratchOut });
    Gesture += TouchCanvas_Gesture;
}
// Sponsor tab ctor: loads lookup data, applies the app-standard grid behavior,
// and wires up event handlers.
public tabSponsor()
{
    InitializeComponent();

    // Rank dropdown source comes from the "Ranks_s" stored procedure.
    using (Proc Ranks_s = new Proc("Ranks_s"))
        cbxRank.ItemsSource = Ranks_s.ExecuteDataSet().Table0.DefaultView;

    // Apply the app-standard grid behavior to every grid on this tab.
    iTRAACHelpers.WPFDataGrid_Standard_Behavior(gridMembers);
    iTRAACHelpers.WPFDataGrid_Standard_Behavior(gridForms);
    iTRAACHelpers.WPFDataGrid_Standard_Behavior(gridRemarks);
    iTRAACHelpers.WPFDataGrid_Standard_Behavior(gridMatches);

    gridMatches.ItemsSource = PotentialMatches.DefaultView;
    InitializePotentialMatchesGridGrouping();

    gridMembers.Loaded += new RoutedEventHandler(gridMembers_Loaded);
    btnSuspend.Click += new RoutedEventHandler(btnSuspend_Click);
}
// Search button handler: runs the returns search and binds the result grid,
// auto-opening a sole match.
private void BtnSearchClick(object sender, RoutedEventArgs e)
{
    txtSequenceNumber.SelectAll();
    gridReturns.ItemsSource = null; //for visual consistency, blank out the existing list before we go off and search

    // ReSharper disable InconsistentNaming
    using (var TaxForm_Returns_Search = new Proc("TaxForm_Returns_Search"))
    // ReSharper restore InconsistentNaming
    {
        TaxForm_Returns_Search["@SequenceNumber"] = txtSequenceNumber.Text;
        gridReturns.ItemsSource = TaxForm_Returns_Search.ExecuteDataSet().Table0.DefaultView;
        FilterChanged();
    }

    //automatically open the only one found if it is only one
    if (gridReturns.Items.Count == 1)
        OpenForm(gridReturns.Items[0] as DataRowView);
    else if (gridReturns.Items.Count > 1)
        gridReturns.GetCell(0, 0).Focus(); // multiple hits: park focus on the first cell for keyboard navigation
}
/// <summary>
/// Registers a named procedure. Duplicate names are reported to the console and
/// ignored (original non-throwing behavior preserved).
/// </summary>
/// <param name="name">Procedure name (registry key). Must not be null.</param>
/// <param name="content">The procedure body lines. Must not be null.</param>
/// <exception cref="ArgumentNullException">name or content is null.</exception>
public void AddProc (string name, IEnumerable<string> content)
{
    // FIX: use nameof() so the reported parameter names can't drift after a rename.
    if (name == null) {
        throw new ArgumentNullException (nameof (name));
    }
    if (content == null) {
        throw new ArgumentNullException (nameof (content));
    }

    if (_Procs.ContainsKey (name)) {
        Console.WriteLine ("Procedure \"" + name + "\" is already defined.");
        return;
    }

    var proc = new Proc (content);
    _Procs.Add (name, proc);
}
// New-customer wizard tab ctor: sets up the matches grid, rank lookup, and
// initial focus.
public tabNewCustWiz()
{
    InitializeComponent();

#if DEBUG
    // Test-fill helper button is only visible in debug builds.
    btnTestFill.Visibility = System.Windows.Visibility.Visible;
#endif

    Fields = new DataFields();

    iTRAACHelpers.WPFDataGrid_Standard_Behavior(gridMatches);
    gridMatches.ItemsSource = PotentialMatches.DefaultView;
    InitializePotentialMatchesGridGrouping();

    // Rank dropdown source comes from the "Ranks_s" stored procedure.
    using (Proc Ranks_s = new Proc("Ranks_s"))
        cbxRank.ItemsSource = Ranks_s.ExecuteDataTable().DefaultView;

    txtSponsorSSN1.Focus();
}
// kind_of?-style check: answers directly via CLR reflection when the argument is
// a CLR type (wrapped or raw), otherwise defers to the standard Ruby semantics.
public override object Call1(Class last_class, object recv, Frame caller, Proc block, object p1)
{
    // Accept either a wrapped CLRClass or a raw System.Type as the argument.
    CLRClass wrapped = p1 as CLRClass;
    System.Type targetType = (wrapped != null) ? wrapped.clrtype : p1 as System.Type;

    if (targetType != null)
        return targetType.IsInstanceOfType(recv);

    // Not a CLR type: fall back to plain rb_obj_is_kind_of.
    return rb_obj_is_kind_of.singleton.Call1(last_class, recv, caller, block, p1);
}
// Static ctor: loads DB-level settings, then wraps the app.config
// "applicationSettings" section for writable, application-scoped access.
static Settings()
{
    //load DB based settings...
    using (Proc ControlCentral_s = new Proc("ControlCentral_s"))
        _DBSettings = ControlCentral_s.ExecuteNameValueCollection();

    //load "app.config" file based settings...
    //it should be noted that *saving* *directly* to the app.config file is a well known no-no... from a security standpoing which isn't really elaborated on much that i can find
    //but that winds up meaning that the *Application* scoped settings accessible via {appname}.Properties.Settings.Default are code gen'd as read only properties.
    //there's lots of folks whining about wanting to *write* to these properties for various reasons out in the forums...
    //the main reason here is that we want the printer mappings to be settable by all users but at the application wide level not user based...
    //i.e. we don't want every user to be required to select the same printers that everybody on this machine will be using
    //i feel like i basically get the security implications that the administrator/installer must open read/write ACL on the Program Files\{app} folder and app.config file
    //probalby because you can throw config info in those files which opens up even more access or something like that
    //anyway, i'm still going for it until i read more about it... found the following code here: http://social.msdn.microsoft.com/Forums/en-US/csharpgeneral/thread/13428050-4fde-4c34-90f8-5255f4123a20/
    ConfigurationSectionGroup applicationSectionGroup = appConfig.GetSectionGroup("applicationSettings");
    applicationSettingsSection = applicationSectionGroup.Sections[ReflectionHelpers.CurrentAppName + ".Properties.Settings"] as ClientSettingsSection;
    applicationSettingsSection.SectionInformation.ForceSave = true; //crucial, otherwise just doesn't save, even though documentation indicates that it supposedly means save even if there aren't changes
    App = new SettingElementCollectionWrapper(applicationSettingsSection.Settings);
}
// Static ctor: loads the tax-form type catalog from "TaxFormTypes_s" into the
// keyed List (keyed by CodeName).
static Type()
{
    using (Proc TaxFormTypes_s = new Proc("TaxFormTypes_s"))
    {
        // FormTypeID, FormName, FormType, MaxCkOut, RecordSource, CodeName, Passive, Active
        DataTable t = TaxFormTypes_s.Table0;
        foreach (DataRow r in t.Rows)
        {
            List.Add((string)r["CodeName"], new Type()
            {
                Active = (bool)r["Active"],
                Code = (string)r["CodeName"],
                Description = (string)r["FormName"],
                FormTypeID = (int)r["FormTypeID"],
                Class = (int)r["FormType"],
                MaxCheckout = (int)r["MaxCkOut"],
                PrintDataProcName = (string)r["RecordSource"],
                UserSelectable = !(bool)r["Passive"] // "Passive" rows are excluded from user selection
            });
        }
    }
}
/// <summary>
/// Attempts an admin override by validating the password via the
/// "User_AdminOverride" proc; on success, starts (or restarts) the countdown
/// timer that will expire the override.
/// </summary>
/// <param name="adminPassword">Password to validate.</param>
/// <param name="rollbackMinutes">Minutes until the override lapses.</param>
/// <returns>True when the override was granted (also stored in IsAdmin).</returns>
public bool AdminOverride(string adminPassword, int rollbackMinutes)
{
    using (var userAdminOverride = new Proc("User_AdminOverride"))
    {
        userAdminOverride["@Password"] = adminPassword;
        IsAdmin = bool.Parse(userAdminOverride.ExecuteNonQuery()["@Result"].ToString());
    }

    if (IsAdmin)
    {
        // NOTE(review): DateTime.Parse("00:MM:00") yields *today's date* at that
        // time-of-day; this looks like it really encodes a duration — confirm
        // whether AdminOverrideRemaining should be a TimeSpan.
        AdminOverrideRemaining = DateTime.Parse("00:" + rollbackMinutes.ToString(CultureInfo.InvariantCulture) + ":00");

        // Lazily create the tick timer; subsequent overrides just reset it.
        if (_adminOverrideTimer == null)
            _adminOverrideTimer =
                // ReSharper disable RedundantArgumentName
                new Timer(AdminOverrideTimerTick, state: null, dueTime: 0 /*start immediately*/, period: AdminOverrideTimerPeriod);
        else
            _adminOverrideTimer.Change(dueTime: 0, period: AdminOverrideTimerPeriod);
        // ReSharper restore RedundantArgumentName
    }

    return (IsAdmin);
}
/// <summary>
/// Dispatches a single packet read from the game client (via creader),
/// translating it into server-side datagrams (SendUDP) and/or responses written
/// back to the client (cwriter).
/// </summary>
public static void ProcessClientPacket(int packetID)
{
    switch ((PacketID)packetID)
    {
        case PacketID.entityUpdate:
            #region entityUpdate
            var entityUpdate = new EntityUpdate(creader);
            // Known player: merge the delta into the cached state; otherwise cache it fresh.
            if (players.ContainsKey(entityUpdate.guid))
            {
                entityUpdate.Filter(players[entityUpdate.guid]);
                entityUpdate.Merge(players[entityUpdate.guid]);
            }
            else
            {
                players.Add(entityUpdate.guid, entityUpdate);
            }
            // A name in the update means the roster view may be stale.
            if (entityUpdate.name != null)
            {
                RefreshPlayerlist();
            }
            SendUDP(entityUpdate.Data);
            break;
            #endregion
        case PacketID.entityAction:
            #region entity action
            EntityAction entityAction = new EntityAction(creader);
            switch (entityAction.type)
            {
                case ActionType.talk:
                    break;
                case ActionType.staticInteraction:
                    break;
                case ActionType.pickup:
                    break;
                case ActionType.drop:
                    //send item back to dropper because dropping is disabled to prevent chatspam
                    if (form.radioButtonDestroy.Checked)
                    {
                        new ChatMessage() { message = "item destroyed" }.Write(cwriter);
                    }
                    else
                    {
                        var serverUpdate = new ServerUpdate();
                        var pickup = new ServerUpdate.Pickup() { guid = guid, item = entityAction.item };
                        serverUpdate.pickups.Add(pickup);
                        // "Duplicate" mode hands the item back twice.
                        if (form.radioButtonDuplicate.Checked)
                        {
                            serverUpdate.pickups.Add(pickup);
                        }
                        serverUpdate.Write(cwriter);
                    }
                    break;
                case ActionType.callPet:
                    var petCall = new SpecialMove() { Guid = guid };
                    SendUDP(petCall.data);
                    break;
                default:
                    //unknown type
                    break;
            }
            break;
            #endregion
        case PacketID.hit:
            #region hit
            // Translate the client hit packet 1:1 into an Attack datagram.
            var hit = new Hit(creader);
            var attack = new Attack()
            {
                Target = (ushort)hit.target,
                Damage = hit.damage,
                Stuntime = hit.stuntime,
                Skill = hit.isYellow,
                Type = hit.type,
                ShowLight = hit.showlight == 1,
                Critical = hit.critical == 1
            };
            SendUDP(attack.data);
            lastTarget = attack.Target;
            break;
            #endregion
        case PacketID.passiveProc:
            #region passiveProc
            var passiveProc = new PassiveProc(creader);
            var proc = new Proc()
            {
                Target = (ushort)passiveProc.target,
                Type = passiveProc.type,
                Modifier = passiveProc.modifier,
                Duration = passiveProc.duration
            };
            SendUDP(proc.data);
            break;
            #endregion
        case PacketID.shoot:
            #region shoot
            var shootPacket = new Resources.Packet.Shoot(creader);
            var shootDatagram = new Resources.Datagram.Shoot()
            {
                Position = shootPacket.position,
                Velocity = shootPacket.velocity,
                Scale = shootPacket.scale,
                Particles = shootPacket.particles,
                Projectile = shootPacket.projectile
            };
            SendUDP(shootDatagram.data);
            break;
            #endregion
        case PacketID.chat:
            #region chat
            var chatMessage = new ChatMessage(creader);
            // Easter egg: "/plane" spawns a voxel model; anything else is relayed as chat.
            if (chatMessage.message.ToLower() == @"/plane")
            {
                Console.Beep();
                var serverUpdate = new ServerUpdate()
                {
                    blockDeltas = VoxModel.Parse("model.vox"),
                };
                // Offset the parsed model into world coordinates.
                foreach (var block in serverUpdate.blockDeltas)
                {
                    block.position.x += 8286946;
                    block.position.y += 8344456;
                    block.position.z += 220;
                }
                serverUpdate.Write(cwriter);
            }
            else
            {
                var chat = new Chat(chatMessage.message)
                {
                    Sender = guid //client doesn't send this //(ushort)chatMessage.sender
                };
                SendUDP(chat.data);
            }
            break;
            #endregion
        case PacketID.chunk:
            #region chunk
            // Consumed but otherwise ignored.
            var chunk = new Chunk(creader);
            break;
            #endregion
        case PacketID.sector:
            #region sector
            // Consumed but otherwise ignored.
            var sector = new Sector(creader);
            break;
            #endregion
        case PacketID.version:
            #region version
            var version = new ProtocolVersion(creader);
            // Only protocol version 3 is accepted; anything else is bounced back corrected.
            if (version.version != 3)
            {
                version.version = 3;
                version.Write(cwriter, true);
            }
            else
            {
                var connect = new Connect();
                SendUDP(connect.data);
            }
            break;
            #endregion
        default:
            form.Log("unknown client packet\n", Color.Magenta);
            break;
    }
}
/// <summary>
/// To be used by update sprocs that are only concerned with returning a batch of records to keep dsCache in sync with the current database state.
/// Takes in a DataSet and a list of TableNames and merges them into the main dsCache.
/// </summary>
/// <param name="proc">The executed (or executable) proc whose DataSet feeds the cache.</param>
/// <param name="onlyTableNames">Optional subset of result tables to process; empty means all.</param>
static public void CacheTables(Proc proc, params string[] onlyTableNames)
{
    //if the outer scope hasn't already executed and populated the corresponding DataSet, go ahead and execute now
    if (proc.DataSet == null)
    {
        proc.ExecuteDataSet();
    }

    //determine the list of tables we want to process
    onlyTableNames = proc.MatchingTableNames(onlyTableNames);

    foreach (var t in onlyTableNames)
    {
        //need to handle table.1, table.2 type results where we return multiple resultsets intended to be merged into the same cached table
        var cacheTableName = Multitables.Match(t).Groups[1].Value;
        var incomingTable = proc.DataSet.Tables[t];
        var incomingName = incomingTable.TableName;
        incomingTable.TableName = cacheTableName; //needs to be the same so merge works, this table is renamed back at the end to prevent collision with any subsequent

        //*** DataSet.Merge requires PK's to be assigned on both incoming and existing tables in order to merge records appropriately
        //sometimes PrimaryKey info is missing from the ADO.Net schema metadata that comes back from a stored proc call
        //it appears that selecting from a sql view is one reason this can happen
        //so, this logic defines a PK if one isn't present
        if (incomingTable.PrimaryKey.Length == 0)
        {
            try
            {
                string pkColName = null;
                if (incomingTable.Columns.Contains("RowGUID")) { pkColName = "RowGUID"; }
                else if (incomingTable.Columns.Contains(cacheTableName + "ID")) { pkColName = cacheTableName + "ID"; }
                else if (incomingTable.Columns[0].ColumnName.Contains("GUID")) { pkColName = incomingTable.Columns[0].ColumnName; } //initially necessary for Sponsor_TaxForms.TaxFormGUID

                if (pkColName != null)
                {
                    // FIX: the original source had a misplaced terminator here
                    // ("... } } ;") which does not parse; restored to a plain assignment.
                    incomingTable.PrimaryKey = new[] { incomingTable.Columns[pkColName] };
                }
            }
            catch (Exception ex)
            {
                if (ex.Message != "These columns don't currently have unique values.")
                {
                    throw; //opening a little wiggle room here for situations like the User table which gets dual keyed off TaxOfficeId and User RowGUID, in this case, the PK will be properly set in the calling context
                }
            }
        }

        //nugget: DataSet.Merge(DataTable) has become a real linchpin in the whole data roundtrip approach
        //nugget: in a nutshell, update sprocs return a bare minimum of updated fields in a return resultset along with a corresponding CSV list of @TableNames
        DataTable cachedTable = DsCache.Tables[cacheTableName];
        // ReSharper disable RedundantArgumentName
        DsCache.Merge(incomingTable,
            preserveChanges: false, //preserveChanges pretty much has to be false in order to count on what comes back getting slammed in
            missingSchemaAction: (cachedTable == null) ?
                MissingSchemaAction.AddWithKey : //if this table hasn't been cached yet, go with most robust MissingSchemaAction so we get the first one populated with good metadata
                MissingSchemaAction.Ignore); //but be as relaxed as possible with any subsequent data headed for this table once it's been established... set any other way, constraints would just blow up like crazy on me and they were too tough to identify what was specifically wrong... almost to the point that logic is more paranoid than it should be
        // ReSharper restore RedundantArgumentName

        incomingTable.TableName = incomingName; //to prevent name collision as we loop through

        //now that it's been cached...
        cachedTable = DsCache.Tables[cacheTableName];

        //make the DefaultView.Sort the same as the PK so that we can consistently use Table.DefaultView.FindRows() & Table.Find() interchangeably
        if (cachedTable.DefaultView.Sort == "" && cachedTable.PrimaryKey.Length > 0)
        {
            cachedTable.DefaultView.Sort = cachedTable.PrimaryKey[0].ColumnName;
        }
    }
}
/// <summary>
/// Dumps an Analysis Services cube's metadata (dimensions, measure groups,
/// calculated measures) to a log file. Command line: /SERVER: /DB: /CUBE:.
/// </summary>
public static void Main(string[] args)
{
    // Defaults, overridable from the command line below.
    string varSRV = "SRV-OLAP";
    string varDB = "dw_olap";
    string varCube = "Рух товарів"; // Ukrainian: "Goods movement"

    for (int i = 0; i < args.Length; i++)
    {
        if (args[i].ToUpper().StartsWith("/SERVER:")) { varSRV = args[i].Substring(8); }
        else if (args[i].ToUpper().StartsWith("/DB:")) { varDB = args[i].Substring(4); }
        else if (args[i].ToUpper().StartsWith("/CUBE:")) { varCube = args[i].Substring(6); }
    }

    string varSeparator = "\t";

    Proc p = new Proc();
    p.CreateLog("d:\\Cube_" + varCube + ".txt");

    // Connect via AMO and locate the requested database/cube.
    Microsoft.AnalysisServices.Server s = new Microsoft.AnalysisServices.Server();
    s.Connect(@"Data Source=" + varSRV + ";Provider=msolap;Initial Catalog=" + varDB);
    Microsoft.AnalysisServices.Database d = s.Databases.FindByName(varDB);
    Microsoft.AnalysisServices.Cube c = d.Cubes.FindByName(varCube);

    p.Log("База:" + varDB + " Куб:" + varCube); // "Database: ... Cube: ..."
    p.Log("Розмірності");                       // "Dimensions"
    foreach (Microsoft.AnalysisServices.CubeDimension dim in c.Dimensions)
    {
        p.Log(dim.Name + varSeparator + dim.DimensionID + varSeparator + dim.Description);
        foreach (Microsoft.AnalysisServices.CubeAttribute attr in dim.Attributes)
        {
            p.Log(varSeparator + attr.Attribute + varSeparator + attr.AttributeID + "\t" + attr.Attribute.Description);
        }
    }

    p.Log("Групи мір"); // "Measure groups"
    foreach (Microsoft.AnalysisServices.MeasureGroup mg in c.MeasureGroups)
    {
        p.Log(mg.Name + varSeparator + mg.ID + varSeparator + mg.Description);
        foreach (Microsoft.AnalysisServices.Measure m in mg.Measures)
        {
            p.Log(varSeparator + m.ID + varSeparator + m.Name + varSeparator + m.Description);
        }
    }

    p.Log("Калькульовані міри"); // "Calculated measures"
    // Calculated measures aren't exposed via AMO; query them through ADOMD instead.
    Microsoft.AnalysisServices.AdomdClient.AdomdConnection cn = new Microsoft.AnalysisServices.AdomdClient.AdomdConnection("Data Source=" + varSRV + ";Provider=msolap;Initial Catalog=" + varDB);
    cn.Open();
    /*foreach ( Microsoft.AnalysisServices.AdomdClient.CubeDef tt in cn.Cubes)
     * p.Log(tt.Name+varSeparator+tt.Caption );*/
    try
    {
        // Only measures with an MDX expression are calculated measures.
        foreach (Microsoft.AnalysisServices.AdomdClient.Measure m in cn.Cubes[varCube].Measures)
        {
            if (string.IsNullOrEmpty(m.Expression) == false)
            {
                p.Log(m.UniqueName + varSeparator); //+m.Expression +varSeparator+ m.Description );
            }
        }
        //Console.WriteLine("{0}: {1}",m.UniqueName,m.Expression );
    }
    catch
    {
        // NOTE(review): best-effort — ADOMD enumeration failures are deliberately
        // swallowed so the log still closes; consider at least logging the error.
    }
    finally
    {
        cn.Close();
    };
    //Console.ReadLine();
    p.CloseLog();
}
// A null buffer must be rejected by Proc.Write.
public void TestWriteThrowsOnNull()
{
    byte[] nullBuffer = null;
    Proc.Write(IntPtr.Zero, nullBuffer, 4);
}
// Requesting zero elements is invalid for the generic array read.
public void TestReadTArrayThrowsOnZeroNegativeElementCount()
{
    IntPtr address = IntPtr.Zero;
    Proc.Read<Int32>(address, 0);
}
// Claiming 5 bytes from a 4-byte buffer must throw.
public void TestWriteThrowsIfBufferNotLargeEnough()
{
    byte[] smallBuffer = new byte[4];
    Proc.Write(IntPtr.Zero, smallBuffer, 5);
}
// Schedules the afterStart event to fire with zero delay (i.e. right after Start).
void Start()
{
    Proc.Delay(0, () => afterStart.Invoke());
}
// A bogus privilege name should surface the underlying API failure.
public void TestSetPrivilegeThrowsOnApiError()
{
    string bogusPrivilegeName = "testjaskdajsd";
    Proc.SetPrivilege(bogusPrivilegeName, true);
}
// A null destination buffer must be rejected by Proc.Read.
public void TestReadThrowsOnNull()
{
    byte[] nullBuffer = null;
    Proc.Read(IntPtr.Zero, nullBuffer, 4);
}
// A successful allocation must be releasable without error.
public void TestAllocFree()
{
    IntPtr allocatedBlock = Proc.Alloc(1024);
    Proc.Free(allocatedBlock);
}
// Enabling well-known privileges should succeed for both names.
public void TestSetPrivilege()
{
    Proc.SetPrivilege(PrivilegeName.AuthenticateAsUser, true);
    Proc.SetPrivilege(PrivilegeName.Debug, true);
}
// Freeing the null address is an API error and must throw.
public void TestFreeThrowsOnApiError()
{
    IntPtr invalidAddress = IntPtr.Zero;
    Proc.Free(invalidAddress);
}
// An absurdly large request should fail at the API level.
public void TestAllocThrowsOnApiError()
{
    int hugeByteCount = int.MaxValue;
    Proc.Alloc(hugeByteCount);
}
// Zero-byte allocations are rejected.
public void TestAllocThrowsOnZeroCount()
{
    int zeroByteCount = 0;
    Proc.Alloc(zeroByteCount);
}
/// <summary>
/// Runs <paramref name="transactional"/> under an auto-rolled-back transaction,
/// nesting into an existing unit of work when one is present.
/// </summary>
public static void AutoRollbackTransaction(IsolationLevel level, Proc transactional) =>
    AutoRollbackTransaction(level, transactional, UnitOfWorkNestingOptions.ReturnExistingOrCreateUnitOfWork);
// Arrived on scene: mute the siren, then hand control from this state over to
// the GoToPatient process (order matters — silence before the swap).
protected override void PostArrival()
{
    Vehicle.IsSirenSilent = true;
    Proc.SwapProcesses(PostArrival, GoToPatient);
}
// An empty array is not a valid write payload.
public void TestWriteTArrayThrowsOnZeroArray()
{
    Int32[] emptyValues = new Int32[0];
    Proc.Write<Int32>(IntPtr.Zero, emptyValues);
}
/// <summary>
/// Kills all running processes.
/// </summary>
public static void KillAll()
{
    // Snapshot still-running processes under the lock, then clear the registry.
    List<IProcess> ProcessesToKill = new List<IProcess>();
    lock (SyncObject)
    {
        foreach (var ProcResult in ActiveProcesses)
        {
            if (!ProcResult.HasExited)
            {
                ProcessesToKill.Add(ProcResult);
            }
        }
        ActiveProcesses.Clear();
    }

    // Remove processes that can't be killed
    for (int ProcessIndex = ProcessesToKill.Count - 1; ProcessIndex >= 0; --ProcessIndex)
    {
        var ProcessName = ProcessesToKill[ProcessIndex].GetProcessName();
        if (!String.IsNullOrEmpty(ProcessName) && !CanBeKilled(ProcessName))
        {
            CommandUtils.LogLog("Ignoring process \"{0}\" because it can't be killed.", ProcessName);
            ProcessesToKill.RemoveAt(ProcessIndex);
        }
    }

    if (ProcessesToKill.Count > 0)
    {
        CommandUtils.LogLog("Trying to kill {0} spawned processes.", ProcessesToKill.Count);
        foreach (var Proc in ProcessesToKill)
        {
            CommandUtils.LogLog("  {0}", Proc.GetProcessName());
        }

        if (CommandUtils.IsBuildMachine)
        {
            // On build machines, give the processes (and any descendants of this
            // process) up to 9 x 10s grace periods to exit on their own first.
            for (int Cnt = 0; Cnt < 9; Cnt++)
            {
                bool AllDone = true;
                foreach (var Proc in ProcessesToKill)
                {
                    try
                    {
                        if (!Proc.HasExited)
                        {
                            AllDone = false;
                            CommandUtils.LogLog("Waiting for process: {0}", Proc.GetProcessName());
                        }
                    }
                    catch (Exception)
                    {
                        // Process state can be unreadable while it tears down; keep waiting.
                        CommandUtils.LogWarning("Exception Waiting for process");
                        AllDone = false;
                    }
                }

                try
                {
                    if (ProcessResult.HasAnyDescendants(Process.GetCurrentProcess()))
                    {
                        AllDone = false;
                        CommandUtils.LogInformation("Waiting for descendants of main process...");
                    }
                }
                catch (Exception Ex)
                {
                    CommandUtils.LogWarning("Exception Waiting for descendants of main process. " + Ex);
                    AllDone = false;
                }

                if (AllDone)
                {
                    break;
                }
                Thread.Sleep(10000);
            }
        }

        // Grace period over: forcibly stop whatever is still running.
        foreach (var Proc in ProcessesToKill)
        {
            var ProcName = Proc.GetProcessName();
            try
            {
                if (!Proc.HasExited)
                {
                    CommandUtils.LogLog("Killing process: {0}", ProcName);
                    Proc.StopProcess(false);
                }
            }
            catch (Exception Ex)
            {
                CommandUtils.LogWarning("Exception while trying to kill process {0}:", ProcName);
                CommandUtils.LogWarning(LogUtils.FormatException(Ex));
            }
        }

        // Finally, sweep any orphaned descendants of this process (build machines only).
        try
        {
            if (CommandUtils.IsBuildMachine && ProcessResult.HasAnyDescendants(Process.GetCurrentProcess()))
            {
                CommandUtils.LogLog("current process still has descendants, trying to kill them...");
                ProcessResult.KillAllDescendants(Process.GetCurrentProcess());
            }
        }
        catch (Exception)
        {
            CommandUtils.LogWarning("Exception killing descendants of main process");
        }
    }
}
/// <summary>
/// Builds the whole simulator in dependency order: configs, trace fetcher,
/// instruction partition, page converter, per-core Proc pipelines, memory
/// hierarchy, memory controllers, PIM logic, coherence, clock, and topology.
/// </summary>
public PIMSimulator(string[] args)
{
    initAllconfigs(args);

    trace = new TraceFetcher();
    ins_p = new InsPartition();
    pg = new PageConverter();

    if (Config.shared_cache)
    {
        shared_cache = new Shared_Cache();
    }

    // One Proc (core model) per configured core, each attached to the instruction
    // partition, the optional shared cache, and the page converter (TLB).
    proc = new List<Proc>();
    for (int i = 0; i < Config.N; i++)
    {
        Proc to_add = new Proc(ref ins_p, i);
        if (Config.shared_cache)
        {
            to_add.attach_shared_cache(ref shared_cache);
        }
        to_add.attach_tlb(ref pg);
        proc.Add(to_add);
    }

    // Instantiate the memory objects declared in config: HMC vs DDR-style (DRAM/PCM).
    int count = 0;
    foreach (var item in Config.memory)
    {
        if (item.Key.Equals("HMC"))
        {
            var tp = new HMCMem(count++) as MemObject;
            MemorySelector.add(item.Value, ref tp);
        }
        else
        {
            if (item.Key.Equals("DRAM") || item.Key.Equals("PCM"))
            {
                var tp = new DDRMem(count++) as MemObject;
                MemorySelector.add(item.Value, ref tp);
            }
            else
            {
                //error
                DEBUG.Error("Unknown Memory Type.");
                Environment.Exit(3);
            }
        }
    }

    Mctrl.init_queue();
    PIMMctrl.init_queue();

    pim = new PIM.PIM(ref ins_p);

    Coherence.init();
    Coherence.linkproc(proc);

    GlobalTimer.InitClock();
    BuildTopology();
}
// A null reference value cannot be marshalled and must be rejected.
public void TestWriteTThrowsOnNull()
{
    MockClass nullValue = null;
    Proc.Write<MockClass>(IntPtr.Zero, nullValue);
}
// Window ctor: initialize XAML first, then hook the resize handler.
public BaseWindow()
{
    InitializeComponent();
    // NOTE(review): presumably converts window-message coordinates during resize
    // handling — confirm against WindowManager.ConvertMessagePoint.
    resize = WindowManager.ConvertMessagePoint;
}
// Factory for the BlockParam ("bfc") used when a proc is invoked via #call
// (caller kind Call, not a library proc converter).
public static BlockParam /*!*/ CreateBfcForProcCall(Proc /*!*/ proc)
{
    Assert.NotNull(proc);

    BlockParam result = new BlockParam(proc, BlockCallerKind.Call, false);
    return result;
}
/// <summary>
/// Creates a remote-call descriptor binding a procedure to a client and its arguments.
/// </summary>
public RemoteCall(Proc code, Guid clientId, params RemoteArg[] args)
{
    Args = args;
    ClientId = clientId;
    Procedure = code;
}
/// <summary>
/// Defines ordered expectations
/// </summary>
/// <param name="methodCallsDescribingExpectations">A delegate describing the expectations</param>
/// <returns>an IMockVerifier</returns>
public IMockVerifier ExpectingInSameOrder(Proc methodCallsDescribingExpectations)
{
    // Record the expectations inside an ordered recorder scope, then flip the
    // repository into replay mode.
    using (_mocks.Ordered())
    {
        methodCallsDescribingExpectations();
    }
    _mocks.ReplayAll();

    return this;
}
/// <summary>
/// Constructor: pairs a notification name with its callback procedure.
/// </summary>
public Notifier(string name_, Proc proc_)
{
    this.proc = proc_;
    this.name = name_;
}
/// <summary>
/// Initialize a code block where Mocker.Current is initialized.
/// At the end of the code block, all the expectation will be verified.
/// This overload will create a new MockRepository.
/// </summary>
/// <param name="methodCallThatHasMocks">The code that will be executed under the mock context</param>
public static void Mocks(Proc methodCallThatHasMocks)
{
    // Create a fresh repository and delegate to the overload that installs it
    // as Mocker.Current and verifies when the block completes.
    Mocks(new MockRepository(), methodCallThatHasMocks);
}
/// <summary>
/// Wraps a named Python test delegate.
/// </summary>
public PythonTestCase(string name, Proc testCase)
{
    _testCase = testCase;
    _name = name;
}
// A null destination stream must be rejected by the stream-based read.
public void TestReadWithStreamThrowsOnNull()
{
    Stream nullStream = null;
    Proc.Read(IntPtr.Zero, nullStream, 4);
}
/// <summary>
/// Self-update routine: stops running Track Maker instances, deletes old
/// installation files, copies the new files out of .\update, and launches the
/// completion batch file (which replaces this updater itself).
/// </summary>
public static void Update()
{
    // Kill all instances of the track maker..
    foreach (Process Proc in Process.GetProcessesByName("Track Maker"))
    {
        Proc.Kill();
    }

    foreach (string DirName in Directory.EnumerateDirectories(Directory.GetCurrentDirectory()))
    {
        // Last path segment = the subdirectory's bare name.
        string[] TempArray = DirName.Split('\\');
        string DirNameTemp = TempArray[TempArray.Length - 1];

        // Delete all track maker files.
        foreach (string FileName in Directory.EnumerateFiles($@"{Directory.GetCurrentDirectory()}\{DirNameTemp}"))
        {
            // Don't delete ourselves!
            if (!FileName.Contains("Update")
                && !FileName.Contains(".ico")
                && !FileName.Contains(".txt")
                && !FileName.Contains(".docx")
                && !FileName.Contains(".cs")
                && !FileName.Contains(".xaml")
                && !FileName.Contains(".odt")
                && !FileName.Contains(".cmd"))
            {
                File.Delete(FileName);
            }
        }
    }

    // Set our current dir
    Directory.SetCurrentDirectory(@".\update");

    // Copy the new files.
    foreach (string FileName in Directory.EnumerateFiles($@"{Directory.GetCurrentDirectory()}"))
    {
        // Don't copy over ourselves. The batch file does this.
        string[] TempArray0 = FileName.Split('\\');
        string FileNameTemp = TempArray0[TempArray0.Length - 1];
        if (!FileName.Contains("Update"))
        {
            File.Copy(FileName, $@"..\{FileNameTemp}");
        }
    }

    // Mirror each update subdirectory into the install root the same way.
    foreach (string DirName in Directory.EnumerateDirectories(Directory.GetCurrentDirectory()))
    {
        string[] TempArray = DirName.Split('\\');
        string DirNameTemp = TempArray[TempArray.Length - 1];
        foreach (string FileName in Directory.EnumerateFiles($@"{Directory.GetCurrentDirectory()}\{DirNameTemp}"))
        {
            // Don't copy over ourselves. The batch file does this.
            string[] TempArray2 = FileName.Split('\\');
            string FileNameTemp = TempArray2[TempArray2.Length - 1];
            if (!FileName.Contains("Update"))
            {
                File.Copy(FileName, $@"..\{DirNameTemp}\{FileNameTemp}");
            }
        }
    }

    Directory.SetCurrentDirectory(@"..");
    // File.Delete("new.zip");

    // Completion batchfile that updates this updater.
    Process.Start(@"..\UpdateComplete.cmd");
}
// A null source stream must be rejected by the stream-based write.
public void TestWriteWithStreamThrowsOnNull()
{
    Stream nullStream = null;
    Proc.Write(IntPtr.Zero, nullStream, 4);
}
/// <summary>
/// Runs <paramref name="transactional"/> under an auto-rolled-back transaction
/// using the default ReadCommitted isolation level.
/// </summary>
public static void AutoRollbackTransaction(Proc transactional) =>
    AutoRollbackTransaction(IsolationLevel.ReadCommitted, transactional);
// Writing to an invalid target address should surface the underlying API error.
public void TestWriteThrowsOnApiError()
{
    // FIX: this test was calling Proc.Read (an apparent copy-paste from the Read
    // test), leaving the Write error path uncovered; exercise Proc.Write as the
    // test name states.
    Proc.Write(IntPtr.Zero, OutStream.GetBuffer(), 4);
}
/// <summary>
/// Defines unordered expectations
/// </summary>
/// <param name="methodCallsDescribingExpectations">A delegate describing the expectations</param>
/// <returns>an IMockVerifier</returns>
public IMockVerifier Expecting(Proc methodCallsDescribingExpectations)
{
    // Record the expectations (order-insensitive), then flip into replay mode.
    methodCallsDescribingExpectations();
    _mocks.ReplayAll();

    return this;
}
// A null array payload must be rejected.
public void TestWriteTArrayThrowsOnNullArray()
{
    Int32[] nullValues = null;
    Proc.Write(IntPtr.Zero, nullValues);
}
/// <summary>
/// Verifies previously defined expectations
/// </summary>
/// <param name="methodCallsToBeVerified">The replayed calls to check against the recorded expectations</param>
public void Verify(Proc methodCallsToBeVerified)
{
    // Replay the calls, then assert every recorded expectation was satisfied.
    methodCallsToBeVerified();
    _mocks.VerifyAll();
}
// friend: RubyOps
// Captures the proc, how the caller invokes it, and whether this wrapper was
// created by a library proc converter.
internal BlockParam(Proc /*!*/ proc, BlockCallerKind callerKind, bool isLibProcConverter)
{
    _proc = proc;
    _callerKind = callerKind;
    _isLibProcConverter = isLibProcConverter;
}
/// <summary>
/// Initialize a code block where Mocker.Current is initialized.
/// At the end of the code block, all the expectation will be verified.
/// </summary>
/// <param name="mocks">The mock repository to use; VerifyAll() is called on it when the block completes.</param>
/// <param name="methodCallThatHasMocks">The code that will be executed under the mock context</param>
public static void Mocks(MockRepository mocks, Proc methodCallThatHasMocks)
{
    Mocker.Current = mocks;
    try
    {
        methodCallThatHasMocks();
        mocks.VerifyAll();
    }
    finally
    {
        // Always clear the ambient mocker, even when the block or VerifyAll throws.
        Mocker.Current = null;
    }
}
// Requesting more bytes than the destination buffer can hold must throw.
public void TestReadThrowsIfBufferNotLargeEnough()
{
    byte[] destination = OutStream.GetBuffer();
    Proc.Read(IntPtr.Zero, destination, 10000);
}