// Executes every accumulated command as a single batched RPC round-trip and
// returns the total number of rows affected across the batch.
// Throws when invoked over the SQL CLR context connection, which does not
// support batched updates.
internal int ExecuteNonQuery()
{
    SqlConnection.ExecutePermission.Demand();
    IntPtr hscp;
    Bid.ScopeEnter(out hscp, "<sc.SqlCommandSet.ExecuteNonQuery|API> %d#", ObjectID);
    try
    {
        if (Connection.IsContextConnection)
        {
            throw SQL.BatchedUpdatesNotAvailableOnContextConnection();
        }
        ValidateCommandBehavior(ADP.ExecuteNonQuery, CommandBehavior.Default);
        // Switch the shared command into batch-RPC mode and rebuild its
        // command list from the locally accumulated commands.
        BatchCommand.BatchRPCMode = true;
        BatchCommand.ClearBatchCommand();
        BatchCommand.Parameters.Clear();
        for (int ii = 0; ii < _commandList.Count; ii++)
        {
            LocalCommand cmd = _commandList[ii];
            BatchCommand.AddBatchCommand(cmd.CommandText, cmd.Parameters, cmd.CmdType);
        }
        return (BatchCommand.ExecuteBatchRPCCommand());
    }
    finally
    {
        // Always close the tracing scope opened above.
        Bid.ScopeLeave(ref hscp);
    }
}
// Entry point. Parses the verb-style command line and dispatches to the
// sub-command matching the parsed option type; returns that sub-command's
// exit code, or 1 when parsing fails.
public static int Main(string[] args)
{
#if DEBUG
    //Console.WriteLine("Waiting for attach process");
    //Console.Read();
#endif
    return (Parser.Default
            .ParseArguments<MapGenerationOption, MapViewerOption, FinderOption, GAFinderOption, BatchOption>(args)
            .MapResult(
                (MapGenerationOption opt) => MapGenerationCommand.RunMapGeneration(opt),
                (MapViewerOption opt) => MapViewerCommand.RunMapViewer(opt),
                (FinderOption opt) => FinderCommand.RunMapFinder(opt),
                (GAFinderOption opt) => FinderCommand.RunGAMapFinder(opt),
                (BatchOption opt) => BatchCommand.RunBatch(opt),
                errs => 1
            ));
}
// Executes every accumulated command as a single batched RPC round-trip and
// returns the total number of rows affected across the batch.
// Throws when invoked over the SQL CLR context connection, which does not
// support batched updates.
internal int ExecuteNonQuery()
{
    SqlConnection.ExecutePermission.Demand();
    long scopeID = SqlClientEventSource.Log.TryScopeEnterEvent("<sc.SqlCommandSet.ExecuteNonQuery|API> {0}", ObjectID);
    try
    {
        if (Connection.IsContextConnection)
        {
            throw SQL.BatchedUpdatesNotAvailableOnContextConnection();
        }
        ValidateCommandBehavior(ADP.ExecuteNonQuery, CommandBehavior.Default);
        // Switch the shared command into batch-RPC mode and rebuild its
        // command list from the locally accumulated commands.
        BatchCommand.BatchRPCMode = true;
        BatchCommand.ClearBatchCommand();
        BatchCommand.Parameters.Clear();
        for (int ii = 0; ii < _commandList.Count; ii++)
        {
            LocalCommand cmd = _commandList[ii];
            BatchCommand.AddBatchCommand(cmd.CommandText, cmd.Parameters, cmd.CmdType, cmd.ColumnEncryptionSetting);
        }
        return (BatchCommand.ExecuteBatchRPCCommand());
    }
    finally
    {
        // Always close the tracing scope opened above.
        SqlClientEventSource.Log.TryScopeLeaveEvent(scopeID);
    }
}
// Builds a composite command that fetches every product project from
// version control into its local working path.
public ICommand GetProjectsFromVCS()
{
    var result = new BatchCommand();
    foreach (var project in productInfo.Projects)
    {
        var getCommand = new GetFromVCSCommand(
            envSettings,
            project.VSSPath,
            project.LocalPath,
            string.Format("get {0} from VCS", project.ProjectName),
            envSettings.WorkingDir);
        result.Add(getCommand);
    }
    return result;
}
// Persists the given application version into dbo.Configuration under the
// 'Version' key.
// NOTE(review): appVersion is spliced directly into the SQL text via
// string.Format — safe only while callers pass trusted version strings;
// consider a parameterized command if this could ever take external input.
internal void WriteNewSchemaVersion(string appVersion)
{
    var updateSchemaVersion = string.Format("UPDATE dbo.Configuration SET ConfigValue = N'{0}' WHERE ConfigKey = N'Version'", appVersion);
    BatchCommand.RunBatch(updateSchemaVersion);
}
// Looks up the error and records-affected count recorded for one command of
// the batch. Returns true when a records-affected value was recorded;
// otherwise recordsAffected is set to zero.
internal bool GetBatchedAffected(int commandIdentifier, out int recordsAffected, out Exception error)
{
    error = BatchCommand.GetErrors(commandIdentifier);

    int? affected = BatchCommand.GetRecordsAffected(commandIdentifier);
    if (affected.HasValue)
    {
        recordsAffected = affected.Value;
        return true;
    }

    recordsAffected = 0;
    return false;
}
// Begins a batched edit: asserts no editor transaction is already open,
// starts a new one, raises the before-batch hook, then dispatches the
// command via OnBatch.
public void StartBatch(BatchCommand command)
{
    Debug.Assert(!Editor.Transaction.HasId);
    Editor.Transaction.Start();
    OnBeforeBatch();
    OnBatch(command);
}
// Runs the upgrade script one batch at a time, splitting the file contents
// on "GO" separator lines.
public void UpdateByBatch()
{
    string script = File.ReadAllText(_filename);
    string[] separators = { "GO" + Environment.NewLine };

    foreach (string batch in script.Split(separators, StringSplitOptions.RemoveEmptyEntries))
    {
        BatchCommand.RunBatch(batch);
    }
}
// Composes the restore/build/install pipeline for every project; on Unix an
// access-grant step precedes each project's steps.
public ICommand BuildProjects()
{
    var pipeline = new BatchCommand();
    foreach (var project in productInfo.Projects)
    {
        if (EnvironmentSettings.Platform == Platform.Unix)
        {
            pipeline.Add(new UnixGrantAccessCommand(project.LocalPath, envSettings.WorkingDir));
        }

        pipeline.Add(new RestoreCommand(envSettings, project));
        pipeline.Add(new BuildCommand(envSettings, project));
        pipeline.Add(new InstallPackageCommand(envSettings, project));
    }
    return pipeline;
}
#pragma warning disable RIT0007 // Optional parameters in public methods of public types.
// Opens (or re-enters) an undo/redo batch. The first call creates the batch
// command; nested calls only increase its nesting level. Returns the batch
// handle callers use to close the scope.
public UndoRedStackBatch StartBatch(string text = "")
#pragma warning restore RIT0007 // Optional parameters in public methods of public types.
{
    if (_batch is null)
        _batch = new BatchCommand(new UndoRedStackBatch(this), text);
    else
        _batch.UndoRedStackBatch.Level++;

    return _batch.UndoRedStackBatch;
}
// Wraps the batch command in a BatchAction and executes it through the
// global history inside the current editor transaction; OnAfterBatch fires
// when the action reports completion.
private void OnBatch(BatchCommand command)
{
    History.Instance.ExecuteInTransaction(
        delegate()
        {
            BatchAction action = new BatchAction(command);
            action.Id = Editor.Transaction.CurrentId;
            action.OnFinished += OnAfterBatch;
            History.Instance.Execute(action);
        },
        Editor.Transaction.CurrentId,
        "Document.OnBatch");
}
// Executes every accumulated command as a single batched RPC round-trip and
// returns the total number of rows affected across the batch.
internal int ExecuteNonQuery()
{
    ValidateCommandBehavior(nameof(ExecuteNonQuery), CommandBehavior.Default);
    // Switch the shared command into batch-RPC mode and rebuild its command
    // list from the locally accumulated commands.
    BatchCommand.BatchRPCMode = true;
    BatchCommand.ClearBatchCommand();
    BatchCommand.Parameters.Clear();
    for (int ii = 0; ii < _commandList.Count; ii++)
    {
        LocalCommand cmd = _commandList[ii];
        BatchCommand.AddBatchCommand(cmd.CommandText, cmd.Parameters, cmd.CmdType, cmd.ColumnEncryptionSetting);
    }
    return (BatchCommand.ExecuteBatchRPCCommand());
}
// Documentation sample: sends a put, a patch and a delete to the server as
// one BatchCommand over the session's request executor, in both the
// synchronous and asynchronous session flavors. The #region markers are
// extraction anchors for the published docs — keep them intact.
public async Task Examples()
{
    using (var documentStore = new DocumentStore())
    {
        #region batch_3
        using (var session = documentStore.OpenSession())
        {
            var commands = new List<ICommandData>
            {
                new PutCommandData("users/3", null, new DynamicJsonValue
                {
                    ["Name"] = "James"
                }),
                new PatchCommandData("users/1-A", null, new PatchRequest
                {
                    Script = "this.Name = 'Nhoj';"
                }, null),
                new DeleteCommandData("users/2-A", null)
            };

            var batch = new BatchCommand(documentStore.Conventions, session.Advanced.Context, commands);
            session.Advanced.RequestExecutor.Execute(batch, session.Advanced.Context);
        }
        #endregion

        #region batch_3_async
        using (var session = documentStore.OpenAsyncSession())
        {
            var commands = new List<ICommandData>
            {
                new PutCommandData("users/3", null, new DynamicJsonValue
                {
                    ["Name"] = "James"
                }),
                new PatchCommandData("users/1-A", null, new PatchRequest
                {
                    Script = "this.Name = 'Nhoj';"
                }, null),
                new DeleteCommandData("users/2-A", null)
            };

            var batch = new BatchCommand(documentStore.Conventions, session.Advanced.Context, commands);
            await session.Advanced.RequestExecutor.ExecuteAsync(batch, session.Advanced.Context);
        }
        #endregion
    }
}
// Executes every accumulated command as a single batched RPC round-trip and
// returns the total number of rows affected; the whole operation runs inside
// a tracing event scope.
internal int ExecuteNonQuery()
{
    ValidateCommandBehavior(nameof(ExecuteNonQuery), CommandBehavior.Default);
    using (TryEventScope.Create("SqlCommandSet.ExecuteNonQuery | API | Object Id {0}, Commands executed in Batch RPC mode", ObjectID))
    {
        // Switch the shared command into batch-RPC mode and rebuild its
        // command list from the locally accumulated commands.
        BatchCommand.BatchRPCMode = true;
        BatchCommand.ClearBatchCommand();
        BatchCommand.Parameters.Clear();
        for (int ii = 0; ii < _commandList.Count; ii++)
        {
            LocalCommand cmd = _commandList[ii];
            BatchCommand.AddBatchCommand(cmd.CommandText, cmd.Parameters, cmd.CmdType, cmd.ColumnEncryptionSetting);
        }
        return (BatchCommand.ExecuteBatchRPCCommand());
    }
}
// POST api/values
// Validates the posted batch command, kicks off a batch run whose output is
// written under the site root, and returns that path on success.
public IHttpActionResult Post(BatchCommand command)
{
    var filepath = HttpContext.Current.Server.MapPath("~/");

    if (command == null)
        return BadRequest("Post value is null");

    if (!ModelState.IsValid)
        return new InvalidModelStateResult(ModelState, this);

    var processor = new BatchProcessor(command);
    processor.StartNewAndWriteToFile(filepath);
    return Ok(filepath);
}
// Composes the full test pipeline:
//   1. fetch the NUnitXml.xslt transform from VCS,
//   2. delete stale xUnit result files,
//   3. run tests for every project,
//   4. transform xUnit results to NUnit format and merge them into
//      nunit-result.xml.
public ICommand RunTests()
{
    BatchCommand batchCommand = new BatchCommand(true);
    batchCommand.Add(new GetFromVCSCommand(
        envSettings,
        Path.Combine(envSettings.RemoteSettingsPath, "NUnitXml.xslt"),
        envSettings.WorkingDir,
        "get NUnitXml.xslt",
        envSettings.WorkingDir));
    batchCommand.Add(new ActionCommand("Tests clear", () =>
    {
        foreach (var project in productInfo.Projects)
        {
            // Fixed: removed an unused local that built the NUnit result
            // path here; only the xUnit result file is cleared before the
            // run (the NUnit file is cleared in the transform step below).
            string xUnitResults = Path.Combine(envSettings.WorkingDir, project.TestResultFileName);
            if (File.Exists(xUnitResults))
                File.Delete(xUnitResults);
        }
    }));
    foreach (var project in productInfo.Projects)
        batchCommand.Add(new RunTestsCommand(envSettings, project));
    batchCommand.Add(new ActionCommand("Tests transform", () =>
    {
        XslCompiledTransform xslt = new XslCompiledTransform();
        xslt.Load("NUnitXml.xslt");
        List<string> nUnitTestFiles = new List<string>();
        foreach (var project in productInfo.Projects)
        {
            string xUnitResults = Path.Combine(envSettings.WorkingDir, project.TestResultFileName);
            string nUnitResults = Path.Combine(envSettings.WorkingDir, project.NunitTestResultFileName);
            // Replace any stale NUnit output before transforming.
            if (File.Exists(nUnitResults))
                File.Delete(nUnitResults);
            if (File.Exists(xUnitResults))
            {
                xslt.Transform(xUnitResults, nUnitResults);
                nUnitTestFiles.Add(nUnitResults);
            }
        }
        NUnitMerger.MergeFiles(nUnitTestFiles, "nunit-result.xml");
    }));
    return batchCommand;
}
// Coalesces the pending index commands into batched commands where the
// dialect supports batching; commands that cannot join a batch are kept
// standalone and will be executed individually, preserving execution order.
private void BatchCommands()
{
    if (_commands.Count == 0)
    {
        return;
    }

    if (!_dialect.SupportsBatching)
    {
        return;
    }

    var batches = new List<IIndexCommand>();

    // Page through the ordered commands so no single batch exceeds the
    // configured page size.
    foreach (var page in _commands.OrderBy(x => x.ExecutionOrder).PagesOf(_store.Configuration.CommandsPageSize))
    {
        var batch = new BatchCommand();
        foreach (var command in page)
        {
            if (!command.AddToBatch(_dialect, batch.Queries, batch.Parameters, batch.Actions))
            {
                // If the command can't be added to a batch, we execute it independently
                if (batch.Queries.Count > 0)
                {
                    batches.Add(batch);
                    // Then start a new batch
                    batch = new BatchCommand();
                }
                batches.Add(command);
            }
        }

        // Flush the last (possibly partial) batch of this page.
        if (batch.Queries.Count > 0)
        {
            batches.Add(batch);
        }
    }

    // Replace the raw commands with their batched equivalents.
    _commands.Clear();
    _commands.AddRange(batches);
}
// Finalizes the active batch: pushes it onto the undo stack, invalidates
// redo history, resets batch state and notifies listeners.
// Throws InvalidOperationException when no batch has been started.
internal void CommitBatch()
{
    if (_batch is null)
    {
        throw new InvalidOperationException("Batch is not started");
    }

    // The batch goes straight onto the undo stack; AddAndPerformOperation()
    // is deliberately avoided because its Redo step may not be re-executed.
    UndoStack.Push(_batch);
    ClearRedoStack();

    _batch = null;
    CanExecuteChanged();
}
// Demo endpoint: builds a canned BatchCommand against the AdlibSoft
// AMCollect database, runs it through BatchProcessor, and returns the
// directory the results were written to.
public IHttpActionResult Get()
{
    // Hand-built demo command; every value below is a fixed sample.
    BatchCommand command = new BatchCommand();
    command.Title = "TEST TITLE";
    command.Description = "TEST DESCRIPTION";
    command.Actions = new GroupAction[2];
    command.Actions[0] = new GroupAction() { CategoryName = "Creator", GroupBy = "creator" };
    command.Actions[1] = new GroupAction() { CategoryName = "material", GroupBy = "material" };
    command.Mappings = new XmlMappings();
    command.Mappings.Title = "title";
    command.Mappings.Description = "description";
    command.Mappings.Begindate = "production.date.start";
    command.Mappings.Enddate = "production.date.end";
    command.Mappings.Id = "priref";
    command.Mappings.Images = "reproduction.reference";
    command.BaseUrl = "http://amdata.adlibsoft.com/";
    command.Database = "AMCollect";
    command.ImagesLocation = "http://ahm.adlibsoft.com/ahmimages/";

    var filepath = HttpContext.Current.Server.MapPath("~/");

    // Fixed: removed the dead `command == null` check — the command is
    // constructed locally above and can never be null here.
    if (!ModelState.IsValid)
    {
        return new InvalidModelStateResult(ModelState, this);
    }

    BatchProcessor batch = new BatchProcessor(command);
    batch.StartNewAndWriteToFile(filepath);
    return Ok(filepath);
}
// Admin-only CSV import endpoint: persists the upload to App_Data under a
// random name, feeds it to BatchCommand.execute for the logged-in admin's
// unit, then removes the temporary file.
// NOTE(review): if execute() throws, the temporary file is never deleted
// (no try/finally around the delete) — confirm leftovers are acceptable.
public ActionResult Index(HttpPostedFileBase file)
{
    if (!isAuthenticated() || !loggedUser.isAdmin)
    {
        return (new HttpUnauthorizedResult());
    }
    else
    {
        // Accept only non-empty CSV uploads; some browsers report CSV as
        // application/vnd.ms-excel, so both content types are allowed.
        if (file != null && file.ContentLength > 0 && (file.ContentType == "text/csv" || file.ContentType == "application/vnd.ms-excel"))
        {
            var fileName = new Common().generatePassword(20) + DateTime.Now.Ticks;
            var path = Path.Combine(Server.MapPath("~/App_Data"), fileName);
            file.SaveAs(path);
            Result result = new Result(false);
            // The file is read as ISO-8859-1 (Latin-1), not UTF-8.
            using (var reader = new StreamReader(path, System.Text.Encoding.GetEncoding("ISO-8859-1")))
            {
                result = new BatchCommand().execute(reader, loggedUser.Unit);
            }
            System.IO.File.Delete(path);
            if (result.Success)
            {
                return (RedirectToAction("Success"));
            }
            else
            {
                ModelState.AddModelError("", "Erro ao ler arquivo. Por favor verifique o arquivo novamente");
            }
        }
        else
        {
            ModelState.AddModelError("", "Erro ao ler arquivo. Por favor verifique o arquivo novamente");
        }
        return (View());
    }
}
// Sends the transformed commands to the destination server as one batch.
// Applies an optional request timeout from configuration; a timeout (as
// opposed to a requested cancellation) is reported via ThrowTimeoutException
// before the OperationCanceledException is rethrown.
protected override void LoadInternal(IEnumerable<ICommandData> items, JsonOperationContext context)
{
    // The pipeline is expected to hand over a concrete List here.
    var commands = items as List<ICommandData>;
    Debug.Assert(commands != null);

    if (commands.Count == 0)
    {
        return;
    }

    BatchOptions options = null;
    if (Configuration.LoadRequestTimeoutInSec != null)
    {
        options = new BatchOptions
        {
            RequestTimeout = TimeSpan.FromSeconds(Configuration.LoadRequestTimeoutInSec.Value)
        };
    }

    var batchCommand = new BatchCommand(DocumentConventions.Default, context, commands, options);
    try
    {
        AsyncHelpers.RunSync(() => _requestExecutor.ExecuteAsync(batchCommand, context, token: CancellationToken));
        _recentUrl = _requestExecutor.Url;
    }
    catch (OperationCanceledException e)
    {
        // Distinguish a genuine timeout from a user-requested cancellation.
        if (CancellationToken.IsCancellationRequested == false)
        {
            ThrowTimeoutException(commands.Count, e);
        }
        throw;
    }
}
// Executes every accumulated command as a single batched RPC round-trip and
// returns the total number of rows affected; the operation is bracketed by
// tracing scope events.
internal int ExecuteNonQuery()
{
    ValidateCommandBehavior(nameof(ExecuteNonQuery), CommandBehavior.Default);
    long scopeID = SqlClientEventSource.Log.TryScopeEnterEvent("SqlCommandSet.ExecuteNonQuery | API | Object Id {0}, Commands executed in Batch RPC mode", ObjectID);
    try
    {
        // Switch the shared command into batch-RPC mode and rebuild its
        // command list from the locally accumulated commands.
        BatchCommand.BatchRPCMode = true;
        BatchCommand.ClearBatchCommand();
        BatchCommand.Parameters.Clear();
        for (int ii = 0; ii < _commandList.Count; ii++)
        {
            LocalCommand cmd = _commandList[ii];
            BatchCommand.AddBatchCommand(cmd.CommandText, cmd.Parameters, cmd.CmdType, cmd.ColumnEncryptionSetting);
        }
        return (BatchCommand.ExecuteBatchRPCCommand());
    }
    finally
    {
        // Always close the tracing scope opened above.
        SqlClientEventSource.Log.TryScopeLeaveEvent(scopeID);
    }
}
/// <summary>
/// Process a single input file as a batch run file
/// </summary>
/// <param name="path">Path to the input file</param>
/// <remarks>
/// Each file gets a fresh BatchState. An unrecognized command or invalid
/// arguments abort the rest of the file (break); a missing file or an
/// exception only logs and returns.
/// </remarks>
private void ProcessScript(string path)
{
    // If the file doesn't exist, warn but continue
    if (!File.Exists(path))
    {
        logger.User($"{path} does not exist. Skipping...");
        return;
    }

    // Try to process the file now
    try
    {
        // Every line is its own command
        string[] lines = File.ReadAllLines(path);

        // Each batch file has its own state
        BatchState batchState = new BatchState();

        // Process each command line
        foreach (string line in lines)
        {
            // Skip empty lines
            if (string.IsNullOrWhiteSpace(line))
                continue;

            // Skip lines that start with REM or #
            if (line.StartsWith("REM") || line.StartsWith("#"))
                continue;

            // Read the command in, if possible
            var command = BatchCommand.Create(line);
            if (command == null)
            {
                logger.User($"Could not process {path} due to the following line: {line}");
                logger.User($"Please see the help text for more details about possible commands");
                break;
            }

            // Validate that the command has the proper number and type of arguments
            (bool valid, string error) = command.ValidateArguments();
            if (!valid)
            {
                logger.User(error);
                logger.User($"Usage: {command.Usage()}");
                break;
            }

            // Now run the command
            logger.User($"Attempting to invoke {command.Name} with {(command.Arguments.Count == 0 ? "no arguments" : "the following argument(s): " + string.Join(", ", command.Arguments))}");
            command.Process(batchState);
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex, $"There was an exception processing {path}");
    }
}
// Sends the given command set to the server as one batch request.
public async Task BatchAsync(List<ICommandData> commands)
{
    var batch = new BatchCommand(_store.Conventions, Context, commands);

    await RequestExecutor.ExecuteAsync(batch, Context);
}
// Entry point. Either runs the normal pipeline (Main2) directly, or — when
// -batchrun/-batchlist is given — builds the full batch catalog, filters it
// by the remaining keywords, and lists or runs each matching argument set.
// batchOverride is a developer hook: when set (normally only edited in
// source during debugging) its argument sets run instead of everything else.
static void Main(string[] args)
{
    ArgSetList batchOverride = null;

    using (var stayon = new StayOn())
    {
        // Developer override path: run the hard-coded argument sets and exit.
        if (batchOverride != null)
        {
            int ibatch = 0;
            Log.WriteLine("Batch contains {0} runs", batchOverride.argSets.Count());
            foreach (var argset in batchOverride.argSets)
            {
                Log.EnterTask("Running batch #{0}", ibatch);
                Log.WriteLine("Args: {0}", argset.ToCSString());
                Main2(argset.Args.ToArray());
                Log.EndTask();
                ibatch++;
            }
            return;
        }

        // Parse the batch-mode flags; everything after the flag becomes
        // the list of batch filter keywords.
        string[] batchKeywords = new string[] { };
        List<string> batchAddArgs = new List<string>();
        BatchCommand batchCommand = BatchCommand.None;
        args.ProcessArg2(new string[] { "-batchrun", "-batchlist" }, (thisArg, remainingArgs) =>
        {
            if (remainingArgs != null)
            {
                batchKeywords = remainingArgs.ToArray();
            }
            switch (thisArg.ToLowerInvariant())
            {
                case "-batchrun":
                    batchCommand = BatchCommand.Run;
                    break;
                default:
                case "-batchlist":
                    batchCommand = BatchCommand.List;
                    break;
            }
        });

        // No batch flag: treat the whole command line as a single run.
        if (batchCommand == BatchCommand.None)
        {
            Main2(args);
            return;
        }

        // Batch-mode configuration (overridable via command line).
        string batchBaseDir = @"f:\maps";
        string batchFontDir = @"C:\root\git\thenfour\PetsciiMapgen\img\fonts";
        LogCore batchLog = new LogCore();
        args.ProcessArg("-batchfontdir", s =>
        {
            batchFontDir = s;
            batchLog.WriteLine("Setting font dir: {0}", batchFontDir);
        });
        args.ProcessArg("-batchbasedir", s =>
        {
            batchBaseDir = s;
            batchLog.WriteLine("Setting base dir: {0}", batchBaseDir);
        });
        batchLog.WriteLine("Batch font dir: {0}", batchFontDir);
        batchLog.WriteLine("Batch base dir: {0}", batchBaseDir);
        string batchLogPath = System.IO.Path.Combine(batchBaseDir, @"batchLog.txt");
        batchLog.SetLogFile(batchLogPath);

        // Resolves a font file name against the configured font directory.
        Func<string, string> batchFontPath = delegate(string s)
        {
            return (System.IO.Path.Combine(batchFontDir, s));
        };

        args.ProcessArg("-batchaddarg", s => { batchAddArgs.Add(s); });

        foreach (var arg in batchKeywords)
        {
            batchLog.WriteLine("Using batch keyword: {0}", arg);
        }
        foreach (var arg in batchAddArgs)
        {
            batchLog.WriteLine("Adding additional batch argument: {0}", arg);
        }

        // Build the catalog of all known batches and keep only those
        // matching the requested keywords.
        var All = Batches.GetAllBatches(batchBaseDir, batchFontPath, batchAddArgs);
        var filtered = All.Filter(batchKeywords).ToArray();

        switch (batchCommand)
        {
            case BatchCommand.None:
                Debug.Assert(false);// handled above.
                break;
            case BatchCommand.List:
                // List mode: print each matching argument set without running.
                int ibatch = 0;
                foreach (var argset in filtered)
                {
                    batchLog.WriteLine(" {0}: {1}", ibatch, argset.ToCSString());
                    ibatch++;
                }
                batchLog.WriteLine("Batch contains {0} runs", filtered.Length);
                break;
            case BatchCommand.Run:
                // Run mode: execute each matching argument set in sequence.
                ibatch = 0;
                batchLog.WriteLine("Batch contains {0} runs", filtered.Length);
                foreach (var argset in filtered)
                {
                    batchLog.EnterTask("Running batch #{0} of keywords", ibatch, string.Join(", ", batchKeywords));
                    batchLog.WriteLine("Args: {0}", argset.ToCSString());
                    Main2(argset.Args.ToArray());
                    batchLog.EndTask();
                    ibatch++;
                }
                break;
        }
    }
}
// Builds a command that copies the test build for the given runtime and
// framework into the local "testbuild" directory and installs every
// versioned package found in the artifacts tree.
public ICommand InstallTestbuild(string runtime, string framework)
{
    var commands = new BatchCommand();

    var localPath = PlatformPathsCorrector.Inst.Correct(Path.Combine(envSettings.WorkingDir, "testbuild"), Platform.Windows);
    var sourcePath = PlatformPathsCorrector.Inst.Correct(Path.Combine(envSettings.BuildArtifactsFolder, runtime, framework), Platform.Windows);
    commands.Add(new CopyDirectoryCommand(sourcePath, localPath, true));

    var version = envSettings.BranchVersionShort + ".0";
    foreach (var packageDir in Directory.EnumerateDirectories(sourcePath))
    {
        var packageName = string.Format("{0}.{1}.nupkg", new DirectoryInfo(packageDir).Name, version);
        var packagePath = Path.Combine(packageDir, version, packageName);
        commands.Add(new InstallPackageCommand(envSettings, packagePath));
    }

    return commands;
}
// Captures the batch command this processor will execute.
public BatchProcessor(BatchCommand command) => this.command = command;
// Coalesces the pending index commands into batches, respecting both the
// configured page size (query count) and the dialect's per-command
// parameter limit. Commands that cannot join a batch stay standalone and
// will be executed individually, preserving execution order.
// NOTE(review): the guard `_commands != null && _commands.Count == 0`
// falls through when _commands is null and would NRE at the OrderBy below —
// confirm whether `_commands == null || _commands.Count == 0` was intended.
private void BatchCommands()
{
    if (_commands != null && _commands.Count == 0)
    {
        return;
    }

    // A page size of 0 disables batching entirely.
    if (!_dialect.SupportsBatching || _store.Configuration.CommandsPageSize == 0)
    {
        return;
    }

    var batches = new List<IIndexCommand>();

    // holds the queries, parameters and actions returned by an IIndexCommand, until we know we can
    // add it to a batch if it fits the limits (page size and parameters boundaries)
    var localDbCommand = _connection.CreateCommand();
    var localQueries = new List<string>();
    var localActions = new List<Action<DbDataReader>>();

    var batch = new BatchCommand(_connection.CreateCommand());
    var index = 0;

    foreach (var command in _commands.OrderBy(x => x.ExecutionOrder))
    {
        index++;

        // Can the command be batched
        if (command.AddToBatch(_dialect, localQueries, localDbCommand, localActions, index))
        {
            // Does it go over the page or parameters limits
            var tooManyQueries = batch.Queries.Count + localQueries.Count > _store.Configuration.CommandsPageSize;
            var tooManyCommands = batch.Command.Parameters.Count + localDbCommand.Parameters.Count > _store.Configuration.SqlDialect.MaxParametersPerCommand;

            if (tooManyQueries || tooManyCommands)
            {
                batches.Add(batch);

                // Then start a new batch
                batch = new BatchCommand(_connection.CreateCommand());
            }

            // We can add the queries to the current batch
            batch.Queries.AddRange(localQueries);
            batch.Actions.AddRange(localActions);

            for (var i = localDbCommand.Parameters.Count - 1; i >= 0; i--)
            {
                // npgsql will prevent a parameter from being added to a collection
                // if it's already in another one
                var parameter = localDbCommand.Parameters[i];
                localDbCommand.Parameters.RemoveAt(i);
                batch.Command.Parameters.Add(parameter);
            }
        }
        else
        {
            // The command can't be added to a batch, we leave it in the list of commands to execute individually

            // Finalize the current batch
            if (batch.Queries.Count > 0)
            {
                batches.Add(batch);

                // Then start a new batch
                batch = new BatchCommand(_connection.CreateCommand());
            }

            batches.Add(command);
        }

        // Reset the scratch buffers for the next command.
        localQueries.Clear();
        localDbCommand.Parameters.Clear();
        localActions.Clear();
    }

    // If the ongoing batch is not empty, add it
    if (batch.Queries.Count > 0)
    {
        batches.Add(batch);
    }

    _commands.Clear();
    _commands.AddRange(batches);
}
// Coroutine: loads the logged-in Facebook user's friend list, then resolves
// each friend's cloud primary keys in batches of m_MaxFriendsInOneBatch via
// BatchCommand. Populates m_Friends only when at least one friend resolved;
// m_IsLoading tracks progress for callers throughout.
IEnumerator LoadInternal()
{
    m_IsLoading = true;
    m_Friends = null;
    List<FacebookFriend> friends = new List<FacebookFriend>();

    // Bail out early when no Facebook user is logged in.
    if (FacebookPlugin.Instance.CurrentUser == null)
    {
        Debug.LogError("FacebookFriendList: Facebook user is not logged in.");
        m_IsLoading = false;
        yield break;
    }

    yield return (StartCoroutine(FacebookPlugin.Instance.LoadFriends()));

    SocialPlugin.Person[] friendsPlayingDeadZone = FacebookPlugin.Instance.Friends;
    if (friendsPlayingDeadZone == null || friendsPlayingDeadZone.Length <= 0)
    {
        m_IsLoading = false;
        yield break;
    }

    int friendsProcessed = 0;
    while (friendsProcessed < friendsPlayingDeadZone.Length)
    {
        // Clamp the last batch to the number of remaining friends.
        int friendsInThisBatch = (friendsProcessed + m_MaxFriendsInOneBatch < friendsPlayingDeadZone.Length) ? m_MaxFriendsInOneBatch : friendsPlayingDeadZone.Length - friendsProcessed;

        // One key-lookup action per friend in this batch.
        BatchCommandAction[] actions = new BatchCommandAction[friendsInThisBatch];
        for (int i = 0; i < friendsInThisBatch; i++)
        {
            actions[i] = new GetPrimaryKeysLinkedWithID(friendsPlayingDeadZone[friendsProcessed + i].ID, CloudServices.LINK_ID_TYPE_FACEBOOK, E_UserAcctKind.Any, BaseCloudAction.NoTimeOut, false);
        }

        BatchCommand findFriendsPrimaryKeys = new BatchCommand(actions);
        GameCloudManager.AddAction(findFriendsPrimaryKeys);

        // Busy-wait (one frame at a time) until the batch completes.
        while (findFriendsPrimaryKeys.isDone == false)
        {
            yield return (new WaitForEndOfFrame());
        }

        if (findFriendsPrimaryKeys.isSucceeded)
        {
            for (int i = 0; i < friendsInThisBatch; i++)
            {
                if (findFriendsPrimaryKeys.actions[i].isSucceeded)
                {
                    GetPrimaryKeysLinkedWithID usersLinkedWithID = (GetPrimaryKeysLinkedWithID)findFriendsPrimaryKeys.actions[i];
                    // Keep only friends that actually have cloud accounts.
                    if (usersLinkedWithID.AllPrimaryKeys != null && usersLinkedWithID.AllPrimaryKeys.Length > 0)
                    {
                        friends.Add(new FacebookFriend(usersLinkedWithID.AllPrimaryKeys, friendsPlayingDeadZone[friendsProcessed + i]));
                    }
                }
            }
        }

        friendsProcessed += m_MaxFriendsInOneBatch;
    }

    if (friends.Count > 0)
    {
        m_Friends = friends.ToArray();
    }
    m_IsLoading = false;
}
// Interprets each input file as a batch script: one command per line, with
// a per-file DatFile/output-directory/index state. Unknown commands log and
// continue to the next line; an unparseable line aborts the rest of that
// file; exceptions log and move on to the next input file.
public override void ProcessFeatures(Dictionary<string, Library.Help.Feature> features)
{
    base.ProcessFeatures(features);

    // Try to read each input as a batch run file
    foreach (string path in Inputs)
    {
        // If the file doesn't exist, warn but continue
        if (!File.Exists(path))
        {
            logger.User($"{path} does not exist. Skipping...");
            continue;
        }

        // Try to process the file now
        try
        {
            // Every line is its own command
            string[] lines = File.ReadAllLines(path);

            // Each batch file has its own state
            int index = 0;
            DatFile datFile = DatFile.Create();
            string outputDirectory = null;

            // Process each command line
            foreach (string line in lines)
            {
                // Skip empty lines
                if (string.IsNullOrWhiteSpace(line))
                    continue;

                // Skip lines that start with REM or #
                if (line.StartsWith("REM") || line.StartsWith("#"))
                    continue;

                // Read the command in, if possible
                var command = BatchCommand.Create(line);
                if (command == null)
                {
                    logger.User($"Could not process {path} due to the following line: {line}");
                    break;
                }

                // Now switch on the command
                logger.User($"Attempting to invoke {command.Name} with {(command.Arguments.Count == 0 ? "no arguments" : "the following argument(s): " + string.Join(", ", command.Arguments))}");
                switch (command.Name.ToLowerInvariant())
                {
                    // Set a header field
                    case "set":
                        if (command.Arguments.Count != 2)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: set(header.field, value);");
                            continue;
                        }

                        // Read in the individual arguments
                        Field field = command.Arguments[0].AsField();
                        string value = command.Arguments[1];

                        // If we had an invalid input, log and continue
                        if (field == Field.NULL)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid field name");
                            continue;
                        }

                        // Set the header field
                        datFile.Header.SetFields(new Dictionary<Field, string> { [field] = value });
                        break;

                    // Parse in new input file(s)
                    case "input":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: input(datpath, ...);");
                            continue;
                        }

                        // Get only files from inputs
                        List<ParentablePath> datFilePaths = DirectoryExtensions.GetFilesOnly(command.Arguments);

                        // Assume there could be multiple
                        foreach (ParentablePath datFilePath in datFilePaths)
                        {
                            datFile.Parse(datFilePath, index++);
                        }
                        break;

                    // Run DFD/D2D on path(s)
                    case "d2d":
                    case "dfd":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: d2d(path, ...);");
                            continue;
                        }

                        // TODO: Should any of the other options be added for D2D?

                        // Assume there could be multiple
                        foreach (string input in command.Arguments)
                        {
                            datFile.PopulateFromDir(input);
                        }

                        // TODO: We might not want to remove higher order hashes in the future
                        // TODO: We might not want to remove dates in the future
                        Cleaner dfdCleaner = new Cleaner() { ExcludeFields = Hash.DeepHashes.AsFields() };
                        dfdCleaner.ExcludeFields.Add(Field.DatItem_Date);
                        datFile.ApplyCleaning(dfdCleaner);
                        break;

                    // Apply a filter
                    case "filter":
                        if (command.Arguments.Count < 2 || command.Arguments.Count > 4)
                        {
                            logger.User($"Invoked {command.Name} and expected between 2-4 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: filter(field, value, [remove = false, [perMachine = false]]);");
                            continue;
                        }

                        // Read in the individual arguments
                        Field filterField = command.Arguments[0].AsField();
                        string filterValue = command.Arguments[1];
                        bool? filterRemove = false;
                        if (command.Arguments.Count >= 3)
                            filterRemove = command.Arguments[2].AsYesNo();
                        bool? filterPerMachine = false;
                        if (command.Arguments.Count >= 4)
                            filterPerMachine = command.Arguments[3].AsYesNo();

                        // If we had an invalid input, log and continue
                        if (filterField == Field.NULL)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid field name");
                            continue;
                        }
                        if (filterRemove == null)
                        {
                            logger.User($"{command.Arguments[2]} was an invalid true/false value");
                            continue;
                        }
                        if (filterPerMachine == null)
                        {
                            logger.User($"{command.Arguments[3]} was an invalid true/false value");
                            continue;
                        }

                        // Create a filter with this new set of fields
                        Filter filter = new Filter();
                        filter.SetFilter(filterField, filterValue, filterRemove.Value);

                        // Apply the filter blindly
                        datFile.ApplyFilter(filter, filterPerMachine.Value);

                        // Cleanup after the filter
                        // TODO: We might not want to remove immediately
                        datFile.Items.ClearMarked();
                        datFile.Items.ClearEmpty();
                        break;

                    // Apply an extra INI
                    case "extra":
                        if (command.Arguments.Count != 2)
                        {
                            logger.User($"Invoked {command.Name} and expected 2 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: extra(field, inipath);");
                            continue;
                        }

                        // Read in the individual arguments
                        Field extraField = command.Arguments[0].AsField();
                        string extraFile = command.Arguments[1];

                        // If we had an invalid input, log and continue
                        if (extraField == Field.NULL)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid field name");
                            continue;
                        }
                        if (!File.Exists(command.Arguments[1]))
                        {
                            logger.User($"{command.Arguments[1]} was an invalid file name");
                            continue;
                        }

                        // Create the extra INI
                        ExtraIni extraIni = new ExtraIni();
                        ExtraIniItem extraIniItem = new ExtraIniItem();
                        extraIniItem.PopulateFromFile(extraFile);
                        extraIniItem.Field = extraField;
                        extraIni.Items.Add(extraIniItem);

                        // Apply the extra INI blindly
                        datFile.ApplyExtras(extraIni);
                        break;

                    // Apply internal split/merge
                    case "merge":
                        if (command.Arguments.Count != 1)
                        {
                            logger.User($"Invoked {command.Name} and expected 1 argument, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: merge(split|merged|nonmerged|full|device);");
                            continue;
                        }

                        // Read in the individual arguments
                        MergingFlag mergingFlag = command.Arguments[0].AsMergingFlag();

                        // If we had an invalid input, log and continue
                        if (mergingFlag == MergingFlag.None)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid merging flag");
                            continue;
                        }

                        // Apply the merging flag
                        datFile.ApplySplitting(mergingFlag, false);
                        break;

                    // Apply description-as-name logic
                    case "descname":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: descname();");
                            continue;
                        }

                        // Apply the logic
                        datFile.MachineDescriptionToName();
                        break;

                    // Apply 1G1R
                    case "1g1r":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: 1g1r(region, ...);");
                            continue;
                        }

                        // Run the 1G1R functionality
                        datFile.OneGamePerRegion(command.Arguments);
                        break;

                    // Apply one rom per game (ORPG)
                    case "orpg":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: orpg();");
                            continue;
                        }

                        // Apply the logic
                        datFile.OneRomPerGame();
                        break;

                    // Remove a field
                    case "remove":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: remove(field, ...);");
                            continue;
                        }

                        // Run the removal functionality
                        datFile.RemoveFieldsFromItems(command.Arguments.Select(s => s.AsField()).ToList());
                        break;

                    // Apply scene date stripping
                    case "sds":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: sds();");
                            continue;
                        }

                        // Apply the logic
                        datFile.StripSceneDatesFromItems();
                        break;

                    // Set new output format(s)
                    case "format":
                        if (command.Arguments.Count == 0)
                        {
                            logger.User($"Invoked {command.Name} but no arguments were provided");
                            logger.User("Usage: format(datformat, ...);");
                            continue;
                        }

                        // Assume there could be multiple
                        datFile.Header.DatFormat = 0x00;
                        foreach (string format in command.Arguments)
                        {
                            datFile.Header.DatFormat |= format.AsDatFormat();
                        }

                        // If we had an invalid input, log and continue
                        if (datFile.Header.DatFormat == 0x00)
                        {
                            logger.User($"No valid output format found");
                            continue;
                        }
                        break;

                    // Set output directory
                    case "output":
                        if (command.Arguments.Count != 1)
                        {
                            logger.User($"Invoked {command.Name} and expected exactly 1 argument, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: output(outdir);");
                            continue;
                        }

                        // Only set the first as the output directory
                        outputDirectory = command.Arguments[0];
                        break;

                    // Write out the current DatFile
                    case "write":
                        if (command.Arguments.Count > 1)
                        {
                            logger.User($"Invoked {command.Name} and expected 0-1 arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: write([overwrite = true]);");
                            continue;
                        }

                        // Get overwrite value, if possible
                        bool? overwrite = true;
                        if (command.Arguments.Count == 1)
                            overwrite = command.Arguments[0].AsYesNo();

                        // If we had an invalid input, log and continue
                        if (overwrite == null)
                        {
                            logger.User($"{command.Arguments[0]} was an invalid true/false value");
                            continue;
                        }

                        // Write out the dat with the current state
                        datFile.Write(outputDirectory, overwrite: overwrite.Value);
                        break;

                    // Reset the internal state
                    case "reset":
                        if (command.Arguments.Count != 0)
                        {
                            logger.User($"Invoked {command.Name} and expected no arguments, but {command.Arguments.Count} arguments were provided");
                            logger.User("Usage: reset();");
                            continue;
                        }

                        // Reset all state variables
                        index = 0;
                        datFile = DatFile.Create();
                        outputDirectory = null;
                        break;

                    default:
                        logger.User($"Could not find a match for '{command.Name}'. Please see the help text for more details.");
                        break;
                }
            }
        }
        catch (Exception ex)
        {
            logger.Error(ex, $"There was an exception processing {path}");
            continue;
        }
    }
}
/// <summary>
/// Resets the shared <see cref="BatchCommand"/> instance to a fresh object.
/// </summary>
public void Init() => instance = new BatchCommand();
/// <summary>
/// Executes a named batch command defined on a page: looks the command up on the
/// page metadata, enforces access control, then runs each sub-command's DB (or
/// custom "$"-prefixed) operation for every submitted record group inside a
/// single SQL transaction. Returns a JSON <c>ScallerResult</c> describing the outcome.
/// </summary>
/// <param name="PageName">Name of the page whose batch-command metadata is loaded via PDAL.</param>
/// <param name="CommandName">Name of the batch command to execute on that page.</param>
/// <param name="records">Per-command, per-group, per-row parameter sets posted by the client.</param>
/// <returns>JSON result: code 0 on success; 401/403 on auth failure; 404 if the command is unknown; 500 on execution error.</returns>
public JsonResult BatchCommand(string PageName, string CommandName, inputParameter[][][][] records)
{
    Page Info = PDAL.FindPage(PageName);

    // Locate the requested batch command on the page definition.
    BatchCommand B = null;
    for (int i = 0; i < Info.BatchCommands.Count; i++)
    {
        if (Info.BatchCommands[i].name == CommandName)
        {
            B = Info.BatchCommands[i];
            break;
        }
    }

    ScallerResult Res = new ScallerResult();

    // FIX: guard against an unknown command name. The original code fell through
    // to B.PerKey below and threw NullReferenceException when no match was found.
    if (B == null)
    {
        Res.code = 404;
        Res.Message = "Batch command not found: " + CommandName;
        return Json(Res);
    }

    // Permission check happens before any connection is opened, so the early
    // returns here do not need cleanup.
    PDALSect.AccessResult DoAccess = PDALSect.GetCheckPer(B.PerKey, PageName, null);
    if (DoAccess == PDALSect.AccessResult.AccessDenied)
    {
        Res.code = 403;
        Res.Message = "شما اجازه دسترسی به این قسمت را ندارید";
        return Json(Res);
    }
    if (DoAccess == PDALSect.AccessResult.ReLogin)
    {
        Res.code = 401;
        Res.Message = "نیاز به ورود مجدد می باشد";
        return Json(Res);
    }

    SqlConnection Con = PDAL.GetConnection();
    if (Con.State != ConnectionState.Open)
    {
        Con.Open();
    }

    // One scalar result slot per sub-command; later commands can reference
    // earlier results via the "ScallerValues" parameter source below.
    string[] scallerValues = new string[records.Length];

    // FIX: try/finally guarantees the connection is closed even if
    // BeginTransaction or Rollback throws. The original only closed the
    // connection on the straight-line path, leaking it on those failures.
    try
    {
        SqlTransaction Tran = Con.BeginTransaction();
        try
        {
            for (int k = 0; k < records.Length; k++)
            {
                // NOTE(review): assumes records.Length <= B.Commands.Count —
                // an oversized client payload would throw here; confirm upstream validation.
                Command Com = B.Commands[k];

                // Build the server-supplied parameters (session values, special
                // values, and results of earlier sub-commands) that are prepended
                // to every posted row for this command.
                List<inputParameter> initValues = new List<inputParameter>();
                for (int q = 0; q < Com.Parameters.Count; q++)
                {
                    if (Com.Parameters[q].sourceType == "Session")
                    {
                        inputParameter ix = new inputParameter();
                        ix.key = Com.Parameters[q].name;
                        ix.value = PDAL.GetSession(Com.Parameters[q].sourceTypeParameter);
                        initValues.Add(ix);
                    }
                    if (Com.Parameters[q].sourceType == "SpecValue")
                    {
                        inputParameter ix = new inputParameter();
                        ix.key = Com.Parameters[q].name;
                        ix.value = PDAL.GetSpecValue(Com.Parameters[q].sourceTypeParameter);
                        initValues.Add(ix);
                    }
                    if (Com.Parameters[q].sourceType == "ScallerValues")
                    {
                        inputParameter ix = new inputParameter();
                        ix.key = Com.Parameters[q].name;
                        ix.value = scallerValues[int.Parse(Com.Parameters[q].sourceTypeParameter)];
                        initValues.Add(ix);
                    }
                }

                for (int k2 = 0; k2 < records[k].Length; k2++)
                {
                    PDAL.ConvertValuesBatch(Info, Com, records[k][k2]);

                    // "$"-prefixed commands are dispatched to the custom scaler
                    // instead of being executed directly against the database.
                    if (Com.DBCommand.StartsWith("$") == false)
                    {
                        // NOTE(review): k3 starts at 1, so element 0 of each row
                        // group is skipped — this looks deliberate (header row?)
                        // and is preserved exactly; confirm against the client payload format.
                        for (int k3 = 1; k3 < records[k][k2].Length; k3++)
                        {
                            List<inputParameter> par = new List<inputParameter>();
                            par.AddRange(initValues);
                            par.AddRange(records[k][k2][k3]);
                            scallerValues[k] = PDAL.ExecScallerWithConnection(ref Com.DBCommand, par, Con, Tran);
                        }
                    }
                    else
                    {
                        for (int k3 = 1; k3 < records[k][k2].Length; k3++)
                        {
                            List<inputParameter> par = new List<inputParameter>();
                            par.AddRange(initValues);
                            par.AddRange(records[k][k2][k3]);
                            scallerValues[k] = CustomScaler(Com.DBCommand, PageName, par.ToArray());
                        }
                    }
                }
            }

            Tran.Commit();
            Res.code = 0;
            Res.Message = "با موفقیت انجام شد";
        }
        catch (Exception ex)
        {
            // Any failure rolls the whole batch back; the error detail is
            // surfaced to the client in the result message.
            Tran.Rollback();
            Res.code = 500;
            Res.Message = "خطا در انجام عملیات" + "<br />" + ex.Message;
        }
    }
    finally
    {
        if (Con.State != ConnectionState.Closed)
        {
            Con.Close();
        }
    }

    return Json(Res);
}