// Deletes every archived .dmp file in the given directory (non-recursive).
// Missing directory is not an error; individual delete failures are logged
// and the sweep continues.
public static void ClearArchivedDumpFiles(string path)
{
    if (!Directory.Exists(path))
    {
        MiniDumpProvider._tracer.TraceInformation("No dump file directory");
        return;
    }

    foreach (string file in Directory.GetFiles(path, "*.dmp", SearchOption.TopDirectoryOnly))
    {
        using (LogicalOperation.Create($"Deleting {file}", new object[0]))
        {
            try
            {
                File.Delete(file);
                // FIX: was string.Format("Removed") — a no-op format call.
                MiniDumpProvider._tracer.TraceInformation("Removed");
            }
            catch (Exception ex)
            {
                // Best-effort cleanup: log and move on to the next file.
                MiniDumpProvider._tracer.TraceInformation($"Error: {ex}");
            }
        }
    }
}
protected override bool OnApply()
{
    // Prune extra project files, keeping only the correctly-named one.
    if (csprojs.Count > 1)
    {
        // Materialize the wrong names first so csprojs can be mutated safely.
        foreach (var wrongName in csprojs.Where(c => c != correctCsprojName).ToList())
        {
            using (LogicalOperation.Start($"Deleting {wrongName}"))
            {
                File.Delete(Path.Combine(Context.Path, wrongName));
                csprojs.Remove(wrongName);
            }
        }
    }

    // If no project file remains at all, create one with the correct name.
    if (csprojs.Count == 0)
    {
        using (LogicalOperation.Start($"Creating {correctCsprojName}"))
        {
            VisualStudioProject.Create(correctCsprojPath);
        }
    }

    return true;
}
// Publishes every listed project for the requested target framework,
// selecting the appropriate build driver for classic .NET Framework.
Build(
    ProduceRepository repository,
    VisualStudioSolution sln,
    IList<VisualStudioSolutionProjectReference> projs,
    string framework)
{
    var properties = new Dictionary<string, string>() { { "TargetFramework", framework } };
    var targets = projs.Select(p => $"{p.MSBuildTargetName}:Publish");

    using (LogicalOperation.Start($"Building .NET for {framework}"))
    {
        // "netNN" (digits only, no dots) denotes a classic .NET Framework moniker.
        if (Regex.IsMatch(framework, @"^net\d+$"))
        {
            if (!CanBuildNetFramework)
            {
                Trace.TraceInformation("This system can't build for .NET Framework");
                return;
            }

            if (BuildNetFrameworkUsingMSBuild)
            {
                MSBuild(repository, sln, properties, targets);
                return;
            }
        }

        DotnetMSBuild(repository, sln, properties, targets);
    }
}
// Gene representing a logic gate that applies the given operation at the given index.
// NOTE(review): Contract.Requires runs after the base constructor has already
// consumed the index, so the precondition does not guard the base call.
public LogicGateGene(int index, LogicalOperation operation) : base(index)
{
    Contract.Requires(index > 0);
    Operation = operation;
}
// Adds an inference rule: the if-nodes, combined with the given logical
// operation, imply the then-nodes. Unknown node names are registered first.
public void AddRule(List <string> ifNodeNames, LogicalOperation operation, List <string> thenNodeNames)
{
    if (ifNodeNames == null) { throw new ArgumentNullException(nameof(ifNodeNames)); }
    if (thenNodeNames == null) { throw new ArgumentNullException(nameof(thenNodeNames)); }
    // NOTE(review): empty lists also throw ArgumentNullException; ArgumentException
    // would be the conventional type for a non-null-but-empty argument. Left
    // unchanged because callers may be catching the current type.
    if (!ifNodeNames.Any()) { throw new ArgumentNullException(nameof(ifNodeNames)); }
    if (!thenNodeNames.Any()) { throw new ArgumentNullException(nameof(thenNodeNames)); }
    // Ensure every referenced node exists before resolving names to nodes.
    foreach (var nodeName in thenNodeNames) { UpdateNodeList(nodeName); }
    foreach (var nodeName in ifNodeNames) { UpdateNodeList(nodeName); }
    List <IInferenceNode> ifNodes = GetNodes(ifNodeNames);
    var rule = new GraphRule(ifNodes, operation, GetNodes(thenNodeNames));
    _rules.Add(rule);
    // Each if-node keeps a back-reference to the rules it participates in.
    ifNodes.ForEach(ifn => ifn.RelatedRules.Add(rule));
}
// Decodes a PSETP opcode: two source predicate registers plus the logical
// operation used to combine them.
public OpCodePsetp(InstEmitter emitter, ulong address, long opCode) : base(emitter, address, opCode)
{
    // Bits 12-14: first source predicate register.
    Predicate12 = new Register(opCode.Extract(12, 3), RegisterType.Predicate);
    // Bits 29-31: second source predicate register.
    Predicate29 = new Register(opCode.Extract(29, 3), RegisterType.Predicate);
    // Bits 24-25: logical operation selector.
    LogicalOpAB = (LogicalOperation)opCode.Extract(24, 2);
}
// Rebuilds the repository's "distfiles" work directory from the given source
// directories, copying all files recursively. Nonexistent source directories
// are skipped; if none exist, nothing is created.
DistFiles(ProduceRepository repository, IEnumerable<string> sourceDirs)
{
    Guard.NotNull(repository, nameof(repository));
    Guard.NotNull(sourceDirs, nameof(sourceDirs));

    var destDir = repository.GetWorkSubdirectory("distfiles");

    if (Directory.Exists(destDir))
    {
        using (LogicalOperation.Start("Deleting " + destDir))
            Directory.Delete(destDir, true);
    }

    // Materialize once: the original enumerated sourceDirs twice.
    var existingSourceDirs = sourceDirs.Where(d => Directory.Exists(d)).ToList();
    if (existingSourceDirs.Count == 0)
    {
        return;
    }

    using (LogicalOperation.Start("Creating " + destDir))
        Directory.CreateDirectory(destDir);

    foreach (var sourceDir in existingSourceDirs)
    {
        using (LogicalOperation.Start("Copying distributable files from " + sourceDir))
        {
            foreach (var sourceFile in Directory.EnumerateFiles(sourceDir, "*", SearchOption.AllDirectories))
            {
                // Path of the file relative to sourceDir (skip the separator).
                var localFile = sourceFile.Substring(sourceDir.Length + 1);
                var destFile = Path.Combine(destDir, localFile);
                // BUG FIX: files in subdirectories previously threw because the
                // destination subdirectory was never created before File.Copy.
                Directory.CreateDirectory(Path.GetDirectoryName(destFile));
                Trace.TraceInformation(destFile);
                File.Copy(sourceFile, destFile);
            }
        }
    }
}
// Clones the repository at the given URL into parentPath, reported as a
// traced logical operation.
Clone(string parentPath, GitUrl url)
{
    using (LogicalOperation.Start($"Cloning {url}"))
    {
        GitRepository.Clone(parentPath, url);
    }
}
// Checks the repository out to the given commit, reported as a traced
// logical operation.
CheckOut(NuGitRepository repository, GitCommitName commit)
{
    using (LogicalOperation.Start($"Checking out {commit}"))
    {
        repository.Checkout(commit);
    }
}
// Deletes wrapper scripts in the workspace bin directory that no longer
// correspond to any program listed in a repository's .produce file.
GenerateProgramWrappers(ProduceWorkspace workspace)
{
    Guard.NotNull(workspace, nameof(workspace));

    // Program base names (case-insensitive) declared by all repositories.
    var scripts =
        new HashSet<string>(
            workspace.FindRepositories()
                .Select(r => r.DotProducePath)
                .Where(p => File.Exists(p))
                .Select(p => new DotProduce(p))
                // FIX: removed redundant `.Where(dp => dp != null)` — a `new`
                // expression can never yield null.
                .SelectMany(dp => dp.Programs)
                .Select(path => Path.GetFileNameWithoutExtension(path)),
            StringComparer.OrdinalIgnoreCase);

    // Any bin file whose base name isn't a declared program is an orphan.
    var orphans =
        Directory.GetFiles(workspace.GetBinDirectory())
            .Where(file => !scripts.Contains(Path.GetFileNameWithoutExtension(file)))
            .ToList();

    if (orphans.Count > 0)
    {
        using (LogicalOperation.Start("Deleting orphan program wrapper scripts"))
        {
            foreach (var file in orphans)
            {
                Trace.WriteLine(file);
                File.Delete(file);
            }
        }
    }
}
// Restores each locked dependency to its recorded commit, cloning repositories
// that are missing from the workspace. In non-exact mode a checkout already at
// (or descended from) the locked commit is left alone; otherwise the repository
// is checked out to the locked commit name (if it still resolves to the locked
// id) or to the commit id directly. Throws UserException if uncommitted changes
// would be clobbered.
Restore(NuGitRepository repository, bool exact)
{
    var workspace = repository.Workspace;
    var lockDependencies = repository.ReadDotNuGitLock();
    foreach (var d in lockDependencies)
    {
        using (LogicalOperation.Start($"Restoring {d.Url.RepositoryName} to {d.CommitName} ({d.CommitId})"))
        {
            var name = d.Url.RepositoryName;
            // Clone on demand if the dependency repo isn't present yet.
            var r = workspace.FindRepository(name);
            if (r == null)
            {
                Clone(workspace.RootPath, d.Url);
                r = workspace.GetRepository(name);
            }
            // Gather the facts the decision chain below depends on.
            var head = r.GetCommitId(new GitCommitName("HEAD"));
            var isCheckedOutToExact = head == d.CommitId;
            var isCheckedOutToDescendent = r.IsAncestor(d.CommitId, head);
            var hasUncommittedChanges = r.HasUncommittedChanges();
            var isCommitNameAtCommitId = r.GetCommitId(d.CommitName) == d.CommitId;
            // Order matters: the non-exact "already good enough" cases are
            // tested first, then uncommitted changes become a hard error.
            if (!exact && isCheckedOutToExact && hasUncommittedChanges)
            {
                Trace.TraceInformation($"Already checked out with uncommitted changes");
            }
            else if (!exact && isCheckedOutToExact)
            {
                Trace.TraceInformation($"Already checked out");
            }
            else if (!exact && isCheckedOutToDescendent && hasUncommittedChanges)
            {
                Trace.TraceInformation($"Already checked out to descendent with uncommitted changes");
            }
            else if (!exact && isCheckedOutToDescendent)
            {
                Trace.TraceInformation($"Already checked out to descendent");
            }
            // NOTE(review): re-queries instead of using hasUncommittedChanges;
            // redundant but harmless unless the working tree changes mid-call.
            else if (r.HasUncommittedChanges())
            {
                Trace.TraceError("Uncommitted changes");
                throw new UserException($"Uncommitted changes in {name}");
            }
            else if (isCheckedOutToExact)
            {
                Trace.TraceInformation($"Already checked out");
            }
            else if (isCommitNameAtCommitId)
            {
                // Prefer checking out the symbolic name when it still points
                // at the locked commit id.
                CheckOut(r, d.CommitName);
            }
            else
            {
                CheckOut(r, d.CommitId);
            }
        }
    }
}
// A single rule condition: FieldName <ExpressionOperator> Value, joined to
// neighbouring conditions with the given logical operation.
public RuleCondition(string fieldName, ExpressionOperator expressionOperator, string value, LogicalOperation operation)
{
    FieldName = fieldName;
    ExpressionOperator = expressionOperator;
    Value = value;
    Operation = operation;
}
// Query criterion comparing a property to a value with the given comparison
// operator; joined to other criteria with logicalOperation (defaults to And).
public Criteria(String propertyName, String value, CriteriaOperator criteriaOperator, LogicalOperation logicalOperation = Core.Query.LogicalOperation.And)
{
    this.PropertyName = propertyName;
    this.Value = value;
    this.Operator = criteriaOperator;
    this.LogicalOperation = logicalOperation;
}
// Runs `nugit update` in the repository; throws UserException on failure.
Update(ProduceRepository repository)
{
    using (LogicalOperation.Start("Updating NuGit dependencies"))
    {
        var exitCode = ProcessExtensions.ExecuteAny(true, true, repository.Path, "nugit", "update");
        if (exitCode != 0)
        {
            throw new UserException("nugit failed");
        }
    }
}
// Strips UTF-8 byte order marks from the start of the file, one at a time,
// until none is detected.
void RemoveBom(string path)
{
    using (LogicalOperation.Start($"Removing UTF-8 BOM(s) from {path}"))
    {
        // Compared against `true` explicitly — DetectUtf8Bom presumably
        // returns a nullable bool (null = undetectable); preserved as-is.
        for (;;)
        {
            if (FileExtensions.DetectUtf8Bom(path) != true)
            {
                break;
            }
            FileExtensions.RemoveFirst(path, FileExtensions.Utf8Bom.Count);
        }
    }
}
// Single query rule: Field <FieldOperation> Value, combined with other rules
// via LogicalOperation. Field must be non-empty and value non-null.
public QueryRule(string field, FieldOperation fieldOperation, object value, LogicalOperation logicalOperation)
{
    ConditionChecker.Requires(string.IsNullOrEmpty(field) == false);
    ConditionChecker.Requires(value != null);
    Field = field;
    FieldOperation = fieldOperation;
    Value = value;
    LogicalOperation = logicalOperation;
}
// Returns the CPU flags resulting from a logical operation on two bytes.
// Serves from the precalculated table when it has been initialised,
// otherwise computes the flags directly.
public static Flags LogicalFlags(byte first, byte second, LogicalOperation operation)
{
    var usePrecalculated = EnablePrecalculation && _initialised;
    if (!usePrecalculated)
    {
        return GetLogicalFlags(first, second, operation);
    }
    return new Flags(_logicalFlags[first, second, (int)operation]);
}
// Removes the repository's work directory, if it exists.
Clean(ProduceRepository repository)
{
    Guard.NotNull(repository, nameof(repository));

    var workDir = repository.WorkDirectory;
    if (!Directory.Exists(workDir))
    {
        return;
    }

    using (LogicalOperation.Start("Deleting " + workDir))
    {
        Directory.Delete(workDir, true);
    }
}
// Renames the solution file to the expected name.
protected override bool OnApply()
{
    using (LogicalOperation.Start($"Renaming {slnName} to {correctSlnName}"))
    {
        File.Move(
            Path.Combine(Context.Path, slnName),
            Path.Combine(Context.Path, correctSlnName));
    }
    return true;
}
// Restores NuGet packages for the repository's solution; no-op when the
// repository has no solution.
Restore(ProduceRepository repository, string slnPath)
{
    if (slnPath == null)
    {
        return;
    }

    using (LogicalOperation.Start("Restoring NuGet packages"))
    {
        Dotnet(repository, "restore", new VisualStudioSolution(slnPath));
    }
}
// Starting a LogicalOperation pushes it onto the CorrelationManager stack,
// and disposing it pops it off again.
public void Pushes_And_Pops_Correctly()
{
    var countBefore = Trace.CorrelationManager.LogicalOperationStack.Count;
    const string op = "op";

    using (LogicalOperation.Start(op))
    {
        // Inside the scope the operation is on top of the stack.
        Trace.CorrelationManager.LogicalOperationStack.Count.ShouldBe(countBefore + 1);
        Trace.CorrelationManager.LogicalOperationStack.Peek().ShouldBe(op);
    }

    // Disposal restores the original stack depth.
    Trace.CorrelationManager.LogicalOperationStack.Count.ShouldBe(countBefore);
}
// Publishes the given nupkg to nuget.org; no-op when no package was produced.
NugetPush(ProduceRepository repository, string nupkgPath)
{
    Guard.NotNull(repository, nameof(repository));

    if (nupkgPath == null)
    {
        return;
    }

    using (LogicalOperation.Start("Publishing nupkg"))
        Dotnet(repository, "nuget", "push", nupkgPath, "-s", "https://api.nuget.org/v3/index.json");
}
// Rewrites each offending file with the desired line endings.
protected override bool OnApply()
{
    foreach (var file in filesWithIncorrectLineEndings)
    {
        using (LogicalOperation.Start($"Applying {lineEndingDescription} line endings to {file}"))
        {
            FileExtensions.WriteAllLines(file, File.ReadAllLines(file), lineEnding, false);
        }
    }
    return true;
}
// Maps a logical connective onto the equivalent set operation
// (And -> Intersection, Or -> Union).
public static OperationType GetOperationType(this LogicalOperation op)
{
    if (op == LogicalOperation.And)
    {
        return OperationType.Intersection;
    }
    if (op == LogicalOperation.Or)
    {
        return OperationType.Union;
    }
    throw new ArgumentException("Unknown logical operation");
}
// Returns the produce directory path, creating the directory on first use.
GetProduceDirectory()
{
    var path = IOPath.Combine(Path, ProduceDirectoryName);

    if (!Directory.Exists(path))
    {
        using (LogicalOperation.Start("Creating " + path))
            Directory.CreateDirectory(path);
    }

    return path;
}
// Parses the stored equation with the ANTLR-generated LogicalGrammar
// lexer/parser, then converts the parse tree into a LogicalOperation via
// ExpressionVisitor. Also returns the variables collected during the visit.
public LogicalGrammarResult Analyze()
{
    AntlrInputStream stream = new AntlrInputStream(new StringReader(_equation));
    var lexer = new LogicalGrammarLexer(stream);
    var tokens = new CommonTokenStream(lexer);
    var parser = new LogicalGrammarParser(tokens);
    // compileUnit is the grammar's start rule.
    var tree = parser.compileUnit();
    ExpressionVisitor visitor = new ExpressionVisitor();
    LogicalOperation result = visitor.Visit(tree);
    return(new LogicalGrammarResult(result, visitor.Variables));
}
// Inference rule: the if-nodes combined via the given operation imply the
// then-nodes.
// NOTE(review): null and empty both throw ArgumentNullException —
// ArgumentException would be conventional for the empty case. The lists are
// stored without copying, so later external mutation is visible to the rule.
public GraphRule(List <IInferenceNode> ifNodes, LogicalOperation operation, List <IInferenceNode> thenNodes)
{
    if (ifNodes == null || ifNodes.Count == 0) { throw new ArgumentNullException(nameof(ifNodes)); }
    if (thenNodes == null || thenNodes.Count == 0) { throw new ArgumentNullException(nameof(thenNodes)); }
    _ifNodes = ifNodes;
    _operation = operation;
    _thenNodes = thenNodes;
}
// Service stop: unloads the service loader inside a traced logical operation.
// A failing unload is logged as critical and rethrown so the service control
// manager observes the failure.
protected override void OnStop()
{
    using (LogicalOperation.Create("Stopping Oculi Service", new object[0]))
    {
        try
        {
            this.loader.Unload();
        }
        catch (Exception ex)
        {
            this.logger.Critical(ex, "Unhandled exception while unloading the Oculi Service");
            throw;
        }
    }
}
// Runs the Clean target for every locally buildable project in the solution;
// no-op when the repository has no solution.
Clean(ProduceRepository repository, string slnPath)
{
    if (slnPath == null)
    {
        return;
    }

    var sln = new VisualStudioSolution(slnPath);
    var targets =
        FindLocalBuildableProjects(repository, sln)
            .Select(p => $"{p.MSBuildTargetName}:Clean");

    using (LogicalOperation.Start("Cleaning .NET artifacts"))
    {
        DotnetMSBuild(repository, sln, targets);
    }
}
// Combines an input operand with a predicate using the requested logical
// operation; any unrecognised operation passes the input through unchanged.
public static Operand GetPredLogicalOp(
    EmitterContext context,
    LogicalOperation logicalOp,
    Operand input,
    Operand pred)
{
    if (logicalOp == LogicalOperation.And)
    {
        return context.BitwiseAnd(input, pred);
    }
    if (logicalOp == LogicalOperation.Or)
    {
        return context.BitwiseOr(input, pred);
    }
    if (logicalOp == LogicalOperation.ExclusiveOr)
    {
        return context.BitwiseExclusiveOr(input, pred);
    }
    return input;
}
// Translates a logical operation into its SQL comparison operator text;
// "=" is both the Equal mapping and the fallback for unknown values.
public static string GetLogicalOperation(LogicalOperation opt)
{
    switch (opt)
    {
        case LogicalOperation.Greater: return ">";
        case LogicalOperation.GreaterOrEqual: return ">=";
        case LogicalOperation.Is: return "IS";
        case LogicalOperation.Less: return "<";
        case LogicalOperation.LessOrEqual: return "<=";
        case LogicalOperation.NotEqual: return "<>";
        case LogicalOperation.Equal:
        default:
            return "=";
    }
}
// Service start: initialization and loading run on a thread-pool thread so
// OnStart returns to the service control manager quickly.
// NOTE(review): the rethrown exception escapes a thread-pool callback, which
// terminates the process rather than failing the start call — confirm this
// is the intended failure mode.
protected override void OnStart(string[] args)
{
    ThreadPool.QueueUserWorkItem((WaitCallback)(param0 =>
    {
        this.Initialize();
        using (LogicalOperation.Create("Starting Oculi Service", new object[0]))
        {
            try
            {
                this.loader.Load();
            }
            catch (Exception ex)
            {
                this.logger.Critical(ex, "Unhandled exception while loading the Oculi Service");
                throw;
            }
        }
    }));
}
// Merges two word-aligned compressed bitmaps with the given logical
// operation, streaming both run-length encodings in parallel. When one side
// is exhausted (and the operation is not And), the remainder of the other
// side is copied through; the DONE sentinel (-1) in ln/rn marks exhaustion.
internal static CompressedBitmap CompressedBinaryExpression([NotNull] CompressedBitmap left, [NotNull] CompressedBitmap right, LogicalOperation op)
{
    Contract.Requires(left != null && right != null && op != LogicalOperation.And && Enum.IsDefined(typeof(LogicalOperation), op));

    var writer = new CompressedBitmapWriter();

    using (var liter = left.GetEnumerator())
    using (var riter = right.GetEnumerator())
    {
        int ln = 0;             // remaining count of current word in left
        int rn = 0;             // remaining count of current word in right
        int lw = 0;             // value of current word in left (if ln > 0)
        int rw = 0;             // value of current word in right (if rn > 0)
        const int DONE = -1;

        while (true)
        {
            if (ln == 0)
            {
                if (!liter.MoveNext())
                {
                    // left is done
                    if (op == LogicalOperation.And || rn == DONE)
                    {
                        // no need to continue
                        break;
                    }
                    // continue with right until it's done
                    ln = DONE;
                    lw = 0;
                    continue;
                }
                ln = liter.Current.WordCount;
                lw = liter.Current.WordValue;
            }

            if (rn == 0)
            {
                if (!riter.MoveNext())
                {
                    // right is done
                    if (op == LogicalOperation.And || ln == DONE)
                    {
                        // no need to continue
                        break;
                    }
                    // continue with left until it's done
                    rn = DONE;
                    rw = 0;
                    // BUG FIX: the original fell through and read riter.Current
                    // after MoveNext() returned false, clobbering the DONE
                    // sentinel with undefined enumerator state.
                    continue;
                }
                rn = riter.Current.WordCount;
                rw = riter.Current.WordValue;
            }

            if (ln == DONE)
            {
                // copy right
                writer.Write((uint)rw, rn);
                rn = 0;
            }
            else if (rn == DONE)
            {
                // copy left
                writer.Write((uint)lw, ln);
                ln = 0;
            }
            else
            {
                // merge left & right over the overlapping run length
                int n = Math.Min(ln, rn);
                switch (op)
                {
                    case LogicalOperation.And: writer.Write((uint)(lw & rw), n); break;
                    case LogicalOperation.AndNot: writer.Write((uint)(lw & ~rw), n); break;
                    case LogicalOperation.Or: writer.Write((uint)(lw | rw), n); break;
                    case LogicalOperation.OrNot: writer.Write((uint)(lw | ~rw), n); break;
                    case LogicalOperation.Xor: writer.Write((uint)(lw ^ rw), n); break;
                    case LogicalOperation.XorNot: writer.Write((uint)(lw ^ ~rw), n); break;
                    default: throw new InvalidOperationException();
                }
                ln -= n;
                rn -= n;
            }
        }
    }

    return writer.GetBitmap();
}
// Declares which runtime types are acceptable for XML verification.
// NOTE(review): the logicalCheck parameter is never stored or used here —
// confirm whether an assignment to a corresponding property is missing.
public XmlVerifyRuntimeTypeAttribute(LogicalOperation logicalCheck, params Type[] types)
{
    this.Types = types;
}
// Intentionally empty — presumably logical operations need no handling in
// this visitor. NOTE(review): confirm the base implementation does nothing
// this override needs to preserve.
protected override void VisitLogicalOperation(LogicalOperation logicalOperation) { }
// Applies a logical operation element-wise between a series and a scalar,
// producing a 0.0/1.0 series. An element counts as "true" when non-zero.
// NOTE(review): for LogicalOperation.Not the value argument is ignored;
// operations not listed in the switch yield only the leading padding.
private static DataArray<double> PerformLogicalOperation(DataArray<double> data, double value, LogicalOperation operation)
{
    if (data == null)
    {
        throw new ArgumentNullException("data", "DataArray<double> must not be null.");
    }
    var array = new DataArray<double>(data.Count);
    // Pad positions before the start index with 0.0 so indices line up.
    for (int i = 0; i < data.StartIndex; i++)
    {
        array.Add(0.0);
    }
    switch (operation)
    {
        case LogicalOperation.Not:
            for (int j = data.StartIndex; j < data.Count; j++)
            {
                array.Add(Convert.ToDouble(data[j] == 0.0));
            }
            break;
        case LogicalOperation.And:
            for (int k = data.StartIndex; k < data.Count; k++)
            {
                array.Add(Convert.ToDouble((data[k] != 0.0) && (value != 0.0)));
            }
            break;
        case LogicalOperation.Or:
            for (int m = data.StartIndex; m < data.Count; m++)
            {
                array.Add(Convert.ToDouble((data[m] != 0.0) || (value != 0.0)));
            }
            break;
        case LogicalOperation.Xor:
            for (int n = data.StartIndex; n < data.Count; n++)
            {
                array.Add(Convert.ToDouble((data[n] != 0.0) ^ (value != 0.0)));
            }
            break;
    }
    array.StartIndex = data.StartIndex;
    return array;
}
// Applies a logical operation element-wise between two equally-sized series,
// producing a 0.0/1.0 series. An element counts as "true" when non-zero.
// The output uses the larger of the two StartIndex values, padding earlier
// positions with 0.0. NOTE(review): operations not listed in the switch
// (e.g. Not) yield only the padding.
private static DataArray<double> PerformLogicalOperation(DataArray<double> data1, DataArray<double> data2, LogicalOperation operation)
{
    if (data1 == null)
    {
        throw new ArgumentNullException("data1", "DataArray<double> must not be null.");
    }
    if (data2 == null)
    {
        throw new ArgumentNullException("data2", "DataArray<double> must not be null.");
    }
    if (data1.Count != data2.Count)
    {
        throw new ArgumentException("Array sizes do not match.", "data1");
    }
    var array = new DataArray<double>(data1.Count);
    // array2 is whichever input has the later start index; iteration begins
    // there so both inputs have valid data at every visited position.
    DataArray<double> array2 = data2;
    if (data1.StartIndex > data2.StartIndex)
    {
        array2 = data1;
    }
    for (int i = 0; i < array2.StartIndex; i++)
    {
        array.Add(0.0);
    }
    switch (operation)
    {
        case LogicalOperation.And:
            for (int j = array2.StartIndex; j < array2.Count; j++)
            {
                array.Add(Convert.ToDouble((data1[j] != 0.0) && (data2[j] != 0.0)));
            }
            break;
        case LogicalOperation.Or:
            for (int k = array2.StartIndex; k < array2.Count; k++)
            {
                array.Add(Convert.ToDouble((data1[k] != 0.0) || (data2[k] != 0.0)));
            }
            break;
        case LogicalOperation.Xor:
            for (int m = array2.StartIndex; m < array2.Count; m++)
            {
                array.Add(Convert.ToDouble((data1[m] != 0.0) ^ (data2[m] != 0.0)));
            }
            break;
    }
    array.StartIndex = array2.StartIndex;
    return array;
}