/// <summary>
/// Goal that is applicable while the agent's "hungry" world state is set,
/// and whose completion clears that state.
/// </summary>
public AIGoalEat(AIAgent agent) : base(agent)
{
    BasePriority = 1;

    PreConditions.Add("hungry", true);
    PostConditions.Add("hungry", false);
}
/// <summary>
/// Creates a new forensic integrity log.
/// </summary>
/// <param name="projectManager">The project manager supplying project state</param>
/// <param name="detectorFormatter">The formatter used to render detector information</param>
/// <exception cref="ArgumentNullException">If any argument is <c>null</c></exception>
public ForensicIntegrityLog(ProjectManager projectManager, IDetectorFormatter detectorFormatter)
{
    PreConditions.Argument("projectManager").Value(projectManager).IsNotNull();
    // FIX: detectorFormatter was previously stored without validation, unlike
    // projectManager; a null formatter would only fail later, far from the cause.
    PreConditions.Argument("detectorFormatter").Value(detectorFormatter).IsNotNull();

    _projectManager = projectManager;
    _detectorFormatter = detectorFormatter;
}
/// <summary>
/// Creates a new file scanner for scanning (multiple) files.
/// </summary>
/// <param name="containerDataScanner">The <see cref="IDataScanner"/> for container formats</param>
/// <param name="codecDataScanner">The <see cref="IDataScanner"/> for codec formats</param>
/// <param name="codecStreamDataScanner">The <see cref="IDataScanner"/> for scanning codec streams (of a detected data block)</param>
/// <param name="dataReaderPool">The shared pool of file data readers</param>
/// <param name="createDataBlockBuilder">The factory method for creating data blocks</param>
/// <param name="createSubProgressReporter">The factory method for creating a sub progress reporter</param>
/// <exception cref="ArgumentNullException">If any argument is <c>null</c></exception>
public FileScanner(IDataScanner containerDataScanner, IDataScanner codecDataScanner, IDataScanner codecStreamDataScanner, IDataReaderPool dataReaderPool, Creator<IDataBlockBuilder> createDataBlockBuilder, Creator<IProgressReporter, IProgressReporter, long, long, long> createSubProgressReporter)
{
    PreConditions.Argument("containerDataScanner").Value(containerDataScanner).IsNotNull();
    PreConditions.Argument("codecDataScanner").Value(codecDataScanner).IsNotNull();
    PreConditions.Argument("codecStreamDataScanner").Value(codecStreamDataScanner).IsNotNull();
    PreConditions.Argument("dataReaderPool").Value(dataReaderPool).IsNotNull();
    PreConditions.Argument("createDataBlockBuilder").Value(createDataBlockBuilder).IsNotNull();
    PreConditions.Argument("createSubProgressReporter").Value(createSubProgressReporter).IsNotNull();

    _containerDataScanner = containerDataScanner;
    _codecDataScanner = codecDataScanner;
    _codecStreamDataScanner = codecStreamDataScanner;
    _dataReaderPool = dataReaderPool;
    _createDataBlockBuilder = createDataBlockBuilder;
    _createSubProgressReporter = createSubProgressReporter;

    _codecDataScanner.DataBlockDetected += (sender, e) => FixAndReportDataBlock(e.DataBlock);
    _containerDataScanner.DataBlockDetected += (sender, e) => ScanCodecStreams(e, e.DataBlock);
    // NOTE(review): this handler reads _inputFile rather than the event args —
    // confirm _inputFile is always assigned before the container scanner runs.
    _containerDataScanner.UnknownDataDetected += (sender, e) => ScanForCodecFormats(_inputFile.CreateDataPacket().GetSubPacket(e.Offset, e.Length));

    // The default is that container blocks are allowed to overlap. See also issue DEFR-867.
    AllowOverlap = true;
}
/// <summary>
/// Goal that is applicable while the agent's "carryingLitter" world state is set,
/// and whose completion clears that state.
/// </summary>
public AIGoalDiscardLitter(AIAgent agent) : base(agent)
{
    // Very high priority: discarding carried litter preempts other goals.
    BasePriority = 99999;

    PreConditions.Add("carryingLitter", true);
    PostConditions.Add("carryingLitter", false);
}
/// <summary>
/// Saves the <paramref name="dataPacket"/> sequentially to a single file with
/// given <paramref name="filePath"/>.
/// </summary>
/// <param name="dataPacket">The data packet to save</param>
/// <param name="detectors">The detectors used to create data packet</param>
/// <param name="dataReaderPool">The shared pool of file data readers</param>
/// <param name="filePath">The path name of the file to write to</param>
/// <param name="progressReporter">For reporting progress and checking cancellation</param>
/// <param name="createForensicIntegrityLog">Create a forensic integrity log file along with the normal output</param>
/// <exception cref="ArgumentNullException">If any argument is <c>null</c></exception>
/// <exception cref="IOException">On error writing the output file</exception>
public void Save(IDataPacket dataPacket, IEnumerable<IDetector> detectors, IDataReaderPool dataReaderPool, string filePath, IProgressReporter progressReporter, bool createForensicIntegrityLog)
{
    PreConditions.Argument("dataPacket").Value(dataPacket).IsNotNull();
    PreConditions.Argument("detectors").Value(detectors).IsNotNull().And.DoesNotContainNull();
    PreConditions.Argument("dataReaderPool").Value(dataReaderPool).IsNotNull();
    PreConditions.Argument("filePath").Value(filePath).IsNotNull().And.IsNotEmpty();
    PreConditions.Argument("progressReporter").Value(progressReporter).IsNotNull();

    if (progressReporter.CancellationPending)
    {
        return;
    }

    // Stream the packet's data to the output file.
    using (IDataWriter writer = _createDataWriter(filePath))
    using (IDataReader reader = dataReaderPool.CreateDataReader(dataPacket, progressReporter))
    {
        writer.Write(reader);
    }

    if (!createForensicIntegrityLog)
    {
        return;
    }

    // Write the forensic integrity log next to the output file (same name + ".csv").
    string logFileName = string.Format("{0}.csv", filePath);
    using (FileStream logStream = new FileStream(logFileName, FileMode.Create, FileAccess.Write, FileShare.Read))
    {
        _forensicIntegrityLog.Log(dataPacket, detectors, filePath, logStream, ForensicLogType.CopiedData);
    }
}
/// <summary>
/// Builds the configured <see cref="ContentionLoadRunner"/>.
/// </summary>
public ContentionLoadRunner Build()
{
    PreConditions.AssertNotNull(m_tests, "Tests");
    PreConditions.AssertNotNull(m_runTimeout, "expect run timeout");

    return new ContentionLoadRunner(
        m_tests,
        runTimeout: m_runTimeout.GetValueOrDefault(),
        threadPriority: m_threadPriority);
}
/// <summary>
/// Writes the configured values back to the underlying configuration element.
/// </summary>
public void Apply()
{
    Element["name"] = Name;
    Element["path"] = Path;
    // The precondition list is persisted as one comma-separated attribute value.
    Element["preCondition"] = PreConditions.Combine(",");
    Element["enableCache"] = EnableCache;
}
/// <summary>
/// Gets the results for the given <paramref name="fragment"/>, serving them from
/// the result cache when possible and rescanning otherwise; progress is reported
/// to the given <paramref name="progressReporter"/>.
/// </summary>
/// <param name="fragment">the data block or codec stream to scan</param>
/// <param name="progressReporter">the progress reporter</param>
/// <param name="dataReaderPool">The shared pool of file data readers</param>
/// <returns>the root node of the results</returns>
public IResultNode GetResults(IFragment fragment, IProgressReporter progressReporter, IDataReaderPool dataReaderPool)
{
    PreConditions.Argument("fragment").Value(fragment).IsNotNull(); // TODO: fragment.Detector can be null !!
    PreConditions.Argument("progressReporter").Value(progressReporter).IsNotNull();

    // Serve a previously cached result for this fragment if one exists.
    IResultNode cachedResults;
    if (_resultsCache.TryGetValue(fragment, out cachedResults))
    {
        return cachedResults;
    }

    // During the rescan, set the IsFragmented property as it was during the first scan,
    // to make sure the result will be the same.
    IResultNode rescannedResults = RescanDetectable(fragment, progressReporter, dataReaderPool);

    // A cancelled scan may be partial, so only complete results are cached.
    if (!progressReporter.CancellationPending)
    {
        CacheResults(fragment, rescannedResults);
    }
    return rescannedResults;
}
/// <summary>
/// Registers the sub-tasks and the precondition that make up this method.
/// </summary>
public PrepareMeatMethod1()
{
    SubTasks.Add(ExampleDomain.Instance.GetTask(DefinedTaskEnum.EatMeatTask));
    SubTasks.Add(ExampleDomain.Instance.GetTask(DefinedTaskEnum.MoveToTask));

    PreConditions.Add(ExampleDomain.Instance.GetPreCondition(DefinedPreConditionEnum.EatMeatPreCondition));
}
/// <summary>
/// Saves each of the given <paramref name="items"/> to its own file in
/// <paramref name="directory"/>, reporting overall progress and honoring
/// cancellation between items.
/// </summary>
/// <param name="items">The items to save</param>
/// <param name="detectors">The detectors used to create the items</param>
/// <param name="dataReaderPool">The shared pool of file data readers</param>
/// <param name="directory">The directory to write the files to</param>
/// <param name="progressReporter">For reporting progress and checking cancellation</param>
/// <param name="createForensicIntegrityLog">Create a forensic integrity log file along with the normal output</param>
public void Save(IEnumerable<object> items, IEnumerable<IDetector> detectors, IDataReaderPool dataReaderPool, string directory, IProgressReporter progressReporter, bool createForensicIntegrityLog)
{
    PreConditions.Argument("items").Value(items).IsNotNull().And.IsNotEmpty();
    PreConditions.Argument("detectors").Value(detectors).IsNotNull().And.DoesNotContainNull();
    PreConditions.Argument("dataReaderPool").Value(dataReaderPool).IsNotNull();
    PreConditions.Argument("directory").Value(directory).IsNotNull().And.IsNotEmpty();
    PreConditions.Argument("progressReporter").Value(progressReporter).IsNotNull();

    if (progressReporter.CancellationPending)
    {
        return;
    }

    var overallReporter = new OverallProgressReporter(progressReporter);
    overallReporter.CountNumberOfParts(items);

    var containers = new HandledContainers();
    containers.ClearHandledFragmentedContainers();

    int savedFileCount = 0;
    foreach (object item in items)
    {
        // File name is derived from the item, with illegal path characters replaced.
        string targetPath = Path.Combine(directory, ReplaceIllegalPathCharactersByUnderscore(GetFileName(item)));
        savedFileCount += SaveItem(item, dataReaderPool, targetPath, overallReporter, containers, createForensicIntegrityLog);

        if (overallReporter.CancellationPending)
        {
            break;
        }
    }
}
/// <summary>
/// Loads the most recent checkpoint for the given aggregate root, or <c>null</c>
/// when no checkpoint record exists.
/// </summary>
/// <param name="aggregateRootId">The identifier of the aggregate root</param>
/// <param name="aggregateRootType">The CLR type of the aggregate root; must implement <see cref="IEventSourcedAggregateRoot"/></param>
/// <param name="token">Cancellation token</param>
public async Task<AggregateRootCheckpoint<TPayload>> GetLatestCheckpointAsync(
    string aggregateRootId, Type aggregateRootType, CancellationToken token = default)
{
    PreConditions.NotNullOrEmpty(aggregateRootId, nameof(aggregateRootId));
    PreConditions.NotNull(aggregateRootType, nameof(aggregateRootType));
    if (!typeof(IEventSourcedAggregateRoot).IsAssignableFrom(aggregateRootType))
    {
        throw new ArgumentException($"The {aggregateRootType.FullName} cannot assign to {typeof(IEventSourcedAggregateRoot).FullName}.");
    }

    var tables = _options.Tables.DomainModelOptions;
    // Latest checkpoint = highest pkId row for this aggregate root id.
    var sql = $"SELECT d.`AggregateRootId`,d.`AggregateRootType`,d.`AggregateRootVersion`,d.`AggregateRootGeneration`,d.`CreatedTimestamp`, p.`Payload` FROM `{tables.AggregateRootCheckpointIndices}` d INNER JOIN `{tables.AggregateRootCheckpoints}` p ON d.`AggregateRootId`=p.`AggregateRootId` WHERE d.`AggregateRootId`=@AggregateRootId ORDER BY d.`pkId` DESC LIMIT 1";
    var rows = await _db.ReadAsync<AggregateRootCheckpointRecord>(sql, new
    {
        AggregateRootId = aggregateRootId,
    }, token);

    if (rows.IsNotEmpty())
    {
        return AggregateRootCheckpointRecordPortAdapter.ToCheckpoint<TPayload>(rows.FirstOrDefault(), _typeResolver, _binarySerializer);
    }
    return null;
}
/// <summary>
/// Reads up to <paramref name="count"/> bytes into <paramref name="array"/> at
/// <paramref name="arrayOffset"/>, crossing fragment boundaries as needed.
/// </summary>
/// <param name="array">the destination buffer</param>
/// <param name="arrayOffset">the offset in <paramref name="array"/> to write at</param>
/// <param name="count">the maximum number of bytes to read</param>
/// <returns>the number of bytes actually read</returns>
public int Read(byte[] array, int arrayOffset, int count)
{
    PreConditions.Object(this).IsDisposedIf(_dataReaderPool == null);
    PreConditions.Argument("array").Value(array).IsNotNull();
    PreConditions.Argument("arrayOffset").Value(arrayOffset).InRange(0, array.Length);
    PreConditions.Argument("count").Value(count).InRange(0, (array.Length - arrayOffset));

    // Clamp the request so it never runs past the end of the data packet.
    int remaining = (int)Math.Min(count, (Length - Position));

    int total = 0;
    while (total < remaining)
    {
        // Locate the fragment that covers the current read position.
        IDataPacket fragment = _dataPacket.GetFragment(_position + total);
        int chunk = (int)Math.Min((remaining - total), fragment.Length);
        int chunkRead = _dataReaderPool.ReadInputFile(fragment.InputFile, fragment.StartOffset, array, (arrayOffset + total), chunk);
        total += chunkRead;

        // Completed or cancelled if not read the _entire_ fragment
        if (chunkRead != chunk)
        {
            break;
        }
    }
    return total;
}
/// <summary>
/// Updates the stored width of the visible column <paramref name="columnName"/>
/// for <paramref name="detector"/>, flagging the project as changed when the
/// width actually differs.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">If <paramref name="columnWidth"/> is negative</exception>
/// <exception cref="ArgumentException">If the column is not a visible column</exception>
public void UpdateColumnWidth(IDetector detector, string columnName, int columnWidth)
{
    // TODO: detector should be in project; create unit test
    PreConditions.Argument("detector").Value(detector).IsNotNull();
    PreConditions.Argument("columnName").Value(columnName).IsNotNull().And.IsNotEmpty();
    if (columnWidth < 0)
    {
        throw new ArgumentOutOfRangeException("columnWidth");
    }

    List<IColumnInfo> columns;
    if (_visibleColumns.TryGetValue(detector, out columns))
    {
        for (int index = 0; index < columns.Count; index++)
        {
            IColumnInfo existing = columns[index];
            if (existing.Name != columnName)
            {
                continue;
            }

            IColumnInfo updated = existing.UpdateWidth(columnWidth);
            // Only replace and notify when the width actually changed.
            if (updated != existing)
            {
                columns[index] = updated;
                UpdateProject(ProjectChangedType.VisibleColumnsChanged, detector);
            }
            return;
        }
    }
    throw new ArgumentException(string.Format("'{0}' is not a visible column.", columnName), "columnName");
}
/// <summary>
/// Builds the configured <see cref="ICodecStream"/>; both <c>Data</c> and
/// <c>DataBlock</c> must have been set.
/// </summary>
public ICodecStream Build()
{
    PreConditions.Operation()
        .IsInvalidIf((Data == null), "Data was not set")
        .And.IsInvalidIf((DataBlock == null), "DataBlock was not set");

    return new CodecStream(this);
}
/// <summary>
/// Saves the given <paramref name="project"/>.
/// </summary>
/// <remarks>
/// Saving a project automatically updates its modification date.
/// </remarks>
/// <param name="project">the project to save</param>
public void SaveProject(IProject project)
{
    PreConditions.Argument("project").Value(project).IsNotNull();
    PreConditions.Operation().IsInvalidIf(!_projects.Contains(project), "Project is not open");

    // Update modification date
    var metadata = new Dictionary<ProjectMetadataKey, string>(project.GetMetadata());
    metadata[ProjectMetadataKey.DateLastModified] = DateTime.Now.ToString(DateTimeFormat);
    project.SetMetadata(metadata);

    // Serialize (save) the project
    try
    {
        var xmlObjectSerializer = _createXmlObjectSerializer();
        var writerSettings = new XmlWriterSettings { Indent = true };
        using (XmlWriter writer = XmlWriter.Create(project.FileName, writerSettings))
        {
            xmlObjectSerializer.WriteObject(writer, project);
        }
        project.Dirty = false;
    }
    catch (Exception exception)
    {
        // A failed save is reported to the user rather than crashing the application;
        // the project stays dirty so the user can retry.
        MessageBox.Show(exception.Message, "Failed to save the project", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Starts the metric writer: clears any queued entries and launches the write task.
/// </summary>
public void Start()
{
    PreConditions.AssertNotNullOrWhitespace(m_pathToFile, "MetricFilePath");

    ClearQueue();
    StartWriteTask();
}
/// <summary>
/// Reset one configuration item
/// </summary>
/// <param name="detector">the detector to set the configuration item on</param>
/// <param name="configurationItemKey">The configuration item to reset to its default value</param>
public static void ResetConfigurationItem(this IDetector detector, string configurationItemKey)
{
    PreConditions.Argument("detector").Value(detector).IsNotNull();
    PreConditions.Argument("configurationItemKey").Value(configurationItemKey).IsNotNull().And.IsNotEmpty();

    // Look the item up on the detector and restore its default value.
    GetConfigurationItem(detector, configurationItemKey).ResetDefault();
}
/// <summary>
/// Sorts the <paramref name="results"/> on a user selected column.
/// This function returns <paramref name="results"/> unmodified if
/// the list is already sorted
/// </summary>
/// <param name="results">the results to sort</param>
/// <param name="columnName">the name of the column to compare</param>
/// <param name="sortDirection">the sort direction</param>
/// <returns>the results list (sorted)</returns>
public static IEnumerable<IResultNode> Sort(this IEnumerable<IResultNode> results, string columnName, ListSortDirection sortDirection)
{
    PreConditions.Argument("results").Value(results).IsNotNull();
    PreConditions.Argument("columnName").Value(columnName).IsNotNull();

    DefaultColumnIndex index;
    if (DefaultColumnExtensions.TryParse(columnName, out index))
    {
        switch (index)
        {
            case DefaultColumnIndex.Name: return(results.OrderBy(r => r.Name, sortDirection));
            case DefaultColumnIndex.Detector: return(results.OrderBy(r => r.Detectors.First().Name, sortDirection));
            case DefaultColumnIndex.DetectorVersion: return(results.OrderBy(r => r.Detectors.First().VersionString(), sortDirection));
            case DefaultColumnIndex.Offset: return(results.OrderBy(r => r.StartOffset, sortDirection));
            case DefaultColumnIndex.Length: return(results.OrderBy(r => r.Length, sortDirection));
            case DefaultColumnIndex.EndOffset: return(results.OrderBy(r => r.EndOffset, sortDirection));
            case DefaultColumnIndex.File: return(results.OrderBy(r => GetResultFileName(r), sortDirection));
        }
    }
    // Unknown column: sort on the named attribute (null when the attribute is absent).
    // FIX: the attribute was previously looked up twice per result; resolve it once.
    return(results.OrderBy(r =>
    {
        var attribute = r.FindAttributeByName(columnName);
        return (attribute == null) ? null : attribute.Value;
    }, sortDirection));
}
/// <summary>
/// Aggregates the count and total payload bytes of domain events for the given
/// aggregate root at or above <paramref name="generation"/>.
/// </summary>
/// <param name="aggregateRootId">The identifier of the aggregate root</param>
/// <param name="generation">The minimum aggregate root generation (inclusive)</param>
/// <param name="token">Cancellation token</param>
public async Task<DomainEventMetrics> StatMetricsAsync(
    string aggregateRootId, int generation, CancellationToken token = default)
{
    PreConditions.NotNullOrEmpty(aggregateRootId, nameof(aggregateRootId));
    PreConditions.Nonnegative(generation, nameof(generation));

    var tables = _options.Tables.DomainEventOptions;
    var sql = $"SELECT `DomainEventPayloadBytes` AS `UnCheckpointedBytes` FROM `{tables.DomainEventIndices}` WHERE `AggregateRootId`=@AggregateRootId AND AggregateRootGeneration>=@Generation";
    var rows = await _db.ReadAsync<DomainEventMetrics>(sql, new
    {
        AggregateRootId = aggregateRootId,
        Generation = generation
    }, token);

    if (rows.IsEmpty())
    {
        return DomainEventMetrics.Empty;
    }

    // Fold the per-event rows into a single metrics object.
    var metrics = new DomainEventMetrics();
    foreach (var row in rows)
    {
        metrics.UnCheckpointedCount++;
        metrics.UnCheckpointedBytes += row.UnCheckpointedBytes;
    }
    return metrics;
}
/// <summary>
/// Sorts the <paramref name="dataBlocks"/> on a user selected column.
/// </summary>
/// <param name="dataBlocks">the data blocks to sort</param>
/// <param name="columnName">the name of the column to compare</param>
/// <param name="sortDirection">the sort direction</param>
/// <returns>the sorted data blocks</returns>
/// <exception cref="ArgumentException">If the column name is not sortable</exception>
public static IEnumerable<IDataBlock> Sort(this IEnumerable<IDataBlock> dataBlocks, string columnName, ListSortDirection sortDirection)
{
    PreConditions.Argument("dataBlocks").Value(dataBlocks).IsNotNull();
    PreConditions.Argument("columnName").Value(columnName).IsNotNull();

    DefaultColumnIndex column;
    if (DefaultColumnExtensions.TryParse(columnName, out column))
    {
        switch (column)
        {
            case DefaultColumnIndex.Name:
                // The Name column leaves the original order untouched.
                return dataBlocks;
            case DefaultColumnIndex.Detector:
                return dataBlocks.OrderBy(d => d.Detectors.First().Name, sortDirection);
            case DefaultColumnIndex.DetectorVersion:
                return dataBlocks.OrderBy(d => d.Detectors.First().VersionString(), sortDirection);
            case DefaultColumnIndex.Offset:
                return dataBlocks.OrderBy(d => d.StartOffset, sortDirection);
            case DefaultColumnIndex.Length:
                return dataBlocks.OrderBy(d => d.Length, sortDirection);
            case DefaultColumnIndex.EndOffset:
                return dataBlocks.OrderBy(d => d.EndOffset, sortDirection);
        }
    }
    throw new ArgumentException("Invalid sort column name.", "columnName");
}
/// <summary>
/// Creates a <see cref="IDataWriter"/> for writing to <paramref name="filePath"/>.
/// </summary>
/// <param name="filePath">The path of the file to write to</param>
/// <exception cref="ArgumentNullException">If <paramref name="filePath"/> is <c>null</c></exception>
public FileDataWriter(String filePath)
{
    PreConditions.Argument("filePath").Value(filePath).IsNotNull().And.IsNotEmpty();

    _buffer = new byte[DefaultBufferSize];
    // Creates (or truncates) the output file immediately.
    _outputStream = File.Create(filePath);
}
/// <summary>
/// Opens an existing project.
/// </summary>
/// <param name="path">the path for the project file</param>
/// <returns>the project</returns>
/// <exception cref="InvalidProjectException">If the file does not contain a valid project</exception>
public IProject OpenProject(string path)
{
    PreConditions.Argument("path").Value(path).IsNotNull().And.IsNotEmpty().And.IsExistingFile();
    PreConditions.Operation().IsInvalidIf(IsOpenProject(path), "Project already open");

    // Deserialize the project
    XmlObjectSerializer dataContractSerializer = _createXmlObjectSerializer();
    using (XmlReader reader = XmlReader.Create(path))
    {
        try
        {
            IProject project = dataContractSerializer.ReadObject(reader) as IProject;
            // FIX: the 'as' cast was never null-checked; deserializing an object of an
            // unexpected type registered a null project and deferred the failure to a
            // NullReferenceException in the caller.
            if (project == null)
            {
                throw new InvalidProjectException("Project is invalid.", path, null);
            }
            _projects.Add(project);
            project.ProjectChanged += OnProjectChanged;
            project.PropertyChanged += OnPropertyChanged;
            OnProjectChanged(this, new ProjectChangedEventArgs(ProjectChangedType.Opened, project));
            return project;
        }
        catch (SerializationException se)
        {
            throw new InvalidProjectException("Project is invalid.", path, se);
        }
    }
}
/// <summary>
/// Creates a new event args of the given <paramref name="type"/>.
/// </summary>
/// <param name="type">the type of change</param>
/// <param name="item">the affected item</param>
public ProjectChangedEventArgs(ProjectChangedType type, Object item)
{
    // Reject values cast from integers that are not actual enum members.
    PreConditions.Argument("type").Value(type).IsDefinedOnEnum(typeof(ProjectChangedType));

    Type = type;
    Item = item;
}
/// <summary>
/// Removes <paramref name="fragment"/> from this collection and shrinks the
/// running total length accordingly.
/// </summary>
/// <param name="fragment">the fragment to remove</param>
/// <exception cref="ArgumentNullException">If <paramref name="fragment"/> is <c>null</c></exception>
public void Remove(IFragment fragment)
{
    PreConditions.Argument("fragment").Value(fragment).IsNotNull();

    // FIX: Length was previously decremented unconditionally, corrupting the total
    // when the fragment was not actually in the collection.
    if (_fragments.Remove(fragment))
    {
        Length -= fragment.Length;
    }
}
/// <summary>
/// Returns the sub-packet starting at <paramref name="offset"/> with the given
/// <paramref name="length"/>, delegating to the first and/or second underlying
/// packet depending on where the requested range falls.
/// </summary>
/// <param name="offset">the start offset within this packet</param>
/// <param name="length">the number of bytes of the sub-packet</param>
public IDataPacket GetSubPacket(long offset, long length)
{
    PreConditions.Argument("offset").Value(offset).InRange(0L, (Length - 1L));
    PreConditions.Argument("length").Value(length).InRange(1L, (Length - offset));

    // Sub-packet is the entire packet
    if ((offset == 0) && (length == Length))
    {
        return this;
    }

    long firstPacketLength = _firstDataPacket.Length;

    // Range lies entirely within the second packet.
    if (offset >= firstPacketLength)
    {
        return _secondDataPacket.GetSubPacket((offset - firstPacketLength), length);
    }

    long relativeEndOffset = (offset + length);

    // Range lies entirely within the first packet.
    if (relativeEndOffset <= firstPacketLength)
    {
        return _firstDataPacket.GetSubPacket(offset, length);
    }

    // Range straddles the boundary: stitch the tail of the first packet to the
    // head of the second packet.
    IDataPacket head = _firstDataPacket.GetSubPacket(offset, (firstPacketLength - offset));
    IDataPacket tail = _secondDataPacket.GetSubPacket(0, (relativeEndOffset - firstPacketLength));
    return head.Append(tail);
}
/// <summary>
/// Adds <paramref name="fragment"/> to this collection and grows the running
/// total length accordingly.
/// </summary>
/// <param name="fragment">the fragment to add</param>
/// <exception cref="ArgumentNullException">If <paramref name="fragment"/> is <c>null</c></exception>
public void Add(IFragment fragment)
{
    PreConditions.Argument("fragment").Value(fragment).IsNotNull();

    _fragments.Add(fragment);
    Length += fragment.Length;
}
/// <summary>
/// Goal that is applicable while the agent's "seeLitter" world state is set,
/// and whose completion clears that state.
/// </summary>
public AIGoalPickupLitter(AIAgent agent) : base(agent)
{
    BasePriority = 1;

    PreConditions.Add("seeLitter", true);
    PostConditions.Add("seeLitter", false);
}
/// <summary>
/// Creates a monetary value of <paramref name="amount"/> in the given
/// <paramref name="currency"/>.
/// </summary>
/// <param name="amount">the amount of money</param>
/// <param name="currency">the currency; must not be <c>null</c></param>
public Money(decimal amount, Currency currency)
{
    PreConditions.NotNull(currency, nameof(currency));

    Amount = amount;
    Currency = currency;
}
/// <summary>
/// Action that is applicable while the agent's "bladder" world state is set,
/// and whose completion clears that state.
/// </summary>
public AIActionBladder(AIAgent agent) : base(agent)
{
    PreConditions.Add("bladder", true);
    PostConditions.Add("bladder", false);

    // NOTE(review): this bladder action installs AIActionStateEatFood as its single
    // state — confirm that is intentional and not a copy/paste from the eat action.
    StateMachine.AddState(new AIActionStateEatFood(agent), "ACTION");
    StateMachine.ChangeState("ACTION");
}
/// <summary>
/// Creates a new <see cref="HexWorkshopEventArgs"/>.
/// </summary>
/// <param name="filePath">the path of the file to open</param>
/// <param name="offset">the offset in the file</param>
public HexWorkshopEventArgs(string filePath, long offset)
{
    PreConditions.Argument("filePath").Value(filePath).IsNotNull();
    PreConditions.Argument("offset").Value(offset).IsNotNegative();

    FilePath = filePath;
    Offset = offset;
}