/// <summary>
/// Populates hash, PE/authenticode, file-info, YARA, and entropy properties for a large file
/// whose bytes are streamed lazily in chunks from the underlying MFT node.
/// </summary>
/// <param name="parameters">Enumeration options; supplies the YARA rule set and the entropy flag.</param>
/// <param name="node">MFT node whose raw bytes are streamed via <c>GetBytes()</c>.</param>
/// <param name="hasFileReadPermissions">True when the file can be opened through the normal file API
/// (enables PE parsing, file-info/shell-info population, and YARA scanning).</param>
private void PopulateLargeFile(FileEnumeratorParameters parameters, INode node, bool hasFileReadPermissions)
{
    // NOTE: fileChunks is a lazy enumerable — each hash/entropy pass below re-enumerates it,
    // i.e. re-reads the file. Deliberate for large files to avoid buffering them in memory.
    IEnumerable<byte[]> fileChunks = node.GetBytes();

    _timingMetrics.Start(TimingMetric.FileHashing);
    try
    {
        this.Sha256 = Hash.ByteEnumerable.Sha256(fileChunks);

        if (hasFileReadPermissions)
        {
            // PE files expose precomputed SHA1/MD5; fall back to streaming hashes otherwise.
            this._peData = PeDataObject.TryGetPeDataObject(FullPath);
            if (_peData != null)
            {
                this.SHA1 = _peData.SHA1Hash;
                this.MD5 = _peData.MD5Hash;
            }
            else
            {
                this.SHA1 = Hash.ByteEnumerable.Sha1(fileChunks);
                this.MD5 = Hash.ByteEnumerable.MD5(fileChunks);
            }
        }
    }
    finally
    {
        // BUGFIX: Stop was previously skipped when hashing threw, leaving the metric running.
        _timingMetrics.Stop(TimingMetric.FileHashing);
    }

    if (_peData != null)
    {
        this._authenticode = AuthenticodeData.GetAuthenticodeData(this._peData.Certificate);
    }

    CancellationHelper.ThrowIfCancelled();

    if (hasFileReadPermissions)
    {
        PopulateFileInfoProperties(FullPath);
        PopulateShellFileInfo(FullPath);
        CancellationHelper.ThrowIfCancelled();
    }

    if (hasFileReadPermissions)
    {
        Rules compiledRules = GetCompiledYaraRules(parameters.YaraParameters);
        if (compiledRules != null)
        {
            _timingMetrics.Start(TimingMetric.YaraScanning);
            try
            {
                this._yaraRulesMatched = YaraHelper.ScanFile(FullPath, compiledRules);
            }
            finally
            {
                // BUGFIX: ensure the scanning metric stops even if ScanFile throws.
                _timingMetrics.Stop(TimingMetric.YaraScanning);
            }
            // (Removed dead `compiledRules = null;` — the local goes out of scope here anyway.)
        }
        CancellationHelper.ThrowIfCancelled();
    }

    if (parameters.CalculateEntropy) // Should we calculate entropy on really large files?
    {
        _timingMetrics.Start(TimingMetric.CalculatingEntropy);
        try
        {
            this.Entropy = EntropyHelper.CalculateFileEntropy(fileChunks, this.Length);
        }
        finally
        {
            // BUGFIX: ensure the entropy metric stops even if the calculation throws.
            _timingMetrics.Stop(TimingMetric.CalculatingEntropy);
        }
        CancellationHelper.ThrowIfCancelled();
    }
}
/// <summary>
/// Populates hash, PE/authenticode, file-info, YARA, and entropy properties for a large file
/// whose bytes are streamed lazily in chunks from the underlying MFT node. Each phase is
/// timed via a scoped <c>TimingMetrics</c> instance (disposal stops the timer, exception-safe).
/// </summary>
/// <param name="parameters">Enumeration options; supplies the YARA rules, the entropy flag,
/// and an optional exception-report callback.</param>
/// <param name="node">MFT node whose raw bytes are streamed via <c>GetBytes()</c>.</param>
/// <param name="hasFileReadPermissions">True when the file can be opened through the normal file API
/// (enables PE parsing, file-info/shell-info population, and YARA scanning).</param>
private void PopulateLargeFile(FileEnumeratorParameters parameters, INode node, bool hasFileReadPermissions)
{
    // NOTE: fileChunks is a lazy enumerable — each hash/entropy pass below re-enumerates it,
    // i.e. re-reads the file. Deliberate for large files to avoid buffering them in memory.
    IEnumerable<byte[]> fileChunks = null;
    using (new TimingMetrics(TimingMetric.ReadingMFTBytes))
    {
        fileChunks = node.GetBytes();
    }

    using (new TimingMetrics(TimingMetric.FileHashing))
    {
        this.Sha256 = Hash.ByteEnumerable.Sha256(fileChunks);

        if (hasFileReadPermissions)
        {
            // PE files expose precomputed SHA1/MD5; fall back to streaming hashes otherwise.
            this._peData = PeDataObject.TryGetPeDataObject(FullPath);
            if (_peData != null)
            {
                this.SHA1 = _peData.SHA1Hash;
                this.MD5 = _peData.MD5Hash;
            }
            else
            {
                this.SHA1 = Hash.ByteEnumerable.Sha1(fileChunks);
                this.MD5 = Hash.ByteEnumerable.MD5(fileChunks);
            }
        }
    }

    using (new TimingMetrics(TimingMetric.MiscFileProperties))
    {
        if (_peData != null)
        {
            this._authenticode = AuthenticodeData.GetAuthenticodeData(this._peData.Certificate);
        }

        CancellationHelper.ThrowIfCancelled();

        if (hasFileReadPermissions)
        {
            PopulateFileInfoProperties(FullPath);
        }
    }

    if (hasFileReadPermissions)
    {
        PopulateShellFileInfo(FullPath);
        CancellationHelper.ThrowIfCancelled();
    }

    if (hasFileReadPermissions)
    {
        YSScanner compiledRules = GetCompiledYaraRules(parameters);
        if (compiledRules != null)
        {
            using (new TimingMetrics(TimingMetric.YaraScanning))
            {
                try
                {
                    _yaraRulesMatched = YaraHelper.ScanFile(FullPath, compiledRules);
                }
                catch (Exception ex)
                {
                    // BUGFIX: null-conditional invoke — a missing report callback previously
                    // caused a NullReferenceException inside this catch, masking the YARA error.
                    parameters.ReportExceptionFunction?.Invoke(nameof(PopulateLargeFile), string.Empty, ex);
                }
            }
        }
        CancellationHelper.ThrowIfCancelled();
    }

    if (parameters.CalculateEntropy) // Should we calculate entropy on really large files?
    {
        using (new TimingMetrics(TimingMetric.CalculatingEntropy))
        {
            this.Entropy = EntropyHelper.CalculateFileEntropy(fileChunks, this.Length);
        }
        CancellationHelper.ThrowIfCancelled();
    }
}