protected override void ProcessRecord()
{
    // When a specific metadata format was requested, the file's extension must match it.
    if (_expectedExtension != null)
    {
        // ReSharper disable once PossibleNullReferenceException
        var fileExtension = Path.GetExtension(AudioFile.Path);
        var extensionMatches = fileExtension.Equals(_expectedExtension, StringComparison.OrdinalIgnoreCase);
        if (!extensionMatches)
        {
            var mismatchError = new ErrorRecord(
                new ArgumentException(
                    $"The '{Format}' metadata encoder cannot be used with '{fileExtension}' files."),
                nameof(ArgumentException),
                ErrorCategory.InvalidArgument,
                AudioFile);
            WriteError(mismatchError);
            return;
        }
    }

    try
    {
        // Honor -WhatIf / -Confirm before persisting anything.
        // ReSharper disable twice PossibleNullReferenceException
        if (ShouldProcess(AudioFile.Path))
        {
            AudioFile.SaveMetadata(SettingAdapter.ParametersToSettings(_parameters));
            ProcessLogMessages();
        }
    }
    catch (AudioUnsupportedException e)
    {
        // Surface unsupported-audio failures as non-terminating errors.
        WriteError(new ErrorRecord(e, e.GetType().Name, ErrorCategory.InvalidData, AudioFile));
    }

    // -PassThru echoes the (possibly updated) file back to the pipeline.
    if (PassThru)
        WriteObject(AudioFile);
}
public object GetDynamicParameters()
{
    // The Encoder parameter may not be bound yet when PowerShell asks for dynamic parameters.
    if (Encoder == null)
        return null;

    // Expose the selected encoder's settings as dynamic cmdlet parameters.
    _parameters = SettingAdapter.SettingInfoToParameters(
        AudioEncoderManager.GetSettingInfo(Encoder));
    return _parameters;
}
public object GetDynamicParameters()
{
    // An explicit -Format takes precedence over inferring the format from the file.
    if (Format != null)
    {
        _parameters = SettingAdapter.SettingInfoToParameters(
            AudioMetadataEncoderManager.GetSettingInfoByFormat(Format));
        return _parameters;
    }

    // The AudioFile parameter may not be bound yet when PowerShell asks for dynamic parameters.
    if (AudioFile == null)
        return null;

    // Otherwise derive the applicable settings from the file's extension.
    _parameters = SettingAdapter.SettingInfoToParameters(
        AudioMetadataEncoderManager.GetSettingInfoByExtension(Path.GetExtension(AudioFile.Path)));
    return _parameters;
}
protected override void EndProcessing()
{
    // Encodes all accumulated source files in parallel, relaying progress records and
    // log messages to the pipeline thread through a blocking queue, then writes the
    // encoded results (or rethrows the underlying failure).
    // ReSharper disable once AssignNullToNotNullAttribute
    var encoder = new AudioFileEncoder(
        Encoder,
        SessionState.Path.GetUnresolvedProviderPathFromPSPath(Path),
        Name,
        SettingAdapter.ParametersToSettings(_parameters))
    {
        Overwrite = Replace,
        MaxDegreeOfParallelism = MaxDegreeOfParallelism
    };

    var activity = $"Encoding {_sourceAudioFiles.Count} audio files in {Encoder} format";
    // Total frame count across all inputs; used as the denominator for percent-complete.
    var totalFrames = (double) _sourceAudioFiles.Sum(audioFile => audioFile.Info.FrameCount);
    var lastAudioFilesCompleted = 0;
    var lastPercentComplete = 0;

    using (var messageQueue = new BlockingCollection<object>())
    {
        // Progress callbacks may arrive on worker threads; they only enqueue messages here,
        // and the pipeline thread drains the queue via OutputMessages below.
        var progress = new SimpleProgress<ProgressToken>(token =>
        {
            var percentComplete = (int) Math.Round(token.FramesCompleted / totalFrames * 100);

            // Only report progress if something has changed
            if (percentComplete <= lastPercentComplete &&
                token.AudioFilesCompleted <= lastAudioFilesCompleted)
                return;

            lastAudioFilesCompleted = token.AudioFilesCompleted;
            lastPercentComplete = percentComplete;

            // ReSharper disable once AccessToDisposedClosure
            messageQueue.Add(new ProgressRecord(0, activity,
                $"{token.AudioFilesCompleted} of {_sourceAudioFiles.Count} audio files encoded")
            {
                // If the audio files have estimated frame counts, make sure this doesn't go over 100%
                PercentComplete = Math.Min(percentComplete, 100)
            });

            // Send any new log messages to the output queue
            while (LoggerProvider.TryDequeueMessage(out var logMessage))
                // ReSharper disable once AccessToDisposedClosure
                messageQueue.Add(logMessage);
        });

        var encodeTask = encoder.EncodeAsync(_sourceAudioFiles, _cancellationSource.Token, progress);
        // Completing the queue (success or failure) unblocks OutputMessages on the pipeline thread.
        // ReSharper disable once AccessToDisposedClosure
        encodeTask.ContinueWith(task => messageQueue.CompleteAdding(), TaskScheduler.Current);

        // Drains messageQueue until CompleteAdding fires, so the task is finished by the
        // time .Result is read below (no additional blocking wait).
        this.OutputMessages(messageQueue, _cancellationSource.Token);

        try
        {
            WriteObject(encodeTask.Result, true);
        }
        catch (AggregateException e)
        {
            // Unwrap the task's AggregateException so callers see the real failure.
            throw e.GetBaseException();
        }
    }
}
protected override void EndProcessing()
{
    // Analyzes all accumulated audio files in parallel, relaying progress records and
    // log messages to the pipeline thread through a blocking queue, then optionally
    // passes the analyzed files through.
    // ReSharper disable once AssignNullToNotNullAttribute
    var analyzer = new AudioFileAnalyzer(Analyzer, SettingAdapter.ParametersToSettings(_parameters))
    {
        MaxDegreeOfParallelism = MaxDegreeOfParallelism
    };

    var activity = $"Performing {Analyzer} analysis on {_audioFiles.Count} audio files";
    // Total frame count across all inputs; used as the denominator for percent-complete.
    var totalFrames = (double) _audioFiles.Sum(audioFile => audioFile.Info.FrameCount);
    var lastAudioFilesCompleted = 0;
    var lastPercentComplete = 0;

    using (var messageQueue = new BlockingCollection<object>())
    {
        // Progress callbacks may arrive on worker threads; they only enqueue messages here,
        // and the pipeline thread drains the queue via OutputMessages below.
        var progress = new SimpleProgress<ProgressToken>(token =>
        {
            var percentComplete = (int) Math.Round(token.FramesCompleted / totalFrames * 100);

            // Avoid reporting progress when nothing has changed
            if (percentComplete <= lastPercentComplete &&
                token.AudioFilesCompleted <= lastAudioFilesCompleted)
                return;

            lastAudioFilesCompleted = token.AudioFilesCompleted;
            lastPercentComplete = percentComplete;

            // ReSharper disable once AccessToDisposedClosure
            messageQueue.Add(new ProgressRecord(0, activity,
                $"{token.AudioFilesCompleted} of {_audioFiles.Count} audio files analyzed")
            {
                // If the audio files have estimated frame counts, make sure this doesn't go over 100%
                PercentComplete = Math.Min(percentComplete, 100)
            });

            // Send any new log messages to the output queue
            while (LoggerProvider.TryDequeueMessage(out var logMessage))
                // ReSharper disable once AccessToDisposedClosure
                messageQueue.Add(logMessage);
        });

        var analyzeTask = analyzer.AnalyzeAsync(_audioFiles, _cancellationSource.Token, progress);
        // Completing the queue (success or failure) unblocks OutputMessages on the pipeline thread.
        // ReSharper disable once AccessToDisposedClosure
        analyzeTask.ContinueWith(task => messageQueue.CompleteAdding(), TaskScheduler.Current);

        // Drains messageQueue until CompleteAdding fires, so the task is finished by the
        // time its Exception property is inspected below.
        this.OutputMessages(messageQueue, _cancellationSource.Token);

        // Unwrap the task's AggregateException so callers see the real failure.
        if (analyzeTask.Exception != null)
            throw analyzeTask.Exception.GetBaseException();
    }

    // -PassThru echoes the analyzed files back to the pipeline.
    if (PassThru)
        WriteObject(_audioFiles, true);
}