Example #1
        public override bool assignScan(out ICustomScan newscan)
        {
            newscan = null;
            //Console.WriteLine("Attempting assignment; " + MS1_outstanding.ToString());

            if (scan_queue.IsEmpty && MS1_tardiness.Elapsed > MS1_expiration_span)
            {
                //Debug.Assert(false);
                Console.WriteLine("ERRANT MS1, resubmitting MS1.");
                log_write("MS1 TIMEOUT AT " + RunTimer.Elapsed.ToString());
                MS1_outstanding = false;
            }

            //ICustomScan newscan;
            if (!scan_queue.TryDequeue(out newscan) && !MS1_outstanding)
            {
                GenerateMS1();
                MS1_outstanding = true;
                MS1_tardiness.Restart();
                Debug.Assert(scan_queue.Count == 1);
                // Dequeue outside Debug.Assert: the assert call (and its side effect) is
                // compiled out of release builds, which would leave newscan null.
                bool dequeued = scan_queue.TryDequeue(out newscan);
                Debug.Assert(dequeued);
                Console.WriteLine("Generated MS1.");
                log_write("Generated MS1.");
            }
            if (newscan == null)
            {
                Console.Write(".");
                //log_write("Waiting for assignment. " + MS1_tardiness.Elapsed.ToString());
                logfile.Flush();
                return false;
            }
            else
            {
                ScanType scantype = ScanType.Unknown;
                ScanID.TryGetValue(newscan.RunningNumber, out scantype);
                Console.WriteLine("Assigning " + scantype.ToString() + " " + newscan.RunningNumber.ToString());
                log_write("Assigning " + scantype.ToString() + " " + newscan.RunningNumber.ToString());
                logfile.Flush();
                return true;
            }
        }
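
A note on context: the examples in this collection use several unrelated ScanType enums (mass-spectrometry scan classes, SCSI scan types, inventory scan categories, video scan types, and so on). A plausible sketch of the enum implied by the mass-spectrometry examples, with member names taken from their switch statements and numeric values assumed:

        public enum ScanType
        {
            Unknown = 0,   // unclassified or instrument-generated scans
            MS1,           // full precursor (survey) scan
            MS2,           // fragmentation scan of a selected precursor
            Auto,          // instrument-initiated scan
            BoxMS1_1,      // segmented ("boxed") MS1 scans, merged in a later example
            BoxMS1_2,
            BoxMS1_3
        }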
Example #2
        public override void receiveScan(IMsScan scanData)
        {
            //IInfoContainer info = scanData.CommonInformation;
            //IInfoContainer more_info = scanData.SpecificInformation;
            //MsScanInformationSource infosource = MsScanInformationSource.Unknown;
            object   obj_holder = null;
            ScanType scan_id    = ScanType.Unknown;
            int      scannum    = -1;

            //if (info.TryGetRawValue("Scan", out obj_holder, ref infosource))
            //{
            //    scannum = (int)obj_holder;
            //    if (ScanID.ContainsKey(scannum)) // Auto-scans also have Scan numbers!
            //    {
            //        scan_id = ScanID[scannum];
            //        ScanID.Remove(scannum);
            //    }
            //}
            //object accessid = null;
            if (scanData.SpecificInformation.TryGetRawValue("Access Id:", out obj_holder))
            {
                scannum = (int)obj_holder;
                if (ScanID.ContainsKey(scannum))
                {
                    scan_id = ScanID[scannum];
                    ScanID.Remove(scannum);
                }
            }

            //Debug.Assert(false);
            if (scannum >= ScanCount)
            {
                Console.WriteLine("Adjusting ScanCount: " + scannum.ToString() + " " + ScanCount.ToString());
                log_write("Adjusting ScanCount: " + scannum.ToString() + " " + ScanCount.ToString());
                ScanCount = scannum + 1;
            }
            //else if (scannum + 100 < ScanCount)
            //{
            //    Console.WriteLine("Adjusting ScanCount DOWNWARD: " + scannum.ToString() + " " + ScanCount.ToString());
            //    log_write("Adjusting ScanCount DOWNWARD: " + scannum.ToString() + " " + ScanCount.ToString());
            //    ScanCount = scannum + 1;
            //}

            Console.WriteLine("Received a " + scan_id.ToString() + " " + scannum.ToString());
            log_write("Received a " + scan_id.ToString() + " " + scannum.ToString());

            switch (scan_id)
            {
            case ScanType.MS2:
            case ScanType.Auto:
            case ScanType.Unknown:
                return;

            case ScanType.MS1:
                MS1_outstanding = false;

                IEnumerable <Tuple <double, double> >               scan             = Spectral_Processing.ConvertCentroids(scanData.Centroids);
                List <Tuple <int, Tuple <double, double> > >        molecular_peaks  = Spectral_Processing.deisotope_scan(scan);
                IEnumerable <Tuple <int, Tuple <double, double> > > valid_precursors = molecular_peaks.Where(x => x.Item1 > 1);

                IEnumerable <Tuple <int, Tuple <double, double> > > precursors_by_int = valid_precursors.OrderByDescending(OrderPeak);

                GenerateMS2s(precursors_by_int);
                Console.WriteLine("MS1 stats: " + scan.Count() + " " + valid_precursors.Count());
                log_write("MS1 stats: " + scan.Count() + " " + valid_precursors.Count());
                return;

            default:
                Debug.Assert(false);
                return;
            }
        }
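
The lookup above only works if whatever creates custom scans records each scan's running number up front. A purely hypothetical illustration of that bookkeeping (RegisterScan is not in the source; GenerateMS1/GenerateMS2s presumably do something equivalent):

        private void RegisterScan(ICustomScan scan, ScanType type)
        {
            ScanID[scan.RunningNumber] = type; // receiveScan removes this entry when the spectrum arrives
            scan_queue.Enqueue(scan);          // assignScan hands the queued scan to the instrument
        }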
Example #3
 private string GetScanTaskId(ScanType type, Event scanEvent)
 {
     return string.Format("{0}{1}", type.ToString(), scanEvent.Id);
 }
Example #4
        private string GetScanTaskParamString(ScanType type, Event scanEvent)
        {
            string resultCollection = type.ToString();
            if (type == ScanType.Official)
            {
                resultCollection = MatchScanner.OfficialResultsCollectionId;
            }
            else if (type == ScanType.Manual)
            {
                // Manual uses a random result collection.
                resultCollection = Guid.NewGuid().ToString("N");
            }

            string gameIdString = scanEvent.Game.GameId.ToString();

            DateTime? startTime;
            if (type == ScanType.TestPass 
                || type == ScanType.Manual)
            {
                startTime = Time.UtcNow;
            }
            else
            {
                startTime = null;
            }

            TimeSpan refreshTime = scanEvent.Game.MatchHistoryRefreshTime;

            return String.Format("{0};{1};{2};{3};{4};{5};{6};{7};{8}", 
                scanEvent.Id, 
                resultCollection, 
                scanEvent.LiveStart.ToFileTimeUtc().ToString(),
                scanEvent.EventEnd.ToFileTimeUtc().ToString(), 
                gameIdString, 
                type.ToString(), 
                startTime.HasValue ? startTime.Value.ToFileTimeUtc().ToString() : "null",
                refreshTime.Ticks.ToString(),
                scanEvent.Game.NumMatchesPerQuery);
        }
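
For illustration, a hypothetical counterpart that a scan worker could use to unpack the parameter string built above. The field order mirrors the String.Format call; the ScanTaskParams type and the parser itself are assumptions rather than part of the source, and the event and game ids are kept as strings:

        private sealed class ScanTaskParams
        {
            public string EventId;
            public string ResultCollection;
            public DateTime LiveStart;
            public DateTime EventEnd;
            public string GameId;
            public ScanType Type;
            public DateTime? StartTime;   // "null" in the string for scan types with no start time
            public TimeSpan RefreshTime;
            public int NumMatchesPerQuery;
        }

        private static ScanTaskParams ParseScanTaskParamString(string paramString)
        {
            string[] parts = paramString.Split(';');
            return new ScanTaskParams
            {
                EventId            = parts[0],
                ResultCollection   = parts[1],
                LiveStart          = DateTime.FromFileTimeUtc(long.Parse(parts[2])),
                EventEnd           = DateTime.FromFileTimeUtc(long.Parse(parts[3])),
                GameId             = parts[4],
                Type               = (ScanType)Enum.Parse(typeof(ScanType), parts[5]),
                StartTime          = parts[6] == "null" ? (DateTime?)null : DateTime.FromFileTimeUtc(long.Parse(parts[6])),
                RefreshTime        = TimeSpan.FromTicks(long.Parse(parts[7])),
                NumMatchesPerQuery = int.Parse(parts[8])
            };
        }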
Example #5
        /// <summary>
        /// Issues the SCSI SCAN command to the device, starting at the given logical block address.
        /// </summary>
        /// <param name="dir">Direction in which to scan.</param>
        /// <param name="lba">Logical block address at which the scan starts.</param>
        /// <param name="type">Type of scan to perform; must not be ScanType.Reserved.</param>
        /// <returns>The command status returned by the device.</returns>
        public CommandStatus Scan(ScanDirection dir, uint lba, ScanType type)
        {
            if (m_logger != null)
            {
                string args = dir.ToString() + ", " + lba.ToString() + ", " + type.ToString();
                m_logger.LogMessage(new UserMessage(UserMessage.Category.Debug, 8, "Bwg.Scsi.Device.Scan(" + args + ")"));
            }

            if (type == ScanType.Reserved)
                throw new ArgumentException("type parameter must not be ScanType.Reserved", "type");

            using (Command cmd = new Command(ScsiCommandCode.Scan, 12, 0, Command.CmdDirection.None, 10))
            {
                if (dir == ScanDirection.Reverse)
                    cmd.SetCDB8(1, 0x10);
                cmd.SetCDB32(2, lba);
                cmd.SetCDB8(9, (byte)((byte)type << 6));

                CommandStatus st = SendCommand(cmd);
                if (st != CommandStatus.Success)
                    return st;
            }

            return CommandStatus.Success;
        }
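
For reference, a standalone sketch of the 12-byte CDB that the Command calls above appear to assemble, modelled on the MMC SCAN command. The opcode value, big-endian LBA layout, and bit positions are assumptions based on that specification rather than on the Bwg.Scsi implementation:

        private static byte[] BuildScanCdb(ScanDirection dir, uint lba, ScanType type)
        {
            byte[] cdb = new byte[12];
            cdb[0] = 0xBA;                    // SCAN operation code (assumed)
            if (dir == ScanDirection.Reverse)
                cdb[1] = 0x10;                // direction bit: scan toward the start of the medium
            cdb[2] = (byte)(lba >> 24);       // starting LBA, big-endian
            cdb[3] = (byte)(lba >> 16);
            cdb[4] = (byte)(lba >> 8);
            cdb[5] = (byte)lba;
            cdb[9] = (byte)((byte)type << 6); // scan type in the top two bits of byte 9
            return cdb;
        }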
Example #6
        /// <summary>
        /// Initiates a scan.
        /// </summary>
        /// <param name="type">
        /// The type of scan to start.
        /// </param>
        /// <param name="scanEvent">
        /// The event for which this scan is taking place.
        /// </param>
        /// <returns>
        /// An <see cref="InitiateScanResult"/> indicating whether the scan was started,
        /// was already running, or had already completed for the given event.
        /// </returns>
        public async Task<InitiateScanResult> InitiateScanAsync(ScanType type, Event scanEvent)
        {
            string scanTaskId = GetScanTaskId(type, scanEvent);
            string scanTaskParamString = GetScanTaskParamString(type, scanEvent);
            string scanTaskType = GetScanTaskType();
            int scanTaskPriority = GetScanTaskPriority(type);

            Log.TraceInformation("Initiating Scan, id={0}, type={1}, priority={2}, paramString={3}", scanTaskId, scanTaskType, scanTaskPriority, scanTaskParamString);

            try
            {
                IDistributedTaskParticipationHandle handle = await _taskManager.JoinOrBeginAsync(scanTaskId, scanTaskType, scanTaskPriority, scanTaskParamString, _workerId + "-initiator-" + type.ToString()).ConfigureAwait(false);

                // We immediately leave after starting the task.  This component isn't responsible
                // for participating in scans; all we need to do is initiate them.  ScanMonitors will
                // pick this up and begin working.
                await handle.LeaveAsync(ParticipantState.Departed).ConfigureAwait(false);
                return handle.WasTaskStarted ? InitiateScanResult.ScanWasStarted : InitiateScanResult.ScanWasAlreadyRunning;
            }
            catch (TaskIsOverException)
            {
                Log.TraceInformation("Scan has already ended");

                // This worker just popped up during a time when a scan is needed.  However,
                // the scan was already completed by other workers.  It's tempting to assert here
                // that this isn't an official run, but the problem is we could technically start
                // just moments before the run finished and hit this condition anyway.
                return InitiateScanResult.ScanWasAlreadyComplete;
            }
        }
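
A hedged usage sketch showing how a caller might react to the three result values. The enum members and Log.TraceInformation come from the example above; the ScheduleScanAsync wrapper itself is hypothetical:

        public async Task ScheduleScanAsync(ScanType type, Event scanEvent)
        {
            InitiateScanResult result = await InitiateScanAsync(type, scanEvent).ConfigureAwait(false);
            switch (result)
            {
                case InitiateScanResult.ScanWasStarted:
                    Log.TraceInformation("Started {0} scan for event {1}", type, scanEvent.Id);
                    break;
                case InitiateScanResult.ScanWasAlreadyRunning:
                    Log.TraceInformation("{0} scan for event {1} is already running", type, scanEvent.Id);
                    break;
                case InitiateScanResult.ScanWasAlreadyComplete:
                    Log.TraceInformation("{0} scan for event {1} has already completed", type, scanEvent.Id);
                    break;
            }
        }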
Example #7
        public override void receiveScan(IMsScan scanData)
        {
            //IInfoContainer info = scanData.CommonInformation;
            //IInfoContainer more_info = scanData.SpecificInformation;
            //MsScanInformationSource infosource = MsScanInformationSource.Unknown;
            object   obj_holder = null;
            ScanType scan_id    = ScanType.Unknown;
            int      scannum    = -1;

            //if (info.TryGetRawValue("Scan", out obj_holder, ref infosource))
            //{
            //    scannum = (int)obj_holder;
            //    if (ScanID.ContainsKey(scannum)) // Auto-scans also have Scan numbers!
            //    {
            //        scan_id = ScanID[scannum];
            //        ScanID.Remove(scannum);
            //    }
            //}
            //object accessid = null;
            if (scanData.SpecificInformation.TryGetRawValue("Access Id:", out obj_holder))
            {
                scannum = (int)obj_holder;
                if (ScanID.ContainsKey(scannum))
                {
                    scan_id = ScanID[scannum];
                    ScanID.Remove(scannum);
                }
            }

            if (scannum > ScanCount)
            {
                Console.WriteLine("Adjusting ScanCount: " + scannum.ToString() + " " + ScanCount.ToString());
                log_write("Adjusting ScanCount: " + scannum.ToString() + " " + ScanCount.ToString());
                ScanCount = scannum + 1;
            }
            //else if (scannum + 100 < ScanCount)
            //{
            //    Console.WriteLine("Adjusting ScanCount DOWNWARD: " + scannum.ToString() + " " + ScanCount.ToString());
            //    log_write("Adjusting ScanCount DOWNWARD: " + scannum.ToString() + " " + ScanCount.ToString());
            //    ScanCount = scannum + 1;
            //}

            Console.WriteLine("Received a " + scan_id.ToString() + " " + scannum.ToString());
            log_write("Received a " + scan_id.ToString() + " " + scannum.ToString());

            switch (scan_id)
            {
            case ScanType.MS1:
            case ScanType.MS2:
            case ScanType.Auto:
            case ScanType.Unknown:
                // Only Box-MS1 scans are being used for precursor assignment.
                // Other scans are merely written to the file automatically.
                return;

            case ScanType.BoxMS1_1:
            case ScanType.BoxMS1_2:
            case ScanType.BoxMS1_3:
                //MS1_outstanding -= 1;
                Debug.Assert(!BoxAggregation.ContainsKey(scan_id));
                BoxAggregation.Add(scan_id, Spectral_Processing.ConvertCentroids(scanData.Centroids).ToList());
                Console.WriteLine("Received MS1 " + MS1_outstanding.ToString() + " " + BoxAggregation.Count().ToString());


                if (BoxAggregation.Count() >= 3)
                {
                    log_write("Processing boxes. " + string.Join(", ", BoxAggregation.Keys));

                    MS1_outstanding = false;

                    IEnumerable <Peak> scan = BoxesToMS1();

                    List <Peak>        molecular_peaks  = Spectral_Processing.deisotope_scan(scan);
                    IEnumerable <Peak> valid_precursors = molecular_peaks.Where(x => x.Item1 > 1);

                    IEnumerable <Peak> precursors_by_int = valid_precursors.OrderByDescending(OrderPeak);

                    GenerateMS2s(precursors_by_int);
                }
                return;

            default:
                Debug.Assert(false);
                return;
            }
        }
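
BoxesToMS1 is not shown in the source. One plausible, purely hypothetical reading is that it stitches the collected box spectra back into a single MS1 peak list; the real implementation may also sort by m/z or trim overlapping box edges:

        private IEnumerable<Peak> BoxesToMS1()
        {
            // Concatenate the peak lists stored in BoxAggregation, one per BoxMS1 scan.
            return BoxAggregation.Values.SelectMany(box => box);
        }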
Example #8
        private void GetListOfFiles(ScanType scanType)
        {
            String searchPattern = "";
            int    filesScanned  = 0;

            List <String> folderList = null;

            switch (scanType)
            {
            case ScanType.Tape:
                folderList    = Configuration.Persistent.TapeFolders;
                searchPattern = "*.ta?";
                break;

            case ScanType.Disk:
                folderList    = Configuration.Persistent.DiskFolders;
                searchPattern = "*.dsk";
                break;

            case ScanType.ROM:
                folderList    = Configuration.Persistent.RomFolders;
                searchPattern = "*.rom";
                break;

            case ScanType.Other:
                folderList    = Configuration.Persistent.OtherFilesFolders;
                searchPattern = "*.*";
                break;
            }

            if (folderList == null)
            {
                // Unknown scan type: nothing to search, and it avoids a null dereference below.
                return;
            }

            lblInfo.Text     = String.Format("Searching folders for {0} files...", scanType.ToString());
            lblFile.Text     = "";
            lblProgress.Text = "";

            ctlProgressBar.PercentageValue = 0;

            Application.DoEvents();

            foreach (string directory in folderList)
            {
                DirectoryInfo directoryInfo = new DirectoryInfo(directory);

                if (directoryInfo.Exists)
                {
                    FileInfo[] fileInfoList = directoryInfo.GetFiles(searchPattern, SearchOption.AllDirectories);

                    if (fileInfoList != null)
                    {
                        lblInfo.Text = String.Format("Loading {0} files...", scanType.ToString());
                        Application.DoEvents();

                        foreach (FileInfo fileInfo in fileInfoList)
                        {
                            lblFile.Text = Path.GetFileNameWithoutExtension(fileInfo.FullName);

                            AddFileToTree(scanType, fileInfo, directoryInfo.FullName);

                            filesScanned++;

                            float percentage = (100 / (float)fileInfoList.Length) * filesScanned;
                            ctlProgressBar.PercentageValue = (int)percentage;

                            lblProgress.Text = String.Format("{0:N0} of {1:N0} ({2:N1}%)", filesScanned, fileInfoList.Length, percentage);

                            Application.DoEvents();

                            if (cancelScan)
                            {
                                break;
                            }
                        }
                    }
                }
            }
        }
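
As a design aside, the switch above pairs each ScanType with a search pattern; a hypothetical table-driven equivalent keeps that mapping in one place (the folder lists still come from Configuration.Persistent, so only the patterns are tabulated here):

        private static readonly Dictionary<ScanType, string> SearchPatterns = new Dictionary<ScanType, string>
        {
            { ScanType.Tape,  "*.ta?" },
            { ScanType.Disk,  "*.dsk" },
            { ScanType.ROM,   "*.rom" },
            { ScanType.Other, "*.*"   }
        };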
Example #9
        public void SubmitNewScanItem(string assetTag, ScanType scanType)
        {
            if (currentScan == null)
            {
                scannerInput?.BadScan();
                OnExceptionOccured(new ScanNotStartedException());
                return;
            }

            using (var itemDetail = DetailOfAsset(assetTag))
            {
                if (itemDetail.Rows.Count < 1)
                {
                    scannerInput?.BadScan();
                    OnExceptionOccured(new ItemNotFoundException(assetTag));
                    return;
                }

                var itemRow = itemDetail.Rows[0];

                // Reject the scan if the item has already been scanned.
                if (!string.IsNullOrEmpty(itemRow[ScanItemsTable.ScanStatus].ToString()))
                {
                    scannerInput?.BadScan();
                    OnExceptionOccured(new DuplicateScanException());
                    return;
                }

                bool locationMismatch = false;

                // Check whether the scan location matches the location in inventory.
                // Set the scan status accordingly; a mismatch exception is raised after the scan is recorded.
                if (itemRow[MunisFixedAssetTable.Location].ToString() != currentScan.MunisLocation.MunisCode)
                {
                    locationMismatch = true;
                    itemRow[ScanItemsTable.ScanStatus] = ScanStatus.LocationMismatch.ToString();
                }
                else
                {
                    locationMismatch = false;
                    itemRow[ScanItemsTable.ScanStatus] = ScanStatus.OK.ToString();
                }

                itemRow[ScanItemsTable.Location]  = currentScan.MunisLocation.MunisCode;
                itemRow[ScanItemsTable.ScanType]  = scanType.ToString();
                itemRow[ScanItemsTable.ScanUser]  = currentScan.User;
                itemRow[ScanItemsTable.Datestamp] = DateTime.Now.ToString(DataConsistency.DBDateTimeFormat);
                itemRow[ScanItemsTable.ScanYear]  = DateTime.Now.Year.ToString();
                itemRow[ScanItemsTable.ScanId]    = currentScan.ID;

                var updatedRows = DBFactory.GetSqliteScanDatabase(currentScan.ID).UpdateTable(Queries.Sqlite.SelectAssetDetailByAssetTag(assetTag), itemDetail);

                LoadCurrentScanItems(view.LocationFilters);
                view.PopulateNewScan(assetTag, itemDetail);

                // Throw mismatch exception after adding the scan to the DB.
                if (locationMismatch)
                {
                    var expectedLocation = AttributeInstances.MunisAttributes.MunisToAssetLocations[itemRow[MunisFixedAssetTable.Location].ToString()];
                    var scanLocation     = AttributeInstances.MunisAttributes.MunisToAssetLocations[currentScan.MunisLocation.MunisCode];

                    scannerInput?.BadScan();
                    OnExceptionOccured(new LocationMismatchException(expectedLocation.DisplayValue, scanLocation.DisplayValue, assetTag));
                    return;
                }

                scannerInput?.GoodScan();
            }
        }
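
Purely illustrative: the status decision above reduced to a small pure function, which would make the mismatch rule easy to unit test (GetScanStatus is not part of the source):

        private static ScanStatus GetScanStatus(string inventoryMunisCode, string scannedMunisCode)
        {
            return inventoryMunisCode == scannedMunisCode ? ScanStatus.OK : ScanStatus.LocationMismatch;
        }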
Example #10
        /// <summary>
        /// Updates the video file properties structure; checks for crop, audio and video information
        /// </summary>
        /// <param name="skipCropDetect">True to skip detecting cropping parameters</param>
        /// <param name="detectInterlace">Extracts the video interlacing type by analyzing it in depth</param>
        /// <param name="videoFileName">Path to Original Source Video</param>
        /// <param name="remuxedFileName">Path to Remuxed video, else null or empty string</param>
        /// <param name="edlFile">Path to EDL file else null or empty string</param>
        /// <param name="audioLanguage">Audio Language</param>
        /// <param name="jobStatus">JobStatus</param>
        /// <param name="jobLog">JobLog</param>
        public void UpdateVideoInfo(bool skipCropDetect, bool detectInterlace, string videoFileName, string remuxedFileName, string edlFile, string audioLanguage, JobStatus jobStatus, Log jobLog)
        {
            ResetParameters(); // Reset VideoInfo parameters

            _jobStatus = jobStatus;
            _jobLog    = jobLog;
            _EDLFile   = edlFile;
            _requestedAudioLanguage = audioLanguage;
            _originalFileName       = videoFileName;
            _remuxedFileName        = remuxedFileName;

            _skipCropDetect    = skipCropDetect;
            _detectInterlacing = detectInterlace;

            _jobLog.WriteEntry(this, "Reading MediaInfo from " + SourceVideo, Log.LogEntryType.Information);

            _videoCodec = VideoParams.VideoFormat(SourceVideo);
            jobLog.WriteEntry(this, "Video Codec : " + _videoCodec, Log.LogEntryType.Debug);
            if (String.IsNullOrWhiteSpace(_videoCodec))
            {
                _error = true;
            }

            _audioCodec = VideoParams.AudioFormat(SourceVideo);
            jobLog.WriteEntry(this, "Audio Codec : " + _audioCodec, Log.LogEntryType.Debug);
            if (String.IsNullOrWhiteSpace(_audioCodec))
            {
                _error = true;
            }

            _fps = VideoParams.FPS(SourceVideo);
            jobLog.WriteEntry(this, "Video FPS : " + _fps.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);
            if (_fps <= 0)
            {
                _error = true;
            }

            _width = VideoParams.VideoWidth(SourceVideo);
            jobLog.WriteEntry(this, "Video Width : " + _width.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);
            if (_width <= 0)
            {
                _error = true;
            }

            _height = VideoParams.VideoHeight(SourceVideo);
            jobLog.WriteEntry(this, "Video Height : " + _height.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);
            if (_height <= 0)
            {
                _error = true;
            }

            _duration = VideoParams.VideoDuration(SourceVideo);
            jobLog.WriteEntry(this, "Video Duration : " + _duration.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);
            if (_duration <= 0)
            {
                _error = true;
            }

            _audioDelay = VideoParams.AudioDelay(SourceVideo);
            jobLog.WriteEntry(this, "Audio Delay : " + _audioDelay.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);

            if (_detectInterlacing) // Get interlacing from FFMPEG, more reliable than MediaInfo - avoid unnecessary cycles if not required
            {
                jobLog.WriteEntry(this, "Scan type unknown, trying with FFMPEGMediaInfo", Log.LogEntryType.Debug);

                _ffmpegStreamInfo = new FFmpegMediaInfo(SourceVideo, _jobStatus, _jobLog, 0, 0, _ignoreSuspend); // Run interlace detection with defaults
                if (_ffmpegStreamInfo.Success && !_ffmpegStreamInfo.ParseError)
                {
                    // Now calculate whether it's Interlaced or Progressive based on the Multi Frame Interlaced Detection Results
                    long totalInterlaced   = _ffmpegStreamInfo.MFInterlaceDetectionResults.BFF + _ffmpegStreamInfo.MFInterlaceDetectionResults.TFF;
                    long totalProgressive  = _ffmpegStreamInfo.MFInterlaceDetectionResults.Progressive;
                    long totalUndetermined = _ffmpegStreamInfo.MFInterlaceDetectionResults.Undetermined; // TODO: What to do with this?

                    if (totalInterlaced == 0 && totalProgressive == 0)                                   // Boundary conditions
                    {
                        _scanType = ScanType.Unknown;
                    }
                    else if (totalInterlaced == 0) // Avoid divide by zero exception
                    {
                        _scanType = ScanType.Progressive;
                    }
                    else
                    {
                        double PtoIRatio = (double)totalProgressive / totalInterlaced; // Cast to avoid integer division; see below comment

                        // Refer to this, how to tell if the video is interlaced or telecine
                        // http://forum.videohelp.com/threads/295007-Interlaced-or-telecined-how-to-tell?p=1797771&viewfull=1#post1797771
                        // It is a statistical ratio, telecine has approx split of 3 progressive and 2 interlaced (i.e. ratio of about 1.5 progressive to interlaced)
                        // TODO: We need to revisit the logic below for telecine, interlaced or progressive detection (check idet filter for updates ffmpeg ticket #3073)
                        if ((totalProgressive == 0) && (totalInterlaced == 0)) // Unknown - could not find
                        {
                            _scanType = ScanType.Unknown;
                        }
                        else if ((PtoIRatio > TELECINE_LOW_P_TO_I_RATIO) && (PtoIRatio < TELECINE_HIGH_P_TO_I_RATIO)) // Let us keep a band to measure telecine ratio, see comment above
                        {
                            _scanType = ScanType.Telecine;
                        }
                        else if (PtoIRatio <= TELECINE_LOW_P_TO_I_RATIO) // We play safe, more interlaced than progressive
                        {
                            _scanType = ScanType.Interlaced;
                        }
                        else if (PtoIRatio >= TELECINE_HIGH_P_TO_I_RATIO) // Progressive has the clear lead
                        {
                            _scanType = ScanType.Progressive;
                        }
                        else
                        {
                            _scanType = ScanType.Unknown; // No idea where we are
                        }
                    }

                    jobLog.WriteEntry(this, "FFMPEG Video Scan Type : " + _scanType.ToString(), Log.LogEntryType.Debug);
                }
                else
                {
                    jobLog.WriteEntry(this, "Error reading scan type from FFMPEGMediaInfo", Log.LogEntryType.Warning);
                }

                if (_scanType == ScanType.Unknown) // If we couldn't get it from FFMPEG lets try MediaInfo as a backup
                {
                    _scanType = VideoParams.VideoScanType(SourceVideo);
                    jobLog.WriteEntry(this, " MediaInfo Video Scan Type : " + _scanType.ToString(), Log.LogEntryType.Debug);
                }
            }

            // We don't get AudioChannel information here as it interferes with FFMPEG

            /*mi.Option("Inform", "Audio; %Channels%");
             * int.TryParse(mi.Inform(), out _audioChannels);
             * jobLog.WriteEntry(this, "Audio Channels : " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);*/

            // Supplement by extracting Video and Audio information using FFMPEG (sometimes MediaInfo fails) and select the specified Audio Language
            _jobLog.WriteEntry(this, "Supplementing Media information using FFMPEG", Log.LogEntryType.Information);
            _ffmpegStreamInfo = new FFmpegMediaInfo(SourceVideo, _jobStatus, _jobLog, _ignoreSuspend); // this may be called from the UI request
            if (_ffmpegStreamInfo.Success && !_ffmpegStreamInfo.ParseError)
            {
                // Store the video information (there's only 1 video per file)
                _width  = _ffmpegStreamInfo.MediaInfo.VideoInfo.Width;
                _height = _ffmpegStreamInfo.MediaInfo.VideoInfo.Height;
                if ((_fps <= 0) || ((_fps > _ffmpegStreamInfo.MediaInfo.VideoInfo.FPS) && (_ffmpegStreamInfo.MediaInfo.VideoInfo.FPS > 0))) // Check _fps, sometimes MediaInfo get it below 0 or too high (most times it's reliable)
                {
                    _fps = _ffmpegStreamInfo.MediaInfo.VideoInfo.FPS;
                }
                else
                {
                    _ffmpegStreamInfo.MediaInfo.VideoInfo.FPS = _fps; // Store the value from MediaInfo, more reliable
                }
                _duration    = _ffmpegStreamInfo.MediaInfo.VideoInfo.Duration;
                _videoCodec  = _ffmpegStreamInfo.MediaInfo.VideoInfo.VideoCodec;
                _videoStream = _ffmpegStreamInfo.MediaInfo.VideoInfo.Stream;
                _videoPID    = _ffmpegStreamInfo.MediaInfo.VideoInfo.PID; // video PID

                // Default Check if all audio streams have same codec, if so populate the field for later use during reading profiles
                for (int i = 0; i < _ffmpegStreamInfo.AudioTracks; i++)
                {
                    if (i == 0)
                    {
                        _audioCodec = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].AudioCodec; // baseline the codec name
                    }
                    else if (_audioCodec != _ffmpegStreamInfo.MediaInfo.AudioInfo[i].AudioCodec)
                    {
                        _audioCodec = ""; // All codecs are not the same, reset it and let the encoder figure it out
                        break;            // we're done here
                    }
                }

                // Default check if all audio streams have same channels, if so populate the field for later use during reading profiles
                for (int i = 0; i < _ffmpegStreamInfo.AudioTracks; i++)
                {
                    if (i == 0)
                    {
                        _audioChannels = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels; // baseline the channels
                    }
                    else if (_audioChannels != _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels)
                    {
                        _audioChannels = 0; // All channels are not the same, reset it and let the encoder figure it out
                        break;              // we're done here
                    }
                }

                // Audio parameters - find the best audio channel for the selected language, or the best audio track if there are impaired tracks; otherwise the encoder will select the best audio channel by default (encoders do not do a good job of ignoring impaired tracks)
                bool selectedTrack = false;
                if ((!String.IsNullOrEmpty(_requestedAudioLanguage) || (_ffmpegStreamInfo.ImpariedAudioTrackCount > 0)) && (_ffmpegStreamInfo.AudioTracks > 1)) // More than 1 audio track to choose from and either we have a language match request or the presence of an impaired channel (likely no audio)
                {
                    for (int i = 0; i < _ffmpegStreamInfo.AudioTracks; i++)
                    {
                        bool processTrack = false; // By default we don't need to process

                        // Language selection check, if the user has picked a specific language code, look for it
                        // If we find a match, we look the one with the highest number of channels in it
                        if (!String.IsNullOrEmpty(_requestedAudioLanguage))
                        {
                            if ((_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Language.ToLower() == _requestedAudioLanguage) && (_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > 0))
                            {
                                if (selectedTrack)
                                {
                                    if (!(                                                                                                                                  // take into account impaired tracks (since impaired tracks typically have no audio)
                                            ((_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > _audioChannels) && !_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Impaired) || // PREFERENCE to non-imparied Audio tracks with the most channels
                                            ((_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > _audioChannels) && _selectedAudioImpaired) ||                             // PREFERENCE to Audio tracks with most channels if currently selected track is impaired
                                            (!_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Impaired && _selectedAudioImpaired)                                                  // PREFER non impaired audio over currently selected impaired
                                            ))
                                    {
                                        continue; // we have found a lang match, now we are looking for more channels only now
                                    }
                                }

                                processTrack = true; // All conditions met, we need to process this track
                            }
                        }
                        else if (_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > 0) // we have an impaired audio track; select the non-impaired track with the highest number of channels, bitrate or frequency
                        {
                            if (selectedTrack)
                            {
                                if (!(                                                                                                                                  // take into account impaired tracks (since impaired tracks typically have no audio)
                                        ((_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > _audioChannels) && !_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Impaired) || // PREFERENCE to non-imparied Audio tracks with the most channels
                                        ((_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > _audioChannels) && _selectedAudioImpaired) ||                             // PREFERENCE to Audio tracks with most channels if currently selected track is impaired
                                        (!_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Impaired && _selectedAudioImpaired)                                                  // PREFER non impaired audio over currently selected impaired
                                        ))
                                {
                                    continue; // we have found a lang match, now we are looking for more channels only now
                                }
                            }

                            processTrack = true; // All conditions met, we need to process this track
                        }

                        if (processTrack) // We need to process this track
                        {
                            _audioChannels         = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels;
                            _audioStream           = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Stream;             // store the stream number for the selected audio channel
                            _audioCodec            = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].AudioCodec;
                            _audioPID              = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].PID;                // Audio PID
                            _audioTrack            = i;                                                           // Store the audio track number we selected
                            _audioLanguage         = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Language.ToLower(); // this is what we selected
                            _selectedAudioImpaired = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Impaired;           // Is this an impaired audio track?
                            selectedTrack          = true;                                                        // We found a suitable track

                            if (!String.IsNullOrEmpty(_requestedAudioLanguage))
                            {
                                _jobLog.WriteEntry(this, "Found Audio Language match for language" + " " + _requestedAudioLanguage.ToUpper() + ", " + "Audio Stream" + " " + _audioStream.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Audio Track") + " " + _audioTrack.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Channels") + " " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Codec") + " " + _audioCodec + " Impaired " + _selectedAudioImpaired.ToString(), Log.LogEntryType.Debug);
                            }
                            else
                            {
                                _jobLog.WriteEntry(this, "Compensating for audio impaired tracks, selected track with language" + " " + _requestedAudioLanguage.ToUpper() + ", " + "Audio Stream" + " " + _audioStream.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Audio Track") + " " + _audioTrack.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Channels") + " " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Codec") + " " + _audioCodec + " Impaired " + _selectedAudioImpaired.ToString(), Log.LogEntryType.Debug);
                            }
                        }
                    }

                    if (!selectedTrack)
                    {
                        _jobLog.WriteEntry(this, ("Could not find a match for selected Audio Language Code") + " " + _requestedAudioLanguage + ", letting encoder choose best audio language", Log.LogEntryType.Warning);
                    }
                    else
                    {
                        _jobLog.WriteEntry(this, ("Selected Audio Stream") + " " + _audioStream.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Audio Track") + " " + _audioTrack.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Channels") + " " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Codec") + " " + _audioCodec + ", Impaired " + _selectedAudioImpaired.ToString(), Log.LogEntryType.Debug);
                    }
                }
                else if (_ffmpegStreamInfo.AudioTracks == 1) // We have just one audio track, then populate the information otherwise the encoding operations will have a hard time determining audio information
                {
                    if (_ffmpegStreamInfo.MediaInfo.AudioInfo[0].Channels > 0)
                    {
                        _audioChannels         = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].Channels;
                        _audioStream           = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].Stream;             // store the stream number for the selected audio channel
                        _audioCodec            = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].AudioCodec;
                        _audioPID              = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].PID;                // Audio PID
                        _audioTrack            = 0;                                                           // Store the audio track number we selected
                        _audioLanguage         = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].Language.ToLower(); // this is what we selected
                        _selectedAudioImpaired = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].Impaired;           // Is this an impaired audio track?

                        _jobLog.WriteEntry(this, "Only one audio track present, " + ("Audio Stream") + " " + _audioStream.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Audio Track") + " " + _audioTrack.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Channels") + " " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Codec") + " " + _audioCodec + " Impaired " + _selectedAudioImpaired.ToString(), Log.LogEntryType.Debug);
                    }
                }
                else
                {
                    _jobLog.WriteEntry(this, ("Audio Stream") + " " + _audioStream.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Audio Track") + " " + _audioTrack.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Channels") + " " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Codec") + " " + _audioCodec + ", Impaired " + _selectedAudioImpaired.ToString(), Log.LogEntryType.Debug);
                    _jobLog.WriteEntry(this, "No audio language selected, letting encoder choose best audio language", Log.LogEntryType.Warning);
                }

                _error = false; // all good now
            }
            else
            {
                _error = true;
            }

            if (_error)
            {
                _jobLog.WriteEntry(this, ("Unable to read media information using FFMPEG or MediaInfo"), Log.LogEntryType.Error);
                return;
            }

            // Get the video properties for the original video
            _jobLog.WriteEntry(this, "Reading Original File Media information", Log.LogEntryType.Information);
            _originalFileFFmpegStreamInfo = new FFmpegMediaInfo(_originalFileName, _jobStatus, _jobLog, _ignoreSuspend);
            if (!_originalFileFFmpegStreamInfo.Success || _originalFileFFmpegStreamInfo.ParseError)
            {
                _jobLog.WriteEntry(this, ("Unable to read media information using FFMPEG"), Log.LogEntryType.Warning);
            }

            if (_skipCropDetect)
            {
                _jobLog.WriteEntry(this, "Skipping crop information", Log.LogEntryType.Information);
            }
            else
            {
                UpdateCropInfo(jobLog);
            }
        }
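
A worked sketch of the interlace/telecine classification performed above, pulled out into a standalone function. The band constants stand in for TELECINE_LOW_P_TO_I_RATIO and TELECINE_HIGH_P_TO_I_RATIO, whose real values are not shown; 1.2 and 1.8 are assumed bounds bracketing the roughly 1.5 progressive-to-interlaced ratio typical of telecined material:

        private static ScanType ClassifyScan(long totalInterlaced, long totalProgressive)
        {
            const double lowBand  = 1.2; // assumed stand-in for TELECINE_LOW_P_TO_I_RATIO
            const double highBand = 1.8; // assumed stand-in for TELECINE_HIGH_P_TO_I_RATIO

            if (totalInterlaced == 0 && totalProgressive == 0)
                return ScanType.Unknown;                  // the detector produced nothing usable
            if (totalInterlaced == 0)
                return ScanType.Progressive;              // avoid divide by zero

            double ratio = (double)totalProgressive / totalInterlaced; // e.g. 600/400 = 1.5, inside the telecine band
            if (ratio > lowBand && ratio < highBand)
                return ScanType.Telecine;
            return ratio <= lowBand ? ScanType.Interlaced : ScanType.Progressive;
        }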
Example #11
        /// <summary>
        /// Updates the video file properties structure; checks for crop, audio and video information
        /// </summary>
        /// <param name="skipCropDetect">True to skip detecting cropping parameters</param>
        /// <param name="detectInterlace">Extracts the video interlacing type by analyzing it in depth</param>
        /// <param name="videoFileName">Path to Original Source Video</param>
        /// <param name="remuxedFileName">Path to Remuxed video, else null or empty string</param>
        /// <param name="edlFile">Path to EDL file else null or empty string</param>
        /// <param name="audioLanguage">Audio Language</param>
        /// <param name="jobStatus">JobStatus</param>
        /// <param name="jobLog">JobLog</param>
        public void UpdateVideoInfo(bool skipCropDetect, bool detectInterlace, string videoFileName, string remuxedFileName, string edlFile, string audioLanguage, JobStatus jobStatus, Log jobLog)
        {
            ResetParameters(); // Reset VideoInfo parameters

            _jobStatus = jobStatus;
            _jobLog = jobLog;
            _EDLFile = edlFile;
            _requestedAudioLanguage = audioLanguage;
            _originalFileName = videoFileName;
            _remuxedFileName = remuxedFileName;

            Ini ini = new Ini(GlobalDefs.ProfileFile);

            _skipCropDetect = skipCropDetect;
            _detectInterlacing = detectInterlace;

            _jobLog.WriteEntry(this, "Reading MediaInfo from " + SourceVideo, Log.LogEntryType.Information);

            _videoCodec = VideoParams.VideoFormat(SourceVideo);
            jobLog.WriteEntry(this, "Video Codec : " + _videoCodec, Log.LogEntryType.Debug);
            if (String.IsNullOrWhiteSpace(_videoCodec))
                _error = true;

            _audioCodec = VideoParams.AudioFormat(SourceVideo);
            jobLog.WriteEntry(this, "Audio Codec : " + _audioCodec, Log.LogEntryType.Debug);
            if (String.IsNullOrWhiteSpace(_audioCodec))
                _error = true;

            _fps = VideoParams.FPS(SourceVideo);
            jobLog.WriteEntry(this, "Video FPS : " + _fps.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);
            if (_fps <= 0)
                _error = true;

            _width = VideoParams.VideoWidth(SourceVideo);
            jobLog.WriteEntry(this, "Video Width : " + _width.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);
            if (_width <= 0)
                _error = true;

            _height = VideoParams.VideoHeight(SourceVideo);
            jobLog.WriteEntry(this, "Video Height : " + _height.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);
            if (_height <= 0)
                _error = true;

            _duration = VideoParams.VideoDuration(SourceVideo);
            jobLog.WriteEntry(this, "Video Duration : " + _duration.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);
            if (_duration <= 0)
                _error = true;

            _audioDelay = VideoParams.AudioDelay(SourceVideo);
            jobLog.WriteEntry(this, "Audio Delay : " + _audioDelay.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);

            _scanType = VideoParams.VideoScanType(SourceVideo);
            jobLog.WriteEntry(this, "Video Scan Type : " + _scanType.ToString(), Log.LogEntryType.Debug);

            if (_scanType == ScanType.Unknown && _detectInterlacing) // If we couldn't find it lets try ffmpeg - avoid unnecessary cycles if not required
            {
                jobLog.WriteEntry(this, "Scan type unknown, trying with FFMPEGMediaInfo", Log.LogEntryType.Debug);

                _ffmpegStreamInfo = new FFmpegMediaInfo(SourceVideo, _jobStatus, _jobLog, 0, 0, _ignoreSuspend); // Run interlace detection with defaults
                if (_ffmpegStreamInfo.Success && !_ffmpegStreamInfo.ParseError)
                {
                    // Now calculate whether it's Interlaced or Progressive based on the Multi Frame Interlaced Detection Results
                    long totalInterlaced = _ffmpegStreamInfo.MFInterlaceDetectionResults.BFF + _ffmpegStreamInfo.MFInterlaceDetectionResults.TFF;
                    long totalProgressive = _ffmpegStreamInfo.MFInterlaceDetectionResults.Progressive;
                    long totalUndetermined = _ffmpegStreamInfo.MFInterlaceDetectionResults.Undetermined; // TODO: What to do with this?

                    if (totalInterlaced == 0 && totalProgressive == 0) // Boundary conditions
                        _scanType = ScanType.Unknown;
                    else if (totalInterlaced == 0) // Avoid divide by zero exception
                        _scanType = ScanType.Progressive;
                    else
                    {
                        double PtoIRatio = (double)totalProgressive / totalInterlaced; // Cast to avoid integer division; see below comment

                        // Refer to this, how to tell if the video is interlaced or telecine
                        // http://forum.videohelp.com/threads/295007-Interlaced-or-telecined-how-to-tell?p=1797771&viewfull=1#post1797771
                        // It is a statistical ratio, telecine has approx split of 3 progressive and 2 interlaced (i.e. ratio of about 1.5 progressive to interlaced)
                        // TODO: We need to revisit the logic below for telecine, interlaced or progressive detection (check idet filter for updates ffmpeg ticket #3073)
                        if ((totalProgressive == 0) && (totalInterlaced == 0)) // Unknown - could not find
                            _scanType = ScanType.Unknown;
                        else if ((PtoIRatio > TELECINE_LOW_P_TO_I_RATIO) && (PtoIRatio < TELECINE_HIGH_P_TO_I_RATIO)) // Let us keep a band to measure telecine ratio, see comment above
                            _scanType = ScanType.Telecine;
                        else if (PtoIRatio <= TELECINE_LOW_P_TO_I_RATIO) // We play safe, more interlaced than progressive
                            _scanType = ScanType.Interlaced;
                        else if (PtoIRatio >= TELECINE_HIGH_P_TO_I_RATIO) // Progressive has the clear lead
                            _scanType = ScanType.Progressive;
                        else
                            _scanType = ScanType.Unknown; // No idea where we are
                    }

                    jobLog.WriteEntry(this, "FFMPEG Video Scan Type : " + _scanType.ToString(), Log.LogEntryType.Debug);
                }
                else
                    jobLog.WriteEntry(this, "Error reading scan type from FFMPEGMediaInfo", Log.LogEntryType.Warning);
            }

            // We don't get AudioChannel information here as it interferes with FFMPEG
            /*mi.Option("Inform", "Audio; %Channels%");
            int.TryParse(mi.Inform(), out _audioChannels);
            jobLog.WriteEntry(this, "Audio Channels : " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture), Log.LogEntryType.Debug);*/

            // Supplement by extracting Video and Audio information using FFMPEG (sometimes MediaInfo fails) and select the specified Audio Language
            _jobLog.WriteEntry(this, "Supplementing Media information using FFMPEG", Log.LogEntryType.Information);
            _ffmpegStreamInfo = new FFmpegMediaInfo(SourceVideo, _jobStatus, _jobLog, _ignoreSuspend); // this may be called from the UI request
            if (_ffmpegStreamInfo.Success && !_ffmpegStreamInfo.ParseError)
            {
                // Store the video information (there's only 1 video per file)
                _width = _ffmpegStreamInfo.MediaInfo.VideoInfo.Width;
                _height = _ffmpegStreamInfo.MediaInfo.VideoInfo.Height;
                if ((_fps <= 0) || ((_fps > _ffmpegStreamInfo.MediaInfo.VideoInfo.FPS) && (_ffmpegStreamInfo.MediaInfo.VideoInfo.FPS > 0))) // Check _fps, sometimes MediaInfo get it below 0 or too high (most times it's reliable)
                    _fps = _ffmpegStreamInfo.MediaInfo.VideoInfo.FPS;
                else
                    _ffmpegStreamInfo.MediaInfo.VideoInfo.FPS = _fps; // Store the value from MediaInfo, more reliable
                _duration = _ffmpegStreamInfo.MediaInfo.VideoInfo.Duration;
                _videoCodec = _ffmpegStreamInfo.MediaInfo.VideoInfo.VideoCodec;
                _videoStream = _ffmpegStreamInfo.MediaInfo.VideoInfo.Stream;
                _videoPID = _ffmpegStreamInfo.MediaInfo.VideoInfo.PID; // video PID
                
                // Default Check if all audio streams have same codec, if so populate the field for later use during reading profiles
                for (int i = 0; i < _ffmpegStreamInfo.AudioTracks; i++)
                {
                    if (i == 0)
                        _audioCodec = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].AudioCodec; // baseline the codec name
                    else if (_audioCodec != _ffmpegStreamInfo.MediaInfo.AudioInfo[i].AudioCodec)
                    {
                        _audioCodec = ""; // All codecs are not the same, reset it and let the encoder figure it out
                        break; // we're done here
                    }
                }

                // Default check if all audio streams have same channels, if so populate the field for later use during reading profiles
                for (int i = 0; i < _ffmpegStreamInfo.AudioTracks; i++)
                {
                    if (i == 0)
                        _audioChannels = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels; // baseline the channels
                    else if (_audioChannels != _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels)
                    {
                        _audioChannels = 0; // All channels are not the same, reset it and let the encoder figure it out
                        break; // we're done here
                    }
                }

                // Audio parameters - find the best audio channel for the selected language, or the best audio track if there are impaired tracks; otherwise the encoder will select the best audio channel by default (encoders do not do a good job of ignoring impaired tracks)
                bool selectedTrack = false;
                if ((!String.IsNullOrEmpty(_requestedAudioLanguage) || (_ffmpegStreamInfo.ImpariedAudioTrackCount > 0)) && (_ffmpegStreamInfo.AudioTracks > 1)) // More than 1 audio track to choose from and either we have a language match request or the presence of an impaired channel (likely no audio)
                {
                    for (int i = 0; i < _ffmpegStreamInfo.AudioTracks; i++)
                    {
                        bool processTrack = false; // By default we don't need to process

                        // Language selection check, if the user has picked a specific language code, look for it
                        // If we find a match, we look the one with the highest number of channels in it
                        if (!String.IsNullOrEmpty(_requestedAudioLanguage))
                        {
                            if ((_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Language.ToLower() == _requestedAudioLanguage) && (_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > 0))
                            {
                                if (selectedTrack)
                                {
                                    if (!( // take into account impaired tracks (since impaired tracks typically have no audio)
                                        ((_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > _audioChannels) && !_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Impaired) || // PREFERENCE to non-imparied Audio tracks with the most channels
                                        ((_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > _audioChannels) && _selectedAudioImpaired) || // PREFERENCE to Audio tracks with most channels if currently selected track is impaired
                                        (!_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Impaired && _selectedAudioImpaired) // PREFER non impaired audio over currently selected impaired
                                        ))
                                        continue; // we have found a lang match, now we are looking for more channels only now
                                }
                             
                                processTrack = true; // All conditions met, we need to process this track
                            }
                        }
                        else if (_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > 0) // we have an impaired audio track; select the non-impaired track with the highest number of channels, bitrate or frequency
                        {
                            if (selectedTrack)
                            {
                                if (!( // take into account impaired tracks (since impaired tracks typically have no audio)
                                    ((_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > _audioChannels) && !_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Impaired) || // PREFERENCE to non-imparied Audio tracks with the most channels
                                    ((_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels > _audioChannels) && _selectedAudioImpaired) || // PREFERENCE to Audio tracks with most channels if currently selected track is impaired
                                    (!_ffmpegStreamInfo.MediaInfo.AudioInfo[i].Impaired && _selectedAudioImpaired) // PREFER non impaired audio over currently selected impaired
                                    ))
                                    continue; // we have found a lang match, now we are looking for more channels only now
                            }

                            processTrack = true; // All conditions met, we need to process this track
                        }

                        if (processTrack) // We need to process this track
                        {
                            _audioChannels = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Channels;
                            _audioStream = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Stream; // store the stream number for the selected audio channel
                            _audioCodec = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].AudioCodec;
                            _audioPID = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].PID; // Audio PID
                            _audioTrack = i; // Store the audio track number we selected
                            _audioLanguage = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Language.ToLower(); // this is what we selected
                            _selectedAudioImpaired = _ffmpegStreamInfo.MediaInfo.AudioInfo[i].Impaired; // Is this an impaired audio track?
                            selectedTrack = true; // We found a suitable track

                            if (!String.IsNullOrEmpty(_requestedAudioLanguage))
                                _jobLog.WriteEntry(this, "Found Audio Language match for language" + " " + _requestedAudioLanguage.ToUpper() + ", " + "Audio Stream" + " " + _audioStream.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Audio Track") + " " + _audioTrack.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Channels") + " " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Codec") + " " + _audioCodec + " Impaired " + _selectedAudioImpaired.ToString(), Log.LogEntryType.Debug);
                            else
                                _jobLog.WriteEntry(this, "Compensating for audio impaired tracks, selected track with language" + " " + _requestedAudioLanguage.ToUpper() + ", " + "Audio Stream" + " " + _audioStream.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Audio Track") + " " + _audioTrack.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Channels") + " " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Codec") + " " + _audioCodec + " Impaired " + _selectedAudioImpaired.ToString(), Log.LogEntryType.Debug);
                        }
                    }

                    if (!selectedTrack)
                        _jobLog.WriteEntry(this, ("Could not find a match for selected Audio Language Code") + " " + _requestedAudioLanguage + ", letting encoder choose best audio language", Log.LogEntryType.Warning);
                    else
                        _jobLog.WriteEntry(this, ("Selected Audio Stream") + " " + _audioStream.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Audio Track") + " " + _audioTrack.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Channels") + " " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Codec") + " " + _audioCodec + ", Impaired " + _selectedAudioImpaired.ToString(), Log.LogEntryType.Debug);
                }
                else if (_ffmpegStreamInfo.AudioTracks == 1) // We have just one audio track, then populate the information otherwise the encoding operations will have a hard time determining audio information
                {
                    if (_ffmpegStreamInfo.MediaInfo.AudioInfo[0].Channels > 0)
                    {
                        _audioChannels = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].Channels;
                        _audioStream = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].Stream; // store the stream number for the selected audio channel
                        _audioCodec = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].AudioCodec;
                        _audioPID = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].PID; // Audio PID
                        _audioTrack = 0; // Store the audio track number we selected
                        _audioLanguage = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].Language.ToLower(); // this is what we selected
                        _selectedAudioImpaired = _ffmpegStreamInfo.MediaInfo.AudioInfo[0].Impaired; // Is this an impaired audio track?

                        _jobLog.WriteEntry(this, "Only one audio track present, " + ("Audio Stream") + " " + _audioStream.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Audio Track") + " " + _audioTrack.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Channels") + " " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Codec") + " " + _audioCodec + " Impaired " + _selectedAudioImpaired.ToString(), Log.LogEntryType.Debug);
                    }
                }
                else
                {
                    _jobLog.WriteEntry(this, ("Audio Stream") + " " + _audioStream.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Audio Track") + " " + _audioTrack.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Channels") + " " + _audioChannels.ToString(System.Globalization.CultureInfo.InvariantCulture) + ", " + ("Codec") + " " + _audioCodec + ", Impaired " + _selectedAudioImpaired.ToString(), Log.LogEntryType.Debug);
                    _jobLog.WriteEntry(this, "No audio language selected, letting encoder choose best audio language", Log.LogEntryType.Warning);
                }

                _error = false; // all good now
            }
            else
                _error = true;
            
            if (_error)
            {
                _jobLog.WriteEntry(this, ("Unable to read media information using FFMPEG or MediaInfo"), Log.LogEntryType.Error);
                return;
            }

            // Get the video properties for the original video
            _jobLog.WriteEntry(this, "Reading Original File Media information", Log.LogEntryType.Information);
            _originalFileFFmpegStreamInfo = new FFmpegMediaInfo(_originalFileName, _jobStatus, _jobLog, _ignoreSuspend);
            if (!_originalFileFFmpegStreamInfo.Success || _originalFileFFmpegStreamInfo.ParseError)
                _jobLog.WriteEntry(this, ("Unable to read media information using FFMPEG"), Log.LogEntryType.Warning);

            if (_skipCropDetect)
                _jobLog.WriteEntry(this, "Skipping crop information", Log.LogEntryType.Information);
            else
                UpdateCropInfo(jobLog);
        }