// Parses a stream-header fragment (e.g. "#0.1(eng)[0x1b]") to extract the
// numeric stream index, an optional "(language)" tag and an optional
// "[codec tag]". Populates s.Language / s.CodecTag / s.StreamIndex.
// Returns true only when a parsable ".<number>" index is found.
bool ProcessStreamIndexLine(ref AVStream s, string strText)
{
    strText = strText.Replace("stream", "").Trim();

    // Strip any Language tag, e.g. "(eng)"
    if (strText.Contains("("))
    {
        int firstBracket = strText.IndexOf("(");
        int secondBracket = strText.IndexOf(")");
        // FIX: require the closing bracket to come AFTER the opening one.
        // The old check only tested both for -1, so malformed input like
        // ") ... (" made Substring/Remove throw ArgumentOutOfRangeException.
        if ((firstBracket != -1) && (secondBracket > firstBracket))
        {
            s.Language = strText.Substring(firstBracket + 1, secondBracket - firstBracket - 1);
            strText = strText.Remove(firstBracket, (secondBracket - firstBracket) + 1);
        }
    }

    // Strip any Codec tag, e.g. "[0x1b]"
    if (strText.Contains("["))
    {
        int firstBracket = strText.IndexOf("[");
        int secondBracket = strText.IndexOf("]");
        // FIX: same bracket-ordering guard as above.
        if ((firstBracket != -1) && (secondBracket > firstBracket))
        {
            s.CodecTag = strText.Substring(firstBracket + 1, secondBracket - firstBracket - 1);
            strText = strText.Remove(firstBracket, (secondBracket - firstBracket) + 1);
        }
    }

    // Extra trim after the tag removals above
    strText = strText.Trim();

    // The stream index follows the dot, e.g. "#0.1" => 1
    int dotIndex = strText.IndexOf(".");
    if (dotIndex == -1)
    {
        return (false);
    }
    if (dotIndex == (strText.Length - 1))
    {
        return (false); // it's at the last character; no digits follow
    }

    string strIndex = strText.Substring(dotIndex + 1);
    int iIndex;
    if (!(int.TryParse(strIndex, out iIndex)))
    {
        return (false);
    }
    s.StreamIndex = iIndex;
    return (true);
}
// Parses a stream-header fragment (e.g. "#0.1(eng)[0x1b]") to extract the
// numeric stream index, an optional "(language)" tag and an optional
// "[codec tag]". Populates s.Language / s.CodecTag / s.StreamIndex.
// Returns true only when a parsable ".<number>" index is found.
bool ProcessStreamIndexLine(ref AVStream s, string strText)
{
    strText = strText.Replace("stream", "").Trim();

    // Strip any Language tag, e.g. "(eng)"
    if (strText.Contains("("))
    {
        int firstBracket = strText.IndexOf("(");
        int secondBracket = strText.IndexOf(")");
        // FIX: require the closing bracket to come AFTER the opening one.
        // Testing both merely for -1 allowed ") ... (" input to reach
        // Substring with a negative length -> ArgumentOutOfRangeException.
        if ((firstBracket != -1) && (secondBracket > firstBracket))
        {
            s.Language = strText.Substring(firstBracket + 1, secondBracket - firstBracket - 1);
            strText = strText.Remove(firstBracket, (secondBracket - firstBracket) + 1);
        }
    }

    // Strip any Codec tag, e.g. "[0x1b]"
    if (strText.Contains("["))
    {
        int firstBracket = strText.IndexOf("[");
        int secondBracket = strText.IndexOf("]");
        // FIX: same bracket-ordering guard as above.
        if ((firstBracket != -1) && (secondBracket > firstBracket))
        {
            s.CodecTag = strText.Substring(firstBracket + 1, secondBracket - firstBracket - 1);
            strText = strText.Remove(firstBracket, (secondBracket - firstBracket) + 1);
        }
    }

    // Extra trim after the tag removals above
    strText = strText.Trim();

    // The stream index follows the dot, e.g. "#0.1" => 1
    int dotIndex = strText.IndexOf(".");
    if (dotIndex == -1)
        return false;
    if (dotIndex == (strText.Length - 1))
        return false; // it's at the last character; no digits follow

    string strIndex = strText.Substring(dotIndex + 1);
    int iIndex;
    if (!(int.TryParse(strIndex, out iIndex)))
        return false;
    s.StreamIndex = iIndex;
    return true;
}
// Parses one ffmpeg "Stream #..." console line into an AVStream (older
// ffmpeg output format — a second variant of this method exists for the
// newer "#0:1" format). Classifies the stream as audio/video/subtitle,
// extracts the stream index, and for audio/video streams also the codec
// name plus channel count / aspect ratios / frame rate / frame size.
// Returns false when the line cannot be parsed; any exception thrown
// during parsing is swallowed and reported as false.
bool getStreamByParsingLine(string strText, out AVStream s)
{
    s = new AVStream();
    try
    {
        // Classify the stream by the av tag present in the line.
        if ((strText.Contains("audio:")))
        {
            s.CodecType = AVCodecType.Audio;
            // "hearing impaired" tracks are tagged as commentary audio.
            if ((strText.Contains("hearing impaired")))
                s.AudioCodecSubType = AudioStreamTypes.Commentary;
        }
        else if (strText.Contains("video:"))
            s.CodecType = AVCodecType.Video;
        else if (strText.Contains("subtitle:"))
            s.CodecType = AVCodecType.Subtitle;
        else
            s.CodecType = AVCodecType.Unknown;

        // For all streams, get the stream index number, e.g. #0.1 => 1
        List<string> mainParts = splitByDelimiter(strText, ":");
        if (mainParts.Count < 3)
            return false;
        if (!ProcessStreamIndexLine(ref s, mainParts[0]))
            return false;

        if (s.CodecType == AVCodecType.Audio)
        {
            // Strip away everything before the av tag ("audio:" is 6 chars)
            int endAudioTag = strText.IndexOf("audio:");
            strText = strText.Substring(endAudioTag + 6);

            // Look for number of channels, e.g. "6 channels"
            string strChannels;
            if (findSuffixAndNumberInString(strText, "channels", out strChannels))
            {
                int iChannels;
                if (int.TryParse(strChannels, out iChannels))
                    s.Channels = iChannels;
            }
            else
            {
                // Fall back to the textual channel-layout names.
                if (strText.Contains("stereo"))
                    s.Channels = 2;
                else if (strText.Contains("mono"))
                    s.Channels = 1;
            }

            // Finally split into comma-delimited strings
            List<string> subParts = splitByDelimiter(strText, ",");
            if (subParts.Count < 1)
                return false;

            // The first subpart is always the codec type
            s.CodecName = subParts[0];
        }

        if (s.CodecType == AVCodecType.Video)
        {
            // Strip away everything before the av tag ("video:" is 6 chars)
            int endVideoTag = strText.IndexOf("video:");
            strText = strText.Substring(endVideoTag + 6);

            // Find a PAR&DAR if one exists.
            // NOTE(review): this variant searches for a "par" prefix; the
            // newer-format variant of this method searches for "sar" —
            // presumably each matches a different ffmpeg build's output.
            // Confirm against the ffmpeg version actually shipped.
            string strPARValue;
            string strDARValue;
            if (findPrefixAndRatioInString(strText, "par", out strPARValue))
                s.SampleAspectRatio = strPARValue;
            if (findPrefixAndRatioInString(strText, "dar", out strDARValue))
                s.DisplayAspectRatio= strDARValue;

            //Determine Framerate (ffmpeg's "tbr" value)
            string frameRate;
            if (findFrameRate(strText, "tbr", out frameRate))
                s.frameRate = frameRate;

            // Now split into comma-delimited strings
            List<string> subParts =
                splitByDelimiter(strText, ",");
            if (subParts.Count < 1)
                return false;

            // The first subpart is always the codec type
            s.CodecName = subParts[0];

            // Find a size in the remaining strings; the loop keeps going, so
            // the LAST matching "WxH" token wins if several are present.
            //bool found;
            int Width;
            int Height;
            foreach (string subpart in subParts)
            {
                if (findSizeInString(subpart, out Width, out Height))
                {
                    s.Width = Width;
                    s.Height = Height;
                    //found = true;
                    continue;
                }
            }
            // if (!found) return false; // Un-comment if we ever REQUIRE a video size
        }
        return true;
    }
    catch
    {
        return false;
    }
}
// Appends an ffmpeg "-map 0:<index>" argument selecting the given stream
// from input file 0.
void AddStreamToMap(AVStream strm)
{
    string mapArg = string.Format("-map 0:{0}", strm.StreamIndex);
    mapArguments.AddArg(mapArg);
}
// Parses one ffmpeg "Stream #..." console line into an AVStream (newer
// ffmpeg output format — a second variant of this method exists for the
// older format). Classifies the stream as audio/video/subtitle, extracts
// the stream index, and per type also the codec name plus channel count /
// aspect ratios / frame rate / frame size. Returns false when the line
// cannot be parsed; any exception during parsing is swallowed and
// reported as false.
bool getStreamByParsingLine(string strText, out AVStream s)
{
    s = new AVStream();
    try
    {
        // Classify the stream by the av tag present in the line.
        if ((strText.Contains("audio:")))
        {
            s.CodecType = AVCodecType.Audio;
            // "hearing impaired" tracks are tagged as commentary audio.
            if ((strText.Contains("hearing impaired")))
            {
                s.AudioCodecSubType = AudioStreamTypes.Commentary;
            }
        }
        else if (strText.Contains("video:"))
        {
            s.CodecType = AVCodecType.Video;
        }
        else if (strText.Contains("subtitle:"))
        {
            s.CodecType = AVCodecType.Subtitle;
        }
        else
        {
            s.CodecType = AVCodecType.Unknown;
        }

        //To make new ffmpeglatest compatible with old ffmpeg:
        // newer ffmpeg prints "#0:1" where older builds printed "#0.1";
        // rewrite only the FIRST ':' to '.' so ProcessStreamIndexLine's
        // dot-based index parsing keeps working.
        var regex = new Regex(Regex.Escape(":"));
        strText = regex.Replace(strText, ".", 1);

        // For all streams, get the stream index number, e.g. #0.1 => 1
        List <string> mainParts = splitByDelimiter(strText, ":");
        if (mainParts.Count < 3)
        {
            return(false);
        }
        if (!ProcessStreamIndexLine(ref s, mainParts[0]))
        {
            return(false);
        }

        if (s.CodecType == AVCodecType.Subtitle)
        {
            // Strip away everything before the av tag ("subtitle:" is 9 chars)
            int endAudioTag = strText.IndexOf("subtitle:");
            strText = strText.Substring(endAudioTag + 9);

            // Finally split into comma-delimited strings
            List <string> subParts = splitByDelimiter(strText, ",");
            if (subParts.Count < 1)
            {
                return(false);
            }
            // The first subpart is always the codec type
            s.CodecName = subParts[0];
        }

        if (s.CodecType == AVCodecType.Audio)
        {
            // Strip away everything before the av tag ("audio:" is 6 chars)
            int endAudioTag = strText.IndexOf("audio:");
            strText = strText.Substring(endAudioTag + 6);

            // Look for number of channels, e.g. "6 channels"
            string strChannels;
            if (findSuffixAndNumberInString(strText, "channels", out strChannels))
            {
                int iChannels;
                if (int.TryParse(strChannels, out iChannels))
                {
                    s.Channels = iChannels;
                }
            }
            else
            {
                // Fall back to the textual channel-layout names.
                if (strText.Contains("stereo"))
                {
                    s.Channels = 2;
                }
                else if (strText.Contains("mono"))
                {
                    s.Channels = 1;
                }
                else if (strText.Contains("5.1"))
                {
                    s.Channels = 6;
                }
                else if (strText.Contains("7.1"))
                {
                    s.Channels = 8;
                }
            }

            // Finally split into comma-delimited strings
            List <string> subParts = splitByDelimiter(strText, ",");
            if (subParts.Count < 1)
            {
                return(false);
            }
            // The first subpart is always the codec type
            s.CodecName = subParts[0];
        }

        if (s.CodecType == AVCodecType.Video)
        {
            // Strip away everything before the av tag ("video:" is 6 chars)
            int endVideoTag = strText.IndexOf("video:");
            strText = strText.Substring(endVideoTag + 6);

            // Find a SAR&DAR if one exists
            if (strText.Contains("mjpeg")) // skip the thumbnail
            {
            }
            else
            {
                string strSARValue;
                string strDARValue;
                if (findPrefixAndRatioInString(strText, "sar", out strSARValue))
                {
                    s.SampleAspectRatio = strSARValue;
                }
                if (findPrefixAndRatioInString(strText, "dar", out strDARValue))
                {
                    s.DisplayAspectRatio = strDARValue;
                }
            }

            //Determine Framerate (ffmpeg's "tbr" value)
            string frameRate;
            if (findFrameRate(strText, "tbr", out frameRate))
            {
                s.frameRate = frameRate;
            }

            // Now split into comma-delimited strings
            List <string> subParts = splitByDelimiter(strText, ",");
            if (subParts.Count < 1)
            {
                return(false);
            }
            // The first subpart is always the codec type
            s.CodecName = subParts[0];

            // Find a size in the remaining strings; the loop keeps going, so
            // the LAST matching "WxH" token wins if several are present.
            //bool found;
            int Width;
            int Height;
            foreach (string subpart in subParts)
            {
                if (findSizeInString(subpart, out Width, out Height))
                {
                    s.Width = Width;
                    s.Height = Height;
                    //found = true;
                    continue;
                }
            }
            // if (!found) return false; // Un-comment if we ever REQUIRE a video size
        }
        return(true);
    }
    catch
    {
        return(false);
    }
}
// Inspects the probed AVStreams and builds the ffmpeg "-map" arguments:
// one video stream (first non-MJPEG) and one audio stream (the preferred
// index if requested, otherwise the sole stream / last stereo stream /
// first audio stream). Sets ProbeSuccess/ProbeReport and calls EndProbe().
void AnalyseStreams()
{
    SendDebugMessage("Analysing streams.");
    mapArguments = new CommandArguments();

    // Okay, let's look at what we got...
    // NOT: There must be at least one audio stream AND one video stream for us
    // to add mapping parameters to ffmpeg
    // (i.e. this variant only bails when there is NEITHER audio NOR video)
    if ((AVStreams == null) || ((AVAudioStreams.Count < 1) && (AVVideoStreams.Count < 1)) )
    {
        ProbeSuccess = false;
        ProbeReport = "Not enough audio or video streams detected to add mappings.";
        EndProbe();
        return;
    }

    // We already know there's at least one video stream
    // Use the first video stream
    if (AVVideoStreams.Count > 0)
    {
        AVStream CurrentAvs = null;
        // Walk the list, skipping MJPEG (thumbnail) streams.
        // NOTE(review): if EVERY video stream is MJPEG, CurrentAvs ends up as
        // the last MJPEG stream and is mapped anyway — confirm that is intended.
        foreach (AVStream avs in AVVideoStreams)
        {
            CurrentAvs = avs;
            if (avs.CodecName.Equals("mjpeg"))
            {
                //skip
            }
            else
            {
                break;
            }
        }
        SendDebugMessage("MediaProbe: Adding first non MJPEG video stream 0:" + CurrentAvs.StreamIndex.ToString() + " (" + CurrentAvs.CodecTag + ")");
        AddStreamToMap(CurrentAvs);
    }

    // Do we have a preferred audio index?
    if ((UsePreferredAudioStreamIndex) && (AVStreamByIndex(PreferredAudioStreamIndex) != null))
    {
        AVStream ast = AVStreamByIndex(PreferredAudioStreamIndex);
        SendDebugMessage("MediaProbe: Adding requested multiple track audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
        AddStreamToMap(ast);
    }
    else
    {
        // TODO: For MP2, different behavioru with AC3?
        // We already know there's at least one audio stream
        if (AVAudioStreams.Count == 1) // If there's just one audio stream, use it.
        {
            AddStreamToMap(AVAudioStreams[0]);
        }
        else if (AVAudioStreamsStereo.Count > 0) // If there are some stereo streams
        {
            if (AVAudioStreamsStereo.Count == 1) // If there's just one stereo audio stream, use it
            {
                AVStream ast = AVAudioStreamsStereo[0];
                SendDebugMessage("MediaProbe: Adding only multiple track audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
                AddStreamToMap(ast);
            }
            else
            {
                // There are multiple stereo streams: add the LAST stereo stream
                AVStream ast = AVAudioStreamsStereo[AVAudioStreamsStereo.Count - 1];
                SendDebugMessage("MediaProbe: Adding last multiple track audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
                AddStreamToMap(ast);
            }
        }
        else
        {
            // There are no stereo streams: just add the first audio stream
            if (AVAudioStreams.Count > 0)
            {
                AVStream ast = AVAudioStreams[0];
                SendDebugMessage("MediaProbe: No multiple track audio streams found, adding first audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
                AddStreamToMap(ast);
            }
        }
    }

    // Use the first subtitle TODO for testing purposes
    //if (AVSubtitleStreams.Count > 0)
    //{
    //    SendDebugMessage("MediaProbe: Adding second subtitle stream 0:" + AVSubtitleStreams[0].StreamIndex.ToString() + " (" + AVSubtitleStreams[0].CodecTag + ")");
    //    AddStreamToMap(AVSubtitleStreams[0]);
    //}

    SendDebugMessage("MediaProbe: Analysis complete. Mappings are: " + mapArguments.ToString());

    // It's a success!
    ProbeReport = "Probe finished OK.";
    ProbeSuccess = true;
    ProbeFinished = true;
    EndProbe();
}
// Walks the captured ffprobe-style output, building AVStream objects from
// the "[stream] ... [/stream]" blocks: each key=value line between the
// block markers updates the current stream (index, channels, codec_tag,
// codec_type), and the finished stream is appended to AVStreams.
void processOutputBuffer()
{
    foreach (string s in outputBuffer)
    {
        string txtOutput = s.Trim().ToLowerInvariant();
        if (txtOutput.Length < 3) continue;

        if (txtOutput.Equals(kBeginStreamBlock))
        {
            currentStream = new AVStream();
        }
        if (currentStream == null) continue; // ignore if we're not in a stream block
        if (txtOutput.Equals(kEndStreamBlock))
        {
            AVStreams.Add(currentStream);
            currentStream = null; // does this reset my pointer, or the stored one??
        }

        // From this point we're only interested in Key=Value lines
        if (!txtOutput.Contains("=")) continue;
        // FIX: split into at most 2 parts so a value containing '=' is kept
        // intact; the old unbounded Split truncated it at the second '='.
        List<string> outputParts = txtOutput.Split(new char[] { '=' }, 2).ToList();
        if (outputParts.Count < 2) continue; // not formatted properly
        if (outputParts[0].Length < 1) continue; // no first part
        if (outputParts[1].Length < 1) continue; // no second part

        switch (outputParts[0])
        {
            case "index":
                int index;
                if (int.TryParse(outputParts[1], out index))
                    currentStream.StreamIndex = index;
                else
                    SendDebugMessage("Media Probe: Cannot parse index of stream: " + outputParts[1]);
                break;
            case "channels":
                int nChannels;
                if (int.TryParse(outputParts[1], out nChannels))
                    currentStream.Channels = nChannels;
                break;
            case "codec_tag":
                currentStream.CodecTag = outputParts[1];
                break;
            case "codec_type":
                if (outputParts[1].Equals("audio")) currentStream.CodecType = AVCodecType.Audio;
                if (outputParts[1].Equals("video")) currentStream.CodecType = AVCodecType.Video;
                break;
            default:
                break;
        }
    }
}
// Inspects the probed AVStreams and builds the ffmpeg "-map" arguments:
// the first video stream plus one audio stream (the sole stream, the last
// stereo stream, or the first audio stream as fallback). Requires BOTH an
// audio and a video stream; sets ProbeSuccess/ProbeReport and calls
// EndProbe() in every path.
void AnalyseStreams()
{
    SendDebugMessage("Analysing streams.");
    mapArguments = new CommandArguments();

    // Okay, let's look at what we got...
    // There must be at least one audio stream AND one video stream for us to
    // add mapping parameters to ffmpeg
    if ((AVStreams == null) || (AVAudioStreams.Count < 1) || (AVVideoStreams.Count < 1) )
    {
        ProbeSuccess = false;
        ProbeReport = "Not enough audio or video streams detected to add mappings.";
        EndProbe();
        return;
    }

    // We already know there's at least one video stream
    // Use the first video stream
    if (AVVideoStreams.Count > 0)
    {
        SendDebugMessage("MediaProbe: Adding first video stream 0:" + AVVideoStreams[0].StreamIndex.ToString() + " (" + AVVideoStreams[0].CodecTag + ")");
        AddStreamToMap(AVVideoStreams[0]);
    }

    // TODO: For MP2, different behaviour with AC3?
    // We already know there's at least one audio stream
    if (AVAudioStreams.Count == 1) // If there's just one audio stream, use it.
    {
        AddStreamToMap(AVAudioStreams[0]);
    }
    else if (AVAudioStreamsStereo.Count > 0) // If there are some stereo streams
    {
        if (AVAudioStreamsStereo.Count == 1) // If there's just one stereo audio stream, use it
        {
            AVStream ast = AVAudioStreamsStereo[0];
            SendDebugMessage("MediaProbe: Adding only stereo audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
            AddStreamToMap(ast);
        }
        else
        {
            // There are multiple stereo streams: add the LAST stereo stream
            AVStream ast = AVAudioStreamsStereo[AVAudioStreamsStereo.Count - 1];
            SendDebugMessage("MediaProbe: Adding last stereo audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
            AddStreamToMap(ast);
        }
    }
    else
    {
        // There are no stereo streams: just add the first audio stream
        AVStream ast = AVAudioStreams[0];
        SendDebugMessage("MediaProbe: No stereo audio streams found, adding first audio stream 0:" + ast.StreamIndex.ToString() + " (" + ast.CodecTag + ")");
        AddStreamToMap(ast);
    }

    SendDebugMessage("MediaProbe: Analysis complete. Mappings are: " + mapArguments.ToString());

    // It's a success!
    ProbeReport = "Probe finished OK.";
    ProbeSuccess = true;
    ProbeFinished = true;
    EndProbe();
}
// Walks the captured ffprobe-style output, building AVStream objects from
// the "[stream] ... [/stream]" blocks: each key=value line between the
// block markers updates the current stream (index, channels, codec_tag,
// codec_type), and the finished stream is appended to AVStreams.
void processOutputBuffer()
{
    foreach (string s in outputBuffer)
    {
        string txtOutput = s.Trim().ToLowerInvariant();
        if (txtOutput.Length < 3)
        {
            continue;
        }
        if (txtOutput.Equals(kBeginStreamBlock))
        {
            currentStream = new AVStream();
        }
        if (currentStream == null)
        {
            continue; // ignore if we're not in a stream block
        }
        if (txtOutput.Equals(kEndStreamBlock))
        {
            AVStreams.Add(currentStream);
            currentStream = null; // does this reset my pointer, or the stored one??
        }

        // From this point we're only interested in Key=Value lines
        if (!txtOutput.Contains("="))
        {
            continue;
        }
        // FIX: split into at most 2 parts so a value containing '=' is kept
        // intact; the old unbounded Split truncated it at the second '='.
        List <string> outputParts = txtOutput.Split(new char[] { '=' }, 2).ToList();
        if (outputParts.Count < 2)
        {
            continue; // not formatted properly
        }
        if (outputParts[0].Length < 1)
        {
            continue; // no first part
        }
        if (outputParts[1].Length < 1)
        {
            continue; // no second part
        }
        switch (outputParts[0])
        {
            case "index":
                int index;
                if (int.TryParse(outputParts[1], out index))
                {
                    currentStream.StreamIndex = index;
                }
                else
                {
                    SendDebugMessage("Media Probe: Cannot parse index of stream: " + outputParts[1]);
                }
                break;
            case "channels":
                int nChannels;
                if (int.TryParse(outputParts[1], out nChannels))
                {
                    currentStream.Channels = nChannels;
                }
                break;
            case "codec_tag":
                currentStream.CodecTag = outputParts[1];
                break;
            case "codec_type":
                if (outputParts[1].Equals("audio"))
                {
                    currentStream.CodecType = AVCodecType.Audio;
                }
                if (outputParts[1].Equals("video"))
                {
                    currentStream.CodecType = AVCodecType.Video;
                }
                break;
            default:
                break;
        }
    }
}