Example #1
 public void SetFormat(StreamFormat format)
 {
     if (this._client != null)
     {
         this._client.StreamFormat = format;
     }
 }
Example #2
        public void NarrowFileTest()
        {
            var narrowStream = StreamFormat.CreateFromFile("narrow.json");

            Assert.NotNull(narrowStream);
            Assert.True(narrowStream is NarrowFormat);
        }
Example #3
        public void WideFileTest()
        {
            var wideStream = StreamFormat.CreateFromFile("wide.json");

            Assert.NotNull(wideStream);
            Assert.True(wideStream is StreamFormat);
        }
Example #4
        public void LoadCSVFileWideFormatChunkedTest()
        {
            var apiHelper = new ApiHelper(account, token, api);
            var files     = from file in Directory.EnumerateFiles(Path.Combine(rootPath, "WideFormat"), "*Medium5.csv", SearchOption.AllDirectories) select file;
            var stream    = apiHelper.SetDataStream(streamName, "GMT", MODELTYPE.SLIDING_WINDOW);
            // -d '{ "jobType": "INGESTDATA", "status": "CREATED", "datastream": "1554938538981549", "spec": { "format": {"entityIdentifier": "person", "timeIdentifier": "time", "timeFormat": "YYYY-MMDD HH:mm:ss.SSS", "timeZone": "America/Los_Angeles" } } }
            var sf = new StreamFormat()
            {
                entityIdentifier = "entity",
                timeIdentifier   = "timestamp",
                timeFormat       = "YYYY-MM-DD HH:mm:ss",
                timeZone         = "Europe/London"
            };
            var spec = new IngestSpec <StreamFormat>()
            {
                format = sf
            };
            var job = new Job <StreamFormat>()
            {
                jobType    = JOBTYPE.INGESTDATA,
                status     = JOBSTATUS.CREATED,
                datastream = (string)stream.id,
                spec       = spec
            };

            foreach (var file in files)
            {
                var response = apiHelper.LoadCSVFile(file, job, 6U);
                Assert.NotNull(response);
                Assert.Equal(response[response.Count - 1].status, JOBSTATUS.COMPLETED.ToString());
            }
        }
Example #5
        public void LoadSmallCSVFilesWideFormatTest()
        {
            var apiHelper  = new ApiHelper(account, token, api);
            var dataStream = apiHelper.SetDataStream(streamName, "GMT", MODELTYPE.SLIDING_WINDOW);
            // -d '{ "jobType": "INGESTDATA", "status": "CREATED", "datastream": "1554938538981549", "spec": { "format": {"entityIdentifier": "person", "timeIdentifier": "time", "timeFormat": "YYYY-MMDD HH:mm:ss.SSS", "timeZone": "America/Los_Angeles" } } }
            var sf = new StreamFormat()
            {
                entityIdentifier = "entity",
                timeIdentifier   = "timestamp",
                timeFormat       = "YYYY-MM-DD HH:mm:ss",
                timeZone         = "Europe/London"
            };
            var spec = new IngestSpec <StreamFormat>()
            {
                format = sf
            };
            var job = new Job <StreamFormat>()
            {
                jobType    = JOBTYPE.INGESTDATA,
                status     = JOBSTATUS.CREATED,
                datastream = dataStream.id,
                spec       = spec
            };

            var response = apiHelper.LoadCSVFiles(filePaths, job, 3);

            Assert.NotNull(response);
            Assert.True(response.Count > (filePaths.Count + 2));
        }
Example #6
        public void NarrowBatchFileTest()
        {
            var batchStream = StreamFormat.CreateFromFile("narrow_batch.json");

            Assert.NotNull(batchStream);
            Assert.True(batchStream is NarrowBatchFormat);
        }
Example #7
        public void WideBatchFileTest()
        {
            var batchStream = StreamFormat.CreateFromFile("wide_batch.json");

            Assert.NotNull(batchStream);
            Assert.True(batchStream is BatchFormat);
        }
Example #8
        private static bool AreBytesReverse(StreamFormat format)
        {
            switch (format)
            {
            case StreamFormat.Byte:
            case StreamFormat.SByte:
                return(false);

            default:
                return((((int)format & 1) == 1) != BitConverter.IsLittleEndian);
            }
        }
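The default branch XORs the format's low bit with the host byte order. Assuming odd enum values mark little-endian multi-byte encodings (an assumption; the enum layout is not shown here), the method answers whether the stream's byte order differs from the machine's, i.e. whether a byte swap is needed. A minimal sketch with that assumption spelled out (NeedsByteSwap is an illustrative name):

        // Sketch only: assumes odd StreamFormat values denote little-endian multi-byte formats.
        private static bool NeedsByteSwap(StreamFormat format)
        {
            bool formatIsLittleEndian = ((int)format & 1) == 1;           // assumption about the enum layout
            return formatIsLittleEndian != BitConverter.IsLittleEndian;   // swap when stream and host disagree
        }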
Example #9
        /// <summary>
        /// Check if audio stream is Blu-Ray compatible
        /// </summary>
        /// <param name="aud"><see cref="AudioInfo"/></param>
        /// <returns>true if stream is Blu-Ray compatible, false otherwise</returns>
        public bool CheckAudioBluRayCompatible(AudioInfo aud)
        {
            var ext = StreamFormat.GetFormatExtension(aud.Format, aud.FormatProfile, false);

            var compat = !(ext != "ac3" &&
                           ext != "eac3" &&
                           ext != "dts" &&
                           ext != "dtshd" &&
                           ext != "mp2" &&
                           ext != "truehd");

            return(compat);
        }
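The negated chain of inequalities above is simply a whitelist membership test; an equivalent positive formulation reads more directly. A sketch using the same extension strings (the set name is illustrative):

        // Equivalent whitelist form of the Blu-Ray check above (sketch).
        private static readonly HashSet<string> BluRayAudioExtensions =
            new HashSet<string> { "ac3", "eac3", "dts", "dtshd", "mp2", "truehd" };

        public bool CheckAudioBluRayCompatible(AudioInfo aud)
        {
            var ext = StreamFormat.GetFormatExtension(aud.Format, aud.FormatProfile, false);
            return BluRayAudioExtensions.Contains(ext);
        }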
Example #10
        public RiffChunkStrf(string aId, UInt32 aSize, ByteArray aByteArray, RiffChunkList aParent)
            : base(aId, aSize, aByteArray, aParent)
        {
            // A 30-byte strf payload is an MPEGLAYER3WAVEFORMAT header (audio stream)
            if (size == 30)
            {
                streamFormat = new Mpeglayer3waveformat(aByteArray, informationList);
            }

            // A 40-byte strf payload is a BITMAPINFOHEADER (video stream)
            if (size == 40)
            {
                streamFormat = new Bitmapinfo(aByteArray, informationList);
            }
        }
Example #11
        /// <summary>
        /// Check if audio stream is DVD compatible
        /// </summary>
        /// <param name="aud"><see cref="AudioInfo"/></param>
        /// <returns>true if stream is DVD compatible, false otherwise</returns>
        public bool CheckAudioDvdCompatible(AudioInfo aud)
        {
            var ext = StreamFormat.GetFormatExtension(aud.Format, aud.FormatProfile, false);

            var compat = true;

            Log.Info("Check if audio is compatible with DVD Spec");
            Log.Info($"Format: {aud.Format}, Profile: {aud.FormatProfile}");
            Log.Info($"Bitrate: {aud.Bitrate:0}, Samplerate: {aud.SampleRate:0}, Channel Count: {aud.ChannelCount:0}");

            if (ext != "ac3")
            {
                Log.Info("Format is not AC3");
                compat = false;
            }

            if (compat)
            {
                if (ext == "ac3")
                {
                    if (aud.Bitrate > 448000)
                    {
                        Log.Info("Bitrate is higher than 448kbit/s");
                        compat = false;
                    }
                }
            }

            if (compat)
            {
                if (aud.ChannelCount > 6)
                {
                    Log.Info("This channel configuration is not supported");
                    compat = false;
                }
            }

            if (!compat)
            {
                return(false);
            }
            if (aud.SampleRate == 48000)
            {
                return(true);
            }

            Log.Info("Samplerate != 48000Hz");

            return(false);
        }
Example #12
        /// <summary>
        /// check if audio stream is dvd compatible
        /// </summary>
        /// <param name="aud"></param>
        /// <returns>true if stream is dvd compatible, false otherwise</returns>
        public static bool CheckAudioDvdCompatible(AudioInfo aud)
        {
            string ext = StreamFormat.GetFormatExtension(aud.Format, aud.FormatProfile, false);

            bool compat = true;

            Log.Info("Check if audio is compatible with DVD Spec");
            Log.InfoFormat("Format: {0:s}, Profile: {1:s}", aud.Format, aud.FormatProfile);
            Log.InfoFormat("Bitrate: {0:g}, Samplerate: {1:g}, Channel Count: {2:g}", aud.Bitrate, aud.SampleRate,
                           aud.ChannelCount);

            if (ext != "ac3")
            {
                Log.Info("Format is not AC3");
                compat = false;
            }

            if (compat)
            {
                if (ext == "ac3")
                {
                    if (aud.Bitrate > 448000)
                    {
                        Log.InfoFormat("Bitrate is higher than 448kbit/s");
                        compat = false;
                    }
                }
            }

            if (compat)
            {
                if (aud.ChannelCount > 6)
                {
                    Log.InfoFormat("This channel configuration is not supported");
                    compat = false;
                }
            }

            if (compat)
            {
                if (aud.SampleRate != 48000)
                {
                    Log.InfoFormat("Samplerate != 48000Hz");
                    compat = false;
                }
            }

            return(compat);
        }
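Both DVD-compatibility variants enforce the same four constraints: AC3 codec, at most 448 kbit/s, at most 6 channels, and a 48 kHz sample rate. Stripped of logging, the logic collapses to a single predicate; a sketch:

        // The DVD rules from the two methods above, without the logging (sketch).
        public static bool IsDvdCompatibleAudio(AudioInfo aud)
        {
            var ext = StreamFormat.GetFormatExtension(aud.Format, aud.FormatProfile, false);

            return ext == "ac3"
                   && aud.Bitrate <= 448000
                   && aud.ChannelCount <= 6
                   && aud.SampleRate == 48000;
        }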
Example #13
        private string GenerateCommandLine()
        {
            var sb = new StringBuilder();

            sb.Append(DefaultParams);

            _subtitle  = _currentTask.SubtitleStreams[_currentTask.StreamId];
            _inputFile = _subtitle.TempFile;
            var    ext          = StreamFormat.GetFormatExtension(_subtitle.Format, "", true);
            string formattedExt = $"raw.{ext}";

            _outputFile = FileSystemHelper.CreateTempFile(_appConfig.TempPath, _inputFile, formattedExt);

            sb.Append($"tracks \"{_inputFile}\" 0:\"{_outputFile}\" ");

            return(sb.ToString());
        }
Example #14
        public UDPServer()
        {
            streamFormat = new StreamFormat();
            streamFormat.TimeStampMilliseconds = true;
            streamFormat.GazePosition          = true;

            // Listen for changes in settings, start/stop
            GTSettings.Settings.Instance.Network.PropertyChanged += Network_PropertyChanged;

            try
            {
                socket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
            }
            catch (Exception ex)
            {
                ErrorLogger.ProcessException(ex, false);
            }
        }
Example #15
        /// <summary>
        /// Generates commandline for the eac3to executable
        /// </summary>
        /// <param name="jobInfo">Job entry to process</param>
        /// <returns>commandline arguments</returns>
        public static string GenerateDemuxLine(ref EncodeInfo jobInfo)
        {
            StringBuilder sb = new StringBuilder();

            string inputFile;
            int    startstream = 0;
            string ext;
            string formattedExt;

            // generate the output filename depending on the given input type
            if (jobInfo.Input == InputType.InputDvd)
            {
                inputFile = jobInfo.DumpOutput;
                jobInfo.VideoStream.TempFile = Path.ChangeExtension(jobInfo.DumpOutput, "demuxed.mkv");
            }
            else
            {
                inputFile = string.IsNullOrEmpty(jobInfo.TempInput) ? jobInfo.InputFile : jobInfo.TempInput;

                jobInfo.VideoStream.TempFile = string.IsNullOrEmpty(jobInfo.TempInput)
                                                   ? Processing.CreateTempFile(
                    string.IsNullOrEmpty(jobInfo.TempOutput)
                                                           ? jobInfo.BaseName
                                                           : jobInfo.TempOutput, "demuxed.video.mkv")
                                                   : Processing.CreateTempFile(jobInfo.TempInput, "demuxed.video.mkv");
            }

            sb.AppendFormat("\"{0}\" {1:g}:\"{2}\" ", inputFile, jobInfo.VideoStream.StreamId + startstream,
                            jobInfo.VideoStream.TempFile);

            // on stereo sources, decide if stream for right eye should be extracted
            if (jobInfo.StereoVideoStream.RightStreamId > 0 && jobInfo.EncodingProfile.StereoType != StereoEncoding.None)
            {
                jobInfo.StereoVideoStream.RightTempFile = Processing.CreateTempFile(jobInfo.VideoStream.TempFile,
                                                                                    "right.h264");
                jobInfo.StereoVideoStream.LeftTempFile = Processing.CreateTempFile(jobInfo.VideoStream.TempFile,
                                                                                   "left.h264");
                sb.AppendFormat("{0:g}:\"{1}\" {2:g}:\"{3}\" ", jobInfo.StereoVideoStream.LeftStreamId,
                                jobInfo.StereoVideoStream.LeftTempFile, jobInfo.StereoVideoStream.RightStreamId,
                                jobInfo.StereoVideoStream.RightTempFile);
            }

            // if input source is dvd, increment stream id to match eac3to stream counting
            if (jobInfo.Input == InputType.InputDvd)
            {
                startstream++;
            }

            // process all audio streams
            foreach (AudioInfo item in jobInfo.AudioStreams)
            {
                // get file extension for selected stream based on format and format profile
                ext = StreamFormat.GetFormatExtension(item.Format, item.FormatProfile, false);
                string core = string.Empty;

                // extract only core audio data for dvd output
                if (jobInfo.EncodingProfile.OutFormat == OutputType.OutputDvd && jobInfo.AudioProfile.Type == ProfileType.Copy)
                {
                    if (string.CompareOrdinal(ext, "dtshd") == 0)
                    {
                        core = "-core";
                        ext  = "dts";
                    }
                    else if (string.CompareOrdinal(ext, "truehd") == 0)
                    {
                        core = "-core";
                        ext  = "ac3";
                    }
                }
                formattedExt = string.Format("demuxed.audio.{0:g}.{1}.{2}", item.StreamId, item.LangCode, ext);

                switch (jobInfo.Input)
                {
                case InputType.InputDvd:
                    item.TempFile = Processing.CreateTempFile(jobInfo.DumpOutput, formattedExt);
                    break;

                default:
                    item.TempFile = string.IsNullOrEmpty(jobInfo.TempInput)
                            ? Processing.CreateTempFile(
                        string.IsNullOrEmpty(jobInfo.TempOutput)
                                    ? jobInfo.BaseName
                                    : jobInfo.TempOutput, formattedExt)
                            : Processing.CreateTempFile(jobInfo.TempInput, formattedExt);
                    break;
                }

                sb.AppendFormat("{0:g}:\"{1}\" {2} ", item.Id + startstream, item.TempFile, core);
            }

            // process all subtitle streams
            foreach (SubtitleInfo item in jobInfo.SubtitleStreams)
            {
                ext          = StreamFormat.GetFormatExtension(item.Format, String.Empty, false);
                formattedExt = string.Format("demuxed.subtitle.{0:g}.{1}.{2}", item.StreamId, item.LangCode, ext);

                switch (jobInfo.Input)
                {
                case InputType.InputDvd:
                    item.TempFile = Processing.CreateTempFile(jobInfo.DumpOutput, formattedExt);
                    break;

                default:
                    item.TempFile = string.IsNullOrEmpty(jobInfo.TempInput)
                            ? Processing.CreateTempFile(
                        string.IsNullOrEmpty(jobInfo.TempOutput)
                                    ? jobInfo.BaseName
                                    : jobInfo.TempOutput, formattedExt)
                            : Processing.CreateTempFile(jobInfo.TempInput, formattedExt);
                    break;
                }

                sb.AppendFormat("{0:g}:\"{1}\" ", item.Id + startstream, item.TempFile);
                item.RawStream = true;
            }

            // add logfile to tempfiles list for deletion
            jobInfo.TempFiles.Add(
                jobInfo.VideoStream.TempFile.Substring(0, jobInfo.VideoStream.TempFile.LastIndexOf('.')) + " - Log.txt");

            if (jobInfo.Input == InputType.InputDvd)
            {
                jobInfo.TempFiles.Add(jobInfo.DumpOutput);
            }

            sb.Append("-progressNumbers -no2ndpass ");

            return(sb.ToString());
        }
Example #16
        public void LoadParams(string[] arguments)
        {
            var cmdline = new aCommandLine(arguments);
            aCommandLineParameter parameter;

            // input
            parameter = mareep.GetLastCmdParam(cmdline, "-input");

            if (parameter == null)
            {
                mareep.WriteError("WAVE: missing -input parameter.");
            }
            else if (parameter.Count == 0)
            {
                mareep.WriteError("WAVE: missing argument for -input parameter.");
            }

            mInput       = parameter[0];
            mInputFormat = GetFormat(Path.GetExtension(mInput));

            if (mInputFormat == IOFormat.Raw)
            {
                if (parameter.Count < 2)
                {
                    mareep.WriteError("WAVE: missing format for raw input.");
                }

                if (!Enum.TryParse(parameter[1], true, out mRawInputFormat))
                {
                    mareep.WriteError("WAVE: bad format '{0}' for raw input.", parameter[1]);
                }
            }

            // output
            parameter = mareep.GetLastCmdParam(cmdline, "-output");

            if (parameter == null)
            {
                mareep.WriteError("WAVE: missing -output parameter.");
            }
            else if (parameter.Count == 0)
            {
                mareep.WriteError("WAVE: missing argument for -output parameter.");
            }

            mOutput       = parameter[0];
            mOutputFormat = GetFormat(Path.GetExtension(mOutput));

            if (mOutputFormat == IOFormat.Raw)
            {
                if (parameter.Count < 2)
                {
                    mareep.WriteError("WAVE: missing format for raw output.");
                }

                if (!Enum.TryParse(parameter[1], true, out mRawOutputFormat))
                {
                    mareep.WriteError("WAVE: bad format '{0}' for raw output.", parameter[1]);
                }
            }
            else if (mOutputFormat == IOFormat.AfcStream)
            {
                if (parameter.Count < 2)
                {
                    mStreamFormat = StreamFormat.Adpcm;
                }
                else if (!Enum.TryParse(parameter[1], true, out mStreamFormat))
                {
                    mareep.WriteError("WAVE: bad stream format '{0}'.", parameter[1]);
                }
            }

            // mode
            if (mInputFormat == IOFormat.Raw && mOutputFormat == IOFormat.Raw)
            {
                mMode = Mode.RawToRaw;
            }
            else if (mInputFormat == IOFormat.Raw && mOutputFormat == IOFormat.MicrosoftWave)
            {
                mMode = Mode.RawToWav;
            }
            else if (mInputFormat == IOFormat.MicrosoftWave && mOutputFormat == IOFormat.Raw)
            {
                mMode = Mode.WavToRaw;
            }
            else if (mInputFormat == IOFormat.MicrosoftWave && mOutputFormat == IOFormat.AfcStream)
            {
                mMode = Mode.WavToStream;
            }
            else if (mInputFormat == IOFormat.AfcStream && mOutputFormat == IOFormat.MicrosoftWave)
            {
                mMode = Mode.StreamToWav;
            }
            else
            {
                mareep.WriteError("WAVE: unsupported combination of input and output formats.");
            }

            // mix mode
            parameter = mareep.GetLastCmdParam(cmdline, "-mix-mode");

            if (parameter != null)
            {
                if (parameter.Count < 1)
                {
                    mareep.WriteError("WAVE: bad -mix-mode parameter.");
                }

                if (!Enum.TryParse(parameter[0], true, out mMixerMode))
                {
                    mareep.WriteError("WAVE: bad mixer mode '{0}' in -mix-mode parameter.", parameter[0]);
                }
            }

            // sample rate
            parameter = mareep.GetLastCmdParam(cmdline, "-sample-rate");

            if (parameter != null)
            {
                if (parameter.Count < 1)
                {
                    mareep.WriteError("WAVE: missing argument for -sample-rate parameter.");
                }

                if (!Int32.TryParse(parameter[0], out mRawSampleRate) || mRawSampleRate < 0)
                {
                    mareep.WriteError("WAVE: bad sample rate '{0}'.", parameter[0]);
                }
            }
            else if (mInputFormat == IOFormat.Raw && mOutputFormat != IOFormat.Raw)
            {
                mareep.WriteError("WAVE: missing -sample-rate parameter for raw input.");
            }

            // frame rate
            parameter = mareep.GetLastCmdParam(cmdline, "-frame-rate");

            if (parameter != null)
            {
                if (parameter.Count < 1)
                {
                    mareep.WriteError("WAVE: missing argument for -frame-rate parameter.");
                }

                if (!Int32.TryParse(parameter[0], out mStreamFrameRate) || mStreamFrameRate < 0)
                {
                    mareep.WriteError("WAVE: bad frame rate '{0}'.", parameter[0]);
                }
            }
            else
            {
                mStreamFrameRate = 30;
            }

            // loop
            parameter = mareep.GetLastCmdParam(cmdline, "-loop");

            if (parameter != null)
            {
                mStreamLoop = true;

                if (parameter.Count < 1)
                {
                    mareep.WriteError("WAVE: missing argument for -loop parameter.");
                }

                if (!Int32.TryParse(parameter[0], out mStreamLoopStart) || mStreamLoopStart < 0)
                {
                    mareep.WriteError("WAVE: bad loop value '{0}'.", parameter[0]);
                }
            }
        }
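For reference, a hedged illustration of argument lists this parser accepts. The flag names come from the code above; the file names, the "pcm16" raw format value and the target object name waveErrand are hypothetical:

        // Hypothetical invocations (sketch); only the flag names are taken from the parser above.
        // WAV -> AFC stream, ADPCM-encoded, looping from sample 0:
        waveErrand.LoadParams(new[] { "-input", "song.wav", "-output", "song.afc", "adpcm", "-loop", "0" });

        // Raw samples -> WAV; raw input needs an explicit format and a sample rate:
        waveErrand.LoadParams(new[] { "-input", "voice.raw", "pcm16", "-output", "voice.wav", "-sample-rate", "22050" });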
Example #17
        ////////////////////////////////////////////////////////////////////////////////////////////////////////////////

        internal StreamDescription(StreamFormat format, int offset)
        {
            this.Format = format;
            this.Offset = offset;
        }
Example #18
        /// <summary>
        /// Generates AviSynth script used for audio encoding
        /// </summary>
        /// <param name="inputFile">Path to input file</param>
        /// <param name="inFormat">Format of input file</param>
        /// <param name="inFormatProfile">Format profile of input file</param>
        /// <param name="inChannels">Channel count of input file</param>
        /// <param name="outChannels">Target channel count</param>
        /// <param name="inSampleRate">Samplerate of input file</param>
        /// <param name="outSampleRate">Target samplerate</param>
        /// <returns>Path to AviSynth script</returns>
        public static string GenerateAudioScript(string inputFile, string inFormat, string inFormatProfile,
                                                 int inChannels, int outChannels, int inSampleRate,
                                                 int outSampleRate)
        {
            StringBuilder sb = new StringBuilder();

            string ext = StreamFormat.GetFormatExtension(inFormat, inFormatProfile, false);

            switch (ext)
            {
            case "ac3":
                sb.AppendLine(ImportNicAudio());
                sb.AppendFormat(AppSettings.CInfo, "NicAC3Source(\"{0}\")", inputFile);
                break;

            case "dts":
            case "dtshd":
                sb.AppendLine(ImportNicAudio());
                sb.AppendFormat(AppSettings.CInfo, "NicDTSSource(\"{0}\")", inputFile);
                break;

            case "mp2":
            case "mp3":
            case "mpa":
                sb.AppendLine(ImportNicAudio());
                sb.AppendFormat(AppSettings.CInfo, "NicMPG123Source(\"{0}\")", inputFile);
                break;

            default:
                sb.AppendLine(ImportFFMPEGSource());
                sb.AppendFormat(AppSettings.CInfo, "FFAudioSource(\"{0}\")", inputFile);
                break;
            }
            sb.AppendLine();

            if (inChannels > outChannels && outChannels > 0)
            {
                sb.AppendLine(string.Format(AppSettings.CInfo, "Import(\"{0:s}\")",
                                            Path.Combine(AppSettings.AppPath, "AvsPlugins", "audio",
                                                         "ChannelDownMix.avsi")));

                switch (inChannels)
                {
                case 3:
                    switch (outChannels)
                    {
                    case 2:
                        sb.AppendLine("Dmix3Stereo()");
                        break;

                    case 4:
                    case 3:
                        sb.AppendLine("Dmix3Dpl()");
                        break;

                    case 1:
                        sb.AppendLine("ConvertToMono()");
                        break;
                    }
                    break;

                case 4:
                    switch (outChannels)
                    {
                    case 2:
                        sb.AppendLine("Dmix4qStereo()");
                        break;

                    case 3:
                        sb.AppendLine("Dmix4qDpl()");
                        break;

                    case 4:
                        sb.AppendLine("Dmix4qDpl2()");
                        break;

                    case 1:
                        sb.AppendLine("ConvertToMono()");
                        break;
                    }
                    break;

                case 5:
                    switch (outChannels)
                    {
                    case 2:
                        sb.AppendLine("Dmix5Stereo()");
                        break;

                    case 3:
                        sb.AppendLine("Dmix5Dpl()");
                        break;

                    case 4:
                        sb.AppendLine("Dmix5Dpl2()");
                        break;

                    case 1:
                        sb.AppendLine("ConvertToMono()");
                        break;
                    }
                    break;

                case 6:
                case 7:
                case 8:
                case 9:
                case 10:
                    switch (outChannels)
                    {
                    case 2:
                        sb.AppendLine("Dmix6StereoLfe()");
                        break;

                    case 3:
                        sb.AppendLine("Dmix6DplLfe()");
                        break;

                    case 4:
                        sb.AppendLine("Dmix6Dpl2Lfe()");
                        break;

                    case 1:
                        sb.AppendLine("ConvertToMono()");
                        break;

                    case 6:
                        sb.AppendLine("GetChannel(1,2,3,4,5,6)");
                        break;
                    }
                    break;
                }
            }

            if (inSampleRate != outSampleRate && outSampleRate > 0)
            {
                sb.AppendFormat(AppSettings.CInfo, "SSRC({0},fast=False)", outSampleRate);
                sb.AppendLine();
            }

            sb.AppendLine("return last");

            return(WriteScript(sb.ToString()));
        }
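A hedged usage sketch: requesting a stereo, 44.1 kHz script for a 6-channel, 48 kHz source. The path and format descriptors are illustrative; GetFormatExtension resolves them to the switch cases above:

        // Sketch: downmix 5.1 to stereo and resample 48 kHz -> 44.1 kHz (illustrative arguments).
        string avsScript = GenerateAudioScript(@"D:\temp\movie.audio.ac3", "AC-3", "",
                                               inChannels: 6, outChannels: 2,
                                               inSampleRate: 48000, outSampleRate: 44100);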
Example #19
        private string GenerateCommandLine()
        {
            var sb = new StringBuilder();

            if (_currentTask.Input == InputType.InputDvd)
            {
                sb.Append("-probesize 2147483647 -analyzeduration 2147483647 -fflags genpts ");
            }

            sb.Append($"-i \"{_inputFile}\" ");

            string baseName;
            string ext;

            var formattedExt = "demuxed.video.mkv";

            if (string.IsNullOrEmpty(_currentTask.TempInput))
            {
                baseName = string.IsNullOrEmpty(_currentTask.TempOutput)
                           ? _currentTask.BaseName
                           : _currentTask.TempOutput;
            }
            else
            {
                baseName = _currentTask.TempInput;
            }

            _currentTask.VideoStream.TempFile = FileSystemHelper.CreateTempFile(_appConfig.DemuxLocation,
                                                                                baseName,
                                                                                formattedExt);

            var streamID = _currentTask.Input == InputType.InputDvd
                              ? $"#0x{_currentTask.VideoStream.StreamId + 479:X}"
                              : $"0:v:{_currentTask.VideoStream.StreamKindID:0}";

            sb.Append($"-map {streamID} -c:v copy -y \"{_currentTask.VideoStream.TempFile}\" ");

            foreach (var item in _currentTask.AudioStreams)
            {
                ext = StreamFormat.GetFormatExtension(item.Format, item.FormatProfile, false);

                string acodec;
                switch (ext)
                {
                case "flac":
                    acodec = "flac";
                    break;

                case "wav":
                    acodec = "pcm_s16le";
                    break;

                default:
                    acodec = "copy";
                    break;
                }

                formattedExt = $"demuxed.audio.{item.StreamId:g}.{item.LangCode}.{ext}";

                item.TempFile =
                    FileSystemHelper.CreateTempFile(_appConfig.DemuxLocation, baseName, formattedExt);

                if (_currentTask.Input == InputType.InputDvd)
                {
                    var dvdStreamId = item.StreamId;
                    if (string.CompareOrdinal(item.Format.ToLowerInvariant(), "mpeg1") == 0 ||
                        string.CompareOrdinal(item.Format.ToLowerInvariant(), "mpeg2") == 0)
                    {
                        dvdStreamId += 256;
                    }
                    streamID = $"#0x{dvdStreamId:X}";
                }
                else
                {
                    streamID = $"0:a:{item.StreamKindId:0}";
                }

                sb.Append($"-map {streamID} -c:a {acodec} -y \"{item.TempFile}\" ");
            }

            foreach (var item in _currentTask.SubtitleStreams)
            {
                ext = "mkv";

                formattedExt = $"demuxed.subtitle.{item.StreamId:g}.{item.LangCode}.{ext}";

                item.TempFile = FileSystemHelper.CreateTempFile(_appConfig.DemuxLocation, baseName, formattedExt);

                item.RawStream = false;

                streamID = _currentTask.Input == InputType.InputDvd
                    ? $"#0x{item.StreamId:X}"
                    : $"0:s:{item.StreamKindId:0}";

                var codec = "copy";

                if (item.Format == "VobSub")
                {
                    codec = "dvd_subtitle";
                }

                sb.Append($"-map {streamID} -c:s {codec} -y \"{item.TempFile}\" ");
            }

            return(sb.ToString());
        }
Example #20
 public virtual void SetSubStreamFormat(StreamFormat streamFormat)
 {
 }
Example #21
 public void BadFileTest()
 {
     Assert.Throws <ArgumentException>(() => StreamFormat.CreateFromFile("missing_entity.json"));
 }
Example #22
        private static SpeechAudioFormatInfo ConvertFormat(StreamFormat eFormat)
        {
            WaveFormatEx waveFormatEx = new WaveFormatEx();

            byte[] array = null;
            if (eFormat >= StreamFormat.PCM_8kHz8BitMono && eFormat <= StreamFormat.PCM_48kHz16BitStereo)
            {
                uint   num    = (uint)(eFormat - 4);   // 4 == StreamFormat.PCM_8kHz8BitMono (cf. Example #24)
                bool   flag   = (num & 1) != 0;        // stereo flag
                bool   flag2  = (num & 2) != 0;        // 16-bit flag
                uint   num2   = (num & 0x3C) >> 2;     // sample-rate table index
                uint[] array2 = new uint[9]
                {
                    8000u,
                    11025u,
                    12000u,
                    16000u,
                    22050u,
                    24000u,
                    32000u,
                    44100u,
                    48000u
                };
                waveFormatEx.wFormatTag     = 1;
                waveFormatEx.nChannels      = (waveFormatEx.nBlockAlign = (ushort)((!flag) ? 1 : 2));
                waveFormatEx.nSamplesPerSec = array2[num2];
                waveFormatEx.wBitsPerSample = 8;
                if (flag2)
                {
                    waveFormatEx.wBitsPerSample *= 2;
                    waveFormatEx.nBlockAlign    *= 2;
                }
                waveFormatEx.nAvgBytesPerSec = waveFormatEx.nSamplesPerSec * waveFormatEx.nBlockAlign;
            }
            else
            {
                switch (eFormat)
                {
                case StreamFormat.TrueSpeech_8kHz1BitMono:
                    waveFormatEx.wFormatTag      = 34;
                    waveFormatEx.nChannels       = 1;
                    waveFormatEx.nSamplesPerSec  = 8000u;
                    waveFormatEx.nAvgBytesPerSec = 1067u;
                    waveFormatEx.nBlockAlign     = 32;
                    waveFormatEx.wBitsPerSample  = 1;
                    waveFormatEx.cbSize          = 32;
                    array    = new byte[32];
                    array[0] = 1;
                    array[2] = 240;
                    break;

                case StreamFormat.CCITT_ALaw_8kHzMono:
                case StreamFormat.CCITT_ALaw_8kHzStereo:
                case StreamFormat.CCITT_ALaw_11kHzMono:
                case StreamFormat.CCITT_ALaw_11kHzStereo:
                case StreamFormat.CCITT_ALaw_22kHzMono:
                case StreamFormat.CCITT_ALaw_22kHzStereo:
                case StreamFormat.CCITT_ALaw_44kHzMono:
                case StreamFormat.CCITT_ALaw_44kHzStereo:
                {
                    uint   num8    = (uint)(eFormat - 41);   // 41 == StreamFormat.CCITT_ALaw_8kHzMono
                    uint   num9    = num8 / 2u;
                    uint[] array13 = new uint[4]
                    {
                        8000u,
                        11025u,
                        22050u,
                        44100u
                    };
                    bool flag5 = (num8 & 1) != 0;
                    waveFormatEx.wFormatTag      = 6;
                    waveFormatEx.nChannels       = (waveFormatEx.nBlockAlign = (ushort)((!flag5) ? 1 : 2));
                    waveFormatEx.nSamplesPerSec  = array13[num9];
                    waveFormatEx.wBitsPerSample  = 8;
                    waveFormatEx.nAvgBytesPerSec = waveFormatEx.nSamplesPerSec * waveFormatEx.nBlockAlign;
                    break;
                }

                default:
                    if (eFormat >= StreamFormat.CCITT_uLaw_8kHzMono && eFormat <= StreamFormat.CCITT_uLaw_44kHzStereo)
                    {
                        uint   num3   = (uint)(eFormat - 49);   // 49 == StreamFormat.CCITT_uLaw_8kHzMono
                        uint   num4   = num3 / 2u;
                        uint[] array3 = new uint[4]
                        {
                            8000u,
                            11025u,
                            22050u,
                            44100u
                        };
                        bool flag3 = (num3 & 1) != 0;
                        waveFormatEx.wFormatTag      = 7;
                        waveFormatEx.nChannels       = (waveFormatEx.nBlockAlign = (ushort)((!flag3) ? 1 : 2));
                        waveFormatEx.nSamplesPerSec  = array3[num4];
                        waveFormatEx.wBitsPerSample  = 8;
                        waveFormatEx.nAvgBytesPerSec = waveFormatEx.nSamplesPerSec * waveFormatEx.nBlockAlign;
                    }
                    else if (eFormat >= StreamFormat.ADPCM_8kHzMono && eFormat <= StreamFormat.ADPCM_44kHzStereo)
                    {
                        uint[] array4 = new uint[4]
                        {
                            8000u,
                            11025u,
                            22050u,
                            44100u
                        };
                        uint[] array5 = new uint[8]
                        {
                            4096u,
                            8192u,
                            5644u,
                            11289u,
                            11155u,
                            22311u,
                            22179u,
                            44359u
                        };
                        uint[] array6 = new uint[4]
                        {
                            256u,
                            256u,
                            512u,
                            1024u
                        };
                        byte[] array7 = new byte[32]
                        {
                            244,
                            1,
                            7,
                            0,
                            0,
                            1,
                            0,
                            0,
                            0,
                            2,
                            0,
                            255,
                            0,
                            0,
                            0,
                            0,
                            192,
                            0,
                            64,
                            0,
                            240,
                            0,
                            0,
                            0,
                            204,
                            1,
                            48,
                            255,
                            136,
                            1,
                            24,
                            255
                        };
                        byte[] array8 = new byte[32]
                        {
                            244,
                            3,
                            7,
                            0,
                            0,
                            1,
                            0,
                            0,
                            0,
                            2,
                            0,
                            255,
                            0,
                            0,
                            0,
                            0,
                            192,
                            0,
                            64,
                            0,
                            240,
                            0,
                            0,
                            0,
                            204,
                            1,
                            48,
                            255,
                            136,
                            1,
                            24,
                            255
                        };
                        byte[] array9 = new byte[32]
                        {
                            244,
                            7,
                            7,
                            0,
                            0,
                            1,
                            0,
                            0,
                            0,
                            2,
                            0,
                            255,
                            0,
                            0,
                            0,
                            0,
                            192,
                            0,
                            64,
                            0,
                            240,
                            0,
                            0,
                            0,
                            204,
                            1,
                            48,
                            255,
                            136,
                            1,
                            24,
                            255
                        };
                        byte[][] array10 = new byte[4][]
                        {
                            array7,
                            array7,
                            array8,
                            array9
                        };
                        uint num5  = (uint)(eFormat - 57);   // 57 == StreamFormat.ADPCM_8kHzMono
                        uint num6  = num5 / 2u;
                        bool flag4 = (num5 & 1) != 0;
                        waveFormatEx.wFormatTag      = 2;
                        waveFormatEx.nChannels       = (ushort)((!flag4) ? 1 : 2);
                        waveFormatEx.nSamplesPerSec  = array4[num6];
                        waveFormatEx.nAvgBytesPerSec = array5[num5];
                        waveFormatEx.nBlockAlign     = (ushort)(array6[num6] * waveFormatEx.nChannels);
                        waveFormatEx.wBitsPerSample  = 4;
                        waveFormatEx.cbSize          = 32;
                        array = (byte[])array10[num6].Clone();
                    }
                    else if (eFormat >= StreamFormat.GSM610_8kHzMono && eFormat <= StreamFormat.GSM610_44kHzMono)
                    {
                        uint[] array11 = new uint[4]
                        {
                            8000u,
                            11025u,
                            22050u,
                            44100u
                        };
                        uint[] array12 = new uint[4]
                        {
                            1625u,
                            2239u,
                            4478u,
                            8957u
                        };
                        uint num7 = (uint)(eFormat - 65);   // 65 == StreamFormat.GSM610_8kHzMono
                        waveFormatEx.wFormatTag      = 49;
                        waveFormatEx.nChannels       = 1;
                        waveFormatEx.nSamplesPerSec  = array11[num7];
                        waveFormatEx.nAvgBytesPerSec = array12[num7];
                        waveFormatEx.nBlockAlign     = 65;
                        waveFormatEx.wBitsPerSample  = 0;
                        waveFormatEx.cbSize          = 2;
                        array = new byte[2]
                        {
                            64,
                            1
                        };
                    }
                    else
                    {
                        waveFormatEx = null;
                        if (eFormat != 0 && eFormat != StreamFormat.Text)
                        {
                            throw new FormatException();
                        }
                    }
                    break;
                }
            }
            if (waveFormatEx == null)
            {
                return(null);
            }
            return(new SpeechAudioFormatInfo((EncodingFormat)waveFormatEx.wFormatTag, (int)waveFormatEx.nSamplesPerSec, waveFormatEx.wBitsPerSample, waveFormatEx.nChannels, (int)waveFormatEx.nAvgBytesPerSec, waveFormatEx.nBlockAlign, array));
        }
Example #23
        /// <summary>
        /// Demux processing function, called by BackgroundWorker thread
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        public void DoDemux(object sender, DoWorkEventArgs e)
        {
            _bw = (BackgroundWorker)sender;

            bool use64BitEncoder = AppSettings.Use64BitEncoders &&
                                   AppSettings.Ffmpeg64Installed &&
                                   Environment.Is64BitOperatingSystem;

            string status = Processing.GetResourceString("ffmpeg_demuxing_status");

            _bw.ReportProgress(-10, status);
            _bw.ReportProgress(0, status);

            string inputFile;

            if (_jobInfo.Input == InputType.InputDvd)
            {
                inputFile = _jobInfo.DumpOutput;
            }
            else
            {
                inputFile = string.IsNullOrEmpty(_jobInfo.TempInput) ? _jobInfo.InputFile : _jobInfo.TempInput;
            }
            _jobInfo.VideoStream.TempFile = inputFile;
            try
            {
                _jobInfo.MediaInfo = Processing.GetMediaInfo(inputFile);
                if (_jobInfo.Input == InputType.InputDvd)
                {
                    _jobInfo.VideoStream = VideoHelper.GetStreamInfo(_jobInfo.MediaInfo, _jobInfo.VideoStream, false);
                }
            }
            catch (TimeoutException ex)
            {
                Log.Error(ex);
            }

            StringBuilder sb = new StringBuilder();

            if (_jobInfo.Input == InputType.InputDvd)
            {
                sb.Append("-probesize 2147483647 -analyzeduration 2147483647 -fflags genpts ");
            }

            sb.AppendFormat("-i \"{0}\" ", inputFile);

            string baseName;
            string ext;

            string formattedExt = "demuxed.video.mkv";

            if (string.IsNullOrEmpty(_jobInfo.TempInput))
            {
                baseName = string.IsNullOrEmpty(_jobInfo.TempOutput) ? _jobInfo.BaseName : _jobInfo.TempOutput;
            }
            else
            {
                baseName = _jobInfo.TempInput;
            }

            _jobInfo.VideoStream.TempFile =
                Processing.CreateTempFile(baseName, formattedExt);

            string streamID = _jobInfo.Input == InputType.InputDvd
                ? string.Format("#0x{0:X}", _jobInfo.VideoStream.StreamId + 479)
                : string.Format("0:v:{0:0}", _jobInfo.VideoStream.StreamKindID);

            sb.AppendFormat("-map {0} -c:v copy -y \"{1}\" ", streamID, _jobInfo.VideoStream.TempFile);

            foreach (AudioInfo item in _jobInfo.AudioStreams)
            {
                ext = StreamFormat.GetFormatExtension(item.Format, item.FormatProfile, false);

                string acodec;

                switch (ext)
                {
                case "flac":
                    acodec = "flac";
                    break;

                case "wav":
                    acodec = "pcm_s16le";
                    break;

                default:
                    acodec = "copy";
                    break;
                }

                formattedExt = string.Format("demuxed.audio.{0:g}.{1}.{2}", item.StreamId, item.LangCode, ext);

                if (string.IsNullOrEmpty(_jobInfo.TempInput))
                {
                    baseName = string.IsNullOrEmpty(_jobInfo.TempOutput) ? _jobInfo.BaseName : _jobInfo.TempOutput;
                }
                else
                {
                    baseName = _jobInfo.TempInput;
                }
                item.TempFile =
                    Processing.CreateTempFile(baseName, formattedExt);

                if (_jobInfo.Input == InputType.InputDvd)
                {
                    int dvdStreamId = item.StreamId;
                    if (String.CompareOrdinal(item.Format.ToLowerInvariant(), "mpeg1") == 0 ||
                        String.CompareOrdinal(item.Format.ToLowerInvariant(), "mpeg2") == 0)
                    {
                        dvdStreamId += 256;
                    }
                    streamID = string.Format("#0x{0:X}", dvdStreamId);
                }
                else
                {
                    streamID = string.Format("0:a:{0:0}", item.StreamKindId);
                }

                sb.AppendFormat("-map {0} -c:a {1} -y \"{2}\" ", streamID, acodec, item.TempFile);
            }

            foreach (SubtitleInfo item in _jobInfo.SubtitleStreams)
            {
                ext = "mkv";

                formattedExt = string.Format("demuxed.subtitle.{0:g}.{1}.{2}", item.StreamId, item.LangCode, ext);

                if (string.IsNullOrEmpty(_jobInfo.TempInput))
                {
                    baseName = string.IsNullOrEmpty(_jobInfo.TempOutput) ? _jobInfo.BaseName : _jobInfo.TempOutput;
                }
                else
                {
                    baseName = _jobInfo.TempInput;
                }

                item.TempFile = Processing.CreateTempFile(baseName, formattedExt);

                item.RawStream = false;

                streamID = _jobInfo.Input == InputType.InputDvd
                    ? string.Format("#0x{0:X}", item.StreamId)
                    : string.Format("0:s:{0:0}", item.StreamKindId);

                string codec = "copy";
                if (item.Format == "VobSub")
                {
                    codec = "dvd_subtitle";
                }

                sb.AppendFormat("-map {0} -c:s {1} -y \"{2}\" ", streamID, codec, item.TempFile);
            }

            string localExecutable = Path.Combine(AppSettings.ToolsPath, use64BitEncoder ? Executable64 : Executable);

            using (Process encoder = new Process())
            {
                ProcessStartInfo parameter = new ProcessStartInfo(localExecutable)
                {
                    WorkingDirectory      = AppSettings.DemuxLocation,
                    Arguments             = sb.ToString(),
                    CreateNoWindow        = true,
                    UseShellExecute       = false,
                    RedirectStandardError = true
                };
                encoder.StartInfo          = parameter;
                encoder.ErrorDataReceived += DemuxOnErrorDataReceived;

                Log.InfoFormat("ffmpeg {0:s}", parameter.Arguments);

                bool started;
                try
                {
                    started = encoder.Start();
                }
                catch (Exception ex)
                {
                    started = false;
                    Log.ErrorFormat("ffmpeg exception: {0}", ex);
                    _jobInfo.ExitCode = -1;
                }

                if (started)
                {
                    encoder.PriorityClass = AppSettings.GetProcessPriority();
                    encoder.BeginErrorReadLine();

                    _bw.ReportProgress(-1, status);

                    while (!encoder.HasExited)
                    {
                        if (_bw.CancellationPending)
                        {
                            encoder.Kill();
                        }
                        Thread.Sleep(200);
                    }

                    encoder.WaitForExit(10000);
                    encoder.CancelErrorRead();

                    _jobInfo.ExitCode = encoder.ExitCode;

                    if (_jobInfo.ExitCode == 0)
                    {
                        if (_jobInfo.Input == InputType.InputDvd)
                        {
                            _jobInfo.TempFiles.Add(inputFile);
                        }
                    }
                    Log.InfoFormat("Exit Code: {0:g}", _jobInfo.ExitCode);
                }
            }

            _bw.ReportProgress(100);
            _jobInfo.CompletedStep = _jobInfo.NextStep;
            e.Result = _jobInfo;
        }
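The process-handling half of DoDemux is a reusable pattern: redirect stderr, poll for cancellation while the external tool runs, then collect the exit code. A stripped-down sketch of just that loop (executable and arguments are placeholders):

        // Minimal sketch of the process loop used above (placeholder executable and arguments).
        using (var encoder = new Process())
        {
            encoder.StartInfo = new ProcessStartInfo("ffmpeg.exe")
            {
                Arguments             = "-i \"input.mkv\" -map 0:v:0 -c:v copy -y \"out.mkv\"",
                CreateNoWindow        = true,
                UseShellExecute       = false,
                RedirectStandardError = true
            };
            encoder.ErrorDataReceived += (s, args) => { /* parse progress lines here */ };

            if (encoder.Start())
            {
                encoder.BeginErrorReadLine();

                while (!encoder.HasExited)
                {
                    if (_bw.CancellationPending)   // honour a cancel request from the BackgroundWorker
                    {
                        encoder.Kill();
                    }
                    Thread.Sleep(200);
                }

                encoder.WaitForExit(10000);
                encoder.CancelErrorRead();
                int exitCode = encoder.ExitCode;
            }
        }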
Example #24
        /// <summary>
        /// This method converts the specified stream format into a wave format
        /// </summary>
        private static SpeechAudioFormatInfo ConvertFormat(StreamFormat eFormat)
        {
            WaveFormatEx waveEx = new();

            byte[] extra = null;

            if (eFormat >= StreamFormat.PCM_8kHz8BitMono && eFormat <= StreamFormat.PCM_48kHz16BitStereo)
            {
                uint   index    = (uint)(eFormat - StreamFormat.PCM_8kHz8BitMono);
                bool   isStereo = (index & 0x1) != 0;
                bool   is16     = (index & 0x2) != 0;
                uint   dwKHZ    = (index & 0x3c) >> 2;
                uint[] adwKHZ   = new uint[] { 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000 };
                waveEx.wFormatTag     = (ushort)WaveFormatId.Pcm;
                waveEx.nChannels      = waveEx.nBlockAlign = (ushort)(isStereo ? 2 : 1);
                waveEx.nSamplesPerSec = adwKHZ[dwKHZ];
                waveEx.wBitsPerSample = 8;
                if (is16)
                {
                    waveEx.wBitsPerSample *= 2;
                    waveEx.nBlockAlign    *= 2;
                }
                waveEx.nAvgBytesPerSec = waveEx.nSamplesPerSec * waveEx.nBlockAlign;
            }
            else if (eFormat == StreamFormat.TrueSpeech_8kHz1BitMono)
            {
                waveEx.wFormatTag      = (ushort)WaveFormatId.TrueSpeech;
                waveEx.nChannels       = 1;
                waveEx.nSamplesPerSec  = 8000;
                waveEx.nAvgBytesPerSec = 1067;
                waveEx.nBlockAlign     = 32;
                waveEx.wBitsPerSample  = 1;
                waveEx.cbSize          = 32;
                extra    = new byte[32];
                extra[0] = 1;
                extra[2] = 0xF0;
            }
            else if ((eFormat >= StreamFormat.CCITT_ALaw_8kHzMono) && (eFormat <= StreamFormat.CCITT_ALaw_44kHzStereo))
            {
                uint   index    = (uint)(eFormat - StreamFormat.CCITT_ALaw_8kHzMono);
                uint   dwKHZ    = index / 2;
                uint[] adwKHZ   = { 8000, 11025, 22050, 44100 };
                bool   isStereo = (index & 0x1) != 0;
                waveEx.wFormatTag      = (ushort)WaveFormatId.Alaw;
                waveEx.nChannels       = waveEx.nBlockAlign = (ushort)(isStereo ? 2 : 1);
                waveEx.nSamplesPerSec  = adwKHZ[dwKHZ];
                waveEx.wBitsPerSample  = 8;
                waveEx.nAvgBytesPerSec = waveEx.nSamplesPerSec * waveEx.nBlockAlign;
            }
            else if ((eFormat >= StreamFormat.CCITT_uLaw_8kHzMono) &&
                     (eFormat <= StreamFormat.CCITT_uLaw_44kHzStereo))
            {
                uint   index    = (uint)(eFormat - StreamFormat.CCITT_uLaw_8kHzMono);
                uint   dwKHZ    = index / 2;
                uint[] adwKHZ   = new uint[] { 8000, 11025, 22050, 44100 };
                bool   isStereo = (index & 0x1) != 0;
                waveEx.wFormatTag      = (ushort)WaveFormatId.Mulaw;
                waveEx.nChannels       = waveEx.nBlockAlign = (ushort)(isStereo ? 2 : 1);
                waveEx.nSamplesPerSec  = adwKHZ[dwKHZ];
                waveEx.wBitsPerSample  = 8;
                waveEx.nAvgBytesPerSec = waveEx.nSamplesPerSec * waveEx.nBlockAlign;
            }
            else if ((eFormat >= StreamFormat.ADPCM_8kHzMono) &&
                     (eFormat <= StreamFormat.ADPCM_44kHzStereo))
            {
                //--- Some of these values seem odd. We used what the codec told us.
                uint[] adwKHZ      = new uint[] { 8000, 11025, 22050, 44100 };
                uint[] BytesPerSec = new uint[] { 4096, 8192, 5644, 11289, 11155, 22311, 22179, 44359 };
                uint[] BlockAlign  = new uint[] { 256, 256, 512, 1024 };
                byte[] Extra811    = new byte[32]
                {
                    0xF4, 0x01, 0x07, 0x00, 0x00, 0x01, 0x00, 0x00,
                    0x00, 0x02, 0x00, 0xFF, 0x00, 0x00, 0x00, 0x00,
                    0xC0, 0x00, 0x40, 0x00, 0xF0, 0x00, 0x00, 0x00,
                    0xCC, 0x01, 0x30, 0xFF, 0x88, 0x01, 0x18, 0xFF
                };

                byte[] Extra22 = new byte[32]
                {
                    0xF4, 0x03, 0x07, 0x00, 0x00, 0x01, 0x00, 0x00,
                    0x00, 0x02, 0x00, 0xFF, 0x00, 0x00, 0x00, 0x00,
                    0xC0, 0x00, 0x40, 0x00, 0xF0, 0x00, 0x00, 0x00,
                    0xCC, 0x01, 0x30, 0xFF, 0x88, 0x01, 0x18, 0xFF
                };

                byte[] Extra44 = new byte[32]
                {
                    0xF4, 0x07, 0x07, 0x00, 0x00, 0x01, 0x00, 0x00,
                    0x00, 0x02, 0x00, 0xFF, 0x00, 0x00, 0x00, 0x00,
                    0xC0, 0x00, 0x40, 0x00, 0xF0, 0x00, 0x00, 0x00,
                    0xCC, 0x01, 0x30, 0xFF, 0x88, 0x01, 0x18, 0xFF
                };

                byte[][] Extra    = new byte[][] { Extra811, Extra811, Extra22, Extra44 };
                uint     index    = (uint)(eFormat - StreamFormat.ADPCM_8kHzMono);
                uint     dwKHZ    = index / 2;
                bool     isStereo = (index & 0x1) != 0;
                waveEx.wFormatTag      = (ushort)WaveFormatId.AdPcm;
                waveEx.nChannels       = (ushort)(isStereo ? 2 : 1);
                waveEx.nSamplesPerSec  = adwKHZ[dwKHZ];
                waveEx.nAvgBytesPerSec = BytesPerSec[index];
                waveEx.nBlockAlign     = (ushort)(BlockAlign[dwKHZ] * waveEx.nChannels);
                waveEx.wBitsPerSample  = 4;
                waveEx.cbSize          = 32;
                extra = (byte[])Extra[dwKHZ].Clone();
            }
            else if ((eFormat >= StreamFormat.GSM610_8kHzMono) &&
                     (eFormat <= StreamFormat.GSM610_44kHzMono))
            {
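                //--- GSM 6.10 is mono-only, so the enum range maps one-to-one onto the
                //--- sample-rate table below (no mono/stereo pairing as in the branches above).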
                //--- Some of these values seem odd. We used what the codec told us.
                uint[] adwKHZ      = new uint[] { 8000, 11025, 22050, 44100 };
                uint[] BytesPerSec = new uint[] { 1625, 2239, 4478, 8957 };
                uint   index       = (uint)(eFormat - StreamFormat.GSM610_8kHzMono);
                waveEx.wFormatTag      = (ushort)WaveFormatId.Gsm610;
                waveEx.nChannels       = 1;
                waveEx.nSamplesPerSec  = adwKHZ[index];
                waveEx.nAvgBytesPerSec = BytesPerSec[index];
                waveEx.nBlockAlign     = 65;
                waveEx.wBitsPerSample  = 0;
                waveEx.cbSize          = 2;
                extra    = new byte[2];
                extra[0] = 0x40;
                extra[1] = 0x01;
            }
            else
            {
                waveEx = null;
                switch (eFormat)
                {
                case StreamFormat.NoAssignedFormat:
                    break;

                case StreamFormat.Text:
                    break;

                default:
                    throw new FormatException();
                }
            }

            return(waveEx != null
                ? new SpeechAudioFormatInfo((EncodingFormat)waveEx.wFormatTag, (int)waveEx.nSamplesPerSec,
                                            waveEx.wBitsPerSample, waveEx.nChannels,
                                            (int)waveEx.nAvgBytesPerSec, waveEx.nBlockAlign, extra)
                : null);
        }
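A minimal usage sketch for the converter above, assuming it is exposed as a static AudioFormatConverter.ToSpeechAudioFormatInfo(StreamFormat) helper; both names are illustrative and not taken from the original listing. Text and NoAssignedFormat inputs yield null, so the result is checked first.

// Hedged usage sketch; AudioFormatConverter and ToSpeechAudioFormatInfo are assumed names.
SpeechAudioFormatInfo info = AudioFormatConverter.ToSpeechAudioFormatInfo(StreamFormat.CCITT_ALaw_8kHzMono);
if (info != null)
{
    Console.WriteLine("{0} Hz, {1} channel(s), {2} bits/sample, {3} B/s",
                      info.SamplesPerSecond, info.ChannelCount,
                      info.BitsPerSample, info.AverageBytesPerSecond);
}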
Example #25
0
        public void DemuxSubtitle(object sender, DoWorkEventArgs e)
        {
            _bw = (BackgroundWorker)sender;

            string localExecutable = Path.Combine(AppSettings.ToolsPath, "mkvextract.exe");
            string status          = Processing.GetResourceString("mkvmerge_demuxing_status");

            _bw.ReportProgress(-10, status);
            _bw.ReportProgress(0, status);

            SubtitleInfo sub          = _jobInfo.SubtitleStreams[_jobInfo.StreamId];
            string       input        = sub.TempFile;
            string       ext          = StreamFormat.GetFormatExtension(sub.Format, "", true);
            string       formattedExt = string.Format("raw.{0}", ext);

            sub.TempFile = Processing.CreateTempFile(sub.TempFile, formattedExt);

            StringBuilder sb = new StringBuilder();

            sb.AppendFormat("{0} tracks \"{1}\" 0:\"{2}\" ", Defaultparams, input, sub.TempFile);

            using (Process encoder = new Process())
            {
                ProcessStartInfo parameter = new ProcessStartInfo(localExecutable)
                {
                    WorkingDirectory       = AppSettings.DemuxLocation,
                    Arguments              = sb.ToString(),
                    CreateNoWindow         = true,
                    UseShellExecute        = false,
                    RedirectStandardOutput = true
                };

                encoder.StartInfo = parameter;

                encoder.OutputDataReceived += OnDemuxDataReceived;

                Log.InfoFormat("mkvextract {0:s}", parameter.Arguments);

                bool started;
                try
                {
                    started = encoder.Start();
                }
                catch (Exception ex)
                {
                    started = false;
                    Log.ErrorFormat("mkvmerge exception: {0}", ex);
                    _jobInfo.ExitCode = -1;
                }

                if (started)
                {
                    encoder.PriorityClass = AppSettings.GetProcessPriority();
                    encoder.BeginOutputReadLine();

                    while (!encoder.HasExited)
                    {
                        if (_bw.CancellationPending)
                        {
                            encoder.Kill();
                        }
                        Thread.Sleep(200);
                    }
                    encoder.WaitForExit(10000);
                    encoder.CancelOutputRead();

                    _jobInfo.ExitCode = encoder.ExitCode;
                    Log.InfoFormat("Exit Code: {0:g}", _jobInfo.ExitCode);
                    if (_jobInfo.ExitCode < 2)
                    {
                        if (_jobInfo.ExitCode == 1)
                        {
                            string warningStr = Processing.GetResourceString("process_finish_warnings");
                            _bw.ReportProgress(-10, warningStr);
                            _jobInfo.ExitCode = 0;
                        }

                        _jobInfo.TempFiles.Add(input);
                        sub.RawStream = true;
                        if (sub.Format == "VobSub")
                        {
                            _jobInfo.TempFiles.Add(sub.TempFile);
                            sub.TempFile = Path.ChangeExtension(sub.TempFile, "idx");
                        }
                    }
                }
            }

            _bw.ReportProgress(100);
            _jobInfo.CompletedStep = _jobInfo.NextStep;

            e.Result = _jobInfo;
        }
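DemuxSubtitle has the signature of a BackgroundWorker DoWork handler, so a caller would wire it up roughly as below; the demuxer instance name is an assumption.

// Hedged usage sketch; "demuxer" stands in for whatever object exposes DemuxSubtitle.
var worker = new BackgroundWorker
{
    WorkerReportsProgress      = true,
    WorkerSupportsCancellation = true
};
worker.DoWork             += demuxer.DemuxSubtitle;
worker.ProgressChanged    += (s, args) => Console.WriteLine(args.UserState); // status strings
worker.RunWorkerCompleted += (s, args) => Console.WriteLine("Demux step finished.");
worker.RunWorkerAsync();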
Example #26
0
        private string GenerateCommandLine()
        {
            var sb = new StringBuilder();

            string baseFileName;

            _inputFile = string.IsNullOrEmpty(_currentTask.TempInput)
                            ? _currentTask.InputFile
                            : _currentTask.TempInput;

            if (string.IsNullOrEmpty(_currentTask.TempInput))
            {
                baseFileName = Path.Combine(_appConfig.DemuxLocation,
                                            string.IsNullOrEmpty(_currentTask.TempOutput)
                        ? _currentTask.BaseName
                        : Path.GetFileNameWithoutExtension(_currentTask.TempOutput));

                _currentTask.VideoStream.TempFile =
                    FileSystemHelper.CreateTempFile(_appConfig.DemuxLocation,
                                                    baseFileName, "demuxed.video.mkv");
            }
            else
            {
                baseFileName = Path.Combine(_appConfig.DemuxLocation,
                                            Path.GetFileNameWithoutExtension(_currentTask.TempInput));
                _currentTask.VideoStream.TempFile =
                    FileSystemHelper.CreateTempFile(_appConfig.DemuxLocation, baseFileName,
                                                    "demuxed.video.mkv");
            }

            sb.Append($"\"{_inputFile}\" {_currentTask.VideoStream.StreamId:0}:\"{_currentTask.VideoStream.TempFile}\" ");

            // on stereo sources, decide if stream for right eye should be extracted
            if (_currentTask.StereoVideoStream.RightStreamId > 0 &&
                _currentTask.EncodingProfile.StereoType != StereoEncoding.None)
            {
                _currentTask.StereoVideoStream.RightTempFile =
                    FileSystemHelper.CreateTempFile(_appConfig.DemuxLocation,
                                                    _currentTask.VideoStream.TempFile,
                                                    "right.h264");
                _currentTask.StereoVideoStream.LeftTempFile =
                    FileSystemHelper.CreateTempFile(_appConfig.DemuxLocation,
                                                    _currentTask.VideoStream.TempFile,
                                                    "left.h264");
                sb.Append($"{_currentTask.StereoVideoStream.LeftStreamId:0}:\"{_currentTask.StereoVideoStream.LeftTempFile}\" ");
                sb.Append($"{_currentTask.StereoVideoStream.RightStreamId:0}:\"{_currentTask.StereoVideoStream.RightTempFile}\" ");
            }

            string ext;
            string formattedExt;

            // process all audio streams
            foreach (var item in _currentTask.AudioStreams)
            {
                // get file extension for selected stream based on format and format profile
                ext = StreamFormat.GetFormatExtension(item.Format, item.FormatProfile, false);

                formattedExt = $"demuxed.audio.{item.StreamId:g}.{item.LangCode}.{ext}";

                item.TempFile = FileSystemHelper.CreateTempFile(_appConfig.DemuxLocation,
                                                                baseFileName,
                                                                formattedExt);

                sb.Append($"{item.Id:0}:\"{item.TempFile}\" ");
            }

            // process all subtitle streams
            foreach (var item in _currentTask.SubtitleStreams)
            {
                ext          = StreamFormat.GetFormatExtension(item.Format, string.Empty, false);
                formattedExt = $"demuxed.subtitle.{item.StreamId:g}.{item.LangCode}.{ext}";

                item.TempFile = FileSystemHelper.CreateTempFile(_appConfig.DemuxLocation,
                                                                _currentTask.TempInput,
                                                                formattedExt);

                sb.Append($"{item.Id:0}:\"{item.TempFile}\" ");
                item.RawStream = true;
            }

            // add logfile to tempfiles list for deletion
            _currentTask.TempFiles.Add(_currentTask.VideoStream.TempFile.Substring(0,
                                                                                   _currentTask.VideoStream.TempFile.LastIndexOf('.')) + " - Log.txt");

            sb.Append("-progressNumbers ");

            return(sb.ToString());
        }
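A sketch of how the returned argument string might be handed to mkvextract; the executable path and the process wiring are assumptions, only the argument string itself comes from GenerateCommandLine().

// Hedged usage sketch; the mkvextract path and process handling are illustrative only.
string arguments = GenerateCommandLine();
var startInfo = new ProcessStartInfo("mkvextract.exe", arguments)
{
    CreateNoWindow         = true,
    UseShellExecute        = false,
    RedirectStandardOutput = true
};
using (var extractProcess = Process.Start(startInfo))
{
    extractProcess.WaitForExit();
}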
Example #27
0
        /// <summary>
        /// Extract the value from the passed object.
        /// </summary>
        /// <remarks>
        /// The returned value may be <c>null</c>.
        /// </remarks>
        /// <param name="obj">
        /// An object to retrieve the value from.
        /// </param>
        /// <returns>
        /// The extracted value as an object; <c>null</c> is an acceptable
        /// value.
        /// </returns>
        /// <exception cref="InvalidCastException">
        /// If this IValueExtractor is incompatible with the passed object to
        /// extract a value from and the implementation <b>requires</b> the
        /// passed object to be of a certain type.
        /// </exception>
        /// <exception cref="ArgumentException">
        /// If this AbstractExtractor cannot handle the passed object for any
        /// other reason; an implementor should include a descriptive
        /// message.
        /// </exception>
        public override object Extract(object obj)
        {
            IValueExtractor[] extractors = Extractors;
            IComparer         comparer   = Comparer;

            object o1 = extractors[0].Extract(obj);
            object o2 = extractors[1].Extract(obj);

            if (NumberUtils.IsNumber(o1) && NumberUtils.IsNumber(o2) && comparer == null)
            {
                StreamFormat type;

                if (o1.GetType() == o2.GetType())
                {
                    // most common case; same types
                    type = GetStreamFormat(o1);
                }
                else
                {
                    StreamFormat[] types = new StreamFormat[] {
                        StreamFormat.Byte,          // 0
                        StreamFormat.Short,         // 1
                        StreamFormat.Int,           // 2
                        StreamFormat.Long,          // 3
                        StreamFormat.Float,         // 4
                        StreamFormat.Double,        // 5
                        StreamFormat.RawInt128,     // 6
                        StreamFormat.Decimal        // 7
                    };

                    StreamFormat type1 = GetStreamFormat(o1);
                    StreamFormat type2 = GetStreamFormat(o2);
                    int          typesCount, ix1, ix2;

                    ix1 = ix2 = typesCount = types.Length;
                    for (int i = 0; i < typesCount; i++)
                    {
                        StreamFormat t = types[i];
                        if (t == type1)
                        {
                            ix1 = i;
                        }
                        if (t == type2)
                        {
                            ix2 = i;
                        }
                    }

                    switch (Math.Max(ix1, ix2))
                    {
                    case 0:
                    case 1:
                    case 2:
                        type = StreamFormat.Int;
                        break;

                    case 3:
                        type = StreamFormat.Long;
                        break;

                    case 4:
                    case 5:
                        type = StreamFormat.Double;
                        break;

                    case 6:
                    case 7:
                        type = StreamFormat.Decimal;
                        o1   = EnsureDecimal(o1);
                        o2   = EnsureDecimal(o2);
                        break;

                    default:
                        type = StreamFormat.None;
                        break;
                    }
                }

                switch (type)
                {
                case StreamFormat.Byte:
                case StreamFormat.Short:
                case StreamFormat.Int:
                    return(Convert.ToInt32(o1) - Convert.ToInt32(o2));

                case StreamFormat.Long:
                    return(Convert.ToInt64(o1) - Convert.ToInt64(o2));

                case StreamFormat.Float:
                    return(Convert.ToSingle(o1) - Convert.ToSingle(o2));

                case StreamFormat.Double:
                    return(Convert.ToDouble(o1) - Convert.ToDouble(o2));

                case StreamFormat.RawInt128:
                    return(NumberUtils.DecimalToRawInt128(Decimal.Subtract(((RawInt128)o1).ToDecimal(), ((RawInt128)o2).ToDecimal())));

                case StreamFormat.Decimal:
                    return(Decimal.Subtract(Convert.ToDecimal(o1), Convert.ToDecimal(o2)));
                }
            }
            return(SafeComparer.CompareSafe(comparer, o1, o2));
        }
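A small worked illustration of the numeric promotion above: mixing an Int with a Long lands on the Long slot of the type table, so the subtraction is carried out in 64-bit arithmetic. The boxed literals below stand in for extractor results and are purely illustrative.

// Hedged illustration of the promotion table; o1/o2 stand in for extractor results.
object o1 = 10;   // classified as StreamFormat.Int
object o2 = 3L;   // classified as StreamFormat.Long
// Math.Max(index of Int, index of Long) selects the Long case, i.e.:
object difference = Convert.ToInt64(o1) - Convert.ToInt64(o2);   // boxed 7L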
Example #28
0
        /// <summary>
        /// Generates AviSynth script used for audio encoding
        /// </summary>
        /// <param name="inputFile">Path to input file</param>
        /// <param name="inFormat">Format of input file</param>
        /// <param name="inFormatProfile">Format profile of input file</param>
        /// <param name="inChannels">Channel count of input file</param>
        /// <param name="outChannels">Target channel count</param>
        /// <param name="inSampleRate">Samplerate of input file</param>
        /// <param name="outSampleRate">Target samplerate</param>
        /// <returns>Path to AviSynth script</returns>
        public string GenerateAudioScript(string inputFile, string inFormat, string inFormatProfile,
                                          int inChannels, int outChannels, int inSampleRate,
                                          int outSampleRate)
        {
            var sb = new StringBuilder();

            var ext = StreamFormat.GetFormatExtension(inFormat, inFormatProfile, false);

            sb.AppendLine($"LoadPlugin(\"{Path.Combine(_appConfig.AvsPluginsPath, "ffms2.dll")}\")");
            sb.AppendLine($"FFAudioSource(\"{inputFile}\")");

            if (inChannels > outChannels && outChannels > 0)
            {
                sb.AppendLine($"Import(\"{Path.Combine(_appConfig.AvsPluginsPath, "audio", "ChannelDownMix.avsi")}\")");

                switch (inChannels)
                {
                case 3:
                    switch (outChannels)
                    {
                    case 2:
                        sb.AppendLine("Dmix3Stereo()");
                        break;

                    case 4:
                    case 3:
                        sb.AppendLine("Dmix3Dpl()");
                        break;

                    case 1:
                        sb.AppendLine("ConvertToMono()");
                        break;
                    }
                    break;

                case 4:
                    switch (outChannels)
                    {
                    case 2:
                        sb.AppendLine("Dmix4qStereo()");
                        break;

                    case 3:
                        sb.AppendLine("Dmix4qDpl()");
                        break;

                    case 4:
                        sb.AppendLine("Dmix4qDpl2()");
                        break;

                    case 1:
                        sb.AppendLine("ConvertToMono()");
                        break;
                    }
                    break;

                case 5:
                    switch (outChannels)
                    {
                    case 2:
                        sb.AppendLine("Dmix5Stereo()");
                        break;

                    case 3:
                        sb.AppendLine("Dmix5Dpl()");
                        break;

                    case 4:
                        sb.AppendLine("Dmix5Dpl2()");
                        break;

                    case 1:
                        sb.AppendLine("ConvertToMono()");
                        break;
                    }
                    break;

                case 6:
                case 7:
                case 8:
                case 9:
                case 10:
                    switch (outChannels)
                    {
                    case 2:
                        sb.AppendLine("Dmix6StereoLfe()");
                        break;

                    case 3:
                        sb.AppendLine("Dmix6DplLfe()");
                        break;

                    case 4:
                        sb.AppendLine("Dmix6Dpl2Lfe()");
                        break;

                    case 1:
                        sb.AppendLine("ConvertToMono()");
                        break;

                    case 6:
                        sb.AppendLine("GetChannel(1,2,3,4,5,6)");
                        break;
                    }
                    break;
                }
            }

            if (inSampleRate != outSampleRate && outSampleRate > 0)
            {
                sb.Append($"SSRC({outSampleRate},fast=False)");
                sb.AppendLine();
            }

            sb.AppendLine("return last");

            return(WriteScript(sb.ToString()));
        }
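For reference, a hedged trace of what this method would emit for a 6-channel 48 kHz track downmixed to stereo at 44.1 kHz; the plugin directory and input path depend on _appConfig and are assumptions.

// Hedged illustration: a 6-to-2 channel downmix with resampling walks the case 6 / case 2
// branch and appends SSRC, producing a script roughly like:
//
//   LoadPlugin("<AvsPluginsPath>\ffms2.dll")
//   FFAudioSource("input.dts")
//   Import("<AvsPluginsPath>\audio\ChannelDownMix.avsi")
//   Dmix6StereoLfe()
//   SSRC(44100,fast=False)
//   return last
//
// "input.dts" and the DTS format strings below are assumptions for illustration.
string scriptPath = GenerateAudioScript("input.dts", "DTS", string.Empty, 6, 2, 48000, 44100);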