public PrimaryAudioStreamInfo(MemoryStream ms) : base(PlaylistModel.StreamType.PrimaryAudio)
{
    readStreamAttribute(ms);

    // StreamAttribute
    byte[] attributeBuffer = ms.ReadFixedLengthByteArray(ms.ReadInt8());
    MemoryStream ms2 = new MemoryStream(attributeBuffer);
    AudioCodec = (AudioCodec)ms2.ReadInt8();
    int value = ms2.ReadInt8();
    AudioPresentationMode = (StreamModel.AudioPresentationMode)(value >> 4);
    SamplingRate samplingRate = (SamplingRate)(value & 0x0f);
    switch (samplingRate)
    {
        case SamplingRate._48: PrimarySamplingRate = 48000; break;
        case SamplingRate._96: PrimarySamplingRate = 96000; break;
        case SamplingRate._192: PrimarySamplingRate = 192000; break;
        default: throw new NotImplementedException(samplingRate.ToString());
    }
    LanguageCode = ms2.ReadFixedLengthString(3);
}
private OpusDecoder(IntPtr decoder, SamplingRate.Template outputSamplingRateHz, Channels.Template outputChannels)
{
    _decoder = decoder;
    OutputSamplingRate = outputSamplingRateHz;
    OutputChannels = outputChannels;
    MaxDataBytes = 4000;
}
internal RemoteVoice(Client client, VoiceInfo info, SamplingRate outputSamplingRateHz, Channels numChannels, byte lastEventNumber)
    : base(outputSamplingRateHz, numChannels)
{
    this.client = client;
    this.Info = info;
    this.lastEvNumber = lastEventNumber;
}
public OpusEncoder(SamplingRate Fs, Channels channels, ILoggerFactory loggerFactory, ElapsedTimeCounter counter)
{
    ArgumentNullException.ThrowIfNull(loggerFactory);
    ArgumentNullException.ThrowIfNull(counter);

    _logger = loggerFactory.CreateLogger<OpusEncoder>();
    _counter = counter;

    _logger.LogInformation($"opus version {NativeMethods.opus_get_version_string()}");

    _encoder = NativeMethods.opus_encoder_create(Fs, channels, OpusApplicationType.Audio, out var error);
    if (error != OpusStatusCode.OK)
    {
        throw new OpusException($"[opus] opus_encoder_create error:{NativeMethods.opus_strerror((int)error)}({error})");
    }
}
/// <summary>
/// Builds an "OpusHead" identification header packet.
/// </summary>
/// <param name="channels">number of output channels</param>
/// <param name="mappingFamily">channel mapping family</param>
/// <param name="samplingRate">original input sampling rate</param>
/// <param name="preskip">number of samples to skip at the start of playback</param>
/// <param name="gain">0 dB gain is recommended unless you know what you're doing</param>
/// <param name="version">header version</param>
public OpusHeader(byte channels, MappingFamily mappingFamily, SamplingRate samplingRate, UInt16 preskip, UInt16 gain, byte version)
{
    Packet = new byte[21 + channels];

    const string keyTemplate = "OpusHead";
    for (int index = 0; index < keyTemplate.Length; index++)
    {
        Packet[index] = (byte)keyTemplate[index];
    }

    Version = version;
    Channels = channels;
    PreSkip = preskip;
    InputSampleRate = samplingRate.Value;
    Gain = gain;
    ChannelMapping = (byte)mappingFamily;

    if (mappingFamily == MappingFamily.SingleStream || mappingFamily == MappingFamily.Vorbis)
    {
        var template = ChanelTemplateCollection.GetTemplate(channels - 1);
        StreamsCount = template.StreamsCount;
        CoupledStreamsCount = template.CoupledStreamsCount;
        StreamMap = template.Mapping;
    }
    else
    {
        StreamsCount = channels;
        StreamMap = Enumerable.Range(0, channels).Select(b => (byte)b).ToArray();
    }
}
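// A minimal usage sketch (not from the original sources) for the header constructor above:
// a stereo, single-stream header for 48 kHz input with the recommended 0 dB gain and version 1.
// The pre-skip of 312 samples and the way the SamplingRate value is obtained are illustrative assumptions.
public static OpusHeader CreateStereoHeader(SamplingRate rate48k)
{
    return new OpusHeader(2, MappingFamily.SingleStream, rate48k, 312, 0, 1);
}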
private void OnSamplingRateChanged(int index)
{
    SamplingRate newSamplingRate = this.recorder.SamplingRate;
    switch (index)
    {
        case 0: newSamplingRate = SamplingRate.Sampling08000; break;
        case 1: newSamplingRate = SamplingRate.Sampling12000; break;
        case 2: newSamplingRate = SamplingRate.Sampling16000; break;
        case 3: newSamplingRate = SamplingRate.Sampling24000; break;
        case 4: newSamplingRate = SamplingRate.Sampling48000; break;
    }
    this.recorder.SamplingRate = newSamplingRate;
    if (this.recorder.RequiresRestart)
    {
        this.recorder.RestartRecording();
    }
}
public OpusDecoder(SamplingRate outputSamplingRateHz, Channels numChannels)
{
    if ((outputSamplingRateHz != SamplingRate.Sampling08000) &&
        (outputSamplingRateHz != SamplingRate.Sampling12000) &&
        (outputSamplingRateHz != SamplingRate.Sampling16000) &&
        (outputSamplingRateHz != SamplingRate.Sampling24000) &&
        (outputSamplingRateHz != SamplingRate.Sampling48000))
    {
        throw new ArgumentOutOfRangeException("outputSamplingRateHz", "Must use one of the pre-defined sampling rates (" + outputSamplingRateHz + ")");
    }
    if ((numChannels != Channels.Mono) && (numChannels != Channels.Stereo))
    {
        throw new ArgumentOutOfRangeException("numChannels", "Must be Mono or Stereo");
    }

    _channelCount = (int)numChannels;
    _handle = Wrapper.opus_decoder_create(outputSamplingRateHz, numChannels);
    _version = Marshal.PtrToStringAnsi(Wrapper.opus_get_version_string());
    if (_handle == IntPtr.Zero)
    {
        throw new OpusException(OpusStatusCode.AllocFail, "Memory was not allocated for the decoder");
    }
}
public ContentionEventParser(SamplingRate samplingRate)
{
    _samplingRate = samplingRate;
    _eventPairTimer = new EventPairTimer<long>(
        EventIdContentionStart,
        EventIdContentionStop,
        x => x.OSThreadId,
        samplingRate);
}
/// <summary>
/// Creates a new Opus decoder.
/// </summary>
/// <param name="outputSamplingRateHz">Sample rate to decode at (Hz). This must be one of 8000, 12000, 16000, 24000, or 48000.</param>
/// <param name="outputChannels">Number of channels to decode.</param>
/// <returns>A new <c>OpusDecoder</c>.</returns>
public static OpusDecoder Create(SamplingRate.Template outputSamplingRateHz, Channels.Template outputChannels)
{
    IntPtr error;
    IntPtr decoder = Api.opus_decoder_create((int)outputSamplingRateHz, (int)outputChannels, out error);
    if ((ErrorCode)error != ErrorCode.OK)
    {
        throw new Exception("Exception occurred while creating decoder");
    }
    return new OpusDecoder(decoder, outputSamplingRateHz, outputChannels);
}
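// A minimal usage sketch (not from the original sources) for the factory above.
// The member names Hz48000 and Stereo are assumptions about the SamplingRate.Template
// and Channels.Template enums of this particular wrapper.
public static OpusDecoder CreateStereoDecoder()
{
    return OpusDecoder.Create(SamplingRate.Template.Hz48000, Channels.Template.Stereo);
}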
public JitEventParser(SamplingRate samplingRate)
{
    _samplingRate = samplingRate;
    _eventPairTimer = new EventPairTimer<ulong>(
        EventIdMethodJittingStarted,
        EventIdMethodLoadVerbose,
        x => (ulong)x.Payload[0],
        samplingRate);
}
/// <summary>
/// Creates a new CustomDevice
/// </summary>
/// <param name="name">human-readable name</param>
/// <param name="captureRate">sampling rate of the capture</param>
/// <param name="captureChannels">number of channels to capture, can be 1 or 2</param>
/// <param name="playbackRate">sampling rate of the playback</param>
/// <param name="playbackChannels">number of channels of the playback, can be 1 or 2</param>
public CustomDevice(string name, SamplingRate captureRate, int captureChannels, SamplingRate playbackRate, int playbackChannels)
{
    ID = Guid.NewGuid().ToString("N");
    Name = name;
    PlaybackRate = playbackRate;
    PlaybackChannels = playbackChannels;
    CaptureRate = captureRate;
    CaptureChannels = captureChannels;
    api = Library.Api;
    api.RegisterCustomDevice(ID, Name, (int)CaptureRate, CaptureChannels, (int)PlaybackRate, PlaybackChannels);
}
/// <summary>
/// Writes our settings to an XML writer
/// </summary>
/// <param name="writer">an XML writer</param>
public void WriteXml(System.Xml.XmlWriter writer)
{
    writer.WriteAttributeString("BaudRate", BaudRate.ToString());
    writer.WriteAttributeString("ControllerType", ControllerType.ToString());
    writer.WriteAttributeString("SamplingChannels", SamplingChannels.ToString());
    writer.WriteAttributeString("SamplingCompression", SamplingCompression.ToString());
    writer.WriteAttributeString("SamplingMode", SamplingMode.ToString());
    writer.WriteAttributeString("SamplingRate", SamplingRate.ToString());
    writer.WriteAttributeString("SamplingTime", SamplingTime.ToString());
    writer.WriteAttributeString("SerialPortName", SerialPortName);
}
public ThreadPoolSchedulingStatsCollector(double[] histogramBuckets, SamplingRate samplingRate)
{
    _histogramBuckets = histogramBuckets;
    _samplingRate = samplingRate;
    _eventPairTimer = new EventPairTimer<long>(
        EventIdThreadPoolEnqueueWork,
        EventIdThreadPoolDequeueWork,
        x => (long)x.Payload[0],
        samplingRate,
        new Cache<long, int>(TimeSpan.FromSeconds(30), initialCapacity: 512));
}
public static string ExtractMp3(string input, string output, BitrateMp3 br = BitrateMp3.Standard, SamplingRate sr = SamplingRate.AudioCD)
{
    if (string.IsNullOrWhiteSpace(input))
    {
        throw new ArgumentNullException("input");
    }
    if (string.IsNullOrWhiteSpace(output))
    {
        throw new ArgumentNullException("output");
    }

    return new CommandLineBuilder()
        .AddEntry(input)
        .VideoCodec(VideoEncoding.NOVIDEO)
        .AudioCodec(AudioEncoding.MP3)
        .Param(Parameter.A_BITRATE, ((int)br).ToString())
        .Param(Parameter.A_SAMPLE, ((int)sr).ToString())
        .Param(Parameter.MISC_OVERWRITE_YES)
        .Output(output);
}
public void Given_1000_Events_ShouldSampleEvent_Returns_True_Every_Nth_Event(SampleEvery samplingRate, int expectedEvents)
{
    var eventsSampled = 0;
    var sr = new SamplingRate(samplingRate);

    for (int i = 0; i < 1_000; i++)
    {
        if (sr.ShouldSampleEvent())
        {
            eventsSampled++;
        }
    }

    Assert.That(eventsSampled, Is.EqualTo(expectedEvents));
}
public KestrelEventParser(SamplingRate samplingRate)
{
    _samplingRate = samplingRate;
    _eventPairTimerConnections = new EventPairTimer<long>(
        EventIdConnectionStart,
        EventIdConnectionStop,
        x => x.OSThreadId,
        samplingRate);
    _eventPairTimerRequests = new EventPairTimer<long>(
        EventIdRequestStart,
        EventIdRequestStop,
        x => x.OSThreadId,
        samplingRate);
}
public void SaveClickedAllChangedTest()
{
    // For the ServiceProvider mock:
    // must be referenced so the mock is not overwritten
    IServiceProvider unused = ServiceManager.ServiceProvider;

    // Get the field info
    System.Reflection.FieldInfo instance = typeof(ServiceManager).GetField("_serviceProvider",
        System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.NonPublic);

    // Set up the mocks
    // ServiceProvider
    Mock<IServiceProvider> mockSingleton = new Mock<IServiceProvider>();
    // Service that is to be mocked
    Mock<ISettingsService> mockSettingsService = new Mock<ISettingsService>();

    User initUser = new User("Alice", 70);
    CultureInfo initCultureInfo = CultureInfo.GetCultureInfo("en-US");
    SamplingRate newSamplingRate = SamplingRate.Hz_100;
    User newUser = new User("Bob", 80);
    CultureInfo newCultureInfo = CultureInfo.GetCultureInfo("de-DE");

    mockSettingsService.SetupProperty(x => x.SamplingRate, SamplingRate.Hz_50);
    mockSettingsService.SetupProperty(x => x.ActiveUser, initUser);
    mockSettingsService.SetupProperty(x => x.ActiveLanguage, initCultureInfo);
    mockSingleton.Setup(x => x.GetService(typeof(ISettingsService))).Returns(mockSettingsService.Object);
    instance.SetValue(null, mockSingleton.Object);

    // Act
    SettingsViewModel settingsViewModel = new SettingsViewModel();
    bool actual = settingsViewModel.SaveClicked(newUser.Username, newUser.Steplength, newSamplingRate, newCultureInfo);

    // Assert
    Assert.True(actual);
    Assert.Equal((int)newSamplingRate, settingsViewModel.SamplingRate);
    Assert.Equal(newUser.Username, settingsViewModel.Username);
    Assert.Equal(newUser.Steplength, settingsViewModel.Steplength);
}
//public OpusEncoder(SamplingRate inputSamplingRateHz, Channels numChannels)
//    : this(inputSamplingRateHz, numChannels, 120000, OpusApplicationType.Audio, Delay.Delay20ms)
//{ }

//public OpusEncoder(SamplingRate inputSamplingRateHz, Channels numChannels, int bitrate)
//    : this(inputSamplingRateHz, numChannels, bitrate, OpusApplicationType.Audio, Delay.Delay20ms)
//{ }

//public OpusEncoder(SamplingRate inputSamplingRateHz, Channels numChannels, int bitrate, OpusApplicationType applicationType)
//    : this(inputSamplingRateHz, numChannels, bitrate, applicationType, Delay.Delay20ms)
//{ }

public OpusEncoder(SamplingRate inputSamplingRateHz, Channels numChannels, int bitrate, OpusApplicationType applicationType, Delay encoderDelay)
{
    if ((inputSamplingRateHz != SamplingRate.Sampling08000) &&
        (inputSamplingRateHz != SamplingRate.Sampling12000) &&
        (inputSamplingRateHz != SamplingRate.Sampling16000) &&
        (inputSamplingRateHz != SamplingRate.Sampling24000) &&
        (inputSamplingRateHz != SamplingRate.Sampling48000))
    {
        throw new ArgumentOutOfRangeException("inputSamplingRateHz", "Must use one of the pre-defined sampling rates (" + inputSamplingRateHz + ")");
    }
    if ((numChannels != Channels.Mono) && (numChannels != Channels.Stereo))
    {
        throw new ArgumentOutOfRangeException("numChannels", "Must be Mono or Stereo");
    }
    if ((applicationType != OpusApplicationType.Audio) &&
        (applicationType != OpusApplicationType.RestrictedLowDelay) &&
        (applicationType != OpusApplicationType.Voip))
    {
        throw new ArgumentOutOfRangeException("applicationType", "Must use one of the pre-defined application types (" + applicationType + ")");
    }
    if ((encoderDelay != Delay.Delay10ms) && (encoderDelay != Delay.Delay20ms) && (encoderDelay != Delay.Delay2dot5ms) &&
        (encoderDelay != Delay.Delay40ms) && (encoderDelay != Delay.Delay5ms) && (encoderDelay != Delay.Delay60ms))
    {
        throw new ArgumentOutOfRangeException("encoderDelay", "Must use one of the pre-defined delay values (" + encoderDelay + ")");
    }

    _inputSamplingRate = inputSamplingRateHz;
    _inputChannels = numChannels;
    _handle = Wrapper.opus_encoder_create(inputSamplingRateHz, numChannels, applicationType);
    if (_handle == IntPtr.Zero)
    {
        throw new OpusException(OpusStatusCode.AllocFail, "Memory was not allocated for the encoder");
    }

    EncoderDelay = encoderDelay;
    Bitrate = bitrate;
    UseInbandFEC = true;
    PacketLossPercentage = 30;
}
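// A minimal usage sketch (not from the original sources) for the constructor above. The
// 64000 bit/s bitrate is an arbitrary illustrative choice; the other arguments use the enum
// members that the constructor itself checks for.
public static OpusEncoder CreateMusicEncoder()
{
    return new OpusEncoder(SamplingRate.Sampling48000, Channels.Stereo, 64000, OpusApplicationType.Audio, Delay.Delay20ms);
}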
public GsrPpgSampler(string portName, int baudRate, SamplingRate gsr, SamplingRate ppg)
    : base(portName, baudRate)
{
    GsrSamplingRate = gsr;
    PpgSamplingRate = ppg;

    base.Open();
    if (!base.IsOpen)
    {
        throw new Exception("Connection failed!");
    }

    byte[] srBytes1 = BitConverter.GetBytes((int)GsrSamplingRate);
    byte[] srBytes2 = BitConverter.GetBytes((int)PpgSamplingRate);
    byte[] dataToSend = new byte[9];
    dataToSend[0] = (byte)'!';
    Buffer.BlockCopy(srBytes1, 0, dataToSend, 1, srBytes1.Length);
    Buffer.BlockCopy(srBytes2, 0, dataToSend, srBytes1.Length + 1, srBytes2.Length);
    base.Write(dataToSend, 0, dataToSend.Length);
}
public static IntPtr opus_decoder_create(SamplingRate Fs, Channels channels)
{
    int size = Wrapper.opus_decoder_get_size(channels);
    IntPtr ptr = Marshal.AllocHGlobal(size);
    OpusStatusCode statusCode = Wrapper.opus_decoder_init(ptr, Fs, channels);
    try
    {
        HandleStatusCode(statusCode);
    }
    catch (Exception)
    {
        if (ptr != IntPtr.Zero)
        {
            Wrapper.opus_decoder_destroy(ptr);
            ptr = IntPtr.Zero;
        }
        throw; // rethrow without resetting the stack trace
    }
    return ptr;
}
internal static extern OpusStatusCode opus_encoder_init(
    [In] this Encoder st,
    [In] SamplingRate Fs,
    [In] Channels channels,
    [In] OpusApplicationType application);
/// <summary>
/// Instead of opening existing sound devices that TeamSpeak has detected, you can also use our custom capture and playback mechanism to override the way in which TeamSpeak does capture and playback.
/// When you have opened a custom capture and playback device you must regularly supply new "captured" sound data via <see cref="CustomDevice.ProcessData(short[], int)"/> and retrieve data that should be "played back" via <see cref="CustomDevice.AcquireData(short[], int)"/>.
/// Where exactly this captured sound data comes from and where the playback data goes to is up to you, which allows a lot of cool things to be done with this mechanism.
/// A custom device can be opened like any standard device with <see cref="Connection.OpenCapture(ISoundDevice)"/> and <see cref="Connection.OpenPlayback(ISoundDevice)"/>.
/// </summary>
/// <param name="name">Displayed name of the custom device. Freely choose a name which identifies your device.</param>
/// <param name="captureRate">Frequency of the capture device.</param>
/// <param name="captureChannels">Number of channels of the capture device. This value depends on whether the used codec is a mono or stereo codec.</param>
/// <param name="playbackRate">Frequency of the playback device.</param>
/// <param name="playbackChannels">Number of channels of the playback device.</param>
/// <remarks>Implementing your own custom devices is for special use cases and entirely optional.</remarks>
/// <returns>a new <see cref="CustomDevice"/></returns>
public static CustomDevice CreateCustomDevice(string name, SamplingRate captureRate = SamplingRate.Hz48000, int captureChannels = 1, SamplingRate playbackRate = SamplingRate.Hz48000, int playbackChannels = 1)
{
    return new CustomDevice(name, captureRate, captureChannels, playbackRate, playbackChannels);
}
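// A minimal sketch (not from the original sources) of how the custom-device mechanism described
// above could be wired up, assuming this runs in the class that declares CreateCustomDevice and
// that CustomDevice implements ISoundDevice as the documentation implies. The buffer size is an
// illustrative choice, and the exact meaning of the length parameter should be checked against the SDK docs.
public static void FeedCustomDevice(Connection connection)
{
    CustomDevice device = CreateCustomDevice("My virtual device"); // defaults: 48 kHz, mono, both directions
    connection.OpenCapture(device);
    connection.OpenPlayback(device);

    // One illustrative iteration; in practice this would run from your own audio loop or callback.
    short[] samples = new short[480]; // e.g. 10 ms of mono audio at 48 kHz
    device.ProcessData(samples, samples.Length);  // hand "captured" samples to TeamSpeak
    device.AcquireData(samples, samples.Length);  // fetch samples TeamSpeak wants played back
}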
public void SampleEvery_Reflects_The_Ratio_Of_Every_100_Events_That_Will_Be_Sampled(SampleEvery samplingRate, int expected)
{
    var sr = new SamplingRate(samplingRate);
    Assert.That(sr.SampleEvery, Is.EqualTo(expected));
}
public void MergeFrom(Telemetry other)
{
    if (other == null)
    {
        return;
    }
    if (other.Ver != 0) { Ver = other.Ver; }
    if (other.DataTypeName.Length != 0) { DataTypeName = other.DataTypeName; }
    if (other.DateTime.Length != 0) { DateTime = other.DateTime; }
    if (other.samplingRate_ != null)
    {
        if (samplingRate_ == null)
        {
            samplingRate_ = new global::Google.Protobuf.WellKnownTypes.DoubleValue();
        }
        SamplingRate.MergeFrom(other.SamplingRate);
    }
    if (other.SequenceNumber.Length != 0) { SequenceNumber = other.SequenceNumber; }
    if (other.InstrumentationKey.Length != 0) { InstrumentationKey = other.InstrumentationKey; }
    tags_.Add(other.tags_);
    switch (other.DataCase)
    {
        case DataOneofCase.Event:
            if (Event == null) { Event = new global::Microsoft.LocalForwarder.Library.Inputs.Contracts.Event(); }
            Event.MergeFrom(other.Event);
            break;
        case DataOneofCase.Message:
            if (Message == null) { Message = new global::Microsoft.LocalForwarder.Library.Inputs.Contracts.Message(); }
            Message.MergeFrom(other.Message);
            break;
        case DataOneofCase.Metric:
            if (Metric == null) { Metric = new global::Microsoft.LocalForwarder.Library.Inputs.Contracts.Metric(); }
            Metric.MergeFrom(other.Metric);
            break;
        case DataOneofCase.Exception:
            if (Exception == null) { Exception = new global::Microsoft.LocalForwarder.Library.Inputs.Contracts.Exception(); }
            Exception.MergeFrom(other.Exception);
            break;
        case DataOneofCase.Dependency:
            if (Dependency == null) { Dependency = new global::Microsoft.LocalForwarder.Library.Inputs.Contracts.Dependency(); }
            Dependency.MergeFrom(other.Dependency);
            break;
        case DataOneofCase.Availability:
            if (Availability == null) { Availability = new global::Microsoft.LocalForwarder.Library.Inputs.Contracts.Availability(); }
            Availability.MergeFrom(other.Availability);
            break;
        case DataOneofCase.PageView:
            if (PageView == null) { PageView = new global::Microsoft.LocalForwarder.Library.Inputs.Contracts.PageView(); }
            PageView.MergeFrom(other.PageView);
            break;
        case DataOneofCase.Request:
            if (Request == null) { Request = new global::Microsoft.LocalForwarder.Library.Inputs.Contracts.Request(); }
            Request.MergeFrom(other.Request);
            break;
    }
    _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
public override int GetHashCode()
{
    int hash = 1;
    if (Ver != 0) { hash ^= Ver.GetHashCode(); }
    if (DataTypeName.Length != 0) { hash ^= DataTypeName.GetHashCode(); }
    if (DateTime.Length != 0) { hash ^= DateTime.GetHashCode(); }
    if (samplingRate_ != null) { hash ^= SamplingRate.GetHashCode(); }
    if (SequenceNumber.Length != 0) { hash ^= SequenceNumber.GetHashCode(); }
    if (InstrumentationKey.Length != 0) { hash ^= InstrumentationKey.GetHashCode(); }
    hash ^= Tags.GetHashCode();
    if (dataCase_ == DataOneofCase.Event) { hash ^= Event.GetHashCode(); }
    if (dataCase_ == DataOneofCase.Message) { hash ^= Message.GetHashCode(); }
    if (dataCase_ == DataOneofCase.Metric) { hash ^= Metric.GetHashCode(); }
    if (dataCase_ == DataOneofCase.Exception) { hash ^= Exception.GetHashCode(); }
    if (dataCase_ == DataOneofCase.Dependency) { hash ^= Dependency.GetHashCode(); }
    if (dataCase_ == DataOneofCase.Availability) { hash ^= Availability.GetHashCode(); }
    if (dataCase_ == DataOneofCase.PageView) { hash ^= PageView.GetHashCode(); }
    if (dataCase_ == DataOneofCase.Request) { hash ^= Request.GetHashCode(); }
    hash ^= (int)dataCase_;
    if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); }
    return hash;
}
private static extern OpusStatusCode opus_decoder_init(IntPtr st, SamplingRate Fs, Channels channels);
internal static extern ErrorCode opus_decoder_init(IntPtr st, SamplingRate.Template fs, Channels.Template channels);
public static string Value(this SamplingRate value)
{
    return ((int)value).ToString();
}
private void ParseProgram(byte[] buffer)
{
    MemoryStream ms = new MemoryStream(buffer);
    ms.Position++;
    byte numPrograms = ms.ReadInt8();
    Programs = new Program[numPrograms];
    for (int i = 0; i < numPrograms; i++)
    {
        Programs[i] = new Program();
        Programs[i].ProgramSequenceStart = ms.ReadInt32BE();
        Programs[i].ProgramMapPid = ms.ReadUInt16BE();
        Programs[i].NumStreams = ms.ReadInt8();
        Programs[i].NumGroups = ms.ReadInt8();
        Programs[i].ProgramStreams = new ProgramStream[Programs[i].NumStreams];
        for (int j = 0; j < Programs[i].NumStreams; j++)
        {
            ushort PID = ms.ReadUInt16BE();
            int len = ms.ReadInt8();
            int pos = (int)ms.Position;
            byte codecType = ms.ReadInt8();
            switch (codecType)
            {
                case 0x01:
                case 0x02:
                case 0x1b:
                case 0xea:
                    VideoProgramStream child = new VideoProgramStream();
                    child.VideoCodec = (VideoCodec)codecType;
                    int value = ms.ReadInt8();
                    child.VideoResolution = (VideoResolution)(value >> 4);
                    FrameRate frameRate = (FrameRate)(value & 0x0f);
                    switch (frameRate)
                    {
                        case PlaylistModel.StreamModel.FrameRate._23_076: child.FrameRate = 23.976; break;
                        case PlaylistModel.StreamModel.FrameRate._24: child.FrameRate = 24; break;
                        case PlaylistModel.StreamModel.FrameRate._25: child.FrameRate = 25; break;
                        case PlaylistModel.StreamModel.FrameRate._29_976: child.FrameRate = 29.976; break;
                        case PlaylistModel.StreamModel.FrameRate._50: child.FrameRate = 50; break;
                        case PlaylistModel.StreamModel.FrameRate._59_94: child.FrameRate = 59.94; break;
                        default: throw new NotImplementedException(frameRate.ToString());
                    }
                    Programs[i].ProgramStreams[j] = child;
                    break;
                case 0x03:
                case 0x04:
                case 0x80:
                case 0x81:
                case 0x82:
                case 0x83:
                case 0x84:
                case 0x85:
                case 0x86:
                case 0xa1:
                case 0xa2:
                    AudioProgramStream child2 = new AudioProgramStream();
                    child2.AudioCodec = (AudioCodec)codecType;
                    int value2 = ms.ReadInt8();
                    child2.AudioPresentationMode = (AudioPresentationMode)(value2 >> 4);
                    SamplingRate samplingRate = (SamplingRate)(value2 & 0x0f);
                    switch (samplingRate)
                    {
                        case SamplingRate._48: child2.PrimarySamplingRate = 48000; break;
                        case SamplingRate._96: child2.PrimarySamplingRate = 96000; break;
                        case SamplingRate._192: child2.PrimarySamplingRate = 192000; break;
                        default: throw new NotImplementedException(samplingRate.ToString());
                    }
                    child2.LanguageCode = ms.ReadFixedLengthString(3);
                    Programs[i].ProgramStreams[j] = child2;
                    break;
                case 0x90:
                case 0x91:
                case 0xa0:
                    GraphicsProgramStream child3 = new GraphicsProgramStream();
                    child3.LanguageCode = ms.ReadFixedLengthString(3);
                    Programs[i].ProgramStreams[j] = child3;
                    break;
                case 0x92:
                    SubtitleProgramStream child4 = new SubtitleProgramStream();
                    child4.CharCode = ms.ReadInt8();
                    child4.LanguageCode = ms.ReadFixedLengthString(3);
                    Programs[i].ProgramStreams[j] = child4;
                    break;
                default:
                    Debug.WriteLine(String.Format("Unknown codec type: {0}", codecType));
                    break;
            }
            ms.Position = pos + len;
            Programs[i].ProgramStreams[j].PID = PID;
        }
    }
}
internal static extern ErrorCode opus_encoder_init(IntPtr st, SamplingRate.Template fs, Channels.Template channels, ApplicationType application);
/// <summary>
/// Builds an "OpusHead" header with a mapping family derived from the channel count.
/// </summary>
/// <param name="channels">number of output channels</param>
/// <param name="samplingRate">original input sampling rate</param>
/// <param name="preskip">number of samples to skip at the start of playback</param>
public OpusHeader(byte channels, SamplingRate samplingRate, UInt16 preskip)
    : this(channels, channels > 8 ? MappingFamily.Multy : MappingFamily.Vorbis, samplingRate, preskip, 0, 1)
{ }
public OpusEncoder(SamplingRate inputSamplingRateHz, Channels numChannels)
    : this(inputSamplingRateHz, numChannels, 120000, OpusApplicationType.Audio, Delay.Delay20ms)
{ }
private static extern OpusStatusCode opus_encoder_init(IntPtr st, SamplingRate Fs, Channels channels, OpusApplicationType application);
public OpusEncoder(SamplingRate inputSamplingRateHz, Channels numChannels, int bitrate, OpusApplicationType applicationType)
    : this(inputSamplingRateHz, numChannels, bitrate, applicationType, Delay.Delay20ms)
{ }
private void Validate(object sender, EventArgs e)
{
    int tmp;
    if (gbVideo.Enabled)
    {
        // CRF check
        if (tbCRF.Text.Length != 0)
        {
            if (!int.TryParse(tbCRF.Text, out tmp)) { MessageBox.Show(string.Format(I18n.Get("FormatCrf"), Common.CRF_MIN, Common.CRF_MAX)); return; }
            if (tmp < Common.CRF_MIN || tmp > Common.CRF_MAX) { MessageBox.Show(string.Format(I18n.Get("FormatCrf"), Common.CRF_MIN, Common.CRF_MAX)); return; }
            _builder.Param(Parameter.V_CRF, tbCRF.Text);
        }

        // Qscale check
        if (tbQscale.Text.Length != 0)
        {
            if (!int.TryParse(tbQscale.Text, out tmp)) { MessageBox.Show(string.Format(I18n.Get("FormatQscale"), Common.QSCALE_MIN, Common.QSCALE_MAX)); return; }
            if (tmp < Common.QSCALE_MIN || tmp > Common.QSCALE_MAX) { MessageBox.Show(string.Format(I18n.Get("FormatQscale"), Common.QSCALE_MIN, Common.QSCALE_MAX)); return; }
            _builder.Param(Parameter.V_QSCALE, tbQscale.Text);
        }

        // Qmin check
        int qmin = 4;
        if (tbQmin.Text.Length != 0)
        {
            if (!int.TryParse(tbQmin.Text, out qmin)) { MessageBox.Show(string.Format(I18n.Get("FormatQmin"), Common.QMIN_MIN, Common.QMIN_MAX)); return; }
            if (qmin < Common.QMIN_MIN || qmin > Common.QMIN_MAX) { MessageBox.Show(string.Format(I18n.Get("FormatQmin"), Common.QMIN_MIN, Common.QMIN_MAX)); return; }
            _builder.Param(Parameter.V_QMIN, tbQmin.Text);
        }

        // Qmax check
        if (tbQmax.Text.Length != 0)
        {
            if (!int.TryParse(tbQmax.Text, out tmp)) { MessageBox.Show(string.Format(I18n.Get("FormatQmax"), qmin, Common.QMAX_MAX)); return; }
            if (tmp < qmin || tmp > Common.QMAX_MAX) { MessageBox.Show(string.Format(I18n.Get("FormatQmax"), qmin, Common.QMAX_MAX)); return; }
            _builder.Param(Parameter.V_QMAX, tbQmax.Text);
        }

        // Bitrate check
        if (tbBV.Text.Length != 0)
        {
            bool unit = tbBV.Text.EndsWith("k", StringComparison.CurrentCultureIgnoreCase) ||
                        tbBV.Text.EndsWith("m", StringComparison.CurrentCultureIgnoreCase);
            if (!int.TryParse(unit ? tbBV.Text.Substring(0, tbBV.Text.Length - 1) : tbBV.Text, out tmp)) { MessageBox.Show(I18n.Get("FormatBitrate")); return; }
            if (tmp <= 0) { MessageBox.Show(I18n.Get("FormatBitrate")); return; }
            _builder.Param(Parameter.V_BITRATE, tbBV.Text);
        }

        _builder.VideoCodec((VideoEncoding)cbCV.SelectedValue);
    }

    if (gbAudio.Enabled)
    {
        _builder.AudioCodec((AudioEncoding)cbCA.SelectedValue);
        BitrateMp3 br = (BitrateMp3)Enum.Parse(typeof(BitrateMp3), cbBA.SelectedValue.ToString());
        if (br != BitrateMp3.Defaut)
        {
            _builder.Param(Parameter.A_BITRATE, ((int)br).ToString());
        }
        SamplingRate sr = (SamplingRate)Enum.Parse(typeof(SamplingRate), cbSR.SelectedValue.ToString());
        if (sr != SamplingRate.Defaut)
        {
            _builder.Param(Parameter.A_SAMPLE, ((int)sr).ToString());
        }
    }

    _builder.Param(Parameter.NONE, cbFormat.SelectedValue.ToString());
    this.DialogResult = DialogResult.OK;
}
private void Validate(object sender, EventArgs e)
{
    if (_media == null)
    {
        MessageBox.Show("No valid input file.");
        return;
    }
    if (tbOut.Text.Length == 0)
    {
        MessageBox.Show("Choose an output file.");
        return;
    }
    if (tbFile.Text.Equals(tbOut.Text))
    {
        MessageBox.Show(I18n.Get("ErrorSameInputOutput"));
        return;
    }

    _builder = new CommandLineBuilder();
    _builder.AddEntry(tbFile.Text);

    int tmp;
    if (gbVideo.Enabled)
    {
        // CRF check
        if (tbCRF.Text.Length != 0)
        {
            if (!int.TryParse(tbCRF.Text, out tmp)) { MessageBox.Show(string.Format(I18n.Get("FormatCrf"), Common.CRF_MIN, Common.CRF_MAX)); _builder = null; return; }
            if (tmp < Common.CRF_MIN || tmp > Common.CRF_MAX) { MessageBox.Show(string.Format(I18n.Get("FormatCrf"), Common.CRF_MIN, Common.CRF_MAX)); _builder = null; return; }
            _builder.Param(Parameter.V_CRF, tbCRF.Text);
        }

        // Qscale check
        if (tbQscale.Text.Length != 0)
        {
            if (!int.TryParse(tbQscale.Text, out tmp)) { MessageBox.Show(string.Format(I18n.Get("FormatQscale"), Common.QSCALE_MIN, Common.QSCALE_MAX)); _builder = null; return; }
            if (tmp < Common.QSCALE_MIN || tmp > Common.QSCALE_MAX) { MessageBox.Show(string.Format(I18n.Get("FormatQscale"), Common.QSCALE_MIN, Common.QSCALE_MAX)); _builder = null; return; }
            _builder.Param(Parameter.V_QSCALE, tbQscale.Text);
        }

        // Qmin check
        int qmin = 4;
        if (tbQmin.Text.Length != 0)
        {
            if (!int.TryParse(tbQmin.Text, out qmin)) { MessageBox.Show(string.Format(I18n.Get("FormatQmin"), Common.QMIN_MIN, Common.QMIN_MAX)); _builder = null; return; }
            if (qmin < Common.QMIN_MIN || qmin > Common.QMIN_MAX) { MessageBox.Show(string.Format(I18n.Get("FormatQmin"), Common.QMIN_MIN, Common.QMIN_MAX)); _builder = null; return; }
            _builder.Param(Parameter.V_QMIN, tbQmin.Text);
        }

        // Qmax check
        if (tbQmax.Text.Length != 0)
        {
            if (!int.TryParse(tbQmax.Text, out tmp)) { MessageBox.Show(string.Format(I18n.Get("FormatQmax"), qmin, Common.QMAX_MAX)); _builder = null; return; }
            if (tmp < qmin || tmp > Common.QMAX_MAX) { MessageBox.Show(string.Format(I18n.Get("FormatQmax"), qmin, Common.QMAX_MAX)); _builder = null; return; }
            _builder.Param(Parameter.V_QMAX, tbQmax.Text);
        }

        // Bitrate check
        if (tbBV.Text.Length != 0)
        {
            bool unit = tbBV.Text.EndsWith("k", StringComparison.CurrentCultureIgnoreCase) ||
                        tbBV.Text.EndsWith("m", StringComparison.CurrentCultureIgnoreCase);
            if (!int.TryParse(unit ? tbBV.Text.Substring(0, tbBV.Text.Length - 1) : tbBV.Text, out tmp)) { MessageBox.Show(I18n.Get("FormatBitrate")); _builder = null; return; }
            if (tmp <= 0) { MessageBox.Show(I18n.Get("FormatBitrate")); _builder = null; return; }
            _builder.Param(Parameter.V_BITRATE, tbBV.Text);
        }

        _builder.VideoCodec((VideoEncoding)cbCV.SelectedValue);
    }
    else
    {
        _builder.VideoCodec(VideoEncoding.NOVIDEO);
    }

    if (gbAudio.Enabled)
    {
        _builder.AudioCodec((AudioEncoding)cbCA.SelectedValue);
        BitrateMp3 br = (BitrateMp3)Enum.Parse(typeof(BitrateMp3), cbBA.SelectedValue.ToString());
        if (br != BitrateMp3.Defaut)
        {
            _builder.Param(Parameter.A_BITRATE, ((int)br).ToString());
        }
        SamplingRate sr = (SamplingRate)Enum.Parse(typeof(SamplingRate), cbSR.SelectedValue.ToString());
        if (sr != SamplingRate.Defaut)
        {
            _builder.Param(Parameter.A_SAMPLE, ((int)sr).ToString());
        }
    }
    else
    {
        _builder.AudioCodec(AudioEncoding.NOAUDIO);
    }

    _builder.Param(Parameter.NONE, cbFormat.SelectedValue.ToString()).Param(Parameter.MISC_OVERWRITE_YES);
    this.DialogResult = DialogResult.OK;
}
/// <summary>
/// Creates a new Opus encoder.
/// </summary>
/// <param name="inputSamplingRate">Sampling rate of the input signal (Hz). This must be one of 8000, 12000, 16000, 24000, or 48000.</param>
/// <param name="inputChannels">Number of channels (1 or 2) in the input signal.</param>
/// <param name="application">Coding mode.</param>
/// <returns>A new <c>OpusEncoder</c>.</returns>
public static OpusEncoder Create(SamplingRate inputSamplingRate, byte inputChannels, ApplicationType application)
{
    IntPtr error;
    IntPtr encoder = Api.opus_encoder_create((int)inputSamplingRate.Value, inputChannels, (int)application, out error);
    if ((ErrorCode)error != ErrorCode.OK)
    {
        throw new Exception("Exception occurred while creating encoder");
    }
    return new OpusEncoder(encoder, inputChannels, 4000);
}
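// A minimal usage sketch (not from the original sources) for the factory above. How a SamplingRate
// instance for 48 kHz is obtained, and the ApplicationType.Audio member name, are assumptions about
// this particular wrapper.
public static OpusEncoder CreateMonoEncoder(SamplingRate rate48k)
{
    // Mono input, generic audio coding mode.
    return OpusEncoder.Create(rate48k, 1, ApplicationType.Audio);
}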