/// <summary>
/// Constructs a new signal generator around the supplied generating function.
/// </summary>
/// <param name="func">The function used to generate samples.</param>
public SignalGenerator(Func<double, double> func)
{
    // Defaults: one channel, unit sampling rate, 32-bit IEEE float samples.
    Function = func;
    Format = SampleFormat.Format32BitIeeeFloat;
    SamplingRate = 1;
    Channels = 1;
}
/// <summary>
/// Creates a new Impulse Signal Generator.
/// </summary>
/// <param name="bpm">Beats per minute.</param>
/// <param name="pulses">Number of pulses.</param>
/// <param name="sampleRate">The sampling rate.</param>
/// <param name="format">The sample format.</param>
public ImpulseGenerator(int bpm, int pulses, int sampleRate, SampleFormat format)
{
    // This generator always produces a single channel.
    Channels = 1;
    Format = format;
    SamplingRate = sampleRate;
    BeatsPerMinute = bpm;
    Pulses = pulses;
}
/// <summary>
/// Opens the default wave-out device for the given format and registers this
/// stream with the synthesizer's active audio list.
/// </summary>
/// <param name="format">The sample format used to build the wave format spec.</param>
/// <param name="synth">The owning synthesizer (supplies the sample rate).</param>
public AudioStream(SampleFormat format, Synthesizer synth)
{
    var fmt = CreateFormatSpec(format, synth.SampleRate);

    // Create output device; surface any MM error as an ExternalException.
    MMRESULT result = waveOutOpen(ref _ptrOutputDevice, WAVE_MAPPER, ref fmt,
        WaveOutProc, IntPtr.Zero, WaveOutOpenFlags.CALLBACK_FUNCTION);
    if (result != MMRESULT.MMSYSERR_NOERROR)
        throw new ExternalException($"Function 'waveOutOpen' returned error code {result}");

    synth.TTS.AddActiveAudio(this);
    _synth = synth;
}
/// <summary>
/// Builds a converter targeting the given destination format. Only
/// 32-bit IEEE float destinations are accepted; 16-bit and 32-bit integer
/// sources are registered as convertible inputs.
/// </summary>
/// <param name="destinationFormat">The format samples will be converted to.</param>
public FormatConverter(SampleFormat destinationFormat)
{
    this.destinationFormat = destinationFormat;

    // Guard clause: anything but float32 is unsupported.
    if (destinationFormat != SampleFormat.Format32BitIeeeFloat)
        throw new UnsupportedSampleFormatException();

    formatTranslations[SampleFormat.Format16Bit] = destinationFormat;
    formatTranslations[SampleFormat.Format32Bit] = destinationFormat;
}
/// <summary>Constructs a new SoundFormat.</summary>
/// <param name="channels">Channel count; 1 and 2 map to a known format.</param>
/// <param name="bitsPerSample">Bits per sample; 8 and 16 map to a known format.</param>
/// <param name="sampleRate">The sample rate; must be positive.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="sampleRate"/> is not positive.
/// </exception>
public SoundFormat(int channels, int bitsPerSample, int sampleRate)
{
    // BUG FIX: the original only rejected 0, so negative rates slipped
    // through despite the "Must be higher than 0" message.
    if (sampleRate <= 0)
        throw new ArgumentOutOfRangeException(nameof(sampleRate), "Must be higher than 0.");

    // Unrecognized channel/bit combinations intentionally leave the
    // format at its zero value (preserved from the original behavior).
    SampleFormat = 0;
    switch (channels)
    {
        case 1:
            if (bitsPerSample == 8)
                SampleFormat = SampleFormat.Mono8;
            else if (bitsPerSample == 16)
                SampleFormat = SampleFormat.Mono16;
            break;
        case 2:
            if (bitsPerSample == 8)
                SampleFormat = SampleFormat.Stereo8;
            else if (bitsPerSample == 16)
                SampleFormat = SampleFormat.Stereo16;
            break;
    }

    SampleRate = sampleRate;
}
/// <summary>
/// Converts each source sample to 32-bit IEEE float and stores the result
/// in the destination signal.
/// </summary>
/// <param name="sourceData">Signal to convert (16-bit or 32-bit integer samples).</param>
/// <param name="destinationData">Signal receiving the float samples.</param>
protected override void ProcessFilter(Signal sourceData, Signal destinationData)
{
    int channels = sourceData.Channels;
    int length = sourceData.Length;
    SampleFormat dstFormat = destinationData.SampleFormat;
    SampleFormat srcFormat = sourceData.SampleFormat;

    if (dstFormat == SampleFormat.Format32BitIeeeFloat)
    {
        float dst;
        if (srcFormat == SampleFormat.Format16Bit)
        {
            short src;
            for (int c = 0; c < channels; c++)
            {
                for (int i = 0; i < length; i++)
                {
                    src = (short)sourceData.GetSample(c, i);
                    SampleConverter.Convert(src, out dst);
                    // BUG FIX: the converted value was previously discarded,
                    // leaving the destination untouched. Assumes Signal
                    // exposes SetSample(channel, index, value) — the mirror
                    // of GetSample used above; confirm against Signal's API.
                    destinationData.SetSample(c, i, dst);
                }
            }
        }
        else if (srcFormat == SampleFormat.Format32Bit)
        {
            int src;
            for (int c = 0; c < channels; c++)
            {
                for (int i = 0; i < length; i++)
                {
                    src = (int)sourceData.GetSample(c, i);
                    SampleConverter.Convert(src, out dst);
                    // BUG FIX: store the converted sample (see note above).
                    destinationData.SetSample(c, i, dst);
                }
            }
        }
    }
}
/// <summary>
/// Applies the filter to a signal.
/// </summary>
protected override void ProcessFilter(Signal sourceData, Signal destinationData)
{
    SampleFormat format = sourceData.SampleFormat;
    // channels/length are captured but not used below; `samples` drives the
    // flat pointer walk over the raw buffer.
    int channels = sourceData.Channels;
    int length = sourceData.Length;
    int samples = sourceData.Samples;

    if (format == SampleFormat.Format32BitIeeeFloat)
    {
        unsafe
        {
            // Element-wise absolute value (full-wave rectification) of each
            // float sample, walking source and destination in lock-step.
            float *src = (float *)sourceData.Data.ToPointer();
            float *dst = (float *)destinationData.Data.ToPointer();
            for (int i = 0; i < samples; i++, dst++, src++)
            {
                *dst = System.Math.Abs(*src);
            }
        }
    }
    else if (format == SampleFormat.Format128BitComplex)
    {
        unsafe
        {
            Complex *src = (Complex *)sourceData.Data.ToPointer();
            Complex *dst = (Complex *)destinationData.Data.ToPointer();
            // The magnitude is written into the real part; the imaginary part
            // keeps the default value of the freshly constructed Complex.
            Complex c = new Complex();
            for (int i = 0; i < samples; i++, dst++, src++)
            {
                c.Re = (*src).Magnitude;
                *dst = c;
            }
        }
    }
}
/// <summary>
/// Wraps a stream for sample I/O, pinning an internal byte buffer so it can
/// be viewed as byte/short/float data without copying.
/// </summary>
/// <param name="underlyingStream">The stream samples are read from / written to.</param>
/// <param name="format">The sample encoding; determines bits per sample.</param>
/// <param name="sampleRate">The sample rate.</param>
/// <param name="headerLength">Bytes of header preceding sample data.</param>
/// <param name="channels">Number of channels.</param>
/// <param name="bufferSize">Buffer capacity, in samples.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="format"/> is not a recognized value.
/// </exception>
public StreamSampleIO(Stream underlyingStream, SampleFormat format, int sampleRate, int headerLength, int channels, int bufferSize)
{
    this.underlyingStream = underlyingStream;
    this.format = format;
    this.sampleRate = sampleRate;
    this.headerLength = headerLength;
    this.channels = channels;

    //Configure
    switch (format)
    {
        case SampleFormat.Float32:
            bitsPerSample = 32;
            break;
        case SampleFormat.Short16:
            bitsPerSample = 16;
            break;
        case SampleFormat.Byte:
            bitsPerSample = 8;
            break;
        default:
            // FIX: throw a specific exception type instead of the base
            // Exception (callers catching Exception still work).
            throw new ArgumentOutOfRangeException(nameof(format), "Unknown sample format.");
    }

    //Open buffer: pin it so the typed pointer views below stay valid.
    bufferSizeSamples = bufferSize;
    bufferSizeBytes = bufferSizeSamples * BytesPerSample;
    buffer = new byte[bufferSizeBytes];
    bufferHandle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    bufferPtrByte = (byte *)bufferHandle.AddrOfPinnedObject();
    bufferPtrShort = (short *)bufferHandle.AddrOfPinnedObject();
    bufferPtrFloat = (float *)bufferHandle.AddrOfPinnedObject();
}
/// <summary>
/// Applies the filter to a signal, producing a new signal in the
/// translated destination format.
/// </summary>
/// <param name="signal">The source signal.</param>
/// <returns>The filtered signal.</returns>
public Signal Apply(Signal signal)
{
    // Reject formats this filter cannot handle.
    CheckSourceFormat(signal.SampleFormat);

    // Look up the output format and allocate a matching destination signal.
    SampleFormat outputFormat = FormatTranslations[signal.SampleFormat];
    Signal output = NewSignal(signal.Channels, signal.Length, signal.SampleRate, outputFormat);

    // Run the actual filtering and hand back the result.
    ProcessFilter(signal, output);
    return output;
}
/// <summary>
/// Constructs a new signal.
/// </summary>
///
/// <param name="data">The raw data for the signal.</param>
/// <param name="channels">The number of channels for the signal.</param>
/// <param name="length">The length of the signal.</param>
/// <param name="format">The sample format for the signal.</param>
/// <param name="sampleRate">The sample rate of the signal.</param>
///
public Signal(byte[] data, int channels, int length, int sampleRate, SampleFormat format)
{
    // All setup is delegated to the shared init helper.
    init(data, channels, length, sampleRate, format);
}
/// <summary>
/// Reports whether the given sample format is supported.
/// This implementation accepts every format unconditionally.
/// </summary>
/// <param name="sampleFormat">The format to check (ignored).</param>
/// <returns>Always <c>true</c>.</returns>
public bool SupportsSampleFormat(SampleFormat sampleFormat) => true;
/// <summary>
/// Opens a new OpenAL output session, defaulting channel count and sample
/// rate when the caller passes zero.
/// </summary>
/// <param name="direction">Must be <see cref="Direction.Output"/>.</param>
/// <param name="memoryManager">Guest memory manager for the session.</param>
/// <param name="sampleFormat">Requested sample format.</param>
/// <param name="sampleRate">Requested rate, or 0 for the target default.</param>
/// <param name="channelCount">Requested channels, or 0 for stereo.</param>
/// <returns>The newly registered session.</returns>
public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount)
{
    // Apply defaults for unspecified (zero) parameters.
    if (channelCount == 0)
    {
        channelCount = 2;
    }

    if (sampleRate == 0)
    {
        sampleRate = Constants.TargetSampleRate;
    }

    // Validate after defaulting so the checks see the effective values.
    if (direction != Direction.Output)
    {
        throw new ArgumentException($"{direction}");
    }

    if (!SupportsChannelCount(channelCount))
    {
        throw new ArgumentException($"{channelCount}");
    }

    lock (_lock)
    {
        var session = new OpenALHardwareDeviceSession(this, memoryManager, sampleFormat, sampleRate, channelCount);

        _sessions.Add(session);

        return session;
    }
}
/// <summary>
/// Factory hook for output signals. Override in a derived class to change
/// how result signals are allocated.
/// </summary>
/// <param name="channels">Channel count for the new signal.</param>
/// <param name="samples">Sample count for the new signal.</param>
/// <param name="rate">Sample rate for the new signal.</param>
/// <param name="dstSampleFormat">Sample format for the new signal.</param>
/// <returns>A freshly allocated <see cref="Signal"/>.</returns>
protected virtual Signal NewSignal(int channels, int samples, int rate, SampleFormat dstSampleFormat)
    => new Signal(channels, samples, rate, dstSampleFormat);
/// <summary>
/// Determines whether the given sample format can be handled.
/// Only ADPCM and PCM payloads are supported.
/// </summary>
/// <param name="SF">The format to test.</param>
/// <returns><c>true</c> for ADPCM or PCM; otherwise <c>false</c>.</returns>
public static bool IsFormatSupported(SampleFormat SF)
{
    return SF == SampleFormat.ADPCM || SF == SampleFormat.PCM;
}
/// <summary>
/// Allocates the signal returned by this filter. Derived classes may
/// override this to customize output-signal creation.
/// </summary>
/// <param name="channels">Channel count.</param>
/// <param name="samples">Number of samples.</param>
/// <param name="rate">Sample rate.</param>
/// <param name="dstSampleFormat">Destination sample format.</param>
/// <returns>The new signal instance.</returns>
protected virtual Signal NewSignal(int channels, int samples, int rate, SampleFormat dstSampleFormat)
{
    Signal created = new Signal(channels, samples, rate, dstSampleFormat);
    return created;
}
/// <summary>
/// Creates a new Square Signal Generator.
/// </summary>
public SquareGenerator()
{
    // Single channel of 32-bit IEEE float samples.
    Channels = 1;
    Format = SampleFormat.Format32BitIeeeFloat;
}
/// <summary>
/// Creates a WAV writer over a new file stream opened at the given path,
/// delegating all setup to the stream-based constructor.
/// </summary>
/// <param name="path">Path of the file to open.</param>
/// <param name="mode">File mode used when opening the file.</param>
/// <param name="sampleRate">The sample rate.</param>
/// <param name="channels">Number of channels.</param>
/// <param name="format">The sample format.</param>
/// <param name="bufferSize">Buffer size forwarded to the stream writer.</param>
public WavFileWriter(string path, FileMode mode, int sampleRate, short channels, SampleFormat format, int bufferSize)
    : this(new FileStream(path, mode), sampleRate, channels, format, bufferSize)
{
}
/// <summary>
/// Initializes a directory with its default tag values.
/// </summary>
public TiffDirectory()
{
    // Zero-valued enum defaults.
    td_subfiletype = 0;
    td_compression = 0;
    td_photometric = 0;
    td_planarconfig = 0;

    td_fillorder = FillOrder.MSB2LSB;
    td_bitspersample = 1;
    td_threshholding = Threshold.BILEVEL;
    td_orientation = Orientation.TOPLEFT;
    td_samplesperpixel = 1;
    // -1 presumably means "no strip limit set" — confirm against readers.
    td_rowsperstrip = -1;
    td_tiledepth = 1;
    td_stripbytecountsorted = true; // Our own arrays always sorted.
    td_resolutionunit = ResUnit.INCH;
    td_sampleformat = SampleFormat.UINT;
    td_imagedepth = 1;
    // Default 2x2 YCbCr chroma subsampling, centered positioning.
    td_ycbcrsubsampling[0] = 2;
    td_ycbcrsubsampling[1] = 2;
    td_ycbcrpositioning = YCbCrPosition.CENTERED;
}
/// <summary>
/// Shared initializer: stores the tone parameters and precomputes the
/// per-sample phase increment.
/// </summary>
/// <param name="frequency">Tone frequency.</param>
/// <param name="amplitude">Tone amplitude.</param>
/// <param name="samplingRate">Sampling rate used to derive the phase step.</param>
private void init(double frequency, double amplitude, int samplingRate)
{
    Channels = 1;
    Format = SampleFormat.Format32BitIeeeFloat;
    Frequency = frequency;
    Amplitude = amplitude;

    // Phase advance per sample: 2*pi*frequency / samplingRate.
    theta = 2.0 * Math.PI * frequency / samplingRate;
}
/// <summary>
/// Serial data handler: accumulates incoming bytes until a complete
/// CRLF-terminated response is buffered, then dispatches on its first
/// character ('@' = serial number, 'P' = header or glucose record).
/// </summary>
public void DataReceived(object sender, SerialDataReceivedEventArgs e)
{
    _MeterResponded = true;
    if (!Port.IsOpen)
    {
        return;
    }

    // Append whatever is currently buffered on the port.
    _TempString += Port.ReadExisting();

    if (_TempString.Contains("\r\n"))
    {
        // Cut one full response (including its CRLF) off the front of the buffer.
        string commandResponse = _TempString.Substring(0, _TempString.IndexOf("\r\n") + 2);
        _TempString = _TempString.Replace(commandResponse, "");

        //========== serial number response ==========
        if (commandResponse.StartsWith("@"))
        {
            SerialNumber = commandResponse.Split(new char[] { ' ' })[1].Replace('\"', ' ').Trim();
#if DEBUG
            Console.WriteLine("SerialNumber: " + SerialNumber);
#endif
        }//if
        //========== patient record response ==========
        else if (commandResponse.StartsWith("P"))
        {
            if (!_HeaderRead)
            {
                //first row of P records is the header
                _HeaderRead = true;
                _RecordCount = 0;
                string[] header = commandResponse.Split(new char[] { ',' });
                SampleCount = int.Parse(header[0].Split(new char[] { ' ' })[1]);
                SerialNumber = header[1].Replace("\"", "");
                // The unit text between the quotes of the third field selects
                // the sample format.
                SampleFormat = header[2].Substring(header[2].IndexOf('\"') + 1, header[2].LastIndexOf('\"') - (header[2].IndexOf('\"') + 1)).ToLower().Trim() == "mg/dl" ? SampleFormat.MGDL : SampleFormat.MMOL;
#if DEBUG
                Console.WriteLine("SampleCount: " + SampleCount);
                Console.WriteLine("SampleFormat: " + SampleFormat.ToString());
                Console.WriteLine("SerialNumber: " + SerialNumber);
#endif
                if (_TestMode)
                {
                    base.Close();
                    Dispose();
                }
                OnHeaderRead(new HeaderReadEventArgs(SampleCount, this));
            }//if
            else
            {
                //all other P records are glucose records
                Console.WriteLine("Record: " + commandResponse);
                string[] parsedMsg = commandResponse.Replace("\"", "").Replace(" ", "").Split(new char[] { ',' });
                string[] parsedDate = parsedMsg[1].Split(new char[] { '/' });
                string[] parsedTime = parsedMsg[2].Split(new char[] { ':' });
                try
                {
                    // Date arrives as M/D/Y, time as H:M:S.
                    DateTime dtTimeStamp = new DateTime(int.Parse(parsedDate[2].ToString()), int.Parse(parsedDate[0].ToString()), int.Parse(parsedDate[1].ToString()), int.Parse(parsedTime[0].ToString()), int.Parse(parsedTime[1].ToString()), int.Parse(parsedTime[2].ToString()));
                    if (dtTimeStamp.Year < 100)
                    {
                        //two digit year encountered (all dates are assumed to be in 2000+)
                        dtTimeStamp = dtTimeStamp.AddYears(2000);
                    }//if
                    OnRecordRead(new RecordReadEventArgs(Records.AddRecordRow(dtTimeStamp, Int32.Parse(parsedMsg[3].ToString()), "mg/dl")));
                }//try
                catch
                {
                    // NOTE(review): malformed records are silently skipped —
                    // presumably intentional best-effort parsing; confirm.
                }
                if (++_RecordCount == SampleCount)
                {
                    //all records read so close the port and dispose
                    OnReadFinished(new ReadFinishedEventArgs(this));
                    Dispose();
                } //if
            }
        } //elseif
    } //if
}
/// <summary>
/// Initializes a new instance of the <see cref="SinusoidSource"/> class.
/// </summary>
/// <param name="format">The output format.</param>
public SinusoidSource(SampleFormat format)
{
    Format = format;
    // Cache 1/rate so per-sample time can be computed by multiplication.
    SamplingFrequencyInverse = 1.0 / format.SampleRate;
}
/// <summary>
/// Allocates the output signal for this filter. The channel argument is
/// intentionally ignored: output is always created with a single channel.
/// </summary>
/// <param name="channels">Requested channels (unused; output is mono).</param>
/// <param name="samples">Number of samples.</param>
/// <param name="rate">Sample rate.</param>
/// <param name="dstSampleFormat">Destination sample format.</param>
/// <returns>A single-channel signal.</returns>
protected override Signal NewSignal(int channels, int samples, int rate, SampleFormat dstSampleFormat)
    => new Signal(1, samples, rate, dstSampleFormat);
/// <summary>
/// Shared initializer: pins the raw byte array so unmanaged consumers can
/// address it, then records the signal's descriptive fields.
/// </summary>
/// <param name="data">Raw sample bytes backing the signal.</param>
/// <param name="channels">Number of channels.</param>
/// <param name="length">Signal length.</param>
/// <param name="sampleRate">Sample rate.</param>
/// <param name="format">Sample format.</param>
private void init(byte[] data, int channels, int length, int sampleRate, SampleFormat format)
{
    // Pin first: the data pointer below is only valid for a pinned handle.
    this.handle = GCHandle.Alloc(data, GCHandleType.Pinned);
    this.ptrData = handle.AddrOfPinnedObject();
    this.rawData = data;

    // Descriptive metadata.
    this.channels = channels;
    this.length = length;
    this.sampleRate = sampleRate;
    this.format = format;
}
/// <summary>
/// Creates a sample reader over the given stream; all configuration is
/// delegated to the <see cref="StreamSampleIO"/> base constructor.
/// </summary>
/// <param name="underlyingStream">Stream to read samples from.</param>
/// <param name="format">The sample format.</param>
/// <param name="sampleRate">The sample rate.</param>
/// <param name="headerLength">Bytes of header preceding sample data.</param>
/// <param name="channels">Number of channels.</param>
/// <param name="bufferSize">Buffer size, forwarded to the base class.</param>
public StreamSampleReader(Stream underlyingStream, SampleFormat format, int sampleRate, int headerLength, int channels, int bufferSize)
    : base(underlyingStream, format, sampleRate, headerLength, channels, bufferSize)
{
}
/// <summary>
/// Creates a WAV writer over an existing stream: wires up a sample writer
/// that skips the header region, then writes the WAV header at offset 0.
/// </summary>
/// <param name="underlyingStream">Destination stream.</param>
/// <param name="sampleRate">The sample rate.</param>
/// <param name="channels">Number of channels.</param>
/// <param name="format">The sample format.</param>
/// <param name="bufferSize">Buffer size for the sample writer.</param>
public WavFileWriter(Stream underlyingStream, int sampleRate, short channels, SampleFormat format, int bufferSize) : base(underlyingStream)
{
    //Create wrapper
    writer = new StreamSampleWriter(underlyingStream, format, sampleRate, WavHeaderUtil.HEADER_LENGTH, channels, bufferSize);

    //Create info
    info = new WavFileInfo
    {
        bitsPerSample = writer.BitsPerSample,
        channels = channels,
        sampleRate = sampleRate
    };

    //Write header at the very start of the stream.
    byte[] headerBytes = WavHeaderUtil.CreateHeader(info);
    underlyingStream.Position = 0;
    underlyingStream.Write(headerBytes, 0, headerBytes.Length);
}
/// <summary>
/// Initializes a new sample with the given format and data length.
/// </summary>
/// <param name="format">The sample's format.</param>
/// <param name="length">Size of the backing byte buffer.</param>
public Sample(SampleFormat format, int length)
{
    // Allocate a zeroed buffer of the requested size.
    _data = new byte[length];
    _format = format;
}
/// <summary>
/// Reads image dimensions from a TIFF file and derives the pixel data type.
/// The directory count is used as the Z dimension.
/// </summary>
/// <param name="path">Path of the TIFF file to open read-only.</param>
public HeaderTiff(string path)
{
    Path = path;

    using (Tiff Image = Tiff.Open(path, "r"))
    {
        {
            FieldValue[] value = Image.GetField(TiffTag.IMAGEWIDTH);
            Dimensions.X = value[0].ToInt();
        }
        {
            FieldValue[] value = Image.GetField(TiffTag.IMAGELENGTH);
            Dimensions.Y = value[0].ToInt();
        }
        {
            // One directory per Z slice.
            Dimensions.Z = Image.NumberOfDirectories();
        }
        {
            FieldValue[] value = Image.GetField(TiffTag.SAMPLEFORMAT);
            // NOTE(review): the real SAMPLEFORMAT/BITSPERSAMPLE lookups are
            // commented out below and replaced with hard-coded UINT/8
            // defaults, so `value` fetched above is unused — confirm this
            // pinning to 8-bit unsigned is intentional.
            SampleFormat Format = SampleFormat.UINT; // (SampleFormat)value[0].ToInt();
            int BitsPerPixel = 8; // Image.GetField(TiffTag.BITSPERSAMPLE)[0].ToInt();

            if (Format == SampleFormat.UINT)
            {
                if (BitsPerPixel == 8)
                {
                    Mode = TiffDataType.Byte;
                }
                else if (BitsPerPixel == 16)
                {
                    Mode = TiffDataType.Ushort;
                }
                else if (BitsPerPixel == 32)
                {
                    Mode = TiffDataType.Uint;
                }
                else if (BitsPerPixel == 64)
                {
                    Mode = TiffDataType.Ulong;
                }
                else
                {
                    throw new FormatException("Unexpected bits per pixel.");
                }
            }
            else if (Format == SampleFormat.INT)
            {
                if (BitsPerPixel == 16)
                {
                    Mode = TiffDataType.Short;
                }
                else if (BitsPerPixel == 32)
                {
                    Mode = TiffDataType.Int;
                }
                else if (BitsPerPixel == 64)
                {
                    Mode = TiffDataType.Long;
                }
                else
                {
                    throw new FormatException("Unexpected bits per pixel.");
                }
            }
            else if (Format == SampleFormat.IEEEFP)
            {
                if (BitsPerPixel == 32)
                {
                    Mode = TiffDataType.Float;
                }
                else if (BitsPerPixel == 64)
                {
                    Mode = TiffDataType.Double;
                }
                else
                {
                    throw new FormatException("Unexpected bits per pixel.");
                }
            }
        }
    }
}
/// <summary>
/// Sets the value(s) of a tag in a TIFF file/stream open for writing.
/// </summary>
/// <param name="tif">An instance of the <see cref="Tiff"/> class.</param>
/// <param name="tag">The tag.</param>
/// <param name="value">The tag value(s).</param>
/// <returns>
/// <c>true</c> if tag value(s) were set successfully; otherwise, <c>false</c>.
/// </returns>
/// <seealso cref="Tiff.SetField"/>
public virtual bool SetField(Tiff tif, TiffTag tag, FieldValue[] value)
{
    const string module = "vsetfield";

    TiffDirectory td = tif.m_dir;
    bool status = true;
    int v32 = 0;
    int v = 0;

    // Flags set by the cases below; checked after the switch to decide
    // whether to record the field bit or report a bad value.
    bool end = false;
    bool badvalue = false;
    bool badvalue32 = false;

    switch (tag)
    {
        case TiffTag.SUBFILETYPE:
            td.td_subfiletype = (FileType)value[0].ToByte();
            break;
        case TiffTag.IMAGEWIDTH:
            td.td_imagewidth = value[0].ToInt();
            break;
        case TiffTag.IMAGELENGTH:
            td.td_imagelength = value[0].ToInt();
            break;
        case TiffTag.BITSPERSAMPLE:
            td.td_bitspersample = value[0].ToShort();
            // If the data require post-decoding processing to byte-swap samples, set it
            // up here. Note that since tags are required to be ordered, compression code
            // can override this behavior in the setup method if it wants to roll the post
            // decoding work in with its normal work.
            if ((tif.m_flags & TiffFlags.SWAB) == TiffFlags.SWAB)
            {
                switch (td.td_bitspersample)
                {
                    case 16:
                        tif.m_postDecodeMethod = Tiff.PostDecodeMethodType.pdmSwab16Bit;
                        break;
                    case 24:
                        tif.m_postDecodeMethod = Tiff.PostDecodeMethodType.pdmSwab24Bit;
                        break;
                    case 32:
                        tif.m_postDecodeMethod = Tiff.PostDecodeMethodType.pdmSwab32Bit;
                        break;
                    case 64:
                        tif.m_postDecodeMethod = Tiff.PostDecodeMethodType.pdmSwab64Bit;
                        break;
                    case 128:
                        // two 64's
                        tif.m_postDecodeMethod = Tiff.PostDecodeMethodType.pdmSwab64Bit;
                        break;
                }
            }
            break;
        case TiffTag.COMPRESSION:
            v = value[0].ToInt() & 0xffff;
            Compression comp = (Compression)v;
            // If we're changing the compression scheme, then notify the previous module
            // so that it can cleanup any state it's setup.
            if (tif.fieldSet(FieldBit.Compression))
            {
                if (td.td_compression == comp)
                {
                    break;
                }

                tif.m_currentCodec.Cleanup();
                tif.m_flags &= ~TiffFlags.CODERSETUP;
            }

            // Setup new compression scheme.
            status = tif.setCompressionScheme(comp);
            if (status)
            {
                td.td_compression = comp;
            }
            else
            {
                status = false;
            }
            break;
        case TiffTag.PHOTOMETRIC:
            td.td_photometric = (Photometric)value[0].ToInt();
            break;
        case TiffTag.THRESHHOLDING:
            td.td_threshholding = (Threshold)value[0].ToByte();
            break;
        case TiffTag.FILLORDER:
            v = value[0].ToInt();
            FillOrder fo = (FillOrder)v;
            if (fo != FillOrder.LSB2MSB && fo != FillOrder.MSB2LSB)
            {
                badvalue = true;
                break;
            }

            td.td_fillorder = fo;
            break;
        case TiffTag.ORIENTATION:
            v = value[0].ToInt();
            Orientation or = (Orientation)v;
            if (or < Orientation.TOPLEFT || Orientation.LEFTBOT < or)
            {
                badvalue = true;
                break;
            }
            else
            {
                td.td_orientation = or;
            }
            break;
        case TiffTag.SAMPLESPERPIXEL:
            // XXX should cross check - e.g. if pallette, then 1
            v = value[0].ToInt();
            if (v == 0)
            {
                badvalue = true;
                break;
            }

            td.td_samplesperpixel = (short)v;
            break;
        case TiffTag.ROWSPERSTRIP:
            v32 = value[0].ToInt();
            if (v32 == 0)
            {
                badvalue32 = true;
                break;
            }

            td.td_rowsperstrip = v32;
            if (!tif.fieldSet(FieldBit.TileDimensions))
            {
                td.td_tilelength = v32;
                td.td_tilewidth = td.td_imagewidth;
            }
            break;
        case TiffTag.MINSAMPLEVALUE:
            td.td_minsamplevalue = value[0].ToUShort();
            break;
        case TiffTag.MAXSAMPLEVALUE:
            td.td_maxsamplevalue = value[0].ToUShort();
            break;
        case TiffTag.SMINSAMPLEVALUE:
            td.td_sminsamplevalue = value[0].ToDouble();
            break;
        case TiffTag.SMAXSAMPLEVALUE:
            td.td_smaxsamplevalue = value[0].ToDouble();
            break;
        case TiffTag.XRESOLUTION:
            td.td_xresolution = value[0].ToFloat();
            break;
        case TiffTag.YRESOLUTION:
            td.td_yresolution = value[0].ToFloat();
            break;
        case TiffTag.PLANARCONFIG:
            v = value[0].ToInt();
            PlanarConfig pc = (PlanarConfig)v;
            if (pc != PlanarConfig.CONTIG && pc != PlanarConfig.SEPARATE)
            {
                badvalue = true;
                break;
            }

            td.td_planarconfig = pc;
            break;
        case TiffTag.XPOSITION:
            td.td_xposition = value[0].ToFloat();
            break;
        case TiffTag.YPOSITION:
            td.td_yposition = value[0].ToFloat();
            break;
        case TiffTag.RESOLUTIONUNIT:
            v = value[0].ToInt();
            ResUnit ru = (ResUnit)v;
            if (ru < ResUnit.NONE || ResUnit.CENTIMETER < ru)
            {
                badvalue = true;
                break;
            }

            td.td_resolutionunit = ru;
            break;
        case TiffTag.PAGENUMBER:
            td.td_pagenumber[0] = value[0].ToShort();
            td.td_pagenumber[1] = value[1].ToShort();
            break;
        case TiffTag.HALFTONEHINTS:
            td.td_halftonehints[0] = value[0].ToShort();
            td.td_halftonehints[1] = value[1].ToShort();
            break;
        case TiffTag.COLORMAP:
            // One colormap entry per possible sample value.
            v32 = 1 << td.td_bitspersample;
            Tiff.setShortArray(out td.td_colormap[0], value[0].ToShortArray(), v32);
            Tiff.setShortArray(out td.td_colormap[1], value[1].ToShortArray(), v32);
            Tiff.setShortArray(out td.td_colormap[2], value[2].ToShortArray(), v32);
            break;
        case TiffTag.EXTRASAMPLES:
            if (!setExtraSamples(td, ref v, value))
            {
                badvalue = true;
                break;
            }
            break;
        case TiffTag.MATTEING:
            if (value[0].ToShort() != 0)
            {
                td.td_extrasamples = 1;
            }
            else
            {
                td.td_extrasamples = 0;
            }

            if (td.td_extrasamples != 0)
            {
                td.td_sampleinfo = new ExtraSample[1];
                td.td_sampleinfo[0] = ExtraSample.ASSOCALPHA;
            }
            break;
        case TiffTag.TILEWIDTH:
            v32 = value[0].ToInt();
            if ((v32 % 16) != 0)
            {
                // Non-multiple-of-16 tiles are only tolerated when reading.
                if (tif.m_mode != Tiff.O_RDONLY)
                {
                    badvalue32 = true;
                    break;
                }

                Tiff.WarningExt(tif, tif.m_clientdata, tif.m_name,
                    "Nonstandard tile width {0}, convert file", v32);
            }

            td.td_tilewidth = v32;
            tif.m_flags |= TiffFlags.ISTILED;
            break;
        case TiffTag.TILELENGTH:
            v32 = value[0].ToInt();
            if ((v32 % 16) != 0)
            {
                if (tif.m_mode != Tiff.O_RDONLY)
                {
                    badvalue32 = true;
                    break;
                }

                Tiff.WarningExt(tif, tif.m_clientdata, tif.m_name,
                    "Nonstandard tile length {0}, convert file", v32);
            }

            td.td_tilelength = v32;
            tif.m_flags |= TiffFlags.ISTILED;
            break;
        case TiffTag.TILEDEPTH:
            v32 = value[0].ToInt();
            if (v32 == 0)
            {
                badvalue32 = true;
                break;
            }

            td.td_tiledepth = v32;
            break;
        case TiffTag.DATATYPE:
            // Legacy DATATYPE values are mapped onto SampleFormat.
            v = value[0].ToInt();
            SampleFormat sf = SampleFormat.VOID;
            switch (v)
            {
                case DATATYPE_VOID:
                    sf = SampleFormat.VOID;
                    break;
                case DATATYPE_INT:
                    sf = SampleFormat.INT;
                    break;
                case DATATYPE_UINT:
                    sf = SampleFormat.UINT;
                    break;
                case DATATYPE_IEEEFP:
                    sf = SampleFormat.IEEEFP;
                    break;
                default:
                    badvalue = true;
                    break;
            }

            if (!badvalue)
            {
                td.td_sampleformat = sf;
            }
            break;
        case TiffTag.SAMPLEFORMAT:
            v = value[0].ToInt();
            sf = (SampleFormat)v;
            if (sf < SampleFormat.UINT || SampleFormat.COMPLEXIEEEFP < sf)
            {
                badvalue = true;
                break;
            }

            td.td_sampleformat = sf;

            // Try to fix up the SWAB function for complex data.
            if (td.td_sampleformat == SampleFormat.COMPLEXINT &&
                td.td_bitspersample == 32 &&
                tif.m_postDecodeMethod == Tiff.PostDecodeMethodType.pdmSwab32Bit)
            {
                tif.m_postDecodeMethod = Tiff.PostDecodeMethodType.pdmSwab16Bit;
            }
            else if ((td.td_sampleformat == SampleFormat.COMPLEXINT ||
                td.td_sampleformat == SampleFormat.COMPLEXIEEEFP) &&
                td.td_bitspersample == 64 &&
                tif.m_postDecodeMethod == Tiff.PostDecodeMethodType.pdmSwab64Bit)
            {
                tif.m_postDecodeMethod = Tiff.PostDecodeMethodType.pdmSwab32Bit;
            }
            break;
        case TiffTag.IMAGEDEPTH:
            td.td_imagedepth = value[0].ToInt();
            break;
        case TiffTag.SUBIFD:
            if ((tif.m_flags & TiffFlags.INSUBIFD) != TiffFlags.INSUBIFD)
            {
                td.td_nsubifd = value[0].ToInt();
                Tiff.setLong8Array(out td.td_subifd, value[1].TolongArray(), td.td_nsubifd);
            }
            else
            {
                Tiff.ErrorExt(tif, tif.m_clientdata, module, "{0}: Sorry, cannot nest SubIFDs", tif.m_name);
                status = false;
            }
            break;
        case TiffTag.YCBCRPOSITIONING:
            td.td_ycbcrpositioning = (YCbCrPosition)value[0].ToShort();
            break;
        case TiffTag.YCBCRSUBSAMPLING:
            td.td_ycbcrsubsampling[0] = value[0].ToShort();
            td.td_ycbcrsubsampling[1] = value[1].ToShort();
            break;
        case TiffTag.TRANSFERFUNCTION:
            // Three transfer functions for multi-component data, else one.
            v = ((td.td_samplesperpixel - td.td_extrasamples) > 1 ? 3 : 1);
            for (int i = 0; i < v; i++)
            {
                Tiff.setShortArray(out td.td_transferfunction[i], value[0].ToShortArray(), 1 << td.td_bitspersample);
            }
            break;
        case TiffTag.REFERENCEBLACKWHITE:
            // XXX should check for null range
            Tiff.setFloatArray(out td.td_refblackwhite, value[0].ToFloatArray(), 6);
            break;
        case TiffTag.INKNAMES:
            v = value[0].ToInt();
            string s = value[1].ToString();
            v = checkInkNamesString(tif, v, s);
            status = v > 0;
            if (v > 0)
            {
                setNString(out td.td_inknames, s, v);
                td.td_inknameslen = v;
            }
            break;
        default:
            // This can happen if multiple images are open with
            // different codecs which have private tags. The global tag
            // information table may then have tags that are valid for
            // one file but not the other. If the client tries to set a
            // tag that is not valid for the image's codec then we'll
            // arrive here. This happens, for example, when tiffcp is
            // used to convert between compression schemes and
            // codec-specific tags are blindly copied.
            TiffFieldInfo fip = tif.FindFieldInfo(tag, TiffType.ANY);
            if (fip == null || fip.Bit != FieldBit.Custom)
            {
                Tiff.ErrorExt(tif, tif.m_clientdata, module,
                    "{0}: Invalid {1}tag \"{2}\" (not supported by codec)",
                    tif.m_name, Tiff.isPseudoTag(tag) ? "pseudo-" : string.Empty,
                    fip != null ? fip.Name : "Unknown");
                status = false;
                break;
            }

            // Find the existing entry for this custom value.
            int tvIndex = -1;
            for (int iCustom = 0; iCustom < td.td_customValueCount; iCustom++)
            {
                if (td.td_customValues[iCustom].info.Tag == tag)
                {
                    tvIndex = iCustom;
                    td.td_customValues[iCustom].value = null;
                    break;
                }
            }

            // Grow the custom list if the entry was not found.
            if (tvIndex == -1)
            {
                td.td_customValueCount++;
                TiffTagValue[] new_customValues = Tiff.Realloc(
                    td.td_customValues, td.td_customValueCount - 1, td.td_customValueCount);
                td.td_customValues = new_customValues;

                tvIndex = td.td_customValueCount - 1;
                td.td_customValues[tvIndex].info = fip;
                td.td_customValues[tvIndex].value = null;
                td.td_customValues[tvIndex].count = 0;
            }

            // Set custom value ... save a copy of the custom tag value.
            int tv_size = Tiff.dataSize(fip.Type);
            if (tv_size == 0)
            {
                status = false;
                Tiff.ErrorExt(tif, tif.m_clientdata, module,
                    "{0}: Bad field type {1} for \"{2}\"", tif.m_name, fip.Type, fip.Name);
                end = true;
                break;
            }

            // Work out how many values this tag carries.
            int paramIndex = 0;
            if (fip.PassCount)
            {
                if (fip.WriteCount == TiffFieldInfo.Variable2)
                {
                    td.td_customValues[tvIndex].count = value[paramIndex++].ToInt();
                }
                else
                {
                    td.td_customValues[tvIndex].count = value[paramIndex++].ToInt();
                }
            }
            else if (fip.WriteCount == TiffFieldInfo.Variable || fip.WriteCount == TiffFieldInfo.Variable2)
            {
                td.td_customValues[tvIndex].count = 1;
            }
            else if (fip.WriteCount == TiffFieldInfo.Spp)
            {
                td.td_customValues[tvIndex].count = td.td_samplesperpixel;
            }
            else
            {
                td.td_customValues[tvIndex].count = fip.WriteCount;
            }

            if (fip.Type == TiffType.ASCII)
            {
                string ascii;
                Tiff.setString(out ascii, value[paramIndex++].ToString());
                td.td_customValues[tvIndex].value = Tiff.Latin1Encoding.GetBytes(ascii);
            }
            else
            {
                td.td_customValues[tvIndex].value = new byte[tv_size * td.td_customValues[tvIndex].count];
                if ((fip.PassCount ||
                    fip.WriteCount == TiffFieldInfo.Variable ||
                    fip.WriteCount == TiffFieldInfo.Variable2 ||
                    fip.WriteCount == TiffFieldInfo.Spp ||
                    td.td_customValues[tvIndex].count > 1) &&
                    fip.Tag != TiffTag.PAGENUMBER &&
                    fip.Tag != TiffTag.HALFTONEHINTS &&
                    fip.Tag != TiffTag.YCBCRSUBSAMPLING &&
                    fip.Tag != TiffTag.DOTRANGE)
                {
                    // Array-valued case: block-copy the caller's bytes.
                    byte[] apBytes = value[paramIndex++].GetBytes();
                    Buffer.BlockCopy(apBytes, 0, td.td_customValues[tvIndex].value, 0,
                        Math.Min(apBytes.Length, td.td_customValues[tvIndex].value.Length));
                }
                else
                {
                    // XXX: The following loop required to handle
                    // PAGENUMBER, HALFTONEHINTS,
                    // YCBCRSUBSAMPLING and DOTRANGE tags.
                    // These tags are actually arrays and should be
                    // passed as arrays to SetField() function, but
                    // actually passed as a list of separate values.
                    // This behavior must be changed in the future!
                    // Upd: This loop also processes some EXIF tags with
                    // UNDEFINED type (like EXIF_FILESOURCE or EXIF_SCENETYPE)
                    // In this case input value is string-based, so
                    // in TiffType.UNDEFINED case we use FieldValue.GetBytes()[0]
                    // construction instead of direct call of FieldValue.ToByte() method.
                    byte[] val = td.td_customValues[tvIndex].value;
                    int valPos = 0;
                    for (int i = 0; i < td.td_customValues[tvIndex].count; i++, valPos += tv_size)
                    {
                        switch (fip.Type)
                        {
                            case TiffType.BYTE:
                            case TiffType.UNDEFINED:
                                val[valPos] = value[paramIndex + i].GetBytes()[0];
                                break;
                            case TiffType.SBYTE:
                                val[valPos] = value[paramIndex + i].ToByte();
                                break;
                            case TiffType.SHORT:
                                Buffer.BlockCopy(BitConverter.GetBytes(value[paramIndex + i].ToShort()), 0, val, valPos, tv_size);
                                break;
                            case TiffType.SSHORT:
                                Buffer.BlockCopy(BitConverter.GetBytes(value[paramIndex + i].ToShort()), 0, val, valPos, tv_size);
                                break;
                            case TiffType.LONG:
                            case TiffType.IFD:
                                Buffer.BlockCopy(BitConverter.GetBytes(value[paramIndex + i].ToInt()), 0, val, valPos, tv_size);
                                break;
                            case TiffType.SLONG:
                                Buffer.BlockCopy(BitConverter.GetBytes(value[paramIndex + i].ToInt()), 0, val, valPos, tv_size);
                                break;
                            case TiffType.RATIONAL:
                            case TiffType.SRATIONAL:
                            case TiffType.FLOAT:
                                Buffer.BlockCopy(BitConverter.GetBytes(value[paramIndex + i].ToFloat()), 0, val, valPos, tv_size);
                                break;
                            case TiffType.DOUBLE:
                                Buffer.BlockCopy(BitConverter.GetBytes(value[paramIndex + i].ToDouble()), 0, val, valPos, tv_size);
                                break;
                            default:
                                Array.Clear(val, valPos, tv_size);
                                status = false;
                                break;
                        }
                    }
                }
            }
            break;
    }

    // Record the field as set and mark the directory dirty on success.
    if (!end && !badvalue && !badvalue32)
    {
        if (status)
        {
            tif.setFieldBit(tif.FieldWithTag(tag).Bit);
            tif.m_flags |= TiffFlags.DIRTYDIRECT;
        }
    }

    if (badvalue)
    {
        Tiff.ErrorExt(tif, tif.m_clientdata, module,
            "{0}: Bad value {1} for \"{2}\" tag", tif.m_name, v, tif.FieldWithTag(tag).Name);
        return(false);
    }

    if (badvalue32)
    {
        Tiff.ErrorExt(tif, tif.m_clientdata, module,
            "{0}: Bad value {1} for \"{2}\" tag", tif.m_name, v32, tif.FieldWithTag(tag).Name);
        return(false);
    }

    return(status);
}
/// <summary>
/// Creates a new Signal from a float array, using the whole array.
/// </summary>
/// <param name="signal">The source array.</param>
/// <param name="channels">Number of channels.</param>
/// <param name="sampleRate">The sample rate.</param>
/// <param name="format">The sample format (defaults to 32-bit IEEE float).</param>
/// <returns>The signal built from the array.</returns>
public static Signal FromArray(Array signal, int channels, int sampleRate, SampleFormat format = SampleFormat.Format32BitIeeeFloat)
{
    // Delegate to the length-aware overload with the array's full length.
    return FromArray(signal, signal.Length, channels, sampleRate, format);
}
/// <summary>
/// Builds a Signal covering the entire given array by forwarding to the
/// overload that also takes an explicit length.
/// </summary>
/// <param name="signal">Array of samples.</param>
/// <param name="channels">Channel count.</param>
/// <param name="sampleRate">Sample rate.</param>
/// <param name="format">Sample format; 32-bit IEEE float by default.</param>
/// <returns>The resulting signal.</returns>
public static Signal FromArray(Array signal, int channels, int sampleRate, SampleFormat format = SampleFormat.Format32BitIeeeFloat)
{
    int totalLength = signal.Length;
    return FromArray(signal, totalLength, channels, sampleRate, format);
}
/// <summary>
/// Serial data handler implementing the meter's handshake as a step-driven
/// state machine: first the result count is requested (R|M| sequence), then
/// the configuration/units (R|C| sequence), and finally header ('H') and
/// result ('R') frames are parsed from STX/LF-delimited data.
/// </summary>
public void DataReceived(object sender, System.IO.Ports.SerialDataReceivedEventArgs e)
{
    _TempString += Port.ReadExisting();

    //Get number of samples on meter
    if (!_NumResultsRead && (_TempString.Contains(Statics.GetStringFromAsciiCode((byte)AsciiCodes.ENQ)) || _TempString.Contains(Statics.GetStringFromAsciiCode((byte)AsciiCodes.ACK))))
    {
        // Command sequence driven by _CountStep; one step per invocation.
        switch (_CountStep)
        {
            case 0:
                Port.Write(new byte[] { 0x15 }, 0, 1); // NAK
                _CountStep++;
                break;
            case 1:
                Port.Write(new byte[] { 0x05 }, 0, 1); // ENQ
                _CountStep++;
                break;
            case 2:
                Port.Write("R|");
                _CountStep++;
                break;
            case 3:
                Port.Write("M|");
                _CountStep++;
                break;
            case 4:
                Port.Write(new byte[] { 0x39, 0x37, 0x0d, 0x0a }, 0, 4);
                _CountStep++;
                System.Threading.Thread.Sleep(1000);
                break;
            case 5:
                _NumResultsRead = true;
                try
                {
                    // The count sits between the first and last '|'.
                    byte[] temp = Statics.StrToByteArray(_TempString);
                    string tempCount = _TempString.Substring(_TempString.IndexOf("|") + 1, _TempString.LastIndexOf("|") - _TempString.IndexOf("|") - 1);
                    SampleCount = Convert.ToInt32(tempCount);
                    Port.Write(new byte[] { 0x04 }, 0, 1); // EOT
                    _TempString = string.Empty;
                    _CountStep = 0;
                    System.Threading.Thread.Sleep(100);
                }
                catch
                {
                    // NOTE(review): `Port.DataReceived += null;` is a no-op —
                    // presumably an unsubscribe (-=) was intended; confirm.
                    Port.DataReceived += null;
                    Port.DiscardInBuffer();
                    Port.DiscardOutBuffer();
                    _TestFailed = true;
                    return;
                }
#if DEBUG
                Console.WriteLine("SampleCount: " + SampleCount);
#endif
                break;
            default:
                break;
        }//switch
        return;
    }//if
    else if (_NumResultsRead && !_ConfigRead && (_TempString.Contains(Statics.GetStringFromAsciiCode((byte)AsciiCodes.ENQ)) || _TempString.Contains(Statics.GetStringFromAsciiCode((byte)AsciiCodes.ACK))))
    {
        // Same stepper, now requesting the configuration record (R|C|).
        switch (_CountStep)
        {
            case 0:
                Port.Write(new byte[] { 0x15 }, 0, 1);
                _CountStep++;
                break;
            case 1:
                Port.Write(new byte[] { 0x05 }, 0, 1);
                _CountStep++;
                break;
            case 2:
                Port.Write("R|");
                _CountStep++;
                break;
            case 3:
                Port.Write("C|");
                _CountStep++;
                break;
            case 4:
                Port.Write(new byte[] { 0x38, 0x44, 0x0d, 0x0a }, 0, 4);
                _CountStep++;
                System.Threading.Thread.Sleep(1000);
                break;
            case 5:
                _ConfigRead = true;
                try
                {
                    // Bit 2 of the configuration byte selects the units.
                    string[] splitData = _TempString.Split(new char[] { '|' });
                    System.Collections.BitArray bitary = new System.Collections.BitArray(Byte.Parse(splitData[1]));
                    SampleFormat = (bitary.Get(2)) ? SampleFormat.MMOL : SampleFormat.MGDL;
                    Port.Write(new byte[] { 0x04 }, 0, 1);
                    _CountStep = 0;
                    _TempString = string.Empty;
                }
                catch
                {
                    Port.DataReceived += null;
                    Port.DiscardInBuffer();
                    Port.DiscardOutBuffer();
                    _TestFailed = true;
                    return;
                }
#if DEBUG
                Console.WriteLine("SampleFormat: " + SampleFormat.ToString());
#endif
                break;
            default:
                break;
        }//switch
        return;
    }//else
    //if data received is the ENQ to start communications
    else if (!_HeaderRead && _TempString.Contains(Statics.GetStringFromAsciiCode((byte)AsciiCodes.ENQ)))
    {
        Port.Write(Statics.GetStringFromAsciiCode((byte)AsciiCodes.ACK));
        RawData += _TempString;
        _TempString = String.Empty;
        return;
    }//if
    //if data contains an STX and a following LF then a full frame can be trimmed
    else if (_TempString.Contains(Statics.GetStringFromAsciiCode((byte)AsciiCodes.STX)) && _TempString.Substring(_TempString.IndexOf(Statics.GetStringFromAsciiCode((byte)AsciiCodes.STX))).Contains(Statics.GetStringFromAsciiCode((byte)AsciiCodes.LF)))
    {
        RawData += _TempString;

        //full frame encountered (cut out full frame, and remove the STX on the front)
        string fullframe = _TempString.Split(new string[] { Statics.GetStringFromAsciiCode((byte)AsciiCodes.CR) }, StringSplitOptions.None)[0].Replace(Statics.GetStringFromAsciiCode((byte)AsciiCodes.STX), "");

        //trim off the frame as more data may be in the buffer
        _TempString = _TempString.Substring(_TempString.LastIndexOf(Statics.GetStringFromAsciiCode((byte)AsciiCodes.LF)) + 1);

        #region HeaderRecord
        if (fullframe[1] == 'H')
        {
            _HeaderRead = true;
            string[] headerrecord = fullframe.Split(new char[] { '|' });
            string[] typeandserial = headerrecord[4].Split(new char[] { '^' });
            string accesspassword = headerrecord[3];
            string softwareversion = typeandserial[1].Split(new char[] { '\\' })[0];
            string eepromversion = typeandserial[1].Split(new char[] { '\\' })[1];
            MeterDescription = typeandserial[0];
            string MeterType = typeandserial[0];
            SerialNumber = typeandserial[2];

            //breeze meters have a product number of 6115
            _MeterFound = (MeterType.ToLowerInvariant() == "bayer6115");

            if (_TestMode)
            {
                // NOTE(review): `+= null` here is also a no-op; confirm.
                Port.DataReceived += null;
                return;
            }

            OnHeaderRead(new HeaderReadEventArgs(SampleCount, this));
            Console.WriteLine("Header: " + fullframe);
        }//if
        #endregion
        #region Glucose Record
        else if (fullframe[1] == 'R')
        {
            string[] splitrecord = fullframe.Split(new char[] { '|' });
            //only if glucose record
            if (splitrecord.Length > 10)
            {
                // Timestamp is packed as yyyyMMddHHmm in field 11.
                int year = int.Parse(splitrecord[11].Substring(0, 4));
                int month = int.Parse(splitrecord[11].Substring(4, 2));
                int day = int.Parse(splitrecord[11].Substring(6, 2));
                int hour = int.Parse(splitrecord[11].Substring(8, 2));
                int minute = int.Parse(splitrecord[11].Substring(10, 2));
                int glucose = int.Parse(splitrecord[3]);
                string units = splitrecord[4].Split(new char[] { '^' })[0];
                DateTime dtTimeStamp = new DateTime(year, month, day, hour, minute, 0);

                //put the record in the dataset and raise the read event
                try
                {
                    if (Records.FindByTimestamp(dtTimeStamp) == null)
                    {
#if DEBUG
                        Console.WriteLine("Record: " + fullframe);
#endif
                        OnRecordRead(new RecordReadEventArgs(this._Records.AddRecordRow(dtTimeStamp, glucose, units)));
                    }//if
                    else
                    {
#if DEBUG
                        Console.WriteLine("DUPLIC: " + fullframe);
#endif
                    } //else
                } //try
                catch
                {
                } //catch
            } //if
        } //elseif
        #endregion
    } //else
    //end of transmission encountered after a header record is read
    else if (_HeaderRead && _TempString.Contains(Statics.GetStringFromAsciiCode((byte)AsciiCodes.EOT)))
    {
        _HeaderRead = false;
        Port.DataReceived -= new System.IO.Ports.SerialDataReceivedEventHandler(DataReceived);
        OnReadFinished(new ReadFinishedEventArgs(this));
        Close();
        Dispose();
        return;
    }//elseif

    //send response
    if (_NumResultsRead)
    {
        Port.Write(Statics.GetStringFromAsciiCode((byte)AsciiCodes.ACK));
    }
}
/// <summary>
///   Gets the size (in bits) of a sample format.
/// </summary>
///
public static int GetSampleSize(SampleFormat format)
{
    // Group formats by bit width; anything not listed here is rejected.
    switch (format)
    {
        case SampleFormat.Format8Bit:
        case SampleFormat.Format8BitUnsigned:
            return 8;

        case SampleFormat.Format16Bit:
            return 16;

        case SampleFormat.Format32Bit:
        case SampleFormat.Format32BitIeeeFloat:
            return 32;

        case SampleFormat.Format64BitIeeeFloat:
            return 64;

        case SampleFormat.Format128BitComplex:
            return 128;

        default:
            throw new NotSupportedException();
    }
}
/// <summary>
/// Opens a dummy hardware device session for the requested direction.
/// A sample rate or channel count of zero is replaced with the backend default.
/// </summary>
public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount, float volume)
{
    // Zero means "unspecified": substitute the target rate and stereo.
    uint effectiveSampleRate = sampleRate == 0 ? Constants.TargetSampleRate : sampleRate;
    uint effectiveChannelCount = channelCount == 0 ? 2u : channelCount;

    if (direction == Direction.Output)
    {
        return new DummyHardwareDeviceSessionOutput(this, memoryManager, sampleFormat, effectiveSampleRate, effectiveChannelCount, volume);
    }

    return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, effectiveSampleRate, effectiveChannelCount);
}
/// <summary>
/// Creates a new SoundIO-backed device session and immediately sets up its
/// output stream on the owning driver.
/// </summary>
public SoundIoHardwareDeviceSession(SoundIoHardwareDeviceDriver driver, IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount) : base(memoryManager, requestedSampleFormat, requestedSampleRate, requestedChannelCount)
{
    _driver = driver;
    _updateRequiredEvent = driver.GetUpdateRequiredEvent();

    // Buffers queued for playback and the raw PCM staging ring.
    _queuedBuffers = new ConcurrentQueue<SoundIoAudioBuffer>();
    _ringBuffer = new DynamicRingBuffer();

    SetupOutputStream();
}
/// <summary>
///   Creates a new signal from the given signal parameters. This
///   method can be overridden on child classes to modify how
///   output signals are created.
/// </summary>
///
protected override Signal NewSignal(int channels, int samples, int rate, SampleFormat dstSampleFormat)
{
    // NOTE(review): the 'channels' argument is deliberately ignored — the output
    // signal is always created with a single channel. This looks like a
    // mono-conversion filter's override; confirm before reusing this pattern.
    return new Signal(1, samples, rate, dstSampleFormat);
}
/// <summary>
/// Opens a SoundIO output session, substituting defaults for a zero sample
/// rate / channel count and registering the session with this driver.
/// Only the output direction is supported.
/// </summary>
public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount)
{
    if (direction != Direction.Output)
    {
        throw new NotImplementedException("Input direction is currently not implemented on SoundIO backend!");
    }

    // Zero means "unspecified": fall back to stereo at the target rate.
    uint effectiveChannelCount = channelCount != 0 ? channelCount : 2u;
    uint effectiveSampleRate = sampleRate != 0 ? sampleRate : Constants.TargetSampleRate;

    lock (_lock)
    {
        var session = new SoundIoHardwareDeviceSession(this, memoryManager, sampleFormat, effectiveSampleRate, effectiveChannelCount);

        _sessions.Add(session);

        return session;
    }
}
/// <summary>
///   Constructs a new Signal.
/// </summary>
///
/// <param name="channels">The number of channels for the signal.</param>
/// <param name="length">The length of the signal.</param>
/// <param name="sampleRate">The sample rate of the signal.</param>
/// <param name="format">The sample format for the signal.</param>
///
public Signal(int channels, int length, int sampleRate, SampleFormat format)
{
    // Allocate a raw backing buffer large enough to hold every sample of
    // every channel, then hand everything to the common initializer.
    int bytesPerSample = GetSampleSize(format) / 8;
    init(new byte[channels * length * bytesPerSample], channels, length, sampleRate, format);
}
/// <summary>
/// Converts this <see cref="SampleFormat"/> into an equivalent
/// <see cref="WaveFormat"/> (same rate, bit depth and channel count).
/// </summary>
public static WaveFormat ToWaveFormat(this SampleFormat sampleFormat)
{
    var rate = sampleFormat.Rate;
    var bits = sampleFormat.Bits;
    var channels = sampleFormat.Channels;

    return new WaveFormat(rate, bits, channels);
}
/// <summary>
///   Creates a new Signal from a float array.
/// </summary>
///
public static Signal FromArray(Array signal, int sampleRate, SampleFormat format = SampleFormat.Format32BitIeeeFloat)
{
    // A one-dimensional array is treated as a single channel; for a
    // two-dimensional array the second dimension is the channel count.
    int channels;
    if (signal.Rank == 1)
    {
        channels = 1;
    }
    else
    {
        channels = signal.GetLength(1);
    }

    return FromArray(signal, channels, sampleRate, format);
}
/// <summary>
///   Creates a new Signal from a float array.
/// </summary>
///
public static Signal FromArray(Array signal, int size, int channels, int sampleRate, SampleFormat format = SampleFormat.Format32BitIeeeFloat)
{
    // Copy the source array into a raw byte buffer sized for 'size' samples.
    int bytesPerSample = GetSampleSize(format) / 8;
    var buffer = new byte[size * bytesPerSample];
    Buffer.BlockCopy(signal, 0, buffer, 0, buffer.Length);

    // Samples are interleaved, so frames-per-channel = total samples / channels.
    int frames = size / channels;

    return new Signal(buffer, channels, frames, sampleRate, format);
}
/// <summary>
/// Update the internal state from a user parameter.
/// </summary>
/// <param name="outErrorInfo">The possible <see cref="ErrorInfo"/> that was generated.</param>
/// <param name="parameter">The user parameter.</param>
/// <param name="poolMapper">The mapper to use.</param>
/// <param name="behaviourContext">The behaviour context.</param>
public void UpdateParameters(out ErrorInfo outErrorInfo, ref VoiceInParameter parameter, ref PoolMapper poolMapper, ref BehaviourContext behaviourContext)
{
    // Straight copy of the simple scalar fields from the incoming parameter.
    InUse = parameter.InUse;
    Id = parameter.Id;
    NodeId = parameter.NodeId;

    UpdatePlayState(parameter.PlayState);

    SrcQuality = parameter.SrcQuality;

    Priority = parameter.Priority;
    SortingOrder = parameter.SortingOrder;
    SampleRate = parameter.SampleRate;
    SampleFormat = parameter.SampleFormat;
    ChannelsCount = parameter.ChannelCount;
    Pitch = parameter.Pitch;
    Volume = parameter.Volume;

    // Biquad filter setup is copied wholesale from the parameter block.
    parameter.BiquadFilters.ToSpan().CopyTo(BiquadFilters.ToSpan());

    WaveBuffersCount = parameter.WaveBuffersCount;
    WaveBuffersIndex = parameter.WaveBuffersIndex;

    // Flush counts accumulate (+=) rather than overwrite, but only when the
    // behaviour context reports the feature as supported.
    if (behaviourContext.IsFlushVoiceWaveBuffersSupported())
    {
        FlushWaveBufferCount += parameter.FlushWaveBufferCount;
    }

    MixId = parameter.MixId;

    // Without splitter support the splitter id is forced to the sentinel value.
    if (behaviourContext.IsSplitterSupported())
    {
        SplitterId = parameter.SplitterId;
    }
    else
    {
        SplitterId = RendererConstants.UnusedSplitterId;
    }

    parameter.ChannelResourceIds.ToSpan().CopyTo(ChannelResourceIds.ToSpan());

    // Decoding behaviour flags are only honoured on revisions that support them;
    // otherwise the default behaviour applies.
    DecodingBehaviour behaviour = DecodingBehaviour.Default;

    if (behaviourContext.IsDecodingBehaviourFlagSupported())
    {
        behaviour = parameter.DecodingBehaviourFlags;
    }

    DecodingBehaviour = behaviour;

    if (parameter.ResetVoiceDropFlag)
    {
        VoiceDropFlag = false;
    }

    // Attach the data source state buffer when an update is required; the
    // voice is marked unmapped when attachment fails. Otherwise report no error.
    if (ShouldUpdateParameters(ref parameter))
    {
        DataSourceStateUnmapped = !poolMapper.TryAttachBuffer(out outErrorInfo, ref DataSourceStateAddressInfo, parameter.DataSourceStateAddress, parameter.DataSourceStateSize);
    }
    else
    {
        outErrorInfo = new ErrorInfo();
    }
}
/// <summary>
/// Renders the timeline into <paramref name="output"/> one sample at a time,
/// optionally prefixed with a WAV header, encoding each sample in the
/// requested <paramref name="format"/>.
/// </summary>
/// <param name="output">Destination stream; left open after generation.</param>
/// <param name="format">Output sample encoding.</param>
/// <param name="includeWavHeader">When true, a WAV header is written first.</param>
public void Generate(Stream output, SampleFormat format, bool includeWavHeader = true)
{
    // 'unchecked' so the narrowing casts in the format conversions below
    // wrap instead of throwing on overflow.
    unchecked
    {
        // leaveOpen: true — the caller owns the output stream.
        using (var writer = new BinaryWriter(output, Encoding.Default, true))
        {
            int sampleCount = (int)(SampleRate * Timeline.LengthSeconds);
            if (includeWavHeader)
            {
                Wav.GenerateWavHeader(this, output, sampleCount, format);
            }

            // Walk timeline nodes in parallel with the sample loop.
            var nodeEnumerator = Timeline.GetEnumerator();
            nodeEnumerator.MoveNext();

            for (Position = 0; Position < sampleCount; Position++)
            {
                // Traverse nodes
                if (nodeEnumerator.Current == null)
                {
                    break;
                }
                // Advance past every node whose end time has been reached,
                // firing exit/enter callbacks; bail out entirely when the
                // timeline is exhausted.
                while (TimePosition >= nodeEnumerator.Current.EndTime)
                {
                    nodeEnumerator.Current.OnExit(this);
                    State.LastGlottisLevel = State.GlottisLevel;
                    if (!nodeEnumerator.MoveNext())
                    {
                        goto done;
                    }
                    nodeEnumerator.Current.OnEnter(this);
                }
                nodeEnumerator.Current.OnUpdate(this);

                // Run synthesizer on current sample
                double currentSample = 0f;
                foreach (var sampler in samplerSequence)
                {
                    if (!sampler.Enabled)
                    {
                        continue;
                    }
                    sampler.Update(ref currentSample);
                }

                // Limit signal
                Util.Sigmoid(ref currentSample);

                // Convert sample to desired format and write to stream
                switch (format)
                {
                    case SampleFormat.Float64:
                        writer.Write(currentSample);
                        break;
                    case SampleFormat.Float32:
                        writer.Write((float)currentSample);
                        break;
                    case SampleFormat.Signed16:
                        writer.Write((short)(currentSample * short.MaxValue));
                        break;
                    case SampleFormat.Unsigned8:
                        // Map [-1, 1] onto [0, 255].
                        writer.Write((byte)((currentSample + 1.0f) / 2.0f * byte.MaxValue));
                        break;
                }
            }
        done:
            writer.Flush();
            nodeEnumerator.Dispose();
        }
    }
}
/// <summary>
/// Lookup for opengl pixel information: maps a <see cref="SampleFormat"/> to
/// the matching OpenGL internal format, client pixel format and component type.
/// </summary>
/// <param name="format">The texture sample format to translate.</param>
/// <returns>The (internal format, pixel format, pixel type) triple for OpenGL.</returns>
internal static (PixelInternalFormat pif, OpenTK.Graphics.OpenGL.PixelFormat pi, PixelType pt) MapTextureFormat(SampleFormat format)
{
    switch (format)
    {
        // Single channel (red).
        case SampleFormat.R8: return (PixelInternalFormat.R8, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.UnsignedByte);
        case SampleFormat.R8I: return (PixelInternalFormat.R8i, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.Byte);
        case SampleFormat.R8UI: return (PixelInternalFormat.R8ui, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.UnsignedByte);
        case SampleFormat.R8_SNorm: return (PixelInternalFormat.R8Snorm, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.Byte);
        case SampleFormat.R16: return (PixelInternalFormat.R16, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.UnsignedShort);
        case SampleFormat.R16I: return (PixelInternalFormat.R16i, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.Short);
        case SampleFormat.R16UI: return (PixelInternalFormat.R16ui, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.UnsignedShort);
        case SampleFormat.R16_SNorm: return (PixelInternalFormat.R16Snorm, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.Short);
        case SampleFormat.R32I: return (PixelInternalFormat.R32i, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.Int);
        case SampleFormat.R32UI: return (PixelInternalFormat.R32ui, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.UnsignedInt);
        case SampleFormat.R32F: return (PixelInternalFormat.R32f, OpenTK.Graphics.OpenGL.PixelFormat.Red, PixelType.Float);

        // Two channels (red/green).
        case SampleFormat.RG8: return (PixelInternalFormat.Rg8, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.UnsignedByte);
        case SampleFormat.RG8I: return (PixelInternalFormat.Rg8i, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.Byte);
        case SampleFormat.RG8UI: return (PixelInternalFormat.Rg8ui, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.UnsignedByte);
        case SampleFormat.RG8_SNorm: return (PixelInternalFormat.Rg8Snorm, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.Byte);
        case SampleFormat.RG16: return (PixelInternalFormat.Rg16, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.UnsignedShort);
        case SampleFormat.RG16I: return (PixelInternalFormat.Rg16i, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.Short);
        case SampleFormat.RG16UI: return (PixelInternalFormat.Rg16ui, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.UnsignedShort);
        case SampleFormat.RG16_SNorm: return (PixelInternalFormat.Rg16Snorm, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.Short);
        case SampleFormat.RG16F: return (PixelInternalFormat.Rg16f, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.Float);
        case SampleFormat.RG32I: return (PixelInternalFormat.Rg32i, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.Int);
        case SampleFormat.RG32UI: return (PixelInternalFormat.Rg32ui, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.UnsignedInt);
        case SampleFormat.RG32F: return (PixelInternalFormat.Rg32f, OpenTK.Graphics.OpenGL.PixelFormat.Rg, PixelType.Float);

        // Three channels (RGB).
        case SampleFormat.RGB8: return (PixelInternalFormat.Rgb8, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.UnsignedByte);
        case SampleFormat.RGB8I: return (PixelInternalFormat.Rgb8i, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.Byte);
        case SampleFormat.RGB8UI: return (PixelInternalFormat.Rgb8ui, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.UnsignedByte);
        case SampleFormat.RGB8_SNorm: return (PixelInternalFormat.Rgb8Snorm, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.Byte);
        // FIX: RGB16 is unsigned-normalized; component type was Short, matching
        // the other *16 unsigned formats it must be UnsignedShort.
        case SampleFormat.RGB16: return (PixelInternalFormat.Rgb16, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.UnsignedShort);
        case SampleFormat.RGB16I: return (PixelInternalFormat.Rgb16i, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.Short);
        // FIX: RGB16UI previously used UnsignedByte — a 16-bit unsigned integer
        // format uses UnsignedShort (cf. R16UI/RG16UI/RGBA16UI above/below).
        case SampleFormat.RGB16UI: return (PixelInternalFormat.Rgb16ui, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.UnsignedShort);
        // FIX: RGB16_SNorm previously used Byte — 16-bit signed-normalized
        // components are Short (cf. R16_SNorm/RG16_SNorm/RGBA16_SNorm).
        case SampleFormat.RGB16_SNorm: return (PixelInternalFormat.Rgb16Snorm, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.Short);
        case SampleFormat.RGB16F: return (PixelInternalFormat.Rgb16f, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.Float);
        case SampleFormat.RGB32I: return (PixelInternalFormat.Rgb32i, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.Int);
        case SampleFormat.RGB32UI: return (PixelInternalFormat.Rgb32ui, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.UnsignedInt);
        case SampleFormat.RGB32F: return (PixelInternalFormat.Rgb32f, OpenTK.Graphics.OpenGL.PixelFormat.Rgb, PixelType.Float);

        // Four channels (RGBA).
        case SampleFormat.RGBA8: return (PixelInternalFormat.Rgba8, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.UnsignedByte);
        // FIX: RGBA8_SNorm previously mapped to the three-channel Rgb8Snorm
        // internal format; the four-channel variant is Rgba8Snorm.
        case SampleFormat.RGBA8_SNorm: return (PixelInternalFormat.Rgba8Snorm, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.Byte);
        case SampleFormat.RGBA8I: return (PixelInternalFormat.Rgba8i, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.Byte);
        case SampleFormat.RGBA8UI: return (PixelInternalFormat.Rgba8ui, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.UnsignedByte);
        case SampleFormat.RGBA16: return (PixelInternalFormat.Rgba16, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.UnsignedShort);
        case SampleFormat.RGBA16_SNorm: return (PixelInternalFormat.Rgba16Snorm, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.Short);
        case SampleFormat.RGBA16I: return (PixelInternalFormat.Rgba16i, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.Short);
        case SampleFormat.RGBA16UI: return (PixelInternalFormat.Rgba16ui, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.UnsignedShort);
        case SampleFormat.RGBA16F: return (PixelInternalFormat.Rgba16f, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.Float);
        case SampleFormat.RGBA32I: return (PixelInternalFormat.Rgba32i, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.Int);
        case SampleFormat.RGBA32UI: return (PixelInternalFormat.Rgba32ui, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.UnsignedInt);
        case SampleFormat.RGBA32F: return (PixelInternalFormat.Rgba32f, OpenTK.Graphics.OpenGL.PixelFormat.Rgba, PixelType.Float);

        // Depth / depth-stencil.
        case SampleFormat.DepthComponent16: return (PixelInternalFormat.DepthComponent16, OpenTK.Graphics.OpenGL.PixelFormat.DepthComponent, PixelType.UnsignedShort);
        case SampleFormat.DepthComponent24: return (PixelInternalFormat.DepthComponent24, OpenTK.Graphics.OpenGL.PixelFormat.DepthComponent, PixelType.UnsignedInt);
        case SampleFormat.DepthComponent32: return (PixelInternalFormat.DepthComponent32, OpenTK.Graphics.OpenGL.PixelFormat.DepthComponent, PixelType.UnsignedInt);
        case SampleFormat.DepthComponent32F: return (PixelInternalFormat.DepthComponent32f, OpenTK.Graphics.OpenGL.PixelFormat.DepthComponent, PixelType.Float);
        case SampleFormat.Depth24_Stencil8: return (PixelInternalFormat.Depth24Stencil8, OpenTK.Graphics.OpenGL.PixelFormat.DepthStencil, PixelType.UnsignedInt248);

        default: throw new Exception("Invalid Texture Format");
    }
}
/// <summary>
/// Returns a (cost, per-channel cost) pair for sample-rate conversion of a
/// frame of <paramref name="sampleCount"/> samples in the given format and
/// quality. The constants form a fixed lookup table; the 160-sample row is
/// used for 160-sample frames and the other row for 240-sample frames.
/// NOTE(review): units of these costs are not visible from this excerpt —
/// presumably processing-time estimates; confirm against the caller.
/// </summary>
private static (float, float) GetCostByFormat(uint sampleCount, SampleFormat format, SampleRateConversionQuality quality)
{
    // Only the two frame sizes present in the table are legal inputs.
    Debug.Assert(sampleCount == 160 || sampleCount == 240);

    switch (format)
    {
        case SampleFormat.PcmInt16:
            switch (quality)
            {
                case SampleRateConversionQuality.Default:
                    if (sampleCount == 160)
                    {
                        return (6329.44f, 427.52f);
                    }
                    return (7853.28f, 710.14f);
                case SampleRateConversionQuality.High:
                    if (sampleCount == 160)
                    {
                        return (8049.42f, 371.88f);
                    }
                    return (10138.84f, 610.49f);
                case SampleRateConversionQuality.Low:
                    if (sampleCount == 160)
                    {
                        return (5062.66f, 423.43f);
                    }
                    return (5810.96f, 676.72f);
                default:
                    throw new NotImplementedException($"{format} {quality}");
            }
        case SampleFormat.PcmFloat:
            switch (quality)
            {
                case SampleRateConversionQuality.Default:
                    if (sampleCount == 160)
                    {
                        return (7845.25f, 2310.4f);
                    }
                    return (10090.9f, 3490.9f);
                case SampleRateConversionQuality.High:
                    if (sampleCount == 160)
                    {
                        return (9446.36f, 2308.91f);
                    }
                    return (12520.85f, 3480.61f);
                // NOTE(review): Low returns the same constants as High for
                // PcmFloat — looks intentional (table data), but verify against
                // the reference these numbers were taken from.
                case SampleRateConversionQuality.Low:
                    if (sampleCount == 160)
                    {
                        return (9446.36f, 2308.91f);
                    }
                    return (12520.85f, 3480.61f);
                default:
                    throw new NotImplementedException($"{format} {quality}");
            }
        case SampleFormat.Adpcm:
            switch (quality)
            {
                case SampleRateConversionQuality.Default:
                    if (sampleCount == 160)
                    {
                        return (7913.81f, 1827.66f);
                    }
                    return (9736.70f, 2756.37f);
                case SampleRateConversionQuality.High:
                    if (sampleCount == 160)
                    {
                        return (9607.81f, 1829.29f);
                    }
                    return (12154.38f, 2731.31f);
                case SampleRateConversionQuality.Low:
                    if (sampleCount == 160)
                    {
                        return (6517.48f, 1824.61f);
                    }
                    return (7929.44f, 2732.15f);
                default:
                    throw new NotImplementedException($"{format} {quality}");
            }
        default:
            throw new NotImplementedException($"{format}");
    }
}
/// <summary>
/// Creates a new OpenAL-backed device session: allocates the AL source the
/// buffers will be queued onto, resolves the target AL format and applies the
/// requested volume.
/// </summary>
public OpenALHardwareDeviceSession(OpenALHardwareDeviceDriver driver, IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount, float requestedVolume) : base(memoryManager, requestedSampleFormat, requestedSampleRate, requestedChannelCount)
{
    _driver = driver;
    _queuedBuffers = new Queue<OpenALAudioBuffer>();

    // One AL source per session; all queued buffers play through it.
    _sourceId = AL.GenSource();
    _targetFormat = GetALFormat();

    // Session starts stopped with no samples played yet.
    _isActive = false;
    _playedSampleCount = 0;

    SetVolume(requestedVolume);
}
// Native interop binding. NOTE(review): assumes the [DllImport] attribute for
// this declaration sits immediately above, outside this excerpt — presumably
// binding into FFmpeg's avutil; verify the library name there.
public static extern int av_get_bits_per_sample_format(SampleFormat sample_fmt);
// Check pixel format of the source signal
private void CheckSourceFormat(SampleFormat sampleFormat)
{
    // Any format absent from the translation table cannot be processed.
    bool supported = FormatTranslations.ContainsKey(sampleFormat);

    if (!supported)
    {
        throw new UnsupportedSampleFormatException("Source sample format is not supported by the filter.");
    }
}
/// <summary>
/// End-to-end MFCC extraction test over the Free Spoken Digits Dataset:
/// documents signal properties of three samples and pins the first MFCC
/// descriptor of each. (The #region is extracted into the published docs —
/// keep its content self-explanatory.)
/// </summary>
public void sample_test()
{
    string basePath = NUnit.Framework.TestContext.CurrentContext.TestDirectory;
    string pathWhereTheDatasetShouldBeStored = Path.Combine(basePath, "mfcc");

    #region doc_example1
    // Let's say we would like to analyse an audio sample. To give an example that
    // could be reproduced by anyone without having to give a specific sound file
    // that would need to have been downloaded by every user trying to run this example,
    // we will use obtain an example from the Free Spoken Digits Dataset instead:
    var fsdd = new FreeSpokenDigitsDataset(path: pathWhereTheDatasetShouldBeStored);

    // Let's obtain one of the audio signals:
    Signal a = fsdd.GetSignal(0, "jackson", 10);

    // Note: if you would like to load a signal from the
    // disk, you could use the following method directly:
    // Signal a = Signal.FromFile(fileName);

    // First we could extract some characteristics from the audio signal, just
    // for informative purposes. We don't actually need to register them just
    // to compute the MFCC, so please skip those checks if you would like!
    int numberOfChannels = a.NumberOfChannels;   // should be: 1
    int numberOfFrames = a.NumberOfFrames;       // should be: 5451
    int numberOfSamples = a.NumberOfSamples;     // should be: 5451
    SampleFormat format = a.SampleFormat;        // should be: Format32BitIeeeFloat
    int sampleRate = a.SampleRate;               // should be: 8000 (8khz)
    int samples = a.Samples;                     // should be: 5451
    int sampleSize = a.SampleSize;               // should be: 4
    int numberOfBytes = a.NumberOfBytes;         // should be: 21804

    // Now, let's say we would like to compute its MFCC:
    var extractor = new MelFrequencyCepstrumCoefficient(
        filterCount: 40,            // Note: all values are optional, you can
        cepstrumCount: 13,          // specify only the ones you'd like and leave
        lowerFrequency: 133.3333,   // all others at their defaults
        upperFrequency: 6855.4976,
        alpha: 0.97,
        samplingRate: 16000,
        frameRate: 100,
        windowLength: 0.0256,
        numberOfBins: 512);

    // We can call the transform method of the MFCC extractor class:
    IEnumerable<MelFrequencyCepstrumCoefficientDescriptor> mfcc = extractor.Transform(a);

    // or we could also transform them to a matrix directly with:
    double[][] actual = mfcc.Select(x => x.Descriptor).ToArray();

    // This matrix would contain X different MFCC values (due the length of the signal)
    int numberOfMFCCs = actual.Length; // should be 35 (depends on the MFCC window)

    // Each of those MFCC values would have length 13;
    int descriptorLength = actual[0].Length; // 13 (depends on the MFCC Ceptrtum's count)

    // An example of an MFCC vector would have been:
    double[] row = actual[0]; // should have been: (see vector written below)

    double[] expected = new double[]
    {
        10.570020645259348d, 1.3484344242338475d, 0.4861056552885234d,
        -0.79287993818868352d, -0.64182784362935996d, -0.28079835895392041d,
        -0.46378109632237779d, 0.072039410871952647d, -0.43971730320461733d,
        0.48891921252102533d, -0.22502241185050212d, 0.12478713268421229d,
        -0.13373400147110801d
    };
    #endregion

    Assert.AreEqual(1, numberOfChannels);
    Assert.AreEqual(5451, numberOfFrames);
    Assert.AreEqual(5451, numberOfSamples);
    Assert.AreEqual(SampleFormat.Format32BitIeeeFloat, format);
    Assert.AreEqual(8000, sampleRate);
    Assert.AreEqual(5451, samples);
    Assert.AreEqual(4, sampleSize);
    Assert.AreEqual(21804, numberOfBytes);
    Assert.AreEqual(sampleSize * numberOfFrames * numberOfChannels, numberOfBytes);
    Assert.AreEqual(35, numberOfMFCCs);
    Assert.IsTrue(expected.IsEqual(row, 1e-8));

    // Second sample: stereo recording, so frames != samples.
    Signal b = fsdd.GetSignal(0, "nicolas", 10);
    Assert.AreEqual(2, b.NumberOfChannels);
    Assert.AreEqual(3755, b.NumberOfFrames);
    Assert.AreEqual(7510, b.NumberOfSamples);
    Assert.AreEqual(SampleFormat.Format32BitIeeeFloat, b.SampleFormat);
    Assert.AreEqual(8000, b.SampleRate);
    Assert.AreEqual(7510, b.Samples);
    Assert.AreEqual(4, b.SampleSize);
    Assert.AreEqual(30040, b.NumberOfBytes);
    Assert.AreEqual(b.SampleSize * b.NumberOfFrames * b.NumberOfChannels, b.NumberOfBytes);

    MelFrequencyCepstrumCoefficientDescriptor[] rb = extractor.Transform(b).ToArray();
    Assert.AreEqual(24, rb.Length);
    Assert.IsTrue(new[]
    {
        10.6434445230168, -0.222107787197107, 0.316067614396639,
        -0.212769536249701, -0.107755264262885, -0.292732772820073,
        -0.00445205345925395, 0.024397440969199, 0.0213769364471326,
        -0.0882765552657509, -0.177682484734242, -0.1013307739251,
        -0.099014915302743
    }.IsEqual(rb[0].Descriptor, 1e-8));

    Signal c = fsdd.GetSignal(5, "theo", 23);
    Assert.AreEqual(1, c.NumberOfChannels);
    Assert.AreEqual(4277, c.NumberOfFrames);
    Assert.AreEqual(4277, c.NumberOfSamples);
    Assert.AreEqual(SampleFormat.Format32BitIeeeFloat, c.SampleFormat);
    Assert.AreEqual(8000, c.SampleRate);
    Assert.AreEqual(4277, c.Samples);
    Assert.AreEqual(4, c.SampleSize);
    Assert.AreEqual(17108, c.NumberOfBytes);
    // FIX: previously multiplied by b.SampleSize — a copy-paste slip. Both are
    // 4 bytes so the assertion passed, but the byte count of 'c' must be
    // derived from c's own sample size.
    Assert.AreEqual(c.SampleSize * c.NumberOfFrames * c.NumberOfChannels, c.NumberOfBytes);

    MelFrequencyCepstrumCoefficientDescriptor[] rc = extractor.Transform(c).ToArray();
    Assert.AreEqual(27, rc.Length);
    Assert.IsTrue(new[]
    {
        7.24614406589037, -1.16796769512142, -0.134374026111248,
        -0.192703972718674, 0.112752647291759, -0.118712048308068,
        -0.0603752892245708, -0.0275002195634854, -0.0830858413953528,
        -0.0838965948140795, -0.15293502718595, 0.0107796827068413,
        -0.0491283773795346
    }.IsEqual(rc[0].Descriptor, 1e-8));
}
/// <summary>
/// Writes a WAV (RIFF) header for <paramref name="sampleCount"/> samples of
/// the given format into <paramref name="stream"/>. The header is first built
/// in a temporary MemoryStream so the file-size field can be patched once the
/// total length is known, then copied to the output.
/// </summary>
/// <param name="synth">Source of the sample rate.</param>
/// <param name="stream">Destination stream for the header bytes.</param>
/// <param name="sampleCount">Number of frames that will follow the header.</param>
/// <param name="sampleFormat">Encoding the data chunk will use.</param>
public static void GenerateWavHeader(Synthesizer synth, Stream stream, int sampleCount, SampleFormat sampleFormat)
{
    using (var headerStream = new MemoryStream(40))
    using(var header = new BinaryWriter(headerStream, Encoding.Default, false))
    {
        header.Write(CHUNK_RIFF);
        // Remember where the RIFF size field sits; it is patched at the end.
        int posFileSize = (int)headerStream.Position;
        header.Write(0); // Replace later
        header.Write(ID_WAVE);

        // "fmt " chunk
        header.Write(CHUNK_FMT);
        header.Write(FMT_CHUNK_SIZE);
        // Float formats are tagged IEEE-float; integer formats are plain PCM.
        header.Write(sampleFormat == SampleFormat.Float32 || sampleFormat == SampleFormat.Float64 ? WAV_FORMAT_IEEE_FLOAT : WAV_FORMAT_PCM);
        header.Write(NUM_CHANNELS);
        header.Write(synth.SampleRate);

        // Data rate (bytes per second), block size, and bits per sample
        int blockSize = 0;
        switch (sampleFormat)
        {
            case SampleFormat.Float64:
                header.Write(synth.SampleRate * sizeof(double) * NUM_CHANNELS);
                header.Write((short)(blockSize = sizeof(double) * NUM_CHANNELS));
                header.Write((short)(sizeof(double) * 8));
                break;
            case SampleFormat.Float32:
                header.Write(synth.SampleRate * sizeof(float) * NUM_CHANNELS);
                header.Write((short)(blockSize = sizeof(float) * NUM_CHANNELS));
                header.Write((short)(sizeof(float) * 8));
                break;
            case SampleFormat.Signed16:
                header.Write(synth.SampleRate * sizeof(short) * NUM_CHANNELS);
                header.Write((short)(blockSize = sizeof(short) * NUM_CHANNELS));
                header.Write((short)(sizeof(short) * 8));
                break;
            case SampleFormat.Unsigned8:
                header.Write(synth.SampleRate * sizeof(byte) * NUM_CHANNELS);
                header.Write((short)(blockSize = sizeof(byte) * NUM_CHANNELS));
                header.Write((short)(sizeof(byte) * 8));
                break;
        }
        header.Write(EXT_SIZE); // cbSize, required for non-PCM formats

        // "fact" chunk (required for non-PCM formats)
        if (sampleFormat == SampleFormat.Float32 || sampleFormat == SampleFormat.Float64)
        {
            header.Write(CHUNK_FACT);
            header.Write(FACT_CHUNK_SIZE);
            header.Write(sampleCount);
        }

        // "data" chunk
        header.Write(CHUNK_DATA);
        int dataSize = blockSize * sampleCount;
        header.Write(dataSize);
        int dataPos = (int)headerStream.Position;

        // Patch the RIFF size field now that the header length is known.
        // NOTE(review): the RIFF size field is conventionally (total file size
        // - 8); this writes headerLength + dataSize with no subtraction —
        // verify against a reader before relying on strict-parser compatibility.
        headerStream.Position = posFileSize;
        header.Write((int)headerStream.Length + dataSize);

        // Copy the data over to the audio stream
        headerStream.Flush();
        headerStream.WriteTo(stream);
        // NOTE(review): dataPos is an offset within headerStream; assigning it
        // to stream.Position is only correct when the header starts at offset 0
        // of 'stream' — confirm with callers.
        stream.Position = dataPos;
    }
}
/// <summary>
/// Reads TIFF header metadata (dimensions and per-sample data type) from a
/// file path or an already-open stream.
/// </summary>
/// <param name="path">Path to the TIFF file; opened for reading when no stream is given.</param>
/// <param name="stream">Optional pre-opened stream; closed afterwards unless it is a MemoryStream.</param>
public HeaderTiff(string path, Stream stream = null)
{
    Path = path;
    if (stream == null)
    {
        stream = File.OpenRead(path);
    }
    // NOTE(review): the Tiff handle is never disposed here — check whether
    // Tiff.ClientOpen requires an explicit Close/Dispose to avoid leaks.
    Tiff Image = Tiff.ClientOpen("inmemory", "r", stream, new TiffStream());
    {
        // Image width in pixels.
        {
            FieldValue[] value = Image.GetField(TiffTag.IMAGEWIDTH);
            Dimensions.X = value[0].ToInt();
        }
        // Image height in pixels.
        {
            FieldValue[] value = Image.GetField(TiffTag.IMAGELENGTH);
            Dimensions.Y = value[0].ToInt();
        }
        // Number of directories = number of slices along Z.
        {
            Dimensions.Z = Image.NumberOfDirectories();
        }
        // Combine SAMPLEFORMAT (defaulting to unsigned int when the tag is
        // absent, per the TIFF spec default) with BITSPERSAMPLE to pick the
        // element data type.
        {
            FieldValue[] value = Image.GetField(TiffTag.SAMPLEFORMAT);
            SampleFormat Format = SampleFormat.UINT;
            if (value != null)
            {
                Format = (SampleFormat)value[0].ToInt();
            }
            int BitsPerPixel = Image.GetField(TiffTag.BITSPERSAMPLE)[0].ToInt();
            if (Format == SampleFormat.UINT)
            {
                if (BitsPerPixel == 8)
                {
                    Mode = TiffDataType.Byte;
                }
                else if (BitsPerPixel == 16)
                {
                    Mode = TiffDataType.Ushort;
                }
                else if (BitsPerPixel == 32)
                {
                    Mode = TiffDataType.Uint;
                }
                else if (BitsPerPixel == 64)
                {
                    Mode = TiffDataType.Ulong;
                }
                else
                {
                    throw new FormatException("Unexpected bits per pixel.");
                }
            }
            else if (Format == SampleFormat.INT)
            {
                // Note: 8-bit signed is not accepted, only 16/32/64.
                if (BitsPerPixel == 16)
                {
                    Mode = TiffDataType.Short;
                }
                else if (BitsPerPixel == 32)
                {
                    Mode = TiffDataType.Int;
                }
                else if (BitsPerPixel == 64)
                {
                    Mode = TiffDataType.Long;
                }
                else
                {
                    throw new FormatException("Unexpected bits per pixel.");
                }
            }
            else if (Format == SampleFormat.IEEEFP)
            {
                if (BitsPerPixel == 32)
                {
                    Mode = TiffDataType.Float;
                }
                else if (BitsPerPixel == 64)
                {
                    Mode = TiffDataType.Double;
                }
                else
                {
                    throw new FormatException("Unexpected bits per pixel.");
                }
            }
            else
            {
                throw new FormatException("Unexpected format.");
            }
        }
    }
    // Close file-backed streams; keep caller-owned MemoryStreams open.
    if (stream.GetType() != typeof(MemoryStream))
    {
        stream.Close();
    }
}
// Captures the signal's description and derives the encoder bookkeeping
// values (block alignment and average throughput) from it.
private void initialize(Signal signal)
{
    // Basic properties copied straight from the signal.
    this.channels = signal.Channels;
    this.sampleRate = signal.SampleRate;
    this.sampleFormat = signal.SampleFormat;

    // Derived values: bits per sample -> bytes per frame -> bits per second.
    this.bitsPerSample = Signal.GetSampleSize(signal.SampleFormat);
    this.blockAlign = channels * (bitsPerSample / 8);
    this.averageBitsPerSecond = blockAlign * sampleRate;

    this.initialized = true;
}
// Initializes a TIFF directory with the spec/library default field values.
public TiffDirectory()
{
    // Numeric tags that default to "none"/zero.
    td_subfiletype = 0;
    td_compression = 0;
    td_photometric = 0;
    td_planarconfig = 0;

    // Enumerated defaults.
    td_fillorder = BitOrder.BigEndian;
    td_threshholding = Threshold.BILevel;
    td_orientation = Orientation.TopLeft;
    td_resolutionunit = ResolutionUnit.Inch;
    td_sampleformat = SampleFormat.UInt;
    td_ycbcrpositioning = YCbCrPosition.Centered;

    // Sample/strip geometry defaults.
    td_bitspersample = 1;
    td_samplesperpixel = 1;
    td_rowsperstrip = -1;
    td_tiledepth = 1;
    td_imagedepth = 1;

    td_stripbytecountsorted = true; // Our own arrays always sorted.

    // 2x2 chroma subsampling by default.
    td_ycbcrsubsampling[0] = 2;
    td_ycbcrsubsampling[1] = 2;
}
/// <summary>
/// Builds a mono PCM <c>WAVEFORMATEX</c> descriptor for the given format and
/// sample rate. NOTE(review): the numeric value of <see cref="SampleFormat"/>
/// is used directly as the bit depth ((int)format == bits per sample) —
/// assumes the enum is defined that way; confirm at its declaration.
/// </summary>
private static WAVEFORMATEX CreateFormatSpec(SampleFormat format, int sampleRate)
{
    int bitsPerSample = (int)format;
    int bytesPerFrame = bitsPerSample / 8; // mono, so block align == bytes per sample

    return new WAVEFORMATEX
    {
        FormatTag = WAVE_FORMAT_PCM,
        Channels = 1,
        SamplesPerSec = (uint)sampleRate,
        BitsPerSample = (short)bitsPerSample,
        AvgBytesPerSec = (uint)(bytesPerFrame * sampleRate),
        BlockAlign = (short)bytesPerFrame,
        cbSize = 0
    };
}