// Builds the on-disk fixture: a root folder with two media files plus an
// "Italians" subfolder with two more, then creates the field updater under test.
public void Setup()
{
    _rootFolder = new TemporaryFolder("~FamousPainters~");
    var italiansFolder = _rootFolder.Combine("~Italians~");
    Directory.CreateDirectory(italiansFolder);

    var imgFileType = new ImageFileType(() => null, null);
    var audioFileType = new AudioFileType(null, () => null, () => null);

    // Two files at the root, two inside the subfolder — one image and one
    // audio file in each location.
    _imgFile1 = SetupData(_rootFolder.Combine("PierreAugusteRenoir.jpg"), imgFileType,
        "Claude", "Monet", "1840", "1840");
    _imgFile2 = SetupData(Path.Combine(italiansFolder, "SistineChapel.jpg"), imgFileType,
        "Michelangelo", "di Lodovico Buonarroti Simoni", "1475", "1564");
    _audioFile1 = SetupData(_rootFolder.Combine("OntheTerrace.mp3"), audioFileType,
        "Pierre-Auguste", "Renoir", "1841", "1919");
    _audioFile2 = SetupData(Path.Combine(italiansFolder, "TheLastSupper.mp3"), audioFileType,
        "Leonardo", "da Vinci", "1452", "1519");

    _updater = FieldUpdater.CreateMinimalFieldUpdaterForTests(_rootFolder.Path);
}
// Queries Core Audio for every stream description (ASBD) the given container
// type supports for the given codec/format type.
// Returns null on any native error (legacy contract of this binding).
public static AudioStreamBasicDescription[] GetAvailableStreamDescriptions(AudioFileType fileType, AudioFormatType formatType)
{
    AudioFileTypeAndFormatID input;
    input.FileType = fileType;
    input.FormatType = formatType;
    uint size;
    // First ask how many bytes the property needs so the array can be sized.
    if (AudioFileGetGlobalInfoSize(AudioFileGlobalProperty.AvailableStreamDescriptionsForFormat, (uint)sizeof(AudioFileTypeAndFormatID), ref input, out size) != 0)
    {
        return(null);
    }

    var data = new AudioStreamBasicDescription[size / sizeof(AudioStreamBasicDescription)];
    // Pin the managed array so the native call can fill it in place.
    fixed(AudioStreamBasicDescription *ptr = data)
    {
        var res = AudioFileGetGlobalInfo(AudioFileGlobalProperty.AvailableStreamDescriptionsForFormat, (uint)sizeof(AudioFileTypeAndFormatID), ref input, ref size, ptr);
        if (res != 0)
        {
            return(null);
        }

        return(data);
    }
}
// Opens an audio file for reading through managed stream callbacks.
// The callback delegates are stored in fields so the GC cannot collect them
// while native code still holds function pointers to them.
internal NativeAudioFile(AudioFileType fileType, Stream stream)
{
    Contract.Requires(stream != null);
    Contract.Requires(stream.CanRead);
    Contract.Requires(stream.CanSeek);
    Contract.Ensures(_stream != null);
    Contract.Ensures(_stream == stream);
    Contract.Ensures(Handle != null);
    Contract.Ensures(!Handle.IsClosed);
    Contract.Ensures(_readCallback != null);
    Contract.Ensures(_getSizeCallback != null);

    _readCallback = ReadCallback;
    _getSizeCallback = GetSizeCallback;
    _stream = stream;

    NativeAudioFileHandle outHandle;
    // Read-only open: the write and set-size callbacks are passed as null.
    AudioFileStatus status = SafeNativeMethods.AudioFileOpenWithCallbacks(IntPtr.Zero, _readCallback, null, _getSizeCallback, null, fileType, out outHandle);
    if (status != AudioFileStatus.Ok)
    {
        throw new IOException(string.Format(CultureInfo.CurrentCulture, Resources.NativeAudioFileInitializationError, status));
    }

    Handle = outHandle;
}
// Creates an ExtAudioFile for the given URL, translating the out-error of
// the internal factory into exceptions for this legacy entry point.
public static ExtAudioFile CreateWithUrl(CFUrl url, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc, //AudioChannelLayout channelLayout,
    AudioFileFlags flag)
{
    if (url == null)
        throw new ArgumentNullException("url");

    ExtAudioFileError err;
    var audioFile = CreateWithUrl(url.Handle, fileType, inStreamDesc, flag, out err);

    // if (err != 0) <- to keep old implementation
    if (err != ExtAudioFileError.OK)
        throw new ArgumentException(String.Format("Error code:{0}", err));

    // if (ptr == IntPtr.Zero) <- to keep old implementation
    if (audioFile == null)
        throw new InvalidOperationException("Can not get object instance");

    return audioFile;
}
// Creates an ExtAudioFile at the given URL using the raw native call with
// manual pointer marshaling: the address of a stack-local IntPtr receives
// the native object handle.
public static ExtAudioFile CreateWithUrl(CFUrl url, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc, //AudioChannelLayout channelLayout,
    AudioFileFlags flag)
{
    if (url == null)
    {
        throw new ArgumentNullException("url");
    }

    int err;
    IntPtr ptr = new IntPtr();
    unsafe
    {
        // &ptr is valid for the duration of the call; the native side writes
        // the created handle through it.
        err = ExtAudioFileCreateWithUrl(url.Handle, fileType, ref inStreamDesc, IntPtr.Zero, (uint)flag, (IntPtr)(&ptr));
    }
    if (err != 0)
    {
        throw new ArgumentException(String.Format("Error code:{0}", err));
    }
    if (ptr == IntPtr.Zero)
    {
        throw new InvalidOperationException("Can not get object instance");
    }

    return(new ExtAudioFile(ptr));
}
/// <summary>
/// Initializes a new instance of AudioContent.
/// </summary>
/// <param name="audioFileName">Name of the audio source file to be processed.</param>
/// <param name="audioFileType">Type of the processed audio: WAV, MP3 or WMA.</param>
/// <remarks>Constructs the object from the specified source file, in the format specified.</remarks>
public AudioContent(string audioFileName, AudioFileType audioFileType)
{
    FileName = audioFileName;

    try
    {
        // Resolve to an absolute path before probing.
        var fullPath = Path.GetFullPath(audioFileName);

        // Probe the file for its actual type, format and timing information.
        DefaultAudioProfile.ProbeFormat(
            fullPath, out _fileType, out _format, out _duration, out _loopStart, out _loopLength);

        // XNA only treats a type mismatch as an error when WAV is involved on
        // either side; other mismatches are tolerated.
        var wavInvolved = audioFileType == AudioFileType.Wav || _fileType == AudioFileType.Wav;
        if (wavInvolved && audioFileType != _fileType)
            throw new ArgumentException("Incorrect file type!", nameof(audioFileType));

        // Note: raw WAV payload extraction/validation is intentionally not
        // performed here.
    }
    catch (Exception ex)
    {
        var message = string.Format(
            "Failed to open file {0}. Ensure the file is a valid audio file and is not DRM protected.",
            Path.GetFileNameWithoutExtension(audioFileName));
        throw new InvalidContentException(message, ex);
    }
}
/// <summary>
/// Initializes a new instance of AudioContent.
/// </summary>
/// <param name="audioFileName">Name of the audio source file to be processed.</param>
/// <param name="audioFileType">Type of the processed audio: WAV, MP3 or WMA.</param>
/// <remarks>Constructs the object from the specified source file, in the format specified.</remarks>
public AudioContent(string audioFileName, AudioFileType audioFileType)
{
    fileName = audioFileName;
    fileType = audioFileType;
#if WINDOWS
    // Eager reading is only compiled in on Windows; other platforms defer
    // (presumably because Read relies on Windows-only codecs — confirm).
    Read();
#endif
}
// P/Invoke: opens an audio file whose I/O is driven by managed callbacks
// rather than a file path. The write/set-size callbacks are nullable because
// they are only required when the file is opened for writing.
internal static extern AudioFileStatus AudioFileOpenWithCallbacks(
    IntPtr userData,
    NativeCallbacks.AudioFileReadCallback readCallback,
    NativeCallbacks.AudioFileWriteCallback?writeCallback,
    NativeCallbacks.AudioFileGetSizeCallback getSizeCallback,
    NativeCallbacks.AudioFileSetSizeCallback?setSizeCallback,
    AudioFileType fileType,
    out AudioFileHandle handle);
// Builds the base AudioFile and then wraps its handle in an ExtAudioFile so
// the extended (format-converting) API can be used through _handle.
public ExtendedAudioFile(
    AudioStreamBasicDescription description,
    AudioFileType fileType,
    [NotNull] Stream stream)
    : base(description, fileType, stream)
{
    // NOTE(review): the boolean argument presumably means "for writing", and
    // the native status code is not checked here — confirm both.
    SafeNativeMethods.ExtAudioFileWrapAudioFile(Handle, true, out _handle);
}
// P/Invoke: callback-driven open. ReSharper annotations document the
// contract: read/get-size callbacks are mandatory, write/set-size callbacks
// are optional (only needed for writable files).
internal static extern AudioFileStatus AudioFileOpenWithCallbacks(
    IntPtr userData,
    [NotNull] NativeCallbacks.AudioFileReadCallback readCallback,
    [CanBeNull] NativeCallbacks.AudioFileWriteCallback writeCallback,
    [NotNull] NativeCallbacks.AudioFileGetSizeCallback getSizeCallback,
    [CanBeNull] NativeCallbacks.AudioFileSetSizeCallback setSizeCallback,
    AudioFileType fileType,
    [NotNull] out AudioFileHandle handle);
// Null-guarded wrapper over the IntPtr-based factory. Failures are reported
// through the out parameter, not exceptions; the result may be null.
public static ExtAudioFile? CreateWithUrl(CFUrl url, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc, AudioFileFlags flag, out ExtAudioFileError error)
{
    if (url is null)
    {
        ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(url));
    }

    return(CreateWithUrl(url.Handle, fileType, inStreamDesc, flag, out error));
}
// Returns the human-readable name of an audio file type, or null when the
// global-info query fails. The property value is a CFString reference,
// hence the IntPtr-sized receive buffer.
public static string GetFileTypeName (AudioFileType fileType)
{
    IntPtr ptr;
    var size = (uint) sizeof (IntPtr);
    if (AudioFileGetGlobalInfo (AudioFileGlobalProperty.FileTypeName, sizeof (AudioFileType), ref fileType, ref size, out ptr) != 0)
        return null;

    return CFString.FetchString (ptr);
}
// Validates the URL and delegates to the handle-based factory; errors are
// surfaced through the out parameter rather than exceptions.
public static ExtAudioFile CreateWithUrl(CFUrl url, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc, AudioFileFlags flag, out ExtAudioFileError error)
{
    if (url == null)
        throw new ArgumentNullException("url");

    return CreateWithUrl(url.Handle, fileType, inStreamDesc, flag, out error);
}
// Transcribes a conference audio item to text and stores the transcript.
// Skips work when the conference id is 0 or a transcript already exists.
// All failures are logged and swallowed (best-effort operation).
public void SettingAudioTransferTxt(int conferenceID, ConferenceAudioItemTransferEntity conferenceAudioItemTransferEntity)
{
    // Lock so only one thread performs transfer work at a time.
    lock (objTransfer)
    {
        try
        {
            // A conference id of 0 means "no current conference": do nothing.
            if (conferenceID == 0)
            {
                return;
            }
            if (string.IsNullOrEmpty(conferenceAudioItemTransferEntity.AudioMessage))
            {
                // Buffer for the audio file contents.
                byte[] array = null;
                // Physical root path of this web service.
                string strLocal = this.Server.MapPath(".");
                conferenceAudioItemTransferEntity.AudioFileName = Path.GetFileName(conferenceAudioItemTransferEntity.AudioUrl);
                string fileName = strLocal + "\\" + Constant.AudioLocalRootName + "\\" + conferenceAudioItemTransferEntity.AudioFileName;
                if (File.Exists(fileName))
                {
                    // Read the audio file into the byte array via a file stream.
                    using (System.IO.FileStream fileStream = new System.IO.FileStream(fileName, System.IO.FileMode.Open, System.IO.FileAccess.Read, FileShare.Delete))
                    {
                        array = new byte[fileStream.Length];
                        // Copy the whole stream into the buffer.
                        fileStream.Read(array, 0, array.Length);
                    }
                    // Map the file extension (without the dot) onto the AudioFileType enum.
                    AudioFileType audioFileType = (AudioFileType)Enum.Parse(typeof(AudioFileType), Path.GetExtension(fileName).Replace(".", string.Empty));
                    // Speech-to-text conversion (shared helper).
                    string message = AudioTransfer.AudioToText(array, audioFileType);
                    // Empty result: store the fixed "unrecognizable" marker instead.
                    if (string.IsNullOrEmpty(message))
                    {
                        message = "无法识别"; // runtime literal kept as-is (stored in data)
                    }
                    // Switch the operation type to "update".
                    conferenceAudioItemTransferEntity.Operation = ConferenceAudioOperationType.UpdateType;
                    // Store the transcript text.
                    conferenceAudioItemTransferEntity.AudioMessage = message;
                    // Persist the updated node.
                    this.UpdateOne(conferenceID, conferenceAudioItemTransferEntity);
                }
            }
        }
        catch (Exception ex)
        {
            LogManage.WriteLog(this.GetType(), ex);
        }
        finally
        {
        }
    }
}
/// <summary>
/// Determines the audio file type from the file's extension.
/// </summary>
/// <param name="filePath">Path whose extension is inspected.</param>
/// <returns>The matching file type, or NotSupported when unrecognized.</returns>
private static AudioFileType GetAudioFileType(string filePath)
{
    // TODO: introspect audio file content instead of checking file extension.
    string extension = Path.GetExtension(filePath);

    if (DataProviderFactory.AUDIO_MP3_EXTENSION.Equals(extension, StringComparison.OrdinalIgnoreCase))
        return AudioFileType.Mp3;

    if (DataProviderFactory.AUDIO_MP4_EXTENSION.Equals(extension, StringComparison.OrdinalIgnoreCase)
        || DataProviderFactory.AUDIO_MP4_EXTENSION_.Equals(extension, StringComparison.OrdinalIgnoreCase))
        return AudioFileType.Mp4_AAC;

    if (DataProviderFactory.AUDIO_WAV_EXTENSION.Equals(extension, StringComparison.OrdinalIgnoreCase))
        return AudioFileType.WavUncompressed;

    return AudioFileType.NotSupported;
}
// P/Invoke: creates (initializes) a new audio file whose I/O goes through
// managed callbacks. All four callbacks are required because a new file is
// always opened for writing.
internal static extern AudioFileStatus AudioFileInitializeWithCallbacks(
    IntPtr userData,
    [NotNull] NativeCallbacks.AudioFileReadCallback readCallback,
    [NotNull] NativeCallbacks.AudioFileWriteCallback writeCallback,
    [NotNull] NativeCallbacks.AudioFileGetSizeCallback getSizeCallback,
    [NotNull] NativeCallbacks.AudioFileSetSizeCallback setSizeCallback,
    AudioFileType fileType,
    ref AudioStreamBasicDescription description,
    uint flags,
    [NotNull] out AudioFileHandle handle);
// String-URL convenience overload: builds a CFUrl (disposed after use) and
// delegates to the CFUrl-based factory.
public static AudioFile Create(string url, AudioFileType fileType, AudioStreamBasicDescription format, AudioFileFlags inFlags)
{
    if (url == null)
        throw new ArgumentNullException("url");

    using (CFUrl cfurl = CFUrl.FromUrlString(url, null))
    {
        return Create(cfurl, fileType, format, inFlags);
    }
}
// String-URL convenience overload: builds a CFUrl (disposed after use) and
// delegates to the CFUrl-based Open.
public static AudioFile Open(string url, AudioFilePermission permissions, AudioFileType fileTypeHint)
{
    if (url == null)
        throw new ArgumentNullException("url");

    using (CFUrl cfurl = CFUrl.FromUrlString(url, null))
    {
        return Open(cfurl, permissions, fileTypeHint);
    }
}
/// <summary>
/// Gets the file extension associated with an <see cref="AudioFileType"/>
/// value via its AudioFileTypeExtension attribute.
/// </summary>
/// <param name="type">The enum value to resolve.</param>
/// <returns>The extension declared by the attribute.</returns>
/// <exception cref="InvalidOperationException">
/// The enum field is missing its AudioFileTypeExtension attribute.
/// </exception>
public static string GetAudioTypeExtension(this AudioFileType type)
{
    var audioFileTypeExtensionAttribute = typeof(AudioFileType).GetField(type.ToString())?.GetCustomAttribute <AudioFileTypeExtension>();
    if (audioFileTypeExtensionAttribute == null)
    {
        // A missing attribute is a programming error: throw the specific
        // InvalidOperationException rather than the bare Exception the
        // original used (still caught by existing catch (Exception) blocks).
        throw new InvalidOperationException($"AudioFileType enum's field {type.ToString()} is not updated with the proper AudioFileTypeExtension attribute");
    }

    return(audioFileTypeExtensionAttribute.Extension);
}
// Fetches the display name for an audio file type via the FileTypeName
// global property. Returns null on any native error; the out value is a
// CFString reference, so the buffer is IntPtr-sized.
public static string GetFileTypeName(AudioFileType fileType)
{
    IntPtr ptr;
    var size = (uint)sizeof(IntPtr);
    if (AudioFileGetGlobalInfo(AudioFileGlobalProperty.FileTypeName, sizeof(AudioFileType), ref fileType, ref size, out ptr) != 0)
    {
        return(null);
    }

    return(CFString.FetchString(ptr));
}
// Returns the MIME types registered for an audio file type, or null on
// failure. The property value is a CFArray of CFStrings, which is converted
// element-by-element into a managed string array.
public static string[] GetMIMETypes(AudioFileType fileType)
{
    IntPtr ptr;
    var size = (uint)sizeof(IntPtr);
    if (AudioFileGetGlobalInfo(AudioFileGlobalProperty.MIMETypesForType, sizeof(AudioFileType), ref fileType, ref size, out ptr) != 0)
    {
        return(null);
    }

    return(NSArray.ArrayFromHandleFunc(ptr, l => CFString.FetchString(l)));
}
// Returns the filename extensions registered for an audio file type, or
// null on failure. Nullable-annotated: both the array and its elements may
// be null (CFArray of CFStrings marshaled element-by-element).
public static string?[]? GetExtensions(AudioFileType fileType)
{
    IntPtr ptr;
    var size = (uint)sizeof(IntPtr);
    if (AudioFileGetGlobalInfo(AudioFileGlobalProperty.ExtensionsForType, sizeof(AudioFileType), ref fileType, ref size, out ptr) != 0)
    {
        return(null);
    }

    return(NSArray.ArrayFromHandleFunc(ptr, l => CFString.FromHandle(l)));
}
// === Public Member Functions === //

/// <summary>
/// Opens the audio file at <paramref name="Path"/> and parses its header.
/// On any read/parse failure the type falls back to Unknown with an empty
/// header instead of propagating the exception (original contract kept).
/// </summary>
/// <param name="Path">Path of the audio file to inspect.</param>
public AudioFile(string Path)
{
    this.Path_ = Path;
    try
    {
        // 'using' guarantees the reader (and its underlying stream) is closed
        // even when header parsing throws; the original only closed on the
        // success path, leaking the stream on failure.
        using (BinaryReader BR = new BinaryReader(new FileStream(this.Path_, FileMode.Open, FileAccess.Read, FileShare.ReadWrite, 0x30), Encoding.ASCII))
        {
            this.DetermineType(BR);
            if (this.Type_ != AudioFileType.Unknown)
            {
                this.Header_ = new AudioFileHeader();
                switch (this.Type_)
                {
                    case AudioFileType.BGMStream:
                        // BGM streams store the sample format before the size...
                        this.Header_.SampleFormat = (SampleFormat)BR.ReadInt32();
                        this.Header_.Size = BR.ReadInt32();
                        break;
                    case AudioFileType.SoundEffect:
                        // ...sound effects store the size before the format.
                        this.Header_.Size = BR.ReadInt32();
                        this.Header_.SampleFormat = (SampleFormat)BR.ReadInt32();
                        break;
                }
                this.Header_.ID = BR.ReadInt32();
                this.Header_.SampleBlocks = BR.ReadInt32();
                this.Header_.LoopStart = BR.ReadInt32();
                this.Header_.SampleRateHigh = BR.ReadInt32();
                this.Header_.SampleRateLow = BR.ReadInt32();
                this.Header_.Unknown1 = BR.ReadInt32();
                this.Header_.Unknown2 = BR.ReadByte();
                this.Header_.Unknown3 = BR.ReadByte();
                this.Header_.Channels = BR.ReadByte();
                this.Header_.BlockSize = BR.ReadByte();
                switch (this.Type_)
                {
                    case AudioFileType.BGMStream:
                        this.Header_.Unknown4 = 0;
                        break;
                    case AudioFileType.SoundEffect:
                        this.Header_.Unknown4 = BR.ReadInt32();
                        break;
                }
            }
        }
    }
    catch
    {
        // Unreadable/malformed file: present it as Unknown with a blank header.
        this.Type_ = AudioFileType.Unknown;
        this.Header_ = new AudioFileHeader();
    }
}
// Read-only constructor: opens a stream-backed audio file via callbacks.
internal AudioFile(AudioFileType fileType, [NotNull] Stream stream)
{
    // This constructor is for reading
    // Delegate references are kept in fields so the native side's function
    // pointers stay valid for the object's lifetime.
    _readCallback = ReadCallback;
    _getSizeCallback = GetSizeCallback;
    _stream = stream;
    _endOfData = stream.Length;

    // NOTE(review): the AudioFileStatus return value is ignored here — on
    // failure 'handle' may be invalid; confirm callers validate Handle.
    SafeNativeMethods.AudioFileOpenWithCallbacks(IntPtr.Zero, _readCallback, null, _getSizeCallback, null, fileType, out var handle);
    Handle = handle;
}
// Core factory: invokes the native create call and wraps the resulting
// handle. Returns null (with the status in 'error') on any failure.
static ExtAudioFile CreateWithUrl(IntPtr urlHandle, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc, AudioFileFlags flag, out ExtAudioFileError error)
{
    IntPtr handle;
    error = (ExtAudioFileError)ExtAudioFileCreateWithUrl(urlHandle, fileType, ref inStreamDesc, IntPtr.Zero, (uint)flag, out handle);

    // Wrap the handle only when the call succeeded and produced one.
    return error == ExtAudioFileError.OK && handle != IntPtr.Zero
        ? new ExtAudioFile(handle)
        : null;
}
// Creates a new writable audio file whose I/O goes through this instance's
// callback delegates; the GCHandle passes 'this' as native user data.
protected void Initialize(AudioFileType inFileType, AudioStreamBasicDescription format)
{
    IntPtr h;
    gch = GCHandle.Alloc(this);
    var code = AudioFileInitializeWithCallbacks(GCHandle.ToIntPtr(gch), dRead, dWrite, dGetSize, dSetSize, inFileType, ref format, 0, out h);
    if (code == 0)
    {
        handle = h;
        return;
    }

    // NOTE(review): gch is not freed on this failure path — potential leak;
    // confirm disposal handles a failed initialization.
    throw new Exception(String.Format("Unable to create AudioSource, code: 0x{0:x}", code));
}
// Opens a parsing stream: native code will push property and packet events
// into this instance via the two static delegates; the GCHandle routes the
// callbacks back to 'this'.
public AudioFileStream(AudioFileType fileTypeHint)
{
    IntPtr h;
    gch = GCHandle.Alloc(this);
    var code = AudioFileStreamOpen(GCHandle.ToIntPtr(gch), dPropertyListener, dInPackets, fileTypeHint, out h);
    if (code == 0)
    {
        handle = h;
        return;
    }

    // NOTE(review): gch is not freed on this failure path — potential leak.
    throw new Exception(String.Format("Unable to create AudioFileStream, code: 0x{0:x}", code));
}
// Opens an existing audio file through this instance's callback delegates;
// the GCHandle passes 'this' as native user data.
protected void Open(AudioFileType fileTypeHint)
{
    IntPtr h;
    gch = GCHandle.Alloc(this);
    var code = AudioFileOpenWithCallbacks(GCHandle.ToIntPtr(gch), dRead, dWrite, dGetSize, dSetSize, fileTypeHint, out h);
    if (code == 0)
    {
        handle = h;
        return;
    }

    // NOTE(review): gch is not freed on this failure path — potential leak.
    throw new Exception(String.Format("Unable to create AudioSource, code: 0x{0:x}", code));
}
// Lists the audio data formats a given file type can contain.
// Returns null on any native error (legacy contract of this binding).
public static AudioFormatType[] GetAvailableFormats (AudioFileType fileType)
{
    uint size;
    // Size query first so the managed array can be allocated exactly.
    if (AudioFileGetGlobalInfoSize (AudioFileGlobalProperty.AvailableFormatIDs, sizeof (AudioFileType), ref fileType, out size) != 0)
        return null;

    var data = new AudioFormatType[size / sizeof (AudioFormatType)];
    // Pin the array so the native call can fill it in place.
    fixed (AudioFormatType* ptr = data) {
        var res = AudioFileGetGlobalInfo (AudioFileGlobalProperty.AvailableFormatIDs, sizeof (AudioFormatType), ref fileType, ref size, ptr);
        if (res != 0)
            return null;

        return data;
    }
}
// Creates a new audio file at the given NSUrl. Returns the wrapped handle
// on success, or null when the native call reports any error (status != 0).
public static AudioFile Create(NSUrl url, AudioFileType fileType, AudioStreamBasicDescription format, AudioFileFlags inFlags)
{
    if (url == null)
        throw new ArgumentNullException("url");

    IntPtr handle;
    return AudioFileCreateWithURL(url.Handle, fileType, ref format, inFlags, out handle) == 0
        ? new AudioFile(handle)
        : null;
}
// Opens an existing audio file at the given NSUrl. Returns the wrapped
// handle on success, or null when the native call reports any error.
public static AudioFile Open(NSUrl url, AudioFilePermission permissions, AudioFileType fileTypeHint)
{
    if (url == null)
        throw new ArgumentNullException("url");

    IntPtr handle;
    return AudioFileOpenURL(url.Handle, (byte)permissions, fileTypeHint, out handle) == 0
        ? new AudioFile(handle)
        : null;
}
// Write constructor: initializes a new stream-backed audio file with the
// given data format; all four callbacks are wired up because writing needs
// read, write, get-size and set-size.
internal AudioFile(AudioStreamBasicDescription description, AudioFileType fileType, [NotNull] Stream stream)
{
    // This constructor is for writing
    // Delegate references are kept in fields so the native side's function
    // pointers stay valid for the object's lifetime.
    _readCallback = ReadCallback;
    _getSizeCallback = GetSizeCallback;
    _writeCallback = WriteCallback;
    _setSizeCallback = SetSizeCallback;
    _stream = stream;

    // NOTE(review): the AudioFileStatus return value is ignored here —
    // confirm callers validate Handle after construction.
    SafeNativeMethods.AudioFileInitializeWithCallbacks(IntPtr.Zero, _readCallback, _writeCallback, _getSizeCallback, _setSizeCallback, fileType, ref description, 0, out var handle);
    Handle = handle;
}
/// <summary>
/// Converts one audio format into another.
/// </summary>
/// <param name="pathToFile">Path to the file to be converted.</param>
/// <param name="outputStream">Stream where the converted sound will be written.</param>
/// <param name="targetFileType">The target audio file type; WMA is not a valid target.</param>
/// <param name="sampleRate">The target sample rate.</param>
/// <param name="bitDepth">The target bit depth.</param>
/// <param name="numChannels">The number of channels.</param>
/// <returns>
/// The resulting wave format for WAV outputs; null for MP3 outputs or
/// unsupported source/target combinations.
/// </returns>
/// <exception cref="ArgumentException">WMA was requested as the target type.</exception>
/// <exception cref="NotSupportedException">An MP3 source was given a non-WAV target.</exception>
public static WaveFormat ConvertFile(string pathToFile, Stream outputStream, AudioFileType targetFileType, int sampleRate, int bitDepth, int numChannels)
{
    if (targetFileType == AudioFileType.Wma)
        throw new ArgumentException("WMA is not a valid output type.");

    // Source type is inferred from the last three characters of the path.
    string sourceFileType = pathToFile.Substring(pathToFile.Length - 3).ToLowerInvariant();
    switch (sourceFileType)
    {
        case "mp3":
            if (targetFileType != AudioFileType.Wav)
                throw new NotSupportedException("mp3's should only ever be converted to .wav.");
            return mp3ToWav(pathToFile, outputStream, sampleRate, bitDepth, numChannels);

        case "wma":
            if (targetFileType == AudioFileType.Mp3)
            {
                wmaToMp3(pathToFile, outputStream, sampleRate, bitDepth, numChannels);
                return null;
            }
            else if (targetFileType == AudioFileType.Wav)
            {
                return wmaToWav(pathToFile, outputStream, sampleRate, bitDepth, numChannels);
            }
            break;

        case "wav":
            if (targetFileType == AudioFileType.Mp3)
            {
                wavToMp3(pathToFile, outputStream, sampleRate, bitDepth, numChannels);
                return null;
            }
            else if (targetFileType == AudioFileType.Wav)
                return reencodeWav(pathToFile, outputStream, sampleRate, bitDepth, numChannels);
            break;
    }

    // Unknown source extension or unsupported combination.
    return null;
}
// P/Invoke overload used for array-valued properties: the result is written
// through a raw AudioFormatType* into a pinned managed buffer.
static extern int AudioFileGetGlobalInfo(AudioFileGlobalProperty propertyID, uint size, ref AudioFileType inSpecifier, ref uint ioDataSize, AudioFormatType* outPropertyData);
// P/Invoke overload used for pointer-valued properties (e.g. CFString or
// CFArray references): the result arrives as a single IntPtr.
extern static int AudioFileGetGlobalInfo (AudioFileGlobalProperty propertyID, uint size, ref AudioFileType inSpecifier, ref uint ioDataSize, out IntPtr outPropertyData);
// P/Invoke: asks Core Audio how many bytes a global property value needs,
// so the caller can allocate an exactly-sized buffer before fetching it.
extern static int AudioFileGetGlobalInfoSize (AudioFileGlobalProperty propertyID, uint size, ref AudioFileType inSpecifier, out uint outDataSize);
// Reads the sound block: when the resource carries no introspection
// manifest (early format versions), a minimal synthetic NTRO layout is
// installed first, then the base NTRO reader decodes the fields and the
// bit-packed sound info is unpacked into individual properties.
public override void Read(BinaryReader reader, Resource resource)
{
    // NTRO only in version 0?
    if (resource.IntrospectionManifest == null)
    {
        // Synthesize the three fields this block needs, at their known
        // on-disk offsets, so base.Read can decode them.
        var block = new ResourceIntrospectionManifest.ResourceDiskStruct();
        var field = new ResourceIntrospectionManifest.ResourceDiskStruct.Field
        {
            FieldName = "m_bitpackedsoundinfo",
            Type = DataType.UInt32
        };
        block.FieldIntrospection.Add(field);

        field = new ResourceIntrospectionManifest.ResourceDiskStruct.Field
        {
            FieldName = "m_loopStart",
            Type = DataType.Int32,
            OnDiskOffset = 4,
        };
        block.FieldIntrospection.Add(field);

        field = new ResourceIntrospectionManifest.ResourceDiskStruct.Field
        {
            FieldName = "m_flDuration",
            Type = DataType.Float,
            OnDiskOffset = 12,
        };
        block.FieldIntrospection.Add(field);

        resource.Blocks[BlockType.NTRO] = new ResourceIntrospectionManifest();
        resource.IntrospectionManifest.ReferencedStructs.Add(block);
    }

    reader.BaseStream.Position = Offset;
    base.Read(reader, resource);

    LoopStart = ((NTROValue<int>)Output["m_loopStart"]).Value;
    Duration = ((NTROValue<float>)Output["m_flDuration"]).Value;

    // Unpack the bit fields: (offset, width) pairs per ExtractSub.
    var bitpackedSoundInfo = ((NTROValue<uint>)Output["m_bitpackedsoundinfo"]).Value;
    Type = (AudioFileType)ExtractSub(bitpackedSoundInfo, 0, 2);
    Bits = ExtractSub(bitpackedSoundInfo, 2, 5);
    Channels = ExtractSub(bitpackedSoundInfo, 7, 2);
    SampleSize = ExtractSub(bitpackedSoundInfo, 9, 3);
    AudioFormat = ExtractSub(bitpackedSoundInfo, 12, 2);
    SampleRate = ExtractSub(bitpackedSoundInfo, 14, 17);

    // Only MP3 and WAV payloads are currently handled downstream.
    if (Type != AudioFileType.MP3 && Type != AudioFileType.WAV)
    {
        throw new NotImplementedException($"Unknown audio file format '{Type}', please report this on GitHub.");
    }
}
// Core factory: calls the native create routine and wraps the resulting
// handle; any failure (bad status or null handle) yields null with the
// status recorded in 'error'.
static ExtAudioFile CreateWithUrl (IntPtr urlHandle, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc, AudioFileFlags flag, out ExtAudioFileError error)
{
    IntPtr handle;
    error = (ExtAudioFileError) ExtAudioFileCreateWithUrl (urlHandle, fileType, ref inStreamDesc, IntPtr.Zero, (uint)flag, out handle);

    if (error == ExtAudioFileError.OK && handle != IntPtr.Zero)
        return new ExtAudioFile (handle);

    return null;
}
/// <summary>
/// Initializes a new instance of AudioContent.
/// </summary>
/// <param name="audioFileName">Name of the audio source file to be processed.</param>
/// <param name="audioFileType">Type of the processed audio.</param>
/// <remarks>Stub implementation: both arguments are currently ignored.</remarks>
public AudioContent(string audioFileName, AudioFileType audioFileType)
{
}
// Exception-throwing facade over the out-error factory: null URL, native
// error, or null result each become a distinct exception.
public static ExtAudioFile CreateWithUrl (CFUrl url, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc, //AudioChannelLayout channelLayout,
    AudioFileFlags flag)
{
    if (url == null) {
        throw new ArgumentNullException ("url");
    }

    ExtAudioFileError err;
    var audioFile = CreateWithUrl (url.Handle, fileType, inStreamDesc, flag, out err);

    if (err != ExtAudioFileError.OK) { // if (err != 0) <- to keep old implementation
        throw new ArgumentException (String.Format ("Error code:{0}", err));
    }
    if (audioFile == null) { // if (ptr == IntPtr.Zero) <- to keep old implementation
        throw new InvalidOperationException ("Can not get object instance");
    }

    return audioFile;
}
/// <summary>
/// Initializes a new instance of AudioContent.
/// </summary>
/// <param name="audioFileName">Name of the audio source file to be processed.</param>
/// <param name="audioFileType">Type of the processed audio: WAV, MP3 or WMA.</param>
/// <remarks>Constructs the object from the specified source file, in the format specified.</remarks>
public AudioContent(string audioFileName, AudioFileType audioFileType)
{
    // Record the requested type and file name, then load the file contents.
    fileType = audioFileType;
    fileName = audioFileName;
    Read(audioFileName);
}
// Lists every stream description (ASBD) the given container type supports
// for the given codec/format type; null on any native error.
public static AudioStreamBasicDescription[] GetAvailableStreamDescriptions (AudioFileType fileType, AudioFormatType formatType)
{
    AudioFileTypeAndFormatID input;
    input.FileType = fileType;
    input.FormatType = formatType;

    uint size;
    // Query the required byte count first so the array can be sized exactly.
    if (AudioFileGetGlobalInfoSize (AudioFileGlobalProperty.AvailableStreamDescriptionsForFormat, (uint)sizeof (AudioFileTypeAndFormatID), ref input, out size) != 0)
        return null;

    var data = new AudioStreamBasicDescription[size / sizeof (AudioStreamBasicDescription)];
    // Pin the managed array so the native call can fill it in place.
    fixed (AudioStreamBasicDescription* ptr = data) {
        var res = AudioFileGetGlobalInfo (AudioFileGlobalProperty.AvailableStreamDescriptionsForFormat, (uint)sizeof (AudioFileTypeAndFormatID), ref input, ref size, ptr);
        if (res != 0)
            return null;

        return data;
    }
}
// Converts the audio file at 'input' into 'output' with Core Audio's
// AudioConverter, honoring the requested codec, container and quality.
// Returns false when format-info lookup or the conversion loop fails;
// returns true otherwise (including some setup failures — see notes).
public static bool Convert(string input, string output, AudioFormatType targetFormat, AudioFileType containerType, Microsoft.Xna.Framework.Content.Pipeline.Audio.ConversionQuality quality)
{
    CFUrl source = CFUrl.FromFile (input);
    CFUrl dest = CFUrl.FromFile (output);
    var dstFormat = new AudioStreamBasicDescription ();
    var sourceFile = AudioFile.Open (source, AudioFilePermission.Read);
    AudioFormatType outputFormat = targetFormat;
    // get the source data format
    var srcFormat = (AudioStreamBasicDescription)sourceFile.DataFormat;
    var outputSampleRate = 0;
    // Low quality halves the source sample rate; 8 kHz is the floor either way.
    switch (quality)
    {
    case Microsoft.Xna.Framework.Content.Pipeline.Audio.ConversionQuality.Low:
        outputSampleRate = (int)Math.Max (8000, srcFormat.SampleRate / 2);
        break;
    default:
        outputSampleRate = (int)Math.Max (8000, srcFormat.SampleRate);
        break;
    }

    dstFormat.SampleRate = (outputSampleRate == 0 ? srcFormat.SampleRate : outputSampleRate); // set sample rate
    if (outputFormat == AudioFormatType.LinearPCM)
    {
        // if the output format is PC create a 16-bit int PCM file format description as an example
        dstFormat.Format = outputFormat;
        dstFormat.ChannelsPerFrame = srcFormat.ChannelsPerFrame;
        dstFormat.BitsPerChannel = 16;
        dstFormat.BytesPerPacket = dstFormat.BytesPerFrame = 2 * dstFormat.ChannelsPerFrame;
        dstFormat.FramesPerPacket = 1;
        dstFormat.FormatFlags = AudioFormatFlags.LinearPCMIsPacked | AudioFormatFlags.LinearPCMIsSignedInteger;
    }
    else
    {
        // compressed format - need to set at least format, sample rate and channel fields for kAudioFormatProperty_FormatInfo
        dstFormat.Format = outputFormat;
        dstFormat.ChannelsPerFrame = (outputFormat == AudioFormatType.iLBC ? 1 : srcFormat.ChannelsPerFrame); // for iLBC num channels must be 1

        // use AudioFormat API to fill out the rest of the description
        var fie = AudioStreamBasicDescription.GetFormatInfo (ref dstFormat);
        if (fie != AudioFormatError.None)
        {
            // NOTE(review): sourceFile is not disposed on this early return.
            return false;
        }
    }

    var converter = AudioConverter.Create (srcFormat, dstFormat);
    converter.InputData += HandleInputData;

    // if the source has a cookie, get it and set it on the Audio Converter
    ReadCookie (sourceFile, converter);

    // get the actual formats back from the Audio Converter
    srcFormat = converter.CurrentInputStreamDescription;
    dstFormat = converter.CurrentOutputStreamDescription;

    // if encoding to AAC set the bitrate to 192k which is a nice value for this demo
    // kAudioConverterEncodeBitRate is a UInt32 value containing the number of bits per second to aim for when encoding data
    if (dstFormat.Format == AudioFormatType.MPEG4AAC)
    {
        uint outputBitRate = 192000; // 192k

        // ignore errors as setting may be invalid depending on format specifics such as samplerate
        try
        {
            converter.EncodeBitRate = outputBitRate;
        }
        catch
        {
        }

        // get it back and print it out
        outputBitRate = converter.EncodeBitRate;
    }

    // create the destination file
    var destinationFile = AudioFile.Create (dest, containerType, dstFormat, AudioFileFlags.EraseFlags);

    // set up source buffers and data proc info struct
    afio = new AudioFileIO (32768);
    afio.SourceFile = sourceFile;
    afio.SrcFormat = srcFormat;

    if (srcFormat.BytesPerPacket == 0)
    {
        // if the source format is VBR, we need to get the maximum packet size
        // use kAudioFilePropertyPacketSizeUpperBound which returns the theoretical maximum packet size
        // in the file (without actually scanning the whole file to find the largest packet,
        // as may happen with kAudioFilePropertyMaximumPacketSize)
        afio.SrcSizePerPacket = sourceFile.PacketSizeUpperBound;

        // how many packets can we read for our buffer size?
        afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket;

        // allocate memory for the PacketDescription structures describing the layout of each packet
        afio.PacketDescriptions = new AudioStreamPacketDescription [afio.NumPacketsPerRead];
    }
    else
    {
        // CBR source format
        afio.SrcSizePerPacket = srcFormat.BytesPerPacket;
        afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket;
        // allocate memory for the PacketDescription structures describing the layout of each packet
        afio.PacketDescriptions = new AudioStreamPacketDescription [afio.NumPacketsPerRead];
    }

    // set up output buffers
    int outputSizePerPacket = dstFormat.BytesPerPacket; // this will be non-zero if the format is CBR
    const int theOutputBufSize = 32768;
    var outputBuffer = Marshal.AllocHGlobal (theOutputBufSize);
    AudioStreamPacketDescription[] outputPacketDescriptions = null;

    if (outputSizePerPacket == 0)
    {
        // if the destination format is VBR, we need to get max size per packet from the converter
        outputSizePerPacket = (int)converter.MaximumOutputPacketSize;
    }
    // allocate memory for the PacketDescription structures describing the layout of each packet
    outputPacketDescriptions = new AudioStreamPacketDescription [theOutputBufSize / outputSizePerPacket];
    int numOutputPackets = theOutputBufSize / outputSizePerPacket;

    // if the destination format has a cookie, get it and set it on the output file
    WriteCookie (converter, destinationFile);

    // write destination channel layout
    if (srcFormat.ChannelsPerFrame > 2)
    {
        WriteDestinationChannelLayout (converter, sourceFile, destinationFile);
    }

    long totalOutputFrames = 0; // used for debugging
    long outputFilePos = 0;
    AudioBuffers fillBufList = new AudioBuffers (1);
    bool error = false;

    // loop to convert data
    while (true)
    {
        // set up output buffer list
        fillBufList [0] = new AudioBuffer ()
        {
            NumberChannels = dstFormat.ChannelsPerFrame,
            DataByteSize = theOutputBufSize,
            Data = outputBuffer
        };

        // convert data
        int ioOutputDataPackets = numOutputPackets;
        var fe = converter.FillComplexBuffer (ref ioOutputDataPackets, fillBufList, outputPacketDescriptions);
        // if interrupted in the process of the conversion call, we must handle the error appropriately
        if (fe != AudioConverterError.None)
        {
            error = true;
            break;
        }

        if (ioOutputDataPackets == 0)
        {
            // this is the EOF conditon
            break;
        }

        // write to output file
        var inNumBytes = fillBufList [0].DataByteSize;
        var we = destinationFile.WritePackets (false, inNumBytes, outputPacketDescriptions, outputFilePos, ref ioOutputDataPackets, outputBuffer);
        if (we != 0)
        {
            error = true;
            break;
        }

        // advance output file packet position
        outputFilePos += ioOutputDataPackets;

        if (dstFormat.FramesPerPacket != 0)
        {
            // the format has constant frames per packet
            totalOutputFrames += (ioOutputDataPackets * dstFormat.FramesPerPacket);
        }
        else
        {
            // variable frames per packet require doing this for each packet (adding up the number of sample frames of data in each packet)
            for (var i = 0; i < ioOutputDataPackets; ++i)
                totalOutputFrames += outputPacketDescriptions [i].VariableFramesInPacket;
        }
    }

    Marshal.FreeHGlobal (outputBuffer);

    if (!error)
    {
        // write out any of the leading and trailing frames for compressed formats only
        if (dstFormat.BitsPerChannel == 0)
        {
            // our output frame count should jive with
            WritePacketTableInfo (converter, destinationFile);
        }

        // write the cookie again - sometimes codecs will update cookies at the end of a conversion
        WriteCookie (converter, destinationFile);
    }

    converter.Dispose ();
    destinationFile.Dispose ();
    sourceFile.Dispose ();

    // NOTE(review): true is returned even when the loop set 'error' — the
    // failure is only reflected in the (possibly truncated) output file.
    return true;
}
/// <summary>
/// Creates a new <c>AudioContent</c> instance from a source audio file.
/// </summary>
/// <param name="audioFileName">Path of the audio source file to be processed.</param>
/// <param name="audioFileType">Format of the processed audio: WAV, MP3 or WMA.</param>
/// <remarks>Constructs the object from the specified source file, in the format specified.</remarks>
public AudioContent(string audioFileName, AudioFileType audioFileType)
{
    // Loading and processing of the source audio is not implemented yet.
    throw new NotImplementedException();
}
// === Private Member Functions === //

/// <summary>
/// Reads the magic marker at the reader's current position and sets
/// <c>Type_</c> accordingly. Consumes 8 characters, plus 4 more when the
/// first 8 do not identify a sound effect.
/// </summary>
private void DetermineType(BinaryReader BR)
{
    // 8 bytes are enough to recognize a sound-effect file.
    string magic = new string(BR.ReadChars(8));
    if (magic == "SeWave\0\0")
    {
        this.Type_ = AudioFileType.SoundEffect;
        return;
    }

    // BGM streams carry a longer, 12-byte marker; pull in the remaining 4 chars.
    magic += new string(BR.ReadChars(4));
    if (magic == "BGMStream\0\0\0")
        this.Type_ = AudioFileType.BGMStream;
    // Any other marker leaves Type_ untouched (the caller treats it as Unknown).
}
/// <summary>
/// Queries AudioFile Services for the MIME types registered for the given
/// audio file type. Returns null when the native lookup fails.
/// </summary>
public static string[] GetMIMETypes (AudioFileType fileType)
{
    var outSize = (uint) sizeof (IntPtr);
    IntPtr arrayHandle;
    var rc = AudioFileGetGlobalInfo (AudioFileGlobalProperty.MIMETypesForType, sizeof (AudioFileType), ref fileType, ref outSize, out arrayHandle);
    if (rc != 0)
        return null;

    // Marshal the returned CFArray of CFStrings into managed strings.
    return NSArray.ArrayFromHandleFunc (arrayHandle, handle => CFString.FetchString (handle));
}
// === Public Member Functions === //

/// <summary>
/// Opens the file at <paramref name="Path"/>, sniffs its type and parses its
/// header. On any read/parse failure the instance degrades to
/// <c>AudioFileType.Unknown</c> with a default header instead of throwing.
/// </summary>
/// <param name="Path">Path of the audio file to load.</param>
public AudioFile(string Path)
{
    this.Path_ = Path;
    try
    {
        // FIX: the original only called BR.Close() on the success path, so the
        // reader (and its underlying FileStream) leaked whenever a read threw.
        // "using" guarantees disposal on every exit path.
        using (BinaryReader BR = new BinaryReader(new FileStream(this.Path_, FileMode.Open, FileAccess.Read, FileShare.ReadWrite, 0x30), Encoding.ASCII))
        {
            this.DetermineType(BR);
            if (this.Type_ != AudioFileType.Unknown)
            {
                this.Header_ = new AudioFileHeader();
                // The first two header fields are stored in opposite order
                // depending on the file type.
                switch (this.Type_)
                {
                    case AudioFileType.BGMStream:
                        this.Header_.SampleFormat = (SampleFormat) BR.ReadInt32();
                        this.Header_.Size = BR.ReadInt32();
                        break;
                    case AudioFileType.SoundEffect:
                        this.Header_.Size = BR.ReadInt32();
                        this.Header_.SampleFormat = (SampleFormat) BR.ReadInt32();
                        break;
                }
                this.Header_.ID = BR.ReadInt32();
                this.Header_.SampleBlocks = BR.ReadInt32();
                this.Header_.LoopStart = BR.ReadInt32();
                this.Header_.SampleRateHigh = BR.ReadInt32();
                this.Header_.SampleRateLow = BR.ReadInt32();
                this.Header_.Unknown1 = BR.ReadInt32();
                this.Header_.Unknown2 = BR.ReadByte();
                this.Header_.Unknown3 = BR.ReadByte();
                this.Header_.Channels = BR.ReadByte();
                this.Header_.BlockSize = BR.ReadByte();
                // Only sound effects carry the trailing Unknown4 field;
                // BGM streams get an explicit zero.
                switch (this.Type_)
                {
                    case AudioFileType.BGMStream:
                        this.Header_.Unknown4 = 0;
                        break;
                    case AudioFileType.SoundEffect:
                        this.Header_.Unknown4 = BR.ReadInt32();
                        break;
                }
            }
        }
    }
    catch
    {
        // Best-effort: unreadable or malformed files are reported as Unknown
        // with a blank header rather than propagating the exception.
        this.Type_ = AudioFileType.Unknown;
        this.Header_ = new AudioFileHeader();
    }
}
/// <summary>
/// Creates a playback session that parses incoming data as the given audio
/// file type, wiring up handlers for decoded packets and discovered
/// stream properties.
/// </summary>
public StreamingPlayback (AudioFileType type)
{
    var parser = new AudioFileStream (type);
    parser.PacketDecoded += AudioPacketDecoded;
    parser.PropertyFound += AudioPropertyFound;
    fileStream = parser;
}
/// <summary>
/// Creates a new audio file at the given URL with the supplied stream
/// description, wrapping the resulting native ExtAudioFileRef.
/// </summary>
/// <exception cref="ArgumentException">The native call returned an error code.</exception>
/// <exception cref="InvalidOperationException">No native instance was produced.</exception>
public static ExtAudioFile CreateWithUrl(MonoMac.CoreFoundation.CFUrl url, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc,
                                         //AudioChannelLayout channelLayout,
                                         AudioFileFlags flag)
{
    IntPtr fileHandle = IntPtr.Zero;
    int status;
    unsafe
    {
        // The native function writes the new ExtAudioFileRef through the
        // final pointer argument.
        status = ExtAudioFileCreateWithUrl(url.Handle, fileType, ref inStreamDesc, IntPtr.Zero, (uint)flag, (IntPtr)(&fileHandle));
    }

    if (status != 0)
    {
        throw new ArgumentException(String.Format("Error code:{0}", status));
    }
    if (fileHandle == IntPtr.Zero)
    {
        throw new InvalidOperationException("Can not get object instance");
    }
    return new ExtAudioFile(fileHandle);
}
/// <summary>
/// Convenience overload: validates the managed CFUrl wrapper and forwards
/// its native handle to the IntPtr-based factory.
/// </summary>
public static ExtAudioFile CreateWithUrl(CFUrl url, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc, AudioFileFlags flag, out ExtAudioFileError error)
{
    if (url == null)
    {
        throw new ArgumentNullException ("url");
    }

    return CreateWithUrl (url.Handle, fileType, inStreamDesc, flag, out error);
}