/// <summary>
/// Indicates whether file writing support for the specified file type is
/// provided by this audio file writer.
/// </summary>
/// <param name="fileType">the file type for which write capabilities are queried</param>
/// <returns>
/// <code>true</code> if the file type is supported,
/// otherwise <code>false</code>
/// </returns>
public virtual bool isFileTypeSupported(AudioFileFormat.Type fileType)
{
    // Supported when the queried type matches any type this writer reports.
    foreach (AudioFileFormat.Type supported in getAudioFileTypes())
    {
        if (fileType.Equals(supported))
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// Applies the audio-storage settings chosen in the dialog: storage folder,
/// file format and whether a single muxed audio file is used.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // Refuse to apply the settings while the chosen folder does not exist.
    string folder = folderpathTextBox.Text;
    if (!Directory.Exists(folder))
    {
        MessageBox.Show("Folder for audio storage doesn't exist", "Folder for audio files");
        return;
    }

    // Persist the selected format, folder and mux preference.
    settings.aff = ((AudioFormat)fileformatComboBox.SelectedItem).aff;
    settings.audiofolder = folder;
    settings.muxed_audio_file = muxedRadioButton.Checked;
}
/// <summary>
/// Extracts a chunk of audio from <paramref name="file"/> and writes it to a
/// new file named <paramref name="text"/>.
/// </summary>
/// <param name="file">the source audio file</param>
/// <param name="text">path of the destination file to write the chunk to</param>
/// <param name="num">start offset of the chunk, in milliseconds</param>
/// <param name="num2">length of the chunk, in milliseconds</param>
private static void dumpStreamChunk(File file, string text, long num, long num2)
{
    AudioFileFormat audioFileFormat = AudioSystem.getAudioFileFormat(file);
    AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(file);
    AudioInputStream audioInputStream2 = null;
    try
    {
        AudioFormat format = audioFileFormat.getFormat();
        // Bytes per millisecond = frame size (bytes) * frames per second / 1000.
        int num3 = Math.round((float)format.getFrameSize() * format.getFrameRate() / 1000f);
        audioInputStream.skip(num * (long)num3);
        audioInputStream2 = new AudioInputStream(audioInputStream, format, num2 * (long)num3);
        AudioSystem.write(audioInputStream2, audioFileFormat.getType(), new File(text));
    }
    finally
    {
        // Close the streams even when skip/write throws; the original version
        // leaked both streams on failure. Close the wrapper stream first.
        if (audioInputStream2 != null)
        {
            audioInputStream2.close();
        }
        audioInputStream.close();
    }
}
/// <summary>
/// Validates the newly selected audio file format: MP3 formats require the
/// LAME encoder DLL, so the selection reverts to the first entry when the
/// DLL is not present.
/// </summary>
private void fileformatComboBox_SelectedIndexChanged(object sender, EventArgs e)
{
    AudioFileFormat aff = ((AudioFormat)fileformatComboBox.SelectedItem).aff;

    // The encoder DLL is present — any format is acceptable.
    if (File.Exists("lame_enc.dll"))
    {
        return;
    }

    bool isMp3Format = aff == AudioFileFormat.AFF_MP3_64KBIT_FORMAT
        || aff == AudioFileFormat.AFF_MP3_128KBIT_FORMAT
        || aff == AudioFileFormat.AFF_MP3_256KBIT_FORMAT;
    if (isMp3Format)
    {
        MessageBox.Show("DLL file lame_enc.dll is required for MP3 encoding");
        fileformatComboBox.SelectedIndex = 0;
    }
}
/// <summary>
/// Saves the settings as "settings.json" in the current working directory.
/// (The original comment said "registry", but the code serializes to a JSON
/// file on disk.)
/// </summary>
public void Save()
{
    // Each setting is stored as a string keyed by its historical name.
    Dictionary<string, string> option = new Dictionary<string, string>
    {
        { "audioFileFormat", AudioFileFormat.ToString() },
        { "remainTree", RemainTree.ToString() },
        { "excludeUnusedFiles", Use.ToString() },
        { "IsValidCustomOutputPath", IsValidCustomOutputPath.ToString() },
        // The output folder.
        { "OutputPath", OutputPath.ToString() },
    };

    string path = Path.Combine(Directory.GetCurrentDirectory(), "settings.json");
    File.WriteAllText(path, JsonConvert.SerializeObject(option));
}
/// <summary>
/// Gets audio file information by running ffprobe and parsing its JSON output.
/// </summary>
/// <param name="file">Path to the audio file.</param>
/// <returns>The parsed format section; on failure the default (likely null) format.</returns>
/// <exception cref="Exception">Failures are caught, logged and swallowed.</exception>
public async Task<AudioFileInfo> GetFileFormat(string file)
{
    var result = new AudioFileFormat();
    try
    {
        var arguments = $"-i {file.CoverQuotes()} -print_format json -show_format -v quiet";
        var info = new ProcessStartInfo
        {
            WindowStyle = ProcessWindowStyle.Hidden,
            UseShellExecute = false,
            FileName = _config.FFprobePath,
            Arguments = arguments,
            RedirectStandardError = true,
            RedirectStandardOutput = true,
            StandardErrorEncoding = Encoding.UTF8,
            StandardOutputEncoding = Encoding.UTF8,
        };
        using var process = new Process { StartInfo = info, EnableRaisingEvents = true };
        process.Start();
        // Read the whole JSON document in one call. The original line-by-line
        // Append loop dropped newlines; ReadToEndAsync preserves the output
        // exactly and is simpler.
        string json = await process.StandardOutput.ReadToEndAsync();
        // Guard against a null deserialization result, which previously caused
        // a NullReferenceException inside the catch block as well.
        result = JsonConvert.DeserializeObject<AudioFileFormat>(json) ?? result;
        process.WaitForExit();
        return result.Format;
    }
    catch (Exception ex)
    {
        // Best effort: log the failure and return the default format.
        ConsoleHelper.Error($"Не удалось обработать файл {ex}");
        return result.Format;
    }
}
/// <summary>
/// Initializes the scene: attaches the GUI wallpaper image, selects the audio
/// file format and loading method, loads the sound files and applies the
/// per-sound volume/loop settings.
/// </summary>
public override void Init()
{
    #region For some planned features.
    /*
     * _screenWidth = Screen.PrimaryScreen.Bounds.Width;
     * _screenHeight = Screen.PrimaryScreen.Bounds.Height;
     * SetWindowSize(_screenWidth, _screenHeight / 9 * 2, true);
     */
    #endregion

    #region GUI
    // GUIHandler: attach to the render context and add the background image.
    _guiHandler = new GUIHandler();
    _guiHandler.AttachToContext(RC);
    _guiImage = new GUIImage("Assets/img/AudioTestWallpaper.png", 0, 0, 0, 1280, 720);
    _guiHandler.Add(_guiImage);
    #endregion

    // Set this to control the file formats and loading methods.
    // Must be set BEFORE LoadFiles() so the right assets are picked up.
    _fileFormat = AudioFileFormat.mp3;
    _streamOrLoad = LoadMethod.load;

    // Now load the correct files.
    LoadFiles();

    #region Audio settings
    // NOTE(review): volumes above 1.0 — presumably the audio backend accepts
    // gain > 1.0; confirm against the sound API.
    _sound1.Volume = 3.0f;
    _sound2.Volume = 3.0f;
    _sound3.Volume = 3.0f;
    _sound4.Volume = 2.0f;

    _sound1.Loop = false;
    _sound2.Loop = false;
    _sound3.Loop = false;
    _sound4.Loop = false;
    #endregion
}
/// <summary>
/// Store audio conversations to disk.
///
/// Sets the path of where to store audio from a channel to disk. To store in
/// MP3 format instead of .wav format ensure that the LAME MP3 encoder file
/// lame_enc.dll is placed in the same directory as the SDK's DLL files. To
/// stop recording set <paramref name="szFolderPath"/> to an empty string and
/// <paramref name="uAFF"/> to <see cref="AudioFileFormat"/>.AFF_NONE.
///
/// To store audio of channels other than the client instance's, see the
/// "spying" section of the SDK documentation.
/// </summary>
/// <param name="nUserID">The ID of the BearWare.User which should store audio
/// to disk.</param>
/// <param name="szFolderPath">The path on disk to where files should be
/// stored. This value will be stored in szMediaStorageDir of
/// BearWare.User.</param>
/// <param name="szFileNameVars">The file name used for audio files can consist
/// of the variables %nickname%, %username%, %userid%, %counter% and a
/// specified time based on strftime. The default format used by TeamTalk is
/// '%Y%m%d-%H%M%S #%userid% %username%'. The %counter% variable is a 9-digit
/// integer incremented for each audio file. The file extension is appended
/// automatically based on the file type (.wav for AFF_WAVE_FORMAT, .mp3 for
/// AFF_MP3_*_FORMAT). Pass null or an empty string to revert to the default
/// format.</param>
/// <param name="uAFF">The <see cref="AudioFileFormat"/> to use for storing
/// audio files. Passing AFF_NONE will cancel/reset the current
/// recording.</param>
/// <returns>FALSE if the path is invalid, otherwise TRUE.</returns>
/// <seealso cref="StartRecordingMuxedAudioFile"/>
public bool SetUserMediaStorageDir(int nUserID, string szFolderPath, string szFileNameVars, AudioFileFormat uAFF)
{
    // Thin wrapper over the native TeamTalk DLL entry point.
    return TTDLL.TT_SetUserMediaStorageDir(m_ttInst, nUserID, szFolderPath, szFileNameVars, uAFF);
}
/// <summary>
/// Creates a named audio-format entry pairing a display name with the
/// underlying <see cref="AudioFileFormat"/> value.
/// </summary>
/// <param name="aff">Audio file format value.</param>
/// <param name="name">Human-readable name of the format.</param>
public AudioFormat(AudioFileFormat aff, string name)
{
    this.name = name;
    this.aff = aff;
}
/// <summary>
/// Returns the distinct artist names available for the given audio file
/// format, delegating to the underlying data gateway.
/// </summary>
/// <param name="format">Audio file format to filter by.</param>
/// <returns>Distinct artist names.</returns>
public List<string> SelectDistinctArtistNames(AudioFileFormat format)
{
    List<string> artistNames = _gateway.SelectDistinctArtistNames(format);
    return artistNames;
}
/// <summary>
/// Construct an audio file format object with a set of defined properties.
/// </summary>
/// <remarks>
/// This public constructor may be used by applications to describe the
/// properties of a requested audio file. The properties map will be copied
/// to prevent any changes to it.
/// NOTE(review): passing a null <c>properties</c> map will throw from the
/// Dictionary copy-constructor — confirm callers never pass null.
/// </remarks>
/// <param name="type">the type of the audio file</param>
/// <param name="format">the format of the audio data contained in the file</param>
/// <param name="frameLength">
/// the audio data length in sample frames, or
/// <code>AudioSystem.NOT_SPECIFIED</code>
/// </param>
/// <param name="properties">
/// a <code>Map&lt;String,Object&gt;</code> object
/// with properties
/// </param>
/// <since>1.5</since>
public AudioFileFormat(AudioFileFormat.Type type, AudioFormat format
    , int frameLength, System.Collections.Generic.IDictionary<string, object> properties
    ) : this(type, AudioSystem.NOT_SPECIFIED, format, frameLength)
{
    // Defensive copy: later mutation of the caller's map must not affect us.
    this.propertiesField = new System.Collections.Generic.Dictionary<string, object>(properties
        );
}
/// <summary>Constructs an audio file format object.</summary>
/// <remarks>
/// This public constructor may be used by applications to describe the
/// properties of a requested audio file. The byte length is left as
/// <code>AudioSystem.NOT_SPECIFIED</code> and no properties map is attached.
/// </remarks>
/// <param name="type">the type of the audio file</param>
/// <param name="format">the format of the audio data contained in the file</param>
/// <param name="frameLength">the audio data length in sample frames, or <code>AudioSystem.NOT_SPECIFIED</code>
/// </param>
public AudioFileFormat(AudioFileFormat.Type type, AudioFormat format
    , int frameLength) : this(type, AudioSystem.NOT_SPECIFIED, format, frameLength)
{
    // Pure delegation — no additional state to initialize.
}
/// <summary>
/// Returns the audio files matching the given format, artist, album and
/// search filter, delegating to the underlying data gateway.
/// </summary>
/// <param name="format">Audio file format to filter by.</param>
/// <param name="artistName">Artist name filter.</param>
/// <param name="albumTitle">Album title filter.</param>
/// <param name="search">Free-text search filter.</param>
/// <returns>Matching audio files.</returns>
public IEnumerable<AudioFile> SelectAudioFiles(AudioFileFormat format, string artistName, string albumTitle, string search)
{
    IEnumerable<AudioFile> audioFiles = _gateway.SelectAudioFiles(format, artistName, albumTitle, search);
    return audioFiles;
}
/// <summary>
/// Returns the distinct album titles for the given format and artist,
/// delegating to the underlying data gateway.
/// </summary>
/// <param name="format">Audio file format to filter by.</param>
/// <param name="artistName">Artist name filter.</param>
/// <returns>Album titles keyed per the gateway's grouping.</returns>
public Dictionary<string, List<string>> SelectDistinctAlbumTitles(AudioFileFormat format, string artistName)
{
    Dictionary<string, List<string>> albumTitles = _gateway.SelectDistinctAlbumTitles(format, artistName);
    return albumTitles;
}
/// <summary>
/// Store audio conversations to a single file.
///
/// Unlike <see cref="SetUserMediaStorageDir"/>, which stores users' audio
/// streams in separate files, this method muxes the audio streams into a
/// single file.
///
/// The audio streams which should be muxed together are required to use the
/// same audio codec. In most cases this is the audio codec of the channel
/// where the user is currently participating (i.e. the audiocodec member of
/// BearWare.Channel).
///
/// If the user changes to a channel which uses a different audio codec, the
/// recording continues but is simply silent until the user again joins a
/// channel with the same audio codec as was used for initializing muxed
/// audio recording.
///
/// Calling this method will enable the CLIENT_MUX_AUDIOFILE flag from
/// GetFlags(). Call StopRecordingMuxedAudioFile() to stop recording. Note
/// that only one muxed audio recording can be active at the same time.
/// </summary>
/// <param name="lpAudioCodec">The audio codec which should be used as
/// reference for muxing users' audio streams. In most situations this is the
/// BearWare.AudioCodec of the current channel, i.e. GetMyChannelID().</param>
/// <param name="szAudioFileName">The file to store audio to, e.g.
/// C:\MyFiles\Conf.mp3.</param>
/// <param name="uAFF">The audio format which should be used in the recorded
/// file. The muxer will convert to this format.</param>
/// <seealso cref="SetUserMediaStorageDir"/>
public bool StartRecordingMuxedAudioFile(AudioCodec lpAudioCodec, string szAudioFileName, AudioFileFormat uAFF)
{
    // Thin wrapper over the native TeamTalk DLL entry point; the codec is
    // passed by reference as the native API requires.
    return TTDLL.TT_StartRecordingMuxedAudioFile(m_ttInst, ref lpAudioCodec, szAudioFileName, uAFF);
}
/// <summary>
/// Writes a stream of bytes representing an audio file of the file format
/// indicated to the external file provided.
/// </summary>
/// <remarks>
/// Implementations are provided by concrete audio file writers; the set of
/// supported types is reported by <c>getAudioFileTypes()</c>.
/// </remarks>
/// <param name="stream">
/// the audio input stream containing audio data to be
/// written to the file
/// </param>
/// <param name="fileType">file type to be written to the file</param>
/// <param name="out">external file to which the file data should be written</param>
/// <returns>the number of bytes written to the file</returns>
/// <exception cref="System.IO.IOException">if an I/O exception occurs</exception>
/// <exception cref="System.ArgumentException">
/// if the file format is not supported by
/// the system
/// </exception>
/// <seealso cref="isFileTypeSupported(Type)">isFileTypeSupported(Type)</seealso>
/// <seealso cref="getAudioFileTypes()">getAudioFileTypes()</seealso>
public abstract int write(AudioInputStream stream, AudioFileFormat.Type fileType, java.io.File @out);
/// <summary>Constructs an audio file format object.</summary>
/// <remarks>
/// This protected constructor is intended for use by providers of file-reading
/// services when returning information about an audio file or about supported
/// audio file formats. All other constructors in this class delegate to it.
/// </remarks>
/// <param name="type">the type of the audio file</param>
/// <param name="byteLength">the length of the file in bytes, or <code>AudioSystem.NOT_SPECIFIED</code>
/// </param>
/// <param name="format">the format of the audio data contained in the file</param>
/// <param name="frameLength">the audio data length in sample frames, or <code>AudioSystem.NOT_SPECIFIED</code>
/// </param>
/// <seealso cref="getType()">getType()</seealso>
protected AudioFileFormat(AudioFileFormat.Type type, int byteLength, AudioFormat format, int frameLength)
{
    // INSTANCE VARIABLES
    this.type = type;
    this.byteLength = byteLength;
    this.format = format;
    this.frameLength = frameLength;
    // No properties map by default; the four-argument public constructor
    // supplies one when needed.
    this.propertiesField = null;
}
/// <summary>
/// Sets the initial properties and reads initial metadata.
/// </summary>
/// <param name="filePath">Audio file path</param>
/// <param name="id">Unique identifier for database (if needed)</param>
/// <param name="readMetadata">If true, the metadata will be refreshed by
/// reading the audio file metadata (ex: ID3 tags)</param>
private void Initialize(string filePath, Guid id, bool readMetadata)
{
    // Set properties
    this.filePath = filePath;
    this.id = id;

    // Set file type based on the file extension. Use the invariant culture:
    // ToUpper() honours the current culture and mis-maps 'i' in e.g. the
    // Turkish locale, which would break extensions such as ".aiff".
    string fileExtension = Path.GetExtension(filePath).ToUpperInvariant().TrimStart('.');
    if (fileExtension == "M4A" || fileExtension == "MP4" || fileExtension == "AAC")
    {
        // The format can change even though the file extensions are the same
        fileType = AudioFileFormat.AAC;
    }
    else
    {
        // Get format by file extension. On parse failure, fall back to
        // Unknown explicitly: TryParse sets its out parameter to the enum's
        // zero value on failure, which is not guaranteed to be Unknown.
        AudioFileFormat audioFileFormat;
        if (!Enum.TryParse<AudioFileFormat>(fileExtension, out audioFileFormat))
            audioFileFormat = AudioFileFormat.Unknown;
        fileType = audioFileFormat;
    }

    // Check if the metadata needs to be fetched
    if (readMetadata)
        RefreshMetadata();
}
/// <summary>
/// Builds a mapping between an audio file format and its file extensions.
/// </summary>
/// <param name="format">Audio file format</param>
/// <param name="extensions">List of extensions (including the dot character)</param>
public AudioFileFormatExtension(AudioFileFormat format, List<string> extensions)
{
    // Assignments are independent; store the extension list first.
    Extensions = extensions;
    Format = format;
}