void AddToClipDictionary (AudioFile file) { if (!clipToFileDictionary.ContainsKey(file.Clip)) { clipToFileDictionary.Add(file.Clip, file); } }
void PrepareAudioQueue(MonoTouch.CoreFoundation.CFUrl url) { AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription() { SampleRate = _samplingRate, Format = AudioFormatType.LinearPCM, FormatFlags = AudioFormatFlags.LinearPCMIsSignedInteger | AudioFormatFlags.LinearPCMIsBigEndian | AudioFormatFlags.LinearPCMIsPacked, FramesPerPacket = 1, ChannelsPerFrame = 1, // mono (single channel) BitsPerChannel = 16, // 16-bit BytesPerPacket = 2, BytesPerFrame = 2, Reserved = 0 }; _audioFile = AudioFile.Create(url, AudioFileType.AIFF, audioFormat, AudioFileFlags.EraseFlags); _queue = new InputAudioQueue(audioFormat); _queue.InputCompleted += new EventHandler<InputCompletedEventArgs>(_queue_InputCompleted); _startingPacketCount = 0; _numPacketsToWrite = 1024; _bufferByteSize = (int)(_numPacketsToWrite * audioFormat.BytesPerPacket); // prepare the queue buffers IntPtr bufferPtr; for (int index = 0; index < 3; index++) { //_queue.AllocateBuffer(_bufferByteSize, out bufferPtr); _queue.AllocateBufferWithPacketDescriptors(_bufferByteSize, _numPacketsToWrite, out bufferPtr); _queue.EnqueueBuffer(bufferPtr, _bufferByteSize, null); } }
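// Illustrative usage sketch (not from the original source): after PrepareAudioQueue has created the AIFF file and primed its three buffers, recording is typically driven by starting and stopping the InputAudioQueue. The _queue, _audioFile and _isRecording fields are assumed from the surrounding snippets.
void StartRecording(MonoTouch.CoreFoundation.CFUrl url) { PrepareAudioQueue(url); _isRecording = true; _queue.Start(); }
void StopRecording() { _isRecording = false; _queue.Stop(true); /* stop immediately; final cleanup of _queue and _audioFile is left to Dispose() */ }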
void prepareAudioQueue() { _audioFile = AudioFile.Open(_url, AudioFilePermission.Read, AudioFileType.AIFF); // Getting AudioStreamBasicDescription var audioFormat = _audioFile.StreamBasicDescription; // Creating an audio output queue object instance _audioQueue = new OutputAudioQueue(audioFormat); _audioQueue.OutputCompleted += new EventHandler<OutputCompletedEventArgs>(_audioQueue_OutputCompleted); // Getting packet size int maxPacketSize = _audioFile.MaximumPacketSize; _startingPacketCount = 0; _numPacketsToRead = 1024; _bufferByteSize = _numPacketsToRead * maxPacketSize; // enqueue buffers IntPtr bufferPtr; for (int index = 0; index < 3; index++) { _audioQueue.AllocateBuffer(_bufferByteSize, out bufferPtr); outputCallback(bufferPtr); } _isPrepared = true; }
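// Illustrative usage sketch (not from the original source): a matching play method would make sure the output queue is prepared and then start it; subsequent packet reads happen in the OutputCompleted handler. The _isPrepared and _audioQueue fields are assumed from the snippet above.
void play() { if (!_isPrepared) { prepareAudioQueue(); } _audioQueue.Start(); }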
static void Main(string[] args) { AudioFile file = new AudioFile(); file.Stop(); // Calls the IPlayable version ((IRecordable)file).Stop(); // Calls the IRecordable version Console.ReadKey(); }
public AUScheduledAudioFileRegion (AudioFile audioFile, AUScheduledAudioFileRegionCompletionHandler completionHandler = null) { if (audioFile == null) throw new ArgumentNullException (nameof (audioFile)); AudioFile = audioFile; this.completionHandler = completionHandler; }
public void Dispose() { foreach(var instance in fireAndForgetQueue) instance.Dispose(); fireAndForgetQueue.Clear(); audioFile.Dispose(); audioFile = null; }
public void Play (AudioFile file) { AudioSource source = getChannel(file.Channel); source.clip = file.Clip; source.loop = file.Loop; source.Play(); }
public static void Main(string [] args) { string testDir = args.Length > 0 ? args[0] : "../tests/samples"; foreach(string file in Directory.GetFiles(testDir)) { // try { AudioFile af = new AudioFile(file); Console.WriteLine(af); // } catch(Exception) {} } }
public void Stop (AudioFile file) { if (channelExists(file.Channel)) { AudioSource source = getChannel(file.Channel); if (source.clip == _channels[file.Channel]) { source.Stop(); } } }
public void Initialize(AudioFile file, AudioWave wave) { _file = file; _wave = wave; _format = new WaveFormat(_wave.SamplesPerSecond, 16, 1); _lastBlock = -1; _looped = false; _state = new DviAdpcmDecoder.AdpcmState(); _leftOverBuffer = null; }
internal SoundEffect(string assetName, bool isMusic) { // use of CFUrl.FromFile is necessary in case assetName contains spaces (which must be url-encoded) audioFile = AudioFile.Open(CFUrl.FromFile(assetName), AudioFilePermission.Read, 0); if(audioFile == null) throw new Content.ContentLoadException("Could not open sound effect " + assetName); description = audioFile.StreamBasicDescription; DeriveBufferSize(0.5); isVBR = (description.BytesPerPacket == 0 || description.FramesPerPacket == 0); if(!isMusic) firstInstance = new SoundEffectInstance(this, false); }
public static void Export(AudioFile file, AudioWave wave, Stream outStream) { WaveHeader header = new WaveHeader(); // Skip the header outStream.Seek(header.HeaderSize, SeekOrigin.Begin); // Write the data file.SoundBank.ExportAsPCM(wave.Index, file.Stream, outStream); // Create header and write it outStream.Seek(0, SeekOrigin.Begin); header.FileSize = (int)outStream.Length; header.SamplesPerSecond = wave.SamplesPerSecond; header.Write(new BinaryWriter(outStream)); }
public static void ExportMultichannel(AudioFile file, Stream outStream) { WaveHeader header = new WaveHeader(true); // Skip the header outStream.Seek(header.HeaderSize, SeekOrigin.Begin); // Write the data IMultichannelSound sound = file.SoundBank as IMultichannelSound; sound.ExportMultichannelAsPCM(file.Stream, outStream); // Create header and write it outStream.Seek(0, SeekOrigin.Begin); header.FileSize = (int)outStream.Length; header.SamplesPerSecond = sound.CommonSamplesPerSecond; header.ChannelMask = sound.ChannelMask; header.Write(new BinaryWriter(outStream)); }
public Control GetView(RageLib.FileSystem.Common.File file) { var data = file.GetData(); var ms = new MemoryStream(data); var audioFile = new AudioFile(); try { audioFile.Open(ms); } catch { ms.Close(); throw; } var view = new AudioView(); var controller = new AudioViewController(view); controller.AudioFile = audioFile; return view; }
public static List<AudioFile> GetDuplicates2(List<AudioFile> files, AudioFile file, float level) { //int[] fingerprint = await GetFingerprintAsync(file.FullPath, 0); //var candidates = await Task.Run(() => // files.AsParallel().Where(af => af.SimilarityGroupId == 0 && af != file && // GetFingerprintAsync(af.FullPath, 0).Result.Intersect(fingerprint).FirstOrDefault() != 0).ToList()); var candidates = files.Where(af => af.SimilarityGroupId == 0 && af != file).ToList(); if (candidates.Count == 0) return candidates; var dups = new List<AudioFile>(); var matrix = new float[candidates.Count]; Parallel.For(0, matrix.Length, i => { matrix[i] = ChromaprintFingerprinter.MatchFingerprints(candidates[i].Fingerprint, file.Fingerprint, -1); }); for (int i = 0; i < matrix.Length; i++) { if (matrix[i] >= level) dups.Add(candidates[i]); } return dups; }
public AudioFile GetRandomFile() { checkFileQueue(); return(currentFile = fileQueue.Cycle()); }
public void Init() { audio_file = new AudioFile("samples/sample_v2_only.mp3"); }
private string MapAudioFileEnumToFilePath(AudioFile audioFile) { switch (audioFile) { case AudioFile.MainTheme: { return("Main_Game_MusicRedux"); } case AudioFile.DoorOpening: { return("Door Opening"); } case AudioFile.FootstepsLinolium: { return("Footsteps (Linolium)"); } case AudioFile.FootstepsCarpet: { return("Footsteps(carpet)"); } case AudioFile.FootstepsWood: { return("Footsteps(Wood)"); } case AudioFile.Kick: { return("Kick(player)"); } case AudioFile.LiftingGrunt: { return("Lifting Grunt(player)"); } case AudioFile.MovingHeavyObject: { return("Moving Heavy Object"); } case AudioFile.PickupVase: { return("Pickup(Vase)"); } case AudioFile.Scraping: { return("Scraping Sound"); } case AudioFile.Dog: { return("Dog"); } } return(""); }
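// Illustrative usage sketch (not from the original source): the mapped name is typically passed to Unity's Resources.Load to obtain the clip; the "Audio/" folder prefix here is an assumption, not taken from the original code.
AudioClip LoadClip(AudioFile audioFile) { return Resources.Load<AudioClip>("Audio/" + MapAudioFileEnumToFilePath(audioFile)); }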
public void Dispose() { _isRecording = false; _queue.Stop(true); _audioFile.Dispose(); _queue.Dispose(); _queue = null; _audioFile = null; }
// Some audio formats have a magic cookie associated with them which is required to decompress audio data // When converting audio, a magic cookie may be returned by the Audio Converter so that it may be stored along with // the output data -- This is done so that it may then be passed back to the Audio Converter at a later time as required static void WriteCookie (AudioConverter converter, AudioFile destinationFile) { var cookie = converter.CompressionMagicCookie; if (cookie != null && cookie.Length != 0) { destinationFile.MagicCookie = cookie; } }
private void AudioFinished(AudioFile audioFile) { this.currentlyPlaying.Remove(audioFile); }
private async static Task ProcessFiles() { //0. Constants const string AccountName = "jmmmediaservices"; const string AccountKey = "KOs3A87L2UydtogQDjM1/FF0yuTaqsqBViFJG+WoKj8="; //1. Install Nuget packages //1.1 Nuget: Install-Package windowsazure.mediaservices ////2. Get AMS context //var context = new CloudMediaContext(AccountName, AccountKey); //foreach (var asset in context.Assets.ToList()) //{ // Console.WriteLine(asset.Name); //} //Console.ReadLine(); //1. From TTML to JSON var storageAccount = CloudStorageAccount.Parse("DefaultEndpointsProtocol=https;AccountName=jmmmediaservicesstorage;AccountKey=VpmaM9avmqz7zYr/CCtMUndgZserSIE26sYRS/IqInzlPC03KW9BR+DHIOGV/AOPYxZMnTMSOtf1jOojs8OS/Q=="); CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient(); //clean collection DocumentDBRepository <AudioFile> .DeleteCollection(); var containers = blobClient.ListContainers(); foreach (var container in containers) { var blobs = blobClient.GetContainerReference(container.Name).ListBlobs().OfType <CloudBlob>().Where(b => b.Name.EndsWith(".ttml")).OrderByDescending(b => b.Properties.Length); foreach (var blob in blobs) { var ttml = blobClient.GetBlobReferenceFromServer(blob.Uri); using (var stream = new MemoryStream()) { ttml.DownloadToStream(stream); stream.Position = 0; var json = Helper.ParseAudioTranscript(stream); var audioFile = new AudioFile { Title = blob.Uri.Segments.Last().Replace(".ttml", ""), Url = blob.Uri.ToString().Replace(".ttml", ""), AudioTranscripts = json }; var stringJson = Helper.JsonSerializer(json); Console.WriteLine(stringJson); Console.WriteLine(); Console.WriteLine("AudioFile object"); Console.WriteLine(Helper.JsonSerializer(audioFile)); //2. From JSON to DocumentDB await DocumentDBRepository <AudioFile> .CreateItemAsync(audioFile); var jsonContainer = blobClient.GetContainerReference(ttml.Container.Name); var jsonFile = jsonContainer.GetBlockBlobReference(ttml.Name.Replace(".ttml", ".json")); jsonFile.UploadText(stringJson); } } } Console.WriteLine(); Console.WriteLine("Done!"); Console.ReadLine(); }
private void DownloadAudioFile(AudioFile audioFile) { var url = new Uri(new Uri(_baseUrl), string.Format("/api/audiofile/{0}", audioFile.Id.ToString())); string localFilePath = GetLibraryLocalPath(audioFile); //Tracing.Log("SyncClientService - DownloadAudioFile - folderPath: {0} fileName: {1} localFilePath: {2}", folderPath, fileName, localFilePath); _lastBytesReceived = 0; if (OnDownloadAudioFileStarted != null) OnDownloadAudioFileStarted(new SyncClientDownloadAudioFileProgressEntity(){ Status = "Downloading files...", PercentageDone = ((float)_filesDownloaded / (float)_audioFiles.Count()) * 100f, FilesDownloaded = _filesDownloaded, DownloadFileName = Path.GetFileName(audioFile.FilePath), TotalFiles = _audioFiles.Count(), DownloadSpeed = GetDownloadSpeed(), Errors = _errorCount, Log = string.Empty }); _stopwatch.Start(); //_webClient.DownloadFileAsync(url, localFilePath); }
public AudioInBlock(AudioFile t, string b) { track = t; blockID = b; }
public void AddAudioFile(AudioFile audioFile) { _context.AudioFiles.Add(audioFile); }
private OperationResult <AudioMetaData> MetaDataForFileFromIdSharp(string fileName) { var sw = new Stopwatch(); sw.Start(); var result = new AudioMetaData(); string message = null; var isSuccess = false; try { result.Filename = fileName; var audioFile = AudioFile.Create(fileName, true); if (ID3v2Tag.DoesTagExist(fileName)) { IID3v2Tag id3v2 = new ID3v2Tag(fileName); result.Artist = id3v2.AlbumArtist ?? id3v2.Artist; result.ArtistRaw = id3v2.AlbumArtist ?? id3v2.Artist; result.AudioBitrate = (int?)audioFile.Bitrate; result.AudioChannels = audioFile.Channels; result.AudioSampleRate = (int)audioFile.Bitrate; result.Comments = id3v2.CommentsList != null ? string.Join("|", id3v2.CommentsList?.Select(x => x.Value)) : null; result.Disc = ParseDiscNumber(id3v2.DiscNumber); result.DiscSubTitle = id3v2.SetSubtitle; result.Genres = SplitGenre(id3v2.Genre); result.Release = id3v2.Album; result.TrackArtist = id3v2.OriginalArtist ?? id3v2.Artist ?? id3v2.AlbumArtist; result.TrackArtistRaw = id3v2.OriginalArtist ?? id3v2.Artist ?? id3v2.AlbumArtist; result.Images = id3v2.PictureList?.Select(x => new AudioMetaDataImage { Data = x.PictureData, Description = x.Description, MimeType = x.MimeType, Type = (AudioMetaDataImageType)x.PictureType }).ToArray(); result.Time = audioFile.TotalSeconds > 0 ? ((decimal?)audioFile.TotalSeconds).ToTimeSpan() : null; result.Title = id3v2.Title.ToTitleCase(false); result.TrackNumber = ParseTrackNumber(id3v2.TrackNumber); result.TotalTrackNumbers = ParseTotalTrackNumber(id3v2.TrackNumber); var year = id3v2.Year ?? id3v2.RecordingTimestamp ?? id3v2.ReleaseTimestamp ?? id3v2.OriginalReleaseTimestamp; result.Year = ParseYear(year); isSuccess = result.IsValid; } if (!isSuccess) { if (ID3v1Tag.DoesTagExist(fileName)) { IID3v1Tag id3v1 = new ID3v1Tag(fileName); result.Release = id3v1.Album; result.Artist = id3v1.Artist; result.ArtistRaw = id3v1.Artist; result.AudioBitrate = (int?)audioFile.Bitrate; result.AudioChannels = audioFile.Channels; result.AudioSampleRate = (int)audioFile.Bitrate; result.Time = audioFile.TotalSeconds > 0 ? ((decimal?)audioFile.TotalSeconds).ToTimeSpan() : null; result.Title = id3v1.Title.ToTitleCase(false); result.TrackNumber = SafeParser.ToNumber <short?>(id3v1.TrackNumber); var date = SafeParser.ToDateTime(id3v1.Year); result.Year = date?.Year ?? SafeParser.ToNumber <int?>(id3v1.Year); isSuccess = result.IsValid; } } } catch (Exception ex) { message = ex.ToString(); Logger.LogError(ex, "MetaDataForFileFromTagLib Filename [" + fileName + "] Error [" + ex.Serialize() + "]"); } sw.Stop(); return(new OperationResult <AudioMetaData>(message) { IsSuccess = isSuccess, OperationTime = sw.ElapsedMilliseconds, Data = result }); }
// Write output channel layout to destination file static void WriteDestinationChannelLayout(AudioConverter converter, AudioFile sourceFile, AudioFile destinationFile) { // if the Audio Converter doesn't have a layout see if the input file does var layout = converter.OutputChannelLayout ?? sourceFile.ChannelLayout; if (layout != null) { // TODO: throw new NotImplementedException(); //destinationFile.ChannelLayout = layout; } }
bool DoConvertFile(CFUrl sourceURL, NSUrl destinationURL, AudioFormatType outputFormatType, double outputSampleRate) { // in this sample we should never be on the main thread here Debug.Assert(!NSThread.IsMain); // transition thread state to State::Running before continuing AppDelegate.ThreadStateSetRunning(); Debug.WriteLine("DoConvertFile"); // get the source file AudioFile sourceFile = AudioFile.Open(sourceURL, AudioFilePermission.Read); var srcFormat = (AudioStreamBasicDescription)sourceFile.DataFormat; var dstFormat = new AudioStreamBasicDescription(); // setup the output file format dstFormat.SampleRate = (outputSampleRate == 0 ? srcFormat.SampleRate : outputSampleRate); // set sample rate if (outputFormatType == AudioFormatType.LinearPCM) { // if the output format is PCM create a 16-bit int PCM file format description as an example dstFormat.Format = AudioFormatType.LinearPCM; dstFormat.ChannelsPerFrame = srcFormat.ChannelsPerFrame; dstFormat.BitsPerChannel = 16; dstFormat.BytesPerPacket = dstFormat.BytesPerFrame = 2 * dstFormat.ChannelsPerFrame; dstFormat.FramesPerPacket = 1; dstFormat.FormatFlags = AudioFormatFlags.LinearPCMIsPacked | AudioFormatFlags.LinearPCMIsSignedInteger; } else { // compressed format - need to set at least format, sample rate and channel fields for kAudioFormatProperty_FormatInfo dstFormat.Format = outputFormatType; dstFormat.ChannelsPerFrame = (outputFormatType == AudioFormatType.iLBC ? 1 : srcFormat.ChannelsPerFrame); // for iLBC num channels must be 1 // use AudioFormat API to fill out the rest of the description AudioFormatError afe = AudioStreamBasicDescription.GetFormatInfo(ref dstFormat); if (afe != AudioFormatError.None) { Debug.Print("Cannot create destination format {0:x}", afe); AppDelegate.ThreadStateSetDone(); return(false); } } // create the AudioConverter AudioConverterError ce; var converter = AudioConverter.Create(srcFormat, dstFormat, out ce); Debug.Assert(ce == AudioConverterError.None); converter.InputData += EncoderDataProc; // if the source has a cookie, get it and set it on the Audio Converter ReadCookie(sourceFile, converter); // get the actual formats back from the Audio Converter srcFormat = converter.CurrentInputStreamDescription; dstFormat = converter.CurrentOutputStreamDescription; // if encoding to AAC set the bitrate to 192k which is a nice value for this demo // kAudioConverterEncodeBitRate is a UInt32 value containing the number of bits per second to aim for when encoding data if (dstFormat.Format == AudioFormatType.MPEG4AAC) { uint outputBitRate = 192000; // 192k // ignore errors as setting may be invalid depending on format specifics such as samplerate try { converter.EncodeBitRate = outputBitRate; } catch { } // get it back and print it out outputBitRate = converter.EncodeBitRate; Debug.Print("AAC Encode Bitrate: {0}", outputBitRate); } // can the Audio Converter resume conversion after an interruption? // this property may be queried at any time after construction of the Audio Converter after setting its output format // there's no clear reason to prefer construction time, interruption time, or potential resumption time but we prefer // construction time since it means less code to execute during or after interruption time bool canResumeFromInterruption; try { canResumeFromInterruption = converter.CanResumeFromInterruption; Debug.Print("Audio Converter {0} continue after interruption!", canResumeFromInterruption ? 
"CAN" : "CANNOT"); } catch (Exception e) { // if the property is unimplemented (kAudioConverterErr_PropertyNotSupported, or paramErr returned in the case of PCM), // then the codec being used is not a hardware codec so we're not concerned about codec state // we are always going to be able to resume conversion after an interruption canResumeFromInterruption = false; Debug.Print("CanResumeFromInterruption: {0}", e.Message); } // create the destination file var destinationFile = AudioFile.Create(destinationURL, AudioFileType.CAF, dstFormat, AudioFileFlags.EraseFlags); // set up source buffers and data proc info struct afio = new AudioFileIO(32 * 1024); // 32Kb afio.SourceFile = sourceFile; afio.SrcFormat = srcFormat; if (srcFormat.BytesPerPacket == 0) { // if the source format is VBR, we need to get the maximum packet size // use kAudioFilePropertyPacketSizeUpperBound which returns the theoretical maximum packet size // in the file (without actually scanning the whole file to find the largest packet, // as may happen with kAudioFilePropertyMaximumPacketSize) afio.SrcSizePerPacket = sourceFile.PacketSizeUpperBound; // how many packets can we read for our buffer size? afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket; // allocate memory for the PacketDescription structures describing the layout of each packet afio.PacketDescriptions = new AudioStreamPacketDescription [afio.NumPacketsPerRead]; } else { // CBR source format afio.SrcSizePerPacket = srcFormat.BytesPerPacket; afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket; } // set up output buffers int outputSizePerPacket = dstFormat.BytesPerPacket; // this will be non-zero if the format is CBR const int theOutputBufSize = 32 * 1024; // 32Kb var outputBuffer = Marshal.AllocHGlobal(theOutputBufSize); AudioStreamPacketDescription[] outputPacketDescriptions = null; if (outputSizePerPacket == 0) { // if the destination format is VBR, we need to get max size per packet from the converter outputSizePerPacket = (int)converter.MaximumOutputPacketSize; // allocate memory for the PacketDescription structures describing the layout of each packet outputPacketDescriptions = new AudioStreamPacketDescription [theOutputBufSize / outputSizePerPacket]; } int numOutputPackets = theOutputBufSize / outputSizePerPacket; // if the destination format has a cookie, get it and set it on the output file WriteCookie(converter, destinationFile); // write destination channel layout if (srcFormat.ChannelsPerFrame > 2) { WriteDestinationChannelLayout(converter, sourceFile, destinationFile); } long totalOutputFrames = 0; // used for debugging long outputFilePos = 0; AudioBuffers fillBufList = new AudioBuffers(1); bool error = false; // loop to convert data Debug.WriteLine("Converting..."); while (true) { // set up output buffer list fillBufList[0] = new AudioBuffer() { NumberChannels = dstFormat.ChannelsPerFrame, DataByteSize = theOutputBufSize, Data = outputBuffer }; // this will block if we're interrupted var wasInterrupted = AppDelegate.ThreadStatePausedCheck(); if (wasInterrupted && !canResumeFromInterruption) { // this is our interruption termination condition // an interruption has occured but the Audio Converter cannot continue Debug.WriteLine("Cannot resume from interruption"); error = true; break; } // convert data int ioOutputDataPackets = numOutputPackets; var fe = converter.FillComplexBuffer(ref ioOutputDataPackets, fillBufList, outputPacketDescriptions); // if interrupted in the process of the conversion call, we must handle the 
error appropriately if (fe != AudioConverterError.None) { Debug.Print("FillComplexBuffer: {0}", fe); error = true; break; } if (ioOutputDataPackets == 0) { // this is the EOF conditon break; } // write to output file var inNumBytes = fillBufList[0].DataByteSize; var we = destinationFile.WritePackets(false, inNumBytes, outputPacketDescriptions, outputFilePos, ref ioOutputDataPackets, outputBuffer); if (we != 0) { Debug.Print("WritePackets: {0}", we); error = true; break; } // advance output file packet position outputFilePos += ioOutputDataPackets; if (dstFormat.FramesPerPacket != 0) { // the format has constant frames per packet totalOutputFrames += (ioOutputDataPackets * dstFormat.FramesPerPacket); } else { // variable frames per packet require doing this for each packet (adding up the number of sample frames of data in each packet) for (var i = 0; i < ioOutputDataPackets; ++i) { totalOutputFrames += outputPacketDescriptions[i].VariableFramesInPacket; } } } Marshal.FreeHGlobal(outputBuffer); if (!error) { // write out any of the leading and trailing frames for compressed formats only if (dstFormat.BitsPerChannel == 0) { // our output frame count should jive with Debug.Print("Total number of output frames counted: {0}", totalOutputFrames); WritePacketTableInfo(converter, destinationFile); } // write the cookie again - sometimes codecs will update cookies at the end of a conversion WriteCookie(converter, destinationFile); } converter.Dispose(); destinationFile.Dispose(); sourceFile.Dispose(); // transition thread state to State.Done before continuing AppDelegate.ThreadStateSetDone(); return(!error); }
public void Init() { audio_file = new AudioFile("samples/sample_both.mp3"); }
public IActionResult CreateAudioFile([FromBody] AudioFileForUploadDto audioFile) { if (audioFile.Description == audioFile.Filename) { ModelState.AddModelError("Description", "The provided description should be different from the file name."); } if (!ModelState.IsValid) { return(BadRequest(ModelState)); } var files = _audioFileInfoRepository.GetAudioFiles(); //var files = AudioFilesDataStore.Current.AudioFiles; // use if pulling data from in-memory datastore, AudioFilesDataStore.cs // TO-DO: check that user has chosen a valid audio file //if (audioFile format is bad) //{ // return BadRequest(); //} // demo purposes - id value determination can be improved var maxAudioFileId = _audioFileInfoRepository.GetAudioFiles().Max(f => f.Id); // needs improvement because persistent database should use the primary key to track and manage next ID value //var maxAudioFileId = AudioFilesDataStore.Current.AudioFiles.Max(f => f.Id); // use if pulling data from in-memory datastore, AudioFilesDataStore.cs var newAudioFile = new AudioFile() // was new AudioFileDto() { //Id = ++maxAudioFileId, Filename = audioFile.Filename, Description = audioFile.Description, CreatedOn = DateTime.Now }; //var newAudioFile = _mapper.Map<Entities.AudioFile>(audioFile); // use this line if using AutoMapper to avoid manually mapping the properties... see corresponding Pluralsight course for implementation details https://app.pluralsight.com/course-player?clipId=c55b6c06-f014-4595-bc29-603e39e3ae4e _audioFileInfoRepository.AddAudioFile(newAudioFile); _audioFileInfoRepository.Save(); //files.Add(newAudioFile); // old method to add the new audio file to the in-memory datastore // need to map the saved entity back to the dto so we can return the newly-saved audio file (not the entity) along with the "201 Created" status code var createdAudioFileToReturn = new AudioFileDto() { Id = newAudioFile.Id, Filename = newAudioFile.Filename, Description = newAudioFile.Description, CreatedOn = newAudioFile.CreatedOn }; // var createdAudioFileToReturn = _mapper.Map<Models.AudioFileDto>(newAudioFile); // use this line if using AutoMapper to avoid manually mapping the properties... see corresponding Pluralsight course for implementation details https://app.pluralsight.com/course-player?clipId=c55b6c06-f014-4595-bc29-603e39e3ae4e // send mail notifying of audio file upload attempt for transcription _mailService.Send($"Audio file uploaded for transcription - {DateTime.Now.ToShortDateString()} {DateTime.Now.ToLongTimeString()}", $"Audio file was uploaded for text transcription on {DateTime.Now.ToShortDateString()} at {DateTime.Now.ToLongTimeString()}.\r\n\r\n" + $"File details:\r\n" + $" Id: {newAudioFile.Id.ToString()}\r\n" + $" Filename: {newAudioFile.Filename}\r\n" + $" Description: {newAudioFile.Description}\r\n" + $" Date Created: {newAudioFile.CreatedOn.ToShortDateString()} {newAudioFile.CreatedOn.ToLongTimeString()}"); return(CreatedAtRoute( "GetAudioFile", new { createdAudioFileToReturn.Id }, createdAudioFileToReturn)); //return CreatedAtRoute( // old method to return a "201 Created" status code // "GetAudioFile", // new { newAudioFile.Id }, // newAudioFile); }
static void UpdateFormatInfo(UILabel label, CFUrl fileURL) { UpdateFormatInfo(label, AudioFile.Open(fileURL, AudioFilePermission.Read), fileURL.FileSystemPath); }
public AudioClip GetClip(AudioFile file) { return(GetClip(getFullAudioFileName(file))); }
static void UpdateFormatInfo (UILabel label, AudioFile fileID, string fileName) { var asbd = fileID.DataFormat.Value; label.Text = string.Format ("{0} {1} {2} Hz ({3} ch.)", fileName, asbd.Format, asbd.SampleRate, asbd.ChannelsPerFrame); }
public ResourceRequest GetClipAsync(AudioFile file) { string fullFileName = getFullAudioFileName(file); return(Resources.LoadAsync <AudioClip>(Path.Combine(audioPath, fullFileName))); }
static void UpdateFormatInfo(UILabel label, AudioFile fileID, string fileName) { var asbd = fileID.DataFormat.Value; label.Text = string.Format("{0} {1} {2} Hz ({3} ch.)", fileName, asbd.Format, asbd.SampleRate, asbd.ChannelsPerFrame); }
public async Task <bool> AddItemAsync(AudioFile item) { var response = await _basePath.AppendPathSegments("api", "audios", "AddFileAsync").PostJsonAsync(item); return(response.IsSuccessStatusCode); }
private IEnumerator StartMethod(AudioFile audioFile, float clipLength) { yield return(new WaitForSeconds(clipLength)); AudioFinished(audioFile); }
public SoundView(DiagnosticViewContext context, AudioFile audioFile) : base(context) { _source = AddDisposable(context.Game.Audio.GetStream(audioFile.Entry)); }
void CheckMute(AudioFile file, AudioSource source) { source.mute = AudioUtil.IsMuted(file.TypeAsEnum); }
/// <summary> /// Initialize music collection database from path, intended to be called multiple times /// </summary> /// <param name="path"></param> /// <param name="rebuild"></param> public void InitializeFromPath(string path, bool rebuild) { ConcurrentQueue <string> directories = new ConcurrentQueue <string>(Directory.GetDirectories(path)); Parallel.ForEach(directories, (current) => { Directory.GetDirectories(current).ToList().ForEach(f => directories.Enqueue(f)); var frag = Directory.GetFiles(current, ".2CIFragment"); if (frag.Length > 0) { throw new NotImplementedException("load fragment and index if not indexed"); } else if (rebuild == true) { // create a fragment for each sub directory DBFragment fragment = new DBFragment(current); ConcurrentQueue <string> ContentFiles = new ConcurrentQueue <string>(Directory.GetFiles(current)); ConcurrentQueue <IAssociatedFile> Pending = new ConcurrentQueue <IAssociatedFile>(); foreach (var f in ContentFiles) { using (FileStream fs = new FileStream(f, FileMode.Open, FileAccess.Read, FileShare.None, bufferSize: 4096, useAsync: true)) { var CurrentFileType = GetFileType(fs, f); switch (CurrentFileType) { case Assets.AssetFileType.UNKNOWN: continue; // Other Asset files case Assets.AssetFileType.JPEGRAW: case Assets.AssetFileType.JPEGEXIF: case Assets.AssetFileType.JPEGJFIF: case Assets.AssetFileType.PNG: if (fragment == null) { // Lets avoid commiting database fragments for files that aren't necesarrily associated with any music // later when (if) the fragment gets created it can revist the Skipped buffer Pending.Enqueue(new AssociatedImageFile(f, CurrentFileType, fragment)); continue; } // Add these files which may likely be album art or playlists to the assets for the current fragment fragment.AssociatedFiles.Add(new AssociatedImageFile(f, CurrentFileType, fragment)); break; // Audio files case Assets.AssetFileType.MP3ID3v1ORNOTAG: case Assets.AssetFileType.MP3WITHID3v2: case Assets.AssetFileType.AAC: case Assets.AssetFileType.FLAC: case Assets.AssetFileType.APE: case Assets.AssetFileType.WAVRIFF: case Assets.AssetFileType.WAVWAVE: case Assets.AssetFileType.WMA: //fragment = (fragment != null) ? fragment : new DBFragment(current); var af = new AudioFile(f, CurrentFileType, fragment); fragment.AudioFiles.Add(af); foreach (var s in Pending) { fragment.AssociatedFiles.Add(s); } ParseAudioFileMetaData(fs, af); break; } } } // We won't index / save fragments that don't have any music associated with them if (fragment != null && fragment.AudioFiles.Count() > 0) { FragmentIndex.fragments.Add(fragment); } fragment = null; } }); }
public void Play(PlaylistFile playlist = null, AudioFile audio = null) { //EndsWith(".mp3") if (audio != null && audio.SourceString.Split("//")[1].Split("/")[0].EndsWith(".mp3")) { Toast.MakeText(Application.Context, "The track URI arrived with an error. Cannot play it.", ToastLength.Long).Show(); return; } if (playlist == null) { if (currentPlaylist == null && currentTrack == null) { return; } player.Play(); } else { if (playlist.Id != currentPlaylistId) { var audioPlaylist = new AudioPlaylist(playlist, audio, StaticContentService.RepeatPlaylist, StaticContentService.RepeatTrack); currentPlaylist = audioPlaylist; currentTrack = audio; currentPlaylist.OnCurrentItemChanged += CurrentPlaylistOnOnCurrentItemChanged; CurrentAudioChanged?.Invoke(this, audio); } else { currentPlaylist.SetCurrentTrack(audio); currentTrack = audio; CurrentAudioChanged?.Invoke(this, audio); } //var playerNotificationManager = (CrossMediaManager.Android.NotificationManager as MediaManager.Platforms.Android.Notifications.NotificationManager).PlayerNotificationManager; TaskService.RunOnUI(async() => { Toast.MakeText(Application.Context, "[Debug] Starting playback...", ToastLength.Long).Show(); Toast.MakeText(Application.Context, $"[Debug] URI: {currentTrack.SourceString}", ToastLength.Long).Show(); var media = await player.Play(currentTrack.SourceString); media.Title = currentTrack.Title; media.AlbumArtUri = ""; // Without this, tracks with broken links would crash the player with a fatal error media.Artist = currentTrack.Artist; media.AlbumArtist = currentTrack.Artist; media.ArtUri = null; if (currentTrack.Cover != "placeholder") { media.ArtUri = currentTrack.Cover; } CrossMediaManager.Android.NotificationManager.UpdateNotification(); }); //player.MediaQueue.Current.Title = audio.Title; //player.MediaQueue.Current.Artist = audio.Title; //player.MediaQueue.Current.AlbumArtUri = audio.Cover; //CrossMediaManager.Android. //player.NotificationManager = null; } }
// Sets the packet table containing information about the number of valid frames in a file and where they begin and end // for the file types that support this information. // Calling this function makes sure we write out the priming and remainder details to the destination file static void WritePacketTableInfo (AudioConverter converter, AudioFile destinationFile) { if (!destinationFile.IsPropertyWritable (AudioFileProperty.PacketTableInfo)) return; // retrieve the leadingFrames and trailingFrames information from the converter, AudioConverterPrimeInfo primeInfo = converter.PrimeInfo; // we have some priming information to write out to the destination file // The total number of packets in the file times the frames per packet (or counting each packet's // frames individually for a variable frames per packet format) minus mPrimingFrames, minus // mRemainderFrames, should equal mNumberValidFrames. AudioFilePacketTableInfo? pti_n = destinationFile.PacketTableInfo; if (pti_n == null) return; AudioFilePacketTableInfo pti = pti_n.Value; // there's priming to write out to the file // get the total number of frames from the output file long totalFrames = pti.ValidFrames + pti.PrimingFrames + pti.RemainderFrames; Debug.WriteLine ("Total number of frames from output file: {0}", totalFrames); pti.PrimingFrames = primeInfo.LeadingFrames; pti.RemainderFrames = primeInfo.TrailingFrames; pti.ValidFrames = totalFrames - pti.PrimingFrames - pti.RemainderFrames; destinationFile.PacketTableInfo = pti; }
public static AudioClip GetClip (AudioFile file) { return GetClip(file.FileName); }
private async void Button_Click(object sender, RoutedEventArgs e) { var folder = ApplicationData.Current.LocalFolder; var audioFolder = await folder.GetFolderAsync("Audio"); var files = await audioFolder.GetFilesAsync(); foreach (var file in files) { if (file.Path.Contains(".mp3")) { Debug.WriteLine("Definitely a mp3: " + file.Path); AudioFile audioFile = new AudioFile(file.Path); txtStuff.Text = audioFile.Title; } else { Debug.WriteLine("Not mp3: " + file.Path); } } //CreateDummyFile(); //AudioFile audioFile = new AudioFile(); BackgroundAudioPlayer.Instance.Play(); }
public void Init() { audio_file = new AudioFile("samples/sample.flac"); }
public void AddFile(AudioFile file) { checkFileQueue(); fileQueue.Enqueue(file); }
protected void Page_Load(object sender, EventArgs e) { //LabelMain.Text = GetContent("Body"); LiteralPageTitle.Text = FormatPageTitle("Sermon Archive"); LabelRightSideBarArea.Text = "<span class=\"title\">Archives</span><br />"; TagList pageTagList = null; List<Document> SideBarList = null; List<Document> DocumentList = null; if (null == Session["EditRiverValleyMedia"]) { DocumentList = Cache.Get("cache.RiverValley.MultimediaFiles") as List<Document>; SideBarList = Cache.Get("cache.RiverValley.MultimediaSideBar") as List<Document>; pageTagList = Cache.Get("cache.RiverValley.MultimediaTagList") as TagList; } if ((null == DocumentList) || (null == SideBarList) || (null == pageTagList)) {//Means that there is no cache read everything from disk #region ReadLoop DocumentList = new List<Document>(); SideBarList = new List<Document>(); pageTagList = new TagList(); DirectoryInfo directoryInfo = new DirectoryInfo(Server.MapPath(Document.MULTIMEDIA_FOLDER)); List<FileInfo> multimediaFileList = new List<FileInfo>(); multimediaFileList.AddRange(directoryInfo.GetFiles("*.mp3")); multimediaFileList.AddRange(directoryInfo.GetFiles("*.wma")); multimediaFileList.AddRange(directoryInfo.GetFiles("*.aac")); multimediaFileList.AddRange(directoryInfo.GetFiles("*.ac3")); multimediaFileList.AddRange(directoryInfo.GetFiles("*.mp4")); multimediaFileList.AddRange(directoryInfo.GetFiles("*.wmv")); multimediaFileList.AddRange(directoryInfo.GetFiles("*.mpg")); Document doc; System.Collections.Hashtable ht2 = new System.Collections.Hashtable(); foreach (FileInfo file in multimediaFileList) { try { if ((file.Extension.ToLower() == ".mp4") || (file.Extension.ToLower() == ".wmv") || (file.Extension.ToLower() == ".mpg")) doc = new VideoFile(this, file.Name); else doc = new AudioFile(this, file.Name); foreach (Tag t in doc.Tags) { pageTagList.Add(t); } //doc.Link = Document.MULTIMEDIA_FOLDER + "/" + doc.Name; doc.Link = doc.Name; } catch { continue; } DocumentList.Add(doc); #region Fill RightSideBar if (null == SideBarList.Find(delegate(Document st) { return ((st.Dated.Year == doc.Dated.Year) && (st.Dated.Month == doc.Dated.Month)); })) { SideBarList.Add(doc); } #endregion } #endregion #region Sort //Sort by date - sort should always be done after all filters for performance DocumentList.Sort(delegate(Document f1, Document f2) { return DateTime.Compare(f2.Dated, f1.Dated); }); //Do the same for right side bar SideBarList.Sort(delegate(Document f1, Document f2) { return DateTime.Compare(f2.Dated, f1.Dated); }); //And left pageTagList.Sort(delegate(ListedTag t1, ListedTag t2) { return t1.Name.CompareTo(t2.Name); }); #endregion #region Insert Cache //Fill the cache with newly read info. 
Cache.Insert("cache.RiverValley.MultimediaFiles", DocumentList); Cache.Insert("cache.RiverValley.MultimediaSideBar", SideBarList); Cache.Insert("cache.RiverValley.MultimediaTagList", pageTagList); #endregion } #region Filter try { //Try and do all filters here at once for performance if (Request.QueryString["perma"] != null) { string permaFilter = Request.QueryString["perma"]; DocumentList = DocumentList.FindAll(delegate(Document d) { return ((d.Name.ToLower() == permaFilter.ToLower())); }); } else if (Request.QueryString["MonthFilter"] != null) { string MonthFilter = Request.QueryString["MonthFilter"]; if (MonthFilter.Length == 6) { int nYearFilter = Int32.Parse(MonthFilter.Substring(0, 4)); int nMonthFilter = Int32.Parse(MonthFilter.Substring(4, 2)); DocumentList = DocumentList.FindAll(delegate(Document d) { return ((d.Dated.Month == nMonthFilter) && (d.Dated.Year == nYearFilter)); }); } } else if (Request.QueryString["Tags"] != null) { string sTag = Request.QueryString["Tags"]; DocumentList = DocumentList.FindAll(delegate(Document d) { return d.Tags.HasTag(sTag); }); } else if (Request.QueryString["YearFilter"] != null) { int YearView; if (true == Int32.TryParse(Request.QueryString["YearFilter"], out YearView)) { DocumentList = DocumentList.FindAll(delegate(Document d) { return d.Dated.Year == YearView; }); } } else { }//no filter } catch { } #endregion #region Body Write Loop //Show just 25 entries maximum for (int i = 0; ((i < 25) && (i < DocumentList.Count)); i++) { MultimediaFile d = DocumentList[i] as MultimediaFile; if (null == d) continue; LabelMultiMediaFiles.Text += "<br><span class=\"subtitle\" >"; string slink; if (d.MultimediaType == MultimediaType.audio) { //slink = "<br /><a href=MultimediaPlay.aspx?FL=" + Document.MULTIMEDIA_FOLDER + "&F=" + d.Link + "&T=" + d.MultimediaType + "&W=380&H=50" + "&plugins=spectrumvisualizer-1" + " onclick=\"window.open(this.href,'newWindow','width=400,height=400', 'modal');return false\">"; slink = "<br /><a href=MultimediaPlay.aspx?FL=" + Document.MULTIMEDIA_FOLDER + "&F=" + d.Link + "&T=" + d.MultimediaType + "&W=380&H=50" + "&plugins=spectrumvisualizer-1" + ">"; } else if (d.MultimediaType == MultimediaType.video) { slink = "<br /><a href=MultimediaPlay.aspx?FL=" + Document.MULTIMEDIA_FOLDER + "&F=" + d.Link + "&T=" + d.MultimediaType + "&W=640&H=388" + ">"; } else { slink = ""; } slink += d.Title + "</a></span>"; LabelMultiMediaFiles.Text += slink; LabelMultiMediaFiles.Text += " <br /><br /><span class=\"footer\" >[" + d.Dated.ToLongDateString() + "] <a href=\"Multimedia.aspx?perma=" + d.Name + "\">PermaLink</a></span>"; if (d.Attachments.Count > 0) LabelMultiMediaFiles.Text += "<br /><span class=\"footer\" >Attachements:</span>"; foreach (Attachement att in d.Attachments) { LabelMultiMediaFiles.Text += " <span class=\"footer\" >(<a href=GetFile.aspx?SF=" + Document.ATTACHMENT_FOLDER + "/" + att.AttachmentInfo.Name + "&TF=" + att.Title + ">" + att.Title + "</a>)</span>"; } //LabelMultiMediaFiles.Text += "<br />" + d.HTMLDescription + ""; string sDescription = ContentReader.FormatTextBlock(d.Description); if (sDescription.Length > 500) sDescription = sDescription.Substring(0, 500) + "..."; LabelMultiMediaFiles.Text += "<br />" + sDescription; } #endregion #region SideBar Write Loop int yearView = DateTime.Now.Year; if (Request.QueryString["YearFilter"] != null) { if (false == Int32.TryParse(Request.QueryString["YearFilter"], out yearView)) { yearView = DateTime.Now.Year; } } List<int> yearsPrinted = new List<int>(); foreach (Document 
SideBarDoc in SideBarList) { if (SideBarDoc.Dated.Year == yearView) { string sYearMonth = SideBarDoc.Dated.ToString("yyyy") + SideBarDoc.Dated.ToString("MM"); LabelRightSideBarArea.Text += string.Format("<span class=\"subtitle\"><a href=\"{2}\">{0}-{1}</a></span><br />", SideBarDoc.Dated.ToString("MMMM"), SideBarDoc.Dated.ToString("yyyy"), "Multimedia.aspx?MonthFilter=" + sYearMonth); } else { int j = yearsPrinted.Find(delegate(int i) { return i == SideBarDoc.Dated.Year; }); if (j != SideBarDoc.Dated.Year) { yearsPrinted.Add(SideBarDoc.Dated.Year); LabelRightSideBarArea.Text += string.Format("<span class=\"title\"><a href=\"Multimedia.aspx?YearFilter={0}\">{0}</a></span><br />", SideBarDoc.Dated.Year); } } } #endregion string sThisFileName = System.IO.Path.GetFileName(Request.Url.AbsolutePath); string PodcastPageURL = (Request.Url.AbsoluteUri).Replace(sThisFileName, "Podcast.aspx"); //Remove any query strings int nQueryBegin = PodcastPageURL.IndexOf('?'); if (nQueryBegin > 0) PodcastPageURL = PodcastPageURL.Substring(0, nQueryBegin); //LabelTagCloud.Text = "Categories <i>(Click any link below)</i><br />"; //LabelTagCloud.Text += "<br /><a href=\"" + PodcastPageURL + "\" target=_blank><img border=0 src=picts/feed-icon-14x14.png alt=\"Sermon rss feed\" /> Podcast</a><br />"; LabelTagCloud.Text += "<br /><a href=\"" + "Podcast.aspx" + "\" target=_blank><img border=0 src=picts/feed-icon-14x14.png alt=\"Sermon rss feed\" /> Podcast</a><br />"; foreach (Tag tag in pageTagList) { LabelTagCloud.Text += string.Format(" | <a href=\"Multimedia.aspx?Tags={0}\">{1}</a> ", tag.Signature, tag.Name); } }
static void UpdateFormatInfo(UILabel label, NSUrl fileURL) { UpdateFormatInfo(label, AudioFile.Open(fileURL, AudioFilePermission.Read), System.IO.Path.GetFileName(fileURL.Path)); }
protected INativeObject GetINativeInstance(Type t) { var ctor = t.GetConstructor(Type.EmptyTypes); if ((ctor != null) && !ctor.IsAbstract) { return(ctor.Invoke(null) as INativeObject); } if (!NativeObjectInterfaceType.IsAssignableFrom(t)) { throw new ArgumentException("t"); } switch (t.Name) { case "CFAllocator": return(CFAllocator.SystemDefault); case "CFArray": return(Runtime.GetINativeObject <CFArray> (new NSArray().Handle, false)); case "CFBundle": var bundles = CFBundle.GetAll(); if (bundles.Length > 0) { return(bundles [0]); } else { throw new InvalidOperationException(string.Format("Could not create the new instance for type {0}.", t.Name)); } case "CFNotificationCenter": return(CFNotificationCenter.Darwin); case "CFReadStream": case "CFStream": CFReadStream readStream; CFWriteStream writeStream; CFStream.CreatePairWithSocketToHost("www.google.com", 80, out readStream, out writeStream); return(readStream); case "CFWriteStream": CFStream.CreatePairWithSocketToHost("www.google.com", 80, out readStream, out writeStream); return(writeStream); case "CFUrl": return(CFUrl.FromFile("/etc")); case "CFPropertyList": return(CFPropertyList.FromData(NSData.FromString("<string>data</string>")).PropertyList); case "DispatchData": return(DispatchData.FromByteBuffer(new byte [] { 1, 2, 3, 4 })); case "AudioFile": var path = Path.GetFullPath("1.caf"); var af = AudioFile.Open(CFUrl.FromFile(path), AudioFilePermission.Read, AudioFileType.CAF); return(af); case "CFHTTPMessage": return(CFHTTPMessage.CreateEmpty(false)); case "CFMutableString": return(new CFMutableString("xamarin")); case "CGBitmapContext": byte[] data = new byte [400]; using (CGColorSpace space = CGColorSpace.CreateDeviceRGB()) { return(new CGBitmapContext(data, 10, 10, 8, 40, space, CGBitmapFlags.PremultipliedLast)); } case "CGContextPDF": var filename = Environment.GetFolderPath(Environment.SpecialFolder.CommonDocuments) + "/t.pdf"; using (var url = new NSUrl(filename)) return(new CGContextPDF(url)); case "CGColorConversionInfo": var cci = new GColorConversionInfoTriple() { Space = CGColorSpace.CreateGenericRgb(), Intent = CGColorRenderingIntent.Default, Transform = CGColorConversionInfoTransformType.ApplySpace }; return(new CGColorConversionInfo((NSDictionary)null, cci, cci, cci)); case "CGDataConsumer": using (NSMutableData destData = new NSMutableData()) { return(new CGDataConsumer(destData)); } case "CGDataProvider": #if __MACCATALYST__ filename = Path.Combine("Contents", "Resources", "xamarin1.png"); #else filename = "xamarin1.png"; #endif return(new CGDataProvider(filename)); case "CGFont": return(CGFont.CreateWithFontName("Courier New")); case "CGPattern": return(new CGPattern( new RectangleF(0, 0, 16, 16), CGAffineTransform.MakeIdentity(), 16, 16, CGPatternTiling.NoDistortion, true, (cgc) => {})); case "CMBufferQueue": return(CMBufferQueue.CreateUnsorted(2)); case "CTFont": CTFontDescriptorAttributes fda = new CTFontDescriptorAttributes() { FamilyName = "Courier", StyleName = "Bold", Size = 16.0f }; using (var fd = new CTFontDescriptor(fda)) return(new CTFont(fd, 10)); case "CTFontCollection": return(new CTFontCollection(new CTFontCollectionOptions())); case "CTFontDescriptor": fda = new CTFontDescriptorAttributes(); return(new CTFontDescriptor(fda)); case "CTTextTab": return(new CTTextTab(CTTextAlignment.Left, 2)); case "CTTypesetter": return(new CTTypesetter(new NSAttributedString("Hello, world", new CTStringAttributes() { ForegroundColorFromContext = true, Font = new CTFont("ArialMT", 24) }))); case "CTFrame": var 
framesetter = new CTFramesetter(new NSAttributedString("Hello, world", new CTStringAttributes() { ForegroundColorFromContext = true, Font = new CTFont("ArialMT", 24) })); var bPath = UIBezierPath.FromRect(new RectangleF(0, 0, 3, 3)); return(framesetter.GetFrame(new NSRange(0, 0), bPath.CGPath, null)); case "CTFramesetter": return(new CTFramesetter(new NSAttributedString("Hello, world", new CTStringAttributes() { ForegroundColorFromContext = true, Font = new CTFont("ArialMT", 24) }))); case "CTGlyphInfo": return(new CTGlyphInfo("copyright", new CTFont("ArialMY", 24), "Foo")); case "CTLine": return(new CTLine(new NSAttributedString("Hello, world", new CTStringAttributes() { ForegroundColorFromContext = true, Font = new CTFont("ArialMT", 24) }))); case "CGImageDestination": var storage = new NSMutableData(); return(CGImageDestination.Create(new CGDataConsumer(storage), "public.png", 1)); case "CGImageMetadataTag": using (NSString name = new NSString("tagName")) using (var value = new NSString("value")) return(new CGImageMetadataTag(CGImageMetadataTagNamespaces.Exif, CGImageMetadataTagPrefixes.Exif, name, CGImageMetadataType.Default, value)); case "CGImageSource": #if __MACCATALYST__ filename = Path.Combine("Contents", "Resources", "xamarin1.png"); #else filename = "xamarin1.png"; #endif return(CGImageSource.FromUrl(NSUrl.FromFilename(filename))); case "SecPolicy": return(SecPolicy.CreateSslPolicy(false, null)); case "SecIdentity": using (var options = NSDictionary.FromObjectAndKey(new NSString("farscape"), SecImportExport.Passphrase)) { NSDictionary[] array; var result = SecImportExport.ImportPkcs12(farscape_pfx, options, out array); if (result != SecStatusCode.Success) { throw new InvalidOperationException(string.Format("Could not create the new instance for type {0} due to {1}.", t.Name, result)); } return(Runtime.GetINativeObject <SecIdentity> (array [0].LowlevelObjectForKey(SecImportExport.Identity.Handle), false)); } case "SecTrust": X509Certificate x = new X509Certificate(mail_google_com); using (var policy = SecPolicy.CreateSslPolicy(true, "mail.google.com")) return(new SecTrust(x, policy)); case "SslContext": return(new SslContext(SslProtocolSide.Client, SslConnectionType.Stream)); case "UIFontFeature": return(new UIFontFeature(CTFontFeatureNumberSpacing.Selector.ProportionalNumbers)); case "NetworkReachability": return(new NetworkReachability(IPAddress.Loopback, null)); case "VTCompressionSession": case "VTSession": return(VTCompressionSession.Create(1024, 768, CMVideoCodecType.H264, (sourceFrame, status, flags, buffer) => { }, null, (CVPixelBufferAttributes)null)); case "VTFrameSilo": return(VTFrameSilo.Create()); case "VTMultiPassStorage": return(VTMultiPassStorage.Create()); case "CFString": return(new CFString("test")); case "DispatchBlock": return(new DispatchBlock(() => { })); case "DispatchQueue": return(new DispatchQueue("com.example.subsystem.taskXYZ")); case "DispatchGroup": return(DispatchGroup.Create()); case "CGColorSpace": return(CGColorSpace.CreateDeviceCmyk()); case "CGGradient": CGColor[] cArray = { UIColor.Black.CGColor, UIColor.Clear.CGColor, UIColor.Blue.CGColor }; return(new CGGradient(null, cArray)); case "CGImage": #if __MACCATALYST__ filename = Path.Combine("Contents", "Resources", "xamarin1.png"); #else filename = "xamarin1.png"; #endif using (var dp = new CGDataProvider(filename)) return(CGImage.FromPNG(dp, null, false, CGColorRenderingIntent.Default)); case "CGColor": return(UIColor.Black.CGColor); case "CMClock": return(CMClock.HostTimeClock); case 
"CMTimebase": return(new CMTimebase(CMClock.HostTimeClock)); case "CVPixelBufferPool": return(new CVPixelBufferPool( new CVPixelBufferPoolSettings(), new CVPixelBufferAttributes(CVPixelFormatType.CV24RGB, 100, 50) )); case "SecCertificate": using (var cdata = NSData.FromArray(mail_google_com)) return(new SecCertificate(cdata)); case "SecCertificate2": using (var cdata = NSData.FromArray(mail_google_com)) return(new SecCertificate2(new SecCertificate(cdata))); case "SecTrust2": X509Certificate x2 = new X509Certificate(mail_google_com); using (var policy = SecPolicy.CreateSslPolicy(true, "mail.google.com")) return(new SecTrust2(new SecTrust(x2, policy))); case "SecIdentity2": using (var options = NSDictionary.FromObjectAndKey(new NSString("farscape"), SecImportExport.Passphrase)) { NSDictionary[] array; var result = SecImportExport.ImportPkcs12(farscape_pfx, options, out array); if (result != SecStatusCode.Success) { throw new InvalidOperationException(string.Format("Could not create the new instance for type {0} due to {1}.", t.Name, result)); } return(new SecIdentity2(Runtime.GetINativeObject <SecIdentity> (array [0].LowlevelObjectForKey(SecImportExport.Identity.Handle), false))); } case "SecKey": SecKey private_key; SecKey public_key; using (var record = new SecRecord(SecKind.Key)) { record.KeyType = SecKeyType.RSA; record.KeySizeInBits = 512; // it's not a performance test :) SecKey.GenerateKeyPair(record.ToDictionary(), out public_key, out private_key); return(private_key); } case "SecAccessControl": return(new SecAccessControl(SecAccessible.WhenPasscodeSetThisDeviceOnly)); #if __MACCATALYST__ case "Authorization": return(Security.Authorization.Create(AuthorizationFlags.Defaults)); #endif default: throw new InvalidOperationException(string.Format("Could not create the new instance for type {0}.", t.Name)); } }
private void ButtonSelectFile_Click(object sender, RoutedEventArgs e) { var dlg = new Microsoft.Win32.OpenFileDialog { FilterIndex = 3, Filter = "MP3 Files (*.mp3)|*.mp3|MPEG 4 Audio (*.m4a)|*.m4a|Audio Files|*.mp3;*.m4a" }; var result = dlg.ShowDialog(); if (result == true) { _filePath = dlg.FileName; ButtonSelectFile.Content = Path.GetFileName(_filePath); try // load data from tags { var file = new AudioFile(_filePath); // Song title TextBoxSongTitle.Text = file.Tag.Title; // Song artist if (string.IsNullOrEmpty(file.Tag.FirstPerformer)) { ComboBoxArtist.SelectedIndex = -1; } else { int index = GetArtistIndex(file.Tag.FirstPerformer); if (index == -1) { EnableEditing(ComboBoxArtist); ComboBoxArtist.Text = file.Tag.FirstPerformer; } else { ComboBoxArtist.SelectedIndex = index; } } // Song album if (string.IsNullOrEmpty(file.Tag.Album)) { ComboBoxAlbum.SelectedIndex = -1; } else { int index = GetAlbumIndex(file.Tag.Album); if (index == -1) { EnableEditing(ComboBoxAlbum); ComboBoxAlbum.Text = file.Tag.Album; } else { ComboBoxAlbum.SelectedIndex = index; } } // Song genre if (string.IsNullOrEmpty(file.Tag.Genres[0])) { ComboBoxGenre.SelectedIndex = -1; } else { int index = GetGenreIndex(file.Tag.Genres[0]); if (index == -1) { EnableEditing(ComboBoxGenre); ComboBoxGenre.Text = file.Tag.Genres[0]; } else { ComboBoxGenre.SelectedIndex = index; } } } catch (Exception ex) { Debug.WriteLine("Unable to read tags from file. " + ex.Message); } if (string.IsNullOrEmpty(TextBoxSongTitle.Text)) { TextBoxSongTitle.Text = Path.GetFileNameWithoutExtension(_filePath); } } }
void handleClipRequest(AudioFile file) { processAudioFileAccess(file); }
// This is for the autoplay function which waits after each playback and pulls from the playlist. // Since the playlist extracts the audio information, we can safely assume that it's chosen the local // if it exists, or just uses the network link. public async Task AutoPlayAudioAsync(IGuild guild, IMessageChannel channel) { // We can't play from an empty guild. if (guild == null) { return; } if (m_AutoPlayRunning) { return; // Only allow one instance of autoplay. } while (m_AutoPlayRunning = m_AutoPlay) { // If the audio player is already playing, we need to wait until it's fully finished. if (m_AudioPlayer.IsRunning()) { await Task.Delay(1000); } // We do some checks before entering this loop. if (m_Playlist.IsEmpty || !m_AutoPlayRunning || !m_AutoPlay) { break; } // If there's nothing playing, start the stream, this is the main part of 'play' if (m_ConnectedChannels.TryGetValue(guild.Id, out var audioClient)) { AudioFile song = PlaylistNext(); // If null, nothing in the playlist. We can wait in this loop until there is. if (song != null) { Log($"Now Playing: {song.Title}", (int)LogType.Text); // Reply in the text channel. Log(song.Title, (int)LogType.Playing); // Set playing. await m_AudioPlayer.Play(audioClient, song); // The song should already be identified as local or network. Log("Nothing", (int)LogType.Playing); } else { Log($"Cannot play the audio source specified : {song}"); } // We do the same checks again to make sure we exit right away. May not be necessary, but let's check anyways. if (m_Playlist.IsEmpty || !m_AutoPlayRunning || !m_AutoPlay) { break; } // Is null or done with playback. continue; } // If we can't get it from the dictionary, we're probably not connected to it yet. Log("Unable to play in the proper channel. Make sure the audio client is connected."); break; } // Stops autoplay once we're done with it. if (m_AutoStop) { m_AutoPlay = false; } m_AutoPlayRunning = false; }
private string GetLibraryLocalPath(AudioFile audioFile) { string fileName = Path.GetFileName(audioFile.FilePath); string folderPath = _deviceSpecifications.GetMusicFolderPath(); //// Add artist name to the path (create folder if necessary) //folderPath = Path.Combine(folderPath, audioFile.ArtistName); //if (!Directory.Exists(folderPath)) // Directory.CreateDirectory(folderPath); //// Add album title to the path (create folder if necessary) //folderPath = Path.Combine(folderPath, audioFile.AlbumTitle); //if (!Directory.Exists(folderPath)) // Directory.CreateDirectory(folderPath); return Path.Combine(folderPath, fileName); }
public void StopMusic(string name) { AudioFile file = Array.Find(instance.musicFiles, f => f.audioName == name); file.source.Stop(); }
// Some audio formats have a magic cookie associated with them which is required to decompress audio data // When converting audio data you must check to see if the format of the data has a magic cookie // If the audio data format has a magic cookie associated with it, you must add this information to anAudio Converter // using AudioConverterSetProperty and kAudioConverterDecompressionMagicCookie to appropriately decompress the data // http://developer.apple.com/mac/library/qa/qa2001/qa1318.html static void ReadCookie (AudioFile sourceFile, AudioConverter converter) { // grab the cookie from the source file and set it on the converter var cookie = sourceFile.MagicCookie; // if there is an error here, then the format doesn't have a cookie - this is perfectly fine as some formats do not if (cookie != null && cookie.Length != 0) { converter.DecompressionMagicCookie = cookie; } }
public void UnPauseMusic(string name) { AudioFile file = Array.Find(instance.musicFiles, f => f.audioName == name); file.source.UnPause(); }
// Write output channel layout to destination file static void WriteDestinationChannelLayout (AudioConverter converter, AudioFile sourceFile, AudioFile destinationFile) { // if the Audio Converter doesn't have a layout see if the input file does var layout = converter.OutputChannelLayout ?? sourceFile.ChannelLayout; if (layout != null) { // TODO: throw new NotImplementedException (); //destinationFile.ChannelLayout = layout; } }
/////////////////////////////////////////////////////////////////////////////// // Eventhandler for Custom Defined Events // /////////////////////////////////////////////////////////////////////////////// #region CUSTOMEVENTHANDLER #endregion //CUSTOMEVENTHANDLER #endregion //EVENTS /////////////////////////////////////////////////////////////////////////////// // Methods and Eventhandling for Background tasks // /////////////////////////////////////////////////////////////////////////////// #region BACKGROUNDWORKER #endregion //BACKGROUNDWORKER /////////////////////////////////////////////////////////////////////////////// // Methods for doing main class job // /////////////////////////////////////////////////////////////////////////////// #region PRIVATEMETHODS /// <summary> /// This method does the parsing of the given path and creates for each readable /// image or media file a trial with the defined conditions. /// </summary> /// <returns>A <see cref="List{Slide}"/> to be imported in the slideshow of /// the experiment.</returns> private List <Slide> GetSlides() { var newSlides = new List <Slide>(); var dirInfoStimuli = new DirectoryInfo(this.txbFolder.Text); if (dirInfoStimuli.Exists) { var files = dirInfoStimuli.GetFiles(); Array.Sort(files, new NumericComparer()); foreach (var file in files) { var extension = file.Extension.ToLower(); // Ignore files with unrecognized extensions switch (extension) { case ".bmp": case ".png": case ".jpg": case ".wmf": case ".mp3": case ".wav": case ".wma": break; default: continue; } // Ignore hidden and MAC files if (file.Name.StartsWith(".")) { continue; } var newSlide = new Slide { BackgroundColor = this.clbBackground.CurrentColor, Modified = true, MouseCursorVisible = this.chbShowMouseCursor.Checked, MouseInitialPosition = this.psbMouseCursor.CurrentPosition, Name = Path.GetFileNameWithoutExtension(file.Name), PresentationSize = Document.ActiveDocument.PresentationSize }; StopCondition stop = null; if (this.rdbTime.Checked) { stop = new TimeStopCondition((int)(this.nudTime.Value * 1000)); } else if (this.rdbKey.Checked) { string selectedItemKeys = (string)this.cbbKeys.SelectedItem; KeyStopCondition ksc = new KeyStopCondition(); if (selectedItemKeys == "Any") { ksc.CanBeAnyInputOfThisType = true; } else { ksc.StopKey = (Keys)Enum.Parse(typeof(Keys), selectedItemKeys); } stop = ksc; } else if (this.rdbMouse.Checked) { string selectedItemMouse = (string)this.cbbMouseButtons.SelectedItem; MouseStopCondition msc = new MouseStopCondition(); msc.CanBeAnyInputOfThisType = selectedItemMouse == "Any" ? 
true : false; if (!msc.CanBeAnyInputOfThisType) { msc.StopMouseButton = (MouseButtons)Enum.Parse(typeof(MouseButtons), selectedItemMouse); } msc.Target = string.Empty; stop = msc; } else if (this.rdbDuration.Checked) { if (extension == ".mp3" || extension == ".wav" || extension == ".wma") { int duration = this.GetAudioFileLength(file.FullName); if (duration != 0) { stop = new TimeStopCondition(duration + (int)this.nudLatency.Value); } else { stop = new TimeStopCondition((int)(this.nudTime.Value * 1000)); } } else { stop = new TimeStopCondition((int)(this.nudTime.Value * 1000)); } } newSlide.StopConditions.Add(stop); foreach (VGElement element in this.lsbStandardItems.Items) { newSlide.VGStimuli.Add(element); } string destination = Path.Combine(Document.ActiveDocument.ExperimentSettings.SlideResourcesPath, file.Name); switch (extension) { case ".bmp": case ".png": case ".jpg": case ".wmf": if (!File.Exists(destination)) { File.Copy(file.FullName, destination, true); } VGImage image = new VGImage( ShapeDrawAction.None, Pens.Red, Brushes.Red, SystemFonts.MenuFont, Color.Red, file.Name, Document.ActiveDocument.ExperimentSettings.SlideResourcesPath, ImageLayout.Stretch, 1f, Document.ActiveDocument.PresentationSize, VGStyleGroup.None, file.Name, string.Empty, true); newSlide.VGStimuli.Add(image); newSlides.Add(newSlide); break; case ".mp3": case ".wav": case ".wma": File.Copy(file.FullName, destination, true); VGSound sound = new VGSound(ShapeDrawAction.None, Pens.Red, new Rectangle(0, 0, 200, 300)); sound.Center = newSlide.MouseInitialPosition; sound.Size = new SizeF(50, 50); AudioFile audioFile = new AudioFile(); audioFile.Filename = file.Name; audioFile.Filepath = Document.ActiveDocument.ExperimentSettings.SlideResourcesPath; audioFile.Loop = false; audioFile.ShouldPlay = true; audioFile.ShowOnClick = false; sound.Sound = audioFile; newSlide.VGStimuli.Add(sound); newSlides.Add(newSlide); break; } } } return(newSlides); }
unsafe static void HandleOutput (AudioFile audioFile, AudioQueue queue, AudioQueueBuffer *audioQueueBuffer, ref int packetsToRead, ref long currentPacket, ref bool done, ref bool flushed, ref AudioStreamPacketDescription [] packetDescriptions) { int bytes; int packets; if (done) return; packets = packetsToRead; bytes = (int) audioQueueBuffer->AudioDataBytesCapacity; packetDescriptions = audioFile.ReadPacketData (false, currentPacket, ref packets, audioQueueBuffer->AudioData, ref bytes); if (packets > 0) { audioQueueBuffer->AudioDataByteSize = (uint) bytes; queue.EnqueueBuffer (audioQueueBuffer, packetDescriptions); currentPacket += packets; } else { if (!flushed) { queue.Flush (); flushed = true; } queue.Stop (false); done = true; } }
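// Illustrative sketch (not from the original source): the same HandleOutput routine can prime a freshly created queue by pushing newly allocated buffers through it before playback starts. The buffer count and the conversion of the IntPtr returned by AllocateBuffer are assumptions for the example.
unsafe static void PrimeBuffers (AudioFile audioFile, OutputAudioQueue queue, int bufferByteSize, ref int packetsToRead, ref long currentPacket) { bool done = false, flushed = false; AudioStreamPacketDescription [] descriptions = null; for (int i = 0; i < 3 && !done; i++) { IntPtr ptr; queue.AllocateBuffer (bufferByteSize, out ptr); HandleOutput (audioFile, queue, (AudioQueueBuffer *) ptr.ToPointer (), ref packetsToRead, ref currentPacket, ref done, ref flushed, ref descriptions); } }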
override public void Setup_AfterAdd(ByteBuffer buffer, int beginPos) { base.Setup_AfterAdd(buffer, beginPos); if (!buffer.Seek(beginPos, 6)) { return; } if ((ObjectType)buffer.ReadByte() != packageItem.objectType) { return; } string str; str = buffer.ReadS(); if (str != null) { this.title = str; } str = buffer.ReadS(); if (str != null) { this.selectedTitle = str; } str = buffer.ReadS(); if (str != null) { this.icon = str; } str = buffer.ReadS(); if (str != null) { this.selectedIcon = str; } if (buffer.ReadBool()) { this.titleColor = buffer.ReadColor(); } int iv = buffer.ReadInt(); if (iv != 0) { this.titleFontSize = iv; } iv = buffer.ReadShort(); if (iv >= 0) { _relatedController = parent.GetControllerAt(iv); } pageOption.id = buffer.ReadS(); str = buffer.ReadS(); if (str != null) { sound = UIPackage.GetItemAssetByURL(str) as AudioFile; } if (buffer.ReadBool()) { soundVolumeScale = buffer.ReadFloat(); } this.selected = buffer.ReadBool(); }