Example #1
File: Player.cs Project: daisy/obi
        // Called when new audio is passed to the player for playback.
        // Initializes all asset-dependent members, excluding stream-dependent members.
        private void InitAudio(urakawa.media.data.audio.AudioMediaData audio)
        {
            this.audio = audio;
            WaveFormat        format = new WaveFormat();
            BufferDescription desc   = new BufferDescription();

            urakawa.media.data.audio.PCMFormatInfo info = audio.getPCMFormat();
            this.frameSize  = info.getBlockAlign();
            this.channels   = info.getNumberOfChannels();
            this.sampleRate = (int)info.getSampleRate();
            format.AverageBytesPerSecond = (int)info.getSampleRate() * info.getBlockAlign();
            format.BitsPerSample         = Convert.ToInt16(info.getBitDepth());
            format.BlockAlign            = Convert.ToInt16(info.getBlockAlign());
            format.Channels         = Convert.ToInt16(info.getNumberOfChannels());
            format.FormatTag        = WaveFormatTag.Pcm;
            format.SamplesPerSecond = (int)info.getSampleRate();
            desc.Format             = format;
            this.bufferSize         = format.AverageBytesPerSecond;
            this.refreshLength      = this.bufferSize / 2;
            desc.BufferBytes        = this.bufferSize;
            desc.GlobalFocus        = true;
            this.soundBuffer        = new SecondaryBuffer(desc, this.device);
            this.pausePosition      = 0;
            this.previousPosition   = 0;
        }
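The buffer sizing above is easy to miss: the secondary buffer holds exactly one second of PCM (AverageBytesPerSecond = sample rate x block align) and is refreshed half a buffer at a time. A minimal sketch of that arithmetic, using an assumed 44100 Hz, 16-bit, mono format rather than figures from the project:

        // Illustrative only: buffer sizing for an assumed 44100 Hz, 16-bit, mono asset.
        int sampleRate     = 44100;
        int bitDepth       = 16;
        int channels       = 1;
        int blockAlign     = channels * (bitDepth / 8);   // 2 bytes per PCM frame
        int bytesPerSecond = sampleRate * blockAlign;     // 88200 bytes = one second of audio
        int bufferSize     = bytesPerSecond;              // size of the secondary buffer
        int refreshLength  = bufferSize / 2;              // 44100 bytes refilled per half-buffer swap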
 // Flushes the current audio stream to a RIFF wave file in the output folder,
 // then resets the in-memory stream and PCM format so a new audio file can begin.
 private Stream getNextAudioStream(string basename)
 {
     if (getAudioStream() != null && getCurrentPCMFormat() != null)
     {
         FileStream fs = new FileStream(Path.Combine(getOutputFolder(), getCurrentAudioFileName()), FileMode.Create, FileAccess.Write);
         urakawa.media.data.audio.PCMDataInfo dataInfo = new urakawa.media.data.audio.PCMDataInfo(getCurrentPCMFormat());
         Stream curAS = getAudioStream();
         curAS.Position = 0;
         dataInfo.setDataLength((uint)curAS.Length);
         dataInfo.writeRiffWaveHeader(fs);
         copyData(curAS, fs, dataInfo.getDataLength());
         fs.Close();
     }
     mAudioMemoryStream = new MemoryStream();
     mPCMFormat         = null;
      return getAudioStream();
 }
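getNextAudioStream (and preVisit further down) call a copyData helper that is not part of this excerpt. A plausible sketch, assuming it simply moves a fixed number of bytes from one stream to another; the signature is inferred from the call sites, not taken from the project:

 // Hypothetical helper: copy 'length' bytes from 'source' to 'destination' in chunks.
 private static void copyData(Stream source, Stream destination, uint length)
 {
     byte[] buffer    = new byte[64 * 1024];
     uint   remaining = length;
     while (remaining > 0)
     {
         int toRead = (int)Math.Min(remaining, (uint)buffer.Length);
         int read   = source.Read(buffer, 0, toRead);
         if (read <= 0) break;                  // stop if the source runs out early
         destination.Write(buffer, 0, read);
         remaining -= (uint)read;
     }
 }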
        // Resets all exporter state and empties the output folder so a fresh export can start.
        public void clear()
        {
            mTreeNodeSmilUris.Clear();
            mSmilFileNames.Clear();
            mTotalElapsedTime = new TimeDelta();
            mSmilWriter       = null;
            mAudioFileNames.Clear();
            mAudioMemoryStream = null;
            mLatestSmilIdNo    = 0;
            mPCMFormat         = null;
            DirectoryInfo di = new DirectoryInfo(getOutputFolder());

            if (!di.Exists)
            {
                di.Create();
            }
            foreach (FileSystemInfo fsi in di.GetFileSystemInfos())
            {
                fsi.Delete();
            }
        }
Example #4
File: Player.cs Project: daisy/obi
 /// <summary>
 /// Play an asset from a specified time position to its end
 /// </summary>
 public void Play(urakawa.media.data.audio.AudioMediaData audio, double from)
 {
     if (this.state == PlayerState.Stopped || this.state == PlayerState.Paused)
     {
         if (audio != null && audio.getAudioDuration().getTimeDeltaAsMillisecondFloat() > 0)
         {
             urakawa.media.data.audio.PCMFormatInfo info = audio.getPCMFormat();
             int position = CalculationFunctions.AdaptToFrame(
                 CalculationFunctions.ConvertTimeToByte(from, (int)info.getSampleRate(), info.getBlockAlign()),
                 info.getBlockAlign());
             if (position >= 0 && position <= audio.getPCMLength())
             {
                 this.startPosition = position;
                 InitPlay(audio, position, 0);
             }
             else
             {
                 throw new Exception("Start Position is out of bounds of Audio Asset");
             }
         }
     }
 }
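Play's position calculation converts the millisecond offset to a byte offset and then snaps it to a whole PCM frame. The helpers below are illustrative stand-ins for CalculationFunctions, not code from the project, but they show the intended arithmetic: at 44100 Hz with a block align of 2, from = 1500 ms maps to 1.5 * 44100 frames * 2 bytes = 132300 bytes.

 // Illustrative equivalents of the CalculationFunctions helpers used by Play(); assumed, not copied.
 static int ConvertTimeToByte(double timeMs, int sampleRate, int blockAlign)
 {
     // milliseconds -> frames -> bytes
     return (int)(timeMs * sampleRate / 1000.0) * blockAlign;
 }

 static int AdaptToFrame(int bytePosition, int blockAlign)
 {
     // round down to the nearest whole frame so playback never starts mid-sample
     return bytePosition - (bytePosition % blockAlign);
 }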
Example #5
File: Player.cs Project: daisy/obi
        // Called to start playback when the player is already initialised with an asset.
        // Initialises all member variables that depend on the asset stream and fills the play buffer with data.
        private void PlayAssetStream(int from, int to)
        {
            urakawa.media.data.audio.PCMFormatInfo info = this.audio.getPCMFormat();
            from                      = CalculationFunctions.AdaptToFrame(from, info.getBlockAlign());
            to                        = CalculationFunctions.AdaptToFrame(to, info.getBlockAlign());
            this.length               = to == 0 ? this.audio.getPCMLength() : to;
            this.audioStream          = this.audio.getAudioData();
            this.audioStream.Position = from;
            this.soundBuffer.Write(0, this.audioStream, this.bufferSize, 0);
            this.played = from + this.bufferSize;
            StateChangedEventArgs e = new StateChangedEventArgs(this, this.state);

            this.state = PlayerState.Playing;
            if (StateChanged != null)
            {
                StateChanged(this, e);
            }
            this.soundBuffer.Play(0, BufferPlayFlags.Looping);
            this.bufferCheck        = 1;
            this.bufferStopPosition = -1;
            this.refreshThread      = new Thread(new ThreadStart(RefreshBuffer));
            this.refreshThread.Start();
        }
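PlayAssetStream pre-fills the whole one-second buffer, starts it looping, and hands off to a RefreshBuffer thread that is not shown in this excerpt. The sketch below is an assumption about its core double-buffering loop (refill whichever half the play cursor has just left); it omits the bufferStopPosition handling, end-of-asset trimming and events the real player performs, and it assumes int byte counts as in PlayAssetStream.

        // Hedged sketch of a double-buffering refresh loop; not the Obi implementation.
        private void RefreshBuffer()
        {
            int half = this.refreshLength;      // half of the one-second secondary buffer
            int nextHalfToFill = 0;             // both halves were pre-filled by PlayAssetStream
            while (this.state == PlayerState.Playing && this.played < this.length)
            {
                bool cursorInSecondHalf = this.soundBuffer.PlayPosition >= half;
                // Refill a half only while the other half is the one being played.
                if ((nextHalfToFill == 0 && cursorInSecondHalf) ||
                    (nextHalfToFill == 1 && !cursorInSecondHalf))
                {
                    int toWrite = Math.Min(half, this.length - this.played);
                    this.soundBuffer.Write(nextHalfToFill * half, this.audioStream, toWrite, 0);
                    this.played += toWrite;
                    nextHalfToFill = 1 - nextHalfToFill;
                }
                Thread.Sleep(50);               // crude polling, kept simple for the sketch
            }
            this.soundBuffer.Stop();
        }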
        // Visitor callback: opens a SMIL seq element for the node and, if the node carries audio,
        // streams that audio into the current export file and writes the matching par/audio elements.
        public bool preVisit(TreeNode node)
        {
            XmlWriter curWr;

            if (getLevelNodeNavigator().isIncluded(node))
            {
                curWr = getNextSmilWriter();
            }
            else
            {
                curWr = getSmilWriter();
            }
            curWr.WriteStartElement("seq", SMIL_NS);
            curWr.WriteAttributeString("id", getNextSmilId());
            properties.channel.ChannelsProperty chProp
                = node.getProperty(typeof(properties.channel.ChannelsProperty)) as properties.channel.ChannelsProperty;
            if (chProp != null)
            {
                media.IMedia audChMedia = chProp.getMedia(getAudioChannel());
                List<media.data.audio.AudioMediaData> audioMediaData = new List<urakawa.media.data.audio.AudioMediaData>();
                if (audChMedia is media.data.ManagedAudioMedia)
                {
                    audioMediaData.Add(((media.data.ManagedAudioMedia)audChMedia).getMediaData());
                }
                else if (audChMedia is media.SequenceMedia)
                {
                    audioMediaData.AddRange(getAudioMediaDataFromSequenceMedia((media.SequenceMedia)audChMedia));
                }
                if (audioMediaData.Count > 0)
                {
                    curWr.WriteStartElement("par", SMIL_NS);
                    if (audioMediaData.Count > 1)
                    {
                        curWr.WriteStartElement("seq", SMIL_NS);
                    }
                    foreach (media.data.audio.AudioMediaData amd in audioMediaData)
                    {
                        TimeDelta clipBegin = getElapsenInCurrentAudio();
                        if (getCurrentPCMFormat() == null)
                        {
                            mPCMFormat = new urakawa.media.data.audio.PCMFormatInfo(amd.getPCMFormat());
                        }
                        else if (!getCurrentPCMFormat().ValueEquals(amd.getPCMFormat()))
                        {
                            throw new exception.InvalidDataFormatException(
                                      "Can not export since the PCM format differs within a single destination file");
                        }
                        copyData(amd.getAudioData(), getAudioStream(), (uint)amd.getPCMLength());
                        TimeDelta clipEnd = getElapsenInCurrentAudio();
                        curWr.WriteStartElement("audio", SMIL_NS);
                        curWr.WriteAttributeString("src", getCurrentAudioFileName());
                        curWr.WriteAttributeString("clipBegin", timeDeltaToSmilString(clipBegin));
                        curWr.WriteAttributeString("clipEnd", timeDeltaToSmilString(clipEnd));
                    }
                    if (audioMediaData.Count > 1)
                    {
                        curWr.WriteEndElement();
                    }
                    curWr.WriteEndElement();
                }
            }
            return true;
        }
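preVisit leans on a few helpers that are outside this excerpt, most visibly timeDeltaToSmilString for the clipBegin/clipEnd attributes. A hypothetical version is sketched below; it assumes a SMIL full clock value (hh:mm:ss.fff), while the project's own formatter may use a different flavour.

        // Hypothetical formatter for clipBegin/clipEnd; not copied from the project.
        private static string timeDeltaToSmilString(TimeDelta delta)
        {
            TimeSpan ts = TimeSpan.FromMilliseconds(delta.getTimeDeltaAsMillisecondFloat());
            // SMIL full clock value: hours, minutes, seconds and milliseconds
            return string.Format(System.Globalization.CultureInfo.InvariantCulture,
                                 "{0:00}:{1:00}:{2:00}.{3:000}",
                                 (int)ts.TotalHours, ts.Minutes, ts.Seconds, ts.Milliseconds);
        }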