Example No. 1
        private void btnPlay_Click(object sender, RoutedEventArgs e)
        {
            AudioTrack _audioTrack = new AudioTrack(new Uri(fileName, UriKind.Relative), "Song Title", "Artist", "Album", null);
            BackgroundAudioPlayer.Instance.Track = _audioTrack;

            BackgroundAudioPlayer.Instance.Play();
        }
        protected override void OnUserAction(BackgroundAudioPlayer player, AudioTrack track, UserAction action, object param)
        {
            switch (action)
            {
                case UserAction.Play:
                    if (PlayState.Playing != player.PlayerState)
                    {
                        player.Play();
                    }
                    break;

                case UserAction.Stop:
                    player.Stop();
                    break;

                case UserAction.Pause:
                    if (PlayState.Playing == player.PlayerState)
                    {
                        player.Pause();
                    }
                    break;
                case UserAction.Rewind:
                    player.Position = player.Position.Subtract(new TimeSpan(0, 0, 10));
                    break;

                case UserAction.FastForward:
                    player.Position = player.Position.Add(new TimeSpan(0, 0, 10));
                    break;
            }

            NotifyComplete();
        }
Example No. 3
        /// <summary>
        /// Called when a new track requires audio decoding
        /// (typically because it is about to start playing)
        /// </summary>
        /// <param name="track">
        /// The track that needs audio streaming
        /// </param>
        /// <param name="streamer">
        /// The AudioStreamer object to which a MediaStreamSource should be
        /// attached to commence playback
        /// </param>
        /// <remarks>
        /// To invoke this method for a track, set the Source parameter of the AudioTrack to null
        /// before setting it into the Track property of the BackgroundAudioPlayer instance;
        /// otherwise it is assumed that the system will perform all streaming
        /// and decoding
        /// </remarks>
        protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
        {
            // TODO: Set the source of streamer to a MediaStreamSource (MSS) via SetSource

            var data = track.Tag.ToString().Split('$');
            var url = data[data.Length - 1];

            var type = data[2];

#if DEBUG
            System.Diagnostics.Debug.WriteLine("AudioStreamer:OnBeginStreaming - Type: " + type);
#endif

            switch (type.ToLower())
            {
                case "shoutcast":
                    {
                        mms = new Silverlight.Media.ShoutcastMediaStreamSource(new Uri(url), true);
                        //track.Title = "Moo";
                        mms.MetadataChanged += mms_MetadataChanged;
                        mms.Connected += mms_Connected;
                        mms.Closed += mms_Closed;
                        streamer.SetSource(mms);
                    }
                    break;
            }
        }
        /// <summary>
        /// Called when the playstate changes, except for the Error state (see OnError)
        /// </summary>
        /// <param name="player">The BackgroundAudioPlayer</param>
        /// <param name="track">The track playing at the time the playstate changed</param>
        /// <param name="playState">The new playstate of the player</param>
        /// <remarks>
        /// Play State changes cannot be cancelled. They are raised even if the application
        /// caused the state change itself, assuming the application has opted-in to the callback.
        /// 
        /// Notable playstate events: 
        /// (a) TrackEnded: invoked when the player has no current track. The agent can set the next track.
        /// (b) TrackReady: an audio track has been set and it is now ready for playback.
        /// 
        /// Call NotifyComplete() only once, after the agent request has been completed, including async callbacks.
        /// </remarks>
        protected override void OnPlayStateChanged(BackgroundAudioPlayer player, AudioTrack track, PlayState playState)
        {
            switch (playState)
            {
                case PlayState.TrackEnded:
                    player.Track = GetNextTrack();
                    break;
                case PlayState.TrackReady:
                    player.Play();
                    break;
                case PlayState.Shutdown:
                    // TODO: Handle the shutdown state here (e.g. save state)
                    break;
                case PlayState.Unknown:
                    break;
                case PlayState.Stopped:
                    _playlistHelper.SetAllTracksToNotPlayingAndSave();
                    break;
                case PlayState.Paused:
                    break;
                case PlayState.Playing:
                    break;
                case PlayState.BufferingStarted:
                    break;
                case PlayState.BufferingStopped:
                    break;
                case PlayState.Rewinding:
                    break;
                case PlayState.FastForwarding:
                    break;
            }

            NotifyComplete();
        }
        /// <summary>
        ///     Called when the playstate changes, except for the Error state (see OnError)
        /// </summary>
        /// <param name="player">The BackgroundAudioPlayer</param>
        /// <param name="track">The track playing at the time the playstate changed</param>
        /// <param name="playState">The new playstate of the player</param>
        /// <remarks>
        ///     Play State changes cannot be cancelled. They are raised even if the application
        ///     caused the state change itself, assuming the application has opted-in to the callback.
        ///     Notable playstate events:
        ///     (a) TrackEnded: invoked when the player has no current track. The agent can set the next track.
        ///     (b) TrackReady: an audio track has been set and it is now ready for playback.
        ///     Call NotifyComplete() only once, after the agent request has been completed, including async callbacks.
        /// </remarks>
        protected override void OnPlayStateChanged(BackgroundAudioPlayer player, AudioTrack track, PlayState playState)
        {
            Debug.WriteLine("OnPlayStateChanged() playState " + playState);

            switch (playState)
            {
                case PlayState.TrackEnded:
                    player.Track = GetPreviousTrack();
                    break;
                case PlayState.TrackReady:
                    player.Play();
                    break;
                case PlayState.Shutdown:
                    // TODO: Handle the shutdown state here (e.g. save state)
                    break;
                case PlayState.Unknown:
                    break;
                case PlayState.Stopped:
                    break;
                case PlayState.Paused:
                    break;
                case PlayState.Playing:
                    break;
                case PlayState.BufferingStarted:
                    break;
                case PlayState.BufferingStopped:
                    break;
                case PlayState.Rewinding:
                    break;
                case PlayState.FastForwarding:
                    break;
            }

            NotifyComplete();
        }
Example No. 6
        /// <summary>
        /// Called when the playstate changes, except for the Error state (see OnError)
        /// </summary>
        /// <param name="player">The BackgroundAudioPlayer</param>
        /// <param name="track">The track playing at the time the playstate changed</param>
        /// <param name="playState">The new playstate of the player</param>
        /// <remarks>
        /// Play State changes cannot be cancelled. They are raised even if the application
        /// caused the state change itself, assuming the application has opted-in to the callback.
        ///
        /// Notable playstate events:
        /// (a) TrackEnded: invoked when the player has no current track. The agent can set the next track.
        /// (b) TrackReady: an audio track has been set and it is now ready for playback.
        ///
        /// Call NotifyComplete() only once, after the agent request has been completed, including async callbacks.
        /// </remarks>
        protected override void OnPlayStateChanged(BackgroundAudioPlayer player, AudioTrack track, PlayState playState)
        {
            switch (playState)
            {
                case PlayState.TrackEnded:
                    //player.Track = GetPreviousTrack();
                    //player.Track = GetNextTrack();
                    PlayNextTrack(player);
                    break;
                case PlayState.TrackReady:
                    player.Play();
                    break;
                case PlayState.Shutdown:
                    // TODO: Handle the shutdown state here (e.g. save state)
                    break;
                case PlayState.Unknown:
                    break;
                case PlayState.Stopped:
                    break;
                case PlayState.Paused:
                    break;
                case PlayState.Playing:
                    break;
                case PlayState.BufferingStarted:
                    break;
                case PlayState.BufferingStopped:
                    break;
                case PlayState.Rewinding:
                    break;
                case PlayState.FastForwarding:
                    break;
            }

            NotifyComplete();
        }
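The TrackEnded case above hands off to PlayNextTrack(player), which is not included in this snippet. A minimal sketch of what such a helper could look like, assuming the agent keeps a static playlist and index (names here are illustrative, not from the original source):

        // Sketch (assumption): advance a static playlist and assign the next track;
        // OnPlayStateChanged will then receive TrackReady and call player.Play().
        private static List<AudioTrack> _playList = new List<AudioTrack>();
        private static int currentTrackNumber;

        private void PlayNextTrack(BackgroundAudioPlayer player)
        {
            if (_playList.Count == 0)
            {
                return; // nothing queued
            }

            currentTrackNumber = (currentTrackNumber + 1) % _playList.Count;
            player.Track = _playList[currentTrackNumber];
        }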
    public void Start()
    {
        _currentTrack = AudioTrack.RabbitHole;
        _currentSourceIndex = 0;

        StartCoroutine("PlayPark");
    }
        protected override void OnUserAction(BackgroundAudioPlayer player, AudioTrack track, UserAction action, object param)
        {
            ShellTile mainTile = ShellTile.ActiveTiles.FirstOrDefault();
            switch (action)
            {
                case UserAction.Play:
                    if (PlayState.Paused == player.PlayerState)
                    {
                        player.Play();

                        mainTile.Update(new StandardTileData
                        {
                            BackContent = "Play"
                        });

                    }
                    break;

                case UserAction.Pause:
                    player.Pause();

                    mainTile.Update(new StandardTileData
                    {
                        BackContent = "Pause"
                    });
                    break;
            }
            NotifyComplete();
        }
 /// <summary>
 /// Called when the user requests an action using system-provided UI and the application has requested
 /// notifications of the action
 /// </summary>
 /// <param name="player">The BackgroundAudioPlayer</param>
 /// <param name="track">The track playing at the time of the user action</param>
 /// <param name="action">The action the user has requested</param>
 /// <param name="param">The data associated with the requested action.
 /// In the current version this parameter is only for use with the Seek action,
 /// to indicate the requested position of an audio track</param>
 /// <remarks>
 /// User actions do not automatically make any changes in system state; the agent is responsible
 /// for carrying out the user actions if they are supported
 /// </remarks>
 protected override void OnUserAction(BackgroundAudioPlayer player, AudioTrack track, UserAction action, object param)
 {
     if (action == UserAction.Play)
         player.Play();
     else if (action == UserAction.Pause)
         player.Pause();
     NotifyComplete();
 }
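The remarks note that param is only used with the Seek action, which the minimal handler above does not cover. A sketch of the same handler extended to honor Seek (an assumption here; Example No. 31 below shows the equivalent case in a fuller agent):

 // Sketch (assumption): same handler, extended so that a Seek request moves the
 // playback position; param carries the requested position as a TimeSpan.
 protected override void OnUserAction(BackgroundAudioPlayer player, AudioTrack track, UserAction action, object param)
 {
     if (action == UserAction.Play)
         player.Play();
     else if (action == UserAction.Pause)
         player.Pause();
     else if (action == UserAction.Seek && param is TimeSpan)
         player.Position = (TimeSpan)param;
     NotifyComplete();
 }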
Example No. 10
File: Intro.cs Project: yheno/osu
        private void load(AudioManager audio)
        {
            welcome = audio.Sample.Get(@"welcome");
            seeya = audio.Sample.Get(@"seeya");

            bgm = audio.Track.Get(@"circles");
            bgm.Looping = true;
        }
Example No. 11
		public virtual void stopProgressTone()
		{
			if (mProgressTone != null)
			{
				mProgressTone.stop();
				mProgressTone.release();
				mProgressTone = null;
			}
		}
 protected override void OnPlayStateChanged(BackgroundAudioPlayer player, AudioTrack track, PlayState playState)
 {
     switch (playState)
     {
         case PlayState.TrackReady:
             player.Play();
             break;
     }
     NotifyComplete();
 }
        public bool GetTrackOption(AudioTrack option)
        {
            int index = (int)option;
            Debug.Assert((index >= 0) && (index < AudioPlugin2DViewSettings.cAudioTrackCount));

            lock (AudioPlugin2DViewSettings.lockObj)
            {
                return this.options[index];
            }
        }
Example No. 14
        protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
        {
            System.Diagnostics.Debug.WriteLine("OnBeginStreaming");
            MidiStreamSource mss = new MidiStreamSource();

            // Event handler for when a track is complete or the user switches tracks
            mss.StreamComplete += new EventHandler(mss_StreamComplete);

            // Set the source
            streamer.SetSource(mss);
        }
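The mss_StreamComplete handler wired up above is not part of this snippet. In the stock streaming-agent pattern it usually just signals that the agent is done with the request; a minimal sketch under that assumption:

        // Sketch (assumption): raised when the MIDI stream finishes or the user switches
        // tracks; completing the request here ends this streaming-agent invocation.
        private void mss_StreamComplete(object sender, EventArgs e)
        {
            NotifyComplete();
        }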
Example No. 15
 /// <summary>
 /// Called whenever there is an error with playback, such as an AudioTrack not downloading correctly
 /// </summary>
 /// <param name="player">The BackgroundAudioPlayer</param>
 /// <param name="track">The track that had the error</param>
 /// <param name="error">The error that occured</param>
 /// <param name="isFatal">If true, playback cannot continue and playback of the track will stop</param>
 /// <remarks>
 /// This method is not guaranteed to be called in all cases. For example, if the background agent
 /// itself has an unhandled exception, it won't get called back to handle its own errors.
 /// </remarks>
 protected override void OnError(BackgroundAudioPlayer player, AudioTrack track, Exception error, bool isFatal)
 {
     if (isFatal)
     {
         Abort();
     }
     else
     {
         NotifyComplete();
     }
 }
Example No. 16
        /// <summary>
        /// Called when a new track requires audio decoding
        /// (typically because it is about to start playing)
        /// </summary>
        /// <param name="track">
        /// The track that needs audio streaming
        /// </param>
        /// <param name="streamer">
        /// The AudioStreamer object to which a MediaStreamSource should be
        /// attached to commence playback
        /// </param>
        /// <remarks>
        /// To invoke this method for a track, set the Source parameter of the AudioTrack to null
        /// before setting it into the Track property of the BackgroundAudioPlayer instance;
        /// otherwise it is assumed that the system will perform all streaming
        /// and decoding
        /// </remarks>
        protected override void OnBeginStreaming(AudioTrack track, Microsoft.Phone.BackgroundAudio.AudioStreamer streamer)
        {
            //using (var iso = IsolatedStorageFile.GetUserStoreForApplication())
            //{
            var iso = IsolatedStorageFile.GetUserStoreForApplication();
            string filePath = track.Tag;
            var exist = iso.FileExists(filePath);
            streamer.SetSource(new Mp3MediaStreamSource(iso.OpenFile(filePath, FileMode.Open, FileAccess.Read)));
            //}

            NotifyComplete();
        }
Example No. 17
		public virtual void playProgressTone()
		{
			stopProgressTone();
			try
			{
				mProgressTone = createProgressTone(mContext);
				mProgressTone.play();
			}
			catch (Exception e)
			{
				Log.e(LOG_TAG, "Could not play progress tone", e);
			}
		}
Example No. 18
 /// <remarks>
 /// AudioPlayer instances can share the same process. 
 /// Static fields can be used to share state between AudioPlayer instances
 /// or to communicate with the Audio Streaming agent.
 /// </remarks>
 public AudioPlayer()
 {
     audio = new AudioTrack();
     if (!_classInitialized)
     {
         _classInitialized = true;
         // Subscribe to the managed exception handler
         Deployment.Current.Dispatcher.BeginInvoke(delegate
         {
             Application.Current.UnhandledException += AudioPlayer_UnhandledException;
         });
     }
 }
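The remarks above point out that AudioPlayer instances share one process, so static fields can carry state between instances or over to the audio streaming agent. A minimal illustration of that idea (field and method names are assumptions, not from the original snippet):

 // Sketch (assumption): static state survives across AudioPlayer instantiations because
 // an app's background audio agents run in the same process.
 private static int _currentTrackIndex;                     // e.g. shared playlist position
 private static readonly object _stateLock = new object();  // guard concurrent agent callbacks

 private static void AdvanceTrackIndex(int playlistLength)
 {
     if (playlistLength <= 0)
         return;

     lock (_stateLock)
     {
         _currentTrackIndex = (_currentTrackIndex + 1) % playlistLength;
     }
 }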
Example No. 19
        /// <summary>
        /// Called when the playstate changes, except for the Error state (see OnError)
        /// </summary>
        /// <param name="player">The BackgroundAudioPlayer</param>
        /// <param name="track">The track playing at the time the playstate changed</param>
        /// <param name="playState">The new playstate of the player</param>
        /// <remarks>
        /// Play State changes cannot be cancelled. They are raised even if the application
        /// caused the state change itself, assuming the application has opted-in to the callback.
        ///
        /// Notable playstate events:
        /// (a) TrackEnded: invoked when the player has no current track. The agent can set the next track.
        /// (b) TrackReady: an audio track has been set and it is now ready for playback.
        ///
        /// Call NotifyComplete() only once, after the agent request has been completed, including async callbacks.
        /// </remarks>
        protected override void OnPlayStateChanged(BackgroundAudioPlayer player, AudioTrack track, PlayState playState)
        {
            switch (playState)
            {
                case PlayState.TrackEnded:
                    //player.Track = GetPreviousTrack();
                    player.Track = null;
                    break;
                case PlayState.TrackReady:
                    player.Play();
                    connectedEvent.Set();

                    if (track != null && track.Tag != null)
                    {
                        var data = track.Tag.ToString().Split('$');
                        var url = data[data.Length - 1];
                        var title = data[1];
                        var type = data[2];

                        //#region from http://stackoverflow.com/questions/7159900/detect-application-launch-from-universal-volume-control
                        //MediaHistoryItem nowPlaying = new MediaHistoryItem();
                        //nowPlaying.Title = title;
                        //nowPlaying.PlayerContext.Add("station", title);
                        //MediaHistory.Instance.NowPlaying = nowPlaying;
                        //#endregion
                    }
                    break;
                case PlayState.Shutdown:
                    // TODO: Handle the shutdown state here (e.g. save state)
                    break;
                case PlayState.Unknown:
                    break;
                case PlayState.Stopped:
                    break;
                case PlayState.Paused:
                    break;
                case PlayState.Playing:
                    break;
                case PlayState.BufferingStarted:
                    break;
                case PlayState.BufferingStopped:
                    break;
                case PlayState.Rewinding:
                    break;
                case PlayState.FastForwarding:
                    break;
            }

            NotifyComplete();
        }
 private void handleDragInteraction(Rect position, AudioTrack track, Vector2 translation, Vector2 scale)
 {
     Rect controlBackground = new Rect(0, 0, position.width, position.height);
     switch (Event.current.type)
     {
         case EventType.DragUpdated:
             if (controlBackground.Contains(Event.current.mousePosition))
             {
                 bool audioFound = false;
                 foreach (Object objectReference in DragAndDrop.objectReferences)
                 {
                     AudioClip clip = objectReference as AudioClip;
                     if (clip != null)
                     {
                         audioFound = true;
                         break;
                     }
                 }
                 if (audioFound)
                 {
                     DragAndDrop.visualMode = DragAndDropVisualMode.Link;
                     Event.current.Use();
                 }
             }
             break;
         case EventType.DragPerform:
             if (controlBackground.Contains(Event.current.mousePosition))
             {
                 AudioClip clip = null;
                 foreach (Object objectReference in DragAndDrop.objectReferences)
                 {
                     AudioClip audioClip = objectReference as AudioClip;
                     if (audioClip != null)
                     {
                         clip = audioClip;
                         break;
                     }
                 }
                 if (clip != null)
                 {
                     float fireTime = (Event.current.mousePosition.x - translation.x) / scale.x;
                     CinemaAudio ca = CutsceneItemFactory.CreateCinemaAudio(track, clip, fireTime);
                     Undo.RegisterCreatedObjectUndo(ca, string.Format("Created {0}", ca.name));
                     Event.current.Use();
                 }
             }
             break;
     }
 }
Example No. 21
        /// <summary>
        /// Called when the playstate changes, except for the Error state (see OnError)
        /// </summary>
        /// <param name="player">The BackgroundAudioPlayer</param>
        /// <param name="track">The track playing at the time the playstate changed</param>
        /// <param name="playState">The new playstate of the player</param>
        /// <remarks>
        /// Play State changes cannot be cancelled. They are raised even if the application
        /// caused the state change itself, assuming the application has opted-in to the callback.
        /// 
        /// Notable playstate events: 
        /// (a) TrackEnded: invoked when the player has no current track. The agent can set the next track.
        /// (b) TrackReady: an audio track has been set and it is now ready for playback.
        /// 
        /// Call NotifyComplete() only once, after the agent request has been completed, including async callbacks.
        /// </remarks>
        protected override void OnPlayStateChanged(BackgroundAudioPlayer player, AudioTrack track, PlayState playState)
        {
            switch (playState)
            {
                case PlayState.TrackReady:
                    // Playback was already set up in PlayTrack()
                    player.Play();
                    break;
                case PlayState.TrackEnded:
                    PlayNextTrack(player);
                    break;
            }

            NotifyComplete();
        }
Example No. 22
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: private static android.media.AudioTrack createProgressTone(android.content.Context context) throws java.io.IOException
		private static AudioTrack createProgressTone(Context context)
		{
			AssetFileDescriptor fd = context.Resources.openRawResourceFd(R.raw.progress_tone);
			int length = (int) fd.Length;

			AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL, SAMPLE_RATE, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, length, AudioTrack.MODE_STATIC);

			sbyte[] data = new sbyte[length];
			readFileToBytes(fd, data);

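			// MODE_STATIC: the whole tone is preloaded into the track's buffer below.
			// With 16-bit mono PCM the frame count is data.Length / 2; loop the full buffer 30 times.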
			audioTrack.write(data, 0, data.Length);
			audioTrack.setLoopPoints(0, data.Length / 2, 30);

			return audioTrack;
		}
Example No. 23
        /// <summary>
        /// Called when the user requests an action using system-provided UI and the application has requested
        /// notifications of the action
        /// </summary>
        /// <param name="player">The BackgroundAudioPlayer</param>
        /// <param name="track">The track playing at the time of the user action</param>
        /// <param name="action">The action the user has requested</param>
        /// <param name="param">The data associated with the requested action.
        /// In the current version this parameter is only for use with the Seek action,
        /// to indicate the requested position of an audio track</param>
        /// <remarks>
        /// User actions do not automatically make any changes in system state; the agent is responsible
        /// for carrying out the user actions if they are supported
        /// </remarks>
        protected override void OnUserAction(BackgroundAudioPlayer player, AudioTrack track, UserAction action,
                                             object param)
        {
            switch (action)
            {
                case UserAction.Play:
                    playTrack(player);
                    break;

                case UserAction.Stop:
                    player.Stop();
                    break;
            }

            NotifyComplete();
        }
    public void MuteChannel(AudioTrack.ChannelType channelType)
    {
        if(m_mutedChannels.IndexOf(channelType) != -1) return;

        int numTracks = m_tracks.Count;
        for(int i=0; i<numTracks; i++)
        {
            AudioTrack audioTrack = m_tracks[i];
            if(audioTrack.Type == channelType)
            {
                audioTrack.Mute();
            }
        }

        m_mutedChannels.Add(channelType);
    }
Example No. 25
        /// <summary>
        /// Called when a new track requires audio decoding
        /// (typically because it is about to start playing)
        /// </summary>
        /// <param name="track">
        /// The track that needs audio streaming
        /// </param>
        /// <param name="streamer">
        /// The AudioStreamer object to which a MediaStreamSource should be
        /// attached to commence playback
        /// </param>
        /// <remarks>
        /// To invoke this method for a track, set the Source parameter of the AudioTrack to null
        /// before setting it into the Track property of the BackgroundAudioPlayer instance;
        /// otherwise it is assumed that the system will perform all streaming
        /// and decoding
        /// </remarks>
        protected override void OnBeginStreaming(AudioTrack track, AudioStreamer streamer)
        {
            // Set the source of streamer to a media stream source
            double freq = Convert.ToDouble(track.Tag);

            /* Windev: Uncomment the code below if you want to listen to the original sine waves.

            // Use sine wave audio generator to simulate a streaming audio feed
            SineMediaStreamSource mss = new SineMediaStreamSource(freq, 1.0, TimeSpan.FromSeconds(5));

            // Event handler for when a track is complete or the user switches tracks
            mss.StreamComplete += new EventHandler(mss_StreamComplete);

            // Set the source
            streamer.SetSource(mss); */
        }
Example No. 26
        // Constructor
        public MainPage()
        {
            InitializeComponent();

            CopyTrackToIsoStorage();

            // initialize audio track
            audioTrack = new AudioTrack(new Uri("RainyMood.mp3", UriKind.Relative),
                "Rainy mood",
                null,
                null,
                null,
                null,
                EnabledPlayerControls.Pause);

            // BackgroundAudioPlayer.Instance.Track = audioTrack;
        }
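CopyTrackToIsoStorage() is referenced above but not shown. A minimal sketch of what it might do, assuming RainyMood.mp3 ships as a content resource in the app package (illustrative, not the original implementation):

        // Sketch (assumption): copy the bundled MP3 into isolated storage so the relative
        // track URI used above can be resolved by the background audio player.
        private void CopyTrackToIsoStorage()
        {
            using (var iso = IsolatedStorageFile.GetUserStoreForApplication())
            {
                if (iso.FileExists("RainyMood.mp3"))
                    return;

                var resource = Application.GetResourceStream(new Uri("RainyMood.mp3", UriKind.Relative));
                using (var input = resource.Stream)
                using (var output = iso.CreateFile("RainyMood.mp3"))
                {
                    var buffer = new byte[4096];
                    int read;
                    while ((read = input.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        output.Write(buffer, 0, read);
                    }
                }
            }
        }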
        protected override void OnPlayStateChanged(BackgroundAudioPlayer player, AudioTrack track, PlayState playState)
        {
            switch (playState)
            {
                case PlayState.TrackEnded:
                    player.Close();
                    break;

                case PlayState.TrackReady:
                    player.Volume = 1.0;
                    player.Play();
                    break;

                case PlayState.Shutdown:
                    // TODO: Handle the shutdown state here (e.g. save state)
                    break;

                case PlayState.Unknown:
                    break;

                case PlayState.Stopped:
                    break;

                case PlayState.Paused:
                    break;

                case PlayState.Playing:
                    break;

                case PlayState.BufferingStarted:
                    break;

                case PlayState.BufferingStopped:
                    break;

                case PlayState.Rewinding:
                    break;

                case PlayState.FastForwarding:
                    break;
            }

            NotifyComplete();
        }
Example No. 28
        /// <summary>
        /// Called when the playstate changes, except for the Error state (see OnError)
        /// </summary>
        /// <param name="player">The BackgroundAudioPlayer</param>
        /// <param name="track">The track playing at the time the playstate changed</param>
        /// <param name="playState">The new playstate of the player</param>
        /// <remarks>
        /// Play State changes cannot be cancelled. They are raised even if the application
        /// caused the state change itself, assuming the application has opted-in to the callback.
        /// 
        /// Notable playstate events: 
        /// (a) TrackEnded: invoked when the player has no current track. The agent can set the next track.
        /// (b) TrackReady: an audio track has been set and it is now ready for playback.
        /// 
        /// Call NotifyComplete() only once, after the agent request has been completed, including async callbacks.
        /// </remarks>
        protected override void OnPlayStateChanged(BackgroundAudioPlayer player, AudioTrack track, PlayState playState)
        {
            switch (playState)
            {
                case PlayState.TrackEnded:
                    //player.Track = GetPreviousTrack();
                    if (currentTrack < playlist.Count - 1)
                    {
                        currentTrack += 1;
                        player.Track = playlist[currentTrack];
                    }
                    else
                    {
                        player.Track = null;
                    }
                    break;
                case PlayState.TrackReady:
                    player.Play();
                    break;
                case PlayState.Shutdown:
                    // TODO: Handle the shutdown state here (e.g. save state)
                    break;
                case PlayState.Unknown:
                    break;
                case PlayState.Stopped:
                    break;
                case PlayState.Paused:
                    break;
                case PlayState.Playing:
                    break;
                case PlayState.BufferingStarted:
                    break;
                case PlayState.BufferingStopped:
                    break;
                case PlayState.Rewinding:
                    break;
                case PlayState.FastForwarding:
                    break;
            }

            NotifyComplete();
        }
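The playlist and currentTrack members used in the TrackEnded case above are not shown in this snippet; presumably they are agent fields along these lines (an illustrative assumption, with names taken from their usage):

        // Assumed fields referenced by the handler above.
        private static List<AudioTrack> playlist = new List<AudioTrack>();
        private static int currentTrack;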
    public IEnumerator PlayPark()
    {
        if (_currentTrack == AudioTrack.Park)
            yield break;

        _currentTrack = AudioTrack.Park;

        foreach (var audioClip in ParkSources)
        {
            audioClip.Play();
        }

        var crossFading = true;

        while (crossFading)
        {
            crossFading = false;

            foreach (var audioSource in ParkSources)
            {
                Fade(audioSource, FadeDirection.In, FadeSpeed);

                if (audioSource.volume < 1.0f)
                    crossFading = true;
            }

            foreach (var audioSource in RabbitHoleSources)
            {
                Fade(audioSource, FadeDirection.Out, FadeSpeed);

                if (audioSource.volume > 0.0f)
                    crossFading = true;
            }

            yield return null;
        }

        foreach (var audioClip in RabbitHoleSources)
        {
            audioClip.Stop();
        }
    }
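Fade, FadeDirection and FadeSpeed are not part of this snippet. A minimal sketch of a compatible helper, stepping each source's volume once per call so the while-loop above drives the cross-fade (an illustrative assumption, not the original implementation):

    // Sketch (assumption): move the volume one step toward its target; the caller keeps
    // invoking this every frame until all sources report the target volume.
    private enum FadeDirection { In, Out }

    private void Fade(AudioSource audioSource, FadeDirection direction, float fadeSpeed)
    {
        float target = direction == FadeDirection.In ? 1.0f : 0.0f;
        audioSource.volume = Mathf.MoveTowards(audioSource.volume, target, fadeSpeed * Time.deltaTime);
    }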
Example No. 30
 /// <summary>
 /// Called when the user requests an action using application/system provided UI
 /// </summary>
 /// <param name="player">The BackgroundAudioPlayer</param>
 /// <param name="track">The track playing at the time of the user action</param>
 /// <param name="action">The action the user has requested</param>
 /// <param name="param">The data associated with the requested action.
 /// In the current version this parameter is only for use with the Seek action,
 /// to indicate the requested position of an audio track</param>
 /// <remarks>
 /// User actions do not automatically make any changes in system state; the agent is responsible
 /// for carrying out the user actions if they are supported.
 /// 
 /// Call NotifyComplete() only once, after the agent request has been completed, including async callbacks.
 /// </remarks>
 protected override void OnUserAction(BackgroundAudioPlayer player, AudioTrack track, UserAction action, object param)
 {
     switch (action)
     {
         case UserAction.Play:
             PlayTrack(player);
             break;
         case UserAction.Pause:
             player.Pause();
             break;
         case UserAction.SkipPrevious:
             PlayPreviousTrack(player);
             break;
         case UserAction.SkipNext:
             PlayNextTrack(player);
             break;
     }
     
     NotifyComplete();
 }
Example No. 31
        /// <summary>
        /// Called when the user requests an action using application/system provided UI
        /// </summary>
        /// <param name="player">The BackgroundAudioPlayer</param>
        /// <param name="track">The track playing at the time of the user action</param>
        /// <param name="action">The action the user has requested</param>
        /// <param name="param">The data associated with the requested action.
        /// In the current version this parameter is only for use with the Seek action,
        /// to indicate the requested position of an audio track</param>
        /// <remarks>
        /// User actions do not automatically make any changes in system state; the agent is responsible
        /// for carrying out the user actions if they are supported.
        ///
        /// Call NotifyComplete() only once, after the agent request has been completed, including async callbacks.
        /// </remarks>
        protected override async void OnUserAction(BackgroundAudioPlayer player, AudioTrack track, UserAction action, object param)
        {
            await ConfigureThePlayer();

            switch (action)
            {
            case UserAction.Play:
                if (player.PlayerState != PlayState.Playing)
                {
                    _logger.Info("OnUserAction.Play");
                    player.Play();
                }
                break;

            case UserAction.Stop:
                _logger.Info("OnUserAction.Stop");
                player.Stop();
                break;

            case UserAction.Pause:
                _logger.Info("OnUserAction.Pause");
                player.Pause();
                break;

            case UserAction.FastForward:
                _logger.Info("OnUserAction.FastForward");
                player.FastForward();
                break;

            case UserAction.Rewind:
                _logger.Info("OnUserAction.Rewind");
                player.Rewind();
                break;

            case UserAction.Seek:
                player.Position = (TimeSpan)param;
                break;

            case UserAction.SkipNext:
                _logger.Info("OnUserAction.SkipNext");
                var nextTrack = await GetNextTrack();

                if (nextTrack != null)
                {
                    player.Track = nextTrack;
                }
                await InformOfPlayingTrack();

                break;

            case UserAction.SkipPrevious:
                _logger.Info("OnUserAction.SkipPrevious");
                var previousTrack = await GetPreviousTrack();

                if (previousTrack != null)
                {
                    player.Track = previousTrack;
                }
                await InformOfPlayingTrack();

                break;
            }

            NotifyComplete();
        }
Example No. 32
 public CFA(AudioTrack audioTrack)
     : this(audioTrack, DEFAULT_THRESHOLD, true, false)
 {
     // nothing to do here
 }
        /// <summary>
        /// Takes in a query which can be in any order and parses it.
        /// All variables are then set so they can be used elsewhere.
        /// </summary>
        /// <param name="input">A CLI query</param>
        /// <returns>A parsed query</returns>
        public static EncodeTask Parse(string input)
        {
            var parsed = new EncodeTask();

            #region Regular Expressions

            // Source
            Match title    = Regex.Match(input, @"-t ([0-9]+)");
            Match chapters = Regex.Match(input, @"-c ([0-9-]+)");

            // Output Settings
            Match format    = Regex.Match(input, @"-f ([a-zA-Z0-9]+)");
            Match grayscale = Regex.Match(input, @" -g");
            Match ipodAtom  = Regex.Match(input, @" -I");

            // Picture Settings Tab
            Match width     = Regex.Match(input, @"-w ([0-9]+)");
            Match height    = Regex.Match(input, @"-l ([0-9]+)");
            Match maxWidth  = Regex.Match(input, @"-X ([0-9]+)");
            Match maxHeight = Regex.Match(input, @"-Y ([0-9]+)");
            Match crop      = Regex.Match(input, @"--crop ([0-9]*):([0-9]*):([0-9]*):([0-9]*)");

            Match looseAnamorphic  = Regex.Match(input, @"--loose-anamorphic");
            Match strictAnamorphic = Regex.Match(input, @"--strict-anamorphic");
            Match customAnamorphic = Regex.Match(input, @"--custom-anamorphic");

            Match keepDisplayAsect = Regex.Match(input, @"--keep-display-aspect");
            Match displayWidth     = Regex.Match(input, @"--display-width ([0-9]*)");
            Match pixelAspect      = Regex.Match(input, @"--pixel-aspect ([0-9]*):([0-9]*)");
            Match modulus          = Regex.Match(input, @"--modulus ([0-9]*)");

            // Picture Settings - Filters
            Match decomb          = Regex.Match(input, @" --decomb");
            Match decombValue     = Regex.Match(input, @" --decomb=([a-zA-Z0-9.:""\\]*)");
            Match deinterlace     = Regex.Match(input, @"--deinterlace=\""([a-zA-Z0-9.:]*)\""");
            Match denoise         = Regex.Match(input, @"--denoise=\""([a-zA-Z0-9.:]*)\""");
            Match deblock         = Regex.Match(input, @"--deblock=([0-9:]*)");
            Match detelecine      = Regex.Match(input, @"--detelecine");
            Match detelecineValue = Regex.Match(input, @" --detelecine=\""([a-zA-Z0-9.:]*)\""");

            // Video Settings Tab
            Match videoEncoder   = Regex.Match(input, @"-e ([a-zA-Z0-9]+)");
            Match videoFramerate = Regex.Match(input, @"-r ([0-9.]+)");
            Match videoBitrate   = Regex.Match(input, @"-b ([0-9]+)");
            Match videoQuality   = Regex.Match(input, @"-q ([0-9.]+)");
            Match twoPass        = Regex.Match(input, @" -2");
            Match turboFirstPass = Regex.Match(input, @" -T");
            Match optimizeMP4    = Regex.Match(input, @" -O");
            Match pfr            = Regex.Match(input, @" --pfr");
            Match cfr            = Regex.Match(input, @" --cfr");

            // Audio Settings Tab
            Match audioTrackMixes  = Regex.Match(input, @"-6 ([0-9a-zA-Z,]+)");
            Match audioEncoders    = Regex.Match(input, @"-E ([a-zA-Z0-9+,:\*]+)");
            Match audioBitrates    = Regex.Match(input, @"-B ([0-9a-zA-Z,]+)");  // Auto = a-z
            Match audioSampleRates = Regex.Match(input, @"-R ([0-9a-zA-Z.,]+)"); // Auto = a-z
            Match drcValues        = Regex.Match(input, @"-D ([0-9.,]+)");
            Match gainValues       = Regex.Match(input, @"--gain=([0-9.,-]+)");
            Match fallbackEncoder  = Regex.Match(input, @"--audio-fallback([a-zA-Z0-9:=\s ]*)");
            Match allowedPassthru  = Regex.Match(input, @"--audio-copy-mask([a-zA-Z0-9:,=\s ]*)");

            // Chapters Tab
            Match chapterMarkers         = Regex.Match(input, @" -m");
            Match chapterMarkersFileMode = Regex.Match(input, @"--markers");

            // Advanced Tab
            Match advanced    = Regex.Match(input, @"-x ([.,/a-zA-Z0-9=:-]*)");
            Match x264Preset  = Regex.Match(input, @"--x264-preset([=a-zA-Z0-9\s ]*)");
            Match x264Tune    = Regex.Match(input, @"--x264-tune([=,a-zA-Z0-9\s ]*)");
            Match h264Profile = Regex.Match(input, @"--h264-profile([=a-zA-Z0-9\s ]*)");
            Match x264Profile = Regex.Match(input, @"--x264-profile([=a-zA-Z0-9\s ]*)");
            Match h264Level   = Regex.Match(input, @"--h264-level([=a-zA-Z0-9.\s ]*)");

            Match x265Profile = Regex.Match(input, @"--x265-profile([=a-zA-Z0-9\s ]*)");
            Match x265Tune    = Regex.Match(input, @"--x265-tune([=,a-zA-Z0-9\s ]*)");
            Match x265Preset  = Regex.Match(input, @"--x265-preset([=a-zA-Z0-9\s ]*)");
            #endregion

            #region Set Variables

            try
            {
                #region Source Tab

                if (title.Success)
                {
                    parsed.Title = int.Parse(title.ToString().Replace("-t ", string.Empty));
                }

                if (chapters.Success)
                {
                    parsed.PointToPointMode = PointToPointMode.Chapters;
                    string[] actTitles = chapters.ToString().Replace("-c ", string.Empty).Split('-');
                    parsed.StartPoint = int.Parse(actTitles[0]);
                    if (actTitles.Length > 1)
                    {
                        parsed.EndPoint = int.Parse(actTitles[1]);
                    }

                    if ((parsed.StartPoint == 1) && (parsed.EndPoint == 0))
                    {
                        parsed.EndPoint = parsed.StartPoint;
                    }
                }

                #endregion

                #region Output Settings

                if (format.Success)
                {
                    parsed.OutputFormat = Converters.GetFileFormat(format.Groups[1].ToString());
                }
                parsed.IPod5GSupport = ipodAtom.Success;
                parsed.OptimizeMP4   = optimizeMP4.Success;

                #endregion

                #region Picture Tab

                if (width.Success)
                {
                    parsed.Width = int.Parse(width.Groups[0].Value.Replace("-w ", string.Empty));
                }

                if (height.Success)
                {
                    parsed.Height = int.Parse(height.Groups[0].Value.Replace("-l ", string.Empty));
                }

                if (maxWidth.Success)
                {
                    parsed.MaxWidth = int.Parse(maxWidth.Groups[0].Value.Replace("-X ", string.Empty));
                }

                if (maxHeight.Success)
                {
                    parsed.MaxHeight = int.Parse(maxHeight.Groups[0].Value.Replace("-Y ", string.Empty));
                }

                if (crop.Success)
                {
                    try
                    {
                        string   values        = crop.ToString().Replace("--crop ", string.Empty);
                        string[] actCropValues = values.Split(':');
                        parsed.Cropping = new Cropping(
                            int.Parse(actCropValues[0]),
                            int.Parse(actCropValues[1]),
                            int.Parse(actCropValues[2]),
                            int.Parse(actCropValues[3]));
                        parsed.HasCropping = true;
                    }
                    catch (Exception)
                    {
                        parsed.Cropping    = null;
                        parsed.HasCropping = false;
                        // No need to do anything.
                    }
                }

                if (strictAnamorphic.Success)
                {
                    parsed.Anamorphic = Anamorphic.Strict;
                }
                else if (looseAnamorphic.Success)
                {
                    parsed.Anamorphic = Anamorphic.Loose;
                }
                else if (customAnamorphic.Success)
                {
                    parsed.Anamorphic = Anamorphic.Custom;
                }
                else
                {
                    parsed.Anamorphic = Anamorphic.None;
                }

                parsed.KeepDisplayAspect = keepDisplayAsect.Success;

                if (displayWidth.Success)
                {
                    parsed.DisplayWidth =
                        double.Parse(displayWidth.Groups[0].Value.Replace("--display-width ", string.Empty), Culture);
                }

                if (pixelAspect.Success)
                {
                    parsed.PixelAspectX = int.Parse(pixelAspect.Groups[1].Value.Replace("--pixel-aspect ", string.Empty));
                }

                if (pixelAspect.Success && pixelAspect.Groups.Count >= 3)
                {
                    parsed.PixelAspectY = int.Parse(pixelAspect.Groups[2].Value.Replace("--pixel-aspect ", string.Empty));
                }

                if (modulus.Success)
                {
                    parsed.Modulus = int.Parse(modulus.Groups[0].Value.Replace("--modulus ", string.Empty));
                }

                #endregion

                #region Filters

                parsed.Decomb = Decomb.Off;
                if (decomb.Success)
                {
                    parsed.Decomb = Decomb.Default;
                    if (decombValue.Success)
                    {
                        string value = decombValue.ToString().Replace("--decomb=", string.Empty).Replace("\"", string.Empty).Trim();

                        if (value == "bob")
                        {
                            parsed.Decomb = Decomb.Bob;
                        }
                        else if (value == "fast")
                        {
                            parsed.Decomb = Decomb.Fast;
                        }
                        else
                        {
                            parsed.CustomDecomb = value;
                            parsed.Decomb       = parsed.CustomDecomb == "7:2:6:9:1:80" ? Decomb.Fast : Decomb.Custom;
                        }
                    }
                }

                parsed.Deinterlace = Deinterlace.Off;
                if (deinterlace.Success)
                {
                    switch (deinterlace.ToString().Replace("--deinterlace=", string.Empty).Replace("\"", string.Empty).ToLower())
                    {
                    case "fast":
                        parsed.Deinterlace = Deinterlace.Fast;
                        break;

                    case "slow":
                        parsed.Deinterlace = Deinterlace.Slow;
                        break;

                    case "slower":
                        parsed.Deinterlace = Deinterlace.Slower;
                        break;

                    case "bob":
                        parsed.Deinterlace = Deinterlace.Bob;
                        break;

                    default:
                        parsed.Deinterlace       = Deinterlace.Custom;
                        parsed.CustomDeinterlace = deinterlace.ToString().Replace("--deinterlace=", string.Empty).Replace("\"", string.Empty).ToLower();
                        break;
                    }
                }

                parsed.Denoise = Denoise.Off;
                if (denoise.Success)
                {
                    switch (denoise.ToString().Replace("--denoise=", string.Empty).Replace("\"", string.Empty))
                    {
                    case "weak":
                        parsed.Denoise = Denoise.Weak;
                        break;

                    case "medium":
                        parsed.Denoise = Denoise.Medium;
                        break;

                    case "strong":
                        parsed.Denoise = Denoise.Strong;
                        break;

                    default:
                        parsed.Denoise       = Denoise.Custom;
                        parsed.CustomDenoise = denoise.ToString().Replace("--denoise=", string.Empty).Replace("\"", string.Empty);
                        break;
                    }
                }

                parsed.Deblock = 0;
                if (deblock.Success)
                {
                    int dval;
                    int.TryParse(deblock.ToString().Replace("--deblock=", string.Empty), out dval);
                    parsed.Deblock = dval;
                }

                parsed.Detelecine = Detelecine.Off;
                if (detelecine.Success)
                {
                    parsed.Detelecine = Detelecine.Default;
                    if (detelecineValue.Success)
                    {
                        parsed.CustomDetelecine = detelecineValue.ToString().Replace("--detelecine=", string.Empty).Replace("\"", string.Empty);
                        parsed.Detelecine       = Detelecine.Custom;
                    }
                }

                #endregion

                #region Video Settings Tab

                parsed.VideoEncoder = videoEncoder.Success
                                          ? Converters.GetVideoEncoder(videoEncoder.ToString().Replace("-e ", string.Empty))
                                          : VideoEncoder.FFMpeg;

                if (videoFramerate.Success)
                {
                    double fps;
                    double.TryParse(videoFramerate.Groups[1].ToString(), NumberStyles.Any, CultureInfo.InvariantCulture, out fps);
                    parsed.Framerate = fps;
                }

                if (pfr.Success)
                {
                    parsed.FramerateMode = FramerateMode.PFR;
                }
                else if (cfr.Success)
                {
                    parsed.FramerateMode = FramerateMode.CFR;
                }
                else
                {
                    parsed.FramerateMode = FramerateMode.VFR; // Default to VFR
                }
                parsed.Grayscale      = grayscale.Success;
                parsed.TwoPass        = twoPass.Success;
                parsed.TurboFirstPass = turboFirstPass.Success;

                if (videoBitrate.Success)
                {
                    parsed.VideoEncodeRateType = VideoEncodeRateType.AverageBitrate;
                    parsed.VideoBitrate        = int.Parse(videoBitrate.ToString().Replace("-b ", string.Empty));
                }

                if (videoQuality.Success)
                {
                    float quality = float.Parse(videoQuality.ToString().Replace("-q ", string.Empty), Culture);
                    parsed.Quality = quality;

                    parsed.VideoEncodeRateType = VideoEncodeRateType.ConstantQuality;
                }

                #endregion

                #region Audio Tab

                // Find out how many tracks we need to add by checking how many encoders or audio tracks are selected.
                int encoderCount = 0;
                if (audioEncoders.Success)
                {
                    string[] audioDataCounters = audioEncoders.ToString().Replace("-E ", string.Empty).Split(',');
                    encoderCount = audioDataCounters.Length;
                }

                // Get the data from the regular expression results
                string[] trackMixes       = null;
                string[] trackEncoders    = null;
                string[] trackBitrates    = null;
                string[] trackSamplerates = null;
                string[] trackDRCvalues   = null;
                string[] trackGainValues  = null;

                if (audioTrackMixes.Success)
                {
                    trackMixes = audioTrackMixes.ToString().Replace("-6 ", string.Empty).Split(',');
                }
                if (audioEncoders.Success)
                {
                    trackEncoders = audioEncoders.ToString().Replace("-E ", string.Empty).Split(',');
                }
                if (audioBitrates.Success)
                {
                    trackBitrates = audioBitrates.ToString().Replace("-B ", string.Empty).Split(',');
                }
                if (audioSampleRates.Success)
                {
                    trackSamplerates = audioSampleRates.ToString().Replace("-R ", string.Empty).Split(',');
                }
                if (drcValues.Success)
                {
                    trackDRCvalues = drcValues.ToString().Replace("-D ", string.Empty).Split(',');
                }
                if (gainValues.Success)
                {
                    trackGainValues = gainValues.ToString().Replace("--gain=", string.Empty).Split(',');
                }

                // Create new Audio Track Classes and store them in the ArrayList
                ObservableCollection <AudioTrack> allAudioTrackInfo = new ObservableCollection <AudioTrack>();
                for (int x = 0; x < encoderCount; x++)
                {
                    AudioTrack track = new AudioTrack();

                    if (trackMixes != null)
                    {
                        if (trackMixes.Length >= (x + 1)) // Audio Mix
                        {
                            track.MixDown = Converters.GetAudioMixDown(Converters.GetMixDown(trackMixes[x].Trim()));
                        }
                    }

                    if (trackEncoders != null)
                    {
                        if (trackEncoders.Length >= (x + 1)) // Audio Encoder
                        {
                            track.Encoder = Converters.GetAudioEncoderFromCliString(trackEncoders[x].Trim());
                        }
                    }

                    if (trackBitrates != null)
                    {
                        if (trackBitrates.Length >= (x + 1)) // Audio Bitrate
                        {
                            track.Bitrate = int.Parse(trackBitrates[x].Trim() == "auto" ? "0" : trackBitrates[x].Trim());
                        }
                    }

                    if (trackSamplerates != null)
                    {
                        if (trackSamplerates.Length >= (x + 1)) // Audio SampleRate
                        {
                            track.SampleRate = double.Parse(trackSamplerates[x].Replace("Auto", "0").Trim(), Culture);
                        }
                    }

                    if (trackDRCvalues != null)
                    {
                        if (trackDRCvalues.Length >= (x + 1)) // Audio DRC Values
                        {
                            track.DRC = double.Parse(trackDRCvalues[x].Trim(), Culture);
                        }
                    }

                    if (trackGainValues != null)
                    {
                        if (trackGainValues.Length >= (x + 1)) // Audio Gain Values
                        {
                            track.Gain = int.Parse(trackGainValues[x].Trim());
                        }
                    }

                    allAudioTrackInfo.Add(track);
                }

                parsed.AudioTracks = allAudioTrackInfo;

                if (fallbackEncoder.Success)
                {
                    parsed.AllowedPassthruOptions.AudioEncoderFallback =
                        Converters.GetAudioEncoderFromCliString(fallbackEncoder.Groups[1].ToString().Trim());
                }

                if (allowedPassthru.Success)
                {
                    string[] allowedEncoders = allowedPassthru.Groups[1].ToString().Trim().Split(',');

                    parsed.AllowedPassthruOptions.AudioAllowAACPass   = allowedEncoders.Contains("aac");
                    parsed.AllowedPassthruOptions.AudioAllowAC3Pass   = allowedEncoders.Contains("ac3");
                    parsed.AllowedPassthruOptions.AudioAllowDTSHDPass = allowedEncoders.Contains("dtshd");
                    parsed.AllowedPassthruOptions.AudioAllowDTSPass   = allowedEncoders.Contains("dts");
                    parsed.AllowedPassthruOptions.AudioAllowMP3Pass   = allowedEncoders.Contains("mp3");
                }

                #endregion

                #region Chapters Tab

                if (chapterMarkersFileMode.Success || chapterMarkers.Success)
                {
                    parsed.IncludeChapterMarkers = true;
                }

                #endregion

                #region Advanced and other

                if (advanced.Success)
                {
                    parsed.AdvancedEncoderOptions = advanced.ToString().Replace("-x ", string.Empty);
                }

                if (x264Preset.Success)
                {
                    parsed.X264Preset =
                        Converters.Getx264PresetFromCli(x264Preset.ToString().Replace("--x264-preset", string.Empty).Replace("=", string.Empty).Trim());
                }

                if (h264Profile.Success)
                {
                    parsed.H264Profile =
                        Converters.Getx264ProfileFromCli(h264Profile.ToString().Replace("--h264-profile", string.Empty).Replace("=", string.Empty).Trim());
                }

                if (x264Profile.Success)
                {
                    parsed.H264Profile =
                        Converters.Getx264ProfileFromCli(x264Profile.ToString().Replace("--x264-profile", string.Empty).Replace("=", string.Empty).Trim());
                }

                if (h264Level.Success)
                {
                    parsed.H264Level =
                        h264Level.ToString().Replace("--h264-level", string.Empty).Replace("=", string.Empty).Trim();
                }

                if (x264Tune.Success)
                {
                    string tuneOptions =
                        x264Tune.ToString().Replace("--x264-tune", string.Empty).Replace("=", string.Empty).Trim();

                    parsed.FastDecode = tuneOptions.Contains("fastdecode");

                    // Remove these options. They are not in the dropdown.
                    tuneOptions = tuneOptions.Replace("fastdecode", string.Empty).Replace(
                        ",", string.Empty);

                    parsed.X264Tune = Converters.Getx264TuneFromCli(tuneOptions);
                }

                if (x265Preset.Success)
                {
                    parsed.X265Preset =
                        Converters.Getx265PresetFromCli(x265Preset.ToString().Replace("--x265-preset", string.Empty).Replace("=", string.Empty).Trim());
                }

                if (h264Profile.Success)
                {
                    parsed.H265Profile =
                        Converters.Getx265ProfileFromCli(h264Profile.ToString().Replace("--h265-profile", string.Empty).Replace("=", string.Empty).Trim());
                }

                if (x265Profile.Success)
                {
                    parsed.H265Profile =
                        Converters.Getx265ProfileFromCli(x265Profile.ToString().Replace("--x265-profile", string.Empty).Replace("=", string.Empty).Trim());
                }

                if (x265Tune.Success)
                {
                    string tuneOptions =
                        x265Tune.ToString().Replace("--x265-tune", string.Empty).Replace("=", string.Empty).Trim();

                    parsed.FastDecode = tuneOptions.Contains("fastdecode");

                    // Remove these options. They are not in the dropdown.
                    tuneOptions = tuneOptions.Replace("fastdecode", string.Empty).Replace(
                        ",", string.Empty);

                    parsed.X265Tune = Converters.Getx265TuneFromCli(tuneOptions);
                }

                #endregion
            }
            catch (Exception exc)
            {
                throw new Exception("An error has occurred in the QueryParser Utility.", exc);
            }

            #endregion

            return(parsed);
        }
Example No. 34
 public virtual Task <int> DeleteTrackAsync(AudioTrack track)
 {
     return(_database.DeleteAsync(track));
 }
Example No. 35
File: Player.cs Project: fystir/osu
        private void load(AudioManager audio, BeatmapDatabase beatmaps, OsuGameBase game)
        {
            try
            {
                if (Beatmap == null)
                {
                    Beatmap = beatmaps.GetWorkingBeatmap(BeatmapInfo);
                }
            }
            catch
            {
                //couldn't load, hard abort!
                Exit();
                return;
            }

            AudioTrack track = Beatmap.Track;

            if (track != null)
            {
                audio.Track.SetExclusive(track);
                sourceClock = track;
            }

            sourceClock = (IAdjustableClock)track ?? new StopwatchClock();
            Clock       = new InterpolatingFramedClock(sourceClock);

            Schedule(() =>
            {
                sourceClock.Reset();
                sourceClock.Start();
            });

            var beatmap = Beatmap.Beatmap;

            if (beatmap.BeatmapInfo?.Mode > PlayMode.Osu)
            {
                //we only support osu! mode for now because the hitobject parsing is crappy and needs a refactor.
                Exit();
                return;
            }

            PlayMode usablePlayMode = beatmap.BeatmapInfo?.Mode > PlayMode.Osu ? beatmap.BeatmapInfo.Mode : PreferredPlayMode;

            ruleset = Ruleset.GetRuleset(usablePlayMode);

            var scoreOverlay = ruleset.CreateScoreOverlay();

            scoreOverlay.BindProcessor(scoreProcessor = ruleset.CreateScoreProcessor());

            hitRenderer = ruleset.CreateHitRendererWith(beatmap.HitObjects);

            hitRenderer.OnJudgement += scoreProcessor.AddJudgement;
            hitRenderer.OnAllJudged += hitRenderer_OnAllJudged;

            if (Autoplay)
            {
                hitRenderer.Schedule(() => hitRenderer.DrawableObjects.ForEach(h => h.State = ArmedState.Hit));
            }

            Children = new Drawable[]
            {
                new PlayerInputManager(game.Host)
                {
                    PassThrough = false,
                    Children    = new Drawable[]
                    {
                        hitRenderer,
                    }
                },
                scoreOverlay,
            };
        }
 /// <param name="player"> Audio player </param>
 /// <param name="track"> Audio track where the exception occurred </param>
 /// <param name="exception"> The exception that occurred </param>
 public virtual void onTrackException(AudioPlayer player, AudioTrack track, FriendlyException exception)
 {
     // Adapter dummy method
 }
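These adapter-style methods are intended to be overridden selectively by subclasses. Below is a minimal sketch of such an override; the base-class name AudioEventAdapter is an assumption, since the snippet above does not show the containing class:

 // Sketch only: "AudioEventAdapter" is an assumed name for the adapter base class shown above.
 public class LoggingAudioEventAdapter : AudioEventAdapter
 {
     public override void onTrackException(AudioPlayer player, AudioTrack track, FriendlyException exception)
     {
         // Replace the default no-op with simple logging.
         Console.WriteLine("Track playback failed: " + exception);
     }
 }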
Example No. 37
 /// <summary>
 /// Remove the Selected Track
 /// </summary>
 /// <param name="track">
 /// The track.
 /// </param>
 public void Remove(AudioTrack track)
 {
     this.Task.AudioTracks.Remove(track);
 }
 /// <param name="player"> Audio player </param>
 /// <param name="track"> Audio track that started </param>
 public virtual void onTrackStart(AudioPlayer player, AudioTrack track)
 {
     // Adapter dummy method
 }
Example No. 39
        public static void AddElement(ElementStep elementStep)
        {
            if (!elementStep.Selector.IsValid() || elementStep.Selector.ElementType == ElementType.None)
            {
                return;
            }

            if (elementStep.Selector.ElementType == ElementType.Event)
            {
                Track track      = SelectorService.GetTrack(elementStep.Selector);
                var   trackRegEx = new Regex(elementStep.DataPropertyList[DataPropertyHolder.RESOURCE_NAME].Value);
                Media media      = MainContainer.Vegas.Project.MediaPool.Cast <Media>().FirstOrDefault(x =>
                                                                                                       trackRegEx.IsMatch(Path.GetFileName(x.FilePath)));
                if (elementStep.Selector.IsAudio() && media.Streams.Any(x => x.MediaType == MediaType.Audio))
                {
                    TrackEvent trackEvent = new AudioEvent(
                        Timecode.FromString(elementStep.DataPropertyList[DataPropertyHolder.TIMECODE].Value),
                        media.Length,
                        elementStep.DataPropertyList[DataPropertyHolder.RESOURCE_NAME].Value);
                    track.Events.Add(trackEvent);
                    trackEvent.AddTake(media.Streams[0], true, elementStep.DataPropertyList[DataPropertyHolder.RESOURCE_NAME].Value);
                }
                if (elementStep.Selector.IsVideo() && media.Streams.Any(x => x.MediaType == MediaType.Video))
                {
                    TrackEvent trackEvent = new VideoEvent(Timecode.FromString(elementStep.DataPropertyList[DataPropertyHolder.TIMECODE].Value),
                                                           media.Length,
                                                           elementStep.DataPropertyList[DataPropertyHolder.RESOURCE_NAME].Value);
                    if (media.Streams.Any(x => x.MediaType == MediaType.Audio))
                    {
                        elementStep.Selector.ElementMediaType = ElementMediaType.Audio;
                        AudioTrack  syncAudioTrack = SelectorService.GetTrack(elementStep.Selector) as AudioTrack;
                        MediaStream audioStream    = media.Streams.FirstOrDefault(x => x.MediaType == MediaType.Audio);
                        MediaStream videoStream    = media.Streams.FirstOrDefault(x => x.MediaType == MediaType.Video);
                        var         audioEvent     = new AudioEvent(
                            Timecode.FromString(elementStep.DataPropertyList[DataPropertyHolder.TIMECODE].Value),
                            media.Length,
                            elementStep.DataPropertyList[DataPropertyHolder.RESOURCE_NAME].Value);
                        track.Events.Add(trackEvent);
                        trackEvent.AddTake(videoStream, true, elementStep.DataPropertyList[DataPropertyHolder.RESOURCE_NAME].Value);
                        syncAudioTrack.Events.Add(audioEvent);
                        audioEvent.AddTake(audioStream, true, elementStep.DataPropertyList[DataPropertyHolder.RESOURCE_NAME].Value);
                    }
                    else
                    {
                        track.Events.Add(trackEvent);
                        trackEvent.AddTake(media.Streams[0], true, elementStep.DataPropertyList[DataPropertyHolder.RESOURCE_NAME].Value);
                    }
                }
            }
            if (elementStep.Selector.ElementType == ElementType.Track)
            {
                if (elementStep.Selector.IsAudio())
                {
                    MainContainer.Vegas.Project.AddAudioTrack().Name = elementStep.DataPropertyList[DataPropertyHolder.RESOURCE_NAME].Value;
                }
                if (elementStep.Selector.IsVideo())
                {
                    MainContainer.Vegas.Project.AddVideoTrack().Name = elementStep.DataPropertyList[DataPropertyHolder.RESOURCE_NAME].Value;
                }
            }
        }
Example No. 40
        /// <summary>
        /// Converts source values to a value for the binding target. The data binding engine calls this method when it propagates the values from source bindings to the binding target.
        /// </summary>
        /// <returns>
        /// A converted value.If the method returns null, the valid null value is used.A return value of <see cref="T:System.Windows.DependencyProperty"/>.<see cref="F:System.Windows.DependencyProperty.UnsetValue"/> indicates that the converter did not produce a value, and that the binding will use the <see cref="P:System.Windows.Data.BindingBase.FallbackValue"/> if it is available, or else will use the default value.A return value of <see cref="T:System.Windows.Data.Binding"/>.<see cref="F:System.Windows.Data.Binding.DoNothing"/> indicates that the binding does not transfer the value or use the <see cref="P:System.Windows.Data.BindingBase.FallbackValue"/> or the default value.
        /// </returns>
        /// <param name="value">
        /// The value.
        /// </param>
        /// <param name="targetType">
        /// The type of the binding target property.
        /// </param>
        /// <param name="parameter">
        /// The converter parameter to use.
        /// </param>
        /// <param name="culture">
        /// The culture to use in the converter.
        /// </param>
        public object Convert(object value, Type targetType, object parameter, CultureInfo culture)
        {
            // Base set of bitrates available.
            List <int> bitrates = new List <int> {
                32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 448, 640, 768, 960, 1152, 1344, 1536
            };

            int max = 160;
            int low = 32;

            AudioTrack track = value as AudioTrack;

            if (track != null)
            {
                int samplerate = this.GetBestSampleRate(track);
                int srShift    = this.GetSamplerateShift(samplerate);
                int lfeCount   = this.GetLowFreqChannelCount(track.MixDown);
                int channels   = this.GetDiscreteChannelCount(track.MixDown) - lfeCount;

                switch (track.Encoder)
                {
                case AudioEncoder.ffaac:
                    low = ((channels + lfeCount) * 32);
                    max = ((channels + lfeCount) * ((192 + (64 * ((samplerate << srShift) >= 44100 ? 1 : 0))) >> srShift));
                    break;

                case AudioEncoder.Lame:
                    low = 8 + (24 * (srShift < 1 ? 1 : 0));
                    max = 64 + (96 * (srShift < 2 ? 1 : 0)) + (160 * (srShift < 1 ? 1 : 0));
                    break;

                case AudioEncoder.Vorbis:
                    low = (channels + lfeCount) * (14 + (8 * (srShift < 2 ? 1 : 0)) + (6 * (srShift < 1 ? 1 : 0)));
                    max = (channels + lfeCount) * (32 + (54 * (srShift < 2 ? 1 : 0)) + (104 * (srShift < 1 ? 1 : 0)) + (50 * (samplerate >= 44100 ? 1 : 0)));
                    break;

                case AudioEncoder.Ac3:
                    low = 224 * channels / 5;
                    max = 640;
                    break;

                case AudioEncoder.Ac3Passthrough:
                case AudioEncoder.DtsPassthrough:
                case AudioEncoder.DtsHDPassthrough:
                case AudioEncoder.AacPassthru:
                case AudioEncoder.Mp3Passthru:
                case AudioEncoder.Passthrough:
                case AudioEncoder.ffflac:
                case AudioEncoder.ffflac24:
                    max = 1536;     // Since we don't care, just set it to the max.
                    break;

                case AudioEncoder.fdkaac:
                    low = channels * samplerate * 2 / 3000;
                    max = channels * samplerate * 6 / 1000;
                    break;

                case AudioEncoder.fdkheaac:
                    low = (channels * (12 + (4 * (samplerate >= 44100 ? 1 : 0))));
                    max = (channels - (channels > 2 ? 1 : 0)) * (48 + (16 * (samplerate >= 22050 ? 1 : 0)));
                    break;

                default:
                    max = 768;
                    break;
                }

                // Bring the bitrate down in-line with the max.
                if (track.Bitrate < low)
                {
                    track.Bitrate = low;
                }

                if (track.Bitrate > max)
                {
                    track.Bitrate = max;
                }
            }

            return(bitrates.Where(bitrate => bitrate <= max && bitrate >= low));
        }
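For reference, here is a hypothetical way to exercise this converter directly from code. The class name AudioBitrateConverter is an assumption (the snippet does not show the containing class), and the property values are illustrative only:

            // Hypothetical usage sketch; AudioBitrateConverter is an assumed class name.
            var converter = new AudioBitrateConverter();

            var track = new AudioTrack
            {
                Encoder    = AudioEncoder.Lame, // MP3: the limits depend only on the samplerate shift
                SampleRate = 48,                // illustrative value; 0 would mean "Auto"
                Bitrate    = 999                // deliberately out of range; Convert() clamps it into [low, max]
            };

            // Returns the subset of the base bitrate list that falls inside [low, max] for this track.
            var allowedBitrates = (IEnumerable <int>)converter.Convert(track, typeof(IEnumerable <int>), null, CultureInfo.InvariantCulture);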
Example No. 41
        public AudioOutputPreview GetAudioPreview(AudioTrack inputTrack, AudioEncodingViewModel audioVM)
        {
            HBAudioEncoder encoder = audioVM.HBAudioEncoder;

            var outputPreviewTrack = new AudioOutputPreview
            {
                Name    = inputTrack.NoTrackDisplay,
                Encoder = encoder.DisplayName
            };

            if (encoder.IsPassthrough)
            {
                // For passthrough encodes, we need to make sure the input track is of the right type
                if (!Encoders.AudioEncoderIsCompatible(inputTrack, encoder) && encoder.ShortName != "copy")
                {
                    return(null);
                }

                HBContainer container = Encoders.GetContainer(this.Profile.ContainerName);

                if (encoder.ShortName == "copy" && (inputTrack.Codec == AudioCodec.Dts || inputTrack.Codec == AudioCodec.DtsHD) && container.DefaultExtension == "mp4")
                {
                    this.PassthroughWarningText    = EncodingRes.DtsMp4Warning;
                    this.PassthroughWarningVisible = true;

                    return(outputPreviewTrack);
                }
            }

            // Find out what the real mixdown, sample rate and bitrate will be.
            HBMixdown mixdown;
            int       sampleRate, bitrate;

            if (encoder.ShortName == "copy")
            {
                if (Encoders.AudioEncoderIsCompatible(inputTrack, encoder))
                {
                    return(outputPreviewTrack);
                }

                if (this.Profile.AudioEncoderFallback == null)
                {
                    encoder = Encoders.AudioEncoders.First(a => !a.IsPassthrough);
                }
                else
                {
                    encoder = Encoders.GetAudioEncoder(this.Profile.AudioEncoderFallback);
                }

                mixdown    = Encoders.GetDefaultMixdown(encoder, inputTrack.ChannelLayout);
                sampleRate = 0;
                bitrate    = 0;

                outputPreviewTrack.Encoder = encoder.DisplayName;
            }
            else
            {
                mixdown    = audioVM.SelectedMixdown.Mixdown;
                sampleRate = audioVM.SampleRate;
                bitrate    = audioVM.SelectedBitrate.Bitrate;
            }

            HBMixdown previewMixdown;

            previewMixdown = Encoders.SanitizeMixdown(mixdown, encoder, inputTrack.ChannelLayout);

            int previewSampleRate = sampleRate;

            if (previewSampleRate == 0)
            {
                previewSampleRate = inputTrack.SampleRate;
            }

            int previewBitrate = bitrate;

            if (previewBitrate == 0)
            {
                previewBitrate = Encoders.GetDefaultBitrate(encoder, previewSampleRate, previewMixdown);
            }
            else
            {
                previewBitrate = Encoders.SanitizeAudioBitrate(previewBitrate, encoder, previewSampleRate, previewMixdown);
            }

            outputPreviewTrack.Mixdown    = previewMixdown.DisplayName;
            outputPreviewTrack.SampleRate = DisplayConversions.DisplaySampleRate(previewSampleRate);

            if (audioVM.EncodeRateType == AudioEncodeRateType.Bitrate)
            {
                if (previewBitrate >= 0)
                {
                    outputPreviewTrack.Quality = previewBitrate + " kbps";
                }
                else
                {
                    outputPreviewTrack.Quality = string.Empty;
                }
            }
            else
            {
                outputPreviewTrack.Quality = "CQ " + audioVM.AudioQuality;
            }

            var modifiers = new List <string>();

            if (audioVM.Gain != 0)
            {
                modifiers.Add(string.Format("{0}{1} dB", audioVM.Gain > 0 ? "+" : string.Empty, audioVM.Gain));
            }

            if (audioVM.Drc != 0)
            {
                modifiers.Add("DRC " + audioVM.Drc.ToString());
            }

            outputPreviewTrack.Modifiers = string.Join(", ", modifiers);

            return(outputPreviewTrack);
        }
Example No. 42
        private void GetTracks(XmlNode node)
        {
            _abletonLiveSet.returnTracks ??= new List <ReturnTrack>();
            _abletonLiveSet.midiTracks ??= new List <MidiTrack>();
            _abletonLiveSet.audioTracks ??= new List <AudioTrack>();
            foreach (XmlNode track in node.ChildNodes)
            {
                Track newTrack;
                switch (track.Name)
                {
                case "ReturnTrack":
                    newTrack = new ReturnTrack(int.Parse(track.Attributes[0].Value));
                    _abletonLiveSet.returnTracks.Add((ReturnTrack)newTrack);
                    break;

                case "MidiTrack":
                    newTrack = new MidiTrack(int.Parse(track.Attributes[0].Value));
                    _abletonLiveSet.midiTracks.Add((MidiTrack)newTrack);
                    break;

                case "AudioTrack":
                    newTrack = new AudioTrack(int.Parse(track.Attributes[0].Value));
                    _abletonLiveSet.audioTracks.Add((AudioTrack)newTrack);
                    break;

                default:
                    Console.WriteLine($"Could not parse type of track for {track.Name}, returning.");
                    return;
                }
                newTrack.boolValues ??= new Dictionary <string, bool>();
                newTrack.intValues ??= new Dictionary <string, int>();
                foreach (XmlElement el in track.ChildNodes)
                {
                    if (el.Attributes.Count == 1)
                    {
                        if (el.Name == "ViewData")
                        {
                            newTrack.viewData = el.Attributes[0].Value;
                        }
                        else
                        {
                            if (int.TryParse(el.Attributes[0].Value, out int iVal))
                            {
                                newTrack.intValues.Add(el.Name, iVal);
                            }
                            else if (bool.TryParse(el.Attributes[0].Value, out bool bVal))
                            {
                                newTrack.boolValues.Add(el.Name, bVal);
                            }
                            else
                            {
                                Console.WriteLine($"Could not parse value from {el.Name} in track {track.Name}");
                            }
                        }
                    }
                    else
                    {
                        if (el.Name == "TrackDelay")
                        {
                            newTrack.trackDelay = new Track.TrackDelay(int.Parse(el["Value"].Attributes[0].Value),
                                                                       bool.Parse(el["IsValueSampleBased"].Attributes[0].Value));
                        }
                        else if (el.Name == "Name")
                        {
                            newTrack.name = new Track.Name(el["EffectiveName"].Attributes[0].Value,
                                                           el["UserName"].Attributes[0].Value,
                                                           el["Annotation"].Attributes[0].Value,
                                                           el["MemorizedFirstClipName"].Attributes[0].Value);
                        }
                    }
                }
            }
        }
Example No. 43
 public AsyncAudioLoader(ClientResourceManager resourceManager, string resourcePath, AudioTrack resultAudio)
     : base(resourceManager, resourcePath)
 {
     m_resultAudio = resultAudio;
 }
 /// <param name="player"> Audio player </param>
 /// <param name="track"> Audio track where the exception occurred </param>
 /// <param name="thresholdMs"> The wait threshold that was exceeded for this event to trigger </param>
 public virtual void onTrackStuck(AudioPlayer player, AudioTrack track, long thresholdMs)
 {
     // Adapter dummy method
 }
Example No. 45
        /// <summary>
        /// Called when the playstate changes, except for the Error state (see OnError)
        /// </summary>
        /// <param name="player">The BackgroundAudioPlayer</param>
        /// <param name="track">The track playing at the time the playstate changed</param>
        /// <param name="playState">The new playstate of the player</param>
        /// <remarks>
        /// Play State changes cannot be cancelled. They are raised even if the application
        /// caused the state change itself, assuming the application has opted-in to the callback.
        ///
        /// Notable playstate events:
        /// (a) TrackEnded: invoked when the player has no current track. The agent can set the next track.
        /// (b) TrackReady: an audio track has been set and it is now ready for playback.
        ///
        /// Call NotifyComplete() only once, after the agent request has been completed, including async callbacks.
        /// </remarks>
        protected override async void OnPlayStateChanged(BackgroundAudioPlayer player, AudioTrack track, PlayState playState)
        {
            await ConfigureThePlayer();

            switch (playState)
            {
            case PlayState.TrackEnded:
                _logger.Info("PlayStateChanged.TrackEnded");
                player.Track = await GetNextTrack();
                await InformOfPlayingTrack();

                break;

            case PlayState.TrackReady:
                _logger.Info("PlayStateChanged.TrackReady");
                try
                {
                    player.Play();
                }
                catch (Exception ex)
                {
                    _logger.ErrorException("OnPlayStateChanged.TrackReady", ex);
                }
                NotifyComplete();
                break;

            case PlayState.Shutdown:
                await InformOfStoppedTrack();

                break;

            case PlayState.Unknown:
                _logger.Info("PlayStateChanged.Unknown");
                NotifyComplete();
                break;

            case PlayState.Stopped:
                _logger.Info("PlayStateChanged.Stopped");
                NotifyComplete();
                //_playlistHelper.SetAllTracksToNotPlayingAndSave();
                break;

            case PlayState.Paused:
                _logger.Info("PlayStateChanged.Paused");
                await _playlistHelper.SetAllTracksToNotPlayingAndSave();
                await InformOfStoppedTrack();

                break;

            default:
                NotifyComplete();
                break;
            }
        }
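GetNextTrack(), ConfigureThePlayer() and the Inform... helpers are not shown on this page. Below is a minimal sketch of what a next-track helper could look like, using an invented in-memory playlist; the _playlist and _currentIndex fields are illustrative and not part of the original agent:

        // Sketch only: the real agent's playlist handling is not shown above.
        private readonly List <AudioTrack> _playlist = new List <AudioTrack>();
        private int _currentIndex = -1;

        private Task <AudioTrack> GetNextTrack()
        {
            if (_playlist.Count == 0)
            {
                return Task.FromResult((AudioTrack)null); // nothing queued; TrackEnded then leaves the player idle
            }

            _currentIndex = (_currentIndex + 1) % _playlist.Count;
            return Task.FromResult(_playlist[_currentIndex]);
        }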
Example No. 46
        /// <summary>
        /// The create preset.
        /// </summary>
        /// <param name="importedPreset">
        /// The preset.
        /// </param>
        /// <returns>
        /// The <see cref="Preset"/>.
        /// </returns>
        public static Preset ImportPreset(HBPreset importedPreset)
        {
            Preset preset = new Preset();

            preset.Name              = importedPreset.PresetName;
            preset.Description       = importedPreset.PresetDescription;
            preset.UsePictureFilters = importedPreset.UsesPictureFilters;
            preset.UseDeinterlace    = importedPreset.PictureDecombDeinterlace;
            preset.Task              = new EncodeTask();

            // Step 1, Create the EncodeTask Object that can be loaded into the UI.

            /* Output Settings */
            preset.Task.OptimizeMP4   = importedPreset.Mp4HttpOptimize;
            preset.Task.IPod5GSupport = importedPreset.Mp4iPodCompatible;
            preset.Task.OutputFormat  = GetFileFormat(importedPreset.FileFormat.Replace("file", string.Empty).Trim()); // TODO: null check.

            /* Picture Settings */
            preset.PictureSettingsMode = (PresetPictureSettingsMode)importedPreset.UsesPictureSettings;
            preset.Task.MaxWidth       = importedPreset.PictureWidth.HasValue && importedPreset.PictureWidth.Value > 0 ? importedPreset.PictureWidth.Value : (int?)null;
            preset.Task.MaxHeight      = importedPreset.PictureHeight.HasValue && importedPreset.PictureHeight.Value > 0 ? importedPreset.PictureHeight.Value : (int?)null;
            preset.Task.Cropping       = new Cropping(importedPreset.PictureTopCrop, importedPreset.PictureBottomCrop, importedPreset.PictureLeftCrop, importedPreset.PictureRightCrop);
            preset.Task.HasCropping    = !importedPreset.PictureAutoCrop;

            preset.Task.Modulus           = importedPreset.PictureModulus;
            preset.Task.KeepDisplayAspect = importedPreset.PictureKeepRatio;
            switch (importedPreset.PicturePAR)
            {
            case "custom":
                preset.Task.Anamorphic = Anamorphic.Custom;
                break;

            case "loose":
                preset.Task.Anamorphic = Anamorphic.Loose;
                break;

            case "strict":
                preset.Task.Anamorphic = Anamorphic.Strict;
                break;

            default:
                preset.Task.Anamorphic = Anamorphic.Loose;
                break;
            }

            /* Filter Settings */
            preset.Task.Grayscale = importedPreset.VideoGrayScale;
            preset.Task.Deblock   = importedPreset.PictureDeblock;
            switch (importedPreset.PictureDecomb)
            {
            case "custom":
                preset.Task.Decomb = Decomb.Custom;
                break;

            case "default":
                preset.Task.Decomb = Decomb.Default;
                break;

            case "bob":
                preset.Task.Decomb = Decomb.Bob;
                break;

            case "fast":
                preset.Task.Decomb = Decomb.Fast;
                break;

            default:
                preset.Task.Decomb = Decomb.Off;
                break;
            }

            preset.Task.CustomDecomb = importedPreset.PictureDecombCustom;

            switch (importedPreset.PictureDeinterlace)
            {
            case "custom":
                preset.Task.Deinterlace = Deinterlace.Custom;
                break;

            case "bob":
                preset.Task.Deinterlace = Deinterlace.Bob;
                break;

            case "gast":
                preset.Task.Deinterlace = Deinterlace.Fast;
                break;

            case "slow":
                preset.Task.Deinterlace = Deinterlace.Slow;
                break;

            case "slower":
                preset.Task.Deinterlace = Deinterlace.Slower;
                break;

            default:
                preset.Task.Deinterlace = Deinterlace.Off;
                break;
            }

            preset.Task.CustomDeinterlace = importedPreset.PictureDetelecineCustom;
            preset.Task.CustomDenoise     = importedPreset.PictureDenoiseCustom;
            preset.Task.CustomDetelecine  = importedPreset.PictureDetelecineCustom;

            switch (importedPreset.PictureDetelecine)
            {
            case "custom":
                preset.Task.Detelecine = Detelecine.Custom;
                break;

            case "default":
                preset.Task.Detelecine = Detelecine.Default;
                break;

            default:
                preset.Task.Detelecine = Detelecine.Off;
                break;
            }

            switch (importedPreset.PictureDenoiseFilter)
            {
            case "nlmeans":
                preset.Task.Denoise = Denoise.NLMeans;
                break;

            case "hqdn3d":
                preset.Task.Denoise = Denoise.hqdn3d;
                break;

            default:
                preset.Task.Denoise = Denoise.Off;
                break;
            }

            switch (importedPreset.PictureDenoisePreset)
            {
            case "custom":
                preset.Task.DenoisePreset = DenoisePreset.Custom;
                break;

            case "light":
                preset.Task.DenoisePreset = DenoisePreset.Light;
                break;

            case "medium":
                preset.Task.DenoisePreset = DenoisePreset.Medium;
                break;

            case "strong":
                preset.Task.DenoisePreset = DenoisePreset.Strong;
                break;

            case "ultralight":
                preset.Task.DenoisePreset = DenoisePreset.Ultralight;
                break;

            case "weak":
                preset.Task.DenoisePreset = DenoisePreset.Weak;
                break;
            }

            switch (importedPreset.PictureDenoiseTune)
            {
            case "animation":
                preset.Task.DenoiseTune = DenoiseTune.Animation;
                break;

            case "film":
                preset.Task.DenoiseTune = DenoiseTune.Film;
                break;

            case "grain":
                preset.Task.DenoiseTune = DenoiseTune.Grain;
                break;

            case "highnotion":
                preset.Task.DenoiseTune = DenoiseTune.HighMotion;
                break;

            default:
                preset.Task.DenoiseTune = DenoiseTune.None;
                break;
            }

            /* Video Settings */
            preset.Task.VideoEncoder = EnumHelper <VideoEncoder> .GetValue(importedPreset.VideoEncoder);

            preset.Task.VideoBitrate           = importedPreset.VideoAvgBitrate;
            preset.Task.TwoPass                = importedPreset.VideoTwoPass;
            preset.Task.TurboFirstPass         = importedPreset.VideoTurboTwoPass;
            preset.Task.ExtraAdvancedArguments = importedPreset.VideoOptionExtra;
            preset.Task.Quality                = double.Parse(importedPreset.VideoQualitySlider.ToString(CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
            preset.Task.VideoEncodeRateType    = (VideoEncodeRateType)importedPreset.VideoQualityType;
            preset.Task.VideoLevel             = new VideoLevel(importedPreset.VideoLevel, importedPreset.VideoLevel);
            preset.Task.VideoPreset            = new VideoPreset(importedPreset.VideoPreset, importedPreset.VideoPreset);
            preset.Task.VideoProfile           = new VideoProfile(importedPreset.VideoProfile, importedPreset.VideoProfile);

            if (!string.IsNullOrEmpty(importedPreset.VideoTune))
            {
                string[] split = importedPreset.VideoTune.Split(',');
                foreach (var item in split)
                {
                    preset.Task.VideoTunes.Add(new VideoTune(item, item));
                }
            }
            preset.Task.Framerate = importedPreset.VideoFramerate == "auto" || string.IsNullOrEmpty(importedPreset.VideoFramerate)
                                 ? (double?)null
                                 : double.Parse(importedPreset.VideoFramerate, CultureInfo.InvariantCulture);
            string parsedValue = importedPreset.VideoFramerateMode;

            switch (parsedValue)
            {
            case "vfr":
                preset.Task.FramerateMode = FramerateMode.VFR;
                break;

            case "cfr":
                preset.Task.FramerateMode = FramerateMode.CFR;
                break;

            default:
                preset.Task.FramerateMode = FramerateMode.PFR;
                break;
            }

            /* Audio Settings */
            preset.AudioTrackBehaviours = new AudioBehaviours();
            preset.Task.AllowedPassthruOptions.AudioEncoderFallback = EnumHelper <AudioEncoder> .GetValue(importedPreset.AudioEncoderFallback);

            preset.AudioTrackBehaviours.SelectedBehaviour = importedPreset.AudioTrackSelectionBehavior == "all"
                                                                     ? AudioBehaviourModes.AllMatching
                                                                     : importedPreset.AudioTrackSelectionBehavior == "first"
                                                                           ? AudioBehaviourModes.FirstMatch
                                                                           : AudioBehaviourModes.None;

            if (importedPreset.AudioCopyMask != null)
            {
                foreach (var item in importedPreset.AudioCopyMask)
                {
                    AudioEncoder encoder = EnumHelper <AudioEncoder> .GetValue(item.ToString());

                    switch (encoder)
                    {
                    case AudioEncoder.AacPassthru:
                        preset.Task.AllowedPassthruOptions.AudioAllowAACPass = true;
                        break;

                    case AudioEncoder.Ac3Passthrough:
                        preset.Task.AllowedPassthruOptions.AudioAllowAC3Pass = true;
                        break;

                    case AudioEncoder.EAc3Passthrough:
                        // Assumption: an AudioAllowEAC3Pass flag exists alongside the other passthru flags.
                        preset.Task.AllowedPassthruOptions.AudioAllowEAC3Pass = true;
                        break;

                    case AudioEncoder.DtsHDPassthrough:
                        preset.Task.AllowedPassthruOptions.AudioAllowDTSHDPass = true;
                        break;

                    case AudioEncoder.DtsPassthrough:
                        preset.Task.AllowedPassthruOptions.AudioAllowDTSPass = true;
                        break;

                    case AudioEncoder.FlacPassthru:
                        // Assumption: an AudioAllowFlacPass flag exists alongside the other passthru flags.
                        preset.Task.AllowedPassthruOptions.AudioAllowFlacPass = true;
                        break;

                    case AudioEncoder.Mp3Passthru:
                        preset.Task.AllowedPassthruOptions.AudioAllowMP3Pass = true;
                        break;
                    }
                }
            }

            if (importedPreset.AudioLanguageList != null)
            {
                foreach (var item in importedPreset.AudioLanguageList)
                {
                    preset.AudioTrackBehaviours.SelectedLangauges.Add(item);
                }
            }

            preset.Task.AudioTracks = new ObservableCollection <AudioTrack>();

            if (importedPreset.AudioList != null)
            {
                foreach (var audioTrack in importedPreset.AudioList)
                {
                    AudioTrack track = new AudioTrack();
                    track.Bitrate = audioTrack.AudioBitrate;

                    // track.CompressionLevel = audioTrack.AudioCompressionLevel;
                    // track.AudioDitherMethod = audioTrack.AudioDitherMethod;
                    track.Encoder = EnumHelper <AudioEncoder> .GetValue(audioTrack.AudioEncoder);

                    track.MixDown = EnumHelper <Mixdown> .GetValue(audioTrack.AudioMixdown);

                    // track.AudioNormalizeMixLevel = audioTrack.AudioNormalizeMixLevel;
                    track.SampleRate = audioTrack.AudioSamplerate == "auto" ? 0 : double.Parse(audioTrack.AudioSamplerate, CultureInfo.InvariantCulture);

                    // track.IsQualityBased = audioTrack.AudioTrackQualityEnable;
                    // track.Quality = audioTrack.AudioTrackQuality;
                    track.Gain = (int)audioTrack.AudioTrackGainSlider;
                    track.DRC  = audioTrack.AudioTrackDRCSlider;

                    preset.Task.AudioTracks.Add(track);
                }
            }


            /* Subtitle Settings */
            preset.SubtitleTrackBehaviours = new SubtitleBehaviours();

            // parsedPreset.SubtitleTrackBehaviours.SelectedBehaviour = preset.SubtitleTrackSelectionBehavior;
            preset.SubtitleTrackBehaviours.AddClosedCaptions        = importedPreset.SubtitleAddCC;
            preset.SubtitleTrackBehaviours.AddForeignAudioScanTrack = importedPreset.SubtitleAddForeignAudioSearch;
            if (importedPreset.SubtitleLanguageList != null)
            {
                foreach (var item in importedPreset.SubtitleLanguageList)
                {
                    preset.SubtitleTrackBehaviours.SelectedLangauges.Add(item);
                }
            }

            /* Chapter Marker Settings */
            preset.Task.IncludeChapterMarkers = importedPreset.ChapterMarkers;

            /* Advanced Settings */
            preset.Task.ShowAdvancedTab        = importedPreset.x264UseAdvancedOptions;
            preset.Task.AdvancedEncoderOptions = importedPreset.x264Option;

            /* Not Supported Yet */
            // public int VideoColorMatrixCode { get; set; }
            // public bool VideoHWDecode { get; set; }
            // public string VideoScaler { get; set; }
            // public bool VideoQSVDecode { get; set; }
            // public int VideoQSVAsyncDepth { get; set; }
            // public bool SubtitleAddForeignAudioSubtitle { get; set; }
            // public string SubtitleBurnBehavior { get; set; }
            // public bool SubtitleBurnBDSub { get; set; }
            // public bool SubtitleBurnDVDSub { get; set; }
            // public bool PictureItuPAR { get; set; }
            // public bool PictureLooseCrop { get; set; }
            // public int PicturePARWidth { get; set; }
            // public int PicturePARHeight { get; set; }
            // public int PictureRotate { get; set; }
            // public int PictureForceHeight { get; set; }
            // public int PictureForceWidth { get; set; }
            // public bool AudioSecondaryEncoderMode { get; set; }
            // public List<object> ChildrenArray { get; set; }
            // public bool Default { get; set; }
            // public bool Folder { get; set; }
            // public bool FolderOpen { get; set; }
            // public int PictureDARWidth { get; set; }
            // public int Type { get; set; }

            return(preset);
        }
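A hypothetical call site for this import routine; importedHbPreset stands for an HBPreset instance deserialized from an exported HandBrake preset file, and the variable names are illustrative:

        // Hypothetical usage; importedHbPreset would come from deserializing a preset export.
        Preset uiPreset = ImportPreset(importedHbPreset);
        Console.WriteLine(uiPreset.Name + " -> " + uiPreset.Task.VideoEncoder);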
Example No. 47
        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);

            // Set our view from the "main" layout resource
            SetContentView(Resource.Layout.Main);

            notaC = FindViewById <Button>(Resource.Id.btnDo);
            notaD = FindViewById <Button>(Resource.Id.btnRe);
            notaE = FindViewById <Button>(Resource.Id.btnMi);
            notaF = FindViewById <Button>(Resource.Id.btnFa);
            notaG = FindViewById <Button>(Resource.Id.btnSol);
            notaA = FindViewById <Button>(Resource.Id.btnLa);
            notaB = FindViewById <Button>(Resource.Id.btnSi);

            ejercicio = FindViewById <Button>(Resource.Id.btnEjercicio);

            ejercicio.Click += (o, e) => { ex = reproducir(); };

            notaC.Click += (o, e) => { respuesta(0); };
            notaD.Click += (o, e) => { respuesta(1); };
            notaE.Click += (o, e) => { respuesta(2); };
            notaF.Click += (o, e) => { respuesta(3); };
            notaG.Click += (o, e) => { respuesta(4); };
            notaA.Click += (o, e) => { respuesta(5); };
            notaB.Click += (o, e) => { respuesta(6); };


            void respuesta(int r)
            {
                AlertDialog.Builder alert = new AlertDialog.Builder(this);

                if (r == ex)
                {
                    alert.SetTitle("Respuesta Correcta");
                    alert.SetMessage("Su respuesta fue correcta.");
                }
                else
                {
                    alert.SetTitle("Respuesta Incorrecta");
                    alert.SetMessage("Su respuesta fue incorrecta.");
                }

                Dialog dialog = alert.Create();

                dialog.Show();
            }

            int reproducir()
            {
                Random ran        = new Random();
                var    duration   = 2;
                var    sampleRate = 8000;
                var    numSamples = duration * sampleRate;
                var    sample     = new double[numSamples];
                Double freqOfTone = 0;

                byte[] generatedSnd = new byte[2 * numSamples];
                var    tone         = ran.Next(7);

                switch (tone)
                {
                case 0:
                    freqOfTone = 261.626;
                    break;

                case 1:
                    freqOfTone = 293.665;
                    break;

                case 2:
                    freqOfTone = 329.628;
                    break;

                case 3:
                    freqOfTone = 349.228;
                    break;

                case 4:
                    freqOfTone = 391.995;
                    break;

                case 5:
                    freqOfTone = 440.000;
                    break;

                case 6:
                    freqOfTone = 493.88;
                    break;
                }

                for (int i = 0; i < numSamples; ++i)
                {
                    sample[i] = Math.Sin(2 * Math.PI * i / (sampleRate / freqOfTone));
                }

                int idx = 0;

                foreach (double dVal in sample)
                {
                    short val = (short)(dVal * 32767);
                    generatedSnd[idx++] = (byte)(val & 0x00ff);
                    generatedSnd[idx++] = (byte)((val & 0xff00) >> 8);
                }


                // Buffer size and write length are in bytes: 16-bit PCM uses 2 bytes per sample,
                // so pass the byte-array length rather than the sample count.
                var track = new AudioTrack(global::Android.Media.Stream.Music, sampleRate, ChannelOut.Mono, Encoding.Pcm16bit, generatedSnd.Length, AudioTrackMode.Static);

                track.Write(generatedSnd, 0, generatedSnd.Length);
                track.Play();
                System.Threading.Thread.Sleep(duration * 1000); // let the full tone finish before releasing the track
                track.Release();

                return(tone);
            }
        }
Example No. 48
        private void AddTrack(AudioTrack audioTrack)
        {
            if (audioTrack.SourceProperties.SampleRate > audioMixer.SampleRate)
            {
                // The newly added track has a higher samplerate than the current tracks, so we adjust
                // the processing samplerate to the highest rate
                ChangeMixingSampleRate(audioTrack.SourceProperties.SampleRate);
            }

            IAudioStream input        = audioTrack.CreateAudioStream();
            IAudioStream baseStream   = new TolerantStream(new BufferedStream(input, 1024 * 256 * input.SampleBlockSize, true));
            OffsetStream offsetStream = new OffsetStream(baseStream)
            {
                Offset = TimeUtil.TimeSpanToBytes(audioTrack.Offset, baseStream.Properties)
            };

            audioTrack.OffsetChanged += new EventHandler <ValueEventArgs <TimeSpan> >(
                delegate(object sender, ValueEventArgs <TimeSpan> e) {
                offsetStream.Offset = TimeUtil.TimeSpanToBytes(e.Value, offsetStream.Properties);
                audioMixer.UpdateLength();
            });

            // Upmix mono inputs to dual channel stereo or downmix surround to allow channel balancing
            // TODO add better multichannel stream support and allow balancing of surround
            IAudioStream mixToStereoStream = offsetStream;

            if (mixToStereoStream.Properties.Channels == 1)
            {
                mixToStereoStream = new MonoStream(mixToStereoStream, 2);
            }
            else if (mixToStereoStream.Properties.Channels > 2)
            {
                mixToStereoStream = new SurroundDownmixStream(mixToStereoStream);
            }

            // control the track phase
            PhaseInversionStream phaseInversion = new PhaseInversionStream(mixToStereoStream)
            {
                Invert = audioTrack.InvertedPhase
            };

            MonoStream monoStream = new MonoStream(phaseInversion, phaseInversion.Properties.Channels)
            {
                Downmix = audioTrack.MonoDownmix
            };

            // necessary to control each track individually
            VolumeControlStream volumeControl = new VolumeControlStream(monoStream)
            {
                Mute    = audioTrack.Mute,
                Volume  = audioTrack.Volume,
                Balance = audioTrack.Balance
            };

            // when the AudioTrack.Mute property changes, just set it accordingly on the audio stream
            audioTrack.MuteChanged += new EventHandler <ValueEventArgs <bool> >(
                delegate(object vsender, ValueEventArgs <bool> ve) {
                volumeControl.Mute = ve.Value;
            });

            // when the AudioTrack.Solo property changes, we have to react in different ways:
            audioTrack.SoloChanged += new EventHandler <ValueEventArgs <bool> >(
                delegate(object vsender, ValueEventArgs <bool> ve) {
                AudioTrack senderTrack  = (AudioTrack)vsender;
                bool isOtherTrackSoloed = false;

                foreach (AudioTrack vaudioTrack in trackList)
                {
                    if (vaudioTrack != senderTrack && vaudioTrack.Solo)
                    {
                        isOtherTrackSoloed = true;
                        break;
                    }
                }

                /* if there's at least one other track that is soloed, we set the mute property of
                 * the current track to the opposite of the solo property:
                 * - if the track is soloed, we unmute it
                 * - if the track is unsoloed, we mute it
                 */
                if (isOtherTrackSoloed)
                {
                    senderTrack.Mute = !ve.Value;
                }

                /* if this is the only soloed track, we mute all other tracks
                 * if this track just got unsoloed, we unmute all other tracks
                 */
                else
                {
                    foreach (AudioTrack vaudioTrack in trackList)
                    {
                        if (vaudioTrack != senderTrack && !vaudioTrack.Solo)
                        {
                            vaudioTrack.Mute = ve.Value;
                        }
                    }
                }
            });

            // when the AudioTrack.Volume property changes, just set it accordingly on the audio stream
            audioTrack.VolumeChanged += new EventHandler <ValueEventArgs <float> >(
                delegate(object vsender, ValueEventArgs <float> ve) {
                volumeControl.Volume = ve.Value;
            });

            audioTrack.BalanceChanged += new EventHandler <ValueEventArgs <float> >(
                delegate(object vsender, ValueEventArgs <float> ve) {
                volumeControl.Balance = ve.Value;
            });

            audioTrack.InvertedPhaseChanged += new EventHandler <ValueEventArgs <bool> >(
                delegate(object vsender, ValueEventArgs <bool> ve) {
                phaseInversion.Invert = ve.Value;
            });
            audioTrack.MonoDownmixChanged += new EventHandler <ValueEventArgs <bool> >(
                delegate(object vsender, ValueEventArgs <bool> ve) {
                monoStream.Downmix = ve.Value;
            });

            // adjust sample rate to mixer output rate
            ResamplingStream resamplingStream = new ResamplingStream(volumeControl,
                                                                     ResamplingQuality.Medium, audioMixer.Properties.SampleRate);

            IAudioStream trackStream = resamplingStream;

            if (trackStream.Properties.Channels == 1 && audioMixer.Properties.Channels > 1)
            {
                trackStream = new MonoStream(trackStream, audioMixer.Properties.Channels);
            }

            audioMixer.Add(trackStream);
            trackListStreams.Add(audioTrack, trackStream);
        }
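The SoloChanged handler above encodes a small rule that is easier to read in isolation. The following self-contained sketch restates just that rule; TrackState is an invented stand-in for the Solo/Mute flags of AudioTrack and is not part of the original code:

        // Illustration only: TrackState stands in for AudioTrack's Solo/Mute flags.
        private class TrackState
        {
            public bool Solo;
            public bool Mute;
        }

        private static void ApplySoloRule(IList <TrackState> tracks, TrackState sender, bool soloed)
        {
            bool isOtherTrackSoloed = tracks.Any(t => t != sender && t.Solo);

            if (isOtherTrackSoloed)
            {
                // Another track is soloed: this track's mute mirrors the opposite of its solo state.
                sender.Mute = !soloed;
            }
            else
            {
                // This is the only (un)soloed track: mute or unmute every other non-soloed track.
                foreach (var t in tracks)
                {
                    if (t != sender && !t.Solo)
                    {
                        t.Mute = soloed;
                    }
                }
            }
        }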
 /// <param name="player"> Audio player </param>
 /// <param name="track"> Audio track where the exception occurred </param>
 /// <param name="exception"> The exception that occurred </param>
 public TrackExceptionEvent(AudioPlayer player, AudioTrack track, FriendlyException exception) : base(player)
 {
     this.track     = track;
     this.exception = exception;
 }
Example No. 50
    void DoBatchRender(ArrayList selectedTemplates, String basePath, RenderMode renderMode)
    {
        String outputDirectory = Path.GetDirectoryName(basePath);
        String baseFileName    = Path.GetFileName(basePath);

        // make sure templates are selected
        if ((null == selectedTemplates) || (0 == selectedTemplates.Count))
        {
            throw new ApplicationException("No render templates selected.");
        }

        // make sure the output directory exists
        if (!Directory.Exists(outputDirectory))
        {
            throw new ApplicationException("The output directory does not exist.");
        }

        List <RenderArgs> renders = new List <RenderArgs>();

        // enumerate through each selected render template
        foreach (RenderItem renderItem in selectedTemplates)
        {
            // construct the file name (most of it)
            String filename = Path.Combine(outputDirectory,
                                           FixFileName(baseFileName));
            // file name fix

            //check to see if this is a QuickTime file...if so, file length cannot exceed 59 characters
            if (renderItem.Renderer.ClassID == Renderer.CLSID_CSfQT7RenderFileClass)
            {
                int size = baseFileName.Length + renderItem.Renderer.FileTypeName.Length + 1 + renderItem.Template.Name.Length;
                if (size > QUICKTIME_MAX_FILE_NAME_LENGTH)
                {
                    int    dif      = size - (QUICKTIME_MAX_FILE_NAME_LENGTH - 2); // extra buffer for a "--" to indicate the name is truncated.
                    string tempstr1 = renderItem.Renderer.FileTypeName;
                    string tempstr2 = renderItem.Template.Name;
                    if (tempstr1.Length < (dif + 3))
                    {
                        dif     -= (tempstr1.Length - 3);
                        tempstr1 = tempstr1.Substring(0, 3);
                        tempstr2 = tempstr2.Substring(dif);
                    }
                    else
                    {
                        tempstr1 = tempstr1.Substring(0, tempstr1.Length - dif);
                    }
                    filename = Path.Combine(outputDirectory,
                                            FixFileName(baseFileName) +
                                            FixFileName(tempstr1) +
                                            "--" +
                                            FixFileName(tempstr2));
                }
            }
            // actual render part, when project render mode is toggled on
            if (RenderMode.Project == renderMode)
            {
                foreach (Track track in myVegas.Project.Tracks)
                {
                    if (track.IsAudio())   // only for preset rendering
                    {
                        track.Solo = true;
                        if (track.Name != null)
                        {
                            // for always on background track
                            // reset volume
                            AudioTrack audioTrack = (AudioTrack)track;
                            audioTrack.Volume = 1;
                            // null check
                            String regionFilename = String.Format("{0}{1}{2}",
                                                                  filename,
                                                                  track.Name,
                                                                  renderItem.Extension); // extension (must be)
                            RenderArgs args = new RenderArgs();
                            args.OutputFile     = regionFilename;
                            args.RenderTemplate = renderItem.Template;
                            // Render
                            if (track.Events.Count > 0)
                            {
                                // No events crash fix
                                args.Start  = track.Events[0].Start;
                                args.Length = track.Events[0].Length;
                                DoRender(args);
                            }
                            track.Solo = false;
                        }
                    }
                }
            }
            else
            {
                filename += renderItem.Extension;
                RenderArgs args = new RenderArgs();
                args.OutputFile     = filename;
                args.RenderTemplate = renderItem.Template;
                args.UseSelection   = (renderMode == RenderMode.Selection);
                renders.Add(args);
            }
        }

        // validate all files and prompt for overwrites
        foreach (RenderArgs args in renders)
        {
            ValidateFilePath(args.OutputFile);
            if (!OverwriteExistingFiles)
            {
                if (File.Exists(args.OutputFile))
                {
                    String       msg = "File(s) already exist. Do you want to overwrite them?";
                    DialogResult rs;
                    rs = MessageBox.Show(msg,
                                         "Overwrite files?",
                                         MessageBoxButtons.OKCancel,
                                         MessageBoxIcon.Warning,
                                         MessageBoxDefaultButton.Button2);
                    if (DialogResult.Cancel == rs)
                    {
                        return;
                    }
                    else
                    {
                        OverwriteExistingFiles = true;
                    }
                }
            }
        }

        // Perform all renders. DoRender returns a member of the RenderStatus enumeration;
        // stop as soon as a render is canceled.
        foreach (RenderArgs args in renders)
        {
            if (RenderStatus.Canceled == DoRender(args))
            {
                break;
            }
        }
    }
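
The branch at the top of this example that shortens over-long QuickTime file names is easy to misread, so here is a standalone sketch of the same rule. The constant value, the ShortenRenderName helper name and the way size is computed are assumptions made only for this illustration; the real script also passes every part through FixFileName.

        // Hedged sketch of the truncation rule above; QUICKTIME_MAX_FILE_NAME_LENGTH's value is assumed.
        // At least three characters of the file-type name are kept, the remaining overflow is trimmed
        // from the front of the template name, and "--" marks that the name was shortened.
        const int QUICKTIME_MAX_FILE_NAME_LENGTH = 55;   // illustrative value only

        static string ShortenRenderName(string baseName, string fileTypeName, string templateName)
        {
            int size = baseName.Length + fileTypeName.Length + templateName.Length;
            int dif  = size - (QUICKTIME_MAX_FILE_NAME_LENGTH - 2);   // 2 extra characters for the "--" marker
            if (dif > 0)
            {
                if (fileTypeName.Length < (dif + 3))
                {
                    dif         -= (fileTypeName.Length - 3);            // the type name absorbs part of the overflow
                    fileTypeName = fileTypeName.Substring(0, 3);
                    templateName = templateName.Substring(dif);          // the rest comes off the front of the template name
                }
                else
                {
                    fileTypeName = fileTypeName.Substring(0, fileTypeName.Length - dif);
                }
            }
            return baseName + fileTypeName + "--" + templateName;
        }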
Esempio n. 51
0
        public void Generate(AudioTrack track)
        {
            IAudioStream audioStream = new ResamplingStream(
                new MonoStream(AudioStreamFactory.FromFileInfoIeee32(track.FileInfo)),
                ResamplingQuality.Medium, profile.SamplingRate);

            STFT stft            = new STFT(audioStream, profile.WindowSize, profile.HopSize, WindowType.Hann, STFT.OutputFormat.Decibel);
            int  index           = 0;
            int  indices         = stft.WindowCount;
            int  processedFrames = 0;

            float[] spectrum         = new float[profile.WindowSize / 2];
            float[] smoothedSpectrum = new float[spectrum.Length - profile.SpectrumSmoothingLength + 1]; // the smoothed frequency spectrum of the current frame
            var     spectrumSmoother = new SimpleMovingAverage(profile.SpectrumSmoothingLength);

            float[] spectrumTemporalAverage = new float[spectrum.Length]; // a running average of each spectrum bin over time
            float[] spectrumResidual        = new float[spectrum.Length]; // the difference between the current spectrum and the moving average spectrum

            var peakHistory = new PeakHistory(1 + profile.TargetZoneDistance + profile.TargetZoneLength, spectrum.Length / 2);
            var peakPairs   = new List <PeakPair>(profile.PeaksPerFrame * profile.PeakFanout); // keep a single instance of the list to avoid instantiation overhead

            var subFingerprints = new List <SubFingerprint>();

            while (stft.HasNext())
            {
                // Get the FFT spectrum
                stft.ReadFrame(spectrum);

                // Skip frames whose average spectrum volume is below the threshold
                // This skips silent frames (zero samples) that only contain very low noise from the FFT
                // and that would screw up the temporal spectrum average below for the following frames.
                if (spectrum.Average() < spectrumMinThreshold)
                {
                    index++;
                    continue;
                }

                // Smooth the frequency spectrum to remove small peaks
                if (profile.SpectrumSmoothingLength > 0)
                {
                    spectrumSmoother.Clear();
                    for (int i = 0; i < spectrum.Length; i++)
                    {
                        var avg = spectrumSmoother.Add(spectrum[i]);
                        if (i >= profile.SpectrumSmoothingLength)
                        {
                            smoothedSpectrum[i - profile.SpectrumSmoothingLength] = avg;
                        }
                    }
                }

                // Update the temporal moving bin average
                if (processedFrames == 0)
                {
                    // Init averages on first frame
                    for (int i = 0; i < spectrum.Length; i++)
                    {
                        spectrumTemporalAverage[i] = spectrum[i];
                    }
                }
                else
                {
                    // Update averages on all subsequent frames
                    for (int i = 0; i < spectrum.Length; i++)
                    {
                        spectrumTemporalAverage[i] = ExponentialMovingAverage.UpdateMovingAverage(
                            spectrumTemporalAverage[i], profile.SpectrumTemporalSmoothingCoefficient, spectrum[i]);
                    }
                }

                // Calculate the residual
                // The residual is the difference of the current spectrum to the temporal average spectrum. The higher
                // a bin residual is, the steeper the increase in energy in that peak.
                for (int i = 0; i < spectrum.Length; i++)
                {
                    spectrumResidual[i] = spectrum[i] - spectrumTemporalAverage[i] - 90f;
                }

                // Find local peaks in the residual
                // The advantage of finding peaks in the residual instead of the spectrum is that spectrum energy is usually
                // concentrated in the low frequencies, resulting in a clustering of the highest peaks in the lows. Getting
                // peaks from the residual distributes the peaks more evenly across the spectrum.
                var peaks = peakHistory.List;             // take oldest list,
                peaks.Clear();                            // clear it, and
                FindLocalMaxima(spectrumResidual, peaks); // refill with new peaks

                // Pick the largest n peaks
                int numMaxima = Math.Min(peaks.Count, profile.PeaksPerFrame);
                if (numMaxima > 0)
                {
                    peaks.Sort((p1, p2) => p1.Value == p2.Value ? 0 : p1.Value < p2.Value ? 1 : -1); // order peaks by height
                    if (peaks.Count > numMaxima)
                    {
                        peaks.RemoveRange(numMaxima, peaks.Count - numMaxima);                       // select the n tallest peaks by deleting the rest
                    }
                    peaks.Sort((p1, p2) => p1.Index == p2.Index ? 0 : p1.Index < p2.Index ? -1 : 1); // sort peaks by index (not really necessary)
                }

                peakHistory.Add(index, peaks);

                if (FrameProcessed != null)
                {
                    // Mark peaks as 0dB for spectrogram display purposes
                    foreach (var peak in peaks)
                    {
                        spectrum[peak.Index]         = 0;
                        spectrumResidual[peak.Index] = 0;
                    }

                    FrameProcessed(this, new FrameProcessedEventArgs {
                        AudioTrack = track, Index = index, Indices = indices,
                        Spectrum   = spectrum, SpectrumResidual = spectrumResidual
                    });
                }

                processedFrames++;
                index++;

                if (processedFrames >= peakHistory.Length)
                {
                    peakPairs.Clear();
                    FindPairsWithMaxEnergy(peakHistory, peakPairs);
                    ConvertPairsToSubFingerprints(peakPairs, subFingerprints);
                }

                if (subFingerprints.Count > 512)
                {
                    FireFingerprintHashesGenerated(track, indices, subFingerprints);
                    subFingerprints.Clear();
                }
            }

            // Flush the remaining peaks of the last frames from the history to get all remaining pairs
            for (int i = 0; i < profile.TargetZoneLength; i++)
            {
                var peaks = peakHistory.List;
                peaks.Clear();
                peakHistory.Add(-1, peaks);
                peakPairs.Clear();
                FindPairsWithMaxEnergy(peakHistory, peakPairs);
                ConvertPairsToSubFingerprints(peakPairs, subFingerprints);
            }
            FireFingerprintHashesGenerated(track, indices, subFingerprints);

            audioStream.Close();
        }
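
The temporal average that feeds the residual above is an exponential moving average of every spectrum bin. The body of ExponentialMovingAverage.UpdateMovingAverage is not shown here, so the sketch below assumes the standard EMA update; the 0.05f coefficient in the usage note is purely illustrative.

        // Assumed standard exponential moving average update (what UpdateMovingAverage above is expected to do).
        static float UpdateMovingAverage(float currentAverage, float smoothingCoefficient, float newValue)
        {
            // A coefficient close to 0 changes slowly; a coefficient close to 1 follows the new value almost immediately.
            return currentAverage + smoothingCoefficient * (newValue - currentAverage);
        }

        // Illustrative per-bin usage, mirroring the loop in the example:
        //   spectrumTemporalAverage[i] = UpdateMovingAverage(spectrumTemporalAverage[i], 0.05f, spectrum[i]);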
Esempio n. 52
0
 /// <summary>
 /// Determines if the given encoder is compatible with the given track.
 /// </summary>
 /// <param name="track">
 /// The audio track to examine.
 /// </param>
 /// <param name="encoder">
 /// The encoder to examine.
 /// </param>
 /// <returns>
 /// True if the given encoder is compatible with the given audio track.
 /// </returns>
 /// <remarks>
 /// Only works with passthrough encoders.
 /// </remarks>
 public static bool AudioEncoderIsCompatible(AudioTrack track, HBAudioEncoder encoder)
 {
     return((track.CodecId & encoder.Id) > 0);
 }
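
One possible way to use the check above is to filter the passthrough encoders that match a scanned track. In the sketch below, allPassthroughEncoders and sourceTrack are illustrative placeholders, not HandBrake API members, and a using for System.Collections.Generic is assumed.

        // Hedged usage sketch; the collection and track variables are placeholders.
        var compatibleEncoders = new List<HBAudioEncoder>();
        foreach (HBAudioEncoder encoder in allPassthroughEncoders)
        {
            // The encoder id is a bit flag that must overlap the track's codec id (the test above)
            if (AudioEncoderIsCompatible(sourceTrack, encoder))
            {
                compatibleEncoders.Add(encoder);
            }
        }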
Esempio n. 53
0
 /// <summary>
 /// Determines if DRC can be applied to the given track with the given encoder.
 /// </summary>
 /// <param name="track">
 /// The track to apply DRC to.
 /// </param>
 /// <param name="encoder">
 /// The encoder to use for DRC.
 /// </param>
 /// <param name="title">
 /// The title.
 /// </param>
 /// <returns>
 /// True if DRC can be applied to the track with the given encoder.
 /// </returns>
 public static bool CanApplyDrc(AudioTrack track, HBAudioEncoder encoder, int title)
 {
     return(HBFunctions.hb_audio_can_apply_drc2(HandBrakeInstanceManager.LastScanHandle, title, track.TrackNumber, encoder.Id) > 0);
 }
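
As a small hedged illustration, the helper above could be used to pick the first encoder that allows DRC for a track; candidateEncoders, sourceTrack and titleIndex are placeholders introduced only for this sketch.

        // Hedged usage sketch; all variable names are placeholders for this illustration.
        HBAudioEncoder drcEncoder = null;
        foreach (HBAudioEncoder encoder in candidateEncoders)
        {
            if (CanApplyDrc(sourceTrack, encoder, titleIndex))
            {
                drcEncoder = encoder;   // first encoder that supports dynamic range compression for this track
                break;
            }
        }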
        public override void RemovedAudioTrack(Participant participant, AudioTrack audioTrack)
        {
            var msg = $"Participant {participant.Identity} removed audio track.";

            OnRemovedAudioTrack?.Invoke(msg, participant, audioTrack);
        }
Esempio n. 55
0
 public override void StartMenuMusic()
 {
     mainMusic = Game.mainInstance.audioManager.addTrack(pathToMusic);
 }
        private static void FillPeakStore(AudioTrack audioTrack, bool fileSupport, IAudioStream audioInputStream, PeakStore peakStore)
        {
            bool peakFileLoaded = false;

            // search for existing peakfile
            if (audioTrack.HasPeakFile && fileSupport)
            {
                // load peakfile from disk
                try {
                    peakStore.ReadFrom(File.OpenRead(audioTrack.PeakFile.FullName), audioTrack.FileInfo.LastWriteTimeUtc);
                    peakStore.CalculateScaledData(8, 6);
                    peakFileLoaded = true;
                }
                catch (Exception e) {
                    Console.WriteLine("peakfile read failed: " + e.Message);
                }
            }

            // generate peakfile
            if (!peakFileLoaded)
            {
                int            channels    = peakStore.Channels;
                byte[]         buffer      = new byte[65536 * audioInputStream.SampleBlockSize];
                float[]        min         = new float[channels];
                float[]        max         = new float[channels];
                BinaryWriter[] peakWriters = peakStore.CreateMemoryStreams().WrapWithBinaryWriters();

                IProgressReporter progressReporter = ProgressMonitor.GlobalInstance.BeginTask("Generating peaks for " + audioTrack.Name, true);
                DateTime          startTime        = DateTime.Now;
                int  sampleBlockCount = 0;
                int  peakCount        = 0;
                int  bytesRead;
                long totalSampleBlocks = audioInputStream.Length / audioInputStream.SampleBlockSize;
                long totalSamplesRead  = 0;
                int  progress          = 0;

                for (int i = 0; i < channels; i++)
                {
                    min[i] = float.MaxValue;
                    max[i] = float.MinValue;
                }

                unsafe
                {
                    fixed(byte *bufferB = &buffer[0])
                    {
                        float *bufferF = (float *)bufferB;
                        int    samplesRead;
                        int    samplesProcessed;
                        bool   peakStoreFull = false;

                        while ((bytesRead = StreamUtil.ForceRead(audioInputStream, buffer, 0, buffer.Length)) > 0)
                        {
                            samplesRead      = bytesRead / audioInputStream.Properties.SampleByteSize;
                            samplesProcessed = 0;

                            do
                            {
                                for (int channel = 0; channel < channels; channel++)
                                {
                                    if (min[channel] > bufferF[samplesProcessed])
                                    {
                                        min[channel] = bufferF[samplesProcessed];
                                    }
                                    if (max[channel] < bufferF[samplesProcessed])
                                    {
                                        max[channel] = bufferF[samplesProcessed];
                                    }
                                    samplesProcessed++;
                                    totalSamplesRead++;
                                }

                                if (++sampleBlockCount % SAMPLES_PER_PEAK == 0 || sampleBlockCount == totalSampleBlocks)
                                {
                                    // write peak
                                    peakCount++;
                                    for (int channel = 0; channel < channels; channel++)
                                    {
                                        peakWriters[channel].Write(new Peak(min[channel], max[channel]));
                                        // add last sample of previous peak as first sample of current peak to make consecutive peaks overlap
                                        // this gives the impression of a continuous waveform
                                        min[channel] = max[channel] = bufferF[samplesProcessed - channels];
                                    }
                                    //sampleBlockCount = 0;
                                }

                                if (sampleBlockCount == totalSampleBlocks && samplesProcessed < samplesRead)
                                {
                                    // There's no more space for more peaks
                                    // TODO how to handle this case? why is there still audio data left?
                                    Console.WriteLine("peakstore full, but there are samples left ({0} < {1})", samplesProcessed, samplesRead);
                                    peakStoreFull = true;
                                    break;
                                }
                            }while (samplesProcessed < samplesRead);

                            progressReporter.ReportProgress(100.0f / audioInputStream.Length * audioInputStream.Position);
                            if ((int)(100.0f / audioInputStream.Length * audioInputStream.Position) > progress)
                            {
                                progress = (int)(100.0f / audioInputStream.Length * audioInputStream.Position);
                                peakStore.OnPeaksChanged();
                            }

                            if (peakStoreFull)
                            {
                                break;
                            }
                        }
                    }
                }

                Debug.WriteLine("generating downscaled peaks...");
                peakStore.CalculateScaledData(8, 6);

                Debug.WriteLine("peak generation finished - " + (DateTime.Now - startTime) + ", " + (peakWriters[0].BaseStream.Length * channels) + " bytes");
                progressReporter.Finish();

                if (fileSupport)
                {
                    // write peakfile to disk
                    try {
                        FileStream peakOutputFile = File.OpenWrite(audioTrack.PeakFile.FullName);
                        peakStore.StoreTo(peakOutputFile, audioTrack.FileInfo.LastWriteTimeUtc);
                        peakOutputFile.Close();
                    }
                    catch (UnauthorizedAccessException e) {
                        Debug.WriteLine("peak file writing failed: " + e.Message);
                    }
                }
            }
            peakStore.OnPeaksChanged();
        }
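
The heart of the peak generation above is reducing each run of SAMPLES_PER_PEAK interleaved sample blocks to one min/max pair per channel. The sketch below shows that reduction on a plain managed float buffer, without the unsafe pointer loop, the progress reporting or the peak-file I/O; the method name, the value-tuple list and the System.Collections.Generic using are choices made for this illustration only.

        // Simplified, managed sketch of the per-block min/max reduction used above.
        static List<(float Min, float Max)>[] ComputePeaks(float[] interleavedSamples, int channels, int samplesPerPeak)
        {
            var peaks = new List<(float Min, float Max)>[channels];
            var min   = new float[channels];
            var max   = new float[channels];
            for (int c = 0; c < channels; c++)
            {
                peaks[c] = new List<(float Min, float Max)>();
                min[c]   = float.MaxValue;
                max[c]   = float.MinValue;
            }

            int sampleBlockCount = 0;
            for (int i = 0; i + channels <= interleavedSamples.Length; i += channels)
            {
                for (int c = 0; c < channels; c++)
                {
                    float sample = interleavedSamples[i + c];
                    if (sample < min[c]) { min[c] = sample; }
                    if (sample > max[c]) { max[c] = sample; }
                }

                if (++sampleBlockCount % samplesPerPeak == 0)
                {
                    for (int c = 0; c < channels; c++)
                    {
                        peaks[c].Add((min[c], max[c]));
                        // Carry the last sample over so consecutive peaks overlap, as in the example above
                        min[c] = max[c] = interleavedSamples[i + c];
                    }
                }
            }
            return peaks;
        }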
 private void AudioTrack_OpenFailed(AudioTrack sender, AudioTrackOpenFailedEventArgs args)
 {
     Log($"AudioTrack.OpenFailed: ExtendedError:{args.ExtendedError} DecoderStatus:{sender.SupportInfo.DecoderStatus} MediaSourceStatus:{sender.SupportInfo.MediaSourceStatus}");
 }
Esempio n. 58
0
 public override void Apply(ref float sampleValue, ref float volume, float progress, float timestamp, AudioTrack track)
 {
     volume *= Maths.Clamp01(ProgressToTimestamp(progress, timestamp));
 }
 /// <param name="player"> Audio player </param>
 /// <param name="track"> Audio track that ended </param>
 /// <param name="endReason"> The reason why the track stopped playing </param>
 public virtual void onTrackEnd(AudioPlayer player, AudioTrack track, AudioTrackEndReason endReason)
 {
     // Adapter dummy method
 }
        public void Read()
        {
            System.Threading.Tasks.Task.Run(() =>
            {
                AudioTrack _output;

                int buffsize = AudioTrack.GetMinBufferSize(44100, ChannelOut.Stereo, Android.Media.Encoding.Pcm16bit);
                //_output = new AudioTrack(Android.Media.Stream.Music, 44100, ChannelOut.Stereo, Android.Media.Encoding.Pcm16bit,
                //buffsize, AudioTrackMode.Stream);
                var AABuilder = new AudioAttributes.Builder();

                AABuilder.SetContentType(AudioContentType.Music);
                AABuilder.SetUsage(AudioUsageKind.Media);

                var AfBuilder = new AudioFormat.Builder();
                AfBuilder.SetSampleRate(44100);
                AfBuilder.SetEncoding(Android.Media.Encoding.Pcm16bit);
                AfBuilder.SetChannelMask(ChannelOut.Stereo);


                _output = new AudioTrack(AABuilder.Build(), AfBuilder.Build(), buffsize, AudioTrackMode.Stream, AudioManager.AudioSessionIdGenerate);
                _output.Play();

                byte[] myReadBuffer = new byte[1000];
                int count           = 4;   // number of initial reads to discard before audio is written to the output
                System.Threading.Tasks.Task.Run(() =>
                {
                    while (true)
                    {
                        try
                        {
                            int a = mmInStream.Read(myReadBuffer, 0, myReadBuffer.Length);
                            if (a > 0)
                            {
                                if (count == 0)
                                {
                                    // Write only the bytes actually read, not the full buffer
                                    _output.Write(myReadBuffer, 0, a);
                                }
                                else
                                {
                                    // Still discarding the first few reads
                                    count--;
                                }
                            }
                            else
                            {
                                // Nothing was read; kept only as a convenient breakpoint target
                                var t = a;
                            }
                        }
                        catch (System.IO.IOException ex)
                        {
                            // Include the exception in the message; the (message, category) overload would otherwise treat it as a category
                            System.Diagnostics.Debug.WriteLine("Input stream was disconnected: " + ex.Message);
                        }
                    }
                }).ContinueWith((t) =>
                {
                    // Touch the exception so a faulted task does not raise an unobserved-exception event
                    var a = t.Exception;
                });
            }).ContinueWith((t) =>
            {
                // Touch the exception so a faulted task does not raise an unobserved-exception event
                var a = t.Exception;
            });
        }
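
To try the same output path without a Bluetooth socket, the sketch below configures an Android AudioTrack exactly as in the example and feeds it one second of a generated 440 Hz stereo tone; the PlayTestTone method name, the tone frequency and the duration are arbitrary choices made for this illustration.

        // Hedged sketch (Xamarin.Android): the same streaming AudioTrack setup as above, fed with a generated tone.
        // Assumes "using Android.Media;" at the top of the file.
        void PlayTestTone()
        {
            const int sampleRate = 44100;
            int bufferSize = AudioTrack.GetMinBufferSize(sampleRate, ChannelOut.Stereo, Android.Media.Encoding.Pcm16bit);

            var aaBuilder = new AudioAttributes.Builder();
            aaBuilder.SetContentType(AudioContentType.Music);
            aaBuilder.SetUsage(AudioUsageKind.Media);

            var afBuilder = new AudioFormat.Builder();
            afBuilder.SetSampleRate(sampleRate);
            afBuilder.SetEncoding(Android.Media.Encoding.Pcm16bit);
            afBuilder.SetChannelMask(ChannelOut.Stereo);

            var output = new AudioTrack(aaBuilder.Build(), afBuilder.Build(), bufferSize, AudioTrackMode.Stream, AudioManager.AudioSessionIdGenerate);
            output.Play();

            // One second of a 440 Hz sine wave, 16-bit PCM, interleaved stereo
            short[] samples = new short[sampleRate * 2];
            for (int i = 0; i < sampleRate; i++)
            {
                short value = (short)(System.Math.Sin(2 * System.Math.PI * 440 * i / sampleRate) * short.MaxValue);
                samples[2 * i]     = value;   // left channel
                samples[2 * i + 1] = value;   // right channel
            }
            output.Write(samples, 0, samples.Length);   // blocks until the samples are queued in stream mode

            output.Stop();
            output.Release();
        }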