Example 1
		/// <summary>
		/// Loads the main menu content: sprite batch, tileset, font, string table, buttons and theme music.
		/// </summary>
		public override void LoadContent()
		{
			Trace.WriteLine("[MainMenu] LoadContent()");

			Batch = new SpriteBatch();
	
			Tileset = ResourceManager.CreateAsset<TileSet>("Main Menu");

			Font = ResourceManager.CreateSharedAsset<BitmapFont>("intro", "intro");

			StringTable = ResourceManager.CreateAsset<StringTable>("main");

			Buttons.Add(new ScreenButton("", new Rectangle(156, 324, 340, 14)));
			Buttons[0].Selected += new EventHandler(LoadGameEvent);

			Buttons.Add(new ScreenButton("", new Rectangle(156, 342, 340, 14)));
			Buttons[1].Selected += new EventHandler(StartNewGameEvent);

			Buttons.Add(new ScreenButton("", new Rectangle(156, 360, 340, 14)));
			Buttons[2].Selected += new EventHandler(OptionEvent);

			Buttons.Add(new ScreenButton("", new Rectangle(156, 378, 340, 14)));
			Buttons[3].Selected += new EventHandler(QuitEvent);

			Theme = new AudioStream();
			Theme.LoadOgg("main.ogg");
			Theme.Loop = true;
			//Theme.Play();
		}
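A hypothetical matching unload for this screen, assuming the base class also defines UnloadContent and that these resource types are IDisposable (a sketch, not the original author's code):

		public override void UnloadContent()
		{
			Trace.WriteLine("[MainMenu] UnloadContent()");

			Theme?.Dispose();   // assumption: AudioStream is IDisposable
			Batch?.Dispose();
			Buttons.Clear();
		}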
Example 2
        public override void InitializeComponent()
        {
            if (AudioContext == null)
            {
                //AudioContext = new AudioContext(AudioContext.DefaultDevice, 44100, 4410);
                AudioContext = new AudioContext();
                //XRam = new XRamExtension();

                var Position = new Vector3(0, 0, 0);
                var Velocity = new Vector3(0, 0, 0);
                //var Orientation = new float[] { 0, 0, 1 };
                AL.Listener(ALListener3f.Position, ref Position);
                AL.Listener(ALListener3f.Velocity, ref Velocity);
                //AL.Listener(ALListenerfv.Orientation, ref Orientation);

                AudioStream = new AudioStream();
            }
        }
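The null check makes InitializeComponent safe to call more than once. A minimal teardown sketch, assuming OpenTK's AudioContext (which is IDisposable); the method name ShutdownComponent is hypothetical:

        public void ShutdownComponent()
        {
            if (AudioContext != null)
            {
                AudioContext.Dispose(); // releases the OpenAL device and context
                AudioContext = null;
            }
        }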
Example 3
        public static AudioData LoadAudioData(AudioStream audioStream)
        {
            lock (LoadedData)
            {
                var audioData = LoadedData.FirstOrDefault(x => x.Filename == audioStream.Filename);
                if (audioData == null)
                {
                    audioData = new AudioData
                    {
                        Filename = audioStream.Filename,
                        Data     = File.ReadAllBytes(audioStream.Filename)
                    };

                    audioData.DataHandle = GCHandle.Alloc(audioData.Data, GCHandleType.Pinned);
                    LoadedData.Add(audioData);
                }

                audioStream.AudioData = audioData;
                audioData.AudioStreams.Add(audioStream);

                return(audioData);
            }
        }
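LoadAudioData deduplicates file bytes by filename, pins them with a GCHandle so native audio APIs can hold a stable pointer, and records every referencing stream; Example 20 shows the matching unload. A minimal usage sketch, assuming AudioStream exposes a settable Filename (the AudioLoader class name is hypothetical):

        var stream = new AudioStream { Filename = "main.ogg" };
        var data   = AudioLoader.LoadAudioData(stream);    // pins the bytes
        var ptr    = data.DataHandle.AddrOfPinnedObject(); // stable while pinned
        // ... play the stream ...
        AudioLoader.UnloadAudioData(stream);               // unpins once no streams remain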
Example 4
        /// <summary>
        ///     Play an audio stream globally, without position.
        /// </summary>
        /// <param name="stream">The audio stream to play.</param>
        public void Play(AudioStream stream, AudioParams? audioParams = null)
        {
            if (!GameController.OnGodot)
            {
                return;
            }
            var player = new Godot.AudioStreamPlayer()
            {
                Stream  = stream.GodotAudioStream,
                Playing = true,
            };

            if (audioParams != null)
            {
                var val = audioParams.Value;
                player.Bus      = val.BusName;
                player.VolumeDb = val.Volume;
                //player.PitchScale = val.PitchScale;
                player.MixTarget = (Godot.AudioStreamPlayer.MixTargetEnum)val.MixTarget;
            }
            sceneTree.WorldRoot.AddChild(player);
            TrackAudioPlayer(player);
        }
Example 5
        /// <summary>
        ///     Adds an audio stream to a mixer
        /// </summary>
        /// <param name="audioStream">The audio stream.</param>
        /// <param name="mixerChannel">The mixer channel.</param>
        public static void AddToMixer(AudioStream audioStream, MixerChannel mixerChannel)
        {
            if (audioStream == null || !audioStream.IsAudioLoaded())
            {
                throw new Exception("Audio file null or not audio not loaded");
            }

            if (mixerChannel.ChannelId == int.MinValue)
            {
                throw new Exception("Mixer channel not initialized");
            }

            // DebugHelper.WriteLine($"AddToMixer {audioStream.Description} {mixerChannel} {audioStream.Channel}...");

            BassMix.BASS_Mixer_StreamAddChannel(mixerChannel.ChannelId, audioStream.ChannelId,
                                                BASSFlag.BASS_MIXER_PAUSE | BASSFlag.BASS_MIXER_DOWNMIX | BASSFlag.BASS_MIXER_NORAMPIN |
                                                BASSFlag.BASS_MUSIC_AUTOFREE);
            Thread.Sleep(1);

            audioStream.MixerChannelId = mixerChannel.ChannelId;

            // DebugHelper.WriteLine("done");
        }
Example 6
        /// <summary>
        /// Reads the header chunk of the wave file.
        /// </summary>
        /// <returns>true if the header looks like a wave file</returns>
        public bool Read(AudioStream from)
        {
            // read header from the filestream
            from.Read(this);
#if DEBUG
            this.Debug();
#endif

            // check that the file is a riff file
            if (!this.Prefix.SequenceEqual(Wave.TargetHeader.Prefix))
            {
                Console.WriteLine("Header does not start with 'RIFF'.");
                return(false);
            }

            // check that the file is a wave file
            if (!this.Format.SequenceEqual(Wave.TargetHeader.Format))
            {
                Console.WriteLine("RIFF file format is not type of 'WAVE'");
                return(false);
            }
            return(true);
        }
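For reference, a plausible shape for the expected-header constants compared against above (an assumption; the real Wave.TargetHeader is defined elsewhere in this codebase):

        public static class Wave
        {
            public static readonly (byte[] Prefix, byte[] Format) TargetHeader =
                (System.Text.Encoding.ASCII.GetBytes("RIFF"),  // RIFF container magic
                 System.Text.Encoding.ASCII.GetBytes("WAVE")); // wave form type
        }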
Example 7
 public override void _Ready()
 {
     DieSound                       = ResourceLoader.Load("res://Recursos/Sons/die.wav") as AudioStream;
     JumpSound                      = ResourceLoader.Load("res://Recursos/Sons/jump.wav") as AudioStream;
     BugsBLL.jogador                = this;
     GlobalPosition                 = BugsBLL.Spawnpoint;
     personagemDTO.Vivo             = true;
     personagemDTO.Vida             = 1.0f;
     personagemDTO.Municao          = BugsBLL.Municao;
     personagemDTO.Velocidade       = 1f;
     personagemDTO.Peso             = 80;
     personagemDTO.Gravidade        = 9.8f;
     personagemDTO.ForcaPulo        = -20;
     personagemDTO.Direcao          = new Vector2(0, 0);
     personagemDTO.Corpo2D          = this;
     personagemDTO.UltimaAnimcacao  = "";
     personagemDTO.AnimationPlayer  = GetNode <AnimationPlayer>("./AnimationPlayer");
     personagemDTO.SpritePersonagem = GetNode <Sprite>("./SpriteDoIgor");
     Base       = GetNode("/root/Base") as GameController;
     Combo      = GetNode <Timer>("./Combo");
     ArmaSprite = GetNode <Node2D>("./Arma/ArmaSprite") as ArmaController;
     Arma       = ArmaSprite.GetParent <Node2D>();
 }
Example 8
        internal BufferedWriteStream(AudioStream next, AudioClient client, int bufferMillis, CancellationToken cancelToken, Logger logger, int maxFrameSize = 1500)
        {
            //maxFrameSize = 1275 was too limiting at 128kbps,2ch,60ms
            _next          = next;
            _client        = client;
            _ticksPerFrame = OpusEncoder.FrameMillis;
            _logger        = logger;
            _queueLength   = (bufferMillis + (_ticksPerFrame - 1)) / _ticksPerFrame; //Round up

            _disposeTokenSource = new CancellationTokenSource();
            _cancelTokenSource  = CancellationTokenSource.CreateLinkedTokenSource(_disposeTokenSource.Token, cancelToken);
            _cancelToken        = _cancelTokenSource.Token;
            _queuedFrames       = new ConcurrentQueue <Frame>();
            _bufferPool         = new ConcurrentQueue <byte[]>();
            for (int i = 0; i < _queueLength; i++)
            {
                _bufferPool.Enqueue(new byte[maxFrameSize]);
            }
            _queueLock     = new SemaphoreSlim(_queueLength, _queueLength);
            _silenceFrames = MaxSilenceFrames;

            _task = Run();
        }
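The queue-length expression is integer ceiling division. A worked example, assuming 20 ms Opus frames:

        int ticksPerFrame = 20;   // OpusEncoder.FrameMillis
        int bufferMillis  = 1010; // deliberately not a multiple of the frame size
        int queueLength   = (bufferMillis + (ticksPerFrame - 1)) / ticksPerFrame; // ceil(1010/20) = 51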
Example 9
        /// <summary>
        /// Reads the file header into this.header.
        /// </summary>
        public bool Read(AudioStream from)
        {
            // read the first 14 bytes in the filestream to header
            from.Read(this);
#if (DEBUG)
            // show debug info
            this.Debug();
#endif
            // probably a midi file
            if (ByteConverter.ToASCIIString(this.Prefix) == "MThd" &&
                ByteConverter.ToInt(this.Size) == 6)
            {
                // too many songs in one file
                if (ByteConverter.ToInt(this.Format) == 2)
                {
                    Console.WriteLine("Midi files with multiple songs are not supported.");
                    return(false);
                }
                return(true);
            }
            Console.WriteLine("The file given is not a midi file.");
            return(false);
        }
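Plausible shapes for the ByteConverter helpers used above (assumptions; MIDI header fields are big-endian, which the integer conversion reflects):

        public static class ByteConverter
        {
            public static string ToASCIIString(byte[] bytes) =>
                System.Text.Encoding.ASCII.GetString(bytes);

            public static int ToInt(byte[] bytes)
            {
                int result = 0;
                foreach (byte b in bytes)
                {
                    result = (result << 8) | b; // big-endian accumulate
                }
                return result;
            }
        }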
Example 10
    /// <summary>
    /// Display next Question
    /// </summary>
    public void DisplayNextQuestion()
    {
        Control ctr = GetNode <Control>("Buttons");

        if (questionIndex < questionList.Count - 1)
        {
            questionIndex++;
            DisplayQuestion();
            SetQuestionNum();
        }
        else
        {
            //EmitSignal("NoMoreQuestions")
            timerLabel.Visible = false;
            HideBtns();
            monsterSprite.Play("Die");
            win.Play("Win");
            DefaultSound.disableSound();
            AudioStream sfx = ResourceLoader.Load("res://Assets/SoundEffects/Jingle_Win_00.wav") as AudioStream;
            audioStreamPlayer.Stream  = sfx;
            audioStreamPlayer.Playing = true;
        }
    }
Example 11
        private static byte[] GetAviAudioBytes(string fileName)
        {
            var aviManager = new AviManager(fileName);

            byte[] bytes = null;

            try
            {
                AudioStream audioStream = aviManager.GetWaveStream();

                if (audioStream != null)
                {
                    if (audioStream.ChannelsCount != 2 || audioStream.BitsPerSample != 16)
                    {
                        throw new InvalidDataException();
                    }

                    if (audioStream.SamplesPerSecond != 22050 && audioStream.SamplesPerSecond != 44100)
                    {
                        throw new NotSupportedException();
                    }

                    bytes = audioStream.GetStreamData();

                    if (audioStream.SamplesPerSecond == 44100)
                    {
                        bytes = ConvertAudio44100To22050(bytes);
                    }
                }
            }
            finally
            {
                aviManager.Close();
            }

            return(bytes);
        }
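A naive sketch of the referenced ConvertAudio44100To22050 helper (an assumption: plain 2:1 decimation of 16-bit stereo frames; real code would low-pass filter first to avoid aliasing):

        private static byte[] ConvertAudio44100To22050(byte[] source)
        {
            const int frameSize = 4; // 2 channels x 16 bits per sample
            var result = new byte[source.Length / 2 / frameSize * frameSize];
            for (int src = 0, dst = 0; dst < result.Length; src += frameSize * 2, dst += frameSize)
            {
                Buffer.BlockCopy(source, src, result, dst, frameSize); // keep every other frame
            }
            return result;
        }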
Example 12
        public Player(MainWindow mainwindow, string filename, long timebase)
        {
            try
            {
                this.timebase = timebase;
                this.renderer = mainwindow.Renderer;
                this.audioout = mainwindow.Audio;
                this.mixer    = mainwindow.Mixer;
                this.player   = BaseLib.Media.MoviePlayer.Open(() => { }, filename);

                try
                {
                    if (player.VideoStreams.Length > 0)
                    {
                        this.video = player.open_video(0, frameready);
                    }
                    if (player.AudioStreams.Length > 0)
                    {
                        this.audio       = player.open_audio(0, mainwindow.Mixer, audioready);
                        this.audiobuffer = new FifoStream(mainwindow.Audio.SampleSize * mainwindow.Audio.SampleRate * 3);
                        this.mixer.Register(this.audiobuffer, this.audioout.Channels, false);
                    }
                    this.player.start(0, timebase);
                }
                catch
                {
                    Dispose(true);
                    GC.SuppressFinalize(this);
                    throw;
                }
            }
            catch
            {
                GC.SuppressFinalize(this);
                throw;
            }
        }
Example 13
        public MainWindow()
        {
            InitializeComponent();

            // Log to the console.
            Log.Provider = new ConsoleLogProvider(LogLevel.Info);

            // WebRTC has chosen VP8 as its mandatory video codec.
            // Since video encoding is best done using native code,
            // reference the video codec at the application-level.
            // This is required when using a WebRTC video stream.
            // Register the video codec.
            VideoStream.RegisterCodec("VP8", () =>
            {
                // class Vp8Codec.cs
                return(new Vp8Codec());
            }, true);

            // For improved audio quality, we can use Opus. By
            // setting it as the preferred audio codec, it will
            // override the default PCMU/PCMA codecs.
            // Register the audio codec.
            AudioStream.RegisterCodec("opus", OpusClockRate, OpusChannels, () =>
            {
                // class OpusCodec.cs
                return(new OpusCodec(OpusEchoCanceller));
            }, true);

            // Since this example can create MessageBox alerts, we have to
            // wait until the form has finished loading before proceeding.
            // When the program runs, load and request access to the machine's user media.
            Loaded += (s, unused) =>
            {
                // Start accessing the machine's user media.
                StartMedia();
            };
        }
Example 14
        static void Main(string[] args)
        {
            GameSystems.Initialize(new GameStartInfo
            {
                Window = new WindowInfo()
                {
                    Name = "TestUnit",
                    Size = new Size(1920, 1080)
                }
            });


            var stream0 = new AudioStream(@"C:\Users\LinkC\source\love.mp3");
            var stream1 = new AudioStream(@"C:\Users\LinkC\source\free.wav");

            AudioQueue audioQueue0 = new AudioQueue();
            AudioQueue audioQueue1 = new AudioQueue();


            audioQueue0.Add(stream0, 0, stream0.Length);
            audioQueue1.Add(stream1, 0, stream1.Length);


            AudioSource audioSource0 = new AudioSource(stream0.WaveFormat);
            AudioSource audioSource1 = new AudioSource(stream1.WaveFormat);

            audioSource0.SubmitAudioQueue(audioQueue0);
            audioSource1.SubmitAudioQueue(audioQueue1);

            audioSource0.Start();
            audioSource1.Start();


            GameSystems.RunLoop();

            AudioDevice.Terminate();
        }
Example 15
        public void ShouldAddMultipleAudioStreamsToSequenceAudioStreamsWhenExecutingAudioSequenceSelectionChangedCommandWithDifferentAudios()
        {
            var viewModel = this.CreateViewModel();

            SmoothStreamingVideoAsset smoothStreamingVideoAsset = new SmoothStreamingVideoAsset();

            VideoAssetInOut videoInOut = new VideoAssetInOut(smoothStreamingVideoAsset);

            viewModel.VideoAssetInOut = videoInOut;

            var audioStreamEs       = new AudioStream("audio_es", false);
            var audioStreamEn       = new AudioStream("audio_en", false);
            var audioStreamDirector = new AudioStream("audio_director", false);

            smoothStreamingVideoAsset.AudioStreams.Add(audioStreamEs);
            smoothStreamingVideoAsset.AudioStreams.Add(audioStreamEn);
            smoothStreamingVideoAsset.AudioStreams.Add(audioStreamDirector);

            viewModel.Asset = smoothStreamingVideoAsset;

            Assert.AreEqual(1, viewModel.VideoAssetInOut.SequenceAudioStreams.Count);

            viewModel.AudioSequenceSelectionChangedCommand.Execute(
                new StreamOption {
                Name = "audio_en", SequenceSelected = true
            });

            Assert.AreEqual(2, viewModel.VideoAssetInOut.SequenceAudioStreams.Count);
            Assert.AreSame(audioStreamEn, viewModel.VideoAssetInOut.SequenceAudioStreams[1]);

            viewModel.AudioSequenceSelectionChangedCommand.Execute(new StreamOption {
                Name = "audio_director", SequenceSelected = true
            });

            Assert.AreEqual(3, viewModel.VideoAssetInOut.SequenceAudioStreams.Count);
            Assert.AreSame(audioStreamDirector, viewModel.VideoAssetInOut.SequenceAudioStreams[2]);
        }
Example 16
        public void ExtractData(ref NetworkHost sourceHost, NetworkHost destinationHost, IEnumerable <PacketParser.Packets.AbstractPacket> packetList)
        {
            Packets.UdpPacket udpPacket = null;


            foreach (Packets.AbstractPacket p in packetList)
            {
                if (p.GetType() == typeof(Packets.UdpPacket))
                {
                    udpPacket = (Packets.UdpPacket)p;
                }
                else if (udpPacket != null && p is Packets.RtpPacket rtpPacket)
                {
                    //Packets.RtpPacket rtpPacket =(Packets.RtpPacket)p;
                    if (Enum.IsDefined(typeof(RtpPayloadType), rtpPacket.PayloadType))
                    {
                        RtpPayloadType payloadType = (RtpPayloadType)rtpPacket.PayloadType;
                        FiveTuple      fiveTuple   = new FiveTuple(sourceHost, udpPacket.SourcePort, destinationHost, udpPacket.DestinationPort, FiveTuple.TransportProtocol.UDP);

                        AudioStream audioStream;
                        Tuple <System.Net.IPAddress, ushort, System.Net.IPAddress, ushort, RtpPayloadType> key = new Tuple <System.Net.IPAddress, ushort, System.Net.IPAddress, ushort, RtpPayloadType>(sourceHost.IPAddress, udpPacket.SourcePort, destinationHost.IPAddress, udpPacket.DestinationPort, payloadType);
                        if (this.audioStreams.ContainsKey(key))
                        {
                            audioStream = this.audioStreams[key];
                        }
                        else
                        {
                            audioStream = new AudioStream(sourceHost, destinationHost, payloadType, MainPacketHandler.FileStreamAssemblerList, fiveTuple, rtpPacket.ParentFrame.FrameNumber);
                            this.audioStreams.Add(key, audioStream);
                            base.MainPacketHandler.OnAudioDetected(audioStream);
                        }

                        audioStream.AddSamples(rtpPacket.ParentFrame.Data.Skip(rtpPacket.PacketStartIndex + Packets.RtpPacket.HEADER_LENGTH).ToArray(), rtpPacket.SampleTick, rtpPacket.ParentFrame.Timestamp, rtpPacket.SyncSourceID);
                    }
                }
            }
        }
Example 17
        public IEnumerable <AudioStreamRandomPitch> Load()
        {
            List <AudioStreamRandomPitch> audioStreams = new List <AudioStreamRandomPitch>();

            List <string> files = new List <string>(
                Directory.GetFiles(
                    Directory.GetCurrentDirectory() +
                    GlobalConstants.ASSETS_FOLDER +
                    "Sounds",
                    "*.wav"));

            files.AddRange(
                Directory.GetFiles(
                    Directory.GetCurrentDirectory() +
                    GlobalConstants.ASSETS_FOLDER +
                    "Sounds",
                    "*.ogg"));

            foreach (string file in files)
            {
                AudioStream audioStream = GD.Load <AudioStream>(file);
                int         lastSlash   = file.LastIndexOf('\\');
                int         lastDot     = file.LastIndexOf('.');

                string name = file.Substring(lastSlash + 1, lastDot - lastSlash - 1);
                audioStream.ResourceName = name;
                AudioStreamRandomPitch streamRandomPitch = new AudioStreamRandomPitch
                {
                    AudioStream  = audioStream,
                    ResourceName = name,
                    RandomPitch  = 1.2f
                };
                audioStreams.Add(streamRandomPitch);
            }

            return(audioStreams);
        }
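The manual LastIndexOf('\\') math above assumes Windows path separators. The framework equivalent is separator-agnostic and shorter:

        string name = System.IO.Path.GetFileNameWithoutExtension(file);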
Example 18
        public override void Load(IResourceCache cache, ResourcePath path)
        {
            if (!GameController.OnGodot)
            {
                return;
            }
            if (!cache.ContentFileExists(path))
            {
                throw new FileNotFoundException("Content file does not exist for audio sample.");
            }

            using (var fileStream = cache.ContentFileRead(path))
            {
                var stream = new Godot.AudioStreamOGGVorbis()
                {
                    Data = fileStream.ToArray(),
                };
                if (stream.GetLength() == 0)
                {
                    throw new InvalidDataException();
                }
                AudioStream = new GodotAudioStreamSource(stream);
            }
        }
Example 19
 // Called when the node enters the scene tree for the first time.
 public override void _Ready()
 {
     // Audio resources
     _audioPistolShot = ResourceLoader.Load <AudioStream>("res://assets/audio/sounds/weapons/gun_revolver_pistol_shot_04.wav");
     _audioGunCock    = ResourceLoader.Load <AudioStream>("res://assets/audio/sounds/weapons/GUN_MP5K_Cocked_Full_Action_02.wav");
     _audioRifleShot  = ResourceLoader.Load <AudioStream>("res://assets/audio/sounds/weapons/gun_rifle_sniper_shot_01.wav");
     _explosionSound  = ResourceLoader.Load <AudioStream>("res://assets/audio/sounds/weapons/explosion_large_no_tail_03.wav");
     // Nodes
     _audioNodeGlobal = GetNode <AudioStreamPlayer>("Audio_Stream_Player");
     _audioNodeLocal  = GetNode <AudioStreamPlayer3D>("Audio_Stream_Player_3D");
     _globals         = GetNode <Globals>("/root/Globals");
     // Setup
     _audioNodeGlobal.Connect("finished", this, "DestroySelfGlobal");
     _audioNodeGlobal.Stop();
     _audioNodeGlobal.MixTarget = AudioStreamPlayer.MixTargetEnum.Surround;
     _audioNodeLocal.Connect("finished", this, "DestroySelf3D");
     _audioNodeLocal.Stop();
     _audioNodeLocal.AttenuationFilterDb = -0.1f;
     _audioNodeLocal.MaxDb = 50;
     _audioNodeLocal.AttenuationFilterCutoffHz = 10000;
     // Attributes
     _shouldLoop = false;
     _globals    = GetNode <Globals>("/root/Globals");
 }
Example 20
        public static void UnloadAudioData(AudioStream audioStream)
        {
            lock (LoadedData)
            {
                var audioData = LoadedData.FirstOrDefault(x => x.Filename == audioStream.Filename);

                if (audioData == null)
                {
                    return;
                }

                audioData.AudioStreams.Remove(audioStream);
                audioStream.AudioData = null;

                if (audioData.AudioStreams.Count > 0)
                {
                    return;
                }

                LoadedData.Remove(audioData);
                audioData.DataHandle.Free();
                audioData.Data = null;
            }
        }
Example 21
        public void StartConference(Action <string> callback)
        {
            // Create a WebRTC audio stream description (requires a
            // reference to the local audio feed).
            AudioStream = new AudioStream(LocalMedia.LocalMediaStream);

            // Create a WebRTC video stream description (requires a
            // reference to the local video feed). Whenever a P2P link
            // initializes using this description, position and display
            // the remote video control on-screen by passing it to the
            // layout manager created above. Whenever a P2P link goes
            // down, remove it.
            VideoStream             = new VideoStream(LocalMedia.LocalMediaStream);
            VideoStream.OnLinkInit += AddRemoteVideoControl;
            VideoStream.OnLinkDown += RemoveRemoteVideoControl;

            // Create a conference using our stream descriptions.
            Conference = new FM.IceLink.Conference(IceLinkServerAddress, new Stream[] { AudioStream, VideoStream });

            // Use our pre-generated DTLS certificate.
            Conference.DtlsCertificate = Certificate;

            // Supply TURN relay credentials in case we are behind a
            // highly restrictive firewall. These credentials will be
            // verified by the TURN server.
            Conference.RelayUsername = "******";
            Conference.RelayPassword = "******";

            // Add a few event handlers to the conference so we can see
            // when a new P2P link is created or changes state.
            Conference.OnLinkInit += LogLinkInit;
            Conference.OnLinkUp   += LogLinkUp;
            Conference.OnLinkDown += LogLinkDown;

            Signalling.Attach(Conference, SessionId, callback);
        }
Example 22
        public override void Load(IResourceCache cache, ResourcePath path)
        {
            if (!cache.ContentFileExists(path))
            {
                throw new FileNotFoundException("Content file does not exist for audio sample.");
            }

            using (var fileStream = cache.ContentFileRead(path))
            {
                var clyde = IoCManager.Resolve <IClydeAudio>();
                if (path.Extension == "ogg")
                {
                    AudioStream = clyde.LoadAudioOggVorbis(fileStream, path.ToString());
                }
                else if (path.Extension == "wav")
                {
                    AudioStream = clyde.LoadAudioWav(fileStream, path.ToString());
                }
                else
                {
                    throw new NotSupportedException("Unable to load audio files outside of ogg Vorbis or PCM wav");
                }
            }
        }
Example 23
        /// <summary>
        ///     Determines whether an audio stream is currently playing.
        /// </summary>
        /// <param name="audioStream">The audio stream.</param>
        /// <returns>
        ///     True if an audio stream is playing; otherwise, false.
        /// </returns>
        public static bool IsPlaying(AudioStream audioStream)
        {
            //if (audioStream == null) return false;
            //var position1 = GetPosition(audioStream);
            //Thread.Sleep(50);
            //var position2 = GetPosition(audioStream);
            //return (position1 != position2);


            if (audioStream == null)
            {
                return(false);
            }
            if (audioStream.ChannelId == int.MinValue)
            {
                return(false);
            }

            var playing = (Bass.BASS_ChannelIsActive(audioStream.ChannelId) != BASSActive.BASS_ACTIVE_STOPPED);

            Thread.Sleep(1);

            return(playing);
        }
Example 24
		// TODO: will move to dispose
		/// <summary>
		/// Stops the conference and detaches its event handlers.
		/// </summary>
		private void StopConference()
		{
			try
			{
#if __ANDROID__
				// Stop echo canceller.
				OpusEchoCanceller.Stop();
				OpusEchoCanceller = null;
#endif
				conference.OnLinkInit -= LogLinkInit;
				conference.OnLinkUp -= LogLinkUp;
				conference.OnLinkDown -= LogLinkDown;

				conference.OnLinkOfferAnswer -= OnLinkSendOfferAnswer;
				conference.OnLinkCandidate -= OnLinkSendCandidate;
				conference = null;

				videoStream.OnLinkInit -= AddRemoteVideoControl;
				videoStream.OnLinkDown -= RemoveRemoteVideoControl;
				videoStream = null;

				audioStream = null;
			}
			catch (Exception ex)
			{
				FM.Log.Debug(ex.ToString());
			}
		}
Example 25
        public void StopConference(Action<string> callback)
        {
            // Detach signalling from the conference.
            Signalling.Detach((error) =>
            {
                Conference.OnLinkInit -= LogLinkInit;
                Conference.OnLinkUp   -= LogLinkUp;
                Conference.OnLinkDown -= LogLinkDown;
                Conference = null;

                VideoStream.OnLinkInit -= AddRemoteVideoControl;
                VideoStream.OnLinkDown -= RemoveRemoteVideoControl;
                VideoStream = null;

                AudioStream = null;
                
                callback(error);
            });
        }
Example 26
		/// <summary>
		/// Initializes the audio and video streams.
		/// </summary>
		private void InitAudioAndVideoStreams()
		{
			// Create a WebRTC audio stream description (requires a
			// reference to the local audio feed).
			audioStream = new AudioStream(LocalMedia.LocalMediaStream);

			// Create a WebRTC video stream description (requires a
			// reference to the local video feed). Whenever a P2P link
			// initializes using this description, position and display
			// the remote video control on-screen by passing it to the
			// layout manager created above. Whenever a P2P link goes
			// down, remove it.
			videoStream = new VideoStream(LocalMedia.LocalMediaStream);
			videoStream.OnLinkInit += AddRemoteVideoControl;
			videoStream.OnLinkDown += RemoveRemoteVideoControl;
		}
Example 27
        private void ParseVtsVobs()
        {
            string data;
            byte[] buffer = new byte[16];

            //retrieve video info
            _fs.Position = 0x200;
            data = IntToBin(GetEndian(2), 16);
            _vtsVobs.VideoStream.CodingMode = ArrayOfCodingMode[BinToInt(MidStr(data, 0, 2))];
            _vtsVobs.VideoStream.Standard = ArrayOfStandard[BinToInt(MidStr(data, 2, 2))];
            _vtsVobs.VideoStream.Aspect = ArrayOfAspect[BinToInt(MidStr(data, 4, 2))];
            if (_vtsVobs.VideoStream.Standard == "PAL")
                _vtsVobs.VideoStream.Resolution = ArrayOfPALResolution[BinToInt(MidStr(data, 13, 2))];
            else if (_vtsVobs.VideoStream.Standard == "NTSC")
                _vtsVobs.VideoStream.Resolution = ArrayOfNTSCResolution[BinToInt(MidStr(data, 13, 2))];

            //retrieve audio info
            _fs.Position = 0x202; //useless but here for readability
            _vtsVobs.NumberOfAudioStreams = GetEndian(2);
            //            _ifo.VtsVobs.AudioStreams = new List<AudioStream>();
            for (int i = 0; i < _vtsVobs.NumberOfAudioStreams; i++)
            {
                AudioStream audioStream = new AudioStream();
                data = IntToBin(GetEndian(2), 16);
                audioStream.LangageTypeSpecified = Convert.ToInt32(MidStr(data, 4, 2));
                audioStream.CodingMode = ArrayOfAudioMode[(BinToInt(MidStr(data, 0, 3)))];
                audioStream.Channels = BinToInt(MidStr(data, 13, 3)) + 1;
                _fs.Read(buffer, 0, 2);
                audioStream.LangageCode = Convert.ToChar(buffer[0]).ToString() + Convert.ToChar(buffer[1]).ToString();
                if (ArrayOfLanguageCode.Contains(audioStream.LangageCode))
                    audioStream.Langage = ArrayOfLanguage[ArrayOfLanguageCode.IndexOf(audioStream.LangageCode)];
                _fs.Seek(1, SeekOrigin.Current);
                audioStream.Extension = ArrayOfAudioExtension[_fs.ReadByte()];
                _fs.Seek(2, SeekOrigin.Current);
                _vtsVobs.AudioStreams.Add(audioStream);
            }

            //retrieve subs info (only name)
            _fs.Position = 0x254;
            _vtsVobs.NumberOfSubtitles = GetEndian(2);
            _fs.Position += 2;
            for (int i = 0; i < _vtsVobs.NumberOfSubtitles; i++)
            {
                _fs.Read(buffer, 0, 2);
                string languageTwoLetter = Convert.ToChar(buffer[0]).ToString() + Convert.ToChar(buffer[1]).ToString();
                _vtsVobs.Subtitles.Add(InterpretLangageCode(languageTwoLetter));

                string subtitleFormat = string.Empty;
                _fs.Read(buffer, 0, 2); // reserved for language code extension + code extension
                switch (buffer[0])      // 4, 8, 10-12 unused
                {
                    // http://dvd.sourceforge.net/dvdinfo/sprm.html
                    case 1: subtitleFormat = "(caption/normal size char)"; break; //0 = unspecified caption
                    case 2: subtitleFormat = "(caption/large size char)"; break;
                    case 3: subtitleFormat = "(caption for children)"; break;
                    case 5: subtitleFormat = "(closed caption/normal size char)"; break;
                    case 6: subtitleFormat = "(closed caption/large size char)"; break;
                    case 7: subtitleFormat = "(closed caption for children)"; break;
                    case 9: subtitleFormat = "(forced caption)"; break;
                    case 13: subtitleFormat = "(director comments/normal size char)"; break;
                    case 14: subtitleFormat = "(director comments/large size char)"; break;
                    case 15: subtitleFormat = "(director comments for children)"; break;
                }

////                int languageId = buffer[1] & Helper.B11111000;
//                int languageId1 = buffer[0] & Helper.B11111000;
//                int languageId2= buffer[1] & Helper.B11111000;
//                System.Diagnostics.Debug.WriteLine(languageTwoLetter + " " + languageId1.ToString() + " " + languageId2.ToString() + "  " + buffer[0].ToString() + " " + buffer[1].ToString());
                _fs.Position += 2;
            }
        }
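Plausible shapes for the bit-string helpers used throughout this parser (assumptions; the real implementations live elsewhere in the codebase):

        private static string IntToBin(int value, int digits) =>
            Convert.ToString(value, 2).PadLeft(digits, '0'); // e.g. IntToBin(5, 4) == "0101"

        private static int BinToInt(string bin) =>
            Convert.ToInt32(bin, 2);

        private static string MidStr(string s, int start, int length) =>
            s.Substring(start, length);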
Example 28
        private void InitOnce()
        {
            if (AudioStream == null)
            {
                //AudioContext = new AudioContext(AudioContext.DefaultDevice, 44100, 4410);
                //AudioContext = new AudioContext();
                //XRam = new XRamExtension();

                device = AL.alcOpenDevice(AL.alcGetString(null, AL.ALC_DEFAULT_DEVICE_SPECIFIER));
                context = AL.alcCreateContext(device, null);
                AL.alcMakeContextCurrent(context);

                AL.alListener3f(AL.AL_POSITION, 0f, 0f, 0f);
                AL.alListener3f(AL.AL_VELOCITY, 0f, 0f, 0f);

                AudioStream = new AudioStream();
            }
        }
Example 29
 /// <summary>
 /// Load the processing library with an .mp3 file
 /// </summary>
 /// <param name="fileName">The filename of the song to load</param>
 public static void LoadFile(string fileName)
 {
     if (fileName.EndsWith(".mp3"))
     {
         if (_audioStreams.Count() < MaxSongs)
         {
             InitializeNAudio(fileName);
             if (_currentWaveChannel == null)
             {
                 //  This is the first audio file loaded
                 _currentWaveChannel = _waveChannel;
             }
             AudioStream stream = new AudioStream(_waveChannel, fileName, _audioStreamList.Count);
             _audioStreams.Add(fileName, stream);
             _audioStreamList.Add(stream);
         }
         else
         {
             throw new Exception("Too many files loaded.  Load at most three distinct songs.");
         }
     }
     else
     {
         throw new InvalidOperationException("Unsupported file extension");
     }
 }
Example 30
        //Video Chat is the main form
        public void StartConference(MainPage videoWindow, Action<Exception> callback)
        {
            if (!SignallingExists())
            {
                callback(new Exception("Signalling must exist before starting a conference."));
            }
            else if (!LocalMediaExists())
            {
                callback(new Exception("Local media must exist before starting a conference."));
            }
            else if (ConferenceExists())
            {
                //trying to start a conference again
                callback(signalling.LastConferenceException);
            }
            else
            {
                try
                {
                    var localMediaStream = localMedia.LocalStream;
                    
                    // This is our local video control, a WinForms Control or
                    // WPF FrameworkElement. It is constantly updated with
                    // our live video feed since we requested video above.
                    // Add it directly to the UI or use the IceLink layout
                    // manager, which we do below.
                    var localVideoControl = localMedia.LocalVideoControl;

                    // Create an IceLink layout manager, which makes the task
                    // of arranging video controls easy. Give it a reference
                    // to a WinForms control that can be filled with video feeds.
                    // For WPF users, the WebRTC extension includes
                    // WpfLayoutManager, which accepts a Canvas.
                    var layoutManager = localMedia.LayoutManager;

                    // Create a WebRTC audio stream description (requires a
                    // reference to the local audio feed).
                    var audioStream = new AudioStream(localMediaStream);

                    // Create a WebRTC video stream description (requires a
                    // reference to the local video feed). Whenever a P2P link
                    // initializes using this description, position and display
                    // the remote video control on-screen by passing it to the
                    // layout manager created above. Whenever a P2P link goes
                    // down, remove it.
                    var videoStream = new VideoStream(localMediaStream);
                    videoStream.OnLinkInit += (e) =>
                    {
                        var remoteVideoControl = (FrameworkElement)e.Link.GetRemoteVideoControl();
                        layoutManager.AddRemoteVideoControl(e.PeerId, remoteVideoControl);

                        // When double-clicked, mute/unmute the remote video.
                        videoWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
                        {
                            // When double-tapped, mute/unmute the remote video.
                            remoteVideoControl.DoubleTapped += (sender, ce) =>
                            {
                                if (e.Link.RemoteVideoIsMuted())
                                {
                                    // Resume rendering incoming video.
                                    e.Link.UnmuteRemoteVideo();
                                }
                                else
                                {
                                    // Stop rendering incoming video.
                                    e.Link.MuteRemoteVideo();
                                }
                            };
                        });
                    };
                    videoStream.OnLinkDown += (e) =>
                    {
                        layoutManager.RemoveRemoteVideoControl(e.PeerId);
                    };

                    // Create a new IceLink conference.
                    conference = new FM.IceLink.Conference(IceLinkServerAddress, new Stream[] { audioStream, videoStream });

                    //Use our generated DTLS certificate.
                    conference.DtlsCertificate = Certificate;
                    
                    // Supply TURN relay credentials in case we are behind a
                    // highly restrictive firewall. These credentials will be
                    // verified by the TURN server.
                    conference.RelayUsername = "******";
                    conference.RelayPassword = "******";

                    // Add a few event handlers to the conference so we can see
                    // when a new P2P link is created or changes state.
                    conference.OnLinkInit += (e) =>
                    {
                        Log.Info("Link to peer initializing...");
                    };
                    conference.OnLinkUp += (e) =>
                    {
                        Log.Info("Link to peer is UP.");
                    };
                    conference.OnLinkDown += (e) =>
                    {
                        Log.InfoFormat("Link to peer is DOWN. {0}", e.Exception.Message);
                    };
                    callback(null);
                }
                catch (Exception ex)
                {
                    callback(ex);
                }
            }
        }
Example 31
        private AudioStream MakeAudioStream(int id, string line)
        {
            var stream = new AudioStream ();
            stream.Id = id;

            if(line.Contains ("aac"))
            {
                stream.Codec = CodecType.AAC;
            }
            else if(line.Contains("ac-3"))
            {
                stream.Codec = CodecType.AC3;
            }
            else if(line.Contains("DTS"))
            {
                stream.Codec = CodecType.DTS;
            }

            if(line.Contains("stereo"))
            {
                stream.Channels = 2;
            }
            else if (line.Contains("5.1"))
            {
                stream.Channels = 6;
            }

            return stream;
        }
Example 32
 public static void Play(this AudioStream sound, AudioStreamPlayer player)
 {
     player.Stream = sound;
     player.Play();
 }
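A usage sketch for the extension method, from inside a Node script (the node path and resource path are hypothetical):

        var sound  = GD.Load<AudioStream>("res://sounds/click.wav");
        var player = GetNode<AudioStreamPlayer>("SfxPlayer");
        sound.Play(player);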
Example 33
 /// <summary>
 ///   Plays the given sound non-positionally.
 /// </summary>
 public void PlayCustomSound(AudioStream sound)
 {
     AudioSource.Stream = sound;
     AudioSource.Play();
 }
Example 34
        //Video Chat is the main form
        public void StartConference(Action<string> callback)
        {
            // Create a WebRTC audio stream description (requires a
            // reference to the local audio feed).
            AudioStream = new AudioStream(LocalMedia.LocalMediaStream);

            // Create a WebRTC video stream description (requires a
            // reference to the local video feed). Whenever a P2P link
            // initializes using this description, position and display
            // the remote video control on-screen by passing it to the
            // layout manager created above. Whenever a P2P link goes
            // down, remove it.
            VideoStream = new VideoStream(LocalMedia.LocalMediaStream);
            VideoStream.OnLinkInit += AddRemoteVideoControl;
            VideoStream.OnLinkDown += RemoveRemoteVideoControl;

            // Create a new IceLink conference.
            Conference = new FM.IceLink.Conference(IceLinkServerAddress, new Stream[] { AudioStream, VideoStream });

            // Supply TURN relay credentials in case we are behind a
            // highly restrictive firewall. These credentials will be
            // verified by the TURN server.
            Conference.RelayUsername = "******";
            Conference.RelayPassword = "******";

            // Add a few event handlers to the conference so we can see
            // when a new P2P link is created or changes state.
            Conference.OnLinkInit += LogLinkInit;
            Conference.OnLinkUp += LogLinkUp;
            Conference.OnLinkDown += LogLinkDown;

            // Attach signalling to the conference.
            Signalling.Attach(Conference, SessionId, callback);
        }
Example 35
        public DoorSecret(int x, int y, Level level)
        {
            if (level != null)
            {
                Location = new Point2(x, y);
                Level    = level;

                Type = level.Map.Planes[(int)Level.Planes.Walls][y, x];

                // This will be the physical body for the door itself.

                BoxShape box = new BoxShape();
                box.Extents = new Vector3(Level.CellSize * 0.5f, Level.CellSize * 0.5f, Level.CellSize * 0.5f);

                _wallShape       = new CollisionShape();
                _wallShape.Shape = box;

                _wallBody                = new RigidBody();
                _wallBody.Mode           = RigidBody.ModeEnum.Static;
                _wallBody.CollisionLayer = (uint)Level.CollisionLayers.Walls;
                _wallBody.CollisionMask  = (uint)(Level.CollisionLayers.Characters | Level.CollisionLayers.Projectiles);
                _wallBody.AddChild(_wallShape);

                AddChild(_wallBody);

                // Create the actual mesh for the door

                _mesh      = new MeshInstance();
                _mesh.Mesh = GetMeshForDoor(Type);

                _wallBody.AddChild(_mesh);

                // Add an audio player to play the "pushing" sound when the door
                // is activated.

                _audioPlayer = new AudioStreamPlayer3D();

                _wallBody.AddChild(_audioPlayer);

                _activateSound = Assets.GetSoundClip(Assets.DigitalSoundList.PushWallActivation);

                // Add the tween that will be used to animate the door.

                _tween = new Tween();
                _tween.Connect("tween_all_completed", this, "OnTweenCompleted");

                AddChild(_tween);

                // Add myself to the world and set my position.

                level.AddChild(this);

                Transform tform = this.Transform;
                tform.origin   = level.MapToWorld(x, y);
                this.Transform = tform;

                // Set my default state.

                State   = DoorState.Stopped;
                Enabled = true;

                SetProcess(true);
                SetPhysicsProcess(true);
            }
        }
Example 36
 void sf_SoundChannelChanged(AudioStream c)
 {
     ID = Bass.BASS_ChannelSetFX(c.StreamHandle, BASSFXType.BASS_FX_BFX_LPF, 1);
 }
Example 37
        private void ParseVtsVobs()
        {
            var buffer = new byte[16];

            //retrieve video info
            _fs.Position = 0x200;
            var data = IntToBin(GetEndian(2), 16);
            _vtsVobs.VideoStream.CodingMode = _arrayOfCodingMode[BinToInt(MidStr(data, 0, 2))];
            _vtsVobs.VideoStream.Standard = _arrayOfStandard[BinToInt(MidStr(data, 2, 2))];
            _vtsVobs.VideoStream.Aspect = _arrayOfAspect[BinToInt(MidStr(data, 4, 2))];
            if (_vtsVobs.VideoStream.Standard == "PAL")
                _vtsVobs.VideoStream.Resolution = _arrayOfPalResolution[BinToInt(MidStr(data, 13, 2))];
            else if (_vtsVobs.VideoStream.Standard == "NTSC")
                _vtsVobs.VideoStream.Resolution = _arrayOfNtscResolution[BinToInt(MidStr(data, 13, 2))];

            //retrieve audio info
            _fs.Position = 0x202; //useless but here for readability
            _vtsVobs.NumberOfAudioStreams = GetEndian(2);
            for (int i = 0; i < _vtsVobs.NumberOfAudioStreams; i++)
            {
                var audioStream = new AudioStream();
                data = IntToBin(GetEndian(2), 16);
                audioStream.LanguageTypeSpecified = Convert.ToInt32(MidStr(data, 4, 2));
                audioStream.CodingMode = _arrayOfAudioMode[(BinToInt(MidStr(data, 0, 3)))];
                audioStream.Channels = BinToInt(MidStr(data, 13, 3)) + 1;
                _fs.Read(buffer, 0, 2);
                audioStream.LanguageCode = new string(new[] { Convert.ToChar(buffer[0]), Convert.ToChar(buffer[1]) });
                if (ArrayOfLanguageCode.Contains(audioStream.LanguageCode))
                    audioStream.Language = ArrayOfLanguage[ArrayOfLanguageCode.IndexOf(audioStream.LanguageCode)];
                _fs.Seek(1, SeekOrigin.Current);
                audioStream.Extension = _arrayOfAudioExtension[_fs.ReadByte()];
                _fs.Seek(2, SeekOrigin.Current);
                _vtsVobs.AudioStreams.Add(audioStream);
            }

            //retrieve subs info (only name)
            _fs.Position = 0x254;
            _vtsVobs.NumberOfSubtitles = GetEndian(2);
            _fs.Position += 2;
            for (int i = 0; i < _vtsVobs.NumberOfSubtitles; i++)
            {
                _fs.Read(buffer, 0, 2);
                var languageTwoLetter = new string(new[] { Convert.ToChar(buffer[0]), Convert.ToChar(buffer[1]) });
                _vtsVobs.Subtitles.Add(InterpretLanguageCode(languageTwoLetter));
                _fs.Read(buffer, 0, 2); // reserved for language code extension + code extension

                //switch (buffer[0])      // 4, 8, 10-12 unused
                //{
                //    // http://dvd.sourceforge.net/dvdinfo/sprm.html
                //    case 1: subtitleFormat = "(caption/normal size char)"; break; //0 = unspecified caption
                //    case 2: subtitleFormat = "(caption/large size char)"; break;
                //    case 3: subtitleFormat = "(caption for children)"; break;
                //    case 5: subtitleFormat = "(closed caption/normal size char)"; break;
                //    case 6: subtitleFormat = "(closed caption/large size char)"; break;
                //    case 7: subtitleFormat = "(closed caption for children)"; break;
                //    case 9: subtitleFormat = "(forced caption)"; break;
                //    case 13: subtitleFormat = "(director comments/normal size char)"; break;
                //    case 14: subtitleFormat = "(director comments/large size char)"; break;
                //    case 15: subtitleFormat = "(director comments for children)"; break;
                //}

                _fs.Position += 2;
            }
        }
Example 38
        private void AssignDlnaMetadata(MetadataContainer info, int edition)
        {
            if (info == null)
            {
                return;
            }

            List <string> profileList = new List <string>();

            if (TranscodingParameter == null)
            {
                Metadata  = info.Metadata[edition];
                Video     = info.Video[edition];
                Audio     = info.Audio[edition];
                Image     = info.Image[edition];
                Subtitles = info.Subtitles[edition];
            }
            else
            {
                if (IsImage)
                {
                    ImageTranscoding        image    = (ImageTranscoding)TranscodingParameter;
                    TranscodedImageMetadata metadata = MediaConverter.GetTranscodedImageMetadata(image);
                    Metadata = new MetadataStream
                    {
                        Mime = info.Metadata[edition].Mime,
                        ImageContainerType = metadata.TargetImageCodec,
                        Size = Client.EstimateTransodedSize ? info.Metadata[edition].Size : 0,
                    };
                    Image = new ImageStream
                    {
                        Height          = metadata.TargetMaxHeight,
                        Orientation     = metadata.TargetOrientation,
                        PixelFormatType = metadata.TargetPixelFormat,
                        Width           = metadata.TargetMaxWidth
                    };
                }
                else if (IsAudio)
                {
                    AudioTranscoding        audio    = (AudioTranscoding)TranscodingParameter;
                    TranscodedAudioMetadata metadata = MediaConverter.GetTranscodedAudioMetadata(audio);
                    Metadata = new MetadataStream
                    {
                        Mime = info.Metadata[edition].Mime,
                        AudioContainerType = metadata.TargetAudioContainer,
                        Bitrate            = metadata.TargetAudioBitrate > 0 ? metadata.TargetAudioBitrate : null,
                        Duration           = info.Metadata[edition].Duration,
                        Size = Client.EstimateTransodedSize ? (metadata.TargetAudioBitrate > 0 ? Convert.ToInt64((metadata.TargetAudioBitrate * 1024 * info.Metadata[edition].Duration) / 8.0) : (long?)null) : null,
                    };
                    AudioStream audioStream = new AudioStream();
                    audioStream.Bitrate   = metadata.TargetAudioBitrate;
                    audioStream.Channels  = metadata.TargetAudioChannels;
                    audioStream.Codec     = metadata.TargetAudioCodec;
                    audioStream.Frequency = metadata.TargetAudioFrequency;
                    Audio = new List <AudioStream> {
                        audioStream
                    };
                }
                else if (IsVideo)
                {
                    VideoTranscoding        video    = (VideoTranscoding)TranscodingParameter;
                    TranscodedVideoMetadata metadata = MediaConverter.GetTranscodedVideoMetadata(video);
                    Metadata = new MetadataStream
                    {
                        Mime = info.Metadata[edition].Mime,
                        VideoContainerType = metadata.TargetVideoContainer,
                        Bitrate            = metadata.TargetAudioBitrate > 0 && metadata.TargetVideoBitrate > 0 ? metadata.TargetAudioBitrate + metadata.TargetVideoBitrate : null,
                        Duration           = info.Metadata[edition].Duration,
                        Size = Client.EstimateTransodedSize ? (metadata.TargetAudioBitrate > 0 && info.Metadata[edition].Duration > 0 ? Convert.ToInt64((metadata.TargetAudioBitrate * 1024 * info.Metadata[edition].Duration) / 8.0) : (long?)null) : null,
                    };
                    Video = new VideoStream
                    {
                        AspectRatio      = metadata.TargetVideoAspectRatio,
                        Bitrate          = metadata.TargetVideoBitrate,
                        Codec            = metadata.TargetVideoCodec,
                        Framerate        = metadata.TargetVideoFrameRate,
                        HeaderLevel      = metadata.TargetLevel,
                        ProfileType      = metadata.TargetProfile,
                        RefLevel         = metadata.TargetLevel,
                        Height           = metadata.TargetVideoMaxHeight,
                        PixelAspectRatio = metadata.TargetVideoPixelAspectRatio,
                        PixelFormatType  = metadata.TargetVideoPixelFormat,
                        TimestampType    = metadata.TargetVideoTimestamp,
                        Width            = metadata.TargetVideoMaxWidth,
                    };
                    AudioStream audioStream = new AudioStream();
                    audioStream.Bitrate   = metadata.TargetAudioBitrate;
                    audioStream.Channels  = metadata.TargetAudioChannels;
                    audioStream.Codec     = metadata.TargetAudioCodec;
                    audioStream.Frequency = metadata.TargetAudioFrequency;
                    Audio = new List <AudioStream> {
                        audioStream
                    };
                }
            }

            if (IsImage)
            {
                profileList = DlnaProfiles.ResolveImageProfile(Metadata.ImageContainerType, Image.Width, Image.Height);
            }
            else if (IsAudio)
            {
                var audio = Audio.FirstOrDefault();
                profileList = DlnaProfiles.ResolveAudioProfile(Metadata.AudioContainerType, audio?.Codec ?? AudioCodec.Unknown, audio?.Bitrate, audio?.Frequency, audio?.Channels);
            }
            else if (IsVideo)
            {
                var audio = Audio.FirstOrDefault();
                profileList = DlnaProfiles.ResolveVideoProfile(Metadata.VideoContainerType, Video.Codec, audio?.Codec ?? AudioCodec.Unknown, Video.ProfileType, Video.HeaderLevel,
                                                               Video.Framerate, Video.Width, Video.Height, Video.Bitrate, audio?.Bitrate, Video.TimestampType);
            }

            string profile = "";
            string mime    = info.Metadata[edition].Mime;

            if (DlnaProfiles.TryFindCompatibleProfile(Client, profileList, ref profile, ref mime))
            {
                DlnaMime    = mime;
                DlnaProfile = profile;
            }
        }
Example 39
        private void ParseVtsVobs()
        {
            var buffer = new byte[16];

            //retrieve video info
            _fs.Position = 0x200;
            var data = IntToBin(GetEndian(2), 16);

            _vtsVobs.VideoStream.CodingMode = _arrayOfCodingMode[BinToInt(MidStr(data, 0, 2))];
            _vtsVobs.VideoStream.Standard   = _arrayOfStandard[BinToInt(MidStr(data, 2, 2))];
            _vtsVobs.VideoStream.Aspect     = _arrayOfAspect[BinToInt(MidStr(data, 4, 2))];
            if (_vtsVobs.VideoStream.Standard == "PAL")
            {
                _vtsVobs.VideoStream.Resolution = _arrayOfPalResolution[BinToInt(MidStr(data, 13, 2))];
            }
            else if (_vtsVobs.VideoStream.Standard == "NTSC")
            {
                _vtsVobs.VideoStream.Resolution = _arrayOfNtscResolution[BinToInt(MidStr(data, 13, 2))];
            }

            // retrieve audio info
            _fs.Position = 0x202; // the stream is already at 0x202; set explicitly for readability
            _vtsVobs.NumberOfAudioStreams = GetEndian(2);
            for (int i = 0; i < _vtsVobs.NumberOfAudioStreams; i++)
            {
                var audioStream = new AudioStream();
                data = IntToBin(GetEndian(2), 16);
                audioStream.LanguageTypeSpecified = Convert.ToInt32(MidStr(data, 4, 2));
                audioStream.CodingMode            = _arrayOfAudioMode[BinToInt(MidStr(data, 0, 3))];
                audioStream.Channels = BinToInt(MidStr(data, 13, 3)) + 1;
                _fs.Read(buffer, 0, 2);
                audioStream.LanguageCode = new string(new[] { Convert.ToChar(buffer[0]), Convert.ToChar(buffer[1]) });
                if (ArrayOfLanguageCode.Contains(audioStream.LanguageCode))
                {
                    audioStream.Language = ArrayOfLanguage[ArrayOfLanguageCode.IndexOf(audioStream.LanguageCode)];
                }
                _fs.Seek(1, SeekOrigin.Current);
                audioStream.Extension = _arrayOfAudioExtension[_fs.ReadByte()];
                _fs.Seek(2, SeekOrigin.Current);
                _vtsVobs.AudioStreams.Add(audioStream);
            }

            // retrieve subtitle info (language names only)
            _fs.Position = 0x254;
            _vtsVobs.NumberOfSubtitles = GetEndian(2);
            _fs.Position += 2;
            for (int i = 0; i < _vtsVobs.NumberOfSubtitles; i++)
            {
                _fs.Read(buffer, 0, 2);
                var languageTwoLetter = new string(new[] { Convert.ToChar(buffer[0]), Convert.ToChar(buffer[1]) });
                _vtsVobs.Subtitles.Add(InterpretLanguageCode(languageTwoLetter));
                _fs.Read(buffer, 0, 2); // reserved for language code extension + code extension

                //switch (buffer[0])      // 4, 8, 10-12 unused
                //{
                //    // http://dvd.sourceforge.net/dvdinfo/sprm.html
                //    case 1: subtitleFormat = "(caption/normal size char)"; break; //0 = unspecified caption
                //    case 2: subtitleFormat = "(caption/large size char)"; break;
                //    case 3: subtitleFormat = "(caption for children)"; break;
                //    case 5: subtitleFormat = "(closed caption/normal size char)"; break;
                //    case 6: subtitleFormat = "(closed caption/large size char)"; break;
                //    case 7: subtitleFormat = "(closed caption for children)"; break;
                //    case 9: subtitleFormat = "(forced caption)"; break;
                //    case 13: subtitleFormat = "(director comments/normal size char)"; break;
                //    case 14: subtitleFormat = "(director comments/large size char)"; break;
                //    case 15: subtitleFormat = "(director comments for children)"; break;
                //}

                _fs.Position += 2;
            }
        }
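The parser above leans on a handful of bit-string helpers that the snippet does not include. The following is a minimal sketch with signatures inferred from the call sites; treat it as an assumption, not the original implementation:

        // Sketch only; signatures inferred from how the helpers are called above.
        private int GetEndian(int byteCount)
        {
            // Read byteCount bytes from _fs as one big-endian integer.
            int result = 0;
            for (int i = 0; i < byteCount; i++)
            {
                result = (result << 8) | _fs.ReadByte();
            }
            return result;
        }

        private static string IntToBin(int value, int digits)
        {
            // Binary string representation, zero-padded to the requested width.
            return Convert.ToString(value, 2).PadLeft(digits, '0');
        }

        private static int BinToInt(string bits)
        {
            return Convert.ToInt32(bits, 2);
        }

        private static string MidStr(string bits, int start, int count)
        {
            // Zero-based substring, mirroring how the parser slices bit strings.
            return bits.Substring(start, count);
        }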
Esempio n. 40
0
        public override void _Ready()
        {
            PowerUpSound = ResourceLoader.Load("res://Recursos/Sons/kill.wav") as AudioStream;
            SetCollisionLayerBit(0, false);
        }
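Loading the resource alone produces no sound. A minimal follow-up sketch, assuming the scene contains an AudioStreamPlayer child node (the node name here is an assumption, not from the snippet):

        // Sketch only; "AudioStreamPlayer" is an assumed node name.
        private void PlayPowerUpSound()
        {
            var player = GetNode<AudioStreamPlayer>("AudioStreamPlayer");
            player.Stream = PowerUpSound;
            player.Play();
        }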
Esempio n. 41
0
    public Job CreateJobWithStaticOverlay(
        string projectId, string location, string inputUri, string overlayImageUri, string outputUri)
    {
        // Create the client.
        TranscoderServiceClient client = TranscoderServiceClient.Create();

        // Build the parent location name.
        LocationName parent = new LocationName(projectId, location);

        // Build the job config.
        VideoStream videoStream0 = new VideoStream
        {
            H264 = new VideoStream.Types.H264CodecSettings
            {
                BitrateBps   = 550000,
                FrameRate    = 60,
                HeightPixels = 360,
                WidthPixels  = 640
            }
        };

        AudioStream audioStream0 = new AudioStream
        {
            Codec      = "aac",
            BitrateBps = 64000
        };

        // Create the overlay image. Only JPEG is supported. Image resolution is based on output
        // video resolution. To respect the original image aspect ratio, set either x or y to 0.0.
        // This example stretches the overlay image the full width and half of the height of the
        // output video.
        Overlay.Types.Image overlayImage = new Overlay.Types.Image
        {
            Uri        = overlayImageUri,
            Alpha      = 1,
            Resolution = new Overlay.Types.NormalizedCoordinate
            {
                X = 1,
                Y = 0.5
            }
        };

        // Create the starting animation (when the overlay appears). Use the values x: 0 and y: 0 to
        // position the top-left corner of the overlay in the top-left corner of the output video.
        Overlay.Types.Animation animationStart = new Overlay.Types.Animation
        {
            AnimationStatic = new Overlay.Types.AnimationStatic
            {
                Xy = new Overlay.Types.NormalizedCoordinate
                {
                    X = 0,
                    Y = 0
                },
                StartTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(0))
            }
        };

        // Create the ending animation (when the overlay disappears). In this example, the overlay
        // disappears at the 10-second mark in the output video.
        Overlay.Types.Animation animationEnd = new Overlay.Types.Animation
        {
            AnimationEnd = new Overlay.Types.AnimationEnd
            {
                StartTimeOffset = Duration.FromTimeSpan(TimeSpan.FromSeconds(10))
            }
        };

        // Create the overlay and add the image and animations to it.
        Overlay overlay = new Overlay
        {
            Image      = overlayImage,
            Animations = { animationStart, animationEnd }
        };

        ElementaryStream elementaryStream0 = new ElementaryStream
        {
            Key         = "video_stream0",
            VideoStream = videoStream0
        };

        ElementaryStream elementaryStream1 = new ElementaryStream
        {
            Key         = "audio_stream0",
            AudioStream = audioStream0
        };

        MuxStream muxStream0 = new MuxStream
        {
            Key               = "sd",
            Container         = "mp4",
            ElementaryStreams = { "video_stream0", "audio_stream0" }
        };

        Input input = new Input
        {
            Key = "input0",
            Uri = inputUri
        };

        Output output = new Output
        {
            Uri = outputUri
        };

        JobConfig jobConfig = new JobConfig
        {
            Inputs            = { input },
            Output            = output,
            ElementaryStreams = { elementaryStream0, elementaryStream1 },
            MuxStreams        = { muxStream0 },
            Overlays          = { overlay }
        };

        // Build the job.
        Job newJob = new Job
        {
            InputUri  = inputUri,
            OutputUri = outputUri,
            Config    = jobConfig
        };

        // Call the API.
        Job job = client.CreateJob(parent, newJob);

        // Return the result.
        return(job);
    }
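A possible invocation, shown with placeholder values; the project id, location, and gs:// URIs below are assumptions, not part of the sample:

    // Hypothetical usage of the sample method above.
    public void RunCreateJobWithStaticOverlay()
    {
        Job job = CreateJobWithStaticOverlay(
            "my-project", "us-central1",
            "gs://my-bucket/input.mp4",
            "gs://my-bucket/overlay.jpg",
            "gs://my-bucket/output/");
        Console.WriteLine($"Created job: {job.Name}");
    }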
Esempio n. 42
0
        private void convTh_main()
        {
            using (WorkingDir wd = new WorkingDir())
            {
                string rExt = Path.GetExtension(_rFile);

                if (Gnd.i.audioVideoExtensions.contains(rExt) == false)
                {
                    throw new Exception("再生可能なファイルではありません。(不明な拡張子)");
                }

                string midFile = wd.makePath() + rExt;

                try
                {
                    using (critSect.parallel())
                    {
                        File.Copy(_rFile, midFile);
                    }
                    if (File.Exists(midFile) == false)
                    {
                        throw null; // deliberately triggers NullReferenceException; turned into a friendly error by the catch below
                    }
                }
                catch
                {
                    throw new Exception("ファイルにアクセス出来ません。");
                }
                string redirFile = wd.makePath();

                ProcessTools.runOnBatch("ffprobe.exe " + _rFile + " 2> " + redirFile, FFmpeg.getBDir(), critSect);

                foreach (string line in FileTools.readAllLines(redirFile, Encoding.ASCII))
                {
                    if (line.Contains("Duration:"))
                    {
                        _duration = new Duration();

                        List <string> tokens = StringTools.tokenize(line, " :.,", false, true);

                        if (tokens[1] == "N/A")
                        {
                            throw new Exception("再生可能なファイルではありません。(Duration)");
                        }

                        int h = int.Parse(tokens[1]);
                        int m = int.Parse(tokens[2]);
                        int s = int.Parse(tokens[3]);

                        int sec = h * 3600 + m * 60 + s;

                        if (sec < 1)
                        {
                            throw new Exception("映像又は曲の長さが短すぎます。");
                        }

                        if (IntTools.IMAX < sec)
                        {
                            throw new Exception("映像又は曲の長さが長すぎます。");
                        }

                        _duration.secLength = sec;
                    }
                    else if (_audioStream == null && line.Contains("Stream") && line.Contains("Audio:"))
                    {
                        _audioStream = new AudioStream();

                        List <string> tokens = StringTools.tokenize(line, StringTools.DIGIT, true, true);

                        _audioStream.mapIndex = int.Parse(tokens[1]);
                    }
                    else if (_videoStream == null && line.Contains("Stream") && line.Contains("Video:"))
                    {
                        _videoStream = new VideoStream();

                        {
                            List <string> tokens = StringTools.tokenize(line, StringTools.DIGIT, true, true);

                            _videoStream.mapIndex = int.Parse(tokens[1]);
                        }

                        {
                            List <string> tokens = StringTools.tokenize(line, " ,");

                            foreach (string fToken in tokens)
                            {
                                string token = fToken;

                                if (StringTools.toFormat(token, true) == "9x9") // presumably matches tokens of the form <digits>x<digits>, e.g. "1280x720"
                                {
                                    List <string> whTokens = StringTools.tokenize(token, "x");

                                    _videoStream.w = int.Parse(whTokens[0]);
                                    _videoStream.h = int.Parse(whTokens[1]);
                                }
                            }
                        }

                        if (_videoStream.w < Consts.VIDEO_W_MIN)
                        {
                            throw new Exception("映像の幅が小さすぎます。");
                        }

                        if (_videoStream.h < Consts.VIDEO_H_MIN)
                        {
                            throw new Exception("映像の高さが小さすぎます。");
                        }

                        if (IntTools.IMAX < _videoStream.w)
                        {
                            throw new Exception("映像の幅が大きすぎます。");
                        }

                        if (IntTools.IMAX < _videoStream.h)
                        {
                            throw new Exception("映像の高さが大きすぎます。");
                        }
                    }
                }
                if (_duration == null)
                {
                    throw null; // no Duration line was found in the ffprobe output
                }

                if (_audioStream == null)
                {
                    throw new Exception("再生可能なファイルではありません。(音声ストリームがありません)");
                }

                if (_videoStream == null)
                {
                    _type = Consts.MediaType_e.AUDIO;
                }
                else
                {
                    _type = Consts.MediaType_e.MOVIE;
                }

                string wFile = Utils.getOgxFile(_wIndex, _type);

                if (Gnd.i.convWavMastering)
                {
                    string wavFile = wd.makePath() + ".wav";

                    ProcessTools.runOnBatch(
                        "ffmpeg.exe -i " + _rFile + " -map 0:" + _audioStream.mapIndex + " -ac 2 " + wavFile,
                        FFmpeg.getBDir(),
                        critSect
                        );

                    if (File.Exists(wavFile) == false)
                    {
                        throw new Exception("音声ストリームの抽出に失敗しました。");
                    }

                    string wmDir      = wd.makePath();
                    string wmFile     = Path.Combine(wmDir, "Master.exe");
                    string wavFileNew = wd.makePath() + ".wav";

                    Directory.CreateDirectory(wmDir);
                    File.Copy(wavMasteringFile, wmFile);

                    ProcessTools.runOnBatch(
                        "Master.exe " + wavFile + " " + wavFileNew + " 0001.txt",
                        wmDir,
                        critSect
                        );

                    if (File.Exists(wavFileNew) == false)
                    {
                        throw new Exception("wavFileNew does not exist");
                    }

                    if (_type == Consts.MediaType_e.AUDIO)
                    {
                        ProcessTools.runOnBatch(
                            "ffmpeg.exe -i " + wavFileNew + " -map 0:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptAudio + " " + wFile,
                            FFmpeg.getBDir(),
                            critSect
                            );
                    }
                    else
                    {
                        ProcessTools.runOnBatch(
                            "ffmpeg.exe -i " + _rFile + " -i " + wavFileNew + " -map 0:" + _videoStream.mapIndex + " -map 1:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptVideo + " " + Gnd.i.ffmpegOptAudio + " " + wFile,
                            FFmpeg.getBDir(),
                            critSect
                            );
                    }
                }
                else
                {
                    if (_type == Consts.MediaType_e.AUDIO)
                    {
                        ProcessTools.runOnBatch(
                            "ffmpeg.exe -i " + _rFile + " -map 0:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptAudio + " " + wFile,
                            FFmpeg.getBDir(),
                            critSect
                            );
                    }
                    else
                    {
                        ProcessTools.runOnBatch(
                            "ffmpeg.exe -i " + _rFile + " -map 0:" + _videoStream.mapIndex + " -map 0:" + _audioStream.mapIndex + " " + Gnd.i.ffmpegOptVideo + " " + Gnd.i.ffmpegOptAudio + " " + wFile,
                            FFmpeg.getBDir(),
                            critSect
                            );
                    }
                }
                if (File.Exists(wFile) == false)
                {
                    throw new Exception("wFile does not exist");
                }

                _wFile = wFile;
            }
        }
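One caveat: the command lines above splice raw file paths into the ffprobe/ffmpeg invocations, so a path containing spaces would break the batch call. A minimal hedged fix, not present in the original:

        // Sketch only: quote paths before building the command line,
        // e.g. "ffprobe.exe " + Quote(_rFile) + " 2> " + Quote(redirFile).
        private static string Quote(string path)
        {
            return "\"" + path + "\"";
        }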
Esempio n. 43
-1
        private void SetupCall()
        {
            var signalling = new Signalling(Constants.WEB_SYNC_SERVER);
            signalling.Start((error) =>
            {
                if (error != null)
                {
                    // TODO: Handle Errors
                }
            });

            LocalMedia = new LocalMedia();
            LocalMedia.Start(Container, (error) =>
            {
                if (error != null)
                {
                    // TODO: Handle Errors
                }
            });

            var audioStream = new AudioStream(LocalMedia.LocalMediaStream);
            var videoStream = new VideoStream(LocalMedia.LocalMediaStream);
            var conference = new Conference(Constants.ICE_LINK_ADDRESS, new Stream[]
                    {
                        audioStream,
                        videoStream
                    });
            conference.RelayUsername = "******";
            conference.RelayPassword = "******";

            signalling.Attach(conference, Constants.SESSION_ID, (error) =>
            {
                if (error != null)
                {
                    // TODO: Handle Errors
                }
            });
        }
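The snippet references a Constants class that is not shown. A hedged sketch with placeholder values; every value below is an assumption, not from the original code:

        // Sketch only; all values are placeholders.
        private static class Constants
        {
            public const string WEB_SYNC_SERVER  = "https://example.com/websync.ashx"; // WebSync signalling endpoint
            public const string ICE_LINK_ADDRESS = "icelink.example.com:3478";         // IceLink server address
            public const string SESSION_ID       = "000000";                           // shared session identifier
        }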