private void Awake()
        {
            // First we run through some Android specifics. Android requires explicit permission requests before the audio context can be created.
            // Beware that failing these will not necessarily report permission errors, but rather device creation errors.
            // Additionally, the library prefers the audio in communication mode, so we switch that too.

            if (Application.platform == RuntimePlatform.Android)
            {
                if (!Permission.HasUserAuthorizedPermission(Permission.Microphone))
                {
                    // NOTE(review): RequestUserPermission is asynchronous — the device/factory setup
                    // below can still run before the user responds. Confirm this is acceptable.
                    Permission.RequestUserPermission(Permission.Microphone);
                }

                try
                {
                    // AndroidJavaClass/AndroidJavaObject wrap JNI global references and implement
                    // IDisposable — dispose them so the references are released promptly instead of leaked.
                    using (AndroidJavaClass unityPlayer = new AndroidJavaClass("com.unity3d.player.UnityPlayer"))
                    using (AndroidJavaObject activity = unityPlayer.GetStatic <AndroidJavaObject>("currentActivity"))
                    using (AndroidJavaObject audioManager = activity.Call <AndroidJavaObject>("getSystemService", "audio"))
                    {
                        int mode1 = audioManager.Call <int>("getMode");
                        audioManager.Call("setMode", 3); // 3 == AudioManager.MODE_IN_COMMUNICATION
                        int mode2 = audioManager.Call <int>("getMode");

                        Debug.Log($"Android Audio Mode changed from {mode1} to {mode2}");
                    }
                }
                catch (Exception e)
                {
                    // Log with error severity and full stack trace rather than a plain info message.
                    Debug.LogException(e);
                }
            }

            threads = WebRtcThreads.Acquire();

            // the default audio device module must be created on the worker thread
            adm = AudioDeviceModuleFactory.CreateDefault(threads.threads[1]); // by convention 1 is the worker (see call below)

            // adm is now initialised

            factory = PeerConnectionFactory.Create(
                threads.threads[0],
                threads.threads[1],
                threads.threads[2], // This is the main signalling thread
                adm,
                AudioEncoderFactory.CreateBuiltin(),
                AudioDecoderFactory.CreateBuiltin(),
                VideoEncoderFactory.CreateBuiltin(),
                VideoDecoderFactory.CreateBuiltin(),
                null,
                null);

            playoutHelper   = new AudioDeviceHelper(PlayoutDevices, ChangePlayoutDevice);
            recordingHelper = new AudioDeviceHelper(RecordingDevices, ChangeRecordingDevice);

            pcs = new List <WebRtcPeerConnection>();
        }
// Example #2
        /// <summary>
        /// Returns the cached <c>Waveform</c> for <paramref name="path"/>, or decodes the
        /// corresponding <c>.ogg</c> asset, renders its waveform into a sequence of
        /// 256x64 texture parts, caches the result and returns it.
        /// </summary>
        /// <param name="path">Asset path without the ".ogg" extension; also the cache key.</param>
        static unsafe Waveform GetWaveform(string path)
        {
            Waveform waveform;

            if (waveforms.TryGetValue(path, out waveform))
            {
                return(waveform);
            }
            waveform = new Waveform();
            var textureWidth  = 256;
            var textureHeight = 64;

            using (var fs = AssetBundle.Current.OpenFile(path + ".ogg")) {
                using (var decoder = AudioDecoderFactory.CreateDecoder(fs)) {
                    var stereo = decoder.GetFormat() == AudioFormat.Stereo16;
                    // The part geometry and sample buffer depend only on the decoder's format,
                    // frequency and block size, which are fixed for the stream (TODO confirm),
                    // so compute them — and allocate the unmanaged buffer — once instead of on
                    // every iteration as the original did.
                    var maxPartLength  = textureWidth / (AnimationUtils.FramesPerSecond * pixelsPerFrame);
                    var maxPartSamples = (int)(maxPartLength * decoder.GetFrequency());
                    // (stereo ? 4 : 2) == bytes per sample frame for 16-bit audio.
                    var maxBlocks      = maxPartSamples / decoder.GetBlockSize() * (stereo ? 4 : 2);
                    var samples        = Marshal.AllocHGlobal(maxBlocks * decoder.GetBlockSize());
                    try {
                        while (true)
                        {
                            var numBlocks = decoder.ReadBlocks(samples, 0, maxBlocks);
                            if (numBlocks == 0)
                            {
                                // End of stream — all parts have been rendered.
                                break;
                            }
                            var numSamples = numBlocks * decoder.GetBlockSize() / (stereo ? 4 : 2);
                            var pixels     = new Color4[textureWidth * textureHeight];
                            // Width actually used by this (possibly final, partial) part.
                            int width      = numSamples * textureWidth / maxPartSamples;
                            if (stereo)
                            {
                                // Interleaved L/R: stride 2, left channel in the top half of the
                                // texture, right channel in the bottom half.
                                BuildMonoWaveform((short *)samples, 2, numSamples, pixels, textureWidth, width, 0, textureHeight / 2 - 1);
                                BuildMonoWaveform(((short *)samples + 1), 2, numSamples, pixels, textureWidth, width, textureHeight / 2 + 1, textureHeight - 1);
                            }
                            else
                            {
                                BuildMonoWaveform((short *)samples, 1, numSamples, pixels, textureWidth, width, 0, textureHeight - 1);
                            }
                            var texture = new Texture2D();
                            texture.LoadImage(pixels, textureWidth, textureHeight);
                            waveform.Parts.Add(new Waveform.Part {
                                Texture = texture, Width = width
                            });
                        }
                    } finally {
                        // Always release the unmanaged buffer, including on break or exception.
                        Marshal.FreeHGlobal(samples);
                    }
                }
            }
            waveforms.Add(path, waveform);
            return(waveform);
        }