Example #1
    void DisposeSora()
    {
        if (sora != null)
        {
            sora.Dispose();
            sora = null;
            Debug.Log("Sora is Disposed");
            if (MultiRecv)
            {
                foreach (var track in tracks)
                {
                    GameObject.Destroy(track.Value);
                }
                tracks.Clear();
            }
            if (!Recvonly)
            {
                audioSourceInput.Stop();
                AudioRenderer.Stop();
            }

            if (unityAudioOutput)
            {
                audioSourceOutput.Stop();
            }
        }
    }
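This cleanup is typically driven from a UI handler or the MonoBehaviour lifecycle. A minimal sketch of how DisposeSora() might be wired up; the caller names below are illustrative and not part of the example above.

    // Hypothetical callers of DisposeSora(); only DisposeSora() itself comes from the example.
    public void OnClickEnd()
    {
        DisposeSora();
    }

    void OnDestroy()
    {
        // Release the native Sora resources when the component is destroyed.
        DisposeSora();
    }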
Example #2
    // Start is called before the first frame update
    void Start()
    {
        fixedSampleType = sampleType;

        DumpDeviceInfo("video capturer devices", Sora.GetVideoCapturerDevices());
        DumpDeviceInfo("audio recording devices", Sora.GetAudioRecordingDevices());
        DumpDeviceInfo("audio playout devices", Sora.GetAudioPlayoutDevices());

        if (!MultiSub)
        {
            var image = renderTarget.GetComponent<UnityEngine.UI.RawImage>();
            image.texture = new Texture2D(640, 480, TextureFormat.RGBA32, false);
        }
        StartCoroutine(Render());
    }
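Start() relies on a DumpDeviceInfo helper that is not shown in this example. A minimal sketch, assuming the arrays returned by Sora.GetVideoCapturerDevices() and friends are of a Sora.DeviceInfo type with DeviceName/UniqueName fields (the type and field names are assumptions; check the SDK version you use).

    // Assumed helper: logs the devices returned by the Sora.Get*Devices() calls above.
    void DumpDeviceInfo(string title, Sora.DeviceInfo[] infos)
    {
        Debug.LogFormat("{0}: count={1}", title, infos.Length);
        foreach (var info in infos)
        {
            Debug.LogFormat("  DeviceName={0} UniqueName={1}", info.DeviceName, info.UniqueName);
        }
    }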
Example #3
    void DisposeSora()
    {
        if (sora != null)
        {
            sora.Dispose();
            sora = null;
            Debug.Log("Sora is Disposed");
            if (!Recvonly)
            {
                audioSourceInput.Stop();
                AudioRenderer.Stop();
            }

            if (unityAudioOutput)
            {
                audioSourceOutput.Stop();
            }
        }
    }
Example #4
    // Start is called before the first frame update
    void Start()
    {
#if !UNITY_EDITOR && UNITY_ANDROID
        // On Android builds, enumerate the camera and microphone devices up front.
        // The results are unused; the enumeration itself is the point (e.g. to trigger the permission prompts).
        var x = WebCamTexture.devices;
        var y = Microphone.devices;
#endif
        fixedSampleType = sampleType;

        DumpDeviceInfo("video capturer devices", Sora.GetVideoCapturerDevices());
        DumpDeviceInfo("audio recording devices", Sora.GetAudioRecordingDevices());
        DumpDeviceInfo("audio playout devices", Sora.GetAudioPlayoutDevices());

        if (!MultiRecv)
        {
            var image = renderTarget.GetComponent<UnityEngine.UI.RawImage>();
            image.texture = new Texture2D(640, 480, TextureFormat.RGBA32, false);
        }
        StartCoroutine(Render());
    }
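Both Start() methods above finish by starting a Render() coroutine that is not shown. A minimal sketch of what it could look like; the per-frame sora.OnRender() call is an assumption about the SDK surface, so consult the SDK sample for the exact hook.

    // Assumed per-frame loop: wait for the end of each frame and let the SDK render.
    System.Collections.IEnumerator Render()
    {
        while (true)
        {
            yield return new WaitForEndOfFrame();
            if (sora != null)
            {
                sora.OnRender();
            }
        }
    }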
        public void Close()
        {
            logEvent?.RemoveAllListeners();
            warningEvent?.RemoveAllListeners();
            errorEvent?.RemoveAllListeners();
            dataChannelOpenEvent?.RemoveAllListeners();
            logEvent             = null;
            warningEvent         = null;
            errorEvent           = null;
            dataChannelOpenEvent = null;

            if (sora != null)
            {
                sora.OnAddTrack    = null;
                sora.OnRemoveTrack = null;
                sora.OnNotify      = null;
                sora.Dispose();
                sora = null;
            }
        }
        public void Connect(
            string signalingUrl,
            ClientType clientType,
            string streamId,
            int videoWidth,
            int videoHeight,
            int videoBitrate,
            RenderTexture renderTexture)
        {
            try
            {
                sora = new Sora();

                this.clientType = clientType;

                var config = new Sora.Config
                {
                    SignalingUrl       = signalingUrl,
                    ChannelId          = streamId,
                    VideoWidth         = videoWidth,
                    VideoHeight        = videoHeight,
                    VideoBitrate       = videoBitrate,
                    AudioOnly          = false,
                    Multistream        = false,
                    Role               = clientType == ClientType.Publisher ? Sora.Role.Sendonly : Sora.Role.Recvonly,
                    CapturerType       = Sora.CapturerType.UnityRenderTexture,
                    UnityRenderTexture = renderTexture
                };

                sora.OnAddTrack = (trackId) =>
                {
                    this.trackId = trackId;
                    OnLogEvent.Invoke("OnAddTrack", $"trackId: {trackId}");
                };
                if (clientType == ClientType.Player)
                {
                    receiveTexture = renderTexture;

                    sora.OnRemoveTrack = (trackId) =>
                    {
                        this.trackId = 0;
                        OnLogEvent.Invoke("OnRemoveTrack", $"trackId: {trackId}");
                    };
                }
                sora.OnNotify = (msg) =>
                {
                    OnLogEvent.Invoke("OnNotify", $"\"{msg}\"");
                };

                var isSuccess = sora.Connect(config);

                if (isSuccess)
                {
                    OnOpen?.Invoke();
                    OnLogEvent.Invoke("Sora Connect", "success");
                    OnDataChannelOpen?.Invoke();
                }
                else
                {
                    OnErrorEvent.Invoke("sora connect error", "");
                }
            }
            catch (Exception ex)
            {
                OnErrorEvent.Invoke("OnError", ex.Message);
            }
        }
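A minimal usage sketch for the Connect() wrapper above, assuming a component that exposes a RenderTexture and a button handler; every name and value below other than Connect()'s own parameters is a placeholder.

        // Hypothetical caller of Connect(); the URL, stream ID, and bitrate are placeholders.
        public RenderTexture publishTexture;

        public void OnClickPublish()
        {
            Connect(
                "wss://example.com/signaling",  // signalingUrl
                ClientType.Publisher,           // send-only role
                "stream-1",                     // streamId, used as the Sora ChannelId
                640,                            // videoWidth
                480,                            // videoHeight
                2000,                           // videoBitrate
                publishTexture);                // captured via Sora.CapturerType.UnityRenderTexture
        }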
Example #7
    public void OnClickStart()
    {
        // Development-only feature:
        // if a .env.json file exists, read it to set the signaling URL and channel ID.
        if (signalingUrl.Length == 0 && channelId.Length == 0 && System.IO.File.Exists(".env.json"))
        {
            var settings = JsonUtility.FromJson<Settings>(System.IO.File.ReadAllText(".env.json"));
            signalingUrl = settings.signaling_url;
            channelId    = settings.channel_id;
            signalingKey = settings.signaling_key;
        }

        if (signalingUrl.Length == 0)
        {
            Debug.LogError("シグナリング URL が設定されていません");
            return;
        }
        if (channelId.Length == 0)
        {
            Debug.LogError("チャンネル ID が設定されていません");
            return;
        }
        // If a signalingKey is provided, set it in the metadata.
        string metadata = "";

        if (signalingKey.Length != 0)
        {
            var md = new Metadata()
            {
                signaling_key = signalingKey
            };
            metadata = JsonUtility.ToJson(md);
        }

        InitSora();

        int videoWidth  = 640;
        int videoHeight = 480;

        switch (videoSize)
        {
        case VideoSize.QVGA:
            videoWidth  = 320;
            videoHeight = 240;
            break;

        case VideoSize.VGA:
            videoWidth  = 640;
            videoHeight = 480;
            break;

        case VideoSize.HD:
            videoWidth  = 1280;
            videoHeight = 720;
            break;

        case VideoSize.FHD:
            videoWidth  = 1920;
            videoHeight = 1080;
            break;

        case VideoSize._4K:
            videoWidth  = 3840;
            videoHeight = 2160;
            break;
        }

        var config = new Sora.Config()
        {
            SignalingUrl         = signalingUrl,
            ChannelId            = channelId,
            Metadata             = metadata,
            Role                 = Role,
            Multistream          = Multistream,
            VideoCodec           = videoCodec,
            VideoBitrate         = videoBitrate,
            VideoWidth           = videoWidth,
            VideoHeight          = videoHeight,
            UnityAudioInput      = unityAudioInput,
            UnityAudioOutput     = unityAudioOutput,
            VideoCapturerDevice  = videoCapturerDevice,
            AudioRecordingDevice = audioRecordingDevice,
            AudioPlayoutDevice   = audioPlayoutDevice,
            Spotlight            = spotlight,
            SpotlightNumber      = spotlightNumber,
            Simulcast            = simulcast,
        };

        if (captureUnityCamera && capturedCamera != null)
        {
            config.CapturerType = Sora.CapturerType.UnityCamera;
            config.UnityCamera  = capturedCamera;
        }

        var success = sora.Connect(config);

        if (!success)
        {
            sora.Dispose();
            sora = null;
            Debug.LogErrorFormat("Sora.Connect failed: signalingUrl={0}, channelId={1}", signalingUrl, channelId);
            return;
        }
        Debug.LogFormat("Sora is Created: signalingUrl={0}, channelId={1}", signalingUrl, channelId);
    }
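OnClickStart() deserializes a Settings object from .env.json and serializes a Metadata object with JsonUtility. A sketch of those two types, with field names taken directly from the code above; JsonUtility requires serializable types with public fields.

    // Field names match the usage in OnClickStart().
    [System.Serializable]
    class Settings
    {
        public string signaling_url;
        public string channel_id;
        public string signaling_key;
    }

    [System.Serializable]
    class Metadata
    {
        public string signaling_key;
    }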
Example #8
    void InitSora()
    {
        DisposeSora();

        sora = new Sora();
        if (!MultiRecv)
        {
            sora.OnAddTrack = (trackId) =>
            {
                Debug.LogFormat("OnAddTrack: trackId={0}", trackId);
                this.trackId = trackId;
            };
            sora.OnRemoveTrack = (trackId) =>
            {
                Debug.LogFormat("OnRemoveTrack: trackId={0}", trackId);
                this.trackId = 0;
            };
        }
        else
        {
            sora.OnAddTrack = (trackId) =>
            {
                Debug.LogFormat("OnAddTrack: trackId={0}", trackId);
                var obj = GameObject.Instantiate(baseContent, Vector3.zero, Quaternion.identity);
                obj.name = string.Format("track {0}", trackId);
                obj.transform.SetParent(scrollViewContent.transform);
                obj.SetActive(true);
                var image = obj.GetComponent<UnityEngine.UI.RawImage>();
                image.texture = new Texture2D(320, 240, TextureFormat.RGBA32, false);
                tracks.Add(trackId, obj);
            };
            sora.OnRemoveTrack = (trackId) =>
            {
                Debug.LogFormat("OnRemoveTrack: trackId={0}", trackId);
                if (tracks.ContainsKey(trackId))
                {
                    GameObject.Destroy(tracks[trackId]);
                    tracks.Remove(trackId);
                }
            };
        }
        sora.OnNotify = (json) =>
        {
            Debug.LogFormat("OnNotify: {0}", json);
        };
        // Note: this callback is invoked from a thread other than the Unity main thread.
        sora.OnHandleAudio = (buf, samples, channels) =>
        {
            lock (audioBuffer)
            {
                audioBuffer.Enqueue(buf);
                audioBufferSamples += samples;
            }
        };

        if (unityAudioOutput)
        {
            // Streamed mono clip: 48 kHz, 10-second buffer, filled by the PCM reader callback below.
            var audioClip = AudioClip.Create("AudioClip", 480000, 1, 48000, true, (data) =>
            {
                lock (audioBuffer)
                {
                    // Not enough queued audio yet; output silence.
                    if (audioBufferSamples < data.Length)
                    {
                        for (int i = 0; i < data.Length; i++)
                        {
                            data[i] = 0.0f;
                        }
                        return;
                    }

                    // Convert the queued 16-bit samples to floats, advancing to the next
                    // queued buffer whenever the current one is exhausted.
                    var p = audioBuffer.Peek();
                    for (int i = 0; i < data.Length; i++)
                    {
                        data[i] = p[audioBufferPosition] / 32768.0f;
                        ++audioBufferPosition;
                        if (audioBufferPosition >= p.Length)
                        {
                            audioBuffer.Dequeue();
                            audioBufferPosition = 0;
                            // Guard against Peek() on an empty queue when the last queued
                            // buffer ends exactly at the end of this read.
                            if (audioBuffer.Count > 0)
                            {
                                p = audioBuffer.Peek();
                            }
                        }
                    }
                    audioBufferSamples -= data.Length;
                }
            });
            audioSourceOutput.clip = audioClip;
            audioSourceOutput.Play();
        }

        if (!Recvonly)
        {
            AudioRenderer.Start();
            audioSourceInput.Play();
        }
    }
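InitSora() streams received audio through a queue of buffers shared between the OnHandleAudio callback and the AudioClip reader. A sketch of the fields it relies on, with types inferred from how they are used above (16-bit samples divided by 32768.0f, Peek/Dequeue, running counters); the exact declarations in the original sample may differ.

    // Inferred field declarations for the audio path in InitSora().
    System.Collections.Generic.Queue<short[]> audioBuffer = new System.Collections.Generic.Queue<short[]>();
    int audioBufferSamples  = 0;   // total samples currently queued
    int audioBufferPosition = 0;   // read offset within the buffer at the head of the queue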