Example #1
        public static Interop.CaptureFrame TryCaptureFrame(Interop.Recv recv)
        {
            Interop.CaptureFrame captureFrame = new Interop.CaptureFrame();

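            // A zero timeout means Capture returns immediately with whatever frame
            // (if any) is available; presumably this is the timeout-in-milliseconds
            // parameter of the underlying receive call.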
            captureFrame.frameType = recv.Capture(
                out captureFrame.videoFrame,
                out captureFrame.audioFrame,
                out captureFrame.metadataFrame,
                0);

            return captureFrame;
        }
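
The helper above always returns a populated struct, while Example #2 below checks its result against null. A minimal sketch of a nullable-returning variant, assuming the Interop.FrameType enum exposes a None value that Capture reports when no frame arrived, could look like this:

        public static Interop.CaptureFrame? TryCaptureFrameOrNull(Interop.Recv recv)
        {
            Interop.CaptureFrame captureFrame = new Interop.CaptureFrame();

            captureFrame.frameType = recv.Capture(
                out captureFrame.videoFrame,
                out captureFrame.audioFrame,
                out captureFrame.metadataFrame,
                0);

            // Assumption: FrameType.None indicates that nothing was captured.
            if (captureFrame.frameType == Interop.FrameType.None)
            {
                return null;
            }

            return captureFrame;
        }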
Example #2
        void TryReceiveFrame()
        {
            while (!_exitThread)
            {
                Interop.CaptureFrame? captureFrameOrNull;

                lock (threadlock)
                {
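                    // No receiver available yet; skip this iteration and poll again.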
                    if (_recv == null)
                    {
                        continue;
                    }
                    captureFrameOrNull = RecvHelper.TryCaptureFrame(_recv);
                }

                if (captureFrameOrNull == null)
                {
                    continue;
                }

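                // The null case was handled above, so unwrapping the struct here is safe.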
                Interop.CaptureFrame captureFrame = captureFrameOrNull.GetValueOrDefault();

                switch (captureFrame.frameType)
                {
                case Interop.FrameType.Video:

                    // Add to the queue for processing on the main thread.
                    // We cannot free the frame here because the data's IntPtr
                    // address must remain valid while the frame is processed on
                    // the main thread, so the video frame is freed after processing.
                    lock (videoFrameQueue)
                    {
                        // We only need to keep a single frame at any point
                        // So we can free up any previous frames first
                        videoFrameQueue.ForEach(vf => _recv.FreeVideoFrame(vf));
                        videoFrameQueue.Clear();
                        videoFrameQueue.Add(captureFrame.videoFrame);
                    }

                    // Send some metadata back
                    SendMetadataFrame(sendMetadataFrameData);

                    break;

                case Interop.FrameType.Audio:

                    // Create audio buffer

                    // we're working in bytes, so take the size of a 32-bit sample (float) into account
                    int sizeInBytes = (int)captureFrame.audioFrame.NoSamples * (int)captureFrame.audioFrame.NoChannels * sizeof(float);

                    // Unity audio is interleaved, so we need to convert from planar
                    Interop.AudioFrameInterleaved audioFrameInterleaved = new Interop.AudioFrameInterleaved
                    {
                        SampleRate = captureFrame.audioFrame.SampleRate,
                        NoChannels = captureFrame.audioFrame.NoChannels,
                        NoSamples  = captureFrame.audioFrame.NoSamples,
                        TimeCode   = captureFrame.audioFrame.Timecode
                    };

                    // we need a managed byte array for our buffer
                    byte[] audBuffer = new byte[sizeInBytes];

                    // pin the byte[] and get a GC handle to it
                    // doing it this way saves an expensive Marshal.Alloc/Marshal.Copy/Marshal.Free later
                    // the data will only be moved once, during the fast interleave step that is required anyway
                    GCHandle handle = GCHandle.Alloc(audBuffer, GCHandleType.Pinned);

                    // access it by an IntPtr and use it for our interleaved audio buffer
                    audioFrameInterleaved.Data = handle.AddrOfPinnedObject();

                    // Convert from float planar to float interleaved audio
                    _recv.UtilAudioToInterleaved(ref captureFrame.audioFrame, ref audioFrameInterleaved);

                    // Release the pin on the byte[].
                    // Never try to access audioFrameInterleaved.Data after the byte[]
                    // has been unpinned; that IntPtr will no longer be valid.
                    handle.Free();

                    // Add to audio buffer for processing in audio thread
                    lock (audioBuffer)
                    {
                        audioBuffer.AddRange(Util.ConvertByteArrayToFloat(audBuffer));
                    }

                    if (captureFrame.audioFrame.Metadata != IntPtr.Zero)
                    {
                        // Handle AudioFrame metadata
                        lock (audioMetadataQueue)
                        {
                            audioMetadataQueue.Add(Marshal.PtrToStringAnsi(captureFrame.audioFrame.Metadata));
                        }
                    }

                    // We can free the audio frame now that it has been processed
                    // (converted to an interleaved array of floats)
                    _recv.FreeAudioFrame(captureFrame.audioFrame);

                    break;

                case Interop.FrameType.Metadata:

                    if (captureFrame.metadataFrame.Data != IntPtr.Zero)
                    {
                        // Handle MetadataFrame metadata
                        lock (metadataFrameQueue)
                        {
                            metadataFrameQueue.Add(Marshal.PtrToStringAnsi(captureFrame.metadataFrame.Data));
                        }
                    }
                    // free frames that were received
                    _recv.FreeMetadataFrame(captureFrame.metadataFrame);
                    break;
                }
            }
        }
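
Util.ConvertByteArrayToFloat is not shown in these examples. Because the pinned byte[] already contains interleaved 32-bit float samples after UtilAudioToInterleaved, a minimal sketch of such a helper (the name and signature are taken from the call above; the body is an assumption) can simply reinterpret the bytes with Buffer.BlockCopy:

        public static float[] ConvertByteArrayToFloat(byte[] bytes)
        {
            // Each sample is a 32-bit float, i.e. sizeof(float) = 4 bytes.
            float[] samples = new float[bytes.Length / sizeof(float)];

            // Straight memory copy; the bytes already hold interleaved float
            // samples, so no per-sample conversion is required.
            Buffer.BlockCopy(bytes, 0, samples, 0, bytes.Length);

            return samples;
        }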