Example #1
        public IDeckLinkMutableVideoFrame CreateUploadVideoFrame()
        {
            IDeckLinkMutableVideoFrame newFrame = null;

            // 8-bit YUV (UYVY) packs two pixels into four bytes, so the row stride is width * 2 bytes
            m_selectedDevice.deckLinkOutput.CreateVideoFrame(1920, 1080, 1920 * 2, _BMDPixelFormat.bmdFormat8BitYUV, _BMDFrameFlags.bmdFrameFlagDefault, out newFrame);
            return newFrame;
        }
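CreateVideoFrame only allocates the frame; the caller still has to fill its buffer before handing the frame to the output. A minimal usage sketch, assuming the standard DeckLinkAPI interop (GetBytes returns the pointer to the frame's unmanaged buffer) and a hypothetical uploadData array holding one 1920x1080 UYVY image:

        // Sketch only: uploadData is a hypothetical byte[] containing one 1920x1080 UYVY image,
        // laid out with 1920 * 2 bytes per row to match the frame created above.
        private void UploadImage(byte[] uploadData)
        {
            IDeckLinkMutableVideoFrame frame = CreateUploadVideoFrame();

            // Copy the managed pixel data into the frame's unmanaged buffer
            IntPtr destination;
            frame.GetBytes(out destination);
            Marshal.Copy(uploadData, 0, destination, frame.GetRowBytes() * frame.GetHeight());
        }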
Example #2
        private IDeckLinkMutableVideoFrame CreateOutputVideoFrame(Action <IDeckLinkVideoFrame> fillFrame, _BMDPixelFormat fillerPxFormat = _BMDPixelFormat.bmdFormat8BitYUV)
        {
            IDeckLinkMutableVideoFrame referenceFrame = null;
            IDeckLinkMutableVideoFrame scheduleFrame  = null;

            m_selectedDevice.deckLinkOutput.CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth * bytesPerPixel, m_pixelFormat, _BMDFrameFlags.bmdFrameFlagDefault, out scheduleFrame);
            if (m_pixelFormat == fillerPxFormat)
            {
                // Filler and output pixel formats match, fill the schedule frame directly without conversion
                fillFrame(scheduleFrame);
            }
            else
            {
                // 8-bit YUV packs two bytes per pixel; the 8-bit ARGB/BGRA formats use four
                int bpp = 4;
                if (fillerPxFormat == _BMDPixelFormat.bmdFormat8BitYUV)
                {
                    bpp = 2;
                }
                // Pixel formats differ: fill a reference frame in the filler format, then convert it to the output format
                m_selectedDevice.deckLinkOutput.CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth * bpp, fillerPxFormat, _BMDFrameFlags.bmdFrameFlagDefault, out referenceFrame);
                fillFrame(referenceFrame);
                try
                {
                    frameConverter.ConvertFrame(referenceFrame, scheduleFrame);
                }
                catch
                {
                    // Conversion failed; fall back to the unconverted reference frame
                    return referenceFrame;
                }
            }
            return scheduleFrame;
        }
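Example #2 relies on a frameConverter and a bytesPerPixel field that are declared elsewhere in the class. Plausible declarations, mirroring how Example #3 constructs its converter (an assumption; the author's fields are not shown here):

        // Assumed field declarations, not shown in Example #2.
        // CDeckLinkVideoConversion is the coclass behind IDeckLinkVideoConversion in the DeckLink interop.
        private readonly IDeckLinkVideoConversion frameConverter = new CDeckLinkVideoConversion();

        // Row-stride multiplier matching m_pixelFormat: 2 bytes per pixel for 8-bit YUV, 4 for 8-bit ARGB/BGRA
        private int bytesPerPixel = 2;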
Example #3
        private IDeckLinkMutableVideoFrame CreateOutputVideoFrame(Action <IDeckLinkVideoFrame> fillFrame)
        {
            IDeckLinkMutableVideoFrame referenceFrame = null;
            IDeckLinkMutableVideoFrame scheduleFrame  = null;
            IDeckLinkVideoConversion   frameConverter = new CDeckLinkVideoConversion();

            m_selectedDevice.deckLinkOutput.CreateVideoFrame(m_frameWidth, m_frameHeight, BytesPerRow, m_selectedPixelFormat, _BMDFrameFlags.bmdFrameFlagDefault, out scheduleFrame);
            if (m_selectedPixelFormat == _BMDPixelFormat.bmdFormat8BitYUV)
            {
                // Fill 8-bit YUV directly without conversion
                fillFrame(scheduleFrame);
            }
            else
            {
                // Pixel formats differ: first fill an 8-bit YUV frame, then convert it to the required output format
                m_selectedDevice.deckLinkOutput.CreateVideoFrame(m_frameWidth, m_frameHeight, m_frameWidth * 2, _BMDPixelFormat.bmdFormat8BitYUV, _BMDFrameFlags.bmdFrameFlagDefault, out referenceFrame);
                fillFrame(referenceFrame);
                frameConverter.ConvertFrame(referenceFrame, scheduleFrame);
            }

            return scheduleFrame;
        }
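The fillFrame delegate receives the frame it is expected to populate. The fill helpers themselves (FillBlack, FillColourBars) are not shown on this page; a minimal sketch of a FillBlack-style delegate for the 8-bit YUV case, assuming the usual UYVY layout where video black is Y = 0x10 and Cb = Cr = 0x80:

        // Sketch only: fills an 8-bit YUV (UYVY) frame with video black by writing
        // the 32-bit word 0x10801080 (bytes Cb Y Cr Y) across every row.
        private void FillBlack(IDeckLinkVideoFrame theFrame)
        {
            IntPtr buffer;
            theFrame.GetBytes(out buffer);

            int rowBytes = theFrame.GetRowBytes();
            for (int row = 0; row < theFrame.GetHeight(); row++)
            {
                for (int offset = 0; offset < rowBytes; offset += 4)
                {
                    Marshal.WriteInt32(buffer, (row * rowBytes) + offset, 0x10801080);
                }
            }
        }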
Example #4
        private void StartRunning()
        {
            m_selectedDevice.VideoFrameCompleted  += new DeckLinkVideoOutputHandler((b) => this.BeginInvoke((Action)(() => { ScheduleNextFrame(b); })));
            m_selectedDevice.AudioOutputRequested += new DeckLinkAudioOutputHandler(() => this.BeginInvoke((Action)(() => { WriteNextAudioSamples(); })));
            m_selectedDevice.PlaybackStopped      += new DeckLinkPlaybackStoppedHandler(() => this.BeginInvoke((Action)(() => { DisableOutput(); })));

            m_outputSignal      = ((StringObjectPair <OutputSignal>)comboBoxOutputSignal.SelectedItem).value;
            m_audioChannelCount = (uint)((int)comboBoxAudioChannels.SelectedItem);
            m_audioSampleDepth  = ((StringObjectPair <_BMDAudioSampleType>)comboBoxAudioDepth.SelectedItem).value;
            m_audioSampleRate   = _BMDAudioSampleRate.bmdAudioSampleRate48kHz;
            // Get the frame geometry and frame rate from the selected display mode
            m_frameWidth  = m_selectedDisplayMode.GetWidth();
            m_frameHeight = m_selectedDisplayMode.GetHeight();
            m_selectedDisplayMode.GetFrameRate(out m_frameDuration, out m_frameTimescale);
            // Calculate the number of frames per second, rounded up to the nearest integer.  For example, for NTSC (29.97 FPS), framesPerSecond == 30.
            m_framesPerSecond = (uint)((m_frameTimescale + (m_frameDuration - 1)) / m_frameDuration);

            // Set the video output mode
            m_selectedDevice.deckLinkOutput.EnableVideoOutput(m_selectedDisplayMode.GetDisplayMode(), _BMDVideoOutputFlags.bmdVideoOutputFlagDefault);

            // Set the audio output mode
            m_selectedDevice.deckLinkOutput.EnableAudioOutput(m_audioSampleRate, m_audioSampleDepth, m_audioChannelCount, _BMDAudioOutputStreamType.bmdAudioOutputStreamContinuous);

            // Set screen preview callback
            m_selectedDevice.deckLinkOutput.SetScreenPreviewCallback(previewWindow);

            // Generate one second of audio
            m_audioBufferSampleLength = (uint)((m_framesPerSecond * (uint)m_audioSampleRate * m_frameDuration) / m_frameTimescale);
            m_audioBuffer             = Marshal.AllocCoTaskMem((int)(m_audioBufferSampleLength * m_audioChannelCount * ((uint)m_audioSampleDepth / 8)));

            // Zero the buffer (interpreted as audio silence)
            for (int i = 0; i < (m_audioBufferSampleLength * m_audioChannelCount * (uint)m_audioSampleDepth / 8); i++)
            {
                Marshal.WriteByte(m_audioBuffer, i, 0);
            }
            uint audioSamplesPerFrame = (uint)(((uint)m_audioSampleRate * m_frameDuration) / m_frameTimescale);

            // For the pip signal only the first frame's worth of samples carries the test tone;
            // otherwise the tone starts after the first frame and fills the rest of the second
            if (m_outputSignal == OutputSignal.kOutputSignalPip)
            {
                FillSine(m_audioBuffer, audioSamplesPerFrame, m_audioChannelCount, m_audioSampleDepth);
            }
            else
            {
                FillSine(new IntPtr(m_audioBuffer.ToInt64() + (audioSamplesPerFrame * m_audioChannelCount * (uint)m_audioSampleDepth / 8)), (m_audioBufferSampleLength - audioSamplesPerFrame), m_audioChannelCount, m_audioSampleDepth);
            }

            // Generate a frame of black
            m_videoFrameBlack = CreateOutputVideoFrame(FillBlack);

            // Generate a frame of colour bars
            m_videoFrameBars = CreateOutputVideoFrame(FillColourBars);

            // Begin video preroll by scheduling a second of frames in hardware
            m_totalFramesScheduled = 0;
            for (uint i = 0; i < m_framesPerSecond; i++)
            {
                ScheduleNextFrame(true);
            }

            // Begin audio preroll.  This will begin calling our audio callback, which will start the DeckLink output stream.
            m_audioBufferOffset = 0;
            m_selectedDevice.deckLinkOutput.BeginAudioPreroll();

            m_running            = true;
            buttonStartStop.Text = "Stop";
        }
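ScheduleNextFrame does the actual queueing during preroll and in the frame-completed callback, but it is not shown on this page. A minimal sketch of what it could look like, modelled on the Blackmagic SignalGenerator sample and using the fields from Example #4 (an assumption, not the author's code):

        // Sketch only: queues one colour-bars frame per second and black frames otherwise
        // (swapped for the pip signal), then advances the scheduled-frame counter.
        private void ScheduleNextFrame(bool prerolling)
        {
            bool onSecondBoundary = (m_totalFramesScheduled % m_framesPerSecond) == 0;
            IDeckLinkMutableVideoFrame frameToSchedule;

            if (m_outputSignal == OutputSignal.kOutputSignalPip)
            {
                // Pip: one bars frame per second, black otherwise
                frameToSchedule = onSecondBoundary ? m_videoFrameBars : m_videoFrameBlack;
            }
            else
            {
                // Otherwise: one black frame per second, bars otherwise
                frameToSchedule = onSecondBoundary ? m_videoFrameBlack : m_videoFrameBars;
            }

            m_selectedDevice.deckLinkOutput.ScheduleVideoFrame(frameToSchedule,
                m_totalFramesScheduled * m_frameDuration, m_frameDuration, m_frameTimescale);
            m_totalFramesScheduled++;
        }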
Example #5
        public void StartRunning()
        {
            //     m_selectedDevice.VideoFrameCompleted += new DeckLinkVideoOutputHandler((b) => this.BeginInvoke((Action)(() => { ScheduleNextFrame(b); })));
            m_selectedDevice.AudioOutputRequested += new DeckLinkAudioOutputHandler(() => this.BeginInvoke((Action)(() => { WriteNextAudioSamples(); })));
            m_selectedDevice.PlaybackStopped      += new DeckLinkPlaybackStoppedHandler(() => this.BeginInvoke((Action)(() => { DisableOutput(); })));

            m_audioChannelCount = 16;
            m_audioSampleDepth  = _BMDAudioSampleType.bmdAudioSampleType16bitInteger;
            m_audioSampleRate   = _BMDAudioSampleRate.bmdAudioSampleRate48kHz;
            // Extract the IDeckLinkDisplayMode from the video format combo box
            IDeckLinkDisplayMode videoDisplayMode;

            videoDisplayMode = ((DisplayModeEntry)comboBoxVideoFormat.SelectedItem).displayMode;
            m_frameWidth     = videoDisplayMode.GetWidth();
            m_frameHeight    = videoDisplayMode.GetHeight();
            videoDisplayMode.GetFrameRate(out m_frameDuration, out m_frameTimescale);
            // Calculate the number of frames per second, rounded up to the nearest integer.  For example, for NTSC (29.97 FPS), framesPerSecond == 30.
            m_framesPerSecond = (uint)((m_frameTimescale + (m_frameDuration - 1)) / m_frameDuration);
            var mode = videoDisplayMode.GetDisplayMode();

            // Set the video output mode
            m_selectedDevice.deckLinkOutput.EnableVideoOutput(mode, _BMDVideoOutputFlags.bmdVideoOutputFlagDefault);

            // Set the audio output mode
            m_selectedDevice.deckLinkOutput.EnableAudioOutput(m_audioSampleRate, m_audioSampleDepth, m_audioChannelCount, _BMDAudioOutputStreamType.bmdAudioOutputStreamContinuous);

            // Generate one second of audio
            m_audioBufferSampleLength = (uint)(m_framesPerSecond * audioSamplesPerFrame());
            int m_audioBufferDataLength = (int)(m_audioBufferSampleLength * audioDataPerSample());

            m_audioBuffer          = Marshal.AllocCoTaskMem(m_audioBufferDataLength);
            m_audioBufferAllocated = true;

            lock (m_selectedDevice)
            {
                // Zero the buffer (interpreted as audio silence)
                for (int i = 0; i < m_audioBufferDataLength; i++)
                {
                    Marshal.WriteByte(m_audioBuffer, i, 0);
                }
                FillSine(new IntPtr(m_audioBuffer.ToInt64()), m_audioBufferSampleLength, m_audioChannelCount, m_audioSampleDepth);
                m_audioBufferReadOffset  = 0;
                m_audioBufferWriteOffset = 0;
            }
            m_videoFrameARGBBars = CreateOutputVideoFrame(FillBGRAColourBars, _BMDPixelFormat.bmdFormat8BitARGB);
            m_videoFrame         = CreateOutputVideoFrame(FillARGBColourBars, _BMDPixelFormat.bmdFormat8BitARGB);
            m_videoFrameBars     = CreateOutputVideoFrame(FillColourBars);

            // Temporarily switch m_pixelFormat so the next two frames are created as 8-bit BGRA, then restore it below
            m_pixelFormat        = _BMDPixelFormat.bmdFormat8BitBGRA;
            m_videoFrameBGRA     = CreateOutputVideoFrame(FillColourBars);
            m_videoFrameBGRABars = CreateOutputVideoFrame(FillBGRAColourBars, _BMDPixelFormat.bmdFormat8BitBGRA);
            m_pixelFormat        = _BMDPixelFormat.bmdFormat8BitARGB;

            m_running            = true;
            buttonStartStop.Text = "Stop";

            // Begin video preroll by scheduling a second of frames in hardware
            m_totalFramesScheduled = 0;
            for (uint i = 0; i < m_prerollFrames; i++)
            {
                ScheduleNextFrame(true);
            }

            // Begin audio preroll.  This will begin calling our audio callback, which will then start the DeckLink output stream - StartScheduledPlayback.
            m_selectedDevice.deckLinkOutput.BeginAudioPreroll();
            // StopRunning();
        }
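Example #5 leans on two small helpers, audioSamplesPerFrame() and audioDataPerSample(), that are not shown on this page. Plausible definitions, derived from the equivalent expressions in Example #4 (an assumption, not the author's code):

        // Assumed helpers, reconstructed from the arithmetic used in Example #4.
        private uint audioSamplesPerFrame()
        {
            // Number of audio samples that accompany one video frame
            return (uint)(((uint)m_audioSampleRate * m_frameDuration) / m_frameTimescale);
        }

        private uint audioDataPerSample()
        {
            // Bytes occupied by one sample across all channels
            return m_audioChannelCount * ((uint)m_audioSampleDepth / 8);
        }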