Code example #1
File: Aja.cs Project: ratsil/bethe.helpers
 byte[] GetVideoFrame()
 {
     // look carefully at ScheduledFrameCompleted in the DeckLink version!!
     Frame.Video cV = _AjaFramesVideoBuffer.Dequeue();
     _AjaFramesVideoToDispose.Enqueue(cV);
     return(cV.aFrameBytes.aBytes);
 }
Code example #2
File: Aja.cs Project: ratsil/bethe.helpers
 byte[] GetAudioFrame()
 {
     if (bFirsTime)
     {
         Preferences.nQueueDeviceLength = (byte)(_cCard.nBufferMaxLength - 2);
     }
     _bNeedToAddFrame = true;
     Frame.Audio cA = _AjaFramesAudioBuffer.Dequeue();
     _AjaFramesAudioToDispose.Enqueue(cA);
     return(cA.aFrameBytes.aBytes);
 }
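
The two getters above simply dequeue the oldest buffered frame, park it on a dispose queue, and hand the raw bytes to the caller. As a rough illustration (not part of the original sources), a consumer loop over these methods could look like the sketch below; cAja, bRunning and WriteAv are hypothetical placeholders.

 // Hypothetical consumer loop: only GetVideoFrame()/GetAudioFrame()
 // come from the listing above, everything else is a placeholder.
 while (bRunning)
 {
     byte[] aVideo = cAja.GetVideoFrame(); // oldest buffered video frame
     byte[] aAudio = cAja.GetAudioFrame(); // matching audio frame
     WriteAv(aVideo, aAudio);              // hand both to the downstream sink
 }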
Code example #3
 static public void EventSend(EventDelegate dEvent, Effect cSender)
 {
     (new Logger()).WriteDebug3("in [hc = " + cSender.nID + "][" + dEvent.Method.Name + "]");
     _aqEvents.Enqueue(new Tuple <EventDelegate, Effect>(dEvent, cSender));
 }
Code example #4
 static public void EventSend(EventDelegate dEvent, Effect cSender, Effect cEffect)
 {
     (new Logger()).WriteDebug3("in [hc = " + cEffect.nID + "]");
     _aqEvents.Enqueue(new Tuple <EventDelegate, Effect, Effect>(dEvent, cSender, cEffect));
 }
Code example #5
File: Plugin.cs Project: ratsil/bethe.ingenie
 static public void EventSend(EventDelegate dEvent, IPlugin cSender)
 {
     _aqEvents.Enqueue(new Tuple <EventDelegate, IPlugin>(dEvent, cSender));
 }
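
All three EventSend overloads only enqueue a tuple into _aqEvents; the delegate is not invoked at the call site, so a separate worker is expected to drain the queue and fire the events. A minimal sketch of such a dispatcher is shown below; the queue being a plain Queue<>, the lock, and EventDelegate taking a single Effect argument are assumptions, not taken from the sources.

 // Hypothetical event dispatcher; assumes _aqEvents is a Queue<Tuple<EventDelegate, Effect>>
 // and that EventDelegate accepts the sending Effect as its only argument.
 static void EventsWorker()
 {
     while (true)
     {
         Tuple<EventDelegate, Effect> cItem = null;
         lock (_aqEvents)
             if (_aqEvents.Count > 0)
                 cItem = _aqEvents.Dequeue();
         if (null == cItem)
         {
             System.Threading.Thread.Sleep(10); // nothing queued, back off briefly
             continue;
         }
         cItem.Item1(cItem.Item2);              // invoke the delegate with its sender
     }
 }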
Code example #6
File: Aja.cs Project: ratsil/bethe.helpers
        override protected bool FrameSchedule()
        {
            if (!NextFrameAttached)
            {
                return(false);
            }

            int nVideoFramesBuffered = 0;

            Frame.Audio cFrameAudio;
            Frame.Video cFrameVideo;
            _dtLastTimeFrameScheduleCalled = DateTime.Now;

            AjaInterop.NTV2VideoFormat eCurrentRef;

            while (true) // puts frames up to max (_AjaFramesBufferMaxCount)
            {
                eCurrentRef = _cCard.eRefStatus;
                if (null == _eReferenceStatus || _eReferenceStatus != eCurrentRef)
                {
                    _eReferenceStatus = eCurrentRef;
                    (new Logger("Aja", sName)).WriteWarning("Refference status has changed to: [" + eCurrentRef + "] (unknown status means 'no ref')");
                }

                cFrameAudio = AudioFrameGet();
                cFrameVideo = VideoFrameGet();
                #region audio
                if (
                    null == (cFrameAudio) ||
                    cFrameAudio.aFrameBytes.IsNullOrEmpty()
                    )
                {
                    (new Logger("Aja", sName)).WriteError("audio frame is empty! [cFrameAudio=" + (cFrameAudio == null ? "NULL" : (null == cFrameAudio.aFrameBytes ? "bytes is NULL" : "" + cFrameAudio.aFrameBytes.Length)) + "]");
                    break;
                }
                #endregion
                #region video
                if (null == (cFrameVideo) || cFrameVideo.aFrameBytes.IsNullOrEmpty())    // _nFPS + _nVideoBufferExtraCapacity < nVideoFramesBuffered
                {
                    (new Logger("Aja", sName)).WriteDebug("got null instead of frame IN DECKLINK !!");
                    break;
                }

                //_cBugCatcherScheduleFrame.Enqueue(cFrameVideo, "FrameSchedule: [_ahFramesBuffersBinds.Count = " + _ahFramesBuffersBinds.Count + "]");

                if (_bDoWritingFrames)
                {
                    if (null != cFrameVideo)
                    {
                        byte[] aBytes = new byte[_nVideoBytesQty];
                        System.Runtime.InteropServices.Marshal.Copy(cFrameVideo.pFrameBytes, aBytes, 0, (int)_nVideoBytesQty);
                        lock (_aqWritingFrames)
                            _aqWritingFrames.Enqueue(aBytes);
                    }
                }
                #endregion

                _AjaFramesAudioBuffer.Enqueue(cFrameAudio);
                _AjaFramesVideoBuffer.Enqueue(cFrameVideo);

                _nAudioQueueLength            = (int)_cCard.nBufferLength;
                nVideoFramesBuffered          = (int)_cCard.nBufferLength;
                n__PROBA__AudioFramesBuffered = (int)_AjaFramesAudioBuffer.nCount;
                n__PROBA__VideoFramesBuffered = (int)_AjaFramesVideoBuffer.nCount;

                while (_AjaFramesAudioToDispose.nCount > 1) // keep the last frame, it may still be in use
                {
                    _AjaFramesAudioToDispose.Dequeue().Dispose();
                }
                while (_AjaFramesVideoToDispose.nCount > 1) // keep the last frame, it may still be in use
                {
                    FrameBufferReleased(_AjaFramesVideoToDispose.Dequeue());
                }

                #region logging
                if (
                    Preferences.nQueueDeviceLength - 2 > _nAudioQueueLength ||
                    Preferences.nQueueDeviceLength - 2 > nVideoFramesBuffered ||
                    Preferences.nQueuePipeLength * 4 / 5 > base._nBufferFrameCount && 0 < base._nBufferFrameCount ||
                    _aq__PROBA__AudioFrames.Count > 2 || _aq__PROBA__VideoFrames.Count > 2
                    )
                {
                    if (_bItsOk == true)
                    {
                        (new Logger("Aja", sName)).WriteError("device queue goes wrong-1:(" + _nAudioQueueLength + ", " + (nVideoFramesBuffered) + ")(" + n__PROBA__AudioFramesBuffered + ", " + n__PROBA__VideoFramesBuffered + ") dev buffer:" + base._nBufferFrameCount + " internal buffer_av:(" + _aq__PROBA__AudioFrames.Count + ", " + _aq__PROBA__VideoFrames.Count + ") -- logc-0");
                        _bItsOk       = false;
                        _nLogCounter2 = 0;
                    }
                    else if (_nLogCounter2++ >= 200)
                    {
                        (new Logger("Aja", sName)).WriteError("device queue goes wrong-2:(" + _nAudioQueueLength + ", " + (nVideoFramesBuffered) + ")(" + n__PROBA__AudioFramesBuffered + ", " + n__PROBA__VideoFramesBuffered + ") dev buffer:" + base._nBufferFrameCount + " internal buffer_av:(" + _aq__PROBA__AudioFrames.Count + ", " + _aq__PROBA__VideoFrames.Count + ") -- logc-" + _nLogCounter2 + _sIterationsCounter2);
                        _sIterationsCounter2 = _sIterationsCounter2 == "." ? ".." : ".";
                        _nLogCounter2        = 0;
                    }
                }
                else
                {
                    if (_bItsOk == null)
                    {
                        if (_nLogCounter2 >= 150)
                        {
                            _bItsOk = true;
                        }
                    }
                    else if (_bItsOk == false)
                    {
                        (new Logger("Aja", sName)).WriteError("device queue was wrong:(" + _nAudioQueueLength + ", " + (nVideoFramesBuffered) + ")(" + n__PROBA__AudioFramesBuffered + ", " + n__PROBA__VideoFramesBuffered + ") dev buffer:" + base._nBufferFrameCount + " internal buffer_av:(" + _aq__PROBA__AudioFrames.Count + ", " + _aq__PROBA__VideoFrames.Count + ") -- logc-" + _nLogCounter2);
                        _bItsOk = true;
                    }

                    if (_nLogCounter2++ >= 2000)
                    {
                        (new Logger("Aja", sName)).WriteNotice("device queue:(" + _nAudioQueueLength + ", " + (nVideoFramesBuffered) + ")(" + n__PROBA__AudioFramesBuffered + ", " + n__PROBA__VideoFramesBuffered + ") dev buffer:" + base._nBufferFrameCount + " internal buffer_av:(" + _aq__PROBA__AudioFrames.Count + ", " + _aq__PROBA__VideoFrames.Count + ")        " + _sIterationsCounter2);
                        _sIterationsCounter2 = _sIterationsCounter2 == "." ? ".." : ".";
                        _nLogCounter2        = 0;
                    }
                }
                #endregion

                if (_AjaFramesAudioBuffer.CountGet() >= _AjaFramesBufferMaxCount)
                {
                    return(true);
                }
            }
            return(false);
        }
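
FrameSchedule keeps pairing audio and video frames and pushing them into the internal buffers until either the device stops returning frames (break, then return false) or _AjaFramesBufferMaxCount is reached (return true). The dispose queues are only drained down to a single element because the most recently dequeued frame may still be in use by a GetVideoFrame/GetAudioFrame caller. A caller-side sketch, assuming the method is driven from a dedicated thread (the stop flag and the pacing interval are invented for illustration):

 // Hypothetical driver around FrameSchedule(); only the method itself comes
 // from the listing above, the loop shape and the 2 ms pacing are guesses.
 void ScheduleWorker()
 {
     while (!_bStopRequested)                  // assumed stop flag
     {
         bool bFull = FrameSchedule();         // true: buffers reached _AjaFramesBufferMaxCount
         if (!bFull)
             System.Threading.Thread.Sleep(2); // nothing to buffer yet, retry shortly
     }
 }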