Example #1
        /// <summary>
        /// Processes commands pushed to the FIFO.
        /// </summary>
        public void DispatchCalls()
        {
            // Use this opportunity to also dispose any pending channels that were closed.
            _context.RunDeferredActions();

            // Process command buffers.
            while (_ibEnable && !_interrupt && _commandBufferQueue.TryDequeue(out CommandBuffer entry))
            {
                bool flushCommandBuffer = true;

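                // Consume one pending flush skip, if any, and fetch this command
                // buffer without flushing in that case.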
                if (_flushSkips != 0)
                {
                    _flushSkips--;
                    flushCommandBuffer = false;
                }

                _currentCommandBuffer = entry;
                _currentCommandBuffer.Fetch(entry.Processor.MemoryManager, flushCommandBuffer);

                // If we are changing the current channel,
                // we need to force all the host state to be updated.
                if (_prevChannelProcessor != entry.Processor)
                {
                    _prevChannelProcessor = entry.Processor;
                    entry.Processor.ForceAllDirty();
                }

                entry.Processor.Process(entry.EntryAddress, _currentCommandBuffer.Words);
            }

            _interrupt = false;
        }
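
A minimal sketch of the consumer side, for context: a single host GPU thread is expected to drain the queue by calling DispatchCalls. The _running flag, the GpuCommandLoop method, and the WaitForCommands call below are assumptions for illustration, standing in for however the host blocks on the device's AutoResetEvent until new entries are pushed.

        // Sketch only: a dedicated GPU thread draining the command buffer queue.
        // "_running", "GpuCommandLoop" and "WaitForCommands" are assumed names,
        // not part of the class shown above.
        private volatile bool _running = true;

        private void GpuCommandLoop(GPFifoDevice gpFifo)
        {
            while (_running)
            {
                // Block until PushEntries or PushHostCommandBuffer signal new work
                // (assumed to wait on the device's AutoResetEvent).
                gpFifo.WaitForCommands();

                // Process everything queued so far on this thread.
                gpFifo.DispatchCalls();
            }
        }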
Example #2
        /// <summary>
        /// Creates a new instance of the GPU General Purpose FIFO device.
        /// </summary>
        /// <param name="context">GPU context that the GPFIFO belongs to</param>
        internal GPFifoDevice(GpuContext context)
        {
            _commandBufferQueue = new ConcurrentQueue<CommandBuffer>();
            _ibEnable           = true;
            _context            = context;
            _event              = new AutoResetEvent(false);

            _processor = new GPFifoProcessor(context);
        }
Example #3
        /// <summary>
        /// Pushes a GPFIFO entry in the form of a prefetched command buffer.
        /// It is intended to be used by nvservices to handle special cases.
        /// </summary>
        /// <param name="processor">Processor used to process <paramref name="commandBuffer"/></param>
        /// <param name="commandBuffer">The command buffer containing the prefetched commands</param>
        internal void PushHostCommandBuffer(GPFifoProcessor processor, int[] commandBuffer)
        {
            _commandBufferQueue.Enqueue(new CommandBuffer
            {
                Processor    = processor,
                Type         = CommandBufferType.Prefetch,
                Words        = commandBuffer,
                EntryAddress = ulong.MaxValue,
                EntryCount   = (uint)commandBuffer.Length
            });
        }
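
A hedged usage sketch: nvservices-style callers hand the device an already populated buffer of command words. PushExampleBuffer is a hypothetical helper, and the word values are placeholders; real callers would fill the array with GPFIFO method encodings.

        // Illustrative only: enqueue a small prefetched command buffer.
        // "PushExampleBuffer" is a hypothetical helper; "gpFifo" and "processor"
        // stand in for the device and channel processor owned by the caller.
        private void PushExampleBuffer(GPFifoDevice gpFifo, GPFifoProcessor processor)
        {
            int[] words = new int[4]; // placeholder command words

            gpFifo.PushHostCommandBuffer(processor, words);

            // Because Words is already populated, no guest memory read should be
            // needed when DispatchCalls later processes this entry.
        }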
Example #4
        /// <summary>
        /// Creates a new instance of the GPU General Purpose FIFO class.
        /// </summary>
        /// <param name="context">GPU context</param>
        /// <param name="parent">Parent GPU General Purpose FIFO processor</param>
        public GPFifoClass(GpuContext context, GPFifoProcessor parent)
        {
            _context = context;
            _parent  = parent;
            _state   = new DeviceState<GPFifoClassState>(new Dictionary<string, RwCallback>
            {
                { nameof(GPFifoClassState.Semaphored), new RwCallback(Semaphored, null) },
                { nameof(GPFifoClassState.Syncpointb), new RwCallback(Syncpointb, null) },
                { nameof(GPFifoClassState.WaitForIdle), new RwCallback(WaitForIdle, null) },
                { nameof(GPFifoClassState.LoadMmeInstructionRam), new RwCallback(LoadMmeInstructionRam, null) },
                { nameof(GPFifoClassState.LoadMmeStartAddressRam), new RwCallback(LoadMmeStartAddressRam, null) },
                { nameof(GPFifoClassState.SetMmeShadowRamControl), new RwCallback(SetMmeShadowRamControl, null) }
            });

            _macros    = new Macro[MacrosCount];
            _macroCode = new int[MacroCodeSize];
        }
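
The dictionary above wires named GPFifoClassState registers to write callbacks. As a hedged sketch of how that wiring is consumed: a register write is assumed to be forwarded to the DeviceState instance, which invokes the RwCallback registered for the field at that offset. The Write wrapper and the DeviceState.Write signature below are assumptions for illustration, not confirmed API.

        // Hedged sketch: how a register write is assumed to reach one of the
        // callbacks registered in the constructor above. The Write signature on
        // DeviceState is an assumption; the offset would be derived from the
        // field's position inside GPFifoClassState.
        public void Write(int offset, int data)
        {
            // DeviceState is assumed to look up the field at "offset" and, if an
            // RwCallback with a write delegate was registered for it, invoke that
            // delegate with "data" (e.g. Syncpointb or WaitForIdle).
            _state.Write(offset, data);
        }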
Example #5
        /// <summary>
        /// Creates a CommandBuffer from a GPFIFO entry.
        /// </summary>
        /// <param name="processor">Processor used to process the command buffer pointed to by <paramref name="entry"/></param>
        /// <param name="entry">The GPFIFO entry</param>
        /// <returns>A new CommandBuffer based on the GPFIFO entry</returns>
        private static CommandBuffer CreateCommandBuffer(GPFifoProcessor processor, GPEntry entry)
        {
            CommandBufferType type = CommandBufferType.Prefetch;

            if (entry.Entry1Sync == Entry1Sync.Wait)
            {
                type = CommandBufferType.NoPrefetch;
            }

            ulong startAddress = ((ulong)entry.Entry0Get << 2) | ((ulong)entry.Entry1GetHi << 32);

            return new CommandBuffer
            {
                Processor    = processor,
                Type         = type,
                Words        = null,
                EntryAddress = startAddress,
                EntryCount   = (uint)entry.Entry1Length
            };
        }
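
As a worked example of the shifts above (values are illustrative): Entry0Get carries the command buffer GPU virtual address with its low two bits dropped, and Entry1GetHi carries bits 32 and up, so the two fields recombine into the full start address.

        // Illustrative values for a command buffer at GPU VA 0x1_0100_0000:
        ulong entry0Get   = 0x0040_0000; // 0x0100_0000 >> 2
        ulong entry1GetHi = 0x01;        // bits 32 and above of the address

        ulong startAddress = (entry0Get << 2) | (entry1GetHi << 32);

        // startAddress == 0x1_0100_0000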
Example #6
        /// <summary>
        /// Pushes GPFIFO entries.
        /// </summary>
        /// <param name="processor">Processor used to process the command buffers pointed to by <paramref name="entries"/></param>
        /// <param name="entries">GPFIFO entries</param>
        internal void PushEntries(GPFifoProcessor processor, ReadOnlySpan<ulong> entries)
        {
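            // Command buffers are fetched ahead of time only until the first
            // syncpoint wait (NoPrefetch) entry is seen; anything queued after a
            // wait is left to be fetched when it is dispatched.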
            bool beforeBarrier = true;

            for (int index = 0; index < entries.Length; index++)
            {
                ulong entry = entries[index];

                CommandBuffer commandBuffer = CreateCommandBuffer(processor, Unsafe.As<ulong, GPEntry>(ref entry));

                if (beforeBarrier && commandBuffer.Type == CommandBufferType.Prefetch)
                {
                    commandBuffer.Fetch(processor.MemoryManager);
                }

                if (commandBuffer.Type == CommandBufferType.NoPrefetch)
                {
                    beforeBarrier = false;
                }

                _commandBufferQueue.Enqueue(commandBuffer);
            }
        }
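
A hedged sketch of the producer side: entries read from the guest GPFIFO ring are passed to PushEntries as raw 64-bit values and reinterpreted as GPEntry structures by CreateCommandBuffer. SubmitRingEntries and ReadRingEntries are hypothetical helpers, and gpFifo, processor, ringGpuVa and entryCount are assumed names for illustration only.

        // Illustrative only: hand raw ring entries to the device.
        // "SubmitRingEntries" and "ReadRingEntries" are hypothetical helpers.
        private void SubmitRingEntries(GPFifoDevice gpFifo, GPFifoProcessor processor, ulong ringGpuVa, int entryCount)
        {
            ReadOnlySpan<ulong> entries = ReadRingEntries(ringGpuVa, entryCount);

            gpFifo.PushEntries(processor, entries);

            // Entries become CommandBuffer instances immediately; their command
            // words are read either now (prefetch entries seen before any wait)
            // or later, when DispatchCalls fetches them.
        }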