예제 #1
0
        /// <summary>
        /// Decodes a newly received block, reconstructs it against the previous block at the
        /// same grid position if it is delta-encoded, stores it in the FrameBlocks table, and
        /// copies its decoded pixels into the shared frame buffer.
        /// </summary>
        /// <param name="newBlock">The freshly received block to process.</param>
        public void ProcessBlock(FrameBlock newBlock)
        {
            // Decode the block
            // NOTE(review): DateTime.Now shifts with DST/timezone changes; if
            // lastBlockProcessedAt is only used for elapsed-time checks, DateTime.UtcNow
            // would be safer — confirm against its consumers before changing.
            lastBlockProcessedAt = DateTime.Now;
            newBlock.Decode();
            // Row-major index into FrameBlocks: blockY * blocksPerRow + blockX.
            int blockIndex = horizontalBlocks * newBlock.BlockY + newBlock.BlockX;

            // Delta blocks only carry differences; reconstruct the full pixels by
            // adding the delta to the previous block at this grid position.
            if (newBlock.BlockType == BlockType.JpegDiff)
            {
                var oldBlock = FrameBlocks[blockIndex];
                newBlock.AddTo(oldBlock);
            }
            InsertBlock(newBlock, blockIndex);

            // Insert the decoded block into the current frame buffer
            // Byte offset of the block's top-left pixel within the frame buffer.
            int startingOffset = (newBlock.BlockX * BlockSizeInBytes) + (newBlock.BlockY * horizontalBlocks * TotalBlockSizeInBytes);

            newBlock.EncodedStream.Position = 0;
            int blockOffset = 0;

            // Serialize access to the frame buffer while copying the block in,
            // one scanline at a time (the block's rows are not contiguous in the frame).
            lock (frame)
            {
                for (int line = 0; line < BlockSize; line++)
                {
                    int frameOffset = startingOffset + (line * widthInBytes);
                    Buffer.BlockCopy(newBlock.RgbaRaw, blockOffset, frame, frameOffset, BlockSizeInBytes);
                    blockOffset += BlockSizeInBytes;
                }
            }
        }
예제 #2
0
        /// <summary>
        /// Extracts one block's worth of pixel data from a raw frame image into a pooled
        /// FrameBlock. When the stride is non-positive the source image is stored upside
        /// down, so both the block's y-coordinate and its scanlines are flipped.
        /// </summary>
        /// <param name="image">The raw source frame bytes.</param>
        /// <param name="blockX">Horizontal block coordinate within the frame.</param>
        /// <param name="blockY">Vertical block coordinate within the frame.</param>
        /// <returns>A pooled FrameBlock populated with the block's raw pixel bytes.</returns>
        private FrameBlock GetBlockFromImage(byte[] image, int blockX, int blockY)
        {
            FrameBlock block = frameBlockPool.GetNext();

            // Non-positive stride => bottom-up image; reverse the y-axis.
            bool flipVertically = stride <= 0;

            block.BlockX = (byte)blockX;
            block.BlockY = flipVertically
                ? (byte)(verticalBlocks - blockY - 1)
                : (byte)blockY;

            // Byte offset of the block's top-left pixel within the source image.
            int baseOffset = (blockX * BlockSizeInBytes) + (blockY * horizontalBlocks * TotalBlockSizeInBytes);

            for (int row = 0; row < BlockSize; row++)
            {
                int sourceRow = flipVertically ? BlockSize - row - 1 : row;
                Buffer.BlockCopy(image,
                                 baseOffset + (sourceRow * widthInBytes),
                                 block.RgbaRaw,
                                 row * BlockSizeInBytes,
                                 BlockSizeInBytes);
            }
            return block;
        }
예제 #3
0
        /// <summary>
        /// Splits a received chunk into its constituent frame blocks.
        /// Chunk header layout (see GetNextChunkFromQueue): [0] block count,
        /// [1] commanded video quality, [2] proposed video quality, [3] jpeg quality.
        /// </summary>
        /// <param name="chunk">The raw chunk to parse; its CurrentOffset is advanced past each block.</param>
        /// <param name="remoteSsrcId">The SSRC of the remote sender, used for quality logging.</param>
        /// <returns>The parsed frame blocks, in the order they appear in the chunk.</returns>
        public FrameBlock[] ParseChunk(ByteStream chunk, ushort remoteSsrcId)
        {
            byte numBlocks = chunk.Data[0];

            // ks 7/18/11 - This seems like an odd place to do it, but I can't think of a better one.
            _videoQualityController.LogReceivedVideoQuality(remoteSsrcId, (VideoQuality)chunk.Data[1], (VideoQuality)chunk.Data[2]);

            byte jpegQuality = chunk.Data[3];
            Debug.Assert(jpegQuality > 0 && jpegQuality <= 100);

            // Skip past the chunk header and read the blocks back-to-back.
            chunk.CurrentOffset = ChunkHeaderLength;

            var parsedBlocks = new FrameBlock[numBlocks];
            for (int blockIndex = 0; blockIndex < numBlocks; blockIndex++)
            {
                FrameBlock frameBlock = _frameBlockPool.GetNext();
                frameBlock.JpegQuality = jpegQuality;
                frameBlock.BlockX      = chunk.ReadByte();
                frameBlock.BlockY      = chunk.ReadByte();
                frameBlock.BlockType   = (BlockType)chunk.ReadByte();

                short payloadLength = chunk.ReadInt16();
                Debug.Assert(payloadLength > 0, "The payloadLength must be greater than 0");

                // Wrap the payload bytes in place rather than copying them.
                frameBlock.EncodedStream  = new MemoryStream(chunk.Data, chunk.CurrentOffset, payloadLength);
                chunk.CurrentOffset      += payloadLength;

                parsedBlocks[blockIndex] = frameBlock;
            }
            return parsedBlocks;
        }
예제 #4
0
        /// <summary>
        /// Subtracts the current block from the old block and calculates the average color distance between the two
        /// </summary>
        /// <param name="oldBlock">The old block</param>
        /// <param name="lowendCutoff">The minimum amount of distance allowed before we stop comparing blocks (i.e., no need to send this block)</param>
        /// <param name="highendCutoff">The max amount of delta allowed before we stop comparing blocks (i.e., we won't send this block delta-encoded</param>
        /// <returns>The average color distance between the two blocks</returns>
        /// <summary>
        /// Subtracts the current block from the old block (writing per-channel deltas into
        /// RgbaDelta) and calculates the average color distance between the two.
        /// </summary>
        /// <param name="oldBlock">The old block; when null, no delta is computed and 0.0 is returned.</param>
        /// <param name="lowendCutoff">The minimum amount of distance allowed before we stop comparing blocks (i.e., no need to send this block)</param>
        /// <param name="highendCutoff">The max amount of delta allowed before we stop comparing blocks (i.e., we won't send this block delta-encoded)</param>
        /// <returns>The average color distance between the two blocks</returns>
        public double SubtractFrom(FrameBlock oldBlock, double lowendCutoff, double highendCutoff)
        {
            if (oldBlock == null)
            {
                return 0.0;
            }

            var       oldFrame        = oldBlock.RgbaRaw;
            int       totalDistance   = 0;
            float     averageDistance = 0.0f;
            const int segments        = 5;
            int       pixelsProcessed = 0;

            // The block is processed in interleaved passes ("segments"): each pass starts at a
            // different pixel offset and steps (segments - 1) pixels at a time.
            // NOTE(review): with segments == 5 and a stride of (segments - 1) == 4 pixels, the
            // pixelOffset == 4 pass revisits pixels already handled by the pixelOffset == 0
            // pass — confirm whether the stride was meant to be `segments`.
            for (int pixelOffset = 0; pixelOffset < segments; pixelOffset++)
            {
                // Only calculate the color distance on the first segment. If that sample shows
                // the block is clearly unchanged (below lowendCutoff) or too different to be
                // worth delta-encoding (above highendCutoff), skip the remaining segments.
                if (pixelOffset == 0)
                {
                    for (int index = pixelOffset * VideoConstants.BytesPerPixel; index < RgbaRaw.Length; index += (segments - 1) * VideoConstants.BytesPerPixel)
                    {
                        // Channels appear to be stored B, G, R with a fourth (alpha) byte
                        // skipped by the final `index += 2` — confirm against the pixel format.
                        byte b    = RgbaRaw[index];
                        byte oldB = oldFrame[index];
                        RgbaDelta[index] = EncodeDelta(oldB, b);
                        index++;
                        byte g    = RgbaRaw[index];
                        byte oldG = oldFrame[index];
                        RgbaDelta[index] = EncodeDelta(oldG, g);
                        index++;
                        byte r    = RgbaRaw[index];
                        byte oldR = oldFrame[index];
                        RgbaDelta[index] = EncodeDelta(oldR, r);
                        index           += 2;
                        // NOTE(review): a stale comment here claimed the distance is squared, but no
                        // squaring happens at this call site — presumably GetColorDistance does it
                        // internally; verify. (Also removed a stray double semicolon.)
                        totalDistance   += VideoHelper.GetColorDistance(r, g, b, oldR, oldG, oldB);
                        pixelsProcessed++;
                    }
                    // Guard against a zero-length RgbaRaw buffer producing NaN (0 / 0).
                    averageDistance = pixelsProcessed > 0 ? totalDistance / (float)pixelsProcessed : 0.0f;
                    if (averageDistance < lowendCutoff || averageDistance > highendCutoff)
                    {
                        break;
                    }
                }
                else
                {
                    // Remaining segments: record the deltas only, no distance measurement.
                    for (int index = pixelOffset * VideoConstants.BytesPerPixel; index < RgbaRaw.Length; index += (segments - 1) * VideoConstants.BytesPerPixel)
                    {
                        byte b = RgbaRaw[index];
                        RgbaDelta[index] = EncodeDelta(oldFrame[index], b);
                        index++;
                        byte g = RgbaRaw[index];
                        RgbaDelta[index] = EncodeDelta(oldFrame[index], g);
                        index++;
                        byte r = RgbaRaw[index];
                        RgbaDelta[index] = EncodeDelta(oldFrame[index], r);
                        index           += 2;
                    }
                }
            }
            return averageDistance;
        }
예제 #5
0
        /// <summary>
        /// Enqueues an encoded 16x16 block for later transmission.
        /// </summary>
        /// <param name="newBlock">The FrameBlock to be queued for transmission</param>
        /// <summary>
        /// Enqueues an encoded 16x16 block for later transmission.
        /// </summary>
        /// <param name="newBlock">The FrameBlock to be queued for transmission</param>
        private void EncodeAndEnqueueBlock(FrameBlock newBlock)
        {
            // NOTE(review): this locks on the queue object itself; consumers must use the
            // same lock target (GetNextChunkFromQueue does lock its queue parameter).
            // A private lock object would be the usual pattern — confirm all call sites first.
            lock (EncodedBlocks)
            {
                TrimQueueIfNecessary();
                // Encode inside the lock at the currently commanded JPEG quality.
                newBlock.Encode(_videoQualityController.JpegQuality);

                // Increment the reference count before we insert the block into an external container.
                newBlock.ReferenceCount++;
                EncodedBlocks.Enqueue(newBlock);
            }
        }
예제 #6
0
        /// <summary>
        /// Initializes the frame's dimensions and allocates the block table.
        /// </summary>
        /// <param name="height">Frame height in pixels; must be a multiple of BlockSize.</param>
        /// <param name="width">Frame width in pixels; must be a multiple of BlockSize.</param>
        /// <param name="frameBlockPool">Pool used to allocate and recycle FrameBlock instances.</param>
        /// <exception cref="ArgumentException">Thrown when width or height is not a multiple of BlockSize.</exception>
        protected VideoFrame(int height, int width, IObjectPool <FrameBlock> frameBlockPool)
        {
            if (width % BlockSize != 0 || height % BlockSize != 0)
            {
                // Report the constant the check actually uses: the original message referenced
                // VideoConstants.VideoBlockSize, which is not the value being validated here.
                throw new ArgumentException(string.Format("The height and width must be multiples of {0}.", BlockSize));
            }
            this.height         = (short)height;
            this.width          = (short)width;
            this.frameBlockPool = frameBlockPool;
            widthInBytes        = width * VideoConstants.BytesPerPixel;           // bytes per scanline
            horizontalBlocks    = width / BlockSize;
            verticalBlocks      = height / BlockSize;
            int totalBlocks = horizontalBlocks * verticalBlocks;

            FrameBlocks = new FrameBlock[totalBlocks];
        }
예제 #7
0
 /// <summary>
 /// Populates the current raw block by adding the current delta to the old raw block.
 /// </summary>
 /// <param name="oldBlock">The old block</param>
 /// <summary>
 /// Populates the current raw block by adding the current delta to the old raw block.
 /// </summary>
 /// <param name="oldBlock">The old block; when null the delta bytes are copied verbatim.</param>
 public void AddTo(FrameBlock oldBlock)
 {
     if (oldBlock == null)
     {
         // If we don't have anything to construct the frame against, just copy the delta to the RgbaRaw buffer.
         Buffer.BlockCopy(RgbaDelta, 0, RgbaRaw, 0, RgbaDelta.Length);
         return;
     }

     var baseline = oldBlock.RgbaRaw;
     for (int offset = 0; offset < RgbaDelta.Length; offset++)
     {
         if (offset % 4 == 3)
         {
             // Every fourth byte is forced to 0xFF rather than reconstructed.
             RgbaRaw[offset] = 0xFF;
         }
         else
         {
             RgbaRaw[offset] = ReconstructOriginal(baseline[offset], RgbaDelta[offset]);
         }
     }
 }
예제 #8
0
 /// <summary>
 /// Stores newBlock at the given index in the FrameBlocks table, recycling whatever
 /// block previously occupied that slot back to the pool.
 /// </summary>
 /// <param name="newBlock">The block to store.</param>
 /// <param name="index">Index into FrameBlocks (computed elsewhere as blockY * horizontalBlocks + blockX).</param>
 public void InsertBlock(FrameBlock newBlock, int index)
 {
     // Return the displaced block to the pool before overwriting its slot.
     frameBlockPool.Recycle(FrameBlocks[index]);
     // Bump the reference count because the FrameBlocks table now holds a reference.
     newBlock.ReferenceCount++;
     FrameBlocks[index] = newBlock;
 }
예제 #9
0
        /// <summary>
        /// Packs as many queued blocks as will fit into a single transmit chunk.
        /// Chunk layout: [0] block count, [1] commanded quality, [2] proposed quality,
        /// [3] jpeg quality, followed by the serialized blocks back-to-back.
        /// </summary>
        /// <param name="queue">The queue of encoded blocks to drain; must be locked by all producers.</param>
        /// <param name="buffer">The destination buffer; overwritten from the start.</param>
        /// <returns>True if at least one block was written into the buffer; otherwise false.</returns>
        public bool GetNextChunkFromQueue(Queue <FrameBlock> queue, ByteStream buffer)
        {
            byte blocks = 0;

            // Leave room for the chunk header; it is back-filled at the end.
            buffer.CurrentOffset = ChunkHeaderLength;
            short jpegQuality = 0;

            // Pull blocks from the queue and insert them into the buffer until the buffer is full or the block queue is empty.
            FrameBlock block = null;

            while (buffer.CurrentOffset < _maxChunkSize)
            {
                // See if there's room left in the current chunk for the next block.
                lock (queue)
                {
                    // Stop pulling blocks from the queue if there are no more blocks.
                    if (queue.Count == 0)
                    {
                        break;
                    }

                    // Stop pulling blocks from the queue if there's no more room in the chunk.
                    // NOTE(review): the `buffer.CurrentOffset > ChunkHeaderLength` clause means a
                    // first block larger than _maxChunkSize is written anyway (preventing a stall);
                    // confirm the buffer is sized to tolerate such an oversized chunk.
                    var peek = queue.Peek();
                    if ((buffer.CurrentOffset + BlockHeaderLength + peek.EncodedStream.Length) > _maxChunkSize && buffer.CurrentOffset > ChunkHeaderLength)
                    {
                        break;
                    }

                    // Stop pulling blocks from the queue if the jpegQuality has changed.
                    // (jpegQuality == 0 means no block has been written yet.)
                    if ((jpegQuality > 0 && peek.JpegQuality != jpegQuality))
                    {
                        break;
                    }

                    block = queue.Dequeue();
                }
                blocks++;
                jpegQuality = block.JpegQuality;

                // Set the x,y position of the block.
                buffer.WriteByte(block.BlockX);
                buffer.WriteByte(block.BlockY);

                // Set the type of the block.
                buffer.WriteByte((byte)block.BlockType);

                // Set the size of the block.
                var streamLength = (short)block.EncodedStream.Length;
                Debug.Assert(streamLength > 0, "The length of the encoded stream must be greater than 0");
                buffer.WriteInt16(streamLength);

                // Set the actual block data.
                block.EncodedStream.Position = 0;
                block.EncodedStream.Read(buffer.Data, buffer.CurrentOffset, streamLength);
                buffer.CurrentOffset += streamLength;

                // The block is fully serialized; return it to the pool.
                _frameBlockPool.Recycle(block);
            }

            // If we've retrieved at least one block, go back to the beginning of the buffer
            // and set the chunk header information
            if (block != null)
            {
                buffer.Data[0]    = blocks;
                buffer.Data[1]    = (byte)_videoQualityController.CommandedVideoQuality;
                buffer.Data[2]    = (byte)_videoQualityController.ProposedVideoQuality;
                buffer.Data[3]    = (byte)jpegQuality;
                buffer.DataLength = buffer.CurrentOffset;
                buffer.ResetCurrentOffset();
                return(true);
            }
            return(false);
        }