Example #1
 /// <summary>
 /// Thread entry point: unboxes the parameter array handed over via a
 /// ParameterizedThreadStart-style call.
 /// </summary>
 /// <param name="thread_parm">
 /// object[] containing { mlv_vidf_hdr_t header, byte[] raw data, int raw position }.
 /// </param>
 private void HandleBlockAsync(object thread_parm)
 {
     var args = (object[])thread_parm;

     /* NOTE(review): the unboxed values are unused in this excerpt — the
        remainder of the method is presumably not shown here */
     var header  = (MLVTypes.mlv_vidf_hdr_t)args[0];
     var rawData = (byte[])args[1];
     var rawPos  = (int)args[2];
 }
Example #2
        /// <summary>
        /// Handles an incoming VIDF (video frame) block: either keeps a copy of the raw
        /// packed pixel data, or (when debayering is enabled) unpacks, debayers and renders
        /// the frame into <c>LockBitmap</c> and auto-adjusts the debayer brightness.
        /// </summary>
        /// <param name="type">Block type fourcc; unused here, the caller dispatches on it.</param>
        /// <param name="header">The VIDF block header belonging to this frame.</param>
        /// <param name="rawData">Buffer containing the frame payload.</param>
        /// <param name="rawPos">Offset of the frame payload within <paramref name="rawData"/>.</param>
        /// <param name="rawLength">Length of the frame payload in bytes.</param>
        public void HandleBlock(string type, MLVTypes.mlv_vidf_hdr_t header, byte[] rawData, int rawPos, int rawLength)
        {
            VidfHeader = header;

            /* only video class 0x01 with an allocated display bitmap can be processed */
            if (FileHeader.videoClass != 0x01 || LockBitmap == null)
            {
                return;
            }

            if (!VideoEnabled)
            {
                return;
            }

            /* NOTE(review): locking on 'this' is fragile (any external code can lock the
               same object); kept as-is because other members may synchronize on it */
            lock (this)
            {
                /* skip the frame-space padding that precedes the pixel data,
                   compensated by RawFixOffset and clamped at zero */
                int frameSpace = (int)Math.Max(header.frameSpace - RawFixOffset, 0);
                int startPos   = rawPos + frameSpace;
                int length     = rawLength - frameSpace;

                if (!DebayeringEnabled)
                {
                    /* no debayering: just keep a copy of the (still packed) raw pixel data */
                    RawPixelData = new byte[length];
                    Array.Copy(rawData, startPos, RawPixelData, 0, length);
                    return;
                }

                /* first extract the raw channel values */
                /* NOTE(review): this passes 'rawLength' while the raw copy path above uses
                   the frame-space adjusted 'length' — confirm which is intended */
                Bitunpack.Process(rawData, startPos, rawLength, PixelData);

                /* then debayer the pixel data */
                Debayer.Process(PixelData, RGBData);

                /* and transform into a bitmap for displaying */
                LockBitmap.LockBits();

                int    pos     = 0;
                uint[] average = new uint[] { 0, 0, 0 };

                for (int y = 0; y < RawiHeader.yRes; y++)
                {
                    for (int x = 0; x < RawiHeader.xRes; x++)
                    {
                        float r = RGBData[y, x, 0];
                        float g = RGBData[y, x, 1];
                        float b = RGBData[y, x, 2];

                        if (ColorLut != null)
                        {
                            ColorLut.Lookup(r, g, b, out r, out g, out b);
                        }

                        /* now scale to TV black/white levels */
                        ScaleLevels(ref r);
                        ScaleLevels(ref g);
                        ScaleLevels(ref b);

                        /* accumulate per-channel sums for the brightness estimate below
                           (only the total of all three is used, so the index order is irrelevant) */
                        average[0] += (uint)g;
                        average[1] += (uint)b;
                        average[2] += (uint)r;

                        /* write pixels in BGR byte order; assumes ScaleLevels clamps the
                           values to 0..255, else the (byte) cast would wrap — verify */
                        LockBitmap.Pixels[pos++] = (byte)b;
                        LockBitmap.Pixels[pos++] = (byte)g;
                        LockBitmap.Pixels[pos++] = (byte)r;
                    }
                }
                LockBitmap.UnlockBits();

                int pixels = RawiHeader.yRes * RawiHeader.xRes;

                /* make sure the average brightness is somewhere in the mid range */
                if (Math.Abs(_ExposureCorrection) == 0.0f)
                {
                    /* guard against zero-sized frames (previously a DivideByZeroException) */
                    if (pixels > 0)
                    {
                        /* fixed: use floating-point division; the previous '/ (3 * pixels)'
                           was an integer division that truncated the brightness before the
                           range checks below */
                        double averageBrightness = (average[0] + average[1] + average[2]) / (3.0 * pixels);
                        if (averageBrightness < 100)
                        {
                            Debayer.Brightness *= 1.0f + (float)(100.0f - averageBrightness) / 100.0f;
                        }
                        if (averageBrightness > 200)
                        {
                            Debayer.Brightness /= 1.0f + (float)(averageBrightness - 200.0f) / 55.0f;
                        }
                    }
                }
                else
                {
                    /* explicit exposure correction, interpreted as EV stops */
                    Debayer.Brightness = (float)Math.Pow(2, _ExposureCorrection);
                }

                FrameUpdated = true;
            }
        }
Example #3
        /// <summary>
        /// Scans every block in <c>BlockIndex</c> and builds cross-reference tables that map
        /// video (VIDF) and audio (AUDF) frame numbers to their block index position, the
        /// metadata state at that point and the block timestamp. Also updates the frame
        /// statistics (<c>TotalVideoFrameCount</c>, <c>HighestVideoFrameNumber</c>,
        /// <c>FrameRedundantErrors</c>, <c>FrameMissingErrors</c>).
        /// </summary>
        public virtual void BuildFrameIndex()
        {
            if (Reader == null)
            {
                return;
            }

            HighestVideoFrameNumber = 0;
            TotalVideoFrameCount    = 0;

            Dictionary <uint, frameXrefEntry> vidfXrefList = new Dictionary <uint, frameXrefEntry>();
            Dictionary <uint, frameXrefEntry> audfXrefList = new Dictionary <uint, frameXrefEntry>();
            MetadataContainer metadataContainer            = new MetadataContainer();
            uint highestFrameNumber = 0;

            for (int blockIndexPos = 0; blockIndexPos < BlockIndex.Length; blockIndexPos++)
            {
                var block = BlockIndex[blockIndexPos];

                Reader[block.fileNumber].BaseStream.Position = block.position;

                /* 16 bytes are enough for size, type and timestamp; the buffer is sized
                   generously for the partial block read below */
                byte[] buf = new byte[1024];

                /* read MLV block header; stop indexing on a truncated file */
                if (Reader[block.fileNumber].Read(buf, 0, 16) != 16)
                {
                    break;
                }

                uint   size      = BitConverter.ToUInt32(buf, 4);
                string type      = Encoding.UTF8.GetString(buf, 0, 4);
                UInt64 timestamp = BitConverter.ToUInt64(buf, 8);

                /* rewind and re-read the whole block, up to 256 byte */
                Reader[block.fileNumber].BaseStream.Position = block.position;

                int readSize = (int)Math.Min(size, 256);
                if (Reader[block.fileNumber].Read(buf, 0, readSize) != readSize)
                {
                    break;
                }

                object blockData = MLVTypes.ToStruct(buf);

                switch (type)
                {
                case "NULL":
                    /* padding block, nothing to index */
                    continue;

                case "VIDF":
                {
                    MLVTypes.mlv_vidf_hdr_t header = (MLVTypes.mlv_vidf_hdr_t)blockData;
                    if (!vidfXrefList.ContainsKey(header.frameNumber))
                    {
                        frameXrefEntry entry = new frameXrefEntry();
                        entry.blockIndexPos = blockIndexPos;
                        /* snapshot reference of the metadata valid at this frame
                           (NOTE(review): shared reference — assumes MetadataContainer
                           hands out an immutable/copied state; verify) */
                        entry.metadata      = metadataContainer.Metadata;
                        entry.timestamp     = timestamp;
                        vidfXrefList.Add(header.frameNumber, entry);
                    }
                    else
                    {
                        /* same frame number appeared twice */
                        FrameRedundantErrors++;
                    }
                    highestFrameNumber = Math.Max(highestFrameNumber, header.frameNumber);
                }
                break;

                case "AUDF":
                {
                    MLVTypes.mlv_audf_hdr_t header = (MLVTypes.mlv_audf_hdr_t)blockData;
                    if (!audfXrefList.ContainsKey(header.frameNumber))
                    {
                        frameXrefEntry entry = new frameXrefEntry();
                        entry.blockIndexPos = blockIndexPos;
                        entry.metadata      = metadataContainer.Metadata;
                        entry.timestamp     = timestamp;
                        audfXrefList.Add(header.frameNumber, entry);
                    }
                    else
                    {
                        FrameRedundantErrors++;
                    }
                }
                break;

                default:
                    /* every other block type updates the running metadata state */
                    metadataContainer.Update(type, blockData);
                    break;
                }
            }

            /* count the number of missing video frames */
            uint curFrame = 0;

            foreach (var elem in vidfXrefList.OrderBy(elem => elem.Key))
            {
                /* fixed: count every skipped frame number — previously a gap of any size
                   was counted as a single missing frame */
                while (curFrame < elem.Key)
                {
                    FrameMissingErrors++;
                    curFrame++;
                }
                curFrame++;
            }

            VidfXrefList            = vidfXrefList;
            AudfXrefList            = audfXrefList;
            TotalVideoFrameCount    = (uint)vidfXrefList.Count;
            HighestVideoFrameNumber = highestFrameNumber;
        }