Example #1
        internal static uint GetSize(string mlvFileName, MLVTypes.mlv_vidf_hdr_t vidfHeader, byte[] pixelData, object[] metadata)
        {
            try
            {
                frame_headers dngData = CreateDngData(vidfHeader, metadata);

                uint headerSize = dng_get_header_size(ref dngData);
                uint imageSize = dng_get_image_size(ref dngData);
                byte[] headerData = new byte[headerSize];
                uint headerSizeReal = dng_get_header_data(ref dngData, headerData, 0, headerSize);

                uint totalSize = headerSizeReal + imageSize;

                return totalSize;
            }
            catch (Exception)
            {
                /* on any failure, report a size of zero */
                return 0;
            }
        }
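
A minimal usage sketch pairing the GetSize helper above with the Create method in the next example; the DNG class name, the TryExtractFrame wrapper and its error handling are assumptions for illustration, not part of the original source:

        /* hypothetical caller, assuming GetSize/Create live in a class named DNG
           and that vidfHeader/metadata were collected while parsing the MLV blocks */
        internal static byte[] TryExtractFrame(string mlvFileName, MLVTypes.mlv_vidf_hdr_t vidfHeader, byte[] frameData, object[] metadata)
        {
            uint expectedSize = DNG.GetSize(mlvFileName, vidfHeader, frameData, metadata);
            if (expectedSize == 0)
            {
                /* GetSize reports zero when the DNG headers could not be built */
                return null;
            }
            return DNG.Create(mlvFileName, vidfHeader, frameData, metadata);
        }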
Example #2
        internal static byte[] Create(string mlvFileName, MLVTypes.mlv_vidf_hdr_t vidfHeader, byte[] inData, object[] metadata)
        {
            frame_headers dngData = CreateDngData(vidfHeader, metadata);
            uint dngHeaderSize = dng_get_header_size(ref dngData);
            /* the DNG image data is stored as 16 bits per pixel, the packed raw input uses the bit depth declared in RAWI */
            uint dngImageDataSize = (uint)(dngData.rawi_hdr.xRes * dngData.rawi_hdr.yRes * 16 / 8);
            uint rawImageDataSize = (uint)(dngData.rawi_hdr.xRes * dngData.rawi_hdr.yRes * dngData.rawi_hdr.raw_info.bits_per_pixel / 8);

            byte[] imageData = new byte[dngHeaderSize + dngImageDataSize];

            if (rawImageDataSize > inData.Length)
            {
                throw new InvalidDataException("Raw data has only " + inData.Length + " bytes instead of " + rawImageDataSize + " bytes declared by RAWI info");
            }

            uint headerSizeReal = dng_get_header_data(ref dngData, imageData, 0, dngHeaderSize);

            DateTime start = DateTime.Now;
            uint dataSizeReal = dng_get_image_data(ref dngData, inData, imageData, -(int)headerSizeReal, dngImageDataSize);

            ProcessingTime = (DateTime.Now - start).TotalMilliseconds;

            return imageData;
        }
Example #3
        public DateTime ParseRtci(MLVTypes.mlv_rtci_hdr_t rtci)
        {
            ushort tm_year = (ushort)(rtci.tm_year + 1900);
            ushort tm_mon = rtci.tm_mon;
            ushort tm_mday = rtci.tm_mday;
            ushort tm_hour = rtci.tm_hour;
            ushort tm_min = rtci.tm_min;
            ushort tm_sec = rtci.tm_sec;

            if (tm_year > 1900 && tm_mon > 0 && tm_mday > 0)
            {
                try
                {
                    return new DateTime(tm_year, tm_mon, tm_mday, tm_hour, tm_min, tm_sec);
                }
                catch (Exception)
                {
                    /* invalid date fields, fall back to the current time below */
                }
            }

            return DateTime.Now;
        }
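
A short usage sketch for ParseRtci; the handler instance and its RtciHeader field are assumptions for illustration:

     /* hypothetical usage, assuming RtciHeader was stored by an earlier RTCI block handler */
     DateTime recordingStart = handler.ParseRtci(handler.RtciHeader);
     Console.WriteLine("Recording started: " + recordingStart.ToString("yyyy-MM-dd HH:mm:ss"));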
Example #4
        public void HandleBlock(string type, MLVTypes.mlv_vidf_hdr_t header, byte[] rawData, int rawPos, int rawLength)
        {
            VidfHeader = header;

            if (FileHeader.videoClass != 0x01 || LockBitmap == null)
            {
                return;
            }

            if (!VideoEnabled)
            {
                return;
            }

            lock (this)
            {
                int frameSpace = (int)Math.Max(header.frameSpace - RawFixOffset, 0);
                int startPos = rawPos + frameSpace;
                int length = rawLength - frameSpace;

                if (!DebayeringEnabled)
                {
                    RawPixelData = new byte[length];
                    Array.Copy(rawData, startPos, RawPixelData, 0, length);
                    return;
                }

                /* first extract the raw channel values */
                Bitunpack.Process(rawData, startPos, rawLength, PixelData);

                /* then debayer the pixel data */
                Debayer.Process(PixelData, RGBData);

                /* and transform into a bitmap for displaying */
                LockBitmap.LockBits();

                int pos = 0;
                uint[] average = new uint[] { 0, 0, 0 };

                for (int y = 0; y < RawiHeader.yRes; y++)
                {
                    for (int x = 0; x < RawiHeader.xRes; x++)
                    {
                        float r = RGBData[y, x, 0];
                        float g = RGBData[y, x, 1];
                        float b = RGBData[y, x, 2];

                        if (ColorLut != null)
                        {
                            ColorLut.Lookup(r, g, b, out r, out g, out b);
                        }

                        /* now scale to TV black/white levels */
                        ScaleLevels(ref r);
                        ScaleLevels(ref g);
                        ScaleLevels(ref b);

                        average[0] += (uint)g;
                        average[1] += (uint)b;
                        average[2] += (uint)r;

                        /* store the scaled values as a BGR byte triplet */
                        LockBitmap.Pixels[pos++] = (byte)b;
                        LockBitmap.Pixels[pos++] = (byte)g;
                        LockBitmap.Pixels[pos++] = (byte)r;
                    }
                }
                LockBitmap.UnlockBits();

                int pixels = RawiHeader.yRes * RawiHeader.xRes;

                /* make sure the average brightness is somewhere in the mid range */
                if (Math.Abs(_ExposureCorrection) == 0.0f)
                {
                    double averageBrightness = (average[0] + average[1] + average[2]) / (3.0 * pixels);
                    if (averageBrightness < 100)
                    {
                        Debayer.Brightness *= 1.0f + (float)(100.0f - averageBrightness) / 100.0f;
                    }
                    if (averageBrightness > 200)
                    {
                        Debayer.Brightness /= 1.0f + (float)(averageBrightness - 200.0f) / 55.0f;
                    }
                }
                else
                {
                    Debayer.Brightness = (float)Math.Pow(2, _ExposureCorrection);
                }

                FrameUpdated = true;
            }
        }
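
ScaleLevels is referenced above but not included in these examples. A plausible sketch of what it might do, assuming (this is a guess based on the surrounding comments, not the original code) that it maps values into the 16..235 TV range and clamps them so the byte casts above cannot wrap:

        /* hypothetical helper, not from the original source: map a 0..255 value into
           the 16..235 TV range and clamp it to the displayable byte range */
        private void ScaleLevels(ref float value)
        {
            value = 16.0f + value * (235.0f - 16.0f) / 255.0f;
            value = Math.Max(0.0f, Math.Min(255.0f, value));
        }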
Example #5
 public void HandleBlock(string type, MLVTypes.mlv_audf_hdr_t header, byte[] rawData, int rawPos, int rawLength)
 {
     AudfHeader = header;
     if (WaveProvider != null)
     {
         /* block until the playback buffer has drained enough to accept this frame's samples */
         while (WaveProvider.BufferedBytes + (int)(rawLength - header.frameSpace) > WaveProvider.BufferLength)
         {
             Thread.Sleep(100);
         }
         WaveProvider.AddSamples(rawData, (int)(rawPos + header.frameSpace), (int)(rawLength - header.frameSpace));
     }
 }
Example #6
        public void HandleBlock(string type, MLVTypes.mlv_rawi_hdr_t header, byte[] raw_data, int raw_pos, int raw_length)
        {
            RawiHeader = header;

            if (FileHeader.videoClass != 0x01)
            {
                return;
            }

            /* color_matrix1 is stored as numerator/denominator pairs */
            for (int pos = 0; pos < camMatrix.Length; pos++)
            {
                camMatrix[pos] = (float)header.raw_info.color_matrix1[2 * pos] / (float)header.raw_info.color_matrix1[2 * pos + 1];
            }

            Bitunpack.BitsPerPixel = header.raw_info.bits_per_pixel;

            Debayer.Saturation = 0.12f;
            Debayer.Brightness = 1;
            Debayer.BlackLevel = header.raw_info.black_level;
            Debayer.WhiteLevel = header.raw_info.white_level;
            Debayer.CamMatrix = camMatrix;

            /* simple fix to overcome an mlv_dump misbehavior: it doesn't scale white and black levels when changing bit depth */
            while (Debayer.WhiteLevel > (1 << header.raw_info.bits_per_pixel))
            {
                Debayer.BlackLevel >>= 1;
                Debayer.WhiteLevel >>= 1;
            }

            PixelData = new ushort[header.yRes, header.xRes];
            RGBData = new float[header.yRes, header.xRes, 3];

            CurrentFrame = new System.Drawing.Bitmap(RawiHeader.xRes, RawiHeader.yRes, PixelFormat.Format24bppRgb);
            LockBitmap = new LockBitmap(CurrentFrame);
        }
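
As a worked example of that level fix: a clip that mlv_dump re-packed to 12 bits per pixel but that still carries a 14-bit white level of 15000 enters the loop (15000 > 1 << 12 = 4096), is halved to 7500 and then to 3750, at which point both levels fit the 12-bit range.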
Example #7
        public void HandleBlock(string type, MLVTypes.mlv_wavi_hdr_t header, byte[] raw_data, int raw_pos, int raw_length)
        {
            WaviHeader = header;
            if (DriverOut != null)
            {
                DriverOut.Stop();
            }
            try
            {
                DriverOut = new WaveOut();
                DriverOut.DesiredLatency = 100;

                WaveFormat fmt = new WaveFormat((int)header.samplingRate, header.bitsPerSample, header.channels);
                WaveProvider = new BufferedWaveProvider(fmt);
                WaveProvider.BufferLength = 256 * 1024;

                DriverOut.Init(WaveProvider);
                DriverOut.Play();
            }
            catch (Exception ex)
            {
                Console.WriteLine("No audio support on this platform (" + ex.ToString() + ")");
            }
        }
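
WaveOut, BufferedWaveProvider and WaveFormat here match NAudio's API. A hedged sketch of the matching teardown such a handler would typically need; the StopAudio name and the Dispose call are assumptions, not shown in the original:

        /* hypothetical cleanup, assuming NAudio's WaveOut/BufferedWaveProvider as used above */
        public void StopAudio()
        {
            if (DriverOut != null)
            {
                DriverOut.Stop();
                DriverOut.Dispose();
                DriverOut = null;
            }
            WaveProvider = null;
        }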
Example #8
 public void HandleBlock(string type, MLVTypes.mlv_info_hdr_t header, byte[] raw_data, int raw_pos, int raw_length)
 {
     InfoString = Encoding.ASCII.GetString(raw_data, raw_pos, raw_length);
 }
Example #9
 public void HandleBlock(string type, MLVTypes.mlv_wbal_hdr_t header, byte[] raw_data, int raw_pos, int raw_length)
 {
     WbalHeader = header;
 }
Example #10
        private static frame_headers CreateDngData(MLVTypes.mlv_vidf_hdr_t vidfHeader, object[] metadata)
        {
            frame_headers dngData = new frame_headers();

            foreach (var obj in metadata)
            {
                if (obj.GetType() == typeof(MLVTypes.mlv_file_hdr_t))
                {
                    dngData.file_hdr = (MLVTypes.mlv_file_hdr_t)obj;
                }
                else if (obj.GetType() == typeof(MLVTypes.mlv_rtci_hdr_t))
                {
                    dngData.rtci_hdr = (MLVTypes.mlv_rtci_hdr_t)obj;
                }
                else if (obj.GetType() == typeof(MLVTypes.mlv_idnt_hdr_t))
                {
                    dngData.idnt_hdr = (MLVTypes.mlv_idnt_hdr_t)obj;
                }
                else if (obj.GetType() == typeof(MLVTypes.mlv_rawi_hdr_t))
                {
                    dngData.rawi_hdr = (MLVTypes.mlv_rawi_hdr_t)obj;
                }
                else if (obj.GetType() == typeof(MLVTypes.mlv_expo_hdr_t))
                {
                    dngData.expo_hdr = (MLVTypes.mlv_expo_hdr_t)obj;
                }
                else if (obj.GetType() == typeof(MLVTypes.mlv_lens_hdr_t))
                {
                    dngData.lens_hdr = (MLVTypes.mlv_lens_hdr_t)obj;
                }
                else if (obj.GetType() == typeof(MLVTypes.mlv_wbal_hdr_t))
                {
                    dngData.wbal_hdr = (MLVTypes.mlv_wbal_hdr_t)obj;
                }
                    /*
                else
                {
                    switch ((String)obj.blockType)
                    {
                        case "RTCI":
                            dngData.rtci_hdr = obj;
                            break;
                        case "IDNT":
                            dngData.idnt_hdr = obj;
                            break;
                        case "RAWI":
                            dngData.rawi_hdr = obj;
                            break;
                        case "EXPO":
                            dngData.expo_hdr = obj;
                            break;
                        case "LENS":
                            dngData.lens_hdr = obj;
                            break;
                        case "WBAL":
                            dngData.wbal_hdr = obj;
                            break;
                    }
                }*/
            }
            dngData.vidf_hdr = vidfHeader;

            return dngData;
        }
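
The type-dispatch chain above can also be expressed with pattern matching; a minimal sketch of the equivalent, assuming C# 7.0 or later and the same frame_headers fields (the AssignHeader name is illustrative):

        /* equivalent dispatch using type patterns, assuming C# 7.0+ */
        private static void AssignHeader(ref frame_headers dngData, object obj)
        {
            switch (obj)
            {
                case MLVTypes.mlv_file_hdr_t hdr: dngData.file_hdr = hdr; break;
                case MLVTypes.mlv_rtci_hdr_t hdr: dngData.rtci_hdr = hdr; break;
                case MLVTypes.mlv_idnt_hdr_t hdr: dngData.idnt_hdr = hdr; break;
                case MLVTypes.mlv_rawi_hdr_t hdr: dngData.rawi_hdr = hdr; break;
                case MLVTypes.mlv_expo_hdr_t hdr: dngData.expo_hdr = hdr; break;
                case MLVTypes.mlv_lens_hdr_t hdr: dngData.lens_hdr = hdr; break;
                case MLVTypes.mlv_wbal_hdr_t hdr: dngData.wbal_hdr = hdr; break;
            }
        }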