Example #1
0
        /// <summary>
        /// Computes the total size in bytes of the DNG (header plus image data) that
        /// would be created for the given VIDF frame, without building the image data.
        /// </summary>
        /// <param name="mlvFileName">Path of the source MLV file (currently unused here).</param>
        /// <param name="vidfHeader">VIDF block header of the frame.</param>
        /// <param name="pixelData">Raw pixel data of the frame (currently unused here).</param>
        /// <param name="metadata">Metadata block headers used to build the DNG headers.</param>
        /// <returns>Total DNG size in bytes, or 0 if the size could not be determined.</returns>
        internal static uint GetSize(string mlvFileName, MLVTypes.mlv_vidf_hdr_t vidfHeader, byte[] pixelData, object[] metadata)
        {
            try
            {
                frame_headers dngData = CreateDngData(vidfHeader, metadata);

                /* render the header once because dng_get_header_size only returns a
                   buffer size; dng_get_header_data reports the real header length */
                uint   headerSize     = dng_get_header_size(ref dngData);
                uint   imageSize      = dng_get_image_size(ref dngData);
                byte[] headerData     = new byte[headerSize];
                uint   headerSizeReal = dng_get_header_data(ref dngData, headerData, 0, headerSize);

                return(headerSizeReal + imageSize);
            }
            catch (Exception)
            {
                /* best-effort: presumably callers treat 0 as "size unknown".
                   NOTE(review): the exception is swallowed silently -- consider logging it. */
                return(0);
            }
        }
Example #2
0
        /// <summary>
        /// Creates a complete DNG file (header plus 16 bpp image data) for the given
        /// VIDF frame from the raw sensor data and metadata.
        /// Updates <c>ProcessingTime</c> with the image conversion duration in milliseconds.
        /// </summary>
        /// <param name="mlvFileName">Path of the source MLV file (currently unused here).</param>
        /// <param name="vidfHeader">VIDF block header of the frame.</param>
        /// <param name="inData">Packed raw sensor data of the frame.</param>
        /// <param name="metadata">Metadata block headers used to build the DNG headers.</param>
        /// <returns>The DNG file content as a byte array.</returns>
        /// <exception cref="InvalidDataException">Thrown when inData is smaller than the RAWI header declares.</exception>
        internal static byte[] Create(string mlvFileName, MLVTypes.mlv_vidf_hdr_t vidfHeader, byte[] inData, object[] metadata)
        {
            frame_headers dngData       = CreateDngData(vidfHeader, metadata);
            uint          dngHeaderSize = dng_get_header_size(ref dngData);

            /* output is always 16 bits per pixel; input uses the packed bit depth from RAWI */
            uint dngImageDataSize = (uint)(dngData.rawi_hdr.xRes * dngData.rawi_hdr.yRes * 16 / 8);
            uint rawImageDataSize = (uint)(dngData.rawi_hdr.xRes * dngData.rawi_hdr.yRes * dngData.rawi_hdr.raw_info.bits_per_pixel / 8);

            /* validate before allocating the output buffer */
            if (rawImageDataSize > inData.Length)
            {
                throw new InvalidDataException("Raw data has only " + inData.Length + " bytes instead of " + rawImageDataSize + " bytes declared by RAWI info");
            }

            byte[] imageData = new byte[dngHeaderSize + dngImageDataSize];

            uint headerSizeReal = dng_get_header_data(ref dngData, imageData, 0, dngHeaderSize);

            /* use a monotonic clock for elapsed-time measurement; DateTime.Now can
               jump when the system clock changes */
            var watch = System.Diagnostics.Stopwatch.StartNew();

            dng_get_image_data(ref dngData, inData, imageData, -(int)headerSizeReal, dngImageDataSize);

            watch.Stop();
            ProcessingTime = watch.Elapsed.TotalMilliseconds;

            return(imageData);
        }
Example #3
0
        /// <summary>
        /// Returns the byte content of a virtual file: an info text, WAV or MJPEG
        /// stream, or a single video frame rendered as DNG, FITS or JPG.
        /// Serves from the prefetch cache when possible and triggers prefetching of
        /// the following frames.
        /// </summary>
        /// <param name="mlvFileName">Path of the source MLV file.</param>
        /// <param name="content">Name of the virtual file being requested.</param>
        /// <param name="prefetchCount">Number of subsequent frames to prefetch.</param>
        /// <returns>The requested content as a byte array.</returns>
        /// <exception cref="FileNotFoundException">Frame not in the index or type unsupported.</exception>
        /// <exception cref="InvalidDataException">Index pointed to a block that contained no VIDF data.</exception>
        public static byte[] GetDataStream(string mlvFileName, string content, int prefetchCount)
        {
            MLVCachedReader cache = GetReader(mlvFileName);
            eFileType       type  = GetFileType(content);

            /* serve from cache if this file was already produced */
            lock (cache.cachedFiles)
            {
                if (cache.cachedFiles.ContainsKey(content))
                {
                    PrefetchNext(mlvFileName, content, prefetchCount - 1);

                    cache.cachedFiles[content].lastUseTime = DateTime.Now;

                    return(cache.cachedFiles[content].bufferedData);
                }
            }

            /* non-frame types can be answered without seeking the video stream */
            switch (type)
            {
            case eFileType.Txt:
                string info = GetInfoFields(mlvFileName).Aggregate <string>(addString);
                return(ASCIIEncoding.ASCII.GetBytes(info));

            case eFileType.Wav:
                return(GetWaveDataStream(mlvFileName));

            case eFileType.MJpeg:
                return(GetMJpegDataStream(mlvFileName));
            }

            int frame = GetFrameNumber(mlvFileName, content);

            /* read video frame with/without debayering depending on filetype */
            switch (type)
            {
            case eFileType.Dng:
            case eFileType.Fits:
                cache.handler.DebayeringEnabled = false;
                break;

            case eFileType.Jpg:
                cache.handler.DebayeringEnabled = true;
                break;
            }

            byte[] data         = null;
            Bitmap currentFrame = null;

            MLVTypes.mlv_vidf_hdr_t vidfHeader = new MLVTypes.mlv_vidf_hdr_t();

            /* seek to the correct block */
            int block = cache.reader.GetVideoFrameBlockNumber((uint)frame);

            if (block < 0)
            {
                throw new FileNotFoundException("Requested video frame " + frame + " but thats not in the file index");
            }

            /* ensure that multiple threads dont conflict */
            lock (cache)
            {
                /* read it */
                cache.reader.CurrentBlockNumber    = block;
                cache.handler.VidfHeader.blockSize = 0;
                cache.reader.ReadBlock();

                /* now the VIDF should be read correctly */
                if (cache.handler.VidfHeader.blockSize == 0)
                {
                    throw new InvalidDataException("Requested video frame " + frame + " but the index points us wrong");
                }

                /* get all data we need while holding the lock */
                switch (type)
                {
                case eFileType.Dng:
                case eFileType.Fits:
                    data       = cache.handler.RawPixelData;
                    vidfHeader = cache.handler.VidfHeader;
                    break;

                case eFileType.Jpg:
                    if (cache.handler.CurrentFrame != null)
                    {
                        currentFrame = new Bitmap(cache.handler.CurrentFrame);
                    }
                    break;
                }
            }

            /* process it */
            switch (type)
            {
            case eFileType.Fits:
            {
                object[] metadata = cache.reader.GetVideoFrameMetadata((uint)frame);
                byte[]   stream   = FITSCreator.Create(mlvFileName, vidfHeader, data, metadata);

                /* cache under its own FITS type (was mistakenly saved as Dng before) */
                PrefetchSave(mlvFileName, content, eFileType.Fits, frame, stream);
                PrefetchNext(mlvFileName, content, prefetchCount);

                return(stream);
            }

            case eFileType.Dng:
            {
                object[] metadata = cache.reader.GetVideoFrameMetadata((uint)frame);
                byte[]   stream   = DNGCreator.Create(mlvFileName, vidfHeader, data, metadata);

                PrefetchSave(mlvFileName, content, eFileType.Dng, frame, stream);
                PrefetchNext(mlvFileName, content, prefetchCount);

                return(stream);
            }

            case eFileType.Jpg:
            {
                JpegBitmapEncoder encoder = new JpegBitmapEncoder();
                byte[]            buffer;

                encoder.QualityLevel = 90;

                using (MemoryStream stream = new MemoryStream())
                {
                    if (currentFrame != null)
                    {
                        encoder.Frames.Add(BitmapFrame.Create(BitmapSourceFromBitmap(currentFrame)));
                        encoder.Save(stream);
                    }

                    /* NOTE(review): when no frame was decoded yet, an empty buffer is
                       returned (and cached) -- confirm this is intended */
                    buffer = stream.ToArray();
                }

                PrefetchSave(mlvFileName, content, eFileType.Jpg, frame, buffer);
                PrefetchNext(mlvFileName, content, prefetchCount);

                return(buffer);
            }
            }

            throw new FileNotFoundException("Requested frame " + frame + " of type " + type + " but we dont support that");
        }
        /// <summary>
        /// Handles a decoded VIDF block: stores the raw pixel data, or -- when
        /// debayering is enabled -- unpacks, debayers and renders the frame into
        /// <c>LockBitmap</c> and auto-adjusts the debayer brightness.
        /// </summary>
        /// <param name="type">Block type string; not used inside this method.</param>
        /// <param name="header">VIDF header of the block being handled.</param>
        /// <param name="rawData">Buffer containing the frame's raw data.</param>
        /// <param name="rawPos">Offset of the frame data within rawData.</param>
        /// <param name="rawLength">Length of the frame data in bytes.</param>
        public void HandleBlock(string type, MLVTypes.mlv_vidf_hdr_t header, byte[] rawData, int rawPos, int rawLength)
        {
            VidfHeader = header;

            /* only handle video class 1 and only when there is a bitmap to render into */
            if (FileHeader.videoClass != 0x01 || LockBitmap == null)
            {
                return;
            }

            if (!VideoEnabled)
            {
                return;
            }

            /* NOTE(review): lock(this) is discouraged -- a private lock object would
               avoid deadlocks with external code locking the same instance */
            lock (this)
            {
                /* skip the frame-space padding, compensated by RawFixOffset */
                int frameSpace = (int)Math.Max(header.frameSpace - RawFixOffset, 0);
                int startPos = rawPos + frameSpace;
                int length = rawLength - frameSpace;

                if (!DebayeringEnabled)
                {
                    /* raw mode: just keep a copy of the packed pixel data */
                    RawPixelData = new byte[length];
                    Array.Copy(rawData, startPos, RawPixelData, 0, length);
                    return;
                }

                /* first extract the raw channel values */
                /* NOTE(review): this passes rawLength although the raw path above uses
                   length (rawLength - frameSpace) -- confirm frameSpace should not be
                   subtracted here as well */
                Bitunpack.Process(rawData, startPos, rawLength, PixelData);

                /* then debayer the pixel data */
                Debayer.Process(PixelData, RGBData);

                /* and transform into a bitmap for displaying */
                LockBitmap.LockBits();

                int pos = 0;
                uint[] average = new uint[] { 0, 0, 0 };

                for (int y = 0; y < RawiHeader.yRes; y++)
                {
                    for (int x = 0; x < RawiHeader.xRes; x++)
                    {
                        float r = RGBData[y, x, 0];
                        float g = RGBData[y, x, 1];
                        float b = RGBData[y, x, 2];

                        /* optional color lookup table */
                        if (ColorLut != null)
                        {
                            ColorLut.Lookup(r, g, b, out r, out g, out b);
                        }

                        /* now scale to TV black/white levels */
                        ScaleLevels(ref r);
                        ScaleLevels(ref g);
                        ScaleLevels(ref b);

                        /* NOTE(review): channel/index pairing looks swapped (g->0, b->1,
                           r->2), but only the sum of all three is used below, so it is
                           harmless as written */
                        average[0] += (uint)g;
                        average[1] += (uint)b;
                        average[2] += (uint)r;

                        /* limit RGB values; bitmap expects BGR byte order */
                        LockBitmap.Pixels[pos++] = (byte)b;
                        LockBitmap.Pixels[pos++] = (byte)g;
                        LockBitmap.Pixels[pos++] = (byte)r;
                    }
                }
                LockBitmap.UnlockBits();

                int pixels = RawiHeader.yRes * RawiHeader.xRes;

                /* make sure the average brightness is somewhere in the mid range */
                if (Math.Abs(_ExposureCorrection) == 0.0f)
                {
                    /* NOTE(review): uint arithmetic -- the division truncates before the
                       result is widened to double; confirm whether 3.0 * pixels was meant */
                    double averageBrightness = (average[0] + average[1] + average[2]) / (3 * pixels);
                    if (averageBrightness < 100)
                    {
                        Debayer.Brightness *= 1.0f + (float)(100.0f - averageBrightness) / 100.0f;
                    }
                    if (averageBrightness > 200)
                    {
                        Debayer.Brightness /= 1.0f + (float)(averageBrightness - 200.0f) / 55.0f;
                    }
                }
                else
                {
                    /* manual exposure correction overrides the automatic adjustment */
                    Debayer.Brightness = (float)Math.Pow(2,_ExposureCorrection);
                }

                FrameUpdated = true;
            }
        }
        /// <summary>
        /// Returns the byte content of a virtual file: an info text, WAV or MJPEG
        /// stream, or a single video frame rendered as DNG, FITS or JPG.
        /// Serves from the prefetch cache when possible and triggers prefetching of
        /// the following frames.
        /// </summary>
        /// <param name="mlvFileName">Path of the source MLV file.</param>
        /// <param name="content">Name of the virtual file being requested.</param>
        /// <param name="prefetchCount">Number of subsequent frames to prefetch.</param>
        /// <returns>The requested content as a byte array.</returns>
        /// <exception cref="FileNotFoundException">Frame not in the index or type unsupported.</exception>
        /// <exception cref="InvalidDataException">Index pointed to a block that contained no VIDF data.</exception>
        public static byte[] GetDataStream(string mlvFileName, string content, int prefetchCount)
        {
            MLVCachedReader cache = GetReader(mlvFileName);
            eFileType type = GetFileType(content);

            /* serve from cache if this file was already produced */
            lock (cache.cachedFiles)
            {
                if (cache.cachedFiles.ContainsKey(content))
                {
                    PrefetchNext(mlvFileName, content, prefetchCount - 1);

                    cache.cachedFiles[content].lastUseTime = DateTime.Now;

                    return cache.cachedFiles[content].bufferedData;
                }
            }

            /* non-frame types can be answered without seeking the video stream */
            switch(type)
            {
                case eFileType.Txt:
                    string info = GetInfoFields(mlvFileName).Aggregate<string>(addString);
                    return ASCIIEncoding.ASCII.GetBytes(info);

                case eFileType.Wav:
                    return GetWaveDataStream(mlvFileName);

                case eFileType.MJpeg:
                    return GetMJpegDataStream(mlvFileName);
            }

            int frame = GetFrameNumber(mlvFileName, content);

            /* read video frame with/without debayering depending on filetype */
            switch (type)
            {
                case eFileType.Dng:
                case eFileType.Fits:
                    cache.handler.DebayeringEnabled = false;
                    break;
                case eFileType.Jpg:
                    cache.handler.DebayeringEnabled = true;
                    break;
            }

            byte[] data = null;
            Bitmap currentFrame = null;
            MLVTypes.mlv_vidf_hdr_t vidfHeader = new MLVTypes.mlv_vidf_hdr_t();

            /* seek to the correct block */
            int block = cache.reader.GetVideoFrameBlockNumber((uint)frame);
            if (block < 0)
            {
                throw new FileNotFoundException("Requested video frame " + frame + " but thats not in the file index");
            }

            /* ensure that multiple threads dont conflict */
            lock (cache)
            {
                /* read it */
                cache.reader.CurrentBlockNumber = block;
                cache.handler.VidfHeader.blockSize = 0;
                cache.reader.ReadBlock();

                /* now the VIDF should be read correctly */
                if (cache.handler.VidfHeader.blockSize == 0)
                {
                    throw new InvalidDataException("Requested video frame " + frame + " but the index points us wrong");
                }

                /* get all data we need while holding the lock */
                switch (type)
                {
                    case eFileType.Dng:
                    case eFileType.Fits:
                        data = cache.handler.RawPixelData;
                        vidfHeader = cache.handler.VidfHeader;
                        break;

                    case eFileType.Jpg:
                        if (cache.handler.CurrentFrame != null)
                        {
                            currentFrame = new Bitmap(cache.handler.CurrentFrame);
                        }
                        break;
                }
            }

            /* process it */
            switch(type)
            {
                case eFileType.Fits:
                    {
                        object[] metadata = cache.reader.GetVideoFrameMetadata((uint)frame);
                        byte[] stream = FITSCreator.Create(mlvFileName, vidfHeader, data, metadata);

                        /* cache under its own FITS type (was mistakenly saved as Dng before) */
                        PrefetchSave(mlvFileName, content, eFileType.Fits, frame, stream);
                        PrefetchNext(mlvFileName, content, prefetchCount);

                        return stream;
                    }

                case eFileType.Dng:
                    {
                        object[] metadata = cache.reader.GetVideoFrameMetadata((uint)frame);
                        byte[] stream = DNGCreator.Create(mlvFileName, vidfHeader, data, metadata);

                        PrefetchSave(mlvFileName, content, eFileType.Dng, frame, stream);
                        PrefetchNext(mlvFileName, content, prefetchCount);

                        return stream;
                    }

                case eFileType.Jpg:
                    {
                        JpegBitmapEncoder encoder = new JpegBitmapEncoder();
                        byte[] buffer;

                        encoder.QualityLevel = 90;

                        using (MemoryStream stream = new MemoryStream())
                        {
                            if (currentFrame != null)
                            {
                                encoder.Frames.Add(BitmapFrame.Create(BitmapSourceFromBitmap(currentFrame)));
                                encoder.Save(stream);
                            }

                            /* NOTE(review): when no frame was decoded yet, an empty buffer
                               is returned (and cached) -- confirm this is intended */
                            buffer = stream.ToArray();
                        }

                        PrefetchSave(mlvFileName, content, eFileType.Jpg, frame, buffer);
                        PrefetchNext(mlvFileName, content, prefetchCount);

                        return buffer;
                    }
            }

            throw new FileNotFoundException("Requested frame " + frame + " of type " + type + " but we dont support that");
        }
Example #6
0
        /// <summary>
        /// Collects all known metadata block headers into a single frame_headers
        /// structure and attaches the frame's VIDF header.
        /// Unknown metadata entries are ignored; when a block type occurs more than
        /// once, the last occurrence wins.
        /// </summary>
        /// <param name="vidfHeader">VIDF header of the frame the DNG is created for.</param>
        /// <param name="metadata">Boxed metadata block headers read from the MLV file.</param>
        /// <returns>A populated frame_headers structure.</returns>
        private static frame_headers CreateDngData(MLVTypes.mlv_vidf_hdr_t vidfHeader, object[] metadata)
        {
            frame_headers dngData = new frame_headers();

            foreach (var obj in metadata)
            {
                /* 'is' checks replace the GetType() == typeof() chain: identical for
                   these boxed struct headers, cheaper, and null-safe */
                if (obj is MLVTypes.mlv_file_hdr_t)
                {
                    dngData.file_hdr = (MLVTypes.mlv_file_hdr_t)obj;
                }
                else if (obj is MLVTypes.mlv_rtci_hdr_t)
                {
                    dngData.rtci_hdr = (MLVTypes.mlv_rtci_hdr_t)obj;
                }
                else if (obj is MLVTypes.mlv_idnt_hdr_t)
                {
                    dngData.idnt_hdr = (MLVTypes.mlv_idnt_hdr_t)obj;
                }
                else if (obj is MLVTypes.mlv_rawi_hdr_t)
                {
                    dngData.rawi_hdr = (MLVTypes.mlv_rawi_hdr_t)obj;
                }
                else if (obj is MLVTypes.mlv_expo_hdr_t)
                {
                    dngData.expo_hdr = (MLVTypes.mlv_expo_hdr_t)obj;
                }
                else if (obj is MLVTypes.mlv_lens_hdr_t)
                {
                    dngData.lens_hdr = (MLVTypes.mlv_lens_hdr_t)obj;
                }
                else if (obj is MLVTypes.mlv_wbal_hdr_t)
                {
                    dngData.wbal_hdr = (MLVTypes.mlv_wbal_hdr_t)obj;
                }
            }
            dngData.vidf_hdr = vidfHeader;

            return(dngData);
        }