/* DNG Images are assumed to be decodable unless explicitly set so */
protected override void checkSupportInternal(CameraMetaData meta)
{
    // DNG cameras are not explicitly listed in the database, so an unknown
    // camera must not abort decoding.
    failOnUnknown = false;

    var makeTag = mRootIFD.getEntryRecursive(TagType.MAKE);
    var modelTag = mRootIFD.getEntryRecursive(TagType.MODEL);
    if (makeTag == null || modelTag == null)
    {
        // Fall back to "Unique Camera Model", which serves as both make and model.
        var uniqueTag = mRootIFD.getEntryRecursive(TagType.UNIQUECAMERAMODEL);
        if (uniqueTag == null)
        {
            // Without make/model we cannot tell, but still assume decodable.
            return;
        }
        string unique = uniqueTag.dataAsString;
        this.checkCameraSupported(meta, unique, unique, "dng");
        return;
    }

    // Use the IFD that carries the MODEL tag for both strings.
    // NOTE(review): getEntry(MAKE) on that same IFD is not null-checked —
    // assumes MAKE lives in the same IFD as MODEL; confirm against the parser.
    List<IFD> data = mRootIFD.getIFDsWithTag(TagType.MODEL);
    string make = data[0].getEntry(TagType.MAKE).dataAsString;
    string model = data[0].getEntry(TagType.MODEL).dataAsString;
    this.checkCameraSupported(meta, make, model, "dng");
}
/// <summary>
/// Extracts ISO speed, make and model from the TIFF tags and resolves the
/// canonical camera identification against the camera database.
/// </summary>
/// <param name="meta">Camera database used for the canonical name lookup.</param>
protected override void decodeMetaDataInternal(CameraMetaData meta)
{
    var t = mRootIFD.getEntryRecursive(TagType.ISOSPEEDRATINGS);
    if (t != null)
    {
        mRaw.metadata.isoSpeed = t.getInt();
    }

    // Set the make and model
    t = mRootIFD.getEntryRecursive(TagType.MAKE);
    var t2 = mRootIFD.getEntryRecursive(TagType.MODEL);
    // BUG FIX: the original condition was "t != null && t != null" (tested t
    // twice), so a missing MODEL tag caused a NullReferenceException on
    // t2.dataAsString below.
    if (t != null && t2 != null)
    {
        string make = t.dataAsString.Trim();
        string model = t2.dataAsString.Trim();
        mRaw.metadata.make = make;
        mRaw.metadata.model = model;

        Camera cam = meta.getCamera(make, model, "dng");
        if (cam == null) //Also look for non-DNG cameras in case it's a converted file
        {
            cam = meta.getCamera(make, model, "");
        }

        if (cam != null)
        {
            mRaw.metadata.canonical_make = cam.canonical_make;
            mRaw.metadata.canonical_model = cam.canonical_model;
            mRaw.metadata.canonical_alias = cam.canonical_alias;
            mRaw.metadata.canonical_id = cam.canonical_id;
        }
        else
        {
            // Unknown camera: fall back to the raw tag values, preferring the
            // UNIQUECAMERAMODEL tag for the canonical id when present.
            mRaw.metadata.canonical_make = make;
            mRaw.metadata.canonical_model = mRaw.metadata.canonical_alias = model;
            t = mRootIFD.getEntryRecursive(TagType.UNIQUECAMERAMODEL);
            mRaw.metadata.canonical_id = t != null ? t.dataAsString : make + " " + model;
        }
    }
}
/// <summary>
/// Public entry point for support checking; funnels the parser and I/O
/// exception types into a single RawDecoderException for callers.
/// </summary>
/// <param name="meta">Camera database passed to the decoder-specific check.</param>
/// <exception cref="RawDecoderException">On any parser or I/O failure.</exception>
public void checkSupport(CameraMetaData meta)
{
    try
    {
        checkSupportInternal(meta);
    }
    catch (Exception e) when (e is TiffParserException || e is FileIOException || e is IOException)
    {
        throw new RawDecoderException(e.Message);
    }
}
/// <summary>
/// Public entry point for metadata decoding; funnels the parser and I/O
/// exception types into a single RawDecoderException for callers.
/// </summary>
/// <param name="meta">Camera database passed to the decoder-specific step.</param>
/// <exception cref="RawDecoderException">On any parser or I/O failure.</exception>
public void decodeMetaData(CameraMetaData meta)
{
    try
    {
        decodeMetaDataInternal(meta);
    }
    catch (Exception e) when (e is TiffParserException || e is FileIOException || e is IOException)
    {
        throw new RawDecoderException(e.Message);
    }
}
/// <summary>
/// Looks the camera up in the database and records the (trimmed) make and
/// model on the image metadata.
/// </summary>
/// <param name="meta">Camera database to query.</param>
/// <param name="make">Camera make as read from the file.</param>
/// <param name="model">Camera model as read from the file.</param>
/// <param name="mode">Decoder mode string (e.g. "dng", or empty for default).</param>
/// <returns>True when the camera was found; false when unknown and decoding
/// proceeds on a best-effort basis.</returns>
/// <exception cref="RawDecoderException">When the camera is unknown and
/// failOnUnknown is set, is explicitly unsupported, or needs a newer decoder.</exception>
public bool checkCameraSupported(CameraMetaData meta, string make, string model, string mode)
{
    make = make.Trim();
    model = model.Trim();
    mRaw.metadata.make = make;
    mRaw.metadata.model = model;

    Camera cam = meta.getCamera(make, model, mode);
    if (cam == null)
    {
        // Only the default (empty) mode miss is logged; per-mode misses are expected.
        if (mode.Length == 0)
        {
            Debug.WriteLine("Unable to find camera in database: " + make + " " + model + " " + mode);
        }
        if (failOnUnknown)
        {
            throw new RawDecoderException("Camera " + make + " " + model + ", mode " + mode + " not supported, and not allowed to guess. Sorry.");
        }
        // Assume the camera can be decoded, but return false, so decoders can see that we are unsure.
        return false;
    }

    if (!cam.supported)
    {
        throw new RawDecoderException("Camera not supported (explicit). Sorry.");
    }
    if (cam.decoderVersion > decoderVersion)
    {
        throw new RawDecoderException("Camera not supported in this version. Update RawSpeed for support.");
    }

    hints = cam.hints;
    return true;
}
/// <summary>
/// Sniffs the container format of the input stream and returns a matching
/// decoder. Only the ordinary TIFF path and a JPEG fallback are active; the
/// other format probes (MRW, ARRI, FUJI, X3F, CIFF, CHDK) from the C++
/// original are still commented out below.
/// </summary>
/// <param name="meta">Camera database; currently unused by the active paths.</param>
/// <exception cref="Exception">When the file is too small to sniff.</exception>
/// <exception cref="FormatException">When no decoder recognises the data.</exception>
public RawDecoder getDecoder(CameraMetaData meta)
{
    // We need some data.
    // For now it is 104 bytes for RAF images.
    if (stream.Length <= 104)
    {
        throw new Exception("File too small");
    }
    // NOTE(review): the Read return value is not checked, and the stream
    // position is left advanced by 104 bytes before TiffParser runs below —
    // confirm TiffParser seeks to the start itself.
    byte[] data = new byte[105];
    stream.Read(data, 0, 104);
    /*
     * // MRW images are easy to check for, let's try that first
     * if (MrwDecoder::isMRW(mInput)) {
     * try
     * {
     * return new MrwDecoder(Math.Math.Min((put);
     * }
     * catch (RawDecoderException)
     * {
     * }
     * }*/
    /*
     * if (0 == memcmp(&data[0], "ARRI\x12\x34\x56\x78", 8))
     * {
     * try
     * {
     * return new AriDecoder(Math.Math.Min((put);
     * }
     * catch (RawDecoderException)
     * {
     * }
     * }*/
    // FUJI has pointers to IFD's at fixed byte offsets
    // So if camera is FUJI, we cannot use ordinary TIFF parser
    //get first 8 char and see if equal fuji
    /*
     * string dataAsString = System.Text.Encoding.UTF8.GetString(data.Take(8).ToArray());
     * if (dataAsString == "FUJIFILM")
     * {
     * // First IFD typically JPEG and EXIF
     * UInt32 first_ifd = (uint)(data[87] | (data[86] << 8) | (data[85] << 16) | (data[84] << 24));
     * first_ifd += 12;
     * if (stream.Length <= first_ifd)
     * throw new Exception("File too small (FUJI first IFD)");
     *
     * // RAW IFD on newer, pointer to raw data on older models, so we try parsing first
     * // And adds it as data if parsin fails
     * UInt32 second_ifd = (UInt32)(data[103] | (data[102] << 8) | (data[101] << 16) | (data[100] << 24));
     * if (stream.Length <= second_ifd)
     * second_ifd = 0;
     *
     * // RAW information IFD on older
     * UInt32 third_ifd = (uint)(data[95] | (data[94] << 8) | (data[93] << 16) | (data[92] << 24));
     * if (stream.Length <= third_ifd)
     * third_ifd = 0;
     *
     * // Open the IFDs and merge them
     * try
     * {
     * FileMap* m1 = new FileMap(Math.Math.Min((put, first_ifd);
     * FileMap* m2 = null;
     * TiffParser p(m1);
     * p.parseData();
     * if (second_ifd)
     * {
     * m2 = new FileMap(Math.Math.Min((put, second_ifd);
     * try
     * {
     * TiffParser p2(m2);
     * p2.parseData();
     * p.MergeIFD(&p2);
     * }
     * catch (TiffParserException e)
     * {
     * delete m2;
     * m2 = null;
     * }
     * }
     *
     * TiffIFD* new_ifd = new TiffIFD(Math.Math.Min((put);
     * p.RootIFD().mSubIFD.push_back(new_ifd);
     *
     * if (third_ifd)
     * {
     * try
     * {
     * ParseFuji(third_ifd, new_ifd);
     * }
     * catch (TiffParserException e)
     * {
     * }
     * }
     * // Make sure these aren't leaked.
     * RawDecoder* d = p.getDecoder();
     * d.ownedObjects.push_back(m1);
     * if (m2)
     * d.ownedObjects.push_back(m2);
     *
     * if (!m2 && second_ifd)
     * {
     * TiffEntry* entry = new TiffEntry(FUJI_STRIPOFFSETS, TIFF_LONG, 1);
     * entry.setData(&second_ifd, 4);
     * new_ifd.mEntry[entry.tag] = entry;
     * entry = new TiffEntry(FUJI_STRIPBYTECOUNTS, TIFF_LONG, 1);
     * UInt32 max_size = Math.Math.Min((put.getSize() - second_ifd;
     * entry.setData(&max_size, 4);
     * new_ifd.mEntry[entry.tag] = entry;
     * }
     * return d;
     * }
     * catch (TiffParserException) { }
     * throw new Exception("No decoder found. Sorry.");
     * }
     * */
    // Ordinary TIFF images
    try
    {
        TiffParser p = new TiffParser(stream);
        p.parseData();
        return (p.getDecoder());
    }
    catch (TiffParserException)
    {
    }
    /*
     * try
     * {
     * X3fParser parser(mInput);
     * return parser.getDecoder();
     * }
     * catch (RawDecoderException)
     * {
     * }*/
    /*
     * // CIFF images
     * try
     * {
     * CiffParser p(Math.Math.Min((put);
     * p.parseData();
     * return p.getDecoder();
     * }
     * catch (CiffParserException)
     * {
     * }
     */
    /*
     * // Detect camera on filesize (CHDK).
     * if (meta != null && meta.hasChdkCamera(Math.Min((put.getSize())) {
     * Camera* c = meta.getChdkCamera(Math.Min((put.getSize());
     *
     * try
     * {
     * return new NakedDecoder(Math.Math.Min((put, c);
     * }
     * catch (RawDecoderException)
     * {
     * }
     * }*/
    //try jpeg file
    // NOTE(review): this catch type looks odd for a JPEG parser — presumably
    // JPGParser construction can raise TiffParserException; verify.
    try
    {
        return (new JPGParser(new TIFFBinaryReader(stream)));
    }
    catch (TiffParserException)
    {
    }
    // File could not be decoded, so no further options for now.
    throw new FormatException("No decoder found. Sorry.");
}
/// <summary>
/// Decoder for "naked" raw files whose layout comes from the camera database
/// entry rather than from a container format.
/// </summary>
/// <param name="file">Input stream holding the raw data.</param>
/// <param name="c">Camera database entry describing this file's layout.</param>
/// <param name="meta">Camera database, forwarded to the base decoder.</param>
public NakedDecoder(ref Stream file, Camera c, CameraMetaData meta) : base(meta)
{
    this.reader = new TIFFBinaryReader(file);
    cam = c;
}
/// <summary>Support checking is not implemented for this decoder.</summary>
/// <exception cref="NotImplementedException">Always.</exception>
protected override void checkSupportInternal(CameraMetaData meta) => throw new NotImplementedException();
/// <summary>Metadata decoding is not implemented for this decoder.</summary>
/// <exception cref="NotImplementedException">Always.</exception>
protected override void decodeMetaDataInternal(CameraMetaData meta) => throw new NotImplementedException();
/// <summary>
/// Applies database-driven metadata to the decoded image: canonical names,
/// CFA pattern, crop, black/white levels and black areas for the given ISO.
/// </summary>
/// <param name="meta">Camera database to query.</param>
/// <param name="make">Camera make as read from the file.</param>
/// <param name="model">Camera model as read from the file.</param>
/// <param name="mode">Decoder mode string used for the lookup.</param>
/// <param name="iso_speed">ISO speed used to pick the sensor info entry.</param>
public void setMetaData(CameraMetaData meta, string make, string model, string mode, int iso_speed)
{
    mRaw.metadata.isoSpeed = iso_speed;
    make = make.Trim();
    model = model.Trim();
    Camera cam = meta.getCamera(make, model, mode);
    if (cam == null)
    {
        // Unknown camera: leave defaults in place and just log.
        Debug.WriteLine("ISO:" + iso_speed);
        Debug.WriteLine("Unable to find camera in database: " + make + " " + model + " " + mode + "\nPlease upload file to ftp.rawstudio.org, thanks!");
        return;
    }
    mRaw.cfa = cam.cfa;
    mRaw.metadata.canonical_make = cam.canonical_make;
    mRaw.metadata.canonical_model = cam.canonical_model;
    mRaw.metadata.canonical_alias = cam.canonical_alias;
    mRaw.metadata.canonical_id = cam.canonical_id;
    mRaw.metadata.make = make;
    mRaw.metadata.model = model;
    mRaw.metadata.mode = mode;
    if (applyCrop)
    {
        iPoint2D new_size = cam.cropSize;
        // If crop size is negative, use relative cropping
        if (new_size.x <= 0)
        {
            new_size.x = mRaw.dim.x - cam.cropPos.x + new_size.x;
        }
        if (new_size.y <= 0)
        {
            new_size.y = mRaw.dim.y - cam.cropPos.y + new_size.y;
        }
        mRaw.subFrame(new iRectangle2D(cam.cropPos, new_size));
        // Shift CFA to match crop
        mRaw.UncroppedCfa = new ColorFilterArray(mRaw.cfa);
        // NOTE(review): an odd crop offset shifts the CFA pattern, but the
        // shift amount passed is 0 — confirm shiftLeft(0)/shiftDown(0) means
        // "shift by one / default" in ColorFilterArray, not a no-op.
        if ((cam.cropPos.x & 1) != 0)
        {
            mRaw.cfa.shiftLeft(0);
        }
        if ((cam.cropPos.y & 1) != 0)
        {
            mRaw.cfa.shiftDown(0);
        }
    }
    CameraSensorInfo sensor = cam.getSensorInfo(iso_speed);
    mRaw.blackLevel = sensor.blackLevel;
    mRaw.whitePoint = (uint)sensor.whiteLevel;
    mRaw.blackAreas = cam.blackAreas;
    // Per-component black levels apply only when no black areas are defined
    // and the database provides enough separate values.
    if (mRaw.blackAreas.Count == 0 && sensor.mBlackLevelSeparate.Count != 0)
    {
        if (mRaw.isCFA && mRaw.cfa.size.area() <= sensor.mBlackLevelSeparate.Count)
        {
            for (UInt32 i = 0; i < mRaw.cfa.size.area(); i++)
            {
                mRaw.blackLevelSeparate[i] = sensor.mBlackLevelSeparate[(int)i];
            }
        }
        else if (!mRaw.isCFA && mRaw.cpp <= sensor.mBlackLevelSeparate.Count)
        {
            for (UInt32 i = 0; i < mRaw.cpp; i++)
            {
                mRaw.blackLevelSeparate[i] = sensor.mBlackLevelSeparate[(int)i];
            }
        }
    }
    // Allow overriding individual black levels. Values are in CFA order
    // (the same order as in the CFA tag).
    // A hint could be:
    // <Hint name="override_cfa_black" value="10,20,30,20"/>
    cam.hints.TryGetValue("override_cfa_black", out string value);
    if (value != null)
    {
        string rgb = value;
        var v = rgb.Split(',');
        if (v.Length != 4)
        {
            mRaw.errors.Add("Expected 4 values '10,20,30,20' as values for override_cfa_black hint.");
        }
        else
        {
            // NOTE(review): Int32.Parse uses the current culture here —
            // presumably hint values are plain ASCII digits; verify.
            for (int i = 0; i < 4; i++)
            {
                mRaw.blackLevelSeparate[i] = Int32.Parse(v[i]);
            }
        }
    }
}
/// <summary>
/// Decoder-specific support check; invoked via <see cref="checkSupport"/>.
/// </summary>
/// <param name="meta">Camera database to check the file against.</param>
protected abstract void checkSupportInternal(CameraMetaData meta);
/// <summary>
/// Decoder-specific metadata extraction; invoked via <see cref="decodeMetaData"/>.
/// </summary>
/// <param name="meta">Camera database used to resolve camera information.</param>
protected abstract void decodeMetaDataInternal(CameraMetaData meta);
/// <summary>
/// No real metadata is available for this format; writes neutral (all-ones)
/// white balance coefficients as a placeholder.
/// </summary>
/// <param name="meta">Unused.</param>
protected override void decodeMetaDataInternal(CameraMetaData meta)
{
    //fill useless metadata
    mRaw.metadata.wbCoeffs = new[] { 1f, 1f, 1f, 1f };
}
/// <summary>
/// Performs no database check; only exposes the underlying file as a
/// random-access stream for the later decode stages.
/// </summary>
/// <param name="meta">Unused.</param>
protected override void checkSupportInternal(CameraMetaData meta)
{
    stream = mFile.BaseStream.AsRandomAccessStream();
}