// Create the default technique instance for the algorithm: wavelet.
// Wires a Technique with (1) a hash-computation delegate that either dumps the
// intermediate steps to disk or computes the wavelet hash, and (2) a comparison
// delegate based on the normalized Hamming distance vs. a percentage threshold.
public static Technique<WaveletHash, double> createTechniqueWavelet()
{
    Technique<WaveletHash, double> t = new Technique<WaveletHash, double>(TechniqueID.WAVELET,
        // Hash computation delegate.
        (Technique _t, ImageSource _image, string[] _dumpToDiskPathes) =>
        {
            // Local variables
            int len = 0;
            IntPtr hash;
            WaveletHash data = new WaveletHash();
            HashData<WaveletHash> result = null;
            HashComputationTimings timings = new HashComputationTimings();
            decimal attAlpha = 2m;   // default alpha when the attribute is absent
            decimal attLevel = 1m;   // default level when the attribute is absent

            // Extract attributes (optional overrides of the defaults above)
            if (_t.isAttributeAvailable(Technique.ATT_WAVELET_ALPHA) == true)
            {
                _t.getAttribute<decimal>(Technique.ATT_WAVELET_ALPHA, out attAlpha);
            }
            if (_t.isAttributeAvailable(Technique.ATT_WAVELET_LEVEL) == true)
            {
                _t.getAttribute<decimal>(Technique.ATT_WAVELET_LEVEL, out attLevel);
            }

            // Dump to disk?
            if (_dumpToDiskPathes != null && _dumpToDiskPathes.Length == DumpTechniqueStepsToDisk.WAVELET_PATH_COUNT)
            {
                if (PHash.dumpWaveletHashToDisk(_image.FilePath, Convert.ToSingle(attAlpha), Convert.ToSingle(attLevel),
                        _dumpToDiskPathes[0], _dumpToDiskPathes[1], _dumpToDiskPathes[2], _dumpToDiskPathes[3]) != 0)
                {
                    return (null);
                }
                return (new HashData<WaveletHash>(null));
            }
            else
            {
                // Compute hash
                // NOTE(review): the status of computeWaveletHash is not checked here, unlike
                // computeBMBHash in createTechniqueBMB — confirm whether a failure sentinel
                // exists and should map to a null result.
                hash = PHash.computeWaveletHash(_image.FilePath, ref len, timings, Convert.ToSingle(attAlpha), Convert.ToSingle(attLevel));

                // Store result
                data.m_data = hash;
                data.m_dataLength = len;
                result = new HashData<WaveletHash>(data,
                    (WaveletHash _data) => { return (Utility.toHexString(_data.m_data, _data.m_dataLength)); },
                    new HashDataTimings(timings.m_imageLoadingTimeMS, timings.m_hashComputationTimeMS));
                return (result);
            }
        },
        // Hash comparison delegate.
        (Technique _t, HashData<WaveletHash> _h0, HashData<WaveletHash> _h1) =>
        {
            // Local variables
            double dis = 0;
            decimal threshold = 90m;   // default acceptance threshold, in percent
            bool isSame = false;
            ComparativeData<double> result = null;

            // Extract attributes
            if (_t.isAttributeAvailable(Technique.ATT_GENERAL_THRESHOLD) == true)
            {
                _t.getAttribute<decimal>(Technique.ATT_GENERAL_THRESHOLD, out threshold);
            }

            // Compute distance
            dis = PHash.computeHammingDistance(_h0.Data.m_data, _h0.Data.m_dataLength, _h1.Data.m_data, _h1.Data.m_dataLength);

            // Is accepted? Convert the decimal threshold straight to double (was
            // Convert.ToSingle) so fractional thresholds are not rounded through
            // float before the double comparison.
            isSame = (1.0 - dis) >= Convert.ToDouble(threshold) / 100.0;

            // Store result
            result = new ComparativeData<double>(dis, isSame,
                (double _d) => { return ("Match rate: " + (1.0 - _d).ToString("#0.0000")); },
                (double _d) => { return (1.0 - _d); });
            return (result);
        });
    return (t);
}
// Create the default technique instance for the algorithm: BMB.
// Wires a Technique with (1) a hash-computation delegate that either dumps the
// intermediate steps to disk or computes the BMB hash (converting the unmanaged
// result to a managed structure), and (2) a comparison delegate based on the
// normalized Hamming distance vs. a percentage threshold.
public static Technique<BMBHash, double> createTechniqueBMB()
{
    Technique<BMBHash, double> t = new Technique<BMBHash, double>(TechniqueID.BMB,
        // Hash computation delegate.
        (Technique _t, ImageSource _image, string[] _dumpToDiskPathes) =>
        {
            // Local variables
            BMBHash hash = new BMBHash();
            IntPtr hashUnmanaged = IntPtr.Zero;
            HashComputationTimings timings = new HashComputationTimings();
            HashData<BMBHash> result = null;
            int attMethod = 1;   // default BMB method when the attribute is absent

            // Extract attributes
            if (_t.isAttributeAvailable(Technique.ATT_BMB_METHOD) == true)
            {
                _t.getAttribute<int>(Technique.ATT_BMB_METHOD, out attMethod);
            }

            // Dump to disk?
            if (_dumpToDiskPathes != null && _dumpToDiskPathes.Length == DumpTechniqueStepsToDisk.BMB_PATH_COUNT)
            {
                if (PHash.dumpBMBHashToDisk(_image.FilePath, attMethod,
                        _dumpToDiskPathes[0], _dumpToDiskPathes[1], _dumpToDiskPathes[2]) != 0)
                {
                    return (null);
                }
                return (new HashData<BMBHash>(null));
            }
            else
            {
                // Compute hash (-1 signals failure)
                if (PHash.computeBMBHash(_image.FilePath, attMethod, out hashUnmanaged, timings) == -1)
                {
                    return (null);
                }

                // Convert unmanaged to managed
                Utility.convertUnmanagedPtrToSimpleStructure<BMBHash>(hashUnmanaged, ref hash, false);

                // Store result
                result = new HashData<BMBHash>(hash,
                    (BMBHash _data) => { return (Utility.toHexString(_data.m_data, _data.m_dataLength)); },
                    new HashDataTimings(timings.m_imageLoadingTimeMS, timings.m_hashComputationTimeMS));
                return (result);
            }
        },
        // Hash comparison delegate.
        (Technique _t, HashData<BMBHash> _h0, HashData<BMBHash> _h1) =>
        {
            // Local variables
            double dis = 0;
            decimal threshold = 90m;   // default acceptance threshold, in percent
            bool isSame = false;
            ComparativeData<double> result = null;

            // Extract attributes
            if (_t.isAttributeAvailable(Technique.ATT_GENERAL_THRESHOLD) == true)
            {
                _t.getAttribute<decimal>(Technique.ATT_GENERAL_THRESHOLD, out threshold);
            }

            // Compute distance
            dis = PHash.computeHammingDistance(_h0.Data.m_data, _h0.Data.m_dataLength, _h1.Data.m_data, _h1.Data.m_dataLength);

            // Is accepted? Convert the decimal threshold straight to double (was
            // Convert.ToSingle) so fractional thresholds are not rounded through
            // float before the double comparison.
            isSame = (1.0 - dis) >= Convert.ToDouble(threshold) / 100.0;

            // Store result
            result = new ComparativeData<double>(dis, isSame,
                (double _d) => { return ("Match rate: " + (1.0 - _d).ToString("#0.0000")); },
                (double _d) => { return (1.0 - _d); });
            return (result);
        });
    return (t);
}
// Create the default technique instance for the algorithm: DCT.
// Wires a Technique with (1) a hash-computation delegate that either dumps the
// intermediate steps to disk or computes the 64-bit DCT hash, and (2) a
// comparison delegate based on the Hamming distance normalized over the 64
// hash bits vs. a percentage threshold.
public static Technique<UInt64, double> createTechniqueDCT()
{
    Technique<UInt64, double> t = new Technique<UInt64, double>(TechniqueID.DCT,
        // Hash computation delegate.
        (Technique _t, ImageSource _image, string[] _dumpToDiskPathes) =>
        {
            // Local variables
            UInt64 hash = 0;
            HashData<UInt64> result = null;
            HashComputationTimings timings = new HashComputationTimings();

            // Dump to disk?
            if (_dumpToDiskPathes != null && _dumpToDiskPathes.Length == DumpTechniqueStepsToDisk.DCT_PATH_COUNT)
            {
                if (PHash.dumpDCTHashToDisk(_image.FilePath,
                        _dumpToDiskPathes[0], _dumpToDiskPathes[1], _dumpToDiskPathes[2],
                        _dumpToDiskPathes[3], _dumpToDiskPathes[4], _dumpToDiskPathes[5]) != 0)
                {
                    return (null);
                }
                return (new HashData<ulong>(0));
            }
            else
            {
                // Compute hash
                // NOTE(review): the status of computeDCTHash is not checked here, unlike
                // computeBMBHash in createTechniqueBMB — confirm whether a failure
                // sentinel exists and should map to a null result.
                PHash.computeDCTHash(_image.FilePath, ref hash, timings);

                // Store result
                result = new HashData<UInt64>(hash, null,
                    new HashDataTimings(timings.m_imageLoadingTimeMS, timings.m_hashComputationTimeMS));
                return (result);
            }
        },
        // Hash comparison delegate.
        (Technique _t, HashData<UInt64> _h0, HashData<UInt64> _h1) =>
        {
            // Local variables
            double dis = 0;
            bool isSame = false;
            decimal threshold = 90m;   // default acceptance threshold, in percent
            ComparativeData<double> result = null;

            // Extract attributes
            if (_t.isAttributeAvailable(Technique.ATT_GENERAL_THRESHOLD) == true)
            {
                _t.getAttribute<decimal>(Technique.ATT_GENERAL_THRESHOLD, out threshold);
            }

            // Compute distance and normalize it over the 64 hash bits
            dis = PHash.computeHammingDistance(_h0.Data, _h1.Data);
            dis /= 64;

            // Is accepted? Convert the decimal threshold straight to double (was
            // Convert.ToSingle) so fractional thresholds are not rounded through
            // float before the double comparison.
            isSame = (1.0 - dis) >= Convert.ToDouble(threshold) / 100.0;

            // Store result (use the double literal 1.0, not 1.0f, consistent with
            // the wavelet/BMB techniques; the arithmetic stays all-double)
            result = new ComparativeData<double>(dis, isSame,
                (double _d) => { return ("Match rate: " + (1.0 - _d).ToString("#0.0000")); },
                (double _d) => { return (1.0 - _d); });
            return (result);
        });
    return (t);
}