        // walk the subtree under parent, hashing each catalog file's forks and
        // returning a rebuilt copy of the tree with the hashes attached
        private TreeNode iterateHashChildren(TreeNode parent, volumeStream vs)
        {
            TreeNode replaceParent = new TreeNode();

            replaceParent.Tag = parent.Tag;


            foreach (TreeNode child in parent.Nodes)
            {
                TreeNode replaceChild = new TreeNode();

                if (child.Tag is HFSPlusCatalogFolder)
                {
                    replaceChild     = iterateHashChildren(child, vs);
                    replaceChild.Tag = child.Tag;
                }
                else if (child.Tag is HFSPlusCatalogFile)
                {
                    HFSPlusCatalogFile        tag    = (HFSPlusCatalogFile)child.Tag;
                    dataOperations.hashValues hashes = new dataOperations.hashValues();

                    // hash the data fork, if present and non-empty; only its MD5 is kept
                    if (tag.dataFork != null && tag.dataFork.forkDataValues.logicalSize > 0)
                    {
                        HFSPlusFile theFileData = new HFSPlusFile(tag.dataFork, forkStream.forkType.data);

                        forkStream fs = new forkStream(vs, theFileData, forkStream.forkType.data);

                        dataOperations.hashValues hv = dataOperations.getHashValues(fs, (long)theFileData.dataLogicalSize);

                        hashes.md5hash = hv.md5hash;
                    }

                    // hash the resource fork, if present and non-empty; only its SHA-1 is kept
                    if (tag.resourceFork != null && tag.resourceFork.forkDataValues.logicalSize > 0)
                    {
                        HFSPlusFile theFileResource = new HFSPlusFile(tag.resourceFork, forkStream.forkType.resource);

                        forkStream fs = new forkStream(vs, theFileResource, forkStream.forkType.resource);

                        dataOperations.hashValues hv = dataOperations.getHashValues(fs, (long)theFileResource.rsrcLogicalSize);

                        hashes.sha1hash = hv.sha1hash;
                    }

                    tag.hashes = hashes;

                    replaceChild.Tag = tag;
                }
                else
                {
                    replaceChild.Tag = child.Tag;
                }

                replaceChild.Text = child.Text;
                replaceParent.Nodes.Add(replaceChild);
            }

            replaceParent.Text = parent.Text;

            return replaceParent;
        }
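
        // dataOperations.getHashValues is called throughout this page but its
        // body is not shown. Below is a minimal sketch of such a helper, on the
        // assumption that it digests the first `length` bytes of the stream with
        // MD5 and SHA-1 and that hashValues exposes raw md5hash/sha1hash byte
        // arrays (the fields the callers above read); the name and signature of
        // the sketch are illustrative, not the project's.
        public static dataOperations.hashValues getHashValuesSketch(System.IO.Stream s, long length)
        {
            dataOperations.hashValues hv = new dataOperations.hashValues();

            using (var md5  = System.Security.Cryptography.MD5.Create())
            using (var sha1 = System.Security.Cryptography.SHA1.Create())
            {
                byte[] buffer    = new byte[81920];
                long   remaining = length;

                while (remaining > 0)
                {
                    int read = s.Read(buffer, 0, (int)System.Math.Min(buffer.Length, remaining));
                    if (read <= 0) { break; } // stream ended before length bytes were read

                    // feed both digests from the same pass over the stream
                    md5.TransformBlock(buffer, 0, read, null, 0);
                    sha1.TransformBlock(buffer, 0, read, null, 0);
                    remaining -= read;
                }

                md5.TransformFinalBlock(buffer, 0, 0);
                sha1.TransformFinalBlock(buffer, 0, 0);

                hv.md5hash  = md5.Hash;
                hv.sha1hash = sha1.Hash;
            }

            return hv;
        }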
        public dataOperations.hashValues[] hashFile(HFSPlusCatalogFile file)
        {
            // take a file, return hashes for its data fork and resource fork
            dataOperations.hashValues[] hv = new dataOperations.hashValues[2];

            // i: the source image stream (a field of the containing class)
            GPTScheme gpts = new GPTScheme(i);
            HFSPlus   hfsp = new HFSPlus(i, gpts.entries[file.partitionAssoc]);

            volumeStream        vs  = new volumeStream(hfsp);
            extentsOverflowFile eof = new extentsOverflowFile(new HFSPlusFile(hfsp.volHead.extentsFile, forkStream.forkType.data), vs);

            if (file.dataFork != null && file.dataFork.forkDataValues.logicalSize > 0)
            {
                HFSPlusFile hfspfile = new HFSPlusFile(file, eof);
                forkStream  fs       = new forkStream(vs, hfspfile, forkStream.forkType.data);

                hv[0] = dataOperations.getHashValues(fs, (long)hfspfile.dataLogicalSize);
            }

            if (file.resourceFork != null && file.resourceFork.forkDataValues.logicalSize > 0)
            {
                HFSPlusFile hfspfile = new HFSPlusFile(file.resourceFork, forkStream.forkType.resource);
                forkStream  fs       = new forkStream(vs, hfspfile, forkStream.forkType.resource);

                hv[1] = dataOperations.getHashValues(fs, (long)hfspfile.rsrcLogicalSize);
            }

            return hv;
        }
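
        // Illustrative caller for hashFile: hash both forks of a catalog file
        // behind a tree node and format the digests for display. hashFile and
        // buildHashStrings exist in this project; the node handling below is an
        // assumption made for the example.
        private void hashNodeFile(TreeNode node)
        {
            if (node.Tag is HFSPlusCatalogFile)
            {
                HFSPlusCatalogFile file = (HFSPlusCatalogFile)node.Tag;

                // hv[0] holds the data fork hashes, hv[1] the resource fork hashes
                dataOperations.hashValues[] hv = hashFile(file);

                // buildHashStrings yields { MD5, SHA-1 } strings, as consumed by
                // hashImageToolStripMenuItem_Click below
                string[] dataForkHashes = dataOperations.buildHashStrings(hv[0]);
            }
        }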
Example #3
        // recompute the hash of the whole source image and surface it on the
        // tree's root node and in the property grid
        private void hashImageToolStripMenuItem_Click(object sender, EventArgs e)
        {
            dataOperations.hashValues hv = dc.addImageHash();
            string[] hashStrings         = dataOperations.buildHashStrings(hv);

            absImageStream.imageProperties tag = (absImageStream.imageProperties)directoryTree.Nodes[0].Tag;

            tag.hashMD5  = hashStrings[0];
            tag.hashSHA1 = hashStrings[1];
            directoryTree.Nodes[0].Tag   = tag;
            propertyGrid1.SelectedObject = tag;
        }
        public dataOperations.hashValues addImageHash()
        {
            // hash the full image stream, from offset 0 through i.Length
            dataOperations.hashValues hashbytes = dataOperations.getHashValues(i, i.Length);

            return hashbytes;
        }
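
        // buildHashStrings itself is not shown on this page; from the click
        // handler above it evidently maps a hashValues struct to a two-element
        // array of { MD5 string, SHA-1 string }. A minimal sketch under that
        // assumption (the name is illustrative):
        public static string[] buildHashStringsSketch(dataOperations.hashValues hv)
        {
            string md5  = hv.md5hash  == null ? "" : System.BitConverter.ToString(hv.md5hash).Replace("-", "").ToLowerInvariant();
            string sha1 = hv.sha1hash == null ? "" : System.BitConverter.ToString(hv.sha1hash).Replace("-", "").ToLowerInvariant();

            return new string[] { md5, sha1 };
        }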