public void ScanPath(string path)
        {
            // Scans <paramref name="path"/> recursively for duplicate files and fills
            // DupChecker.DupplicateCollection with the groups found. Duplicates are
            // detected either by content hash (size criterion) or by file name
            // (name criterion), per Settings.Default.DuplicateFilterFileSizeCriteara.
            // Progress and cancellation are surfaced through this.DupChecker.

            // Recursively gather every file under the root, reporting each directory visited.
            List<string> files = FileOperations.I.GetFilesRecursive(path, null, new Action<string>((s) =>
            {
                DupChecker.ProgressText = "Retreiving files in: " + s;
            }));

            // Bucket candidate files by key (size or name) so only files sharing a
            // key need the expensive hashing step later.
            Dictionary<string, List<string>> files_with_same_size = new Dictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase);
            DupChecker.ProgressText = "Checking for same file size. This may take a while"; // to minimize work load looking for duplicate files
            this.DupChecker.ProgressMax = files.Count;
            this.DupChecker.ProgressIndex = 0;
            foreach (string file in files)
            {
                if (this.DupChecker.Cancel)
                {
                    DupChecker.ProgressText = "Operation Cancelled.";
                    return;
                }

                bool add = true;

                FileInfo fi = new FileInfo(file);
                // Size bounds are configured in KB (x1000); a value of 0 disables the bound.
                if (Settings.Default.DupChecker_MinSize != 0 && fi.Length < Settings.Default.DupChecker_MinSize * 1000) add = false;
                if (Settings.Default.DupChecker_MaxSize != 0 && fi.Length > Settings.Default.DupChecker_MaxSize * 1000) add = false;

                if (Settings.Default.DupChecker_FileExtensions != "*.*")
                {
                    // FIX: compare extensions case-insensitively. The previous ordinal
                    // Contains("*" + ext.ToLower()) never matched configured entries
                    // written in upper case (e.g. "*.JPG").
                    string[] exts = Settings.Default.DupChecker_FileExtensions.Split(';');
                    if (!exts.Contains("*" + fi.Extension, StringComparer.OrdinalIgnoreCase)) add = false;
                }

                // Equivalent to (criterion && len > 0) || !criterion: zero-length files
                // are excluded only when grouping by size.
                if (!Settings.Default.DuplicateFilterFileSizeCriteara || fi.Length > 0) // do not include 0 length files.
                {
                    if (add)
                    {
                        // Group key: file size when matching by size, file name otherwise.
                        string strKeytoAdd = Settings.Default.DuplicateFilterFileSizeCriteara
                            ? fi.Length.ToString()
                            : fi.Name;

                        // Single-lookup add-or-append (was ContainsKey + indexer, a double lookup).
                        if (files_with_same_size.TryGetValue(strKeytoAdd, out List<string> bucket))
                        {
                            bucket.Add(fi.FullName);
                        }
                        else
                        {
                            files_with_same_size.Add(strKeytoAdd, new List<string>() { fi.FullName });
                        }
                    }
                }

                this.DupChecker.ProgressIndex++;
            }

            this.DupChecker.ProgressIndex = 0;

            DupChecker.ProgressText = "Please wait while hashing files. This may take a while";
            // Only buckets holding more than one file can possibly contain duplicates.
            List<string> files_to_hash = new List<string>();
            Dictionary<string, List<string>> FileswithSameNameSize = new Dictionary<string, List<string>>();
            foreach (KeyValuePair<string, List<string>> kv in files_with_same_size)
            {
                if (this.DupChecker.Cancel)
                {
                    DupChecker.ProgressText = "Operation Cancelled";
                    return;
                }
                if (kv.Value.Count > 1)
                {
                    files_to_hash.AddRange(kv.Value);
                    FileswithSameNameSize.Add(kv.Key, kv.Value);
                }

                this.DupChecker.ProgressIndex++;
            }

            List<string> hashed_files = new List<string>();
            this.DupChecker.ProgressMax = files_to_hash.Count;
            this.DupChecker.ProgressIndex = 0;
            Dictionary<string, List<string>> files_with_same_hash = new Dictionary<string, List<string>>();
            if (Settings.Default.DuplicateFilterFileSizeCriteara)
            {
                // Size criterion: same-size files must be content-hashed to confirm duplicates.
                foreach (string filename in files_to_hash)
                {
                    try
                    {
                        if (this.DupChecker.Cancel)
                        {
                            DupChecker.ProgressText = "Operation Cancelled";
                            return;
                        }
                        string hash = FileOperations.I.HashFile(filename);
                        DupChecker.ProgressText = "Hashing: " + filename + " > " + hash;
                        // Stored as "path|hash"; '|' is not a legal path character on Windows.
                        hashed_files.Add(filename + "|" + hash);
                    }
                    catch (Exception ex)
                    {
                        // Best-effort: unreadable/locked files are skipped, not fatal.
                        Debug.WriteLine(ex.Message);
                    }

                    this.DupChecker.ProgressIndex++;
                }

                DupChecker.ProgressText = "Finalizing ...";
                this.DupChecker.ProgressMax = hashed_files.Count;
                this.DupChecker.ProgressIndex = 0;

                // Regroup the hashed files by their content hash.
                foreach (string hashedfile in hashed_files)
                {
                    if (this.DupChecker.Cancel)
                    {
                        DupChecker.ProgressText = "Operation Cancelled";
                        return;
                    }
                    string[] tmp = hashedfile.Split('|');
                    string file = tmp[0];
                    string hash = tmp[1];

                    if (files_with_same_hash.TryGetValue(hash, out List<string> sameHash))
                    {
                        if (!sameHash.Contains(file))
                        {
                            sameHash.Add(file);
                        }
                    }
                    else
                    {
                        files_with_same_hash.Add(hash, new List<string>() { file });
                    }
                    this.DupChecker.ProgressIndex++;
                }

                // Drop hash buckets holding a single file - those are unique, not duplicates.
                List<string> teremove = new List<string>();
                foreach (KeyValuePair<string, List<string>> kv in files_with_same_hash)
                {
                    if (this.DupChecker.Cancel)
                    {
                        DupChecker.ProgressText = "Operation Cancelled";
                        return;
                    }
                    if (kv.Value.Count == 1) teremove.Add(kv.Key);
                }
                foreach (string key in teremove)
                {
                    files_with_same_hash.Remove(key);
                }
            }
            else
            {
                // Name criterion: no hashing needed; the name buckets are the duplicate groups.
                files_with_same_hash = FileswithSameNameSize;
            }

            DupChecker.ProgressText = "Adding to collection for previewing.";

            // The duplicate collection is UI-bound, so mutate it on the dispatcher thread.
            // NOTE: a 'return' inside this delegate exits only the delegate; the method
            // continues after Invoke() and still runs the "Done." epilogue below.
            App.Current.Dispatcher.Invoke((Action)delegate
            {
                this.DupChecker.ProgressMax = files_with_same_hash.Count;
                this.DupChecker.ProgressIndex = 0;
                foreach (KeyValuePair<string, List<string>> group in files_with_same_hash)
                {
                    DupChecker.ProgressText = "Adding Files to collection " + group.Key;
                    if (this.DupChecker.Cancel)
                    {
                        DupChecker.ProgressText = "Operation Cancelled";
                        return;
                    }
                    // FIX: iterate by Count instead of ToArray().Length, which allocated
                    // a throwaway array on every loop iteration.
                    for (int i = 0; i < group.Value.Count; i++)
                    {
                        if (this.DupChecker.Cancel)
                        {
                            DupChecker.ProgressText = "Operation Cancelled";
                            return;
                        }
                        FileInfo fi = new FileInfo(group.Value[i]);
                        Model_DuplicateChecker e = new Model_DuplicateChecker();
                        e.Hash = group.Key;
                        // Pre-select every copy except the first so one original survives by default.
                        e.Selected = i != 0;
                        e.FileDetails = new Model_DuplicateChecker_FileDetails()
                        {
                            Filename = fi.Name,
                            Fullfilepath = fi.FullName,
                            ParentDirectory = fi.Directory.FullName
                        };
                        DupChecker.DupplicateCollection.Add(e);
                    }
                }
            });

            DupChecker.ProgressText = "Done.";

            // clear some memory
            files.Clear();
            files_with_same_size.Clear();
            files_to_hash.Clear();
            hashed_files.Clear();
            files_with_same_hash.Clear();
            if (DupChecker.DupplicateCollection.Count > 0)
                DupChecker.ProgressText = "Done. Select file(s) to be removed, then click on remove duplicates button.";
            else
                DupChecker.ProgressText = "Done. No duplicates found.";
        }
        public void ScanPath(string path)
        {
            // NOTE(review): another ScanPath(string) with an identical signature appears
            // earlier in this file; if both belong to the same class this cannot compile -
            // confirm this older ProgressWorker-based variant is slated for removal.

            // Scans <paramref name="path"/> recursively for content-duplicate files
            // (grouped by size, then confirmed by hash) and fills
            // DupChecker.DupplicateCollection. Progress goes through the
            // ProgressWorker queue using the "message|1" format.

            // Recursively gather every file under the root, reporting each directory visited.
            List<string> files = FileOperations.I.GetFilesRecursive(path, null, new Action<string>((s) =>
            {
                ProgressWorker.I.EnQ("Retreiving files in: " + s + "|1");
            }));

            // Bucket files by size so only same-size files need the expensive hashing step.
            Dictionary<long, List<string>> files_with_same_size = new Dictionary<long, List<string>>();
            ProgressWorker.I.EnQ("Checking for same file size. This may take a while" + "|1"); // to minimize work load looking for duplicate files
            this.DupChecker.ProgressMax = files.Count;
            this.DupChecker.ProgressIndex = 0;
            foreach (string file in files)
            {
                bool add = true;

                FileInfo fi = new FileInfo(file);
                // Size bounds are configured in KB (x1000); a value of 0 disables the bound.
                if (Settings.Default.DupChecker_MinSize != 0 && fi.Length < Settings.Default.DupChecker_MinSize * 1000) add = false;
                if (Settings.Default.DupChecker_MaxSize != 0 && fi.Length > Settings.Default.DupChecker_MaxSize * 1000) add = false;

                if (Settings.Default.DupChecker_FileExtensions != "*.*")
                {
                    // FIX: compare extensions case-insensitively. The previous ordinal
                    // Contains("*" + ext.ToLower()) never matched configured entries
                    // written in upper case (e.g. "*.JPG").
                    string[] exts = Settings.Default.DupChecker_FileExtensions.Split(';');
                    if (!exts.Contains("*" + fi.Extension, StringComparer.OrdinalIgnoreCase)) add = false;
                }

                if (fi.Length > 0 && add) // do not include 0 length files.
                {
                    // Single-lookup add-or-append (was ContainsKey + indexer, a double lookup).
                    if (files_with_same_size.TryGetValue(fi.Length, out List<string> bucket))
                    {
                        bucket.Add(fi.FullName);
                    }
                    else
                    {
                        files_with_same_size.Add(fi.Length, new List<string>() { fi.FullName });
                    }
                }

                this.DupChecker.ProgressIndex++;
            }

            this.DupChecker.ProgressIndex = 0;

            ProgressWorker.I.EnQ("Please wait while hashing files. This may take a while" + "|1");
            // Only buckets holding more than one file can possibly contain duplicates.
            List<string> files_to_hash = new List<string>();
            foreach (KeyValuePair<long, List<string>> kv in files_with_same_size)
            {
                if (kv.Value.Count > 1)
                {
                    files_to_hash.AddRange(kv.Value);
                }

                this.DupChecker.ProgressIndex++;
            }

            List<string> hashed_files = new List<string>();
            this.DupChecker.ProgressMax = files_to_hash.Count;
            this.DupChecker.ProgressIndex = 0;

            // (A commented-out parallel hashing sketch previously lived here; removed as dead code.)
            foreach (string filename in files_to_hash)
            {
                try
                {
                    string hash = FileOperations.I.HashFile(filename);
                    ProgressWorker.I.EnQ("Hashing: " + filename + " > " + hash + "|1");
                    // Stored as "path|hash"; '|' is not a legal path character on Windows.
                    hashed_files.Add(filename + "|" + hash);
                }
                catch (Exception ex)
                {
                    // Best-effort: unreadable/locked files are skipped, not fatal.
                    Debug.WriteLine(ex.Message);
                }

                this.DupChecker.ProgressIndex++;
            }

            ProgressWorker.I.EnQ("Finalizing ..." + "|1");
            Dictionary<string, List<string>> files_with_same_hash = new Dictionary<string, List<string>>();
            this.DupChecker.ProgressMax = hashed_files.Count;
            this.DupChecker.ProgressIndex = 0;

            // Regroup the hashed files by their content hash.
            foreach (string hashedfile in hashed_files)
            {
                string[] tmp = hashedfile.Split('|');
                string file = tmp[0];
                string hash = tmp[1];

                if (files_with_same_hash.TryGetValue(hash, out List<string> sameHash))
                {
                    if (!sameHash.Contains(file))
                    {
                        sameHash.Add(file);
                    }
                }
                else
                {
                    files_with_same_hash.Add(hash, new List<string>() { file });
                }
                this.DupChecker.ProgressIndex++;
            }

            // Drop hash buckets holding a single file - those are unique, not duplicates.
            List<string> teremove = new List<string>();
            foreach (KeyValuePair<string, List<string>> kv in files_with_same_hash)
            {
                if (kv.Value.Count == 1) teremove.Add(kv.Key);
            }
            foreach (string key in teremove)
            {
                files_with_same_hash.Remove(key);
            }

            ProgressWorker.I.EnQ("Adding to collection for previewing" + "|1");

            // The duplicate collection is UI-bound, so mutate it on the dispatcher thread.
            App.Current.Dispatcher.Invoke((Action)delegate
            {
                this.DupChecker.ProgressMax = files_with_same_hash.Count;
                this.DupChecker.ProgressIndex = 0;
                foreach (KeyValuePair<string, List<string>> group in files_with_same_hash)
                {
                    // FIX: iterate by Count instead of ToArray().Length, which allocated
                    // a throwaway array on every loop iteration.
                    for (int i = 0; i < group.Value.Count; i++)
                    {
                        FileInfo fi = new FileInfo(group.Value[i]);
                        Model_DuplicateChecker e = new Model_DuplicateChecker();
                        e.Hash = group.Key;
                        // Pre-select every copy except the first so one original survives by default.
                        e.Selected = i != 0;
                        e.FileDetails = new Model_DuplicateChecker_FileDetails()
                        {
                            Filename = fi.Name,
                            Fullfilepath = fi.FullName,
                            ParentDirectory = fi.Directory.FullName
                        };
                        DupChecker.DupplicateCollection.Add(e);
                    }
                }
            });

            // clear some memory
            files.Clear();
            files_with_same_size.Clear();
            files_to_hash.Clear();
            hashed_files.Clear();
            files_with_same_hash.Clear();
        }