Example #1
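        // Runs every registered test method: ten timed passes, each executing the method
        // Iterations times, measured with the process's total CPU time rather than
        // wall-clock time. The average per-pass time is printed and stored in TestResults.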
        public TestResults RunPerformanceTest()
        {
            Console.WriteLine("Running performance test '{0}'", Name);
            TestResults results = new TestResults(Name);

            foreach (RunMethod run in runMethods)
            {
                Console.WriteLine("Running for {0}", run.Method.Name);
                double totalRunTime = 0.0d;

                for (int i = 0; i < 10; i++)
                {
                    Console.Write(" | ");

                    TimeSpan start = Process.GetCurrentProcess().TotalProcessorTime;

                    for (int j = 0; j < Iterations; j++)
                    {
                        run(GenerateList());
                    }

                    TimeSpan end  = Process.GetCurrentProcess().TotalProcessorTime;
                    double   time = (end - start).TotalMilliseconds;

                    Console.Write(time);
                    totalRunTime += time;
                }

                Console.WriteLine("\nAverage time for {0} executions: {1}", Iterations, totalRunTime / 10);
                results.AddResult(run.Method.Name, totalRunTime / 10);
            }

            return results;
        }
Example #2
        // Same harness as above, but each timed pass feeds rand.Next() to the run method
        // instead of a freshly generated list.
        public TestResults RunPerformanceTest()
        {
            Console.WriteLine("Running performance test '{0}'", Name);
            TestResults results = new TestResults(Name);

            foreach (RunMethod run in runMethods)
            {
                Console.WriteLine("Running for {0}", run.Method.Name);
                double totalRunTime = 0.0d;

                for (int i = 0; i < 10; i++)
                {
                    Console.Write(" | ");
                    Random rand = new Random();

                    TimeSpan start = Process.GetCurrentProcess().TotalProcessorTime;

                    for (int j = 0; j < Iterations; j++)
                    {
                        run(rand.Next());
                    }

                    TimeSpan end = Process.GetCurrentProcess().TotalProcessorTime;
                    double time = (end - start).TotalMilliseconds;

                    Console.Write(time);
                    totalRunTime += time;
                }

                Console.WriteLine("\nAverage time for {0} executions: {1}", Iterations, totalRunTime / 10);
                results.AddResult(run.Method.Name, totalRunTime / 10);
            }

            return results;
        }
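Both variants of RunPerformanceTest rely on members that are not shown: Name, Iterations, runMethods, the RunMethod delegate, GenerateList, and the TestResults container. Below is a minimal sketch of how those pieces might be declared, written to match the second variant (which passes rand.Next() to each run method); every name and signature is an assumption inferred from the call sites, not the original code.

        // Hypothetical supporting declarations (assumes using System.Collections.Generic).

        // Delegate type for the methods under test. The second variant passes an int;
        // the first variant would instead take whatever GenerateList() returns.
        public delegate void RunMethod(int input);

        // Collects one average timing per run method.
        public class TestResults
        {
            private readonly Dictionary<string, double> m_averages = new Dictionary<string, double>();

            public TestResults(string name) { Name = name; }

            public string Name { get; private set; }

            public void AddResult(string methodName, double averageMilliseconds)
            {
                m_averages[methodName] = averageMilliseconds;
            }
        }

        // Members the enclosing test class is assumed to expose.
        public string Name { get; set; }                 // test name used in the console output
        public int Iterations { get; set; }              // executions per timed pass
        private readonly List<RunMethod> runMethods = new List<RunMethod>();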
Example #3
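        // Verifies a sample of remote volumes. With FullRemoteVerification each sampled
        // volume is downloaded asynchronously and compared against the local database via
        // TestVolumeInternals; otherwise each file is fetched with GetForTesting and checked
        // against its recorded size and hash, with a full verification (and a database update
        // of the captured hash) when no hash is stored. Failures are recorded per file, and a
        // Stop request from task control ends the run early.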
        public void DoRun(long samples, LocalTestDatabase db, BackendManager backend)
        {
            var files = db.SelectTestTargets(samples, m_options).ToList();

            m_results.OperationProgressUpdater.UpdatePhase(OperationPhase.Verify_Running);
            m_results.OperationProgressUpdater.UpdateProgress(0);
            var progress = 0L;

            if (m_options.FullRemoteVerification)
            {
                foreach (var vol in new AsyncDownloader(files, backend))
                {
                    try
                    {
                        if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                        {
                            backend.WaitForComplete(db, null);
                            return;
                        }

                        progress++;
                        m_results.OperationProgressUpdater.UpdateProgress((float)progress / files.Count);

                        KeyValuePair <string, IEnumerable <KeyValuePair <TestEntryStatus, string> > > res;
                        using (var tf = vol.TempFile)
                            res = TestVolumeInternals(db, vol, tf, m_options, m_results, m_options.FullBlockVerification ? 1.0 : 0.2);
                        m_results.AddResult(res.Key, res.Value);

                        db.UpdateVerificationCount(vol.Name);
                    }
                    catch (Exception ex)
                    {
                        m_results.AddResult(vol.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                        m_results.AddError(string.Format("Failed to process file {0}", vol.Name), ex);
                        if (ex is System.Threading.ThreadAbortException)
                        {
                            throw;
                        }
                    }
                }
            }
            else
            {
                foreach (var f in files)
                {
                    try
                    {
                        if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                        {
                            return;
                        }

                        progress++;
                        m_results.OperationProgressUpdater.UpdateProgress((float)progress / files.Count);

                        if (f.Size < 0 || string.IsNullOrWhiteSpace(f.Hash))
                        {
                            m_results.AddMessage(string.Format("No hash recorded for {0}, performing full verification", f.Name));
                            KeyValuePair <string, IEnumerable <KeyValuePair <TestEntryStatus, string> > > res;
                            string hash;
                            long   size;

                            using (var tf = backend.GetWithInfo(f.Name, out size, out hash))
                                res = TestVolumeInternals(db, f, tf, m_options, m_results, 1);
                            m_results.AddResult(res.Key, res.Value);

                            if (res.Value != null && !res.Value.Any() && !string.IsNullOrWhiteSpace(hash))
                            {
                                if (!m_options.Dryrun)
                                {
                                    m_results.AddMessage(string.Format("Successfully captured hash for {0}, updating database", f.Name));
                                    db.UpdateRemoteVolume(f.Name, RemoteVolumeState.Verified, size, hash);
                                }
                            }
                        }
                        else
                        {
                            backend.GetForTesting(f.Name, f.Size, f.Hash);
                        }
                        db.UpdateVerificationCount(f.Name);
                        m_results.AddResult(f.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string> [0]);
                    }
                    catch (Exception ex)
                    {
                        m_results.AddResult(f.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                        m_results.AddError(string.Format("Failed to process file {0}", f.Name), ex);
                        if (ex is System.Threading.ThreadAbortException)
                        {
                            throw;
                        }
                    }
                }
            }
        }
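This variant (and Example #5) delegates the per-volume comparison to a TestVolumeInternals helper that is not included here. Judging only from the call sites, its signature plausibly resembles the sketch below; Example #5 calls a form without the results parameter. Every detail is an assumption, not the original implementation.

        // Assumed signature, inferred from the calls above; not the original helper.
        // Returns the remote volume's name paired with the differences found when the
        // downloaded temp file is compared against the local database (an empty sequence
        // means the volume verified cleanly). sampleRate controls roughly what fraction
        // of the blocks in a block volume are re-hashed.
        private static KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>
            TestVolumeInternals(LocalTestDatabase db, IRemoteVolume vol, string tempFile,
                                Options options, TestResults results, double sampleRate)
        {
            // A real implementation would parse the volume type (fileset / index / blocks)
            // and run the corresponding comparison, much like the inlined logic in Example #4.
            throw new NotImplementedException();
        }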
Example #4
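        // Variant that performs the content checks inline instead of delegating to a helper:
        // fileset volumes are compared file-by-file against the database, index volumes have
        // their block links and blocklists compared, and block volumes have every block
        // registered and roughly 20% of them re-hashed with the configured block hash
        // algorithm. Without FullRemoteVerification, each sampled file is only fetched with
        // GetForTesting and checked against its recorded size and hash.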
        public void DoRun(long samples, LocalTestDatabase db, BackendManager backend)
        {
            var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(m_options.BlockHashAlgorithm);

            if (blockhasher == null)
            {
                throw new Exception(string.Format(Strings.Foresthash.InvalidHashAlgorithm, m_options.BlockHashAlgorithm));
            }
            if (!blockhasher.CanReuseTransform)
            {
                throw new Exception(string.Format(Strings.Foresthash.InvalidCryptoSystem, m_options.BlockHashAlgorithm));
            }

            var hashsize = blockhasher.HashSize / 8;
            var files    = db.SelectTestTargets(samples, m_options).ToList();

            if (m_options.FullRemoteVerification)
            {
                foreach (var vol in new AsyncDownloader(files, backend))
                {
                    var parsedInfo = Volumes.VolumeBase.ParseFilename(vol.Name);
                    try
                    {
                        if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                        {
                            backend.WaitForComplete(db, null);
                            return;
                        }

                        using (var tf = vol.TempFile)
                        {
                            if (parsedInfo.FileType == RemoteVolumeType.Files)
                            {
                                //Compare with db and see if all files are accounted for
                                // with correct file hashes and blocklist hashes
                                using (var fl = db.CreateFilelist(vol.Name))
                                {
                                    using (var rd = new Volumes.FilesetVolumeReader(parsedInfo.CompressionModule, tf, m_options))
                                        foreach (var f in rd.Files)
                                        {
                                            fl.Add(f.Path, f.Size, f.Hash, f.Metasize, f.Metahash, f.BlocklistHashes, f.Type, f.Time);
                                        }

                                    m_results.AddResult(vol.Name, fl.Compare().ToList());
                                }
                            }
                            else if (parsedInfo.FileType == RemoteVolumeType.Index)
                            {
                                var blocklinks = new List <Tuple <string, string, long> >();
                                IEnumerable <KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string> > combined = new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string> [0];

                                //Compare with db and see that all hashes and volumes are listed
                                using (var rd = new Volumes.IndexVolumeReader(parsedInfo.CompressionModule, tf, m_options, hashsize))
                                    foreach (var v in rd.Volumes)
                                    {
                                        blocklinks.Add(new Tuple <string, string, long>(v.Filename, v.Hash, v.Length));
                                        using (var bl = db.CreateBlocklist(v.Filename))
                                        {
                                            foreach (var h in v.Blocks)
                                            {
                                                bl.AddBlock(h.Key, h.Value);
                                            }

                                            combined = combined.Union(bl.Compare().ToArray());
                                        }
                                    }

                                using (var il = db.CreateIndexlist(vol.Name))
                                {
                                    foreach (var t in blocklinks)
                                    {
                                        il.AddBlockLink(t.Item1, t.Item2, t.Item3);
                                    }

                                    combined = combined.Union(il.Compare()).ToList();
                                }

                                m_results.AddResult(vol.Name, combined.ToList());
                            }
                            else if (parsedInfo.FileType == RemoteVolumeType.Blocks)
                            {
                                using (var bl = db.CreateBlocklist(vol.Name))
                                    using (var rd = new Volumes.BlockVolumeReader(parsedInfo.CompressionModule, tf, m_options))
                                    {
                                        //Verify that all blocks are in the file
                                        foreach (var b in rd.Blocks)
                                        {
                                            bl.AddBlock(b.Key, b.Value);
                                        }

                                        //Select 20% random blocks and verify their hashes match the filename and size
                                        var hashsamples = new List <KeyValuePair <string, long> >(rd.Blocks);
                                        var sampleCount = Math.Min(Math.Max(0, (int)(hashsamples.Count * 0.2)), hashsamples.Count - 1);
                                        var rnd         = new Random();

                                        while (hashsamples.Count > sampleCount)
                                        {
                                            hashsamples.RemoveAt(rnd.Next(hashsamples.Count));
                                        }

                                        var blockbuffer = new byte[m_options.Blocksize];
                                        var changes     = new List <KeyValuePair <Library.Interface.TestEntryStatus, string> >();
                                        foreach (var s in hashsamples)
                                        {
                                            var size = rd.ReadBlock(s.Key, blockbuffer);
                                            if (size != s.Value)
                                            {
                                                changes.Add(new KeyValuePair <Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                                            }
                                            else
                                            {
                                                var hash = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                                                if (hash != s.Key)
                                                {
                                                    changes.Add(new KeyValuePair <Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                                                }
                                            }
                                        }

                                        m_results.AddResult(vol.Name, changes.Union(bl.Compare().ToList()));
                                    }
                            }
                        }

                        db.UpdateVerificationCount(vol.Name);
                    }
                    catch (Exception ex)
                    {
                        m_results.AddResult(vol.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                        m_results.AddError(string.Format("Failed to process file {0}", vol.Name), ex);
                        if (ex is System.Threading.ThreadAbortException)
                        {
                            throw;
                        }
                    }
                }
            }
            else
            {
                foreach (var f in files)
                {
                    try
                    {
                        if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                        {
                            return;
                        }

                        backend.GetForTesting(f.Name, f.Size, f.Hash);
                        db.UpdateVerificationCount(f.Name);
                        m_results.AddResult(f.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string> [0]);
                    }
                    catch (Exception ex)
                    {
                        m_results.AddResult(f.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                        m_results.AddError(string.Format("Failed to process file {0}", f.Name), ex);
                        if (ex is System.Threading.ThreadAbortException)
                        {
                            throw;
                        }
                    }
                }
            }
        }
Example #5
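        // Variant of the verification pass that reports progress, logs through the structured
        // Logging.Log API rather than m_results messages, records m_results.EndTime on every
        // exit path, and backfills a volume's hash and size into the database after a clean
        // verification (a dry run only logs the change it would make).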
        public void DoRun(long samples, LocalTestDatabase db, BackendManager backend)
        {
            var files = db.SelectTestTargets(samples, m_options).ToList();

            m_results.OperationProgressUpdater.UpdatePhase(OperationPhase.Verify_Running);
            m_results.OperationProgressUpdater.UpdateProgress(0);
            var progress = 0L;

            if (m_options.FullRemoteVerification)
            {
                foreach (var vol in new AsyncDownloader(files, backend))
                {
                    try
                    {
                        if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                        {
                            backend.WaitForComplete(db, null);
                            m_results.EndTime = DateTime.UtcNow;
                            return;
                        }

                        progress++;
                        m_results.OperationProgressUpdater.UpdateProgress((float)progress / files.Count);

                        KeyValuePair <string, IEnumerable <KeyValuePair <TestEntryStatus, string> > > res;
                        using (var tf = vol.TempFile)
                            res = TestVolumeInternals(db, vol, tf, m_options, m_options.FullBlockVerification ? 1.0 : 0.2);
                        m_results.AddResult(res.Key, res.Value);

                        // If the downloaded volume verified cleanly but the database does not yet
                        // hold a verified record with its hash and size, backfill those values from
                        // the remote listing (a dry run only logs what would be updated).
                        if (!string.IsNullOrWhiteSpace(vol.Hash) && vol.Size > 0)
                        {
                            if (res.Value == null || !res.Value.Any())
                            {
                                var rv = db.GetRemoteVolume(vol.Name, null);

                                if (rv.ID < 0)
                                {
                                    if (string.IsNullOrWhiteSpace(rv.Hash) || rv.Size <= 0)
                                    {
                                        if (m_options.Dryrun)
                                        {
                                            Logging.Log.WriteDryrunMessage(LOGTAG, "CaptureHashAndSize", "Successfully captured hash and size for {0}, would update database", vol.Name);
                                        }
                                        else
                                        {
                                            Logging.Log.WriteInformationMessage(LOGTAG, "CaptureHashAndSize", "Successfully captured hash and size for {0}, updating database", vol.Name);
                                            db.UpdateRemoteVolume(vol.Name, RemoteVolumeState.Verified, vol.Size, vol.Hash);
                                        }
                                    }
                                }
                            }
                        }

                        db.UpdateVerificationCount(vol.Name);
                    }
                    catch (Exception ex)
                    {
                        m_results.AddResult(vol.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                        Logging.Log.WriteErrorMessage(LOGTAG, "RemoteFileProcessingFailed", ex, "Failed to process file {0}", vol.Name);
                        if (ex is System.Threading.ThreadAbortException)
                        {
                            m_results.EndTime = DateTime.UtcNow;
                            throw;
                        }
                    }
                }
            }
            else
            {
                foreach (var f in files)
                {
                    try
                    {
                        if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                        {
                            m_results.EndTime = DateTime.UtcNow;
                            return;
                        }

                        progress++;
                        m_results.OperationProgressUpdater.UpdateProgress((float)progress / files.Count);

                        if (f.Size <= 0 || string.IsNullOrWhiteSpace(f.Hash))
                        {
                            Logging.Log.WriteInformationMessage(LOGTAG, "MissingRemoteHash", "No hash or size recorded for {0}, performing full verification", f.Name);
                            KeyValuePair <string, IEnumerable <KeyValuePair <TestEntryStatus, string> > > res;
                            string hash;
                            long   size;

                            using (var tf = backend.GetWithInfo(f.Name, out size, out hash))
                                res = TestVolumeInternals(db, f, tf, m_options, 1);
                            m_results.AddResult(res.Key, res.Value);

                            if (!string.IsNullOrWhiteSpace(hash) && size > 0)
                            {
                                if (res.Value == null || !res.Value.Any())
                                {
                                    if (m_options.Dryrun)
                                    {
                                        Logging.Log.WriteDryrunMessage(LOGTAG, "CapturedHashAndSize", "Successfully captured hash and size for {0}, would update database", f.Name);
                                    }
                                    else
                                    {
                                        Logging.Log.WriteInformationMessage(LOGTAG, "CapturedHashAndSize", "Successfully captured hash and size for {0}, updating database", f.Name);
                                        db.UpdateRemoteVolume(f.Name, RemoteVolumeState.Verified, size, hash);
                                    }
                                }
                            }
                        }
                        else
                        {
                            backend.GetForTesting(f.Name, f.Size, f.Hash);
                        }

                        db.UpdateVerificationCount(f.Name);
                        m_results.AddResult(f.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string> [0]);
                    }
                    catch (Exception ex)
                    {
                        m_results.AddResult(f.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                        Logging.Log.WriteErrorMessage(LOGTAG, "FailedToProcessFile", ex, "Failed to process file {0}", f.Name);
                        if (ex is System.Threading.ThreadAbortException)
                        {
                            m_results.EndTime = DateTime.UtcNow;
                            throw;
                        }
                    }
                }
            }

            m_results.EndTime = DateTime.UtcNow;
        }