示例#1
0
        /// <summary>
        /// Builds the form-encoded name/value pairs for an OAuth token request,
        /// selecting the grant type and credentials by authentication scheme.
        /// </summary>
        /// <param name="userName">User name; for anonymous sign-in this carries the conference URI.</param>
        /// <param name="password">Password; used only by the password and anonymous grants.</param>
        /// <param name="authType">The authentication scheme to build the payload for.</param>
        /// <returns>The key/value pairs to post as the request body.</returns>
        IEnumerable <KeyValuePair <string, string> > CreateAuthRequestPayload(string userName, string password, UcwaAppAuthenticationTypes authType)
        {
            // Build the list directly instead of round-tripping through an empty array.
            var formDataList = new List <KeyValuePair <string, string> >();

            switch (authType)
            {
            case UcwaAppAuthenticationTypes.Windows:
                formDataList.Add(new KeyValuePair <string, string>("grant_type", "urn:microsoft.rtc:windows"));
                formDataList.Add(new KeyValuePair <string, string>("username", userName));
                break;

            case UcwaAppAuthenticationTypes.Annonymous:
                formDataList.Add(new KeyValuePair <string, string>("grant_type", "urn:microsoft.rtc:anonmeeting"));
                formDataList.Add(new KeyValuePair <string, string>("password", password));
                formDataList.Add(new KeyValuePair <string, string>("msrtc_conferenceuri", userName));
                break;

            case UcwaAppAuthenticationTypes.Passive:
                formDataList.Add(new KeyValuePair <string, string>("grant_type", "urn:microsoft.rtc:passive"));
                break;

            default:      // password grant
                formDataList.Add(new KeyValuePair <string, string>("grant_type", "password"));
                formDataList.Add(new KeyValuePair <string, string>("password", password));
                formDataList.Add(new KeyValuePair <string, string>("username", userName));
                break;
            }

            // List<T> already implements IEnumerable<T>; no AsEnumerable() needed.
            return formDataList;
        }
示例#2
0
        /// <summary>
        /// Returns an enumerator over a snapshot of the collection, so iteration
        /// is not affected by later modifications to the underlying store.
        /// </summary>
        /// <returns>An enumerator over the copied key/value pairs.</returns>
        public IEnumerator <KeyValuePair <string, IAttributeValue> > GetEnumerator()
        {
            // NOTE(review): the original line read "[email protected]", which appears to be
            // e-mail-style mangling of "this.Count" — confirm against the upstream source.
            var array = new KeyValuePair <string, IAttributeValue> [this.Count];

            this.CopyTo(array, 0);
            return(array.ToList().GetEnumerator());
        }
示例#3
0
        /// <summary>
        /// Builds the selectable resolution list: a "Default" sentinel entry first,
        /// then the window's available modes of at least 800x600, largest first.
        /// </summary>
        private IReadOnlyList <KeyValuePair <string, Size> > getResolutions()
        {
            // Sentinel entry; 9999x9999 stands in for "use the default resolution".
            var entries = new KeyValuePair <string, Size>("Default", new Size(9999, 9999)).Yield();

            if (game.Window != null)
            {
                var windowModes = game.Window.AvailableResolutions
                                  .Where(mode => mode.Width >= 800 && mode.Height >= 600)
                                  .OrderByDescending(mode => mode.Width)
                                  .ThenByDescending(mode => mode.Height)
                                  .Select(mode => new KeyValuePair <string, Size>($"{mode.Width}x{mode.Height}", new Size(mode.Width, mode.Height)))
                                  .Distinct();

                entries = entries.Concat(windowModes);
            }

            return entries.ToList();
        }
示例#4
0
        /// <summary>
        /// Splits the input set into a learning set (the first
        /// <paramref name="learningSetSizePercent"/> fraction of entries) and a
        /// test set (the remaining entries).
        /// </summary>
        /// <param name="xSet">The full data set to split.</param>
        /// <param name="learningSetSizePercent">Fraction (0..1) of entries assigned to the learning set.</param>
        /// <returns>A tuple of (learning set, test set).</returns>
        private (LearningSet, LearningSet) GetTestSet(LearningSet xSet, double learningSetSizePercent)
        {
            int learningSetSize = (int)(xSet.Count * learningSetSizePercent);

            // Materialize once instead of calling ToArray() separately for each partition.
            var source = xSet.ToArray();

            var newA = new KeyValuePair <double, List <double> > [xSet.Count - learningSetSize];
            Array.Copy(source, learningSetSize, newA, 0, xSet.Count - learningSetSize);
            var testSet = newA.ToList();

            var newB = new KeyValuePair <double, List <double> > [learningSetSize];
            Array.Copy(source, 0, newB, 0, learningSetSize);
            var learningSet = newB.ToList();

            return(learningSet, testSet);
        }
示例#5
0
        /// <summary>
        /// Tests the volume by examining the internal contents
        /// </summary>
        /// <param name="db">The local test database the remote contents are compared against</param>
        /// <param name="vol">The remote volume being examined</param>
        /// <param name="tf">The path to the downloaded copy of the file</param>
        /// <param name="options">Active options supplying the block hash algorithm and block size</param>
        /// <param name="log">Log writer used to report unexpected file types</param>
        /// <param name="sample_percent">A value between 0 and 1 that indicates how many blocks are tested in a dblock file</param>
        /// <returns>The volume name paired with the detected differences; the pair value is null for unexpected file types</returns>
        public static KeyValuePair <string, IEnumerable <KeyValuePair <TestEntryStatus, string> > > TestVolumeInternals(LocalTestDatabase db, IRemoteVolume vol, string tf, Options options, ILogWriter log, double sample_percent)
        {
            var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(options.BlockHashAlgorithm);

            if (blockhasher == null)
            {
                throw new Exception(Strings.Common.InvalidHashAlgorithm(options.BlockHashAlgorithm));
            }
            // The hasher is reused for every sampled block below, so it must support reuse.
            if (!blockhasher.CanReuseTransform)
            {
                throw new Exception(Strings.Common.InvalidCryptoSystem(options.BlockHashAlgorithm));
            }

            // HashSize is in bits; convert to bytes.
            var hashsize   = blockhasher.HashSize / 8;
            var parsedInfo = Volumes.VolumeBase.ParseFilename(vol.Name);

            // Clamp the sampling rate into [0.01, 1].
            sample_percent = Math.Min(1, Math.Max(sample_percent, 0.01));

            if (parsedInfo.FileType == RemoteVolumeType.Files)
            {
                //Compare with db and see if all files are accounted for
                // with correct file hashes and blocklist hashes
                using (var fl = db.CreateFilelist(vol.Name))
                {
                    using (var rd = new Volumes.FilesetVolumeReader(parsedInfo.CompressionModule, tf, options))
                        foreach (var f in rd.Files)
                        {
                            fl.Add(f.Path, f.Size, f.Hash, f.Metasize, f.Metahash, f.BlocklistHashes, f.Type, f.Time);
                        }

                    return(new KeyValuePair <string, IEnumerable <KeyValuePair <TestEntryStatus, string> > >(vol.Name, fl.Compare().ToList()));
                }
            }
            else if (parsedInfo.FileType == RemoteVolumeType.Index)
            {
                var blocklinks = new List <Tuple <string, string, long> >();
                IEnumerable <KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string> > combined = new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string> [0];

                //Compare with db and see that all hashes and volumes are listed
                using (var rd = new Volumes.IndexVolumeReader(parsedInfo.CompressionModule, tf, options, hashsize))
                    foreach (var v in rd.Volumes)
                    {
                        blocklinks.Add(new Tuple <string, string, long>(v.Filename, v.Hash, v.Length));
                        using (var bl = db.CreateBlocklist(v.Filename))
                        {
                            foreach (var h in v.Blocks)
                            {
                                bl.AddBlock(h.Key, h.Value);
                            }

                            // Accumulate per-volume differences across all referenced volumes.
                            combined = combined.Union(bl.Compare().ToArray());
                        }
                    }

                using (var il = db.CreateIndexlist(vol.Name))
                {
                    foreach (var t in blocklinks)
                    {
                        il.AddBlockLink(t.Item1, t.Item2, t.Item3);
                    }

                    // Materialize before 'il' is disposed, since Compare() may be lazy.
                    combined = combined.Union(il.Compare()).ToList();
                }

                return(new KeyValuePair <string, IEnumerable <KeyValuePair <TestEntryStatus, string> > >(vol.Name, combined.ToList()));
            }
            else if (parsedInfo.FileType == RemoteVolumeType.Blocks)
            {
                using (var bl = db.CreateBlocklist(vol.Name))
                    using (var rd = new Volumes.BlockVolumeReader(parsedInfo.CompressionModule, tf, options))
                    {
                        //Verify that all blocks are in the file
                        foreach (var b in rd.Blocks)
                        {
                            bl.AddBlock(b.Key, b.Value);
                        }

                        //Select random blocks and verify their hashes match the filename and size
                        var hashsamples = new List <KeyValuePair <string, long> >(rd.Blocks);
                        // At most sample_percent of the blocks, and never the full set.
                        var sampleCount = Math.Min(Math.Max(0, (int)(hashsamples.Count * sample_percent)), hashsamples.Count - 1);
                        var rnd         = new Random();

                        // Randomly discard entries until only the sample remains.
                        while (hashsamples.Count > sampleCount)
                        {
                            hashsamples.RemoveAt(rnd.Next(hashsamples.Count));
                        }

                        var blockbuffer = new byte[options.Blocksize];
                        var changes     = new List <KeyValuePair <Library.Interface.TestEntryStatus, string> >();
                        foreach (var s in hashsamples)
                        {
                            var size = rd.ReadBlock(s.Key, blockbuffer);
                            if (size != s.Value)
                            {
                                // Size mismatch: recorded length differs from actual block length.
                                changes.Add(new KeyValuePair <Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                            }
                            else
                            {
                                // The block key is its own base64 hash; recompute and compare.
                                var hash = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                                if (hash != s.Key)
                                {
                                    changes.Add(new KeyValuePair <Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                                }
                            }
                        }

                        return(new KeyValuePair <string, IEnumerable <KeyValuePair <TestEntryStatus, string> > >(vol.Name, changes.Union(bl.Compare().ToList())));
                    }
            }

            log.AddWarning(string.Format("Unexpected file type {0} for {1}", parsedInfo.FileType, vol.Name), null);
            return(new KeyValuePair <string, IEnumerable <KeyValuePair <TestEntryStatus, string> > >(vol.Name, null));
        }
示例#6
0
        /// <summary>
        /// Tests the volume by examining the internal contents
        /// </summary>
        /// <param name="db">The local test database the remote contents are compared against</param>
        /// <param name="vol">The remote volume being examined</param>
        /// <param name="tf">The path to the downloaded copy of the file</param>
        /// <param name="options">Active options supplying the block hash algorithm and block size</param>
        /// <param name="log">Log writer used to report unexpected file types</param>
        /// <param name="sample_percent">A value between 0 and 1 that indicates how many blocks are tested in a dblock file</param>
        /// <returns>The volume name paired with the detected differences; the pair value is null for unexpected file types</returns>
        public static KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>> TestVolumeInternals(LocalTestDatabase db, IRemoteVolume vol, string tf, Options options, ILogWriter log, double sample_percent = 0.2)
        {
            var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(options.BlockHashAlgorithm);
 
            if (blockhasher == null)
                throw new Exception(Strings.Foresthash.InvalidHashAlgorithm(options.BlockHashAlgorithm));
            // The hasher is reused for every sampled block below, so it must support reuse.
            if (!blockhasher.CanReuseTransform)
                throw new Exception(Strings.Foresthash.InvalidCryptoSystem(options.BlockHashAlgorithm));
                
            // HashSize is in bits; convert to bytes.
            var hashsize = blockhasher.HashSize / 8;
            var parsedInfo = Volumes.VolumeBase.ParseFilename(vol.Name);
            // Clamp the sampling rate into [0.01, 1].
            sample_percent = Math.Min(1, Math.Max(sample_percent, 0.01));

            if (parsedInfo.FileType == RemoteVolumeType.Files)
            {
                //Compare with db and see if all files are accounted for 
                // with correct file hashes and blocklist hashes
                using(var fl = db.CreateFilelist(vol.Name))
                {
                    using(var rd = new Volumes.FilesetVolumeReader(parsedInfo.CompressionModule, tf, options))
                        foreach(var f in rd.Files)
                            fl.Add(f.Path, f.Size, f.Hash, f.Metasize, f.Metahash, f.BlocklistHashes, f.Type, f.Time);
                                    
                    return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, fl.Compare().ToList());
                }       
            }
            else if (parsedInfo.FileType == RemoteVolumeType.Index)
            {
                var blocklinks = new List<Tuple<string, string, long>>();
                IEnumerable<KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>> combined = new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0]; 
                                
                //Compare with db and see that all hashes and volumes are listed
                using(var rd = new Volumes.IndexVolumeReader(parsedInfo.CompressionModule, tf, options, hashsize))
                    foreach(var v in rd.Volumes)
                    {
                        blocklinks.Add(new Tuple<string, string, long>(v.Filename, v.Hash, v.Length));
                        using(var bl = db.CreateBlocklist(v.Filename))
                        {
                            foreach(var h in v.Blocks)
                                bl.AddBlock(h.Key, h.Value);
                                                
                            // Accumulate per-volume differences across all referenced volumes.
                            combined = combined.Union(bl.Compare().ToArray());
                        }
                    }
                                
                using(var il = db.CreateIndexlist(vol.Name))
                {
                    foreach(var t in blocklinks)
                        il.AddBlockLink(t.Item1, t.Item2, t.Item3);
                                        
                    // Materialize before 'il' is disposed, since Compare() may be lazy.
                    combined = combined.Union(il.Compare()).ToList();
                }
                                
                return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, combined.ToList());
            }
            else if (parsedInfo.FileType == RemoteVolumeType.Blocks)
            {
                using(var bl = db.CreateBlocklist(vol.Name))
                using(var rd = new Volumes.BlockVolumeReader(parsedInfo.CompressionModule, tf, options))
                {                                    
                    //Verify that all blocks are in the file
                    foreach(var b in rd.Blocks)
                        bl.AddBlock(b.Key, b.Value);
    
                    //Select random blocks and verify their hashes match the filename and size
                    var hashsamples = new List<KeyValuePair<string, long>>(rd.Blocks);
                    // At most sample_percent of the blocks, and never the full set.
                    var sampleCount = Math.Min(Math.Max(0, (int)(hashsamples.Count * sample_percent)), hashsamples.Count - 1);
                    var rnd = new Random();
                                     
                    // Randomly discard entries until only the sample remains.
                    while (hashsamples.Count > sampleCount)
                        hashsamples.RemoveAt(rnd.Next(hashsamples.Count));
    
                    var blockbuffer = new byte[options.Blocksize];
                    var changes = new List<KeyValuePair<Library.Interface.TestEntryStatus, string>>();
                    foreach(var s in hashsamples)
                    {
                        var size = rd.ReadBlock(s.Key, blockbuffer);
                        if (size != s.Value)
                            // Size mismatch: recorded length differs from actual block length.
                            changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                        else
                        {
                            // The block key is its own base64 hash; recompute and compare.
                            var hash = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                            if (hash != s.Key)
                                changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                        }
                    }
                                    
                    return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, changes.Union(bl.Compare().ToList()));
                }                                
            }

            log.AddWarning(string.Format("Unexpected file type {0} for {1}", parsedInfo.FileType, vol.Name), null);
            return new KeyValuePair<string, IEnumerable<KeyValuePair<TestEntryStatus, string>>>(vol.Name, null);
        }
示例#7
0
        /// <summary>
        /// Verifies a sample of remote volumes. With full remote verification each
        /// downloaded volume's contents are compared against the local database;
        /// otherwise only the remote file hash/size is checked. Per-volume results
        /// and errors are reported through m_results.
        /// </summary>
        /// <param name="samples">Number of volumes to select for testing</param>
        /// <param name="db">Local test database used to select targets and record verifications</param>
        /// <param name="backend">Backend manager used to download or probe the remote files</param>
        public void DoRun(long samples, LocalTestDatabase db, BackendManager backend)
        {
            var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(m_options.BlockHashAlgorithm);

            if (blockhasher == null)
            {
                throw new Exception(string.Format(Strings.Foresthash.InvalidHashAlgorithm, m_options.BlockHashAlgorithm));
            }
            // The hasher is reused for every sampled block below, so it must support reuse.
            if (!blockhasher.CanReuseTransform)
            {
                throw new Exception(string.Format(Strings.Foresthash.InvalidCryptoSystem, m_options.BlockHashAlgorithm));
            }

            // HashSize is in bits; convert to bytes.
            var hashsize = blockhasher.HashSize / 8;
            var files    = db.SelectTestTargets(samples, m_options).ToList();

            if (m_options.FullRemoteVerification)
            {
                foreach (var vol in new AsyncDownloader(files, backend))
                {
                    var parsedInfo = Volumes.VolumeBase.ParseFilename(vol.Name);
                    try
                    {
                        // Honor a pending stop request before processing the next volume.
                        if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                        {
                            backend.WaitForComplete(db, null);
                            return;
                        }

                        using (var tf = vol.TempFile)
                        {
                            if (parsedInfo.FileType == RemoteVolumeType.Files)
                            {
                                //Compare with db and see if all files are accounted for
                                // with correct file hashes and blocklist hashes
                                using (var fl = db.CreateFilelist(vol.Name))
                                {
                                    using (var rd = new Volumes.FilesetVolumeReader(parsedInfo.CompressionModule, tf, m_options))
                                        foreach (var f in rd.Files)
                                        {
                                            fl.Add(f.Path, f.Size, f.Hash, f.Metasize, f.Metahash, f.BlocklistHashes, f.Type, f.Time);
                                        }

                                    m_results.AddResult(vol.Name, fl.Compare().ToList());
                                }
                            }
                            else if (parsedInfo.FileType == RemoteVolumeType.Index)
                            {
                                var blocklinks = new List <Tuple <string, string, long> >();
                                IEnumerable <KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string> > combined = new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string> [0];

                                //Compare with db and see that all hashes and volumes are listed
                                using (var rd = new Volumes.IndexVolumeReader(parsedInfo.CompressionModule, tf, m_options, hashsize))
                                    foreach (var v in rd.Volumes)
                                    {
                                        blocklinks.Add(new Tuple <string, string, long>(v.Filename, v.Hash, v.Length));
                                        using (var bl = db.CreateBlocklist(v.Filename))
                                        {
                                            foreach (var h in v.Blocks)
                                            {
                                                bl.AddBlock(h.Key, h.Value);
                                            }

                                            // Accumulate per-volume differences across all referenced volumes.
                                            combined = combined.Union(bl.Compare().ToArray());
                                        }
                                    }

                                using (var il = db.CreateIndexlist(vol.Name))
                                {
                                    foreach (var t in blocklinks)
                                    {
                                        il.AddBlockLink(t.Item1, t.Item2, t.Item3);
                                    }

                                    // Materialize before 'il' is disposed, since Compare() may be lazy.
                                    combined = combined.Union(il.Compare()).ToList();
                                }

                                m_results.AddResult(vol.Name, combined.ToList());
                            }
                            else if (parsedInfo.FileType == RemoteVolumeType.Blocks)
                            {
                                using (var bl = db.CreateBlocklist(vol.Name))
                                    using (var rd = new Volumes.BlockVolumeReader(parsedInfo.CompressionModule, tf, m_options))
                                    {
                                        //Verify that all blocks are in the file
                                        foreach (var b in rd.Blocks)
                                        {
                                            bl.AddBlock(b.Key, b.Value);
                                        }

                                        //Select 20% random blocks and verify their hashes match the filename and size
                                        var hashsamples = new List <KeyValuePair <string, long> >(rd.Blocks);
                                        // At most 20% of the blocks, and never the full set.
                                        var sampleCount = Math.Min(Math.Max(0, (int)(hashsamples.Count * 0.2)), hashsamples.Count - 1);
                                        var rnd         = new Random();

                                        // Randomly discard entries until only the sample remains.
                                        while (hashsamples.Count > sampleCount)
                                        {
                                            hashsamples.RemoveAt(rnd.Next(hashsamples.Count));
                                        }

                                        var blockbuffer = new byte[m_options.Blocksize];
                                        var changes     = new List <KeyValuePair <Library.Interface.TestEntryStatus, string> >();
                                        foreach (var s in hashsamples)
                                        {
                                            var size = rd.ReadBlock(s.Key, blockbuffer);
                                            if (size != s.Value)
                                            {
                                                // Size mismatch: recorded length differs from actual block length.
                                                changes.Add(new KeyValuePair <Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                                            }
                                            else
                                            {
                                                // The block key is its own base64 hash; recompute and compare.
                                                var hash = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                                                if (hash != s.Key)
                                                {
                                                    changes.Add(new KeyValuePair <Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                                                }
                                            }
                                        }

                                        m_results.AddResult(vol.Name, changes.Union(bl.Compare().ToList()));
                                    }
                            }
                        }

                        db.UpdateVerificationCount(vol.Name);
                    }
                    catch (Exception ex)
                    {
                        // Record the failure as a test result so the volume is reported, then keep going.
                        m_results.AddResult(vol.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                        m_results.AddError(string.Format("Failed to process file {0}", vol.Name), ex);
                        if (ex is System.Threading.ThreadAbortException)
                        {
                            throw;
                        }
                    }
                }
            }
            else
            {
                // Light-weight mode: only verify remote size/hash, without downloading contents.
                foreach (var f in files)
                {
                    try
                    {
                        if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                        {
                            return;
                        }

                        backend.GetForTesting(f.Name, f.Size, f.Hash);
                        db.UpdateVerificationCount(f.Name);
                        // Empty change list signals a successful verification.
                        m_results.AddResult(f.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string> [0]);
                    }
                    catch (Exception ex)
                    {
                        m_results.AddResult(f.Name, new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair <Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                        m_results.AddError(string.Format("Failed to process file {0}", f.Name), ex);
                        if (ex is System.Threading.ThreadAbortException)
                        {
                            throw;
                        }
                    }
                }
            }
        }
示例#8
0
    /// <summary>
    /// Returns one result entry per vertex for a shortest-path query from
    /// source vertex <paramref name="s"/>.
    /// NOTE(review): the current implementation ignores <paramref name="s"/> and
    /// <paramref name="edges"/> and yields a default (0, 0) pair for every vertex —
    /// this looks like a stub; confirm the intended behaviour before relying on it.
    /// </summary>
    public static List <KeyValuePair <int, int> > GetShortestPathes(int s, IEnumerable <Edge> edges, int vertices)
    {
        // Equivalent to allocating a KeyValuePair<int,int>[vertices] and listing it.
        return Enumerable.Repeat(default(KeyValuePair <int, int>), vertices).ToList();
    }
示例#9
0
        /// <summary>
        /// Verifies a sample of remote volumes. With full remote verification each
        /// downloaded volume's contents are compared against the local database;
        /// otherwise only the remote file hash/size is checked. Per-volume results
        /// and errors are reported through m_results.
        /// </summary>
        /// <param name="samples">Number of volumes to select for testing</param>
        /// <param name="db">Local test database used to select targets and record verifications</param>
        /// <param name="backend">Backend manager used to download or probe the remote files</param>
        public void DoRun(long samples, LocalTestDatabase db, BackendManager backend)
        {
            var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(m_options.BlockHashAlgorithm);

            if (blockhasher == null)
                throw new Exception(string.Format(Strings.Foresthash.InvalidHashAlgorithm, m_options.BlockHashAlgorithm));
            // The hasher is reused for every sampled block below, so it must support reuse.
            if (!blockhasher.CanReuseTransform)
                throw new Exception(string.Format(Strings.Foresthash.InvalidCryptoSystem, m_options.BlockHashAlgorithm));

            // HashSize is in bits; convert to bytes.
            var hashsize = blockhasher.HashSize / 8;
            var files = db.SelectTestTargets(samples, m_options).ToList();

            if (m_options.FullRemoteVerification)
            {
                foreach(var vol in new AsyncDownloader(files, backend))
                {
                    var parsedInfo = Volumes.VolumeBase.ParseFilename(vol.Name);
                    try
                    {
                        // Honor a pending stop request before processing the next volume.
                        if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                        {
                            backend.WaitForComplete(db, null);
                            return;
                        }

                        using(var tf = vol.TempFile)
                        {
                            if (parsedInfo.FileType == RemoteVolumeType.Files)
                            {
                                //Compare with db and see if all files are accounted for
                                // with correct file hashes and blocklist hashes
                                using(var fl = db.CreateFilelist(vol.Name))
                                {
                                    using(var rd = new Volumes.FilesetVolumeReader(parsedInfo.CompressionModule, tf, m_options))
                                        foreach(var f in rd.Files)
                                            fl.Add(f.Path, f.Size, f.Hash, f.Metasize, f.Metahash, f.BlocklistHashes, f.Type, f.Time);

                                    m_results.AddResult(vol.Name, fl.Compare().ToList());
                                }
                            }
                            else if (parsedInfo.FileType == RemoteVolumeType.Index)
                            {
                                var blocklinks = new List<Tuple<string, string, long>>();
                                IEnumerable<KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>> combined = new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0];

                                //Compare with db and see that all hashes and volumes are listed
                                using(var rd = new Volumes.IndexVolumeReader(parsedInfo.CompressionModule, tf, m_options, hashsize))
                                    foreach(var v in rd.Volumes)
                                    {
                                        blocklinks.Add(new Tuple<string, string, long>(v.Filename, v.Hash, v.Length));
                                        using(var bl = db.CreateBlocklist(v.Filename))
                                        {
                                            foreach(var h in v.Blocks)
                                                bl.AddBlock(h.Key, h.Value);

                                            // Accumulate per-volume differences across all referenced volumes.
                                            combined = combined.Union(bl.Compare().ToArray());
                                        }
                                    }

                                using(var il = db.CreateIndexlist(vol.Name))
                                {
                                    foreach(var t in blocklinks)
                                        il.AddBlockLink(t.Item1, t.Item2, t.Item3);

                                    // Materialize before 'il' is disposed, since Compare() may be lazy.
                                    combined = combined.Union(il.Compare()).ToList();
                                }

                                m_results.AddResult(vol.Name, combined.ToList());
                            }
                            else if (parsedInfo.FileType == RemoteVolumeType.Blocks)
                            {
                                using(var bl = db.CreateBlocklist(vol.Name))
                                using(var rd = new Volumes.BlockVolumeReader(parsedInfo.CompressionModule, tf, m_options))
                                {
                                    //Verify that all blocks are in the file
                                    foreach(var b in rd.Blocks)
                                        bl.AddBlock(b.Key, b.Value);

                                    //Select 20% random blocks and verify their hashes match the filename and size
                                    var hashsamples = new List<KeyValuePair<string, long>>(rd.Blocks);
                                    // At most 20% of the blocks, and never the full set.
                                    var sampleCount = Math.Min(Math.Max(0, (int)(hashsamples.Count * 0.2)), hashsamples.Count - 1);
                                    var rnd = new Random();

                                    // Randomly discard entries until only the sample remains.
                                    while (hashsamples.Count > sampleCount)
                                        hashsamples.RemoveAt(rnd.Next(hashsamples.Count));

                                    var blockbuffer = new byte[m_options.Blocksize];
                                    var changes = new List<KeyValuePair<Library.Interface.TestEntryStatus, string>>();
                                    foreach(var s in hashsamples)
                                    {
                                        var size = rd.ReadBlock(s.Key, blockbuffer);
                                        if (size != s.Value)
                                            // Size mismatch: recorded length differs from actual block length.
                                            changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                                        else
                                        {
                                            // The block key is its own base64 hash; recompute and compare.
                                            var hash = Convert.ToBase64String(blockhasher.ComputeHash(blockbuffer, 0, size));
                                            if (hash != s.Key)
                                                changes.Add(new KeyValuePair<Library.Interface.TestEntryStatus, string>(Library.Interface.TestEntryStatus.Modified, s.Key));
                                        }
                                    }

                                    m_results.AddResult(vol.Name, changes.Union(bl.Compare().ToList()));
                                }

                            }
                        }

                        db.UpdateVerificationCount(vol.Name);
                    }
                    catch (Exception ex)
                    {
                        // Record the failure as a test result so the volume is reported, then keep going.
                        m_results.AddResult(vol.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                        m_results.AddError(string.Format("Failed to process file {0}", vol.Name), ex);
                        if (ex is System.Threading.ThreadAbortException)
                            throw;
                    }
                }
            }
            else
            {
                // Light-weight mode: only verify remote size/hash, without downloading contents.
                foreach(var f in files)
                {
                    try
                    {
                        if (m_results.TaskControlRendevouz() == TaskControlState.Stop)
                            return;

                        backend.GetForTesting(f.Name, f.Size, f.Hash);
                        db.UpdateVerificationCount(f.Name);
                        // Empty change list signals a successful verification.
                        m_results.AddResult(f.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[0]);
                    }
                    catch (Exception ex)
                    {
                        m_results.AddResult(f.Name, new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>[] { new KeyValuePair<Duplicati.Library.Interface.TestEntryStatus, string>(Duplicati.Library.Interface.TestEntryStatus.Error, ex.Message) });
                        m_results.AddError(string.Format("Failed to process file {0}", f.Name), ex);
                        if (ex is System.Threading.ThreadAbortException)
                            throw;
                    }
                }
            }
        }
        public void ToOtlpResourceMetricsTest(bool includeServiceNameInResource)
        {
            // Exporter is wired to a no-op client: nothing leaves the process; we only
            // inspect the OTLP request assembled inside VerifyExportedBatch below.
            using var exporter = new OtlpMetricsExporter(
                      new OtlpExporterOptions(),
                      new NoopMetricsServiceClient());

            var resourceBuilder = ResourceBuilder.CreateEmpty();

            if (includeServiceNameInResource)
            {
                resourceBuilder.AddAttributes(
                    new List<KeyValuePair<string, object>>
                    {
                        new KeyValuePair<string, object>(ResourceSemanticConventions.AttributeServiceName, "service-name"),
                        new KeyValuePair<string, object>(ResourceSemanticConventions.AttributeServiceNamespace, "ns1"),
                    });
            }

            var tags = new KeyValuePair<string, object>[]
            {
                new KeyValuePair<string, object>("key1", "value1"),
                new KeyValuePair<string, object>("key2", "value2"),
            };

            var pullProcessor = new PullMetricProcessor(new TestExporter<MetricItem>(VerifyExportedBatch), true);

            using var provider = Sdk.CreateMeterProviderBuilder()
                                 .SetResourceBuilder(resourceBuilder)
                                 .AddSource("TestMeter")
                                 .AddMetricProcessor(pullProcessor)
                                 .Build();

            exporter.ParentProvider = provider;

            using var meter = new Meter("TestMeter", "0.0.1");

            var counterInstrument = meter.CreateCounter<int>("counter");
            counterInstrument.Add(100, tags);

            var verified = false;

            // Drives the TestExporter, which calls back into VerifyExportedBatch.
            pullProcessor.PullRequest();

            Assert.True(verified);

            void VerifyExportedBatch(Batch<MetricItem> batch)
            {
                var request = new OtlpCollector.ExportMetricsServiceRequest();

                request.AddBatch(exporter.ProcessResource, batch);

                Assert.Single(request.ResourceMetrics);
                var resourceMetrics = request.ResourceMetrics.First();
                var otlpResource = resourceMetrics.Resource;

                if (includeServiceNameInResource)
                {
                    Assert.Contains(otlpResource.Attributes, (kvp) => kvp.Key == ResourceSemanticConventions.AttributeServiceName && kvp.Value.StringValue == "service-name");
                    Assert.Contains(otlpResource.Attributes, (kvp) => kvp.Key == ResourceSemanticConventions.AttributeServiceNamespace && kvp.Value.StringValue == "ns1");
                }
                else
                {
                    // Without an explicit name the SDK synthesizes "unknown_service:<process>".
                    Assert.Contains(otlpResource.Attributes, (kvp) => kvp.Key == ResourceSemanticConventions.AttributeServiceName && kvp.Value.ToString().Contains("unknown_service:"));
                }

                Assert.Single(resourceMetrics.InstrumentationLibraryMetrics);
                var libraryMetrics = resourceMetrics.InstrumentationLibraryMetrics.First();

                Assert.Equal(string.Empty, libraryMetrics.SchemaUrl);
                Assert.Equal("TestMeter", libraryMetrics.InstrumentationLibrary.Name);
                Assert.Equal("0.0.1", libraryMetrics.InstrumentationLibrary.Version);

                Assert.Single(libraryMetrics.Metrics);

                foreach (var metric in libraryMetrics.Metrics)
                {
                    Assert.Equal(string.Empty, metric.Description);
                    Assert.Equal(string.Empty, metric.Unit);
                    Assert.Equal("counter", metric.Name);

                    // Counter => monotonic Sum with delta temporality.
                    Assert.Equal(OtlpMetrics.Metric.DataOneofCase.Sum, metric.DataCase);
                    Assert.True(metric.Sum.IsMonotonic);
                    Assert.Equal(OtlpMetrics.AggregationTemporality.Delta, metric.Sum.AggregationTemporality);

                    Assert.Single(metric.Sum.DataPoints);
                    var point = metric.Sum.DataPoints.First();
                    Assert.True(point.StartTimeUnixNano > 0);
                    Assert.True(point.TimeUnixNano > 0);
                    Assert.Equal(OtlpMetrics.NumberDataPoint.ValueOneofCase.AsInt, point.ValueCase);
                    Assert.Equal(100, point.AsInt);

#pragma warning disable CS0612 // Type or member is obsolete
                    Assert.Empty(point.Labels);
#pragma warning restore CS0612 // Type or member is obsolete
                    OtlpTestHelpers.AssertOtlpAttributes(tags.ToList(), point.Attributes);

                    Assert.Empty(point.Exemplars);
                }

                verified = true;
            }
        }
        public void ToOtlpResourceMetricsTest(bool includeServiceNameInResource)
        {
            var resourceBuilder = ResourceBuilder.CreateEmpty();

            if (includeServiceNameInResource)
            {
                resourceBuilder.AddAttributes(
                    new List<KeyValuePair<string, object>>
                    {
                        new KeyValuePair<string, object>(ResourceSemanticConventions.AttributeServiceName, "service-name"),
                        new KeyValuePair<string, object>(ResourceSemanticConventions.AttributeServiceNamespace, "ns1"),
                    });
            }

            var tags = new KeyValuePair<string, object>[]
            {
                new KeyValuePair<string, object>("key1", "value1"),
                new KeyValuePair<string, object>("key2", "value2"),
            };

            // Meter name is unique per test case so concurrent runs cannot cross-talk.
            using var meter = new Meter($"{Utils.GetCurrentMethodName()}.{includeServiceNameInResource}", "0.0.1");

            var exportedItems = new List<Metric>();

            // In-memory exporter captures the flushed metrics for inspection below.
            using var provider = Sdk.CreateMeterProviderBuilder()
                                 .SetResourceBuilder(resourceBuilder)
                                 .AddMeter(meter.Name)
                                 .AddReader(new BaseExportingMetricReader(new InMemoryExporter<Metric>(exportedItems))
                                 {
                                     PreferredAggregationTemporality = AggregationTemporality.Delta,
                                 })
                                 .Build();

            var counterInstrument = meter.CreateCounter<int>("counter");
            counterInstrument.Add(100, tags);

            var verified = false;

            provider.ForceFlush();

            VerifyExportedMetrics(new Batch<Metric>(exportedItems.ToArray(), exportedItems.Count));

            Assert.True(verified);

            void VerifyExportedMetrics(Batch<Metric> metrics)
            {
                var request = new OtlpCollector.ExportMetricsServiceRequest();

                request.AddMetrics(resourceBuilder.Build().ToOtlpResource(), metrics);

                Assert.Single(request.ResourceMetrics);
                var resourceMetrics = request.ResourceMetrics.First();
                var otlpResource = resourceMetrics.Resource;

                if (includeServiceNameInResource)
                {
                    Assert.Contains(otlpResource.Attributes, (kvp) => kvp.Key == ResourceSemanticConventions.AttributeServiceName && kvp.Value.StringValue == "service-name");
                    Assert.Contains(otlpResource.Attributes, (kvp) => kvp.Key == ResourceSemanticConventions.AttributeServiceNamespace && kvp.Value.StringValue == "ns1");
                }
                else
                {
                    // Without an explicit name the SDK synthesizes "unknown_service:<process>".
                    Assert.Contains(otlpResource.Attributes, (kvp) => kvp.Key == ResourceSemanticConventions.AttributeServiceName && kvp.Value.ToString().Contains("unknown_service:"));
                }

                Assert.Single(resourceMetrics.InstrumentationLibraryMetrics);
                var libraryMetrics = resourceMetrics.InstrumentationLibraryMetrics.First();

                Assert.Equal(string.Empty, libraryMetrics.SchemaUrl);
                Assert.Equal(meter.Name, libraryMetrics.InstrumentationLibrary.Name);
                Assert.Equal("0.0.1", libraryMetrics.InstrumentationLibrary.Version);

                Assert.Single(libraryMetrics.Metrics);

                foreach (var metric in libraryMetrics.Metrics)
                {
                    Assert.Equal(string.Empty, metric.Description);
                    Assert.Equal(string.Empty, metric.Unit);
                    Assert.Equal("counter", metric.Name);

                    // Counter => monotonic Sum with delta temporality (set on the reader above).
                    Assert.Equal(OtlpMetrics.Metric.DataOneofCase.Sum, metric.DataCase);
                    Assert.True(metric.Sum.IsMonotonic);
                    Assert.Equal(OtlpMetrics.AggregationTemporality.Delta, metric.Sum.AggregationTemporality);

                    Assert.Single(metric.Sum.DataPoints);
                    var point = metric.Sum.DataPoints.First();
                    Assert.True(point.StartTimeUnixNano > 0);
                    Assert.True(point.TimeUnixNano > 0);
                    Assert.Equal(OtlpMetrics.NumberDataPoint.ValueOneofCase.AsInt, point.ValueCase);
                    Assert.Equal(100, point.AsInt);

#pragma warning disable CS0612 // Type or member is obsolete
                    Assert.Empty(point.Labels);
#pragma warning restore CS0612 // Type or member is obsolete
                    OtlpTestHelpers.AssertOtlpAttributes(tags.ToList(), point.Attributes);

                    Assert.Empty(point.Exemplars);
                }

                verified = true;
            }
        }