private static NtResult<IEnumerable<SidName>> LookupSids2(string system_name, Sid[] sids, LsaLookupOptions options, bool throw_on_error)
{
    // Resolve a batch of SIDs to account names via the native LsaLookupSids2 API
    // against the policy object on system_name. Errors are surfaced per the
    // throw_on_error convention used throughout this class.
    using (var policy = SafeLsaHandle.OpenPolicy(system_name, Policy.LsaPolicyAccessRights.LookupNames, throw_on_error))
    {
        if (!policy.IsSuccess)
        {
            // Propagate the policy-open failure, re-typed to this method's result type.
            return (policy.Cast<IEnumerable<SidName>>());
        }
        using (var list = new DisposableList())
        {
            // Marshal each SID into native memory; the DisposableList owns the buffers
            // so they stay alive for the duration of the native call.
            var sid_ptrs = sids.Select(s => list.AddSid(s).DangerousGetHandle()).ToArray();
            var status = SecurityNativeMethods.LsaLookupSids2(policy.Result, options, sid_ptrs.Length, sid_ptrs,
                out SafeLsaMemoryBuffer domains, out SafeLsaMemoryBuffer names);
            if (!status.IsSuccess())
            {
                if (status == NtStatus.STATUS_NONE_MAPPED)
                {
                    // STATUS_NONE_MAPPED still allocates the output buffers; hand them to
                    // the DisposableList so they are freed on scope exit.
                    list.Add(domains);
                    list.Add(names);
                }
                return (status.CreateResultFromError<IEnumerable<SidName>>(throw_on_error));
            }
            return (GetSidNames(sids, domains, names).CreateResult());
        }
    }
}
public static DisposableList<IDisposable> RegisterResponders(
    this ISigningTestServer testServer,
    CertificateAuthority ca,
    bool addCa = true,
    bool addOcsp = true)
{
    // Walks the CA chain from the supplied authority up through its parents,
    // registering the requested responder kinds for each authority on the way.
    // The returned list owns every registration and disposes them together.
    var registrations = new DisposableList<IDisposable>();

    for (var authority = ca; authority != null; authority = authority.Parent)
    {
        if (addCa)
        {
            registrations.Add(testServer.RegisterResponder(authority));
        }

        if (addOcsp)
        {
            registrations.Add(testServer.RegisterResponder(authority.OcspResponder));
        }
    }

    return registrations;
}
public void TestQueueBig()
{
    // Round-trips a large (10,000 document) message through the gateway queue,
    // forcing the payload through real blob-container overflow storage, and
    // verifies every document survives intact.
    using (var cleanup = new DisposableList())
    {
        var sourceDocs = TestHelpers.CreateStarDocuments(10000);
        var message = DocdbGatewayMessage.Create("Star", sourceDocs);

        var queue = new QueueWrapper(TestHelpers.GenUnique("gateq"), TestConfig.GatewayStorageAccount);
        var container = new BlobContainerWrapper(TestHelpers.GenUnique("gatecont"), TestConfig.GatewayStorageAccount);
        cleanup.Add(queue.Delete);
        cleanup.Add(container.Delete);

        var blobStore = new GatewayBlobStore(container);

        var writer = new GatewayQueueWriter(queue, blobStore);
        writer.Write(message);

        var reader = new GatewayQueueReader(queue, blobStore);
        reader.ReadNextMessage<DocdbGatewayMessage>(
            received =>
            {
                var roundTripped = received.DocumentsAs<StarDocument>().ToList();
                Assert.AreEqual(sourceDocs.Count, roundTripped.Count);
                for (int i = 0; i < roundTripped.Count; ++i)
                {
                    TestHelpers.AssertEqualStars(sourceDocs[i], roundTripped[i]);
                }
            },
            (e, gm, cqm) => Assert.Fail());
    }
}
private async Task<CertificateAuthority> CreateDefaultTrustedCertificateAuthorityAsync()
{
    // Builds a two-level CA hierarchy, installs the root certificate into the
    // trusted store, and registers CA + OCSP responders for every authority in
    // the chain. Returns the intermediate CA for issuing test certificates.
    var testServer = await _testServer.Value;
    var rootCa = CertificateAuthority.Create(testServer.Url);
    var intermediateCa = rootCa.CreateIntermediateCertificateAuthority();
    var rootCertificate = new X509Certificate2(rootCa.Certificate.GetEncoded());
    StoreLocation storeLocation = CertificateStoreUtilities.GetTrustedCertificateStoreLocation();

    _trustedTimestampRoot = TrustedTestCert.Create(
        rootCertificate,
        StoreName.Root,
        storeLocation);

    // Register responders from the intermediate up to (and including) the root.
    for (var authority = intermediateCa; authority != null; authority = authority.Parent)
    {
        _responders.Add(testServer.RegisterResponder(authority));
        _responders.Add(testServer.RegisterResponder(authority.OcspResponder));
    }

    return intermediateCa;
}
public async Task<CertificateWithCustomIntermediatesResult> GetRevokedSigningCertificateAsync(DateTimeOffset revocationDate, DateTimeOffset crlUpdateTime)
{
    // Issues a signing certificate from a fresh intermediate CA, revokes it at
    // revocationDate, and returns the certificate plus its intermediate chain
    // and the registered responders (which the caller must dispose).
    var testServer = await GetTestServerAsync();
    var rootAuthority = await GetCertificateAuthority();
    var intermediateAuthority = rootAuthority.CreateIntermediateCertificateAuthority();
    var responders = new DisposableList<IDisposable>();

    // The intermediate's OCSP responder advertises crlUpdateTime as its ThisUpdate.
    var intermediateOcsp = OcspResponder.Create(intermediateAuthority, new OcspResponderOptions
    {
        ThisUpdate = crlUpdateTime,
    });

    responders.Add(testServer.RegisterResponder(intermediateAuthority));
    responders.Add(testServer.RegisterResponder(intermediateOcsp));

    void CustomizeAsSigningCertificate(X509V3CertificateGenerator generator)
    {
        generator.AddSigningEku();
        generator.AddAuthorityInfoAccess(intermediateAuthority, addOcsp: true, addCAIssuers: true);
    }

    var (publicCertificate, certificate) = IssueCertificate(intermediateAuthority, "Revoked Signing", CustomizeAsSigningCertificate);

    var rootCert = rootAuthority.Certificate.ToX509Certificate2();
    var intermediateCert = intermediateAuthority.Certificate.ToX509Certificate2();

    intermediateAuthority.Revoke(
        publicCertificate,
        reason: RevocationReason.Unspecified,
        revocationDate: revocationDate);

    return new CertificateWithCustomIntermediatesResult(
        certificate,
        new[] { rootCert, intermediateCert },
        responders);
}
private async Task<TimestampService> CreateDefaultTrustedTimestampServiceAsync()
{
    // Creates a timestamp service backed by the default trusted CA and exposes
    // it via the test server; the responder registration is tracked for disposal.
    var server = await _testServer.Value;
    var authority = await _defaultTrustedCertificateAuthority.Value;
    var service = TimestampService.Create(authority);

    _responders.Add(server.RegisterResponder(service));

    return service;
}
private Molecule AnalyzeMolecule(Point location)
{
    // Analyzes the molecule at 'location' in the sidebar by dragging it onto the
    // grid (where it renders at full size), running MoleculeAnalyzer over it, and
    // deleting it afterwards. Before/after screen captures are diffed so pixels
    // disturbed by this process are excluded from later sidebar comparisons.

    // Make sure the molecule is visible in the sidebar
    var screenLocation = m_sidebar.ScrollTo(m_palette, location);

    Molecule molecule;
    bool edgeChanged;
    using (var captures = new DisposableList<ScreenCapture>())
    {
        var sidebarCapture1 = captures.Add(new ScreenCapture(m_sidebar.Rect));

        // To analyze the molecule, drag it onto the grid. This will expand it to full size and make it much easier to analyze.
        sm_log.Info("Centering grid");
        m_grid.ScrollTo(new Vector2(0, 0));
        sm_log.Info("Dragging molecule onto grid");
        MouseUtils.LeftDrag(screenLocation, m_grid.GetScreenLocationForCell(new Vector2(0, 0)));

        molecule = new MoleculeAnalyzer(m_grid, m_type).Analyze();
        sm_log.Info("Analyzed molecule:" + Environment.NewLine + molecule.ToString());

        // Diff the sidebar before/after the drag; a changed bottom-edge pixel
        // suggests the molecule extends onto the next sidebar page.
        var sidebarCapture2 = captures.Add(new ScreenCapture(m_sidebar.Rect));
        edgeChanged = ExcludeChangedPixels(sidebarCapture1, sidebarCapture2);
        sm_log.Info("edgeChanged: " + edgeChanged);
    }

    using (var captures = new DisposableList<ScreenCapture>())
    {
        if (edgeChanged)
        {
            // A pixel on the bottom edge of the visible part of the sidebar changed, which means
            // the molecule probably extends onto the next page. So we scroll down and then exclude
            // any pixels that change there when we delete the molecule.
            m_sidebar.Area.ScrollBy(new Point(0, m_sidebar.Rect.Height));
            var sidebarCapture1 = captures.Add(new ScreenCapture(m_sidebar.Rect));

            // Delete the molecule from the grid
            KeyboardUtils.KeyPress(Keys.Z);

            var sidebarCapture2 = captures.Add(new ScreenCapture(m_sidebar.Rect));
            ExcludeChangedPixels(sidebarCapture1, sidebarCapture2);

            // Technically the molecule could overlap a third page but we'll ignore that for now.
        }
        else
        {
            // Delete the molecule from the grid
            KeyboardUtils.KeyPress(Keys.Z);
        }
    }

    return molecule;
}
private IEnumerable<NtObject> CreateDirectoriesAndObject()
{
    // Creates the object at the resolved path, first creating any missing
    // intermediate directories. Returns every object created (directories then
    // the final object); if anything fails, all objects created so far are
    // disposed before the exception propagates.
    if (Close)
    {
        throw new ArgumentException("Can't use CreateDirectories and Close at the same time");
    }
    DisposableList<NtObject> objects = new DisposableList<NtObject>();
    string[] path_parts = ResolvePath().Split(new char[] { '\\' }, StringSplitOptions.RemoveEmptyEntries);
    StringBuilder builder = new StringBuilder();
    bool finished = false;
    // With no Root object the path must be absolute, so start from the NT namespace root.
    if (Root == null)
    {
        builder.Append(@"\");
    }
    try
    {
        // Create each intermediate directory, extending the path one component at
        // a time. Creation failures are deliberately ignored (the directory may
        // already exist or be inaccessible); the final CreateObject call will fail
        // if the path is genuinely unusable.
        for (int i = 0; i < path_parts.Length - 1; ++i)
        {
            builder.Append(path_parts[i]);
            NtDirectory dir = null;
            try
            {
                dir = NtDirectory.Create(builder.ToString(), Root, DirectoryAccessRights.MaximumAllowed);
            }
            catch (NtException)
            {
            }
            if (dir != null)
            {
                objects.Add(dir);
            }
            builder.Append(@"\");
        }
        objects.Add((NtObject)CreateObject(ResolvePath(), AttributeFlags, Root, SecurityQualityOfService, SecurityDescriptor));
        finished = true;
    }
    finally
    {
        // Roll back on failure: dispose and drop everything created so far so
        // no handles leak. On success ownership transfers to the caller.
        if (!finished)
        {
            objects.Dispose();
            objects.Clear();
        }
    }
    return objects.ToArray();
}
private DisposableList<ScreenCapture> GetSidebarCaptures()
{
    // Scrolls through the sidebar, capturing it section by section, and returns
    // the captures (caller owns and disposes the list). Throws AnalysisException
    // if the bottom of the sidebar can't be found within a bounded number of scrolls.

    // Chop a bit off the bottom of the image because it starts to fade out there, making it more difficult to compare
    var captureRect = new Rectangle(SidebarRect.Left, SidebarRect.Top, SidebarRect.Width, SidebarRect.Height - 70);

    sm_log.Info("Capturing section 0 of the sidebar");
    var prevCapture = new ScreenCapture(captureRect);
    var captures = new DisposableList<ScreenCapture> { prevCapture.Clone() };
    try
    {
        int scrollDistance = CalculateScrollDistance();

        const int maxIterations = 100;
        int i = 0;
        for (; i < maxIterations; i++)
        {
            MouseUtils.RightDrag(SidebarRect.Location.Add(new Point(0, scrollDistance)), SidebarRect.Location);

            // Check if we've reached the bottom
            if (SidebarUtil.FindVisibleSidebarHeight() <= SidebarUtil.MaxHeight)
            {
                break;
            }

            // Capture the next bit of the sidebar
            prevCapture.Dispose();
            prevCapture = new ScreenCapture(captureRect);
            sm_log.Info(Invariant($"Capturing section {i + 1} of the sidebar"));
            captures.Add(prevCapture.Clone(new Rectangle(0, prevCapture.Rect.Height - scrollDistance, prevCapture.Rect.Width, scrollDistance)));
        }

        if (i >= maxIterations)
        {
            throw new AnalysisException(Invariant($"Couldn't find the bottom of the sidebar after {i} attempts."));
        }

        // Capture the final bit of the sidebar.
        // Fix: finalCapture was previously never disposed, leaking its bitmap.
        using (var finalCapture = new ScreenCapture(SidebarRect))
        {
            // Work out where it overlaps the previous part, and capture the overlap
            int overlap = BitmapComparer.CalculateVerticalOverlap(prevCapture.Bitmap, finalCapture.Bitmap, m_overlapComparer, 0);
            sm_log.Info("Capturing final section of the sidebar");
            captures.Add(finalCapture.Clone(new Rectangle(0, overlap, finalCapture.Rect.Width, finalCapture.Rect.Height - overlap)));
        }

        return captures;
    }
    catch
    {
        // Fix: don't leak the accumulated captures if anything above throws.
        captures.Dispose();
        throw;
    }
    finally
    {
        // Fix: prevCapture is now released on all paths, not just on success.
        prevCapture.Dispose();
    }
}
public void BulkOfEntitiesConsistent()
{
    // End-to-end bulk-sync test: 100 entities written to the source table must
    // flow through the gateway queue and be replicated, intact, to the target
    // table on the secondary storage account.
    using (var dl = new DisposableList())
    {
        // create the source table
        var sourceTableWrapper = new TableWrapper(TestHelpers.GenUnique(TestConfig.TableName), TestConfig.PrimaryStorageAccount, true);
        dl.Add(sourceTableWrapper.Delete);

        // initialize the target table and attach it to the disposable container
        var targetTable = new TableWrapper(TestHelpers.GenUnique(TestConfig.TableName), TestConfig.SecondaryStorageAccount, true);
        dl.Add(targetTable.Delete);
        var tableParamsResolver = new InMemoryTargetTableResolver();
        tableParamsResolver.Add(TestHelpers.TableKey, targetTable);

        // create gateway blob storage
        var gateBlob = new InMemoryGatewayBlobStore();

        // create a gateway queue
        var gateQueue = new QueueWrapper(TestHelpers.GenUnique("gateq"), TestConfig.GatewayStorageAccount);
        var gateQueueWriter = new GatewayQueueWriter(gateQueue, gateBlob);
        var sourceTable = new SourceTable<StarEntity>(sourceTableWrapper, gateQueueWriter, TestHelpers.TableKey);

        // 100 entities to satisfy TableStorage's batch requirements
        var entities = TestHelpers.CreateStarEntities(100);
        sourceTable.Write(entities);

        // Now verify that the entities were synced to the secondary table storage
        TableGatewayQueueProcessor gateQueueProcessor = new TableGatewayQueueProcessor(
            new GatewayQueueReader(gateQueue, gateBlob),
            tableParamsResolver
        );
        bool result = gateQueueProcessor.ProcessNext();
        Assert.IsTrue(result);

        // Every source entity must be present and equal in the target table.
        var targetEntities = targetTable.ReadEntities<StarEntity>()
            .ToList();
        Assert.AreEqual(entities.Count, targetEntities.Count);
        foreach (var entity in entities)
        {
            TestHelpers.AssertEqualStars(
                entity,
                targetEntities.First(x => x.RowKey == entity.RowKey)
            );
        }
    }
}
public void MenuHandler(Converter<ThreadItem, object> selector, MenuItem menu, PopupTargetInfo pti)
{
    // Re-sorts the currently open threads: closes them all, orders them by the
    // key produced by 'selector' (ascending or descending per this.ascending),
    // then reopens them in the new order.
    using (ViewList vl = this.js.ViewList())
    using (DisposableList<ViewItem> views = new DisposableList<ViewItem>(vl))
    using (DisposableList<ThreadItem> threads = new DisposableList<ThreadItem>())
    {
        // Extract the threads from the views
        foreach (ViewItem view in views)
        {
            ThreadItem t = view.Thread();
            if (t != null)
                threads.Add(t);
        }

        // Close all the threads
        foreach (ThreadItem t in threads)
        {
            this.js.Close(t);
        }

        // Compute each thread's sort key
        List<ThreadKeyPair> pairs = new List<ThreadKeyPair>(threads.Count);
        foreach (ThreadItem t in threads)
        {
            pairs.Add(new ThreadKeyPair()
            {
                Thread = t,
                SortKey = (IComparable)selector(t),
            });
        }

        // Sort the threads by key
        if (this.ascending)
        {
            pairs.Sort(this.AscendingComparison);
        }
        else
        {
            pairs.Sort(this.DescendingComparison);
        }

        // Reopen them all in sorted order
        foreach (var p in pairs)
        {
            this.js.Open(p.Thread, 0, OpenOperation.Local, true, false, true);
        }
    }
}
private DisposableList<HLODBuildInfo> CreateBuildInfo(TerrainData data, SpaceNode root)
{
    // Breadth-first traversal of the space tree, producing one HLODBuildInfo per
    // node. Parents always precede their children in the result list, and each
    // child records its parent's index. Distances start as the node depth and
    // are inverted to (maxDepth - depth) at the end.
    DisposableList<HLODBuildInfo> results = new DisposableList<HLODBuildInfo>();

    Queue<SpaceNode> traversalQueue = new Queue<SpaceNode>();
    Queue<int> parentIndexQueue = new Queue<int>();
    Queue<string> nodeNameQueue = new Queue<string>();
    Queue<int> nodeDepthQueue = new Queue<int>();

    int maxDepth = 0;

    traversalQueue.Enqueue(root);
    parentIndexQueue.Enqueue(-1);
    nodeNameQueue.Enqueue("HLOD");
    nodeDepthQueue.Enqueue(0);

    while (traversalQueue.Count > 0)
    {
        int currentIndex = results.Count;
        string nodeName = nodeNameQueue.Dequeue();
        SpaceNode currentNode = traversalQueue.Dequeue();
        int currentDepth = nodeDepthQueue.Dequeue();

        HLODBuildInfo buildInfo = new HLODBuildInfo
        {
            Name = nodeName,
            ParentIndex = parentIndexQueue.Dequeue(),
            Target = currentNode,
        };

        for (int i = 0; i < currentNode.GetChildCount(); ++i)
        {
            traversalQueue.Enqueue(currentNode.GetChild(i));
            parentIndexQueue.Enqueue(currentIndex);
            nodeNameQueue.Enqueue(nodeName + "_" + (i + 1));
            nodeDepthQueue.Enqueue(currentDepth + 1);
        }

        buildInfo.Heightmap = CreateSubHightmap(currentNode.Bounds);
        // Leaf nodes (no children) get the end-of-chain bake.
        buildInfo.WorkingObjects.Add(CreateBakedTerrain(nodeName, currentNode.Bounds, buildInfo.Heightmap, currentDepth, currentNode.GetChildCount() == 0));
        buildInfo.Distances.Add(currentDepth);
        results.Add(buildInfo);

        if (currentDepth > maxDepth)
        {
            maxDepth = currentDepth;
        }
    }

    // Convert stored depths into distances relative to the deepest level.
    for (int i = 0; i < results.Count; ++i)
    {
        HLODBuildInfo buildInfo = results[i];
        for (int di = 0; di < buildInfo.Distances.Count; ++di)
        {
            buildInfo.Distances[di] = maxDepth - buildInfo.Distances[di];
        }
    }

    return results;
}
public void TestQueue()
{
    // Round-trips a small (3 entity) message through the gateway queue using an
    // in-memory blob store, verifying the payload survives intact.
    using (var cleanup = new DisposableList())
    {
        var sourceEntities = TestHelpers.CreateStarEntities(3);
        var message = TableGatewayMessage.Create("Star", sourceEntities);

        var queue = new QueueWrapper(TestHelpers.GenUnique("gateq"), TestConfig.GatewayStorageAccount);
        cleanup.Add(queue.Delete);

        var blobStore = new InMemoryGatewayBlobStore();

        var writer = new GatewayQueueWriter(queue, blobStore);
        writer.Write(message);

        var reader = new GatewayQueueReader(queue, blobStore);
        reader.ReadNextMessage<TableGatewayMessage>(
            received =>
            {
                var roundTripped = received.EntitiesAs<StarEntity>().ToList();
                Assert.AreEqual(sourceEntities.Count, roundTripped.Count);
                for (int i = 0; i < roundTripped.Count; ++i)
                {
                    TestHelpers.AssertEqualStars(sourceEntities[i], roundTripped[i]);
                }
            },
            (e, gm, cqm) => Assert.Fail());
    }
}
private SecurityInformationImpl(string obj_name, Dictionary<uint, string> names, GenericMapping generic_mapping, bool read_only)
{
    // Builds the native SI_ACCESS array consumed by the security editor UI.
    // Each (mask, name) pair is marshalled to unmanaged memory; _names keeps the
    // string buffers alive for the lifetime of this object, since each SiAccess
    // entry points into them.
    _mapping = generic_mapping;
    _obj_name = new SafeStringBuffer(obj_name);
    _access_map = new SafeHGlobalBuffer(Marshal.SizeOf(typeof(SiAccess)) * names.Count);
    SiAccess[] sis = new SiAccess[names.Count];
    // Fix: removed unused local 'current_ptr' (dead store of the buffer handle).
    _names = new DisposableList<SafeStringBuffer>();
    int i = 0;
    foreach (KeyValuePair<uint, string> pair in names)
    {
        _names.Add(new SafeStringBuffer(pair.Value));
        SiAccess si = new SiAccess
        {
            pguid = IntPtr.Zero,
            dwFlags = SiAccessFlags.SI_ACCESS_SPECIFIC | SiAccessFlags.SI_ACCESS_GENERAL,
            mask = pair.Key,
            pszName = _names[i].DangerousGetHandle()
        };
        sis[i] = si;
        i++;
    }
    _access_map.WriteArray(0, sis, 0, names.Count);
    _read_only = read_only;
}
public void TestDiscarding()
{
    // Verifies TargetTable's version-based conflict handling: a write carrying a
    // higher Version wins, while a subsequent write with a stale Version is
    // discarded and leaves the stored entity unchanged.
    // NOTE(review): all reads use entity3's Partition/Row keys but compare
    // against entity1/entity2 — this assumes TestHelpers.CreateStarEntity()
    // produces identical keys for every call; confirm against TestHelpers.
    using (var dl = new DisposableList())
    {
        var table = new TableWrapper(TestHelpers.GenUnique(TestConfig.TableName), TestConfig.PrimaryStorageAccount, true);
        dl.Add(table.Delete);

        var entity1 = TestHelpers.CreateStarEntity();

        // entity2 carries a higher Version, so it should replace entity1.
        var entity2 = TestHelpers.CreateStarEntity();
        entity2.Version++;
        entity2.Name = "Entity Version 2 name";

        // entity3 keeps the default (stale) Version, so it should be discarded.
        var entity3 = TestHelpers.CreateStarEntity();
        entity3.Name = "Entity Version 3 name";

        TargetTable ttable = new TargetTable(table);
        XTableResult result;
        StarEntity rentity;

        result = ttable.Write(TableConvert.ToDynamicTableEntity(entity1));
        Assert.IsFalse(result.Discarded);
        rentity = table.ReadEntity<StarEntity>(entity3.PartitionKey, entity3.RowKey);
        TestHelpers.AssertEqualStars(entity1, rentity);

        result = ttable.Write(TableConvert.ToDynamicTableEntity(entity2));
        Assert.IsFalse(result.Discarded);
        rentity = table.ReadEntity<StarEntity>(entity3.PartitionKey, entity3.RowKey);
        TestHelpers.AssertEqualStars(entity2, rentity);

        result = ttable.Write(TableConvert.ToDynamicTableEntity(entity3));
        Assert.IsTrue(result.Discarded);
        rentity = table.ReadEntity<StarEntity>(entity3.PartitionKey, entity3.RowKey);
        TestHelpers.AssertEqualStars(entity2, rentity);
    }
}
internal VkPipelineColorBlendStateCreateInfo GetNative(DisposableList<IDisposable> marshalled)
{
    // Marshals this color-blend state into its native Vulkan create-info struct.
    // The native attachment array is added to 'marshalled' so the caller controls
    // when the unmanaged memory is released.
    var result = new VkPipelineColorBlendStateCreateInfo();
    result.sType = VkStructureType.PipelineColorBlendStateCreateInfo;
    result.logicOpEnable = logicOpEnable ? 1u : 0u;
    result.logicOp = logicOp;

    var attachments = new VkPipelineColorBlendAttachmentState[this.attachments.Count];
    for (int i = 0; i < attachments.Length; i++)
    {
        attachments[i] = this.attachments[i].GetNative();
    }

    var attachMarshalled = new MarshalledArray<VkPipelineColorBlendAttachmentState>(attachments);
    // Fix: attachmentCount was previously assigned twice (first from
    // this.attachments.Count, then overwritten here); the redundant first
    // assignment has been removed.
    result.attachmentCount = (uint)attachMarshalled.Count;
    result.pAttachments = attachMarshalled.Address;

    // blendConstants is optional; when absent the four constants stay zeroed.
    if (blendConstants != null)
    {
        result.blendConstants_0 = blendConstants[0];
        result.blendConstants_1 = blendConstants[1];
        result.blendConstants_2 = blendConstants[2];
        result.blendConstants_3 = blendConstants[3];
    }

    marshalled.Add(attachMarshalled);

    return result;
}
public void TestETagViolation409()
{
    // Simulates an ETag conflict (HTTP 409): while entity1's write is in its
    // retry callback, a competing write (entity2, higher Version) lands first.
    // The retried entity1 write must then be discarded and entity2 must remain
    // in the table.
    using (var dl = new DisposableList())
    {
        var table = new TableWrapper(TestHelpers.GenUnique(TestConfig.TableName), TestConfig.PrimaryStorageAccount, true);
        dl.Add(table.Delete);

        var entity1 = TestHelpers.CreateStarEntity();
        entity1.Version = 1;

        var entity2 = TestHelpers.CreateStarEntity();
        entity2.Version = 2;
        entity2.Name = "Entity Version 2 name";

        TargetTable ttable = new TargetTable(table);
        XTableResult result;

        // The callback fires on retry; only inject the competing write once.
        int retryAttempt = 0;
        result = ttable.Write(
            TableConvert.ToDynamicTableEntity(entity1),
            () =>
            {
                if (retryAttempt++ == 0)
                {
                    // Competing write sneaks in between entity1's attempts.
                    result = ttable.Write(TableConvert.ToDynamicTableEntity(entity2));
                    Assert.AreEqual(false, result.Discarded);
                }
            }
        );
        Assert.AreEqual(true, result.Discarded);

        var rentity = table.ReadEntity<StarEntity>(entity1.PartitionKey, entity1.RowKey);
        TestHelpers.AssertEqualStars(entity2, rentity);
    }
}
public async Task<CertificateWithCustomIntermediatesResult> GetPartialChainSigningCertificateAsync()
{
    // Issues a signing certificate whose intermediate CA certificate is NOT
    // served by the test server (only its OCSP responder is registered),
    // producing a partial-chain scenario for verification tests.
    var testServer = await GetTestServerAsync();
    var rootAuthority = await GetCertificateAuthority();
    var intermediateAuthority = rootAuthority.CreateIntermediateCertificateAuthority();
    var responders = new DisposableList<IDisposable>();

    responders.Add(testServer.RegisterResponder(intermediateAuthority.OcspResponder));

    void CustomizeAsPartialChainSigningCertificate(X509V3CertificateGenerator generator)
    {
        generator.AddSigningEku();
        generator.AddAuthorityInfoAccess(intermediateAuthority, addOcsp: true, addCAIssuers: true);
    }

    var (publicCertificate, certificate) = IssueCertificate(intermediateAuthority, "Untrusted Signing", CustomizeAsPartialChainSigningCertificate);

    var rootCert = rootAuthority.Certificate.ToX509Certificate2();
    var intermediateCert = intermediateAuthority.Certificate.ToX509Certificate2();

    return new CertificateWithCustomIntermediatesResult(
        certificate,
        new[] { rootCert, intermediateCert },
        responders);
}
public async Task<CertificateWithCustomIntermediatesResult> GetPartialChainAndRevokedSigningCertificateAsync()
{
    // Issues a signing certificate that is both revoked and on a partial chain:
    // only the intermediate's OCSP responder is registered, and no intermediate
    // certificates are returned with the result.
    var testServer = await GetTestServerAsync();
    var rootAuthority = await GetCertificateAuthority();
    var intermediateAuthority = rootAuthority.CreateIntermediateCertificateAuthority();
    var responders = new DisposableList<IDisposable>();

    responders.Add(testServer.RegisterResponder(intermediateAuthority.OcspResponder));

    void CustomizeAsPartialChainAndRevokedCertificate(X509V3CertificateGenerator generator)
    {
        generator.AddSigningEku();
        generator.AddAuthorityInfoAccess(intermediateAuthority, addOcsp: true, addCAIssuers: true);
    }

    var (publicCertificate, certificate) = IssueCertificate(intermediateAuthority, "Untrusted and Revoked Signing", CustomizeAsPartialChainAndRevokedCertificate);

    intermediateAuthority.Revoke(
        publicCertificate,
        reason: RevocationReason.Unspecified,
        revocationDate: DateTimeOffset.UtcNow);

    return new CertificateWithCustomIntermediatesResult(
        certificate,
        new X509Certificate2[0],
        responders);
}
private static TSqlModel CreateTestModel(params string[] Tsqlscripts)
{
    // Builds a SQL Server 2012 (Sql110) model from the supplied scripts.
    // The model is registered with _trash so it is disposed during cleanup.
    TSqlModel model = _trash.Add(new TSqlModel(SqlServerVersion.Sql110, new TSqlModelOptions()));
    AddScriptsToModel(model, Tsqlscripts);
    return model;
}
public void Host(ObfuscationConfiguration obfuscationConfiguration, Action <long, bool, double> statusCallback)
{
    // Runs a full obfuscation pass: validates the configuration, initializes the
    // dictionary adapters, pulls rows from the source adapter, obfuscates them,
    // and pushes the results to the destination adapter. statusCallback is
    // optional; when supplied it receives per-record progress updates.
    IEnumerable <IDictionary <string, object> > sourceDataEnumerable;
    IEnumerable <Message> messages;

    if ((object)obfuscationConfiguration == null)
    {
        throw new ArgumentNullException("obfuscationConfiguration");
    }

    // Fail fast if the configuration is invalid; aggregate all validation
    // messages into a single error.
    messages = obfuscationConfiguration.Validate();

    if (messages.Any())
    {
        throw new ApplicationException(string.Format("Obfuscation configuration validation failed:\r\n{0}", string.Join("\r\n", messages.Select(m => m.Description).ToArray())));
    }

    using (IOxymoronEngine oxymoronEngine = new OxymoronEngine(this, obfuscationConfiguration))
    {
        using (DisposableList <IDictionaryAdapter> dictionaryAdapters = new DisposableList <IDictionaryAdapter>())
        {
            // Initialize each dictionary adapter, warm its preload cache, and
            // remember the configuration -> adapter mapping for later lookups.
            foreach (DictionaryConfiguration dictionaryConfiguration in obfuscationConfiguration.DictionaryConfigurations)
            {
                IDictionaryAdapter dictionaryAdapter;
                dictionaryAdapter = dictionaryConfiguration.DictionaryAdapterConfiguration.GetAdapterInstance <IDictionaryAdapter>();
                dictionaryAdapters.Add(dictionaryAdapter);
                dictionaryAdapter.Initialize(dictionaryConfiguration.DictionaryAdapterConfiguration);
                dictionaryAdapter.InitializePreloadCache(dictionaryConfiguration, oxymoronEngine.SubstitutionCacheRoot);
                this.DictionaryConfigurationToAdapterMappings.Add(dictionaryConfiguration, dictionaryAdapter);
            }

            using (ISourceAdapter sourceAdapter = obfuscationConfiguration.SourceAdapterConfiguration.GetAdapterInstance <ISourceAdapter>())
            {
                sourceAdapter.Initialize(obfuscationConfiguration.SourceAdapterConfiguration);

                using (IDestinationAdapter destinationAdapter = obfuscationConfiguration.DestinationAdapterConfiguration.GetAdapterInstance <IDestinationAdapter>())
                {
                    destinationAdapter.Initialize(obfuscationConfiguration.DestinationAdapterConfiguration);
                    destinationAdapter.UpstreamMetadata = sourceAdapter.UpstreamMetadata;

                    // Build a lazy pipeline: pull -> obfuscate -> (optionally)
                    // count records -> push. Nothing enumerates until PushData.
                    sourceDataEnumerable = sourceAdapter.PullData(obfuscationConfiguration.TableConfiguration);
                    sourceDataEnumerable = oxymoronEngine.GetObfuscatedValues(sourceDataEnumerable);

                    if ((object)statusCallback != null)
                    {
                        sourceDataEnumerable = WrapRecordCounter(sourceDataEnumerable, statusCallback);
                    }

                    destinationAdapter.PushData(obfuscationConfiguration.TableConfiguration, sourceDataEnumerable);
                }
            }
        }
    }
}
public void TestDbLocationModifierForImport()
{
    // Verifies that the DbLocationModifier import contributor redirects the
    // imported database's MDF/LDF files to the requested folder and file prefix.

    // Given database name, and path to save to
    string dbName = TestContext.TestName;
    string dataFolder = GetTestDir();
    string filePrefix = "mydb";
    string bacpacPath = Path.Combine(dataFolder, dbName + ".bacpac");
    string mdfFilePath = Path.Combine(dataFolder, filePrefix + "_Primary.mdf");
    string ldfFilePath = Path.Combine(dataFolder, filePrefix + "_Primary.ldf");

    // Delete any existing artifacts from a previous run
    TestUtils.DropDbAndDeleteFiles(dbName, mdfFilePath, ldfFilePath);

    SqlTestDB importedDb = null;
    try
    {
        // Create a DB and export
        SqlTestDB db = _trash.Add(TestUtils.CreateTestDatabase(TestUtils.DefaultInstanceInfo, "MyOriginalDb"));
        db.Execute(CreateOneTable);
        db.ExportBacpac(bacpacPath);

        // When deploying using the location modifying contributor
        DacImportOptions options = new DacImportOptions();
        options.ImportContributors = DbLocationModifier.ContributorId;
        options.ImportContributorArguments = Utils.BuildContributorArguments(new Dictionary <string, string>()
        {
            { DbLocationModifier.DbSaveDataLocationArg, dataFolder },
            { DbLocationModifier.DbSaveLogDataLocationArg, dataFolder },
            { DbLocationModifier.DbFilePrefixArg, filePrefix },
        });

        importedDb = SqlTestDB.CreateFromBacpac(TestUtils.DefaultInstanceInfo, bacpacPath, options, true);

        // Then expect the database to be saved under that path
        AssertDeploySucceeded(importedDb.BuildConnectionString(), importedDb.DatabaseName);
        Assert.IsTrue(File.Exists(mdfFilePath));
        Assert.IsTrue(File.Exists(ldfFilePath));

        // Note: for a real application, after creating the DB on the server they may want to
        // detach it and reattach using the database path. We are not doing this since it's
        // not relevant to this test
    }
    finally
    {
        // importedDb is not in _trash, so dispose it explicitly; then remove
        // all on-disk artifacts regardless of test outcome.
        if (importedDb != null)
        {
            importedDb.Dispose();
        }
        TestUtils.DeleteIfExists(bacpacPath);
        TestUtils.DeleteIfExists(mdfFilePath);
        TestUtils.DeleteIfExists(ldfFilePath);
    }
}
private void handleGetDataFn(DataBatchArgs args, IntPtr hcol)
{
    // Copies the batch's values into the native OrtValueCollection at 'hcol'.
    // Pinned memory handles and created OrtValues are queued on m_rgCleanUpList
    // for later disposal rather than disposed here — presumably because the
    // native side still references them after this callback returns (TODO confirm).
    OrtValueCollection col = new OrtValueCollection(hcol);

    for (int i = 0; i < args.Values.Count; i++)
    {
        MemoryHandle? memHandle;
        OrtValue val = args.Values[i].ToOrtValue(out memHandle);

        // Only track the pin handle when ToOrtValue actually pinned memory.
        if (memHandle.HasValue)
        {
            m_rgCleanUpList.Add(memHandle);
        }

        m_rgCleanUpList.Add(val);
        col.SetAt(i, val, args.Values[i].Name);
    }
}
public void EntityConsistent()
{
    // End-to-end sync test for a single entity: a write to the source table must
    // flow through the gateway queue and be replicated, intact, to the target
    // table on the secondary storage account.
    using (var dl = new DisposableList())
    {
        // create the source table
        var sourceTableWrapper = new TableWrapper(TestHelpers.GenUnique(TestConfig.TableName), TestConfig.PrimaryStorageAccount, true);
        dl.Add(sourceTableWrapper.Delete);

        // initialize the target table and attach it to the disposable container
        var targetTable = new TableWrapper(TestHelpers.GenUnique(TestConfig.TableName), TestConfig.SecondaryStorageAccount, true);
        dl.Add(targetTable.Delete);
        var tableParamsResolver = new InMemoryTargetTableResolver();
        tableParamsResolver.Add(TestHelpers.TableKey, targetTable);

        // create gateway blob storage
        var gateBlob = new InMemoryGatewayBlobStore();

        // create a gateway queue
        var gateQueue = new QueueWrapper(TestHelpers.GenUnique("gateq"), TestConfig.GatewayStorageAccount);
        var gateQueueWriter = new GatewayQueueWriter(gateQueue, gateBlob);
        var sourceTable = new SourceTable <StarEntity>(sourceTableWrapper, gateQueueWriter, TestHelpers.TableKey);
        var entity = TestHelpers.CreateStarEntity();

        // write the entity
        sourceTable.Write(entity);

        // now verify that the entity was synced to the secondary table storage
        TableGatewayQueueProcessor gateQueueProcessor = new TableGatewayQueueProcessor(
            new GatewayQueueReader(gateQueue, gateBlob),
            tableParamsResolver
        );
        bool result = gateQueueProcessor.ProcessNext();
        Assert.IsTrue(result);

        var targetEntity = targetTable.ReadEntity <StarEntity>(entity.PartitionKey, entity.RowKey);
        TestHelpers.AssertEqualStars(entity, targetEntity);
    }
}
public static DisposableList<IDisposable> RegisterResponders(
    this ISigningTestServer testServer,
    CertificateAuthority ca,
    bool addCa = true,
    bool addOcsp = true)
{
    // Registers the requested responder kinds for a single CA (no parent
    // traversal). The returned list owns the registrations and disposes them
    // together.
    var registrations = new DisposableList<IDisposable>();

    if (addCa)
    {
        registrations.Add(testServer.RegisterResponder(ca));
    }

    if (addOcsp)
    {
        registrations.Add(testServer.RegisterResponder(ca.OcspResponder));
    }

    return registrations;
}
/// <summary>
/// Fetch the spotify audio session.
/// </summary>
/// <returns>The matching <see cref="AudioSession"/>, or <c>null</c> if none was found.</returns>
protected AudioSession? FetchAudioSession()
{
    // Fetch sessions
    using var device = AudioDevice.GetDefaultAudioDevice(EDataFlow.eRender, ERole.eMultimedia);
    using var sessionManager = device.GetSessionManager();
    using var sessions = sessionManager.GetSessionCollection();

    // Check main process
    var sessionCount = sessions.Count;
    using var sessionCache = new DisposableList<AudioSession>(sessionCount);
    for (var i = 0; i < sessions.Count; i++)
    {
        var session = sessions[i];
        if (session.ProcessID == MainWindowProcess?.Id)
        {
            Logger.LogInfo("SpotifyHook: Successfully fetched audio session using main window process.");
            // Returned session is intentionally NOT in sessionCache, so it survives disposal.
            _audioSession = session;
            return _audioSession;
        }
        else
        {
            // Store non-spotify sessions in the disposable list to make sure the underlying COM objects are disposed.
            sessionCache.Add(session);
        }
    }

    Logger.LogWarning("SpotifyHook: Failed to fetch audio session using main window process.");

    // Try fetch through other "spotify" processes.
    var processes = FetchSpotifyProcesses();

    // Transfer the found sessions into a dictionary to speed up the search by process id.
    // (we do this here to avoid the overhead as most of the time we will find the session in the code above.)
    using var sessionMap = new ValueDisposableDictionary<uint, AudioSession>();
    foreach (var session in sessionCache)
        sessionMap.Add(session.ProcessID, session);
    // Ownership moved to sessionMap; clear the cache so the sessions aren't double-disposed.
    sessionCache.Clear();

    foreach (var process in processes)
    {
        var processId = (uint)process.Id;

        // skip main process as we already checked it
        if (MainWindowProcess?.Id == processId)
            continue;

        if (sessionMap.TryGetValue(processId, out AudioSession session))
        {
            _audioSession = session;
            Logger.LogInfo("SpotifyHook: Successfully fetched audio session using secondary spotify processes.");

            // remove from map to avoid disposal
            sessionMap.Remove(processId);
            return _audioSession;
        }
    }

    Logger.LogError("SpotifyHook: Failed to fetch audio session.");
    return null;
}
public async Task Timestamp_Verify_WithOfflineRevocation_ReturnsCorrectFlagsAndLogsAsync()
{
    // Verifies that timestamp verification under the strictest settings reports
    // the UnknownRevocation flag when the CA's revocation endpoints are offline,
    // and that the platform-specific error messages are logged.
    var nupkg = new SimpleTestPackageContext();

    using (var testServer = await SigningTestServer.CreateAsync())
    using (var responders = new DisposableList <IDisposable>())
    using (var packageStream = await nupkg.CreateAsStreamAsync())
    using (var testCertificate = new X509Certificate2(_trustedTestCert.Source.Cert))
    {
        var ca = CreateOfflineRevocationCA(testServer, responders);
        var timestampService = TimestampService.Create(ca);

        responders.Add(testServer.RegisterResponder(timestampService));

        var timestampProvider = new Rfc3161TimestampProvider(timestampService.Url);
        var signature = await SignedArchiveTestUtility.CreateAuthorSignatureForPackageAsync(testCertificate, packageStream, timestampProvider);
        var timestamp = signature.Timestamps.First();

        // Strictest settings so the offline revocation check surfaces as a failure flag.
        var settings = new SignedPackageVerifierSettings(
            allowUnsigned: false,
            allowUntrusted: false,
            allowIllegal: false,
            allowIgnoreTimestamp: false,
            allowMultipleTimestamps: false,
            allowNoTimestamp: false,
            allowUnknownRevocation: false,
            reportUnknownRevocation: true,
            verificationTarget: VerificationTarget.All,
            signaturePlacement: SignaturePlacement.Any,
            repositoryCountersignatureVerificationBehavior: SignatureVerificationBehavior.Always,
            revocationMode: RevocationMode.Online);

        var logs = new List <SignatureLog>();

        var result = timestamp.Verify(signature, settings, HashAlgorithmName.SHA256, logs);

        result.HasFlag(SignatureVerificationStatusFlags.UnknownRevocation).Should().BeTrue();

        var errors = logs.Where(l => l.Level == LogLevel.Error);

        // Windows surfaces two distinct revocation errors; other platforms report one.
        errors.Count().Should().Be(RuntimeEnvironmentHelper.IsWindows ? 2 : 1);

        if (RuntimeEnvironmentHelper.IsWindows)
        {
            errors.Should().Contain(w => w.Code == NuGetLogCode.NU3028 && w.Message.Contains("The revocation function was unable to check revocation because the revocation server could not be reached."));
            errors.Should().Contain(w => w.Code == NuGetLogCode.NU3028 && w.Message.Contains("The revocation function was unable to check revocation for the certificate."));
        }
        else
        {
            errors.Should().Contain(w => w.Code == NuGetLogCode.NU3028 && w.Message.Contains("unable to get certificate CRL"));
        }
    }
}
internal DisposableList<X509Certificate2> GetCyclicCertificateChain()
{
    // Returns clones of every certificate in the cyclic chain. The caller owns
    // the returned list and is responsible for disposing the clones.
    var clones = new DisposableList<X509Certificate2>();

    foreach (var certificate in _cyclicChain)
    {
        clones.Add(Clone(certificate));
    }

    return clones;
}
internal IntPtr GetNative(DisposableList<IDisposable> marshalled)
{
    // Marshals the specialization map entries and raw data to native memory and
    // returns the address of a marshalled VkSpecializationInfo referencing them.
    // All native allocations are handed to 'marshalled' so the caller controls
    // their lifetime.
    var entriesNative = new MarshalledArray<VkSpecializationMapEntry>(mapEntries);
    var dataNative = new PinnedArray<byte>(data);
    marshalled.Add(entriesNative);
    marshalled.Add(dataNative);

    var info = new VkSpecializationInfo();
    info.mapEntryCount = (uint)entriesNative.Count;
    info.pMapEntries = entriesNative.Address;
    info.dataSize = (uint)dataNative.Count;
    info.pData = dataNative.Address;

    var infoNative = new Marshalled<VkSpecializationInfo>(info);
    marshalled.Add(infoNative);

    return infoNative.Address;
}
internal VkPipelineVertexInputStateCreateInfo GetNative(DisposableList<IDisposable> marshalled)
{
    // Marshals the vertex attribute and binding descriptions to native memory
    // and wires their counts/addresses into the create-info struct. Native
    // allocations are owned by 'marshalled'.
    var result = new VkPipelineVertexInputStateCreateInfo();
    result.sType = VkStructureType.PipelineVertexInputStateCreateInfo;

    var attributesNative = new MarshalledArray<VkVertexInputAttributeDescription>(vertexAttributeDescriptions);
    var bindingsNative = new MarshalledArray<VkVertexInputBindingDescription>(vertexBindingDescriptions);

    result.vertexAttributeDescriptionCount = (uint)attributesNative.Count;
    result.pVertexAttributeDescriptions = attributesNative.Address;
    result.vertexBindingDescriptionCount = (uint)bindingsNative.Count;
    result.pVertexBindingDescriptions = bindingsNative.Address;

    marshalled.Add(attributesNative);
    marshalled.Add(bindingsNative);

    return result;
}
internal VkPipelineViewportStateCreateInfo GetNative(DisposableList<IDisposable> marshalled)
{
    // Marshals the viewport and scissor arrays to native memory and wires their
    // counts/addresses into the create-info struct. Native allocations are owned
    // by 'marshalled'.
    var result = new VkPipelineViewportStateCreateInfo();
    result.sType = VkStructureType.PipelineViewportStateCreateInfo;

    var viewportsNative = new MarshalledArray<VkViewport>(viewports);
    var scissorsNative = new MarshalledArray<VkRect2D>(scissors);

    result.viewportCount = (uint)viewportsNative.Count;
    result.pViewports = viewportsNative.Address;
    result.scissorCount = (uint)scissorsNative.Count;
    result.pScissors = scissorsNative.Address;

    marshalled.Add(viewportsNative);
    marshalled.Add(scissorsNative);

    return result;
}
/// <summary>
/// Runs the obfuscation pipeline end-to-end: validates the configuration,
/// initializes dictionary adapters, then streams rows from the source adapter
/// through the obfuscation engine into the destination adapter.
/// </summary>
/// <param name="obfuscationConfiguration">The configuration driving the run; must not be null.</param>
/// <param name="statusCallback">Optional progress callback; may be null, in which case no record counting is performed.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="obfuscationConfiguration"/> is null.</exception>
/// <exception cref="ApplicationException">Thrown when configuration validation produces any messages.</exception>
public void Host(ObfuscationConfiguration obfuscationConfiguration, Action<long, bool, double> statusCallback)
{
	if ((object)obfuscationConfiguration == null)
		throw new ArgumentNullException(nameof(obfuscationConfiguration));

	// Materialize once: Validate() may return a deferred sequence, and the
	// original code enumerated it twice (Any() and then Select() in the throw).
	List<Message> messages = obfuscationConfiguration.Validate().ToList();

	if (messages.Any())
		throw new ApplicationException(string.Format("Obfuscation configuration validation failed:\r\n{0}", string.Join("\r\n", messages.Select(m => m.Description).ToArray())));

	using (IOxymoronEngine oxymoronEngine = new OxymoronEngine(this, obfuscationConfiguration))
	{
		using (DisposableList<IDictionaryAdapter> dictionaryAdapters = new DisposableList<IDictionaryAdapter>())
		{
			// Stand up and preload every configured dictionary adapter before pulling any data.
			foreach (DictionaryConfiguration dictionaryConfiguration in obfuscationConfiguration.DictionaryConfigurations)
			{
				IDictionaryAdapter dictionaryAdapter = dictionaryConfiguration.DictionaryAdapterConfiguration.GetAdapterInstance<IDictionaryAdapter>();
				dictionaryAdapters.Add(dictionaryAdapter);

				dictionaryAdapter.Initialize(dictionaryConfiguration.DictionaryAdapterConfiguration);
				dictionaryAdapter.InitializePreloadCache(dictionaryConfiguration, oxymoronEngine.SubstitutionCacheRoot);

				this.DictionaryConfigurationToAdapterMappings.Add(dictionaryConfiguration, dictionaryAdapter);
			}

			using (ISourceAdapter sourceAdapter = obfuscationConfiguration.SourceAdapterConfiguration.GetAdapterInstance<ISourceAdapter>())
			{
				sourceAdapter.Initialize(obfuscationConfiguration.SourceAdapterConfiguration);

				using (IDestinationAdapter destinationAdapter = obfuscationConfiguration.DestinationAdapterConfiguration.GetAdapterInstance<IDestinationAdapter>())
				{
					destinationAdapter.Initialize(obfuscationConfiguration.DestinationAdapterConfiguration);
					destinationAdapter.UpstreamMetadata = sourceAdapter.UpstreamMetadata;

					IEnumerable<IDictionary<string, object>> sourceDataEnumerable = sourceAdapter.PullData(obfuscationConfiguration.TableConfiguration);
					sourceDataEnumerable = oxymoronEngine.GetObfuscatedValues(sourceDataEnumerable);

					// Wrap with a record counter only when the caller asked for progress reporting.
					if ((object)statusCallback != null)
						sourceDataEnumerable = WrapRecordCounter(sourceDataEnumerable, statusCallback);

					destinationAdapter.PushData(obfuscationConfiguration.TableConfiguration, sourceDataEnumerable);
				}
			}
		}
	}
}
/// <summary>
/// Initializes the security information wrapper: pins the object name and each
/// access-right name into native string buffers, and serializes an SiAccess
/// entry per (mask, name) pair into the unmanaged access map buffer.
/// </summary>
/// <param name="obj_name">Display name of the secured object.</param>
/// <param name="handle">Handle to the object whose security is being edited.</param>
/// <param name="names">Map of access mask to human-readable access-right name.</param>
/// <param name="generic_mapping">Generic mapping for the object's access rights.</param>
public SecurityInformationImpl(string obj_name, NtObject handle, Dictionary<uint, string> names, GenericMapping generic_mapping)
{
    _mapping = generic_mapping;
    _handle = handle;
    _obj_name = new SafeStringBuffer(obj_name);
    _access_map = new SafeHGlobalBuffer(Marshal.SizeOf(typeof(SiAccess)) * names.Count);
    SiAccess[] sis = new SiAccess[names.Count];
    // _names keeps the native name buffers alive for as long as the access map
    // references them; SiAccess.pszName points into these buffers.
    _names = new DisposableList<SafeStringBuffer>();
    int i = 0;
    foreach (KeyValuePair<uint, string> pair in names)
    {
        _names.Add(new SafeStringBuffer(pair.Value));
        SiAccess si = new SiAccess();
        si.dwFlags = SiAccessFlags.SI_ACCESS_SPECIFIC | SiAccessFlags.SI_ACCESS_GENERAL;
        si.mask = pair.Key;
        si.pszName = _names[i].DangerousGetHandle();
        sis[i] = si;
        i++;
    }
    // Removed an unused local (current_ptr = _access_map.DangerousGetHandle())
    // that was computed but never read; WriteArray handles the copy itself.
    _access_map.WriteArray(0, sis, 0, names.Count);
}
/// <summary>
/// Updates the model inside an existing dacpac in place. Every other part of the
/// package (refactor log, pre/post deployment scripts, contributors to use, etc.)
/// is left untouched.
/// </summary>
/// <param name="dacpacPath">Path to the dacpac file to rewrite.</param>
public void UpdateDacpacModelWithFilter(string dacpacPath)
{
    // DisposableList disposes every registered object when the scope exits.
    using (DisposableList disposables = new DisposableList())
    {
        TSqlModel model = disposables.Add(new TSqlModel(dacpacPath, DacSchemaModelStorageType.Memory));
        TSqlModel filteredModel = disposables.Add(CreateFilteredModel(model));

        // The package must be opened ReadWrite - UpdateModel fails otherwise.
        DacPackage package = disposables.Add(DacPackage.Load(dacpacPath, DacSchemaModelStorageType.Memory, FileAccess.ReadWrite));
        package.UpdateModel(filteredModel, new PackageMetadata());
    }
}
/// <summary>
/// Creates a new dacpac containing the filtered model. This implementation loads
/// from and saves to disk, but the API also supports memory streams - useful when
/// you want to load a dacpac, filter its model, and immediately deploy the result
/// to a database without touching disk.
/// </summary>
/// <param name="dacpacPath">Path of the source dacpac to read.</param>
/// <param name="filteredDacpacPath">Path where the filtered dacpac is written.</param>
public void CreateFilteredDacpac(string dacpacPath, string filteredDacpacPath)
{
    // Models are disposable; the DisposableList releases them when the scope exits.
    using (DisposableList disposables = new DisposableList())
    {
        TSqlModel model = disposables.Add(new TSqlModel(dacpacPath, DacSchemaModelStorageType.Memory));
        TSqlModel filteredModel = disposables.Add(CreateFilteredModel(model));

        DacPackageExtensions.BuildPackage(
            filteredDacpacPath,
            filteredModel,
            new PackageMetadata(),   // Describes the dacpac.
            new PackageOptions());   // Specifies deployment contributors / refactor log to include.
    }
}