protected void Setup()
{
    // Build the shared fixture: three subjects, each occupying
    // two consecutive slots (offsets 0, 2 and 4).
    var subjects = new IndexCollection<string>();
    subjects.Add(Subject1, 0, 2);
    subjects.Add(Subject2, 2, 2);
    subjects.Add(Subject3, 4, 2);
    _subjects = subjects;
}
public Mesh()
{
    // A freshly constructed mesh exposes empty geometry channels and a
    // default material, so callers can populate it incrementally.
    Indices = new IndexCollection();
    Material = new Material();
    Normals = new Vector3DCollection();
    Positions = new Point3DCollection();
    TextureCoordinates = new Point2DCollection();
}
// Captures the mesh description verbatim; no validation is performed here.
internal SkinnedModelMeshContent(int numVertices, int numTriangles, VertexBufferContent vertices, IndexCollection indices, MaterialContent material)
{
    this.material = material;
    this.indices = indices;
    this.vertices = vertices;
    this.numTriangles = numTriangles;
    this.numVertices = numVertices;
}
// Resets the schema to a pristine state for the given table name.
private void Init(string name)
{
    Name = name;
    Options = new DbEngineOptions();
    Columns = new ColumnsCollection();
    ModifiedColumns = new ModifiedColumnsCollection(name);
    // Constraint and index collections keep a back-reference to this schema.
    Constraints = new ConstraintsCollection(this);
    Indexes = new IndexCollection(this);
}
// Initializes an empty schema for the named table.
public TableSchema(string name)
{
    Name = name;
    Options = new DbEngineOptions();
    Columns = new ColumnsCollection();
    ModifiedColumns = new ModifiedColumnsCollection(name);
    // These two collections need a back-reference to the owning schema.
    Constraints = new ConstraintsCollection(this);
    Indexes = new IndexCollection(this);
}
// Stores the vertex/index buffer view describing one mesh part.
internal ModelMeshPartContent(VertexBufferContent vertexBuffer, IndexCollection indices, int vertexOffset, int numVertices, int startIndex, int primitiveCount)
{
    _vertexBuffer = vertexBuffer;
    // Note: the parameter is named "indices" but backs the index buffer field.
    _indexBuffer = indices;
    _primitiveCount = primitiveCount;
    _startIndex = startIndex;
    _numVertices = numVertices;
    _vertexOffset = vertexOffset;
}
public void Init()
{
    _driver = GetDriver();

    // Seed three models for the index under test to operate on.
    _testModels = new List<TestModel>(3);
    for (var i = 0; i < 3; i++)
    {
        _testModels.Add(TestModel.MakeTestModel());
    }

    _testAccessCount = 0;
    _target = new IndexCollection<TestModel, string, int>(
        "TestIndex",
        _driver,
        tm => tm.Data,
        _GetTestModelByKey);
}
/// <summary>
/// Appends a new CPU-skinned model part built from the supplied geometry data.
/// </summary>
/// <param name="triangleCount">Number of triangles in the part.</param>
/// <param name="indexCollection">Index data for the part.</param>
/// <param name="vertices">CPU-side vertex data for the part.</param>
/// <param name="material">Material applied to the part; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="material"/> is null.</exception>
public void AddModelPart( int triangleCount, IndexCollection indexCollection, CpuVertex[] vertices, BasicMaterialContent material)
{
    // nameof keeps the exception's parameter name in sync under rename.
    if (material == null)
        throw new ArgumentNullException(nameof(material));

    ModelParts.Add(new CpuSkinnedModelPartContent
    {
        TriangleCount = triangleCount,
        IndexCollection = indexCollection,
        Vertices = vertices,
        Material = material,
    });
}
// Records one shape node's reading data (name, hierarchy link, geometry
// buffers and bounding spheres) into the ShapeNodes list.
public void SetShapeNode( string name, int parentIndex, int triangleCount, IndexCollection indexCollection, VertexData[] vertices, BoundingSphere[] bSpheres )
{
    var node = new ShapeReadingData
    {
        Name = name,
        ParentIndex = parentIndex,
        TriangleCount = triangleCount,
        IndexCollection = indexCollection,
        Vertices = vertices,
        BoundingSpheres = bSpheres,
    };
    ShapeNodes.Add(node);
}
/// <summary>
/// Helper function used by the CustomModelProcessor
/// to add new ModelPart information.
/// </summary>
public void AddModelPart( int triangleCount, int vertexCount, int vertexStride, VertexElement[] vertexElements, VertexBufferContent vertexBufferContent, IndexCollection indexCollection, MaterialContent materialContent)
{
    // Gather the per-part data into one record and append it.
    var modelPart = new ModelPart
    {
        TriangleCount = triangleCount,
        VertexCount = vertexCount,
        VertexStride = vertexStride,
        VertexElements = vertexElements,
        VertexBufferContent = vertexBufferContent,
        IndexCollection = indexCollection,
        MaterialContent = materialContent,
    };
    modelParts.Add(modelPart);
}
// Emits every index through the index writer (one per line, ';' terminated),
// then appends any engine-specific index statements verbatim.
protected void WriteIndexes(IndexCollection indexes)
{
    if (indexes.Count > 0)
    {
        var writer = GetIndexWriter();
        foreach (var index in indexes)
        {
            Builder.AppendLine();
            writer.Write(index);
            Builder.Append(";");
        }
    }

    var specific = indexes.GetSpecificIndexes(_engine);
    if (!specific.IsNullOrEmpty())
    {
        foreach (var statement in specific)
        {
            Builder.AppendLine(statement + ";");
        }
    }
}
// Builds the index buffers for one level of detail. A separate index
// collection is produced per neighbour configuration (see
// GetNeighboursBoolean) so the patch can adapt its border triangulation.
private IndexCollection[] BuildIndices(int numLevels, int level)
{
    // calculate number of vertices
    int nNumVerticesOneSide = GetPowerOfTwo((int) (numLevels - 1 - level)) + 1;
    int nNumVertices = nNumVerticesOneSide * nNumVerticesOneSide; // NOTE(review): unused in this method
    // calculate number of primitives
    int nNumRows = nNumVerticesOneSide - 1; // NOTE(review): unused in this method
    // for level 0, we only have set of indices because we never adapt to neighbours
    int nTotalNeighbourCodes = (level == 0) ? 1 : TotalNeighbourCodes;
    IndexCollection[] indices = new IndexCollection[nTotalNeighbourCodes];
    // hSkip is the vertex stride for this level (a power of two).
    int hSkip = GetPowerOfTwo(level);
    int hHalfSkip = hSkip / 2;
    for (int j = 0; j < nTotalNeighbourCodes; j++)
    {
        bool bLeft, bRight, bTop, bBottom;
        GetNeighboursBoolean(j, out bLeft, out bRight, out bTop, out bBottom);
        // generate indices
        if (level == numLevels - 1)
        {
            // The highest level has its own dedicated triangulation.
            indices[j] = AddHighestLevelTriangles(hSkip, hHalfSkip, j);
        }
        else
        {
            // Build row by row, adapting border rows to the neighbour flags.
            IndexBuilder pIndexBuilder = new IndexBuilder(this);
            for (int y = 0; y < _patchSize - hSkip; y += hSkip)
                AddTriangleRow(pIndexBuilder, y, hSkip, hHalfSkip, bLeft, bRight, bTop, bBottom);
            indices[j] = pIndexBuilder.Indices;
        }
    }
    return indices;
}
// Indexes three small files merged into one stream and verifies that a
// mid-scroll read spans the file boundaries; the expected output shows the
// merged stream is ordered file 3, then 2, then 1.
public void CanReadIndiciesBack_SmallTailedFilesMid()
{
    var pulse = new Subject <Unit>();
    using (var testFiles = new TestFileCollection())
    {
        // Three files of 5 lines each; every line embeds its file number and
        // a zero-padded line number so results are self-describing.
        for (int i = 0; i < 3; i++)
        {
            testFiles.Add(new TestFile());
            testFiles[i].Append( Enumerable.Range(1, 5) .Select(j => $"{i + 1}. file and this is line number {j.ToString("00000000")}") .ToArray());
        }
        IndexCollection result = null;
        // Merge the three watched files into a single indexed stream.
        var index = testFiles.Select(t => t.Info.WatchFile(pulse)) .Merge() .WithSegments() .Index();
        using (index.Subscribe(indicies => result = indicies as IndexCollection))
        {
            pulse.Once();
            // Read 10 lines starting at scroll position 2.
            var mid = result.ReadLines(new ScrollRequest(10, 2));
            var midText = mid.Select(l => l.Text).ToArray();
            // Expected: last 3 lines of file 3, all of file 2, first 2 of file 1.
            var midExpected = new[] { Enumerable.Range(3, 3).Select(i => $"3. file and this is line number {i.ToString("00000000")}"), Enumerable.Range(1, 5).Select(i => $"2. file and this is line number {i.ToString("00000000")}"), Enumerable.Range(1, 2).Select(i => $"1. file and this is line number {i.ToString("00000000")}"), }.SelectMany(t => t);
            midText.ShouldAllBeEquivalentTo(midExpected);
        }
    }
}
// From IndexBufferWriter in MonoGame.Framework.Content.Pipeline.
// Serializes an index buffer: a flag for 16- vs 32-bit elements, the total
// payload size in bytes, then the indices themselves.
private static void Write(ContentWriter output, IndexCollection value)
{
    // The buffer can be saved as Int16 only when every index fits in a ushort.
    var fitsInUInt16 = true;
    foreach (var index in value)
    {
        if (index > ushort.MaxValue)
        {
            fitsInUInt16 = false;
            break;
        }
    }

    output.Write(fitsInUInt16);

    var bytesPerIndex = fitsInUInt16 ? 2 : 4;
    output.Write(value.Count * bytesPerIndex);

    if (fitsInUInt16)
    {
        foreach (var index in value)
        {
            output.Write((ushort)index);
        }
    }
    else
    {
        foreach (var index in value)
        {
            output.Write(index);
        }
    }
}
/// <summary>
/// Get orders by criteria
/// </summary>
/// <param name="inputs">Criteria inputs. NOTE(review): not read by this implementation — confirm whether filtering was intended.</param>
/// <returns>Collection of order models mapped from the broker response.</returns>
protected async Task <IndexCollection <ITransactionOrderModel> > GetGenericOrders(dynamic inputs)
{
    var orders = new IndexCollection <ITransactionOrderModel>();
    // Fetch the raw order list for the current account.
    var response = await GetResponse <InputOrderListModel>($"/v3/accounts/{ Account.Id }/orders");
    foreach (var inputOrder in response.Orders)
    {
        // Map the wire model onto the domain model; enum-like fields go
        // through their dedicated maps. DealTime is the first non-null of
        // fill, cancellation and trigger time.
        var orderModel = new TransactionOrderModel { Id = $"{ inputOrder.Id }", Size = inputOrder.Size, Price = inputOrder.Price, Time = inputOrder.CreationTime, Type = OrderTypeMap.Input(inputOrder.Type), Status = OrderStatusMap.Input(inputOrder.Status), TimeSpan = OrderTimeSpanMap.Input(inputOrder.TimeSpan), DealTime = inputOrder.FillTime ?? inputOrder.CancellationTime ?? inputOrder.TriggerTime };
        orders.Add(orderModel);
    }
    return(orders);
}
// Verifies Distance.Euclidean: null-argument validation, plus a known
// distance matrix computed over the first four iris items.
public void EuclideanTest()
{
    // cluster is null
    {
        ArgumentExceptionAssert.Throw( () => { var partition = Distance.Euclidean((DoubleMatrix)null); }, expectedType: typeof(ArgumentNullException), expectedPartialMessage: ArgumentExceptionAssert.NullPartialMessage, expectedParameterName: "cluster");
    }
    // cluster is valid
    {
        // Items 0..3 of the iris data set, all attribute columns.
        var items = IndexCollection.Range(0, 3);
        int numberOfItems = items.Count;
        var attributes = IrisDataSet.GetAttributesAsDoubleMatrix()[items, ":"];
        var actual = Distance.Euclidean(attributes);
        // Expected 4x4 symmetric distance matrix with a zero diagonal.
        var expected = DoubleMatrix.Dense( numberOfItems, numberOfItems, new double[] { 0.0000000, 0.5385165, 0.5099020, 0.6480741, 0.5385165, 0.0000000, 0.3000000, 0.3316625, 0.5099020, 0.3000000, 0.0000000, 0.2449490, 0.6480741, 0.3316625, 0.2449490, 0.0000000 }, StorageOrder.RowMajor);
        DoubleMatrixAssert.AreEqual(expected, actual, DoubleMatrixTest.Accuracy);
    }
}
// Indexes a 10,000-line file and verifies that head, tail and mid-file
// scroll reads all return the expected lines.
public void CanReadIndiciesBack_LargeFile()
{
    var pulse = new Subject <Unit>();
    var scheduler = new TestScheduler();
    using (var file = new TestFile())
    {
        file.Append(Enumerable.Range(1, 10000).Select(i => $"This is line number {i.ToString("00000000")}").ToArray());
        using (var indexer = new Indexer(file.Info.WatchFile(pulse).WithSegments(), tailSize: 1000, scheduler: scheduler))
        {
            IndexCollection result = null;
            using (indexer.Result.Subscribe(indicies => result = indicies))
            {
                //start off the head scanner
                scheduler.AdvanceBy(1);
                // Head: first 10 lines from position 0.
                var head = result.ReadLines(new ScrollRequest(10, 0));
                var headText = head.Select(l => l.Text).ToArray();
                var headExpected = Enumerable.Range(1, 10).Select(i => $"This is line number {i.ToString("00000000")}");
                headText.ShouldAllBeEquivalentTo(headExpected);
                // Tail: last 10 lines (no explicit position).
                var tail = result.ReadLines(new ScrollRequest(10));
                var tailText = tail.Select(l => l.Text).ToArray();
                var tailExpected = Enumerable.Range(9991, 10).Select(i => $"This is line number {i.ToString("00000000")}").ToArray();
                tailText.ShouldAllBeEquivalentTo(tailExpected);
                // Mid: 10 lines starting at position 100.
                var mid = result.ReadLines(new ScrollRequest(10, 100));
                var midText = mid.Select(l => l.Text).ToArray();
                var midExpected = Enumerable.Range(101, 10).Select(i => $"This is line number {i.ToString("00000000")}").ToArray();
                midText.ShouldAllBeEquivalentTo(midExpected);
            }
        }
    }
}
// Translates (x, y) position pairs into vertex indices. Each entry of
// pPositions is a two-element array: [0] is x, [1] is y.
private IndexCollection CreateIndices(params int[][] pPositions)
{
    var result = new IndexCollection();
    foreach (var pair in pPositions)
    {
        result.Add(GetIndex(pair[0], pair[1]));
    }
    return result;
}
// Convenience overload: maps the writeInline flag onto the corresponding
// CollectionWriteStyle and delegates to the style-based overload.
public static TEntity GetSelection <TEntity>(this IndexCollection <TEntity> collection, IConsole console, bool writeInline = false)
{
    var style = writeInline ? CollectionWriteStyle.Inline : CollectionWriteStyle.Rows;
    return collection.GetSelection(console, style);
}
// Keeps a reference to the owning level builder and starts with an
// empty index collection.
public IndexBuilder(LevelContentBuilder levelBuilder)
{
    Indices = new IndexCollection();
    _levelBuilder = levelBuilder;
}
// Verifies the IndexPartition indexer: argument validation for null and
// unknown part identifiers, then retrieval of a known part.
public void IndexerGetTest()
{
    // Create an array of strings.
    var elements = new string[6] { "one", "two", "one", "one", "three", "three" };
    // Partition the array positions by their contents.
    var target = IndexPartition.Create(elements);
    // The partition contains three parts, identified, respectively,
    // by the strings "one", "two", and "three".
    // Expected:
    //
    // Part identifier: one
    //      indexes: 0, 2, 3
    //
    // Part identifier: three
    //      indexes: 4, 5
    //
    // Part identifier: two
    //      indexes: 1
    //

    // partIdentifier is null
    {
        ArgumentExceptionAssert.Throw( () => { var part = target[(string)null]; }, expectedType: typeof(ArgumentNullException), expectedPartialMessage: ArgumentExceptionAssert.NullPartialMessage, expectedParameterName: "partIdentifier");
    }
    // partIdentifier is not a key
    {
        ArgumentExceptionAssert.Throw( () => { var part = target["four"]; }, expectedType: typeof(ArgumentException), expectedPartialMessage: ImplementationServices.GetResourceString( "STR_EXCEPT_PAR_IS_NOT_A_PART_IDENTIFIER"), expectedParameterName: "partIdentifier");
    }
    // Valid partIdentifier
    {
        var actual = target["one"];
        var expected = IndexCollection.FromArray(new int[3] { 0, 2, 3 });
        IndexCollectionAssert.AreEqual(expected, actual);
    }
}
// Converts a MeshContent into a ModelMeshContent: optionally generates
// tangent frames, then packs every geometry batch into one shared vertex
// buffer and one shared index buffer, recording per-part offsets.
private ModelMeshContent ProcessMesh(MeshContent mesh, ModelBoneContent parent, ContentProcessorContext context)
{
    var parts = new List <ModelMeshPartContent>();
    // All geometry batches share these two buffers; parts reference
    // sub-ranges via startVertex/startIndex.
    var vertexBuffer = new VertexBufferContent();
    var indexBuffer = new IndexCollection();

    if (GenerateTangentFrames)
    {
        context.Logger.LogMessage("Generating tangent frames.");
        foreach (GeometryContent geom in mesh.Geometry)
        {
            // Tangent-frame calculation needs normals; compute them if absent.
            if (!geom.Vertices.Channels.Contains(VertexChannelNames.Normal(0)))
            {
                MeshHelper.CalculateNormals(geom, true);
            }
            if (!geom.Vertices.Channels.Contains(VertexChannelNames.Tangent(0)) || !geom.Vertices.Channels.Contains(VertexChannelNames.Binormal(0)))
            {
                MeshHelper.CalculateTangentFrames(geom, VertexChannelNames.TextureCoordinate(0), VertexChannelNames.Tangent(0), VertexChannelNames.Binormal(0));
            }
        }
    }

    var startVertex = 0;
    foreach (var geometry in mesh.Geometry)
    {
        var vertices = geometry.Vertices;
        var vertexCount = vertices.VertexCount;
        ModelMeshPartContent partContent;
        if (vertexCount == 0)
        {
            // Empty batch: emit a placeholder part carrying only a material.
            partContent = new ModelMeshPartContent();
        }
        else
        {
            // Append this batch's vertex data to the shared buffer.
            var geomBuffer = geometry.Vertices.CreateVertexBuffer();
            vertexBuffer.Write(vertexBuffer.VertexData.Length, 1, geomBuffer.VertexData);

            // Record where this batch's indices start in the shared buffer.
            var startIndex = indexBuffer.Count;
            indexBuffer.AddRange(geometry.Indices);

            partContent = new ModelMeshPartContent(vertexBuffer, indexBuffer, startVertex, vertexCount, startIndex, geometry.Indices.Count / 3);

            // Geoms are supposed to all have the same decl, so just steal one of these
            vertexBuffer.VertexDeclaration = geomBuffer.VertexDeclaration;

            startVertex += vertexCount;
        }
        partContent.Material = geometry.Material;
        parts.Add(partContent);
    }

    // Bounding sphere over the raw mesh positions (default when empty).
    var bounds = new BoundingSphere();
    if (mesh.Positions.Count > 0)
    {
        bounds = BoundingSphere.CreateFromPoints(mesh.Positions);
    }

    return(new ModelMeshContent(mesh.Name, mesh, parent, bounds, parts));
}
// Maps each single-column index onto the schema attribute it covers.
private void LoadAttributeIndices(IndexCollection indices)
{
    foreach (IndexDefinition index in indices)
    {
        // We support only simple (single-key-column) indexes.
        if (index.KeyColumns.Count != 1)
        {
            continue;
        }

        SchemaAttribute attr = FindAttributeByIndexName(index.Name);
        if (attr != null)
        {
            // Found the single attribute this index corresponds to.
            attr.Index = index.Name;
        }
    }
}
/// <summary>
/// Tests getting property
/// <see cref="O:IReadOnlyTabularCollection{TValue, TCollection}.this"/>
/// when column indexes are represented by a <b>null</b> instance.
/// </summary>
/// <typeparam name="TValue">The type of the items in the collection.</typeparam>
/// <typeparam name="TCollection">The type of the collection.</typeparam>
/// <param name="source">The source instance on which to invoke the property getter.</param>
public static void ColumnIndexesIsNull <TValue, TCollection>( IReadOnlyTabularCollection <TValue, TCollection> source) where TCollection : IReadOnlyTabularCollection <TValue, TCollection>
{
    Assert.IsNotNull(source);

    string parameterName = null;

    // Each region pairs a null column-indexes representation with every
    // supported row-indexes representation (Int32, IndexCollection, String).

    #region IndexCollection

    parameterName = "columnIndexes";

    // Int32
    ArgumentExceptionAssert.Throw( () => { var sub = source[0, (IndexCollection)null]; }, expectedType: typeof(ArgumentNullException), expectedPartialMessage: ArgumentExceptionAssert.NullPartialMessage, expectedParameterName: parameterName);

    // IndexCollection
    ArgumentExceptionAssert.Throw( () => { var sub = source[IndexCollection.Range(0, source.NumberOfColumns - 1), (IndexCollection)null]; }, expectedType: typeof(ArgumentNullException), expectedPartialMessage: ArgumentExceptionAssert.NullPartialMessage, expectedParameterName: parameterName);

    // String
    ArgumentExceptionAssert.Throw( () => { var sub = source[":", (IndexCollection)null]; }, expectedType: typeof(ArgumentNullException), expectedPartialMessage: ArgumentExceptionAssert.NullPartialMessage, expectedParameterName: parameterName);

    #endregion

    #region String

    parameterName = "columnIndexes";

    // Int32
    ArgumentExceptionAssert.Throw( () => { var sub = source[0, (string)null]; }, expectedType: typeof(ArgumentNullException), expectedPartialMessage: ArgumentExceptionAssert.NullPartialMessage, expectedParameterName: parameterName);

    // IndexCollection
    ArgumentExceptionAssert.Throw( () => { var sub = source[IndexCollection.Range(0, source.NumberOfColumns - 1), (string)null]; }, expectedType: typeof(ArgumentNullException), expectedPartialMessage: ArgumentExceptionAssert.NullPartialMessage, expectedParameterName: parameterName);

    // String
    ArgumentExceptionAssert.Throw( () => { var sub = source[":", (string)null]; }, expectedType: typeof(ArgumentNullException), expectedPartialMessage: ArgumentExceptionAssert.NullPartialMessage, expectedParameterName: parameterName);

    #endregion
}
// Statistically verifies GetOptimalState's random tie resolution: with a
// constant objective, every response category should be selected with
// (approximately) equal probability.
public void GetOptimalStateTest()
{
    // valid input - random ties resolution
    {
        var context = new CategoricalEntailmentEnsembleOptimizationContext( objectiveFunction: (DoubleMatrix state) => { return(Double.PositiveInfinity); }, featureCategoryCounts: new List <int>(1) { 6 }, numberOfResponseCategories: 4, numberOfCategoricalEntailments: 1, allowEntailmentPartialTruthValues: true, probabilitySmoothingCoefficient: .9, optimizationGoal: OptimizationGoal.Maximization, minimumNumberOfIterations: 5, maximumNumberOfIterations: 1000);

        int numberOfEvaluations = 10000;
        double delta = .01;

        var parameter = DoubleMatrix.Dense(1, 10, new double[10] { .5, .5, .5, .5, .5, .5, .25, .25, .25, .25 });

        // Generate states; positions 6..9 of a state hold the response part.
        var states = new int[numberOfEvaluations];
        var responseIndexes = IndexCollection.Range(6, 9);
        for (int i = 0; i < numberOfEvaluations; i++)
        {
            var state = context.GetOptimalState(parameter);
            states[i] = state.Vec(responseIndexes).FindNonzero()[0];
        }

        // Compute the actual inclusion probabilities
        DoubleMatrix actualInclusionProbabilities = DoubleMatrix.Dense(context.NumberOfResponseCategories, 1);
        var stateIndexes = IndexCollection.Default(numberOfEvaluations - 1);
        for (int j = 0; j < context.NumberOfResponseCategories; j++)
        {
            var samplesContainingCurrentUnit = IndexPartition.Create( stateIndexes, (i) => { return(states[i] == j); });
            actualInclusionProbabilities[j] = (double)samplesContainingCurrentUnit[true].Count / (double)numberOfEvaluations;
        }

        // Check the number of distinct generated states
        var distinctStates = IndexPartition.Create( states);
        int numberOfDistinctStates = distinctStates.Count;
        Assert.AreEqual( expected: context.NumberOfResponseCategories, actual: numberOfDistinctStates);

        // Check that the Chebyshev Inequality holds true
        // for each inclusion probability
        var expectedInclusionProbabilities = DoubleMatrix.Dense(context.NumberOfResponseCategories, 1, 1.0 / context.NumberOfResponseCategories);
        for (int j = 0; j < context.NumberOfResponseCategories; j++)
        {
            ProbabilityDistributionTest.CheckChebyshevInequality( new BernoulliDistribution(expectedInclusionProbabilities[j]), actualInclusionProbabilities[j], numberOfEvaluations, delta);
        }

        // Check how good the actual inclusion probabilities fit
        // the expected ones
        // The following assumes a number of response
        // categories equal to 4.
        //
        // The quantile of order .9 for
        // the chi-squared distribution having 4-1
        // degrees of freedom is 6.251389
        // (as from R function qchisq(.9, 3))
        var goodnessOfFitCriticalValue = 6.251389;
        ProbabilityDistributionTest.CheckGoodnessOfFit( expectedInclusionProbabilities, actualInclusionProbabilities, goodnessOfFitCriticalValue);
    }
}
// Plain constructor injection; dependencies are stored as-is.
public CoreIndexConfig(IndexCollection indexCollection, ValueSetHelper valueSetHelper)
{
    _valueSetHelper = valueSetHelper;
    _indexCollection = indexCollection;
}
/// <summary>
/// Re-calculates surface normals to give a low-poly flat shading effect:
/// each triangle's face normal is written back for all three of its corners.
/// </summary>
/// <param name="model">The processed model whose mesh part vertex buffers are rewritten in place.</param>
private void FlattenNormals(ModelContent model)
{
    foreach (ModelMeshContent mesh in model.Meshes)
    {
        foreach (ModelMeshPartContent part in mesh.MeshParts)
        {
            IndexCollection indices = part.IndexBuffer;

            // Walk the index buffer one triangle (3 indices) at a time.
            for (int i = 0; i < indices.Count; i += 3)
            {
                Vector3 p1 = GetVertexPosition(part.VertexBuffer, indices[i + 0]);
                Vector3 p2 = GetVertexPosition(part.VertexBuffer, indices[i + 1]);
                Vector3 p3 = GetVertexPosition(part.VertexBuffer, indices[i + 2]);

                // Face normal from the cross product of two edge vectors.
                Vector3 v1 = p2 - p1;
                Vector3 v2 = p3 - p1;
                Vector3 normal = Vector3.Cross(v1, v2);
                normal.Normalize();

                // NOTE(review): positions are read via indices[i + j], but the
                // normal is written at slot i + j. These only coincide when the
                // index buffer is sequential; verify whether this should be
                // SetVertexNormalCoord(part.VertexBuffer, indices[i + j], normal, k).
                for (int j = 0; j < 3; j++)
                {
                    // Write the normal one component (x, y, z) at a time.
                    for (int k = 0; k < 3; k++)
                    {
                        SetVertexNormalCoord(part.VertexBuffer, i + j, normal, k);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Creates an instance of GeometryContent.
/// </summary>
public GeometryContent()
{
    // Vertex content holds a back-reference to this geometry.
    vertices = new VertexContent(this);
    indices = new IndexCollection();
}
// End-to-end lifecycle test for the index collection: create an index, read
// it back, sweep every property getter, update its attributes, then
// disable, re-enable and remove it.
public async Task IndexCollection()
{
    // Unique, recognizable name so leaked test indexes are easy to clean up.
    string indexName = string.Format("delete-me-{0}", Guid.NewGuid());

    using (var service = await SdkHelper.CreateService())
    {
        IndexCollection indexes = service.Indexes;
        Index testIndex = await indexes.CreateAsync(indexName);
        //// TODO: Verify testIndex

        await testIndex.GetAsync();
        //// TODO: Reverify testIndex

        await indexes.GetAllAsync();
        Assert.NotNull(indexes.SingleOrDefault(x => x.Title == testIndex.Title));

        // Sweep every property getter while enumerating, verifying each one
        // can be read without throwing.
        foreach (Index index in indexes)
        {
            int dummyInt; string dummyString; bool dummyBool; DateTime dummyTime;
            dummyBool = index.AssureUTF8;
            dummyString = index.BlockSignatureDatabase;
            dummyInt = index.BlockSignSize;
            dummyInt = index.BloomFilterTotalSizeKB;
            dummyString = index.ColdPath;
            dummyString = index.ColdPathExpanded;
            dummyString = index.ColdToFrozenDir;
            dummyString = index.ColdToFrozenScript;
            dummyBool = index.CompressRawData;
            long size = index.CurrentDBSizeMB;
            dummyString = index.DefaultDatabase;
            dummyBool = index.EnableRealTimeSearch;
            dummyInt = index.FrozenTimePeriodInSecs;
            dummyString = index.HomePath;
            dummyString = index.HomePathExpanded;
            dummyString = index.IndexThreads;
            long time = index.LastInitTime;
            dummyString = index.MaxBloomBackfillBucketAge;
            dummyInt = index.MaxConcurrentOptimizes;
            dummyString = index.MaxDataSize;
            dummyInt = index.MaxHotBuckets;
            dummyInt = index.MaxHotIdleSecs;
            dummyInt = index.MaxHotSpanSecs;
            dummyInt = index.MaxMemMB;
            dummyInt = index.MaxMetaEntries;
            dummyInt = index.MaxRunningProcessGroups;
            dummyTime = index.MaxTime;
            dummyInt = index.MaxTotalDataSizeMB;
            dummyInt = index.MaxWarmDBCount;
            dummyString = index.MemPoolMB;
            dummyString = index.MinRawFileSyncSecs;
            dummyTime = index.MinTime;
            dummyInt = index.NumBloomFilters;
            dummyInt = index.NumHotBuckets;
            dummyInt = index.NumWarmBuckets;
            dummyInt = index.PartialServiceMetaPeriod;
            dummyInt = index.QuarantineFutureSecs;
            dummyInt = index.QuarantinePastSecs;
            dummyInt = index.RawChunkSizeBytes;
            dummyInt = index.RotatePeriodInSecs;
            dummyInt = index.ServiceMetaPeriod;
            dummyString = index.SuppressBannerList;
            bool sync = index.Sync;
            dummyBool = index.SyncMeta;
            dummyString = index.ThawedPath;
            dummyString = index.ThawedPathExpanded;
            dummyInt = index.ThrottleCheckPeriod;
            long eventCount = index.TotalEventCount;
            dummyBool = index.Disabled;
            dummyBool = index.IsInternal;
        }

        // Repeat the same getter sweep through the indexer access path.
        for (int i = 0; i < indexes.Count; i++)
        {
            Index index = indexes[i];
            int dummyInt; string dummyString; bool dummyBool; DateTime dummyTime;
            dummyBool = index.AssureUTF8;
            dummyString = index.BlockSignatureDatabase;
            dummyInt = index.BlockSignSize;
            dummyInt = index.BloomFilterTotalSizeKB;
            dummyString = index.ColdPath;
            dummyString = index.ColdPathExpanded;
            dummyString = index.ColdToFrozenDir;
            dummyString = index.ColdToFrozenScript;
            dummyBool = index.CompressRawData;
            long size = index.CurrentDBSizeMB;
            dummyString = index.DefaultDatabase;
            dummyBool = index.EnableRealTimeSearch;
            dummyInt = index.FrozenTimePeriodInSecs;
            dummyString = index.HomePath;
            dummyString = index.HomePathExpanded;
            dummyString = index.IndexThreads;
            long time = index.LastInitTime;
            dummyString = index.MaxBloomBackfillBucketAge;
            dummyInt = index.MaxConcurrentOptimizes;
            dummyString = index.MaxDataSize;
            dummyInt = index.MaxHotBuckets;
            dummyInt = index.MaxHotIdleSecs;
            dummyInt = index.MaxHotSpanSecs;
            dummyInt = index.MaxMemMB;
            dummyInt = index.MaxMetaEntries;
            dummyInt = index.MaxRunningProcessGroups;
            dummyTime = index.MaxTime;
            dummyInt = index.MaxTotalDataSizeMB;
            dummyInt = index.MaxWarmDBCount;
            dummyString = index.MemPoolMB;
            dummyString = index.MinRawFileSyncSecs;
            dummyTime = index.MinTime;
            dummyInt = index.NumBloomFilters;
            dummyInt = index.NumHotBuckets;
            dummyInt = index.NumWarmBuckets;
            dummyInt = index.PartialServiceMetaPeriod;
            dummyInt = index.QuarantineFutureSecs;
            dummyInt = index.QuarantinePastSecs;
            dummyInt = index.RawChunkSizeBytes;
            dummyInt = index.RotatePeriodInSecs;
            dummyInt = index.ServiceMetaPeriod;
            dummyString = index.SuppressBannerList;
            bool sync = index.Sync;
            dummyBool = index.SyncMeta;
            dummyString = index.ThawedPath;
            dummyString = index.ThawedPathExpanded;
            dummyInt = index.ThrottleCheckPeriod;
            long eventCount = index.TotalEventCount;
            dummyBool = index.Disabled;
            dummyBool = index.IsInternal;
        }

        // Bump every writable attribute and push the update.
        var attributes = GetIndexAttributes(testIndex);
        attributes.BlockSignSize = testIndex.BlockSignSize + 1;
        attributes.EnableOnlineBucketRepair = !testIndex.EnableOnlineBucketRepair;
        attributes.MaxBloomBackfillBucketAge = "20d";
        attributes.FrozenTimePeriodInSecs = testIndex.FrozenTimePeriodInSecs + 1;
        attributes.MaxConcurrentOptimizes = testIndex.MaxConcurrentOptimizes + 1;
        attributes.MaxDataSize = "auto";
        attributes.MaxHotBuckets = testIndex.MaxHotBuckets + 1;
        attributes.MaxHotIdleSecs = testIndex.MaxHotIdleSecs + 1;
        attributes.MaxMemMB = testIndex.MaxMemMB + 1;
        attributes.MaxMetaEntries = testIndex.MaxMetaEntries + 1;
        attributes.MaxTotalDataSizeMB = testIndex.MaxTotalDataSizeMB + 1;
        attributes.MaxWarmDBCount = testIndex.MaxWarmDBCount + 1;
        attributes.MinRawFileSyncSecs = "disable";
        attributes.PartialServiceMetaPeriod = testIndex.PartialServiceMetaPeriod + 1;
        attributes.QuarantineFutureSecs = testIndex.QuarantineFutureSecs + 1;
        attributes.QuarantinePastSecs = testIndex.QuarantinePastSecs + 1;
        attributes.RawChunkSizeBytes = testIndex.RawChunkSizeBytes + 1;
        attributes.RotatePeriodInSecs = testIndex.RotatePeriodInSecs + 1;
        attributes.ServiceMetaPeriod = testIndex.ServiceMetaPeriod + 1;
        attributes.SyncMeta = !testIndex.SyncMeta;
        attributes.ThrottleCheckPeriod = testIndex.ThrottleCheckPeriod + 1;

        bool updatedSnapshot = await testIndex.UpdateAsync(attributes);
        Assert.True(updatedSnapshot);

        await testIndex.DisableAsync();
        Assert.True(testIndex.Disabled); // Because the disable endpoint returns an updated snapshot
        await service.Server.RestartAsync(2 * 60 * 1000); // Because you can't re-enable an index without a restart
        await service.LogOnAsync();

        testIndex = await service.Indexes.GetAsync(indexName);
        await testIndex.EnableAsync(); // Because the enable endpoint returns an updated snapshot
        Assert.False(testIndex.Disabled);

        // Remove the index and verify it is gone.
        await testIndex.RemoveAsync();
        await SdkHelper.ThrowsAsync <ResourceNotFoundException>(async() => { await testIndex.GetAsync(); });
        testIndex = await indexes.GetOrNullAsync(indexName);
        Assert.Null(testIndex);
    }
}
// Plain constructor injection; dependencies are stored as-is.
public ConfigComponent(IndexCollection indexCollection, SearcherCollection searcherCollection, IExamineHelper examineHelper)
{
    _examineHelper = examineHelper;
    _searcherCollection = searcherCollection;
    _indexCollection = indexCollection;
}
/// <summary>
/// Evaluates a candidate state: decodes it into categorical entailments,
/// classifies every observed item by weighted voting (ties broken at
/// random), and returns the resulting classification accuracy.
/// </summary>
/// <param name="state">Row vector encoding the trained entailments.</param>
/// <returns>The fraction of items whose predicted response matches the actual one.</returns>
public double Performance(DoubleMatrix state)
{
    #region Create the ensemble of categorical entailments

    // Start from the already-trained entailments, then decode the
    // candidate ones out of consecutive slices of the state vector.
    List <CategoricalEntailment> entailments = new(this.entailments);
    int numberOfSelectedCategoricalEntailments = this.trainSequentially ? 1 : this.numberOfTrainedCategoricalEntailments;
    int numberOfResponseCategories = this.numberOfResponseCategories;
    for (int e = 0; e < numberOfSelectedCategoricalEntailments; e++)
    {
        int entailmentRepresentationIndex = e * this.entailmentRepresentationLength;
        entailments.Add(new CategoricalEntailment( state[0, IndexCollection.Range( entailmentRepresentationIndex, entailmentRepresentationIndex + this.overallNumberOfCategories)], this.FeatureVariables, this.ResponseVariable));
    }

    #endregion

    #region Exploit the ensemble to classify observed items

    int numberOfItems = this.featuresData.Length;
    DoubleMatrix itemClassifications = DoubleMatrix.Dense(numberOfItems, 1);
    DoubleMatrix item, votes;
    for (int r = 0; r < this.featuresData.Length; r++)
    {
        // Each entailment whose premises hold for the item adds its truth
        // value as a vote for its concluded response category.
        votes = DoubleMatrix.Dense(1, numberOfResponseCategories);
        item = this.featuresData[r];
        for (int e = 0; e < entailments.Count; e++)
        {
            if (entailments[e].ValidatePremises(item))
            {
                votes[this.responseCodeIndexPairs[entailments[e].ResponseConclusion]] += entailments[e].TruthValue;
            }
        }
        double maximumVote = Stat.Max(votes).value;
        var maximumVoteIndexes = votes.Find(maximumVote);
        int numberOfMaximumVoteIndexes = maximumVoteIndexes.Count;
        if (numberOfMaximumVoteIndexes == 1)
        {
            itemClassifications[r] = this.ResponseVariable.Categories[maximumVoteIndexes[0]].Code;
        }
        else
        {
            // Pick a position corresponding to a maximum vote at random
            // (one RNG per thread, fetched from the shared pool).
            int randomMaximumVotePosition = Convert.ToInt32( Math.Floor(numberOfMaximumVoteIndexes * this.randomNumberGeneratorPool.GetOrAdd( Environment.CurrentManagedThreadId, (threadId) => { var localRandomNumberGenerator = RandomNumberGenerator.CreateNextMT2203(7777777); return(localRandomNumberGenerator); }).DefaultUniform()));
            itemClassifications[r] = this.ResponseVariable.Categories[ maximumVoteIndexes[randomMaximumVotePosition]].Code;
        }
    }
    var predictedResponses = new CategoricalDataSet( new List <CategoricalVariable>(1) { this.ResponseVariable }, itemClassifications);

    #endregion

    #region Evaluate classification accuracy

    // Count exact predictions code by code: positions predicted as a code
    // that also carry that code in the actual responses.
    var actualResponses = this.responseData;
    var responseCodes = this.ResponseVariable.CategoryCodes;
    double numberOfExactPredictions = 0.0;
    foreach (var code in responseCodes)
    {
        IndexCollection codePredictedIndexes = predictedResponses.Data.Find(code);
        if (codePredictedIndexes is not null)
        {
            DoubleMatrix correspondingActualResponses = actualResponses.Vec(codePredictedIndexes);
            numberOfExactPredictions += correspondingActualResponses.Find(code)?.Count ?? 0;
        }
    }

    // Compute the overall confusion
    double totalConfusion = actualResponses.Count;
    var accuracy = numberOfExactPredictions / totalConfusion;
    return(accuracy);

    #endregion
}
/// <summary>
/// Integration test: exercises every getter of <see cref="Index"/>,
/// then creates (if needed) the test index "sdk-tests2", updates its
/// writable attributes through an attributes object, clears it, and
/// verifies its enabled/disabled state across a Splunk restart.
/// </summary>
public async Task IndexAccessors()
{
    string indexName = "sdk-tests2";
    Service service = Connect();
    DateTimeOffset offset = new DateTimeOffset(DateTime.Now);
    // Timestamp string with explicit UTC offset; built but not used below.
    string now = DateTime.UtcNow.ToString("yyyy-MM-dd'T'HH:mm:ss") +
        string.Format("{0}{1} ",
            offset.Offset.Hours.ToString("D2"),
            offset.Offset.Minutes.ToString("D2"));

    ServerInfo info = await service.Server.GetInfoAsync();

    //// set can_delete if not set, so we can delete events from the index.
    //User user = service.GetUsers().Get("admin");
    //string[] roles = user.Roles;
    //if (!this.Contains(roles, "can_delete"))
    //{
    //    string[] newRoles = new string[roles.Length + 1];
    //    roles.CopyTo(newRoles, 0);
    //    newRoles[roles.Length] = "can_delete";
    //    user.Roles = newRoles;
    //    user.Update();
    //}

    // Touch every readable property on every index to verify the
    // accessors deserialize without throwing.
    IndexCollection indexes = await service.GetIndexesAsync();
    foreach (Index idx in indexes)
    {
        int dummyInt;
        string dummyString;
        bool dummyBool;
        DateTime dummyTime;
        dummyBool = idx.AssureUTF8;
        dummyString = idx.BlockSignatureDatabase;
        dummyInt = idx.BlockSignSize;
        dummyInt = idx.BloomFilterTotalSizeKB;
        dummyString = idx.ColdPath;
        dummyString = idx.ColdPathExpanded;
        dummyString = idx.ColdToFrozenDir;
        dummyString = idx.ColdToFrozenScript;
        dummyBool = idx.CompressRawData;
        dummyInt = idx.CurrentDBSizeMB;
        dummyString = idx.DefaultDatabase;
        dummyBool = idx.EnableRealtimeSearch;
        dummyInt = idx.FrozenTimePeriodInSecs;
        dummyString = idx.HomePath;
        dummyString = idx.HomePathExpanded;
        dummyString = idx.IndexThreads;
        long time = idx.LastInitTime;
        dummyString = idx.MaxBloomBackfillBucketAge;
        dummyInt = idx.MaxConcurrentOptimizes;
        dummyString = idx.MaxDataSize;
        dummyInt = idx.MaxHotBuckets;
        dummyInt = idx.MaxHotIdleSecs;
        dummyInt = idx.MaxHotSpanSecs;
        dummyInt = idx.MaxMemMB;
        dummyInt = idx.MaxMetaEntries;
        dummyInt = idx.MaxRunningProcessGroups;
        dummyTime = idx.MaxTime;
        dummyInt = idx.MaxTotalDataSizeMB;
        dummyInt = idx.MaxWarmDBCount;
        dummyString = idx.MemPoolMB;
        dummyString = idx.MinRawFileSyncSecs;
        dummyTime = idx.MinTime;
        dummyInt = idx.NumBloomFilters;
        dummyInt = idx.NumHotBuckets;
        dummyInt = idx.NumWarmBuckets;
        dummyInt = idx.PartialServiceMetaPeriod;
        dummyInt = idx.QuarantineFutureSecs;
        dummyInt = idx.QuarantinePastSecs;
        dummyInt = idx.RawChunkSizeBytes;
        dummyInt = idx.RotatePeriodInSecs;
        dummyInt = idx.ServiceMetaPeriod;
        dummyString = idx.SuppressBannerList;
        bool sync = idx.Sync;
        dummyBool = idx.SyncMeta;
        dummyString = idx.ThawedPath;
        dummyString = idx.ThawedPathExpanded;
        dummyInt = idx.ThrottleCheckPeriod;
        long eventCount = idx.TotalEventCount;
        dummyBool = idx.Disabled;
        dummyBool = idx.IsInternal;
    }

    // Fetch the test index, creating it if it does not exist yet.
    Index index = null;
    try
    {
        index = await service.GetIndexAsync(indexName);
    }
    catch (Splunk.Client.ResourceNotFoundException)
    {
    }
    if (index == null)
    {
        index = await service.CreateIndexAsync(indexName);
    }
    await service.GetIndexAsync(indexName);

    var indexAttributes = GetIndexAttributes(index);

    // use setters to update most
    indexAttributes.BlockSignSize = index.BlockSignSize + 1;
    if (this.VersionCompare(service, "4.3") > 0)
    {
        // These attributes only exist on Splunk newer than 4.3.
        indexAttributes.EnableOnlineBucketRepair = !index.EnableOnlineBucketRepair;
        indexAttributes.MaxBloomBackfillBucketAge = "20d";
    }
    indexAttributes.FrozenTimePeriodInSecs = index.FrozenTimePeriodInSecs + 1;
    indexAttributes.MaxConcurrentOptimizes = index.MaxConcurrentOptimizes + 1;
    indexAttributes.MaxDataSize = "auto";
    indexAttributes.MaxHotBuckets = index.MaxHotBuckets + 1;
    indexAttributes.MaxHotIdleSecs = index.MaxHotIdleSecs + 1;
    indexAttributes.MaxMemMB = index.MaxMemMB + 1;
    indexAttributes.MaxMetaEntries = index.MaxMetaEntries + 1;
    indexAttributes.MaxTotalDataSizeMB = index.MaxTotalDataSizeMB + 1;
    indexAttributes.MaxWarmDBCount = index.MaxWarmDBCount + 1;
    indexAttributes.MinRawFileSyncSecs = "disable";
    indexAttributes.PartialServiceMetaPeriod = index.PartialServiceMetaPeriod + 1;
    indexAttributes.QuarantineFutureSecs = index.QuarantineFutureSecs + 1;
    indexAttributes.QuarantinePastSecs = index.QuarantinePastSecs + 1;
    indexAttributes.RawChunkSizeBytes = index.RawChunkSizeBytes + 1;
    indexAttributes.RotatePeriodInSecs = index.RotatePeriodInSecs + 1;
    indexAttributes.ServiceMetaPeriod = index.ServiceMetaPeriod + 1;
    indexAttributes.SyncMeta = !index.SyncMeta;
    indexAttributes.ThrottleCheckPeriod = index.ThrottleCheckPeriod + 1;

    await index.UpdateAsync(indexAttributes);

    // check, then restore using map method
    //index.Refresh();

    await ClearIndex(service, indexName, index);

    //index.Disable();
    // NOTE(review): Disable() above is commented out, yet the index is
    // asserted Disabled here — presumably ClearIndex disables it; confirm.
    Assert.True(index.Disabled);

    // A restart is required for the disabled state to take full effect.
    this.SplunkRestart();

    service = this.Connect();
    index = await service.GetIndexAsync(indexName);
    //user = service.GetUsers().Get("admin");

    //index.Enable();
    Assert.False(index.Disabled);

    //// Restore original roles
    //user.Roles = roles;
    //user.Update();
}
/// <summary> /// Creates an instance of GeometryContent. /// </summary> public GeometryContent() { indices = new IndexCollection(); vertices = new VertexContent(); }
public void Main() { // Create the context. var context = new RareShortestPathProbabilityEstimation(); // Create the estimator. var estimator = new RareEventProbabilityEstimator() { PerformanceEvaluationParallelOptions = { MaxDegreeOfParallelism = 1 }, SampleGenerationParallelOptions = { MaxDegreeOfParallelism = 1 } }; // Set estimation parameters. double rarity = 0.1; int sampleSize = 1000; int finalSampleSize = 10000; // Solve the problem. var results = estimator.Estimate( context, rarity, sampleSize, finalSampleSize); // Show the results. Console.WriteLine("Under the nominal parameter:"); Console.WriteLine(context.InitialParameter); Console.WriteLine("the estimated probability of observing"); Console.WriteLine("a shortest path greater than 2.0 is:"); Console.WriteLine(results.RareEventProbability); Console.WriteLine(); Console.WriteLine("Details on iterations:"); var info = DoubleMatrix.Dense( -1 + results.Parameters.Count, 1 + results.Parameters.Last.Value.Count); info.SetColumnName(0, "Level"); for (int j = 1; j < info.NumberOfColumns; j++) { info.SetColumnName(j, "Param" + (j - 1).ToString()); } int i = 0; foreach (var level in results.Levels) { info[i++, 0] = level; } var referenceParameters = results.Parameters.Skip(1).ToList(); var paramIndexes = IndexCollection.Range(1, info.NumberOfColumns - 1); for (i = 0; i < info.NumberOfRows; i++) { info[i, paramIndexes] = referenceParameters[i]; } Console.WriteLine(); Console.WriteLine(info); }
public void CreateFromRowCollectionTest() { // elements is null { ArgumentExceptionAssert.Throw( () => { var partition = IndexPartition.Create((DoubleMatrixRowCollection)null); }, expectedType: typeof(ArgumentNullException), expectedPartialMessage: ArgumentExceptionAssert.NullPartialMessage, expectedParameterName: "elements"); } // elements is not null { // Create a matrix. var data = new double[18] { 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0 }; var matrix = DoubleMatrix.Dense(6, 3, data, StorageOrder.RowMajor); // Partition the matrix row indexes by the contents of each row: // a part is created for each distinct row. var elements = matrix.AsRowCollection(); var actual = IndexPartition.Create(elements); // Each part is identified by its corresponding row and contains // the indexes of the rows which are equal to the identifier. // Expected: // // Part identifier: 0 0 1 // // indexes: 0, 1 // // Part identifier: 0 1 0 // // indexes: 2, 3 // // Part identifier: 1 0 0 // // indexes: 4, 5 // var expected = new IndexPartition <DoubleMatrixRow> { partIndetifiers = new List <DoubleMatrixRow>(3) { elements[0], elements[2], elements[4] }, parts = new Dictionary <DoubleMatrixRow, IndexCollection>(3) { { elements[0], IndexCollection.Default(1) }, { elements[2], IndexCollection.Range(2, 3) }, { elements[4], IndexCollection.Range(4, 5) } } }; IndexPartitionAssert.AreEqual(expected, actual); } }
// IndexCollection, * internal abstract MatrixImplementor <T> this[IndexCollection rowIndexes, int columnIndex] { get; set; }
public void Main() { // Create a data stream. const int numberOfInstances = 27; string[] data = new string[numberOfInstances + 1] { "NUMERICAL,TARGET", "0,A", "0,A", "0,A", "1,B", "1,B", "1,B", "1,B", "2,B", "2,B", "3,C", "3,C", "3,C", "4,B", "4,B", "4,B", "4,C", "5,A", "5,A", "6,A", "7,C", "7,C", "7,C", "8,C", "8,C", "9,C", "9,C", "9,C" }; MemoryStream stream = new(); StreamWriter writer = new(stream); for (int i = 0; i < data.Length; i++) { writer.WriteLine(data[i].ToCharArray()); writer.Flush(); } stream.Position = 0; // Identify the special categorizer for variable NUMERICAL. StreamReader streamReader = new(stream); char columnDelimiter = ','; IndexCollection numericalColumns = IndexCollection.Range(0, 0); bool firstLineContainsColumnHeaders = true; int targetColumn = 1; IFormatProvider provider = CultureInfo.InvariantCulture; var specialCategorizers = CategoricalDataSet.CategorizeByEntropyMinimization( streamReader, columnDelimiter, numericalColumns, firstLineContainsColumnHeaders, targetColumn, provider); // Encode the categorical data set using the special categorizer. stream.Position = 0; IndexCollection extractedColumns = IndexCollection.Range(0, 1); CategoricalDataSet dataset = CategoricalDataSet.Encode( streamReader, columnDelimiter, extractedColumns, firstLineContainsColumnHeaders, specialCategorizers, provider); // Decode and show the data set. Console.WriteLine("Decoded data set:"); Console.WriteLine(); var decodedDataSet = dataset.Decode(); int numberOfVariables = dataset.Data.NumberOfColumns; foreach (var variable in dataset.Variables) { Console.Write(variable.Name + ","); } Console.WriteLine(); for (int i = 0; i < numberOfInstances; i++) { for (int j = 0; j < numberOfVariables; j++) { Console.Write(decodedDataSet[i][j] + ","); } Console.WriteLine(); } }
/// <summary> /// Asynchronously retrieves a collection of indexes. /// </summary> /// <param name="args"> /// Specification of the collection of indexes to retrieve. /// </param> /// <returns> /// An object representing the collection of indexes retrieved. /// </returns> /// <remarks> /// This method uses the <a href="http://goo.gl/qVZ6wJ">GET /// data/indexes</a> endpoint to construct the <see cref= /// "IndexCollection"/> object it returns. /// </remarks> public async Task<IndexCollection> GetIndexesAsync(IndexCollectionArgs args = null) { var collection = new IndexCollection(this.Context, this.Namespace); await collection.GetAsync(); return collection; }
/// <summary> /// This function reverses the order of the indices, i.e changes which direction /// this face faces in. /// </summary> /// <param name="parent">The parent polygon.</param> public void Reorder(Polygon parent) { // Create a new index collection. IndexCollection newIndices = new IndexCollection(); // Go through every old index and add it. for(int i = 0; i < indices.Count; i++) newIndices.Add(indices[indices.Count - (i+1)]); // Set the new index array. indices = newIndices; // Recreate each normal. GenerateNormals(parent); }
public void TestSerialization() { _target.AddIndexAsync(_testModels[0], _testModels[0].Key).Wait(); _target.AddIndexAsync(_testModels[1], _testModels[1].Key).Wait(); Assert.IsTrue(_target.IsDirty, "Dirty flag not set."); _target.FlushAsync().Wait(); Assert.IsFalse(_target.IsDirty, "Dirty flag not reset on flush."); var secondTarget = new IndexCollection<TestModel, string, int>("TestIndex", _driver, tm => tm.Data, _GetTestModelByKey); // are we able to grab things? Assert.AreEqual(2, secondTarget.Query.Count(), "Key count is incorrect."); Assert.AreEqual(0, _testAccessCount, "Lazy loader was accessed prematurely."); var testIndex = (from k in secondTarget.Query where k.Index.Equals(_testModels[1].Data) select k).FirstOrDefault(); Assert.IsNotNull(testIndex, "Test index not retrieved."); Assert.AreEqual(_testModels[1].Key, testIndex.Key, "Key mismatch."); Assert.AreEqual(0, _testAccessCount, "Lazy loader was accessed prematurely."); var testModel = testIndex.Value.Result; Assert.AreSame(_testModels[1], testModel, "Model does not match."); Assert.AreEqual(1, _testAccessCount, "Lazy loader access count is incorrect."); // now let's test refresh secondTarget.AddIndexAsync(_testModels[2],_testModels[2].Key).Wait(); secondTarget.FlushAsync().Wait(); Assert.AreEqual(2, _target.Query.Count(), "Unexpected key count in original collection."); _target.RefreshAsync().Wait(); Assert.AreEqual(3, _target.Query.Count(), "Refresh failed."); }
/// <summary>
/// Gets or sets the sub-matrix given by a string row sub-reference and
/// the specified column indexes on this dense, column-major implementor.
/// </summary>
/// <remarks>
/// The <paramref name="rowIndexes"/> string is not inspected here: this
/// implementor always operates on every row. Presumably only the ":"
/// (all rows) sub-reference reaches this indexer — confirm at call sites.
/// </remarks>
internal sealed override MatrixImplementor<double> this[string rowIndexes, IndexCollection columnIndexes]
{
    get
    {
        // Check if any column index is outside the range defined by matrix dimensions
        if (columnIndexes.maxIndex >= this.numberOfColumns)
        {
            throw new ArgumentOutOfRangeException(
                nameof(columnIndexes),
                ImplementationServices.GetResourceString(
                    "STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS"));
        }

        int thisNumberOfRows = this.numberOfRows;

        int[] columns = columnIndexes.indexes;
        int rowsLength = thisNumberOfRows;
        int columnsLength = columns.Length;

        var subMatrix = new DenseDoubleMatrixImplementor(rowsLength, columnsLength);
        double[] subStorage = subMatrix.storage;
        double[] thisStorage = this.storage;

        // Copy each requested column in full; storage is column-major,
        // so column c starts at offset (numberOfRows * c).
        int offset, index = 0;
        for (int j = 0; j < columnsLength; j++)
        {
            offset = thisNumberOfRows * columns[j];
            for (int i = 0; i < rowsLength; i++, index++)
            {
                subStorage[index] = thisStorage[i + offset];
            }
        }

        return (subMatrix);
    }
    set
    {
        // Check if any column index is outside the range defined by matrix dimensions
        if (columnIndexes.maxIndex >= this.numberOfColumns)
        {
            throw new ArgumentOutOfRangeException(
                nameof(columnIndexes),
                ImplementationServices.GetResourceString(
                    "STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS"));
        }

        int thisNumberOfRows = this.numberOfRows;

        // Test for mismatched matrix dimensions
        ImplementationServices.ThrowOnMismatchedMatrixDimensions(
            thisNumberOfRows, columnIndexes.Count, value);

        MatrixImplementor<double> sourceImplementor;

        // if the source is this, clone the data before writing
        if (object.ReferenceEquals(this, value))
        {
            sourceImplementor = (MatrixImplementor<double>)value.Clone();
        }
        else
        {
            sourceImplementor = value;
        }

        int offset;
        int[] columns = columnIndexes.indexes;

        switch (sourceImplementor.StorageScheme)
        {
            case StorageScheme.CompressedRow:
                {
                    var source = (SparseCsr3DoubleMatrixImplementor)sourceImplementor;
                    var sourceValues = source.values;
                    for (int j = 0; j < columns.Length; j++)
                    {
                        offset = thisNumberOfRows * columns[j];
                        for (int i = 0; i < thisNumberOfRows; i++)
                        {
                            // NOTE(review): positions absent from the sparse
                            // source are skipped, so existing destination
                            // entries are left unchanged rather than zeroed —
                            // confirm this is the intended semantics.
                            if (source.TryGetPosition(i, j, out int index))
                            {
                                this.storage[i + offset] = sourceValues[index];
                            }
                        }
                    }
                }
                break;
            case StorageScheme.Dense:
                {
                    var source = (DenseDoubleMatrixImplementor)sourceImplementor;
                    double[] sourceStorage = source.storage;
                    // Dense source is consumed sequentially in column-major order.
                    int index = 0;
                    for (int j = 0; j < columns.Length; j++)
                    {
                        offset = thisNumberOfRows * columns[j];
                        for (int i = 0; i < thisNumberOfRows; i++, index++)
                        {
                            this.storage[i + offset] = sourceStorage[index];
                        }
                    }
                }
                break;
        }
    }
}
/// <summary>
/// Tests getting property
/// <see cref="O:IReadOnlyTabularCollection{TValue, TCollection}.this"/>
/// when a row index is out of range.
/// </summary>
/// <typeparam name="TValue">The type of the items in the collection.</typeparam>
/// <typeparam name="TCollection">The type of the collection.</typeparam>
/// <param name="source">The source instance on which to invoke the property getter.</param>
/// <remarks>
/// NOTE(review): the method name reads "OutOrRange" — apparently a typo
/// for "OutOfRange". Renaming would break callers, so it is kept as is.
/// </remarks>
public static void AnyRowIndexIsOutOrRange<TValue, TCollection>(
    IReadOnlyTabularCollection<TValue, TCollection> source)
    where TCollection : IReadOnlyTabularCollection<TValue, TCollection>
{
    Assert.IsNotNull(source);

    // Expected exception message, fetched from the library's resource
    // table via reflection.
    string STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS =
        (string)Reflector.ExecuteStaticMember(
            typeof(ImplementationServices),
            "GetResourceString",
            new string[] { "STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS" });

    string parameterName = null;

    #region Int32

    // Row addressed by an out-of-range Int32, columns by each supported
    // indexer type.
    parameterName = "rowIndex";

    // Int32
    ArgumentExceptionAssert.Throw(
        () => { var sub = source[-1, 0]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS,
        expectedParameterName: parameterName);

    ArgumentExceptionAssert.Throw(
        () => { var sub = source[source.NumberOfRows, 0]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS,
        expectedParameterName: parameterName);

    // IndexCollection
    ArgumentExceptionAssert.Throw(
        () => { var sub = source[-1, IndexCollection.Range(0, source.NumberOfColumns - 1)]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS,
        expectedParameterName: parameterName);

    ArgumentExceptionAssert.Throw(
        () => { var sub = source[source.NumberOfRows, IndexCollection.Range(0, source.NumberOfColumns - 1)]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS,
        expectedParameterName: parameterName);

    // String
    ArgumentExceptionAssert.Throw(
        () => { var sub = source[-1, ":"]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS,
        expectedParameterName: parameterName);

    ArgumentExceptionAssert.Throw(
        () => { var sub = source[source.NumberOfRows, ":"]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS,
        expectedParameterName: parameterName);

    #endregion

    #region IndexCollection

    // Rows addressed by an IndexCollection whose maximum exceeds the
    // row dimension (Range upper bound equals NumberOfRows).
    parameterName = "rowIndexes";

    // Int32

    // IndexCollection instances cannot contain negative elements
    ArgumentExceptionAssert.Throw(
        () => { var sub = source[IndexCollection.Range(0, source.NumberOfRows), 0]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS,
        expectedParameterName: parameterName);

    // IndexCollection

    // IndexCollection instances cannot contain negative elements
    ArgumentExceptionAssert.Throw(
        () => { var sub = source[IndexCollection.Range(0, source.NumberOfRows), IndexCollection.Range(0, source.NumberOfColumns - 1)]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS,
        expectedParameterName: parameterName);

    // String

    // IndexCollection instances cannot contain negative elements
    ArgumentExceptionAssert.Throw(
        () => { var sub = source[IndexCollection.Range(0, source.NumberOfRows), ":"]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_INDEX_EXCEEDS_DIMS,
        expectedParameterName: parameterName);

    #endregion

    #region String

    // Rows addressed by an unsupported string sub-reference.
    parameterName = "rowIndexes";

    var STR_EXCEPT_TAB_UNSUPPORTED_SUBREF_SYNTAX =
        (string)Reflector.ExecuteStaticMember(
            typeof(ImplementationServices),
            "GetResourceString",
            new string[] { "STR_EXCEPT_TAB_UNSUPPORTED_SUBREF_SYNTAX" });

    // Int32
    ArgumentExceptionAssert.Throw(
        () => { var sub = source["", 0]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_UNSUPPORTED_SUBREF_SYNTAX,
        expectedParameterName: parameterName);

    ArgumentExceptionAssert.Throw(
        () => { var sub = source["end", 0]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_UNSUPPORTED_SUBREF_SYNTAX,
        expectedParameterName: parameterName);

    // IndexCollection
    ArgumentExceptionAssert.Throw(
        () => { var sub = source["", IndexCollection.Range(0, source.NumberOfColumns - 1)]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_UNSUPPORTED_SUBREF_SYNTAX,
        expectedParameterName: parameterName);

    ArgumentExceptionAssert.Throw(
        () => { var sub = source["end", IndexCollection.Range(0, source.NumberOfColumns - 1)]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_UNSUPPORTED_SUBREF_SYNTAX,
        expectedParameterName: parameterName);

    // String
    ArgumentExceptionAssert.Throw(
        () => { var sub = source["", ":"]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_UNSUPPORTED_SUBREF_SYNTAX,
        expectedParameterName: parameterName);

    ArgumentExceptionAssert.Throw(
        () => { var sub = source["end", ":"]; },
        expectedType: typeof(ArgumentOutOfRangeException),
        expectedPartialMessage: STR_EXCEPT_TAB_UNSUPPORTED_SUBREF_SYNTAX,
        expectedParameterName: parameterName);

    #endregion
}
public TableInfo() { ColumnCollection = new ColumnCollection(); IndexCollection = new IndexCollection(); CommentCollection = new CommentCollection(); }
internal abstract MatrixImplementor <T> this[IndexCollection linearIndexes] { get; }
/// <summary> /// Constructor /// </summary> public ProcessorModel() { Charts = new IndexCollection <IChartModel>(); Gateways = new IndexCollection <IGatewayModel>(); }
internal abstract MatrixImplementor <T> this[string rowIndexes, IndexCollection columnIndexes] { get; set; }
/// <summary>
/// Tests that method
/// <see cref="RandomIndexPermutation.Next"/>
/// terminates successfully as expected.
/// </summary>
/// <param name="indexes">
/// The indexes to permute.
/// </param>
/// <param name="numberOfRandomPermutations">
/// The number of permutations to draw.
/// </param>
/// <param name="criticalValue">
/// A quantile of the chi-squared distribution with a number of
/// degrees of freedom equal to the <see cref="IndexCollection.Count"/>
/// of <paramref name="indexes"/>
/// minus <c>1</c>.
/// To serve as the critical value for the Pearson's
/// chi-squared test whose null hypothesis assume that the
/// the distinct possible permutations
/// are equiprobable.
/// </param>
/// <param name="delta">The required accuracy.
/// Defaults to <c>.01</c>.</param>
public static void Succeed(
    IndexCollection indexes,
    int numberOfRandomPermutations,
    double criticalValue,
    double delta = .01)
{
    var randomPermutation = new RandomIndexPermutation(indexes);

    // Generate permutations
    var permutations = new IndexCollection[numberOfRandomPermutations];
    for (int i = 0; i < numberOfRandomPermutations; i++)
    {
        permutations[i] = randomPermutation.Next();
    }

    // Check the number of distinct generated permutations:
    // partition draw identifiers by the permutation they produced.
    var permutationIdentifiers =
        IndexCollection.Default(numberOfRandomPermutations - 1);
    var actualDistinctPermutations = IndexPartition.Create(
        permutationIdentifiers,
        (i) => { return (permutations[i]); });
    int numberOfActualDistinctPermutations =
        actualDistinctPermutations.Count;
    // Every one of the Count! possible permutations must be observed.
    Assert.AreEqual(
        expected: SpecialFunctions.Factorial(indexes.Count),
        actual: numberOfActualDistinctPermutations);

    // Compute the actual permutation probabilities (relative frequencies).
    DoubleMatrix actualPermutationProbabilities =
        DoubleMatrix.Dense(numberOfActualDistinctPermutations, 1);
    int j = 0;
    foreach (var identifier in actualDistinctPermutations.Identifiers)
    {
        actualPermutationProbabilities[j] =
            (double)actualDistinctPermutations[identifier].Count
            / (double)numberOfRandomPermutations;
        j++;
    }

    // Check that the Chebyshev Inequality holds true
    // for each permutation probability (uniform expected distribution).
    var expectedPermutationProbabilities =
        DoubleMatrix.Dense(
            numberOfActualDistinctPermutations, 1,
            1.0 / (double)numberOfActualDistinctPermutations);
    for (int i = 0; i < numberOfActualDistinctPermutations; i++)
    {
        ProbabilityDistributionTest.CheckChebyshevInequality(
            new BernoulliDistribution(expectedPermutationProbabilities[i]),
            actualPermutationProbabilities[i],
            numberOfRandomPermutations,
            delta);
    }

    // Check how good the actual permutation probabilities fit
    // the expected ones
    ProbabilityDistributionTest.CheckGoodnessOfFit(
        expectedPermutationProbabilities,
        actualPermutationProbabilities,
        criticalValue);
}