/// <summary>
/// Asserts that two lists of int triples contain the same sub-lists, ignoring
/// the order of the outer lists.
/// NOTE(review): the sort keys assume every sub-list has at least 3 elements — confirm with callers.
/// </summary>
void AssertEquals(IList<IList<int>> firstList, IList<IList<int>> secondList)
{
    // Same number of sub-lists is a precondition for the pairwise comparison below.
    // (Count property instead of the Count() extension — no enumerator needed on IList.)
    Assert.AreEqual(firstList.Count, secondList.Count);

    // Sort both sides by the first three elements so the comparison is
    // order-insensitive at the outer level.
    firstList = firstList
        .OrderBy(list => list[0])
        .ThenBy(list => list[1])
        .ThenBy(list => list[2])
        .ToList();
    secondList = secondList
        .OrderBy(list => list[0])
        .ThenBy(list => list[1])
        .ThenBy(list => list[2])
        .ToList();

    // BUG FIX: the previous element-wise Zip silently truncated to the shorter
    // sub-list, so e.g. [1,2,3] vs [1,2,3,4] compared equal. SequenceEqual also
    // fails when paired sub-lists differ in length. (A stray empty statement
    // after the assert was removed as well.)
    Assert.IsTrue(firstList
        .Zip(secondList, (firstSubList, secondSubList) => firstSubList.SequenceEqual(secondSubList))
        .All(subListsEqual => subListsEqual));
}
/// <summary>
/// Asserts that two event streams are equivalent: same length, pairwise-identical
/// concrete types, and — for node events — matching anchors, matching tags on
/// collection starts, and matching values (and, conditionally, tags) on scalars.
/// </summary>
private static void CompareEvents(IList<Events.Base> events1, IList<Events.Base> events2)
{
    Assert.That(events1.Count, Is.EqualTo(events2.Count));
    foreach (var pair in events1.Zip(events2, Tuple.Create))
    {
        // Each position must hold the same concrete event type on both sides.
        Assert.That(pair.Item1, Is.TypeOf(pair.Item2.GetType()));

        // Non-node events carry no further comparable state.
        var nodeEvent1 = pair.Item1 as Events.Node;
        if (nodeEvent1 == null) continue;
        // Safe direct cast: the TypeOf assertion above guarantees matching types.
        var nodeEvent2 = (Events.Node) pair.Item2;
        Assert.That(nodeEvent1.Anchor, Is.EqualTo(nodeEvent2.Anchor));

        // Collection-start events: compare tags, then move to the next pair.
        var cstart1 = nodeEvent1 as Events.CollectionStart;
        if (cstart1 != null)
        {
            Assert.That(cstart1.Tag, Is.EqualTo(((Events.CollectionStart)nodeEvent2).Tag));
            continue;
        }

        // Scalars: values always compared; tags only when BOTH sides are non-plain
        // (plain scalars presumably carry implicit tags that need not match — TODO confirm).
        var scalar1 = nodeEvent1 as Events.Scalar;
        if (scalar1 == null) continue;
        var scalar2 = (Events.Scalar) nodeEvent2;
        if (scalar1.ImplicitLevel != ScalarImplicitLevel.Plain && scalar2.ImplicitLevel != ScalarImplicitLevel.Plain)
            Assert.That(scalar1.Tag, Is.EqualTo(scalar2.Tag));
        Assert.That(scalar1.Value, Is.EqualTo(scalar2.Value));
    }
}
/// <summary>
/// Asserts that two int lists are equal element-by-element, in order.
/// </summary>
void AssertEquals(IList<int> subject, IList<int> @object)
{
    // Lengths must match before any element-wise comparison.
    Assert.AreEqual(subject.Count, @object.Count);

    // Compare positionally; the counts were just asserted equal, so indexing
    // the second list with the first list's bound is safe.
    var allElementsMatch = true;
    for (var index = 0; index < subject.Count; index++)
    {
        allElementsMatch &= subject[index] == @object[index];
    }
    Assert.IsTrue(allElementsMatch);
}
/// <summary>
/// Returns the fraction of positions at which the two chromosomes carry the
/// same value (a normalized Hamming similarity).
/// </summary>
/// <param name="aChromosone">First chromosome; its length is the denominator.</param>
/// <param name="anotherChromosone">Second chromosome; compared position by position.</param>
/// <returns>Identical positions divided by the first chromosome's length.</returns>
private static double Distance(IList<int> aChromosone, IEnumerable<int> anotherChromosone)
{
    // Zip truncates to the shorter sequence, so callers should pass
    // equal-length chromosomes for a meaningful result.
    var numberOfIdenticalBits = aChromosone
        .Zip(anotherChromosone, (first, second) => first == second)
        .Count(identical => identical);

    // Count property instead of the Count() extension (CA1829) — IList already
    // knows its length. NOTE: an empty first chromosome yields 0 / 0.0 == NaN.
    return numberOfIdenticalBits / (double)aChromosone.Count;
}
/// <summary>
/// Asserts that the actual records match the expected record strings, which are
/// parsed via ParseRecord and compared positionally.
/// </summary>
private void AssertRecordsAreEqual(IList<Record> actualRecords, params string[] expectedRecords)
{
    Assert.That(actualRecords, Is.Not.Null, "actualRecords");
    Assert.That(actualRecords.Count, Is.EqualTo(expectedRecords.Length));

    // Parse lazily; the Zip below drives enumeration in lock-step with the actuals.
    IEnumerable<object> parsedExpectations = expectedRecords.Select(ParseRecord);
    foreach (var comparison in actualRecords.Zip(parsedExpectations, Tuple.Create))
    {
        Assert.That(comparison.Item1, Is.EqualTo(comparison.Item2));
    }
}
/// <summary>
/// Builds a ModelShip from a CSV header row and a matching data row by keying
/// each cell on its column name.
/// </summary>
public static ModelShip FromCsvRecord(IList<string> header, IList<string> row)
{
    // Column-name -> cell lookup. Dictionary.Add, like ToDictionary, throws
    // ArgumentException on a duplicate header; Zip truncates to the shorter list.
    var keyedRow = new Dictionary<string, string>();
    foreach (var cell in header.Zip(row, Tuple.Create))
    {
        keyedRow.Add(cell.Item1, cell.Item2);
    }

    return new ModelShip()
    {
        No = keyedRow["No"].ToInt32(),
        ClassOfShip = keyedRow["ClassOfShip"],
        Name = keyedRow["Name"],
        // Currency-formatted prices fall back to -1 when unparsable.
        TaxIncludedPrice = keyedRow["TaxIncludedPrice"].ToInt32OrDefault(System.Globalization.NumberStyles.Currency, -1),
        Price = keyedRow["Price"].ToInt32OrDefault(System.Globalization.NumberStyles.Currency, -1),
        Maker = keyedRow["Maker"]
    };
}
/// <summary>
/// Asserts that two string lists contain the same elements, ignoring order.
/// (Name kept as-is — "AssertEquers" — because callers depend on it.)
/// </summary>
void AssertEquers(IList<string> firstList, IList<string> secondList)
{
    // Count property instead of the Count() extension — IList knows its length.
    Assert.AreEqual(firstList.Count, secondList.Count);

    // Sort both sides so the equality check is order-insensitive.
    firstList = firstList.OrderBy(item => item).ToList();
    secondList = secondList.OrderBy(item => item).ToList();

    // SequenceEqual replaces the hand-rolled Zip/All pipeline; string equality
    // semantics (including nulls) are identical to the previous `==` comparison.
    Assert.IsTrue(firstList.SequenceEqual(secondList));
}
/// <summary>
/// Saves a batch of objects by pairing each state with its pending field
/// operations, building one REST sub-request per object, and issuing them as a
/// batch. Returns one task per object that resolves to the decoded saved state.
/// </summary>
/// <param name="states">Object states to save; a null ObjectId means "create".</param>
/// <param name="operationsList">Pending operations, positionally aligned with <paramref name="states"/>.</param>
/// <param name="sessionToken">Session token forwarded to the batch request.</param>
/// <param name="cancellationToken">Propagated to the batch execution.</param>
public IList<Task<IObjectState>> SaveAllAsync(IList<IObjectState> states, IList<IDictionary<string, IParseFieldOperation>> operationsList, string sessionToken, CancellationToken cancellationToken)
{
    // One sub-request per (state, operations) pair. New objects (no ObjectId)
    // POST to the class endpoint; existing ones PUT to the object endpoint.
    // Path segments are escaped since class names / ids appear in the URL.
    var requests = states.Zip(operationsList, (item, ops) => new Dictionary<string, object>
    {
        { "method", (item.ObjectId == null ? "POST" : "PUT") },
        { "path", (item.ObjectId == null
            ? string.Format("/1/classes/{0}", Uri.EscapeDataString(item.ClassName))
            : string.Format("/1/classes/{0}/{1}", Uri.EscapeDataString(item.ClassName), Uri.EscapeDataString(item.ObjectId))) },
        { "body", ParseObject.ToJSONObjectForSaving(ops) }
    }).Cast<object>().ToList();

    var batchTasks = ExecuteBatchRequests(requests, sessionToken, cancellationToken);

    // Decode each sub-response into an object state as its task completes.
    var stateTasks = new List<Task<IObjectState>>();
    foreach (var task in batchTasks)
    {
        stateTasks.Add(task.OnSuccess(t =>
        {
            return ParseObjectCoder.Instance.Decode(t.Result, ParseDecoder.Instance);
        }));
    }
    return stateTasks;
}
/// <summary>
/// Asserts that two token streams are identical: same length, and the same
/// Kind and Value at every position.
/// </summary>
private void AssertTokenStreamEquals(IList<Token> eTokens, IList<Token> tokens)
{
    Assert.AreEqual(eTokens.Count, tokens.Count);

    // The counts were just asserted equal, so a shared index is safe.
    for (var i = 0; i < eTokens.Count; i++)
    {
        var expected = eTokens[i];
        var actual = tokens[i];
        Assert.AreEqual(expected.Kind, actual.Kind);
        Assert.AreEqual(expected.Value, actual.Value);
    }
}
/// <summary>
/// Computes the Euclidean distance between two points in the sample space.
/// </summary>
private double distance(IList<double> sample1, IList<double> sample2)
{
    // Pair coordinates and square each componentwise difference.
    // (Math.Pow kept so results are bit-identical to the original.)
    var squaredDifferences = sample1.Zip(sample2, (a, b) => Math.Pow(a - b, 2.0));
    return Math.Sqrt(squaredDifferences.Sum());

    // Manhattan distance (kept from the original as an alternative):
    //return sample1.Zip(sample2, (s1, s2) => Math.Abs(s1 - s2)).Sum();
}
/// <summary>
/// Asserts that the actual key list is non-null, has the expected length, and
/// matches the expected keys pairwise via Key.Equals.
/// </summary>
private void AssertKeyCollectionsEquality(IList<Key> expected, IList<Key> actual)
{
    Assert.NotNull(actual);
    Assert.Equal(expected.Count, actual.Count);

    // All-pairs-equal expressed as "no pair differs" — same truth value.
    var anyMismatch = expected.Zip(actual, (want, got) => want.Equals(got)).Any(equal => !equal);
    Assert.True(!anyMismatch);
}
/// <summary>
/// Computes the Euclidean distance between two sample vectors.
/// </summary>
private double distance(IList<double> sample1, IList<double> sample2)
{
    // Accumulate squared componentwise differences in pair order, exactly as
    // LINQ's Sum over the zipped sequence would.
    var sumOfSquares = 0.0;
    foreach (var pair in sample1.Zip(sample2, Tuple.Create))
    {
        sumOfSquares += Math.Pow(pair.Item1 - pair.Item2, 2.0);
    }
    return Math.Sqrt(sumOfSquares);
}
/// <summary>
/// Pitch-shifts the given sample buffer in place: forward FFT, shift the
/// frequency bins by <paramref name="pitchShift"/>, inverse FFT, then rescale
/// and remove the DC offset.
/// NOTE(review): numSampsToProcess, sampleOverlap and the sampleRate PARAMETER
/// are never read here — the SampleRate member is used instead. Confirm whether
/// the parameter was meant to be used.
/// </summary>
private static void PitchShift2(float pitchShift, int numSampsToProcess, int sampleOverlap, int sampleRate, IList<float> data)
{
    // Promote the real samples to complex numbers with zero imaginary parts.
    var fftBuffer = data.Zip(
        Enumerable.Repeat(0f, data.Count),
        (real, imaginary) => new Complex(real, imaginary)).ToArray();
    ShortTimeFourierTransform(fftBuffer, FftDirection.Forward);

    var bins = CalculateBins(SampleRate, fftBuffer);
    // Bin 0 holds the DC component; normalize by buffer length for later removal.
    var dcOffset = bins[0].Magnitude / fftBuffer.Length;
    var shiftedBins = PitchShiftBins(pitchShift, bins);

    var newBuffer = SynthesizeFft(SampleRate, shiftedBins);
    ShortTimeFourierTransform(newBuffer, FftDirection.Inverse);

    // Rescale the inverse transform and subtract the DC offset, writing the
    // result back over the caller's buffer.
    var factor = (newBuffer.Length / 2f);
    for (var i = 0; i < fftBuffer.Length; i++)
    {
        data[i] = newBuffer[i].Real / factor - dcOffset;
    }
}
/// <summary>
/// Least-squares delta calibration over the probed bed points. Runs two
/// Newton-Raphson iterations, each building the normal equations for
/// <paramref name="numFactors"/> correction factors and solving them with
/// Gauss-Jordan elimination, then applying the solution via Adjust().
/// </summary>
/// <param name="numFactors">Factors to calibrate; must be 3, 4, 6 or 7.</param>
/// <param name="zBedProbePoints">Probe points (X, Y, measured Z error); NaN points are dropped.</param>
/// <param name="normalise">Forwarded to Adjust(); semantics defined there.</param>
/// <returns>Tuple of (initial RMS error, expected RMS error after calibration).</returns>
/// <exception cref="Exception">Unsupported factor count, too few points, or a
/// singular system (NaN in the Gauss-Jordan solution).</exception>
public Tuple<double, double> DoDeltaCalibration(int numFactors, IList<PointError> zBedProbePoints, bool normalise)
{
    if (numFactors != 3 && numFactors != 4 && numFactors != 6 && numFactors != 7)
    {
        throw new Exception("Error: " + numFactors + " factors requested but only 3, 4, 6 and 7 supported");
    }
    if (numFactors > zBedProbePoints.Count)
    {
        throw new Exception("Error: need at least as many points as factors you want to calibrate");
    }

    // Transform the probing points to motor endpoints and store them in a matrix,
    // so that we can do multiple iterations using the same data.
    var probeMotorPositions = new PointError[zBedProbePoints.Count];
    var corrections = new double[zBedProbePoints.Count];
    var initialSumOfSquares = 0.0;
    for (var i = 0; i < zBedProbePoints.Count; ++i)
    {
        corrections[i] = 0.0;
        var machinePos = new double[3] { zBedProbePoints[i].X, zBedProbePoints[i].Y, 0 };
        probeMotorPositions[i] = new PointError(Transform(machinePos, 0), Transform(machinePos, 1), Transform(machinePos, 2));
        initialSumOfSquares += FSquare(zBedProbePoints[i].X) + FSquare(zBedProbePoints[i].Y) + FSquare(zBedProbePoints[i].ZError);
    }

    // Remove any erroneous data points.. maybe not the best idea??
    // Note: unlike ==, double.Equals(double.NaN) IS true when the value is NaN,
    // so this filter really does drop NaN motor positions.
    var zip = zBedProbePoints
        .Zip(probeMotorPositions, (point, pos) => new { point, pos })
        .Where(_ => !_.pos.X.Equals(double.NaN) && !_.pos.Y.Equals(double.NaN) && !_.pos.ZError.Equals(double.NaN))
        .ToList();
    zBedProbePoints = (from z in zip select z.point).ToList();
    probeMotorPositions = (from z in zip select z.pos).ToArray();

    // Do 1 or more Newton-Raphson iterations.
    var iteration = 0;
    double expectedRmsError;
    for (;;)
    {
        // Build a Nx7 matrix of derivatives with respect to xa, xb, yc, za, zb, zc, diagonal.
        var derivativeMatrix = new double[zBedProbePoints.Count, numFactors];
        for (var i = 0; i < zBedProbePoints.Count; ++i)
        {
            for (var j = 0; j < numFactors; ++j)
            {
                derivativeMatrix[i, j] = ComputeDerivative(j, probeMotorPositions[i].X, probeMotorPositions[i].Y, probeMotorPositions[i].ZError);
            }
        }

        // Now build the normal equations for least squares fitting.
        var normalMatrix = new double[numFactors, numFactors + 1];
        double temp;
        for (var i = 0; i < numFactors; ++i)
        {
            for (var j = 0; j < numFactors; ++j)
            {
                temp = derivativeMatrix[0,i] * derivativeMatrix[0,j];
                for (var k = 1; k < zBedProbePoints.Count; ++k)
                {
                    temp += derivativeMatrix[k,i] * derivativeMatrix[k,j];
                }
                normalMatrix[i,j] = temp;
            }
            // Right-hand side column: derivatives times the negated residual
            // (probed Z error plus the correction applied so far).
            temp = derivativeMatrix[0,i] * -(zBedProbePoints[0].ZError + corrections[0]);
            for (var k = 1; k < zBedProbePoints.Count; ++k)
            {
                temp += derivativeMatrix[k,i] * -(zBedProbePoints[k].ZError + corrections[k]);
            }
            normalMatrix[i, numFactors] = temp;
        }

        double[] solution = GaussJordan(ref normalMatrix, numFactors);
        if (solution.Any(_ => _.Equals(double.NaN)))
            throw new Exception("Unable to calculate corrections. Please make sure the bed probe points are all distinct.");

        //if (debug)
        //{
        //    DebugPrint(PrintVector("Solution", solution));
        //    // Calculate and display the residuals
        //    var residuals = [];
        //    for (var i = 0; i < numPoints; ++i)
        //    {
        //        var r = zBedProbePoints[i];
        //        for (var j = 0; j < numFactors; ++j)
        //        {
        //            r += solution[j] * derivativeMatrix.data[i][j];
        //        }
        //        residuals.push(r);
        //    }
        //    DebugPrint(PrintVector("Residuals", residuals));
        //}

        Adjust(numFactors, solution, normalise);

        // Calculate the expected probe heights using the new parameters.
        {
            var expectedResiduals = new double[zBedProbePoints.Count];
            var sumOfSquares = 0.0;
            for (var i = 0; i < zBedProbePoints.Count; ++i)
            {
                // Shift each motor position by the first three solution terms,
                // then recompute the corrected Z via the inverse transform.
                probeMotorPositions[i] = new PointError(probeMotorPositions[i].X + solution[0], probeMotorPositions[i].Y + solution[1], probeMotorPositions[i].ZError + solution[2]);
                var newZ = InverseTransform(probeMotorPositions[i].X, probeMotorPositions[i].Y, probeMotorPositions[i].ZError);
                corrections[i] = newZ;
                expectedResiduals[i] = zBedProbePoints[i].ZError + newZ;
                sumOfSquares += FSquare(expectedResiduals[i]);
            }
            expectedRmsError = Math.Sqrt(sumOfSquares / zBedProbePoints.Count);
        }

        // Decide whether to do another iteration. Two is slightly better than one, but three doesn't improve things.
        // Alternatively, we could stop when the expected RMS error is only slightly worse than the RMS of the residuals.
        ++iteration;
        if (iteration == 2)
        {
            break;
        }
    }

    return Tuple.Create(Math.Sqrt(initialSumOfSquares / zBedProbePoints.Count), expectedRmsError);
}
/// <summary>
/// Returns true when both lists have the same length and every componentwise
/// difference is within a 1e-6 tolerance.
/// </summary>
public static bool Checkit(IList<double> first, IList<double> second)
{
    // Different lengths can never be "equal".
    if (first.Count != second.Count)
    {
        return false;
    }

    // Negated Any (rather than All(<= tol)) deliberately preserves the
    // original's NaN behavior: Math.Abs(NaN) > 1e-6 is false, so NaN pairs
    // do not count as a mismatch.
    var differences = first.Zip(second, (a, b) => a - b);
    return !differences.Any(difference => Math.Abs(difference) > 1e-6);
}
/// <summary>
/// Writes the given bitmaps to <paramref name="stream"/> as a PNG-compressed
/// ICO file: ICONDIR header, one 16-byte directory entry per image, then the
/// PNG payloads. Zero-sized bitmaps are silently skipped.
/// </summary>
/// <exception cref="ArgumentException">An image exceeds 256x256, or there are
/// more than ushort.MaxValue images.</exception>
private static void WriteIcoToStream(Stream stream, IList<Bitmap> bitmaps)
{
    // ICO stores width/height in a single byte (0 means 256), so larger images cannot be represented.
    if (bitmaps.Any(bmp => bmp.Width > 256 || bmp.Height > 256))
    {
        throw new ArgumentException(
            "ICO files cannot contain images with dimensions greater than 256x256 pixels");
    }
    bitmaps = bitmaps.Where(bmp => bmp.Width > 0 && bmp.Height > 0).ToList();

    // The image count is a 16-bit unsigned field in the ICONDIR header.
    if (bitmaps.Count > ushort.MaxValue)
    {
        // BUG FIX: the message previously reported short.MaxValue (32767) even
        // though the limit enforced (and the format's limit) is ushort.MaxValue.
        throw new ArgumentException(
            "ICO files can only contain up to " + ushort.MaxValue + " images.");
    }

    // Encode every bitmap as PNG up front so each payload's size is known
    // before the directory entries (which carry size + offset) are written.
    IList<byte[]> pngImages = new List<byte[]>(bitmaps.Count);
    foreach (var bmp in bitmaps)
    {
        using (var pngStream = new MemoryStream())
        {
            bmp.Save(pngStream, ImageFormat.Png);
            pngImages.Add(pngStream.ToArray());
        }
    }

    var numImages = (ushort) bitmaps.Count;
    WriteIcoMainHeader(stream, numImages);

    // Image data begins after the 6-byte ICONDIR plus one 16-byte entry per image.
    uint headerSize = 6;
    uint imageHeaderSize = 16;
    uint offset = headerSize + numImages * imageHeaderSize;
    foreach (var images in bitmaps.Zip(pngImages, Tuple.Create))
    {
        var bmp = images.Item1;
        var png = images.Item2;
        var imageSize = (uint) png.Length;
        // The direct cast to a byte here is safe. We validate the image size above,
        // so all images have a width/height in the range [1, 256]. Casting [1, 255]
        // to a byte returns the same number, and casting 256 returns 0, which
        // represents a dimension of 256 pixels in the ICO format.
        WriteIcoImageHeader(stream, (byte) bmp.Width, (byte) bmp.Height, imageSize, offset);
        offset += imageSize;
    }
    foreach (var png in pngImages)
    {
        stream.Write(png, 0, png.Length);
    }
}