/// <summary>
/// Converts the given value to a ColourFormat. Strings are parsed via
/// ColourFormat.Parse; anything else is delegated to the base converter.
/// </summary>
public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value)
{
    var text = value as string;
    if (text != null)
    {
        return ColourFormat.Parse(text);
    }

    return base.ConvertFrom(context, culture, value);
}
/// <summary>
/// Translates a list of document annotations into an AnnotationList of their
/// imaging counterparts, preserving order.
/// </summary>
public static AnnotationList Translate(List<ImageDoc.Annotation> annotations, float scale, ColourFormat colourFormat, PointF controlOffset, PointF imageOffset)
{
    var translated = new AnnotationList();

    // Translate each source annotation individually and accumulate the results.
    annotations.ForEach(
        source => translated.Add(Translate(source, scale, colourFormat, controlOffset, imageOffset)));

    return translated;
}
/// <summary>
/// Maps the project's ColourFormat onto the Kinect SDK's ColorImageFormat.
/// Throws ArgumentException for any unsupported format.
/// </summary>
internal static Microsoft.Kinect.ColorImageFormat ConvertToKinect(ColourFormat df)
{
    // NOTE(review): HighRes30Fps maps to the 640x480 Kinect resolution here —
    // confirm this is intentional, since the Kinect SDK also exposes a
    // higher-resolution 1280x960 RGB format.
    if (df == ColourFormat.HighRes30Fps)
    {
        return Microsoft.Kinect.ColorImageFormat.RgbResolution640x480Fps30;
    }

    throw new ArgumentException("Unsupported colour format!");
}
/// <summary>
/// Verifies that the four-argument constructor assigns each channel in
/// red, green, blue, alpha order.
/// </summary>
public void ColourFormatCtorGeneral()
{
    // Arrange / Act: four distinct channel values so a swapped assignment would be caught.
    var colour = new ColourFormat(1, 2, 4, 8);

    // Assert: each channel holds exactly the value it was constructed with.
    Assert.AreEqual(1, colour.Red);
    Assert.AreEqual(2, colour.Green);
    Assert.AreEqual(4, colour.Blue);
    Assert.AreEqual(8, colour.Alpha);
}
/// <summary>
/// Verifies that the parameterless constructor zero-initialises every channel.
/// </summary>
public void ColourFormatCtorDefault()
{
    var colour = new ColourFormat();

    // All four channels must default to zero.
    Assert.AreEqual(0, colour.Red);
    Assert.AreEqual(0, colour.Green);
    Assert.AreEqual(0, colour.Blue);
    Assert.AreEqual(0, colour.Alpha);
}
/// <summary>
/// Helper: verifies that the single-argument constructor applies the same
/// value to all four channels.
/// </summary>
private static void ColourFormatCtorUniform(int expected)
{
    var colour = new ColourFormat(expected);

    // Every channel should carry the uniform value.
    Assert.AreEqual(expected, colour.Red);
    Assert.AreEqual(expected, colour.Green);
    Assert.AreEqual(expected, colour.Blue);
    Assert.AreEqual(expected, colour.Alpha);
}
/// <summary>
/// Helper: builds a (1, 2, 4, 8) colour, then uses the copy-and-modify
/// constructor to zero the named field, and checks the result against the
/// expected channel values (red, green, blue, alpha order).
/// </summary>
private static void ColourFormatCtorCopyModify(string field, params int[] expected)
{
    var source = new ColourFormat(1, 2, 4, 8);

    // Copy the source while setting the named field to zero.
    var modified = new ColourFormat(source, field, 0);

    Assert.AreEqual(expected[0], modified.Red);
    Assert.AreEqual(expected[1], modified.Green);
    Assert.AreEqual(expected[2], modified.Blue);
    Assert.AreEqual(expected[3], modified.Alpha);
}
/// <summary>
/// Verifies that two colours built from identical channel values compare
/// equal via Equals, ==, and !=.
/// </summary>
public void ColourFormatEquality()
{
    var first = new ColourFormat(1, 2, 4, 8);
    var second = new ColourFormat(1, 2, 4, 8);

    // All three equality views must agree.
    Assert.IsTrue(first.Equals(second));
    Assert.IsTrue(first == second);
    Assert.IsFalse(first != second);
}
/// <summary>
/// Helper: verifies that a colour built from the given four channel values
/// compares unequal to the reference (1, 2, 4, 8) colour.
/// </summary>
private static void ColourFormatInequality(params int[] a)
{
    var reference = new ColourFormat(1, 2, 4, 8);
    var other = new ColourFormat(a[0], a[1], a[2], a[3]);

    // All three equality views must report inequality.
    Assert.IsFalse(reference.Equals(other));
    Assert.IsFalse(reference == other);
    Assert.IsTrue(reference != other);
}
/// <summary>
/// Verifies that the copy constructor yields a value-equal but distinct
/// instance of the source colour.
/// </summary>
public void ColourFormatCtorCopy()
{
    var source = new ColourFormat(1, 2, 4, 8);
    var copy = new ColourFormat(source);

    // Value equality holds across Equals, ==, and !=.
    Assert.IsTrue(source.Equals(copy));
    Assert.IsTrue(source == copy);
    Assert.IsFalse(source != copy);

    // The copy must be a separate instance, not an alias.
    Assert.IsFalse(ReferenceEquals(source, copy));
}
/// <summary>
/// Converts 2D tracking points into 3D world-space points by translating the
/// project formats to Kinect formats and delegating to the colour-to-world
/// point conversion.
/// </summary>
public IDictionary<TrackingMarker, Point3D> Convert2DTrackingPointsTo3DTrackingPoints(IDictionary<TrackingMarker, Point2D> trackingPoints2D, DepthFormat dFormat, ICollection<short> depthShorts, ColourFormat cFormat)
{
    // Translate the project-level formats into their Kinect SDK equivalents.
    var kinectDepthFormat = FormatConvertor.ConvertToKinect(dFormat);
    var kinectColourFormat = FormatConvertor.ConvertToKinect(cFormat);

    // HACK!!!!!!!!!!!!!!!! (kept from original: the underlying conversion
    // needs an array, so materialise the collection.)
    short[] depthArray = depthShorts.ToArray();

    return _ConvertColourPointsToWorldPoints(trackingPoints2D, kinectDepthFormat, depthArray, kinectColourFormat);
}
/// <summary>
/// Opens a recorded Kinect session: loads the coordinate-mapping parameters,
/// opens the depth and colour frame streams, optionally deserialises a
/// calibration, reads per-frame timestamps, and caches frame sizes.
/// </summary>
/// <param name="mappingParamsPath">Path to the serialised coordinate-mapping parameter blob.</param>
/// <param name="timingPath">Path to the timing file (one Single per frame, fractional seconds).</param>
/// <param name="depthPath">Path to the raw depth frame file (kept open for the recording's lifetime).</param>
/// <param name="dFormat">Depth format of the recorded depth frames.</param>
/// <param name="colourPath">Path to the raw colour frame file (kept open for the recording's lifetime).</param>
/// <param name="cFormat">Colour format of the recorded colour frames.</param>
/// <param name="calibrationPath">Optional calibration file path; null marks the recording as uncalibrated.</param>
public Recording(string mappingParamsPath, string timingPath, string depthPath, DepthFormat dFormat, string colourPath, ColourFormat cFormat, string calibrationPath) : this()
{
    // Load the whole mapping-parameter blob. File.ReadAllBytes both guarantees a
    // complete read (FileStream.Read may return fewer bytes than requested, which
    // the original code silently ignored) and closes the file even on exception.
    this.mappingParams = File.ReadAllBytes(mappingParamsPath);

    // Depth and colour streams stay open: frames are read from them on demand
    // for the lifetime of this Recording.
    this.fDepth = new FileStream(depthPath, FileMode.Open);
    this.fColour = new FileStream(colourPath, FileMode.Open);

    // Deserialise the calibration only when a path was supplied.
    if (calibrationPath == null)
    {
        this.IsCalibratedRecording = false;
    }
    else
    {
        this.calibration = Calibration.CreateFromFile(calibrationPath);
        this.IsCalibratedRecording = true;
    }

    // Read the per-frame timing data (fractional seconds). 'using' guarantees the
    // handle is released even if a read throws — the original leaked it on error.
    using (var fTiming = new FileStream(timingPath, FileMode.Open))
    using (var timingReader = new BinaryReader(fTiming))
    {
        this.Timestamps = new float[fTiming.Length / sizeof(Single)];
        for (int i = 0; i < this.Timestamps.Length; i++)
        {
            this.Timestamps[i] = timingReader.ReadSingle();
        }
    }

    // Cache per-frame sizes and derive the frame count from the depth file length.
    this.cFrameSizeBytes = FormatConvertor.ByteDataLength(cFormat);
    this.dFrameSizeShorts = FormatConvertor.PixelDataLength(dFormat);
    this.NumberOfFrames = (int)this.fDepth.Length / FormatConvertor.ByteDataLength(dFormat);
}
/// <summary>
/// Builds a coloured 3D point cloud from a depth frame and its matching colour
/// frame, using the Kinect coordinate mapper to project each depth pixel into
/// skeleton (world) space and to look up its BGRA colour.
/// </summary>
/// <param name="dFormat">Format of the depth frame.</param>
/// <param name="depthShorts">Raw depth samples (low 3 bits are the player index).</param>
/// <param name="cFormat">Format of the colour frame.</param>
/// <param name="colourPixels">Raw colour bytes, 4 bytes per pixel in B, G, R, A order.</param>
/// <returns>One Point3D per valid depth pixel, carrying world position and RGB colour.</returns>
public ICollection<Point3D> GeneratePointCloud(DepthFormat dFormat, ICollection<short> depthShorts, ColourFormat cFormat, ICollection<byte> colourPixels)
{
    var kdFormat = FormatConvertor.ConvertToKinect(dFormat);
    var kcFormat = FormatConvertor.ConvertToKinect(cFormat);
    var dWidth = FormatConvertor.PixelWidth(kdFormat);
    var cWidth = FormatConvertor.PixelWidth(kcFormat);
    var points = new List<Point3D>();
    var d = depthShorts.ToArray();
    var c = colourPixels.ToArray();
    for (int i = 0; i < d.Length; i++)
    {
        // Raw Kinect depth samples carry the player index in the low 3 bits;
        // shift it away to get the depth value proper.
        var depth = (short)(d[i] >> 3);
        if (depth < 0)
        {
            continue; // no valid depth reading for this pixel
        }

        // Recover the 2D pixel position from the linear index, then map into
        // skeleton space and into the colour image.
        var dip = new DepthImagePoint() { Depth = depth, X = i % dWidth, Y = i / dWidth };
        var skel = this.mapper.MapDepthPointToSkeletonPoint(kdFormat, dip);
        var cip = this.mapper.MapDepthPointToColorPoint(kdFormat, dip, kcFormat);

        // 4 bytes per colour pixel (BGRA).
        var cIndex = 4 * (cip.X + cip.Y * cWidth);

        // BUGFIX: we read c[cIndex] .. c[cIndex + 2], so all three indices must
        // be in range. The old check (cIndex > c.Length) allowed cIndex values
        // within 3 of the buffer end and read past it.
        if (cIndex < 0 || cIndex + 2 >= c.Length)
        {
            continue;
        }

        var r = c[cIndex + 2];
        var g = c[cIndex + 1];
        var b = c[cIndex];
        points.Add(new Point3D(skel.X, skel.Y, skel.Z, r, g, b));
    }

    return points;
}
/// <summary>
/// Pixel height for a ColourFormat, obtained by converting to the Kinect
/// format and delegating to the Kinect-format overload.
/// </summary>
public static int PixelHeight(ColourFormat cFormat)
{
    var kinectFormat = ConvertToKinect(cFormat);
    return PixelHeight(kinectFormat);
}
// Colour
/// <summary>
/// Pixel data length for a ColourFormat, obtained by converting to the Kinect
/// format and delegating to the Kinect-format overload.
/// </summary>
public static int PixelDataLength(ColourFormat cFormat)
{
    var kinectFormat = ConvertToKinect(cFormat);
    return PixelDataLength(kinectFormat);
}
/// <summary>
/// Byte data length for a ColourFormat, obtained by converting to the Kinect
/// format and delegating to the Kinect-format overload.
/// </summary>
public static int ByteDataLength(ColourFormat cFormat)
{
    var kinectFormat = ConvertToKinect(cFormat);
    return ByteDataLength(kinectFormat);
}
/// <summary>
/// Creates an uncalibrated recording: forwards to the full constructor with a
/// null calibration path, which sets IsCalibratedRecording to false.
/// </summary>
public Recording(string mappingParamsPath, string timingPath, string depthPath, DepthFormat dFormat, string colourPath, ColourFormat cFormat) : this(mappingParamsPath, timingPath, depthPath, dFormat, colourPath, cFormat, null) { }
/// <summary>
/// Translates a single document annotation into the matching imaging
/// annotation type, copying geometry, colours, fill and text properties.
/// </summary>
/// <param name="annotation">Source annotation; its AnnotationType string must name an ImagingAnnotationTypes member (Enum.Parse throws otherwise).</param>
/// <param name="scale">Display scale passed through to each annotation constructor.</param>
/// <param name="colourFormat">Colour format passed through to each annotation constructor.</param>
/// <param name="controlOffset">Offset of the hosting control, passed through to each constructor.</param>
/// <param name="imageOffset">Offset of the image within the control, passed through to each constructor.</param>
/// <returns>The constructed imaging annotation.</returns>
public static BaseAnnotation Translate(ImageDoc.Annotation annotation, float scale, ColourFormat colourFormat, PointF controlOffset, PointF imageOffset)
{
    BaseAnnotation imagingAnnotation = null;

    // Resolve the annotation type from its string name.
    ImageDoc.ImagingAnnotationTypes annType = (ImageDoc.ImagingAnnotationTypes) Enum.Parse(typeof(ImageDoc.ImagingAnnotationTypes), annotation.AnnotationType);

    // Construct the concrete imaging annotation for the resolved type.
    // NOTE(review): if annType matches none of these cases, imagingAnnotation
    // stays null and the property assignments after the switch will throw a
    // NullReferenceException — confirm all enum members are covered.
    switch (annType)
    {
        case ImageDoc.ImagingAnnotationTypes.FreeLine:
        {
            imagingAnnotation = new FreeLineAnnotation(SimpleTypeToColor(annotation.LineColor), annotation.LineThickness, scale, colourFormat, controlOffset, imageOffset);
            break;
        }

        // Highlighters are represented as polygons too.
        case ImageDoc.ImagingAnnotationTypes.Highlighter:
        case ImageDoc.ImagingAnnotationTypes.Polygon:
        {
            imagingAnnotation = new PolygonAnnotation(SimpleTypeToColor(annotation.LineColor), annotation.LineThickness, scale, colourFormat, controlOffset, imageOffset);
        }
        break;

        case ImageDoc.ImagingAnnotationTypes.Stamp:
        {
            imagingAnnotation = new StampAnnotation(annotation.LineThickness, scale, colourFormat, controlOffset, imageOffset);
        }
        break;

        case ImageDoc.ImagingAnnotationTypes.StraightLine:
        {
            imagingAnnotation = new StraightLineAnnotation(SimpleTypeToColor(annotation.LineColor), annotation.LineThickness, scale, colourFormat, controlOffset, imageOffset);
        }
        break;

        case ImageDoc.ImagingAnnotationTypes.TextAnnotation:
        {
            // Top left of the text
            Point topLeft = new Point((int)annotation.Rect.TopLeft.X, (int)annotation.Rect.TopLeft.Y);
            imagingAnnotation = new TextAnnotation(topLeft, controlOffset, imageOffset, scale, colourFormat);
        }
        break;

        case ImageDoc.ImagingAnnotationTypes.Svg:
        {
            // Top left of the SVG placement rectangle
            Point topLeft = new Point((int)annotation.Rect.TopLeft.X, (int)annotation.Rect.TopLeft.Y);
            imagingAnnotation = new SvgAnnotation(topLeft, controlOffset, imageOffset, scale, colourFormat);
            (imagingAnnotation as SvgAnnotation).ImageName = annotation.SvgImageName;
        }
        break;
    }

    // Polygon-specific data: shape name and the vertex list.
    if ((imagingAnnotation is PolygonAnnotation))
    {
        (imagingAnnotation as PolygonAnnotation).ShapeName = annotation.SvgImageName;
        foreach (ImageDoc.Point point in annotation.Points)
        {
            Point intPoint = new Point((int)point.X, (int)point.Y);
            (imagingAnnotation as PolygonAnnotation).AddPoint(intPoint);
        }

        // This forces the initialisation of some required objects
        (imagingAnnotation as PolygonAnnotation).EndDrawing();
    }

    // Stamp-specific data: copy the point list directly.
    if ((imagingAnnotation is StampAnnotation))
    {
        foreach (ImageDoc.Point point in annotation.Points)
        {
            Point intPoint = new Point((int)point.X, (int)point.Y);
            (imagingAnnotation as StampAnnotation).Points.Add(intPoint);
        }
    }

    // Text-specific data: content, font and colour.
    if ((imagingAnnotation is TextAnnotation))
    {
        (imagingAnnotation as TextAnnotation).Text = annotation.Text;
        (imagingAnnotation as TextAnnotation).TextFont = annotation.Font.ToFont();
        (imagingAnnotation as TextAnnotation).TextColor = SimpleTypeToColor(annotation.TextColor);
    }

    // Properties common to every annotation type.
    imagingAnnotation.Rect = SimpleTypeToRect(annotation.Rect);
    imagingAnnotation.Filled = true;
    imagingAnnotation.FillColor = SimpleTypeToColor(annotation.FillColor);
    imagingAnnotation.FillOpacity = annotation.FillOpacity;
    //imagingAnnotation.Outline = annotation.Outline;
    return imagingAnnotation;
}
/// <summary>
/// Creates a sensor manager that will request the given depth and colour
/// formats (converted to their Kinect equivalents) from the sensor.
/// </summary>
public SensorManager(DepthFormat dFormat, ColourFormat cFormat)
{
    // Cache the Kinect-native formats the sensor should be configured with.
    this.desiredDepthFormat = FormatConvertor.ConvertToKinect(dFormat);
    this.desiredColourFormat = FormatConvertor.ConvertToKinect(cFormat);

    // No frame-ready event handlers are registered at construction time.
    this.frameReadyEventsRegistered = false;
}
/// <summary>
/// Creates a frame holding a matched pair of raw depth and colour buffers
/// together with their formats, frame number and timestamp.
/// </summary>
public Frame(int frameNumber, float timestamp, short[] depth, DepthFormat dFormat, byte[] colour, ColourFormat cFormat) : this()
{
    // Identity and timing.
    this.FrameNumber = frameNumber;
    this.Timestamp = timestamp;

    // Raw pixel buffers and the formats describing their layout.
    this.Depth = depth;
    this.DepthFormat = dFormat;
    this.Colour = colour;
    this.ColourFormat = cFormat;
}