/// <summary>
/// Resizes the source image to the nearest codec-supported size for
/// <paramref name="desiredWidth"/>, re-encodes it (GIF stays GIF, everything
/// else becomes PNG), uploads the result to Azure blob storage, and records
/// the resulting link in the image data store.
/// </summary>
/// <param name="dbImage">The database image; its slug is used to name the blob.</param>
/// <param name="codec">An open codec over the source image bytes.</param>
/// <param name="container">The destination blob container.</param>
/// <param name="desiredWidth">Target width in pixels; the codec may round to a supported scale.</param>
private async Task UploadToAzure(Image dbImage, SKCodec codec, CloudBlobContainer container, int desiredWidth)
{
    // Ask the codec for the nearest natively supported scaled size — cheaper
    // than decoding at full resolution and scaling down afterwards.
    var ratio = (float)desiredWidth / (float)codec.Info.Width;
    var supportedScale = codec.GetScaledDimensions(ratio);

    // Keep GIFs as GIF (preserves the format for animated sources);
    // everything else is re-encoded as PNG.
    SKEncodedImageFormat target = codec.EncodedFormat == SKEncodedImageFormat.Gif
        ? SKEncodedImageFormat.Gif
        : SKEncodedImageFormat.Png;

    var width = supportedScale.Width;
    var height = supportedScale.Height;
    var format = target.ToString();
    var name = $"{dbImage.Slug}_{width}_{height}";
    var blobRef = container.GetBlockBlobReference(name);
    var url = blobRef.StorageUri.PrimaryUri.ToString();

    using (var resultBitmap = SKBitmap.Decode(codec))
    using (var resizedBitmap = resultBitmap.Resize(new SKImageInfo(width, height), SKBitmapResizeMethod.Lanczos3))
    using (var resultImage = SKImage.FromBitmap(resizedBitmap))
    // FIX: SKImage.Encode returns an SKData that owns native memory; it was
    // previously leaked because only the wrapping stream was disposed.
    using (var encodedData = resultImage.Encode(target, 100))
    using (var imageStream = encodedData.AsStream())
    using (var outputStream = await blobRef.OpenWriteAsync())
    {
        await imageStream.CopyToAsync(outputStream);
    }

    var data = new AzureAdapterMetadata(this.account.BlobEndpoint.ToString(), container.Name, name, url);
    await this.imageData.SetImageLinks(new ImageLink(dbImage.Id, null, width, height, format, ImageAdapter.AzureFile, data.ToString()));
}
/// <summary>
/// Resizes the image and encodes the BlurHash representation of the image.
/// </summary>
/// <param name="xComponent">The number x components.</param>
/// <param name="yComponent">The number y components.</param>
/// <param name="filename">The path to an encoded image on the file system.</param>
/// <param name="maxWidth">The maximum width to resize the image to.</param>
/// <param name="maxHeight">The maximum height to resize the image to.</param>
/// <returns>BlurHash representation of the image.</returns>
/// <exception cref="ArgumentException">The file could not be opened or decoded.</exception>
public static string Encode(int xComponent, int yComponent, string filename, int maxWidth, int maxHeight)
{
    using (SKCodec codec = SKCodec.Create(filename))
    {
        // FIX: SKCodec.Create returns null for a missing or undecodable file;
        // fail with a meaningful exception instead of a NullReferenceException.
        if (codec == null)
        {
            throw new ArgumentException($"Unable to create codec from file: {filename}", nameof(filename));
        }

        var width = codec.Info.Width;
        var height = codec.Info.Height;

        // 0 doubles as a sentinel meaning "no downscale required".
        float scaleFactor = 0;
        if (width > maxWidth || height > maxHeight)
        {
            scaleFactor = ScaleHelper.GetScale(width, height, maxWidth, maxHeight);
            // The codec can only decode at a few discrete scales; take the nearest.
            SKSizeI supportedScale = codec.GetScaledDimensions(scaleFactor);
            width = supportedScale.Width;
            height = supportedScale.Height;
        }

        var newInfo = new SKImageInfo()
        {
            Width = width,
            Height = height,
            ColorType = SKColorType.Rgba8888,
            AlphaType = SKAlphaType.Unpremul,
            ColorSpace = SKColorSpace.CreateSrgb()
        };

        using (SKBitmap bitmap = SKBitmap.Decode(codec, newInfo))
        {
            // FIX: Decode also returns null for corrupt image data.
            if (bitmap == null)
            {
                throw new ArgumentException($"Unable to decode image from file: {filename}", nameof(filename));
            }

            if (scaleFactor == 0)
            {
                return EncodeInternal(xComponent, yComponent, bitmap);
            }

            // The codec-supported scale may still be larger than the requested
            // bounds, so finish with an exact bitmap resize.
            var (scaledWidth, scaledHeight) = ScaleHelper.GetScaleDimensions(bitmap.Width, bitmap.Height, scaleFactor);
            newInfo = newInfo.WithSize(scaledWidth, scaledHeight);
            using (SKBitmap scaledBitmap = bitmap.Resize(newInfo, SKFilterQuality.Low))
            {
                return EncodeInternal(xComponent, yComponent, scaledBitmap);
            }
        }
    }
}
/// <summary>
/// Loads an image from a disk file, decoding for the optimal required
/// size so that we don't load the entire image for a smaller target,
/// and auto-orienting the bitmap according to the codec origin.
/// This is faster than the slow method above, because it uses the trick
/// outlined here:
/// https://forums.xamarin.com/discussion/88794/fast-way-to-decode-skbitmap-from-byte-jpeg-raw
/// </summary>
/// <param name="source">The image file on disk to load.</param>
/// <param name="desiredWidth">Target width in pixels; decoding uses the nearest codec-supported scale.</param>
/// <returns>The decoded, auto-oriented bitmap; caller owns and must dispose it.</returns>
private SKBitmap LoadOrientedBitmap(FileInfo source, int desiredWidth)
{
    Stopwatch load = new Stopwatch("SkiaSharpLoad");

    // FIX: SKCodec is IDisposable and was previously leaked.
    using (SKCodec codec = SKCodec.Create(source.FullName))
    {
        SKImageInfo info = codec.Info;

        // get the scale that is nearest to what we want (eg: jpg returned 512)
        SKSizeI supportedScale = codec.GetScaledDimensions((float)desiredWidth / info.Width);

        // decode the bitmap at the nearest size
        SKImageInfo nearest = new SKImageInfo(supportedScale.Width, supportedScale.Height);
        SKBitmap bmp = SKBitmap.Decode(codec, nearest);
        load.Stop();

        // First, auto-orient the bitmap using the EXIF origin recorded by the codec.
        var sourceBitmap = AutoOrient(bmp, codec.EncodedOrigin);
        return sourceBitmap;
    }
}
/// <summary>
/// Consumes MJPEG frames from the feed's input process: keeps a periodic
/// snapshot on the feed, and (when MotionDetectionPercentage > 0) runs a
/// background thread that downscales frames to ~200px wide, samples a 10-row
/// grid of pixels, and starts/stops recording based on how many recent frames
/// showed enough changed pixels.
/// NOTE(review): motionDetectionCurrentFrame / motionDetectionCurrentFrameLength
/// act as a single-slot mailbox between the capture callback and the detection
/// thread, with Length==0 meaning "consumed". They are accessed from two threads
/// without volatile/Interlocked or a lock — works in practice but is not
/// guaranteed by the memory model; confirm before relying on it.
/// </summary>
static void StartCaptureAndMotionDetection(ConfigurationFeed feed)
{
    DateTime lastSnapshot = DateTime.MinValue;
    DateTime lastMotionDectionFrame = DateTime.MinValue;
    DateTime lastMotionDetected = DateTime.MinValue;
    SKBitmap motionDetectionLastFrame = null;
    bool isCurrentlyRecording = false;
    // Sliding window of "did this frame show motion" flags, capped at
    // feed.MotionDetectionFrameCount entries below.
    var motionDetectionChangeDetectedFrames = new List <bool>();
    byte[] motionDetectionCurrentFrame = null;
    int motionDetectionCurrentFrameLength = 0; // 0 == mailbox empty/consumed
    Thread motionDetectionThread = null;
    var motionDetectionThreadIsRunning = true;

    if (feed.MotionDetectionPercentage > 0)
    {
        motionDetectionThread = new Thread(new ThreadStart(() =>
        {
            Console.WriteLine("Starting motion detection thread");
            while (IsRunning && motionDetectionThreadIsRunning)
            {
                // No new frame delivered yet — poll again shortly.
                if (motionDetectionCurrentFrameLength == 0)
                {
                    Thread.Sleep(10);
                    continue;
                }
                SKBitmap newFrame = null;
                // Decode the JPEG at the nearest codec-supported scale to
                // roughly 200px wide — detection doesn't need full resolution.
                using (var stream = new MemoryStream(motionDetectionCurrentFrame))
                using (SKCodec codec = SKCodec.Create(stream))
                {
                    SKImageInfo info = codec.Info;
                    SKSizeI supportedScale = codec.GetScaledDimensions((float)200 / info.Width);
                    SKImageInfo nearest = new SKImageInfo(supportedScale.Width, supportedScale.Height);
                    newFrame = SKBitmap.Decode(codec, nearest);
                }
                motionDetectionCurrentFrameLength = 0; // Mark as read
                if (motionDetectionLastFrame != null)
                {
                    // analyse last x captures, if at least n % is different in all of them (using a grid, not compare all pixels), start recording process, stop if there is no movement for linger-seconds
                    // Sample one pixel per grid cell (10 rows; square cells),
                    // starting half a cell in from the edges.
                    var step = newFrame.Height / 10;
                    var pixelsChanged = 0;
                    var pixelsTotal = 0;
                    for (var y = (int)(step / 2); y < newFrame.Height; y += step)
                    {
                        for (var x = (int)(step / 2); x < newFrame.Width; x += step)
                        {
                            // Count the sample as changed when the color delta
                            // exceeds the configured ignore threshold.
                            if (CompareColors(newFrame.GetPixel(x, y), motionDetectionLastFrame.GetPixel(x, y)) > feed.MotionColorIgnorePercentage)
                            {
                                pixelsChanged++;
                            }
                            pixelsTotal++;
                        }
                    }
                    motionDetectionLastFrame.Dispose();
                    var percentageDifference = (((double)pixelsChanged / (double)pixelsTotal) * 100);
                    motionDetectionChangeDetectedFrames.Add((percentageDifference > feed.MotionDetectionPercentage));
                    if (motionDetectionChangeDetectedFrames.Count > feed.MotionDetectionFrameCount)
                    {
                        motionDetectionChangeDetectedFrames.RemoveAt(0);
                    }
                    var totalDetectedFrames = motionDetectionChangeDetectedFrames.Where(a => a == true).Count();
                    // Start recording only when EVERY frame in the window shows
                    // motion; once recording, any single motion frame keeps it alive.
                    if ((totalDetectedFrames == feed.MotionDetectionFrameCount) || (isCurrentlyRecording && totalDetectedFrames > 0))
                    {
                        // Start or keep continuing recording
                        Console.WriteLine("Detection! " + Math.Round(percentageDifference, 1) + " %");
                        lastMotionDetected = DateTime.UtcNow;
                        if (!isCurrentlyRecording)
                        {
                            StartRecording(feed);
                            isCurrentlyRecording = true;
                        }
                    }
                    else
                    {
                        Console.WriteLine("No detection " + Math.Round(percentageDifference, 1) + " %");
                        // Stop only after the configured linger period with no motion.
                        if (isCurrentlyRecording && (DateTime.UtcNow - lastMotionDetected).TotalSeconds > feed.MotionSecondsLinger)
                        {
                            StopRecording(feed);
                            isCurrentlyRecording = false;
                        }
                    }
                }
                // Keep this frame as the comparison baseline for the next one.
                // NOTE(review): the final frame is never disposed when the loop
                // exits — minor one-bitmap leak at shutdown.
                motionDetectionLastFrame = newFrame;
            }
            Console.WriteLine("Ending motion detection thread");
        }));
        motionDetectionThread.Start();
    }

    // Callback is invoked once per JPEG frame; buffer == null signals the
    // producing process has ended. Returning false stops the pump.
    MjpegUtils.BeginJpegsFromProcessWithMjpegOutput(feed.InputProcessName, feed.InputProcessArguments, (buffer, offset, count) =>
    {
        if (buffer == null)
        {
            // process ended, todo: restart
            motionDetectionThreadIsRunning = false;
            return(false);
        }
        // Periodically publish the latest frame as the feed's snapshot.
        if (feed.SnapshotSecondsInterval > 0 && (DateTime.UtcNow - lastSnapshot).TotalSeconds >= feed.SnapshotSecondsInterval)
        {
            lastSnapshot = DateTime.UtcNow;
            // Snapshot readers synchronize on the feed object.
            lock (feed)
            {
                if (feed.SnapshotBytes == null || feed.SnapshotBytes.Length < count)
                {
                    feed.SnapshotBytes = new byte[count * 2]; // Give some extra space to prevent resizing too many times at the start
                }
                feed.SnapshotBytesLength = count;
                Buffer.BlockCopy(buffer, offset, feed.SnapshotBytes, 0, count);
            }
        }
        // Hand the frame to the detection thread, throttled to the configured
        // inter-frame interval.
        if (feed.MotionDetectionPercentage > 0 && (DateTime.UtcNow - lastMotionDectionFrame).TotalSeconds >= feed.MotionDetectionSecondsBetweenFrames)
        {
            lastMotionDectionFrame = DateTime.UtcNow;
            if (motionDetectionCurrentFrameLength == 0) // Only update the buffer when the image code isn't still busy with this byte buffer
            {
                if (motionDetectionCurrentFrame == null || motionDetectionCurrentFrame.Length < count)
                {
                    motionDetectionCurrentFrame = new byte[count * 2]; // Give some extra space to prevent resizing too many times at the start
                }
                Buffer.BlockCopy(buffer, offset, motionDetectionCurrentFrame, 0, count);
                motionDetectionCurrentFrameLength = count;
            }
        }
        return(IsRunning); // Keep going
    });
}