/// <summary>
/// Reads the video-reconstruction settings from the UI controls.
/// </summary>
/// <returns>
/// Tuple of (frameTime in microseconds, maxFrames, ON event color, OFF event color, playback fps).
/// </returns>
/// <exception cref="FormatException">
/// Thrown when a numeric text box is not a valid integer, or when a parsed
/// frame time / frame count is not positive.
/// </exception>
private (int, int, EventColor, EventColor, float) ParseVideoSettings()
{
    int frameTime; // The amount of time per frame in uS (30 fps = 33333)
    int maxFrames; // Max number of frames in the reconstructed video
    float fps = framerateCombo.SelectedIndex == 1 ? 60.0f : 30.0f;

    if (realTimeCheckbox.IsChecked == true)
    {
        // Real-time playback: derive the frame time from the selected framerate
        // (index 1 = 60 fps, otherwise 30 fps).
        frameTime = framerateCombo.SelectedIndex == 1 ? 33333 / 2 : 33333;
    }
    else
    {
        frameTime = Int32.Parse(frameTimeTB.Text);
    }

    // "All frames" means no practical limit on the frame count.
    maxFrames = allFrameCheckBox.IsChecked == true ? Int32.MaxValue : Int32.Parse(maxFramesTB.Text);

    // Non-positive values are reported the same way as unparseable input.
    if (maxFrames <= 0 || frameTime <= 0)
    {
        throw new FormatException();
    }

    // Grab ON and OFF colors from comboBox
    EventColor onColor = onColorCombo.SelectedItem as EventColor;
    EventColor offColor = offColorCombo.SelectedItem as EventColor;

    if (onColor.Name == "Custom")
    {
        // use color picker
        // onColor.Color = colorPicker Color
    }
    if (offColor.Name == "Custom")
    {
        // use color picker
        // offColor.Color = colorPicker Color
    }

    return (frameTime, maxFrames, onColor, offColor, fps);
}
/// <summary>
/// Reads the frame-reconstruction settings from the UI controls.
/// </summary>
/// <returns>
/// Tuple of (frameTime in microseconds, maxFrames, ON event color, OFF event color).
/// </returns>
/// <exception cref="FormatException">
/// Thrown when a numeric text box is not a valid integer, or when a parsed
/// frame time / frame count is not positive.
/// </exception>
private (int, int, EventColor, EventColor) ParseFrameSettings()
{
    int frameTime = 33333; // The amount of time per frame in uS (30 fps = 33333)
    int maxFrames;         // Max number of frames in the reconstructed video

    // A custom frame time only applies when the playback toggle is off;
    // otherwise the 30 fps default above is kept.
    if (!playbackType.IsOn)
    {
        frameTime = Int32.Parse(frameTimeTB.Text);
    }

    // "All frames" means no practical limit on the frame count.
    maxFrames = allFrameCheckBox.IsChecked == true ? Int32.MaxValue : Int32.Parse(maxFramesTB.Text);

    // Non-positive values are reported the same way as unparseable input.
    if (maxFrames <= 0 || frameTime <= 0)
    {
        throw new FormatException();
    }

    // Grab ON and OFF colors from comboBox
    EventColor onColor = onColorCombo.SelectedItem as EventColor;
    EventColor offColor = offColorCombo.SelectedItem as EventColor;

    return (frameTime, maxFrames, onColor, offColor);
}
/// <summary>
/// Reconstructs a video from an AEDAT event stream: accumulates a fixed number
/// of events per frame, renders each frame to a bitmap, and appends it as a
/// clip to a <see cref="MediaComposition"/>.
/// </summary>
/// <param name="aedatFile">Stream of raw AEDAT event data.</param>
/// <param name="cam">Camera parameters (sensor dimensions) used for decoding and sizing frames.</param>
/// <param name="onColor">Pixel color for ON events.</param>
/// <param name="offColor">Pixel color for OFF events.</param>
/// <param name="eventsPerFrame">Number of events accumulated before a frame is emitted.</param>
/// <param name="maxFrames">Maximum number of frames to add to the composition.</param>
/// <param name="playback_frametime">Duration of each clip; passed to TimeSpan.FromSeconds, so presumably seconds per frame — TODO confirm with callers.</param>
/// <returns>The composition containing the rendered frames.</returns>
public async Task <MediaComposition> EventBasedReconstruction(Stream aedatFile, CameraParameters cam, EventColor onColor, EventColor offColor, int eventsPerFrame, int maxFrames, float playback_frametime)
{
    byte[] aedatBytes = new byte[5 * Convert.ToInt32(Math.Pow(10, 8))]; // Read 0.5 GB at a time

    MediaComposition composition = new MediaComposition();
    int frameCount = 0;
    int eventCount = 0;

    // NOTE(review): pixelStream is only used for its Length (frame buffer size)
    // and is never disposed — looks like a leak; confirm InitBitMap's contract.
    Stream pixelStream = InitBitMap(cam);
    byte[] currentFrame = new byte[pixelStream.Length];

    int bytesRead = aedatFile.Read(aedatBytes, 0, aedatBytes.Length);
    while (bytesRead != 0 && frameCount < maxFrames)
    {
        // Read through AEDAT file
        for (int i = 0, length = bytesRead; i < length; i += AedatUtilities.dataEntrySize) // iterate through file, 8 bytes at a time.
        {
            // NOTE(review): if bytesRead is not a multiple of dataEntrySize, an
            // event straddling the chunk boundary would be decoded from a
            // partial record here — TODO confirm chunk size guarantees this
            // cannot happen.
            AEDATEvent currentEvent = new AEDATEvent(aedatBytes, i, cam);

            // Paint the event's pixel into the current frame buffer.
            AedatUtilities.SetPixel(ref currentFrame, currentEvent.x, currentEvent.y, (currentEvent.onOff ? onColor.Color : offColor.Color), cam.cameraX);

            eventCount++;
            if (eventCount >= eventsPerFrame) // Collected events within specified timeframe, add frame to video
            {
                eventCount = 0;

                // Copy the accumulated pixel bytes into a WriteableBitmap, then
                // convert to a CanvasBitmap surface the composition can consume.
                WriteableBitmap b = new WriteableBitmap(cam.cameraX, cam.cameraY);
                using (Stream stream = b.PixelBuffer.AsStream())
                {
                    await stream.WriteAsync(currentFrame, 0, currentFrame.Length);
                }
                SoftwareBitmap outputBitmap = SoftwareBitmap.CreateCopyFromBuffer(b.PixelBuffer, BitmapPixelFormat.Bgra8, b.PixelWidth, b.PixelHeight, BitmapAlphaMode.Ignore);
                CanvasBitmap bitmap2 = CanvasBitmap.CreateFromSoftwareBitmap(CanvasDevice.GetSharedDevice(), outputBitmap);

                // Set playback framerate
                MediaClip mediaClip = MediaClip.CreateFromSurface(bitmap2, TimeSpan.FromSeconds(playback_frametime));

                composition.Clips.Add(mediaClip);
                frameCount++;

                // Stop adding frames to video if max frames has been reached
                if (frameCount >= maxFrames)
                {
                    return(composition);
                }

                // Start the next frame from a blank buffer.
                currentFrame = new byte[pixelStream.Length];
            }
        }
        // Fetch the next 0.5 GB chunk of event data.
        bytesRead = aedatFile.Read(aedatBytes, 0, aedatBytes.Length);
    }
    return(composition);
}
/// <summary>
/// Reconstructs frames from an AEDAT event stream and writes each frame to
/// disk as a PNG (fileName + frame index + ".png") in the given folder,
/// instead of building a video composition.
/// </summary>
/// <param name="aedatFile">Stream of raw AEDAT event data.</param>
/// <param name="cam">Camera parameters (sensor dimensions) used for decoding and sizing frames.</param>
/// <param name="onColor">Pixel color for ON events.</param>
/// <param name="offColor">Pixel color for OFF events.</param>
/// <param name="eventsPerFrame">Number of events accumulated before a frame is emitted.</param>
/// <param name="maxFrames">Maximum number of PNG frames to write.</param>
/// <param name="folder">Destination folder for the PNG files.</param>
/// <param name="fileName">Base file name; the frame index and ".png" are appended.</param>
public async Task EventBasedReconstruction(Stream aedatFile, CameraParameters cam, EventColor onColor, EventColor offColor, int eventsPerFrame, int maxFrames, StorageFolder folder, string fileName)
{
    byte[] aedatBytes = new byte[5 * Convert.ToInt32(Math.Pow(10, 8))]; // Read 0.5 GB at a time

    int frameCount = 0;
    int eventCount = 0;

    // NOTE(review): pixelStream is only used for its Length (frame buffer size)
    // and is never disposed — looks like a leak; confirm InitBitMap's contract.
    Stream pixelStream = InitBitMap(cam);
    byte[] currentFrame = new byte[pixelStream.Length];

    int bytesRead = aedatFile.Read(aedatBytes, 0, aedatBytes.Length);
    while (bytesRead != 0 && frameCount < maxFrames)
    {
        // Read through AEDAT file
        for (int i = 0, length = bytesRead; i < length; i += AedatUtilities.dataEntrySize) // iterate through file, 8 bytes at a time.
        {
            AEDATEvent currentEvent = new AEDATEvent(aedatBytes, i, cam);

            // Paint the event's pixel into the current frame buffer.
            AedatUtilities.SetPixel(ref currentFrame, currentEvent.x, currentEvent.y, (currentEvent.onOff ? onColor.Color : offColor.Color), cam.cameraX);

            eventCount++;
            if (eventCount >= eventsPerFrame) // Collected events within specified timeframe, add frame to video
            {
                eventCount = 0;

                // Copy the accumulated pixel bytes into a WriteableBitmap so the
                // encoder can read them back from its PixelBuffer.
                WriteableBitmap b = new WriteableBitmap(cam.cameraX, cam.cameraY);
                using (Stream stream = b.PixelBuffer.AsStream())
                {
                    await stream.WriteAsync(currentFrame, 0, currentFrame.Length);
                }

                // Encode the frame to "<fileName><frameCount>.png" in the target folder.
                var file = await folder.CreateFileAsync(fileName + frameCount + ".png");
                using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.ReadWrite))
                {
                    BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, stream);

                    // NOTE(review): pixelStream2 is never disposed (one per frame),
                    // and this write-then-read-back round-trip through the bitmap
                    // buffer appears redundant with currentFrame — verify before
                    // simplifying.
                    Stream pixelStream2 = b.PixelBuffer.AsStream();
                    byte[] pixels = new byte[pixelStream2.Length];
                    await pixelStream2.ReadAsync(pixels, 0, pixels.Length);

                    encoder.SetPixelData(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore, (uint)b.PixelWidth, (uint)b.PixelHeight, 96.0, 96.0, pixels);
                    await encoder.FlushAsync();
                }
                frameCount++;

                // Stop adding frames to video if max frames has been reached
                if (frameCount >= maxFrames)
                {
                    return;
                }

                // Start the next frame from a blank buffer.
                currentFrame = new byte[pixelStream.Length];
            }
        }
        // Fetch the next 0.5 GB chunk of event data.
        bytesRead = aedatFile.Read(aedatBytes, 0,
aedatBytes.Length);
    }
    return;
}