Example #1
        static void StartRecording(ConfigurationFeed feed)
        {
            // Already recording to an open, writable stream, so there is nothing to do
            if (feed.MotionResultStream != null && feed.MotionResultStream.CanWrite)
            {
                return;
            }

            Console.WriteLine("Start recording");

            // Get filename for recording and replace [name] and [yyyyMMdd] parts
            var outputFileName = string.IsNullOrEmpty(feed.MotionRecordingFileName)
                ? "[yyyyMMdd]" + Path.DirectorySeparatorChar + "[name]_[yyyyMMdd HHmmss].mp4"
                : feed.MotionRecordingFileName;

            outputFileName = outputFileName.Replace("[name]", feed.Name);
            outputFileName = new Regex("\\[(.*?)\\]").Replace(outputFileName, match => DateTime.Now.ToString(match.Groups[1].Value));

            if (outputFileName.Contains(Path.DirectorySeparatorChar))
            {
                var directory = outputFileName.Substring(0, outputFileName.LastIndexOf(Path.DirectorySeparatorChar));
                if (!string.IsNullOrEmpty(directory))
                {
                    Directory.CreateDirectory(directory);
                }
            }

            feed.MotionResultStream = File.OpenWrite(outputFileName);
            MjpegUtils.BeginPipeMjpegIntoProcessAndSendOutputToStream(feed.InputProcessName, feed.InputProcessArguments, feed.MotionProcessName, feed.MotionProcessArguments, feed.MotionResultStream, () => IsRunning && feed.MotionResultStream != null);
        }
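
The recording path above is a template: "[name]" is replaced with the feed name, and every other bracketed token is passed to DateTime.Now.ToString as a .NET date/time format string. Below is a minimal, self-contained sketch of that expansion; the feed name and the class name FileNameTemplateDemo are made up for illustration.

        using System;
        using System.IO;
        using System.Text.RegularExpressions;

        class FileNameTemplateDemo
        {
            static void Main()
            {
                var feedName = "frontdoor"; // hypothetical feed name
                var template = "[yyyyMMdd]" + Path.DirectorySeparatorChar + "[name]_[yyyyMMdd HHmmss].mp4";

                // Same two-step replacement as StartRecording: [name] first, then every
                // remaining [token] is interpreted as a DateTime format string
                var fileName = template.Replace("[name]", feedName);
                fileName = new Regex("\\[(.*?)\\]").Replace(fileName, m => DateTime.Now.ToString(m.Groups[1].Value));

                Console.WriteLine(fileName); // e.g. 20240131/frontdoor_20240131 142500.mp4 on Linux
            }
        }
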
Example #2
 static bool HandleHttpRequestSnapshot(Socket socket, ConfigurationFeed requestedFeed, bool asRawMjpegStream = false, double? maxFps = null)
 {
     socket.Send(ASCIIEncoding.ASCII.GetBytes("HTTP/1.1 200 OK\r\n" +
                                              "Connection: Close\r\n" +
                                              "Access-Control-Allow-Headers: DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range\r\n" +
                                              "Access-Control-Allow-Headers: GET, POST, OPTIONS\r\n" +
                                              "Access-Control-Allow-Origin: *\r\n" +
                                              "Access-Control-Expose-Headers: *\r\n" +
                                              "Content-Type: image/jpeg\r\n\r\n"));
     lock (requestedFeed)
     {
         // SnapshotBytes can still be null if no snapshot has been captured yet
         if (requestedFeed.SnapshotBytes != null)
         {
             socket.Send(requestedFeed.SnapshotBytes, 0, requestedFeed.SnapshotBytesLength, SocketFlags.None);
         }
     }
     return false;
 }
Example #3
        static bool HandleHttpRequestOutputStream(Socket socket, ConfigurationFeed requestedFeed)
        {
            // The CORS headers are needed to support Google Nest Hub and Chromecast
            socket.Send(ASCIIEncoding.ASCII.GetBytes("HTTP/1.1 200 OK\r\n" +
                                                     "Connection: Close\r\n" +
                                                     "Access-Control-Allow-Headers: DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range\r\n" +
                                                     "Access-Control-Allow-Headers: GET, POST, OPTIONS\r\n" +
                                                     "Access-Control-Allow-Origin: *\r\n" +
                                                     "Access-Control-Expose-Headers: *\r\n" +
                                                     "Content-Type: " + requestedFeed.OutputContentType + "\r\n\r\n"));

            var ns = new NetworkStream(socket, true); // ownsSocket: true, so disposing the stream also closes the socket

            MjpegUtils.BeginPipeMjpegIntoProcessAndSendOutputToStream(requestedFeed.InputProcessName, requestedFeed.InputProcessArguments, requestedFeed.OutputProcessName, requestedFeed.OutputProcessArguments, ns, () => IsRunning && socket.Connected);
            return true;
        }
Example #4
        static void StartCaptureAndMotionDetection(ConfigurationFeed feed)
        {
            DateTime lastSnapshot                        = DateTime.MinValue;
            DateTime lastMotionDetectionFrame            = DateTime.MinValue;
            DateTime lastMotionDetected                  = DateTime.MinValue;
            SKBitmap motionDetectionLastFrame            = null;
            bool     isCurrentlyRecording                = false;
            var      motionDetectionChangeDetectedFrames = new List<bool>();

            byte[] motionDetectionCurrentFrame       = null;
            int    motionDetectionCurrentFrameLength = 0;

            Thread motionDetectionThread          = null;
            var    motionDetectionThreadIsRunning = true;

            if (feed.MotionDetectionPercentage > 0)
            {
                motionDetectionThread = new Thread(new ThreadStart(() =>
                {
                    Console.WriteLine("Starting motion detection thread");
                    while (IsRunning && motionDetectionThreadIsRunning)
                    {
                        if (motionDetectionCurrentFrameLength == 0)
                        {
                            Thread.Sleep(10);
                            continue;
                        }

                        SKBitmap newFrame = null;

                        // Decode the JPEG scaled down (to roughly 200 pixels wide) to keep the comparison cheap
                        using (var stream = new MemoryStream(motionDetectionCurrentFrame))
                        using (SKCodec codec = SKCodec.Create(stream))
                        {
                            SKImageInfo info       = codec.Info;
                            SKSizeI supportedScale = codec.GetScaledDimensions((float)200 / info.Width);
                            SKImageInfo nearest    = new SKImageInfo(supportedScale.Width, supportedScale.Height);
                            newFrame = SKBitmap.Decode(codec, nearest);
                        }

                        motionDetectionCurrentFrameLength = 0; // Mark as read

                        if (motionDetectionLastFrame != null)
                        {
                            // Analyse the last x captures by sampling a grid of pixels (not every pixel). If at least n % of the
                            // sampled pixels changed in all of them, start recording; stop once there has been no movement for the configured linger seconds.
                            var step          = newFrame.Height / 10;
                            var pixelsChanged = 0;
                            var pixelsTotal   = 0;
                            for (var y = (int)(step / 2); y < newFrame.Height; y += step)
                            {
                                for (var x = (int)(step / 2); x < newFrame.Width; x += step)
                                {
                                    if (CompareColors(newFrame.GetPixel(x, y), motionDetectionLastFrame.GetPixel(x, y)) > feed.MotionColorIgnorePercentage)
                                    {
                                        pixelsChanged++;
                                    }
                                    pixelsTotal++;
                                }
                            }
                            motionDetectionLastFrame.Dispose();

                            var percentageDifference = (((double)pixelsChanged / (double)pixelsTotal) * 100);
                            motionDetectionChangeDetectedFrames.Add((percentageDifference > feed.MotionDetectionPercentage));

                            if (motionDetectionChangeDetectedFrames.Count > feed.MotionDetectionFrameCount)
                            {
                                motionDetectionChangeDetectedFrames.RemoveAt(0);
                            }

                            var totalDetectedFrames = motionDetectionChangeDetectedFrames.Count(a => a);
                            if ((totalDetectedFrames == feed.MotionDetectionFrameCount) || (isCurrentlyRecording && totalDetectedFrames > 0))
                            {
                                // Start or keep continuing recording
                                Console.WriteLine("Detection! " + Math.Round(percentageDifference, 1) + " %");
                                lastMotionDetected = DateTime.UtcNow;
                                if (!isCurrentlyRecording)
                                {
                                    StartRecording(feed);
                                    isCurrentlyRecording = true;
                                }
                            }
                            else
                            {
                                Console.WriteLine("No detection " + Math.Round(percentageDifference, 1) + " %");
                                if (isCurrentlyRecording && (DateTime.UtcNow - lastMotionDetected).TotalSeconds > feed.MotionSecondsLinger)
                                {
                                    StopRecording(feed);
                                    isCurrentlyRecording = false;
                                }
                            }
                        }
                        motionDetectionLastFrame = newFrame;
                    }
                    Console.WriteLine("Ending motion detection thread");
                }));
                motionDetectionThread.Start();
            }

            MjpegUtils.BeginJpegsFromProcessWithMjpegOutput(feed.InputProcessName, feed.InputProcessArguments, (buffer, offset, count) =>
            {
                if (buffer == null)
                {
                    // process ended, todo: restart
                    motionDetectionThreadIsRunning = false;
                    return false;
                }

                if (feed.SnapshotSecondsInterval > 0 && (DateTime.UtcNow - lastSnapshot).TotalSeconds >= feed.SnapshotSecondsInterval)
                {
                    lastSnapshot = DateTime.UtcNow;
                    lock (feed)
                    {
                        if (feed.SnapshotBytes == null || feed.SnapshotBytes.Length < count)
                        {
                            feed.SnapshotBytes = new byte[count * 2]; // Give some extra space to prevent resizing too many times at the start
                        }
                        feed.SnapshotBytesLength = count;
                        Buffer.BlockCopy(buffer, offset, feed.SnapshotBytes, 0, count);
                    }
                }

                if (feed.MotionDetectionPercentage > 0 && (DateTime.UtcNow - lastMotionDetectionFrame).TotalSeconds >= feed.MotionDetectionSecondsBetweenFrames)
                {
                    lastMotionDetectionFrame = DateTime.UtcNow;

                    if (motionDetectionCurrentFrameLength == 0) // Only update the buffer when the image code isn't still busy with this byte buffer
                    {
                        if (motionDetectionCurrentFrame == null || motionDetectionCurrentFrame.Length < count)
                        {
                            motionDetectionCurrentFrame = new byte[count * 2]; // Give some extra space to prevent resizing too many times at the start
                        }
                        Buffer.BlockCopy(buffer, offset, motionDetectionCurrentFrame, 0, count);
                        motionDetectionCurrentFrameLength = count;
                    }
                }
                return IsRunning; // Keep going
            });
        }
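
The grid comparison above depends on a CompareColors helper that is not included in these listings. A possible sketch is shown below, assuming it returns the difference between two SkiaSharp SKColor pixels as a percentage (0 = identical, 100 = maximal difference across the RGB channels); the actual implementation may differ.

        // Hypothetical sketch only; the real CompareColors is not shown in this listing
        static double CompareColors(SKColor a, SKColor b)
        {
            // Sum the absolute per-channel differences and scale the result to 0..100
            var diff = Math.Abs(a.Red - b.Red) + Math.Abs(a.Green - b.Green) + Math.Abs(a.Blue - b.Blue);
            return diff / (3.0 * 255.0) * 100.0;
        }
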
Example #5
 static void StopRecording(ConfigurationFeed feed)
 {
     feed.MotionResultStream.Close();
     feed.MotionResultStream = null;
     Console.WriteLine("Stop recording");
 }
Example #6
        static bool HandleHttpRequestMjpegStream(Socket socket, ConfigurationFeed requestedFeed, bool asRawMjpegStream = false, double? maxFps = null)
        {
            socket.Send(ASCIIEncoding.ASCII.GetBytes("HTTP/1.1 200 OK\r\n" +
                                                     "Connection: Close\r\n" +
                                                     "Access-Control-Allow-Headers: DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range\r\n" +
                                                     "Access-Control-Allow-Headers: GET, POST, OPTIONS\r\n" +
                                                     "Access-Control-Allow-Origin: *\r\n" +
                                                     "Access-Control-Expose-Headers: *\r\n" +
                                                     (asRawMjpegStream ? "Content-Type: video/x-motion-jpeg\r\n\r\n" : "Content-Type: multipart/x-mixed-replace;boundary=derpyderpderp\r\n")
                                                     ));

            byte[] headerToSend = asRawMjpegStream ? (byte[])null : ASCIIEncoding.ASCII.GetBytes("\r\n--derpyderpderp\r\nContent-type: image/jpeg\r\n\r\n");

            bool     isCurrentlySending = false;
            DateTime lastSentDate       = DateTime.MinValue; // for timeouts
            double   maxFpsSec          = 0; // Minimum number of seconds between frames when maxFps is set

            if (maxFps.HasValue)
            {
                maxFpsSec = 1d / maxFps.Value;
            }

            MjpegUtils.BeginJpegsFromProcessWithMjpegOutput(requestedFeed.InputProcessName, requestedFeed.InputProcessArguments, (buffer, offset, count) =>
            {
                if (isCurrentlySending)
                {
                    if (lastSentDate.AddSeconds(15) < DateTime.UtcNow)
                    {
                        // Timeout
                        Console.WriteLine("Sending data to client timed out. Closing connection");
                        socket.Close();
                        return false;
                    }
                    return IsRunning; // Still busy sending the previous frame, so skip this one
                }
                if (maxFps.HasValue && (DateTime.UtcNow - lastSentDate).TotalSeconds < maxFpsSec)
                {
                    return IsRunning; // Skip this frame to limit fps
                }
                if (buffer == null)
                {
                    // Process/stream ended
                    socket.Close();
                    return false;
                }
                try
                {
                    isCurrentlySending = true;
                    lastSentDate       = DateTime.UtcNow;

                    if (headerToSend != null) // mjpeg over http
                    {
                        socket.BeginSend(headerToSend, 0, headerToSend.Length, SocketFlags.None, (a) =>
                        {
                            socket.EndSend(a);
                            socket.Send(buffer, offset, count, SocketFlags.None);
                            isCurrentlySending = false;
                        }, null);
                    }
                    else // raw
                    {
                        socket.BeginSend(buffer, offset, count, SocketFlags.None, (a) =>
                        {
                            socket.EndSend(a);

                            // We send some padding for raw streams. This is from the test mjpeg stream we've captured
                            var extraBytesNeeded = 8 - (count % 8);
                            if (extraBytesNeeded > 0 && extraBytesNeeded < 8)
                            {
                                var zeroBytes = new byte[extraBytesNeeded];
                                socket.Send(zeroBytes, 0, extraBytesNeeded, SocketFlags.None);
                            }
                            isCurrentlySending = false;
                        }, null);
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine("Could not send data: " + e.Message);
                    socket.Close();
                    return false;
                }
                return socket.Connected && IsRunning; // Keep going
            });
            return true;
        }
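
The raw-stream branch above pads each frame with zero bytes so that the next frame starts on an 8-byte boundary. A small helper that mirrors that calculation is sketched below; the name PaddingBytesNeeded and the sample frame sizes are made up for illustration.

        // Mirrors the padding logic in the raw branch; PaddingBytesNeeded is a name invented for this sketch
        static int PaddingBytesNeeded(int count)
        {
            var extraBytesNeeded = 8 - (count % 8);
            return (extraBytesNeeded > 0 && extraBytesNeeded < 8) ? extraBytesNeeded : 0;
        }
        // PaddingBytesNeeded(14021) == 3  (14021 % 8 == 5, so 3 zero bytes are appended)
        // PaddingBytesNeeded(16000) == 0  (already a multiple of 8, nothing is appended)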