Example #1
        static void StartRecording(ConfigurationFeed feed)
        {
            if (feed.MotionResultStream != null && feed.MotionResultStream.CanWrite)
            {
                return;
            }

            Console.WriteLine("Start recording");

            // Get filename for recording and replace [name] and [yyyyMMdd] parts
            var outputFileName = string.IsNullOrEmpty(feed.MotionRecordingFileName)
                ? "[yyyyMMdd]" + Path.DirectorySeparatorChar + "[name]_[yyyyMMdd HHmmss].mp4"
                : feed.MotionRecordingFileName;

            outputFileName = outputFileName.Replace("[name]", feed.Name);
            outputFileName = new Regex("\\[(.*?)\\]").Replace(outputFileName, match => DateTime.Now.ToString(match.Groups[1].Value));

            if (outputFileName.Contains(Path.DirectorySeparatorChar))
            {
                var directory = outputFileName.Substring(0, outputFileName.LastIndexOf(Path.DirectorySeparatorChar));
                if (!string.IsNullOrEmpty(directory))
                {
                    Directory.CreateDirectory(directory);
                }
            }

            feed.MotionResultStream = File.OpenWrite(outputFileName);
            MjpegUtils.BeginPipeMjpegIntoProcessAndSendOutputToStream(feed.InputProcessName, feed.InputProcessArguments, feed.MotionProcessName, feed.MotionProcessArguments, feed.MotionResultStream, () => IsRunning && feed.MotionResultStream != null);
        }
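
Example #4 below calls a StopRecording(feed) counterpart that is not included in these examples. A minimal sketch, assuming it only needs to close the result stream and clear the reference so that the shouldKeepGoing callback above (() => IsRunning && feed.MotionResultStream != null) returns false and the recording pipe shuts down:

        // Sketch only: the real StopRecording is not shown in these examples. Assumption:
        // clearing feed.MotionResultStream makes the shouldKeepGoing callback return false,
        // and closing the stream lets the pipe helper's closeProcess() finish the file.
        static void StopRecording(ConfigurationFeed feed)
        {
            var stream = feed.MotionResultStream;
            if (stream == null)
            {
                return; // Not recording
            }

            Console.WriteLine("Stop recording");
            feed.MotionResultStream = null;
            try
            {
                stream.Flush();
                stream.Close();
            }
            catch (Exception e)
            {
                Console.WriteLine("StopRecording() exception: " + e.Message);
            }
        }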
Example #2
        static bool HandleHttpRequestOutputStream(Socket socket, ConfigurationFeed requestedFeed)
        {
            // The CORS headers are needed to support Google Nest Hub and Chromecast
            socket.Send(ASCIIEncoding.ASCII.GetBytes("HTTP/1.1 200 OK\r\n" +
                                                     "Connection: Close\r\n" +
                                                     "Access-Control-Allow-Headers: DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range\r\n" +
                                                     "Access-Control-Allow-Headers: GET, POST, OPTIONS\r\n" +
                                                     "Access-Control-Allow-Origin: *\r\n" +
                                                     "Access-Control-Expose-Headers: *\r\n" +
                                                     "Content-Type: " + requestedFeed.OutputContentType + "\r\n\r\n"));

            var ns = new NetworkStream(socket, true); // ownsSocket: true, so closing the stream also closes the socket

            MjpegUtils.BeginPipeMjpegIntoProcessAndSendOutputToStream(requestedFeed.InputProcessName, requestedFeed.InputProcessArguments, requestedFeed.OutputProcessName, requestedFeed.OutputProcessArguments, ns, () => IsRunning && socket.Connected);
            return(true);
        }
Example #3
        public static void BeginPipeMjpegIntoProcessAndSendOutputToStream(string processName, string arguments, string resultProcessName, string resultProcessArguments, Stream resultStream, Func<bool> shouldKeepGoing = null)
        {
            var process = new Process
            {
                StartInfo = new ProcessStartInfo
                {
                    RedirectStandardInput  = true,
                    RedirectStandardOutput = true,
                    RedirectStandardError  = true,
                    UseShellExecute        = false,
                    CreateNoWindow         = true,
                    Arguments = resultProcessArguments,
                    FileName  = resultProcessName
                },
                EnableRaisingEvents = true
            };

            process.ErrorDataReceived += (sender, eventArgs) =>
            {
                Console.WriteLine(eventArgs.Data);
            };

            process.Start();
            process.BeginErrorReadLine();
            var outputStream = process.StandardOutput.BaseStream;
            var inputStream  = process.StandardInput.BaseStream;


            var    isClosing    = false;
            Action closeProcess = () =>
            {
                if (isClosing)
                {
                    return;
                }
                isClosing = true;
                Console.WriteLine("Starting to close process");
                // We'll try to close the process by ending the input pipe first, so ffmpeg can free up any gpu memory allocations.
                inputStream.Flush();
                inputStream.Close();

                var    start = DateTime.UtcNow;
                byte[] temp  = new byte[1000];
                try
                {
                    while ((DateTime.UtcNow - start).TotalSeconds < 5)
                    {
                        if (outputStream.Read(temp, 0, temp.Length) == 0) // Make sure there is no data waiting, otherwise defunct processes might appear
                        {
                            break;
                        }
                    }
                }
                catch (Exception e) {
                    Console.WriteLine("closeProcess() exception 1: " + e.Message);
                }

                outputStream.Close();
                resultStream.Close();
                try
                {
                    process.CloseMainWindow();
                    process.WaitForExit(1000);
                }
                catch (Exception e)
                {
                    Console.WriteLine("closeProcess() exception 2: " + e.Message);
                }

                try
                {
                    process.Kill();
                }
                catch (Exception e)
                {
                    Console.WriteLine("closeProcess() exception 3: " + e.Message);
                }

                try
                {
                    process.Dispose();
                }
                catch (Exception e)
                {
                    Console.WriteLine("closeProcess() exception 4: " + e.Message);
                }
                Console.WriteLine("Closed process");
            };

            MjpegUtils.BeginJpegsFromProcessWithMjpegOutput(processName, arguments, (buffer, offset, count) =>
            {
                if (buffer == null || !outputStream.CanRead || !inputStream.CanWrite || !resultStream.CanWrite)
                {
                    // Process/stream ended
                    closeProcess();
                    return(false);
                }

                try
                {
                    inputStream.Write(buffer, offset, count);
                }
                catch
                {
                    closeProcess();
                    return(false);
                }

                var shouldRequestNextJpeg = inputStream.CanWrite && resultStream.CanWrite && (shouldKeepGoing == null || shouldKeepGoing());
                if (!shouldRequestNextJpeg)
                {
                    closeProcess();
                }
                return(shouldRequestNextJpeg);
            });

            var outputSendThread = new Thread(new ThreadStart(() =>
            {
                byte[] buffer = new byte[1024 * 1024 * 1];
                while (inputStream.CanWrite && resultStream.CanWrite && outputStream.CanRead && (shouldKeepGoing == null || shouldKeepGoing()))
                {
                    var len = outputStream.Read(buffer, 0, buffer.Length);
                    if (len <= 0)
                    {
                        // End of the process output; stop instead of looping on an empty read
                        break;
                    }
                    try
                    {
                        resultStream.Write(buffer, 0, len);
                    }
                    catch
                    {
                        break;
                    }
                }

                closeProcess();
            }));

            outputSendThread.Start();
        }
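
A minimal usage sketch for the helper above; the process names and ffmpeg arguments are illustrative assumptions, not taken from these examples. The first process turns a camera feed into MJPEG on stdout, the second re-encodes that MJPEG from stdin, and its stdout is copied into the output file until keepRecording is set to false:

        // Illustrative only: "ffmpeg" and every argument below are assumptions for this sketch.
        static volatile bool keepRecording = true;

        static void ExampleRecordToFile()
        {
            var output = File.OpenWrite("recording.mkv"); // closed by closeProcess() inside the helper
            MjpegUtils.BeginPipeMjpegIntoProcessAndSendOutputToStream(
                "ffmpeg", "-i rtsp://camera.local/stream -f mjpeg -q:v 5 -",  // input process: camera -> MJPEG on stdout (assumed arguments)
                "ffmpeg", "-f mjpeg -i - -c:v libx264 -f matroska -",         // result process: MJPEG on stdin -> Matroska on stdout (assumed arguments)
                output,
                () => keepRecording);
        }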
Example #4
        static void StartCaptureAndMotionDetection(ConfigurationFeed feed)
        {
            DateTime lastSnapshot                        = DateTime.MinValue;
            DateTime lastMotionDetectionFrame            = DateTime.MinValue;
            DateTime lastMotionDetected                  = DateTime.MinValue;
            SKBitmap motionDetectionLastFrame            = null;
            bool     isCurrentlyRecording                = false;
            var      motionDetectionChangeDetectedFrames = new List <bool>();

            byte[] motionDetectionCurrentFrame       = null;
            int    motionDetectionCurrentFrameLength = 0;

            Thread motionDetectionThread          = null;
            var    motionDetectionThreadIsRunning = true;

            if (feed.MotionDetectionPercentage > 0)
            {
                motionDetectionThread = new Thread(new ThreadStart(() =>
                {
                    Console.WriteLine("Starting motion detection thread");
                    while (IsRunning && motionDetectionThreadIsRunning)
                    {
                        if (motionDetectionCurrentFrameLength == 0)
                        {
                            Thread.Sleep(10);
                            continue;
                        }

                        SKBitmap newFrame = null;

                        using (var stream = new MemoryStream(motionDetectionCurrentFrame))
                            using (SKCodec codec = SKCodec.Create(stream))
                            {
                                SKImageInfo info       = codec.Info;
                                SKSizeI supportedScale = codec.GetScaledDimensions((float)200 / info.Width);
                                SKImageInfo nearest    = new SKImageInfo(supportedScale.Width, supportedScale.Height);
                                newFrame = SKBitmap.Decode(codec, nearest);
                            }

                        motionDetectionCurrentFrameLength = 0; // Mark as read

                        if (motionDetectionLastFrame != null)
                        {
                            // Analyse the last X frames: if at least N% of the sampled grid points changed in all of them, start recording, and stop once there has been no movement for the configured linger time (a grid is sampled instead of comparing every pixel)
                            var step          = newFrame.Height / 10;
                            var pixelsChanged = 0;
                            var pixelsTotal   = 0;
                            for (var y = (int)(step / 2); y < newFrame.Height; y += step)
                            {
                                for (var x = (int)(step / 2); x < newFrame.Width; x += step)
                                {
                                    if (CompareColors(newFrame.GetPixel(x, y), motionDetectionLastFrame.GetPixel(x, y)) > feed.MotionColorIgnorePercentage)
                                    {
                                        pixelsChanged++;
                                    }
                                    pixelsTotal++;
                                }
                            }
                            motionDetectionLastFrame.Dispose();

                            var percentageDifference = (((double)pixelsChanged / (double)pixelsTotal) * 100);
                            motionDetectionChangeDetectedFrames.Add((percentageDifference > feed.MotionDetectionPercentage));

                            if (motionDetectionChangeDetectedFrames.Count > feed.MotionDetectionFrameCount)
                            {
                                motionDetectionChangeDetectedFrames.RemoveAt(0);
                            }

                            var totalDetectedFrames = motionDetectionChangeDetectedFrames.Count(a => a);
                            if ((totalDetectedFrames == feed.MotionDetectionFrameCount) || (isCurrentlyRecording && totalDetectedFrames > 0))
                            {
                                // Start or keep continuing recording
                                Console.WriteLine("Detection! " + Math.Round(percentageDifference, 1) + " %");
                                lastMotionDetected = DateTime.UtcNow;
                                if (!isCurrentlyRecording)
                                {
                                    StartRecording(feed);
                                    isCurrentlyRecording = true;
                                }
                            }
                            else
                            {
                                Console.WriteLine("No detection " + Math.Round(percentageDifference, 1) + " %");
                                if (isCurrentlyRecording && (DateTime.UtcNow - lastMotionDetected).TotalSeconds > feed.MotionSecondsLinger)
                                {
                                    StopRecording(feed);
                                    isCurrentlyRecording = false;
                                }
                            }
                        }
                        motionDetectionLastFrame = newFrame;
                    }
                    Console.WriteLine("Ending motion detection thread");
                }));
                motionDetectionThread.Start();
            }

            MjpegUtils.BeginJpegsFromProcessWithMjpegOutput(feed.InputProcessName, feed.InputProcessArguments, (buffer, offset, count) =>
            {
                if (buffer == null)
                {
                    // process ended, todo: restart
                    motionDetectionThreadIsRunning = false;
                    return(false);
                }

                if (feed.SnapshotSecondsInterval > 0 && (DateTime.UtcNow - lastSnapshot).TotalSeconds >= feed.SnapshotSecondsInterval)
                {
                    lastSnapshot = DateTime.UtcNow;
                    lock (feed)
                    {
                        if (feed.SnapshotBytes == null || feed.SnapshotBytes.Length < count)
                        {
                            feed.SnapshotBytes = new byte[count * 2]; // Give some extra space to prevent resizing too many times at the start
                        }
                        feed.SnapshotBytesLength = count;
                        Buffer.BlockCopy(buffer, offset, feed.SnapshotBytes, 0, count);
                    }
                }

                if (feed.MotionDetectionPercentage > 0 && (DateTime.UtcNow - lastMotionDetectionFrame).TotalSeconds >= feed.MotionDetectionSecondsBetweenFrames)
                {
                    lastMotionDetectionFrame = DateTime.UtcNow;

                    if (motionDetectionCurrentFrameLength == 0) // Only update the buffer when the image code isn't still busy with this byte buffer
                    {
                        if (motionDetectionCurrentFrame == null || motionDetectionCurrentFrame.Length < count)
                        {
                            motionDetectionCurrentFrame = new byte[count * 2]; // Give some extra space to prevent resizing too many times at the start
                        }
                        Buffer.BlockCopy(buffer, offset, motionDetectionCurrentFrame, 0, count);
                        motionDetectionCurrentFrameLength = count;
                    }
                }
                return(IsRunning); // Keep going
            });
        }
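
The motion loop above samples a grid of pixels and compares them with a CompareColors helper that is not part of these examples. A minimal sketch, assuming it returns the difference between two pixels as a percentage (0 = identical, 100 = maximally different) that is then checked against feed.MotionColorIgnorePercentage:

        // Sketch only: the real CompareColors is not shown in these examples. Assumption:
        // average the absolute R/G/B channel differences and express them as a percentage.
        static double CompareColors(SKColor a, SKColor b)
        {
            var diff = Math.Abs(a.Red - b.Red) + Math.Abs(a.Green - b.Green) + Math.Abs(a.Blue - b.Blue);
            return (diff / (3.0 * 255.0)) * 100.0;
        }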
Example #5
        static bool HandleHttpRequestMjpegStream(Socket socket, ConfigurationFeed requestedFeed, bool asRawMjpegStream = false, double? maxFps = null)
        {
            socket.Send(ASCIIEncoding.ASCII.GetBytes("HTTP/1.1 200 OK\r\n" +
                                                     "Connection: Close\r\n" +
                                                     "Access-Control-Allow-Headers: DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range\r\n" +
                                                     "Access-Control-Allow-Headers: GET, POST, OPTIONS\r\n" +
                                                     "Access-Control-Allow-Origin: *\r\n" +
                                                     "Access-Control-Expose-Headers: *\r\n" +
                                                     (asRawMjpegStream ? "Content-Type: video/x-motion-jpeg\r\n\r\n" : "Content-Type: multipart/x-mixed-replace;boundary=derpyderpderp\r\n")
                                                     ));

            byte[] headerToSend = asRawMjpegStream ? (byte[])null : ASCIIEncoding.ASCII.GetBytes("\r\n--derpyderpderp\r\nContent-type: image/jpeg\r\n\r\n");

            bool     isCurrentlySending = false;
            DateTime lastSentDate       = DateTime.MinValue; // for timeouts
            double   maxFpsSec          = 0;

            if (maxFps.HasValue)
            {
                maxFpsSec = (double)1 / maxFps.Value;
            }

            MjpegUtils.BeginJpegsFromProcessWithMjpegOutput(requestedFeed.InputProcessName, requestedFeed.InputProcessArguments, (buffer, offset, count) =>
            {
                if (isCurrentlySending)
                {
                    if (lastSentDate.AddSeconds(15) < DateTime.UtcNow)
                    {
                        // Timeout
                        Console.WriteLine("Sending data to client timeout. Closing connection\r\n");
                        socket.Close();
                        return(false);
                    }
                    return(IsRunning); // We'll skip this frame
                }
                if (maxFps.HasValue && (DateTime.UtcNow - lastSentDate).TotalSeconds < maxFpsSec)
                {
                    return(IsRunning); // Skip this frame to limit fps
                }
                if (buffer == null)
                {
                    // Process/stream ended
                    socket.Close();
                    return(false);
                }
                try
                {
                    isCurrentlySending = true;
                    lastSentDate       = DateTime.UtcNow;

                    if (headerToSend != null) // mjpeg over http
                    {
                        socket.BeginSend(headerToSend, 0, headerToSend.Length, SocketFlags.None, (a) =>
                        {
                            socket.EndSend(a);
                            socket.Send(buffer, offset, count, SocketFlags.None);
                            isCurrentlySending = false;
                        }, null);
                    }
                    else // raw
                    {
                        socket.BeginSend(buffer, offset, count, SocketFlags.None, (a) =>
                        {
                            socket.EndSend(a);

                            // Pad raw streams to an 8-byte boundary; this matches the test MJPEG stream we captured
                            var extraBytesNeeded = 8 - (count % 8);
                            if (extraBytesNeeded > 0 && extraBytesNeeded < 8)
                            {
                                var zeroBytes = new byte[extraBytesNeeded];
                                socket.Send(zeroBytes, 0, extraBytesNeeded, SocketFlags.None);
                            }
                            isCurrentlySending = false;
                        }, null);
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine("Could not send data: " + e.Message);
                    socket.Close();
                    return(false);
                }
                return(socket.Connected && IsRunning); // Keep going
            });
            return(true);
        }
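
A minimal dispatch sketch for the handler above, assuming a listening Socket and that request parsing and feed lookup (not shown in these examples) have already happened; each accepted client gets an MJPEG-over-HTTP stream capped at 10 fps:

        // Illustrative only: listener setup, request parsing and feed lookup are assumed to
        // happen elsewhere; this just shows how HandleHttpRequestMjpegStream would be invoked.
        static void ServeMjpegClients(Socket listener, ConfigurationFeed feed)
        {
            while (IsRunning)
            {
                var client = listener.Accept();
                HandleHttpRequestMjpegStream(client, feed, asRawMjpegStream: false, maxFps: 10);
            }
        }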