Code Example #1
File: AudioWriter.cs (Project: aalmada/bonsai)
        protected override void Write(RiffWriter writer, Mat input)
        {
            var dataDepth   = input.Depth == Depth.U8 ? Depth.U8 : Depth.S16;
            var elementSize = input.Depth == Depth.U8 ? 1 : 2;
            var step        = elementSize * input.Cols;
            var data        = new byte[step * input.Rows];
            var dataHandle  = GCHandle.Alloc(data, GCHandleType.Pinned);

            try
            {
                var dataAddr = dataHandle.AddrOfPinnedObject();
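                // Wrap the pinned byte buffer in a Mat header with transposed dimensions;
                // the transpose below flips the buffer's row/column layout so the converted
                // samples are written to the RIFF data chunk in the expected order.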
                using (var dataHeader = new Mat(input.Cols, input.Rows, dataDepth, input.Channels,
                                                dataAddr, elementSize * input.Rows))
                {
                    if (input.Depth != dataDepth)
                    {
                        using (var conversion = new Mat(input.Size, dataDepth, input.Channels))
                        {
                            CV.Convert(input, conversion);
                            CV.Transpose(conversion, dataHeader);
                        }
                    }
                    else
                    {
                        CV.Transpose(input, dataHeader);
                    }
                }
            }
            finally { dataHandle.Free(); }
            writer.Write(data);
        }
Code Example #2
File: RunningAverage.cs (Project: spacelabswc/bonsai)
        public override IObservable<TArray> Process<TArray>(IObservable<TArray> source)
        {
            var outputFactory      = ArrFactory<TArray>.TemplateFactory;
            var accumulatorFactory = ArrFactory<TArray>.TemplateSizeChannelFactory;

            return(Observable.Defer(() =>
            {
                TArray accumulator = null;
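                // The first input initializes the F32 accumulator; subsequent inputs update
                // the running average, which is converted back to the input element type
                // before being emitted.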
                return source.Select(input =>
                {
                    if (accumulator == null)
                    {
                        accumulator = accumulatorFactory(input, Depth.F32);
                        CV.Convert(input, accumulator);
                        return input;
                    }
                    else
                    {
                        var output = outputFactory(input);
                        CV.RunningAvg(input, accumulator, Alpha);
                        CV.Convert(accumulator, output);
                        return output;
                    }
                });
            }));
        }
Code Example #3
        Mat Calibrate(Tuple<Mat, Mat> source)
        {
            // Do we need to check array sizes or let CV take care of that?
            //if (source.Item1.Size != source.Item2.Size)

            if (!calibration_set)
            {
                // Read the calibration files
                offset_raw = ReadFully(CreateStream(OffsetPath));
                slope_raw  = ReadFully(CreateStream(SlopePath));

                calibration_set = true;
            }

            // Use exposures to index into raw data
            var exposures  = source.Item2;
            var offset_dat = new double[source.Item1.Rows * source.Item1.Cols];
            var slope_dat  = new double[source.Item1.Rows * source.Item1.Cols];

            // Fast copy
            unsafe
            {
                fixed(byte *p_offset_raw = offset_raw)
                fixed(double *p_offset_dat = offset_dat)
                fixed(byte *p_slope_raw    = slope_raw)
                fixed(double *p_slope_dat  = slope_dat)
                {
                    double *p_offset_raw_d = (double *)p_offset_raw;
                    double *p_slope_raw_d  = (double *)p_slope_raw;

                    var numel = exposures.Rows * exposures.Cols;
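                    // Each exposure value selects a block of numel doubles within the raw
                    // calibration data; the same element index is then read from that block
                    // for both the offset and the slope.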

                    for (int i = 0; i < exposures.Rows; i++)
                    {
                        for (int j = 0; j < exposures.Cols; j++)
                        {
                            var mat_off = (int)exposures[i, j].Val0 * numel;
                            offset_dat[exposures.Cols * i + j] = p_offset_raw_d[mat_off + exposures.Cols * i + j];
                            slope_dat[exposures.Cols * i + j]  = p_slope_raw_d[mat_off + exposures.Cols * i + j];
                        }
                    }
                }
            }

            // Generate Mats from data
            var offset = Mat.CreateMatHeader(offset_dat, source.Item1.Rows, source.Item1.Cols, Depth.F64, 1);
            var slope  = Mat.CreateMatHeader(slope_dat, source.Item1.Rows, source.Item1.Cols, Depth.F64, 1);

            // Calibrate
            if (source.Item1.Depth != Depth.F64)
            {
                var tmp = new Mat(source.Item1.Size, Depth.F64, 1);
                CV.Convert(source.Item1, tmp);
                return(tmp * (slope - offset));
            }
            else
            {
                return(source.Item1 * (slope - offset));
            }
        }
Code Example #4
        static IObservable<double[]> AutoThreshold<TOther>(IObservable<IObservable<object>> source, IObservable<TOther> bufferBoundaries, Func<double> scale)
        {
            var concat = new Concat {
                Axis = 1
            };
            var firstBoundary = Observable.Return<TOther>(default(TOther));

            bufferBoundaries = firstBoundary.Concat(bufferBoundaries);
            return(bufferBoundaries.Publish(ps => ps.Window(2).Skip(1).SelectMany(start => Observable.Defer(() =>
            {
                var n = 0;
                var mean = default(double[]);
                var variance = default(double[]);
                var buffer = default(double[]);
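                // Accumulate per-row mean and variance across every buffer in the window,
                // then emit mean + scale * standard deviation for each row.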
                return source.SelectMany(xs => xs.Cast<Mat>()).TakeUntil(ps).Select(xs =>
                {
                    if (mean == null)
                    {
                        mean = new double[xs.Rows];
                        variance = new double[xs.Rows];
                        buffer = new double[xs.Rows * xs.Cols];
                    }

                    // Convert data into temporary buffer
                    var bufferHandle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
                    try
                    {
                        using (var bufferHeader = new Mat(xs.Rows, xs.Cols, Depth.F64, 1, bufferHandle.AddrOfPinnedObject()))
                        {
                            CV.Convert(xs, bufferHeader);
                        }
                    }
                    finally { bufferHandle.Free(); }

                    // Knuth's online variance algorithm
                    // http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
                    for (int i = 0; i < buffer.Length; i++)
                    {
                        var row = i / xs.Cols;
                        var col = i % xs.Cols;
                        var previousMean = mean[row];
                        var delta = buffer[i] - previousMean;
                        var newMean = previousMean + delta / (n + i + 1);
                        variance[row] = variance[row] + delta * (buffer[i] - newMean);
                        mean[row] = newMean;
                    }

                    n += xs.Cols;
                    return xs;
                }).TakeLast(1).Select(xs =>
                {
                    var result = new double[xs.Rows];
                    for (int i = 0; i < result.Length; i++)
                    {
                        result[i] = mean[i] + scale() * Math.Sqrt(variance[i] / (n - 1));
                    }
                    return result;
                });
            }))));
        }
Code Example #5
File: BufferHelper.cs (Project: aalmada/bonsai)
        public static void UpdateBuffer(int bid, Mat buffer, int sampleRate)
        {
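            // Buffers may arrive as channels x samples or samples x channels; the shorter
            // dimension is treated as the channel count and the data is transposed if needed.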
            var transpose = buffer.Rows < buffer.Cols;
            var channels  = transpose ? buffer.Rows : buffer.Cols;

            if (channels > 2)
            {
                throw new InvalidOperationException("Unsupported number of channels for the specified output format.");
            }

            var format       = channels == 2 ? ALFormat.Stereo16 : ALFormat.Mono16;
            var convertDepth = buffer.Depth != Depth.S16;

            if (convertDepth || transpose)
            {
                // Convert if needed
                if (convertDepth)
                {
                    var temp = new Mat(buffer.Rows, buffer.Cols, Depth.S16, 1);
                    CV.Convert(buffer, temp);
                    buffer = temp;
                }

                // Transpose multichannel to column order
                if (transpose)
                {
                    var temp = new Mat(buffer.Cols, buffer.Rows, buffer.Depth, 1);
                    CV.Transpose(buffer, temp);
                    buffer = temp;
                }
            }

            AL.BufferData(bid, format, buffer.Data, buffer.Rows * buffer.Step, sampleRate);
        }
Code Example #6
File: UpdatePulseTrain.cs (Project: bonsai-rx/neuro)
        public override IObservable<Mat> Process(IObservable<Mat> source)
        {
            return(Observable.Using(
                       () => PulsePalManager.ReserveConnection(PortName),
                       pulsePal => source.Do(input =>
            {
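                // Build evenly spaced pulse times and convert the input waveform into the
                // byte voltage codes sent to the Pulse Pal.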
                var pulseInterval = 1000000 / (Frequency * CycleTimeMicroseconds);
                var pulseTimes = new int[input.Cols];
                var pulseVoltages = new byte[input.Cols];
                for (int i = 0; i < pulseTimes.Length; i++)
                {
                    pulseTimes[i] = pulseInterval * i;
                }

                using (var voltageHeader = Mat.CreateMatHeader(pulseVoltages))
                {
                    CV.Convert(input, voltageHeader);
                }

                lock (pulsePal.PulsePal)
                {
                    pulsePal.PulsePal.SendCustomPulseTrain(PulseId, pulseTimes, pulseVoltages);
                }
            })));
        }
Code Example #7
        private Mat GetEphysDataS16(short[,] data)
        {
            var output = new Mat(NumberOfEphysChannels, NumberOfSamples, Depth.S16, 1);
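            // CreateMatHeader wraps the managed array without copying; CV.Convert then
            // copies its contents into the output Mat.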

            using (var header = Mat.CreateMatHeader(data))
            {
                CV.Convert(header, output);
            }

            return(output);
        }
Code Example #8
        private Mat GetData(float[,] data)
        {
            var output = new Mat(NumberOfChannels, NumberOfSamples, Depth.F32, 1);

            using (var header = Mat.CreateMatHeader(data))
            {
                CV.Convert(header, output);
            }

            return(output);
        }
Code Example #9
        private Mat GetDelta(double[] data)
        {
            var output = new Mat(1, NumberOfFrames, Depth.F64, 1);

            using (var header = Mat.CreateMatHeader(data))
            {
                CV.Convert(header, output);
            }

            return(output);
        }
Code Example #10
File: ConvertToArray.cs (Project: spacelabswc/bonsai)
        static TElement[] Process<TArray, TElement>(TArray input, Depth depth)
            where TArray : Arr
            where TElement : struct
        {
            var inputHeader = input.GetMat();
            var output      = new TElement[inputHeader.Rows * inputHeader.Cols];
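            // The output header shares the managed array's memory, so the depth conversion
            // writes directly into the returned array.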

            using (var outputHeader = Mat.CreateMatHeader(output, inputHeader.Rows, inputHeader.Cols, depth, 1))
            {
                CV.Convert(inputHeader, outputHeader);
            }
            return(output);
        }
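Most snippets on this page repeat the pattern that the generic helper above makes explicit: wrap a managed array with Mat.CreateMatHeader so it shares memory with a Mat, then let CV.Convert perform the element-wise copy and depth conversion. The sketch below is a minimal, non-authoritative illustration of that pattern for a concrete element type; the helper name ToDoubleArray is hypothetical and the F64 target depth is an arbitrary choice, assuming the same OpenCV.Net types used throughout these examples.

        // Illustrative sketch only (hypothetical helper, not from any listed project):
        // converts a single-channel Mat into a flat double[] using the
        // CreateMatHeader + CV.Convert pattern shared by the examples on this page.
        static double[] ToDoubleArray(Mat input)
        {
            var output = new double[input.Rows * input.Cols];

            // The header wraps the managed array without copying, so CV.Convert
            // writes the depth-converted elements directly into the array.
            using (var header = Mat.CreateMatHeader(output, input.Rows, input.Cols, Depth.F64, 1))
            {
                CV.Convert(input, header);
            }

            return output;
        }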
Code Example #11
        private static Mat GetEphysData(ushort[,] data)
        {
            var numChannels = data.GetLength(0);
            var numSamples  = data.GetLength(1);

            var output = new Mat(numChannels, numSamples, Depth.U16, 1);

            using (var header = Mat.CreateMatHeader(data))
            {
                CV.Convert(header, output);
            }

            return(output);
        }
Code Example #12
File: Threshold.cs (Project: spacelabswc/bonsai)
        public override IObservable<IplImage> Process(IObservable<IplImage> source)
        {
            return(source.Select(input =>
            {
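                // U16 inputs are promoted to F32 before thresholding; the result is
                // always an 8-bit image.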
                if (input.Depth == IplDepth.U16)
                {
                    var temp = new IplImage(input.Size, IplDepth.F32, input.Channels);
                    CV.Convert(input, temp);
                    input = temp;
                }

                var output = new IplImage(input.Size, IplDepth.U8, input.Channels);
                CV.Threshold(input, output, ThresholdValue, MaxValue, ThresholdType);
                return output;
            }));
        }
Code Example #13
        Mat GetState(int[] data)
        {
            if (data.Length == 0)
            {
                return(null);
            }

            var output = new Mat(1, data.Length, Depth.S32, 1);

            using (var header = Mat.CreateMatHeader(data, 1, data.Length, Depth.S32, 1))
            {
                CV.Convert(header, output);
            }

            return(output);
        }
Code Example #14
        /// <summary>
        /// Converts the given value object to the specified type, using the specified
        /// context and culture information.
        /// </summary>
        /// <param name="context">
        /// An <see cref="ITypeDescriptorContext"/> that provides a format context.
        /// </param>
        /// <param name="culture">
        /// A <see cref="CultureInfo"/>. If <b>null</b> is passed, the current culture
        /// is assumed.
        /// </param>
        /// <param name="value">The <see cref="Object"/> to convert.</param>
        /// <param name="destinationType">The <see cref="Type"/> to convert the value parameter to.</param>
        /// <returns>An <see cref="Object"/> that represents the converted value.</returns>
        public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType)
        {
            if (value != null && destinationType == typeof(string))
            {
                var mat   = (Mat)value;
                var array = new float[mat.Rows, mat.Cols];
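                // Copy the Mat into a float[,] so ArrayConvert can render it as a string.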
                using (var arrayHeader = Mat.CreateMatHeader(array))
                {
                    CV.Convert(mat, arrayHeader);
                }

                return(ArrayConvert.ToString(array, culture));
            }

            return(base.ConvertTo(context, culture, value, destinationType));
        }
Code Example #15
        public override IObservable<Mat> Process(IObservable<Mat> source)
        {
            return(source.Select(input =>
            {
                var channels = Channels;
                var output = new Mat(input.Size, input.Depth, input.Channels);
                var reference = new Mat(1, input.Cols, input.Depth, input.Channels);
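                // The reference row is the baseline subtracted from every row: the average
                // of all rows, a single selected row, or the mean of the selected rows.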
                if (channels == null || channels.Length == 0)
                {
                    if (input.Depth != Depth.F32)
                    {
                        var temp = new Mat(reference.Rows, reference.Cols, Depth.F32, reference.Channels);
                        CV.Reduce(input, temp, 0, ReduceOperation.Avg);
                        CV.Convert(temp, reference);
                    }
                    else
                    {
                        CV.Reduce(input, reference, 0, ReduceOperation.Avg);
                    }
                }
                else if (channels.Length == 1)
                {
                    CV.Copy(input.GetRow(channels[0]), reference);
                }
                else
                {
                    var sum = input.Depth != Depth.F32
                        ? new Mat(reference.Rows, reference.Cols, Depth.F32, reference.Channels)
                        : reference;
                    sum.SetZero();
                    for (int i = 0; i < channels.Length; i++)
                    {
                        using (var referenceChannel = input.GetRow(channels[i]))
                        {
                            CV.Add(sum, referenceChannel, sum);
                        }
                    }

                    CV.ConvertScale(sum, reference, 1f / channels.Length);
                }

                CV.Repeat(reference, output);
                CV.Sub(input, output, output);
                return output;
            }));
        }
Code Example #16
File: RHDDataFrame.cs (Project: aacuevas/Bonsai.ONIX)
        Mat GetAuxiliaryData(int[,] data)
        {
            if (data.Length == 0)
            {
                return(null);
            }
            var numChannels = data.GetLength(0);
            var numSamples  = data.GetLength(1);

            var output = new Mat(numChannels, numSamples, Depth.U16, 1);

            using (var header = Mat.CreateMatHeader(data))
            {
                CV.Convert(header, output);
            }

            return(output);
        }
Code Example #17
 private Mat GetAuxiliaryData(ushort[,] data)
 {
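     // Depending on AuxFormat, auxiliary samples are returned scaled to volts (F32)
     // or as raw U16 values.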
     using (var header = Mat.CreateMatHeader(data))
     {
         if (AuxFormat == RHD2164Configuration.AuxDataFormat.Volts)
         {
             var output = new Mat(NumberOfAuxChannels, NumberOfSamples, Depth.F32, 1);
             CV.ConvertScale(header, output, 0.0000374);
             return(output);
         }
         else
         {
             var output = new Mat(NumberOfAuxChannels, NumberOfSamples, Depth.U16, 1);
             CV.Convert(header, output);
             return(output);
         }
     }
 }
Code Example #18
        public override IObservable<TArray> Process<TArray>(IObservable<TArray> source)
        {
            var outputFactory = ArrFactory<TArray>.TemplateSizeFactory;
            var inputFactory  = ArrFactory<TArray>.TemplateSizeChannelFactory;

            return(source.Select(input =>
            {
                var output = outputFactory(input, Depth.F32, 2);
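                // CV.DFT operates on floating-point data, so non-F32 inputs are converted
                // first; the output has two channels (real and imaginary parts).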
                if (input.ElementType != output.ElementType)
                {
                    var temp = inputFactory(input, Depth.F32);
                    CV.Convert(input, temp);
                    input = temp;
                }

                CV.DFT(input, output, OperationFlags, 0);
                return output;
            }));
        }
Code Example #19
        public GenericDevice()
        {
            // Reference to context
            this.oni_ref = ONIManager.ReserveDAQ();

            source = Observable.Create<Mat>(observer =>
            {
                EventHandler<FrameReceivedEventArgs> inputReceived;

                oni_ref.Environment.Start();

                inputReceived = (sender, e) =>
                {
                    var frame = e.Value;

                    // If this frame contains data from the selected device_index
                    if (frame.DeviceIndices.Contains(DeviceIndex))
                    {
                        var dat = frame.Data<ushort>(DeviceIndex, ReadSize);
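                        // Copy the raw ushort samples into a Mat with the device's
                        // configured element depth.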
                        var mat = new Mat(1, dat.Length, ElementDepth, 1);

                        using (var header = Mat.CreateMatHeader(dat))
                        {
                            CV.Convert(header, mat);
                        }

                        observer.OnNext(mat);
                    }
                };

                oni_ref.Environment.FrameInputReceived += inputReceived;
                return(Disposable.Create(() =>
                {
                    oni_ref.Environment.FrameInputReceived -= inputReceived;
                    oni_ref.Dispose();
                }));
            });
        }
Code Example #20
        // Mat case
        public override IObservable<Mat> Process(IObservable<Mat> source)
        {
            // TODO: what happens if more than one frame needs to be processed before this finishes?
            return(source.Do(
                       input =>
            {
                // Sanity check
                if (rows < input.Rows || cols < input.Cols)
                {
                    throw new IndexOutOfRangeException();
                }

                // Data to send (row indicator along with input exposure pattern)
                var data = new Mat(rows, cols, Depth.S32, 1);    //S32

                var sub_data = data.GetSubRect(new Rect(1, 0, input.Cols, input.Rows));
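                // Column 0 is reserved for an encoded row index; the exposure pattern
                // goes into the remaining columns.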

                // Convert element type if needed
                var convertDepth = input.Depth != Depth.S32;    //S32
                if (convertDepth)
                {
                    CV.Convert(input, sub_data);
                }
                else
                {
                    CV.Copy(input, sub_data);
                }

                // Write out matrix, row by row with the first number being an encoded row number
                for (int i = 0; i < rows; i++)
                {
                    var row = data.GetRow(i);
                    row[0] = new Scalar(i + 16384, 0, 0, 0);
                    oni_ref.DAQ.Write((uint)DeviceIndex.SelectedIndex, row.Data, 4 * (data.Cols));
                }
            }));
        }
Code Example #21
        public override IObservable<Mat> Process(IObservable<Mat> source)
        {
            return(Observable.Defer(() =>
            {
                int rows = 0;
                double[] data = null;
                double[] feedforwardCoefficients = null;
                double[] feedbackCoefficients = null;
                double[] dataWeights = null;
                double[] dataMemory = null;
                double[] outputWeights = null;
                double[] outputMemory = null;
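                // Reinitialize the filter state whenever the coefficients or the input
                // shape change; identity weights pass the input through unchanged.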
                return source.Select(input =>
                {
                    if (FeedforwardCoefficients != feedforwardCoefficients ||
                        FeedbackCoefficients != feedbackCoefficients ||
                        rows != input.Rows ||
                        data != null && data.Length != rows * input.Cols)
                    {
                        rows = input.Rows;
                        feedforwardCoefficients = FeedforwardCoefficients;
                        feedbackCoefficients = FeedbackCoefficients;
                        dataWeights = InitializeWeights(feedforwardCoefficients);
                        outputWeights = InitializeWeights(feedbackCoefficients);
                        for (int i = 0; i < outputWeights.Length - 1; i++)
                        {
                            outputWeights[i] = -outputWeights[i];
                        }

                        if (dataWeights != IdentityWeight || outputWeights != IdentityWeight)
                        {
                            data = new double[rows * input.Cols];
                            dataMemory = new double[rows * (dataWeights.Length - 1)];
                            outputMemory = new double[rows * (outputWeights.Length - 1)];
                        }
                    }

                    if (dataWeights == IdentityWeight && outputWeights == IdentityWeight)
                    {
                        return input;
                    }
                    else
                    {
                        var dataHandle = GCHandle.Alloc(data, GCHandleType.Pinned);
                        try
                        {
                            var output = new Mat(input.Size, input.Depth, input.Channels);
                            using (var dataHeader = new Mat(input.Size, Depth.F64, 1, dataHandle.AddrOfPinnedObject()))
                            {
                                CV.Convert(input, dataHeader);
                                ProcessData(rows, data, dataWeights, dataMemory, outputWeights, outputMemory);
                                CV.Convert(dataHeader, output);
                            }

                            return output;
                        }
                        finally { dataHandle.Free(); }
                    }
                });
            }));
        }
Code Example #22
        public override IObservable<Mat> Process(IObservable<Tuple<Mat, Mat>> source)
        {
            return(Observable.Create <Mat>(observer =>
            {
                bool active = false;
                var activeBuffers = new List<SampleBuffer>();
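                // Pending sample windows stay in the list until they have accumulated the
                // requested number of samples; rising edges in the trigger start new windows.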
                return source.Subscribe(input =>
                {
                    try
                    {
                        var data = input.Item1;
                        var trigger = input.Item2;

                        // Update pending windows
                        activeBuffers.RemoveAll(buffer =>
                        {
                            buffer.Update(data, 0);
                            if (buffer.Completed)
                            {
                                // Window is ready, emit
                                observer.OnNext(buffer.Samples);
                                return true;
                            }

                            return false;
                        });

                        // Check if new triggers have arrived
                        var nonZero = CV.CountNonZero(trigger);
                        if (nonZero <= 0)
                        {
                            active = false;
                        }
                        else
                        {
                            var triggerBuffer = new byte[trigger.Cols];
                            var triggerHandle = GCHandle.Alloc(triggerBuffer, GCHandleType.Pinned);
                            using (var triggerHeader = new Mat(1, triggerBuffer.Length, Depth.U8, 1, triggerHandle.AddrOfPinnedObject()))
                            {
                                CV.Convert(trigger, triggerHeader);
                                triggerHandle.Free();
                            }

                            for (int i = 0; i < triggerBuffer.Length; i++)
                            {
                                var triggerHigh = triggerBuffer[i] > 0;
                                if (triggerHigh && !active)
                                {
                                    var buffer = new SampleBuffer(data, Count, i);
                                    buffer.Update(data, i);
                                    if (buffer.Completed)
                                    {
                                        // Window is ready, emit
                                        observer.OnNext(buffer.Samples);
                                    }
                                    // Window is missing data, add to list
                                    else
                                    {
                                        activeBuffers.Add(buffer);
                                    }
                                }

                                active = triggerHigh;
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        observer.OnError(ex);
                    }
                },
                                        error => observer.OnError(error),
                                        () => observer.OnCompleted());
            }));
        }
Code Example #23
        public override IObservable<Pose> Process(IObservable<IplImage> source)
        {
            return(Observable.Defer(() =>
            {
                TFSessionOptions options = new TFSessionOptions();
                unsafe
                {
                    byte[] GPUConfig = new byte[] { 0x32, 0x02, 0x20, 0x01 };
                    fixed(void *ptr = &GPUConfig[0])
                    {
                        options.SetConfig(new IntPtr(ptr), GPUConfig.Length);
                    }
                }

                var graph = new TFGraph();
                var session = new TFSession(graph, options, null);
                var bytes = File.ReadAllBytes(ModelFileName);
                graph.Import(bytes);

                TFTensor tensor = null;
                var config = ConfigHelper.PoseConfig(PoseConfigFileName);
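                // The tensor is reallocated whenever the input size changes; converting the
                // frame into an F32 image header writes the pixels directly into the tensor
                // buffer before running the model.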
                return source.Select(input =>
                {
                    if (tensor == null || tensor.GetTensorDimension(1) != input.Height || tensor.GetTensorDimension(2) != input.Width)
                    {
                        tensor = new TFTensor(
                            TFDataType.Float,
                            new long[] { 1, input.Height, input.Width, 3 },
                            input.WidthStep * input.Height * 4);
                    }

                    using (var image = new IplImage(input.Size, IplDepth.F32, 3, tensor.Data))
                    {
                        CV.Convert(input, image);
                    }

                    var runner = session.GetRunner();
                    runner.AddInput(graph["Placeholder"][0], tensor);
                    runner.Fetch(graph["concat_1"][0]);

                    // Run the model
                    var output = runner.Run();

                    // Fetch the results from output:
                    var poseTensor = output[0];
                    var pose = new Mat((int)poseTensor.Shape[0], (int)poseTensor.Shape[1], Depth.F32, 1, poseTensor.Data);
                    var result = new Pose(input);
                    var threshold = MinConfidence;
                    for (int i = 0; i < pose.Rows; i++)
                    {
                        BodyPart bodyPart;
                        bodyPart.Name = config[i];
                        bodyPart.Confidence = (float)pose.GetReal(i, 2);
                        if (bodyPart.Confidence < threshold)
                        {
                            bodyPart.Position = new Point2f(float.NaN, float.NaN);
                        }
                        else
                        {
                            bodyPart.Position.X = (float)pose.GetReal(i, 1);
                            bodyPart.Position.Y = (float)pose.GetReal(i, 0);
                        }
                        result.Add(bodyPart);
                    }
                    return result;
                });
            }));
        }
Code Example #24
        public override IObservable<IplImage> Process(IObservable<IplImage> source)
        {
            return(Observable.Defer(() =>
            {
                int averageCount = 0;
                IplImage image = null;
                IplImage difference = null;
                IplImage background = null;
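                // The first BackgroundFrames images are accumulated into an average
                // background; later frames are compared against it and thresholded.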
                return source.Select(input =>
                {
                    if (background == null || background.Size != input.Size)
                    {
                        averageCount = 0;
                        image = new IplImage(input.Size, IplDepth.F32, input.Channels);
                        difference = new IplImage(input.Size, IplDepth.F32, input.Channels);
                        background = new IplImage(input.Size, IplDepth.F32, input.Channels);
                        background.SetZero();
                    }

                    var output = new IplImage(input.Size, IplDepth.U8, input.Channels);
                    if (averageCount < BackgroundFrames)
                    {
                        averageCount++;
                        output.SetZero();
                        CV.Acc(input, background);
                        if (averageCount == BackgroundFrames)
                        {
                            CV.ConvertScale(background, background, 1.0 / averageCount, 0);
                        }
                    }
                    else
                    {
                        CV.Convert(input, image);
                        switch (SubtractionMethod)
                        {
                        case SubtractionMethod.Bright:
                            CV.Sub(image, background, difference);
                            break;

                        case SubtractionMethod.Dark:
                            CV.Sub(background, image, difference);
                            break;

                        case SubtractionMethod.Absolute:
                        default:
                            CV.AbsDiff(image, background, difference);
                            break;
                        }

                        if (AdaptationRate > 0)
                        {
                            CV.RunningAvg(image, background, AdaptationRate);
                        }

                        CV.Threshold(difference, output, ThresholdValue, 255, ThresholdType);
                    }

                    return output;
                });
            }));
        }
Code Example #25
        public override IObservable<Pose> Process(IObservable<IplImage> source)
        {
            return(Observable.Defer(() =>
            {
                TFSessionOptions options = new TFSessionOptions();
                unsafe
                {
                    byte[] GPUConfig = new byte[] { 0x32, 0x02, 0x20, 0x01 };
                    fixed(void *ptr = &GPUConfig[0])
                    {
                        options.SetConfig(new IntPtr(ptr), GPUConfig.Length);
                    }
                }

                var graph = new TFGraph();
                var session = new TFSession(graph, options, null);
                var bytes = File.ReadAllBytes(ModelFileName);
                graph.Import(bytes);

                IplImage temp = null;
                TFTensor tensor = null;
                TFSession.Runner runner = null;
                var config = ConfigHelper.PoseConfig(PoseConfigFileName);
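                // Optionally rescale the frame before inference; detected part positions
                // are mapped back to the original image coordinates.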
                return source.Select(input =>
                {
                    var poseScale = 1.0;
                    const int TensorChannels = 3;
                    var frameSize = input.Size;
                    var scaleFactor = ScaleFactor;
                    if (scaleFactor.HasValue)
                    {
                        poseScale = scaleFactor.Value;
                        frameSize.Width = (int)(frameSize.Width * poseScale);
                        frameSize.Height = (int)(frameSize.Height * poseScale);
                        poseScale = 1.0 / poseScale;
                    }

                    if (tensor == null || tensor.GetTensorDimension(1) != frameSize.Height || tensor.GetTensorDimension(2) != frameSize.Width)
                    {
                        tensor = new TFTensor(
                            TFDataType.Float,
                            new long[] { 1, frameSize.Height, frameSize.Width, TensorChannels },
                            frameSize.Width * frameSize.Height * TensorChannels * sizeof(float));
                        runner = session.GetRunner();
                        runner.AddInput(graph["Placeholder"][0], tensor);
                        runner.Fetch(graph["concat_1"][0]);
                    }

                    var frame = input;
                    if (frameSize != input.Size)
                    {
                        if (temp == null || temp.Size != frameSize)
                        {
                            temp = new IplImage(frameSize, input.Depth, input.Channels);
                        }

                        CV.Resize(input, temp);
                        frame = temp;
                    }

                    using (var image = new IplImage(frameSize, IplDepth.F32, TensorChannels, tensor.Data))
                    {
                        CV.Convert(frame, image);
                    }

                    // Run the model
                    var output = runner.Run();

                    // Fetch the results from output:
                    var poseTensor = output[0];
                    var pose = new Mat((int)poseTensor.Shape[0], (int)poseTensor.Shape[1], Depth.F32, 1, poseTensor.Data);
                    var result = new Pose(input);
                    var threshold = MinConfidence;
                    for (int i = 0; i < pose.Rows; i++)
                    {
                        BodyPart bodyPart;
                        bodyPart.Name = config[i];
                        bodyPart.Confidence = (float)pose.GetReal(i, 2);
                        if (bodyPart.Confidence < threshold)
                        {
                            bodyPart.Position = new Point2f(float.NaN, float.NaN);
                        }
                        else
                        {
                            bodyPart.Position.X = (float)(pose.GetReal(i, 1) * poseScale);
                            bodyPart.Position.Y = (float)(pose.GetReal(i, 0) * poseScale);
                        }
                        result.Add(bodyPart);
                    }
                    return result;
                });
            }));
        }