Example No. 1
 public ConvolutionLayerConfigurtion(int numberOfKernels,
                                     int kernelSize,
                                     MessageShape inputMessageShape) : base(inputMessageShape)
 {
     NumberOfKernels = numberOfKernels;
     KernelSize      = kernelSize;
 }
Example No. 2
 public ActivationLayerConfiguration(
     MessageShape inputMessageShape,
     ActivatorType activatorType)
     : base(inputMessageShape)
 {
     ActivatorType = activatorType;
 }
Example No. 3
 public ActivationForwardLayer(
     ActivatorType type,
     MessageShape inputMessageShape)
     : base(inputMessageShape, inputMessageShape)
 {
     Activator = ActivatorFactory.Produce(type);
 }
Example No. 4
 public BaseLayer(
     MessageShape inputMessageShape,
     MessageShape outputMessageShape)
 {
     _outputMessageShape = outputMessageShape;
     _inputMessageShape  = inputMessageShape;
 }
Example No. 5
        public static MessageShape BuildOutputMessageShape(MessageShape inputMessageShape, int stride)
        {
            int size = inputMessageShape.Size % stride == 0
                ? inputMessageShape.Size / stride
                : (inputMessageShape.Size / stride) + 1;

            return(new MessageShape(size, inputMessageShape.Depth));
        }
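The size rule above is a ceiling division: when the stride does not divide the input size evenly, the leftover positions still produce one more pooled output. A small illustrative check (values are hypothetical, using the helper defined above):

        // Illustrative only: stride 2 halves a 28x28 map exactly,
        // while a 5x5 map rounds 5/2 up to 3.
        MessageShape even = BuildOutputMessageShape(new MessageShape(28, 3), stride: 2); // Size 14, Depth 3
        MessageShape odd  = BuildOutputMessageShape(new MessageShape(5, 1),  stride: 2); // Size 3,  Depth 1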
Example No. 6
 public PoolingFullLayer(
     MessageShape inputMessageShape, int stride)
     : base(inputMessageShape, stride)
 {
     _cache = new bool[
         inputMessageShape.Depth,
         inputMessageShape.Size,
         inputMessageShape.Size];
 }
Example No. 7
 public DenseLayerConfiguration(
     MessageShape inputMessageShape,
     bool enableBiase,
     int numberOfNeurons)
     : base(inputMessageShape)
 {
     EnableBias      = enableBiase;
     NumberOfNeurons = numberOfNeurons;
 }
Example No. 8
        public static MessageShape BuildOutputMessageShape(
            MessageShape inputMessageShape,
            int kernelSize,
            int numberOfKernels)
        {
            int size = inputMessageShape.Size - kernelSize + 1;

            return(new MessageShape(size, numberOfKernels));
        }
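This matches the standard size formula for an unpadded, stride-1 ("valid") convolution: each spatial dimension shrinks by kernelSize - 1, and the output depth becomes the number of kernels. An illustrative call (numbers chosen only for the example):

        // 28x28 input, 5x5 kernels: 28 - 5 + 1 = 24; depth follows the kernel count.
        MessageShape outShape = BuildOutputMessageShape(new MessageShape(28, 3), kernelSize: 5, numberOfKernels: 8);
        // outShape.Size == 24, outShape.Depth == 8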
Example No. 9
 public ActivationFullLayer(
     ActivatorType type,
     MessageShape inputMessageShape)
     : base(type, inputMessageShape)
 {
     if (inputMessageShape.Depth == 1)
     {
         _cache = new double[inputMessageShape.Size];
     }
     else
     {
         _cache = new double[inputMessageShape.Depth,
                             inputMessageShape.Size,
                             inputMessageShape.Size];
     }
 }
Example No. 10
 public ConvolutionLayerConfigurtion(int numberOfKernels,
                                     int kernelSize,
                                     double[][,,] weights,
                                     double[] biases,
                                     MessageShape inputMessageShape) : base(inputMessageShape)
 {
     Weights         = weights;
     Biases          = biases;
     NumberOfKernels = numberOfKernels;
     KernelSize      = kernelSize;
 }
Example No. 11
 public DenseForwardLayer(
     MessageShape inputMessageShape,
     int numberOfNeurons,
     bool enableBiases)
     : base(inputMessageShape, new MessageShape(numberOfNeurons))
 {
     EnableBiases    = enableBiases;
     NumberOfNeurons = numberOfNeurons;
     Weights         = Matrix.Build.Dense(inputMessageShape.Size, NumberOfNeurons);
     Biases          = Vector.Build.Dense(NumberOfNeurons);
 }
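The constructor derives everything from two numbers: the output shape is a flat vector of numberOfNeurons, the weight matrix is inputSize x numberOfNeurons, and there is one bias per neuron. A hedged usage sketch (assuming the layer types shown in these examples are in scope):

     // Illustrative only: a flattened 784-element input feeding 10 neurons.
     var dense = new DenseForwardLayer(new MessageShape(784), numberOfNeurons: 10, enableBiases: true);
     // Weights is built as a 784x10 matrix, Biases as a 10-element vector,
     // and the layer's output shape is a MessageShape of length 10.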
Example No. 12
 public DenseLayerConfiguration(MessageShape inputMessageShape,
                                bool enableBiase,
                                int numberOfNeurons,
                                double[,] weights,
                                double[] biases)
     : base(inputMessageShape)
 {
     EnableBias      = enableBiase;
     Weights         = weights;
     Biases          = biases;
     NumberOfNeurons = numberOfNeurons;
 }
Example No. 13
 public ConvolutionForwardLayer(
     MessageShape inputMessageShape,
     int kernelSize,
     int numberOfKernels) :
     base(inputMessageShape, BuildOutputMessageShape(inputMessageShape, kernelSize, numberOfKernels))
 {
     KernelSize      = kernelSize;
     NumberOfKernels = numberOfKernels;
     Kernels         = new double[numberOfKernels][, , ];
     Kernels.UpdateForEach <double[, , ]>(q => new double[inputMessageShape.Depth, kernelSize, kernelSize]);
     Biases = new double[numberOfKernels];
 }
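Each entry of Kernels is a Depth x kernelSize x kernelSize tensor, so every kernel spans the full depth of its input, and the output shape comes from the convolution helper shown earlier. A rough, illustrative instantiation:

     // Illustrative only: a 32x32x3 input with eight 5x5 kernels allocates
     // eight 3x5x5 weight tensors plus 8 biases; the output map is 28x28x8.
     var conv = new ConvolutionForwardLayer(new MessageShape(32, 3), kernelSize: 5, numberOfKernels: 8);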
Example No. 14
 public ConvolutionFullLayer(
     MessageShape inputMessageShape,
     int kernelSize,
     int numberOfkernels,
     Optimizer optimizer)
     : base(inputMessageShape, kernelSize, numberOfkernels)
 {
     _cache            = new double[inputMessageShape.Depth, inputMessageShape.Size, inputMessageShape.Size];
     _weightOptimizers = Helper.InitializeKernelOptimizers(
         inputMessageShape.Depth, numberOfkernels, kernelSize, optimizer);
     _biasOptimizers = Helper.InitializeBiasOptimizers(numberOfkernels, optimizer);
 }
Example No. 15
 public DenseFullLayer(
     MessageShape inputMessageShape,
     int numberOfNeurons,
     bool enableBiases,
     Optimizer optimizer)
     : base(inputMessageShape, numberOfNeurons, enableBiases)
 {
     _weightOptimizers = new Optimizer[inputMessageShape.Size, numberOfNeurons];
     _weightOptimizers.UpdateForEach <Optimizer>((q, i) => optimizer.Clone() as Optimizer);
     _biasOptimizers = new Optimizer[numberOfNeurons];
     _biasOptimizers.UpdateForEach <Optimizer>((q, i) => optimizer.Clone() as Optimizer);
     _cache = Vector.Build.Dense(inputMessageShape.Size);
 }
Example No. 16
        static MessageItemRowNotificationHandler()
        {
            Shape[] shapes = new Shape[]
            {
                ItemShape.CreateShape(),
                MessageShape.CreateShape(),
                TaskShape.CreateShape()
            };
            ResponseShape responseShape = WellKnownShapes.ResponseShapes[WellKnownShapeName.MailListItem];

            MessageItemRowNotificationHandler.defaultSubscriptionProperties           = RowNotificationHandler.GetPropertyDefinitionsForResponseShape(shapes, responseShape, new PropertyDefinition[0]);
            MessageItemRowNotificationHandler.normalizedSubjectPropertyDefinition     = WellKnownProperties.NormalizedSubject.ToPropertyDefinition();
            MessageItemRowNotificationHandler.lastVerbExecutedPropertyDefinition      = WellKnownProperties.LastVerbExecuted.ToPropertyDefinition();
            MessageItemRowNotificationHandler.lastVerbExecutionTimePropertyDefinition = WellKnownProperties.LastVerbExecutionTime.ToPropertyDefinition();
        }
Example No. 17
        private static PropertyDefinition[] GetSubscriptionProperties(IFeaturesManager featuresManager)
        {
            string            text = WellKnownShapeName.MailListItem.ToString();
            ItemResponseShape itemResponseShape = new ItemResponseShape();

            itemResponseShape.BaseShape = ShapeEnum.IdOnly;
            ItemResponseShape responseShape = Global.ResponseShapeResolver.GetResponseShape <ItemResponseShape>(text, itemResponseShape, featuresManager);

            if (responseShape == null)
            {
                ExTraceGlobals.NotificationsCallTracer.TraceError <string>((long)text.GetHashCode(), "[MessageItemRowNotificationHandler.GetSubscriptionProperties] Unable to resolve shapeName: {0} with features manager", text);
                return(MessageItemRowNotificationHandler.defaultSubscriptionProperties);
            }
            Shape[] shapes = new Shape[]
            {
                ItemShape.CreateShape(),
                MessageShape.CreateShape(),
                TaskShape.CreateShape()
            };
            return(RowNotificationHandler.GetPropertyDefinitionsForResponseShape(shapes, responseShape, new PropertyDefinition[0]));
        }
Example No. 18
 public static MessageShape ComputeOutputMessageShape(MessageShape shape)
 {
     return(new MessageShape(shape.Size * shape.Size * shape.Depth));
 }
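Flattening keeps every value but drops the spatial layout: a Size x Size x Depth volume becomes a single vector of length Size * Size * Depth. Illustrative call:

     // A 12x12 feature map with depth 8 flattens into a 12 * 12 * 8 = 1152-element vector.
     MessageShape flat = ComputeOutputMessageShape(new MessageShape(12, 8));
     // flat.Size == 1152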
Example No. 19
 public PoolingForwardLayer(MessageShape inputMessageShape, int stride)
     : base(inputMessageShape, BuildOutputMessageShape(inputMessageShape, stride))
 {
     Stride = stride;
 }
Example No. 20
 public FlattenFullLayer(MessageShape inputMessageShape) : base(inputMessageShape)
 {
 }
Example No. 21
 public InputForwardLayer(
     MessageShape inputMessageShape)
     : base(inputMessageShape, inputMessageShape)
 {
 }
Example No. 22
 public LayerConfiguration(MessageShape inputMessageShape)
 {
     MessageShape = inputMessageShape;
 }
Example No. 23
 public InputFullLayer(MessageShape inputMessageShape)
     : base(inputMessageShape)
 {
 }
Example No. 24
 public FlattenForwardLayer(MessageShape inputMessageShape)
     : base(inputMessageShape, ComputeOutputMessageShape(inputMessageShape))
 {
 }
Example No. 25
 public SoftMaxFullLayer(MessageShape inputMessageShape)
     : base(inputMessageShape)
 {
     _cache = Vector.Build.Dense(inputMessageShape.Size);
 }
Example No. 26
 public InputLayerConfiguration(MessageShape inputMessageShape)
     : base(inputMessageShape)
 {
 }
Example No. 27
 public SoftmaxLayerConfiguration(MessageShape inputMessageShape)
     : base(inputMessageShape)
 {
 }
Example No. 28
 public FlattenLayerConfiguration(MessageShape inputMessageShape)
     : base(inputMessageShape)
 {
 }
Example No. 29
 public PoolingLayerConfiguration(int kernelSize, MessageShape inputMessageShape)
     : base(inputMessageShape)
 {
     KernelSize = kernelSize;
 }
Example No. 30
 public SoftMaxForwardLayer(
     MessageShape inputMessageShape)
     : base(inputMessageShape, inputMessageShape)
 {
 }