Example #1
        private Dictionary<string, string> DetermineLabels(PrometheusMetricDefinition metricDefinition, ScrapeResult scrapeResult, MeasuredMetric measuredMetric, Dictionary<string, string> defaultLabels)
        {
            var labels = new Dictionary<string, string>(scrapeResult.Labels.Select(label => new KeyValuePair<string, string>(label.Key.SanitizeForPrometheusLabelKey(), label.Value)));

            if (measuredMetric.IsDimensional)
            {
                labels.Add(measuredMetric.DimensionName.SanitizeForPrometheusLabelKey(), measuredMetric.DimensionValue);
            }

            if (metricDefinition?.Labels?.Any() == true)
            {
                foreach (var customLabel in metricDefinition.Labels)
                {
                    var customLabelKey = customLabel.Key.SanitizeForPrometheusLabelKey();
                    if (labels.ContainsKey(customLabelKey))
                    {
                        _logger.LogWarning("Custom label {CustomLabelName} was already specified with value '{LabelValue}' instead of '{CustomLabelValue}'. Ignoring...", customLabel.Key, labels[customLabelKey], customLabel.Value);
                        continue;
                    }

                    labels.Add(customLabelKey, customLabel.Value);
                }
            }

            foreach (var defaultLabel in defaultLabels)
            {
                var defaultLabelKey = defaultLabel.Key.SanitizeForPrometheusLabelKey();
                if (labels.ContainsKey(defaultLabelKey) == false)
                {
                    labels.Add(defaultLabelKey, defaultLabel.Value);
                }
            }

            // Add the tenant id
            var metricsDeclaration = _metricsDeclarationProvider.Get(applyDefaults: true);

            if (labels.ContainsKey("tenant_id") == false)
            {
                labels.Add("tenant_id", metricsDeclaration.AzureMetadata.TenantId);
            }

            // Transform labels, if need be
            if (_prometheusConfiguration.CurrentValue.Labels != null)
            {
                labels = LabelTransformer.TransformLabels(_prometheusConfiguration.CurrentValue.Labels.Transformation, labels);
            }

            var orderedLabels = labels.OrderBy(kvp => kvp.Key).ToDictionary(kvp => kvp.Key, kvp => kvp.Value);

            return orderedLabels;
        }
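
Example #1 relies on a SanitizeForPrometheusLabelKey() string extension that is not part of the snippet. The sketch below is a hypothetical stand-in based only on the Prometheus label-name rule [a-zA-Z_][a-zA-Z0-9_]*; the project's real extension may behave differently.

using System.Text.RegularExpressions;

// Hypothetical stand-in for the extension used in Example #1.
// Assumption: characters outside [a-zA-Z0-9_] are replaced with underscores,
// and a leading digit gets an underscore prefix, so the result is a valid
// Prometheus label name.
public static class PrometheusLabelKeyExtensions
{
    public static string SanitizeForPrometheusLabelKey(this string labelKey)
    {
        if (string.IsNullOrEmpty(labelKey))
        {
            return labelKey;
        }

        // Replace every character that is not a letter, digit or underscore.
        var sanitized = Regex.Replace(labelKey, "[^a-zA-Z0-9_]", "_");

        // Label names must not start with a digit.
        return char.IsDigit(sanitized[0]) ? "_" + sanitized : sanitized;
    }
}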
Example #2
        public void TransformLabels_TransformationToLowercase_LabelValuesTransformedToLowercase()
        {
            // Arrange
            var inputLabels = new Dictionary<string, string>
            {
                { BogusGenerator.Name.FirstName(), BogusGenerator.Name.FirstName().ToUpper() },
                { BogusGenerator.Name.FirstName(), BogusGenerator.Name.FirstName() },
                { BogusGenerator.Name.FirstName(), BogusGenerator.Name.FirstName().ToUpperInvariant() }
            };

            // Act
            var transformedLabels = LabelTransformer.TransformLabels(LabelTransformation.Lowercase, inputLabels);

            // Assert
            Assert.Equal(inputLabels.Count, transformedLabels.Count);
            foreach (var inputLabel in inputLabels)
            {
                Assert.True(transformedLabels.ContainsKey(inputLabel.Key));
                Assert.Equal(inputLabel.Value.ToLower(), transformedLabels[inputLabel.Key]);
            }
        }
Example #3
        public void TransformLabels_NoTransformationWithDifferentCases_NoTransformationWasApplied()
        {
            // Arrange
            var inputLabels = new Dictionary<string, string>
            {
                { BogusGenerator.Name.FirstName(), BogusGenerator.Name.FirstName() },
                { BogusGenerator.Name.FirstName(), BogusGenerator.Name.FirstName() },
                { BogusGenerator.Name.FirstName(), BogusGenerator.Name.FirstName() },
                { BogusGenerator.Name.FirstName(), BogusGenerator.Name.FirstName() },
                { BogusGenerator.Name.FirstName(), BogusGenerator.Name.FirstName() },
            };

            // Act
            var transformedLabels = LabelTransformer.TransformLabels(LabelTransformation.None, inputLabels);

            // Assert
            Assert.Equal(inputLabels.Count, transformedLabels.Count);
            foreach (var inputLabel in inputLabels)
            {
                Assert.True(transformedLabels.ContainsKey(inputLabel.Key));
                Assert.Equal(inputLabel.Value, transformedLabels[inputLabel.Key]);
            }
        }
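
The two tests above pin down the observable contract of LabelTransformer.TransformLabels: LabelTransformation.None returns the labels unchanged, while LabelTransformation.Lowercase lowercases the values (the keys are left untouched, since the assertions look entries up by the original key). A minimal sketch that would satisfy these tests is shown below; it is an illustration under those assumptions, not the project's actual implementation, and the real enum may have more members.

using System.Collections.Generic;
using System.Linq;

public enum LabelTransformation
{
    None,
    Lowercase
}

public static class LabelTransformer
{
    // Minimal sketch: only the behavior exercised by the tests in
    // Examples #2 and #3 (None keeps values as-is, Lowercase lowercases them).
    public static Dictionary<string, string> TransformLabels(
        LabelTransformation transformation,
        Dictionary<string, string> labels)
    {
        switch (transformation)
        {
            case LabelTransformation.Lowercase:
                return labels.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.ToLower());
            case LabelTransformation.None:
            default:
                return new Dictionary<string, string>(labels);
        }
    }
}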
Example #4
        /// <summary>
        /// Runs a single training epoch: selects a batch according to the configured batch type,
        /// trains on it in parallel and applies the accumulated gradient to the network.
        /// </summary>
        /// <returns>The accumulated cost averaged over the processed samples.</returns>
        public override double RunEpoch()
        {
            var data = DataTransformer.Transform(Path(Config.Train.Data));

            int[] permutation = null;

            switch (Config.Batch)
            {
            case NetworkConfig.BatchType.Full:
            {
                permutation = Combinatorics.GeneratePermutation(data.RowCount);
                break;
            }

            case NetworkConfig.BatchType.Mini:
            {
                var batchSize = data.RowCount;

                if (Config.BatchSize > 0 && Config.BatchSize <= data.RowCount)
                {
                    batchSize = Config.BatchSize;
                }

                permutation = Combinatorics.GenerateVariation(data.RowCount, batchSize);
                break;
            }

            default:
            {
                permutation = new int[] { DiscreteUniform.Sample(0, data.RowCount - 1) };
                break;
            }
            }

            if (data.ColumnCount != Config.Inputs)
            {
                throw new Exception("Invalid data format.");
            }

            accumulatedCost = 0;

            accumulatedGradient = new Matrix<double>[layers.Count];

            var tasks = new Task[permutation.Length];

            for (int i = 0; i < permutation.Length; i++)
            {
                var index = permutation[i];

                tasks[i] = Task.Factory.StartNew((object trainState) =>
                {
                    var state = trainState as TrainState;

                    Train(
                        state.input,
                        state.target,
                        out double cost,
                        out List<Matrix<double>> gradient
                        );

                    // Console.WriteLine("\nTask cost {0} + {1}", cost, accumulatedCost);

                    accumulatedCost += cost;

                    for (int g = 0; g < gradient.Count; g++)
                    {
                        if (accumulatedGradient[g] == null)
                        {
                            accumulatedGradient[g] = Matrix<double>.Build.Dense(gradient[g].RowCount, gradient[g].ColumnCount);
                        }
                        accumulatedGradient[g] += gradient[g];
                    }
                }, new TrainState
                {
                    input  = data.Row(index),
                    target = LabelTransformer.TransformLabels().Row(index)
                }, TaskCreationOptions.LongRunning);

                //tasks[i].Wait(1);

                /*
                 * Train(
                 *  data.Row(index),
                 *  LabelTransformer.TransformLabels().Row(index),
                 *  out double cost,
                 *  out List<Matrix<double>> gradient
                 * );
                 *
                 * accumulatedCost += cost;
                 *
                 * for (int g = 0; g < gradient.Count; g++)
                 * {
                 *  if (accumulatedGradient[g] == null)
                 *  {
                 *      accumulatedGradient[g] = Matrix<double>.Build.Dense(gradient[g].RowCount, gradient[g].ColumnCount);
                 *  }
                 *  accumulatedGradient[g] += gradient[g];
                 * }
                 */
            }

            Task.WaitAll(tasks, Timeout.Infinite, CancellationToken.None);

            // Go through each layer of the network in opposite direction.
            for (int i = layers.Count - 1; i >= 0; i--)
            {
                var layer         = layers[i];
                var layerGradient = accumulatedGradient[layers.Count - 1 - i];

                if (Program.Debug)
                {
                    Console.WriteLine("accumulated gradient for layer {0}: {1}", i, layerGradient);
                }

                for (int j = 0; j < layer.Size; j++)
                {
                    for (int k = 0; k < layer.Neurons[j].InputCount; k++)
                    {
                        // Weight k is updated from gradient element k + 1; element 0 holds the bias gradient.
                        layer.Neurons[j].Weights[k] += layerGradient.Row(j).At(k + 1);
                    }
                    layer.Neurons[j].Bias += layerGradient.Row(j).At(0);
                }
            }

            return accumulatedCost / permutation.Length;
        }
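
Example #4 hands a TrainState object to each task, but the type itself is not part of the snippet. Assuming the MathNet.Numerics types implied by data.Row(index) and LabelTransformer.TransformLabels().Row(index), a plausible reconstruction looks like this; the actual definition in the project may differ.

using MathNet.Numerics.LinearAlgebra;

// Hypothetical reconstruction of the TrainState carrier used in Example #4:
// one input row and its corresponding target row, both MathNet vectors.
internal class TrainState
{
    public Vector<double> input;
    public Vector<double> target;
}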