private void CalculateTransformation(Messages.TransformSeries msg)
        {
            // Take this transform off the front of the series queue
            var transform = msg.Transforms.Dequeue();

            // Determine the rolling average window length, falling back to the default when
            // the parameter was not supplied. TryGetValue performs a single dictionary lookup
            // instead of ContainsKey followed by the indexer.
            string rawLength;
            int rollingAvgLength = transform.Parameters.TryGetValue(ROLLING_AVG_LENGTH, out rawLength) ?
                                   Int32.Parse(rawLength) : ROLLING_AVG_LENGTH_DEFAULT_VALUE;

            // The lowest rolling average over the series is used as the base noise level
            float baseNoise = FindLowestRollingAvg(msg.Measurements.Values, 0, rollingAvgLength);

            // Subtract the base noise level from every value, clamping at zero so no
            // negative measurements are produced
            var newValues = new SortedDictionary <long, float>();

            foreach (KeyValuePair <long, float> entry in msg.Measurements.Values)
            {
                newValues.Add(entry.Key, Math.Max(entry.Value - baseNoise, 0));
            }

            // Build the renamed Metric and route it to the next transform in the series
            var metric = new Metric(msg.Measurements.Name + TRANSFORM_NAME_CONCATENATOR + TRANSFORM_NAME, newValues);
            var series = new Messages.TransformSeries(metric, msg.Transforms, msg.GroupID, msg.ConnectionId, msg.VmName, msg.VmDate);

            RouteTransform(series);
        }
        private void ProcessRequest(Messages.TransformSeries msg)
        {
            // Create a key from the vmName, vmDate and groupId: multiple combines will have the
            // same group id, so only the three parts together are unique.
            string key = msg.VmName + "-" + msg.VmDate + "-" + msg.GroupID.ToString();

            _log.Debug($"Received transform series for combining. Key: {key}");

            // Check to see if there are any transforms already received and stored with the same
            // key. TryGetValue performs a single dictionary lookup instead of ContainsKey
            // followed by the indexer.
            List<Messages.TransformSeries> storedTransforms;
            if (TransformSereiesHoldingStore.TryGetValue(key, out storedTransforms))
            {
                _log.Debug($"Already have some transforms for key: {key}");

                // Remove the combine transform from this message's queue and read the number of
                // series expected from its parameters.
                var numExpected = Convert.ToInt32(msg.Transforms.Dequeue().Parameters[TRANSFORM_PARAM_COUNT_NAME]);

                if (storedTransforms.Count == numExpected - 1)
                {
                    _log.Debug($"All transforms now received for Key: {key}. Combining the metrics from each and then routing.");

                    // Enough transforms have been received so combine them
                    storedTransforms.Add(msg);
                    var metric = Combine(storedTransforms);

                    // Find the stored message with the same key that has the largest number of transforms and use it to
                    // route the result. This is because only one of the messages will have any further transforms on it
                    // from the DSL.
                    var msgToUseForRouting = msg;
                    foreach (var storedMsg in storedTransforms)
                    {
                        if (storedMsg.Transforms.Count > msgToUseForRouting.Transforms.Count)
                        {
                            msgToUseForRouting = storedMsg;
                        }
                    }

                    // Route the result of the combine transform
                    var series = new Messages.TransformSeries(metric, msgToUseForRouting.Transforms, msgToUseForRouting.GroupID,
                                                              msgToUseForRouting.ConnectionId, msgToUseForRouting.VmName, msgToUseForRouting.VmDate);
                    RouteTransform(series);

                    // NOTE(review): the completed entry appears to stay in TransformSereiesHoldingStore
                    // after routing - confirm whether it should be removed here to avoid unbounded growth.
                }
                else
                {
                    _log.Debug($"Still waiting for some transforms to be received, storing received transforms with others. Key: {key}");

                    // Still waiting for some of the metrics in the combine to be received so store this one
                    storedTransforms.Add(msg);
                }
            }
            else
            {
                _log.Debug($"This is the first transform with Key: {key}. Storing it and awaiting the rest.");

                // There are no entries for this TransformSeries, this is the first one
                var list = new List<Messages.TransformSeries> { msg };
                TransformSereiesHoldingStore.Add(key, list);
            }
        }
        public static void RouteTransform(Messages.TransformSeries series)
        {
            // Work out where the series goes next: either the actor for the next transform
            // still waiting in the queue, or the MetricAccumulatorDispatcherActor once the
            // queue is empty and every transform has been completed.
            string path;

            if (series.Transforms.Count == 0)
            {
                // Nothing left to do - hand the finished series back for dispatch
                path = "/user/" + MetricAccumulatorDispatcherActor.ACTOR_NAME;
            }
            else
            {
                // Peek (do not dequeue) the next transform and build its actor path
                path = "/user/Transforms-" + series.Transforms.Peek().Name.ToUpper();
            }

            // Look up the actor at the computed path and send it the transform series.
            // Each transform actor in turn uses this method to route to the next
            // transform in the chain.
            Context.ActorSelection(path).Tell(series);
        }
        private void CalculateTransformation(Messages.TransformSeries msg)
        {
            // Take this transform off the front of the series queue
            var transform = msg.Transforms.Dequeue();

            // Determine the threshold at or below which values are zeroed, falling back to the
            // default when the parameter was not supplied. TryGetValue avoids the double
            // dictionary lookup of ContainsKey + indexer.
            string rawThreshold;
            int valuesToRemove = transform.Parameters.TryGetValue(VALUES_TO_REMOVE, out rawThreshold) ?
                                 Int32.Parse(rawThreshold) : VALUES_TO_REMOVE_DEFAULT_VALUE;

            // Copy the measurements, zeroing every value at or below the threshold
            var newValues = new SortedDictionary <long, float>();

            foreach (var entry in msg.Measurements.Values)
            {
                newValues.Add(entry.Key, (entry.Value <= valuesToRemove) ? 0.0F : entry.Value);
            }

            // Route the new Metric to the next transform
            var metric = new Metric(msg.Measurements.Name + TRANSFORM_NAME_CONCATENATOR + TRANSFORM_NAME, newValues);
            var series = new Messages.TransformSeries(metric, msg.Transforms, msg.GroupID, msg.ConnectionId, msg.VmName, msg.VmDate);

            RouteTransform(series);
        }
        private async void ReturnResult(Messages.TransformSeries msg)
        {
            // NOTE(review): async void is kept because the signature must stay compatible with
            // the actor's handler shape; exceptions are caught below since an async void
            // method cannot surface them to a caller.
            var json = JsonConvert.SerializeObject(msg);

            _log.Debug($"Returning result to vmstatsGUI. Message received is: {json}");

            try
            {
                // Contact the vmstatsgui webserver and send it the details of the completed transform pipeline.
                // TODO(review): creating an HttpClient per call risks socket exhaustion under load;
                // consider a shared static instance or IHttpClientFactory.
                using (var client = new HttpClient())
                {
                    client.BaseAddress = new Uri(guiWebserverUrl);

                    // Add an Accept header for JSON format.
                    client.DefaultRequestHeaders.Accept.Add(
                        new MediaTypeWithQualityHeaderValue("application/json"));

                    // An empty group id marks the result as carrying the raw, untransformed data
                    // TODO plumb the vmname and date throughout the chain so it gets back to the client
                    bool isRaw  = msg.GroupID == Guid.Empty;
                    var  result = new Messages.Result(msg.ConnectionId, msg.Measurements.Values.Keys.ToArray(), msg.Measurements.Values.Values.ToArray(),
                                                      isRaw, msg.VmName, msg.VmDate, msg.Measurements.Name);
                    string postBody = JsonConvert.SerializeObject(result);
                    _log.Debug($"Returning result to vmstatsGUI. Result is: {postBody}");

                    // Send the results to the vmstatsGUI
                    HttpResponseMessage response = await client.PostAsync(guiWebserverUrl, new StringContent(postBody, Encoding.UTF8, "application/json"));

                    if (response.IsSuccessStatusCode)
                    {
                        _log.Info("Successfully returned the result of a transform series to the vmstatsGUI.");
                    }
                    else
                    {
                        _log.Error($"Failed to ReturnResult to vmstatsGUI. Reason: {response.ReasonPhrase}");
                    }
                }
            }
            catch (HttpRequestException hre)
            {
                _log.Error($"ERROR Calling vmstatsgui webserver. Error is: {hre.Message}. The URI used is {guiWebserverUrl}");
            }
            catch (TaskCanceledException tce)
            {
                // Thrown on timeouts as well as explicit cancellation
                _log.Error($"ERROR Calling vmstatsgui webserver. Error is: {tce.Message}. The URI used is {guiWebserverUrl}");
            }
            catch (Exception ex)
            {
                _log.Error($"ERROR Calling vmstatsgui webserver. Error is: {ex.Message}. The URI used is {guiWebserverUrl}");
            }
        }
// ---- Example 6 ----
        private void CalculateTransformation(Messages.TransformSeries msg)
        {
            // Take this transform off the front of the series queue
            var transform = msg.Transforms.Dequeue();

            // Determine the percentile of lowest non-zero values to remove, falling back to
            // the default when the parameter was not supplied
            string rawPercentile;
            int percentileToRemove = transform.Parameters.TryGetValue(PERCENTILE_TO_REMOVE, out rawPercentile) ?
                                     Int32.Parse(rawPercentile) : PERCENTILE_TO_REMOVE_DEFAULT_VALUE;

            // Order the non-zero measurements by value and then by time. This replaces the old
            // string-concatenation sort key, which lost precision for large values and threw on
            // duplicate keys. The list is materialised so the dictionary can be mutated below.
            var ordered = msg.Measurements.Values
                          .Where(entry => entry.Value > 0.0F)
                          .OrderBy(entry => entry.Value)
                          .ThenBy(entry => entry.Key)
                          .ToList();

            // Number of entries that fall inside the percentile. Multiplying before dividing
            // avoids the integer-division truncation of (Count / 100) * percentile, which
            // always produced 0 whenever fewer than 100 non-zero values were present.
            int count = ordered.Count * percentileToRemove / 100;

            // Zero the values that fall inside the percentile covered by percentileToRemove
            for (int i = 0; i < count; i++)
            {
                msg.Measurements.Values[ordered[i].Key] = 0;
            }

            // Create a clone of the updated measurements
            var newValues = new SortedDictionary <long, float>(msg.Measurements.Values);

            // Route the new Metric to the next transform
            var metric = new Metric(msg.Measurements.Name + TRANSFORM_NAME_CONCATENATOR + TRANSFORM_NAME, newValues);
            var series = new Messages.TransformSeries(metric, msg.Transforms, msg.GroupID, msg.ConnectionId, msg.VmName, msg.VmDate);

            RouteTransform(series);
        }
        /// <summary>
        /// Calculates the arithmetic mean of the measurement values in a transform series.
        /// </summary>
        /// <param name="msg">A transform series containing the measurements to average</param>
        /// <returns>The calculated mean (NaN if the series is empty)</returns>
        private float Mean(Messages.TransformSeries msg)
        {
            float total = 0.0F;

            // Accumulate over just the float values; the timestamps are irrelevant here
            foreach (float value in msg.Measurements.Values.Values)
            {
                total += value;
            }

            return(total / msg.Measurements.Values.Count);
        }
// ---- Example 8 ----
        private void CalculateTransformation(Messages.TransformSeries msg)
        {
            // Take this transform off the front of the series queue
            var transform = msg.Transforms.Dequeue();

            // Determine the compression period in minutes, falling back to the default when
            // the parameter was not supplied. TryGetValue avoids a double dictionary lookup.
            string rawPeriod;
            int timePeriod = transform.Parameters.TryGetValue(TIME_PERIOD, out rawPeriod) ?
                             Int32.Parse(rawPeriod) : TIME_PERIOD_DEFAULT_VALUE;

            // Create the storage for the new values and the intermediate values used in the calculation
            var newValues  = new SortedDictionary <long, float>();
            var tempValues = new SortedDictionary <long, TempValue>();

            // Calculate the start of the day in ticks
            var startPeriodTicks = (Convert.ToDateTime(msg.VmDate)).Ticks;

            // Calculate the time period to compress to, in ticks
            var timePeriodTicks = TimeSpan.TicksPerMinute * timePeriod;

            // For each value in the metric, work out which compressed time bucket it belongs
            // to and accumulate its value there
            foreach (var entry in msg.Measurements.Values)
            {
                // Integer division snaps the timestamp down to the start of its bucket
                var compressedTimeTicks = (((entry.Key - startPeriodTicks) / timePeriodTicks) * timePeriodTicks) + startPeriodTicks;

                TempValue tempValue;
                if (tempValues.TryGetValue(compressedTimeTicks, out tempValue))
                {
                    // Bucket already exists so add this value to its running total
                    tempValue.Value += entry.Value;
                    tempValue.Count++;
                }
                else
                {
                    // Bucket does not exist yet so create it seeded with this value
                    tempValues.Add(compressedTimeTicks, new TempValue(entry.Value));
                }
            }

            // Create the Metric values by averaging each bucket's accumulated total
            foreach (var entry in tempValues)
            {
                newValues.Add(entry.Key, entry.Value.Value / entry.Value.Count);
            }
            var metric = new Metric(msg.Measurements.Name + TRANSFORM_NAME_CONCATENATOR + TRANSFORM_NAME, newValues);

            // Route the new Metric to the next transform
            var series = new Messages.TransformSeries(metric, msg.Transforms, msg.GroupID, msg.ConnectionId, msg.VmName, msg.VmDate);

            RouteTransform(series);
        }
// ---- Example 9 ----
        private void CalculateTransformation(Messages.TransformSeries msg)
        {
            // Remove this transform from the front of the queue; the dequeued transform
            // itself is not used by this step
            msg.Transforms.Dequeue();

            // First fork: send the current Metric with an empty transform queue so that it
            // is routed straight to the final step and the results go back to the user
            RouteTransform(new Messages.TransformSeries(msg.Measurements, new Queue <Messages.Transform>(), msg.GroupID, msg.ConnectionId, msg.VmName, msg.VmDate));

            // Second fork: send the Metric unchanged down the remaining transform chain
            RouteTransform(new Messages.TransformSeries(msg.Measurements, msg.Transforms, msg.GroupID, msg.ConnectionId, msg.VmName, msg.VmDate));
        }
        private void CalculateTransformation(Messages.TransformSeries msg)
        {
            // Take this transform off the front of the series queue
            var transform = msg.Transforms.Dequeue();

            // Get the number of standard deviations at which to flatten, falling back to the
            // default when the parameter was not supplied
            string rawSd;
            int sdToFlatten = transform.Parameters.TryGetValue(SD_TO_FLATTEN, out rawSd) ?
                              Int32.Parse(rawSd) : SD_TO_FLATTEN_DEFAULT_VALUE;

            // Get the mean of the values
            var mean = Mean(msg);

            // Calculate the standard deviation for the sample
            float sum = 0.0F;

            foreach (var entry in msg.Measurements.Values)
            {
                sum += (entry.Value - mean) * (entry.Value - mean);
            }
            float sd = (float)Math.Sqrt((double)(sum / msg.Measurements.Values.Count));

            // Hoist the loop-invariant cap: any value above mean + (sd * sdToFlatten) is
            // clamped down to that cap
            float cap = mean + (sd * sdToFlatten);

            // Flatten any points in the original measurement so that a value more than the
            // specified number of standard deviations above the mean is replaced by the cap
            var newValues = new SortedDictionary <long, float>();

            foreach (var entry in msg.Measurements.Values)
            {
                newValues.Add(entry.Key, Math.Min(entry.Value, cap));
            }

            // Route the new Metric to the next transform
            var metric = new Metric(msg.Measurements.Name + TRANSFORM_NAME_CONCATENATOR + TRANSFORM_NAME, newValues);
            var series = new Messages.TransformSeries(metric, msg.Transforms, msg.GroupID, msg.ConnectionId, msg.VmName, msg.VmDate);

            RouteTransform(series);
        }
// ---- Example 11 ----
        private void CalculateTransformation(Messages.TransformSeries msg)
        {
            // Take this transform off the front of the series queue
            var transform = msg.Transforms.Dequeue();

            // Get the name to save the metric under, falling back to the default when the
            // parameter was not supplied (the previous comment about a rolling average was a
            // copy-paste error)
            string saveName;
            if (!transform.Parameters.TryGetValue(SAVE_NAME, out saveName))
            {
                saveName = SAVE_NAME_DEFAULT_VALUE;
            }

            // Construct the UPSERT msg so the values can be saved in the correct MetricStore
            var um = new MetricStoreActor.UpsertMetric(saveName, msg.Measurements.Values);

            // Find the correct MetricStore actor (selected by vm name and date) and send it the UPSERT msg
            var actorName = "/user/*/MetricStore:" + msg.VmName + ":" + msg.VmDate;
            var actor     = Context.ActorSelection(actorName);

            actor.Tell(um);

            // Route the Metric unchanged to the next transform
            var series = new Messages.TransformSeries(msg.Measurements, msg.Transforms, msg.GroupID, msg.ConnectionId, msg.VmName, msg.VmDate);

            RouteTransform(series);
        }
// ---- Example 12 ----
        private void ProcessPipeline(Messages.BuildTransformSeries cmd)
        {
            // Look up the metric requested in the pipeline. Use the TryGetValue result directly
            // instead of discarding it and re-testing the out value for null.
            // NOTE(review): ToLower() is culture-sensitive; if metric keys are stored lowercased
            // elsewhere, confirm whether ToLowerInvariant should be used consistently.
            Metric metric;
            if (_metricStore.metrics.TryGetValue(cmd.MetricName.ToLower(), out metric) && metric != null)
            {
                // Create a start transform message and submit it to the first transform in the queue
                var msg = new Messages.TransformSeries(metric, cmd.Transforms, cmd.GroupID, cmd.ConnectionId, _metricStore.vmName, _metricStore.date);
                BaseTransformActor.RouteTransform(msg);

                // Create the message to return the raw untransformed data back to the client
                // (an empty Guid marks it as raw data)
                var rawMsg = new Messages.TransformSeries(metric, new Queue <Messages.Transform>(), Guid.Empty, cmd.ConnectionId,
                                                          _metricStore.vmName, _metricStore.date);
                BaseTransformActor.RouteTransform(rawMsg);
            }
            else
            {
                // ERROR the requested metric does not exist in this actor
                var json = JsonConvert.SerializeObject(_metricStore.metrics.Keys);
                _log.Error($"ERROR: Received ProcessPipeline command for a metric that does not exist. Metric requested is: {cmd.MetricName}. Available metrics are: {json}");
            }
        }
// ---- Example 13 ----
        private void CalculateTransformation(Messages.TransformSeries msg)
        {
            // Take this transform off the front of the series queue; the dequeued
            // transform's parameters are not used by this step
            var transform = msg.Transforms.Dequeue();

            // Find the max and min values in the data
            // (the unused valuesArray allocation and commented-out CopyTo were dead code and
            // have been removed)
            float max = float.MinValue;
            float min = float.MaxValue;

            foreach (var entry in msg.Measurements.Values)
            {
                max = Math.Max(max, entry.Value);
                min = Math.Min(min, entry.Value);
            }

            // Take the min value from the max as the min value is going to become zero after
            // the transform
            max -= min;

            // Make sure that the max is not zero to prevent divide by zero
            var newValues = new SortedDictionary <long, float>();

            if (max != 0.0F)
            {
                // Transform the Metric data into percentages, where max is 100% and min is 0%.
                // Values that are exactly zero are left at zero rather than being shifted.
                foreach (var entry in msg.Measurements.Values)
                {
                    newValues.Add(entry.Key, (entry.Value != 0.0F) ? (entry.Value - min) / max * 100.0F : 0.0F);
                }
            }
            else
            {
                // All values are identical, so copy over the keys and set every value to zero
                foreach (KeyValuePair <long, float> entry in msg.Measurements.Values)
                {
                    newValues.Add(entry.Key, 0.0F);
                }
            }

            // Route the new Metric to the next transform
            var metric = new Metric(msg.Measurements.Name + TRANSFORM_NAME_CONCATENATOR + TRANSFORM_NAME, newValues);
            var series = new Messages.TransformSeries(metric, msg.Transforms, msg.GroupID, msg.ConnectionId, msg.VmName, msg.VmDate);

            RouteTransform(series);
        }
// ---- Example 14 ----
        /// <summary>
        /// Removes short spikes from the measurement series. A spike is a run of at most
        /// spikeWindowLength values above baseValue that is bracketed on both sides by
        /// baseWindowLength consecutive values at or below baseValue; such runs are
        /// flattened down to baseValue.
        /// </summary>
        /// <param name="msg">The transform series whose measurements are scanned for spikes</param>
        private void CalculateTransformation(Messages.TransformSeries msg)
        {
            // Get the transform from the series
            var transform = msg.Transforms.Dequeue();

            // Obtain any changes in the default settings for the three window parameters
            int spikeWindowLength = (transform.Parameters.ContainsKey(SPIKE_WINDOW_LENGTH)) ? Int32.Parse(transform.Parameters[SPIKE_WINDOW_LENGTH]) :
                                    SPIKE_WINDOW_LENGTH_DEFAULT_VALUE;
            int baseWindowLength = (transform.Parameters.ContainsKey(BASE_WINDOW_LENGTH)) ? Int32.Parse(transform.Parameters[BASE_WINDOW_LENGTH]) :
                                   BASE_WINDOW_LENGTH_DEFAULT_VALUE;
            int baseValue = (transform.Parameters.ContainsKey(BASE_VALUE)) ? Int32.Parse(transform.Parameters[BASE_VALUE]) :
                            BASE_VALUE_DEFAULT_VALUE;

            // Scan the values for spikes that match the window sizes. A spike is determined by a series of base values followed by a
            // series of values over base followed by a return to a series of values at base. E.g. 0,0,0,5,7,0,0,0. The spike would be 5 & 7.
            // Get the values to be processed into an array
            float[] valuesArray = new float[msg.Measurements.Values.Count];
            msg.Measurements.Values.Values.CopyTo(valuesArray, 0);

            // The loop bound ensures a full [base][spike][base] pattern always fits within the array.
            // TODO complete this code, create test for this class, determine all the other transformation classes needed to analyze the results
            for (int index = 0; index <= valuesArray.Length - spikeWindowLength - (baseWindowLength * 2); /* No auto increment */)
            {
                // Determine if the current location is the start of a base window
                bool inBaseWindow = true;
                for (int startBaseIndex = index; startBaseIndex < index + baseWindowLength; startBaseIndex++)
                {
                    // If the value at the current location in the values array is not a base value then stop the search and move on
                    if (valuesArray[startBaseIndex] > baseValue)
                    {
                        inBaseWindow = false;
                        break;
                    }
                }

                // Finish the search if the start base window condition is not met; advance by one
                if (!inBaseWindow)
                {
                    index++;
                    continue;
                }

                // Determine if the current location has a window of base values at the end
                // (i.e. the baseWindowLength values immediately after the candidate spike window)
                inBaseWindow = true;
                for (int endBaseIndex = index + baseWindowLength + spikeWindowLength;
                     endBaseIndex < index + (baseWindowLength * 2) + spikeWindowLength; endBaseIndex++)
                {
                    // If the value at the current location in the values array is not a base value then stop the search and move on
                    if (valuesArray[endBaseIndex] > baseValue)
                    {
                        inBaseWindow = false;
                        break;
                    }
                }

                // Finish the search if the end base window condition is not met; advance by one
                if (!inBaseWindow)
                {
                    index++;
                    continue;
                }

                // Since the start of the range and the end of the range contain windows where the values are equal to or less than the supplied base value
                // then the conditions for a spike have been met.
                // The conditions for a spike have been detected so set the spike window of values to the base value
                for (int spikeBaseIndex = index + baseWindowLength;
                     spikeBaseIndex < index + baseWindowLength + spikeWindowLength; spikeBaseIndex++)
                {
                    valuesArray[spikeBaseIndex] = baseValue;
                }

                // A spike was found so advance the position in the array by the start base window plus the spike window,
                // so the trailing base window can serve as the leading base window of the next candidate
                index = index + baseWindowLength + spikeWindowLength;
            }

            // Create a new Metric from the keys of the one sent in the message and the new values created by removing spikes
            // (the dictionary and the array are both in ascending key order, so count walks them in lock-step)
            int count = 0;
            SortedDictionary <long, float> newValues = new SortedDictionary <long, float>();

            foreach (KeyValuePair <long, float> entry in msg.Measurements.Values)
            {
                newValues.Add(entry.Key, valuesArray[count++]);
            }
            var metric = new Metric(msg.Measurements.Name + TRANSFORM_NAME_CONCATENATOR + TRANSFORM_NAME, newValues);

            // Route the new Metric to the next transform
            var series = new Messages.TransformSeries(metric, msg.Transforms, msg.GroupID, msg.ConnectionId, msg.VmName, msg.VmDate);

            RouteTransform(series);
        }