public static CloudBlockBlob GetCloudBlockBlob(this Model model)
        {
            // Resolve the configured storage account, build a blob client from it,
            // and wrap the model's search URL in a CloudBlockBlob reference.
            var storageAccount = new Storage.Helper().GetStorageAccount();
            var blobClient = storageAccount.CreateCloudBlobClient();

            return new CloudBlockBlob(model.Search.Url, blobClient);
        }
        public static void Run([TimerTrigger("0 */1 * * * *", RunOnStartup = false)]TimerInfo myTimer, ILogger log)
#endif            
        {
            // Timer-driven training pass: loads all labeled training data into the
            // model, trains it, then re-submits each labeled blob's label set to the
            // labeling endpoint, and finally invokes the train-model web service.
            Engine engine = new Engine(log);
            try
            {
                CloudStorageAccount storageAccount = engine.StorageAccount;
                CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
                //*****TODO***** externalize labeled data container name.
                CloudBlobContainer labeledDataContainer = blobClient.GetContainerReference("labeleddata");
                Model model = new Model(log);

                // NOTE(review): the original built a second storage account/client via
                // Storage.Helper alongside engine.StorageAccount. Both are kept here to
                // preserve behavior — TODO confirm they resolve to the same account and
                // consolidate.
                var storageHelper = new Storage.Helper();
                var helperStorageAccount = storageHelper.GetStorageAccount();
                var helperBlobClient = helperStorageAccount.CreateCloudBlobClient();
                var helperLabeledDataContainer = helperBlobClient.GetContainerReference("labeleddata");

                // Single HttpClient reused for all calls in this invocation (the
                // original allocated two instances).
                var client = new HttpClient();

                // with a container load training tags
                if (labeledDataContainer.ListBlobs(null, false) != null)
                {
                    //*****TODO***** Where should search be initialized?  Azure search does not offer CLI calls to configure all of search so it needs to be initialized befor it can be used as a service.  Look at putting it in engine.  Recognize this is not the same thing as migrating search to a non-static mode and then newing it up.
                    //Search.InitializeSearch();

                    //Add full set of labeled training data to the model
                    //*****TODO***** add logic to only add incremental labeled data to model
                    string addLabeledDataResult = model.AddLabeledData();

                    //Train model using latest labeled training data.
                    string trainingResultsString = model.Train();

                    foreach (var item in helperLabeledDataContainer.ListBlobs(null, false))
                    {
                        if (item.GetType() == typeof(CloudBlockBlob))
                        {
                            var dataCloudBlockBlob = (CloudBlockBlob)item;

                            // BUG FIX: the original called ContentMD5.ToString() BEFORE
                            // checking for null, which threw NullReferenceException
                            // whenever no hash was set and made the recovery branch dead.
                            var bindingHash = dataCloudBlockBlob.Properties.ContentMD5;
                            if (bindingHash == null)
                            {
                                //compute the file hash as this will be added to the meta data to allow for file version validation
                                var blobMd5 = dataCloudBlockBlob.ToString().CalculateMD5Hash();
                                if (blobMd5 == null)
                                {
                                    log.LogInformation("\nWarning: Blob Hash calculation failed and will not be included in file information blob, continuing operation.");
                                    // BUG FIX: without a hash the bound JSON blob cannot be
                                    // located; skip this blob instead of dereferencing null.
                                    continue;
                                }
                                dataCloudBlockBlob.Properties.ContentMD5 = blobMd5;
                                bindingHash = blobMd5;
                            }
                            //trim the 2 "equals" off the trailing end of the hash or the http send will fail either using the client or raw http calls.
                            bindingHash = bindingHash.Substring(0, bindingHash.Length - 2);

                            //Get the content from the bound JSON file and instanciate a JsonBlob class then retrieve the labels collection from the Json to add to the image.
                            var json = storageHelper.DownloadBlobAsString(helperStorageAccount, "json", bindingHash);
                            // BUG FIX: renamed from 'model' — the original re-declared
                            // 'model' while the outer training Model was in scope (CS0136).
                            var blobModel = json.ToStorageModel();
                            var labels = Uri.EscapeDataString(JsonConvert.SerializeObject(blobModel.Labels));

                            //construct and call model URL then fetch response
                            // the model always sends the label set in the message body with the name LabelsJson.  If your model needs other values in the URL then use
                            //{ {environment variable name}}.
                            // So the example load labels function in the sameple model package would look like this:
                            // https://branddetectionapp.azurewebsites.net/api/loadimagetags/?projectID={{ProjectID}}
                            // The orchestration engine appends the labels json file to the message body.
                            // http://localhost:7071/api/LoadImageTags/?projectID=8d9d12d1-5d5c-4893-b915-4b5b3201f78e&labelsJson={%22Labels%22:[%22Hemlock%22,%22Japanese%20Cherry%22]}
                            var addLabeledDataUrl = ConstructModelRequestUrl(blobModel.Search.Url, labels, log);
                            //*****TODO***** move to an async architecture
                            var response = client.GetAsync(addLabeledDataUrl).Result;
                            var responseString = response.Content.ReadAsStringAsync().Result;
                            if (string.IsNullOrEmpty(responseString)) throw new MissingRequiredObjectException($"\nresponseString not generated from URL: {addLabeledDataUrl}");

                            log.LogInformation($"Successfully added blob: {dataCloudBlockBlob.Name} with labels: {JsonConvert.SerializeObject(blobModel.Labels)}");
                        }
                    }
                }

                //Invoke the train model web service call
                var trainModelUrl = Engine.GetEnvironmentVariable("TrainModelServiceEndpoint", log);
                if (string.IsNullOrEmpty(trainModelUrl)) throw new EnvironmentVariableNotSetException("TrainModelServiceEndpoint environment variable not set");
                var trainResponse = client.GetAsync(trainModelUrl).Result;
                var trainResponseString = trainResponse.Content.ReadAsStringAsync().Result;
                if (string.IsNullOrEmpty(trainResponseString)) throw new MissingRequiredObjectException($"\nresponseString not generated from URL: {trainModelUrl}");

                log.LogInformation($"C# Timer trigger function executed at: {DateTime.Now}");
            }
            catch (Exception e)
            {
                // BUG FIX: the original passed e.Message as an argument to a template
                // with no placeholder, so the error text was never logged.
                log.LogError(e, "Error processing training timer: {Message}", e.Message);
            }
        }
        public static async Task RunAsync([BlobTrigger("pendingevaluation/{blobName}", Connection = "AzureWebJobsStorage")] Stream myBlob, string blobName, ILogger log)
        {
            // Blob-driven evaluation pass: submits the pending blob to the evaluation
            // model, then routes the blob to the evaluated / model-validation /
            // pending-supervision container based on the returned confidence score,
            // and finally writes an analysis JSON record to blob storage.
            try
            {
                // need to add/fix json storage so there is only one container and need to
                // NOTE(review): every environment variable is read up front so a missing
                // setting fails fast; some values are not otherwise used in this method.
                var pendingEvaluationStorageContainerName = GetEnvironmentVariable("pendingEvaluationStorageContainerName", log);
                var evaluatedDataStorageContainerName = GetEnvironmentVariable("evaluatedDataStorageContainerName", log);
                var jsonStorageContainerName = GetEnvironmentVariable("jsonStorageContainerName", log);
                var pendingSupervisionStorageContainerName = GetEnvironmentVariable("pendingSupervisionStorageContainerName", log);
                var labeledDataStorageContainerName = GetEnvironmentVariable("labeledDataStorageContainerName", log);
                var modelValidationStorageContainerName = GetEnvironmentVariable("modelValidationStorageContainerName", log);
                var pendingNewModelStorageContainerName = GetEnvironmentVariable("pendingNewModelStorageContainerName", log);
                var storageConnection = GetEnvironmentVariable("AzureWebJobsStorage", log);
                var confidenceJsonPath = GetEnvironmentVariable("confidenceJSONPath", log);
                var dataTagsBlobName = GetEnvironmentVariable("dataTagsBlobName", log);
                var confidenceThreshold = Convert.ToDouble(GetEnvironmentVariable("confidenceThreshold", log));
                var modelVerificationPercent = Convert.ToDouble(GetEnvironmentVariable("modelVerificationPercentage", log));

                //------------------------This section retrieves the blob needing evaluation and calls the evaluation service for processing.-----------------------

                // Create Reference to Azure Storage Account
                var storageAccount = CloudStorageAccount.Parse(storageConnection);
                var blobClient = storageAccount.CreateCloudBlobClient();

                //Get a reference to a container, if the container does not exist create one then get the reference to the blob you want to evaluate."
                var json = new Storage.Helper().DownloadBlobAsString(storageAccount, jsonStorageContainerName, blobName);
                var model = json.ToStorageModel();
                if (model == null)
                {
                    throw new MissingRequiredObjectException("\nMissing dataEvaluating blob object.");
                }

                //compute the file hash as this will be added to the meta data to allow for file version validation
                var blobMd5 = model.ToString().CalculateMD5Hash();
                if (blobMd5 == null)
                {
                    log.LogInformation("\nWarning: Blob Hash calculation failed and will not be included in file information blob, continuing operation.");
                }
                else
                {
                    model.Md5Hash = blobMd5;
                }

                //****Currently only working with public access set on blob folders
                //Generate a URL with SAS token to submit to analyze image API
                //string dataEvaluatingSas = GetBlobSharedAccessSignature(dataEvaluating);
                var dataEvaluatingUrl = model.Search.Url.ToString(); //+ dataEvaluatingSas;

                //package the file contents to send as http request content
                var dataEvaluatingContent = new MemoryStream();
                await model.GetAzureBlob().DownloadToStreamAsync(dataEvaluatingContent);
                // BUG FIX: rewind the stream — DownloadToStreamAsync leaves the position
                // at the end, so StreamContent would otherwise send an empty body.
                dataEvaluatingContent.Position = 0;

                HttpContent dataEvaluatingStream = new StreamContent(dataEvaluatingContent);
                var content = new MultipartFormDataContent();
                content.Add(dataEvaluatingStream, "name");

                //Make a request to the model service passing the file URL
                var responseString = Helper.GetEvaluationResponseString(dataEvaluatingUrl, content, log);
                if (responseString == "")
                {
                    throw new MissingRequiredObjectException("\nresponseString not generated from URL: " + dataEvaluatingUrl);
                }

                //deserialize response JSON, get confidence score and compare with confidence threshold
                var analysisJson = JObject.Parse(responseString);
                // BUG FIX: the original cast the token to double BEFORE the null check,
                // so a missing confidence token threw the wrong exception and the guard
                // below it was dead code. Check first, cast once.
                var confidenceToken = analysisJson.SelectToken(confidenceJsonPath);
                if (confidenceToken == null)
                {
                    throw new MissingRequiredObjectException("\nNo confidence value at " + confidenceJsonPath + " from environment variable ConfidenceJSONPath.");
                }
                var confidence = (double)confidenceToken;

                //--------------------------------This section processes the results of the analysis and transferes the blob to the container responsible for the next appropriate stage of processing.-------------------------------

                //model successfully analyzed content
                if (confidence >= confidenceThreshold)
                {
                    var evaluatedData = Search.GetBlob(storageAccount, evaluatedDataStorageContainerName, blobName, log);
                    if (evaluatedData == null)
                    {
                        throw new MissingRequiredObjectException("\nMissing evaluatedData " + blobName + " destination blob in container " + evaluatedDataStorageContainerName);
                    }
                    // BUG FIX: await instead of .Wait() — blocking inside an async
                    // method risks thread-pool starvation and deadlocks.
                    await CopyAzureBlobToAzureBlob(storageAccount, model.GetAzureBlob(), evaluatedData, log);

                    //pick a random number of successfully analyzed content blobs and submit them for supervision verification.
                    var rnd = new Random();
                    if (Math.Round(rnd.NextDouble(), 2) <= modelVerificationPercent)
                    {
                        var modelValidation = Search.GetBlob(storageAccount, modelValidationStorageContainerName, blobName, log);
                        if (modelValidation == null)
                        {
                            log.LogInformation("\nWarning: Model validation skipped for " + blobName + " because of missing evaluatedData " + blobName + " destination blob in container " + modelValidationStorageContainerName);
                        }
                        else
                        {
                            await MoveAzureBlobToAzureBlob(storageAccount, model.AzureBlob, modelValidation, log);
                        }
                    }
                    await model.AzureBlob.DeleteIfExistsAsync();
                }

                //model was not sufficiently confident in its analysis
                else
                {
                    var pendingSupervision = Search.GetBlob(storageAccount, pendingSupervisionStorageContainerName, blobName, log);
                    if (pendingSupervision == null)
                    {
                        throw new MissingRequiredObjectException("\nMissing pendingSupervision " + blobName + " destination blob in container " + pendingSupervisionStorageContainerName);
                    }

                    await MoveAzureBlobToAzureBlob(storageAccount, model.AzureBlob, pendingSupervision, log);
                }

                //----------------------------This section collects information about the blob being analyzied and packages it in JSON that is then written to blob storage for later processing-----------------------------------

                var blobAnalysis =
                    new JObject(
                        new JProperty("id", Guid.NewGuid().ToString()),
                        new JProperty("blobInfo",
                                      new JObject(
                                          new JProperty("name", blobName),
                                          new JProperty("url", model.AzureBlob.Uri.ToString()),
                                          new JProperty("modified", model.AzureBlob.Properties.LastModified.ToString()),
                                          new JProperty("hash", blobMd5)
                                          )
                                      )
                        );

                //create environment JSON object
                blobAnalysis.Add(Engine.GetEnvironmentJson(log));
                blobAnalysis.Merge(analysisJson);

                //Note: all json files get writted to the same container as they are all accessed either by discrete name or by azure search index either GUID or Hash.
                var jsonBlob = Search.GetBlob(storageAccount, jsonStorageContainerName, (string)blobAnalysis.SelectToken("blobInfo.id") + ".json", log);
                jsonBlob.Properties.ContentType = "application/json";
                var serializedJson = JsonConvert.SerializeObject(blobAnalysis, Formatting.Indented, new JsonSerializerSettings());
                Stream memStream = new MemoryStream(Encoding.UTF8.GetBytes(serializedJson));
                if (memStream.Length == 0)
                {
                    throw new ZeroLengthFileException("\nencoded JSON memory stream is zero length and cannot be writted to blob storage");
                }
                await jsonBlob.UploadFromStreamAsync(memStream);

                log.LogInformation($"C# Blob trigger function Processed blob\n Name:{blobName} \n Size: {myBlob.Length} Bytes");
            }
            catch (Exception e)
            {
                // The original had two byte-identical catch blocks (one for
                // MissingRequiredObjectException, one general); a single handler
                // preserves the exact same behavior.
                log.LogInformation("\n" + blobName + " could not be analyzed with message: " + e.Message);
            }
        }