Example #1
        public static string GetTagSuffix(this MLModelConfig config)
        {
            switch (config.Type)
            {
            case MLModelType.Cpu:
                return("cpu");

            case MLModelType.CpuNoAvx:
                return("cpu-no-avx");

            case MLModelType.Gpu:
                return("gpu");

            //case MLModelType.Tpu:
            //    return "tpu";
            //case MLModelType.TpuNoAvx:
            //    return "tpu-no-avx";
            //case MLModelType.TpuCpu:
            //    return "tpu-cpu";
            //case MLModelType.TpuCpuNoAvx:
            //    return "tpu-cpu-no-avx";
            //case MLModelType.TpuGpu:
            //    return "tpu-gpu";
            default:
                throw new Exception($"Invalid model type: {config.Type.ToString()}.");
            }
        }
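A minimal usage sketch for GetTagSuffix, assuming MLModelConfig exposes a settable Type property as in Example #2; the call site and values below are illustrative only.

        // Hypothetical caller; MLModelConfig and MLModelType come from the project itself.
        var config = new MLModelConfig { Type = MLModelType.CpuNoAvx };
        Console.WriteLine(config.GetTagSuffix());   // "cpu-no-avx"

        config.Type = MLModelType.Gpu;
        Console.WriteLine(config.GetTagSuffix());   // "gpu"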
Example #2
        public static void GetConfigFromImage(this MLModelConfig config)
        {
            // Tag format: "{ApiVersion}.{ModelVersion}-{suffix}", as produced by GetDockerTag().
            var parts    = config.Image.Tag.Split('.');
            var apiVer   = uint.Parse(parts.First());
            var modelVer = uint.Parse(parts.Last().Split('-').First());
            var tag      = parts.Last().Replace($"{modelVer}-", "");

            switch (tag)
            {
            case "cpu":
                config.Type = MLModelType.Cpu;
                break;

            case "cpu-no-avx":
                config.Type = MLModelType.CpuNoAvx;
                break;

            case "gpu":
                config.Type = MLModelType.Gpu;
                break;

            default:
                throw new Exception($"Invalid model type: {tag}.");
            }

            config.ApiVersion   = apiVer;
            config.ModelVersion = modelVer;
        }
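A small parsing sketch for GetConfigFromImage, assuming the image tag follows the "{ApiVersion}.{ModelVersion}-{suffix}" layout produced by GetDockerTag (Example #6) and that Image is initialized by the MLModelConfig constructor; the tag value is illustrative.

        // Hypothetical tag "1.3-cpu-no-avx" is split as:
        //   apiVer = 1, modelVer = 3, suffix = "cpu-no-avx"
        var config = new MLModelConfig();
        config.Image.Tag = "1.3-cpu-no-avx";
        config.GetConfigFromImage();
        // config.ApiVersion == 1, config.ModelVersion == 3, config.Type == MLModelType.CpuNoAvx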
Example #3
        public static async Task Save(this MLModelConfig config, string path)
        {
            try
            {
                var str = JsonConvert.SerializeObject(config);
                var dir = Path.GetDirectoryName(path);
                if (!Directory.Exists(dir))
                {
                    Directory.CreateDirectory(dir);
                }
                await File.WriteAllTextAsync(path, str);

                Log.Debug($"Config saved to {path}.");
            }
            catch (Exception e)
            {
                throw new Exception($"Unable to save config to file {path}.", e);
            }
        }
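A usage sketch for Save, assuming it is awaited from an async method, that the target directory is writable, and that Image is initialized by the MLModelConfig constructor as in RemoveModel below; the path is hypothetical.

        // Hypothetical call site; Save creates the directory if it does not exist yet.
        var config = new MLModelConfig { Type = MLModelType.Cpu, ApiVersion = 1, ModelVersion = 3 };
        config.Image.Tag = config.GetDockerTag();   // "1.3-cpu"
        await config.Save(Path.Combine("configs", "model.json"));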
Example #4
        public async Task RemoveModel()
        {
            _applicationStatusManager.ChangeCurrentAppStatus(Enums.Status.Working, "Working | remove model...");
            try
            {
                if (SelectedInstalledModel == null)
                {
                    throw new Exception("No selected model.");
                }

                var config = new MLModelConfig();
                config.Image.Name   = SelectedInstalledModel.Name;
                config.Type         = SelectedInstalledModel.Type;
                config.ModelVersion = SelectedInstalledModel.Version;
                config.ApiVersion   = SelectedInstalledModel.ApiVersion;
                config.Image.Tag    = config.GetDockerTag();

                using (var model = new MLModel(config))
                    await model.Remove();

                if (SelectedInstalledModel.Name == Repository &&
                    Version == $"{SelectedInstalledModel.Version}" &&
                    API_VERSION == SelectedInstalledModel.ApiVersion &&
                    Type == $"{config.Type}")
                {
                    Repository = "None";
                    Type       = "None";
                    Version    = "None";
                    Status     = "Not ready";
                    await UpdateModelStatus();
                }
            }
            catch (Exception e)
            {
                Log.Error(e, "Unable to remove ml model.");
            }
            _applicationStatusManager.ChangeCurrentAppStatus(Enums.Status.Ready, "");
        }
Example #5
        public async Task DownloadModel()
        {
            _applicationStatusManager.ChangeCurrentAppStatus(Enums.Status.Working, "Working | loading model...");
            try
            {
                if (SelectedAvailableModel == null)
                {
                    throw new Exception("No selected model.");
                }
                if (SelectedAvailableModel.Type == MLModelType.Gpu)
                {
                    if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
                    {
                        var msgbox = MessageBoxManager.GetMessageBoxStandardWindow(new MessageBoxStandardParams
                        {
                            ButtonDefinitions = ButtonEnum.Ok,
                            ContentTitle      = "OSError",
                            ContentMessage    = LocalizationContext.OsErrorMesageGPU,
                            Icon         = MessageBox.Avalonia.Enums.Icon.Error,
                            Style        = Style.None,
                            ShowInCenter = true
                        });
                        var result = await msgbox.Show();

                        throw new Exception($"Incorrect OS for {SelectedAvailableModel.Type} inference type");
                    }

                    /*
                     * if (CudafyHost.GetDeviceCount(eGPUType.Emulator) == 0)
                     * {
                     *  var msgbox = MessageBoxManager.GetMessageBoxStandardWindow(new MessageBoxStandardParams
                     *  {
                     *      ButtonDefinitions = ButtonEnum.Ok,
                     *      ContentTitle = "CUDA Error",
                     *      ContentMessage = "No CUDA devises.",
                     *      Icon = MessageBox.Avalonia.Enums.Icon.Error,
                     *      Style = Style.None,
                     *      ShowInCenter = true
                     *  });
                     *  var result = await msgbox.Show();
                     *  throw new Exception($"No CUDA devices.");
                     * }
                     */
                }

                var config = new MLModelConfig();
                config.Image.Name   = SelectedAvailableModel.Name;
                config.Type         = SelectedAvailableModel.Type;
                config.ModelVersion = SelectedAvailableModel.Version;
                config.ApiVersion   = SelectedAvailableModel.ApiVersion;
                config.Image.Tag    = config.GetDockerTag();

                using (var model = new MLModel(config))
                    await model.Download();
            }
            catch (Exception e)
            {
                Log.Error(e, "Unable to download ml model.");
            }
            _applicationStatusManager.ChangeCurrentAppStatus(Enums.Status.Ready, "");
        }
Example #6
        public static string GetDockerTag(this MLModelConfig config)
        {
            return $"{config.ApiVersion}.{config.ModelVersion}-{GetTagSuffix(config)}";
        }
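A round-trip sketch tying Examples #1, #2, and #6 together: GetDockerTag builds the tag string, and GetConfigFromImage recovers the same version numbers and model type from it. It assumes Image is initialized by the MLModelConfig constructor; the values are illustrative.

        var original = new MLModelConfig { Type = MLModelType.Gpu, ApiVersion = 2, ModelVersion = 5 };
        original.Image.Tag = original.GetDockerTag();   // "2.5-gpu"

        var parsed = new MLModelConfig();
        parsed.Image.Tag = original.Image.Tag;
        parsed.GetConfigFromImage();
        // parsed.ApiVersion == 2, parsed.ModelVersion == 5, parsed.Type == MLModelType.Gpu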