Example #1
 public BaseEvaluator(MilkrunBufferAllocationProblem problem)
 {
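     // Generate a base flow-line configuration (seed 1) and copy the problem's
     // processing rates onto its machines.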
     Flc = InstanceGenerator.Generate(1);
     for (var i = 0; i < Flc.NumMachines; i++)
     {
         Flc.Machines[i].ProcessingRate = problem.ProcessingRates[i];
     }
 }
Example #2
        private static void Main(string[] args)
        {
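            // Generate a 1000-item instance and solve it with the best-fit algorithm,
            // items ordered by descending volume and the load constraint disabled.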
            var algorithm = new BestFitAlgo();
            var instance  = InstanceGenerator.Generate(1000);
            var result    = algorithm.Solve(instance, ItemsOrder.VolumeDesc, false);

            PrintResult(result);
            Console.ReadLine();
        }
Example #3
        public static void AnalysisScenarios()
        {
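            // Benchmark sweep: for each instance size and item ordering, solve the
            // instance in 3D without the axle-load constraint and in 2D with and
            // without it, then append the resulting metrics as CSV rows.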
            // Container sizes
            var width  = 200;
            var height = 300;
            var depth  = 500;
            // Set instance sizes
            var instanceSizes = new int[] { 250, 500, 750, 1000, 2000 };
            //var instanceSizes = new int[] { 250, 500 };
            // Get all the sorts
            var itemsOrders = Enum.GetValues(typeof(ItemsOrder)).Cast<ItemsOrder>().ToList();
            // Get the algorithm
            var algorithm = new BestFitAlgo();
            var path      = "results.csv";

            using var w = new StreamWriter(path);
            // Write a header
            w.WriteLine($@"instance_size;items_order;load_constraint;3D;trucks_used;calculation_time;average_container_volume_utilization;worst_container_volume_utilization;average_axle_load");
            var instance = InstanceGenerator.Generate(itemsAmount: instanceSizes[0], width: width,
                                                      height: height, depth: depth);

            foreach (var instanceSize in instanceSizes)
            {
                instance = InstanceGenerator.GenerateMore(instance, instanceSize);
                foreach (var itemsOrder in itemsOrders)
                {
                    Console.WriteLine($@"Calculating instance_size={instance.Items.Count}, items_order={itemsOrder.ToString()}");
                    //var instanceCopyLoad = new Instance(instance);
                    var instanceCopyNoLoad   = new Instance(instance);
                    var instanceCopy2DLoad   = new Instance(instance);
                    var instanceCopy2DNoLoad = new Instance(instance);
                    instanceCopy2DLoad.Truck.FrontAxle.MaximumLoad /= 2;
                    instanceCopy2DLoad.Truck.RearAxle.MaximumLoad  /= 2;
                    //var resultWithLoad = algorithm.Solve(instanceCopyLoad, itemsOrder, loadConstraint: true);
                    var resultWithoutLoad = algorithm.Solve(instanceCopyNoLoad, itemsOrder, loadConstraint: false, threeDimensional: true);
                    var result2DLoad      = algorithm.Solve(instanceCopy2DLoad, itemsOrder, loadConstraint: true, threeDimensional: false);
                    var result2DNoLoad    = algorithm.Solve(instanceCopy2DNoLoad, itemsOrder, loadConstraint: false, threeDimensional: false);
                    //var metricsWithLoad = AlgorithmResultMetrics.FromAlgorithmResult(resultWithLoad);
                    var metricsWithoutLoad = AlgorithmResultMetrics.FromAlgorithmResult(resultWithoutLoad);
                    var metrics2DLoad      = AlgorithmResultMetrics.FromAlgorithmResult(result2DLoad);
                    var metrics2DNoLoad    = AlgorithmResultMetrics.FromAlgorithmResult(result2DNoLoad);
                    //w.WriteLine($@"{instanceCopyLoad.Items.Count};{itemsOrder.ToString()};true;{metricsWithLoad.ContainersUsed};{metricsWithLoad.CalculationTime.TotalSeconds};{metricsWithLoad.AverageContainerVolumeUtilization};{metricsWithLoad.WorstUtilizationExceptLastOne};{metricsWithLoad.AverageAxleLoadExceptLastOne}");
                    w.WriteLine($@"{instanceCopyNoLoad.Items.Count};{itemsOrder.ToString()};false;true;{metricsWithoutLoad.ContainersUsed};{metricsWithoutLoad.CalculationTime.TotalSeconds};{metricsWithoutLoad.AverageContainerVolumeUtilization};{metricsWithoutLoad.WorstUtilizationExceptLastOne};{metricsWithoutLoad.AverageAxleLoadExceptLastOne}");
                    w.WriteLine($@"{instance.Items.Count};{itemsOrder.ToString()};true;false;{metrics2DLoad.ContainersUsed};{metrics2DLoad.CalculationTime.TotalSeconds};{metrics2DLoad.AverageContainerVolumeUtilization};{metrics2DLoad.WorstUtilizationExceptLastOne};{metrics2DLoad.AverageAxleLoadExceptLastOne}");
                    w.WriteLine($@"{instance.Items.Count};{itemsOrder.ToString()};false;false;{metrics2DNoLoad.ContainersUsed};{metrics2DNoLoad.CalculationTime.TotalSeconds};{metrics2DNoLoad.AverageContainerVolumeUtilization};{metrics2DNoLoad.WorstUtilizationExceptLastOne};{metrics2DNoLoad.AverageAxleLoadExceptLastOne}");
                }
            }
        }
Example #4
        public void ByDefaultGenerateWillReturnResultFromParent()
        {
            // Fixture setup
            var expectedResult = new object();
            var parent         = new MockInstanceGenerator
            {
                CanGenerateCallback = ap => true,
                GenerateCallback    = ap => expectedResult
            };
            var dummyMemberInfo = typeof(object);

            var sut = new InstanceGenerator(parent);
            // Exercise system
            var result = sut.Generate(dummyMemberInfo);

            // Verify outcome
            Assert.AreEqual(expectedResult, result, "Generate");
            // Teardown
        }
Example #5
        private static void ParseArgs(string[] args)
        {
            var structuredArgs = StructuredArguments.FromStrings(args);

            void BatchSimulation()
            {
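                // Simulate flow lines for the given seed range and persist the
                // resulting training data to disk.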
                var fromSeed    = structuredArgs.AsIntOrDefault("From", 1);
                var toSeed      = structuredArgs.AsIntOrDefault("To", 10);
                var td          = BatchSimulator.LinesFromSeedRange(fromSeed, toSeed);
                var outFilename = $"results_from_{fromSeed}_to_{toSeed}.bin";

                TrainingDataPersistence.SaveToDisk(td, outFilename);
            }

            void BatchSimulationOptimizationBased()
            {
                var td = OptimizationBasedGenerator.BatchGenerateTrainingData(100, 100);

                TrainingDataPersistence.SaveToDisk(td, "new_results.bin");
            }

            void TrainNetwork()
            {
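                // Load saved training data, split it 50/50 into training and validation
                // sets, train a network and save the model to model.hdf5.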
                var td    = TrainingDataPersistence.LoadFromDisk(structuredArgs.AsString("Filename"));
                var tvd   = MlUtils.Split(td, 0.5f, false);
                var model = NetworkTrainer.TrainNetworkWithData(tvd.Training, tvd.Validation);

                model.Save("model.hdf5");
            }

            void TrainNetworkOrthogonalSampling()
            {
                var td    = TrainingDataPersistence.ParseCsv(structuredArgs.AsString("Filename"));
                var tvd   = MlUtils.Split(td, 0.999f, true);
                var model = NetworkTrainer.TrainNetworkWithData(tvd.Training, tvd.Validation);

                model.Save("modelOrtho.hdf5");
            }

            void JobGeneration()
            {
                JobGenerator.GenerateJobs();
            }

            void MergeResults()
            {
                var mergedData = MergeResultFiles.MergeDataInPath(".", ".bin");

                TrainingDataPersistence.SaveToDisk(mergedData, "merged.bin");
            }

            void PrintData()
            {
                var maxCount = structuredArgs.AsIntOrDefault("NumRows", int.MaxValue);
                var samples  = TrainingDataPersistence.LoadFromDisk(structuredArgs.AsString("Filename")).Samples;

                Console.WriteLine(
                    $"Overall number of samples is {samples.Count}. Now showing up to {maxCount} samples...");
                var ctr = 0;

                foreach (var sample in samples)
                {
                    Console.WriteLine(sample);
                    if (ctr++ >= maxCount)
                    {
                        break;
                    }
                }
            }

            void Optimize()
            {
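                // Solve a generated buffer-allocation problem with the selected method,
                // using an ONNX-based production-rate predictor, then compare the
                // predicted and simulated production rates of the solution.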
                var methodName = structuredArgs.AsStringOrDefault("Method", "LocalSolver");
                var problem    = ProblemInstanceGenerator.Generate(23);
                BaseProductionRatePredictor predictor = null;

                //predictor = new KerasNeuralProductionRatePredictor(ModelPersistence.LoadFromDisk("model.hdf5"));
                predictor = new OnnxNeuralProductionRatePredictor("converted.onnx");
                //predictor = new MlProductionRatePredictor("model.zip");
                MilkrunBufferAllocationSolution sol = null;

                switch (methodName)
                {
                case "SimulatedAnnealing":
                    sol = SimAnnealOptimizer.Solve(problem, predictor, 1000, 1.0f);
                    break;

                case "LocalSolver":
                    //var evaluator = new SimulationEvaluator(problem);
                    var evaluator = new PredictorBasedEvaluator(problem, predictor);
                    sol = LocalSolverOptimizer.Solve(problem, evaluator);
                    break;
                }

                Console.WriteLine("Solution of optimization = {0}", sol);
                Console.WriteLine("Production rate from predictor = {0}", predictor.Predict(sol.ToSample(problem.ProcessingRates)));
                Console.WriteLine("Production rate from simulation = {0}", SimulationRunner.ProductionRateForConfiguration(sol.ToFlowlineConfiguration(problem.ProcessingRates)));
                Console.WriteLine("Minimum production rate = {0}", problem.MinProductionRate);
            }

            void TrainForest()
            {
                var       td          = TrainingDataPersistence.LoadFromDisk(structuredArgs.AsString("Filename"));
                var       tvd         = MlUtils.Split(td, 0.999f, true);
                MLContext context     = new MLContext(23);
                var       transformer = ModelTrainer.TrainModelWithData(context, tvd.Training, tvd.Validation, out var schema);

                context.Model.Save(transformer, schema, "model.zip");
            }

            void AutoMl()
            {
                var       td      = TrainingDataPersistence.LoadFromDisk(structuredArgs.AsString("Filename"));
                var       tvd     = MlUtils.Split(td, 1.0f, true);
                MLContext context = new MLContext(23);

                ModelSearch.AutoMlOnDataset(context, tvd.Training, tvd.Validation);
            }

            void DumpPredictionErrors()
            {
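                // Predict production rates for 100 validation samples and write the
                // deviations from the recorded rates to deviations.csv.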
                var td  = TrainingDataPersistence.LoadFromDisk(structuredArgs.AsString("Filename"));
                var tvd = MlUtils.Split(td, 0.5f, false);
                var dnn = new OnnxNeuralProductionRatePredictor("converted.onnx");

                PredictionSample Predict(Sample sample)
                {
                    var   predictedRate = dnn.Predict(sample);
                    float deviation     = predictedRate - sample.ProductionRate;

                    return new PredictionSample(sample, deviation);
                }

                var psamples = tvd.Validation.Samples.Take(100).Select(Predict).ToList();

                File.WriteAllText("deviations.csv", CsvSerializer.SerializeToCsv(psamples));
            }

            void TestExhaustiveGenerator()
            {
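                // Describe the feature space of a six-machine line, draw samples with an
                // orthogonal Latin hypercube, write them to CSV and report how many
                // distinct values each feature received.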
                int numMachines = 6;
                int numBuffers  = numMachines - 1;
                var features    = new List<FeatureDescription> {
                    new FeatureDescription {
                        IsDiscrete = false,
                        LowerBound = 30,
                        UpperBound = 120,
                        Name       = DefaultFeatures.MilkRunCycleLength.ToString()
                    }
                };

                features.AddRange(Enumerable.Range(0, numMachines).Select(i =>
                                                                          new FeatureDescription {
                    IsDiscrete = false,
                    LowerBound = 0.8,
                    UpperBound = 1.2,
                    Name       = DefaultFeatures.ProcessingRate + $"{i+1}"
                }));
                features.AddRange(Enumerable.Range(0, numMachines).Select(i =>
                                                                          new FeatureDescription {
                    IsDiscrete = false,
                    LowerBound = 0.5,
                    UpperBound = 1.5,
                    Name       = DefaultFeatures.MaterialRatio + $"{i+1}"
                }));
                features.AddRange(Enumerable.Range(0, numBuffers).Select(i =>
                                                                         new FeatureDescription {
                    IsDiscrete = true,
                    LowerBound = 0,
                    UpperBound = 80,
                    Name       = DefaultFeatures.BufferSize + $"{i+1}"
                }));

                // Big ortho experiment
                //int targetSampleCount = 2000000;
                //int subCubeSplitFactor = 2;

                // Small latin only experiment
                int targetSampleCount  = 2000000;
                int subCubeSplitFactor = 1;

                int numCubes  = Utils.Pow(subCubeSplitFactor, features.Count);
                int numValues = (int)Math.Ceiling(targetSampleCount / (double)numCubes) * numCubes;
                var samples   = OrthoLatinHyperCube.PickSamples(features.ToArray(), numValues, subCubeSplitFactor);
                var lines     = new List<string> {
                    string.Join(",", samples.First().ColumnNames())
                };

                lines.AddRange(samples.Select(sample => string.Join(",", sample.ToFloats())));
                File.WriteAllText("ortholatinhypercube.csv", string.Join("\n", lines));

                Console.WriteLine("\nDistinct values");
                for (int i = 0; i < numMachines; i++)
                {
                    if (i < numBuffers)
                    {
                        Console.WriteLine($"Distinct buffer {i+1} sizes = {samples.Select(s => s.BufferSizes[i]).Distinct().Count()}");
                    }
                    Console.WriteLine($"Distinct order up to levels {i+1} = {samples.Select(s => s.OrderUpToLevels[i]).Distinct().Count()}");
                    Console.WriteLine($"Distinct processing rates {i+1} = {samples.Select(s => s.ProcessingRates[i]).Distinct().Count()}");
                }
                Console.WriteLine($"Distinct milk run cycle lengths = {samples.Select(s => s.MilkrunCycleLength).Distinct().Count()}");
            }

            void GenerateInstance()
            {
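                // Generate a flow-line configuration from the given seed and save it as JSON.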
                int    seed           = structuredArgs.AsInt("Seed");
                string filename       = structuredArgs.AsStringOrDefault("Filename", "instance.json");
                var    flowLineConfig = InstanceGenerator.Generate(seed);

                Utils.SaveObjectAsJson(flowLineConfig, filename);
            }

            var availableActions = new List<Action> {
                BatchSimulation,
                TrainNetwork,
                TrainNetworkOrthogonalSampling,
                JobGeneration,
                MergeResults,
                PrintData,
                Optimize,
                TrainForest,
                AutoMl,
                BatchSimulationOptimizationBased,
                DumpPredictionErrors,
                TestExhaustiveGenerator,
                GenerateInstance
            };

            var actionMappings =
                availableActions.ToDictionary(action => Utils.NameOfLocalActionFunction("ParseArgs", action),
                                              action => action);

            if (args.Length >= 1)
            {
                var action = structuredArgs.GetAction();
                if (actionMappings.ContainsKey(action))
                {
                    actionMappings[action]();
                    return;
                }
            }

            ShowUsage(actionMappings);
        }