public static Drive SampleDrive(Vessel vessel, double size = 0, string filename = "")
{
	Drive result = null;
	foreach (var drive in GetDrives(vessel))
	{
		if (result == null)
		{
			result = drive;
			continue;
		}

		double available = drive.SampleCapacityAvailable(filename);
		if (size > double.Epsilon && available < size)
		{
			continue;
		}

		if (available > result.SampleCapacityAvailable(filename))
		{
			result = drive;
		}
	}

	if (result == null)
	{
		// vessel has no drive.
		return new Drive("Broken", 0, 0);
	}

	return result;
}
private void UpdateCapacity()
{
	if (drive == null)
	{
		return;
	}

	double mass = 0;
	foreach (var sample in drive.samples.Values)
	{
		mass += sample.mass;
	}
	totalSampleMass = mass;

	if (effectiveDataCapacity < 0 || effectiveSampleCapacity < 0 || IsPrivate())
	{
		Fields["Capacity"].guiActive = false;
		Fields["Capacity"].guiActiveEditor = false;
		return;
	}

	double availableDataCapacity = effectiveDataCapacity;
	int availableSlots = effectiveSampleCapacity;

	if (Lib.IsFlight())
	{
		availableDataCapacity = drive.FileCapacityAvailable();
		availableSlots = Lib.SampleSizeToSlots(drive.SampleCapacityAvailable());
	}

	Capacity = string.Empty;
	if (availableDataCapacity > double.Epsilon)
	{
		Capacity = Lib.HumanReadableDataSize(availableDataCapacity);
	}

	if (availableSlots > 0)
	{
		if (Capacity.Length > 0)
		{
			Capacity += " ";
		}
		Capacity += Lib.HumanReadableSampleSize(availableSlots);
	}

	if (Lib.IsFlight() && totalSampleMass > double.Epsilon)
	{
		Capacity += " " + Lib.HumanReadableMass(totalSampleMass);
	}
}
public Drive BestDrive(double minDataCapacity = 0, int minSlots = 0)
{
	Drive result = null;
	foreach (var drive in drives.Values)
	{
		if (result == null)
		{
			result = drive;
			continue;
		}

		if (minDataCapacity > double.Epsilon && drive.FileCapacityAvailable() < minDataCapacity)
		{
			continue;
		}
		if (minSlots > 0 && drive.SampleCapacityAvailable() < minSlots)
		{
			continue;
		}

		if (minDataCapacity > double.Epsilon && drive.FileCapacityAvailable() > result.FileCapacityAvailable())
		{
			result = drive;
		}
		if (minSlots > 0 && drive.SampleCapacityAvailable() > result.SampleCapacityAvailable())
		{
			result = drive;
		}
	}

	if (result == null)
	{
		// vessel has no drive.
		return new Drive("Broken", 0, 0);
	}

	return result;
}
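// Illustrative usage sketch (not part of the original source). BestDrive never returns
// null: a vessel without drives yields a dummy "Broken" drive with zero capacity, so a
// caller only needs to check capacity, not nullity. 'vd' (the object owning the drives
// dictionary) and 'fileSize' are hypothetical stand-ins.
Drive target = vd.BestDrive(minDataCapacity: fileSize);
bool canStoreWholeFile = target.FileCapacityAvailable() >= fileSize;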
private void UpdateCapacity()
{
	totalSampleMass = 0;
	foreach (var sample in drive.samples.Values)
	{
		totalSampleMass += sample.mass;
	}

	double availableDataCapacity = dataCapacity;
	int availableSlots = sampleCapacity;

	if (Lib.IsFlight())
	{
		availableDataCapacity = drive.FileCapacityAvailable();
		availableSlots = Lib.SampleSizeToSlots(drive.SampleCapacityAvailable());
	}

	Capacity = string.Empty;
	if (availableDataCapacity > double.Epsilon)
	{
		Capacity = Lib.HumanReadableDataSize(availableDataCapacity);
	}

	if (availableSlots > 0)
	{
		if (Capacity.Length > 0)
		{
			Capacity += " ";
		}
		Capacity += Lib.HumanReadableSampleSize(availableSlots);
	}

	if (Lib.IsFlight() && totalSampleMass > double.Epsilon)
	{
		Capacity += " " + Lib.HumanReadableMass(totalSampleMass);
	}
}
/// <summary> Get a drive for storing samples. Will return null if there are no drives on the vessel </summary>
public static Drive SampleDrive(VesselData vesselData, double size = 0, SubjectData subject = null)
{
	Drive result = null;
	foreach (var drive in GetDrives(vesselData))
	{
		if (result == null)
		{
			result = drive;
			continue;
		}

		double available = drive.SampleCapacityAvailable(subject);
		if (size > double.Epsilon && available < size)
		{
			continue;
		}

		if (available > result.SampleCapacityAvailable(subject))
		{
			result = drive;
		}
	}

	return result;
}
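// Illustrative usage sketch (not part of the original source). Unlike the older overload
// above, this variant returns null instead of a "Broken" drive when the vessel has no
// drives, so callers must null-check. 'vesselData', 'sampleSize' and 'subject' are
// hypothetical stand-ins; the enclosing class is assumed to be Drive.
Drive sampleDrive = Drive.SampleDrive(vesselData, sampleSize, subject);
if (sampleDrive != null && sampleDrive.SampleCapacityAvailable(subject) >= sampleSize)
{
	// enough sample space is available on the selected drive
}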
private static string TestForIssues(Vessel v, Resource_info ec, Experiment experiment, uint hdId, bool broken,
	double remainingSampleMass, bool didPrepare, bool isShrouded, string last_subject_id)
{
	var subject_id = Science.Generate_subject_id(experiment.experiment_id, v);

	if (broken)
	{
		return "broken";
	}

	if (isShrouded && !experiment.allow_shrouded)
	{
		return "shrouded";
	}

	bool needsReset = experiment.crew_reset.Length > 0
		&& !string.IsNullOrEmpty(last_subject_id) && subject_id != last_subject_id;
	if (needsReset)
	{
		return "reset required";
	}

	if (ec.amount < double.Epsilon && experiment.ec_rate > double.Epsilon)
	{
		return "no Electricity";
	}

	if (!string.IsNullOrEmpty(experiment.crew_operate))
	{
		var cs = new CrewSpecs(experiment.crew_operate);
		if (!cs && Lib.CrewCount(v) > 0)
		{
			return "crew on board";
		}
		else if (cs && !cs.Check(v))
		{
			return cs.Warning();
		}
	}

	if (!experiment.sample_collecting && remainingSampleMass < double.Epsilon && experiment.sample_mass > double.Epsilon)
	{
		return "depleted";
	}

	if (!didPrepare && !string.IsNullOrEmpty(experiment.crew_prepare))
	{
		return "not prepared";
	}

	string situationIssue = Science.TestRequirements(experiment.experiment_id, experiment.requires, v);
	if (situationIssue.Length > 0)
	{
		return Science.RequirementText(situationIssue);
	}

	var experimentSize = Science.Experiment(subject_id).max_amount;
	double chunkSize = Math.Min(experiment.data_rate * Kerbalism.elapsed_s, experimentSize);
	Drive drive = GetDrive(experiment, v, hdId, chunkSize, subject_id);

	var isFile = experiment.sample_mass < double.Epsilon;
	double available = 0;
	if (isFile)
	{
		available = drive.FileCapacityAvailable();
		available += Cache.WarpCache(v).FileCapacityAvailable();
	}
	else
	{
		available = drive.SampleCapacityAvailable(subject_id);
	}

	if (Math.Min(experiment.data_rate * Kerbalism.elapsed_s, experimentSize) > available)
	{
		return insufficient_storage;
	}

	return string.Empty;
}
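// Illustrative usage sketch (not part of the original source): a caller (hypothetically
// the per-tick experiment update) treats a non-empty return value as the human-readable
// reason the experiment cannot run right now, and an empty string as "no issues".
string issue = TestForIssues(v, ec, experiment, hdId, broken, remainingSampleMass, didPrepare, isShrouded, last_subject_id);
bool canRun = string.IsNullOrEmpty(issue);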
private static bool DoRecord(Experiment experiment, string subject_id, Vessel vessel, Resource_info ec,
	uint hdId, Vessel_resources resources, List<KeyValuePair<string, double>> resourceDefs,
	double remainingSampleMass, double dataSampled, out double sampledOut, out double remainingSampleMassOut)
{
	// default output values for early returns
	sampledOut = dataSampled;
	remainingSampleMassOut = remainingSampleMass;

	var exp = Science.Experiment(subject_id);
	if (Done(exp, dataSampled))
	{
		return true;
	}

	double elapsed = Kerbalism.elapsed_s;
	double chunkSize = Math.Min(experiment.data_rate * elapsed, exp.max_amount);
	double massDelta = experiment.sample_mass * chunkSize / exp.max_amount;

	Drive drive = GetDrive(experiment, vessel, hdId, chunkSize, subject_id);

	// on high time warp this chunk size could be too big, but we could store a sizable amount if we process less
	bool isFile = experiment.sample_mass < float.Epsilon;
	double maxCapacity = isFile ? drive.FileCapacityAvailable() : drive.SampleCapacityAvailable(subject_id);

	Drive warpCacheDrive = null;
	if (isFile)
	{
		if (drive.GetFileSend(subject_id))
		{
			warpCacheDrive = Cache.WarpCache(vessel);
		}
		if (warpCacheDrive != null)
		{
			maxCapacity += warpCacheDrive.FileCapacityAvailable();
		}
	}

	double factor = Rate(vessel, chunkSize, maxCapacity, elapsed, ec, experiment.ec_rate, resources, resourceDefs);
	if (factor < double.Epsilon)
	{
		return false;
	}

	chunkSize *= factor;
	massDelta *= factor;
	elapsed *= factor;

	bool stored = false;
	if (chunkSize > double.Epsilon)
	{
		if (isFile)
		{
			if (warpCacheDrive != null)
			{
				double s = Math.Min(chunkSize, warpCacheDrive.FileCapacityAvailable());
				stored = warpCacheDrive.Record_file(subject_id, s, true);

				if (chunkSize > s) // only write to persisted drive if the data cannot be transmitted in this tick
				{
					stored &= drive.Record_file(subject_id, chunkSize - s, true);
				}
			}
			else
			{
				stored = drive.Record_file(subject_id, chunkSize, true);
			}
		}
		else
		{
			stored = drive.Record_sample(subject_id, chunkSize, massDelta);
		}
	}

	if (!stored)
	{
		return false;
	}

	// consume resources
	ec.Consume(experiment.ec_rate * elapsed, "experiment");
	foreach (var p in resourceDefs)
	{
		resources.Consume(vessel, p.Key, p.Value * elapsed, "experiment");
	}

	dataSampled += chunkSize;
	dataSampled = Math.Min(dataSampled, exp.max_amount);
	sampledOut = dataSampled;

	if (!experiment.sample_collecting)
	{
		remainingSampleMass -= massDelta;
		remainingSampleMass = Math.Max(remainingSampleMass, 0);
	}
	remainingSampleMassOut = remainingSampleMass;

	return true;
}
private static bool DoRecord(Experiment experiment, string subject_id, Vessel vessel, Resource_info ec,
	uint hdId, Vessel_resources resources, List<KeyValuePair<string, double>> resourceDefs,
	double remainingSampleMass, double dataSampled, out double sampledOut, out double remainingSampleMassOut)
{
	var exp = Science.Experiment(subject_id);
	if (Done(exp, dataSampled))
	{
		sampledOut = dataSampled;
		remainingSampleMassOut = remainingSampleMass;
		return true;
	}

	double elapsed = Kerbalism.elapsed_s;
	double chunkSize = Math.Min(experiment.data_rate * elapsed, exp.max_amount);
	double massDelta = experiment.sample_mass * chunkSize / exp.max_amount;

	Drive drive = GetDrive(experiment, vessel, hdId, chunkSize, subject_id);

	// on high time warp this chunk size could be too big, but we could store a sizable amount if we process less
	bool isFile = experiment.sample_mass < float.Epsilon;
	double maxCapacity = isFile ? drive.FileCapacityAvailable() : drive.SampleCapacityAvailable(subject_id);

	if (maxCapacity < chunkSize)
	{
		double factor = maxCapacity / chunkSize;
		chunkSize *= factor;
		massDelta *= factor;
		elapsed *= factor;
	}

	foreach (var p in resourceDefs)
	{
		resources.Consume(vessel, p.Key, p.Value * elapsed, "experiment");
	}

	bool stored = false;
	if (isFile)
	{
		stored = drive.Record_file(subject_id, chunkSize, true);
	}
	else
	{
		stored = drive.Record_sample(subject_id, chunkSize, massDelta);
	}

	if (stored)
	{
		// consume ec
		ec.Consume(experiment.ec_rate * elapsed, "experiment");

		dataSampled += chunkSize;
		dataSampled = Math.Min(dataSampled, exp.max_amount);
		sampledOut = dataSampled;

		if (!experiment.sample_collecting)
		{
			remainingSampleMass -= massDelta;
			remainingSampleMass = Math.Max(remainingSampleMass, 0);
		}
		remainingSampleMassOut = remainingSampleMass;
		return true;
	}

	sampledOut = dataSampled;
	remainingSampleMassOut = remainingSampleMass;
	return false;
}
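// Self-contained sketch (not from the original source) of the capacity throttling used by
// the DoRecord variant directly above: when the target drive cannot hold a full chunk, the
// chunk size, the consumed sample mass and the effective elapsed time are all scaled by the
// same factor, so resource consumption stays proportional to the data actually stored.
static void ThrottleToCapacity(ref double chunkSize, ref double massDelta, ref double elapsed, double maxCapacity)
{
	if (maxCapacity < chunkSize)
	{
		double factor = maxCapacity / chunkSize; // fraction of the chunk that fits on the drive
		chunkSize *= factor;
		massDelta *= factor;
		elapsed *= factor;
	}
}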
// move all data to another drive
public bool Move(Drive destination, bool moveSamples)
{
	bool result = true;

	// copy files
	List<SubjectData> filesList = new List<SubjectData>();
	foreach (File file in files.Values)
	{
		double size = Math.Min(file.size, destination.FileCapacityAvailable());
		if (destination.Record_file(file.subjectData, size, true, file.useStockCrediting))
		{
			file.size -= size;
			file.subjectData.RemoveDataCollectedInFlight(size);
			if (file.size < double.Epsilon)
			{
				filesList.Add(file.subjectData);
			}
			else
			{
				result = false;
				break;
			}
		}
		else
		{
			result = false;
			break;
		}
	}
	foreach (SubjectData id in filesList)
	{
		files.Remove(id);
	}

	if (!moveSamples)
	{
		return result;
	}

	// move samples
	List<SubjectData> samplesList = new List<SubjectData>();
	foreach (Sample sample in samples.Values)
	{
		double size = Math.Min(sample.size, destination.SampleCapacityAvailable(sample.subjectData));
		if (size < double.Epsilon)
		{
			result = false;
			break;
		}

		double mass = sample.mass * (sample.size / size);
		if (destination.Record_sample(sample.subjectData, size, mass, sample.useStockCrediting))
		{
			sample.size -= size;
			sample.subjectData.RemoveDataCollectedInFlight(size);
			sample.mass -= mass;
			if (sample.size < double.Epsilon)
			{
				samplesList.Add(sample.subjectData);
			}
			else
			{
				result = false;
				break;
			}
		}
		else
		{
			result = false;
			break;
		}
	}
	foreach (var id in samplesList)
	{
		samples.Remove(id);
	}

	return result; // true if everything was moved, false otherwise
}
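// Illustrative usage sketch (hypothetical names, not part of the original source):
// draining one drive into another, e.g. before its part disappears. A false return means
// the destination ran out of file or sample capacity and some data remains on the source.
bool everythingMoved = sourceDrive.Move(destinationDrive, moveSamples: true);
if (!everythingMoved)
{
	// some files or samples are still on sourceDrive
}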
// move all data to another drive
public bool Move(Drive destination, bool moveSamples)
{
	bool result = true;

	// copy files
	var filesList = new List<string>();
	foreach (var p in files)
	{
		double size = Math.Min(p.Value.size, destination.FileCapacityAvailable());
		if (destination.Record_file(p.Key, size, true))
		{
			destination.files[p.Key].buff += p.Value.buff; //< move the buffer along with the size
			p.Value.buff = 0;
			p.Value.size -= size;
			if (p.Value.size < double.Epsilon)
			{
				filesList.Add(p.Key);
			}
			else
			{
				result = false;
				break;
			}
		}
		else
		{
			result = false;
			break;
		}
	}
	foreach (var id in filesList)
	{
		files.Remove(id);
	}

	if (!moveSamples)
	{
		return result;
	}

	// move samples
	var samplesList = new List<string>();
	foreach (var p in samples)
	{
		double size = Math.Min(p.Value.size, destination.SampleCapacityAvailable(p.Key));
		if (size < double.Epsilon)
		{
			result = false;
			break;
		}

		double mass = p.Value.mass * (p.Value.size / size);
		if (destination.Record_sample(p.Key, size, mass))
		{
			p.Value.size -= size;
			p.Value.mass -= mass;
			p.Value.size = Math.Max(0, p.Value.size);
			p.Value.mass = Math.Max(0, p.Value.mass);
			if (p.Value.size < double.Epsilon)
			{
				samplesList.Add(p.Key);
			}
			else
			{
				result = false;
				break;
			}
		}
		else
		{
			result = false;
			break;
		}
	}
	foreach (var id in samplesList)
	{
		samples.Remove(id);
	}

	return result; // true if everything was moved, false otherwise
}