/// <summary>
/// Lazily enumerates the sample in batches of the given size.
/// The final batch may contain fewer than <paramref name="size"/> items.
/// </summary>
/// <param name="size">Number of items per batch; must be positive.</param>
/// <returns>A deferred sequence of batches; each yielded batch is an independent instance.</returns>
/// <exception cref="MLException">Thrown (on first enumeration) when size is not positive.</exception>
public IEnumerable<ClassifiedSample<TObj>> Batch(int size)
{
  if (size <= 0)
  {
    throw new MLException("Batch size must be positive");
  }

  var bucket = new ClassifiedSample<TObj>();
  var count = 0;

  foreach (var item in this)
  {
    bucket[item.Key] = item.Value;
    count++;
    if (count != size)
    {
      continue;
    }

    yield return bucket;

    // BUGFIX: allocate a fresh bucket instead of calling bucket.Clear() —
    // clearing mutated the instance the consumer had already received,
    // so e.g. Batch(n).ToList() produced corrupted batches.
    bucket = new ClassifiedSample<TObj>();
    count = 0;
  }

  // Emit the trailing partial batch, if any.
  if (bucket.Count > 0)
  {
    yield return bucket;
  }
}
/// <summary>
/// Initializes the algorithm with a non-empty training sample.
/// A defensive copy of the sample is stored, so later mutation of the
/// caller's instance does not affect the algorithm.
/// </summary>
/// <param name="trainingSample">Initially classified training data; must be non-null and non-empty.</param>
/// <exception cref="MLException">Thrown when the training sample is null or empty.</exception>
protected AlgorithmBase(ClassifiedSample<TObj> trainingSample)
{
  if (trainingSample == null || !trainingSample.Any())
  {
    // BUGFIX: message previously misspelled the type name ("AlrogithmBase").
    throw new MLException("AlgorithmBase.ctor(trainingSample=null|empty)");
  }

  m_TrainingSample = new ClassifiedSample<TObj>(trainingSample);
  // Index classes by name for O(1) lookup during classification.
  m_Classes = m_TrainingSample.Classes.ToDictionary(c => c.Name);
}
/// <summary>
/// Returns all errors of the given algorithm on some initially classified sample.
/// Items are classified in parallel; access to the shared error list is
/// synchronized with a lock, so only mismatches pay the locking cost.
/// </summary>
/// <param name="classifiedSample">Sample with known (expected) classes; must be non-null.</param>
/// <returns>One <see cref="ErrorInfo"/> per item whose predicted class differs from the expected one.</returns>
/// <exception cref="MLException">Thrown when the sample is null.</exception>
public virtual IEnumerable<ErrorInfo> GetErrors(ClassifiedSample<TObj> classifiedSample)
{
  // ROBUSTNESS: guard against null instead of failing with a
  // NullReferenceException inside Parallel.ForEach; style matches
  // the validation in the constructor.
  if (classifiedSample == null)
  {
    throw new MLException("AlgorithmBase.GetErrors(classifiedSample=null)");
  }

  var errors = new List<ErrorInfo>();

  Parallel.ForEach(classifiedSample, pdata =>
  {
    var res = this.Classify(pdata.Key);
    if (res != pdata.Value)
    {
      lock (errors)
      {
        errors.Add(new ErrorInfo(pdata.Key, pdata.Value, res));
      }
    }
  });

  return errors;
}
/// <summary>
/// Retrieves a contiguous subset of the sample.
/// </summary>
/// <param name="skip">Number of leading items to skip; must be non-negative.</param>
/// <param name="take">Maximum number of items to take; must be positive.</param>
/// <returns>A new sample containing at most <paramref name="take"/> items
/// (fewer if the source runs out); never null.</returns>
/// <exception cref="MLException">Thrown when skip is negative or take is not positive.</exception>
public ClassifiedSample<TObj> Subset(int skip, int take)
{
  // DOCFIX: the original XML comment was missing its closing </summary> tag,
  // which breaks XML documentation generation.
  if (skip < 0)
  {
    throw new MLException("Skip value must be non-negative");
  }
  if (take <= 0)
  {
    throw new MLException("Take value must be positive");
  }

  var result = new ClassifiedSample<TObj>();
  foreach (var item in this.Skip(skip).Take(take))
  {
    result[item.Key] = item.Value;
  }

  return result;
}
/// <summary>
/// Copy constructor: initializes this sample with the key/value pairs of
/// <paramref name="other"/> by delegating to the base copy constructor.
/// </summary>
/// <param name="other">Sample to copy; NOTE(review): passing null presumably
/// throws from the base constructor — confirm against the base type.</param>
public ClassifiedSample(ClassifiedSample <TObj> other) : base(other) { }