private static void AddData(BinaryReader lblReader, BinaryReader dataReader, bool isTraining)
{
    // Imports MNIST-style samples from the label and image streams into the database in batches.
    // Both streams begin with a magic number followed by the item count; the image stream also
    // carries the row/column dimensions before the pixel data.

    // Skip magic numbers:
    lblReader.ReadInt32();
    dataReader.ReadInt32();

    int count = lblReader.ReadInt32();
    if (dataReader.ReadInt32() != count) throw new InvalidOperationException("Counts do not match!");

    int rows = dataReader.ReadInt32();
    int cols = dataReader.ReadInt32();
    // Validate against the shared constants instead of hard-coded 28s, keeping this check
    // consistent with the vector sizing in Initialize (assumes RowCount/ColCount == 28).
    if (rows != LeCunDefinitions.RowCount || cols != LeCunDefinitions.ColCount) throw new InvalidOperationException("Invalid image size!");

    Console.WriteLine("Reading: " + count + (isTraining ? " Training" : " Validation") + " samples.");

    // Insert in fixed-size packs so each SaveChanges stays small; a fresh context per pack
    // keeps the change tracker from growing unboundedly over the whole import.
    const int packSize = 1000;
    for (int idx = 0; idx < count; idx += packSize)
    {
        Console.WriteLine(idx);
        var pack = ReadSamples(lblReader, dataReader, isTraining, packSize, idx, count);
        using (var ctx = new LeCunDataEntities())
        {
            foreach (var sample in pack)
            {
                ctx.Samples.AddObject(sample);
            }
            ctx.SaveChanges();
        }
    }

    Console.WriteLine("Done.");
}
protected override UnorderedNeuralVectorFlowProvider.InitializationData Initialize()
{
    // Builds the ID index map for the current mode (training or validation) and reports the
    // provider dimensions: sample count, input vector size, and result vector size.
    using (var ctx = new LeCunDataEntities())
    {
        // Materialize the ordered IDs once and take the count from the resulting array.
        // The original issued a separate COUNT query first, which was a redundant round trip
        // and could disagree with the ID list if the table changed between the two queries.
        indexMap = ctx.Samples
            .Where(s => s.IsTraining == IsTraining)
            .OrderBy(s => s.ID)
            .Select(s => s.ID)
            .ToArray();
        return new InitializationData(indexMap.Length, LeCunDefinitions.RowCount * LeCunDefinitions.ColCount, LeCunDefinitions.ResultCount);
    }
}
protected override IEnumerable<NeuralVectorFlow> DoGetNext(IndexSet indexes)
{
    // Maps the requested provider indexes to database IDs via indexMap and streams the matching
    // samples back as vector flows. NOTE(review): this is an iterator, so the context stays open
    // until the caller finishes (or abandons) enumeration.
    using (var ctx = new LeCunDataEntities())
    {
        // Key = database ID, value = the caller's original index (used to tag the flow).
        var selectIndexes = indexes.ToDictionary(i => indexMap[i]);
        // Materialize the key set into a plain List before querying: LINQ to Entities reliably
        // translates Contains over an in-memory list, whereas handing it the live Dictionary
        // KeyCollection can defeat query translation on older EF providers.
        var ids = selectIndexes.Keys.ToList();
        foreach (var sample in ctx.Samples.Where(s => s.IsTraining == IsTraining && ids.Contains(s.ID)))
        {
            yield return ToFlow(selectIndexes[sample.ID], sample);
        }
    }
}
internal IEnumerable<NeuralVectorFlow> GetAll()
{
    // Streams every sample for the current mode (training or validation) in ID order,
    // converting each one into a vector flow tagged with a sequential index.
    using (var context = new LeCunDataEntities())
    {
        var orderedSamples = context.Samples
            .Where(s => s.IsTraining == IsTraining)
            .OrderBy(s => s.ID);
        int flowIndex = 0;
        foreach (var sample in orderedSamples)
        {
            yield return ToFlow(flowIndex, sample);
            flowIndex++;
        }
    }
}