/// <summary>
/// Release all resources used.
/// </summary>
public void Dispose()
{
    if (m_factory == null)
        return;

    m_factory.Close();
    m_factory.Dispose();
    m_factory = null;
}
/// <summary>
/// Releases the resources used.
/// </summary>
/// <param name="bDisposing">Set to <i>true</i> when called by Dispose()</param>
protected virtual void Dispose(bool bDisposing)
{
    var factory = m_factory;

    if (factory == null)
        return;

    factory.Close();
    factory.Dispose();
    m_factory = null;
}
/// <summary>
/// Verifies that LoadImageIndexes returns one DbItem per image in the MNIST
/// training source, and profiles the index query plus in-memory LINQ filters.
/// </summary>
public void TestIndexQuery()
{
    PreTest.Init();

    Log log = new Log("Test Dataset Factory");
    log.EnableTrace = true;

    string strDs = "MNIST";
    DatasetFactory factory = new DatasetFactory();
    Stopwatch sw = new Stopwatch();

    try
    {
        DatasetDescriptor ds = factory.LoadDataset(strDs);
        factory.Open(ds.TrainingSource.ID);

        sw.Start();
        List<DbItem> rgItems = factory.LoadImageIndexes(false);
        sw.Stop();

        log.CHECK_EQ(rgItems.Count, ds.TrainingSource.ImageCount, "The query count should match the image count!");
        factory.Close();

        log.WriteLine("Query time = " + sw.Elapsed.TotalMilliseconds.ToString("N5") + " ms.");

        sw.Restart();

        // BUGFIX: nMax was initialized to '-int.MaxValue' (int.MinValue + 1);
        // use int.MinValue so the max scan starts from a true lower bound.
        int nMin = int.MaxValue;
        int nMax = int.MinValue;

        for (int i = 0; i < rgItems.Count; i++)
        {
            nMin = Math.Min(rgItems[i].Label, nMin);
            nMax = Math.Max(rgItems[i].Label, nMax);
        }

        // The results below are intentionally unused - these queries exist only
        // to profile the in-memory filtering time over the loaded indexes.
        List<DbItem> rgBoosted = rgItems.Where(p => p.Boost > 0).ToList();

        for (int nLabel = nMin; nLabel <= nMax; nLabel++)
        {
            List<DbItem> rgLabel = rgItems.Where(p => p.Label == nLabel).ToList();
        }

        sw.Stop();
        log.WriteLine("Query time (profile) = " + sw.Elapsed.TotalMilliseconds.ToString("N5") + " ms.");
    }
    finally
    {
        factory.Dispose();
    }
}
/// <summary>
/// Verifies PutRawImageCache/GetRawImagesAt round-trips image data: writes 20
/// 10x10 images whose first i+1 bytes are the byte index, flushes the cache,
/// reads them back and checks every byte.
/// </summary>
/// <param name="bSaveImagesToFile">When <i>true</i>, image bytes are stored in files rather than the database.</param>
public void TestPutRawImage(bool bSaveImagesToFile)
{
    DatasetFactory factory = new DatasetFactory();

    // BUGFIX: the factory (an IDisposable) was never disposed and Close was not
    // exception-safe; use try/finally as TestIndexQuery does.
    try
    {
        factory.DeleteSources("Test123");
        int nSrcId = factory.AddSource("Test123", 1, 10, 10, false, 0, bSaveImagesToFile);
        factory.Open(nSrcId, 10);

        byte[] rgBytes = new byte[10 * 10];

        // Each datum i has bytes [0..i] set to their own index, the rest zero
        // (rgBytes is shared and filled progressively).
        for (int i = 0; i < 20; i++)
        {
            rgBytes[i] = (byte)i;

            SimpleDatum sd = new SimpleDatum(false, 1, 10, 10, i, DateTime.MinValue, rgBytes.ToList(), null, 0, false, i);
            factory.PutRawImageCache(i, sd);
        }

        factory.ClearImageCash(true);

        List<RawImage> rgImg = factory.GetRawImagesAt(0, 20);

        for (int i = 0; i < rgImg.Count; i++)
        {
            SimpleDatum sd = factory.LoadDatum(rgImg[i]);

            bool bEncoded = false;
            byte[] rgData = sd.GetByteData(out bEncoded);

            for (int j = 0; j < 100; j++)
            {
                if (j <= i)
                    Assert.AreEqual(rgData[j], j);
                else
                    Assert.AreEqual(rgData[j], 0);
            }
        }

        factory.DeleteSources("Test123");
        factory.Close();
    }
    finally
    {
        factory.Dispose();
    }
}
/// <summary>
/// Extracts (optionally) a .tar of images, builds the file list, and loads each
/// image into the source named 'strSourceName' via the raw-image cache.
/// </summary>
/// <param name="strImagesFile">The .tar file containing the images.</param>
/// <param name="strSourceName">The data source to create and fill.</param>
/// <param name="nExtractTotal">Total number of files expected across all extractions (progress scaling).</param>
/// <param name="nExtractIdx">Running count of extracted files, updated on return.</param>
/// <param name="nTotal">Total number of images expected (progress scaling).</param>
/// <param name="nIdx">Running image index, updated on return.</param>
/// <param name="log">The output log.</param>
/// <param name="bExtractFiles">When <i>true</i>, the tar file is extracted before loading.</param>
/// <param name="rgNameToLabel">Maps label names to label values.</param>
/// <returns>Returns <i>false</i> when aborted via the cancel event, otherwise <i>true</i>.</returns>
private bool loadFile(string strImagesFile, string strSourceName, int nExtractTotal, ref int nExtractIdx, int nTotal, ref int nIdx, Log log, bool bExtractFiles, Dictionary<string, int> rgNameToLabel)
{
    Stopwatch sw = new Stopwatch();

    reportProgress(nIdx, nTotal, " Source: " + strSourceName);
    reportProgress(nIdx, nTotal, " loading " + strImagesFile + "...");

    // NOTE: removed a dead 'FileStream fs' local (never assigned) and the
    // try/finally that only disposed the always-null stream - a no-op.
    int nSrcId = m_factory.AddSource(strSourceName, 3, -1, -1, false);
    addLabels(nSrcId, rgNameToLabel);
    m_factory.Open(nSrcId, 500, Database.FORCE_LOAD.NONE, log);

    int nPos = strImagesFile.ToLower().LastIndexOf(".tar");
    // BUGFIX: previously a missing '.tar' extension caused an opaque
    // ArgumentOutOfRangeException from Substring(0, -1).
    if (nPos < 0)
        throw new ArgumentException("The images file '" + strImagesFile + "' is expected to be a .tar file.", nameof(strImagesFile));

    string strPath = strImagesFile.Substring(0, nPos);

    if (!Directory.Exists(strPath))
        Directory.CreateDirectory(strPath);

    if (bExtractFiles)
    {
        log.Progress = (double)nIdx / nExtractTotal;
        log.WriteLine("Extracting files from '" + strImagesFile + "'...");

        // ExtractTar returns 0 when cancelled via m_evtCancel.
        if ((nExtractIdx = TarFile.ExtractTar(strImagesFile, strPath, m_evtCancel, log, nExtractTotal, nExtractIdx)) == 0)
        {
            log.WriteLine("Aborted.");
            return false;
        }
    }

    // Load the annotations.
    SimpleDatum.ANNOTATION_TYPE type = SimpleDatum.ANNOTATION_TYPE.BBOX;
    int nResizeHeight = 0;
    int nResizeWidth = 0;

    // Create the training database images.
    // Create the master list file.
    List<Tuple<string, string>> rgFiles = createFileList(log, strPath);

    sw.Start();

    for (int i = 0; i < rgFiles.Count; i++)
    {
        SimpleDatum datum = loadDatum(log, rgFiles[i].Item1, rgFiles[i].Item2, nResizeHeight, nResizeWidth, type, rgNameToLabel);
        m_factory.PutRawImageCache(nIdx, datum);
        nIdx++;

        if (m_evtCancel.WaitOne(0))
        {
            log.WriteLine("Aborted.");
            return false;
        }

        // Report progress at most once per second.
        if (sw.Elapsed.TotalMilliseconds > 1000)
        {
            log.Progress = (double)nIdx / nTotal;
            log.WriteLine("Loading file " + i.ToString() + " of " + rgFiles.Count.ToString() + "...");
            sw.Restart();
        }
    }

    m_factory.ClearImageCashe(true);
    m_factory.Close();

    return true;
}
/// <summary>
/// Creates the CSV dataset: loads and parses the CSV file per the configured
/// schema, randomly splits the rows into training/testing sources, computes the
/// image mean, and registers the Dataset entity with its parameters.
/// </summary>
/// <param name="config">Specifies the dataset configuration settings.</param>
/// <param name="progress">Receives completion/abort notifications.</param>
public void Create(DatasetConfiguration config, IXDatasetCreatorProgress progress)
{
    string strCsvFile = Properties.Settings.Default.CsvFile;
    string strDsName = config.Name;
    string strTrainingSrc = config.Name + ".training";
    string strTestingSrc = config.Name + ".testing";

    m_bCancel = false;
    m_iprogress = progress;
    m_factory.DeleteSources(strTrainingSrc, strTestingSrc);

    Log log = new Log("CSV Dataset Creator");
    log.OnWriteLine += new EventHandler<LogArg>(log_OnWriteLine);

    try
    {
        //-----------------------------------------
        //  Load the schema that defines the layout
        //  of the CSV file.
        //-----------------------------------------
        m_schema = loadSchema(config.Settings);

        //-----------------------------------------
        //  Load and parse the CSV file.
        //-----------------------------------------
        DataConfigSetting dsCsvFile = config.Settings.Find("CSV File");
        strCsvFile = dsCsvFile.Value.ToString();
        if (strCsvFile.Length == 0)
            throw new Exception("CSV data file name not specified!");

        log.WriteLine("Loading the data file...");

        if (m_bCancel)
            return;

        m_parser.Load(strCsvFile, m_schema);

        //-----------------------------------------
        //  Split the data into training and testing
        //  sets.
        //-----------------------------------------
        List<DataItem> rgTraining = new List<DataItem>();
        List<DataItem> rgTesting = new List<DataItem>();
        DataConfigSetting dsPctTesting = config.Settings.Find("Testing Percentage");
        double dfVal = (double)dsPctTesting.Value;
        Random random = new Random();

        // Each row lands in testing with probability dfVal.
        for (int i = 0; i < m_parser.Data.Count; i++)
        {
            if (random.NextDouble() > dfVal)
                rgTraining.Add(m_parser.Data[i]);
            else
                rgTesting.Add(m_parser.Data[i]);
        }

        Properties.Settings.Default.TestingPct = dfVal;

        //-----------------------------------------
        //  Create the training data source.
        //-----------------------------------------
        int nCellHorizCount = 0;
        List<int> rgDim = getImageDim(m_parser, m_schema, out nCellHorizCount);
        int nTrainSrcId = m_factory.AddSource(strTrainingSrc, rgDim[0], rgDim[1], rgDim[2], false, 0);
        m_factory.Open(nTrainSrcId, 500, Database.FORCE_LOAD.FROM_FILE); // use file based data.
        log.WriteLine("Deleting existing data from '" + m_factory.OpenSource.Name + "'.");
        m_factory.DeleteSourceData();

        if (!loadData(log, m_factory, m_parser, rgTraining, rgDim, true, true))
            return;

        m_factory.UpdateSourceCounts();
        updateLabels(m_factory);

        log.WriteLine("Creating the image mean...");
        SimpleDatum dMean = SimpleDatum.CalculateMean(log, m_rgImages.ToArray(), new WaitHandle[] { new ManualResetEvent(false) });
        m_factory.PutRawImageMean(dMean, true);
        m_rgImages.Clear();
        m_factory.Close();

        //-----------------------------------------
        //  Create the testing data source.
        //-----------------------------------------
        int nTestSrcId = m_factory.AddSource(strTestingSrc, rgDim[0], rgDim[1], rgDim[2], false, 0);
        m_factory.Open(nTestSrcId, 500, Database.FORCE_LOAD.FROM_FILE); // use file based data.
        log.WriteLine("Deleting existing data from '" + m_factory.OpenSource.Name + "'.");
        m_factory.DeleteSourceData();

        if (!loadData(log, m_factory, m_parser, rgTesting, rgDim, false, false))
            return;

        m_factory.UpdateSourceCounts();
        updateLabels(m_factory);
        m_factory.Close();

        //-----------------------------------------
        //  Create the data set.
        //-----------------------------------------
        log.WriteLine("Done loading training and testing data.");
        int nDatasetID = 0;

        using (DNNEntities entities = EntitiesConnection.CreateEntities())
        {
            List<Source> rgSrcTraining = entities.Sources.Where(p => p.Name == strTrainingSrc).ToList();
            List<Source> rgSrcTesting = entities.Sources.Where(p => p.Name == strTestingSrc).ToList();

            if (rgSrcTraining.Count == 0)
                throw new Exception("Could not find the training source '" + strTrainingSrc + "'.");

            // BUGFIX: corrected 'tesing' typo in the error message.
            if (rgSrcTesting.Count == 0)
                throw new Exception("Could not find the testing source '" + strTestingSrc + "'.");

            DataConfigSetting dsName = config.Settings.Find("Output Dataset Name");
            int nSrcTestingCount = rgSrcTesting[0].ImageCount.GetValueOrDefault();
            int nSrcTrainingCount = rgSrcTraining[0].ImageCount.GetValueOrDefault();
            int nSrcTotalCount = nSrcTestingCount + nSrcTrainingCount;
            // BUGFIX: guard on the divisor (total count) - the old guard on the
            // training count produced NaN when both sources were empty and 0.0
            // when only the training source was empty.
            double dfTestingPct = (nSrcTotalCount == 0) ? 0.0 : nSrcTestingCount / (double)nSrcTotalCount;

            Dataset ds = new Dataset();
            ds.ImageHeight = rgSrcTraining[0].ImageHeight;
            ds.ImageWidth = rgSrcTraining[0].ImageWidth;
            ds.Name = strDsName;
            ds.ImageEncoded = rgSrcTesting[0].ImageEncoded;
            ds.ImageChannels = rgSrcTesting[0].ImageChannels;
            ds.TestingPercent = (decimal)dfTestingPct;
            ds.TestingSourceID = rgSrcTesting[0].ID;
            ds.TestingTotal = rgSrcTesting[0].ImageCount;
            ds.TrainingSourceID = rgSrcTraining[0].ID;
            ds.TrainingTotal = rgSrcTraining[0].ImageCount;
            ds.DatasetCreatorID = config.ID;
            ds.DatasetGroupID = 0;
            ds.ModelGroupID = 0;

            entities.Datasets.Add(ds);
            entities.SaveChanges();

            nDatasetID = ds.ID;
        }

        m_factory.SetDatasetParameter(nDatasetID, "PixelSize", m_schema.CellSize.ToString());
        m_factory.SetDatasetParameter(nDatasetID, "AttributeCount", m_parser.DataDescriptions.Count.ToString());
        m_factory.SetDatasetParameter(nDatasetID, "AttributeCountHoriz", nCellHorizCount.ToString());
        m_factory.SetDatasetParameter(nDatasetID, "AttributeCountVert", nCellHorizCount.ToString());
    }
    catch (Exception excpt)
    {
        log.WriteLine("ERROR: " + excpt.Message);
    }
    finally
    {
        Properties.Settings.Default.CsvFile = strCsvFile;
        Properties.Settings.Default.Save();

        // Merged the two duplicated 'if (m_bCancel)' blocks into one.
        if (m_bCancel)
        {
            log.WriteLine("ABORTED converting CSV data files.");
            m_iprogress.OnCompleted(new CreateProgressArgs(1, 1, "ABORTED!", null, true));
        }
        else
        {
            log.WriteLine("Done converting CSV data files.");
            m_iprogress.OnCompleted(new CreateProgressArgs(1, "COMPLETED."));
        }
    }
}
/// <summary>
/// Loads the images of a data source into a new ImageSet, optionally loading
/// (or calculating and saving) the image mean.
/// </summary>
/// <param name="strType">Source type name used in error messages (e.g. "training").</param>
/// <param name="src">The source descriptor to load.</param>
/// <param name="rgAbort">Wait handles used to abort the load.</param>
/// <param name="imgMean">The image mean; loaded or calculated when null on input.</param>
/// <param name="nLastImageIdx">Returns the index just past the last image loaded.</param>
/// <param name="nPadW">Optional width padding applied to each datum.</param>
/// <param name="nPadH">Optional height padding applied to each datum.</param>
/// <param name="log">Optional output log (may be null).</param>
/// <param name="loadMethod">The image database load method.</param>
/// <param name="nImageDbLoadLimit">Maximum number of images to load (0 = all).</param>
/// <param name="nImageDbLoadLimitStartIdx">Start index when a load limit is used.</param>
/// <param name="bLoadNext">When <i>true</i> (and on-demand loading), advances nLastImageIdx by the load limit.</param>
/// <returns>The loaded ImageSet, or null when aborted.</returns>
private ImageSet loadImageset(string strType, SourceDescriptor src, WaitHandle[] rgAbort, ref SimpleDatum imgMean, out int nLastImageIdx, int nPadW = 0, int nPadH = 0, Log log = null, IMAGEDB_LOAD_METHOD loadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL, int nImageDbLoadLimit = 0, int nImageDbLoadLimitStartIdx = 0, bool bLoadNext = false)
{
    try
    {
        RawImageMean imgMeanRaw = null;

        m_factory.Open(src);
        nLastImageIdx = nImageDbLoadLimitStartIdx;

        // Without a stored mean, partial loads cannot work - fall back to LOAD_ALL.
        if (loadMethod != IMAGEDB_LOAD_METHOD.LOAD_ALL)
        {
            if (imgMean == null)
            {
                imgMeanRaw = m_factory.GetRawImageMean();
                if (imgMeanRaw == null)
                {
                    if (log != null)
                        log.WriteLine("WARNING: No image mean exists in the database, changing image database load from " + loadMethod.ToString() + " to " + IMAGEDB_LOAD_METHOD.LOAD_ALL.ToString());

                    loadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL;
                }
            }
        }

        int nCount = src.ImageCount;
        if (nCount == 0)
            throw new Exception("Could not find any images with " + strType + " Source = '" + src.Name + "'.");

        if (log != null)
            log.WriteLine("Loading '" + src.Name + "' - " + nCount.ToString("N0") + " images.");

        ImageSet imgset = new ImageSet(m_factory, src, loadMethod, nImageDbLoadLimit);

        if (OnCalculateImageMean != null)
            imgset.OnCalculateImageMean += OnCalculateImageMean;

        if (loadMethod != IMAGEDB_LOAD_METHOD.LOAD_ON_DEMAND)
        {
            int nBatchSize = 20000;
            Stopwatch sw = new Stopwatch();

            // Larger images are fetched in smaller database batches.
            int nImageSize = src.ImageHeight * src.ImageWidth;
            if (nImageSize > 60000)
                nBatchSize = 5000;
            else if (nImageSize > 20000) // BUGFIX: was 'nBatchSize > 20000' which was never true, so the 7500 tier was dead.
                nBatchSize = 7500;
            else if (nImageSize > 3000)
                nBatchSize = 10000;

            if (nImageDbLoadLimit <= 0)
                nImageDbLoadLimit = nCount;

            List<int> rgIdx = getIndexList(nImageDbLoadLimitStartIdx, nImageDbLoadLimit);
            int nIdx = 0;

            sw.Start();

            while (nIdx < rgIdx.Count)
            {
                int nImageIdx = rgIdx[nIdx];
                int nImageCount = Math.Min(rgIdx.Count - nIdx, nBatchSize);
                List<RawImage> rgImg = m_factory.GetRawImagesAt(nImageIdx, nImageCount);

                for (int j = 0; j < rgImg.Count; j++)
                {
                    SimpleDatum sd1 = m_factory.LoadDatum(rgImg[j], nPadW, nPadH);
                    imgset.Add(nIdx + j, sd1);

                    // Report progress (and poll the abort handles) at most once per second.
                    if (sw.Elapsed.TotalMilliseconds > 1000)
                    {
                        if (log != null)
                        {
                            double dfPct = (double)(nIdx + j) / (double)nCount;
                            log.Progress = dfPct;
                            log.WriteLine("image loading at " + dfPct.ToString("P") + "...");
                        }

                        sw.Restart();

                        if (EventWaitHandle.WaitAny(rgAbort, 0) != EventWaitHandle.WaitTimeout)
                            return null;
                    }
                }

                nIdx += rgImg.Count;

                if (loadMethod == IMAGEDB_LOAD_METHOD.LOAD_ALL && rgImg.Count == 0 && nIdx < nCount)
                {
                    // BUGFIX: guard the optional log - it may be null here, as it is everywhere else in this method.
                    if (log != null)
                        log.WriteLine("WARNING: Loaded " + nIdx.ToString("N0") + " images, yet " + (nCount - nIdx).ToString("N0") + " images are unaccounted for. You may need to reindex the dataset.");

                    break;
                }
            }

            if (log != null)
                log.Progress = 0;

            if (rgIdx.Count > 0)
                nLastImageIdx = rgIdx[rgIdx.Count - 1] + 1;
        }
        else if (bLoadNext)
        {
            nLastImageIdx += nImageDbLoadLimit;
        }

        // Resolve the mean: prefer the caller-supplied one, then the stored raw
        // mean, finally calculate (and persist) it from the loaded images.
        if (imgMean == null)
        {
            if (imgMeanRaw == null)
                imgMeanRaw = m_factory.GetRawImageMean();

            if (imgMeanRaw != null)
            {
                imgMean = m_factory.LoadDatum(imgMeanRaw, nPadW, nPadH);
            }
            else
            {
                if (log != null)
                    log.WriteLine("Calculating mean...");

                imgMean = imgset.GetImageMean(log, rgAbort);
                m_factory.PutRawImageMean(imgMean, true);
            }
        }

        if (imgMean != null)
            imgset.SetImageMean(imgMean);

        imgset.CompleteLoad(nLastImageIdx);

        return imgset;
    }
    finally
    {
        m_factory.Close();
    }
}
/// <summary>
/// The dataLoadThread is responsible for loading the data source images in the background.
/// </summary>
/// <remarks>
/// Consumes indexes from m_loadSequence in batches (batch size from getBatchSize),
/// fetches the raw images per batch, and writes each loaded SimpleDatum into
/// m_rgImages at m_nLoadedCount.  Signals m_evtRunning while active and m_evtDone
/// on exit; aborts early when any handle in m_rgAbort is signaled.
/// </remarks>
private void dataLoadThread()
{
    m_evtRunning.Set();

    // Work on a private factory connection; the shared m_factory settings are copied.
    DatasetFactory factory = new DatasetFactory(m_factory);
    int? nNextIdx = m_loadSequence.GetNext();
    Stopwatch sw = new Stopwatch();

    if (m_refreshManager != null)
        m_refreshManager.Reset();

    try
    {
        sw.Start();

        List<int> rgIdxBatch = new List<int>();
        int nBatchSize = getBatchSize(m_src);

        // This thread assumes it starts with an empty image buffer.
        if (m_nLoadedCount > 0)
            throw new Exception("The loaded count is > 0!");

        factory.Open(m_src);
        m_log.WriteLine(m_src.Name + " loading " + m_loadSequence.Count.ToString("N0") + " items...");

        // Loop until the sequence is exhausted AND the pending batch is flushed.
        while (nNextIdx.HasValue || rgIdxBatch.Count > 0)
        {
            if (nNextIdx.HasValue)
                rgIdxBatch.Add(nNextIdx.Value);

            // Flush when the batch is full, or on the final (partial) batch.
            if (rgIdxBatch.Count >= nBatchSize || !nNextIdx.HasValue)
            {
                List<RawImage> rgImg;

                // Without a refresh manager the indexes are contiguous, so a
                // start + count query is used; otherwise the explicit index list.
                if (m_refreshManager == null)
                    rgImg = factory.GetRawImagesAt(rgIdxBatch[0], rgIdxBatch.Count);
                else
                    rgImg = factory.GetRawImagesAt(rgIdxBatch, m_evtCancel);

                // A null result indicates cancellation - stop loading.
                if (rgImg == null)
                    break;

                for (int j = 0; j < rgImg.Count; j++)
                {
                    SimpleDatum sd = factory.LoadDatum(rgImg[j]);

                    if (m_refreshManager != null)
                        m_refreshManager.AddLoaded(sd);

                    m_rgImages[m_nLoadedCount] = sd;
                    m_nLoadedCount++;

                    // Throttle progress reporting and abort polling to ~1/sec.
                    if (sw.Elapsed.TotalMilliseconds > 1000)
                    {
                        if (m_log != null && !m_bSilent)
                        {
                            double dfPct = m_nLoadedCount / (double)m_rgImages.Length;
                            m_log.Progress = dfPct;
                            m_log.WriteLine("Loading '" + m_src.Name + "' at " + dfPct.ToString("P") + " (" + m_nLoadedCount.ToString("N0") + " of " + m_rgImages.Length.ToString("N0") + ")...");
                        }

                        int nWait = WaitHandle.WaitAny(m_rgAbort.ToArray(), 0);
                        if (nWait != WaitHandle.WaitTimeout)
                            return;

                        sw.Restart();
                    }
                }

                rgIdxBatch = new List<int>();
            }

            nNextIdx = m_loadSequence.GetNext();
        }

        // Only reachable via the cancellation 'break' above with items still pending.
        if (rgIdxBatch.Count > 0)
            m_log.FAIL("Not all images were loaded!");
    }
    finally
    {
        factory.Close();
        factory.Dispose();
        m_evtRunning.Reset();
        m_evtDone.Set();
    }
}
/// <summary>
/// Creates the CIFAR-10 dataset: loads the five training batch files and the
/// testing batch file into their data sources, computes the image mean, and
/// registers the Dataset entity.
/// </summary>
/// <param name="config">Specifies the dataset configuration settings.</param>
/// <param name="progress">Receives completion/abort notifications.</param>
public void Create(DatasetConfiguration config, IXDatasetCreatorProgress progress)
{
    string strTrainingBatchFile1 = Properties.Settings.Default.TrainingDataFile1;
    string strTrainingBatchFile2 = Properties.Settings.Default.TrainingDataFile2;
    string strTrainingBatchFile3 = Properties.Settings.Default.TrainingDataFile3;
    string strTrainingBatchFile4 = Properties.Settings.Default.TrainingDataFile4;
    string strTrainingBatchFile5 = Properties.Settings.Default.TrainingDataFile5;
    string strTestingBatchFile = Properties.Settings.Default.TestingDataFile;
    string strDsName = config.Name;
    string strTrainingSrc = config.Name + ".training";
    string strTestingSrc = config.Name + ".testing";
    int nIdx = 0;
    int nTotal = 50000;

    m_bCancel = false;
    m_iprogress = progress;
    m_factory.DeleteSources(strTrainingSrc, strTestingSrc);

    Log log = new Log("CIFAR Dataset Creator");
    log.OnWriteLine += new EventHandler<LogArg>(log_OnWriteLine);

    try
    {
        DataConfigSetting dsTrainingDataFile1 = config.Settings.Find("Training Data File 1");
        DataConfigSetting dsTrainingDataFile2 = config.Settings.Find("Training Data File 2");
        DataConfigSetting dsTrainingDataFile3 = config.Settings.Find("Training Data File 3");
        DataConfigSetting dsTrainingDataFile4 = config.Settings.Find("Training Data File 4");
        DataConfigSetting dsTrainingDataFile5 = config.Settings.Find("Training Data File 5");
        DataConfigSetting dsTestingDataFile = config.Settings.Find("Testing Data File");

        strTrainingBatchFile1 = dsTrainingDataFile1.Value.ToString();
        if (strTrainingBatchFile1.Length == 0)
            throw new Exception("Training data file #1 name not specified!");

        strTrainingBatchFile2 = dsTrainingDataFile2.Value.ToString();
        if (strTrainingBatchFile2.Length == 0)
            throw new Exception("Training data file #2 name not specified!");

        strTrainingBatchFile3 = dsTrainingDataFile3.Value.ToString();
        if (strTrainingBatchFile3.Length == 0)
            throw new Exception("Training data file #3 name not specified!");

        strTrainingBatchFile4 = dsTrainingDataFile4.Value.ToString();
        if (strTrainingBatchFile4.Length == 0)
            throw new Exception("Training data file #4 name not specified!");

        strTrainingBatchFile5 = dsTrainingDataFile5.Value.ToString();
        if (strTrainingBatchFile5.Length == 0)
            throw new Exception("Training data file #5 name not specified!");

        strTestingBatchFile = dsTestingDataFile.Value.ToString();
        if (strTestingBatchFile.Length == 0)
            throw new Exception("Testing data file name not specified!");

        log.WriteLine("Loading the data files...");

        if (m_bCancel)
            return;

        // Create and fill the training data source (CIFAR images are 3x32x32).
        int nTrainSrcId = m_factory.AddSource(strTrainingSrc, 3, 32, 32, false, 0);
        m_factory.Open(nTrainSrcId, 500, Database.FORCE_LOAD.FROM_FILE); // use file based data.
        log.WriteLine("Deleting existing data from '" + m_factory.OpenSource.Name + "'.");
        m_factory.DeleteSourceData();

        if (!loadFile(log, dsTrainingDataFile1.Name, strTrainingBatchFile1, m_factory, nTotal, true, ref nIdx))
            return;

        if (!loadFile(log, dsTrainingDataFile2.Name, strTrainingBatchFile2, m_factory, nTotal, true, ref nIdx))
            return;

        if (!loadFile(log, dsTrainingDataFile3.Name, strTrainingBatchFile3, m_factory, nTotal, true, ref nIdx))
            return;

        if (!loadFile(log, dsTrainingDataFile4.Name, strTrainingBatchFile4, m_factory, nTotal, true, ref nIdx))
            return;

        if (!loadFile(log, dsTrainingDataFile5.Name, strTrainingBatchFile5, m_factory, nTotal, true, ref nIdx))
            return;

        m_factory.UpdateSourceCounts();
        updateLabels(m_factory);

        log.WriteLine("Creating the image mean...");
        SimpleDatum dMean = SimpleDatum.CalculateMean(log, m_rgImages.ToArray(), new WaitHandle[] { new ManualResetEvent(false) });
        m_factory.PutRawImageMean(dMean, true);
        m_rgImages.Clear();
        m_factory.Close();

        // Create and fill the testing data source.
        int nTestSrcId = m_factory.AddSource(strTestingSrc, 3, 32, 32, false, 0);
        m_factory.Open(nTestSrcId, 500, Database.FORCE_LOAD.FROM_FILE); // use file based data.
        log.WriteLine("Deleting existing data from '" + m_factory.OpenSource.Name + "'.");
        m_factory.DeleteSourceData();

        nIdx = 0;
        nTotal = 10000;

        if (!loadFile(log, dsTestingDataFile.Name, strTestingBatchFile, m_factory, nTotal, false, ref nIdx))
            return;

        // The testing source shares the training mean.
        m_factory.CopyImageMean(strTrainingSrc, strTestingSrc);
        m_factory.UpdateSourceCounts();
        updateLabels(m_factory);
        m_factory.Close();

        log.WriteLine("Done loading training and testing data.");

        using (DNNEntities entities = EntitiesConnection.CreateEntities())
        {
            List<Source> rgSrcTraining = entities.Sources.Where(p => p.Name == strTrainingSrc).ToList();
            List<Source> rgSrcTesting = entities.Sources.Where(p => p.Name == strTestingSrc).ToList();

            if (rgSrcTraining.Count == 0)
                throw new Exception("Could not find the training source '" + strTrainingSrc + "'.");

            // BUGFIX: corrected 'tesing' typo in the error message.
            if (rgSrcTesting.Count == 0)
                throw new Exception("Could not find the testing source '" + strTestingSrc + "'.");

            DataConfigSetting dsName = config.Settings.Find("Output Dataset Name");
            int nSrcTestingCount = rgSrcTesting[0].ImageCount.GetValueOrDefault();
            int nSrcTrainingCount = rgSrcTraining[0].ImageCount.GetValueOrDefault();
            int nSrcTotalCount = nSrcTestingCount + nSrcTrainingCount;
            // BUGFIX: guard on the divisor (total count) - the old guard on the
            // training count produced NaN when both sources were empty and 0.0
            // when only the training source was empty.
            double dfTestingPct = (nSrcTotalCount == 0) ? 0.0 : nSrcTestingCount / (double)nSrcTotalCount;

            Dataset ds = new Dataset();
            ds.ImageHeight = rgSrcTraining[0].ImageHeight;
            ds.ImageWidth = rgSrcTraining[0].ImageWidth;
            ds.Name = strDsName;
            ds.ImageEncoded = rgSrcTesting[0].ImageEncoded;
            ds.ImageChannels = rgSrcTesting[0].ImageChannels;
            ds.TestingPercent = (decimal)dfTestingPct;
            ds.TestingSourceID = rgSrcTesting[0].ID;
            ds.TestingTotal = rgSrcTesting[0].ImageCount;
            ds.TrainingSourceID = rgSrcTraining[0].ID;
            ds.TrainingTotal = rgSrcTraining[0].ImageCount;
            ds.DatasetCreatorID = config.ID;
            ds.DatasetGroupID = 0;
            ds.ModelGroupID = 0;

            entities.Datasets.Add(ds);
            entities.SaveChanges();
        }
    }
    catch (Exception excpt)
    {
        log.WriteLine("ERROR: " + excpt.Message);
    }
    finally
    {
        Properties.Settings.Default.TrainingDataFile1 = strTrainingBatchFile1;
        Properties.Settings.Default.TrainingDataFile2 = strTrainingBatchFile2;
        Properties.Settings.Default.TrainingDataFile3 = strTrainingBatchFile3;
        Properties.Settings.Default.TrainingDataFile4 = strTrainingBatchFile4;
        Properties.Settings.Default.TrainingDataFile5 = strTrainingBatchFile5;
        Properties.Settings.Default.TestingDataFile = strTestingBatchFile;
        Properties.Settings.Default.Save();

        // Merged the two duplicated 'if (m_bCancel)' blocks into one.
        if (m_bCancel)
        {
            log.WriteLine("ABORTED converting CIFAR data files.");
            m_iprogress.OnCompleted(new CreateProgressArgs(nIdx, nTotal, "ABORTED!", null, true));
        }
        else
        {
            log.WriteLine("Done converting CIFAR data files.");
            m_iprogress.OnCompleted(new CreateProgressArgs(1, "COMPLETED."));
        }
    }
}