Example #1
        /// <summary>
        /// Returns the dataset descriptor of the dynamic dataset produced by the Gym.
        /// </summary>
        /// <param name="dt">Specifies the data-type to use.</param>
        /// <param name="log">Optionally, specifies the output log to use (default = <i>null</i>).</param>
        /// <returns>The dataset descriptor is returned.</returns>
        public DatasetDescriptor GetDataset(DATA_TYPE dt, Log log = null)
        {
            int nH = 1;
            int nW = 1;
            int nC = 4;

            if (dt == DATA_TYPE.DEFAULT)
            {
                dt = DATA_TYPE.VALUES;
            }

            if (dt == DATA_TYPE.BLOB)
            {
                nH = 156;
                nW = 156;
                nC = 3;
            }

            SourceDescriptor  srcTrain = new SourceDescriptor((int)GYM_SRC_TRAIN_ID.CARTPOLE, Name + ".training", nW, nH, nC, false, false);
            SourceDescriptor  srcTest  = new SourceDescriptor((int)GYM_SRC_TEST_ID.CARTPOLE, Name + ".testing", nW, nH, nC, false, false);
            DatasetDescriptor ds       = new DatasetDescriptor((int)GYM_DS_ID.CARTPOLE, Name, null, null, srcTrain, srcTest, "CartPoleGym", "CartPole Gym", null, GYM_TYPE.DYNAMIC);

            m_dt = dt;

            return(ds);
        }
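A hedged usage sketch follows: it assumes an already-constructed CartPole gym instance named gym (not shown above) and only reads descriptor members that appear elsewhere on this page.

        // Minimal sketch, assuming 'gym' is an initialized CartPole gym instance.
        DatasetDescriptor ds = gym.GetDataset(DATA_TYPE.BLOB);

        // Source names follow the Name + ".training" / Name + ".testing" pattern above.
        Console.WriteLine(ds.Name + " -> " + ds.TrainingSourceName + " / " + ds.TestingSourceName);

        // With DATA_TYPE.BLOB the sources describe 156x156x3 data; with DATA_TYPE.VALUES, 1x1x4.
        Console.WriteLine(ds.TrainingSource.ImageWidth + " x " + ds.TrainingSource.ImageHeight);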
Example #2
        /// <summary>
        /// Load the MNIST LeNet model and set its sources to the MNIST dataset (already loaded
        /// in the database using the MyCaffeTestApplication).
        /// </summary>
        /// <param name="ds">Specifies the MNIST dataset descriptor.</param>
        /// <returns>The NetParameter for the LeNet is returned.</returns>
        public NetParameter CreateMnistModel(DatasetDescriptor ds)
        {
            string       str      = System.Text.Encoding.Default.GetString(Properties.Resources.lenet_train_test);
            RawProto     proto    = RawProto.Parse(str);
            NetParameter netParam = NetParameter.FromProto(proto);

            for (int i = 0; i < netParam.layer.Count; i++)
            {
                LayerParameter layer = netParam.layer[i];

                if (layer.type == LayerParameter.LayerType.DATA)
                {
                    if (layer.include[0].phase == Phase.TRAIN)
                    {
                        layer.data_param.source = ds.TrainingSourceName;
                    }
                    else
                    {
                        layer.data_param.source = ds.TestingSourceName;
                    }
                }
            }

            return(netParam);
        }
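As a hedged follow-on, one plausible way to use the returned NetParameter is to serialize it back to prototxt text and attach it to a project, mirroring the ProjectEx and ToProto("root") usage in the later examples; loading the "MNIST" descriptor through DatasetFactory is borrowed from Example #6.

        // Hedged usage sketch: the dataset load mirrors the later MNIST examples.
        DatasetFactory factory = new DatasetFactory();
        DatasetDescriptor ds = factory.LoadDataset("MNIST");

        NetParameter netParam = CreateMnistModel(ds);

        // Serialize the patched model and hand it to a project.
        ProjectEx project = new ProjectEx("MNIST LeNet");
        project.SetDataset(ds);
        project.ModelDescription = netParam.ToProto("root").ToString();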
Example #3
        /// <summary>
        /// This method validates a dataset's data dictionary against the dataset descriptor.
        /// </summary>
        /// <param name="datasetDescriptor">Descriptor of the dataset the data to validate comes from</param>
        /// <param name="dataDictionary">Data dictionary to validate</param>
        /// <param name="validReferencesIdsDictionary">Dictionary of valid references</param>
        /// <returns>List of messages.</returns>
        public List <Message> ValidateDataByApplicationDescriptor(DatasetDescriptor datasetDescriptor, Dictionary <string, List <object> > dataDictionary, Dictionary <string, List <long> > validReferencesIdsDictionary)
        {
            var messages = new List <Message>();

            // Validate each item in data dictionary
            foreach (var item in dataDictionary)
            {
                // Get item's attribute descriptor
                var attributeDescriptor = datasetDescriptor.Attributes.FirstOrDefault(a => a.Name == item.Key);
                if (attributeDescriptor == null)
                {
                    var attributeNotFoundMessage = new Message(
                        MessageTypeEnum.Error,
                        6001,
                        new List <string>()
                    {
                        item.Key, datasetDescriptor.Name
                    },
                        item.Key
                        );
                    Logger.LogMessageToConsole(attributeNotFoundMessage);
                    messages.Add(attributeNotFoundMessage);
                    continue;
                }
                // Validate the attribute
                var attributeMessages = validateOneAttribute(attributeDescriptor, item.Value, validReferencesIdsDictionary);
                if (attributeMessages != null)
                {
                    messages.AddRange(attributeMessages);
                }
            }
            return(messages);
        }
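For context, a minimal call sketch is shown below; the validator instance, the dataset descriptor and the attribute names are illustrative assumptions, and only the dictionary shapes come from the method signature above.

        // Hedged usage sketch: 'validator', 'datasetDescriptor' and the attribute names are assumptions.
        var data = new Dictionary<string, List<object>>()
        {
            { "Name", new List<object>() { "Alice" } },
            { "Age",  new List<object>() { 42 } }
        };
        var validRefs = new Dictionary<string, List<long>>();

        List<Message> messages = validator.ValidateDataByApplicationDescriptor(datasetDescriptor, data, validRefs);
        // A non-empty list means at least one attribute failed validation.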
Example #4
        public void LoadDatabase()
        {
            int nIdx   = 0;
            int nTotal = 0;

            reportProgress(nIdx, nTotal, "Unpacking files...");
            string strTrainImagesBin = expandFile(m_param.TrainImagesFile);
            string strTrainLabelsBin = expandFile(m_param.TrainLabelsFile);
            string strTestImagesBin  = expandFile(m_param.TestImagesFile);
            string strTestLabelsBin  = expandFile(m_param.TestLabelsFile);

            reportProgress(nIdx, nTotal, "Loading database...");

            loadFile(strTrainImagesBin, strTrainLabelsBin, "MNIST.training");
            loadFile(strTestImagesBin, strTestLabelsBin, "MNIST.testing");

            DatasetFactory    factory  = new DatasetFactory();
            SourceDescriptor  srcTrain = factory.LoadSource("MNIST.training");
            SourceDescriptor  srcTest  = factory.LoadSource("MNIST.testing");
            DatasetDescriptor ds       = new DatasetDescriptor(0, "MNIST", null, null, srcTrain, srcTest, "MNIST", "MNIST Character Dataset");

            factory.AddDataset(ds);
            factory.UpdateDatasetCounts(ds.ID);

            if (OnCompleted != null)
            {
                OnCompleted(this, new EventArgs());
            }
        }
Example #5
        protected override DatasetDescriptor get_dataset_override(int nProjectID)
        {
            if (m_igym == null)
            {
                m_igym = m_colGyms.Find(m_strName);
            }

            m_ds = m_igym.GetDataset(DATA_TYPE.BLOB);

            return(m_ds);
        }
Example #6
        //-----------------------------------------------------------------------------------------
        //  Simplest Classification (using MyCaffeControl and MyCaffeImageDatabase)
        //-----------------------------------------------------------------------------------------

        /// <summary>
        /// The SimplestClassification shows how to use the MyCaffeControl (and internal MyCaffeImageDatabase) to train and test on the MNIST dataset.
        /// </summary>
        /// <param name="sender">Specifies the event sender.</param>
        /// <param name="e">Specifies the event args.</param>
        private void btnSimplestClassification_Click(object sender, EventArgs e)
        {
            DatasetFactory factory  = new DatasetFactory();
            Stopwatch      sw       = new Stopwatch();
            SettingsCaffe  settings = new SettingsCaffe();

            // Load all images into memory before training.
            settings.ImageDbLoadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL;
            // Use GPU ID = 0.
            settings.GpuIds = "0";

            string strSolver;
            string strModel;

            // Load the descriptors from their respective files (installed by MyCaffe Test Application install)
            load_descriptors("mnist", out strSolver, out strModel);

            // NOTE: model fixup is not needed since we will use the DATA layer, which pulls data from SQL or SQLEXPRESS via the MyCaffeImageDatabase.
            // Set the interval beyond the iterations to skip testing during solving.
            strSolver = fixup_solver(strSolver, 10000);

            // Load the MNIST dataset descriptor.
            DatasetDescriptor ds = factory.LoadDataset("MNIST");

            // Create a test project with the dataset and descriptors.
            ProjectEx project = new ProjectEx("Test");

            project.SetDataset(ds);
            project.ModelDescription  = strModel;
            project.SolverDescription = strSolver;
            project.WeightsState      = null;

            // Create the MyCaffeControl
            MyCaffeControl <float> mycaffe = new MyCaffeControl <float>(settings, m_log, m_evtCancel);

            // Load the project, using the TRAIN phase.
            mycaffe.Load(Phase.TRAIN, project);

            // Train the model for 5000 iterations (which uses the internal solver and internal training net)
            int nIterations = 5000;

            mycaffe.Train(nIterations);

            // Test the model for 100 iterations (which uses the internal solver and internal testing net)
            nIterations = 100;
            double dfAccuracy = mycaffe.Test(nIterations);

            // Report the testing accuracy.
            m_log.WriteLine("Accuracy = " + dfAccuracy.ToString("P"));

            MessageBox.Show("Average Accuracy = " + dfAccuracy.ToString("P"), "Train/Test on MNIST Completed", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
        public void TestIndexQuery()
        {
            PreTest.Init();
            Log log = new Log("Test Dataset Factory");

            log.EnableTrace = true;

            string         strDs   = "MNIST";
            DatasetFactory factory = new DatasetFactory();
            Stopwatch      sw      = new Stopwatch();

            try
            {
                DatasetDescriptor ds = factory.LoadDataset(strDs);
                factory.Open(ds.TrainingSource.ID);

                sw.Start();
                List <DbItem> rgItems = factory.LoadImageIndexes(false);
                sw.Stop();

                log.CHECK_EQ(rgItems.Count, ds.TrainingSource.ImageCount, "The query count should match the image count!");
                factory.Close();

                log.WriteLine("Query time = " + sw.Elapsed.TotalMilliseconds.ToString("N5") + " ms.");

                sw.Restart();

                int nMin = int.MaxValue;
                int nMax = -int.MaxValue;
                for (int i = 0; i < rgItems.Count; i++)
                {
                    nMin = Math.Min(rgItems[i].Label, nMin);
                    nMax = Math.Max(rgItems[i].Label, nMax);
                }

                List <DbItem> rgBoosted = rgItems.Where(p => p.Boost > 0).ToList();

                for (int nLabel = nMin; nLabel <= nMax; nLabel++)
                {
                    List <DbItem> rgLabel = rgItems.Where(p => p.Label == nLabel).ToList();
                }

                sw.Stop();

                log.WriteLine("Query time (profile) = " + sw.Elapsed.TotalMilliseconds.ToString("N5") + " ms.");
            }
            finally
            {
                factory.Dispose();
            }
        }
Example #8
        /// <summary>
        /// Initialize the DatasetEx by loading the training and testing data sources into memory.
        /// </summary>
        /// <param name="ds">Specifies the dataset to load.</param>
        /// <param name="rgAbort">Specifies a set of wait handles used to cancel the load.</param>
        /// <param name="nPadW">Optionally, specifies a pad to apply to the width of each item (default = 0).</param>
        /// <param name="nPadH">Optionally, specifies a pad to apply to the height of each item (default = 0).</param>
        /// <param name="log">Optionally, specifies an external Log to output status (default = null).</param>
        /// <param name="loadMethod">Optionally, specifies the load method to use (default = LOAD_ALL).</param>
        /// <param name="nImageDbLoadLimit">Optionally, specifies the load limit (default = 0).</param>
        /// <param name="bSkipMeanCheck">Optionally, specifies to skip the mean check (default = false).</param>
        /// <returns>Upon loading the dataset <i>true</i> is returned, otherwise on failure or abort <i>false</i> is returned.</returns>
        public bool Initialize(DatasetDescriptor ds, WaitHandle[] rgAbort, int nPadW = 0, int nPadH = 0, Log log = null, IMAGEDB_LOAD_METHOD loadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL, int nImageDbLoadLimit = 0, bool bSkipMeanCheck = false)
        {
            lock (m_syncObj)
            {
                if (loadMethod != IMAGEDB_LOAD_METHOD.LOAD_ALL && nImageDbLoadLimit > 0)
                {
                    throw new Exception("Currently the load-limit only works with the LOAD_ALLL image loading method.");
                }

                SimpleDatum imgMean = null;

                if (ds != null)
                {
                    m_ds = ds;
                }

                if (m_ds.TrainingSource.ImageWidth == -1 || m_ds.TrainingSource.ImageHeight == -1)
                {
                    log.WriteLine("WARNING: Cannot create a mean image for data sources that contain variable sized images.  The mean check will be skipped.");
                    bSkipMeanCheck = true;
                }

                m_TrainingImages = loadImageset("Training", m_ds.TrainingSource, rgAbort, ref imgMean, out m_nLastTrainingImageIdx, nPadW, nPadH, log, loadMethod, nImageDbLoadLimit, m_nLastTrainingImageIdx, (ds == null) ? true : false, bSkipMeanCheck);
                if (m_nLastTrainingImageIdx >= m_ds.TrainingSource.ImageCount)
                {
                    m_nLastTrainingImageIdx = 0;
                }

                if (EventWaitHandle.WaitAny(rgAbort, 0) != EventWaitHandle.WaitTimeout)
                {
                    return(false);
                }

                m_TestingImages = loadImageset("Testing", m_ds.TestingSource, rgAbort, ref imgMean, out m_nLastTestingImageIdx, nPadW, nPadH, log, loadMethod, nImageDbLoadLimit, m_nLastTestingImageIdx, (ds == null) ? true : false, bSkipMeanCheck);
                if (m_nLastTestingImageIdx >= m_ds.TestingSource.ImageCount)
                {
                    m_nLastTestingImageIdx = 0;
                }

                if (EventWaitHandle.WaitAny(rgAbort, 0) != EventWaitHandle.WaitTimeout)
                {
                    return(false);
                }

                return(true);
            }
        }
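A minimal call sketch for the Initialize overload above, assuming a DatasetEx-style instance named dsEx and an MNIST dataset already present in the database; the ManualResetEvent abort handle mirrors the pattern used in Example #12.

        // Hedged usage sketch: 'dsEx' is an assumed DatasetEx instance.
        DatasetFactory factory = new DatasetFactory();
        DatasetDescriptor ds = factory.LoadDataset("MNIST");

        WaitHandle[] rgAbort = new WaitHandle[] { new ManualResetEvent(false) };
        bool bLoaded = dsEx.Initialize(ds, rgAbort, 0, 0, new Log("load"), IMAGEDB_LOAD_METHOD.LOAD_ALL);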
        public void TestMean()
        {
            List <string> rgDs = new List <string>()
            {
                "MNIST", "CIFAR-10", "MNIST"
            };
            IXImageDatabase db = new MyCaffeImageDatabase();

            foreach (string strDs in rgDs)
            {
                SettingsCaffe settings = new SettingsCaffe();
                Stopwatch     sw       = new Stopwatch();

                sw.Start();
                db.InitializeWithDsName(settings, strDs);
                string str = sw.ElapsedMilliseconds.ToString();
                Trace.WriteLine(strDs + " Initialization Time: " + str + " ms.");

                DatasetDescriptor ds = db.GetDatasetByName(strDs);

                SimpleDatum d1 = db.QueryImageMean(ds.TrainingSource.ID);
                SimpleDatum d2 = db.QueryImageMeanFromDataset(ds.ID);
                SimpleDatum d3 = db.GetImageMean(ds.TrainingSource.ID);

                byte[] rgB1 = d1.ByteData;
                byte[] rgB2 = d2.ByteData;
                byte[] rgB3 = d3.ByteData;

                Assert.AreEqual(rgB1.Length, rgB2.Length);
                Assert.AreEqual(rgB2.Length, rgB3.Length);

                for (int i = 0; i < rgB1.Length; i++)
                {
                    Assert.AreEqual(rgB1[i], rgB2[i]);
                    Assert.AreEqual(rgB2[i], rgB3[i]);
                }
            }

            db.CleanUp();

            IDisposable idisp = db as IDisposable;

            if (idisp != null)
            {
                idisp.Dispose();
            }
        }
        public DataSequenceLayerTest(string strDs = null)
            : base("Data Sequence Layer Test")
        {
            m_settings = new SettingsCaffe();
            m_settings.EnableLabelBalancing       = false;
            m_settings.EnableLabelBoosting        = false;
            m_settings.EnablePairInputSelection   = false;
            m_settings.EnableRandomInputSelection = false;

            if (strDs != null && strDs.Length > 0)
            {
                m_db = createImageDb(null);
                m_db.InitializeWithDsName1(m_settings, strDs);

                DatasetDescriptor ds = m_db.GetDatasetByName(strDs);
                m_strSrc = ds.TrainingSourceName;
            }
        }
Example #11
        /// <summary>
        /// Initialize the DatasetEx by loading the training and testing data sources into memory.
        /// </summary>
        /// <param name="ds">Specifies the dataset to load.</param>
        /// <param name="rgAbort">Specifies a set of wait handles used to cancel the load.</param>
        /// <param name="nPadW">Optionally, specifies a pad to apply to the width of each item (default = 0).</param>
        /// <param name="nPadH">Optionally, specifies a pad to apply to the height of each item (default = 0).</param>
        /// <param name="log">Optionally, specifies an external Log to output status (default = null).</param>
        /// <param name="loadMethod">Optionally, specifies the load method to use (default = LOAD_ALL).</param>
        /// <param name="nImageDbLoadLimit">Optionally, specifies the load limit (default = 0).</param>
        /// <returns>Upon loading the dataset <i>true</i> is returned, otherwise on failure or abort <i>false</i> is returned.</returns>
        public bool Initialize(DatasetDescriptor ds, WaitHandle[] rgAbort, int nPadW = 0, int nPadH = 0, Log log = null, IMAGEDB_LOAD_METHOD loadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL, int nImageDbLoadLimit = 0)
        {
            lock (m_syncObj)
            {
                if (loadMethod != IMAGEDB_LOAD_METHOD.LOAD_ALL && nImageDbLoadLimit > 0)
                {
                    throw new Exception("Currently the load-limit only works with the LOAD_ALLL image loading method.");
                }

                SimpleDatum imgMean = null;

                if (ds != null)
                {
                    m_ds = ds;
                }

                m_TrainingImages = loadImageset("Training", m_ds.TrainingSource, rgAbort, ref imgMean, out m_nLastTrainingImageIdx, nPadW, nPadH, log, loadMethod, nImageDbLoadLimit, m_nLastTrainingImageIdx, (ds == null) ? true : false);
                if (m_nLastTrainingImageIdx >= m_ds.TrainingSource.ImageCount)
                {
                    m_nLastTrainingImageIdx = 0;
                }

                if (EventWaitHandle.WaitAny(rgAbort, 0) != EventWaitHandle.WaitTimeout)
                {
                    return(false);
                }

                m_TestingImages = loadImageset("Testing", m_ds.TestingSource, rgAbort, ref imgMean, out m_nLastTestingImageIdx, nPadW, nPadH, log, loadMethod, nImageDbLoadLimit, m_nLastTestingImageIdx, (ds == null) ? true : false);
                if (m_nLastTestingImageIdx >= m_ds.TestingSource.ImageCount)
                {
                    m_nLastTestingImageIdx = 0;
                }

                if (EventWaitHandle.WaitAny(rgAbort, 0) != EventWaitHandle.WaitTimeout)
                {
                    return(false);
                }

                return(true);
            }
        }
Example #12
        public void LoadDatabase()
        {
            int nIdx   = 0;
            int nTotal = 50000;

            reportProgress(nIdx, 0, "Loading database...");

            Log log = new Log("CIFAR-10");

            log.OnWriteLine += Log_OnWriteLine;

            DatasetFactory factory = new DatasetFactory();

            loadFile(m_param.DataBatchFile1, "CIFAR-10.training", nTotal, ref nIdx);
            loadFile(m_param.DataBatchFile2, "CIFAR-10.training", nTotal, ref nIdx);
            loadFile(m_param.DataBatchFile3, "CIFAR-10.training", nTotal, ref nIdx);
            loadFile(m_param.DataBatchFile4, "CIFAR-10.training", nTotal, ref nIdx);
            loadFile(m_param.DataBatchFile5, "CIFAR-10.training", nTotal, ref nIdx);
            SourceDescriptor srcTrain = factory.LoadSource("CIFAR-10.training");

            m_factory.SaveImageMean(SimpleDatum.CalculateMean(log, m_rgImg.ToArray(), new WaitHandle[] { new ManualResetEvent(false) }), true, srcTrain.ID);

            m_rgImg = new List <SimpleDatum>();
            nIdx    = 0;
            nTotal  = 10000;
            loadFile(m_param.TestBatchFile, "CIFAR-10.testing", nTotal, ref nIdx);
            SourceDescriptor srcTest = factory.LoadSource("CIFAR-10.testing");

            m_factory.SaveImageMean(SimpleDatum.CalculateMean(log, m_rgImg.ToArray(), new WaitHandle[] { new ManualResetEvent(false) }), true, srcTest.ID);

            DatasetDescriptor ds = new DatasetDescriptor(0, "CIFAR-10", null, null, srcTrain, srcTest, "CIFAR-10", "CIFAR-10 Dataset");

            factory.AddDataset(ds);
            factory.UpdateDatasetCounts(ds.ID);

            if (OnCompleted != null)
            {
                OnCompleted(this, new EventArgs());
            }
        }
Example #13
        /// <summary>
        /// Releases all resources used.
        /// </summary>
        /// <param name="bDisposing">Set to <i>true</i> when called by Dispose().</param>
        protected virtual void Dispose(bool bDisposing)
        {
            m_ds = null;

            if (m_TestingImages != null)
            {
                m_TestingImages.Dispose();
                m_TestingImages = null;
            }

            if (m_TrainingImages != null)
            {
                m_TrainingImages.Dispose();
                m_TrainingImages = null;
            }

            if (m_factory != null)
            {
                m_factory.Dispose();
                m_factory = null;
            }
        }
Example #14
        /// <summary>
        /// Initialize the gym with the specified properties.
        /// </summary>
        /// <param name="log">Specifies the output log to use.</param>
        /// <param name="properties">Specifies the properties containing Gym specific initialization parameters.</param>
        /// <remarks>
        /// The ModelGym uses the following initialization properties.
        ///
        /// 'GpuID' - the GPU to run on.
        /// 'ModelDescription' - the model description of the model to use.
        /// 'Dataset' - the name of the dataset to use.
        /// 'Weights' - the model trained weights.
        /// 'CudaPath' - the path of the CudaDnnDLL to use.
        /// 'BatchSize' - the batch size used when running images through the model (default = 16).
        /// 'RecreateData' - when 'True' the data is re-run through the model, otherwise if already run the data is loaded from file (faster).
        /// </remarks>
        public void Initialize(Log log, PropertySet properties)
        {
            m_nGpuID        = properties.GetPropertyAsInt("GpuID");
            m_strModelDesc  = properties.GetProperty("ModelDescription");
            m_strDataset    = properties.GetProperty("Dataset");
            m_rgWeights     = properties.GetPropertyBlob("Weights");
            m_nBatchSize    = properties.GetPropertyAsInt("BatchSize", 16);
            m_bRecreateData = properties.GetPropertyAsBool("RecreateData", false);
            m_strProject    = properties.GetProperty("ProjectName");
            if (string.IsNullOrEmpty(m_strProject))
            {
                m_strProject = "default";
            }

            string strCudaPath = properties.GetProperty("CudaPath");

            SettingsCaffe s = new SettingsCaffe();

            s.GpuIds            = m_nGpuID.ToString();
            s.ImageDbLoadMethod = IMAGEDB_LOAD_METHOD.LOAD_ON_DEMAND_BACKGROUND;

            m_imgdb = new MyCaffeImageDatabase2(log);
            m_imgdb.InitializeWithDsName1(s, m_strDataset);
            m_ds = m_imgdb.GetDatasetByName(m_strDataset);

            SimpleDatum sd    = m_imgdb.QueryImage(m_ds.TrainingSource.ID, 0, IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.NONE);
            BlobShape   shape = new BlobShape(1, sd.Channels, sd.Height, sd.Width);

            if (m_evtCancel == null)
            {
                m_evtCancel = new CancelEvent();
            }

            m_mycaffe = new MyCaffeControl <float>(s, log, m_evtCancel, null, null, null, null, strCudaPath);
            m_mycaffe.LoadToRun(m_strModelDesc, m_rgWeights, shape);

            m_log = log;
        }
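The remarks above list the property names this gym reads; a hedged sketch of assembling such a PropertySet follows. The SetProperty calls and the modelGym instance are assumptions about the surrounding API - only the key names and the BatchSize default come from the remarks.

        // Hedged sketch: key names follow the remarks; SetProperty is an assumed PropertySet API.
        PropertySet props = new PropertySet();
        props.SetProperty("GpuID", "0");
        props.SetProperty("Dataset", "MNIST");
        props.SetProperty("BatchSize", "16");          // default = 16
        props.SetProperty("RecreateData", "False");
        props.SetProperty("CudaPath", "C:\\Program Files\\SignalPop\\MyCaffe\\cuda_11.3\\CudaDnnDll.11.3.dll");
        // 'ModelDescription' and the 'Weights' blob must also be supplied before calling:
        modelGym.Initialize(new Log("model gym"), props);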
Example #15
        /// <summary>
        /// Returns the dataset descriptor of the dynamic dataset produced by the Gym.
        /// </summary>
        /// <param name="dt">Specifies the data-type to use.</param>
        /// <param name="log">Optionally, specifies the output log to use (default = <i>null</i>).</param>
        /// <returns>The dataset descriptor is returned.</returns>
        public DatasetDescriptor GetDataset(DATA_TYPE dt, Log log = null)
        {
            if (dt == DATA_TYPE.DEFAULT)
            {
                dt = DATA_TYPE.BLOB;
            }

            if (dt != DATA_TYPE.BLOB)
            {
                if (log == null)
                {
                    log = m_log;
                }

                if (log != null)
                {
                    log.WriteLine("WARNING: This gym only supports the BLOB type, the datatype will be changed to BLOB.");
                }
                else
                {
                    throw new Exception("This gym only supports the BLOB type.");
                }

                dt = DATA_TYPE.BLOB;
            }

            int nH = 80;
            int nW = 80;
            int nC = 1;

            SourceDescriptor  srcTrain = new SourceDescriptor((int)GYM_SRC_TRAIN_ID.ATARI, Name + ".training", nW, nH, nC, false, false);
            SourceDescriptor  srcTest  = new SourceDescriptor((int)GYM_SRC_TEST_ID.ATARI, Name + ".testing", nW, nH, nC, false, false);
            DatasetDescriptor ds       = new DatasetDescriptor((int)GYM_DS_ID.ATARI, Name, null, null, srcTrain, srcTest, "AtariGym", "Atari Gym", null, GYM_TYPE.DYNAMIC);

            m_dt = dt;

            return(ds);
        }
        private ProjectEx getProject()
        {
            ProjectEx p = new ProjectEx("AlexNet Project");

            DatasetFactory    factory = new DatasetFactory();
            DatasetDescriptor ds      = factory.LoadDataset("CIFAR-10");

            p.SetDataset(ds);

            string strModelFile  = getTestPath("\\MyCaffe\\test_data\\models\\alexnet\\cifar\\alexnet_cifar_train_val.prototxt");
            string strSolverFile = getTestPath("\\MyCaffe\\test_data\\models\\alexnet\\cifar\\alexnet_cifar_solver.prototxt");

            p.LoadModelFile(strModelFile);
            RawProto proto = RawProtoFile.LoadFromFile(strSolverFile);

            RawProto iter = proto.FindChild("max_iter");

            iter.Value = m_nMaxIteration.ToString();

            p.SolverDescription = proto.ToString();

            return(p);
        }
Example #17
        /// <summary>
        /// Releases all resources used.
        /// </summary>
        /// <param name="bDisposing">Set to <i>true</i> when called by Dispose().</param>
        protected virtual void Dispose(bool bDisposing)
        {
            m_ds = null;

            StopAutomaticRefreshSchedule(true, true);

            if (m_TestingImages != null)
            {
                m_TestingImages.Dispose();
                m_TestingImages = null;
            }

            if (m_TrainingImages != null)
            {
                m_TrainingImages.Dispose();
                m_TrainingImages = null;
            }

            if (m_factory != null)
            {
                m_factory.Dispose();
                m_factory = null;
            }
        }
Example #18
        /// <summary>
        /// This method validates a single application attribute.
        /// </summary>
        /// <param name="applicationDescriptor">Application descriptor to validate</param>
        /// <param name="datasetDescriptor">Dataset the attribute is from</param>
        /// <param name="attributeDescriptor">Attribute to validate</param>
        /// <param name="isPassword">True if attribute is a password attribute</param>
        /// <returns>List of messages</returns>
        List <Message> validateOneAttribute(ApplicationDescriptor applicationDescriptor, DatasetDescriptor datasetDescriptor, AttributeDescriptor attributeDescriptor, bool isPassword = false)
        {
            var messages = new List <Message>();

            // Attribute name cannot contain {number}, because of error message placeholders
            if (Regex.IsMatch(attributeDescriptor.Name, "{[0-9]*}"))
            {
                messages.Add(new Message(MessageTypeEnum.Error,
                                         0029,
                                         new List <string>()
                {
                    attributeDescriptor.Name, datasetDescriptor.Name
                }));
            }

            // Not a basic type
            if (!AttributeType.Types.Contains(attributeDescriptor.Type))
            {
                var validReferences = applicationDescriptor.Datasets.Select(d => d.Name).ToList();
                validReferences.Add(applicationDescriptor.SystemDatasets.UsersDatasetDescriptor.Name);
                // Invalid reference
                if (!validReferences.Contains(attributeDescriptor.Type))
                {
                    messages.Add(new Message(MessageTypeEnum.Error,
                                             0010,
                                             new List <string>()
                    {
                        datasetDescriptor.Name, attributeDescriptor.Name, attributeDescriptor.Type
                    }));
                }
                // Valid reference
                else
                {
                    // Must have OnDeleteAction
                    if (attributeDescriptor.OnDeleteAction == OnDeleteActionEnum.None)
                    {
                        messages.Add(new Message(MessageTypeEnum.Error,
                                                 0011,
                                                 new List <string>()
                        {
                            attributeDescriptor.Name, datasetDescriptor.Name
                        }));
                    }
                    // And if the attribute is in the system users dataset, the OnDeleteAction cannot be Cascade,
                    // to prevent accidentally deleting the last user of the application.
                    if (datasetDescriptor == applicationDescriptor.SystemDatasets.UsersDatasetDescriptor &&
                        attributeDescriptor.OnDeleteAction == OnDeleteActionEnum.Cascade)
                    {
                        messages.Add(new Message(MessageTypeEnum.Error,
                                                 0012,
                                                 new List <string>()
                        {
                            attributeDescriptor.Name, datasetDescriptor.Name, attributeDescriptor.Type
                        }));
                    }
                    // If Min is set, Required cannot be false
                    if (attributeDescriptor.Min != null && attributeDescriptor.Required == false)
                    {
                        messages.Add(new Message(MessageTypeEnum.Error,
                                                 0024,
                                                 new List <string>()
                        {
                            attributeDescriptor.Name, datasetDescriptor.Name
                        }));
                    }
                }
            }
            // A basic type
            else
            {
                // Password attribute
                if (isPassword)
                {
                    // Must have its type set to password
                    if (attributeDescriptor.Type != "password")
                    {
                        messages.Add(new Message(MessageTypeEnum.Error,
                                                 0009,
                                                 new List <string>()
                        {
                            attributeDescriptor.Name
                        }));
                    }
                    // Must be required
                    if (attributeDescriptor.Required != true)
                    {
                        messages.Add(new Message(MessageTypeEnum.Error,
                                                 0014,
                                                 new List <string>()
                        {
                            attributeDescriptor.Name
                        }));
                    }
                }
                // Not a password
                else
                {
                    // If isPassword flag is not set, attribute type cannot be "password"
                    if (attributeDescriptor.Type == "password")
                    {
                        messages.Add(new Message(MessageTypeEnum.Error,
                                                 0015,
                                                 new List <string>()
                        {
                            attributeDescriptor.Name, datasetDescriptor.Name
                        }));
                    }
                }
                // Cannot have an OnDeleteAction other than None
                if (attributeDescriptor.OnDeleteAction != OnDeleteActionEnum.None)
                {
                    messages.Add(new Message(MessageTypeEnum.Error,
                                             0013,
                                             new List <string>()
                    {
                        attributeDescriptor.Name, datasetDescriptor.Name
                    }));
                }
            }
            // All types
            // Safer can be set only for password
            if (!isPassword)
            {
                if (attributeDescriptor.Safer != null)
                {
                    messages.Add(new Message(MessageTypeEnum.Error,
                                             0016,
                                             new List <string>()
                    {
                        attributeDescriptor.Name, datasetDescriptor.Name
                    }));
                }
            }
            // Min <= Max if both set
            if (attributeDescriptor.Min != null && attributeDescriptor.Max != null && attributeDescriptor.Min > attributeDescriptor.Max)
            {
                messages.Add(new Message(MessageTypeEnum.Error,
                                         0017,
                                         new List <string>()
                {
                    attributeDescriptor.Name, datasetDescriptor.Name
                }));
            }
            // Min and Max cannot be set for color, date, datetime, email, month, phone, time, url and bool
            if (attributeDescriptor.Type == "color" || attributeDescriptor.Type == "date" ||
                attributeDescriptor.Type == "datetime" || attributeDescriptor.Type == "email" ||
                attributeDescriptor.Type == "month" || attributeDescriptor.Type == "phone" ||
                attributeDescriptor.Type == "time" || attributeDescriptor.Type == "url" ||
                attributeDescriptor.Type == "bool")
            {
                if (attributeDescriptor.Min != null)
                {
                    messages.Add(new Message(MessageTypeEnum.Error,
                                             0027,
                                             new List <string>()
                    {
                        attributeDescriptor.Name, datasetDescriptor.Name
                    }));
                }
                if (attributeDescriptor.Max != null)
                {
                    messages.Add(new Message(MessageTypeEnum.Error,
                                             0028,
                                             new List <string>()
                    {
                        attributeDescriptor.Name, datasetDescriptor.Name
                    }));
                }
            }
            else // Attributes that can have Min and Max set
            {
                // Min > 0 and Max > 0 for text, string, username, password and references
                if (attributeDescriptor.Type == "text" || attributeDescriptor.Type == "string" ||
                    attributeDescriptor.Type == "username" || attributeDescriptor.Type == "password" ||
                    !AttributeType.Types.Contains(attributeDescriptor.Type))
                {
                    // Min > 0
                    if (attributeDescriptor.Min != null && attributeDescriptor.Min <= 0)
                    {
                        messages.Add(new Message(MessageTypeEnum.Error,
                                                 0018,
                                                 new List <string>()
                        {
                            attributeDescriptor.Name, datasetDescriptor.Name
                        }));
                    }
                    // Max > 0
                    if (attributeDescriptor.Max != null && attributeDescriptor.Max <= 0)
                    {
                        messages.Add(new Message(MessageTypeEnum.Error,
                                                 0019,
                                                 new List <string>()
                        {
                            attributeDescriptor.Name, datasetDescriptor.Name
                        }));
                    }
                }
            }

            return(messages);
        }
        /// <summary>
        /// Create the dataset and load it into the database.
        /// </summary>
        /// <param name="nCreatorID">Specifies the creator ID.</param>
        /// <returns>On successful creation, <i>true</i> is returned, otherwise <i>false</i> is returned on abort.</returns>
        public bool LoadDatabase(int nCreatorID = 0)
        {
            try
            {
                int nIdx   = 0;
                int nTotal = 50000;

                reportProgress(nIdx, 0, "Loading database " + dataset_name + "...");

                DatasetFactory factory = new DatasetFactory();

                string strTrainSrc = dataset_name + ".training";
                int    nSrcId      = factory.GetSourceID(strTrainSrc);
                if (nSrcId != 0)
                {
                    factory.DeleteSourceData(nSrcId);
                }

                if (!loadFile(m_param.DataBatchFile1, strTrainSrc, nTotal, ref nIdx, m_log))
                {
                    return(false);
                }

                if (!loadFile(m_param.DataBatchFile2, strTrainSrc, nTotal, ref nIdx, m_log))
                {
                    return(false);
                }

                if (!loadFile(m_param.DataBatchFile3, strTrainSrc, nTotal, ref nIdx, m_log))
                {
                    return(false);
                }

                if (!loadFile(m_param.DataBatchFile4, strTrainSrc, nTotal, ref nIdx, m_log))
                {
                    return(false);
                }

                if (!loadFile(m_param.DataBatchFile5, strTrainSrc, nTotal, ref nIdx, m_log))
                {
                    return(false);
                }

                SourceDescriptor srcTrain = factory.LoadSource(strTrainSrc);
                m_factory.SaveImageMean(SimpleDatum.CalculateMean(m_log, m_rgImg.ToArray(), new WaitHandle[] { new ManualResetEvent(false) }), true, srcTrain.ID);

                m_rgImg = new List <SimpleDatum>();
                nIdx    = 0;
                nTotal  = 10000;

                string strTestSrc = dataset_name + ".testing";
                nSrcId = factory.GetSourceID(strTestSrc);
                if (nSrcId != 0)
                {
                    factory.DeleteSourceData(nSrcId);
                }

                if (!loadFile(m_param.TestBatchFile, strTestSrc, nTotal, ref nIdx, m_log))
                {
                    return(false);
                }

                SourceDescriptor srcTest = factory.LoadSource(strTestSrc);
                m_factory.SaveImageMean(SimpleDatum.CalculateMean(m_log, m_rgImg.ToArray(), new WaitHandle[] { new ManualResetEvent(false) }), true, srcTest.ID);

                DatasetDescriptor ds = new DatasetDescriptor(nCreatorID, dataset_name, null, null, srcTrain, srcTest, dataset_name, dataset_name + " Dataset");
                factory.AddDataset(ds);
                factory.UpdateDatasetCounts(ds.ID);

                return(true);
            }
            catch (Exception)
            {
                // Rethrow without resetting the original stack trace.
                throw;
            }
            finally
            {
                if (OnCompleted != null)
                {
                    OnCompleted(this, new EventArgs());
                }
            }
        }
Example #20
        public void TestQueryRandom2(IMAGEDB_LOAD_METHOD loadMethod, int nLoadLimit)
        {
            List <string> rgDs = new List <string>()
            {
                "MNIST", "CIFAR-10", "MNIST"
            };

            foreach (string strDs in rgDs)
            {
                IXImageDatabase db = new MyCaffeImageDatabase();

                SettingsCaffe settings = new SettingsCaffe();
                settings.ImageDbLoadMethod = loadMethod;
                settings.ImageDbLoadLimit  = nLoadLimit;

                Stopwatch sw = new Stopwatch();

                sw.Start();
                db.InitializeWithDsName(settings, strDs);
                db.SetSelectionMethod(IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.NONE);
                string str = sw.ElapsedMilliseconds.ToString();
                Trace.WriteLine(strDs + " Initialization Time: " + str + " ms.");

                DatasetDescriptor ds = db.GetDatasetByName(strDs);
                Dictionary <int, List <SimpleDatum> > rg = new Dictionary <int, List <SimpleDatum> >();
                Dictionary <int, int> rgCounts           = new Dictionary <int, int>();

                int    nCount     = 10000;
                double dfTotalMs  = 0;
                int    nCount1    = 0;
                double dfTotalMs1 = 0;

                Stopwatch swTimer = new Stopwatch();
                swTimer.Start();


                // Randomly query each image and count up the number of times a given label is hit.
                for (int i = 0; i < nCount; i++)
                {
                    sw.Reset();
                    sw.Start();
                    SimpleDatum d = db.QueryImage(ds.TrainingSource.ID, 0, IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.RANDOM);
                    sw.Stop();
                    dfTotalMs  += sw.ElapsedMilliseconds;
                    dfTotalMs1 += sw.ElapsedMilliseconds;
                    nCount1++;

                    if (!rg.Keys.Contains(d.Index))
                    {
                        rg.Add(d.Index, new List <SimpleDatum>()
                        {
                            d
                        });
                    }
                    else
                    {
                        rg[d.Index].Add(d);
                    }

                    if (!rgCounts.Keys.Contains(d.Label))
                    {
                        rgCounts.Add(d.Label, 1);
                    }
                    else
                    {
                        rgCounts[d.Label]++;
                    }

                    if (swTimer.Elapsed.TotalMilliseconds > 2000)
                    {
                        double dfPct = (double)i / (double)nCount;
                        Trace.WriteLine("(" + dfPct.ToString("P") + ") ave time = " + (dfTotalMs1 / nCount1).ToString("N3") + " ms.");
                        dfTotalMs1 = 0;
                        nCount1    = 0;
                        swTimer.Restart();
                    }
                }

                // Total the label counts and calculate the average and stddev.
                List <KeyValuePair <int, int> > rgCountsNoLabelBalancing = rgCounts.OrderBy(p => p.Key).ToList();
                Trace.WriteLine("NO LABEL BALANCING COUNTS");

                CalculationArray ca = new CalculationArray();
                foreach (KeyValuePair <int, int> kv in rgCountsNoLabelBalancing)
                {
                    ca.Add(kv.Value);
                    Trace.WriteLine(kv.Key + " -> " + kv.Value.ToString("N0"));
                }

                double dfAve     = ca.Average;
                double dfStdDev1 = ca.CalculateStandardDeviation(dfAve);

                Trace.WriteLine("Average = " + dfAve.ToString());
                Trace.WriteLine("StdDev = " + dfStdDev1.ToString());

                // Load the labels by first selecting the label randomly and then the image randomly from the label set.
                rg       = new Dictionary <int, List <SimpleDatum> >();
                rgCounts = new Dictionary <int, int>();

                for (int i = 0; i < nCount; i++)
                {
                    sw.Reset();
                    sw.Start();
                    SimpleDatum d = db.QueryImage(ds.TrainingSource.ID, 0, IMGDB_LABEL_SELECTION_METHOD.RANDOM, IMGDB_IMAGE_SELECTION_METHOD.RANDOM);
                    dfTotalMs += sw.ElapsedMilliseconds;
                    sw.Stop();

                    if (!rg.Keys.Contains(d.Index))
                    {
                        rg.Add(d.Index, new List <SimpleDatum>()
                        {
                            d
                        });
                    }
                    else
                    {
                        rg[d.Index].Add(d);
                    }

                    if (!rgCounts.Keys.Contains(d.Label))
                    {
                        rgCounts.Add(d.Label, 1);
                    }
                    else
                    {
                        rgCounts[d.Label]++;
                    }
                }

                // Total the balanced label counts and calculate the average and stddev.
                List <KeyValuePair <int, int> > rgCountsLabelBalancing = rgCounts.OrderBy(p => p.Key).ToList();
                Trace.WriteLine("LABEL BALANCING COUNTS");

                ca = new CalculationArray();

                foreach (KeyValuePair <int, int> kv in rgCountsLabelBalancing)
                {
                    ca.Add(kv.Value);
                    Trace.WriteLine(kv.Key + " -> " + kv.Value.ToString("N0"));
                }

                dfAve = ca.Average;
                double dfStdDev2 = ca.CalculateStandardDeviation(dfAve);

                Trace.WriteLine("Average = " + dfAve.ToString());
                Trace.WriteLine("StdDev = " + dfStdDev2.ToString());

                Assert.AreEqual(true, dfStdDev2 < dfStdDev1 * 1.5);

                str = (dfTotalMs / (double)(nCount * 2)).ToString();
                Trace.WriteLine("Average Query Time: " + str + " ms.");

                db.CleanUp();

                IDisposable idisp = db as IDisposable;
                if (idisp != null)
                {
                    idisp.Dispose();
                }
            }
        }
Example #21
        /// <summary>
        /// Initialize the DatasetEx by loading the training and testing data sources into memory.
        /// </summary>
        /// <param name="ds">Specifies the dataset to load.</param>
        /// <param name="rgAbort">Specifies a set of wait handles used to cancel the load.</param>
        /// <param name="nPadW">Optionally, specifies a pad to apply to the width of each item (default = 0).</param>
        /// <param name="nPadH">Optionally, specifies a pad to apply to the height of each item (default = 0).</param>
        /// <param name="log">Optionally, specifies an external Log to output status (default = null).</param>
        /// <param name="loadMethod">Optionally, specifies the load method to use (default = LOAD_ALL).</param>
        /// <param name="bSkipMeanCheck">Optionally, specifies to skip the mean check (default = false).</param>
        /// <param name="nImageDbLoadLimit">Optionally, specifies the load limit (default = 0).</param>
        /// <param name="nImageDbAutoRefreshScheduledUpdateInMs">Optionally, specifies the scheduled refresh update period in ms (default = 0).</param>
        /// <param name="dfImageDbAutoRefreshScheduledReplacementPct">Optionally, specifies the scheduled refresh replacement percent (default = 0).</param>
        /// <returns>Upon loading the dataset a handle to the default QueryState is returned, or 0 on cancel.</returns>
        public long Initialize(DatasetDescriptor ds, WaitHandle[] rgAbort, int nPadW = 0, int nPadH = 0, Log log = null, IMAGEDB_LOAD_METHOD loadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL, bool bSkipMeanCheck = false, int nImageDbLoadLimit = 0, int nImageDbAutoRefreshScheduledUpdateInMs = 0, double dfImageDbAutoRefreshScheduledReplacementPct = 0)
        {
            lock (m_syncObj)
            {
                m_log = log;

                if (ds != null)
                {
                    m_ds = ds;
                }

                if (m_ds.TrainingSource.ImageWidth == -1 || m_ds.TrainingSource.ImageHeight == -1)
                {
                    log.WriteLine("WARNING: Cannot create a mean image for data sources that contain variable sized images.  The mean check will be skipped.");
                    bSkipMeanCheck = true;
                }

                bool bSilentLoad = (loadMethod == IMAGEDB_LOAD_METHOD.LOAD_ON_DEMAND_BACKGROUND) ? true : false;

                m_TrainingImages = new ImageSet2(ImageSet2.TYPE.TRAIN, log, m_factory, m_ds.TrainingSource, loadMethod, m_random, rgAbort);
                m_TrainingImages.OnCalculateImageMean += OnCalculateImageMean;
                QueryState  qsTraining = m_TrainingImages.Initialize(bSilentLoad, true, true, nImageDbLoadLimit);
                SimpleDatum sdMean     = null;

                if (!bSkipMeanCheck)
                {
                    bool bQueryOnly = false;
                    if (EntitiesConnection.GlobalDatabaseConnectInfo.Location == ConnectInfo.TYPE.AZURE)
                    {
                        bQueryOnly = true;
                    }

                    sdMean = m_TrainingImages.GetImageMean(log, rgAbort, bQueryOnly);
                }

                if (EventWaitHandle.WaitAny(rgAbort, 0) != EventWaitHandle.WaitTimeout)
                {
                    return(0);
                }

                m_TestingImages = new ImageSet2(ImageSet2.TYPE.TEST, log, m_factory, m_ds.TestingSource, loadMethod, m_random, rgAbort);
                m_TestingImages.OnCalculateImageMean += OnCalculateImageMean;
                QueryState qsTesting = m_TestingImages.Initialize(bSilentLoad, true, true, nImageDbLoadLimit);

                if (!bSkipMeanCheck)
                {
                    bool bSave = true;
                    if (EntitiesConnection.GlobalDatabaseConnectInfo.Location == ConnectInfo.TYPE.AZURE)
                    {
                        bSave = false;
                    }

                    m_TestingImages.SetImageMean(sdMean, bSave);
                }

                if (EventWaitHandle.WaitAny(rgAbort, 0) != EventWaitHandle.WaitTimeout)
                {
                    return(0);
                }

                if (loadMethod == IMAGEDB_LOAD_METHOD.LOAD_ALL && nImageDbAutoRefreshScheduledUpdateInMs > 0 && dfImageDbAutoRefreshScheduledReplacementPct > 0)
                {
                    StartAutomaticRefreshSchedule(true, true, nImageDbAutoRefreshScheduledUpdateInMs, dfImageDbAutoRefreshScheduledReplacementPct);
                }

                m_lDefaultQueryState = m_queryStates.CreateNewState(qsTraining, qsTesting);
                return(m_lDefaultQueryState);
            }
        }
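A brief hedged sketch of checking the returned handle; dsEx2 stands in for whatever class hosts this Initialize overload, and ds is a dataset descriptor loaded as in the other examples.

        // Hedged usage sketch: a return value of 0 indicates the load was cancelled.
        long lQueryState = dsEx2.Initialize(ds, new WaitHandle[] { new ManualResetEvent(false) });
        if (lQueryState == 0)
            Console.WriteLine("Dataset load was cancelled.");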
Example #22
        static void Main(string[] args)
        {
            if (!sqlCheck())
            {
                return;
            }

            Log log = new Log("test");

            log.OnWriteLine += Log_OnWriteLine;
            CancelEvent   cancel   = new CancelEvent();
            SettingsCaffe settings = new SettingsCaffe();

            // Load all images into memory before training.
            settings.ImageDbLoadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL;
            // Use GPU ID = 0
            settings.GpuIds = "0";

            // Load the descriptors from their respective files
            string strSolver = load_file("C:\\ProgramData\\MyCaffe\\test_data\\models\\siamese\\mnist\\solver.prototxt");
            string strModel  = load_file("C:\\ProgramData\\MyCaffe\\test_data\\models\\siamese\\mnist\\train_val.prototxt");

            RawProto       proto     = RawProto.Parse(strModel);
            NetParameter   net_param = NetParameter.FromProto(proto);
            LayerParameter layer     = net_param.FindLayer(LayerParameter.LayerType.DECODE);

            layer.decode_param.target = DecodeParameter.TARGET.CENTROID;
            proto    = net_param.ToProto("root");
            strModel = proto.ToString();

            // Load the MNIST data descriptor.
            DatasetFactory    factory = new DatasetFactory();
            DatasetDescriptor ds      = factory.LoadDataset("MNIST");

            // Create a test project with the dataset and descriptors
            ProjectEx project = new ProjectEx("Test");

            project.SetDataset(ds);
            project.ModelDescription  = strModel;
            project.SolverDescription = strSolver;

            // Create the MyCaffeControl (with the 'float' base type)
            string strCudaPath             = "C:\\Program Files\\SignalPop\\MyCaffe\\cuda_11.3\\CudaDnnDll.11.3.dll";
            MyCaffeControl <float> mycaffe = new MyCaffeControl <float>(settings, log, cancel, null, null, null, null, strCudaPath);

            // Load the project, using the TRAIN phase.
            mycaffe.Load(Phase.TRAIN, project);

            // Train the model for 4000 iterations
            // (which uses the internal solver and internal training net)
            int nIterations = 4000;

            mycaffe.Train(nIterations);

            // Test the model for 100 iterations
            // (which uses the internal testing net)
            nIterations = 100;
            double dfAccuracy = mycaffe.Test(nIterations);

            // Report the testing accuracy.
            log.WriteLine("Accuracy = " + dfAccuracy.ToString("P"));

            mycaffe.Dispose();

            Console.Write("Press any key...");
            Console.ReadKey();
        }
Example #23
        public void TestQuerySequential(IMAGEDB_LOAD_METHOD loadMethod, int nLoadLimit)
        {
            List <string> rgDs = new List <string>()
            {
                "MNIST", "CIFAR-10", "MNIST"
            };
            IXImageDatabase db = new MyCaffeImageDatabase();

            foreach (string strDs in rgDs)
            {
                DatasetFactory df  = new DatasetFactory();
                int            nDs = df.GetDatasetID(strDs);
                if (nDs == 0)
                {
                    throw new Exception("The dataset '" + strDs + "' does not exist - you need to load it.");
                }

                SettingsCaffe settings = new SettingsCaffe();
                settings.ImageDbLoadMethod = loadMethod;
                settings.ImageDbLoadLimit  = nLoadLimit;

                Stopwatch sw = new Stopwatch();

                sw.Start();
                db.InitializeWithDsName(settings, strDs);
                string str = sw.ElapsedMilliseconds.ToString();
                Trace.WriteLine(strDs + " Initialization Time: " + str + " ms.");

                DatasetDescriptor ds = db.GetDatasetByName(strDs);
                Dictionary <int, List <SimpleDatum> > rg = new Dictionary <int, List <SimpleDatum> >();

                int    nCount    = 100;
                double dfTotalMs = 0;

                for (int i = 0; i < nCount; i++)
                {
                    sw.Reset();
                    sw.Start();
                    SimpleDatum d = db.QueryImage(ds.TrainingSource.ID, 0, IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.NONE);
                    dfTotalMs += sw.ElapsedMilliseconds;
                    sw.Stop();

                    if (!rg.Keys.Contains(d.Index))
                    {
                        rg.Add(d.Index, new List <SimpleDatum>()
                        {
                            d
                        });
                    }
                    else
                    {
                        rg[d.Index].Add(d);
                    }
                }

                str = (dfTotalMs / (double)nCount).ToString();
                Trace.WriteLine("Average Query Time: " + str + " ms.");

                // Verify sequential selection - the same index is queried each time, so every returned item should share one index.

                foreach (KeyValuePair <int, List <SimpleDatum> > kv in rg)
                {
                    Assert.AreEqual(kv.Value.Count, nCount);
                }
            }

            db.CleanUp();

            IDisposable idisp = db as IDisposable;

            if (idisp != null)
            {
                idisp.Dispose();
            }
        }
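The index-accumulation pattern in the query loop above (start a new list for an unseen index, otherwise append) repeats in several of the later query tests. Below is a minimal hedged sketch of a helper that could replace it, using Dictionary.TryGetValue; the AddByIndex name is hypothetical.

        private static void AddByIndex(Dictionary<int, List<SimpleDatum>> rg, SimpleDatum d)
        {
            // Append to the existing list for this index, or start a new one.
            List<SimpleDatum> rgItems;

            if (!rg.TryGetValue(d.Index, out rgItems))
            {
                rgItems = new List<SimpleDatum>();
                rg.Add(d.Index, rgItems);
            }

            rgItems.Add(d);
        }

With such a helper, the body of each query loop reduces to a single AddByIndex(rg, d) call.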
Example #24
        /// <summary>
        /// Create the dataset and load it into the database.
        /// </summary>
        /// <param name="nCreatorID">Specifies the creator ID.</param>
        /// <returns>On successful creation, <i>true</i> is returned, otherwise <i>false</i> is returned on abort.</returns>
        public bool LoadDatabase(int nCreatorID = 0)
        {
            try
            {
                reportProgress(0, 0, "Loading " + dataset_name + " database...");

                int nIdx          = 0;
                int nTotal        = 5011 + 17125;
                int nExtractIdx   = 0;
                int nExtractTotal = 10935 + 40178;

                // Get the label map.
                LabelMap labelMap = loadLabelMap();
                Dictionary <string, int> rgNameToLabel = labelMap.MapToLabel(m_log, true);
                string strSrc = dataset_name + ".training";

                int nSrcId = m_factory.GetSourceID(strSrc);
                if (nSrcId > 0)
                {
                    m_factory.DeleteSourceData(nSrcId);
                }

                List <Tuple <int, string, Size> > rgFileSizes = new List <Tuple <int, string, Size> >();

                if (!loadFile(m_param.DataBatchFileTrain2007, strSrc, nExtractTotal, ref nExtractIdx, nTotal, ref nIdx, m_log, m_param.ExtractFiles, rgNameToLabel, rgFileSizes))
                {
                    return(false);
                }

                if (!loadFile(m_param.DataBatchFileTrain2012, strSrc, nExtractTotal, ref nExtractIdx, nTotal, ref nIdx, m_log, m_param.ExtractFiles, rgNameToLabel, rgFileSizes))
                {
                    return(false);
                }

                string strDir = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData) + "\\MyCaffe\\test_data\\data\\ssd\\VOC0712\\";
                if (!Directory.Exists(strDir))
                {
                    Directory.CreateDirectory(strDir);
                }

                saveFileSizes(rgFileSizes, strDir + "train_name_size.txt");

                SourceDescriptor srcTrain = m_factory.LoadSource(strSrc);

                rgFileSizes   = new List <Tuple <int, string, Size> >();
                m_rgImg       = new List <SimpleDatum>();
                nIdx          = 0;
                nTotal        = 4952;
                nExtractIdx   = 0;
                nExtractTotal = 10347;

                strSrc = dataset_name + ".testing";

                nSrcId = m_factory.GetSourceID(strSrc);
                if (nSrcId > 0)
                {
                    m_factory.DeleteSourceData(nSrcId);
                }

                if (!loadFile(m_param.DataBatchFileTest2007, strSrc, nExtractTotal, ref nExtractIdx, nTotal, ref nIdx, m_log, m_param.ExtractFiles, rgNameToLabel, rgFileSizes))
                {
                    return(false);
                }

                saveFileSizes(rgFileSizes, strDir + "test_name_size.txt");

                SourceDescriptor srcTest = m_factory.LoadSource(strSrc);

                DatasetDescriptor ds = new DatasetDescriptor(nCreatorID, dataset_name, null, null, srcTrain, srcTest, dataset_name, dataset_name + " Dataset");
                m_factory.AddDataset(ds);
                m_factory.UpdateDatasetCounts(ds.ID);

                return(true);
            }
            catch (Exception)
            {
                // Rethrow, preserving the original stack trace.
                throw;
            }
            finally
            {
                if (OnCompleted != null)
                {
                    OnCompleted(this, new EventArgs());
                }
            }
        }
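A hedged usage sketch for the loader above. The VOCDataLoader class name and its constructor arguments are hypothetical (the snippet only shows LoadDatabase and the OnCompleted event, which is assumed to be a standard EventHandler); only those two members are taken from the example.

            // Hypothetical host class and constructor for the LoadDatabase method above.
            VOCDataLoader loader = new VOCDataLoader(param, log);

            // OnCompleted fires from the finally block, whether the load completes or aborts.
            loader.OnCompleted += (sender, e) =>
            {
                Console.WriteLine("VOC0712 dataset load completed.");
            };

            if (!loader.LoadDatabase())
            {
                Console.WriteLine("The dataset load was aborted.");
            }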
Example #25
        /// <summary>
        /// Create the dataset and load it into the database.
        /// </summary>
        /// <param name="nCreatorID">Specifies the creator ID.</param>
        /// <returns>On successful creation, <i>true</i> is returned, otherwise <i>false</i> is returned on abort.</returns>
        public bool LoadDatabase(int nCreatorID = 0)
        {
            try
            {
                reportProgress(0, 0, "Loading " + dataset_name + " database...");

                int nIdx          = 0;
                int nTotal        = 5011 + 17125;
                int nExtractIdx   = 0;
                int nExtractTotal = 10935 + 40178;

                // Get the label map.
                LabelMap labelMap = loadLabelMap();
                Dictionary <string, int> rgNameToLabel = labelMap.MapToLabel(m_log, true);
                string strSrc = dataset_name + ".training";

                int nSrcId = m_factory.GetSourceID(strSrc);
                if (nSrcId > 0)
                {
                    m_factory.DeleteSourceData(nSrcId);
                }

                if (!loadFile(m_param.DataBatchFileTrain2007, strSrc, nExtractTotal, ref nExtractIdx, nTotal, ref nIdx, m_log, m_param.ExtractFiles, rgNameToLabel))
                {
                    return(false);
                }

                if (!loadFile(m_param.DataBatchFileTrain2012, strSrc, nExtractTotal, ref nExtractIdx, nTotal, ref nIdx, m_log, m_param.ExtractFiles, rgNameToLabel))
                {
                    return(false);
                }

                SourceDescriptor srcTrain = m_factory.LoadSource(strSrc);

                m_rgImg       = new List <SimpleDatum>();
                nIdx          = 0;
                nTotal        = 4952;
                nExtractIdx   = 0;
                nExtractTotal = 10347;

                strSrc = dataset_name + ".testing";

                nSrcId = m_factory.GetSourceID(strSrc);
                if (nSrcId > 0)
                {
                    m_factory.DeleteSourceData(nSrcId);
                }

                if (!loadFile(m_param.DataBatchFileTest2007, strSrc, nExtractTotal, ref nExtractIdx, nTotal, ref nIdx, m_log, m_param.ExtractFiles, rgNameToLabel))
                {
                    return(false);
                }

                SourceDescriptor srcTest = m_factory.LoadSource(strSrc);

                DatasetDescriptor ds = new DatasetDescriptor(nCreatorID, dataset_name, null, null, srcTrain, srcTest, dataset_name, dataset_name + " Dataset");
                m_factory.AddDataset(ds);
                m_factory.UpdateDatasetCounts(ds.ID);

                return(true);
            }
            catch (Exception)
            {
                // Rethrow, preserving the original stack trace.
                throw;
            }
            finally
            {
                if (OnCompleted != null)
                {
                    OnCompleted(this, new EventArgs());
                }
            }
        }
Example #26
        /// <summary>
        /// Create the dataset and load it into the database.
        /// </summary>
        /// <param name="nCreatorID">Specifies the creator ID.</param>
        /// <returns>On successful creation, <i>true</i> is returned, otherwise <i>false</i> is returned on abort.</returns>
        public bool LoadDatabase(int nCreatorID = 0)
        {
            int nIdx   = 0;
            int nTotal = 0;

            try
            {
                List <Tuple <byte[], int> > rgTrainImg;
                List <Tuple <byte[], int> > rgTestImg;

                m_extractor.ExtractImages(out rgTrainImg, out rgTestImg);

                reportProgress(nIdx, nTotal, "Loading " + dataset_name + " database...");

                DatasetFactory factory         = null;
                string         strExportFolder = null;

                if (m_param.ExportToFile)
                {
                    strExportFolder = m_param.ExportPath.TrimEnd('\\') + "\\";
                    if (!Directory.Exists(strExportFolder))
                    {
                        Directory.CreateDirectory(strExportFolder);
                    }
                }

                string strTrainSrc = "training";
                if (!m_param.ExportToFile)
                {
                    factory = new DatasetFactory();

                    strTrainSrc = dataset_name + "." + strTrainSrc;
                    int nSrcId = factory.GetSourceID(strTrainSrc);
                    if (nSrcId != 0)
                    {
                        factory.DeleteSourceData(nSrcId);
                    }
                }

                if (!loadFile(factory, rgTrainImg, m_extractor.Channels, m_extractor.Height, m_extractor.Width, strTrainSrc, strExportFolder))
                {
                    return(false);
                }

                string strTestSrc = "testing";
                if (!m_param.ExportToFile)
                {
                    strTestSrc = dataset_name + "." + strTestSrc;
                    int nSrcId = factory.GetSourceID(strTestSrc);
                    if (nSrcId != 0)
                    {
                        factory.DeleteSourceData(nSrcId);
                    }
                }

                if (!loadFile(factory, rgTestImg, m_extractor.Channels, m_extractor.Height, m_extractor.Width, strTestSrc, strExportFolder))
                {
                    return(false);
                }

                if (!m_param.ExportToFile)
                {
                    SourceDescriptor  srcTrain = factory.LoadSource(strTrainSrc);
                    SourceDescriptor  srcTest  = factory.LoadSource(strTestSrc);
                    DatasetDescriptor ds       = new DatasetDescriptor(nCreatorID, dataset_name, null, null, srcTrain, srcTest, dataset_name, dataset_name + " Character Dataset");
                    factory.AddDataset(ds);
                    factory.UpdateDatasetCounts(ds.ID);
                }

                return(true);
            }
            catch (Exception)
            {
                // Rethrow, preserving the original stack trace.
                throw;
            }
            finally
            {
                if (OnCompleted != null)
                {
                    OnCompleted(this, new EventArgs());
                }
            }
        }
Example #27
        public void TestLoadLimitNextSequential(IMAGEDB_LOAD_METHOD loadMethod, int nLoadLimit)
        {
            List <string> rgDs = new List <string>()
            {
                "MNIST", "CIFAR-10", "MNIST"
            };
            IXImageDatabase db = new MyCaffeImageDatabase();

            foreach (string strDs in rgDs)
            {
                DatasetFactory df  = new DatasetFactory();
                int            nDs = df.GetDatasetID(strDs);
                if (nDs == 0)
                {
                    throw new Exception("The dataset '" + strDs + "' does not exist - you need to load it.");
                }

                SettingsCaffe settings = new SettingsCaffe();
                settings.ImageDbLoadMethod = loadMethod;
                settings.ImageDbLoadLimit  = nLoadLimit;

                Stopwatch sw = new Stopwatch();

                sw.Start();
                db.InitializeWithDsName(settings, strDs);
                db.SetSelectionMethod(IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.NONE);
                string str = sw.ElapsedMilliseconds.ToString();
                Trace.WriteLine(strDs + " Initialization Time: " + str + " ms.");

                DatasetDescriptor ds = db.GetDatasetByName(strDs);
                Dictionary <int, List <SimpleDatum> > rg      = new Dictionary <int, List <SimpleDatum> >();
                Dictionary <int, List <SimpleDatum> > rgFirst = new Dictionary <int, List <SimpleDatum> >();

                int    nTotal    = ds.TrainingSource.ImageCount;
                int    nCount    = 0;
                double dfTotalMs = 0;

                while (nCount < nTotal)
                {
                    for (int i = 0; i < nLoadLimit; i++)
                    {
                        sw.Reset();
                        sw.Start();
                        SimpleDatum d1 = db.QueryImage(ds.TrainingSource.ID, i);
                        dfTotalMs += sw.ElapsedMilliseconds;
                        sw.Stop();

                        if (!rg.Keys.Contains(d1.Index))
                        {
                            rg.Add(d1.Index, new List <SimpleDatum>()
                            {
                                d1
                            });
                        }
                        else
                        {
                            rg[d1.Index].Add(d1);
                        }

                        if (nCount == 0)
                        {
                            if (!rgFirst.Keys.Contains(d1.Index))
                            {
                                rgFirst.Add(d1.Index, new List <SimpleDatum>()
                                {
                                    d1
                                });
                            }
                            else
                            {
                                rgFirst[d1.Index].Add(d1);
                            }
                        }
                    }

                    db.LoadNextSet(null);
                    nCount += nLoadLimit;
                }

                str = (dfTotalMs / (double)nCount).ToString();
                Trace.WriteLine("Average Query Time: " + str + " ms.");

                // Verify that all items have been queried
                Assert.AreEqual(nTotal, rg.Count);

                Dictionary <int, List <SimpleDatum> > rgWrapAround = new Dictionary <int, List <SimpleDatum> >();

                for (int i = 0; i < nLoadLimit; i++)
                {
                    SimpleDatum d1 = db.QueryImage(ds.TrainingSource.ID, i);

                    if (!rgWrapAround.Keys.Contains(d1.Index))
                    {
                        rgWrapAround.Add(d1.Index, new List <SimpleDatum>()
                        {
                            d1
                        });
                    }
                    else
                    {
                        rgWrapAround[d1.Index].Add(d1);
                    }
                }

                // Verify that the reads wrap around to the start.
                Assert.AreEqual(rgWrapAround.Count, rgFirst.Count);

                List <KeyValuePair <int, List <SimpleDatum> > > rg1 = new List <KeyValuePair <int, List <SimpleDatum> > >();
                List <KeyValuePair <int, List <SimpleDatum> > > rg2 = new List <KeyValuePair <int, List <SimpleDatum> > >();

                foreach (KeyValuePair <int, List <SimpleDatum> > kv in rgWrapAround)
                {
                    rg1.Add(kv);
                }

                foreach (KeyValuePair <int, List <SimpleDatum> > kv in rgFirst)
                {
                    rg2.Add(kv);
                }

                for (int i = 0; i < rg1.Count; i++)
                {
                    Assert.AreEqual(rg1[i].Key, rg2[i].Key);
                    Assert.AreEqual(rg1[i].Value.Count, rg2[i].Value.Count);

                    for (int j = 0; j < rg1[i].Value.Count; j++)
                    {
                        Assert.AreEqual(rg1[i].Value[j].Label, rg2[i].Value[j].Label);
                    }
                }
            }

            db.CleanUp();

            IDisposable idisp = db as IDisposable;

            if (idisp != null)
            {
                idisp.Dispose();
            }
        }
Example #28
        public void TestQueryPair(IMAGEDB_LOAD_METHOD loadMethod, int nLoadLimit)
        {
            List <string> rgDs = new List <string>()
            {
                "MNIST", "CIFAR-10", "MNIST"
            };
            IXImageDatabase db = new MyCaffeImageDatabase();

            foreach (string strDs in rgDs)
            {
                SettingsCaffe settings = new SettingsCaffe();
                settings.ImageDbLoadMethod = loadMethod;
                settings.ImageDbLoadLimit  = nLoadLimit;

                Stopwatch sw = new Stopwatch();

                sw.Start();
                db.InitializeWithDsName(settings, strDs);
                db.SetSelectionMethod(IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.NONE);
                string str = sw.ElapsedMilliseconds.ToString();
                Trace.WriteLine(strDs + " Initialization Time: " + str + " ms.");

                DatasetDescriptor ds = db.GetDatasetByName(strDs);
                Dictionary <int, List <SimpleDatum> > rg = new Dictionary <int, List <SimpleDatum> >();

                int    nCount    = 100;
                double dfTotalMs = 0;

                for (int i = 0; i < nCount; i++)
                {
                    sw.Reset();
                    sw.Start();
                    SimpleDatum d1 = db.QueryImage(ds.TrainingSource.ID, i);
                    SimpleDatum d2 = db.QueryImage(ds.TrainingSource.ID, i, IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.PAIR);
                    dfTotalMs += sw.ElapsedMilliseconds;
                    sw.Stop();

                    if (!rg.Keys.Contains(d1.Index))
                    {
                        rg.Add(d1.Index, new List <SimpleDatum>()
                        {
                            d1
                        });
                    }
                    else
                    {
                        rg[d1.Index].Add(d1);
                    }

                    if (nLoadLimit > 0)
                    {
                        Assert.AreEqual(true, d1.Index == d2.Index - 1 || d1.Index == nLoadLimit - 1 && d2.Index == 0);
                    }
                    else
                    {
                        Assert.AreEqual(d1.Index, d2.Index - 1);
                    }
                }

                str = (dfTotalMs / (double)nCount).ToString();
                Trace.WriteLine("Average Query Time: " + str + " ms.");

                // Verify that all images within the load limit were queried.
                if (nLoadLimit > 0)
                {
                    Assert.AreEqual(rg.Count, nLoadLimit);
                }
            }

            db.CleanUp();

            IDisposable idisp = db as IDisposable;

            if (idisp != null)
            {
                idisp.Dispose();
            }
        }
Example #29
        public void TestQuerySequential4(IMAGEDB_LOAD_METHOD loadMethod, int nLoadLimit)
        {
            List <string> rgDs = new List <string>()
            {
                "MNIST", "CIFAR-10", "MNIST"
            };
            IXImageDatabase db = new MyCaffeImageDatabase();

            foreach (string strDs in rgDs)
            {
                SettingsCaffe settings = new SettingsCaffe();
                settings.ImageDbLoadMethod = loadMethod;
                settings.ImageDbLoadLimit  = nLoadLimit;

                Stopwatch sw = new Stopwatch();

                sw.Start();
                db.InitializeWithDsName(settings, strDs);
                db.SetSelectionMethod(IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.NONE);
                string str = sw.ElapsedMilliseconds.ToString();
                Trace.WriteLine(strDs + " Initialization Time: " + str + " ms.");

                DatasetDescriptor ds = db.GetDatasetByName(strDs);
                Dictionary <int, List <SimpleDatum> > rg = new Dictionary <int, List <SimpleDatum> >();

                int        nCount    = 100;
                double     dfTotalMs = 0;
                List <int> rgIdx     = new List <int>();

                for (int i = 0; i < nCount; i++)
                {
                    sw.Reset();
                    sw.Start();
                    SimpleDatum d = db.QueryImage(ds.TrainingSource.ID, i);
                    dfTotalMs += sw.ElapsedMilliseconds;
                    sw.Stop();

                    if (!rg.Keys.Contains(d.Index))
                    {
                        rg.Add(d.Index, new List <SimpleDatum>()
                        {
                            d
                        });
                    }
                    else
                    {
                        rg[d.Index].Add(d);
                    }

                    rgIdx.Add(d.Index);
                }

                str = (dfTotalMs / (double)nCount).ToString();
                Trace.WriteLine("Average Query Time: " + str + " ms.");

                // Verify sequential selection.

                int nIdx = 0;

                rgIdx.Sort();

                foreach (KeyValuePair <int, List <SimpleDatum> > kv in rg)
                {
                    int nIdx1 = rgIdx[nIdx];

                    Assert.AreEqual(kv.Value.Count, (nLoadLimit == 0) ? 1 : nLoadLimit);
                    Assert.AreEqual(rg[nIdx1][0].Index, (nLoadLimit == 0) ? nIdx1 : nIdx1 % nLoadLimit);
                    nIdx++;
                }
            }

            db.CleanUp();

            IDisposable idisp = db as IDisposable;

            if (idisp != null)
            {
                idisp.Dispose();
            }
        }
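Each of the image-database tests above ends with the same CleanUp call followed by a cast-to-IDisposable dispose. A minimal hedged sketch of factoring that tail into one helper follows (the ReleaseDatabase name is hypothetical; CleanUp and the IDisposable cast are taken from the examples).

        private static void ReleaseDatabase(IXImageDatabase db)
        {
            // Release the loaded datasets, then dispose the database if it supports IDisposable.
            db.CleanUp();

            IDisposable idisp = db as IDisposable;

            if (idisp != null)
            {
                idisp.Dispose();
            }
        }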
Example #30
        /// <summary>
        /// This method creates text representation of a reference from database.
        /// </summary>
        /// <param name="referenceType">Type of the reference</param>
        /// <param name="referenceId">Id to get the text representation for</param>
        /// <param name="level">Current depth of looking for the reference</param>
        /// <returns>String representation of the reference.</returns>
        public string GetTextForReference(string referenceType, long referenceId, int level = 1)
        {
            // If text is already in cache, return it
            if (referenceCache.ContainsKey(referenceType) && referenceCache[referenceType].ContainsKey(referenceId))
            {
                return(referenceCache[referenceType][referenceId]);
            }
            // Otherwise load it into cache and return it
            string value = "";

            // If reference is of type system users dataset, get username as value
            if (referenceType == applicationModel.ApplicationDescriptor.SystemDatasets.UsersDatasetDescriptor.Name)
            {
                UserModel userModel = userRepository.GetById(referenceId);
                value = userModel.GetUsername();
                addToCache(referenceType, referenceId, value);
            }
            // If reference is of type user-defined dataset
            else
            {
                DataModel         dataModel         = dataRepository.GetById(referenceId);
                DatasetDescriptor datasetDescriptor = applicationModel.ApplicationDescriptor.Datasets.Where(d => d.Name == referenceType).FirstOrDefault();
                if (datasetDescriptor == null)
                {
                    throw new Exception($"[ERROR]: Dataset {referenceType} not in application {applicationModel.LoginApplicationName} with id {applicationModel.Id}.\n");
                }
                var sb = new StringBuilder("");
                // Get the text representation for at most the first MaxAttributesDisplayedInReference attributes of dataModel
                bool shouldAddToCache         = true;
                var  displayedAttributesCount = Math.Min(datasetDescriptor.Attributes.Count, Constants.MaxAttributesDisplayedInReference);
                for (int i = 0; i < displayedAttributesCount; i++)
                {
                    var attributeDescriptor = datasetDescriptor.Attributes[i];
                    // For basic types take the value directly; for reference types take at most the first MaxReferencedDisplayedInListOfReferences references and get their text representation
                    foreach (var dataDictionaryValue in dataModel.DataDictionary[attributeDescriptor.Name].Take(Constants.MaxReferencedDisplayedInListOfReferences))
                    {
                        // If dataDictionaryValue is reference, get its representation
                        bool isReference = !AttributeType.Types.Contains(attributeDescriptor.Type);
                        if (isReference)
                        {
                            long dataId;
                            if (level > Constants.MaxDepthOfDisplayedReferences)
                            {
                                sb.Append("...");
                                // If reference is too deep, do not add it into the cache
                                shouldAddToCache = false;
                            }
                            else if (long.TryParse(dataDictionaryValue.ToString(), out dataId))
                            {
                                sb.Append("(" + GetTextForReference(attributeDescriptor.Type, dataId, level + 1) + ")");
                                // Do not add too deep references into the cache
                                if (level + 1 > Constants.MaxDepthOfDisplayedReferences)
                                {
                                    shouldAddToCache = false;
                                }
                            }
                            else
                            {
                                try
                                {
                                    sb.Append(JsonConvert.DeserializeObject <Tuple <string, string> >(dataDictionaryValue.ToString()).Item2);
                                    // Do not add too deep references into the cache
                                    if (level + 1 > Constants.MaxDepthOfDisplayedReferences)
                                    {
                                        shouldAddToCache = false;
                                    }
                                }
                                catch (Exception e)
                                {
                                    if (e is JsonSerializationException || e is JsonReaderException)
                                    {
                                        Logger.LogToConsole($"ERROR: Reference {dataDictionaryValue} could not be parsed in ReferenceCache.");
                                        Logger.LogExceptionToConsole(e);
                                        // Do not add invalid references into the cache
                                        shouldAddToCache = false;
                                    }
                                    else
                                    {
                                        throw;
                                    }
                                }
                            }
                        }
                        // If dataDictionaryValue is not reference, get its value
                        else
                        {
                            sb.Append(dataDictionaryValue);
                        }
                        // Separate values within one attribute with a comma (except after the last value)
                        if (!dataDictionaryValue.Equals(dataModel.DataDictionary[attributeDescriptor.Name].Take(Constants.MaxReferencedDisplayedInListOfReferences).Last()))
                        {
                            sb.Append(", ");
                        }
                    }
                    // Separate the values of individual attributes with | (except after the last attribute and empty attributes)
                    if (i + 1 != displayedAttributesCount && dataModel.DataDictionary[datasetDescriptor.Attributes[i + 1].Name].Count != 0)
                    {
                        sb.Append(" | ");
                    }
                }
                value = sb.ToString();
                if (shouldAddToCache)
                {
                    addToCache(referenceType, referenceId, value);
                }
            }
            return(value);
        }
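The referenceCache field and the addToCache helper are used above but not shown. Below is a minimal hedged sketch consistent with the lookups in this method (a dictionary keyed by reference type and then by id); the actual declarations in the original code may differ.

        // Assumed shape of the cache, based on how it is read at the top of GetTextForReference.
        private readonly Dictionary<string, Dictionary<long, string>> referenceCache =
            new Dictionary<string, Dictionary<long, string>>();

        private void addToCache(string referenceType, long referenceId, string value)
        {
            // Create the per-type dictionary on first use, then store the text representation.
            Dictionary<long, string> cacheForType;

            if (!referenceCache.TryGetValue(referenceType, out cacheForType))
            {
                cacheForType = new Dictionary<long, string>();
                referenceCache.Add(referenceType, cacheForType);
            }

            cacheForType[referenceId] = value;
        }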