Beispiel #1
0
        /// <summary>
        /// The BaseDataLayer constructor.
        /// </summary>
        /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
        /// <param name="log">Specifies the Log for output.</param>
        /// <param name="p">Specifies the LayerParameter.</param>
        /// <param name="db">Specifies the external database to use; may be null, in which case
        /// no data source or image mean is resolved.</param>
        public BaseDataLayer(CudaDnn <T> cuda, Log log, LayerParameter p, IXImageDatabase db)
            : base(cuda, log, p)
        {
            if (db == null)
                return;

            m_imgdb = db;

            // Resolve the data source from the parameter matching the layer type.
            if (p.type == LayerParameter.LayerType.DATA ||
                p.type == LayerParameter.LayerType.TRIPLET_DATA)
            {
                m_src = m_imgdb.GetSourceByName(p.data_param.source);
            }
            else if (p.type == LayerParameter.LayerType.BATCHDATA)
            {
                m_src = m_imgdb.GetSourceByName(p.batch_data_param.source);
            }

            if (p.transform_param.use_imagedb_mean)
            {
                // FIX: the original re-checked 'db != null' here, which is always true
                // within this scope, so its else/WARNING branch was unreachable dead code.
                // NOTE(review): m_src may still be null here for layer types other than
                // DATA, TRIPLET_DATA or BATCHDATA - TODO confirm callers guarantee one of these.
                m_imgMean = db.GetImageMean(m_src.ID);
            }
        }
Beispiel #2
0
        /// <summary>
        /// The Cursor constructor.
        /// </summary>
        /// <param name="db">Specifies the underlying database.</param>
        /// <param name="strSrc">Specifies the name of the data source to use.</param>
        public Cursor(IXImageDatabase db, string strSrc)
        {
            m_db = db;

            // Cache the source ID and total image count of the data source
            // so the cursor can iterate without re-querying the database.
            SourceDescriptor src = db.GetSourceByName(strSrc);
            m_nSrcID = src.ID;
            m_nCount = src.ImageCount;
        }
Beispiel #3
0
        /// <summary>
        /// The BatchDataLayer constructor.
        /// </summary>
        /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
        /// <param name="log">Specifies the Log for output.</param>
        /// <param name="p">Specifies the LayerParameter with the batch data parameter.</param>
        /// <param name="db">Specifies the external database to use.</param>
        /// <param name="evtCancel">Specifies the CancelEvent used to cancel any pre-fetching operations.</param>
        /// <param name="fnSet">Specifies the delegate used to set the input data.</param>
        public BatchDataLayer(CudaDnn <T> cuda, Log log, LayerParameter p, IXImageDatabase db, CancelEvent evtCancel, TransferInput.fnSetInputData fnSet)
            : base(cuda, log, p, db)
        {
            log.CHECK(p.type == LayerParameter.LayerType.BATCHDATA, "The layer type should be BATCHDATA.");

            m_type = LayerParameter.LayerType.BATCHDATA;
            m_evtCancel = evtCancel;
            m_fnSetInput = fnSet;

            // Create the batch used to collect data and the worker thread that fills it.
            m_batch = new layers.Batch <T>(m_cuda, m_log);
            m_internalThread = new common.InternalThread <T>();
            m_internalThread.DoWork += m_internalThread_DoWork;
        }
Beispiel #4
0
        /// <summary>
        /// The AnnotatedDataLayer constructor.
        /// </summary>
        /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
        /// <param name="log">Specifies the Log for output.</param>
        /// <param name="p">Provides the LayerParameter with the annotated_data_param.</param>
        /// <param name="db">Specifies the external database to use.</param>
        /// <param name="evtCancel">Specifies the CancelEvent used to cancel any pre-fetching operations.</param>
        public AnnotatedDataLayer(CudaDnn <T> cuda, Log log, LayerParameter p, IXImageDatabase db, CancelEvent evtCancel)
            : base(cuda, log, p, db, evtCancel)
        {
            m_type = LayerParameter.LayerType.ANNOTATED_DATA;

            // Start from the database's current image selection method and apply
            // any explicit overrides from the data parameter.
            IMGDB_IMAGE_SELECTION_METHOD imgSel = db.GetSelectionMethod().Item2;

            var pair = m_param.data_param.enable_pair_selection;
            if (pair.HasValue)
                imgSel = pair.Value ? (imgSel | IMGDB_IMAGE_SELECTION_METHOD.PAIR) : (imgSel & ~IMGDB_IMAGE_SELECTION_METHOD.PAIR);

            var random = m_param.data_param.enable_random_selection;
            if (random.HasValue)
                imgSel = random.Value ? (imgSel | IMGDB_IMAGE_SELECTION_METHOD.RANDOM) : (imgSel & ~IMGDB_IMAGE_SELECTION_METHOD.RANDOM);

            // Annotation data rides along with the image data criteria, so loading it is required.
            if (!db.GetLoadImageDataCriteria())
                m_log.WriteError(new Exception("The 'Load Image Data Criteria' must be set to TRUE in order to load the Annotation data."));

            db.SetSelectionMethod(null, imgSel);

            // Open a cursor on the configured data source.
            m_db = new data.DB(db);
            m_db.Open(p.data_param.source);
            m_cursor = m_db.NewCursor();

            // Optional timers used to report batch/transaction timing.
            if (p.data_param.display_timing)
            {
                m_swTimerBatch = new Stopwatch();
                m_swTimerTransaction = new Stopwatch();
            }

            m_sampler = new SsdSampler <T>(cuda, log);
        }
Beispiel #5
0
        /// <summary>
        /// The DataLayer constructor.
        /// </summary>
        /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
        /// <param name="log">Specifies the Log for output.</param>
        /// <param name="p">Specifies the LayerParameter with the data_param.</param>
        /// <param name="db">Specifies the external database to use.</param>
        /// <param name="evtCancel">Specifies the CancelEvent used to cancel any pre-fetching operations.</param>
        public DataLayer(CudaDnn <T> cuda, Log log, LayerParameter p, IXImageDatabase db, CancelEvent evtCancel)
            : base(cuda, log, p, db, evtCancel)
        {
            m_type = LayerParameter.LayerType.DATA;

            // When acting as a synchronization target, collect the labels of each batch.
            if (p.data_param.synchronize_target)
                m_rgBatchLabels = new LabelCollection();

            // Start from the database's current image selection method and apply
            // any explicit overrides from the data parameter.
            IMGDB_IMAGE_SELECTION_METHOD imgSel = db.GetSelectionMethod().Item2;

            var pair = m_param.data_param.enable_pair_selection;
            if (pair.HasValue)
                imgSel = pair.Value ? (imgSel | IMGDB_IMAGE_SELECTION_METHOD.PAIR) : (imgSel & ~IMGDB_IMAGE_SELECTION_METHOD.PAIR);

            var random = m_param.data_param.enable_random_selection;
            if (random.HasValue)
                imgSel = random.Value ? (imgSel | IMGDB_IMAGE_SELECTION_METHOD.RANDOM) : (imgSel & ~IMGDB_IMAGE_SELECTION_METHOD.RANDOM);

            db.SetSelectionMethod(null, imgSel);

            // Open a cursor on the configured data source.
            m_db = new data.DB(db);
            m_db.Open(p.data_param.source);
            m_cursor = m_db.NewCursor();

            // Optional timers used to report batch/transaction timing.
            if (p.data_param.display_timing)
            {
                m_swTimerBatch = new Stopwatch();
                m_swTimerTransaction = new Stopwatch();
            }
        }
        /// <summary>
        /// The BasePrefetchingDataLayer constructor.
        /// </summary>
        /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
        /// <param name="log">Specifies the Log for output.</param>
        /// <param name="p">Specifies the LayerParameter.</param>
        /// <param name="db">Specifies the external database to use.</param>
        /// <param name="evtCancel">Specifies the CancelEvent used to cancel any pre-fetching operations.</param>
        public BasePrefetchingDataLayer(CudaDnn <T> cuda, Log log, LayerParameter p, IXImageDatabase db, CancelEvent evtCancel)
            : base(cuda, log, p, db)
        {
            m_evtCancel = evtCancel;

            // Allocate the prefetch batches along with the free/full queues used to
            // hand batches between the prefetch thread and the forward pass.
            m_rgPrefetch = new Batch <T> [p.data_param.prefetch];
            m_rgPrefetchFree = new BlockingQueue <Batch <T> >(m_evtCancel);
            m_rgPrefetchFull = new BlockingQueue <Batch <T> >(m_evtCancel);

            m_internalThread = new InternalThread <T>();
            m_internalThread.DoWork += new EventHandler <ActionStateArgs <T> >(m_internalThread_DoWork);

            // Every batch starts out on the 'free' queue, ready to be filled.
            for (int nIdx = 0; nIdx < m_rgPrefetch.Length; nIdx++)
            {
                Batch <T> batch = new Batch <T>(cuda, log);
                m_rgPrefetch[nIdx] = batch;
                m_rgPrefetchFree.Push(batch);
            }
        }
Beispiel #7
0
 /// <summary>
 /// The DB Constructor.
 /// </summary>
 /// <param name="imgDb">The underlying database - the CaffeImageDatabase.</param>
 public DB(IXImageDatabase imgDb)
 {
     // Wrap the image database; no copy is made, the reference is shared.
     m_db = imgDb;
 }
Beispiel #8
0
 /// <summary>
 /// The AdamSolver constructor.
 /// </summary>
 /// <param name="cuda">Specifies the instance of CudaDnn to use.</param>
 /// <param name="log">Specifies the Log for output.</param>
 /// <param name="p">Specifies the SolverParameter.</param>
 /// <param name="evtCancel">Specifies a CancelEvent used to cancel the current operation (e.g. training, testing) for which the Solver is performing.</param>
 /// <param name="evtForceSnapshot">Specifies an automatic reset event that causes the Solver to perform a Snapshot when set.</param>
 /// <param name="evtForceTest">Specifies an automatic reset event that causes the Solver to run a testing cycle when set.</param>
 /// <param name="imgDb">Specifies the CaffeImageDatabase.</param>
 /// <param name="persist">Specifies the persistence used for loading and saving weights.</param>
 /// <param name="nSolverCount">Specifies the number of Solvers participating in a multi-GPU session.</param>
 /// <param name="nSolverRank">Specifies the rank of this Solver in a multi-GPU session.</param>
 public AdamSolver(CudaDnn <T> cuda, Log log, SolverParameter p, CancelEvent evtCancel, AutoResetEvent evtForceSnapshot, AutoResetEvent evtForceTest, IXImageDatabase imgDb, IXPersist <T> persist, int nSolverCount = 1, int nSolverRank = 0)
     : base(cuda, log, p, evtCancel, evtForceSnapshot, evtForceTest, imgDb, persist, nSolverCount, nSolverRank)
 {
     // Perform the Adam-specific setup before solving begins.
     AdamPreSolve();
 }
Beispiel #9
0
        /// <summary>
        /// Create a new Layer based on the LayerParameter.
        /// </summary>
        /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
        /// <param name="log">Specifies the Log for output.</param>
        /// <param name="p">Specifies the LayerParameter that contains the LayerType to create.</param>
        /// <param name="evtCancel">Specifies the CancelEvent used by some Layers when created.</param>
        /// <param name="imgDb">Optionally, specifies the CaffeImageDatabase used by data Layers.</param>
        /// <param name="trxinput">Optionally, specifies the transfer input object used by some of the data Layers.</param>
        /// <returns>The newly created Layer is returned.</returns>
        public static Layer <T> Create(CudaDnn <T> cuda, Log log, LayerParameter p, CancelEvent evtCancel, IXImageDatabase imgDb = null, TransferInput trxinput = null)
        {
            // Dispatch on the layer type; data layers additionally receive the image
            // database, cancel event and/or transfer-input delegates.
            switch (p.type)
            {
                case LayerParameter.LayerType.ABSVAL:
                    return new AbsValLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.ACCURACY:
                    return new AccuracyLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.ARGMAX:
                    return new ArgMaxLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.BATCHDATA:
                    return new BatchDataLayer<T>(cuda, log, p, imgDb, evtCancel, (trxinput != null) ? trxinput.Set : null);
                case LayerParameter.LayerType.BATCHNORM:
                    return new BatchNormLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.BATCHREINDEX:
                    return new BatchReindexLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.BNLL:
                    return new BNLLLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.BIAS:
                    return new BiasLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.BINARYHASH:
                    return new BinaryHashLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.CONCAT:
                    return new ConcatLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.CONTRASTIVE_LOSS:
                    return new ContrastiveLossLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.CONVOLUTION:
                    return new ConvolutionLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.CROP:
                    return new CropLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.DECONVOLUTION:
                    return new DeconvolutionLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.IM2COL:
                    return new Im2colLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.DATA:
                    return new DataLayer<T>(cuda, log, p, imgDb, evtCancel);
                case LayerParameter.LayerType.DEBUG:
                    return new DebugLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.DROPOUT:
                    return new DropoutLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.DUMMYDATA:
                    return new DummyDataLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.ELTWISE:
                    return new EltwiseLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.ELU:
                    return new ELULayer<T>(cuda, log, p);
                case LayerParameter.LayerType.EMBED:
                    return new EmbedLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.EUCLIDEAN_LOSS:
                    return new EuclideanLossLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.EXP:
                    return new ExpLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.FILTER:
                    return new FilterLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.FLATTEN:
                    return new FlattenLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.GRN:
                    return new GRNLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.GRADIENTSCALER:
                    return new GradientScaleLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.HINGE_LOSS:
                    return new HingeLossLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.INFOGAIN_LOSS:
                    return new InfogainLossLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.INNERPRODUCT:
                    return new InnerProductLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.KNN:
                    return new KnnLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.LOG:
                    return new LogLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.LABELMAPPING:
                    return new LabelMappingLayer<T>(cuda, log, p, imgDb);
                case LayerParameter.LayerType.LRN:
                    return new LRNLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.MEMORYDATA:
                    return new MemoryDataLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.MULTINOMIALLOGISTIC_LOSS:
                    return new MultinomialLogisticLossLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.MVN:
                    return new MVNLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.NORMALIZATION:
                    return new NormalizationLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.POOLING:
                    return new PoolingLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.UNPOOLING1:
                    return new UnPoolingLayer1<T>(cuda, log, p);
                case LayerParameter.LayerType.UNPOOLING2:
                    return new UnPoolingLayer2<T>(cuda, log, p);
                case LayerParameter.LayerType.POWER:
                    return new PowerLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.PRELU:
                    return new PReLULayer<T>(cuda, log, p);
                case LayerParameter.LayerType.REDUCTION:
                    return new ReductionLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.REINFORCEMENT_LOSS:
                    return new ReinforcementLossLayer<T>(cuda, log, p, (trxinput != null) ? trxinput.Get : null);
                case LayerParameter.LayerType.RELU:
                    return new ReLULayer<T>(cuda, log, p);
                case LayerParameter.LayerType.RESHAPE:
                    return new ReshapeLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.SCALE:
                    return new ScaleLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.SIGMOID:
                    return new SigmoidLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.SIGMOIDCROSSENTROPY_LOSS:
                    return new SigmoidCrossEntropyLossLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.SILENCE:
                    return new SilenceLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.SLICE:
                    return new SliceLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.SOFTMAX:
                    return new SoftmaxLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.SOFTMAXWITH_LOSS:
                    return new SoftmaxLossLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.SPLIT:
                    return new SplitLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.SPP:
                    return new SPPLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.SWISH:
                    return new SwishLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.TANH:
                    return new TanhLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.THRESHOLD:
                    return new ThresholdLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.TRIPLET_LOSS_SIMPLE:
                    return new TripletLossSimpleLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.TRIPLET_LOSS:
                    return new TripletLossLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.TRIPLET_SELECT:
                    return new TripletSelectLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.TRIPLET_DATA:
                    return new TripletDataLayer<T>(cuda, log, p, imgDb, evtCancel);
                case LayerParameter.LayerType.TILE:
                    return new TileLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.LSTM_SIMPLE:
                    return new LSTMSimpleLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.RNN:
                    return new RNNLayer<T>(cuda, log, p, evtCancel);
                case LayerParameter.LayerType.LSTM:
                    return new LSTMLayer<T>(cuda, log, p, evtCancel);
                case LayerParameter.LayerType.LSTM_UNIT:
                    return new LSTMUnitLayer<T>(cuda, log, p);
                case LayerParameter.LayerType.INPUT:
                    return new InputLayer<T>(cuda, log, p);
                default:
                    log.FAIL("Unknown layer type: " + p.type.ToString());
                    break;
            }

            // Only reached when log.FAIL does not itself throw.
            throw new NotImplementedException("The layer type: " + p.type.ToString() + " is not implemented yet.");
        }
Beispiel #10
0
 /// <summary>
 /// The AdaDeltaSolver constructor.
 /// </summary>
 /// <param name="cuda">Specifies the instance of CudaDnn to use.</param>
 /// <param name="log">Specifies the Log for output.</param>
 /// <param name="p">Specifies the SolverParameter.</param>
 /// <param name="evtCancel">Specifies a CancelEvent used to cancel the current operation (e.g. training, testing) for which the Solver is performing.</param>
 /// <param name="evtForceSnapshot">Specifies an automatic reset event that causes the Solver to perform a Snapshot when set.</param>
 /// <param name="evtForceTest">Specifies an automatic reset event that causes the Solver to run a testing cycle when set.</param>
 /// <param name="imgDb">Specifies the CaffeImageDatabase.</param>
 /// <param name="persist">Specifies the persistence used for loading and saving weights.</param>
 /// <param name="nSolverCount">Specifies the number of Solvers participating in a multi-GPU session.</param>
 /// <param name="nSolverRank">Specifies the rank of this Solver in a multi-GPU session.</param>
 /// <param name="shareNet">Optionally, specifies the net to share when creating the training network (default = null, meaning no share net is used).</param>
 /// <param name="getws">Optionally, specifies the handler for getting the workspace.</param>
 /// <param name="setws">Optionally, specifies the handler for setting the workspace.</param>
 public AdaDeltaSolver(CudaDnn <T> cuda, Log log, SolverParameter p, CancelEvent evtCancel, AutoResetEvent evtForceSnapshot, AutoResetEvent evtForceTest, IXImageDatabase imgDb, IXPersist <T> persist, int nSolverCount = 1, int nSolverRank = 0, Net <T> shareNet = null, onGetWorkspace getws = null, onSetWorkspace setws = null)
     : base(cuda, log, p, evtCancel, evtForceSnapshot, evtForceTest, imgDb, persist, nSolverCount, nSolverRank, shareNet, getws, setws)
 {
     // Perform the AdaDelta-specific setup before solving begins.
     AdaDeltaPreSolve();
 }
Beispiel #11
0
 /// <summary>
 /// The AdaGradSolver constructor.
 /// </summary>
 /// <param name="cuda">Specifies the instance of CudaDnn to use.</param>
 /// <param name="log">Specifies the Log for output.</param>
 /// <param name="p">Specifies the SolverParameter.</param>
 /// <param name="evtCancel">Specifies a CancelEvent used to cancel the current operation (e.g. training, testing) for which the Solver is performing.</param>
 /// <param name="evtForceSnapshot">Specifies an automatic reset event that causes the Solver to perform a Snapshot when set.</param>
 /// <param name="evtForceTest">Specifies an automatic reset event that causes the Solver to run a testing cycle when set.</param>
 /// <param name="imgDb">Specifies the CaffeImageDatabase.</param>
 /// <param name="persist">Specifies the persistence used for loading and saving weights.</param>
 /// <param name="nSolverCount">Specifies the number of Solvers participating in a multi-GPU session.</param>
 /// <param name="nSolverRank">Specifies the rank of this Solver in a multi-GPU session.</param>
 public AdaGradSolver(CudaDnn <T> cuda, Log log, SolverParameter p, CancelEvent evtCancel, AutoResetEvent evtForceSnapshot, AutoResetEvent evtForceTest, IXImageDatabase imgDb, IXPersist <T> persist, int nSolverCount = 1, int nSolverRank = 0)
     : base(cuda, log, p, evtCancel, evtForceSnapshot, evtForceTest, imgDb, persist, nSolverCount, nSolverRank)
 {
     // AdaGrad adapts the learning rate per parameter and does not support momentum.
     m_log.CHECK_EQ(0, m_param.momentum, "Momentum cannot be used with AdaGrad.");
 }
Beispiel #12
0
 /// <summary>
 /// The LabelMappingLayer constructor.
 /// </summary>
 /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
 /// <param name="log">Specifies the Log for output.</param>
 /// <param name="p">provides LayerParameter labelmapping_param, with options:
 ///   - mapping. Defines the mappings from original label to new label.
 ///
 ///   - update_database (/b optional, default = false). Whether or not to update the database with the new mapping, otherwise mappings are used online.
 ///
 ///   - reset_database_labels (/b optional, default = false). Whether or not to reset the database to its original labels or not. This setting requires 'update_database' = true.
 ///
 ///   - label_boosts (/b optional, default = ""). A string that defines which labels to boost, giving them a higher probability of being selected.
 /// </param>
 /// <param name="db">Specifies the CaffeImageDatabase.</param>
 public LabelMappingLayer(CudaDnn <T> cuda, Log log, LayerParameter p, IXImageDatabase db)
     : base(cuda, log, p)
 {
     m_db   = db;
     m_type = LayerParameter.LayerType.LABELMAPPING;
 }
Beispiel #13
0
 /// <summary>
 /// The RmsPropSolver constructor.
 /// </summary>
 /// <param name="cuda">Specifies the instance of CudaDnn to use.</param>
 /// <param name="log">Specifies the Log for output.</param>
 /// <param name="p">Specifies the SolverParameter.</param>
 /// <param name="evtCancel">Specifies a CancelEvent used to cancel the current operation (e.g. training, testing) for which the Solver is performing.</param>
 /// <param name="evtForceSnapshot">Specifies an automatic reset event that causes the Solver to perform a Snapshot when set.</param>
 /// <param name="evtForceTest">Specifies an automatic reset event that causes the Solver to run a testing cycle when set.</param>
 /// <param name="imgDb">Specifies the CaffeImageDatabase.</param>
 /// <param name="persist">Specifies the persistence used for loading and saving weights.</param>
 /// <param name="nSolverCount">Specifies the number of Solvers participating in a multi-GPU session.</param>
 /// <param name="nSolverRank">Specifies the rank of this Solver in a multi-GPU session.</param>
 public RmsPropSolver(CudaDnn <T> cuda, Log log, SolverParameter p, CancelEvent evtCancel, AutoResetEvent evtForceSnapshot, AutoResetEvent evtForceTest, IXImageDatabase imgDb, IXPersist <T> persist, int nSolverCount = 1, int nSolverRank = 0)
     : base(cuda, log, p, evtCancel, evtForceSnapshot, evtForceTest, imgDb, persist, nSolverCount, nSolverRank)
 {
     // RmsProp maintains its own moving average of squared gradients, so plain
     // momentum is unsupported and rms_decay must be a valid decay rate in [0, 1).
     m_log.CHECK_EQ(0, m_param.momentum, "Momentum cannot be used with RmsProp.");
     m_log.CHECK_GE(m_param.rms_decay, 0, "rms_decay should lie between 0 and 1.");
     m_log.CHECK_LT(m_param.rms_decay, 1, "rms_decay should lie between 0 and 1.");
 }
Beispiel #14
0
 /// <summary>
 /// The LBFGSSolver constructor.
 /// </summary>
 /// <param name="cuda">Specifies the instance of CudaDnn to use.</param>
 /// <param name="log">Specifies the Log for output.</param>
 /// <param name="p">Specifies the SolverParameter.</param>
 /// <param name="evtCancel">Specifies a CancelEvent used to cancel the current operation (e.g. training, testing) for which the Solver is performing.</param>
 /// <param name="evtForceSnapshot">Specifies an automatic reset event that causes the Solver to perform a Snapshot when set.</param>
 /// <param name="evtForceTest">Specifies an automatic reset event that causes the Solver to run a testing cycle when set.</param>
 /// <param name="imgDb">Specifies the CaffeImageDatabase.</param>
 /// <param name="persist">Specifies the persistence used for loading and saving weights.</param>
 /// <param name="nSolverCount">Specifies the number of Solvers participating in a multi-GPU session.</param>
 /// <param name="nSolverRank">Specifies the rank of this Solver in a multi-GPU session.</param>
 public LBFGSSolver(CudaDnn <T> cuda, Log log, SolverParameter p, CancelEvent evtCancel, AutoResetEvent evtForceSnapshot, AutoResetEvent evtForceTest, IXImageDatabase imgDb, IXPersist <T> persist, int nSolverCount = 1, int nSolverRank = 0)
     : base(cuda, log, p, evtCancel, evtForceSnapshot, evtForceTest, imgDb, persist, nSolverCount, nSolverRank)
 {
     // Cache the scalar constants 0, 1 and -1 converted to the generic type T,
     // avoiding repeated Convert.ChangeType calls during solving.
     m_tZero     = (T)Convert.ChangeType(0, typeof(T));
     m_tOne      = (T)Convert.ChangeType(1, typeof(T));
     m_tMinusOne = (T)Convert.ChangeType(-1, typeof(T));
     PreSolve();
 }
Beispiel #15
0
 /// <summary>
 /// The NesterovSolver constructor.
 /// </summary>
 /// <param name="cuda">Specifies the instance of CudaDnn to use.</param>
 /// <param name="log">Specifies the Log for output.</param>
 /// <param name="p">Specifies the SolverParameter.</param>
 /// <param name="evtCancel">Specifies a CancelEvent used to cancel the current operation (e.g. training, testing) for which the Solver is performing.</param>
 /// <param name="evtForceSnapshot">Specifies an automatic reset event that causes the Solver to perform a Snapshot when set.</param>
 /// <param name="evtForceTest">Specifies an automatic reset event that causes the Solver to run a testing cycle when set.</param>
 /// <param name="imgDb">Specifies the CaffeImageDatabase.</param>
 /// <param name="persist">Specifies the persistence used for loading and saving weights.</param>
 /// <param name="nSolverCount">Specifies the number of Solvers participating in a multi-GPU session.</param>
 /// <param name="nSolverRank">Specifies the rank of this Solver in a multi-GPU session.</param>
 /// <param name="shareNet">Optionally, specifies the net to share when creating the training network (default = null, meaning no share net is used).</param>
 public NesterovSolver(CudaDnn <T> cuda, Log log, SolverParameter p, CancelEvent evtCancel, AutoResetEvent evtForceSnapshot, AutoResetEvent evtForceTest, IXImageDatabase imgDb, IXPersist <T> persist, int nSolverCount = 1, int nSolverRank = 0, Net <T> shareNet = null)
     : base(cuda, log, p, evtCancel, evtForceSnapshot, evtForceTest, imgDb, persist, nSolverCount, nSolverRank, shareNet)
 {
     // All initialization is handled by the base Solver constructor.
 }
Beispiel #16
0
 /// <summary>
 /// The TripletDataLayer constructor.
 /// </summary>
 /// <param name="cuda">Specifies the CudaDnn connection to Cuda.</param>
 /// <param name="log">Specifies the Log for output.</param>
 /// <param name="p">Specifies the LayerParameter of type TRIPLET_DATA with parameter triplet_data_param.</param>
 /// <param name="db">Specifies the image database.</param>
 /// <param name="evtCancel">Specifies the cancel event to cancel data loading operations.</param>
 public TripletDataLayer(CudaDnn <T> cuda, Log log, LayerParameter p, IXImageDatabase db, CancelEvent evtCancel)
     : base(cuda, log, p, db, evtCancel)
 {
     log.CHECK(p.type == LayerParameter.LayerType.TRIPLET_DATA, "The layer type should be TRIPLET_DATA.");
     m_type = LayerParameter.LayerType.TRIPLET_DATA;
 }
Beispiel #17
0
 /// <summary>
 /// The AdaGradSolver constructor.
 /// </summary>
 /// <param name="cuda">Specifies the instance of CudaDnn to use.</param>
 /// <param name="log">Specifies the Log for output.</param>
 /// <param name="p">Specifies the SolverParameter.</param>
 /// <param name="evtCancel">Specifies a CancelEvent used to cancel the current operation (e.g. training, testing) for which the Solver is performing.</param>
 /// <param name="evtForceSnapshot">Specifies an automatic reset event that causes the Solver to perform a Snapshot when set.</param>
 /// <param name="evtForceTest">Specifies an automatic reset event that causes the Solver to run a testing cycle when set.</param>
 /// <param name="imgDb">Specifies the CaffeImageDatabase.</param>
 /// <param name="persist">Specifies the persistence used for loading and saving weights.</param>
 /// <param name="nSolverCount">Specifies the number of Solvers participating in a multi-GPU session.</param>
 /// <param name="nSolverRank">Specifies the rank of this Solver in a multi-GPU session.</param>
 /// <param name="shareNet">Optionally, specifies the net to share when creating the training network (default = null, meaning no share net is used).</param>
 /// <param name="getws">Optionally, specifies the handler for getting the workspace.</param>
 /// <param name="setws">Optionally, specifies the handler for setting the workspace.</param>
 public AdaGradSolver(CudaDnn <T> cuda, Log log, SolverParameter p, CancelEvent evtCancel, AutoResetEvent evtForceSnapshot, AutoResetEvent evtForceTest, IXImageDatabase imgDb, IXPersist <T> persist, int nSolverCount = 1, int nSolverRank = 0, Net <T> shareNet = null, onGetWorkspace getws = null, onSetWorkspace setws = null)
     : base(cuda, log, p, evtCancel, evtForceSnapshot, evtForceTest, imgDb, persist, nSolverCount, nSolverRank, shareNet, getws, setws)
 {
     // AdaGrad adapts the learning rate per parameter and does not support momentum.
     m_log.CHECK_EQ(0, m_param.momentum, "Momentum cannot be used with AdaGrad.");
 }