private void testRandom(Log log, CryptoRandom.METHOD method, int nBuckets)
{
    PreTest.Init();

    BucketCollection col = new BucketCollection(0, 1, nBuckets);
    CryptoRandom rand = new CryptoRandom(method, Guid.NewGuid().GetHashCode());
    int nTotal = 1000000;

    log.WriteLine("Testing (" + nBuckets.ToString() + ") " + method.ToString());

    for (int i = 0; i < nTotal; i++)
    {
        double df = rand.NextDouble();
        col.Add(df);
    }

    string str = "";
    List<double> rgdf = new List<double>();

    for (int i = 0; i < nBuckets; i++)
    {
        double dfPct = col[i].Count / (double)nTotal;
        str += dfPct.ToString("P");
        str += ", ";
        rgdf.Add(dfPct);
    }

    str = str.TrimEnd(',', ' ');
    log.WriteLine(method.ToString() + " =>> " + str);

    double dfStdev = stdDev(rgdf, false);
    log.WriteLine(method.ToString() + " stdev = " + dfStdev.ToString());
}
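testRandom leans on a stdDev helper that is not shown in this listing. A minimal sketch of what such a helper might look like, assuming the boolean selects sample (Bessel-corrected) versus population variance and that System.Linq is in scope; the real implementation may differ:

// Hypothetical sketch of the stdDev helper used above.
// bSample == true applies Bessel's correction (divide by n - 1).
private double stdDev(List<double> rgdf, bool bSample)
{
    if (rgdf.Count == 0 || (bSample && rgdf.Count < 2))
        return 0;

    double dfMean = rgdf.Average();
    double dfSumSq = rgdf.Sum(df => (df - dfMean) * (df - dfMean));
    int nDenom = bSample ? rgdf.Count - 1 : rgdf.Count;

    return Math.Sqrt(dfSumSq / nDenom);
}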
public DefaultGameContext(Size windowBoundsInPixels)
{
    Bounds = windowBoundsInPixels;

    _camera = Camera.CreateOrthographic(windowBoundsInPixels);

    Annotations = new BucketCollection<string, IEntityRecord>();

    Registry = EntityRegistry.Current;
    Registry.Entered += OnEntityEntered;
    Registry.Removed += OnEntityRemoved;

    Registry.SetTrigger(ForRenderables, (sender, args) =>
    {
        _renderer.Entered(args.AttachedComponents);
        _renderer.Removed(args.DettachedComponents);
    });

    Registry.SetTrigger(ForTransformables, (sender, args) =>
    {
        _transformer.Entered(args.AttachedComponents);
        _transformer.Removed(args.DettachedComponents);
    });

    Registry.SetTrigger(ForAnimatables, (sender, args) =>
    {
        _timeline.Entered(args.AttachedComponents);
        _timeline.Removed(args.DettachedComponents);
    });

    _camera.Background = new Color4(255, 0, 0, 255);
}
/// <summary>
/// The constructor.
/// </summary>
/// <param name="mycaffe">Specifies the MyCaffeControl to use for learning and prediction.</param>
/// <param name="properties">Specifies the property set containing the key/value pairs of property settings.</param>
/// <param name="random">Specifies the random number generator to use.</param>
/// <param name="icallback">Specifies the callback for parent notifications and queries.</param>
/// <param name="rgVocabulary">Specifies the vocabulary to use.</param>
public TrainerRNN(MyCaffeControl<T> mycaffe, PropertySet properties, CryptoRandom random, IxTrainerCallback icallback, BucketCollection rgVocabulary)
{
    m_icallback = icallback;
    m_mycaffe = mycaffe;
    m_properties = properties;
    m_random = random;
    m_rgVocabulary = rgVocabulary;
}
private int getLastPrediction(float[] rgDataRaw, BucketCollection rgVocabulary, int nLookahead = 1)
{
    // Get the probabilities for the last character of the first sequence in the batch.
    int nOffset = (m_nSequenceLength - nLookahead) * m_nBatchSize * m_nVocabSize;
    float[] rgData = new float[m_nVocabSize];

    for (int i = 0; i < rgData.Length; i++)
    {
        rgData[i] = rgDataRaw[nOffset + i];
    }

    return(getLastPrediction(rgData, rgVocabulary));
}
/// <summary>
/// The ResizeModel method gives the custom trainer the opportunity to resize the model if needed.
/// </summary>
/// <param name="strModel">Specifies the model descriptor.</param>
/// <param name="rgVocabulary">Specifies the vocabulary.</param>
/// <param name="log">Specifies the output log.</param>
/// <returns>A new model descriptor is returned (or the same 'strModel' if no changes were made).</returns>
/// <remarks>Note, this method is called after PreloadData.</remarks>
string IXMyCaffeCustomTrainerRNN.ResizeModel(Log log, string strModel, BucketCollection rgVocabulary)
{
    if (rgVocabulary == null || rgVocabulary.Count == 0)
    {
        return(strModel);
    }

    int nVocabCount = rgVocabulary.Count;
    NetParameter p = NetParameter.FromProto(RawProto.Parse(strModel));
    string strEmbedName = "";
    EmbedParameter embed = null;
    string strIpName = "";
    InnerProductParameter ip = null;

    foreach (LayerParameter layer in p.layer)
    {
        if (layer.type == LayerParameter.LayerType.EMBED)
        {
            strEmbedName = layer.name;
            embed = layer.embed_param;
        }
        else if (layer.type == LayerParameter.LayerType.INNERPRODUCT)
        {
            strIpName = layer.name;
            ip = layer.inner_product_param;
        }
    }

    if (embed != null)
    {
        if (embed.input_dim != (uint)nVocabCount)
        {
            log.WriteLine("WARNING: Embed layer '" + strEmbedName + "' input dim changed from " + embed.input_dim.ToString() + " to " + nVocabCount.ToString() + " to accommodate the vocabulary count.");
            embed.input_dim = (uint)nVocabCount;
        }
    }

    // Guard against models that do not contain an INNERPRODUCT layer.
    if (ip != null && ip.num_output != (uint)nVocabCount)
    {
        log.WriteLine("WARNING: InnerProduct layer '" + strIpName + "' num_output changed from " + ip.num_output.ToString() + " to " + nVocabCount.ToString() + " to accommodate the vocabulary count.");
        ip.num_output = (uint)nVocabCount;
    }

    m_rgVocabulary = rgVocabulary;

    RawProto proto = p.ToProto("root");
    return(proto.ToString());
}
private int getLastPrediction(float[] rgData, BucketCollection rgVocabulary)
{
    int nIdx = m_nVocabSize - 1;

    // If no temperature, return directly the character with the best score.
    if (m_dfTemperature == 0)
    {
        nIdx = ArgMax(rgData, 0, m_nVocabSize);
    }
    else
    {
        // Otherwise, compute the probabilities with the temperature and select the character according to the probabilities.
        double[] rgAccumulatedProba = new double[m_nVocabSize];
        double[] rgProba = new double[m_nVocabSize];
        double dfExpoSum = 0;

        // The max value is subtracted for numerical stability (the original
        // code subtracted the constant 'm_nVocabSize - 1' rather than the max score,
        // contradicting this comment).
        double dfMax = rgData[ArgMax(rgData, 0, m_nVocabSize)];

        for (int i = 0; i < m_nVocabSize; i++)
        {
            rgProba[i] = Math.Exp((rgData[i] - dfMax) / m_dfTemperature);
            dfExpoSum += rgProba[i];
        }

        rgProba[0] /= dfExpoSum;
        rgAccumulatedProba[0] = rgProba[0];

        double dfRandom = m_random.NextDouble();

        for (int i = 1; i < rgProba.Length; i++)
        {
            // Return the first index for which the accumulated probability is bigger than the random number.
            if (rgAccumulatedProba[i - 1] > dfRandom)
            {
                return(i - 1);
            }

            rgProba[i] /= dfExpoSum;
            rgAccumulatedProba[i] = rgAccumulatedProba[i - 1] + rgProba[i];
        }
    }

    // Valid vocabulary indices run from 0 to Count - 1 (the original check
    // allowed nIdx == Count, an off-by-one).
    if (nIdx < 0 || nIdx >= rgVocabulary.Count)
    {
        throw new Exception("Invalid index - out of the vocabulary range of [0," + (rgVocabulary.Count - 1).ToString() + "]");
    }

    return(nIdx);
}
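The temperature path above is ordinary softmax sampling. For reference, a self-contained sketch of the same technique, detached from the trainer's fields; all names here are illustrative, and System plus System.Linq are assumed:

// Illustrative sketch of softmax sampling with temperature; not part of the
// trainer above. Scores are shifted by their max before exponentiation so
// Math.Exp cannot overflow. dfTemperature must be > 0.
static int SampleWithTemperature(float[] rgScores, double dfTemperature, Random rand)
{
    double dfMax = rgScores.Max();
    double[] rgExp = rgScores.Select(f => Math.Exp((f - dfMax) / dfTemperature)).ToArray();
    double dfSum = rgExp.Sum();

    // Draw a point in [0, dfSum) and find the bucket it falls into.
    double dfRandom = rand.NextDouble() * dfSum;
    double dfAccum = 0;

    for (int i = 0; i < rgExp.Length; i++)
    {
        dfAccum += rgExp[i];
        if (dfAccum >= dfRandom)
            return i;
    }

    return rgExp.Length - 1; // Guard against floating point round-off.
}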
private int getLastPrediction(Blob<T> blobOutput, BucketCollection rgVocabulary)
{
    if (m_blobOutput != null)
    {
        blobOutput = m_blobOutput;
    }

    // Get the probabilities for the last character of the first sequence in the batch.
    int nOffset = (m_nSequenceLength - 1) * m_nBatchSize * m_nVocabSize;
    float[] rgDataRaw = Utility.ConvertVecF<T>(blobOutput.update_cpu_data());
    float[] rgData = new float[m_nVocabSize];

    for (int i = 0; i < rgData.Length; i++)
    {
        rgData[i] = rgDataRaw[nOffset + i];
    }

    return(getLastPrediction(rgData, rgVocabulary));
}
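Both getLastPrediction overloads index into output that was flattened from shape (sequence length, batch size, vocab size). A hypothetical helper showing the index arithmetic they rely on:

// Illustrative only: index of score v for timestep t of batch item n in a
// blob flattened from shape (nSeqLen, nBatch, nVocab). The overloads above
// use t = nSeqLen - nLookahead, n = 0.
static int FlatIndex(int t, int n, int v, int nBatch, int nVocab)
{
    return (t * nBatch + n) * nVocab + v;
}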
protected override BucketCollection preloaddata(Log log, CancelEvent evtCancel, int nProjectID)
{
    initialize(log);

    IXMyCaffeGymData igym = m_igym as IXMyCaffeGymData;
    Tuple<State, double, bool> state = igym.Reset();
    int nDataLen;
    SimpleDatum sd = state.Item1.GetData(false, out nDataLen);
    BucketCollection rgBucketCollection = null;

    if (sd.IsRealData)
    {
        // Create the vocabulary bucket collection for real valued data.
        rgBucketCollection = BucketCollection.Bucketize("Building vocabulary", 128, sd, log, evtCancel);
        if (rgBucketCollection == null)
        {
            return(null);
        }
    }
    else
    {
        // Byte data already has a discrete range, so collect the distinct values directly.
        List<int> rgVocabulary = new List<int>();

        for (int i = 0; i < sd.ByteData.Length; i++)
        {
            int nVal = (int)sd.ByteData[i];

            if (!rgVocabulary.Contains(nVal))
            {
                rgVocabulary.Add(nVal);
            }
        }

        rgBucketCollection = new BucketCollection(rgVocabulary);
    }

    m_firststate = state;

    return(rgBucketCollection);
}
/// <summary>
/// The ResizeModel method gives the custom trainer the opportunity to resize the model if needed.
/// </summary>
/// <param name="strModel">Specifies the model descriptor.</param>
/// <param name="rgVocabulary">Specifies the vocabulary.</param>
/// <returns>A new model descriptor is returned (or the same 'strModel' if no changes were made).</returns>
/// <remarks>Note, this method is called after PreloadData.</remarks>
public string ResizeModel(string strModel, BucketCollection rgVocabulary)
{
    if (rgVocabulary == null || rgVocabulary.Count == 0)
    {
        return(strModel);
    }

    int nVocabCount = rgVocabulary.Count;
    NetParameter p = NetParameter.FromProto(RawProto.Parse(strModel));
    EmbedParameter embed = null;
    InnerProductParameter ip = null;

    foreach (LayerParameter layer in p.layer)
    {
        if (layer.type == LayerParameter.LayerType.EMBED)
        {
            embed = layer.embed_param;
        }
        else if (layer.type == LayerParameter.LayerType.INNERPRODUCT)
        {
            ip = layer.inner_product_param;
        }
    }

    if (embed != null)
    {
        embed.input_dim = (uint)nVocabCount;
    }

    // Guard against models that do not contain an INNERPRODUCT layer.
    if (ip != null)
    {
        ip.num_output = (uint)nVocabCount;
    }

    m_rgVocabulary = rgVocabulary;

    RawProto proto = p.ToProto("root");
    return(proto.ToString());
}
public Brain(MyCaffeControl<T> mycaffe, PropertySet properties, CryptoRandom random, IxTrainerCallbackRNN icallback, Phase phase, BucketCollection rgVocabulary, bool bUsePreloadData, string strRunProperties = null)
{
    string strOutputBlob = null;

    if (strRunProperties != null)
    {
        m_runProperties = new PropertySet(strRunProperties);
    }

    m_icallback = icallback;
    m_mycaffe = mycaffe;
    m_properties = properties;
    m_random = random;
    m_rgVocabulary = rgVocabulary;
    m_bUsePreloadData = bUsePreloadData;
    m_nSolverSequenceLength = m_properties.GetPropertyAsInt("SequenceLength", -1);
    m_bDisableVocabulary = m_properties.GetPropertyAsBool("DisableVocabulary", false);
    m_nThreads = m_properties.GetPropertyAsInt("Threads", 1);
    m_dfScale = m_properties.GetPropertyAsDouble("Scale", 1.0);

    if (m_nThreads > 1)
    {
        m_dataPool.Initialize(m_nThreads, icallback);
    }

    if (m_runProperties != null)
    {
        m_dfTemperature = Math.Abs(m_runProperties.GetPropertyAsDouble("Temperature", 0));
        if (m_dfTemperature > 1.0)
        {
            m_dfTemperature = 1.0;
        }

        string strPhaseOnRun = m_runProperties.GetProperty("PhaseOnRun", false);
        switch (strPhaseOnRun)
        {
            case "RUN":
                m_phaseOnRun = Phase.RUN;
                break;

            case "TEST":
                m_phaseOnRun = Phase.TEST;
                break;

            case "TRAIN":
                m_phaseOnRun = Phase.TRAIN;
                break;
        }

        if (phase == Phase.RUN && m_phaseOnRun != Phase.NONE)
        {
            if (m_phaseOnRun != Phase.RUN)
            {
                m_mycaffe.Log.WriteLine("Warning: Running on the '" + m_phaseOnRun.ToString() + "' network.");
            }

            strOutputBlob = m_runProperties.GetProperty("OutputBlob", false);
            if (strOutputBlob == null)
            {
                throw new Exception("You must specify the 'OutputBlob' when Running with a phase other than RUN.");
            }

            strOutputBlob = Utility.Replace(strOutputBlob, '~', ';');
            phase = m_phaseOnRun;
        }
    }

    m_net = mycaffe.GetInternalNet(phase);
    if (m_net == null)
    {
        mycaffe.Log.WriteLine("WARNING: Test net does not exist, set test_iteration > 0. Using TRAIN phase instead.");
        m_net = mycaffe.GetInternalNet(Phase.TRAIN);
    }

    // Find the first LSTM layer to determine how to load the data.
    // NOTE: Only LSTM has a special loading order; other layers use the standard N, C, H, W ordering.
    LSTMLayer<T> lstmLayer = null;
    LSTMSimpleLayer<T> lstmSimpleLayer = null;

    foreach (Layer<T> layer1 in m_net.layers)
    {
        if (layer1.layer_param.type == LayerParameter.LayerType.LSTM)
        {
            lstmLayer = layer1 as LSTMLayer<T>;
            m_lstmType = LayerParameter.LayerType.LSTM;
            break;
        }
        else if (layer1.layer_param.type == LayerParameter.LayerType.LSTM_SIMPLE)
        {
            lstmSimpleLayer = layer1 as LSTMSimpleLayer<T>;
            m_lstmType = LayerParameter.LayerType.LSTM_SIMPLE;
            break;
        }
    }

    if (lstmLayer == null && lstmSimpleLayer == null)
    {
        throw new Exception("Could not find the required LSTM or LSTM_SIMPLE layer!");
    }

    if (m_phaseOnRun != Phase.NONE && m_phaseOnRun != Phase.RUN && strOutputBlob != null)
    {
        if ((m_blobOutput = m_net.FindBlob(strOutputBlob)) == null)
        {
            throw new Exception("Could not find the 'Output' layer top named '" + strOutputBlob + "'!");
        }
    }

    if ((m_blobData = m_net.FindBlob("data")) == null)
    {
        throw new Exception("Could not find the 'Input' layer top named 'data'!");
    }

    if ((m_blobClip = m_net.FindBlob("clip")) == null)
    {
        throw new Exception("Could not find the 'Input' layer top named 'clip'!");
    }

    Layer<T> layer = m_net.FindLastLayer(LayerParameter.LayerType.INNERPRODUCT);
    m_mycaffe.Log.CHECK(layer != null, "Could not find an ending INNERPRODUCT layer!");

    if (!m_bDisableVocabulary)
    {
        m_nVocabSize = (int)layer.layer_param.inner_product_param.num_output;
        if (rgVocabulary != null)
        {
            m_mycaffe.Log.CHECK_EQ(m_nVocabSize, rgVocabulary.Count, "The vocabulary count = '" + rgVocabulary.Count.ToString() + "' and last inner product output count = '" + m_nVocabSize.ToString() + "' - these do not match but they should!");
        }
    }

    if (m_lstmType == LayerParameter.LayerType.LSTM)
    {
        m_nSequenceLength = m_blobData.shape(0);
        m_nBatchSize = m_blobData.shape(1);
    }
    else
    {
        m_nBatchSize = (int)lstmSimpleLayer.layer_param.lstm_simple_param.batch_size;
        m_nSequenceLength = m_blobData.shape(0) / m_nBatchSize;

        if (phase == Phase.RUN)
        {
            m_nBatchSize = 1;

            List<int> rgNewShape = new List<int>() { m_nSequenceLength, 1 };
            m_blobData.Reshape(rgNewShape);
            m_blobClip.Reshape(rgNewShape);
            m_net.Reshape();
        }
    }

    m_mycaffe.Log.CHECK_EQ(m_nSequenceLength, m_blobData.num, "The data num must equal the sequence length of " + m_nSequenceLength.ToString());

    m_rgDataInput = new T[m_nSequenceLength * m_nBatchSize];

    T[] rgClipInput = new T[m_nSequenceLength * m_nBatchSize];
    m_mycaffe.Log.CHECK_EQ(rgClipInput.Length, m_blobClip.count(), "The clip count must equal the sequence length * batch size: " + rgClipInput.Length.ToString());
    m_tZero = (T)Convert.ChangeType(0, typeof(T));
    m_tOne = (T)Convert.ChangeType(1, typeof(T));

    for (int i = 0; i < rgClipInput.Length; i++)
    {
        if (m_lstmType == LayerParameter.LayerType.LSTM)
        {
            // LSTM data is ordered sequence-major: the first batch-size entries
            // are the start of each sequence, so only they get a 0 clip value.
            rgClipInput[i] = (i < m_nBatchSize) ? m_tZero : m_tOne;
        }
        else
        {
            // LSTM_SIMPLE data is ordered batch-major: a new sequence starts
            // every m_nSequenceLength entries.
            rgClipInput[i] = (i % m_nSequenceLength == 0) ? m_tZero : m_tOne;
        }
    }

    m_blobClip.mutable_cpu_data = rgClipInput;

    if (phase != Phase.RUN)
    {
        m_solver = mycaffe.GetInternalSolver();
        m_solver.OnStart += m_solver_OnStart;
        m_solver.OnTestStart += m_solver_OnTestStart;
        m_solver.OnTestingIteration += m_solver_OnTestingIteration;
        m_solver.OnTrainingIteration += m_solver_OnTrainingIteration;

        if ((m_blobLabel = m_net.FindBlob("label")) == null)
        {
            throw new Exception("Could not find the 'Input' layer top named 'label'!");
        }

        m_nSequenceLengthLabel = m_blobLabel.count(0, 2);
        m_rgLabelInput = new T[m_nSequenceLengthLabel];
        m_mycaffe.Log.CHECK_EQ(m_rgLabelInput.Length, m_blobLabel.count(), "The label count must equal the label sequence length * batch size: " + m_rgLabelInput.Length.ToString());
        m_mycaffe.Log.CHECK(m_nSequenceLengthLabel == m_nSequenceLength * m_nBatchSize || m_nSequenceLengthLabel == 1, "The label sequence length must be 1 or equal to the length of the sequence: " + m_nSequenceLength.ToString());
    }
}
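The clip initialization above is the usual recurrent continuation mask: 0 marks the first step of a sequence (reset the hidden state), 1 marks a continuation. A standalone sketch of the two orderings, with hypothetical names:

// Illustrative only: builds the clip mask for the two data orderings used above.
// Sequence-major (LSTM): index = t * nBatch + n, so t == 0 <=> index < nBatch.
// Batch-major (LSTM_SIMPLE): index = n * nSeqLen + t, so t == 0 <=> index % nSeqLen == 0.
static float[] BuildClipMask(int nSeqLen, int nBatch, bool bSequenceMajor)
{
    float[] rgClip = new float[nSeqLen * nBatch];

    for (int i = 0; i < rgClip.Length; i++)
    {
        bool bSequenceStart = bSequenceMajor ? (i < nBatch) : (i % nSeqLen == 0);
        rgClip[i] = bSequenceStart ? 0.0f : 1.0f;
    }

    return rgClip;
}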
public Agent(IxTrainerCallback icallback, MyCaffeControl<T> mycaffe, PropertySet properties, CryptoRandom random, Phase phase, BucketCollection rgVocabulary, bool bUsePreloadData, string strRunProperties = null)
{
    m_icallback = icallback;
    m_brain = new Brain<T>(mycaffe, properties, random, icallback as IxTrainerCallbackRNN, phase, rgVocabulary, bUsePreloadData, strRunProperties);
    m_properties = properties;
    m_random = random;
}
/// <summary>
/// Setup the layer.
/// </summary>
/// <param name="colBottom">Specifies the collection of bottom (input) Blobs.</param>
/// <param name="colTop">Specifies the collection of top (output) Blobs.</param>
public override void LayerSetUp(BlobCollection<T> colBottom, BlobCollection<T> colTop)
{
    m_rgOneHotVector = new float[m_param.onehot_param.num_output];
    m_colBuckets = new BucketCollection(m_param.onehot_param.min, m_param.onehot_param.max, (int)m_param.onehot_param.num_output);
    m_nAxis = colBottom[0].CanonicalAxisIndex(m_param.onehot_param.axis);
}
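The layer above discretizes a real input into one of num_output buckets over [min, max] and emits a one-hot vector. Assuming BucketCollection divides the range into equal-width buckets, the mapping amounts to the following hypothetical helper; for example, OneHotBucket(0.25, 0, 1, 4) sets index 1:

// Illustrative only: maps a value into one of nBuckets equal-width buckets
// over [dfMin, dfMax] and returns a one-hot encoding of the bucket index.
static float[] OneHotBucket(double dfVal, double dfMin, double dfMax, int nBuckets)
{
    double dfWidth = (dfMax - dfMin) / nBuckets;
    int nIdx = (int)((dfVal - dfMin) / dfWidth);

    // Clamp values that fall outside [dfMin, dfMax] into the edge buckets.
    nIdx = Math.Max(0, Math.Min(nBuckets - 1, nIdx));

    float[] rgOneHot = new float[nBuckets];
    rgOneHot[nIdx] = 1.0f;

    return rgOneHot;
}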
public StatsCollection(StatsCollectionConfig config)
{
    this.config = config;
    this.buckets = new BucketCollection(config);
}
private void testRandomIdx(Log log, CryptoRandom.METHOD method, int nBuckets)
{
    PreTest.Init();

    BucketCollection col = new BucketCollection(0.0, 1.0, nBuckets);
    CryptoRandom rand = new CryptoRandom(method, Guid.NewGuid().GetHashCode());
    int nTotal = 100000;

    log.WriteLine("Testing (" + nBuckets.ToString() + ") " + method.ToString());

    List<int> rgIdx1 = new List<int>();
    List<List<int>> rgrgPermutations = new List<List<int>>();

    for (int i = 0; i < nTotal / nBuckets; i++)
    {
        List<int> rgPermutation = new List<int>();

        for (int j = 0; j < nBuckets; j++)
        {
            int nIdx = rand.Next(nBuckets);
            double dfPct = (double)nIdx / (double)nBuckets;

            rgPermutation.Add(nIdx);
            col.Add(dfPct);
        }

        rgrgPermutations.Add(rgPermutation);
    }

    string str = "";
    List<double> rgdf = new List<double>();

    for (int i = 0; i < nBuckets; i++)
    {
        double dfPct = col[i].Count / (double)nTotal;
        str += dfPct.ToString("P");
        str += ", ";
        rgdf.Add(dfPct);
    }

    str = str.TrimEnd(',', ' ');
    log.WriteLine(method.ToString() + " =>> " + str);

    double dfStdev = stdDev(rgdf, false);
    log.WriteLine(method.ToString() + " stdev = " + dfStdev.ToString());

    // Verify permutation uniqueness.
    int nDuplicateCount = 0;
    int nPermutationCount = rgrgPermutations.Count;
    Stopwatch sw = new Stopwatch();

    sw.Start();

    int nProgressIdx = 0;

    while (rgrgPermutations.Count > 1)
    {
        List<int> rgPermutation1 = rgrgPermutations[0];
        rgrgPermutations.RemoveAt(0);

        List<int> rgRemove = new List<int>();

        for (int j = 0; j < rgrgPermutations.Count; j++)
        {
            if (compareLists(rgPermutation1, rgrgPermutations[j]))
            {
                nDuplicateCount++;
                rgRemove.Add(j);
            }
        }

        for (int j = rgRemove.Count - 1; j >= 0; j--)
        {
            rgrgPermutations.RemoveAt(rgRemove[j]);
        }

        if (sw.Elapsed.TotalMilliseconds > 2000)
        {
            log.Progress = (double)nProgressIdx / (double)nPermutationCount;
            log.WriteLine("Permutation checking at " + log.Progress.ToString("P") + "...");
            sw.Restart();
        }

        nProgressIdx++;
    }

    log.WriteLine("Out of " + nPermutationCount.ToString("N0") + " permutations, " + nDuplicateCount.ToString("N0") + " duplicates were found (" + ((double)nDuplicateCount / nPermutationCount).ToString("P") + ").");
}
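The pairwise duplicate scan above is O(n²) in the number of permutations (compareLists presumably compares element-wise). If only the duplicate count is needed, hashing a canonical key per permutation yields the same count in roughly linear time; a sketch:

// Illustrative alternative: count duplicate permutations with a HashSet
// keyed on a canonical string form of each permutation. For k copies of a
// permutation, k - 1 insertions fail, matching the pairwise count above.
static int CountDuplicates(List<List<int>> rgrgPermutations)
{
    HashSet<string> rgSeen = new HashSet<string>();
    int nDuplicates = 0;

    foreach (List<int> rgPermutation in rgrgPermutations)
    {
        string strKey = string.Join(",", rgPermutation);

        if (!rgSeen.Add(strKey)) // Add returns false if the key already exists.
            nDuplicates++;
    }

    return nDuplicates;
}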
public Brain(MyCaffeControl<T> mycaffe, PropertySet properties, CryptoRandom random, IxTrainerCallbackRNN icallback, Phase phase, BucketCollection rgVocabulary, string strRunProperties = null)
{
    string strOutputBlob = null;

    if (strRunProperties != null)
    {
        m_runProperties = new PropertySet(strRunProperties);
    }

    m_icallback = icallback;
    m_mycaffe = mycaffe;
    m_properties = properties;
    m_random = random;
    m_rgVocabulary = rgVocabulary;

    if (m_runProperties != null)
    {
        m_dfTemperature = m_runProperties.GetPropertyAsDouble("Temperature", 0);

        string strPhaseOnRun = m_runProperties.GetProperty("PhaseOnRun", false);
        switch (strPhaseOnRun)
        {
            case "RUN":
                m_phaseOnRun = Phase.RUN;
                break;

            case "TEST":
                m_phaseOnRun = Phase.TEST;
                break;

            case "TRAIN":
                m_phaseOnRun = Phase.TRAIN;
                break;
        }

        if (phase == Phase.RUN && m_phaseOnRun != Phase.NONE)
        {
            if (m_phaseOnRun != Phase.RUN)
            {
                m_mycaffe.Log.WriteLine("Warning: Running on the '" + m_phaseOnRun.ToString() + "' network.");
            }

            strOutputBlob = m_runProperties.GetProperty("OutputBlob", false);
            if (strOutputBlob == null)
            {
                throw new Exception("You must specify the 'OutputBlob' when Running with a phase other than RUN.");
            }

            strOutputBlob = Utility.Replace(strOutputBlob, '~', ';');
            phase = m_phaseOnRun;
        }
    }

    m_net = mycaffe.GetInternalNet(phase);

    // Find the first LSTM layer to determine how to load the data.
    // NOTE: Only LSTM has a special loading order; other layers use the standard N, C, H, W ordering.
    LSTMLayer<T> lstmLayer = null;
    LSTMSimpleLayer<T> lstmSimpleLayer = null;

    foreach (Layer<T> layer1 in m_net.layers)
    {
        if (layer1.layer_param.type == LayerParameter.LayerType.LSTM)
        {
            lstmLayer = layer1 as LSTMLayer<T>;
            m_lstmType = LayerParameter.LayerType.LSTM;
            break;
        }
        else if (layer1.layer_param.type == LayerParameter.LayerType.LSTM_SIMPLE)
        {
            lstmSimpleLayer = layer1 as LSTMSimpleLayer<T>;
            m_lstmType = LayerParameter.LayerType.LSTM_SIMPLE;
            break;
        }
    }

    if (lstmLayer == null && lstmSimpleLayer == null)
    {
        throw new Exception("Could not find the required LSTM or LSTM_SIMPLE layer!");
    }

    if (m_phaseOnRun != Phase.NONE && m_phaseOnRun != Phase.RUN && strOutputBlob != null)
    {
        if ((m_blobOutput = m_net.FindBlob(strOutputBlob)) == null)
        {
            throw new Exception("Could not find the 'Output' layer top named '" + strOutputBlob + "'!");
        }
    }

    if ((m_blobData = m_net.FindBlob("data")) == null)
    {
        throw new Exception("Could not find the 'Input' layer top named 'data'!");
    }

    if ((m_blobClip = m_net.FindBlob("clip")) == null)
    {
        throw new Exception("Could not find the 'Input' layer top named 'clip'!");
    }

    Layer<T> layer = m_net.FindLastLayer(LayerParameter.LayerType.INNERPRODUCT);
    m_mycaffe.Log.CHECK(layer != null, "Could not find an ending INNERPRODUCT layer!");

    m_nVocabSize = (int)layer.layer_param.inner_product_param.num_output;
    if (rgVocabulary != null)
    {
        m_mycaffe.Log.CHECK_EQ(m_nVocabSize, rgVocabulary.Count, "The vocabulary count and last inner product output count should match!");
    }

    if (m_lstmType == LayerParameter.LayerType.LSTM)
    {
        m_nSequenceLength = m_blobData.shape(0);
        m_nBatchSize = m_blobData.shape(1);
    }
    else
    {
        m_nBatchSize = (int)lstmSimpleLayer.layer_param.lstm_simple_param.batch_size;
        m_nSequenceLength = m_blobData.shape(0) / m_nBatchSize;

        if (phase == Phase.RUN)
        {
            m_nBatchSize = 1;

            List<int> rgNewShape = new List<int>() { m_nSequenceLength, 1 };
            m_blobData.Reshape(rgNewShape);
            m_blobClip.Reshape(rgNewShape);
            m_net.Reshape();
        }
    }

    m_mycaffe.Log.CHECK_EQ(m_blobData.count(), m_blobClip.count(), "The data and clip blobs must have the same count!");

    m_rgDataInput = new T[m_nSequenceLength * m_nBatchSize];

    T[] rgClipInput = new T[m_nSequenceLength * m_nBatchSize];
    m_tZero = (T)Convert.ChangeType(0, typeof(T));
    m_tOne = (T)Convert.ChangeType(1, typeof(T));

    for (int i = 0; i < rgClipInput.Length; i++)
    {
        if (m_lstmType == LayerParameter.LayerType.LSTM)
        {
            rgClipInput[i] = (i < m_nBatchSize) ? m_tZero : m_tOne;
        }
        else
        {
            rgClipInput[i] = (i % m_nSequenceLength == 0) ? m_tZero : m_tOne;
        }
    }

    m_blobClip.mutable_cpu_data = rgClipInput;

    if (phase != Phase.RUN)
    {
        m_solver = mycaffe.GetInternalSolver();
        m_solver.OnStart += m_solver_OnStart;
        m_solver.OnTestStart += m_solver_OnTestStart;
        m_solver.OnTestingIteration += m_solver_OnTestingIteration;
        m_solver.OnTrainingIteration += m_solver_OnTrainingIteration;

        if ((m_blobLabel = m_net.FindBlob("label")) == null)
        {
            throw new Exception("Could not find the 'Input' layer top named 'label'!");
        }

        m_rgLabelInput = new T[m_nSequenceLength * m_nBatchSize];
        m_mycaffe.Log.CHECK_EQ(m_blobData.count(), m_blobLabel.count(), "The data and label blobs must have the same count!");
    }
}