/// <summary>
/// Worker thread that fills the per-batch data queues whenever the data-ready event fires.
/// </summary>
/// <param name="sender">Specifies the sender of the event.</param>
/// <param name="e">Specifies the action state arguments.</param>
private void m_internalThread_DoWork(object sender, ActionStateArgs<T> e)
{
    // Keep servicing requests until the owning thread asks us to stop.
    while (!m_internalThread.CancellationPending)
    {
        // Wait briefly for the data-ready signal; on timeout, loop and re-check cancellation.
        if (!m_evtDataReady.WaitOne(10))
            continue;

        for (int nBatch = 0; nBatch < m_nNumBatches; nBatch++)
        {
            for (int nItem = 0; nItem < m_nBatchSize; nItem++)
            {
                // Abort immediately when cancelled.
                if (m_evtCancel.WaitOne(0))
                    return;

                int nInputIdx = nBatch * m_nBatchSize + nItem;
                int nImageIdx = (int)Convert.ChangeType(m_rgInput[nInputIdx], typeof(int));

                // Pull the image from the image database and queue it for its batch.
                SimpleDatum sd = m_imgdb.QueryImage(m_src.ID, nImageIdx, IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.NONE);
                m_rgrgDataQueue[nBatch].Push(new Datum(sd));
            }
        }
    }
}
/// <summary>
/// Download the most recent patcher executable into a temporary file.
/// </summary>
/// <returns>The path of the downloaded temporary file, or <i>null</i> when cancelled before starting.</returns>
private async Task<string> DownloadLatestPatcher()
{
    Label = DownloadingLabel;

    try
    {
        // Honor a pending cancellation before doing any work.
        if (CancelEvent.WaitOne(0))
            return null;

        Downloader downloader = new Downloader(CancelEvent);
        downloader.DownloadProgress += OnDownloadProgress;

        HttpFileInfo fileInfo = await GetLatestPatcherUrl(downloader);
        if (fileInfo == null)
            throw new Exception("Не удалось найти свежую версию программы установки. Проверьте файл конфигурации.");

        Maximum = fileInfo.ContentLength;

        string strTempFile = Path.GetTempFileName();
        await downloader.Download(fileInfo.Url, strTempFile);

        return strTempFile;
    }
    finally
    {
        // Restore the label whether we succeeded, failed or were cancelled.
        Label = InstallLabel;
    }
}
/// <summary>
/// Download the latest translation archive and stamp it with the server's last-modified time.
/// </summary>
protected override async Task DoAction()
{
    Label = DownloadingLabel;

    try
    {
        // Honor a pending cancellation before doing any work.
        if (CancelEvent.WaitOne(0))
            return;

        Downloader downloader = new Downloader(CancelEvent);
        downloader.DownloadProgress += OnDownloadProgress;

        HttpFileInfo fileInfo = _latestTranslationInfo.Value;
        Maximum = fileInfo.ContentLength;

        string strArchivePath = PatcherService.ArchiveFileName;
        await downloader.Download(fileInfo.Url, strArchivePath);

        // Preserve the remote timestamp on the local archive when it is known.
        if (fileInfo.LastModified != null)
            File.SetLastWriteTime(strArchivePath, fileInfo.LastModified.Value);

        SubLabel = null;
    }
    finally
    {
        Label = DownloadLabel;
    }
}
/// <summary>
/// The DownSample method reduces the number of samples per second in the resulting sample set.
/// </summary>
/// <param name="nNewSamplesPerSecond">Specifies the new (lower) samples per second - must be a factor of the original samples per second.</param>
/// <returns>A new WAVProcessor with the new downsampled samples is returned, or <i>null</i> when cancelled.</returns>
/// <exception cref="Exception">Thrown when the new rate is greater than, or is not a factor of, the original rate.</exception>
public WAVProcessor DownSample(int nNewSamplesPerSecond)
{
    if (m_fmt.nSamplesPerSec < nNewSamplesPerSecond)
        throw new Exception("The new sample rate (" + nNewSamplesPerSecond.ToString() + ") must be less than the old sample rate (" + m_fmt.nSamplesPerSec.ToString() + ").");

    if (m_fmt.nSamplesPerSec % nNewSamplesPerSecond != 0)
        throw new Exception("The new sample rate (" + nNewSamplesPerSecond.ToString() + ") must be a factor of the old sample rate (" + m_fmt.nSamplesPerSec.ToString() + ").");

    // Keep every nStep-th sample to reach the requested rate.
    int nStep = (int)m_fmt.nSamplesPerSec / nNewSamplesPerSecond;
    List<double[]> rgrgNewSamples = new List<double[]>();
    List<double> rgSamples = new List<double>();
    int nIdx = 0;
    int nTotal = m_rgrgSamples.Count * m_rgrgSamples[0].Length;
    Stopwatch sw = new Stopwatch();

    sw.Start();

    for (int i = 0; i < m_rgrgSamples.Count; i++)
    {
        for (int j = 0; j < m_rgrgSamples[i].Length; j++)
        {
            if (j % nStep == 0)
            {
                double fVal = m_rgrgSamples[i][j];
                rgSamples.Add(fVal);

                if (m_evtCancel.WaitOne(0))
                    return null;

                // Report progress roughly once per second.
                if (sw.Elapsed.TotalMilliseconds > 1000)
                {
                    double dfPct = (double)nIdx / (double)nTotal;
                    m_log.WriteLine("Downsampling at " + dfPct.ToString("P") + "...");
                    sw.Restart();
                }
            }

            nIdx++;
        }

        rgrgNewSamples.Add(rgSamples.ToArray());
        rgSamples = new List<double>();
    }

    WaveFormat fmt = m_fmt;
    fmt.nSamplesPerSec = (uint)nNewSamplesPerSecond;
    // BUGFIX: average bytes per second = samples/sec * nBlockAlign (bytes per sample
    // frame across all channels, per WAVEFORMATEX).  The previous calculation used
    // wBitsPerSample, which is in bits and ignores the channel count, producing an
    // incorrect value (8x too large per channel).
    fmt.nAvgBytesPerSec = (uint)nNewSamplesPerSecond * fmt.nBlockAlign;

    return new WAVProcessor(fmt, rgrgNewSamples, m_log, m_evtCancel);
}
/// <summary>
/// Delete all files within a directory, then the directory itself once it is empty.
/// </summary>
/// <param name="strDir">Specifies the directory to delete.</param>
/// <returns>Returns <i>false</i> when the directory does not exist or the operation is cancelled, otherwise <i>true</i>.</returns>
public bool DeleteDirectory(string strDir)
{
    if (!Directory.Exists(strDir))
        return false;

    string[] rgstrFiles = Directory.GetFiles(strDir);
    Stopwatch sw = Stopwatch.StartNew();

    for (int i = 0; i < rgstrFiles.Length; i++)
    {
        File.Delete(rgstrFiles[i]);

        // Stop at once when cancelled, leaving remaining files in place.
        if (m_evtCancel.WaitOne(0))
            return false;

        // Report progress roughly once per second.
        if (sw.Elapsed.TotalMilliseconds > 1000)
        {
            m_log.Progress = (double)i / (double)rgstrFiles.Length;
            m_log.WriteLine("deleting " + i.ToString("N0") + " of " + rgstrFiles.Length.ToString("N0") + "...");
            sw.Restart();
        }
    }

    // Only remove the directory itself if no files remain.
    if (Directory.GetFiles(strDir).Length == 0)
        Directory.Delete(strDir);

    return true;
}
/// <summary>
/// Retrieve an item from the front of the queue, but do not remove it.
/// </summary>
/// <remarks>
/// This function will wait until either data is added to the queue or the CancelEvent or AbortEvent are set.
/// </remarks>
/// <param name="t">Specifies the item peeked (the item remains in the queue).</param>
/// <returns>If a CancelEvent occurs, <i>false</i> is returned, otherwise if the data is successfully read from the queue <i>true</i> is returned.</returns>
public bool Peek(ref T t)
{
    while (true)
    {
        // BUGFIX: the emptiness check and the head read must occur under the same
        // lock.  The original checked Count outside the lock and indexed m_rgData[0]
        // inside it, so a concurrent consumer draining the queue between the two
        // could cause an ArgumentOutOfRangeException.
        lock (m_syncObj)
        {
            if (m_rgData.Count > 0)
            {
                t = m_rgData[0];
                return true;
            }
        }

        if (m_evtReady == null)
            return false;

        if (m_evtCancel.WaitOne(0))
            return false;

        // Yield the rest of the time slice while waiting for data to arrive.
        Thread.Sleep(0);
    }
}
/// <summary>
/// Validate the game location, pause any playing music, then launch the game process.
/// </summary>
protected override async Task DoAction()
{
    Label = PlayingLabel;

    try
    {
        Maximum = 2;

        GameLocationInfo gameLocation = PatcherService.GetGameLocation(FFXIIIGamePart.Part1);
        gameLocation.Validate();
        Position = 1;

        // Bail out if the user cancelled during validation.
        if (CancelEvent.WaitOne(0))
            return;

        // Pause background music before the game takes over audio.
        if (MusicPlayer != null && MusicPlayer.PlaybackState == NAudio.Wave.PlaybackState.Playing)
            MusicPlayer.Pause();

        string strArgs = GameSettings.GetGameProcessArguments();
        await Task.Factory.StartNew(() => Process.Start(gameLocation.ExecutablePath, strArgs));
        Position = 2;

        // Optionally close the main window once the game is running.
        if (InteractionService.LocalizatorEnvironment.Provide().ExitAfterRunGame)
            Application.Current.MainWindow.Close();
    }
    finally
    {
        Label = PlayLabel;
    }
}
/// <summary>
/// Load one image tar file into a new database source, optionally extracting the tar first.
/// </summary>
/// <param name="strImagesFile">Specifies the tar file containing the images.</param>
/// <param name="strSourceName">Specifies the name of the data source to create.</param>
/// <param name="nExtractTotal">Specifies the total number of items to extract (for progress).</param>
/// <param name="nExtractIdx">Specifies the running extraction index, updated on return.</param>
/// <param name="nTotal">Specifies the total number of images to load (for progress).</param>
/// <param name="nIdx">Specifies the running image index, updated on return.</param>
/// <param name="log">Specifies the output log.</param>
/// <param name="bExtractFiles">Specifies whether to extract the tar file to disk first.</param>
/// <param name="rgNameToLabel">Specifies the mapping of label names to label values.</param>
/// <returns>Returns <i>false</i> when cancelled or extraction fails, otherwise <i>true</i>.</returns>
private bool loadFile(string strImagesFile, string strSourceName, int nExtractTotal, ref int nExtractIdx, int nTotal, ref int nIdx, Log log, bool bExtractFiles, Dictionary<string, int> rgNameToLabel)
{
    Stopwatch sw = new Stopwatch();

    reportProgress(nIdx, nTotal, " Source: " + strSourceName);
    reportProgress(nIdx, nTotal, "  loading " + strImagesFile + "...");

    // NOTE(review): 'fs' is declared but never assigned anywhere in this method,
    // so the finally-block Dispose is dead code - confirm whether a FileStream
    // open was removed at some point.
    FileStream fs = null;

    try
    {
        int nSrcId = m_factory.AddSource(strSourceName, 3, -1, -1, false);
        addLabels(nSrcId, rgNameToLabel);
        m_factory.Open(nSrcId, 500, Database.FORCE_LOAD.NONE, log);

        // The extraction directory is the tar file path minus its ".tar" extension.
        int nPos = strImagesFile.ToLower().LastIndexOf(".tar");
        string strPath = strImagesFile.Substring(0, nPos);

        if (!Directory.Exists(strPath))
            Directory.CreateDirectory(strPath);

        if (bExtractFiles)
        {
            log.Progress = (double)nIdx / nExtractTotal;
            log.WriteLine("Extracting files from '" + strImagesFile + "'...");

            // ExtractTar returns 0 when aborted (e.g. by the cancel event).
            if ((nExtractIdx = TarFile.ExtractTar(strImagesFile, strPath, m_evtCancel, log, nExtractTotal, nExtractIdx)) == 0)
            {
                log.WriteLine("Aborted.");
                return(false);
            }
        }

        // Load the annotations.
        SimpleDatum.ANNOTATION_TYPE type = SimpleDatum.ANNOTATION_TYPE.BBOX;
        int nResizeHeight = 0;
        int nResizeWidth = 0;

        // Create the training database images.
        // Create the master list file.
        List<Tuple<string, string>> rgFiles = createFileList(log, strPath);

        sw.Start();

        for (int i = 0; i < rgFiles.Count; i++)
        {
            // Item1 = image file, Item2 = annotation file.
            SimpleDatum datum = loadDatum(log, rgFiles[i].Item1, rgFiles[i].Item2, nResizeHeight, nResizeWidth, type, rgNameToLabel);
            m_factory.PutRawImageCache(nIdx, datum);
            nIdx++;

            if (m_evtCancel.WaitOne(0))
            {
                log.WriteLine("Aborted.");
                return(false);
            }

            // Report progress roughly once per second.
            if (sw.Elapsed.TotalMilliseconds > 1000)
            {
                log.Progress = (double)nIdx / nTotal;
                log.WriteLine("Loading file " + i.ToString() + " of " + rgFiles.Count.ToString() + "...");
                sw.Restart();
            }
        }

        // Flush any remaining cached images and close the source.
        m_factory.ClearImageCashe(true);
        m_factory.Close();
    }
    finally
    {
        if (fs != null)
            fs.Dispose();
    }

    return(true);
}
/// <summary>
/// Background worker loop that waits for commands and runs them against a MyCaffe instance.
/// </summary>
/// <param name="sender">Specifies the BackgroundWorker running this method.</param>
/// <param name="e">Specifies the DoWork event arguments (not used).</param>
private void m_bwProcess_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = sender as BackgroundWorker;
    Log log = new Log("MyCaffe");
    MyCaffeControl<float> caffe = null;

    log.OnWriteLine += log_OnWriteLine1;

    // Process commands until the cancel event is signalled.
    while (!m_evtCancel.WaitOne(0))
    {
        // Wait on all cancel handles plus the command-ready event.  The cancel
        // handles occupy the low indexes, so nWait > 0 means a command was read.
        List<WaitHandle> rgWait = new List<WaitHandle>();
        rgWait.AddRange(m_evtCancel.Handles);
        rgWait.Add(m_evtCommandRead);

        int nWait = WaitHandle.WaitAny(rgWait.ToArray());

        if (nWait > 0)
        {
            try
            {
                switch (m_Cmd)
                {
                    // Create the MyCaffe control and load the LeNet solver/model for training.
                    case COMMAND.CREATE:
                        SettingsCaffe settings = new SettingsCaffe();
                        settings.ImageDbLoadMethod = IMAGEDB_LOAD_METHOD.LOAD_ALL;
                        settings.EnableRandomInputSelection = true;

                        caffe = new MyCaffeControl<float>(settings, log, m_evtCaffeCancel);

                        string strSolver = System.Text.Encoding.UTF8.GetString(Properties.Resources.lenet_solver);
                        string strModel = System.Text.Encoding.UTF8.GetString(Properties.Resources.lenet_train_test);

                        caffe.Load(Phase.TRAIN, strSolver, strModel, null);
                        bw.ReportProgress(1, new ProgressInfo(1, 1, "MyCaffe Created.", null, true));
                        break;

                    // Dispose the MyCaffe control.
                    // NOTE(review): 'caffe' is null if CREATE never ran; the resulting
                    // NullReferenceException would only be logged below - confirm intended.
                    case COMMAND.DESTROY:
                        m_bCaffeCreated = false;
                        caffe.Dispose();
                        caffe = null;
                        bw.ReportProgress(0, new ProgressInfo(0, 0, "MyCaffe Destroyed", null, false));
                        break;

                    // Train for 5000 iterations, then cache the weights and image mean.
                    case COMMAND.TRAIN:
                        caffe.Train(5000);
                        m_rgTrainedWeights = caffe.GetWeights();
                        m_sdImageMean = caffe.GetImageMean();
                        bw.ReportProgress(0, new ProgressInfo(0, 0, "MyCaffe Training completed.", null, true));
                        break;

                    // Run a 100-iteration test pass and log the accuracy.
                    case COMMAND.TEST:
                        double dfAccuracy = caffe.Test(100);
                        log.WriteLine("Accuracy = " + dfAccuracy.ToString("P"));
                        bw.ReportProgress(0, new ProgressInfo(0, 0, "MyCaffe Testing completed.", null, true));
                        break;

                    // Report the CUDA device name and detailed device info.
                    case COMMAND.DEVICEINFO:
                        string str1 = caffe.GetDeviceName(0);
                        str1 += Environment.NewLine;
                        str1 += caffe.Cuda.GetDeviceInfo(0, true);
                        bw.ReportProgress(0, new ProgressInfo(0, 0, str1, null, true));
                        break;
                }
            }
            catch (Exception excpt)
            {
                // Command failures are logged; the worker keeps running.
                log.WriteError(excpt);
            }
        }

        m_evtCaffeCancel.Reset();
    }

    // Clean up the MyCaffe control on exit.
    if (caffe != null)
    {
        caffe.Dispose();
        m_evtCancel.Reset();
        m_bCaffeCreated = false;
    }

    m_evtThreadDone.Set();
}
/// <summary>
/// Load an MNIST-format image/label file pair into a database source and/or export it to files.
/// </summary>
/// <param name="factory">Specifies the dataset factory, or <i>null</i> when only exporting to file.</param>
/// <param name="strImagesFile">Specifies the MNIST image file (magic 2051).</param>
/// <param name="strLabelsFile">Specifies the MNIST label file (magic 2049).</param>
/// <param name="strSourceName">Specifies the name of the data source.</param>
/// <param name="strExportPath">Specifies the export directory root, or <i>null</i> when not exporting.</param>
/// <returns>Returns <i>false</i> when cancelled, otherwise <i>true</i>.</returns>
private bool loadFile(DatasetFactory factory, string strImagesFile, string strLabelsFile, string strSourceName, string strExportPath)
{
    if (strExportPath != null)
    {
        strExportPath += strSourceName;

        if (!Directory.Exists(strExportPath))
            Directory.CreateDirectory(strExportPath);
    }

    Stopwatch sw = new Stopwatch();

    reportProgress(0, 0, " Source: " + strSourceName);
    reportProgress(0, 0, "  loading " + strImagesFile + "...");

    BinaryFile image_file = new BinaryFile(strImagesFile);
    BinaryFile label_file = new BinaryFile(strLabelsFile);

    try
    {
        // Verify the files
        uint magicImg = image_file.ReadUInt32();
        uint magicLbl = label_file.ReadUInt32();

        if (magicImg != 2051)
            throw new Exception("Incorrect image file magic.");

        if (magicLbl != 2049)
            throw new Exception("Incorrect label file magic.");

        uint num_items = image_file.ReadUInt32();
        uint num_labels = label_file.ReadUInt32();

        if (num_items != num_labels)
            throw new Exception("The number of items must be equal to the number of labels!");

        // Add the data source to the database.
        uint rows = image_file.ReadUInt32();
        uint cols = image_file.ReadUInt32();
        int nChannels = 1; // black and white

        if (factory != null)
        {
            int nSrcId = factory.AddSource(strSourceName, nChannels, (int)cols, (int)rows, false, 0, true);
            factory.Open(nSrcId, 500, Database.FORCE_LOAD.NONE, m_log);
            factory.DeleteSourceData();
        }

        // Storing to database;
        byte[] rgLabel;
        byte[] rgPixels;

        // A single Datum is reused for every item; SetData below replaces its contents.
        Datum datum = new Datum(false, nChannels, (int)cols, (int)rows, -1, DateTime.MinValue, new List<byte>(), 0, false, -1);

        // NOTE(review): "exporing" is a typo in a runtime progress string - left as-is
        // here since changing runtime text is out of scope for a documentation pass.
        string strAction = (m_param.ExportToFile) ? "exporing" : "loading";

        reportProgress(0, (int)num_items, "  " + strAction + " a total of " + num_items.ToString() + " items.");
        reportProgress(0, (int)num_items, "   (with rows: " + rows.ToString() + ", cols: " + cols.ToString() + ")");

        sw.Start();

        List<SimpleDatum> rgImg = new List<SimpleDatum>();

        FileStream fsFileDesc = null;
        StreamWriter swFileDesc = null;
        if (m_param.ExportToFile)
        {
            string strFile = strExportPath + "\\file_list.txt";
            fsFileDesc = File.OpenWrite(strFile);
            swFileDesc = new StreamWriter(fsFileDesc);
        }

        for (int i = 0; i < num_items; i++)
        {
            rgPixels = image_file.ReadBytes((int)(rows * cols));
            rgLabel = label_file.ReadBytes(1);

            // Report progress roughly once per second.
            if (sw.Elapsed.TotalMilliseconds > 1000)
            {
                reportProgress(i, (int)num_items, "  " + strAction + " data...");
                sw.Restart();
            }

            datum.SetData(rgPixels.ToList(), (int)rgLabel[0]);

            if (factory != null)
                factory.PutRawImageCache(i, datum);
            else if (strExportPath != null)
                saveToFile(strExportPath, i, datum, swFileDesc);

            rgImg.Add(new SimpleDatum(datum));

            // NOTE(review): on this cancel path swFileDesc/fsFileDesc are not
            // flushed or disposed - confirm whether that is acceptable.
            if (m_evtCancel.WaitOne(0))
                return(false);
        }

        if (swFileDesc != null)
        {
            swFileDesc.Flush();
            swFileDesc.Close();
            swFileDesc.Dispose();
            fsFileDesc.Close();
            fsFileDesc.Dispose();
        }

        if (factory != null)
        {
            // Flush cached images, update counts, and save the computed image mean.
            factory.ClearImageCashe(true);
            factory.UpdateSourceCounts();
            factory.SaveImageMean(SimpleDatum.CalculateMean(m_log, rgImg.ToArray(), new WaitHandle[] { new ManualResetEvent(false) }), true);
        }

        reportProgress((int)num_items, (int)num_items, "  " + strAction + " completed.");
    }
    finally
    {
        image_file.Dispose();
        label_file.Dispose();
    }

    return(true);
}
/// <summary>
/// Renders the deep draw image(s) depending on the Octave's installed.
/// </summary>
/// <param name="bmpInput">Specifies the input image.</param>
/// <param name="nFocusLabel">Specifies a label to focus on (use this when running on classifying layers).</param>
/// <param name="dfDetailPercentageToOutput">Optionally, specifies the amount of detail to apply to the original image when producing the final image (Default = 0.25 for 25%).</param>
/// <param name="strOutputDir">Optionally, specifies the output directory wheren images are to be output. When <i>null</i>, no images are output, but are instead set in each Octave.</param>
/// <param name="bVisualizeEachStep">Optionally, specifies to create an image at each step of the process which can be useful when making a video of the evolution (default = <i>false</i>).</param>
/// <param name="rgDirectInputs">Optionally, specifies the direct inputs used to set each output. When not <i>null</i> the direct inputs are used instead of the <i>nFocusLabel</i> whereby the
/// network outputs are set to the direct input values and the <i>nFocusLabel</i> is used to index the image and should therefore be unique for each set of direct inputs.
/// By default, this value is set to <i>null</i>.
/// </param>
/// <returns>Upon completing the render, this method returns <i>true</i>, otherwise if cancelled it returns <i>false</i>.</returns>
public bool Render(Bitmap bmpInput, int nFocusLabel = -1, double dfDetailPercentageToOutput = 0.25, string strOutputDir = null, bool bVisualizeEachStep = false, float[] rgDirectInputs = null)
{
    if (rgDirectInputs != null && nFocusLabel < 0)
        throw new Exception("The focus label must be set to a unique value >= 0 that corresponds to this specific direct input set.");

    // get the input dimensions from net
    Blob<T> blobSrc = m_net.blob_by_name("data");
    int nW = blobSrc.width;
    int nH = blobSrc.height;

    m_log.WriteLine("Starting drawing...");

    blobSrc.Reshape(1, 3, nH, nW); // resize the networks input.

    // Set the base data.
    if (strOutputDir != null)
        bmpInput.Save(strOutputDir + "\\input_image.png");

    Datum d = ImageData.GetImageData(bmpInput, 3, false, -1);
    m_blobBase.mutable_cpu_data = m_transformer.Transform(d);

    // Clear the detail and blur working blobs before the first octave.
    m_blobDetail.SetData(0.0);
    m_blobBlur.SetData(0);

    for (int i = 0; i < m_rgOctaves.Count; i++)
    {
        Octaves o = m_rgOctaves[i];

        // Select layer.
        string strLayer = o.LayerName;

        // Add changed details to the image.
        if (nFocusLabel < 0)
            m_cuda.add(blobSrc.count(), m_blobBase.gpu_data, m_blobDetail.gpu_data, blobSrc.mutable_gpu_data, o.PercentageOfPreviousOctaveDetailsToApply);

        for (int j = 0; j < o.IterationN; j++)
        {
            if (m_evtCancel.WaitOne(0))
                return(false);

            // In focus-label mode each iteration restarts from the base image.
            if (nFocusLabel >= 0)
                blobSrc.CopyFrom(m_blobBase);

            // Linearly interpolate sigma and step size across the octave's iterations.
            double dfSigma = o.StartSigma + ((o.EndSigma - o.StartSigma) * j) / o.IterationN;
            double dfStepSize = o.StartStepSize + ((o.EndStepSize - o.StartStepSize) * j) / o.IterationN;

            make_step(strLayer, dfSigma, dfStepSize, nFocusLabel, rgDirectInputs);

            // Emit an image on every step when visualizing, or on the last saved step.
            if ((bVisualizeEachStep || (j == o.IterationN - 1 && o.Save)))
            {
                // Get the detail.
                m_cuda.sub(m_blobDetail.count(), blobSrc.gpu_data, m_blobBase.gpu_data, m_blobDetail.mutable_gpu_data);

                if (dfDetailPercentageToOutput < 1.0)
                {
                    // reuse blob blur memory.
                    m_cuda.add(m_blobBlur.count(), m_blobBase.gpu_data, m_blobDetail.gpu_data, m_blobBlur.mutable_gpu_data, dfDetailPercentageToOutput);
                }
                else
                {
                    m_blobBlur.CopyFrom(blobSrc);
                }

                Image bmp = getImage(m_blobBlur);

                if (nFocusLabel < 0)
                {
                    Bitmap bmp1 = AdjustContrast(bmp, 0.9f, 1.6f, 1.2f);
                    bmp.Dispose();
                    bmp = bmp1;
                }

                if (strOutputDir != null)
                {
                    string strFile = strOutputDir + "\\" + o.UniqueName + "_" + j.ToString();

                    if (nFocusLabel >= 0)
                    {
                        if (rgDirectInputs != null)
                            strFile += "_idx_" + nFocusLabel.ToString();
                        else
                            strFile += "_class_" + nFocusLabel.ToString();
                    }

                    bmp.Save(strFile + ".png");
                }

                // Keep only the final iteration's image; dispose intermediates.
                if (j == o.IterationN - 1)
                    o.Images.Add(nFocusLabel, bmp);
                else
                    bmp.Dispose();
            }

            m_log.Progress = (double)j / (double)o.IterationN;
            m_log.WriteLine("Focus Label: " + nFocusLabel.ToString() + "  Octave: '" + o.LayerName + "' - " + j.ToString() + " of " + o.IterationN.ToString() + " " + m_log.Progress.ToString("P"));

            // In focus-label mode the stepped image becomes the new base.
            if (nFocusLabel >= 0)
                m_blobBase.CopyFrom(blobSrc);
        }

        // Extract details produced on the current octave.
        if (nFocusLabel < 0)
            m_cuda.sub(m_blobDetail.count(), blobSrc.gpu_data, m_blobBase.gpu_data, m_blobDetail.mutable_gpu_data);
    }

    m_log.WriteLine("Rendering completed!");
    return(true);
}
/// <summary>
/// The consolidate thread synchronizes all data queries using their synchronization field (field #0) to make sure
/// that all data items line up.
/// </summary>
private void consolidateThread()
{
    // Bit mask with one bit per data query; all bits set means every query has data ready.
    int nAllDataReady = ((int)Math.Pow(2, m_colDataQuery.Count)) - 1;
    int nWait = 0;

    while (!m_evtCancel.WaitOne(nWait))
    {
        // When disabled, signal paused and poll slowly until re-enabled.
        if (!m_evtEnabled.WaitOne(0))
        {
            nWait = 250;
            m_evtPaused.Set();
            continue;
        }

        nWait = 0;
        m_evtPaused.Reset();

        int nDataReady = 0;
        int nDataDone = 0;

        // Collect each query's ready/done state into the bit masks.
        for (int i = 0; i < m_colDataQuery.Count; i++)
        {
            DataQuery dq = m_colDataQuery[i];

            if (dq.DataReady(1))
                nDataReady |= (0x0001 << i);

            if (dq.DataDone())
                nDataDone |= (0x0001 << i);
        }

        // Signal the end of the query when any underlying query has completed.
        if (nDataDone != 0)
            m_colData.QueryEnd.Set();

        // Only consolidate once every query has at least one item ready.
        if (nDataReady != nAllDataReady)
            continue;

        DataItem di = new DataItem(m_nFieldCount);
        int nLocalFieldCount = m_colDataQuery[0].FieldCount;
        double[] rg = m_colDataQuery[0].GetNextData();

        if (rg == null)
            continue;

        // Field #0 of each record holds the synchronization time (in minutes).
        DateTime dtSync = Utility.ConvertTimeFromMinutes(rg[0]);
        bool bSkip = false;

        for (int i = 0; i < m_nSegmentSize; i++)
        {
            int nFieldIdx = di.Add(0, i, rg, nLocalFieldCount);

            // Align every other query to the first query's sync time, discarding
            // older records until their times catch up.
            for (int j = 1; j < m_colDataQuery.Count; j++)
            {
                double[] rg1 = m_colDataQuery[j].GetNextData();

                if (rg1 == null)
                {
                    bSkip = true;
                    break;
                }

                DateTime dtSync1 = Utility.ConvertTimeFromMinutes(rg1[0]);

                while (dtSync1 < dtSync)
                {
                    rg1 = m_colDataQuery[j].GetNextData();

                    if (rg1 == null)
                    {
                        bSkip = true;
                        break;
                    }

                    dtSync1 = Utility.ConvertTimeFromMinutes(rg1[0]);
                }

                if (bSkip)
                    break;

                nLocalFieldCount = m_colDataQuery[j].FieldCount;
                nFieldIdx = di.Add(nFieldIdx, i, rg1, nLocalFieldCount);
            }

            if (bSkip)
                break;
        }

        // Only emit fully consolidated items; partial ones are dropped.
        if (!bSkip)
            m_colData.Add(di);
    }
}
/// <summary>
/// Run the trained model.  When run each hand-written image is fed in sequence (by label,
/// e.g. 0,1,2,...,9 through the model, yet images within each label are selected at random.
/// </summary>
/// <param name="mycaffe">Specifies the mycaffe instance running the sequence run model.</param>
/// <param name="bw">Specifies the background worker.</param>
private void runModel(MyCaffeControl<float> mycaffe, BackgroundWorker bw)
{
    Random random = new Random((int)DateTime.Now.Ticks);

    // Get the internal RUN net and associated blobs.
    Net<float> net = m_mycaffe.GetInternalNet(Phase.RUN);
    Blob<float> blobData = net.FindBlob("data");
    Blob<float> blobClip1 = net.FindBlob("clip1");
    Blob<float> blobIp1 = net.FindBlob("ip1");

    List<float> rgPrediction = new List<float>();
    List<float> rgTarget = new List<float>();
    List<float> rgT = new List<float>();

    // Copy the latest trained weights into the input model's run net.
    m_mycaffeInput.UpdateRunWeights();
    blobClip1.SetData(0);

    bool bForcedError = false;

    for (int i = 0; i < 100; i++)
    {
        if (m_evtCancel.WaitOne(0))
            return;

        int nLabelSeq = m_nLabelSeq;

        // When the force-error event is set, substitute a random label for the
        // expected next label in the 0..9 sequence.
        if (m_evtForceError.WaitOne(0))
        {
            nLabelSeq = random.Next(10);
            bForcedError = true;
        }
        else
        {
            bForcedError = false;
        }

        // Get images one number at a time, in order by label, but randomly selected.
        SimpleDatum sd = m_imgDb.QueryImage(m_ds.TrainingSource.ID, 0, null, IMGDB_IMAGE_SELECTION_METHOD.RANDOM, nLabelSeq);
        ResultCollection res = m_mycaffeInput.Run(sd);

        Net<float> inputNet = m_mycaffeInput.GetInternalNet(Phase.RUN);
        Blob<float> input_ip = inputNet.FindBlob(m_strInputOutputBlobName);
        Dictionary<string, float[]> data = Signal.GenerateSample(1, m_nLabelSeq / 10.0f, 1, m_model.InputLabel, m_model.TimeSteps);

        float[] rgFY1 = data["FY"];

        // Run the model.
        blobClip1.SetData(1);
        blobData.mutable_cpu_data = input_ip.mutable_cpu_data;
        net.Forward();
        rgPrediction.AddRange(blobIp1.mutable_cpu_data);

        // Graph and show the results.
        float[] rgFT = data["FT"];
        float[] rgFY = data["FY"];

        for (int j = 0; j < rgFT.Length; j++)
        {
            rgT.Add(rgFT[j]);
            rgTarget.Add(rgFY[j]);
        }

        // Trim the history so the plotted points fit the picture box width.
        while (rgTarget.Count * 5 > pbImage.Width)
        {
            rgTarget.RemoveAt(0);
            rgPrediction.RemoveAt(0);
        }

        // Plot the graph.
        PlotCollection plotsTarget = createPlots("Target", rgT.ToArray(), new List<float[]>() { rgTarget.ToArray() }, 0);
        PlotCollection plotsPrediction = createPlots("Predicted", rgT.ToArray(), new List<float[]>() { rgPrediction.ToArray() }, 0);
        PlotCollection plotsAvePrediction = createPlotsAve("Predicted SMA", plotsPrediction, 10);
        PlotCollectionSet set = new PlotCollectionSet(new List<PlotCollection>() { plotsTarget, plotsPrediction, plotsAvePrediction });

        // Create the graph image and display
        Image img = SimpleGraphingControl.QuickRender(set, pbImage.Width, pbImage.Height);
        img = drawInput(img, sd, res.DetectedLabel, bForcedError);
        bw.ReportProgress(0, img);
        Thread.Sleep(1000);

        // Advance the label sequence 0..9, wrapping back to 0.
        m_nLabelSeq++;

        if (m_nLabelSeq == 10)
            m_nLabelSeq = 0;
    }
}
/// <summary>
/// Create the VOC0712 dataset: load the VOC data files into training/testing sources and
/// register (or update) the dataset record in the database.
/// </summary>
/// <param name="config">Specifies the dataset configuration (file paths, extract option, output name).</param>
/// <param name="progress">Specifies the progress sink notified on completion or abort.</param>
public void Create(DatasetConfiguration config, IXDatasetCreatorProgress progress)
{
    // Defaults come from the saved settings; the configuration values below override them.
    string strTrainingFile1 = Properties.Settings.Default.vocTrainingFile1;
    string strTrainingFile2 = Properties.Settings.Default.vocTrainingFile2;
    string strTestingFile1 = Properties.Settings.Default.vocTestingFile1;
    bool bExtract = Properties.Settings.Default.ExpandFiles;

    m_evtCancel.Reset();

    DataConfigSetting dsTrainingFile1 = config.Settings.Find("Training Data File 2007");
    strTrainingFile1 = dsTrainingFile1.Value.ToString();

    DataConfigSetting dsTrainingFile2 = config.Settings.Find("Training Data File 2012");
    strTrainingFile2 = dsTrainingFile2.Value.ToString();

    DataConfigSetting dsTestingFile1 = config.Settings.Find("Testing Data File 2007");
    strTestingFile1 = dsTestingFile1.Value.ToString();

    DataConfigSetting dsExtract = config.Settings.Find("Extract Data Files");
    OptionItem extractOption = dsExtract.Value as OptionItem;
    bExtract = (extractOption.Index == 0) ? false : true;

    DataConfigSetting dsName = config.Settings.Find("Output Dataset Name");
    string strDsName = dsName.Value.ToString();

    string strTrainingSrc = strDsName + ".training";
    string strTestingSrc = strDsName + ".testing";

    m_iprogress = progress;

    // Remove any previous sources with the same names before reloading.
    m_factory.DeleteSources(strTrainingSrc, strTestingSrc);

    Log log = new Log("VOC0712 Dataset Creator");
    log.OnWriteLine += new EventHandler<LogArg>(log_OnWriteLine);

    try
    {
        VOCDataParameters param = new VOCDataParameters(strTrainingFile1, strTrainingFile2, strTestingFile1, bExtract);
        VOCDataLoader loader = new VOCDataLoader(param, log, m_evtCancel);

        loader.OnProgress += Loader_OnProgress;
        loader.OnError += Loader_OnError;
        loader.OnCompleted += Loader_OnCompleted;

        // LoadDatabase populates both sources; bail out (to finally) on failure/abort.
        if (!loader.LoadDatabase(config.ID))
            return;

        using (DNNEntities entities = EntitiesConnection.CreateEntities())
        {
            List<Dataset> rgDs = entities.Datasets.Where(p => p.Name == strDsName).ToList();
            List<Source> rgSrcTraining = entities.Sources.Where(p => p.Name == strTrainingSrc).ToList();
            List<Source> rgSrcTesting = entities.Sources.Where(p => p.Name == strTestingSrc).ToList();

            if (rgSrcTraining.Count == 0)
                throw new Exception("Could not find the training source '" + strTrainingSrc + "'.");

            if (rgSrcTesting.Count == 0)
                throw new Exception("Could not find the tesing source '" + strTestingSrc + "'.");

            int nSrcTestingCount = rgSrcTesting[0].ImageCount.GetValueOrDefault();
            int nSrcTrainingCount = rgSrcTraining[0].ImageCount.GetValueOrDefault();
            int nSrcTotalCount = nSrcTestingCount + nSrcTrainingCount;
            double dfTestingPct = (nSrcTrainingCount == 0) ? 0.0 : nSrcTestingCount / (double)nSrcTotalCount;

            // Reuse the existing dataset record when present, otherwise create a new one.
            Dataset ds = null;

            if (rgDs.Count == 0)
            {
                ds = new Dataset();
                ds.Name = strDsName;
            }
            else
            {
                ds = rgDs[0];
            }

            ds.ImageEncoded = rgSrcTesting[0].ImageEncoded;
            ds.ImageChannels = rgSrcTesting[0].ImageChannels;
            ds.ImageHeight = rgSrcTraining[0].ImageHeight;
            ds.ImageWidth = rgSrcTraining[0].ImageWidth;
            ds.TestingPercent = (decimal)dfTestingPct;
            ds.TestingSourceID = rgSrcTesting[0].ID;
            ds.TestingTotal = rgSrcTesting[0].ImageCount;
            ds.TrainingSourceID = rgSrcTraining[0].ID;
            ds.TrainingTotal = rgSrcTraining[0].ImageCount;
            ds.DatasetCreatorID = config.ID;
            ds.DatasetGroupID = 0;
            ds.ModelGroupID = 0;

            if (rgDs.Count == 0)
                entities.Datasets.Add(ds);

            entities.SaveChanges();
        }
    }
    catch (Exception excpt)
    {
        log.WriteLine("ERROR: " + excpt.Message);
    }
    finally
    {
        // Report completion or abort, then persist the (possibly updated) settings.
        if (m_evtCancel.WaitOne(0))
        {
            log.WriteLine("ABORTED converting VOC0712 data files.");
            m_iprogress.OnCompleted(new CreateProgressArgs(1, "ABORTED!", null, true));
        }
        else
        {
            log.WriteLine("Done converting VOC0712 data files.");
            m_iprogress.OnCompleted(new CreateProgressArgs(1, "COMPLETED."));
        }

        Properties.Settings.Default.vocTestingFile1 = strTestingFile1;
        Properties.Settings.Default.vocTrainingFile1 = strTrainingFile1;
        Properties.Settings.Default.vocTrainingFile2 = strTrainingFile2;
        Properties.Settings.Default.ExpandFiles = bExtract;
        Properties.Settings.Default.Save();
    }
}
/// <summary>
/// Step the gym one step in the data.
/// </summary>
/// <param name="nAction">Specifies the action to run on the gym.</param>
/// <param name="bGetLabel">Not used.</param>
/// <param name="extraProp">Optionally, specifies extra properties.</param>
/// <returns>A tuple containing state data, the reward, and the done state is returned.</returns>
public Tuple<State, double, bool> Step(int nAction, bool bGetLabel = false, PropertySet extraProp = null)
{
    DataState data = new DataState();
    ScoreCollection scores = null;

    if (ActivePhase == Phase.RUN)
    {
        // RUN phase: slice the requested window out of the pre-computed scores.
        if (extraProp == null)
            throw new Exception("The extra properties are needed when querying data during the RUN phase.");

        int nDataCount = extraProp.GetPropertyAsInt("DataCountRequested");
        string strStartTime = extraProp.GetProperty("SeedTime");

        int nStartIdx = m_scores.Count - nDataCount;
        DateTime dt;

        // When a seed time is supplied, start the window at that time instead of the end.
        if (DateTime.TryParse(strStartTime, out dt))
            nStartIdx = m_scores.FindIndexAt(dt, nDataCount);

        scores = m_scores.CopyFrom(nStartIdx, nDataCount);
    }
    else
    {
        int nCount = 0;

        scores = null;
        m_scores = load(out m_nDim, out m_nWidth);

        // Rebuild the score cache when requested or when it no longer matches the source size.
        if (m_bRecreateData || m_scores.Count != m_ds.TrainingSource.ImageCount)
        {
            Stopwatch sw = new Stopwatch();
            sw.Start();

            m_scores = new ScoreCollection();

            while (m_nCurrentIdx < m_ds.TrainingSource.ImageCount)
            {
                // Query images sequentially by index in batches
                List<SimpleDatum> rgSd = new List<SimpleDatum>();

                for (int i = 0; i < m_nBatchSize; i++)
                {
                    SimpleDatum sd = m_imgdb.QueryImage(m_ds.TrainingSource.ID, m_nCurrentIdx + i, IMGDB_LABEL_SELECTION_METHOD.NONE, IMGDB_IMAGE_SELECTION_METHOD.NONE);
                    rgSd.Add(sd);
                    nCount++;

                    if (nCount == m_ds.TrainingSource.ImageCount)
                        break;
                }

                List<ResultCollection> rgRes = m_mycaffe.Run(rgSd, ref m_blobWork);

                // Record the result width/dimension from the first batch.
                if (m_nWidth == 0)
                {
                    m_nWidth = rgRes[0].ResultsOriginal.Count;
                    m_nDim = rgRes[0].ResultsOriginal.Count * 2;
                }

                // Fill SimpleDatum with the ordered label,score pairs starting with the detected label.
                for (int i = 0; i < rgRes.Count; i++)
                {
                    m_scores.Add(new Score(rgSd[i].TimeStamp, rgSd[i].Index, rgRes[i]));
                    m_nCurrentIdx++;
                }

                // Report progress roughly once per second; cancellation is only
                // checked on this once-per-second path.
                if (sw.Elapsed.TotalMilliseconds > 1000)
                {
                    m_log.Progress = (double)m_nCurrentIdx / (double)m_ds.TrainingSource.ImageCount;
                    m_log.WriteLine("Running model on image " + m_nCurrentIdx.ToString() + " of " + m_ds.TrainingSource.ImageCount.ToString() + " of '" + m_strDataset + "' dataset.");

                    if (m_evtCancel.WaitOne(0))
                        return(null);
                }
            }

            // Persist the freshly computed scores for reuse on later runs.
            save(m_nDim, m_nWidth, m_scores);
        }
        else
        {
            m_nCurrentIdx = m_scores.Count;
        }

        scores = m_scores;
    }

    // Pack the scores into a single SimpleDatum as the returned state.
    float[] rgfRes = scores.Data;
    SimpleDatum sdRes = new SimpleDatum(scores.Count, m_nWidth, 2, rgfRes, 0, rgfRes.Length);

    data.SetData(sdRes);
    m_nCurrentIdx = 0;

    return(new Tuple<State, double, bool>(data, 0, false));
}
/// <summary>
/// Load a single CIFAR-style batch file (10,000 images of 3x32x32) into a database source.
/// </summary>
/// <param name="strImagesFile">Specifies the batch file to load.</param>
/// <param name="strSourceName">Specifies the name of the data source.</param>
/// <param name="nTotal">Specifies the total number of images across all batch files (for progress).</param>
/// <param name="nIdx">Specifies the running image index, updated on return.</param>
/// <param name="log">Specifies the output log.</param>
/// <returns>Returns <i>false</i> when cancelled, otherwise <i>true</i>.</returns>
private bool loadFile(string strImagesFile, string strSourceName, int nTotal, ref int nIdx, Log log)
{
    Stopwatch sw = new Stopwatch();
    int nStart = nIdx;

    reportProgress(nIdx, nTotal, " Source: " + strSourceName);
    reportProgress(nIdx, nTotal, "  loading " + strImagesFile + "...");

    FileStream fs = null;

    try
    {
        fs = new FileStream(strImagesFile, FileMode.Open, FileAccess.Read);

        using (BinaryReader br = new BinaryReader(fs))
        {
            // Ownership transfers to the BinaryReader; null out fs so the
            // finally block does not double-dispose.
            fs = null;

            int nSrcId = m_factory.AddSource(strSourceName, 3, 32, 32, false, 0, true);

            m_factory.Open(nSrcId, 500, Database.FORCE_LOAD.NONE, log);

            // Only clear existing data on the first batch file.
            if (nIdx == 0)
                m_factory.DeleteSourceData();

            sw.Start();

            // Each record is 1 label byte followed by 3072 pixel bytes (3x32x32).
            for (int i = 0; i < 10000; i++)
            {
                int nLabel = (int)br.ReadByte();
                byte[] rgImgBytes = br.ReadBytes(3072);
                Bitmap img = createImage(rgImgBytes);

                Datum d = ImageData.GetImageDataD(img, 3, false, nLabel);

                m_factory.PutRawImageCache(nIdx, d, 5);
                m_rgImg.Add(new SimpleDatum(d));

                nIdx++;

                // Report progress roughly once per second.
                if (sw.ElapsedMilliseconds > 1000)
                {
                    reportProgress(nStart + i, nTotal, "loading " + strImagesFile + "   " + i.ToString("N0") + " of 10,000...");
                    sw.Restart();
                }

                if (m_evtCancel.WaitOne(0))
                    return(false);
            }

            m_factory.ClearImageCache(true);

            // Update source counts once the final batch has been loaded.
            if (nIdx == nTotal)
                m_factory.UpdateSourceCounts();
        }
    }
    finally
    {
        if (fs != null)
            fs.Dispose();
    }

    return(true);
}
/// <summary>
/// The query thread is where all data is collected from the underlying custom query managed.
/// </summary>
/// <remarks>
/// The thread polls the enabled event, throttles when the internal queue is full, and
/// splits each query result into one queue item per segment step.  The query is always
/// closed on exit via the finally block.
/// </remarks>
private void queryThread()
{
    // BUGFIX: the original wrapped the loop in 'catch (Exception excpt) { throw excpt; }',
    // which rethrows with a reset stack trace (and is otherwise a no-op).  The catch is
    // removed; the finally block still guarantees the query is closed.
    try
    {
        int nWait = 0;

        m_iquery.Open();

        while (!m_evtCancel.WaitOne(nWait))
        {
            // When disabled, signal paused and poll slowly until re-enabled.
            if (!m_evtQueryEnabled.WaitOne(0))
            {
                m_evtPaused.Set();
                nWait = 250;
                continue;
            }

            m_evtPaused.Reset();

            // Throttle while the consumer catches up.
            if (m_rgDataQueue.Count >= m_nMaxCount)
            {
                nWait = 10;
                continue;
            }

            double[] rgData = m_iquery.QueryByTime(m_dt, m_tsInc, m_nSegmentSize);

            // A null result marks the (possibly temporary) end of available data.
            if (rgData == null)
            {
                m_bQueryEnd = true;
                nWait = 10;
                continue;
            }

            nWait = 0;
            m_bQueryEnd = false;

            int nItemCount = rgData.Length / m_nSegmentSize;
            int nSrcIdx = 0;

            lock (m_objSync)
            {
                // Split the flat result into one item per segment step.
                for (int i = 0; i < m_nSegmentSize; i++)
                {
                    double[] rgItem = new double[nItemCount];
                    Array.Copy(rgData, nSrcIdx, rgItem, 0, nItemCount);
                    nSrcIdx += nItemCount;
                    m_rgDataQueue.Enqueue(rgItem);
                }
            }

            // Advance the query time by the span just consumed.
            m_dt += TimeSpan.FromMilliseconds(m_nSegmentSize * m_tsInc.TotalMilliseconds);
        }
    }
    finally
    {
        m_iquery.Close();
    }
}
/// <summary>
/// Loads a set of raw image items into the database and/or exports them to file.
/// </summary>
/// <param name="factory">Specifies the dataset factory, or null when only exporting to file.</param>
/// <param name="rgData">Specifies the items to load, each a tuple of pixel bytes and label.</param>
/// <param name="nC">Specifies the image channel count.</param>
/// <param name="nH">Specifies the image height (rows).</param>
/// <param name="nW">Specifies the image width (cols).</param>
/// <param name="strSourceName">Specifies the data source name.</param>
/// <param name="strExportPath">Optionally, specifies the export directory root (null to skip exporting).</param>
/// <returns>Returns false when cancelled, otherwise true.</returns>
private bool loadFile(DatasetFactory factory, List<Tuple<byte[], int>> rgData, int nC, int nH, int nW, string strSourceName, string strExportPath)
{
    if (strExportPath != null)
    {
        strExportPath += strSourceName;

        if (!Directory.Exists(strExportPath))
            Directory.CreateDirectory(strExportPath);
    }

    Stopwatch sw = new Stopwatch();

    reportProgress(0, 0, " Source: " + strSourceName);

    FileStream fsFileDesc = null;
    StreamWriter swFileDesc = null;

    try
    {
        if (factory != null)
        {
            int nSrcId = factory.AddSource(strSourceName, nC, nW, nH, false, 0, true);
            factory.Open(nSrcId, 500, Database.FORCE_LOAD.NONE, m_log);
            factory.DeleteSourceData();
        }

        // Storing to database;
        int nLabel;
        byte[] rgPixels;

        // One reusable Datum; SetData replaces its contents each iteration.
        Datum datum = new Datum(false, nC, nW, nH, -1, DateTime.MinValue, new List<byte>(), 0, false, -1);
        // BUGFIX: progress message previously read 'exporing'.
        string strAction = (m_param.ExportToFile) ? "exporting" : "loading";

        reportProgress(0, rgData.Count, " " + strAction + " a total of " + rgData.Count.ToString() + " items.");
        reportProgress(0, rgData.Count, " (with rows: " + nH.ToString() + ", cols: " + nW.ToString() + ")");

        sw.Start();

        List<SimpleDatum> rgImg = new List<SimpleDatum>();

        if (m_param.ExportToFile)
        {
            string strFile = strExportPath + "\\file_list.txt";
            fsFileDesc = File.OpenWrite(strFile);
            swFileDesc = new StreamWriter(fsFileDesc);
        }

        for (int i = 0; i < rgData.Count; i++)
        {
            rgPixels = rgData[i].Item1;
            nLabel = rgData[i].Item2;

            // Throttle progress reporting to roughly once per second.
            if (sw.Elapsed.TotalMilliseconds > 1000)
            {
                reportProgress(i, rgData.Count, " " + strAction + " data...");
                sw.Restart();
            }

            datum.SetData(rgPixels, nLabel);

            if (factory != null)
                factory.PutRawImageCache(i, datum, 5);
            else if (strExportPath != null)
                saveToFile(strExportPath, i, datum, swFileDesc);

            rgImg.Add(new SimpleDatum(datum));

            if (m_evtCancel.WaitOne(0))
                return false;
        }

        if (factory != null)
        {
            factory.ClearImageCache(true);
            factory.UpdateSourceCounts();

            // BUGFIX: the wait handle passed to CalculateMean was previously leaked.
            using (ManualResetEvent evtDone = new ManualResetEvent(false))
            {
                factory.SaveImageMean(SimpleDatum.CalculateMean(m_log, rgImg.ToArray(), new WaitHandle[] { evtDone }), true);
            }
        }

        reportProgress(rgData.Count, rgData.Count, " " + strAction + " completed.");
    }
    finally
    {
        // BUGFIX: the file-list streams are now closed on all paths (cancel and
        // exception included); previously they were only closed on the success path.
        if (swFileDesc != null)
        {
            swFileDesc.Flush();
            swFileDesc.Dispose();
        }

        if (fsFileDesc != null)
            fsFileDesc.Dispose();
    }

    return true;
}
/// <summary>
/// Process the content image by applying the style to it that was learned from the style image.
/// </summary>
/// <param name="bmpStyle">Specifies the image used to train the what style to apply to the content.</param>
/// <param name="bmpContent">Specifies the content image to which the style is to be applied.</param>
/// <param name="nIterations">Specifies the number of training iterations.</param>
/// <param name="strResultDir">Optionally, specifies an output directory where intermediate images are stored.</param>
/// <param name="nIntermediateOutput">Optionally, specifies how often to output an intermediate image.</param>
/// <param name="dfTvLoss">Optionally, specifies the TV-Loss weight for smoothing (default = 0, which disables this loss).</param>
/// <returns>The resulting image is returned.</returns>
public Bitmap Process(Bitmap bmpStyle, Bitmap bmpContent, int nIterations, string strResultDir = null, int nIntermediateOutput = -1, double dfTvLoss = 0)
{
    Solver<T> solver = null;
    Net<T> net = null;
    BlobCollection<T> colContentActivations = new BlobCollection<T>();
    BlobCollection<T> colGramActivations = new BlobCollection<T>();
    double dfLoss;

    try
    {
        m_dfTVLossWeight = dfTvLoss;
        m_nIterations = nIterations;

        // The style image must match the content image dimensions.
        if (bmpStyle.Width != bmpContent.Width || bmpStyle.Height != bmpContent.Height)
            bmpStyle = ImageTools.ResizeImage(bmpStyle, bmpContent.Width, bmpContent.Height);

        m_log.WriteLine("Creating input network...");
        m_log.Enable = false;
        net = new Net<T>(m_cuda, m_log, m_param, m_evtCancel, null, Phase.TEST);
        m_log.Enable = true;

        if (m_rgWeights != null)
            net.LoadWeights(m_rgWeights, m_persist);

        //-----------------------------------------
        //  Get style and content activations.
        //-----------------------------------------

        // Run the style image through the net and snapshot the gram activations.
        prepare_data_blob(net, bmpStyle);
        net.Forward(out dfLoss);

        foreach (KeyValuePair<string, double> kvGram in m_rgLayers["gram"])
        {
            string strGram = kvGram.Key;
            Blob<T> blobGram = net.blob_by_name(strGram);
            colGramActivations.Add(blobGram.Clone());
        }

        // Run the content image through the net and snapshot the content activations.
        prepare_data_blob(net, bmpContent);
        net.Forward(out dfLoss);

        foreach (KeyValuePair<string, double> kvContent in m_rgLayers["content"])
        {
            string strContent = kvContent.Key;
            Blob<T> blobContent = net.blob_by_name(strContent);
            colContentActivations.Add(blobContent.Clone());
        }

        //-----------------------------------------
        //  Prepare the network by adding new layers.
        //-----------------------------------------

        NetParameter net_param = m_param;

        // Add an INPUT layer per tracked blob to feed the saved activations.
        foreach (KeyValuePair<string, double> kvInput in m_rgLayers["input"])
        {
            string strName = kvInput.Key;
            LayerParameter p = new LayerParameter(LayerParameter.LayerType.INPUT);
            p.name = "input_" + strName;
            p.top.Add(p.name);

            Blob<T> blob = net.blob_by_name(strName);
            p.input_param.shape.Add(new BlobShape(blob.shape()));
            net_param.layer.Add(p);
        }

        // Wire each content blob through (optional) scaling, an EVENT layer and a Euclidean loss.
        foreach (KeyValuePair<string, double> kvContent in m_rgLayers["content"])
        {
            string strName = kvContent.Key;
            string strScale1 = "input_" + strName;
            string strScale2 = strName;

            if (m_dfContentDataScale != 1.0)
            {
                strScale1 += "b";
                LayerParameter ps1 = new LayerParameter(LayerParameter.LayerType.SCALAR);
                ps1.scalar_param.value = m_dfContentDataScale;
                ps1.scalar_param.operation = ScalarParameter.ScalarOp.MUL;
                ps1.scalar_param.passthrough_gradient = true;
                ps1.bottom.Add("input_" + strName);
                ps1.top.Add(strScale1);
                net_param.layer.Add(ps1);

                strScale2 += "b";
                LayerParameter ps2 = new LayerParameter(LayerParameter.LayerType.SCALAR);
                ps2.scalar_param.value = m_dfContentDataScale;
                ps2.scalar_param.operation = ScalarParameter.ScalarOp.MUL;
                ps2.scalar_param.passthrough_gradient = true;
                ps2.bottom.Add(strName);
                ps2.top.Add(strScale2);
                net_param.layer.Add(ps2);
            }

            LayerParameter event_param = new LayerParameter(LayerParameter.LayerType.EVENT);
            event_param.name = "event_" + strName;
            event_param.bottom.Add(strScale2);
            event_param.bottom.Add(strScale1);
            event_param.top.Add("event_" + strName);
            net_param.layer.Add(event_param);

            LayerParameter p = new LayerParameter(LayerParameter.LayerType.EUCLIDEAN_LOSS);
            p.name = "loss_" + strName;

            Blob<T> blobContent = colContentActivations[strName];
            double dfScale = get_content_scale(blobContent);
            p.loss_weight.Add(kvContent.Value * dfScale);

            p.bottom.Add("event_" + strName);
            p.bottom.Add(strScale1);
            p.top.Add("loss_" + strName);
            net_param.layer.Add(p);
        }

        // Wire each gram blob through an EVENT layer and a Euclidean loss.
        foreach (KeyValuePair<string, double> kvGram in m_rgLayers["gram"].ToList())
        {
            string strGramName = kvGram.Key;

            LayerParameter event_param = new LayerParameter(LayerParameter.LayerType.EVENT);
            event_param.name = "event_" + strGramName;
            event_param.bottom.Add(strGramName);
            event_param.bottom.Add("input_" + strGramName);
            event_param.top.Add("event_" + strGramName);
            net_param.layer.Add(event_param);

            LayerParameter p = new LayerParameter(LayerParameter.LayerType.EUCLIDEAN_LOSS);
            p.name = "loss_" + strGramName;

            Blob<T> blobGram = colGramActivations[strGramName];
            double dfScale = get_style_scale(blobGram);
            p.loss_weight.Add(kvGram.Value * dfScale);

            p.bottom.Add("input_" + strGramName);
            p.bottom.Add("event_" + strGramName);
            p.top.Add("loss_" + strGramName);
            net_param.layer.Add(p);
        }

        // Add TV Loss;
        if (m_dfTVLossWeight != 0)
        {
            LayerParameter p = new LayerParameter(LayerParameter.LayerType.TV_LOSS);
            p.name = "loss_tv";
            double dfWeight = m_dfTVLossWeight;
            p.loss_weight.Add(dfWeight);
            p.bottom.Add("data");
            p.top.Add("loss_tv");
            net_param.layer.Add(p);
        }

        // Replace InputLayer with ParameterLayer,
        // so that we'll be able to backprop into the image.
        Blob<T> data = net.blob_by_name("data");
        for (int i = 0; i < net_param.layer.Count; i++)
        {
            LayerParameter p = net_param.layer[i];

            if (p.name == "input1")
            {
                net_param.layer[i].SetType(LayerParameter.LayerType.PARAMETER);
                net_param.layer[i].parameter_param.shape = new BlobShape(data.shape());
                break;
            }
        }

        // Disable weights learning.
        List<LayerParameter.LayerType> rgTypes = new List<LayerParameter.LayerType>();
        rgTypes.Add(LayerParameter.LayerType.CONVOLUTION);
        rgTypes.Add(LayerParameter.LayerType.DECONVOLUTION);
        rgTypes.Add(LayerParameter.LayerType.INNERPRODUCT);
        rgTypes.Add(LayerParameter.LayerType.PRELU);
        rgTypes.Add(LayerParameter.LayerType.BIAS);
        rgTypes.Add(LayerParameter.LayerType.EMBED);
        rgTypes.Add(LayerParameter.LayerType.LSTM);
        rgTypes.Add(LayerParameter.LayerType.LSTM_SIMPLE);
        rgTypes.Add(LayerParameter.LayerType.RNN);

        foreach (LayerParameter layer in net_param.layer)
        {
            if (rgTypes.Contains(layer.type))
            {
                // Zero learning rate and decay freeze the pre-trained weights.
                layer.parameters = new List<ParamSpec>();
                layer.parameters.Add(new ParamSpec(0, 0));
                layer.parameters.Add(new ParamSpec(0, 0));
            }
        }

        net.Dispose();
        net = null;

        //-----------------------------------------
        //  Create solver and assign inputs.
        //-----------------------------------------

        RawProto proto1 = net_param.ToProto("root");
        string str = proto1.ToString();

        SolverParameter solver_param = new SolverParameter();
        solver_param.display = m_nDisplayEvery;
        solver_param.train_net_param = net_param;
        solver_param.test_iter.Clear();
        solver_param.test_interval = 0;
        solver_param.test_initialization = false;
        solver_param.base_lr = m_dfLearningRate;
        solver_param.type = m_solverType;

        m_log.WriteLine("Creating " + m_solverType.ToString() + " solver with learning rate = " + m_dfLearningRate.ToString() + "...");
        m_log.Enable = false;

        if (m_solverType == SolverParameter.SolverType.LBFGS)
            solver = new LBFGSSolver<T>(m_cuda, m_log, solver_param, m_evtCancel, null, null, null, m_persist);
        else
            solver = Solver<T>.Create(m_cuda, m_log, solver_param, m_evtCancel, null, null, null, m_persist);

        m_log.Enable = true;
        solver.OnSnapshot += Solver_OnSnapshot;
        solver.OnTrainingIteration += Solver_OnTrainingIteration;

        foreach (Layer<T> layer in solver.net.layers)
        {
            if (layer.type == LayerParameter.LayerType.EVENT)
            {
                EventLayer<T> eventLayer = layer as EventLayer<T>;
                eventLayer.OnBackward += EventLayer_OnBackward;
            }
        }

        prepare_input_param(solver.net, bmpContent);

        // Copy the previously captured activations into the new input blobs.
        foreach (KeyValuePair<string, double> kvContent in m_rgLayers["content"])
        {
            string strName = kvContent.Key;
            Blob<T> blobDst = solver.net.blob_by_name("input_" + strName);
            Blob<T> blobSrc = colContentActivations[strName];
            blobDst.CopyFrom(blobSrc);
        }

        foreach (KeyValuePair<string, double> kvGram in m_rgLayers["gram"])
        {
            string strName = kvGram.Key;
            Blob<T> blobDst = solver.net.blob_by_name("input_" + strName);
            Blob<T> blobSrc = colGramActivations[strName];
            blobDst.CopyFrom(blobSrc);
        }

        //-----------------------------------------
        //  Optimize.
        //-----------------------------------------

        int nIterations1 = m_nIterations;
        if (strResultDir != null && nIntermediateOutput > 0)
            nIterations1 /= nIntermediateOutput;

        if (m_rgWeights != null)
        {
            // Temporarily remove the image parameter blob so LoadWeights maps
            // the stored weights onto the frozen layers, then restore it.
            Blob<T> blobInput = solver.net.learnable_parameters[0];
            solver.net.learnable_parameters.RemoveAt(0);
            solver.net.LoadWeights(m_rgWeights, m_persist);
            solver.net.learnable_parameters.Insert(0, blobInput);
        }

        if (strResultDir != null)
        {
            strResultDir = strResultDir.TrimEnd('\\');
            strResultDir += "\\";
        }

        for (int i = 0; i < nIterations1; i++)
        {
            if (m_evtCancel.WaitOne(0))
                break;

            solver.Step(nIntermediateOutput, TRAIN_STEP.NONE, true, true, true);

            if (strResultDir != null)
            {
                Bitmap bmpTemp = save(solver.net);

                string strFile = strResultDir + i.ToString() + "_temp.png";
                if (File.Exists(strFile))
                    File.Delete(strFile);

                bmpTemp.Save(strFile);
                // BUGFIX: dispose each intermediate bitmap; previously the GDI
                // handles leaked once per intermediate output.
                bmpTemp.Dispose();
            }
        }

        Bitmap bmpOutput = save(solver.net);

        return bmpOutput;
    }
    // BUGFIX: removed 'catch (Exception excpt) { throw excpt; }' which reset the
    // stack trace (CA2200); exceptions now propagate unmodified.
    finally
    {
        if (net != null)
            net.Dispose();

        if (solver != null)
            solver.Dispose();

        colGramActivations.Dispose();
        colContentActivations.Dispose();
    }
}