/// <summary>
/// Dumps every sample in <paramref name="data"/> to the console together with
/// the network's prediction for that sample.
/// </summary>
/// <param name="data">Dataset whose input/output pairs are printed.</param>
public virtual void Test(Dataset data)
{
    for (int sampleIndex = 0; sampleIndex < data.input.Count; sampleIndex++)
    {
        var sampleInput = data.input[sampleIndex];
        Console.WriteLine(
            "{0} -> {1}, net = {2}",
            sampleInput.Print(),
            data.output[sampleIndex].Print(),
            Predict(sampleInput).Print());
    }
}
// Performance smoke test: runs 5-fold cross-validation of support vector
// regression over 1000 random samples and reports the wall-clock runtime.
public void SupportVectorMachinePerformanceTest() {
  ex = null; // clear any exception left over from a previous run
  var cv = new CrossValidation();
  cv.Algorithm = new SupportVectorRegression();
  var rand = new HeuristicLab.Random.MersenneTwister();
  double[,] data = GenerateData(1000, rand);
  List<string> variables = new List<string>() { "x1", "x2", "x3", "x4", "x5", "x6", "x7", "x8", "x9", "x10", "y" };
  Dataset ds = new Dataset(variables, data);
  // First 10 columns are inputs; the last column ("y") is the regression target.
  cv.Problem.ProblemDataParameter.ActualValue = new RegressionProblemData(ds, variables.Take(10), variables.Last());
  cv.Folds.Value = 5;
  cv.SamplesStart.Value = 0;
  cv.SamplesEnd.Value = 999;
  // NOTE(review): cv_ExceptionOccurred is expected to store into 'ex' and
  // cv_Stopped to signal 'trigger' -- both are defined elsewhere in this class.
  cv.ExceptionOccurred += new EventHandler<EventArgs<Exception>>(cv_ExceptionOccurred);
  cv.Stopped += new EventHandler(cv_Stopped);
  cv.Prepare();
  cv.Start();
  trigger.WaitOne(); // block until the Stopped handler fires
  if (ex != null) throw ex; // surface any exception raised by the algorithm
  TestContext.WriteLine("Runtime: {0}", cv.ExecutionTime.ToString());
}
// Verifies that the missing-values detector flags one erroneous value for every
// expected hourly reading inside the gap between May 1990 and August 1991.
public void TestMissingValues() {
  // Minimal fixture; the same contact serves as both site contacts.
  var contact = new Contact("Jim", "Does", "*****@*****.**", "Lollipops", "837773");
  var dataSet = new Dataset(new Site(4, "New Site", "Tim Jones", contact, contact, new GPSCoords(0, 0, "argis")));
  var sensor = new Sensor("Dummy Sensor", "Does stuff", 10, 0, "C", 5, dataSet);
  sensor.AddState(new SensorState(sensor, DateTime.Now));
  // Two adjacent readings in May 1990, then a long gap until August 1991.
  sensor.CurrentState.Values.Add(new DateTime(1990, 5, 1, 4, 0, 0), 15);
  sensor.CurrentState.Values.Add(new DateTime(1990, 5, 1, 5, 0, 0), 15);
  sensor.CurrentState.Values.Add(new DateTime(1991, 8, 2, 0, 0, 0), 15);
  dataSet.AddSensor(sensor);
  dataSet.DataInterval = 60; // a reading is expected every 60 minutes
  dataSet.HighestYearLoaded = 1;
  Assert.AreEqual(60, dataSet.DataInterval);
  var missingValues = missingValuesDetector.GetDetectedValues(sensor);
  // Every hour inside the gap must appear in the detector's output.
  for (var i = new DateTime(1990, 5, 1, 6, 0, 0); i < new DateTime(1991, 8, 2, 0, 0, 0); i = i.AddHours(1)) {
    Assert.Contains(new ErroneousValue(i, missingValuesDetector, sensor), missingValues);
  }
}
/// <summary>
/// Creates a legend item container bound to one series of a dataset.
/// </summary>
/// <param name="arrangement">Layout arrangement forwarded to the base container.</param>
/// <param name="dataset">Dataset the legend item belongs to.</param>
/// <param name="seriesKey">Key identifying the series within the dataset.</param>
public LegendItemBlockContainer(Arrangement arrangement, Dataset dataset, IComparable seriesKey)
    : base(arrangement)
{
    // Fix: removed the no-op local "LegendItemBlockContainer itemBlockContainer = this;"
    // (a decompiler artifact with no effect).
    this.dataset = dataset;
    this.seriesKey = seriesKey;
}
/// <summary>
/// Loads the "categoria" table through an Npgsql data adapter, shows it,
/// modifies the first row's "Nombre" column and writes the change back.
/// </summary>
protected void OnExecuteActionActivated(Object sender, System.EventArgs e)
{
    // NOTE(review): the original declared an unused local connection string here;
    // dbConnection appears to be configured elsewhere, so the dead local was removed.
    NpgsqlCommand selectCommand = dbConnection.CreateCommand();
    selectCommand.CommandText = "select * from categoria";

    NpgsqlDataAdapter dbDataAdapter = new NpgsqlDataAdapter();
    // The command builder registers itself on the adapter and generates the
    // INSERT/UPDATE/DELETE commands needed by Update() below.
    new NpgsqlCommandBuilder(dbDataAdapter);
    dbDataAdapter.SelectCommand = selectCommand;

    // Bug fix: the original wrote "new Dataset()" (wrong type for a DataSet
    // variable) and passed the type name DataSet to Fill() instead of the instance.
    DataSet dataset = new DataSet();
    dbDataAdapter.Fill(dataset);

    //Console.WriteLine("Tables.Count={0}", dataset.Tables.Count);
    foreach (DataTable dataTable in dataset.Tables)
        show(dataTable);

    // Modify the first row and push the change back to the database.
    DataRow dataRow = dataset.Tables[0].Rows[0];
    dataRow["Nombre"] = DateTime.Now.ToString();
    Console.WriteLine("Cambios");
    show(dataset.Tables[0]);
    dbDataAdapter.Update(dataset);
}
// Imports a CSV file as regression problem data. The last double column is the
// target; the first two thirds of the rows form the training partition.
public override IRegressionProblemData ImportData(string path) {
  TableFileParser csvFileParser = new TableFileParser();
  csvFileParser.Parse(path, csvFileParser.AreColumnNamesInFirstLine(path));
  Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
  string targetVar = dataset.DoubleVariables.Last();
  // turn off input variables that are constant in the training partition
  var allowedInputVars = new List<string>();
  var trainingIndizes = Enumerable.Range(0, (csvFileParser.Rows * 2) / 3);
  if (trainingIndizes.Count() >= 2) {
    foreach (var variableName in dataset.DoubleVariables) {
      // A variable with zero range over the training rows is constant and
      // carries no information for regression.
      if (dataset.GetDoubleValues(variableName, trainingIndizes).Range() > 0 && variableName != targetVar) allowedInputVars.Add(variableName);
    }
  } else {
    // Too few training rows to judge constancy: keep every non-target variable.
    allowedInputVars.AddRange(dataset.DoubleVariables.Where(x => !x.Equals(targetVar)));
  }
  IRegressionProblemData regressionData = new RegressionProblemData(dataset, allowedInputVars, targetVar);
  // NOTE(review): trainingIndizes.Last() is the last training INDEX (rows*2/3 - 1),
  // while similar importers in this codebase use the row count (rows*2/3) as the
  // exclusive partition end -- possible off-by-one; confirm partition semantics.
  var trainingPartEnd = trainingIndizes.Last();
  regressionData.TrainingPartition.Start = trainingIndizes.First();
  regressionData.TrainingPartition.End = trainingPartEnd;
  regressionData.TestPartition.Start = trainingPartEnd;
  regressionData.TestPartition.End = csvFileParser.Rows;
  regressionData.Name = Path.GetFileName(path);
  return regressionData;
}
/// <summary>
/// Wires up every dataset exposed by the platform database against the given context.
/// </summary>
/// <param name="context">Backing database context; must not be null.</param>
public PlatformDb(IDbContext context)
{
    if (context is null)
        throw new ArgumentNullException(nameof(context));

    this.context = context;

    // Apps
    Apps = new Dataset<App>(context);
    AppInstances = new Dataset<AppInstance>(context);
    AppReleases = new Dataset<AppRelease>(context);
    AppEvents = new Dataset<AppEvent>(context);
    AppErrors = new Dataset<AppError>(context);

    // Frontends
    Frontends = new Dataset<Frontend>(context);
    FrontendBranches = new Dataset<FrontendBranch>(context);
    FrontendReleases = new Dataset<FrontendRelease>(context);

    // Networks
    Networks = new Dataset<Network>(context);
    NetworkInterfaces = new Dataset<NetworkInterfaceInfo>(context);

    // Hosts, storage and images
    Hosts = new Dataset<Host>(context);
    Volumes = new Dataset<VolumeInfo>(context);
    Images = new Dataset<Image>(context);
}
// Click handler: generates a fresh "Lines" training set and trains the network
// on mini-batches in a background task until the 'running' flag is cleared.
private void bTrainLines_Click(object sender, EventArgs e) {
  data = PragmaLearn.Exampels.Datasets.Lines.Create(100000);
  // Both hidden layers are sized like the input dimension.
  var hidden = data.GetInputDimension();
  // (Re)initialize only when the current topology no longer fits the dataset.
  if (network.GetInputs() != data.GetInputDimension() || network.GetOutputs() != data.GetOutputDimension())
    network.Init(data.GetInputDimension(), hidden, hidden, data.GetOutputDimension());
  network.learningRate = 0.0001f;
  network.lambda = 0.0f; // no regularization
  Task.Run(() => {
    running = true;
    int t = 0;
    while (running) {
      t++;
      // network.Train(data);
      var batch = genMiniBatch(100);
      network.TrainMiniBatch(data, batch);
      Console.WriteLine("LEARNING RATE: " + network.learningRate);
      //if (network.learningRate > 0.0001)
      // network.learningRate *= 0.9998;
      // Refresh the UI every 10 mini-batches; 'test' runs on the UI thread.
      if (t % 10 == 0) { this.Invoke(test); }
    }
  });
}
/// <summary>
/// Creates a Visualization GameObject for <paramref name="dataset"/>, parents it
/// under <paramref name="parent"/>, attaches an AutographerParser component and
/// copies the dataset's metadata/image settings plus this component's layout and
/// photo options onto it.
/// </summary>
/// <returns>The fully configured Visualization component.</returns>
protected Visualization createVisualization(Dataset dataset, Transform parent)
{
    // Cleanup: removed large blocks of commented-out configuration code and the
    // unused 'parser' local from the original.
    GameObject go = new GameObject();
    Visualization v = go.AddComponent<Visualization>();
    go.name = dataset.csvMetadataFile.name;
    go.transform.parent = parent;

    // The parser component is attached here; its configuration happens elsewhere.
    go.AddComponent<AutographerParser>();

    v.projectName = dataset.csvMetadataFile.name;
    v.csvMetadataFile = dataset.csvMetadataFile;
    v.imageDirectory = dataset.imageDirectory;
    v.imageExtension = dataset.imageExtension;
    v.targetMetadataParser = v.GetComponent<MetadataParser>();
    v.quadTemplates = dataset.quadTemplates;
    v.tvisLayout = this.tvisLayout;
    v.usePhotos = this.usePhotos;
    return v;
}
/// <summary>
/// Imports a CSV file as time series prognosis problem data. The training
/// partition covers the percentage of rows requested by <paramref name="type"/>
/// (at least one row); inputs that are constant over the training rows are dropped.
/// </summary>
protected override ITimeSeriesPrognosisProblemData ImportData(string path, TimeSeriesPrognosisImportType type, TableFileParser csvFileParser) {
  Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);

  // Training partition end, clamped so at least one row is used for training.
  int trainingPartEnd = (csvFileParser.Rows * type.TrainingPercentage) / 100;
  if (trainingPartEnd < 1) trainingPartEnd = 1;
  var trainingIndizes = Enumerable.Range(0, trainingPartEnd);

  // Offer every double variable except the target as input; when there are
  // enough training rows to judge, drop variables that are constant there.
  var allowedInputVars = new List<string>();
  if (trainingIndizes.Count() >= 2) {
    allowedInputVars.AddRange(
      dataset.DoubleVariables.Where(
        name => dataset.GetDoubleValues(name, trainingIndizes).Range() > 0
             && name != type.TargetVariable));
  } else {
    allowedInputVars.AddRange(dataset.DoubleVariables.Where(x => !x.Equals(type.TargetVariable)));
  }

  TimeSeriesPrognosisProblemData timeSeriesPrognosisData =
    new TimeSeriesPrognosisProblemData(dataset, allowedInputVars, type.TargetVariable);
  timeSeriesPrognosisData.TrainingPartition.Start = 0;
  timeSeriesPrognosisData.TrainingPartition.End = trainingPartEnd;
  timeSeriesPrognosisData.TestPartition.Start = trainingPartEnd;
  timeSeriesPrognosisData.TestPartition.End = csvFileParser.Rows;
  timeSeriesPrognosisData.Name = Path.GetFileName(path);
  return timeSeriesPrognosisData;
}
// Loads a regression problem from a CSV file stored inside an embedded zip
// resource: one pass sniffs the file format, a second pass parses the values.
public override IRegressionProblemData LoadData(IDataDescriptor id) {
  var descriptor = (ResourceRegressionDataDescriptor)id;
  var instanceArchiveName = GetResourceName(FileName + @"\.zip");
  using (var instancesZipFile = new ZipArchive(GetType().Assembly.GetManifestResourceStream(instanceArchiveName), ZipArchiveMode.Read)) {
    var entry = instancesZipFile.GetEntry(descriptor.ResourceName);
    NumberFormatInfo numberFormat;
    DateTimeFormatInfo dateFormat;
    char separator;
    // First pass: detect number format, date format and column separator.
    using (Stream stream = entry.Open()) {
      TableFileParser.DetermineFileFormat(stream, out numberFormat, out dateFormat, out separator);
    }
    // Second pass: parse with the detected format. The entry is re-opened,
    // presumably because the zip entry stream is not seekable -- confirm.
    TableFileParser csvFileParser = new TableFileParser();
    using (Stream stream = entry.Open()) {
      csvFileParser.Parse(stream, numberFormat, dateFormat, separator, true);
    }
    Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
    // Guard: the parsed columns must match what the descriptor expects.
    if (!descriptor.CheckVariableNames(csvFileParser.VariableNames)) {
      throw new ArgumentException("Parsed file contains variables which are not in the descriptor.");
    }
    return descriptor.GenerateRegressionData(dataset);
  }
}
// Imports a CSV file as clustering problem data: every double variable that is
// not constant over the training partition (first two thirds of the rows) is
// offered as an input.
public override IClusteringProblemData ImportData(string path) {
  var csvFileParser = new TableFileParser();
  csvFileParser.Parse(path, csvFileParser.AreColumnNamesInFirstLine(path));
  Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);
  // turn off input variables that are constant in the training partition
  var allowedInputVars = new List<string>();
  var trainingIndizes = Enumerable.Range(0, (csvFileParser.Rows * 2) / 3);
  if (trainingIndizes.Count() >= 2) {
    foreach (var variableName in dataset.DoubleVariables) {
      // Zero range over the training rows means the variable is constant.
      if (dataset.GetDoubleValues(variableName, trainingIndizes).Range() > 0) allowedInputVars.Add(variableName);
    }
  } else {
    // Too few rows to judge constancy: keep all double variables.
    allowedInputVars.AddRange(dataset.DoubleVariables);
  }
  ClusteringProblemData clusteringData = new ClusteringProblemData(dataset, allowedInputVars);
  // NOTE(review): trainingIndizes.Last() is the last training INDEX (rows*2/3 - 1);
  // sibling importers use the row count as the exclusive partition end --
  // possible off-by-one, confirm partition semantics.
  int trainingPartEnd = trainingIndizes.Last();
  clusteringData.TrainingPartition.Start = trainingIndizes.First();
  clusteringData.TrainingPartition.End = trainingPartEnd;
  clusteringData.TestPartition.Start = trainingPartEnd;
  clusteringData.TestPartition.End = csvFileParser.Rows;
  clusteringData.Name = Path.GetFileName(path);
  return clusteringData;
}
/// <summary>
/// Imports a CSV file as time series prognosis problem data. By convention the
/// last column is the target; the first two thirds of the rows are the training
/// partition, the remainder the test partition.
/// </summary>
public override ITimeSeriesPrognosisProblemData ImportData(string path)
{
    TableFileParser csvFileParser = new TableFileParser();
    csvFileParser.Parse(path, csvFileParser.AreColumnNamesInFirstLine(path));
    Dataset dataset = new Dataset(csvFileParser.VariableNames, csvFileParser.Values);

    // Last column is the prognosis target; all other double variables are inputs.
    string targetVar = csvFileParser.VariableNames.Last();
    IEnumerable<string> allowedInputVars = dataset.DoubleVariables.Where(x => !x.Equals(targetVar));

    ITimeSeriesPrognosisProblemData timeSeriesPrognosisData = new TimeSeriesPrognosisProblemData(dataset, allowedInputVars, targetVar);
    int trainingPartEnd = csvFileParser.Rows * 2 / 3;
    timeSeriesPrognosisData.TrainingPartition.Start = 0;
    timeSeriesPrognosisData.TrainingPartition.End = trainingPartEnd;
    timeSeriesPrognosisData.TestPartition.Start = trainingPartEnd;
    timeSeriesPrognosisData.TestPartition.End = csvFileParser.Rows;

    // Bug fix: the original extracted the file name by searching for '\\' only,
    // which failed for forward-slash paths. Path.GetFileName handles both
    // separators and matches the sibling importers in this codebase.
    timeSeriesPrognosisData.Name = Path.GetFileName(path);
    return timeSeriesPrognosisData;
}
// Sets up Polhemus motion tracking. Dataset slots for stations 1 and 2 are
// always prepared; the hardware is only contacted when polhemusConnected is true.
public PolhemusController(Boolean polhemusConnected) {
  lastDatasetForEachStation = new Dictionary<int, Dataset>();
  //for (int i = 1; i <= Constants.numberOfPlayers; i++)
  lastDatasetForEachStation[1] = new Dataset();
  lastDatasetForEachStation[2] = new Dataset();
  if (polhemusConnected == true) {
    try {
      Factory.CreateInstance(Debugmode.NoDebug);
      Factory.Instance.PollingDelay = 30; // polling delay -- unit (ms?) TODO confirm
      Factory.Instance.OnPolhemusButtonDown += new PolhemusEvent(OnPolhemusButtonDown);
      Factory.Instance.OnPolhemusButtonUp += new PolhemusEvent(OnPolhemusButtonUp);
      Factory.Instance.OnPolhemusMove += new PolhemusEvent(OnPolhemusMove);
      isConnected = true;
      Console.WriteLine("done with polhemus setup");
    } catch (Exception e) {
      // Device missing or driver failure: degrade gracefully to "not connected".
      Console.WriteLine("There is no Polhemus connection: " + e);
      isConnected = false;
    }
    // Publish this instance as the shared controller (only when connection was attempted).
    sharedPolhemusController = this;
  }
}
/// <summary>
/// Registers one Polhemus dataset; calibration is started as soon as the
/// fourth dataset has been collected.
/// </summary>
public void addPolhemusDataset(Dataset dataset)
{
    polhemusDatasets.Add(dataset);

    // Exactly four datasets are required before calibration can run.
    bool readyToCalibrate = polhemusDatasets.Count == 4;
    if (readyToCalibrate)
        setUpCalibration();
}
/// <summary>
/// Creates a version that belongs to the given dataset.
/// </summary>
/// <remarks>Delegates default initialization to the parameterless constructor.</remarks>
/// <seealso cref="Dataset"/>
/// <param name="dataset">The dataset this version is a snapshot of.</param>
public DatasetVersion(Dataset dataset)
    : this()
{
    // Cleanup: removed an empty commented-out block probing the dataset's
    // data structure type; it had no effect.
    this.Dataset = dataset;
}
//---------------------------------------------------------------------
/// <summary>
/// Gets an alphabetical list of extensions from a dataset.
/// </summary>
/// <param name="dataset">Dataset whose extensions are copied and sorted.</param>
/// <returns>A new list containing the dataset's extensions ordered by name.</returns>
public static List<ExtensionInfo> GetExtsInAlphaOrder(Dataset dataset)
{
    int extensionCount = dataset.Count;
    var sortedExtensions = new List<ExtensionInfo>(extensionCount);
    for (int index = 0; index < extensionCount; index++)
    {
        sortedExtensions.Add(dataset[index]);
    }
    sortedExtensions.Sort(CompareNames);
    return sortedExtensions;
}
/// <summary>
/// Wraps a core-model dataset GET response: copies its runtime properties onto
/// this instance and converts the payload into the public wrapper type.
/// </summary>
internal DatasetGetResponse(Core.Models.DatasetGetResponse internalResponse, DataFactoryManagementClient client)
    : this()
{
    // Both the response and its dataset payload are required.
    Ensure.IsNotNull(internalResponse, "internalResponse");
    Ensure.IsNotNull(internalResponse.Dataset, "internalResponse.Dataset");

    DataFactoryOperationUtilities.CopyRuntimeProperties(internalResponse, this);

    // Translate the core model into the public wrapper type.
    var converter = ((DatasetOperations)client.Datasets).Converter;
    this.Dataset = converter.ToWrapperType(internalResponse.Dataset);
}
// Builds a character-recognition training set: renders each alphabet character
// 100 times with a random font, size, rotation and sub-pixel offset, pairing
// every rasterized bitmap with a one-hot output vector.
public static Dataset Create() {
  Dataset result = new Dataset();
  Bitmap bmp = new Bitmap(width, height);
  StringFormat stringFormat = new StringFormat();
  stringFormat.Alignment = StringAlignment.Center;
  stringFormat.LineAlignment = StringAlignment.Center;
  float size = 8;
  Rectangle rect = new Rectangle(0, 0, width, height);
  using (Graphics g = Graphics.FromImage(bmp)) {
    g.TextRenderingHint = TextRenderingHint.ClearTypeGridFit;
    for (int x = 0; x < 100; ++x) {
      // var fonts = fontsToTrain.Shuffle().ToList(); // foreach (var f in fonts) {
      // Visit the characters in a fresh random order on every pass.
      var ai = Enumerable.Range(0, alphabet.Length).Shuffle().ToList();
      // for (int i = 0; i < alphabet.Length; ++i)
      foreach (var i in ai) {
        // Random font and a size drawn from [9, 13).
        var f = fontsToTrain[Tools.rnd.Next(fontsToTrain.Length)];
        size = 9.0f + (float)Tools.rnd.NextDouble() * 4.0f;
        using (Font font = new Font(f, size)) {
          var a = alphabet[i];
          g.Clear(Color.Black);
          g.TextRenderingHint = TextRenderingHint.AntiAlias;
          g.CompositingQuality = System.Drawing.Drawing2D.CompositingQuality.HighQuality;
          g.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
          g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.HighQualityBicubic;
          // Rotate up to +/-22.5 degrees about the bitmap center.
          g.TranslateTransform(width / 2, height / 2);
          g.RotateTransform(((float)Tools.rnd.NextDouble()-0.5f) * 45);
          g.TranslateTransform(-width / 2, -height / 2);
          // Random sub-pixel jitter of up to +/-1 px in each direction.
          var dx = ((float)Tools.rnd.NextDouble() - 0.5f) * 2.0f;
          var dy = ((float)Tools.rnd.NextDouble() - 0.5f) * 2.0f;
          g.TranslateTransform(dx, dy);
          g.DrawString(a, font, Brushes.White, rect, stringFormat);
          g.ResetTransform();
          var inp = Tools.bmp_to_float(bmp);
          var outp = new float[alphabet.Length];
          outp[i] = 1.0f; // one-hot target for character i
          result.AddPair(inp, outp);
        }
      }
    }
  }
  result.VisualizeInput = visualizeInput;
  result.VisualizeOutput = visualizeOutput;
  return result;
}
/// <summary>
/// Mean of the per-sample MSE between the dataset's expected outputs and the
/// network's predictions, averaged over all samples.
/// </summary>
public float CalcMSE(Dataset data)
{
    float totalError = 0;
    for (int sample = 0; sample < data.input.Count; sample++)
    {
        totalError += CalcMSE(data.output[sample], Predict(data.input[sample]));
    }
    // Note: an empty dataset yields 0/0 (NaN for float), same as the original.
    return totalError / data.input.Count;
}
/// <summary>
/// Builds a regression problem instance for <paramref name="dataset"/> from this
/// descriptor's variables, metadata and partition boundaries.
/// </summary>
public IRegressionProblemData GenerateRegressionData(Dataset dataset)
{
    var regData = new RegressionProblemData(dataset, AllowedInputVariables, TargetVariable)
    {
        Name = this.Name,
        Description = this.Description,
    };
    regData.TrainingPartition.Start = this.TrainingPartitionStart;
    regData.TrainingPartition.End = this.TrainingPartitionEnd;
    regData.TestPartition.Start = this.TestPartitionStart;
    regData.TestPartition.End = this.TestPartitionEnd;
    return regData;
}
/// <summary>
/// Builds a classification problem instance for <paramref name="dataset"/> from
/// this descriptor's variables, metadata and partition boundaries.
/// </summary>
public IClassificationProblemData GenerateClassificationData(Dataset dataset)
{
    IClassificationProblemData claData = new ClassificationProblemData(dataset, AllowedInputVariables, TargetVariable);
    claData.Name = this.Name;
    claData.Description = this.Description;

    // Copy the descriptor's partition boundaries onto the problem data.
    claData.TrainingPartition.Start = this.TrainingPartitionStart;
    claData.TrainingPartition.End = this.TrainingPartitionEnd;
    claData.TestPartition.Start = this.TestPartitionStart;
    claData.TestPartition.End = this.TestPartitionEnd;
    return claData;
}
/// <summary>
/// A dataset with a valid name, a period code and a positive horizon is validated.
/// (Expected outcome is declared by test attributes outside this view.)
/// </summary>
public void IsValidDataset0()
{
    var dataset = new Dataset
    {
        Name = "validname",
        Period = PeriodCodes.Week,
        Horizon = 1,
    };

    dataset.Validate();
}
/// <summary>
/// Validates a dataset whose name contains a hyphen -- presumably an invalid
/// name (expected outcome is declared by test attributes outside this view).
/// </summary>
public void IsValidDataset1()
{
    var dataset = new Dataset
    {
        Name = "invalid-name",
        Period = "Week",
        Horizon = 1,
    };

    dataset.Validate();
}
/// <summary>
/// Validates a dataset whose period string is not a recognized period code
/// (expected outcome is declared by test attributes outside this view).
/// </summary>
public void IsValidDataset2()
{
    var dataset = new Dataset
    {
        Name = "validname",
        Period = "invalid",
        Horizon = 1,
    };

    dataset.Validate();
}
/// <summary>
/// Creates a dataset of uniformly random values in [-1, 1) with a first column
/// named "y" followed by input columns "x0" .. "x{columns-2}".
/// </summary>
public static Dataset CreateRandomDataset(MersenneTwister twister, int rows, int columns)
{
    var data = new double[rows, columns];
    for (int row = 0; row < rows; row++)
    {
        for (int col = 0; col < columns; col++)
        {
            // Map NextDouble()'s [0, 1) onto [-1, 1).
            data[row, col] = twister.NextDouble() * 2.0 - 1.0;
        }
    }

    IEnumerable<string> variableNames =
        new[] { "y" }.Concat(Enumerable.Range(0, columns - 1).Select(i => "x" + i.ToString()));
    return new Dataset(variableNames, data);
}
/// <summary>
/// Validates a dataset with a zero horizon (expected outcome is declared by
/// test attributes outside this view).
/// </summary>
public void IsValidDataset3()
{
    var dataset = new Dataset
    {
        Name = "validname",
        Period = "week",
        Horizon = 0,
    };

    dataset.Validate();
}
// Creates popSize random symbolic expression trees over the dataset's variables
// (all but the first, which is skipped -- presumably the target; confirm).
// NOTE(review): minSize, maxFunctionDefinitions and maxFunctionArguments are
// accepted but never used; creation only honors maxSize and a hard-coded second
// limit of 10 -- confirm whether this is intentional.
public static ISymbolicExpressionTree[] CreateRandomTrees(MersenneTwister twister, Dataset dataset, ISymbolicExpressionGrammar grammar, int popSize, int minSize, int maxSize, int maxFunctionDefinitions, int maxFunctionArguments) {
  // Point every variable symbol of the grammar at the dataset's variable names.
  foreach (Variable variableSymbol in grammar.Symbols.OfType<Variable>()) {
    variableSymbol.VariableNames = dataset.VariableNames.Skip(1);
  }
  ISymbolicExpressionTree[] randomTrees = new ISymbolicExpressionTree[popSize];
  for (int i = 0; i < randomTrees.Length; i++) {
    randomTrees[i] = ProbabilisticTreeCreator.Create(twister, grammar, maxSize, 10);
  }
  return randomTrees;
}
/// <summary>
/// Maps a dataset entity to its view model, extracting the title and contact
/// names from the latest version's metadata XML.
/// </summary>
public static DatasetModels Convert(Dataset dataset)
{
    // Hoisted: every XPath lookup below reads the same latest-version metadata;
    // the original re-evaluated dataset.Versions.Last().Metadata four times.
    var metadata = dataset.Versions.Last().Metadata;
    return new DatasetModels()
    {
        Id = dataset.Id,
        Version = dataset.VersionNo,
        Title = metadata.SelectNodes("Metadata/Description/Description/Title/Title")[0].InnerText,
        TechnicalContact = metadata.SelectNodes("Metadata/TechnicalContact/Person/Name/Name")[0].InnerText,
        ContentContact = metadata.SelectNodes("Metadata/ContentContact/Person/Name/Name")[0].InnerText,
        Owner = metadata.SelectNodes("Metadata/Owner/Owner/FullName/Name")[0].InnerText
    };
}
/// <summary>
/// Clusters MovieLens 1M users and items for even cluster counts 2..14 and
/// writes each clustering to its own CSV file.
/// </summary>
public void CreateClustersMovieLens()
{
    var reader = new MovieLensReader(Paths.MovieLens1M);
    var clusterer = new Clusterer(new Dataset<ItemRating>(reader));

    for (int clusterCount = 2; clusterCount < 15; clusterCount += 2)
    {
        clusterer.WriteUsersCluster(Paths.MovieLens1MUsersCluster + clusterCount + ".csv", clusterCount, 5);
        clusterer.WriteItemsCluster(Paths.MovieLens1MItemsCluster + clusterCount + ".csv", clusterCount, 5);
    }
}
// Runs the ADR-Miner data-reduction algorithm once (seeded by the given base
// classifier), then scores the best ant's reduced solution with every available
// Weka classifier against the test set. Returns one ResultObject per Weka
// algorithm with the achieved quality and the feature/instance ratios.
public static List <ResultObject> EvaluateACOMinerDR_WekaClassifier_Multi(string algorithm, string trainingSetPath, string testSetPath, Dataset trainingSet, bool useAttributes, bool useInstances) {
  Classifier classifier = WekaNETBridge.WekaClassification.GetWekaClassifier(algorithm, trainingSetPath);
  DefaultDRHeuristicCalculator calculator = new DefaultDRHeuristicCalculator();
  DRComponentInvalidator invalidator = new DRComponentInvalidator();
  WekaNETBridge.WekaClassification wekaClassification = new WekaNETBridge.WekaClassification(trainingSetPath, classifier);
  WekaClassificationQualityEvaluator evaluator = new WekaClassificationQualityEvaluator(wekaClassification, !useAttributes);
  DRLocalSearch localSearch = new DRLocalSearch(evaluator);
  Problem <DRComponent> problem = new Problem <DRComponent>(invalidator, calculator, evaluator, localSearch);
  // maxIterations, colonySize and convergenceIterations come from class-level state.
  ADRMiner acominer = new ADRMiner(maxIterations, colonySize, convergenceIterations, problem, useAttributes, useInstances, false, trainingSet);
  acominer.OnPostAntSolutionContruction += OnPostAntSolutionContruction;
  acominer.OnPostColonyIteration += OnPostColonyIteration;
  acominer.Initialize();
  acominer.Work();
  List <ResultObject> results = new List <ResultObject>();
  foreach (string algo in WekaNETBridge.WekaClassification.GetWekaAlgorithmNames()) {
    Classifier currentClassifier = WekaNETBridge.WekaClassification.GetWekaClassifier(algo, trainingSetPath);
    // Build a classifier from the best ant's reduced solution; score on the test set.
    WekaNETBridge.WekaClassifier final = evaluator.CreateWekaClassifier(currentClassifier, acominer.BestAnt.Solution);
    double quality = WekaNETBridge.WekaClassification.EvaluateClassifier(final, testSetPath);
    // Ratio of selected features/instances to the training set totals.
    double attribueReduction = acominer.BestAnt.Solution.FeatureCount() / (double)trainingSet.Metadata.Attributes.Length;
    double instanceReduction = acominer.BestAnt.Solution.InstanceCount() / (double)trainingSet.Size;
    ResultObject result = new ResultObject() { Quality = quality, AttributeReduction = attribueReduction, InstanceReduciton = instanceReduction };
    results.Add(result);
  }
  return(results);
}
// Entry point: opens a raster dataset (args[0]) via GDAL, prints its structure
// (projection, driver, bands and their overviews) and saves it as a bitmap to
// args[1]. An optional third argument selects the overview level to save.
public static void Main(string[] args) {
  int iOverview = -1; // -1 means "full-resolution raster, no overview"
  if (args.Length < 2) {
    usage();
  }
  if (args.Length == 3) {
    iOverview = int.Parse(args[2]);
  }
  // Using early initialization of System.Console
  Console.WriteLine("");
  try {
    /* -------------------------------------------------------------------- */
    /*      Register driver(s).                                             */
    /* -------------------------------------------------------------------- */
    Gdal.AllRegister();
    /* -------------------------------------------------------------------- */
    /*      Open dataset.                                                   */
    /* -------------------------------------------------------------------- */
    Dataset ds = Gdal.Open(args[0], Access.GA_ReadOnly);
    if (ds == null) {
      Console.WriteLine("Can't open " + args[0]);
      System.Environment.Exit(-1);
    }
    Console.WriteLine("Raster dataset parameters:");
    Console.WriteLine(" Projection: " + ds.GetProjectionRef());
    Console.WriteLine(" RasterCount: " + ds.RasterCount);
    Console.WriteLine(" RasterSize (" + ds.RasterXSize + "," + ds.RasterYSize + ")");
    /* -------------------------------------------------------------------- */
    /*      Get driver                                                      */
    /* -------------------------------------------------------------------- */
    Driver drv = ds.GetDriver();
    if (drv == null) {
      Console.WriteLine("Can't get driver.");
      System.Environment.Exit(-1);
    }
    Console.WriteLine("Using driver " + drv.LongName);
    /* -------------------------------------------------------------------- */
    /*      Get raster band                                                 */
    /* -------------------------------------------------------------------- */
    // Band indices in GDAL are 1-based.
    for (int iBand = 1; iBand <= ds.RasterCount; iBand++) {
      Band band = ds.GetRasterBand(iBand);
      Console.WriteLine("Band " + iBand + " :");
      Console.WriteLine(" DataType: " + band.DataType);
      Console.WriteLine(" Size (" + band.XSize + "," + band.YSize + ")");
      Console.WriteLine(" PaletteInterp: " + band.GetRasterColorInterpretation().ToString());
      // Report every overview (reduced-resolution copy) of the band.
      for (int iOver = 0; iOver < band.GetOverviewCount(); iOver++) {
        Band over = band.GetOverview(iOver);
        Console.WriteLine(" OverView " + iOver + " :");
        Console.WriteLine(" DataType: " + over.DataType);
        Console.WriteLine(" Size (" + over.XSize + "," + over.YSize + ")");
        Console.WriteLine(" PaletteInterp: " + over.GetRasterColorInterpretation().ToString());
      }
    }
    /* -------------------------------------------------------------------- */
    /*      Processing the raster                                           */
    /* -------------------------------------------------------------------- */
    SaveBitmapDirect(ds, args[1], iOverview);
  } catch (Exception e) {
    Console.WriteLine("Application error: " + e.Message);
  }
}
// Micro-benchmark: measures interpreter throughput (nodes per second) when
// evaluating the given trees over all rows of the dataset with the prognosis
// horizon. One warm-up pass runs before the timed repetitions.
public static double CalculateEvaluatedNodesPerSec(ISymbolicExpressionTree[] trees, ISymbolicTimeSeriesPrognosisExpressionTreeInterpreter interpreter, Dataset dataset, int repetitions) {
  interpreter.TargetVariable = dataset.VariableNames.First();
  // warm up
  IEnumerable <int> rows = Enumerable.Range(0, dataset.Rows - horizon);
  long nNodes = 0;
  for (int i = 0; i < trees.Length; i++) {
    // Estimated node evaluations: tree length x evaluated rows x horizon steps.
    nNodes += trees[i].Length * (dataset.Rows - horizon) * horizon;
    interpreter.GetSymbolicExpressionTreeValues(trees[i], dataset, rows, horizon).Count(); // count needs to evaluate all rows
  }
  // Timed runs: Start() without Reset() accumulates time across repetitions.
  Stopwatch watch = new Stopwatch();
  for (int rep = 0; rep < repetitions; rep++) {
    watch.Start();
    for (int i = 0; i < trees.Length; i++) {
      interpreter.GetSymbolicExpressionTreeValues(trees[i], dataset, rows, horizon).Count(); // count needs to evaluate all rows
    }
    watch.Stop();
  }
  Console.WriteLine("Random tree evaluation performance of " + interpreter.GetType() + ": " + watch.ElapsedMilliseconds + "ms " + Util.NodesPerSecond(nNodes * repetitions, watch) + " nodes/sec");
  return(Util.NodesPerSecond(nNodes * repetitions, watch));
}
// Parses one dataset section from the semicolon-separated 'data' lines starting
// at 'index': a header line (name; time unit; electrode count; timestamps...)
// followed by one line per electrode. When 'pro' is true the parsed set becomes
// the processed set; otherwise it is appended to the global list (with an
// optional UI entry). Returns the index of the first line after the section.
// (Comments translated from French.)
int AddDataset(List <string> data, int index, bool pro) {
  List <string> tempData;
  Dataset tempSet;
  int temp;
  GameObject tempPrefab;
  // Parse the first (header) line.
  tempData = data[index].Split(';').ToList();
  index++;
  // Create a dataset: field 0 is the name, field 2 the electrode count.
  tempSet = new Dataset(tempData[0], int.Parse(tempData[2], c));
  // Get the time unit
  tempSet.timeUnit = tempData[1];
  // Collect the timestamps from the remaining header fields.
  for (int i = 3; i < tempData.Count(); ++i) {
    // Only fields that parse as int are kept as timestamps.
    if (int.TryParse(tempData[i], NumberStyles.Any, c, out temp)) { tempSet.timestamps.Add(temp); }
  }
  // Parse one line per electrode.
  for (int i = 0; i < tempSet.electrodesNb; ++i) {
    // Read this electrode's fields.
    tempData = data[index].Split(';').ToList();
    //Debug.Log(tempData.Count + " " + data[index]);
    // Build the electrode struct: id, two labels, x/y/z position and value count.
    ElectrodeLocal EL = new ElectrodeLocal(int.Parse(tempData[0], c), tempData[1], tempData[2], float.Parse(tempData[3], c), float.Parse(tempData[4], c), float.Parse(tempData[5], c), tempData.Count() - 7);
    // Copy the measured values (fields 6..n-2) into the struct.
    for (int j = 6; j < tempData.Count() - 1; ++j) { float.TryParse(tempData[j], NumberStyles.Any, c, out EL.values[j - 6]); }
    // Append the electrode to the dataset.
    tempSet.data.Add(EL);
    index++;
  }
  // When 'pro' is set, this is the computed ("processed") set.
  if (pro) {
    electrodesData.proSet = tempSet;
    electrodesData.InstantiateElectrodes();
  }
  // Otherwise register the dataset normally.
  else {
    // Bump the dataset counter.
    electrodesData.datasetsCount++;
    // Add the completed dataset to the global list.
    electrodesData.sets.Add(tempSet);
    // Create the UI prefab entry, if there is a panel to host it.
    if (datasetsPanel != null) {
      tempPrefab = (GameObject)Instantiate(electrodesData.prefabUI, new Vector3(0, 0, 0), Quaternion.identity, datasetsPanel.transform);
      // Fill in the prefab's labels.
      tempPrefab.GetComponentsInChildren <Text>().ElementAt(0).text = "Set" + electrodesData.datasetsCount.ToString();
      tempPrefab.GetComponentsInChildren <Text>().ElementAt(1).text = tempSet.electrodesNb.ToString() + " electrodes";
      tempPrefab.GetComponentsInChildren <Text>().ElementAt(2).text = tempSet.name;
      // Move the prefab one slot up in the hierarchy.
      tempPrefab.transform.SetSiblingIndex(tempPrefab.transform.GetSiblingIndex() - 1);
      // Hook the remove button to the dataset-removal callback.
      tempPrefab.GetComponentInChildren <Button>().onClick.AddListener(() => electrodesData.RemoveDataset(tempPrefab));
    }
  }
  return(index);
}
/// <summary>
/// Associates a historical collection with the dataset it describes.
/// </summary>
public CollectionDescription(Dataset dataset, IHistoricalCollection collection)
    : this()
{
    this.collection = collection;
    this.dataset = dataset;
}
// Generates a complete OpenActive dataset-site HTML page: builds a strongly
// typed Dataset (OpenActive.NET), serializes it to embeddable JSON, fetches the
// official mustache template and renders the final page to stdout.
static void Main(string[] args) {
  // Platform-specific settings for dataset JSON
  var platform = new {
    platformName = "AcmeBooker",
    platformUrl = "https://acmebooker.example.com/"
  };
  // Customer-specific settings for dataset JSON (these should come from a database)
  var settings = new {
    organisationName = "Better",
    datasetSiteUrl = "https://halo-odi.legendonlineservices.co.uk/openactive/",
    datasetSiteDiscussionUrl = "https://github.com/gll-better/opendata",
    documentationUrl = "https://docs.acmebooker.example.com/",
    legalEntity = "GLL",
    plainTextDescription = "Established in 1993, GLL is the largest UK-based charitable social enterprise delivering leisure, health and community services. Under the consumer facing brand Better, we operate 258 public Sports and Leisure facilities, 88 libraries, 10 children’s centres and 5 adventure playgrounds in partnership with 50 local councils, public agencies and sporting organisations. Better leisure facilities enjoy 46 million visitors a year and have more than 650,000 members.",
    email = "*****@*****.**",
    url = "https://www.better.org.uk/",
    logoUrl = "http://data.better.org.uk/images/logo.png",
    backgroundImageUrl = "https://data.better.org.uk/images/bg.jpg",
    baseUrl = "https://halo-odi.legendonlineservices.co.uk/api/"
  };
  // Strongly typed JSON generation based on OpenActive.NET
  var data = new Dataset {
    Id = settings.datasetSiteUrl.ParseUrlOrNull(),
    Url = settings.datasetSiteUrl.ParseUrlOrNull(),
    Name = settings.organisationName + " Sessions and Facilities",
    Description = $"Near real-time availability and rich descriptions relating to the sessions and facilities available from {settings.organisationName}, published using the OpenActive Modelling Specification 2.0.",
    Keywords = new List <string> { "Sessions", "Facilities", "Activities", "Sports", "Physical Activity", "OpenActive" },
    License = new Uri("https://creativecommons.org/licenses/by/4.0/"),
    DiscussionUrl = settings.datasetSiteDiscussionUrl.ParseUrlOrNull(),
    Documentation = settings.documentationUrl.ParseUrlOrNull(),
    InLanguage = new List <string> { "en-GB" },
    BookingService = new BookingService {
      Name = platform.platformName,
      Url = platform.platformUrl.ParseUrlOrNull(),
      SoftwareVersion = Utils.ApplicationVersion.GetVersion(),
    },
    SchemaVersion = "https://www.openactive.io/modelling-opportunity-data/2.0/".ParseUrlOrNull(),
    Publisher = new Organization {
      Name = settings.organisationName,
      LegalName = settings.legalEntity,
      Description = settings.plainTextDescription,
      Email = settings.email,
      Url = settings.url.ParseUrlOrNull(),
      Logo = new ImageObject { Url = settings.logoUrl.ParseUrlOrNull() }
    },
    // One RPDE feed (DataDownload) per opportunity type exposed by the booking system.
    Distribution = new List <DataDownload> {
      new DataDownload { Name = "SessionSeries", AdditionalType = new Uri("https://openactive.io/SessionSeries"), EncodingFormat = OpenActiveMediaTypes.RealtimePagedDataExchange.Version1, ContentUrl = (settings.baseUrl + "feeds/session-series").ParseUrlOrNull(), Identifier = "SessionSeries" },
      new DataDownload { Name = "ScheduledSession", AdditionalType = new Uri("https://openactive.io/ScheduledSession"), EncodingFormat = OpenActiveMediaTypes.RealtimePagedDataExchange.Version1, ContentUrl = (settings.baseUrl + "feeds/scheduled-sessions").ParseUrlOrNull(), Identifier = "ScheduledSession" },
      new DataDownload { Name = "FacilityUse", AdditionalType = new Uri("https://openactive.io/FacilityUse"), EncodingFormat = OpenActiveMediaTypes.RealtimePagedDataExchange.Version1, ContentUrl = (settings.baseUrl + "feeds/facility-uses").ParseUrlOrNull(), Identifier = "FacilityUse" },
      new DataDownload { Name = "Slot for FacilityUse", AdditionalType = new Uri("https://openactive.io/Slot"), EncodingFormat = OpenActiveMediaTypes.RealtimePagedDataExchange.Version1, ContentUrl = (settings.baseUrl + "feeds/slots").ParseUrlOrNull(), Identifier = "FacilityUseSlot" }
    },
    DatePublished = DateTimeOffset.UtcNow,
    DateModified = DateTimeOffset.UtcNow,
    BackgroundImage = new ImageObject { Url = settings.backgroundImageUrl.ParseUrlOrNull() }
  };
  // OpenActive.NET creates complete JSON from the strongly typed structure, complete with schema.org types.
  var jsonString = OpenActiveSerializer.SerializeToHtmlEmbeddableString(data);
  // Deserialize the completed JSON object to make it compatible with the mustache template
  dynamic jsonData = JsonConvert.DeserializeObject(jsonString);
  // Stringify the input JSON, and place the contents of the string
  // within the "json" property at the root of the JSON itself.
  jsonData.json = jsonString;
  // Download the mustache template
  // FOR PRODUCTION USE DO NOT DOWNLOAD THE MUSTACHE FILE LIVE, A COPY MUST BE STORED LOCALLY TO PREVENT XSS ATTACKS
  var client = new RestClient("https://www.openactive.io/");
  var request = new RestRequest("dataset-site-template/datasetsite.mustache", Method.GET);
  request.OnBeforeDeserialization = resp => { resp.ContentType = "application/json"; };
  var queryResult = client.Execute(request);
  var template = queryResult.Content;
  //Use the resulting JSON with the mustache template to render the dataset site.
  var stubble = new StubbleBuilder().Configure(s => s.AddJsonNet()).Build();
  var output = stubble.Render(template, jsonData);
  //Output HTML for the completed page
  // Note to test this simply add "> output.txt" to the command-line arguments in Visual Studio's debug properties.
  Console.WriteLine(output);
}
// Verifies parsing of a Google Analytics data feed: feed-level aggregates, data-source
// properties, per-entry dimensions/metrics (checked both via the raw DataFeed and the
// Dataset wrapper), and segments.
// NOTE(review): inside the verbatim string below the double quotes in gd:etag would need
// to be doubled ("") and the bare '&' characters escaped as '&amp;' for this to compile
// and parse; the text is kept byte-identical here -- confirm against the original test file.
public void DataParseTest() { string xml = @"<?xml version='1.0' encoding='UTF-8'?> <feed xmlns='http://www.w3.org/2005/Atom' xmlns:dxp='http://schemas.google.com/analytics/2009' xmlns:openSearch='http://a9.com/-/spec/opensearch/1.1/' xmlns:gd='http://schemas.google.com/g/2005' gd:etag='W/"DUINSHcycSp7I2A9WxRWFEQ."' gd:kind='analytics#data'> <id>http://www.google.com/analytics/feeds/data?ids=ga:1174&dimensions=ga:medium,ga:source&metrics=ga:bounces,ga:visits&filters=ga:medium%3D%3Dreferral&start-date=2008-10-01&end-date=2008-10-31</id> <updated>2008-10-31T16:59:59.999-07:00</updated> <title>Google Analytics Data for Profile 1174</title> <link rel='self' type='application/atom+xml' href='http://www.google.com/analytics/feeds/data?max-results=5&sort=-ga%3Avisits&end-date=2008-10-31&start-date=2008-10-01&metrics=ga%3Avisits%2Cga%3Abounces&ids=ga%3A1174&dimensions=ga%3Asource%2Cga%3Amedium&filters=ga%3Amedium%3D%3Dreferral'/> <link rel='next' type='application/atom+xml' href='http://www.google.com/analytics/feeds/data?start-index=6&max-results=5&sort=-ga%3Avisits&end-date=2008-10-31&start-date=2008-10-01&metrics=ga%3Avisits%2Cga%3Abounces&ids=ga%3A1174&dimensions=ga%3Asource%2Cga%3Amedium&filters=ga%3Amedium%3D%3Dreferral'/> <author> <name>Google Analytics</name> </author> <generator version='1.0'>Google Analytics</generator> <openSearch:totalResults>6451</openSearch:totalResults> <openSearch:startIndex>1</openSearch:startIndex> <openSearch:itemsPerPage>5</openSearch:itemsPerPage> <dxp:aggregates> <dxp:metric confidenceInterval='0.0' name='ga:visits' type='integer' value='136540'/> <dxp:metric confidenceInterval='0.0' name='ga:bounces' type='integer' value='101535'/> </dxp:aggregates> <dxp:dataSource> <dxp:property name='ga:profileId' value='1174'/> <dxp:property name='ga:webPropertyId' value='UA-30481-1'/> <dxp:property name='ga:accountName' value='Google Store'/> <dxp:tableId>ga:1174</dxp:tableId> <dxp:tableName>www.googlestore.com</dxp:tableName> </dxp:dataSource> 
 <dxp:endDate>2008-10-31</dxp:endDate> <dxp:startDate>2008-10-01</dxp:startDate> <dxp:segment id='gaid::-11' name='Mobile Traffic'> <dxp:definition>ga:operatingSystem==iPhone</dxp:definition> </dxp:segment> <entry gd:etag='W/"C0UEQX47eSp7I2A9WxRWFEw."' gd:kind='analytics#datarow'> <id>http://www.google.com/analytics/feeds/data?ids=ga:1174&ga:medium=referral&ga:source=blogger.com&filters=ga:medium%3D%3Dreferral&start-date=2008-10-01&end-date=2008-10-31</id> <updated>2008-10-30T17:00:00.001-07:00</updated> <title>ga:source=blogger.com | ga:medium=referral</title> <link rel='alternate' type='text/html' href='http://www.google.com/analytics'/> <dxp:dimension name='ga:source' value='blogger.com'/> <dxp:dimension name='ga:medium' value='referral'/> <dxp:metric confidenceInterval='0.0' name='ga:visits' type='integer' value='68140'/> <dxp:metric confidenceInterval='0.0' name='ga:bounces' type='integer' value='61095'/> </entry> <entry gd:etag='W/"C0UEQX47eSp7I2A9WxRWFEw."' gd:kind='analytics#datarow'> <id>http://www.google.com/analytics/feeds/data?ids=ga:1174&ga:medium=referral&ga:source=google.com&filters=ga:medium%3D%3Dreferral&start-date=2008-10-01&end-date=2008-10-31</id> <updated>2008-10-30T17:00:00.001-07:00</updated> <title>ga:source=google.com | ga:medium=referral</title> <link rel='alternate' type='text/html' href='http://www.google.com/analytics'/> <dxp:dimension name='ga:source' value='google.com'/> <dxp:dimension name='ga:medium' value='referral'/> <dxp:metric confidenceInterval='0.0' name='ga:visits' type='integer' value='29666'/> <dxp:metric confidenceInterval='0.0' name='ga:bounces' type='integer' value='14979'/> </entry> </feed> ";
// Parse the XML into a feed and wrap it in a Dataset; AutoPaging is disabled so the
// wrapper does not try to follow the <link rel='next'> element.
Google.GData.Analytics.DataFeed feed = Parse(xml);
Dataset f = new Dataset(feed);
f.AutoPaging = false;
// Feed-level structures must be present.
Assert.IsNotNull(f.Aggregates);
Assert.IsNotNull(f.DataSource);
Assert.IsNotNull(f.Entries);
// Aggregate metric values come from the <dxp:aggregates> element above.
Assert.AreEqual(136540, f.Aggregates.Metrics[0].IntegerValue);
Assert.AreEqual("0.0", f.Aggregates.Metrics[0].ConfidenceInterval);
Assert.AreEqual("ga:visits", f.Aggregates.Metrics[0].Name);
Assert.AreEqual("integer", f.Aggregates.Metrics[0].Type);
// Data-source properties come from the <dxp:dataSource> element.
Assert.AreEqual("ga:1174", f.DataSource.TableId);
Assert.AreEqual("www.googlestore.com", f.DataSource.TableName);
Assert.AreEqual("1174", f.DataSource.ProfileId);
Assert.AreEqual("UA-30481-1", f.DataSource.WebPropertyId);
Assert.AreEqual("Google Store", f.DataSource.AccountName);
// The raw feed exposes the same aggregate value as a string.
Assert.AreEqual(136540, Int32.Parse(feed.Aggregates.Metrics[0].Value));
// First entry: blogger.com referral traffic.
DataEntry blogger = feed.Entries[0] as DataEntry;
Assert.IsNotNull(blogger, "entry");
Assert.IsNotNull(blogger.Dimensions);
Assert.IsNotNull(blogger.Metrics);
Assert.AreEqual("ga:source", blogger.Dimensions[0].Name);
Assert.AreEqual("blogger.com", blogger.Dimensions[0].Value);
Assert.AreEqual("ga:visits", blogger.Metrics[0].Name);
Assert.AreEqual(68140, int.Parse(blogger.Metrics[0].Value));
// Second entry: google.com referral traffic.
DataEntry google = feed.Entries[1] as DataEntry;
Assert.IsNotNull(google, "entry");
Assert.IsNotNull(google.Dimensions);
Assert.IsNotNull(google.Metrics);
Assert.AreEqual("ga:source", google.Dimensions[0].Name);
Assert.AreEqual("google.com", google.Dimensions[0].Value);
Assert.AreEqual("ga:visits", google.Metrics[0].Name);
Assert.AreEqual(29666, int.Parse(google.Metrics[0].Value));
// Every raw-feed entry should carry non-empty dimensions and positive visit counts.
foreach (DataEntry entry in feed.Entries) {
    Assert.IsNotNull(entry, "entry");
    Assert.IsNotNull(entry.Dimensions);
    Assert.IsNotNull(entry.Metrics);
    Assert.AreEqual("ga:source", entry.Dimensions[0].Name);
    Assert.IsNotEmpty(entry.Dimensions[0].Value);
    Assert.AreEqual("ga:visits", entry.Metrics[0].Name);
    Assert.Greater(int.Parse(entry.Metrics[0].Value), 0);
}
// Same invariants through the Dataset wrapper's Data entries.
foreach (Data d in f.Entries) {
    Assert.IsNotNull(d, "entry");
    Assert.IsNotNull(d.Dimensions);
    Assert.IsNotNull(d.Metrics);
    Assert.AreEqual("ga:source", d.Dimensions[0].Name);
    Assert.IsNotEmpty(d.Dimensions[0].Value);
    Assert.AreEqual("ga:visits", d.Metrics[0].Name);
    Assert.Greater(int.Parse(d.Metrics[0].Value), 0);
}
// The segment defined by <dxp:segment> is exposed with id, name and definition.
Assert.IsNotEmpty(feed.Segments);
Segment s = feed.Segments[0];
Assert.IsNotNull(s.Name);
Assert.AreEqual(s.Id, "gaid::-11");
Assert.IsNotNull(s.Id);
Assert.AreEqual(s.Name, "Mobile Traffic");
Assert.IsNotNull(s.Definition);
Assert.IsNotEmpty(s.Definition.Value);
Assert.AreEqual(s.Definition.Value, "ga:operatingSystem==iPhone"); }
/// <summary>
/// Builds a dataset from <see cref="VariableNames"/> and freshly generated values, then
/// delegates to the dataset-based overload to produce the regression problem data.
/// </summary>
public IRegressionProblemData GenerateRegressionData() {
  var values = this.GenerateValues();
  var dataset = new Dataset(VariableNames, values);
  return GenerateRegressionData(dataset);
}
/// <summary>
/// Builds the admin dashboard payload: summary widget counters plus a line chart of user
/// registrations per day over the last week (today first, i.e. labels in descending date
/// order, matching the original behavior).
/// </summary>
/// <returns>JSON with a <c>widget</c> object and a <c>chart</c> array.</returns>
public IActionResult Dashboard() {
  // Summary counters for the dashboard widgets.
  Widgets widgets = new Widgets {
    Users = userService.GetAll().Count(),
    Roles = roleService.GetAll().Count(),
    Menus = menuService.GetAll().Count(),
    Accounts = ledgerAccountService.GetAll().Count()
  };

  List<RootChart> rootChart = new List<RootChart>();
  int lastDay = 7;
  var lastDays = DateTime.Now.Date.AddDays(-lastDay);
  List<string> dateList = new List<string>();
  List<int> dataList = new List<int>();
  List<Dataset> datasets = new List<Dataset>();

  // Registrations within the last week, materialized once.
  var lastRegister = userService.GetAll()
    .Where(i => i.DateAdded >= lastDays)
    .Select(i => new { DateAdded = i.DateAdded })
    .ToArray();

  // One data point per day, starting from today and walking backwards.
  for (int i = 0; i < lastDay; i++) {
    DateTime day = DateTime.Now.Date.AddDays(-i);
    // FIX: the original enumerated the filtered sequence up to three times
    // (Where + Count() twice) and duplicated the dateList.Add call in both
    // branches; Count(predicate) computes the same value in one pass.
    int registrations = lastRegister.Count(j => j.DateAdded.Value.Date == day);
    dateList.Add(day.ToString("yyyy-MM-dd"));
    dataList.Add(registrations);
  }

  Dataset dataset = new Dataset {
    Data = dataList,
    Label = "Users Count",
    Fill = false,
    BorderColor = "#4bc0c0",
    BackgroundColor = "#42A5F5"
  };
  datasets.Add(dataset);
  RootChart rootChartLi = new RootChart();
  rootChartLi.Labels = dateList;
  rootChartLi.Datasets = datasets;
  rootChart.Add(rootChartLi);
  return Json(new { widget = widgets, chart = rootChart });
}
/// <summary>
/// This is the method that actually does the work.
/// Opens the source GDAL dataset, normalizes its spatial reference (ESRI-format WKT or, as
/// a fallback, the Ground Control Point SRS), reports dataset info, and optionally warps it
/// to a destination file via GDALWarp using user-supplied command-line-style options.
/// </summary>
/// <param name="DA">The DA object is used to retrieve from inputs and store in outputs.</param>
protected override void SolveInstance(IGH_DataAccess DA) {
  // Input 0: source file; input 1: destination file; input 2: gdalwarp option string.
  string datasourceFileLocation = string.Empty;
  DA.GetData<string>(0, ref datasourceFileLocation);
  string dstFileLocation = string.Empty;
  DA.GetData<string>(1, ref dstFileLocation);
  string options = string.Empty;
  DA.GetData<string>(2, ref options);
  // Split the option string like a shell would: quoted runs stay together,
  // otherwise tokens are separated on spaces.
  var re = new System.Text.RegularExpressions.Regex("(?<=\")[^\"]*(?=\")|[^\" ]+");
  string[] translateOptions = re.Matches(options).Cast<Match>().Select(m => m.Value).ToArray();
  string datasourceInfo = string.Empty;
  string dstInfo = string.Empty;
  string dstOutput = string.Empty;
  RESTful.GdalConfiguration.ConfigureGdal();
  OSGeo.GDAL.Gdal.AllRegister();
  AddRuntimeMessage(GH_RuntimeMessageLevel.Remark, "Look for more information about options at:");
  AddRuntimeMessage(GH_RuntimeMessageLevel.Remark, "https://gdal.org/programs/gdalwarp.html");
  if (!string.IsNullOrEmpty(datasourceFileLocation)) {
    using (Dataset datasource = Gdal.Open(datasourceFileLocation, Access.GA_ReadOnly)) {
      if (datasource == null) {
        throw new Exception("Can't open GDAL dataset: " + datasourceFileLocation);
      }
      SpatialReference sr = new SpatialReference(datasource.GetProjection());
      ///Check if SRS needs to be converted from ESRI format to WKT to avoid error:
      ///"No translation for Lambert_Conformal_Conic to PROJ.4 format is known."
      ///https://gis.stackexchange.com/questions/128266/qgis-error-6-no-translation-for-lambert-conformal-conic-to-proj-4-format-is-kn
      // NOTE(review): srEsri is an alias of sr (reference copy), so MorphFromESRI()
      // mutates the original SpatialReference object as well -- confirm this is intended.
      SpatialReference srEsri = sr;
      srEsri.MorphFromESRI();
      string projEsri = string.Empty;
      srEsri.ExportToWkt(out projEsri);
      ///If no SRS exists, check Ground Control Points SRS
      SpatialReference srGCP = new SpatialReference(datasource.GetGCPProjection());
      string projGCP = string.Empty;
      srGCP.ExportToWkt(out projGCP);
      if (!string.IsNullOrEmpty(projEsri)) {
        datasource.SetProjection(projEsri);
        sr = srEsri;
        AddRuntimeMessage(GH_RuntimeMessageLevel.Remark, "Spatial Reference System (SRS) morphed form ESRI format.");
      } else if (!string.IsNullOrEmpty(projGCP)) {
        datasource.SetProjection(projGCP);
        sr = srGCP;
        AddRuntimeMessage(GH_RuntimeMessageLevel.Remark, "Spatial Reference System (SRS) set from Ground Control Points (GCPs).");
      } else {
        AddRuntimeMessage(GH_RuntimeMessageLevel.Remark, "Spatial Reference System (SRS) is unknown or unsupported. " +
          "Try setting the SRS with the GdalWarp component using -t_srs EPSG:4326 for the option input.");
        //sr.SetWellKnownGeogCS("WGS84");
      }
      ///Get info about image
      List<string> infoOptions = new List<string> { "-stats" };
      datasourceInfo = Gdal.GDALInfo(datasource, new GDALInfoOptions(infoOptions.ToArray()));
      if (!string.IsNullOrEmpty(dstFileLocation)) {
        if (string.IsNullOrEmpty(options) && File.Exists(dstFileLocation)) {
          // No warp options and the destination already exists: just report its info.
          Dataset dst = Gdal.Open(dstFileLocation, Access.GA_ReadOnly);
          dstInfo = Gdal.GDALInfo(dst, null);
          dst.Dispose();
          dstOutput = dstFileLocation;
        } else {
          ///https://github.com/OSGeo/gdal/issues/813
          ///https://lists.osgeo.org/pipermail/gdal-dev/2017-February/046046.html
          ///Odd way to go about setting source dataset in parameters for Warp is a known issue
          // Pin an array holding the dataset's native handle so GDALWarp can consume it
          // as a GDALDatasetH*; the pin is released in the finally block below.
          var ptr = new[] { Dataset.getCPtr(datasource).Handle };
          var gcHandle = GCHandle.Alloc(ptr, GCHandleType.Pinned);
          try {
            var dss = new SWIGTYPE_p_p_GDALDatasetShadow(gcHandle.AddrOfPinnedObject(), false, null);
            Dataset dst = Gdal.wrapper_GDALWarpDestName(dstFileLocation, 1, dss, new GDALWarpAppOptions(translateOptions), null, null);
            if (dst == null) {
              throw new Exception("GdalWarp failed: " + Gdal.GetLastErrorMsg());
            }
            dstInfo = Gdal.GDALInfo(dst, new GDALInfoOptions(infoOptions.ToArray()));
            dst.Dispose();
            dstOutput = dstFileLocation;
          } finally {
            if (gcHandle.IsAllocated) {
              gcHandle.Free();
            }
          }
        }
      }
      datasource.Dispose();
    }
  }
  // Output 0: source info text; output 1: destination info text; output 2: destination path.
  DA.SetData(0, datasourceInfo);
  DA.SetData(1, dstInfo);
  DA.SetData(2, dstOutput);
}
/// <summary>
/// Populates the editor UI from the question row: text, difficulty/type/subtype combos,
/// optional picture, and the answers grid; disables controls that are fixed by the
/// question set, the subtype, or a parent passage question.
/// </summary>
private void QuestionEditor_Load(object sender, EventArgs e) {
  isInit = true;
  dataSet = (Dataset)dbObject.FullDataset.Copy();
  _questionRow = ((Question)dbObject).Value;
  textTextBox.Text = _questionRow.Text;
  int setId = _questionRow.SetId;

  // FIX: the original performed this identical lookup five times; hoist it once.
  // Nothing below mutates QuestionsEx, so the row reference stays valid.
  var questionEx = dataSet.QuestionsEx.FindByIdSetId(_questionRow.Id, _questionRow.SetId);

  // Difficulty combo: SelectedIndex is the 1-based id shifted to 0-based.
  difficutlyLevelcomboBox.DataSource = dataSet.DifficultyLevel;
  difficutlyLevelcomboBox.DisplayMember = "Name";
  difficutlyLevelcomboBox.SelectedIndex = questionEx.DifficultyLevelId - 1;
  difficutlyLevelcomboBox.Update();

  // Subtype 4 questions cannot change subtype.
  if (_questionRow.SubtypeId == 4) {
    subTypecomboBox.Enabled = false;
  }

  // Type combo.
  typeComboBox.DataSource = dataSet.QuestionTypes;
  typeComboBox.DisplayMember = "Name";
  typeComboBox.ValueMember = "id";
  typeComboBox.SelectedValue = questionEx.TypeId;
  typeComboBox.Update();

  // Subtype combo.
  subTypecomboBox.DataSource = dataSet.QuestionSubtypes;
  subTypecomboBox.DisplayMember = "Name";
  subTypecomboBox.ValueMember = "id";
  subTypecomboBox.SelectedValue = questionEx.SubtypeId;
  subTypecomboBox.Update();

  // Optional picture: written to a temp file and shown via ImageLocation.
  if (!_questionRow.IsPictureNull()) {
    File.WriteAllBytes(Application.StartupPath + @"\templates\Picture.bmp", _questionRow.Picture);
    pictureBox.ImageLocation = Application.StartupPath + @"\templates\Picture.bmp";
    addPicturebutton.Text = "edit";
    picture = _questionRow.Picture;
  }

  // When the question set pins the type/subtype, preselect it and lock the combo.
  if (!dataSet.QuestionSetsEx.FindById(setId).IsQuestionTypeIdNull()) {
    typeComboBox.SelectedIndex = questionEx.TypeId - 1;
    typeComboBox.Update();
    typeComboBox.Enabled = false;
  }
  if (dbObject.Parent is PassageQuestion) {
    subTypecomboBox.Enabled = false;
  }
  if (!dataSet.QuestionSetsEx.FindById(setId).IsQuestionSubtypeIdNull()) {
    subTypecomboBox.SelectedIndex = questionEx.SubtypeId - 1;
    subTypecomboBox.Update();
    subTypecomboBox.Enabled = false;
  }

  // Keep only this question's answers in the local copy; count the kept ones so the
  // order sequence continues from the last existing answer.
  for (int i = 0; i < dataSet.Answers.Count; ++i) {
    if (dataSet.Answers[i].QuestionId != Id) {
      dataSet.Answers[i].Delete();
    } else {
      lastOrder++;
    }
  }

  // Questions containing MathML are edited through the dedicated formula editor.
  if (textTextBox.Text.LastIndexOf("<math display = \"block\">") != -1) {
    textTextBox.Enabled = false;
    addFormulaButton.Text = "edit question with formulas";
  }

  answersDataGridView.DataSource = dataSet;
  ListSortDirection direction = ListSortDirection.Ascending;
  answersDataGridView.Sort(answersDataGridView.Columns[orderDataGridViewTextBoxColumn.Index], direction);
  isInit = false;
}
/// <summary>
/// Creates a description that ties a historical collection to its owning dataset.
/// </summary>
/// <param name="id">Identifier of this collection description.</param>
/// <param name="dataset">Dataset the collection belongs to.</param>
/// <param name="collection">The historical collection being described.</param>
public CollectionDescription(int id, Dataset dataset, IHistoricalCollection collection) { this.id = id; this.dataset = dataset; this.collection = collection; }
/// <summary>
/// Saves bands 1-3 of <paramref name="ds"/> (interpreted as R, G, B) into an image file
/// via a 32bpp bitmap filled by direct GDAL raster reads. Palette and gray datasets are
/// delegated to their specialized writers.
/// </summary>
/// <param name="ds">Source GDAL dataset.</param>
/// <param name="filename">Destination image file path.</param>
/// <param name="iOverview">Overview level to read, or a negative value for full resolution.</param>
private static void SaveBitmapDirect(Dataset ds, string filename, int iOverview) {
  // Get the GDAL Band objects from the Dataset.
  Band redBand = ds.GetRasterBand(1);
  if (redBand.GetRasterColorInterpretation() == ColorInterp.GCI_PaletteIndex) {
    SaveBitmapPaletteDirect(ds, filename, iOverview);
    return;
  }
  if (redBand.GetRasterColorInterpretation() == ColorInterp.GCI_GrayIndex) {
    SaveBitmapGrayDirect(ds, filename, iOverview);
    return;
  }
  if (ds.RasterCount < 3) {
    Console.WriteLine("The number of the raster bands is not enough to run this sample");
    System.Environment.Exit(-1);
  }
  // Swap each band for the requested overview when one is available.
  if (iOverview >= 0 && redBand.GetOverviewCount() > iOverview) {
    redBand = redBand.GetOverview(iOverview);
  }
  Band greenBand = ds.GetRasterBand(2);
  if (iOverview >= 0 && greenBand.GetOverviewCount() > iOverview) {
    greenBand = greenBand.GetOverview(iOverview);
  }
  Band blueBand = ds.GetRasterBand(3);
  if (iOverview >= 0 && blueBand.GetOverviewCount() > iOverview) {
    blueBand = blueBand.GetOverview(iOverview);
  }
  // Get the width and height of the Dataset.
  int width = redBand.XSize;
  int height = redBand.YSize;
  // FIX: the bitmap is IDisposable and was leaked in the original.
  using (Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format32bppRgb)) {
    DateTime start = DateTime.Now;
    // Use GDAL raster reading methods to read the image data directly into the Bitmap.
    BitmapData bitmapData = bitmap.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.ReadWrite, PixelFormat.Format32bppRgb);
    try {
      int stride = bitmapData.Stride;
      IntPtr buf = bitmapData.Scan0;
      // Interleave B, G, R into the 32bpp buffer (pixel spacing 4 bytes).
      // FIX: buf.ToInt32() throws OverflowException for pointers above 2 GB on 64-bit
      // processes; IntPtr.Add performs pointer-size-safe arithmetic.
      blueBand.ReadRaster(0, 0, width, height, buf, width, height, DataType.GDT_Byte, 4, stride);
      greenBand.ReadRaster(0, 0, width, height, IntPtr.Add(buf, 1), width, height, DataType.GDT_Byte, 4, stride);
      redBand.ReadRaster(0, 0, width, height, IntPtr.Add(buf, 2), width, height, DataType.GDT_Byte, 4, stride);
      TimeSpan renderTime = DateTime.Now - start;
      Console.WriteLine("SaveBitmapDirect fetch time: " + renderTime.TotalMilliseconds + " ms");
    } finally {
      bitmap.UnlockBits(bitmapData);
    }
    bitmap.Save(filename);
  }
}
/// <summary>
/// Runs RandomDR data reduction over <paramref name="trainingSet"/>, converts the best ant
/// solution into a Weka classifier, evaluates it on the test set, and reports quality plus
/// attribute/instance retention ratios.
/// </summary>
public static ResultObject EvaluateRandDR_WekaClassifier(string algorithm, string trainingSetPath, string testSetPath, Dataset trainingSet, bool useAttributes, bool useInstances) {
  var baseClassifier = WekaNETBridge.WekaClassification.GetWekaClassifier(algorithm, trainingSetPath);
  var heuristics = new DefaultHeuristicCalculator<DRComponent>(trainingSet);
  var componentInvalidator = new DRComponentInvalidator();
  var classification = new WekaNETBridge.WekaClassification(trainingSetPath, baseClassifier);
  var qualityEvaluator = new WekaClassificationQualityEvaluator(classification, true);
  var search = new DRLocalSearch(qualityEvaluator);
  var problem = new Problem<DRComponent>(componentInvalidator, heuristics, qualityEvaluator, search);

  var random = new RandomDR(maxIterations, colonySize, 50, problem, useAttributes, useInstances, false, trainingSet);
  random.OnPostAntSolutionContruction += OnPostAntSolutionContruction;
  random.OnPostColonyIteration += OnPostColonyIteration;

  var reducedClassifier = random.CreateWekaClassifier();
  double accuracy = WekaNETBridge.WekaClassification.EvaluateClassifier(reducedClassifier, testSetPath);
  // Fractions of attributes/instances retained by the best ant's solution.
  double attributeRatio = random.BestAnt.Solution.FeatureCount() / (double)trainingSet.Metadata.Attributes.Length;
  double instanceRatio = random.BestAnt.Solution.InstanceCount() / (double)trainingSet.Size;

  return new ResultObject {
    Quality = accuracy,
    AttributeReduction = attributeRatio,
    InstanceReduciton = instanceRatio
  };
}
/// <summary>
/// Runs GreedyDR data reduction over <paramref name="trainingSet"/>, converts the best ant
/// solution into a Weka classifier, evaluates it on the test set, and reports quality plus
/// attribute/instance retention ratios.
/// </summary>
public static ResultObject EvaluateGreedyDR_WekaClassifier(string algorithm, string trainingSetPath, string testSetPath, Dataset trainingSet, bool useAttributes, bool useInstances) {
  var baseClassifier = WekaNETBridge.WekaClassification.GetWekaClassifier(algorithm, trainingSetPath);
  var classification = new WekaNETBridge.WekaClassification(trainingSetPath, baseClassifier);
  var qualityEvaluator = new WekaClassificationQualityEvaluator(classification, true);
  var search = new DRLocalSearch(qualityEvaluator);
  // The greedy variant uses neither a component invalidator nor a heuristic calculator.
  var problem = new Problem<DRComponent>(null, null, qualityEvaluator, search);

  var greedy = new GreedyDR(maxIterations, colonySize, convergenceIterations, problem, useAttributes, useInstances, trainingSet);
  greedy.OnPostAntSolutionContruction += OnPostAntSolutionContruction;
  greedy.OnPostColonyIteration += OnPostColonyIteration;

  var reducedClassifier = greedy.CreateWekaClassifier();
  double accuracy = WekaNETBridge.WekaClassification.EvaluateClassifier(reducedClassifier, testSetPath);
  // Fractions of attributes/instances retained by the best ant's solution.
  double attributeRatio = greedy.BestAnt.Solution.FeatureCount() / (double)trainingSet.Metadata.Attributes.Length;
  double instanceRatio = greedy.BestAnt.Solution.InstanceCount() / (double)trainingSet.Size;

  return new ResultObject {
    Quality = accuracy,
    AttributeReduction = attributeRatio,
    InstanceReduciton = instanceRatio
  };
}
/// <summary>
/// Initializes conjugate-gradient descent over the given tree ensemble, delegating common
/// setup to the base optimizer and allocating one buffer slot per training document.
/// </summary>
public ConjugateGradientDescent(InternalTreeEnsemble ensemble, Dataset trainData, double[] initTrainScores, IGradientAdjuster gradientWrapper) : base(ensemble, trainData, initTrainScores, gradientWrapper) {
  // Per-document buffer; presumably the CG search direction d_k -- confirm against usages.
  _currentDk = new double[trainData.NumDocs];
}
//http://www.gisremotesensing.com/2015/09/vector-to-raster-conversion-using-gdal-c.html
/// <summary>
/// Converts a vector feature source into a GeoTIFF raster: creates an empty raster covering
/// the first layer's extent at the requested cell size, copies the layer's SRS, then burns
/// the named attribute field into band 1.
/// </summary>
/// <param name="inputFeature">Path to the input vector datasource (readable by OGR).</param>
/// <param name="outRaster">Path of the GeoTIFF to create (deleted first if present).</param>
/// <param name="fieldName">Attribute field whose values are burned into the raster.</param>
/// <param name="cellSize">Output raster cell size in layer units.</param>
public static void Rasterize(string inputFeature, string outRaster, string fieldName, int cellSize) {
  // Define pixel size and NoData value of the new raster.
  int rasterCellSize = cellSize;
  const double noDataValue = -9999;
  string outputRasterFile = outRaster;

  // Register the vector drivers and open the input datasource.
  Ogr.RegisterAll();
  DataSource dataSource = Ogr.Open(inputFeature, 0);
  try {
    Layer layer = dataSource.GetLayerByIndex(0);
    Envelope envelope = new Envelope();
    layer.GetExtent(envelope, 0);

    // Compute the output raster resolution from the layer extent.
    int x_res = Convert.ToInt32((envelope.MaxX - envelope.MinX) / rasterCellSize);
    int y_res = Convert.ToInt32((envelope.MaxY - envelope.MinY) / rasterCellSize);
    Console.WriteLine("Extent: " + envelope.MaxX + " " + envelope.MinX + " " + envelope.MaxY + " " + envelope.MinY);
    Console.WriteLine("X resolution: " + x_res);
    // FIX: this line previously printed "X resolution" for the Y value.
    Console.WriteLine("Y resolution: " + y_res);

    // Register the raster drivers.
    Gdal.AllRegister();

    // Delete any stale output so Create starts from a clean file.
    if (File.Exists(outputRasterFile)) {
      File.Delete(outputRasterFile);
    }

    // Create the empty single-band Float64 GeoTIFF.
    OSGeo.GDAL.Driver outputDriver = Gdal.GetDriverByName("GTiff");
    // FIX: the dataset handles below were never released in the original; using blocks
    // guarantee disposal even on exceptions.
    using (Dataset outputDataset = outputDriver.Create(outputRasterFile, x_res, y_res, 1, DataType.GDT_Float64, null)) {
      // Extract the SRS from the input layer and assign it to the output raster.
      string inputShapeSrs;
      SpatialReference spatialRefrence = layer.GetSpatialRef();
      spatialRefrence.ExportToWkt(out inputShapeSrs);
      outputDataset.SetProjection(inputShapeSrs);

      // Geotransform: origin at the top-left corner, north-up, square cells.
      double[] argin = new double[] { envelope.MinX, rasterCellSize, 0, envelope.MaxY, 0, -rasterCellSize };
      outputDataset.SetGeoTransform(argin);

      // Set the NoData value on band 1, then flush so the file can be reopened below.
      Band band = outputDataset.GetRasterBand(1);
      band.SetNoDataValue(noDataValue);
      outputDataset.FlushCache();
    }

    // Feature-to-raster options: burn into band 1.
    int[] bandlist = new int[] { 1 };
    // Fallback burn values; superseded by the ATTRIBUTE option below.
    double[] burnValues = new double[] { 10.0 };

    using (Dataset myDataset = Gdal.Open(outputRasterFile, Access.GA_Update)) {
      // "ALL_TOUCHED=TRUE" could be added here to burn every pixel the geometry touches.
      string[] rasterizeOptions = new string[] { "ATTRIBUTE=" + fieldName };
      Gdal.RasterizeLayer(myDataset, 1, bandlist, layer, IntPtr.Zero, IntPtr.Zero, 1, burnValues, rasterizeOptions,
        new Gdal.GDALProgressFuncDelegate(ProgressFunc), "Raster conversion");
      // FIX: flush so the burned values are actually written before the handle closes.
      myDataset.FlushCache();
    }
  } finally {
    // FIX: release the input datasource (leaked in the original).
    dataSource.Dispose();
  }
}
/// <summary>
/// Creates <paramref name="popSize"/> random symbolic expression trees after pointing every
/// variable symbol of the grammar at the dataset's variables (first column skipped).
/// </summary>
/// <remarks>
/// NOTE(review): <paramref name="minSize"/>, <paramref name="maxFunctionDefinitions"/> and
/// <paramref name="maxFunctionArguments"/> are accepted but not used by this implementation;
/// tree creation uses <paramref name="maxSize"/> with a fixed depth limit of 10 -- confirm
/// whether that is intentional.
/// </remarks>
public static ISymbolicExpressionTree[] CreateRandomTrees(MersenneTwister twister, Dataset dataset, ISymbolicExpressionGrammar grammar, int popSize, int minSize, int maxSize, int maxFunctionDefinitions, int maxFunctionArguments) {
  foreach (Variable variableSymbol in grammar.Symbols.OfType<Variable>()) {
    variableSymbol.VariableNames = dataset.VariableNames.Skip(1);
  }
  var trees = new ISymbolicExpressionTree[popSize];
  for (int index = 0; index < trees.Length; index++) {
    trees[index] = ProbabilisticTreeCreator.Create(twister, grammar, maxSize, 10);
  }
  return trees;
}
/// <summary>
/// Lazily maps each rating to a binary classification label (true when the rating is at
/// least 1).
/// </summary>
private IEnumerable<bool> GetClassificationLabelsFromRatings(Dataset set) {
  // REVIEW: Historically FastTree has this test as >= 1. TLC however
  // generally uses > 0. Consider changing FastTree to be consistent.
  foreach (var rating in set.Ratings) {
    yield return rating >= 1;
  }
}
/// <summary>
/// Inspects the raster bands of <paramref name="ds"/> to choose a matching GDI+ pixel
/// format and creates a Bitmap of the requested size. As a side effect this initializes the
/// shared transfer-state fields (bandMap, channelCount, hasAlpha, isPremultiplied,
/// isIndexed, channelSize, ct, pixelFormat, dataType, pixelSpace) used by the raster
/// reader/writer routines.
/// </summary>
/// <returns>A new Bitmap, or null when the dataset has no raster bands.</returns>
private static Bitmap CreateCompatibleBitmap(Dataset ds, int imageWidth, int imageHeight) {
  if (ds.RasterCount == 0) {
    return null;
  }
  bandMap = new int[4] { 1, 1, 1, 1 };
  channelCount = 1;
  hasAlpha = false;
  isPremultiplied = false;
  isIndexed = false;
  channelSize = 8;
  // Evaluate the bands and find out a proper image transfer format
  for (int i = 0; i < ds.RasterCount; i++) {
    Band band = ds.GetRasterBand(i + 1);
    if (Gdal.GetDataTypeSize(band.DataType) > 8) {
      channelSize = 16;
    }
    // retrieving the premultiplied alpha flag
    string[] metadata = band.GetMetadata("");
    for (int iMeta = 0; iMeta < metadata.Length; iMeta++) {
      if (metadata[iMeta].StartsWith("PREMULTIPLIED_ALPHA")) {
        isPremultiplied = true;
      }
    }
    switch (band.GetRasterColorInterpretation()) {
      case ColorInterp.GCI_AlphaBand:
        channelCount = 4;
        hasAlpha = true;
        bandMap[3] = i + 1;
        break;
      case ColorInterp.GCI_BlueBand:
        if (channelCount < 3) { channelCount = 3; }
        bandMap[0] = i + 1;
        break;
      case ColorInterp.GCI_RedBand:
        if (channelCount < 3) { channelCount = 3; }
        bandMap[2] = i + 1;
        break;
      case ColorInterp.GCI_GreenBand:
        if (channelCount < 3) { channelCount = 3; }
        bandMap[1] = i + 1;
        break;
      case ColorInterp.GCI_PaletteIndex:
        ct = band.GetRasterColorTable();
        isIndexed = true;
        bandMap[0] = i + 1;
        break;
      case ColorInterp.GCI_GrayIndex:
        isIndexed = true;
        bandMap[0] = i + 1;
        break;
      default:
        // we create the bandmap using the dataset ordering by default
        // NOTE(review): bandMap is initialized to all 1s above, so the `== 0` guard can
        // never be true and this branch never assigns -- confirm the intended default
        // mapping before changing it.
        if (i < 4 && bandMap[i] == 0) {
          if (channelCount < i) { channelCount = i; }
          bandMap[i] = i + 1;
        }
        break;
    }
  }
  // find out the pixel format based on the gathered information
  if (isIndexed) {
    pixelFormat = PixelFormat.Format8bppIndexed;
    dataType = DataType.GDT_Byte;
    pixelSpace = 1;
  } else {
    if (channelCount == 1) {
      if (channelSize > 8) {
        pixelFormat = PixelFormat.Format16bppGrayScale;
        dataType = DataType.GDT_Int16;
        pixelSpace = 2;
      } else {
        // Single 8-bit channel is widened to 24bpp RGB.
        pixelFormat = PixelFormat.Format24bppRgb;
        channelCount = 3;
        dataType = DataType.GDT_Byte;
        pixelSpace = 3;
      }
    } else {
      if (hasAlpha) {
        if (channelSize > 8) {
          // FIX: the premultiplied / non-premultiplied 64bpp formats were swapped in the
          // original, inconsistent with the 32bpp branch below (PArgb = premultiplied).
          if (isPremultiplied) {
            pixelFormat = PixelFormat.Format64bppPArgb;
          } else {
            pixelFormat = PixelFormat.Format64bppArgb;
          }
          dataType = DataType.GDT_UInt16;
          pixelSpace = 8;
        } else {
          if (isPremultiplied) {
            pixelFormat = PixelFormat.Format32bppPArgb;
          } else {
            pixelFormat = PixelFormat.Format32bppArgb;
          }
          dataType = DataType.GDT_Byte;
          pixelSpace = 4;
        }
        channelCount = 4;
      } else {
        if (channelSize > 8) {
          pixelFormat = PixelFormat.Format48bppRgb;
          dataType = DataType.GDT_UInt16;
          pixelSpace = 6;
        } else {
          pixelFormat = PixelFormat.Format24bppRgb;
          dataType = DataType.GDT_Byte;
          pixelSpace = 3;
        }
        channelCount = 3;
      }
    }
  }
  // Create a Bitmap to store the GDAL image in
  return new Bitmap(imageWidth, imageHeight, pixelFormat);
}
/// <summary>
/// Trains a multi-class LightGBM model twice on the same synthetic data -- once through
/// ML.NET and once through the native LightGBM C API -- and returns both sets of outputs
/// (ML.NET predictions, native raw scores and probabilities, and the native model string)
/// so callers can compare them.
/// </summary>
private void LightGbmHelper(bool useSoftmax, out string modelString, out List<GbmExample> mlnetPredictions, out double[] lgbmRawScores, out double[] lgbmProbabilities) {
  // Prepare data and train LightGBM model via ML.NET
  // Training matrix. It contains all feature vectors.
  var dataMatrix = new float[_rowNumber * _columnNumber];
  // Labels for multi-class classification
  var labels = new uint[_rowNumber];
  // Training list, which is equivalent to the training matrix above.
  var dataList = new List<GbmExample>();
  for (/*row index*/ int i = 0; i < _rowNumber; ++i) {
    int featureSum = 0;
    var featureVector = new float[_columnNumber];
    for (/*column index*/ int j = 0; j < _columnNumber; ++j) {
      // Deterministic synthetic feature value in [0, 9].
      int featureValue = (j + i * _columnNumber) % 10;
      featureSum += featureValue;
      dataMatrix[j + i * _columnNumber] = featureValue;
      featureVector[j] = featureValue;
    }
    // Label derived from the feature sum so the mapping is learnable.
    labels[i] = (uint)featureSum % _classNumber;
    dataList.Add(new GbmExample { Features = featureVector, Label = labels[i], Score = new float[_classNumber] });
  }
  var mlContext = new MLContext(seed: 0, conc: 1);
  var dataView = ComponentCreation.CreateDataView(mlContext, dataList);
  int numberOfTrainingIterations = 3;
  var gbmTrainer = new LightGbmMulticlassTrainer(mlContext, labelColumn: "Label", featureColumn: "Features", numBoostRound: numberOfTrainingIterations,
    advancedSettings: s => { s.MinDataPerGroup = 1; s.MinDataPerLeaf = 1; s.UseSoftmax = useSoftmax; });
  var gbm = gbmTrainer.Fit(dataView);
  var predicted = gbm.Transform(dataView);
  mlnetPredictions = new List<GbmExample>(predicted.AsEnumerable<GbmExample>(mlContext, false));
  // Convert training to LightGBM's native format and train LightGBM model via its APIs
  // Convert the whole training matrix to CSC format required by LightGBM interface. Notice that the training matrix
  // is dense so this conversion is simply a matrix transpose.
  double[][] sampleValueGroupedByColumn = new double[_columnNumber][];
  int[][] sampleIndicesGroupedByColumn = new int[_columnNumber][];
  int[] sampleNonZeroCntPerColumn = new int[_columnNumber];
  for (int j = 0; j < _columnNumber; ++j) {
    // Allocate memory for the j-th column in the training matrix
    sampleValueGroupedByColumn[j] = new double[_rowNumber];
    sampleIndicesGroupedByColumn[j] = new int[_rowNumber];
    sampleNonZeroCntPerColumn[j] = _rowNumber;
    // Copy the j-th column in training matrix
    for (int i = 0; i < _rowNumber; ++i) {
      // data[j + i * _columnNumber] is the value at the j-th column and the i-th row.
      sampleValueGroupedByColumn[j][i] = dataMatrix[j + i * _columnNumber];
      // Row index of the assigned value.
      sampleIndicesGroupedByColumn[j][i] = i;
    }
  }
  // LightGBM only accepts float labels.
  float[] floatLabels = new float[_rowNumber];
  for (int i = 0; i < _rowNumber; ++i) {
    floatLabels[i] = labels[i];
  }
  // Allocate LightGBM data container (called Dataset in LightGBM world).
  var gbmDataSet = new Dataset(sampleValueGroupedByColumn, sampleIndicesGroupedByColumn, _columnNumber, sampleNonZeroCntPerColumn, _rowNumber, _rowNumber, "", floatLabels);
  // Push training examples into LightGBM data container.
  gbmDataSet.PushRows(dataMatrix, _rowNumber, _columnNumber, 0);
  // Probability output.
  lgbmProbabilities = new double[_rowNumber * _classNumber];
  // Raw score.
  lgbmRawScores = new double[_rowNumber * _classNumber];
  // Get parameters used in ML.NET's LightGBM
  var gbmParams = gbmTrainer.GetGbmParameters();
  // Call LightGBM C-style APIs to do prediction.
  modelString = null;
  using (var ch = (mlContext as IChannelProvider).Start("Training LightGBM..."))
  using (var pch = (mlContext as IProgressChannelProvider).StartProgressChannel("Training LightGBM...")) {
    var host = (mlContext as IHostEnvironment).Register("Training LightGBM...");
    var gbmNative = WrappedLightGbmTraining.Train(ch, pch, gbmParams, gbmDataSet, numIteration: numberOfTrainingIterations);
    int nativeLength = 0;
    unsafe {
      // Pin the managed buffers so the native library can write into them directly.
      fixed(float *data = dataMatrix)
      fixed(double *result0 = lgbmProbabilities)
      fixed(double *result1 = lgbmRawScores) {
        // Normal prediction: per-class probabilities.
        WrappedLightGbmInterface.BoosterPredictForMat(gbmNative.Handle, (IntPtr)data, WrappedLightGbmInterface.CApiDType.Float32,
          _rowNumber, _columnNumber, 1, (int)WrappedLightGbmInterface.CApiPredictType.Normal, numberOfTrainingIterations, "", ref nativeLength, result0);
        // Raw prediction: scores before the link function is applied.
        WrappedLightGbmInterface.BoosterPredictForMat(gbmNative.Handle, (IntPtr)data, WrappedLightGbmInterface.CApiDType.Float32,
          _rowNumber, _columnNumber, 1, (int)WrappedLightGbmInterface.CApiPredictType.Raw, numberOfTrainingIterations, "", ref nativeLength, result1);
      }
      modelString = gbmNative.GetModelString();
    }
  }
}
/// <summary>
/// Command-line entry point: opens the GDAL dataset given as args[0], scales its R/G/B
/// channels by the contrast ratio given as args[1] via a GDI+ color matrix, and writes the
/// result back into the dataset.
/// </summary>
public static void Main(string[] args) {
  if (args.Length != 2) {
    // NOTE(review): usage() is assumed to terminate the process; otherwise args[1]
    // below would throw -- confirm.
    usage();
  }
  // Using early initialization of System.Console
  Console.WriteLine("Adjusting the image: " + args[0]);
  try {
    float contrastRatio = float.Parse(args[1]);
    /* Register driver(s). */
    Gdal.AllRegister();
    /* Open dataset. */
    Dataset ds = Gdal.Open(args[0], Access.GA_Update);
    if (ds == null) {
      Console.WriteLine("Can't open " + args[0]);
      System.Environment.Exit(-1);
    }
    // FIX: the bitmaps, Graphics and ImageAttributes are IDisposable and were all
    // leaked in the original; using blocks release them deterministically.
    using (Bitmap bmp = CreateCompatibleBitmap(ds, ds.RasterXSize, ds.RasterYSize)) {
      LoadBitmapDirect(ds, bmp, 0, 0, ds.RasterXSize, ds.RasterYSize, ds.RasterXSize, ds.RasterYSize, 0);
      using (Bitmap newBitmap = (Bitmap)bmp.Clone()) {
        // Contrast matrix: scales R, G and B by contrastRatio, leaves alpha untouched.
        float[][] colormatrix = new float[][] {
          new float[] { contrastRatio, 0, 0, 0, 0 },
          new float[] { 0, contrastRatio, 0, 0, 0 },
          new float[] { 0, 0, contrastRatio, 0, 0 },
          new float[] { 0, 0, 0, 1, 0 },
          new float[] { 0, 0, 0, 0, 1 }
        };
        ColorMatrix colorMatrix = new ColorMatrix(colormatrix);
        using (ImageAttributes attributes = new ImageAttributes())
        using (Graphics g = Graphics.FromImage(newBitmap)) {
          // set the color matrix attribute and redraw the original through it
          attributes.SetColorMatrix(colorMatrix);
          g.DrawImage(bmp, new Rectangle(0, 0, bmp.Width, bmp.Height), 0, 0, bmp.Width, bmp.Height, GraphicsUnit.Pixel, attributes);
        }
        SaveBitmapDirect(ds, newBitmap, 0, 0, ds.RasterXSize, ds.RasterYSize, ds.RasterXSize, ds.RasterYSize);
      }
    }
    ds.FlushCache();
    // FIX: release the dataset handle (leaked in the original).
    ds.Dispose();
  } catch (Exception e) {
    Console.WriteLine("Application error: " + e.Message);
  }
}
public TimeSeriesFrameBuilder(Dataset dataset, IReadOnlyDictionary<string, string> dimensionIdsMap, Func<string, Dictionary<string, string>> dimensionMappingFactory)
{
    // Capture the dataset's dimension ids in declaration order.
    var dimensionNames = new List<string>();
    foreach (var dimension in dataset.Dimensions)
    {
        dimensionNames.Add(dimension.Id);
    }
    _dimensions = dimensionNames.ToArray();

    // Index each dimension name by its position.
    var indexByDimension = new Dictionary<string, int>();
    for (var pos = 0; pos < _dimensions.Length; pos++)
    {
        indexByDimension[_dimensions[pos]] = pos;
    }

    // Fold in caller-supplied aliases: an alias resolves to the index of the
    // dimension it names, unless the alias itself already names a dimension
    // (or an earlier alias) present in the map.
    if (dimensionIdsMap != null)
    {
        foreach (var alias in dimensionIdsMap)
        {
            if (indexByDimension.ContainsKey(alias.Key))
            {
                continue;
            }
            int aliasTarget;
            if (indexByDimension.TryGetValue(alias.Value, out aliasTarget))
            {
                indexByDimension[alias.Key] = aliasTarget;
            }
        }
    }
    _dimensionIdsMap = indexByDimension;

    // Attributes are the dimensions followed by the time-series attributes.
    var attributeNames = new List<string>(_dimensions);
    if (dataset.TimeSeriesAttributes != null)
    {
        foreach (var attribute in dataset.TimeSeriesAttributes)
        {
            attributeNames.Add(attribute.Name);
        }
    }
    _attributes = attributeNames.ToArray();

    // The attribute index map extends the dimension map (aliases included)
    // with the trailing non-dimension attributes.
    var indexByAttribute = new Dictionary<string, int>(indexByDimension);
    for (var pos = _dimensions.Length; pos < _attributes.Length; pos++)
    {
        indexByAttribute[_attributes[pos]] = pos;
    }
    _attributeIds = indexByAttribute;

    // One key-translation map per dimension; a null from the factory means "no mapping".
    _dimensionKeyMaps = new Dictionary<string, string>[_dimensions.Length];
    for (var pos = 0; pos < _dimensionKeyMaps.Length; pos++)
    {
        _dimensionKeyMaps[pos] = dimensionMappingFactory(_dimensions[pos]) ?? new Dictionary<string, string>();
    }

    _values = new Dictionary<TimeSeriesId, TimeSeriesValues>();
    _timeRangeCache = new Dictionary<Tuple<DateTime, DateTime, Frequency>, IReadOnlyList<DateTime>>();
}
public static ResultObject EvaluateACOMinerDR2_WekaClassifier(string algorithm, string trainingSetPath, string testSetPath, Dataset trainingSet, bool attributeFirst)
{
    // Assemble the Weka-backed classification problem for the ACO miner.
    Classifier baseClassifier = WekaNETBridge.WekaClassification.GetWekaClassifier(algorithm, trainingSetPath);
    var heuristic = new ADRHeuristicCalculator();
    var componentInvalidator = new DRComponentInvalidator();
    var classification = new WekaNETBridge.WekaClassification(trainingSetPath, baseClassifier);
    var qualityEvaluator = new WekaClassificationQualityEvaluator(classification, false);
    var search = new DRLocalSearch(qualityEvaluator);
    var problem = new Problem<DRComponent>(componentInvalidator, heuristic, qualityEvaluator, search);

    // Wire the progress callbacks into both colonies of the two-stage miner.
    var miner = new ADRMiner2(maxIterations, colonySize, convergenceIterations, problem, attributeFirst, false, trainingSet);
    miner.ACO1.OnPostAntSolutionContruction += OnPostAntSolutionContruction;
    miner.ACO1.OnPostColonyIteration += OnPostColonyIteration;
    miner.ACO2.OnPostAntSolutionContruction += OnPostAntSolutionContruction;
    miner.ACO2.OnPostColonyIteration += OnPostColonyIteration;

    // Train, then score the resulting classifier on the held-out test set.
    WekaNETBridge.WekaClassifier trainedClassifier = miner.CreateWekaClassifier();
    double quality = WekaNETBridge.WekaClassification.EvaluateClassifier(trainedClassifier, testSetPath);

    // Reduction metrics from the best solution found by the miner.
    double attributeReduction = miner.BestSolution.AttributesToRemove().Length;
    double instanceReduction = miner.BestSolution.InstanceCount() / (double)trainingSet.Size;

    return new ResultObject()
    {
        Quality = quality,
        AttributeReduction = attributeReduction,
        // NOTE: the misspelled property name is part of the external DTO and
        // cannot be changed here.
        InstanceReduciton = instanceReduction
    };
}
private static async Task<string> GetOrCreateDataset(PowerBIClient powerBIClient, Dataset dataset, PowerBIIdentity[] existingDatasets, CancellationToken ct)
{
    // Look for a dataset already published under the same name.
    var match = existingDatasets.SingleOrDefault(d => d.Name == dataset.Name);

#if !REUSE_EXISTING
    // Default build: always start from a clean slate — drop any existing
    // dataset with this name so it gets recreated below.
    if (match != null)
    {
        await powerBIClient.DeleteDatasetAsync(match.Id, ct).ConfigureAwait(false);
        match = null;
    }
#endif

    // Reuse the surviving dataset when one exists; otherwise create a fresh one.
    if (match != null)
    {
        return match.Id;
    }

    var created = await powerBIClient.CreateDatasetAsync(dataset, DefaultRetentionPolicy.None, ct).ConfigureAwait(false);
    return created.Id;
}
private static void SaveBitmapPaletteDirect(Dataset ds, string filename, int iOverview)
{
    // Renders band 1 of the dataset (or the requested overview) into an 8-bit
    // indexed Bitmap using the band's RGB color table, then saves it to <filename>.

    // Get the GDAL Band objects from the Dataset
    Band band = ds.GetRasterBand(1);
    if (iOverview >= 0 && band.GetOverviewCount() > iOverview)
    {
        band = band.GetOverview(iOverview);
    }

    ColorTable ct = band.GetRasterColorTable();
    if (ct == null)
    {
        Console.WriteLine(" Band has no color table!");
        return;
    }
    if (ct.GetPaletteInterpretation() != PaletteInterp.GPI_RGB)
    {
        Console.WriteLine(" Only RGB palette interp is supported by this sample!");
        return;
    }

    // Get the width and height of the Dataset
    int width = band.XSize;
    int height = band.YSize;

    DateTime start = DateTime.Now;

    // Create a Bitmap to store the GDAL image in.
    // Fix: the Bitmap is IDisposable and was previously never disposed (leak).
    // Also removed a redundant full-raster ReadRaster into a throwaway byte[]
    // buffer — the pixel data is read directly into the locked bits below.
    using (Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format8bppIndexed))
    {
        BitmapData bitmapData = bitmap.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.ReadWrite, PixelFormat.Format8bppIndexed);
        try
        {
            // Copy the GDAL color table into the bitmap palette
            // (c4 = alpha, c1/c2/c3 = R/G/B).
            int iCol = ct.GetCount();
            ColorPalette pal = bitmap.Palette;
            for (int i = 0; i < iCol; i++)
            {
                ColorEntry ce = ct.GetColorEntry(i);
                pal.Entries[i] = Color.FromArgb(ce.c4, ce.c1, ce.c2, ce.c3);
            }
            bitmap.Palette = pal;

            // Use GDAL raster reading methods to read the image data directly
            // into the Bitmap, honoring the bitmap's row stride.
            int stride = bitmapData.Stride;
            IntPtr buf = bitmapData.Scan0;
            band.ReadRaster(0, 0, width, height, buf, width, height, DataType.GDT_Byte, 1, stride);

            TimeSpan renderTime = DateTime.Now - start;
            Console.WriteLine("SaveBitmapDirect fetch time: " + renderTime.TotalMilliseconds + " ms");
        }
        finally
        {
            bitmap.UnlockBits(bitmapData);
        }

        bitmap.Save(filename);
    }
}
/// <summary>
/// Creates a wrapper around the dataset to be quality-verified.
/// </summary>
/// <param name="dataset">The dataset to store; must not be null.</param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="dataset"/> is null.
/// </exception>
public QualityVerificationDataset([NotNull] Dataset dataset)
{
    // Enforce the declared [NotNull] contract at the boundary instead of
    // failing later with a NullReferenceException on first use.
    if (dataset == null)
    {
        throw new ArgumentNullException(nameof(dataset));
    }
    _dataset = dataset;
}
/// <summary>
/// Convenience overload that forwards to the full <c>CreateRandomTrees</c>
/// with fixed defaults (1, 200, 3, 3) — presumably size/depth limits for the
/// generated trees; confirm against the full overload's parameter list.
/// </summary>
public static ISymbolicExpressionTree[] CreateRandomTrees(MersenneTwister twister, Dataset dataset, ISymbolicExpressionGrammar grammar, int popSize)
    => CreateRandomTrees(twister, dataset, grammar, popSize, 1, 200, 3, 3);
// Synchronizes the persisted dataset's variable references with the selection
// state in the incoming DTO (add selected variables, remove deselected ones),
// then saves via the unit of work. Returns whatever the save call reports.
public string UpdateDataset(DatasetDTO datasetDTO)
{
    Dataset datasetToUpdate = GetActivityDataset(datasetDTO.Id);
    datasetToUpdate.State = datasetDTO.State;

    // All variable definitions already registered for this project/study.
    List<VariableDefinition> variableDefsOfStudy = getVariableDefinitionsOfStudy(datasetDTO.ProjectId).ToList();

    // Names of the variables the dataset currently references (snapshot taken
    // before the loop mutates datasetToUpdate.Variables).
    var datasetVarsToUpdate = new HashSet<string>(
        datasetToUpdate.Variables.Select(c => c.VariableDefinition.Name));

    foreach (var variableDto in datasetDTO.Variables)
    {
        if (variableDto.isSelected)
        {
            //TODO: NOTE NO UPDATE IS ASSUMED other than adding or removing the whole variable
            //WILL HAVE tO CHANGE WHEN UPDATING CONTROLLED VOCAB
            if (!datasetVarsToUpdate.Contains(variableDto.Name))
            {
                //add VarDEF and addVarREF
                VariableDefinition varDef;
                // Reuse an existing definition matched by accession; otherwise
                // build a brand-new one from the DTO.
                varDef = variableDefsOfStudy.SingleOrDefault(d => d.Accession.Equals(variableDto.Accession));
                if (varDef == null)
                {
                    varDef = new VariableDefinition();
                    varDef.Accession = variableDto.Accession;
                    varDef.Name = variableDto.Name;
                    varDef.Label = variableDto.Label;
                    varDef.Description = variableDto.Description;
                    varDef.IsComputed = variableDto.IsComputed;
                    varDef.DataType = variableDto.DataType;
                    varDef.IsCurated = variableDto.IsCurated;
                    varDef.RoleId = variableDto.RoleId;
                    varDef.ProjectId = datasetDTO.ProjectId;
                    varDef.VariableTypeStr = variableDto.varType;
                    if (variableDto.IsComputed)
                    {
                        varDef.IsComputed = true;
                        // Persist the computed-variable expression as a
                        // comma-joined string of its parts.
                        varDef.ComputedVarExpression = variableDto.ExpressionList.Select(t => t.val).Aggregate((i, j) => i + ',' + j);
                    }
                }
                // Link the definition into this dataset via a new reference.
                VariableReference varRef = new VariableReference();
                varRef.OrderNumber = variableDto.OrderNumber;
                varRef.IsRequired = variableDto.IsRequired;
                varRef.KeySequence = variableDto.KeySequence;
                varRef.VariableDefinition = varDef;
                datasetToUpdate.Variables.Add(varRef);
            }
            // Keep the reference's ordering in sync with the DTO for both
            // newly added and pre-existing variables.
            datasetToUpdate.Variables.First(v => v.VariableDefinition.Name == variableDto.Name).OrderNumber = variableDto.OrderNumber;
        }
        else if (datasetVarsToUpdate.Contains(variableDto.Name))
        {
            //remove variable from dataset
            // NOTE(review): removal matches by VariableDefinitionId while the
            // add path matches by Name — presumably variableDto.Id carries the
            // definition id; confirm they always agree.
            VariableReference var = datasetToUpdate.Variables.Single(v =>
                v.VariableDefinitionId.Equals(variableDto.Id));
            datasetToUpdate.Variables.Remove(var);
        }
    }
    _datasetRepository.Update(datasetToUpdate);
    return(_dataServiceUnit.Save());
}
/// <summary>
/// Retrieves datasetDTO for selected activity including a union of VarDefs and TemplateVariables from the relevant Domain
/// //TODO: This method should change once users are allowed to add their own Variables
/// In this case VarDEFs should take precedence and the issue of adding CVterms to VarDEFs should be settled
/// </summary>
/// <param name="datasetId">Primary key of the dataset to load.</param>
/// <returns>A DatasetDTO combining the dataset's template fields with its variable references.</returns>
public DatasetDTO GetActivityDatasetDTO(int datasetId)
{
    DatasetDTO dto = new DatasetDTO();
    // Eager-load template fields (with controlled vocabulary cross-refs),
    // variable definitions and the parent activity in a single query.
    Dataset ds = _datasetRepository.FindSingle(
        d => d.Id.Equals(datasetId),
        new List<string>() { "Variables.VariableDefinition", "Template.Fields.ControlledVocabulary.Xref.DB", "Activity" });

    // Header fields come from the dataset's template and activity.
    dto.Id = ds.Id; //Set DatasetDTO id to Dataset.Id (int)
    dto.Class = ds.Template.Class;
    dto.Description = ds.Template.Description;
    dto.Name = ds.Template.Domain;
    dto.DomainId = ds.Template.Id;
    dto.Structure = ds.Template.Structure;
    dto.Code = ds.Template.Code;
    dto.ProjectId = ds.Activity.ProjectId;

    //ds.Domain.Variables.Where(v=> !v.IsGeneric)
    // First pass: one DTO per template field, in template order; a field is
    // flagged isSelected when the dataset references a variable of that name.
    foreach (DatasetTemplateField vt in ds.Template.Fields.OrderBy(v => v.Order)) //foreach (var vt in ds.Variables)
    {
        DatasetVariableDTO dv = new DatasetVariableDTO();
        dv.Name = vt.Name;
        dv.Description = vt.Description;
        dv.Label = vt.Label;
        dv.Accession = vt.Id;
        dv.DataType = vt.DataType;
        dv.IsCurated = true; // hardcoded: template-sourced fields are always marked curated
        dv.RoleId = vt.RoleId;
        if (vt.ControlledVocabulary != null)
        {
            dv.DictionaryName = vt.ControlledVocabulary.Name;
            dv.DictionaryDefinition = vt.ControlledVocabulary.Definition;
            dv.DictionaryXrefURL = vt.ControlledVocabulary.Xref.DB.UrlPrefix + vt.ControlledVocabulary.Xref.Accession;
        }
        // Overlay reference-level data when the dataset already uses this field.
        var vr = ds.Variables.SingleOrDefault(v => v.VariableDefinition.Name.Equals(vt.Name));
        if (vr != null)
        {
            dv.IsRequired = vr.IsRequired;
            dv.KeySequence = vr.KeySequence;
            dv.OrderNumber = vt.Order;
            dv.Id = vr.VariableDefinitionId;
            dv.isSelected = true;
        }
        dto.Variables.Add(dv);
    }

    // Second pass: append dataset variables whose accession matched no
    // template field (non-template variables already attached to the dataset).
    foreach (VariableReference vr in ds.Variables)
    {
        if (!dto.Variables.Exists(v =>
            v.Accession.Equals(vr.VariableDefinition.Accession)))
        {
            DatasetVariableDTO dv = new DatasetVariableDTO();
            dv.Name = vr.VariableDefinition.Name;
            dv.Description = vr.VariableDefinition.Description;
            dv.Label = vr.VariableDefinition.Label;
            dv.Accession = vr.VariableDefinition.Accession;
            dv.DataType = vr.VariableDefinition.DataType;
            dv.IsCurated = vr.VariableDefinition.IsCurated;
            dv.IsComputed = vr.VariableDefinition.IsComputed ?? false;
            dv.RoleId = vr.VariableDefinition.RoleId;
            dv.IsRequired = vr.IsRequired;
            dv.KeySequence = vr.KeySequence;
            // NOTE(review): every appended variable gets the same OrderNumber
            // (template field count + 1) — presumably "after all template
            // fields"; confirm whether distinct ordering was intended.
            dv.OrderNumber = ds.Template.Fields.Count + 1;
            dv.Id = vr.VariableDefinitionId;
            dv.isSelected = true;
            dto.Variables.Add(dv);
        }
    }
    return(dto);
}