/// <summary>
/// Builds one dataset descriptor per entity type mapped in the current DbContext.
/// </summary>
/// <returns>A read-only collection of <see cref="DatasetInfo"/>, one item per table.</returns>
public IReadOnlyCollection<DatasetInfo> GetDatasets()
{
    var entityTypes = DbContext.Model.GetEntityTypes();
    var result = new List<DatasetInfo>(entityTypes.Count());

    foreach (var entity in entityTypes)
    {
        var dataset = new DatasetInfo(entity.GetTableName(), entity.GetSchema());

        // Mirror every mapped property as a column, flagging primary-key members.
        foreach (var column in entity.GetProperties())
        {
            dataset.AddColumn(new ColumnInfo(column.GetColumnName(), column.ClrType, column.IsPrimaryKey()));
        }

        result.Add(dataset);
    }

    return result.AsReadOnly();
}
/// <summary>Builds a SELECT statement over every member of the given dataset.</summary>
public string QueryRecords(DatasetInfo schema)
{
    var builder = new SqlBuilder(adapter);
    builder.Select(schema.Members).From(schema.Name);
    return builder.ToString();
}
// Calls the Web API to extract data for the given dataset and caches the result
// in session state. Returns "success", "Data Not found.", or an exception message.
// NOTE(review): creating a new HttpClient per call risks socket exhaustion under
// load; consider a shared instance or IHttpClientFactory.
// NOTE(review): the 'model' parameter is not used by this implementation — confirm intent.
private string GetDatafromWebApi(DatasetInfo model)
{
    string info = HttpContext.Session.GetString("info");
    string apiUrl = _configuration["WebApis:AVISTEDDataExtractor"] + info;
    try
    {
        using (HttpClient client = new HttpClient())
        {
            client.BaseAddress = new Uri(apiUrl);
            client.DefaultRequestHeaders.Accept.Clear();
            client.DefaultRequestHeaders.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/json"));

            // Sync-over-async is kept to preserve the method signature.
            HttpResponseMessage response = client.GetAsync(apiUrl).GetAwaiter().GetResult();

            // Read the body once; the original read the content twice into two locals.
            string data = response.Content.ReadAsStringAsync().GetAwaiter().GetResult();
            if (response.IsSuccessStatusCode)
            {
                List<Dictionary<string, string>> ls = JsonConvert.DeserializeObject<List<Dictionary<string, string>>>(data);
                HttpContext.Session.SetString("Data", JsonConvert.SerializeObject(ls));
                return "success";
            }
            else
            {
                return "Data Not found.";
            }
        }
    }
    catch (Exception ex)
    {
        return ex.Message;
    }
}
/// <summary>Builds a DELETE statement for the table, filtered by its primary key.</summary>
public string DeleteRecord(DatasetInfo table)
{
    var builder = new SqlBuilder(adapter);
    var target = new SchemaObject(table.Name);
    builder.Delete(target, GetKeyFilter(table));
    return builder.ToString();
}
/// <summary>
/// Finalizes the updating process: persists pending changes and clears the current table.
/// </summary>
public void FinishUpdating()
{
    DbContext.SaveChanges();
    // Log BEFORE clearing CurrentTable. The original set CurrentTable = null and
    // then dereferenced CurrentTable.Name in the log call, which throws
    // NullReferenceException whenever Logger is non-null.
    Logger?.LogDebug("Finish updating: " + CurrentTable.Name);
    CurrentTable = null;
}
// Maps a DatasetInfo onto the API response model (id and data type only).
private Dataset CreateDatasetResponse(DatasetInfo dataset)
{
    var response = new Dataset();
    response.Id = dataset.Id;
    response.DataType = dataset.DataType;
    return response;
}
/// <summary>Builds a filtered SELECT over all members of the dataset.</summary>
public SqlQuery QueryRecords(DatasetInfo dataset, Expression expression)
{
    var builder = new SqlBuilder(adapter);
    builder.Select(dataset.Members);
    builder.From(dataset.Name);
    builder.Where(expression);
    return new SqlQuery(builder.ToString(), builder.Parameters);
}
/// <summary>Builds an INSERT statement covering all non-generated columns of the table.</summary>
public string InsertRecord(DatasetInfo table)
{
    var sql = new SqlBuilder(adapter);
    // Reuse the filtered column list; the original computed this local and then
    // re-evaluated the identical Where() inline, leaving the variable unused.
    var insertColumns = table.Members.Where(c => !c.IsGenerated);
    sql.InsertInto(new SchemaObject(table.Name), members: insertColumns);
    return sql.ToString();
}
/// <summary>Builds an INSERT for non-generated columns followed by a select of the new identity value.</summary>
public string InsertRecordAndReturnIdentity(DatasetInfo table)
{
    var nonGenerated = table.Members.Where(m => !m.IsGenerated);
    var builder = new SqlBuilder(adapter);
    return builder
        .InsertInto(new SchemaObject(table.Name), nonGenerated)
        .SelectLastInsertId()
        .ToString();
}
// Reads the full "Latitude" dataset from the HDF file into a [band, row, col] buffer,
// refreshing m_latInfo with the dataset's current metadata.
private float[,,] ReadLat()
{
    m_latInfo = m_hdfOperator.GetDatasetInfo("Latitude", "/");
    var buffer = new float[m_latInfo.band, m_latInfo.row, m_latInfo.col];
    m_hdfOperator.GetDataset("Latitude", "/", buffer, m_latInfo.type);
    return buffer;
}
/// <summary>
/// A class to manage (load, save) files being part of a single examination BIDS package.
/// </summary>
/// <param name="rootFolder">Path to the folder where the subject folder persists.</param>
/// <param name="subject">Determines the subject folder and is part of file names.</param>
/// <param name="session">Determines the session folder and is part of file names. May be null; then the session folder is not considered and file names do not refer to a session.</param>
/// <param name="task">Part of file names.</param>
public BidsPackage(string rootFolder, string subject, string? session, string task)
{
    // Validate every component before constructing any state.
    ThrowExceptionIfInvalidRootFolder(rootFolder);
    ThrowExceptionIfInvalidSubject(subject);
    ThrowExceptionIfInvalidSession(session);
    ThrowExceptionIfInvalidTask(task);

    _datasetInfo = new DatasetInfo(rootFolder, subject, session, task);
    RootFolder = new RootFolder(_datasetInfo);
}
/// <summary>
/// Exports the dataset's content to XML.
/// </summary>
/// <param name="dataReader">The data reader which reads the dataset's content.</param>
/// <param name="outStream">The output stream.</param>
/// <param name="dataset">
/// An instance of the <see cref="DatasetInfo"/> class which represents basic table information.
/// Can be omitted if you export only one table.
/// </param>
public void ExportDataset(IDataReader dataReader, Stream outStream, DatasetInfo dataset = null)
{
    _logger?.LogInformation("Start saving dataset: " + dataset?.Name);

    // Capture the column names once, up front.
    var columns = new string[dataReader.FieldCount];
    for (int i = 0; i < dataReader.FieldCount; i++) {
        columns[i] = dataReader.GetName(i);
    }

    using (var writer = new XmlTextWriter(outStream, Encoding)) {
        writer.Formatting = Formatting.Indented;
        writer.WriteStartDocument();

        // Document layout: <Dataset name=...><Schema>...</Schema><Data><Row>...</Row></Data></Dataset>
        writer.WriteStartElement("Dataset");
        if (!string.IsNullOrEmpty(dataset?.Name)) {
            writer.WriteAttributeString("name", dataset.Name);
        }

        writer.WriteStartElement("Schema");
        WriteDatasetSchema(writer, dataReader);
        writer.WriteEndElement();

        writer.WriteStartElement("Data");
        while (dataReader.Read()) {
            _logger?.LogDebug("Start writting row.");
            writer.WriteStartElement("Row");
            for (var i = 0; i < dataReader.FieldCount; i++) {
                var column = columns[i];
                var value = dataReader.GetValue(i);
                // DBNull columns are skipped entirely rather than written as empty elements.
                if (value.GetType() != typeof(DBNull)) {
                    writer.WriteStartElement("Col");
                    writer.WriteAttributeString("n", column);
                    writer.WriteValue(value);
                    writer.WriteEndElement();
                }
                _logger?.LogDebug($"Column={column}; Value={value}");
            }
            writer.WriteEndElement();
            _logger?.LogDebug("Finish writting row.");
        }
        writer.WriteEndElement();   //Data
        writer.WriteEndElement();   //Dataset
        writer.WriteEndDocument();

        _logger?.LogInformation("Finish saving dataset: " + dataset?.Name);
    }
}
/// <summary>
/// Starts the updating process for the specified table.
/// </summary>
/// <param name="table">The table.</param>
/// <exception cref="DbContextBridgeException">Updating is not finished. Call FinishUpdating() before start another one.</exception>
public void StartUpdating(DatasetInfo table)
{
    // Only one table may be in the updating state at a time.
    if (!(CurrentTable is null))
    {
        // Fixed typo in the error message ("finised" -> "finished").
        throw new DbContextBridgeException("Updating is not finished. Call FinishUpdating() before start another one.");
    }
    CurrentTable = table;
    Logger?.LogDebug("Start updating: " + CurrentTable.Name);
}
// Notifies the SignalR hub that a dataset was updated. Fire-and-forget by design.
public void DatasetUpdated(DatasetInfo datasetInfo)
{
    try
    {
        // The invocation is not awaited, so the surrounding try/catch can only
        // observe synchronous faults. Attach a continuation so asynchronous
        // failures are logged instead of being silently lost.
        _hubConnection.InvokeAsync("DatasetUpdated", datasetInfo.OwnerId, datasetInfo.RepositoryId, datasetInfo)
            .ContinueWith(
                t => _log.Error(t.Exception, "Error notifying SignalR hub DatasetUpdated method"),
                System.Threading.Tasks.TaskContinuationOptions.OnlyOnFaulted);
    }
    catch (Exception ex)
    {
        _log.Error(ex, "Error notifying SignalR hub DatasetUpdated method");
    }
}
// Computes, per aggregation unit, a partial buffer of aggregated values for the
// given dataset chunk. `status` marks which raw samples are valid.
// Generic over the unmanaged sample type T.
private Dictionary<AggregationUnit, double[]> ApplyAggregationFunction<T>(DatasetInfo dataset, T[] data, byte[] status, List<AggregationUnit> aggregationUnits) where T : unmanaged
{
    // Fraction of invalid samples above which a window is treated as NaN.
    var nanLimit = 0.99;
    // Lazily-built double view of `data` with the status mask applied;
    // shared by all non-bitwise aggregation methods below.
    var dataset_double = default(double[]);
    var partialBuffersMap = new Dictionary<AggregationUnit, double[]>();

    foreach (var unit in aggregationUnits)
    {
        var aggregation = unit.Aggregation; // NOTE(review): unused local, kept as-is.
        var period = unit.Period;
        var method = unit.Method;
        var argument = unit.Argument;
        // Number of raw samples contained in one aggregation window of `period` seconds.
        var sampleCount = dataset.GetSampleRate(ensureNonZeroIntegerHz: true).SamplesPerSecondAsUInt64 * (ulong)period;

        switch (unit.Method)
        {
            // Numeric aggregations operate on the status-filtered double buffer.
            case AggregationMethod.Mean:
            case AggregationMethod.MeanPolar:
            case AggregationMethod.Min:
            case AggregationMethod.Max:
            case AggregationMethod.Std:
            case AggregationMethod.Rms:
            case AggregationMethod.SampleAndHold:
            case AggregationMethod.Sum:

                if (dataset_double == null)
                {
                    dataset_double = BufferUtilities.ApplyDatasetStatus<T>(data, status);
                }

                partialBuffersMap[unit] = this.ApplyAggregationFunction(method, argument, (int)sampleCount, dataset_double, nanLimit, _logger);
                break;

            // Bitwise aggregations consume the raw data plus the status buffer directly.
            case AggregationMethod.MinBitwise:
            case AggregationMethod.MaxBitwise:

                partialBuffersMap[unit] = this.ApplyAggregationFunction(method, argument, (int)sampleCount, data, status, nanLimit, _logger);
                break;

            default:
                // Unknown methods are skipped with a warning rather than failing the batch.
                _logger.LogWarning($"The aggregation method '{unit.Method}' is not known. Skipping period {period}.");
                continue;
        }
    }

    return partialBuffersMap;
}
// Builds the view model from a dataset record, pre-extracting the CSVW and VoID
// metadata blobs and deriving the display title/description.
public DatasetViewModel(IDataDockUriService uriService, DatasetInfo datasetInfo, string prefLang = null, bool isOwner = false)
{
    _uriService = uriService;
    _datasetInfo = datasetInfo;
    _prefLang = prefLang;
    _csvwMetadata = datasetInfo.CsvwMetadata as JObject;
    _voidMetadata = datasetInfo.VoidMetadata as JObject;

    IsOwner = isOwner;
    Title = GetTitle();
    Description = GetDescription();
}
// Returns the requested file as a download when the current user has read access
// to the dataset recorded in the session; falls back to proxying a web resource
// when the path does not exist under the local data folder.
public FileResult getFile(string path)
{
    path = Server.UrlDecode(path);
    if (FileHelper.FileExist(Path.Combine(AppConfiguration.DataPath, path)))
    {
        EntityPermissionManager entityPermissionManager = null;
        DatasetManager datasetManager = null;
        try
        {
            entityPermissionManager = new EntityPermissionManager();
            datasetManager = new DatasetManager();

            DatasetInfo datasetInfo = (DatasetInfo)Session["DatasetInfo"];
            string entityType = (string)Session["EntityType"];
            long datasetID = datasetInfo.DatasetId;
            bool access = entityPermissionManager.HasEffectiveRight(HttpContext.User.Identity.Name, typeof(Dataset), datasetID, RightType.Read);
            if (access)
            {
                path = Path.Combine(AppConfiguration.DataPath, path);
                FileInfo fileInfo = new FileInfo(path);
                Session["DatasetInfo"] = datasetInfo;
                Session["EntityType"] = entityType;

                // after 2.14.1 files are stored in original names
                // to download the files, the user needs to know the dataset id and the version number
                int versionNr = datasetManager.GetDatasetVersionNr(datasetInfo.DatasetVersionId);
                string filename = datasetInfo.DatasetId + "_" + versionNr + "_" + fileInfo.Name;

                return File(path, MimeMapping.GetMimeMapping(fileInfo.Name), filename);
            }
            else
            {
                Session["DatasetInfo"] = datasetInfo;
                return null;
            }
        }
        catch (Exception)
        {
            // Best-effort: callers treat null as "not accessible / not loadable".
            return null;
        }
        finally
        {
            // Null-conditional dispose: the original called Dispose() unconditionally,
            // so a failure constructing either manager produced a NullReferenceException
            // in finally that masked the real error.
            entityPermissionManager?.Dispose();
            datasetManager?.Dispose();
        }
    }
    else
    {
        WebRequest request = WebRequest.Create(path);
        HttpWebResponse response = (HttpWebResponse)request.GetResponse();
        return File(response.GetResponseStream(), MimeMapping.GetMimeMapping(response.ResponseUri.Segments.LastOrDefault()), response.ResponseUri.Segments.LastOrDefault());
    }
}
/// <summary>
/// Builds a MySQL statement pair that reads the current value of a column
/// (locking the row with FOR UPDATE) and then increments it.
/// </summary>
/// <typeparam name="T">The entity type whose dataset metadata is used.</typeparam>
/// <param name="columnName">The member name used to look up the mapped column.</param>
public static string GetCurrentValueAndIncrement<T>(string columnName)
{
    var dataset = DatasetInfo.Get<T>();
    var column = dataset[columnName];
    // Use column.Name consistently. The original interpolated the raw
    // `columnName` into the increment expression, which produces wrong SQL
    // whenever the member name differs from the mapped column name.
    return ($@"SELECT `{column.Name}` FROM `{dataset.Name}` WHERE id = @id FOR UPDATE; UPDATE `{dataset.Name}` SET `{column.Name}` = `{column.Name}` + 1 WHERE id = @id");
}
/// <summary>
/// Exports the dataset's content to JSON.
/// </summary>
/// <param name="dataReader">The data reader which reads the dataset's content.</param>
/// <param name="outStream">The output stream.</param>
/// <param name="dataset">
/// An instance of the <see cref="DatasetInfo"/> class which represents basic table information.
/// Can be omitted if you export only one table.
/// </param>
public void ExportDataset(IDataReader dataReader, Stream outStream, DatasetInfo dataset = null)
{
    _logger?.LogDebug("Start saving dataset: " + dataset?.Name);

    // Capture the column names once, up front.
    var columns = new string[dataReader.FieldCount];
    for (int i = 0; i < dataReader.FieldCount; i++) {
        columns[i] = dataReader.GetName(i);
    }

    using (var writer = new JsonTextWriter(new StreamWriter(outStream, _uf8Encoding))) { //TODO: Set encoding via options
        writer.Formatting = Formatting.Indented;

        writer.WriteStartObject();      //root object start
        writer.WritePropertyName("name");
        writer.WriteValue(dataset?.Name);

        writer.WritePropertyName("schema");
        writer.WriteStartObject();      //schema object start
        WriteSchemaProperties(writer, dataReader);
        writer.WriteEndObject();        //schema object end

        writer.WritePropertyName("data");
        writer.WriteStartArray();       //data array start
        while (dataReader.Read()) {
            _logger?.LogDebug("Start writting row.");
            writer.WriteStartObject();
            // Iterate by ordinal. The original called GetOrdinal(column) for every
            // cell of every row, a redundant name lookup since columns[i] == GetName(i).
            for (var i = 0; i < dataReader.FieldCount; i++) {
                var column = columns[i];
                var value = dataReader.GetValue(i);
                // DBNull cells are omitted from the row object entirely.
                if (value.GetType() != typeof(DBNull)) {
                    writer.WritePropertyName(column);
                    writer.WriteValue(value);
                    _logger?.LogDebug($"Column={column}; Value={value}");
                }
            }
            writer.WriteEndObject();
            _logger?.LogDebug("Finish writting row.");
        }
        writer.WriteEndArray();         //data array end
        writer.WriteEndObject();        //root object end

        _logger?.LogDebug("Finish saving dataset: " + dataset?.Name);
    }
}
// Convenience overload: reads a single dataset by delegating to the list-based overload.
public IEnumerable<DataReaderProgressRecord> Read(
    DatasetInfo dataset,
    DateTime begin,
    DateTime end,
    ulong upperBlockSize,
    TimeSpan fundamentalPeriod,
    CancellationToken cancellationToken)
{
    var datasets = new List<DatasetInfo> { dataset };
    return this.Read(datasets, begin, end, upperBlockSize, fundamentalPeriod, cancellationToken);
}
// Opens the requested file as a raw stream when the current user has read access
// to the dataset recorded in the session; falls back to streaming a web resource
// when the path does not exist under the local data folder. Returns null on failure.
public Stream getFileStream(string path)
{
    path = Server.UrlDecode(path);
    if (FileHelper.FileExist(Path.Combine(AppConfiguration.DataPath, path)))
    {
        EntityPermissionManager entityPermissionManager = null;
        try
        {
            entityPermissionManager = new EntityPermissionManager();

            DatasetInfo datasetInfo = (DatasetInfo)Session["DatasetInfo"];
            string entityType = (string)Session["EntityType"];
            long datasetID = datasetInfo.DatasetId;
            bool access = entityPermissionManager.HasEffectiveRight(HttpContext.User.Identity.Name, typeof(Dataset), datasetID, RightType.Read);
            if (access)
            {
                path = Path.Combine(AppConfiguration.DataPath, path);
                Session["DatasetInfo"] = datasetInfo;
                Session["EntityType"] = entityType;
                return System.IO.File.OpenRead(path);
            }
            else
            {
                Session["DatasetInfo"] = datasetInfo;
                Session["EntityType"] = entityType;
                return null;
            }
        }
        catch
        {
            return null;
        }
        finally
        {
            // Null-conditional dispose: if the manager's construction threw, the
            // original called Dispose() on null and raised NullReferenceException
            // from the finally block, masking the original error.
            entityPermissionManager?.Dispose();
        }
    }
    else
    {
        try
        {
            WebRequest request = WebRequest.Create(path);
            HttpWebResponse response = (HttpWebResponse)request.GetResponse();
            return response.GetResponseStream();
        }
        catch
        {
            return null;
        }
    }
}
/// <summary>Builds an UPDATE covering the mutable and version columns, filtered by primary key.</summary>
public string UpdateRecord(DatasetInfo table)
{
    var columns = table.Members.Where(m => m.IsMutable || m.IsVersion).ToArray();
    var builder = new SqlBuilder(adapter);
    builder.Update(
        table: new SchemaObject(table.Name),
        columns: columns,
        condition: GetKeyFilter(table)
    );
    return builder.ToString();
}
// Materializes a DatasetInfo record from the current row of the reader.
public DatasetInfo MakeDataset(SqlDataReader results)
{
    var info = new DatasetInfo();
    info.ExperimentID = results["id"].ToString();
    info.ExperimentType = results["experiment_type"].ToString();
    info.CreatedDate = (DateTime)results["created_date"];
    info.PartnerName = results["partner"].ToString();
    info.FormulationDescription = results["formulation_description"].ToString();
    info.Notes = results["notes"].ToString();
    return info;
}
/// <summary>
/// Copy constructor.
/// </summary>
/// <param name="datasetInfo">The dataset-info object to copy.</param>
/// <exception cref="ArgumentNullException">Thrown when the dataset-info object is null.</exception>
public DatasetInfo(DatasetInfo datasetInfo)
{
    if (datasetInfo == null)
        throw new ArgumentNullException("datasetInfo", Resources.ArgumentIsNotNull);

    // Deep-copy the mutable reference members when present.
    if (datasetInfo.Bounds != null)
        this.Bounds = new Rectangle2D(datasetInfo.Bounds);
    if (datasetInfo.PrjCoordSys != null)
        this.PrjCoordSys = new PrjCoordSys(datasetInfo.PrjCoordSys);

    // Value-like members are copied directly.
    this.DataSourceName = datasetInfo.DataSourceName;
    this.Description = datasetInfo.Description;
    this.EncodeType = datasetInfo.EncodeType;
    this.IsReadOnly = datasetInfo.IsReadOnly;
    this.Name = datasetInfo.Name;
    this.TableName = datasetInfo.TableName;
    this.Type = datasetInfo.Type;
}
// Creates `num` dummy dataset records for the given owner. When repoId is empty,
// each record receives its own generated repository id.
private List<DatasetInfo> GetDummyDatasets(string ownerId, string repoId, int num)
{
    var datasets = new List<DatasetInfo>();
    for (var i = 0; i < num; i++)
    {
        var repository = string.IsNullOrEmpty(repoId) ? $"{ownerId}-repo-{i}" : repoId;
        datasets.Add(new DatasetInfo
        {
            OwnerId = ownerId,
            RepositoryId = repository,
            DatasetId = Guid.NewGuid().ToString()
        });
    }
    return datasets;
}
// Seeds the store with a 5x5x5 grid of test datasets (owner x repo x dataset),
// each with CSVW and VoID metadata and tags. Used as a test fixture.
private async Task InitializeStore()
{
    var count = 0;
    for (var o = 0; o < 5; o++)
    {
        for (var r = 0; r < 5; r++)
        {
            for (var d = 0; d < 5; d++)
            {
                count++;
                var tags = new List<string> { "test", $"owner-{o}", $"repo-{r}", $"set-{d}" };
                // The first dataset of each repo gets an extra tag for tag-search tests.
                if (d == 0)
                {
                    tags.Add("foo");
                }
                var csvwJson = new JObject(new JProperty("dc:title", $"Test Dataset {d} (Owner {o} Repo {r})"),
                                           new JProperty("dc:description", $"Dataset description {o}.{r}.{d}"),
                                           new JProperty("dcat:keyword", new JArray(tags)));
                var voidJson = new JObject(
                    new JProperty("void:triples", "100"),
                    new JProperty("void:dataDump",
                                  new JArray(
                                      $"https://github.com/owner-{o}/repo-{r}/releases/download/test-{d}_csv_20180207_170200/test-{d}_csv_20180207_170200.nt.gz",
                                      $"http://datadock.io/owner-{o}/repo-{r}/csv/test-{d}.csv/test-{d}.csv")));
                // 4 of the 5 datasets will have showOnHomepage = true, the final dataset will be showOnHomepage = false
                var datasetInfo = new DatasetInfo
                {
                    OwnerId = $"owner-{o}",
                    RepositoryId = $"repo-{r}",
                    DatasetId = $"test-{d}.csv",
                    ShowOnHomePage = d < 4,
                    // Stagger modification dates so ordering by recency is testable.
                    LastModified = DateTime.UtcNow.Subtract(TimeSpan.FromDays(d)),
                    CsvwMetadata = csvwJson,
                    VoidMetadata = voidJson,
                    Tags = tags
                };
                await Store.CreateOrUpdateDatasetRecordAsync(datasetInfo);
            }
        }
    }
    Console.WriteLine($"{count} datasets created");
}
// Convenience overload that derives the fundamental period from the dataset's sample rate.
public IEnumerable<DataReaderProgressRecord> Read(
    DatasetInfo dataset,
    DateTime begin,
    DateTime end,
    ulong upperBlockSize,
    CancellationToken cancellationToken)
{
#warning This is only a workaround. Should not be necessary when 1 Minute Base limit has been removed and all code is unit tested and rewritten.
    // 144 samples per day corresponds to one sample every 10 minutes.
    var fundamentalPeriod = dataset.GetSampleRate().SamplesPerDay == 144
        ? TimeSpan.FromMinutes(10)
        : TimeSpan.FromMinutes(1);

    return this.Read(new List<DatasetInfo> { dataset }, begin, end, upperBlockSize, fundamentalPeriod, cancellationToken);
}
/// <summary>
/// Starts the importing process.
/// Reads the head of the dataset stream, collecting the dataset name and schema
/// and positioning the reader on the first data row.
/// </summary>
/// <param name="datasetStream">The dataset stream (JSON).</param>
/// <returns>An instance of the <see cref="T:Korzh.DbUtils.DatasetInfo" /> which contains some basic information about the dataset (table).</returns>
/// <exception cref="DatasetImporterException">
/// Thrown when the stream is not a JSON object, or when the 'name', 'schema' or
/// 'data' properties are missing or malformed.
/// </exception>
public DatasetInfo StartImport(Stream datasetStream)
{
    _jsonReader = new JsonTextReader(new StreamReader(datasetStream, Encoding.UTF8));
    _jsonReader.Read();
    if (_jsonReader.TokenType != JsonToken.StartObject) {
        throw new DatasetImporterException($"Wrong file format at {_jsonReader.LineNumber}:{_jsonReader.LinePosition}");
    }

    _isEndOfData = false;

    if (!ReadToProperty("name")) {
        _isEndOfData = true;
        // Fixed message: this branch reports the missing 'name' property
        // (the original message incorrectly said 'schema').
        throw new DatasetImporterException($"Wrong file format. No 'name' property");
    }

    var datasetInfo = new DatasetInfo(_jsonReader.ReadAsString(), "");
    _datasetInfo = datasetInfo;

    if (!ReadToProperty("schema")) {
        _isEndOfData = true;
        throw new DatasetImporterException($"Wrong file format. No 'schema' property");
    }

    _jsonReader.Read();
    ReadSchema();

    if (!ReadToProperty("data")) {
        _isEndOfData = true;
        throw new DatasetImporterException($"Wrong file format. No 'data' property");
    }

    _jsonReader.Read();
    if (_jsonReader.TokenType != JsonToken.StartArray) {
        throw new DatasetImporterException($"Wrong file format at {_jsonReader.LineNumber}:{_jsonReader.LinePosition}");
    }

    // read first object start; an empty data array means there is nothing to import
    if (!_jsonReader.Read() || _jsonReader.TokenType != JsonToken.StartObject) {
        _isEndOfData = true;
    }

    return datasetInfo;
}
// Fetches dataset information from the service and renders it on the form,
// then records the request against the quota counter.
private async void button3_Click(object sender, EventArgs e)
{
    DatasetInfo response = await client.GetDatasetInformationAsync();
    cols.Text = response.ColumnNumber.ToString();
    rows.Text = response.RowsNumber.ToString();

    // One line per class: "class: <key> values: <value>".
    string classDistribution = "";
    foreach (var pair in response.ClassDistribution)
    {
        classDistribution += "class: " + pair.Key + " values: " + pair.Value + "\n";
    }
    classes.Text = classDistribution;

    await client.IncreaseRequestsQuantityAsync();
}
// Builds a label + textbox pair for every requested input field, pre-filling
// previously entered values, then focuses the first textbox.
private void Inputdialog_Load(object sender, EventArgs e)
{
    TextBox firstTxbox = null;
    int tabIndex = 0;
    int y = 30;

    foreach (string s in inputFields)
    {
        Label l = new Label();
        l.Location = new Point(5, y);
        l.Text = s;

        TextBox t = new TextBox();
        t.Location = new Point(l.Width + 10, y);
        if (tabIndex == 0)
        {
            firstTxbox = t;
        }
        t.TabIndex = tabIndex++;

        // Pre-fill with the historic value for this field, if any.
        if (historicNameValues.ContainsKey(s))
        {
            t.Text = historicNameValues[s];
        }

        inputGroupBox.Controls.Add(l);
        inputGroupBox.Controls.Add(t);
        y += 35;
        outputs.Add(s, t);
    }

    submitBtn.TabIndex = tabIndex;

    if (infos != null)
    {
        infoTxbox.Text = DatasetInfo.renderInfoList(infos);
    }

    // Guard against an empty inputFields collection: the original dereferenced
    // firstTxbox unconditionally and threw NullReferenceException when no
    // fields were configured.
    firstTxbox?.Focus();
}
// Returns the requested file as a streamed result when the current user has read
// access to the dataset recorded in the session; falls back to proxying a web
// resource when the path does not exist under the local data folder.
public FileResult getFileStreamResult(string path)
{
    path = Server.UrlDecode(path);
    if (FileHelper.FileExist(Path.Combine(AppConfiguration.DataPath, path)))
    {
        EntityPermissionManager entityPermissionManager = null;
        try
        {
            entityPermissionManager = new EntityPermissionManager();

            DatasetInfo datasetInfo = (DatasetInfo)Session["DatasetInfo"];
            string entityType = (string)Session["EntityType"];
            long datasetID = datasetInfo.DatasetId;
            bool access = entityPermissionManager.HasEffectiveRight(HttpContext.User.Identity.Name, typeof(Dataset), datasetID, RightType.Read);
            if (access)
            {
                path = Path.Combine(AppConfiguration.DataPath, path);
                FileInfo fileInfo = new FileInfo(path);
                Session["DatasetInfo"] = datasetInfo;
                Session["EntityType"] = entityType;
                return new FileStreamResult(new FileStream(path, FileMode.Open), MimeMapping.GetMimeMapping(fileInfo.Name));
            }
            else
            {
                Session["DatasetInfo"] = datasetInfo;
                Session["EntityType"] = entityType;
                return null;
            }
        }
        catch
        {
            return null;
        }
        finally
        {
            // Null-conditional dispose: the original unconditionally called Dispose()
            // and raised NullReferenceException when the manager's construction failed.
            entityPermissionManager?.Dispose();
        }
    }
    else
    {
        WebRequest request = WebRequest.Create(path);
        HttpWebResponse response = (HttpWebResponse)request.GetResponse();
        return new FileStreamResult(response.GetResponseStream(), MimeMapping.GetMimeMapping(response.ResponseUri.Segments.LastOrDefault()));
    }
}
// Walks every generic collection property of the database model and verifies
// that each table's indexes and its primary key fit within the 767-byte
// index-length limit, throwing when a violation is found.
private void KeysAndIndexesAreUnder767Bytes(object database)
{
    foreach (var property in database.GetType().GetProperties())
    {
        var type = property.PropertyType;
        if (!type.IsGenericType)
        {
            continue;
        }

        // The entity type is the collection property's single generic argument.
        type = type.GetGenericArguments()[0];
        var dataset = DatasetInfo.Get(type);

        foreach (var index in dataset.Indexes)
        {
            var indexLength = 0;
            foreach (var member in index.Members)
            {
                var m = dataset[member.Name];
                indexLength += GetLength(m, dataset);
            }
            if (indexLength > 767)
            {
                // Throw a specific exception type rather than the base Exception
                // (still catchable by existing catch (Exception) handlers).
                throw new InvalidOperationException($"table index '{index.Name}' on '{dataset.Name}' exceeds 767 bytes");
            }
        }

        int primaryKeyLength = 0;
        foreach (var key in dataset.PrimaryKey)
        {
            primaryKeyLength += GetLength(key, dataset);
        }
        if (primaryKeyLength > 767)
        {
            throw new InvalidOperationException($"primary key for '{dataset.Name}' exceeds 767 bytes. Was {primaryKeyLength} bytes.");
        }
    }
}
// Reads the dataset over [begin, end) and exposes it as a FLOAT64 stream whose
// byte length is derived from the dataset's sample rate.
public DataReaderDoubleStream ReadAsDoubleStream(
    DatasetInfo dataset,
    DateTime begin,
    DateTime end,
    ulong upperBlockSize,
    CancellationToken cancellationToken)
{
    var progressRecords = this.Read(
        new List<DatasetInfo> { dataset },
        begin, end, upperBlockSize,
        TimeSpan.FromMinutes(1),
        cancellationToken);

    var samplesPerSecond = new SampleRateContainer(dataset.Id).SamplesPerSecond;
    var totalSeconds = (decimal)(end - begin).TotalSeconds;
    var sampleCount = (long)Math.Round(samplesPerSecond * totalSeconds, MidpointRounding.AwayFromZero);
    var length = sampleCount * NexusUtilities.SizeOf(NexusDataType.FLOAT64);

    return new DataReaderDoubleStream(length, progressRecords);
}
// Looks up the filter code definition for the dataset's registration, matching
// on the parent channel id. Returns false when no cache entry applies.
public static bool TryGetFilterCodeDefinition(DatasetInfo datasetInfo, out CodeDefinition codeDefinition)
{
    codeDefinition = default;

    if (!FilterDataReader.FilterDataReaderCache.TryGetValue(datasetInfo.Registration, out var cacheEntries))
        return false;

    var match = cacheEntries
        .FirstOrDefault(entry => entry.SupportedChanneIds.Contains(datasetInfo.Parent.Id));

    if (match is null)
        return false;

    codeDefinition = match.FilterCodeDefinition;
    return true;
}
// Refreshes the progress, state and dataset-info labels on each timer tick.
private void timer1_Tick(object sender, EventArgs e)
{
    double ops = Math.Round(dataminingDb.getOperationsPerSecond(), 2);

    // Progress text: show "Idle" when no operations are in flight.
    var opsText = ops != 0.0 ? ops.ToString() : "Idle";
    progress_label.Text = "Op/s " + opsText + Environment.NewLine + dataminingDb.getProgress().getString();

    // State text
    state_label.Text = stateMessage;

    // Render data info, updating the textbox only when the content actually changed.
    string rendered = DatasetInfo.renderInfoList(dataminingDb.getInfo());
    if (dataInfoTextbox.Text != rendered)
    {
        dataInfoTextbox.Text = rendered;
    }
}
// Verifies that CreateOrUpdateDatasetRecordAsync surfaces a DatasetStoreException
// when the underlying index insert reports an invalid response.
// Changed from 'async void' to 'async Task': async void test methods cannot be
// awaited by the test runner, so assertion failures and exceptions go unobserved.
public async System.Threading.Tasks.Task CreateDatasetThrowsWhenInsertFails()
{
    var mockResponse = new Mock<IIndexResponse>();
    mockResponse.SetupGet(x => x.IsValid).Returns(false);

    var client = new Mock<IElasticClient>();
    AssertIndexExists(client, _indexName);
    client.Setup(x => x.IndexDocumentAsync<DatasetInfo>(It.IsAny<DatasetInfo>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(mockResponse.Object).Verifiable();

    var datasetStore = new DatasetStore(client.Object, new ApplicationConfiguration { DatasetIndexName = _indexName });
    var emptyDatasetInfo = new DatasetInfo();

    await Assert.ThrowsAsync<DatasetStoreException>(() => datasetStore.CreateOrUpdateDatasetRecordAsync(emptyDatasetInfo));
    client.Verify();
}
// Deserializes a list of DbValues into a typed array. Only map-typed entries
// (DbValueType.M) are materialized; other slots keep the element type's default.
private Array DeserializeArray(Type elementType, DbValue[] values)
{
    var elementModel = DatasetInfo.Get(elementType);
    var result = Array.CreateInstance(elementType, values.Length);

    for (var i = 0; i < values.Length; i++)
    {
        var value = values[i];
        if (value.Kind == DbValueType.M)
        {
            result.SetValue(((AttributeCollection)value.Value).DeserializeMap(elementModel), i);
        }
    }

    return result;
}
/// <summary>Builds a filtered SELECT over all dataset members, limited to a single row.</summary>
public SqlQuery QueryFirst(DatasetInfo dataset, Expression expression)
{
    var builder = new SqlBuilder(adapter);
    builder.Select(dataset.Members).From(dataset.Name);
    builder.Where(expression).Limit(1);
    return new SqlQuery(builder.ToString(), builder.Parameters);
}
/// <summary>
/// Updates the information of the specified dataset in the given datasource.
/// </summary>
/// <param name="datasourceName">Name of the datasource.</param>
/// <param name="datasetName">Name of the dataset to update.</param>
/// <param name="newDatasetInfo">The new dataset information.</param>
/// <returns>True when the update succeeds, otherwise false.</returns>
/// <exception cref="ArgumentNullException">Thrown when datasourceName or datasetName is empty.</exception>
/// <exception cref="SuperMap.Connector.Utility.ServiceException">Thrown on a server-side processing error.</exception>
/// <remarks>Currently supports updating the IsFileCache, Description, PrjCoordSys, Charset, Palette and NoValue properties.</remarks>
public bool UpdateDatasetInfo(string datasourceName, string datasetName, DatasetInfo newDatasetInfo)
    => _dataProvider.UpdateDatasetInfo(datasourceName, datasetName, newDatasetInfo);
// Rewrites a filter expression for the versioned-join query: symbols resolve to
// table members and are prefixed with the alias of the table that owns them
// ("v" for key/mutable/version columns, "a" otherwise). Binary expressions are
// expanded recursively; anything else passes through untouched.
private Expression Expand(Expression expression, DatasetInfo baseTable)
{
    switch (expression)
    {
        case Symbol symbol:
        {
            var member = baseTable[symbol.Name];
            var prefix = (member.IsKey || member.IsMutable || member.IsVersion) ? "v" : "a";
            return new Symbol(prefix, member.Name);
        }
        case BinaryExpression binary:
        {
            var lhs = Expand(binary.Left, baseTable);
            var rhs = Expand(binary.Right, baseTable);
            return new BinaryExpression(lhs, binary.Op, rhs);
        }
        default:
            return expression;
    }
}
// Builds the conjunction of equality conditions over the schema's primary key,
// comparing each key column to its named "@" parameter.
public Expression GetKeyFilter(DatasetInfo schema)
{
    var size = schema.PrimaryKey.Size;
    var conditions = new Expression[size];
    for (var i = 0; i < size; i++)
    {
        var key = schema.PrimaryKey[i];
        conditions[i] = Expression.Eq(key.Name, new Symbol("@" + key.PropertyName));
    }
    return Expression.Conjunction(conditions);
}
// Loads the "Latitude" dataset from the HDF file into a 3-D float array
// dimensioned [band, row, col], updating m_latInfo along the way.
private float[,,] ReadLat()
{
    m_latInfo = m_hdfOperator.GetDatasetInfo("Latitude", "/");
    var latitudes = new float[m_latInfo.band, m_latInfo.row, m_latInfo.col];
    m_hdfOperator.GetDataset("Latitude", "/", latitudes, m_latInfo.type);
    return latitudes;
}
//static void BuildHierachy2(JSONNode resultData) //{ // var component = PersistantSettings.Instance; // component.SceneHierachy.Clear(); // var currentPath = new List<string>(); // //currentPath.Add(resultData["recipe"]["name"]); // //component.hierachy.Add(new Node(currentPath.Last())); // if (resultData["cytoplasme"] != null) // { // currentPath.Add("cytoplasm"); // component.SceneHierachy.Add(new PersistantSettings.Node("cytoplasm", GetPath(currentPath))); // var ingredients = resultData["cytoplasme"]; // for (int j = 0; j < ingredients["ingredients"].Count; j++) // { // string iname = ingredients["ingredients"][j]["name"]; // if (!SceneManager.Instance.ProteinIngredientNames.Contains("cytoplasme" + "_" + iname)) continue; // currentPath.Add(iname); // component.SceneHierachy.Add(new PersistantSettings.Node(iname, GetPath(currentPath))); // currentPath.Remove(currentPath.Last()); // } // currentPath.Remove(currentPath.Last()); // } // for (int i = 0; i < resultData["compartments"].Count; i++) // { // var surfaceName = "surface" + i; // var interiorName = "interior" + i; // var compartment = resultData["compartments"].GetKey(i); // currentPath.Add(compartment); // component.SceneHierachy.Add(new PersistantSettings.Node(compartment, GetPath(currentPath))); // //*****// // var surface = "surface"; // var ingredients = resultData["compartments"][i][surface]; // if (ingredients["ingredients"].Count > 0) // { // currentPath.Add(surface); // component.SceneHierachy.Add(new PersistantSettings.Node(surface, GetPath(currentPath))); // for (int j = 0; j < ingredients["ingredients"].Count; j++) // { // string iname = ingredients["ingredients"][j]["name"]; // if (!SceneManager.Instance.ProteinIngredientNames.Contains(surfaceName + "_" + iname)) continue; // currentPath.Add(iname); // component.SceneHierachy.Add(new PersistantSettings.Node(iname, GetPath(currentPath))); // currentPath.Remove(currentPath.Last()); // } // currentPath.Remove(currentPath.Last()); // } // 
//*****//
// var interior = "interior";
// ingredients = resultData["compartments"][i][interior];
// if (ingredients["ingredients"].Count > 0)
// {
//     currentPath.Add(interior);
//     component.SceneHierachy.Add(new PersistantSettings.Node(interior, GetPath(currentPath)));
//     for (int j = 0; j < ingredients["ingredients"].Count; j++)
//     {
//         string iname = ingredients["ingredients"][j]["name"];
//         if (!SceneManager.Instance.ProteinIngredientNames.Contains(interiorName + "_" + iname)) continue;
//         currentPath.Add(iname);
//         component.SceneHierachy.Add(new PersistantSettings.Node(iname, GetPath(currentPath)));
//         currentPath.Remove(currentPath.Last());
//     }
//     currentPath.Remove(currentPath.Last());
// }
// currentPath.Remove(currentPath.Last());
// }
// int a = 0;
// //foreach (var node in component.hierachy)
// //{
// //    Debug.Log(" path: " + node.path);
// //}
//}

//public static void LoadCellPackResults(bool load = true)
//{
//    LoadAdditionalCellPackDataset
//}

// Opens a file picker for a .cpr recipe (editor only), loads its ingredients into
// the scene, records a DatasetInfo describing the newly created protein instance
// range, and uploads the scene data to the GPU.
// NOTE(review): the 'load' parameter is not used by this implementation — confirm intent.
public static void LoadCellPackResults(bool load=true)
{
#if UNITY_EDITOR
    Debug.Log("Loading");

    // Start the file dialog in the folder of the last loaded scene when it still exists.
    var directory = "";
    if (string.IsNullOrEmpty(PersistantSettings.Instance.LastSceneLoaded) || !Directory.Exists(Path.GetDirectoryName(PersistantSettings.Instance.LastSceneLoaded)))
    {
        directory = Application.dataPath;
    }
    else
    {
        directory = Path.GetDirectoryName(PersistantSettings.Instance.LastSceneLoaded);
    }

    var path = EditorUtility.OpenFilePanel("Select .cpr", directory, "cpr");
    if (string.IsNullOrEmpty(path)) return;

    PersistantSettings.Instance.LastSceneLoaded = path;
    LoadIngredients(path);

    Debug.Log("*****");
    Debug.Log("Total protein atoms number: " + SceneManager.Get.TotalNumProteinAtoms);

    // Update dataset info: instance ids continue from the previous dataset's end.
    var datasetInfo = new DatasetInfo();
    datasetInfo.DatasetID = SceneManager.Get.DatasetInformation.Count;
    if (SceneManager.Get.DatasetInformation.Count == 0)
    {
        datasetInfo.BeginInstanceID = 0;
    }
    else
    {
        datasetInfo.BeginInstanceID = SceneManager.Get.DatasetInformation.Last().EndInstanceID;
    }
    datasetInfo.EndInstanceID = SceneManager.Get.ProteinInstancePositions.Count;
    datasetInfo.NumInstances = datasetInfo.EndInstanceID - datasetInfo.BeginInstanceID;

    // Add dataset info
    SceneManager.Get.DatasetInformation.Add(datasetInfo);

    // Upload scene data to the GPU
    SceneManager.Get.UploadAllData();
#endif
}
// Verifies that an existing dataset's metadata can be updated through the REST data service.
public void UpdateDatasetInfo()
{
    DatasetInfo datasetInfo = new DatasetInfo();
    datasetInfo.Name = "Countries";
    datasetInfo.Type = DatasetType.POINT;
    datasetInfo.Description = "11111";

    Data data = new Data("http://" + ip + ":8090/iserver/services/data-world/rest");
    bool result = data.UpdateDatasetInfo("World", "Countries", datasetInfo);

    // Assert the boolean outcome directly: the original Assert.IsNotNull on a
    // value-type bool can never fail and therefore verified nothing.
    Assert.IsTrue(result);
}
/// <summary>
/// Updates the information of the specified dataset within the given datasource.
/// </summary>
/// <param name="datasourceName">Name of the datasource.</param>
/// <param name="datasetName">Name of the dataset to update.</param>
/// <param name="newDatasetInfo">The new dataset information.</param>
/// <returns>true when the dataset was updated successfully; otherwise false.</returns>
public bool UpdateDatasetInfo(string datasourceName, string datasetName, DatasetInfo newDatasetInfo)
{
    if (string.IsNullOrEmpty(datasourceName))
        throw new ArgumentNullException("datasourceName", Resources.ArgumentIsNotNull);
    if (string.IsNullOrEmpty(datasetName))
        throw new ArgumentNullException("datasetName", Resources.ArgumentIsNotNull);

    // PUT the serialized dataset info to the dataset's REST resource and read
    // the edit result back.
    string requestUrl = string.Format("{0}/data/datasources/{1}/datasets/{2}.json", this._serviceUrl, datasourceName, datasetName);
    string payload = JsonConvert.SerializeObject(newDatasetInfo);
    string response = SynchHttpRequest.GetRequestString(requestUrl, HttpRequestMethod.PUT, payload);

    EditResult editResult = JsonConvert.DeserializeObject<EditResult>(response);
    return editResult.Succeed;
}
/// <summary>
/// Creates a point dataset named "World4" in the "World" datasource, asserts the
/// creation succeeded, then deletes it again so the test remains repeatable.
/// </summary>
public void CreateDatasetTest()
{
    Data service = new Data("http://" + ip + ":8090/iserver/services/data-world/rest");

    DatasetInfo info = new DatasetInfo();
    info.Name = "World4";
    info.Type = DatasetType.POINT;

    bool created = service.CreateDataset("World", info.Name, info.Type);
    Assert.IsTrue(created);

    // Clean up the dataset we just created.
    bool deleted = service.DeleteDataset("World", "World4");
    Assert.IsTrue(deleted);
}
/// <summary>
/// Opens the given HDF L1 file and (re)loads everything this reader exposes:
/// reflective-band EV data, longitude/latitude grids, and calibration
/// coefficients. Any previously opened file is closed first.
/// </summary>
/// <param name="fileName">Path of the HDF file to read.</param>
/// <returns>Always true; failures surface as exceptions from the HDF operator
/// or the Read* helpers — NOTE(review): consider returning false on failure.</returns>
public bool ReadData(string fileName)
{
    // Release any previously opened file before switching to the new one.
    m_hdfOperator.Close();
    m_L1file = fileName;
    m_hdfOperator.Open(fileName);

    // Each dataset's metadata is fetched immediately before its read; the Read*
    // helpers presumably consume the matching *Info field, so keep this
    // info-then-read ordering intact — TODO confirm against the helpers.
    m_EVInfo = m_hdfOperator.GetDatasetInfo("EV_RefSB", "/");
    m_EV = ReadEV();
    m_lonInfo = m_hdfOperator.GetDatasetInfo("Longitude", "/");
    m_lon = ReadLon();
    m_latInfo = m_hdfOperator.GetDatasetInfo("Latitude", "/");
    m_lat = ReadLat();
    m_calcoef = ReadCalcoefAttr();

    // Refresh dependent state now that all fields are loaded.
    this.Update();
    return true;
}
/// <summary>
/// Builds a query that joins a versioned table back to its base table so each
/// returned row carries both the per-version columns and the immutable columns
/// from the base table.
/// </summary>
/// <param name="baseTable">Table holding the immutable values and the primary key.</param>
/// <param name="versionedTable">Table holding the per-version values.</param>
/// <param name="expression">Filter applied in the WHERE clause after expansion against the base table.</param>
/// <returns>The composed query text together with its bound parameters.</returns>
public SqlQuery QueryVersions(DatasetInfo baseTable, DatasetInfo versionedTable, Expression expression)
{
    var sql = new SqlBuilder(adapter);
    var v = new SchemaObject(versionedTable.Name, "v"); // alias "v" = versioned rows
    var a = new SchemaObject(baseTable.Name, "a");      // alias "a" = base rows
    sql.Append("SELECT ");
    sql.WriteColumns(versionedTable.Members, prefix: "v"); // mutable values, primary keys, ...
    sql.Append(", ");
    // Base-table columns that are not the version marker, not part of the key,
    // and not mutable — i.e. the immutable values not already selected above.
    sql.WriteColumns(baseTable.Members.Where(m => !m.IsVersion && !m.IsKey && !m.IsMutable), prefix: "a");
    sql.From(v); // SELECT from v
    // NOTE(review): only the first primary-key column is used — composite keys
    // are not supported by this join; confirm that is intentional.
    var keyName = baseTable.PrimaryKey[0].Name;
    // join the base (immutable) values with the versioned values
    // NOTE(review): the argument named "baseTable" receives the versioned alias
    // v on purpose (the join is anchored on v); verify against SqlBuilder.InnerJoin.
    sql.InnerJoin(
        baseTable : v, // versioned values
        joinedTable : a, // base values
        baseColumn : keyName,
        joinedColumn : keyName
    );
    sql.Where(Expand(expression, baseTable));
    return new SqlQuery(sql.ToString(), sql.Parameters);
}
/// <summary>
/// Deserializes a JSON object into the concrete <see cref="DatasetInfo"/> subtype,
/// discriminating on marker keys: "recordCount" → vector, "isMultiBand" → image,
/// "noValue" → grid, otherwise a plain <see cref="DatasetInfo"/>. Missing or null
/// fields fall back to the same defaults the original hand-rolled parsing used.
/// </summary>
/// <param name="reader">Reader positioned at the dataset-info JSON object.</param>
/// <param name="objectType">Requested target type (unused; the JSON content decides).</param>
/// <param name="existingValue">Existing value (unused).</param>
/// <param name="serializer">Serializer used to read the raw key/value dictionary.</param>
/// <returns>The populated dataset info, or null when the JSON is null.</returns>
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
    Dictionary<string, object> values = serializer.Deserialize<Dictionary<string, object>>(reader);
    if (values == null)
        return null;

    DatasetInfo datasetInfo;
    if (values.ContainsKey("recordCount"))
    {
        // Vector dataset.
        DatasetVectorInfo vectorInfo = new DatasetVectorInfo();
        vectorInfo.RecordCount = GetInt(values, "recordCount");
        vectorInfo.IsFileCache = GetBool(values, "isFileCache", false);
        vectorInfo.Charset = GetEnum(values, "charset", Charset.ANSI);
        datasetInfo = vectorInfo;
    }
    else if (values.ContainsKey("isMultiBand"))
    {
        // Image dataset.
        DatasetImageInfo imageInfo = new DatasetImageInfo();
        imageInfo.BlockSize = GetInt(values, "blockSize");
        imageInfo.Height = GetInt(values, "height");
        imageInfo.IsMultiBand = GetBool(values, "isMultiBand", false);
        imageInfo.Palette = GetObject<Color[]>(values, "palette");
        imageInfo.PixelFormat = GetEnum(values, "pixelFormat", PixelFormat.SINGLE);
        imageInfo.Width = GetInt(values, "width");
        datasetInfo = imageInfo;
    }
    else if (values.ContainsKey("noValue"))
    {
        // Grid dataset.
        DatasetGridInfo gridInfo = new DatasetGridInfo();
        gridInfo.BlockSize = GetInt(values, "blockSize");
        gridInfo.Height = GetInt(values, "height");
        gridInfo.MaxValue = GetDouble(values, "maxValue");
        // BUG FIX: the original assigned the parsed "minValue" to the maxValue
        // local, so MinValue was always 0.0 and the parsed minimum was discarded.
        gridInfo.MinValue = GetDouble(values, "minValue");
        gridInfo.NoValue = GetDouble(values, "noValue");
        gridInfo.PixelFormat = GetEnum(values, "pixelFormat", PixelFormat.SINGLE);
        gridInfo.Width = GetInt(values, "width");
        datasetInfo = gridInfo;
    }
    else
    {
        datasetInfo = new DatasetInfo();
    }

    // Members shared by every DatasetInfo subtype.
    datasetInfo.Bounds = GetObject<Rectangle2D>(values, "bounds");
    datasetInfo.DataSourceName = GetString(values, "dataSourceName");
    datasetInfo.Description = GetString(values, "description");
    datasetInfo.EncodeType = GetEnum(values, "encodeType", EncodeType.NONE);
    datasetInfo.IsReadOnly = GetBool(values, "isReadOnly", true); // note: defaults to true
    datasetInfo.Name = GetString(values, "name");
    datasetInfo.PrjCoordSys = GetObject<PrjCoordSys>(values, "prjCoordSys");
    datasetInfo.TableName = GetString(values, "tableName");
    datasetInfo.Type = GetEnum(values, "type", DatasetType.UNDEFINED);
    return datasetInfo;
}

// Returns the raw value for key when present and non-null; otherwise null.
private static object GetRaw(Dictionary<string, object> values, string key)
{
    object value;
    return values.TryGetValue(key, out value) ? value : null;
}

// Parses an int field, defaulting to 0 when absent or null.
private static int GetInt(Dictionary<string, object> values, string key)
{
    object raw = GetRaw(values, key);
    return raw != null ? int.Parse(raw.ToString()) : 0;
}

// Parses a double field, defaulting to 0.0 when absent or null.
private static double GetDouble(Dictionary<string, object> values, string key)
{
    object raw = GetRaw(values, key);
    return raw != null ? double.Parse(raw.ToString()) : 0.0;
}

// Parses a bool field, falling back to the supplied default when absent or null.
private static bool GetBool(Dictionary<string, object> values, string key, bool defaultValue)
{
    object raw = GetRaw(values, key);
    return raw != null ? bool.Parse(raw.ToString()) : defaultValue;
}

// Returns a string field, defaulting to the empty string when absent or null.
private static string GetString(Dictionary<string, object> values, string key)
{
    object raw = GetRaw(values, key);
    return raw != null ? raw.ToString() : "";
}

// Parses an enum field (case-sensitive, matching the original Enum.Parse usage),
// falling back to the supplied default when absent or null.
private static TEnum GetEnum<TEnum>(Dictionary<string, object> values, string key, TEnum defaultValue) where TEnum : struct
{
    object raw = GetRaw(values, key);
    return raw != null ? (TEnum)Enum.Parse(typeof(TEnum), raw.ToString(), false) : defaultValue;
}

// Deserializes a nested JSON field into T, returning null when absent or null.
private static T GetObject<T>(Dictionary<string, object> values, string key) where T : class
{
    object raw = GetRaw(values, key);
    return raw != null ? JsonConvert.DeserializeObject<T>(raw.ToString()) : null;
}