/// <summary>
/// Get Document object representing the table
/// </summary>
/// <param name="database">Database id</param>
/// <param name="id">Id of document (table)</param>
/// <param name="path">Path to table within database</param>
/// <param name="table">Table</param>
/// <param name="title">Title of the table (indexed and stored for search hits)</param>
/// <param name="published">Publication date of the table</param>
/// <param name="meta">PXMeta object</param>
/// <returns>Document object representing the table. An empty document is returned when
/// <paramref name="meta"/> is null or required metadata is missing.</returns>
private Document GetDocument(string database, string id, string path, string table, string title, DateTime published, PXMeta meta)
{
    Document doc = new Document();

    if (meta == null)
    {
        return doc;
    }

    // Guard: without these fields the document cannot be found or linked back to a
    // table, so return an empty (unusable) document rather than a partial one.
    if (string.IsNullOrEmpty(path) || string.IsNullOrEmpty(table) || string.IsNullOrEmpty(database) ||
        string.IsNullOrEmpty(meta.Title) || string.IsNullOrEmpty(meta.Matrix) || meta.Variables.Count == 0)
    {
        return doc;
    }

    doc.Add(new Field(SearchConstants.SEARCH_FIELD_DOCID, id, Field.Store.YES, Field.Index.NOT_ANALYZED));      // Used as id when updating a document - NOT searchable!!!
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_SEARCHID, id, Field.Store.NO, Field.Index.ANALYZED));        // Used for finding a document by id - will be used for generating URL from just the tableid - Searchable!!!
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_PATH, path, Field.Store.YES, Field.Index.NO));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_TABLE, table, Field.Store.YES, Field.Index.NO));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_DATABASE, database, Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_PUBLISHED, published.DateTimeToPxDateString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_MATRIX, meta.Matrix, Field.Store.YES, Field.Index.ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_TITLE, title, Field.Store.YES, Field.Index.ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_VARIABLES, string.Join(" ", (from v in meta.Variables select v.Name).ToArray()), Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_PERIOD, meta.GetTimeValues(), Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_VALUES, meta.GetAllValues(), Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_CODES, meta.GetAllCodes(), Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_GROUPINGS, meta.GetAllGroupings(), Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_GROUPINGCODES, meta.GetAllGroupingCodes(), Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_VALUESETS, meta.GetAllValuesets(), Field.Store.NO, Field.Index.ANALYZED));
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_VALUESETCODES, meta.GetAllValuesetCodes(), Field.Store.NO, Field.Index.ANALYZED));
    // Fall back to the matrix name when no explicit table id exists.
    doc.Add(new Field(SearchConstants.SEARCH_FIELD_TABLEID, meta.TableID ?? meta.Matrix, Field.Store.YES, Field.Index.ANALYZED));

    if (!string.IsNullOrEmpty(meta.Synonyms))
    {
        doc.Add(new Field(SearchConstants.SEARCH_FIELD_SYNONYMS, meta.Synonyms, Field.Store.NO, Field.Index.ANALYZED));
    }

    return doc;
}
/// <summary>
/// Update Paxiom Document (table) in index
/// </summary>
/// <param name="writer">IndexWriter object</param>
/// <param name="database">Database id</param>
/// <param name="id">Id of document (table)</param>
/// <param name="path">Path to table within database</param>
/// <param name="table">Table</param>
/// <param name="title">Title of the table</param>
/// <param name="published">Publication date of the table</param>
/// <param name="meta">PXMeta object</param>
/// <returns>Document object representing the updated table</returns>
private Document UpdatePaxiomDocument(IndexWriter writer, string database, string id, string path, string table, string title, DateTime published, PXMeta meta)
{
    // Build the document, then replace any existing index entry that carries
    // the same (not-analyzed) DOCID term.
    Document document = GetDocument(database, id, path, table, title, published, meta);
    Term docIdTerm = new Term(SearchConstants.SEARCH_FIELD_DOCID, document.Get(SearchConstants.SEARCH_FIELD_DOCID));
    writer.UpdateDocument(docIdTerm, document);
    return document;
}
/// <summary>
/// Serializes the given PXModel to a JSON-stat string: builds the dataset metadata
/// (source, updated, dimensions with index/label/unit maps, id/size/role), then copies
/// the data matrix into the flat JSON-stat value array, recording observation-status
/// symbols for missing/annotated cells.
/// </summary>
/// <param name="model">The Paxiom model (metadata + data matrix) to serialize.</param>
/// <returns>The JSON-stat document as a string.</returns>
private string GenerateJsonData(PXModel model)
{
    // NOTE(review): stores the model's meta in an instance field ("meta") that the
    // rest of this method (and presumably other members) reads — not thread-safe
    // if this instance is shared; confirm single-use per request.
    meta = model.Meta;
    _logger.Info("Trying to parse .CreationDate from Paxiom => " + meta.CreationDate);
    BuildDataSymbolMap(); // populates dataSymbolMap used below to translate data symbols
    var jsonResult = new JsonStat.Model.JsonStat();
    var dataset = new JsonStat.Model.JsonStatDatasetBase(model.Data.MatrixSize);
    var formatter = new DataFormatter(model);
    dataset.source = meta.Source;
    dataset.updated = meta.CreationDate.PxDateStringToDateTime().ToString();
    dataset.dimension = new Dictionary<string, object>();

    // Extension, PX: PX-specific fields go into a custom "extension" object.
    if (meta.InfoFile != null || meta.TableID != null || meta.Decimals != -1)
    {
        dataset.extension = new Dictionary<string, object>();
        var px = new JsonStat.Model.JsonStatPx();
        px.infofile = meta.InfoFile;
        px.tableid = meta.TableID;
        // If ShowDecimals has no value (negative), fall back to Decimals.
        var decimals = meta.ShowDecimals < 0 ? meta.Decimals : meta.ShowDecimals;
        px.decimals = decimals;
        dataset.extension.Add(PX, px);
    }

    // An explicitly supplied dataset title overrides the table title from metadata.
    if (DatasetTitle != null)
    {
        dataset.label = DatasetTitle;
    }
    else
    {
        dataset.label = meta.Title;
    }

    var roles = new Model.JsonStatDimension();
    var id = new string[meta.Variables.Count];
    var size = new int[meta.Variables.Count];
    var variableIndex = 0;

    #region Variables
    // One dimension entry per variable: category index (code -> position),
    // labels (code -> text), and, for the content variable, unit info per value.
    foreach (var variable in meta.Variables)
    {
        var variableName = variable.Code;
        var category = new Dictionary<string, object>();
        var index = new Dictionary<string, int>();
        var label = new Dictionary<string, string>();
        var variableEntry = new Dictionary<string, object>();
        var unit = new Dictionary<string, object>();
        var link = new Dictionary<string, object>();

        if (variable.IsContentVariable)
        {
            // The reason roles are an array, is that it's possible to have several time-variables (in theory)
            roles.Add(METRIC, variableName);
        }
        else if (variable.IsTime)
        {
            roles.Add(TIME, variableName);
        }

        // Adding regional variables to the geo-field.
        if (variable.Map != null || geoVariableMap.ContainsKey(variableName))
        {
            roles.Add(GEO, variableName);
        }

        id[variableIndex] = variableName;
        size[variableIndex] = variable.Values.Count;
        variableIndex++;

        var variableValueIndex = 0;
        foreach (var value in variable.Values)
        {
            index.Add(value.Code, variableValueIndex);
            label.Add(value.Code, value.Value);
            if (variable.IsContentVariable)
            {
                var unitContent = new Dictionary<string, object>();
                unitContent.Add(BASE, value.ContentInfo.Units);
                // If ShowDecimals has no value (negative), fall back to Decimals;
                // a per-value precision, when present, wins over both.
                var currentDecimal = meta.ShowDecimals < 0 ? meta.Decimals : meta.ShowDecimals;
                var decimals = (value.HasPrecision()) ? value.Precision : currentDecimal;
                unitContent.Add(DECIMALS, decimals);
                unit.Add(value.Code, unitContent);
            }
            variableValueIndex++;
        }

        category.Add(INDEX, index);
        category.Add(LABEL, label);
        if (variable.IsContentVariable)
        {
            category.Add(UNIT, unit);
        }
        variableEntry.Add(LABEL, variable.Name);
        variableEntry.Add(CATEGORY, category);

        // Serialized meta-id extensions, when present, are attached via a "describedby" link.
        var extensions = GetAllSerializedMetaIdsForVariable(variable);
        if (extensions.Count > 0)
        {
            link.Add(DESCRIBEDBY, new List<object> { extensions });
            variableEntry.Add(LINK, link);
        }

        dataset.dimension.Add(variableName, variableEntry);
    }
    #endregion

    dataset.dimension.Add(ID, id);
    dataset.dimension.Add(SIZE, size);
    dataset.dimension.Add(ROLE, roles);

    // All data is in a single array in JSON-stat.
    #region Data
    var haveObsStatus = false;
    var observationStatus = new Dictionary<string, string>();
    var bufferIndex = 0; // running position in the flat JSON-stat value array
    var buffer = new double[model.Data.MatrixColumnCount];
    string note = string.Empty;
    string dataNote = string.Empty;
    var numberFormatInfo = new System.Globalization.NumberFormatInfo();
    for (int i = 0; i < model.Data.MatrixRowCount; i++)
    {
        model.Data.ReadLine(i, buffer);
        for (int j = 0; j < model.Data.MatrixColumnCount; j++)
        {
            string symbol = null;
            if (dataSymbolMap.TryGetValue(buffer[j], out symbol))
            {
                // Cell holds a data symbol (e.g. missing/confidential): emit null
                // and record the symbol as the cell's observation status.
                observationStatus.Add(bufferIndex.ToString(), symbol);
                haveObsStatus = true;
                dataset.value[bufferIndex] = null;
            }
            else
            {
                dataset.value[bufferIndex] = Convert.ToDouble(formatter.ReadElement(i, j, ref note, ref dataNote, ref numberFormatInfo), numberFormatInfo);
                // A data note on a real value also becomes an observation status.
                if (!string.IsNullOrEmpty(dataNote))
                {
                    observationStatus.Add(bufferIndex.ToString(), dataNote);
                    haveObsStatus = true;
                }
            }
            bufferIndex++;
        }
    }

    if (!haveObsStatus)
    {
        jsonResult.dataset = dataset;
    }
    else
    {
        // Only the extended dataset type carries the "status" map, so switch to it
        // when any observation status was recorded.
        var datasetEx = new Model.JsonStatDatasetExtended(model.Data.MatrixSize);
        // Copy values from the baseobject to the extended object. And add observation status.
        datasetEx.dimension = dataset.dimension;
        datasetEx.label = dataset.label;
        datasetEx.source = dataset.source;
        datasetEx.updated = dataset.updated;
        datasetEx.value = dataset.value;
        datasetEx.status = observationStatus;
        jsonResult.dataset = datasetEx;
    }
    #endregion

    // override converter to stop adding ".0" after integer values.
    string result = JsonConvert.SerializeObject(jsonResult, new DecimalJsonConverter());
    return (result);
}
/// <summary>
/// Add Paxiom Document (table) to index
/// </summary>
/// <param name="writer">IndexWriter object</param>
/// <param name="database">Database id</param>
/// <param name="id">Id of document (table)</param>
/// <param name="path">Path to table within database</param>
/// <param name="table">Table</param>
/// <param name="title">Title of the table</param>
/// <param name="published">Publication date of the table</param>
/// <param name="meta">PXMeta object</param>
/// <returns>Document object representing the added table</returns>
private Document AddPaxiomDocument(IndexWriter writer, string database, string id, string path, string table, string title, DateTime published, PXMeta meta)
{
    // Build the Lucene document from the table metadata and append it to the index.
    Document document = GetDocument(database, id, path, table, title, published, meta);
    writer.AddDocument(document);
    return document;
}
/// <summary>
/// Returns the values of a variable in a PXMeta instance. Returns the values of the time variable in reversed order.
/// </summary>
/// <param name="meta">The PXMeta instance to read the variable from.</param>
/// <param name="code">The code of the variable for which to return values.</param>
/// <returns>The variable's values, reversed when the variable is the time variable.</returns>
public static IEnumerable<Value> ValuesForVariable(this PXMeta meta, string code)
{
    Variable match = meta.Variables.First(x => x.Code == code);
    if (match.IsTime)
    {
        // Enumerable.Reverse (not List<T>.Reverse) so the underlying list is not mutated.
        return match.Values.Reverse<Value>();
    }

    return match.Values;
}