/// <summary>
/// Builds the metadata for a standard TDMS channel, including the optional waveform
/// ("wf_*") properties and the raw-data descriptor.
/// </summary>
/// <param name="groupName">Name of the owning group (second path element is the channel name).</param>
/// <param name="name">Channel name.</param>
/// <param name="description">Channel description.</param>
/// <param name="yUnitString">Y-axis unit; stored as "unit_string" when non-empty.</param>
/// <param name="xUnitString">X-axis unit; stored as "wf_xunit_string" when non-empty.</param>
/// <param name="xName">X-axis name; stored as "wf_xname" when non-empty.</param>
/// <param name="startTime">Waveform start time; omitted when equal to DateTime.MinValue.</param>
/// <param name="increment">Waveform increment; omitted when 0.</param>
/// <param name="dataType">CLR type of the channel samples.</param>
/// <param name="dataCount">Number of samples.</param>
/// <param name="stringBlobLength">Total byte size of the string data blob; only used when <paramref name="dataType"/> is string.</param>
public static Reader.Metadata GenerateStandardChannel(string groupName, string name, string description, string yUnitString, string xUnitString, string xName, DateTime startTime, double increment, Type dataType, int dataCount, int stringBlobLength = 0)
{
    // NOTE(review): this relies on GenerateStandardProperties accepting
    // (name, description, params path); with a (description, params path) signature
    // these arguments would shift and the description would leak into the path — confirm.
    Reader.Metadata meta = GenerateStandardProperties(name, description, groupName, name);
    if (!string.IsNullOrEmpty(yUnitString))
    {
        meta.Properties.Add("unit_string", yUnitString);
    }
    if (!string.IsNullOrEmpty(xUnitString))
    {
        meta.Properties.Add("wf_xunit_string", xUnitString);
    }
    if (!string.IsNullOrEmpty(xName))
    {
        meta.Properties.Add("wf_xname", xName);
    }
    if (startTime != new DateTime(1, 1, 1))
    {
        meta.Properties.Add("wf_start_time", startTime);
    }
    if (increment != 0)
    {
        meta.Properties.Add("wf_increment", increment);
    }
    meta.RawData = new Reader.RawData();
    // Fix: string has no parameterless constructor, so Activator.CreateInstance(typeof(string))
    // throws MissingMethodException; supply an empty string instance instead. String channels
    // are explicitly supported (see the stringBlobLength branch below).
    meta.RawData.DataType = DataType.GetDataType(dataType == typeof(string) ? (object)string.Empty : Activator.CreateInstance(dataType));
    meta.RawData.Count = dataCount;
    meta.RawData.Dimension = 1; //always 1
    if (dataType == typeof(string))
    {
        // Strings are variable-length; the caller must supply the total blob size.
        meta.RawData.Size = stringBlobLength;
    }
    return meta;
}
/// <summary>
/// Creates a metadata record for the given TDMS object path, carrying the standard
/// "name" and "description" properties (each added only when non-empty).
/// </summary>
/// <param name="name">Object name; stored as the "name" property when non-empty.</param>
/// <param name="description">Stored as the "description" property when non-empty.</param>
/// <param name="path">TDMS object path: empty = file root, one element = group, two = channel.</param>
public static Reader.Metadata GenerateStandardProperties(string name, string description, params string[] path)
{
    // Fix: every caller (GenerateStandardRoot/Group/Channel) passes (name, description, path...).
    // The previous (description, params path) signature shifted each argument — the description
    // leaked into the object path, and the GenerateStandardRoot call did not even compile
    // (a string[] cannot be mixed into an expanded params list).
    Reader.Metadata meta = new Reader.Metadata();
    meta.Path = path;
    meta.Properties = new Dictionary <string, object>();
    if (!string.IsNullOrEmpty(name))
    {
        // NOTE(review): "name" is recorded as an explicit property; confirm downstream
        // readers expect it in addition to the path-derived object name.
        meta.Properties.Add("name", name);
    }
    if (!string.IsNullOrEmpty(description))
    {
        meta.Properties.Add("description", description);
    }
    return meta;
}
/// <summary>
/// Builds root-level (file) metadata with an empty object path, adding the optional
/// "author" and "datetime" properties when supplied.
/// </summary>
/// <param name="name">File name.</param>
/// <param name="author">Stored as the "author" property when non-empty.</param>
/// <param name="description">File description.</param>
/// <param name="datetime">Stored as the "datetime" property unless it is DateTime.MinValue.</param>
public static Reader.Metadata GenerateStandardRoot(string name, string author, string description, DateTime datetime)
{
    var meta = GenerateStandardProperties(name, description, new string[0]);

    if (!string.IsNullOrEmpty(author))
    {
        meta.Properties.Add("author", author);
    }

    // new DateTime(1, 1, 1) is exactly DateTime.MinValue, the "not supplied" sentinel.
    if (datetime != DateTime.MinValue)
    {
        meta.Properties.Add("datetime", datetime);
    }

    return meta;
}
// Streams the metadata of every segment in the file, back-filling fields that a segment
// omits (TDMS "incremental metadata") from the most recently seen metadata for the same
// group/channel path, and synthesizing implicit repeats when raw data extends past the
// described chunks.
private static IEnumerable <Reader.Metadata> LoadMetadata(Reader reader)
{
    var segments = GetSegments(reader).ToList();
    // Last complete metadata seen per [group][channel]; used to back-fill segments
    // that repeat a channel without restating its raw-data index.
    var prevMetaDataLookup = new Dictionary <string, Dictionary <string, Reader.Metadata> >();
    foreach (var segment in segments)
    {
        // Skip segments that carry neither new objects, metadata nor raw data.
        if (!(segment.TableOfContents.ContainsNewObjects || segment.TableOfContents.HasDaqMxData || segment.TableOfContents.HasMetaData || segment.TableOfContents.HasRawData))
        {
            continue;
        }
        var metadatas = reader.ReadMetadata(segment);
        long rawDataSize = 0;                    // bytes of raw data accounted for so far in this segment
        long nextOffset = segment.RawDataOffset; // running offset just past the last known raw-data chunk
        foreach (var m in metadatas)
        {
            // Path.Length > 1 identifies a channel ({group, channel}); Count == 0 means
            // this segment did not restate the channel's raw-data index.
            if (m.RawData.Count == 0 && m.Path.Length > 1)
            {
                // apply previous metadata if available
                if (prevMetaDataLookup.ContainsKey(m.Path[0]) && prevMetaDataLookup[m.Path[0]].ContainsKey(m.Path[1]))
                {
                    var prevMetaData = prevMetaDataLookup[m.Path[0]][m.Path[1]];
                    if (prevMetaData != null)
                    {
                        // Count only carries over when this segment actually contains raw data.
                        m.RawData.Count = segment.TableOfContents.HasRawData ? prevMetaData.RawData.Count : 0;
                        m.RawData.DataType = prevMetaData.RawData.DataType;
                        m.RawData.ClrDataType = prevMetaData.RawData.ClrDataType;
                        // Offset is always recomputed for this segment, not inherited.
                        m.RawData.Offset = segment.RawDataOffset + rawDataSize;
                        m.RawData.IsInterleaved = prevMetaData.RawData.IsInterleaved;
                        m.RawData.InterleaveStride = prevMetaData.RawData.InterleaveStride;
                        m.RawData.Size = prevMetaData.RawData.Size;
                        m.RawData.Dimension = prevMetaData.RawData.Dimension;
                    }
                }
            }
            // Interleaved data in the final segment (NextSegmentOffset <= 0): derive the count
            // from the remaining file length, rounding up to a whole stride.
            // NOTE(review): the `NextSegmentOffset > 0` arm of this ternary is unreachable under
            // the guard `NextSegmentOffset <= 0` — confirm whether the guard or the ternary is intended.
            if (m.RawData.IsInterleaved && segment.NextSegmentOffset <= 0)
            {
                m.RawData.Count = segment.NextSegmentOffset > 0 ?
                    (segment.NextSegmentOffset - m.RawData.Offset + m.RawData.InterleaveStride - 1) / m.RawData.InterleaveStride :
                    (reader.FileSize - m.RawData.Offset + m.RawData.InterleaveStride - 1) / m.RawData.InterleaveStride;
            }
            // Advance the running offsets past this channel's raw-data chunk.
            if (m.Path.Length > 1)
            {
                rawDataSize += m.RawData.Size;
                nextOffset += m.RawData.Size;
            }
        }
        // If raw data continues past the described chunks, the whole channel layout repeats
        // until the next segment (or end of file); synthesize metadata for each repeat.
        // Only done for non-interleaved data with a known per-chunk size.
        var implicitMetadatas = new List <Reader.Metadata>();
        if (metadatas.All(m => !m.RawData.IsInterleaved && m.RawData.Size > 0))
        {
            while (nextOffset < segment.NextSegmentOffset || (segment.NextSegmentOffset == -1 && nextOffset < reader.FileSize))
            {
                // Incremental Meta Data see http://www.ni.com/white-paper/5696/en/#toc1
                foreach (var m in metadatas)
                {
                    if (m.Path.Length > 1)
                    {
                        var implicitMetadata = new Reader.Metadata()
                        {
                            Path = m.Path,
                            RawData = new Reader.RawData()
                            {
                                Count = m.RawData.Count,
                                DataType = m.RawData.DataType,
                                ClrDataType = m.RawData.ClrDataType,
                                Offset = nextOffset,
                                IsInterleaved = m.RawData.IsInterleaved,
                                Size = m.RawData.Size,
                                Dimension = m.RawData.Dimension
                            },
                            Properties = m.Properties
                        };
                        implicitMetadatas.Add(implicitMetadata);
                        nextOffset += implicitMetadata.RawData.Size;
                    }
                }
            }
        }
        var metadataWithImplicit = metadatas.Concat(implicitMetadatas).ToList();
        foreach (var metadata in metadataWithImplicit)
        {
            // Remember channel metadata ({group, channel} paths) for back-filling later segments.
            if (metadata.Path.Length == 2)
            {
                if (!prevMetaDataLookup.ContainsKey(metadata.Path[0]))
                {
                    prevMetaDataLookup[metadata.Path[0]] = new Dictionary <string, Reader.Metadata>();
                }
                prevMetaDataLookup[metadata.Path[0]][metadata.Path[1]] = metadata;
            }
            yield return(metadata);
        }
    }
}
/// <summary>
/// This will re-write the TDMS file. Mostly used for write demonstration. Although, this will also defragment the file.
/// All data is written into a single segment, non-interleaved.
/// </summary>
/// <param name="stream">Destination stream for the re-written file.</param>
public void ReWrite(Stream stream)
{
    WriteSegment segment = new WriteSegment(stream);
    // Fix: only flag raw data when at least one channel actually has data.
    // (A bare .Any() on the projected sequence returned true whenever any channel
    // existed at all, regardless of its HasData value.)
    segment.Header.TableOfContents.HasRawData = Groups.SelectMany(g => g.Value.Channels.Values, (g, c) => c.HasData).Any(hasData => hasData);
    //when we re-write the file, no data shall be interleaved. (It's an all or nothing situation, with only 1 segment)
    //segment.Header.TableOfContents.RawDataIsInterleaved = Groups.SelectMany(g => g.Value.Channels.Values, (g, c) => c.RawData.First().IsInterleaved).Any();

    //Top level
    Reader.Metadata m = new Reader.Metadata();
    m.Path = new string[0];
    m.Properties = Properties;
    segment.MetaData.Add(m);

    //Groups
    foreach (KeyValuePair <string, Group> group in Groups)
    {
        m = new Reader.Metadata();
        m.Path = new string[] { group.Key };
        m.Properties = group.Value.Properties;
        segment.MetaData.Add(m);

        //Channels
        foreach (KeyValuePair <string, Channel> channel in group.Value.Channels)
        {
            Reader.RawData[] raws = channel.Value.RawData.ToArray();

            // Add first part, carrying the channel properties.
            // Fix: ToArray() never returns null, but the array CAN be empty — the old
            // raws?[0] still threw IndexOutOfRangeException for a channel with no data.
            m = new Reader.Metadata();
            m.Path = new string[] { group.Key, channel.Key };
            m.Properties = channel.Value.Properties;
            m.RawData = raws.Length > 0 ? raws[0] : null;
            segment.MetaData.Add(m);

            // Add the other parts (if any); they share the path but carry no properties.
            for (int i = 1; i < raws.Length; i++)
            {
                m = new Reader.Metadata();
                m.Path = new string[] { group.Key, channel.Key };
                m.RawData = raws[i];
                segment.MetaData.Add(m);
            }
        }
    }

    //Write all raw data, copied out of the source file via a fresh reader.
    Writer writer = segment.Open();
    var reader = new Reader(_stream.Value);
    foreach (KeyValuePair <string, Group> group in Groups)
    {
        foreach (KeyValuePair <string, Channel> channel in group.Value.Channels)
        {
            foreach (Reader.RawData raw in channel.Value.RawData)
            {
                var data = reader.ReadRawData(raw);
                raw.IsInterleaved = false; //when we re-write the file, no data shall be interleaved
                writer.WriteRawData(raw, data);
            }
        }
    }

    //close up
    segment.Close();
}
// Loads the metadata of every segment, back-filling raw-data fields a segment omits
// (TDMS "incremental metadata") from the immediately preceding segment, and synthesizing
// implicit repeats when raw data extends past the described chunks. All segment metadata
// is materialized in memory and flattened at the end.
// NOTE(review): back-filling matches channels by Path[1] only (the channel name), ignoring
// the group — identically named channels in different groups could collide; verify.
private static IEnumerable <Reader.Metadata> LoadMetadata(Reader reader)
{
    var segments = GetSegments(reader).ToList();
    // One entry per segment: the segment plus its (explicit + implicit) metadata.
    var segmentMetadata = new List <Tuple <Reader.Segment, List <Reader.Metadata> > >();
    Tuple <Reader.Segment, List <Reader.Metadata> > prevSegment = null;
    foreach (var segment in segments)
    {
        var metadatas = reader.ReadMetadata(segment);
        long rawDataSize = 0;                    // bytes of raw data accounted for so far in this segment
        long nextOffset = segment.RawDataOffset; // running offset just past the last known raw-data chunk
        foreach (var m in metadatas)
        {
            // Count == 0 on a channel path means this segment did not restate the raw-data index.
            if (m.RawData.Count == 0 && prevSegment != null && m.Path.Length > 1)
            {
                // apply previous metadata if available (matched by channel name only — see NOTE above)
                var prevMetaData = prevSegment.Item2.FirstOrDefault(md => md.Path.Length > 1 && md.Path[1] == m.Path[1]);
                if (prevMetaData != null)
                {
                    m.RawData.Count = prevMetaData.RawData.Count;
                    m.RawData.DataType = prevMetaData.RawData.DataType;
                    m.RawData.ClrDataType = prevMetaData.RawData.ClrDataType;
                    // Offset is always recomputed for this segment, not inherited.
                    m.RawData.Offset = segment.RawDataOffset + rawDataSize;
                    m.RawData.IsInterleaved = prevMetaData.RawData.IsInterleaved;
                    m.RawData.InterleaveStride = prevMetaData.RawData.InterleaveStride;
                    m.RawData.Size = prevMetaData.RawData.Size;
                    m.RawData.Dimension = prevMetaData.RawData.Dimension;
                }
            }
            // Interleaved data in the final segment (NextSegmentOffset <= 0): derive the count
            // from the remaining file length, rounding up to a whole stride.
            // NOTE(review): the `NextSegmentOffset > 0` arm of this ternary is unreachable under
            // the guard `NextSegmentOffset <= 0` — confirm whether the guard or the ternary is intended.
            if (m.RawData.IsInterleaved && segment.NextSegmentOffset <= 0)
            {
                m.RawData.Count = segment.NextSegmentOffset > 0 ?
                    (segment.NextSegmentOffset - m.RawData.Offset + m.RawData.InterleaveStride - 1) / m.RawData.InterleaveStride :
                    (reader.FileSize - m.RawData.Offset + m.RawData.InterleaveStride - 1) / m.RawData.InterleaveStride;
            }
            // Advance the running offsets past this channel's raw-data chunk.
            if (m.Path.Length > 1)
            {
                rawDataSize += m.RawData.Size;
                nextOffset += m.RawData.Size;
            }
        }
        // If raw data continues past the described chunks, the whole channel layout repeats
        // until the next segment (or end of file); synthesize metadata for each repeat.
        // Only done for non-interleaved data with a known per-chunk size.
        var implicitMetadatas = new List <Reader.Metadata>();
        if (metadatas.All(m => !m.RawData.IsInterleaved && m.RawData.Size > 0))
        {
            while (nextOffset < segment.NextSegmentOffset || (segment.NextSegmentOffset == -1 && nextOffset < reader.FileSize))
            {
                // Incremental Meta Data see http://www.ni.com/white-paper/5696/en/#toc1
                foreach (var m in metadatas)
                {
                    if (m.Path.Length > 1)
                    {
                        var implicitMetadata = new Reader.Metadata()
                        {
                            Path = m.Path,
                            RawData = new Reader.RawData()
                            {
                                Count = m.RawData.Count,
                                DataType = m.RawData.DataType,
                                ClrDataType = m.RawData.ClrDataType,
                                Offset = nextOffset,
                                IsInterleaved = m.RawData.IsInterleaved,
                                Size = m.RawData.Size,
                                Dimension = m.RawData.Dimension
                            },
                            Properties = m.Properties
                        };
                        implicitMetadatas.Add(implicitMetadata);
                        nextOffset += implicitMetadata.RawData.Size;
                    }
                }
            }
        }
        var metadataWithImplicit = metadatas.Concat(implicitMetadatas).ToList();
        // Keep this segment's metadata around for back-filling the next segment.
        prevSegment = Tuple.Create(segment, metadataWithImplicit);
        segmentMetadata.Add(prevSegment);
    }
    return(segmentMetadata.SelectMany(st => st.Item2));
}
/// <summary>
/// Builds group-level metadata: a single-element object path ({name}) with the
/// standard properties from <see cref="GenerateStandardProperties"/>.
/// </summary>
/// <param name="name">Group name; also used as the single path element.</param>
/// <param name="description">Group description.</param>
public static Reader.Metadata GenerateStandardGroup(string name, string description)
{
    // No group-specific properties beyond the standard ones; delegate directly.
    return GenerateStandardProperties(name, description, name);
}