/// <summary>
/// Builds a SERIES-level C-FIND query data set with the standard return keys,
/// then applies any tag-path keys supplied by the caller.
/// </summary>
/// <param name="args">Optional map of tag-path keys (e.g. "(0008,0060)") to match values; may be null.</param>
/// <returns>The populated query data set.</returns>
public static DataSet GetSeriesQuery(Dictionary<string, string> args)
{
    var query = new DataSet();

    // Null values request universal matching (return the attribute).
    query.Add(t.SpecificCharacterSet, null);
    query.Add(t.QueryRetrieveLevel, "SERIES");
    query.Add(t.Modality, null);
    query.Add(t.SeriesDescription, null);
    query.Add(t.BodyPartExamined, null);
    query.Add(t.PatientPosition, null);
    query.Add(t.ViewPosition, null);
    query.Add(t.StudyInstanceUID, null);
    query.Add(t.SeriesInstanceUID, null);
    query.Add(t.SeriesNumber, null);

    if (args != null)
    {
        // Only keys that look like DICOM tag paths are applied to the query.
        foreach (var entry in args)
        {
            if (entry.Key.Contains("("))
            {
                query[entry.Key].Value = entry.Value;
            }
        }
    }

    return query;
}
/// <summary>
/// Writes two data sets into the same stream — the second with append = true —
/// covering one column per supported primitive type.
/// </summary>
public void Append_to_file_works_for_all_data_types()
{
    var ms = new MemoryStream();

    // One column per supported CLR type.
    var schema = new Schema();
    schema.Elements.Add(new SchemaElement<int>("Id"));
    schema.Elements.Add(new SchemaElement<DateTime>("Timestamp"));
    schema.Elements.Add(new SchemaElement<DateTimeOffset>("Timestamp2"));
    schema.Elements.Add(new SchemaElement<string>("Message"));
    schema.Elements.Add(new SchemaElement<byte[]>("Data"));
    schema.Elements.Add(new SchemaElement<bool>("IsDeleted"));
    schema.Elements.Add(new SchemaElement<float>("Amount"));
    schema.Elements.Add(new SchemaElement<decimal>("TotalAmount"));
    schema.Elements.Add(new SchemaElement<long>("Counter"));
    schema.Elements.Add(new SchemaElement<double>("Amount2"));
    schema.Elements.Add(new SchemaElement<byte>("Flag"));
    schema.Elements.Add(new SchemaElement<sbyte>("Flag2"));
    schema.Elements.Add(new SchemaElement<short>("Flag3"));
    schema.Elements.Add(new SchemaElement<ushort>("Flag4"));

    // First write: append = false creates the file.
    var ds1 = new DataSet(schema);
    ds1.Add(1, DateTime.Now, DateTimeOffset.Now, "Record1", System.Text.Encoding.ASCII.GetBytes("SomeData"), false, 123.4f, 200M, 100000L, 1331313D, (byte)1, (sbyte)-1, (short)-500, (ushort)500);
    ds1.Add(1, DateTime.Now, DateTimeOffset.Now, "Record2", System.Text.Encoding.ASCII.GetBytes("SomeData2"), false, 124.4f, 300M, 200000L, 2331313D, (byte)2, (sbyte)-2, (short)-400, (ushort)400);
    ParquetWriter.Write(ds1, ms, CompressionMethod.Snappy, null, null, false);

    // Second write: append = true must succeed for every type above.
    var ds2 = new DataSet(schema);
    ds2.Add(1, DateTime.Now, DateTimeOffset.Now, "Record3", System.Text.Encoding.ASCII.GetBytes("SomeData3"), false, 125.4f, 400M, 300000L, 3331313D, (byte)3, (sbyte)-3, (short)-600, (ushort)600);
    ds2.Add(1, DateTime.Now, DateTimeOffset.Now, "Record4", System.Text.Encoding.ASCII.GetBytes("SomeData4"), false, 126.4f, 500M, 400000L, 4331313D, (byte)4, (sbyte)-4, (short)-700, (ushort)700);
    ParquetWriter.Write(ds2, ms, CompressionMethod.Snappy, null, null, true);
}
/// <summary>
/// Verifies that a null DateTime cell survives a write/read round trip
/// (three rows, the last with a null date).
/// </summary>
public void Datetime_as_null_writes()
{
    // Schema: a string key plus a DateTime column.
    var schemaElements = new List<Data.SchemaElement>
    {
        new SchemaElement<string>("primary-key"),
        new SchemaElement<DateTime>("as-at-date")
    };
    var ds = new DataSet(schemaElements);

    // Two rows with real dates, one with null in the date column.
    ds.Add(new Row(new List<object> { Guid.NewGuid().ToString(), DateTime.UtcNow.AddDays(-5) }));
    ds.Add(new Row(new List<object> { Guid.NewGuid().ToString(), DateTime.UtcNow }));
    ds.Add(new Row(new List<object> { Guid.NewGuid().ToString(), null }));

    DataSet dsRead = DataSetGenerator.WriteRead(ds);
    Assert.Equal(3, dsRead.RowCount);
}
/// <summary>
/// Writes a two-column data set with Snappy compression and reads it back,
/// verifying every cell.
/// </summary>
public void I_can_write_snappy_and_read_back()
{
    var ms = new MemoryStream();

    var ds1 = new DataSet(
        new DataField<int>("id"),
        new DataField<int>("no"));
    ds1.Add(1, 3);
    ds1.Add(2, 4);

    DataSet ds2;

    //write
    using (var writer = new ParquetWriter(ms))
    {
        writer.Write(ds1, CompressionMethod.Snappy);
    }

    //read back
    // Fix: rewind BEFORE constructing the reader; the original reset the
    // position only after the reader was created, so any validation done in
    // the reader's constructor would have seen the stream at its end.
    ms.Position = 0;
    using (var reader = new ParquetReader(ms))
    {
        ds2 = reader.Read();
    }

    Assert.Equal(1, ds2[0].GetInt(0));
    Assert.Equal(2, ds2[1].GetInt(0));
    Assert.Equal(3, ds2[0].GetInt(1));
    Assert.Equal(4, ds2[1].GetInt(1));
}
/// <summary>
/// Builds an in-memory demo dataset (x, sin x, cos x over one shared
/// dimension), attaches visualization hints, and binds it to the visualizer.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    // Three variables sharing one dimension, held in memory.
    DataSet dataSet = DataSet.Open("msds:memory");
    dataSet.Add<double[]>("x");
    dataSet.Add<double[]>("sin");
    dataSet.Add<double[]>("cos");

    // Append samples point by point. (Filling arrays and calling PutData once
    // would be faster; this per-point loop is kept for clarity.)
    for (double angle = 0; angle < 2 * Math.PI; angle += 0.01)
    {
        dataSet.Append("x", angle);
        dataSet.Append("sin", Math.Sin(angle));
        dataSet.Append("cos", Math.Cos(angle));
    }

    // Hint at the preferred rendering for each curve.
    dataSet.PutAttr("sin", "VisualHints", "Style:Polyline(x); Stroke:Orange; Thickness:3");
    dataSet.PutAttr("cos", "VisualHints", "Style:Markers(x); Marker:Circle; Color:Blue; Size:10");
    dataSet.Commit();

    // Hand the committed data to the visualizer control.
    dsvc.DataSet = dataSet;
}
/// <summary>
/// Builds nested sequences with multiple items, stores a value deep inside,
/// and verifies that Element.GetPath() addresses the same element back
/// through the data set indexer.
/// </summary>
public void GetPathTest()
{
    DataSet dicom = new DataSet();
    string value = "1.1.2.3";

    Sequence sequence = new Sequence(t.ScheduledProcedureStepSequence);
    dicom.Add(sequence);

    // NewItem is deliberately called twice: only the SECOND item is
    // populated, so the generated path must include the item index.
    Elements item = sequence.NewItem();
    item = sequence.NewItem();
    item.Add(t.Modality, null);
    item.Add(t.RequestedContrastAgent, null);
    item.Add(t.ScheduledProcedureStepStartDate, null);
    item.Add(t.ScheduledProcedureStepStartTime, null);
    item.Add(t.ScheduledProcedureStepDescription, null);
    item.Add(t.ScheduledProcedureStepID, null);

    // Nested sequence inside the second item; here THREE items are created
    // and only the last one is populated.
    sequence = new Sequence(t.ScheduledProtocolCodeSequence);
    item.Add(sequence);
    item = sequence.NewItem();
    item = sequence.NewItem();
    item = sequence.NewItem();
    item.Add(t.CodeValue, null);
    item.Add(t.CodingSchemeDesignator, null);
    Element element = item.Add(t.CodingSchemeVersion, value);
    item.Add(t.CodeMeaning, null);

    dicom.Add(t.RequestedProcedureID, null);
    dicom.Add(t.RequestedProcedurePriority, null);

    // The path produced for the deeply nested element must resolve to the
    // element holding `value` when used as an indexer key.
    string path = element.GetPath();
    Assert.AreEqual(value, dicom[path].Value.ToString());
}
/// <summary>
/// Records the salient parts of <paramref name="exception"/> into the error
/// collection and marks that at least one error has occurred.
/// </summary>
public void Insert(Exception exception)
{
    _errors.Add(nameof(Exception.HResult), exception.HResult);
    _errors.Add(nameof(Exception.Message), exception.Message);
    _errors.Add(nameof(Exception.StackTrace), exception.StackTrace);
    _anyErrors = true;
}
/// <summary>
/// Records both LP-format model strings into the experiments collection and,
/// duplicated, into the math-models collection.
/// </summary>
public void Insert(MathModel mathModel)
{
    _experiments.Add(nameof(MathModel.SynthesizedModelInLpFormat), mathModel.SynthesizedModelInLpFormat);
    _experiments.Add(nameof(MathModel.ReferenceModelInLpFormat), mathModel.ReferenceModelInLpFormat);
    _mathModels.Add(nameof(MathModel.SynthesizedModelInLpFormat), mathModel.SynthesizedModelInLpFormat);
    _mathModels.Add(nameof(MathModel.ReferenceModelInLpFormat), mathModel.ReferenceModelInLpFormat);
}
/// <summary>
/// Concatenates DICOM file meta information and DICOM data set to one
/// and returns it. Aim of this method is to ease the use of
/// <see cref="AcrNemaFile" /> and <see cref="DicomFile" /> without
/// differentiation. But be careful! Resulting data set will always
/// use the default transfer syntax!
/// </summary>
/// <remarks>
/// If an exception occurs because of duplicate key ids
/// (DICOM tags), DICOM tag uniqueness over all data sets
/// will not be given. This exception is not supposed to be
/// thrown: DICOM data sets for concatenation are supposed to
/// complement one another. The differences in transfer syntaxes
/// cannot be processed; thus, the default transfer syntax is
/// defined for concatenation.
/// </remarks>
public override DataSet GetJointDataSets()
{
    // Meta information first, then the main data set; both are copied into a
    // fresh set carrying the default transfer syntax.
    DataSet dataSet = new DataSet();
    dataSet.Add(MetaInformation);
    dataSet.Add(DataSet);
    return(dataSet);
}
/// <summary>
/// Records the run's start time and implementation version into both the
/// experiments and versions collections.
/// </summary>
public void Insert(Version version)
{
    _experiments.Add(nameof(Version.StartDateTime), version.StartDateTime);
    // NOTE(review): ImplementationVersion is accessed via the TYPE
    // (Version.ImplementationVersion), not the instance — presumably a static
    // member; confirm this is intentional rather than a missed lowercase `v`.
    _experiments.Add(nameof(Version.ImplementationVersion), Version.ImplementationVersion);
    _versions.Add(nameof(Version.StartDateTime), version.StartDateTime);
    _versions.Add(nameof(Version.ImplementationVersion), Version.ImplementationVersion);
}
/// <summary>Smoke test: a one-column, two-row data set survives a write/read round trip.</summary>
public void Simplest_write_read()
{
    var ds = new DataSet(new DataField<int>("id"));
    foreach (int id in new[] { 1, 2 })
    {
        ds.Add(id);
    }
    ds.WriteRead();
}
/// <summary>Smoke test: a one-column, two-row data set survives a write/read round trip.</summary>
public void Simplest_write_read()
{
    var ds = new DataSet(new SchemaElement<int>("id"));
    foreach (int id in new[] { 1, 2 })
    {
        ds.Add(id);
    }
    DataSetGenerator.WriteRead(ds);
}
/// <summary>
/// Records the run's start time, implementation version and parameter hash
/// into both the experiments and versions collections.
/// </summary>
public void Insert(Version version)
{
    _experiments.Add(nameof(Version.StartDateTime), version.StartDateTime);
    // ImplementationVersion is read from the type (static member); the hash
    // comes from the instance.
    _experiments.Add(nameof(Version.ImplementationVersion), Version.ImplementationVersion);
    _experiments.Add(nameof(Version.ExperimentParametersHashString), version.ExperimentParametersHashString);
    _versions.Add(nameof(Version.StartDateTime), version.StartDateTime);
    _versions.Add(nameof(Version.ImplementationVersion), Version.ImplementationVersion);
    _versions.Add(nameof(Version.ExperimentParametersHashString), version.ExperimentParametersHashString);
}
/// <summary>
/// Builds an IMAGE-level C-FIND query data set with the standard return keys,
/// then applies any tag-path keys supplied by the caller.
/// </summary>
/// <param name="args">Optional map of tag-path keys (e.g. "(0008,0018)") to match values; may be null.</param>
/// <returns>The populated query data set.</returns>
public static DataSet GetImageQuery(Dictionary<string, string> args)
{
    var query = new DataSet();

    // Null values request universal matching (return the attribute).
    query.Add(t.SpecificCharacterSet, null);
    query.Add(t.SOPClassUID, null);
    query.Add(t.SOPInstanceUID, null);
    query.Add(t.AcquisitionDate, null);
    query.Add(t.AcquisitionTime, null);
    query.Add(t.QueryRetrieveLevel, "IMAGE");
    query.Add(t.StudyInstanceUID, null);
    query.Add(t.SeriesInstanceUID, null);
    query.Add(t.InstanceNumber, null);

    if (args != null)
    {
        // Only keys that look like DICOM tag paths are applied to the query.
        foreach (var entry in args)
        {
            if (entry.Key.Contains("("))
            {
                query[entry.Key].Value = entry.Value;
            }
        }
    }

    return query;
}
/// <summary>Round-trips a single non-nullable boolean column with three rows.</summary>
public void Run_perfect_expressive_boolean_column()
{
    var ds = new DataSet(new Schema(new DataField("id", DataType.Boolean, false, false)));
    foreach (bool flag in new[] { true, false, true })
    {
        ds.Add(flag);
    }
    DataSet ds1 = ds.WriteRead();
}
/// <summary>
/// Parses a binary (P5) PGM stream and returns a DICOM data set containing
/// the image dimensions, photometric interpretation and pixel data.
/// </summary>
/// <remarks>
/// PGM header layout: "P5", whitespace, width, whitespace, height,
/// whitespace, maxval, one whitespace, then Width*Height samples
/// top-to-bottom. Each sample is 1 byte if maxval is less than 256,
/// otherwise 2 bytes MSB-first (maxval must be below 65536).
/// </remarks>
/// <param name="stream">Open PGM file positioned at the start.</param>
/// <returns>The populated data set; empty if the header does not match.</returns>
private unsafe static DataSet ReadPgm(FileStream stream)
{
    // "end" captures the single whitespace after maxval so the raster offset
    // is known exactly.
    string pattern = @"(?<magic>P5)\s+(?<width>\d+)\s+(?<height>\d+)\s+(?<end>((?<maximum>\d+)\s))";

    // The header always fits well within the first 1 KB.
    int size = 1024;
    BinaryReader reader = new BinaryReader(stream);
    byte[] chunk = reader.ReadBytes(size);
    string header = Encoding.ASCII.GetString(chunk).ToUpper();

    DataSet dicom = new DataSet();
    Match match = Regex.Match(header, pattern);
    if (match.Success)
    {
        ImageAttributes attributes = new ImageAttributes(TypeCode.UInt16, null, 0, 0, 0, 12, 168, true);
        attributes.width = int.Parse(match.Groups["width"].ToString());
        attributes.height = int.Parse(match.Groups["height"].ToString());
        int maximum = int.Parse(match.Groups["maximum"].ToString());
        // Fix: the PGM spec says a sample is one byte only when maxval is
        // LESS THAN 256. The previous `maximum <= 256` misread files with
        // maxval == 256 (which use 2-byte samples) as 8-bit.
        attributes.bitsperpixel = (maximum < 256) ? 8 : 12;

        // Seek to the first raster byte, just past the matched header.
        int pos = match.Groups["end"].Index + match.Groups["end"].Length;
        stream.Seek(pos, SeekOrigin.Begin);

        dicom.Add(t.Columns, attributes.width);
        dicom.Add(t.Rows, attributes.height);
        dicom.Add(t.PhotometricInterpretation, "MONOCHROME1");
        ReadBytes(stream, dicom, attributes);
    }
    return (dicom);
}
/// <summary>
/// Builds a random 3-D Int16 grid over (R, Z, T) axes and writes it to a
/// NetCDF file created fresh for the test.
/// </summary>
public void TestMethod1()
{
    const int rCount = 360;
    const int zCount = 126;
    const int tCount = 24 * 60;

    var grid = new Int16[rCount, zCount, tCount];
    var R = new int[rCount];
    var Z = new int[zCount];
    var T = new DateTime[tCount];

    // Fill the grid with random values and the R axis with indices.
    Random rnd = new Random();
    for (int k = 0; k < rCount; k++)
    {
        R[k] = k;
        for (int i = 0; i < zCount; i++)
        {
            for (int j = 0; j < tCount; j++)
            {
                grid[k, i, j] = (Int16)rnd.Next(1024);
            }
        }
    }

    // One time stamp per minute of 2012-01-01; Z axis is plain indices.
    DateTime epoch = new DateTime(2012, 1, 1);
    for (int j = 0; j < tCount; j++)
    {
        T[j] = epoch.AddMinutes(j);
    }
    for (int i = 0; i < zCount; i++)
    {
        Z[i] = i;
    }

    // Create the file and attach axes plus the gridded variable.
    DataSet ds = DataSet.Open(NetCDFFileName + "?openMode=create");
    ds.Add("R", "degrees", R, "R");
    ds.Add("Z", "distance", Z, "Z");
    ds.Add("T", T, "T");
    ds.Add("values", "count", grid, "R", "Z", "T");
    ds.Commit();
    ds.Dispose();
}
/// <summary>
/// Builds a small three-column, three-row data set of string values for use
/// in tests.
/// </summary>
private DataSet generateTestData()
{
    var ds = new DataSet(new String[] { "text", "numeric", "dec" });
    ds.Add(new Object[] { "One", "1", "0.1" });
    ds.Add(new Object[] { "Two", "2", "0.2" });
    ds.Add(new Object[] { "Three", "3", "0.3" });
    return ds;
}
/// <summary>
/// Set the Default Tag Type List for this Entity (STUDY level), then add the
/// Query Retrieve Level attribute to the data set.
/// </summary>
protected override void SetDefaultTagTypeList()
{
    TagTypeList.Add(new TagType(Tag.SPECIFIC_CHARACTER_SET, TagTypeEnum.TagOptional));

    // Required study-level return keys.
    foreach (Tag required in new[] { Tag.STUDY_DATE, Tag.STUDY_TIME, Tag.ACCESSION_NUMBER, Tag.STUDY_ID })
    {
        TagTypeList.Add(new TagType(required, TagTypeEnum.TagRequired));
    }

    // The unique key at STUDY level.
    TagTypeList.Add(new TagType(Tag.STUDY_INSTANCE_UID, TagTypeEnum.TagUnique));

    // Optional study-level keys, in the original declaration order.
    foreach (Tag optional in new[]
    {
        Tag.MODALITIES_IN_STUDY,
        Tag.REFERRING_PHYSICIANS_NAME,
        Tag.STUDY_DESCRIPTION,
        Tag.PROCEDURE_CODE_SEQUENCE,
        Tag.NAME_OF_PHYSICIANS_READING_STUDY,
        Tag.ADMITTING_DIAGNOSIS_DESCRIPTION,
        Tag.REFERENCED_STUDY_SEQUENCE,
        Tag.PATIENTS_AGE,
        Tag.PATIENTS_SIZE,
        Tag.PATIENTS_WEIGHT,
        Tag.OCCUPATION,
        Tag.ADDITIONAL_PATIENT_HISTORY,
        Tag.OTHER_STUDY_NUMBERS,
        Tag.NUMBER_OF_STUDY_RELATED_SERIES,
        Tag.NUMBER_OF_STUDY_RELATED_INSTANCES,
        Tag.INTERPRETATION_AUTHOR
    })
    {
        TagTypeList.Add(new TagType(optional, TagTypeEnum.TagOptional));
    }

    // Add the Query Retrieve Level Attribute (0008,0052) = "STUDY".
    DvtkData.Dimse.Attribute attribute = new DvtkData.Dimse.Attribute(0x00080052, VR.CS, "STUDY");
    DataSet.Add(attribute);
}
/// <summary>
/// Round-trips a row containing a structure holding a list of structures that
/// themselves contain lists, then checks the flattened string form.
/// </summary>
public void Structure_in_a_list_in_a_structure_of_lists_writes_reads()
{
    var ds = new DataSet(
        new DataField<string>("name"),
        new ListField("addresses",
            new StructField("address",
                new ListField("lines",
                    new StructField("first",
                        new DataField<int>("one"))))));

    ds.Add(
        "Ivan", // name
        new Row[] // addresses
    {
        new Row // addresses.address
        (
            // NOTE(review): this `true` has no matching field in the schema
            // above and does not appear in the expected output string —
            // presumably consumed by Row/struct packing internals; confirm.
            true,
            new Row[] // addresses.address.lines
            {
                new Row // addresses.address.lines.first
                (
                    1 // addresses.address.lines.first.one
                )
            }
        )
    });

    DataSet ds1 = DataSetGenerator.WriteRead(ds);

    // Flattened form: {name;[{[{one}]}]}
    Assert.Equal("{Ivan;[{[{1}]}]}", ds1[0].ToString());
}
/// <summary>
/// Verifies that each compression method round-trips in both the v2 writer
/// (stream API) and the v3 single-value helper.
/// </summary>
/// <param name="compressionMethod">The compression method under test.</param>
public void All_compression_methods_supported(CompressionMethod compressionMethod)
{
    //v2
    var ms = new MemoryStream();
    DataSet ds1 = new DataSet(new DataField<int>("id"));
    DataSet ds2;
    ds1.Add(5);

    //write
    using (var writer = new ParquetWriter(ms))
    {
        // Fix: use the method under test — the original hard-coded
        // CompressionMethod.Gzip here, so the v2 path never actually
        // exercised the parameter this test is named for.
        writer.Write(ds1, compressionMethod);
    }

    //read back
    using (var reader = new ParquetReader(ms))
    {
        ms.Position = 0;
        ds2 = reader.Read();
    }
    Assert.Equal(5, ds2[0].GetInt(0));

    //v3
    const int value = 5;
    object actual = WriteReadSingle(new DataField<int>("id"), value, compressionMethod);
    Assert.Equal(5, (int)actual);
}
/// <summary>
/// Writes a ten-row data set of random person/location data to a parquet
/// file and logs how long the whole operation took.
/// </summary>
static void Main(string[] args)
{
    L.Config.WriteTo.PoshConsole();

    using (var time = new TimeMeasure())
    {
        // Four-column schema: id, name, lat, lon.
        var ds = new DataSet(
            new SchemaElement<int>("id"),
            new SchemaElement<string>("name"),
            new SchemaElement<double>("lat"),
            new SchemaElement<double>("lon"));

        log.Trace(ds.Schema.ToString());

        for (int row = 0; row < 10; row++)
        {
            ds.Add(
                row,
                NameGenerator.GeneratePersonFullName(),
                Generator.RandomDouble,
                Generator.RandomDouble);
        }

        ParquetWriter.WriteFile(ds, "c:\\tmp\\perf.parquet");
        log.Trace("written in {0}", time.Elapsed);
    }
}
/// <summary>
/// Converts a list of supervised patterns — comma-separated numeric strings
/// in Entrada (input) and Saída (output) — into a training DataSet.
/// Patterns that are null or have an empty input/output are skipped.
/// </summary>
private DataSet GetTrainingSet(List<SupervisedPattern> list)
{
    DataSet ds = new DataSet(inputNumber, outputNumber);

    foreach (SupervisedPattern p in list)
    {
        // Skip incomplete patterns (same filter as before, written with
        // string.IsNullOrEmpty).
        if (p == null || string.IsNullOrEmpty(p.Entrada) || string.IsNullOrEmpty(p.Saída))
        {
            continue;
        }

        double[] outputValues = _ParseVector(p.Saída, outputNumber);
        double[] inputValues = _ParseVector(p.Entrada, inputNumber);
        ds.Add(new DataSetObject(inputValues, outputValues));
    }

    return ds;
}

// Parses a comma-separated list of numbers (decimal point ".") into a
// fixed-size vector. Fix: the original replaced "." with "," and relied on
// Convert.ToDouble with the CURRENT culture, which only works on
// comma-decimal cultures; invariant parsing works everywhere. Extra or
// missing fields beyond `size` no longer throw IndexOutOfRangeException.
private static double[] _ParseVector(string csv, int size)
{
    double[] values = new double[size];
    string[] parts = csv.Split(',');
    for (int i = 0; i < size && i < parts.Length; i++)
    {
        values[i] = double.Parse(parts[i], System.Globalization.CultureInfo.InvariantCulture);
    }
    return values;
}
/// <summary>
/// Builds a tiny incident dataset with one response attached, serializes it
/// to indented JSON (preserving references and type names), prints it, and
/// reports success.
/// </summary>
public static string testserialize(List<string> args)
{
    var test = new DataSet<IncidentData>();
    test.Data["Test"] = "Test";

    // One response attached to one incident.
    var response = new ResponseData();
    response.Id = "Moo";
    response.Data.Add("Unit", "Also Moo");
    response.Data.Add("UnitType", "Cow");
    response.Data.Add("Shift", "Night Shift");

    var incident = new IncidentData();
    incident.Id = "Moo";
    incident.Time = DateTime.Now;
    incident.Responses.Add(response);
    test.Add(incident);

    // Reference preservation + type names so the round trip keeps identity.
    var settings = new JsonSerializerSettings()
    {
        Formatting = Formatting.Indented,
        TypeNameHandling = TypeNameHandling.Auto,
        PreserveReferencesHandling = PreserveReferencesHandling.All
    };
    string json = JsonConvert.SerializeObject(test, test.GetType(), settings);
    Console.WriteLine(json);
    return "Test passed.";
}
/// <summary>
/// Removes a polynomial trend of degree <c>Order</c> from the data and pushes
/// the corresponding inverse transformation onto the stack.
/// </summary>
/// <param name="Data">The series to detrend; x values are the 1-based indices.</param>
/// <param name="Transforms">Stack receiving the PolyTransformation for later reversal.</param>
/// <returns>A detrended copy of the input (the input itself is not modified).</returns>
public List<double> Remove(List<double> Data, ref Stack<ITransformation> Transforms)
{
    DataSet d = new DataSet();
    for (int i = 0; i < Data.Count; i++)
    {
        d.Add((i + 1), Data[i], OBSERVED_ERROR);
    }

    double[] parameters = d.FitToPolynomial((int)this.Order).Parameters();
    List<double> detrendedData = (List<double>)Utilities.DeepClone(Data);
    double lastX = 0.0;

    for (int i = 0; i < Data.Count; i++)
    {
        double x = i + 1;
        // Evaluate the fitted polynomial at x by summing EVERY returned
        // coefficient. Fix: the original gated each power on `Order == k`,
        // so e.g. a cubic fit (Order == 3) silently dropped its x^2 term and
        // a quartic dropped x^2 and x^3 — the subtraction left residual trend.
        double regression = 0.0;
        for (int j = 0; j < parameters.Length; j++)
        {
            regression += parameters[j] * Math.Pow(x, j);
        }

        detrendedData[i] -= regression;
        lastX = x;
    }

    Transforms.Push(new PolyTransformation(lastX, parameters));
    return detrendedData;
}
//Chris Han add DeviceId to Json
/// <summary>
/// Serializes the data set to JSON. With AggravatedSerialization defined, the
/// whole set becomes one JSON string in a fresh single-record output set;
/// otherwise each record gets its device id stamped in and its own JSON
/// attached, and the (mutated) input set is returned.
/// </summary>
public async Task<DataSet> Process(DataSet dataset, CancellationToken ct, string DeviceId)
{
#if AggravatedSerialization
    //Serialize the whole dataset as single json string
    // Serialization is offloaded to the thread pool; ct cancels the task.
    var json = await Task.Factory.StartNew(
        new Func<object, string>(JsonConvert.SerializeObject), dataset, ct);

    var data = new Data();
    data.Add("stringContent", json);

    var output = new DataSet();
    output.Add(data);
    return output;
#else
    //Serialize each data as one json string
    foreach (var data in dataset)
    {
        data["DeviceId"] = DeviceId;//Chris han add DeviceId to Json

        var json = await Task.Factory.StartNew(
            new Func<object, string>(JsonConvert.SerializeObject), data, ct);

        // Mutates the element, not the collection being enumerated, so this
        // is safe inside the foreach.
        data.Add("stringContent", json);
    }
    return dataset;
#endif
}
/// <summary>
/// Reloads the bound collection from the data store, sorted by Id, Name,
/// then Description. Re-entrant calls while busy are ignored; failures are
/// logged and the busy flag is always cleared.
/// </summary>
private async Task ExecuteLoadItemsCommand()
{
    if (IsBusy)
    {
        return;
    }

    IsBusy = true;
    try
    {
        DataSet.Clear();
        var items = await DataStore.GetAllAsync_Item(true);

        // Stable display order: Id, then Name, then Description.
        foreach (var item in items.OrderBy(a => a.Id).ThenBy(a => a.Name).ThenBy(a => a.Description))
        {
            DataSet.Add(item);
        }
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex);
    }
    finally
    {
        IsBusy = false;
    }
}
/// <summary>
/// Reloads the bound collection from the data store. Re-entrant calls while
/// busy are ignored; failures are logged and the busy flag is always cleared.
/// </summary>
async Task ExecuteLoadItemsCommand()
{
    if (IsBusy)
    {
        return;
    }

    IsBusy = true;
    try
    {
        DataSet.Clear();
        foreach (var item in await DataStore.IndexAsync(true))
        {
            DataSet.Add(item);
        }
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex);
    }
    finally
    {
        IsBusy = false;
    }
}
/// <summary>
/// Set the Default Tag Type List for this Entity (SERIES level), including
/// any custom-configured series attributes, then add the Query Retrieve
/// Level attribute to the data set.
/// </summary>
protected override void SetDefaultTagTypeList()
{
    TagTypeList.Add(new TagType(Tag.SPECIFIC_CHARACTER_SET, VR.CS, TagTypeEnum.TagOptional));
    TagTypeList.Add(new TagType(Tag.MODALITY, VR.CS, TagTypeEnum.TagRequired));
    TagTypeList.Add(new TagType(Tag.SERIES_NUMBER, VR.IS, TagTypeEnum.TagRequired));
    TagTypeList.Add(new TagType(Tag.SERIES_INSTANCE_UID, VR.UI, TagTypeEnum.TagUnique));
    TagTypeList.Add(new TagType(Tag.NUMBER_OF_SERIES_RELATED_INSTANCES, VR.IS, TagTypeEnum.TagOptional));

    // Every custom-configured series-level attribute becomes an optional key;
    // group/element are stored as hex strings in the configuration.
    var customSeriesAttributes = CustomQueryAttributes.Instance.SeriesList;
    for (int index = 0; index < customSeriesAttributes.Count; index++)
    {
        ushort group = Convert.ToUInt16(customSeriesAttributes[index].group, 16);
        ushort element = Convert.ToUInt16(customSeriesAttributes[index].element, 16);
        TagTypeList.Add(new TagType(new Tag(group, element), TagTypeEnum.TagOptional));
    }

    // Conditional Specific Character Set at this level — used purely so the
    // C-FIND-RSP dataset reports the correct value for that attribute.
    TagTypeList.Add(new TagType(Tag.SPECIFIC_CHARACTER_SET, VR.CS, TagTypeEnum.TagConditional));

    // Add the Query Retrieve Level Attribute (0008,0052) = "SERIES".
    DvtkData.Dimse.Attribute attribute = new DvtkData.Dimse.Attribute(0x00080052, VR.CS, "SERIES");
    DataSet.Add(attribute);
}
/// <summary>
/// Builds an Enrollment from the currently selected combo-box entries,
/// resets the combo boxes, and persists the new record.
/// </summary>
public override void SubmitAdd()
{
    // NOTE(review): each `as` cast below yields null when nothing is
    // selected, which then throws NullReferenceException on .Entry — confirm
    // a selection is guaranteed before this handler can fire.
    ListboxEntry <Person> selectedPerson = Options.PersonComboBox.SelectedItem as ListboxEntry <Person>;
    int personKey = selectedPerson.Entry.Id;
    ListboxEntry <Course> selectedCourse = Options.CourseComboBox.SelectedItem as ListboxEntry <Course>;
    int courseKey = selectedCourse.Entry.Id;
    ListboxEntry <Semester> selectedSemester = Options.SemesterComboBox.SelectedItem as ListboxEntry <Semester>;
    int semesterKey = selectedSemester.Entry.Id;
    ListboxEntry <Section> selectedSection = Options.SectionComboBox.SelectedItem as ListboxEntry <Section>;
    int sectionKey = selectedSection.Entry.Id;
    ListboxEntry <Grade> selectedGrade = Options.GradeComboBox.SelectedItem as ListboxEntry <Grade>;
    int gradeKey = selectedGrade.Entry.Id;

    // Assemble the record from the selected foreign keys.
    Enrollment enrollment = new Enrollment()
    {
        Person_ID = personKey,
        Semester = semesterKey,
        Course_ID = courseKey,
        Section_ID = sectionKey,
        FinalGrade_ID = gradeKey
    };

    // NOTE(review): `defaultIndex` is assigned to SelectedItem, not
    // SelectedIndex — verify defaultIndex is actually the default item
    // object rather than an integer index.
    Options.PersonComboBox.SelectedItem = defaultIndex;
    Options.CourseComboBox.SelectedItem = defaultIndex;
    Options.SemesterComboBox.SelectedItem = defaultIndex;
    Options.SectionComboBox.SelectedItem = defaultIndex;
    Options.GradeComboBox.SelectedItem = defaultIndex;

    DataSet.Add(enrollment);
    SaveChanges();
}
/// <summary>
/// Reads a CSV file of session records (name, title, start, end) into a
/// DataSet. Lines that fail to parse are silently skipped.
/// </summary>
private static DataSet<SessionDuration> LoadDataSet(
    string fileName,
    SessionDurationSource dataSource)
{
    var dataSet = new DataSet<SessionDuration>(dataSource);

    using (var reader = GetStreamReader(fileName))
    {
        while (!reader.EndOfStream)
        {
            var fields = reader.ReadLine()?.Split(',');
            try
            {
                dataSet.Add(new SessionDuration(
                    fields[0],
                    fields[1],
                    DateTime.Parse(fields[2]),
                    DateTime.Parse(fields[3])));
            }
            catch
            {
                // ignored — malformed lines (missing columns, bad dates) are skipped
            }
        }
    }

    return dataSet;
}
#pragma warning disable 1998
/// <summary>
/// Reads one record from the configured generator, stamps it with the
/// current UTC time, and wraps it in a single-record data set. Returns null
/// when no generator is configured.
/// </summary>
public async Task<DataSet> Read(CancellationToken ct)
{
    // No generator configured — nothing to read.
    if (_generator == null)
    {
        return null;
    }

    var data = _generator.Read();
    data.Add("utcTimestamp", DateTime.UtcNow);

    var output = new DataSet();
    output.Add(data);
    return output;
}
/// <summary>
/// Lazily loads every configured XML data set from ~/App_Data (once, under a
/// lock) and merges them into a single cached DataSet.
/// </summary>
/// <param name="server">Server utility used to resolve the App_Data path.</param>
/// <returns>The merged, cached data set.</returns>
public static DataSet LoadDataSet(HttpServerUtilityBase server)
{
    if (dataSet == null)
    {
        lock (loadLock)
        {
            if (dataSet == null)
            {
                // Fix: build into locals and publish only when fully
                // populated. The original assigned the `dataSet` field before
                // adding any child sets, so threads passing the unsynchronized
                // outer null-check could observe a half-loaded set.
                var merged = new DataSet();
                var loaded = new List<DataSet>();
                foreach (var ds in Directory.GetFiles(server.MapPath("~/App_Data"), "*.xml")
                    .Where(p => DataSetNames.Any(name => name == Path.GetFileNameWithoutExtension(p)))
                    .Select(p => LoadDataSet(p)))
                {
                    loaded.Add(ds);
                    merged.Add(ds);
                }
                dataSets = loaded;
                // NOTE(review): for full double-checked-locking safety the
                // `dataSet` field should also be declared volatile — confirm.
                dataSet = merged;
            }
        }
    }
    return dataSet;
}
/// <summary>
/// Keeps receiving commands from the transport until a non-OK status is
/// returned, collecting each snapshot of the receive buffer as a record.
/// </summary>
public async Task<DataSet> Read(CancellationToken ct)
{
    var output = new DataSet();

    using (var stream = new MemoryStream())
    {
        while (true)
        {
            var status = await _transport.ReceiveCommand(stream, ct);
            if (status != HttpStatusCode.OK)
            {
                break;
            }

            // The stream is shared across iterations, so each record carries
            // everything received so far.
            var record = new Data();
            record.Add("bytesContent", stream.ToArray());
            output.Add(record);
        }
    }

    return output;
}
/// <summary>
/// Entry point of the wiki export parser: parses command-line options, loads
/// the wiki export, runs the requested commands, and serializes the resulting
/// data sets (one file per set, or one merged file with /unique).
/// </summary>
private static void Main(string[] args)
{
    Console.WriteLine("Pathfinder-fr Wiki Export Parser v{0}", typeof (Program).Assembly.GetName().Version);

    // Discover all available commands (extra debug-only commands in DEBUG builds).
    allCommands = new List<ICommand>();
    allCommands.AddRange(CommandLoader.LoadCommandFromAssemblyOf(typeof (ICommand)));
#if DEBUG
    allCommands.AddRange(CommandLoader.LoadCommandFromAssemblyOf(typeof (ScanConjurateurCommand)));
#endif

    // Usage: <xmlPath> <xmlOut> <command> [extra commands] [/option[:value]]
    if (args == null || args.Length < FixedArgs || args.Any(x => x.Equals("/help", StringComparison.OrdinalIgnoreCase)))
    {
        ShowHelp();
        return;
    }

    Console.WriteLine();

    var xmlPath = args[0];
    var xmlOut = args[1];
    var commandName = args[2];

    if (!Directory.Exists(xmlPath))
    {
        Console.WriteLine("ERREUR: Le dossier spécifié {0} n'existe pas", xmlPath);
        return;
    }

    // Split the remaining arguments into /name:value options and extra
    // command names. "/flag" (no value, or malformed value) maps to empty.
    var options = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    var commandNames = new List<string>();
    foreach (var arg in args.Skip(FixedArgs))
    {
        if (arg[0] == '/')
        {
            if (arg.Length > 1)
            {
                var i = arg.IndexOf(':');
                if (i == -1 || i == 1 || i == arg.Length - 1)
                {
                    options.Add(arg.Substring(1), string.Empty);
                }
                else
                {
                    options.Add(arg.Substring(1, i - 1), arg.Substring(i + 1));
                }
            }
        }
        else
        {
            commandNames.Add(arg);
        }
    }

    Console.WriteLine("Commandes à exécuter :");
    var commands = LoadCommands(allCommands, commandName, commandNames);
    foreach (var command in commands)
    {
        Console.WriteLine("- {0}", command.Alias);
    }

    Console.WriteLine("Chargement de l'export wiki...");
    var export = new WikiExport();
    export.Load(xmlPath);

    ILog log = new ConsoleLog();
    FileLog fileLog = null;
    try
    {
        var dataSets = new DataSetCollection();
        dataSets.Lang = "fr-FR";

        // Optional /log:<file> duplicates logging into a file.
        string logFileName;
        if (options.TryGetValue("log", out logFileName))
        {
            fileLog = new FileLog(logFileName);
            log = new CombineLog(log, fileLog);
        }

        // Run each command against the shared data set collection.
        foreach (var command in commands)
        {
            log.Information("Exécution commande {0}", command.Alias);
            command.Wiki = export;
            command.Log = log;
            command.Execute(dataSets);
        }

        // Serialisation
        var directory = Path.GetDirectoryName(xmlOut);
        if (!string.IsNullOrWhiteSpace(directory) && !Directory.Exists(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var writers = new IDataSetWriter[]
        {
            new XmlDataSetWriter(),
            new JsonDataSetWriter(),
            new CsvDataSetWriter(),
            new XmlSingleDataSetWriter()
        };

        if (options.Any(o => string.Equals(o.Key, "unique", StringComparison.OrdinalIgnoreCase)))
        {
            // un seul fichier — merge every data set into one before writing.
            var mergedDataSet = new DataSet();
            foreach (var dataSet in dataSets.DataSets)
            {
                mergedDataSet.Add(dataSet.Value);
            }
            foreach (var writer in writers.Where(w => w.Accept("", mergedDataSet, options)))
            {
                writer.Write("", mergedDataSet, xmlOut);
            }
        }
        else
        {
            // un fichier par dataset
            foreach (var dataSet in dataSets.DataSets)
            {
                foreach (var writer in writers.Where(w => w.Accept(dataSet.Key, dataSet.Value, options)))
                {
                    writer.Write(dataSet.Key, dataSet.Value, xmlOut);
                }
            }
        }
    }
    finally
    {
        // The file log (if any) must be flushed/closed even on failure.
        if (fileLog != null)
        {
            fileLog.Dispose();
        }
    }

#if DEBUG
    Console.WriteLine("Press any key to exit...");
    Console.ReadKey();
#endif
}
/// <summary>
/// Reads every remaining line from <c>streamReader</c>, splits each
/// eight-column row, and fills the file's channel-1 and channel-2 data sets.
/// Blank lines are skipped.
/// </summary>
/// <param name="file">The file receiving the parsed channel data.</param>
/// <exception cref="FormatException">A non-blank line does not have exactly eight entries.</exception>
private void LoadAllData(LocateFile file)
{
    DataSet channel1Data = new DataSet();
    DataSet channel2Data = new DataSet();

    string line;
    int lineNumber = 0;
    while ((line = streamReader.ReadLine()) != null)
    {
        lineNumber++;
        string trimmedLine = line.Trim();
        if (trimmedLine.Length == 0)
            continue;

        int[] tokens = TokenizeData(trimmedLine);
        if (tokens.Length != 8)
        {
            // Fix: a specific exception type with location context, instead
            // of the bare `new Exception("Invalid number of data entries")`.
            throw new FormatException(
                string.Format("Invalid number of data entries ({0}, expected 8) on line {1}", tokens.Length, lineNumber));
        }

        // Both channels share the same axial position (token 6); compute once.
        var axialPosition = CalculateAxialPosition(tokens[6], file);
        channel1Data.Add(new Datum(new Vector(tokens[0], tokens[1]), axialPosition));
        channel2Data.Add(new Datum(new Vector(tokens[2], tokens[3]), axialPosition));
    }

    file.Channel1Data = channel1Data;
    file.Channel2Data = channel2Data;
}