/// <summary>
/// Initializes the four control points with the symmetric 1-4-4-1 weights.
/// </summary>
public Sample()
{
    _points = new Sample2[]
    {
        new Sample2(1.0),
        new Sample2(4.0),
        new Sample2(4.0),
        new Sample2(1.0),
    };
}
/// <summary>
/// Round-trips a two-property object through the protobuf serializer and
/// verifies the exact wire bytes as well as the deserialized values.
/// </summary>
public void TestProtobufSimpleProps()
{
    var source = new Sample2 { X = 150, Y = "test" };
    var serializer = new ProtobufSerializer();
    var bytes = serializer.ToBytes(source);

    // Field 1: varint 150. Field 2: length-delimited "test".
    var expected = new byte[] { 0x08, 0x96, 0x01, 0x12, 0x04, (byte)'t', (byte)'e', (byte)'s', (byte)'t' };
    CollectionAssert.AreEqual(expected, bytes);

    var roundTripped = new Sample2();
    new ProtobufDeserializer().FromBytes(roundTripped, bytes);
    Assert.AreEqual(source.X, roundTripped.X);
    Assert.AreEqual(source.Y, roundTripped.Y);
}
/// <summary>
/// Serializes a Sample2 to the binary format, checks the exact byte dump,
/// then deserializes it into a Sample2Allow (which permits reading data
/// written by an ancestor type) and verifies X survives.
/// </summary>
public void TestAllowReadingFromAncestor()
{
    var serializer = new BinarySerializer();
    var source = new Sample2 { X = 83, Y = "83" };
    var bytes = serializer.ToBytes(source);

    var expectedDump =
        "20 01 00 " + XS(typeof(Sample2)) + " 02 00 " +
        XS("X", RoughType.Int, "Y", RoughType.String) + " 01 00 53 00 00 00 00 00";
    Assert.AreEqual(expectedDump, XS(bytes));

    var target = new Sample2Allow();
    var deserializer = new BinaryDeserializer();
    deserializer.FromBytes(target, bytes);
    Assert.AreEqual(source.X, target.X);
}
/// <summary>
/// Instantiates a note-panel entry under the "Note"-tagged parent, fills in
/// its display info, registers it in SA, and destroys this object.
/// </summary>
/// <param name="name">Display name passed to the item.</param>
/// <param name="type">Display type passed to the item.</param>
void CreateCheckNoteItem(string name, string type)
{
    GameObject item = Instantiate(NotePanel);
    // worldPositionStays = false keeps the UI element's local layout intact.
    item.transform.SetParent(GameObject.FindGameObjectWithTag("Note").transform, false);
    item.transform.localScale = new Vector3(1, 1, 1);

    Sample2 itemObject = item.GetComponent<Sample2>();
    // Index of the last existing entry; falls back to index1 when the list is
    // empty. NOTE(review): confirm SA.Count - 1 (pre-add) is intended rather
    // than SA.Count (the new item's own index).
    int index = SA.Count > 0 ? SA.Count - 1 : index1;
    itemObject.SetObjecInfo(name, type, index);
    SA.Add(itemObject);

    // Removed dead code: an unused local copy of itemObject served no purpose.
    Destroy(Itself);
}
/// <summary>
/// Verifies that MapTo works in both directions (Sample &lt;-&gt; Sample2)
/// and remains correct on repeated use.
/// </summary>
public void Test_MapTo_3()
{
    var source = new Sample { StringValue = "a" };
    var mapped = source.MapTo<Sample2>();
    Assert.Equal("a", mapped.StringValue);

    mapped = new Sample2 { StringValue = "b" };
    source = mapped.MapTo<Sample>();
    Assert.Equal("b", source.StringValue);

    source = new Sample { StringValue = "c" };
    mapped = source.MapTo<Sample2>();
    Assert.Equal("c", mapped.StringValue);
}
/// <summary>
/// Demo entry point: opens the "test" database, saves one record of each
/// sample type, then queries both back within a +/- one-minute window.
/// </summary>
private static void Main(string[] args)
{
    new DB("test");

    var sample1 = new Sample1 { SomeProp = "some prop value", TimeStamp = DateTime.UtcNow };
    var sample2 = new Sample2 { AnotherProp = "another prop", TimeStamp = DateTime.UtcNow };
    DB.Save(sample1);
    DB.Save(sample2);

    var s1 = FindSamples<Sample1>();
    var s2 = FindSamples<Sample2>();

    // Local helper: fetch records timestamped within one minute of now.
    List<T> FindSamples<T>() where T : MySample
    {
        return DB.Find<T>().Many(s =>
            s.TimeStamp >= DateTime.UtcNow.AddMinutes(-1) &&
            s.TimeStamp <= DateTime.UtcNow.AddMinutes(1));
    }
}
// Console entry point for the sweep-deconvolution tool.
// Parses command-line switches, deconvolves the left/right sweep recordings
// into impulse responses, writes intermediate PCM/WAV/DAT files, and (unless
// /NODRC or DRC is unavailable) hands the results to DRC to build
// room-correction filters. Temp files are deleted in the finally block.
static void Main(string[] args)
{
    // Find where this executable is launched from
    string[] cargs = Environment.GetCommandLineArgs();
    _thisFolder = Path.GetDirectoryName(cargs[0]);
    if (String.IsNullOrEmpty(_thisFolder)) { _thisFolder = Environment.CurrentDirectory; }
    string appData = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData);
    _impulsesFolder = Path.GetFullPath(Path.Combine(appData, "InguzEQ" + slash + "Impulses" + slash));
    string[] inFiles = new string[4];
    string inL = "";
    string inR = "";
    if (!DisplayInfo()) { return; }
    bool ok = (args.Length > 0);
    bool longUsage = false;
    // Switch parsing; value-taking switches consume the following token via args[++j].
    for (int j = 0; ok && j < args.Length; j++)
    {
        string arg = args[j];
        switch (args[j].ToUpperInvariant())
        {
            case "/?":
            case "-?":
            case "/H":
            case "/HELP":
                ok = false; longUsage = true; break;
            case "/L":
            case "/0":
                inFiles[0] = args[++j]; _nInFiles = Math.Max(_nInFiles, 1); break;
            case "/R":
            case "/1":
                inFiles[1] = args[++j]; _nInFiles = Math.Max(_nInFiles, 2); break;
            case "/2":
                inFiles[2] = args[++j]; _nInFiles = Math.Max(_nInFiles, 3); break;
            case "/3":
                inFiles[3] = args[++j]; _nInFiles = Math.Max(_nInFiles, 4); break;
            case "/LENGTH":
                _filterLen = int.Parse(args[++j], CultureInfo.InvariantCulture);
                if (_filterLen < 16) { throw new Exception("Length is too small."); }
                break;
            case "/DBL": _dbl = true; break;
            case "/PCM": _pcm = true; break;
            case "/NODRC": _noDRC = true; break;
            case "/NOSKEW": _noSkew = true; break;
            case "/NONORM":
                // No normalization of the impulse response (undocumented)
                _noNorm = true; break;
            case "/SPLIT": _split = true; break;
            case "/COPY": _copy = true; break;
            case "/GAIN": _gain = double.Parse(args[++j], CultureInfo.InvariantCulture); break;
            case "/ALL":
                // Returns negative-time components as part of the impulse response
                // (experimental, to be used for THD measurement)
                _returnAll = true; break;
            case "/POWER":
                // Raises sweep to power n
                // (experimental, to be used for THD measurement)
                _power = int.Parse(args[++j], CultureInfo.InvariantCulture);
                break;
            case "/FMIN":
                // (experimental, i.e. broken)
                _fmin = int.Parse(args[++j], CultureInfo.InvariantCulture); _fminSpecified = true; break;
            case "/FMAX":
                // (experimental, i.e. broken)
                _fmax = int.Parse(args[++j], CultureInfo.InvariantCulture); _fmaxSpecified = true; break;
            case "/DIRECT":
                // Create filtered (direct-sound) filters
                _doDirectFilters = true; break;
            case "/NOSUB":
                // Don't apply subsonic filter to the impulse response
                _noSubsonicFilter = true; break;
            case "/NOOVER":
                // Don't override DRC's settings for filter type and length
                _noOverrideDRC = true; break;
            case "/KEEPTEMP":
                // Undocumented
                _keepTempFiles = true; break;
            case "/REFCH":
                // Override the reference-channel detection
                _refchannel = int.Parse(args[++j], CultureInfo.InvariantCulture);
                if (_refchannel<0 || _refchannel > _nInFiles - 1) { throw new Exception(String.Format("RefCh can only be from 0 to {0}.", _nInFiles-1)); }
                break;
            case "/ENV":
                // Undocumented. Save the Hilbert envelope
                _env = true; break;
            case "-":
                // ignore
                break;
            default:
                ok = false; break;
        }
    }
    if (!ok)
    {
        DisplayUsage(longUsage);
    }
    else
    {
        try
        {
            // If DRC isn't installed, degrade gracefully to impulse-response-only mode.
            if (!_noDRC)
            {
                if (!File.Exists(GetDRCExe()))
                {
                    stderr.WriteLine("Denis Sbragion's DRC (http://drc-fir.sourceforge.net/) was not found.");
                    stderr.WriteLine("Only the impulse response will be calculated, not correction filters.");
                    stderr.WriteLine("");
                    _noDRC = true;
                }
            }
            if (!_noDRC)
            {
                FileInfo[] drcfiles = new DirectoryInfo(_thisFolder).GetFiles("*.drc");
                if (drcfiles.Length == 0)
                {
                    stderr.WriteLine("No .drc files were found in the current folder.");
                    stderr.WriteLine("Only the impulse response will be calculated, not correction filters.");
                    stderr.WriteLine("");
                    _noDRC = true;
                }
            }
            // Validate inputs: every declared file present and existing; warn on duplicates.
            for(int i=0; i<_nInFiles; i++)
            {
                string inFile = inFiles[i];
                if (String.IsNullOrEmpty(inFile))
                {
                    stderr.WriteLine("Error: The {0} input file was not specified.", FileDescription(i));
                    return;
                }
                if (!File.Exists(inFile))
                {
                    stderr.WriteLine("Error: The {0} input file {1} was not found.", FileDescription(i), inFile);
                    return;
                }
                for (int j = 0; j < i; j++)
                {
                    if (inFile.Equals(inFiles[j]))
                    {
                        stderr.WriteLine("Warning: The same input file ({0}) was specified for both {1} and {2}!", inFile, FileDescription(j), FileDescription(i));
                        //stderr.WriteLine();
                    }
                }
            }
            // Temporary: only the stereo (two-input) path is implemented below.
            if (_nInFiles != 2)
            {
                stderr.WriteLine("Error: Two input files must be specified.");
                return;
            }
            inL = inFiles[0];
            inR = inFiles[1];
            // end temporary
            uint sampleRate;
            List<SoundObj> impulses;
            List<ISoundObj> filteredImpulses;
            List<string> impDirects;
            List<Complex[]> impulseFFTs;
            List<double> maxs;
            SoundObj impulseL;
            SoundObj impulseR;
            ISoundObj filteredImpulseL = null;
            ISoundObj filteredImpulseR = null;
            string impDirectL = null;
            string impDirectR = null;
            Complex[] impulseLFFT;
            Complex[] impulseRFFT;
            WaveWriter writer;
            ISoundObj buff;
            double g;
            if (!_keepTempFiles) { _tempFiles.Add("rps.pcm"); _tempFiles.Add("rtc.pcm"); }
            // Find the left impulse
            stderr.WriteLine("Processing left measurement ({0})...", inL);
            impulseL = Deconvolve(inL, out impulseLFFT, out _peakPosL);
            sampleRate = impulseL.SampleRate;
            _sampleRate = sampleRate;
            double peakM = Math.Round(MathUtil.Metres(_peakPosL, sampleRate), 2);
            double peakFt = Math.Round(MathUtil.Feet(_peakPosL, sampleRate), 2);
            stderr.WriteLine(" Impulse peak at sample {0} ({1}m, {2}ft)", _peakPosL, peakM, peakFt);
            // Write to PCM
            string impFileL = Path.GetFileNameWithoutExtension(inL) + "_imp" + ".pcm";
            if (!_keepTempFiles) { _tempFiles.Add(impFileL); }
            writer = new WaveWriter(impFileL);
            writer.Input = impulseL;
            writer.Format = WaveFormat.IEEE_FLOAT;
            writer.BitsPerSample = 32;
            writer.SampleRate = _sampleRate;
            writer.Raw = true;
            writer.Run();
            writer.Close();
            // Write the impulseFFT to disk
            // (per-bin magnitude and PI-normalized phase, first half of the spectrum only)
            int L = impulseLFFT.Length;
            string impTempL = Path.GetFileNameWithoutExtension(inL) + "_imp" + ".dat";
            _tempFiles.Add(impTempL);
            writer = new WaveWriter(impTempL);
            writer.Input = new CallbackSource(2, sampleRate, delegate(long j)
            {
                if (j >= L / 2) { return null; }
                Complex si = impulseLFFT[j]; // +impulseLFFT[L - j - 1];
                ISample s = new Sample2();
                s[0] = si.Magnitude;
                s[1] = si.Phase / Math.PI;
                return s;
            });
            writer.Format = WaveFormat.IEEE_FLOAT;
            writer.BitsPerSample = 32;
            writer.SampleRate = _sampleRate;
            writer.Raw = false;
            writer.Run();
            writer.Close();
            writer = null;
            impulseLFFT = null;
            GC.Collect();
            if (_doDirectFilters)
            {
                // Sliding low-pass filter over the impulse
                stderr.WriteLine(" Filtering...");
                filteredImpulseL = SlidingLowPass(impulseL, _peakPosL);
                // Write PCM for the filtered impulse
                impDirectL = Path.GetFileNameWithoutExtension(inL) + "_impfilt" + ".pcm";
                if (!_keepTempFiles) { _tempFiles.Add(impDirectL); }
                writer = new WaveWriter(impDirectL);
                writer.Input = filteredImpulseL;
                writer.Format = WaveFormat.IEEE_FLOAT;
                writer.SampleRate = _sampleRate;
                writer.BitsPerSample = 32;
                writer.Raw = false;
                writer.Run();
                writer.Close();
                writer = null;
                filteredImpulseL.Reset();
            }
            GC.Collect();
            stderr.WriteLine(" Deconvolution: left impulse done.");
            stderr.WriteLine();
            // Find the right impulse (mirrors the left-channel processing above)
            stderr.WriteLine("Processing right measurement ({0})...", inR);
            impulseR = Deconvolve(inR, out impulseRFFT, out _peakPosR);
            peakM = Math.Round(MathUtil.Metres(_peakPosR, sampleRate), 2);
            peakFt = Math.Round(MathUtil.Feet(_peakPosR, sampleRate), 2);
            stderr.WriteLine(" Impulse peak at sample {0} ({1}m, {2}ft)", _peakPosR, peakM, peakFt);
            // Write to PCM
            string impFileR = Path.GetFileNameWithoutExtension(inR) + "_imp" + ".pcm";
            if (!_keepTempFiles) { _tempFiles.Add(impFileR); }
            writer = new WaveWriter(impFileR);
            writer.Input = impulseR;
            writer.Format = WaveFormat.IEEE_FLOAT;
            writer.BitsPerSample = 32;
            writer.SampleRate = _sampleRate;
            writer.Raw = true;
            writer.Run();
            writer.Close();
            // Write the impulseFFT magnitude to disk
            L = impulseRFFT.Length;
            string impTempR = Path.GetFileNameWithoutExtension(inR) + "_imp" + ".dat";
            _tempFiles.Add(impTempR);
            writer = new WaveWriter(impTempR);
            writer.Input = new CallbackSource(2, impulseR.SampleRate, delegate(long j)
            {
                if (j >= L / 2) { return null; }
                Complex si = impulseRFFT[j]; // +impulseRFFT[L - j - 1];
                ISample s = new Sample2();
                s[0] = si.Magnitude;
                s[1] = si.Phase / Math.PI;
                return s;
            });
            writer.Format = WaveFormat.IEEE_FLOAT;
            writer.BitsPerSample = 32;
            writer.SampleRate = _sampleRate;
            writer.Raw = false;
            writer.Run();
            writer.Close();
            writer = null;
            impulseRFFT = null;
            GC.Collect();
            if (_doDirectFilters)
            {
                // Sliding low-pass filter over the impulse
                stderr.WriteLine(" Filtering...");
                filteredImpulseR = SlidingLowPass(impulseR, _peakPosR);
                // Write PCM for the filtered impulse
                impDirectR = Path.GetFileNameWithoutExtension(inR) + "_impfilt" + ".pcm";
                if (!_keepTempFiles) { _tempFiles.Add(impDirectR); }
                writer = new WaveWriter(impDirectR);
                writer.Input = filteredImpulseR;
                writer.Format = WaveFormat.IEEE_FLOAT;
                writer.BitsPerSample = 32;
                writer.SampleRate = _sampleRate;
                writer.Raw = false;
                writer.Run();
                writer.Close();
                writer = null;
                filteredImpulseR.Reset();
            }
            GC.Collect();
            stderr.WriteLine(" Deconvolution: right impulse done.");
            stderr.WriteLine();
            // Join the left and right impulse files (truncated at 65536) into a WAV
            // and normalize loudness for each channel
            stderr.WriteLine("Splicing and normalizing (1)");
            ChannelSplicer longstereoImpulse = new ChannelSplicer();
            // (Don't normalize each channel's volume separately if _returnAll, it's just too expensive)
            if (_returnAll)
            {
                buff = impulseL;
            }
            else
            {
                buff = new SoundBuffer(new SampleBuffer(impulseL).Subset(0, 131071));
                g = Loudness.WeightedVolume(buff);
                (buff as SoundBuffer).ApplyGain(1 / g);
            }
            longstereoImpulse.Add(buff);
            if (_returnAll)
            {
                buff = impulseR;
            }
            else
            {
                buff = new SoundBuffer(new SampleBuffer(impulseR).Subset(0, 131071));
                g = Loudness.WeightedVolume(buff);
                (buff as SoundBuffer).ApplyGain(1 / g);
            }
            longstereoImpulse.Add(buff);
            ISoundObj stereoImpulse = longstereoImpulse;
            _impulseFiles.Add("Impulse_Response_Measured.wav: stereo impulse response from measurements");
            writer = new WaveWriter("Impulse_Response_Measured.wav");
            writer.Input = longstereoImpulse;
            writer.Format = WaveFormat.IEEE_FLOAT;
            writer.BitsPerSample = 32;
            writer.SampleRate = _sampleRate;
            writer.Normalization = -1;
            writer.Raw = false;
            writer.Run();
            writer.Close();
            writer = null;
            if (_env)
            {
                // Also save the Hilbert envelope
                HilbertEnvelope env = new HilbertEnvelope(8191);
                env.Input = longstereoImpulse;
                _impulseFiles.Add("Impulse_Response_Envelope.wav: Hilbert envelope of the impulse response");
                writer = new WaveWriter("Impulse_Response_Envelope.wav");
                writer.Input = env;
                writer.Format = WaveFormat.IEEE_FLOAT;
                writer.BitsPerSample = 32;
                writer.SampleRate = _sampleRate;
                writer.Normalization = -1;
                writer.Raw = false;
                writer.Run();
                writer.Close();
                writer = null;
            }
            if (_dbl)
            {
                // Create DBL files for Acourate
                _impulseFiles.Add("PulseL.dbl: impulse response, raw data (64-bit float), left channel ");
                _impulseFiles.Add("PulseR.dbl: impulse response, raw data (64-bit float), right channel");
                _impulseFiles.Add(" (use skew=" + (_peakPosL - _peakPosR) + " for time alignment)");
                WriteImpulseDBL(stereoImpulse, "PulseL.dbl", "PulseR.dbl");
            }
            if (_pcm)
            {
                // Create PCM files for Octave (etc)
                _impulseFiles.Add("LUncorrected.pcm: impulse response, raw data (32-bit float), left channel");
                _impulseFiles.Add("RUncorrected.pcm: impulse response, raw data (32-bit float), right channel");
                WriteImpulsePCM(stereoImpulse, "LUncorrected.pcm", "RUncorrected.pcm");
            }
            stereoImpulse = null;
            longstereoImpulse = null;
            buff = null;
            GC.Collect();
            if (_doDirectFilters)
            {
                // Same for the filtered impulse response
                stderr.WriteLine("Splicing and normalizing (2)");
                ChannelSplicer longstereoImpulseF = new ChannelSplicer();
                buff = new SoundBuffer(new SampleBuffer(filteredImpulseL).Subset(0, 131071));
                double gL = Loudness.WeightedVolume(buff);
                (buff as SoundBuffer).ApplyGain(1 / gL);
                longstereoImpulseF.Add(buff);
                FilterProfile lfgDirectL = new FilterProfile(buff, 0.5);
                buff = new SoundBuffer(new SampleBuffer(filteredImpulseR).Subset(0, 131071));
                double gR = Loudness.WeightedVolume(buff);
                (buff as SoundBuffer).ApplyGain(1 / gR);
                longstereoImpulseF.Add(buff);
                FilterProfile lfgDirectR = new FilterProfile(buff, 0.5);
                _impulseFiles.Add("Impulse_Response_Filtered.wav: approximation to direct-sound impulse response");
                writer = new WaveWriter("Impulse_Response_Filtered.wav");
                writer.Input = longstereoImpulseF;
                writer.Format = WaveFormat.IEEE_FLOAT;
                writer.BitsPerSample = 32;
                writer.SampleRate = _sampleRate;
                writer.Normalization = -1;
                writer.Raw = false;
                writer.Run();
                writer.Close();
                // Keep the normalization gain so subsequent files use the same scale.
                double gg = writer.Gain;
                writer = null;
                longstereoImpulseF = null;
                // Diffuse-field remnant: measured impulse minus the direct-sound estimate.
                ChannelSplicer longstereoImpulseD = new ChannelSplicer();
                Mixer diffuse = new Mixer();
                diffuse.Add(impulseL, 1.0);
                diffuse.Add(filteredImpulseL, -1.0);
                buff = new SoundBuffer(new SampleBuffer(diffuse).Subset(0, 131071));
                (buff as SoundBuffer).ApplyGain(1 / gL);
                longstereoImpulseD.Add(buff);
                FilterProfile lfgDiffuseL = new FilterProfile(buff, 0.5);
                diffuse = new Mixer();
                diffuse.Add(impulseR, 1.0);
                diffuse.Add(filteredImpulseR, -1.0);
                buff = new SoundBuffer(new SampleBuffer(diffuse).Subset(0, 131071));
                (buff as SoundBuffer).ApplyGain(1 / gR);
                longstereoImpulseD.Add(buff);
                FilterProfile lfgDiffuseR = new FilterProfile(buff, 0.5);
                _impulseFiles.Add("Impulse_Response_Diffuse.wav: approximation to diffuse-field remnant");
                writer = new WaveWriter("Impulse_Response_Diffuse.wav");
                writer.Input = longstereoImpulseD;
                writer.Format = WaveFormat.IEEE_FLOAT;
                writer.BitsPerSample = 32;
                writer.SampleRate = _sampleRate;
                writer.Gain = gg;
                writer.Raw = false;
                writer.Run();
                writer.Close();
                writer = null;
                // Filter the diffuse-field curve against double the diffuse-field curve
                FilterImpulse fiDiffuse = new FilterImpulse(8192, HRTF.diffuseDiff0() * 2, FilterInterpolation.COSINE, sampleRate);
                FastConvolver co = new FastConvolver(longstereoImpulseD, fiDiffuse);
                SoundBuffer buffd = new SoundBuffer(co);
                _impulseFiles.Add("Impulse_Response_Diffuse_Comp.wav: filtered diffuse-field remnant");
                writer = new WaveWriter("Impulse_Response_Diffuse_Comp.wav");
                writer.Input = buffd.Subset(4096);
                writer.Format = WaveFormat.IEEE_FLOAT;
                writer.BitsPerSample = 32;
                writer.SampleRate = _sampleRate;
                writer.Gain = gg;
                writer.Raw = false;
                writer.Run();
                writer.Close();
                writer = null;
                longstereoImpulseD = null;
                // Emit the direct-vs-diffuse difference profiles as JSON.
                bool any = false;
                string jsonFile = "Diff.json";
                FileStream fs = new FileStream(jsonFile, FileMode.Create);
                StreamWriter sw = new StreamWriter(fs);
                sw.WriteLine("{");
                FilterProfile lfgDiffL = lfgDirectL - lfgDiffuseL;
                if (lfgDiffL != null)
                {
                    if (any) sw.WriteLine(",");
                    any = true;
                    sw.Write(lfgDiffL.ToJSONString("DiffL", "Diffuse field relative to direct, left channel"));
                }
                FilterProfile lfgDiffR = lfgDirectR - lfgDiffuseR;
                if (lfgDiffR != null)
                {
                    if (any) sw.WriteLine(",");
                    any = true;
                    sw.Write(lfgDiffR.ToJSONString("DiffR", "Diffuse field relative to direct, right channel"));
                }
                sw.WriteLine("}");
                sw.Close();
                fs.Close();
            }
            buff = null;
            GC.Collect();
            System.Console.Error.WriteLine();
            if (!_noDRC)
            {
                // Analyze the freq response
                // and create targets
                // target_full.txt and target_half.txt
                stderr.WriteLine("Analyzing response curves.");
                Prep(impTempL, impTempR, "Impulse_Response_Measured.wav", "NoCorrection");
                // Call DRC to create the filters
                // then splice the DRC left & right output files together
                stderr.WriteLine("Preparing for DRC.");
                if (DoDRC(impFileL, impFileR, impDirectL, impDirectR, _peakPosL, _peakPosR, "Impulse_Response_Measured.wav", "Impulse_Response_Filtered.wav"))
                {
                    stderr.WriteLine("Success!");
                }
            }
            // Report names of the impulse files created
            if (_impulseFiles.Count == 0)
            {
                System.Console.Error.WriteLine("No impulse response files were created.");
            }
            if (_impulseFiles.Count > 0)
            {
                System.Console.Error.WriteLine("Impulse response files were created:");
                foreach (string f in _impulseFiles)
                {
                    string s = " " + f;
                    System.Console.Error.WriteLine(s);
                }
            }
            // Report names of the filter files created
            if (_filterFiles.Count == 0 && !_noDRC)
            {
                System.Console.Error.WriteLine("No correction filter files were created.");
            }
            if (_filterFiles.Count > 0)
            {
                System.Console.Error.WriteLine("Correction filter files were created:");
                foreach (string f in _filterFiles)
                {
                    string s = " " + f;
                    if (_copy)
                    {
                        // /COPY: also place each filter into the shared Impulses folder.
                        try
                        {
                            File.Copy(f, Path.Combine(_impulsesFolder, f), true);
                            s += " (copied)";
                        }
                        catch (Exception e)
                        {
                            s += " (not copied: " + e.Message + ")";
                        }
                    }
                    System.Console.Error.WriteLine(s);
                }
            }
            if (_peakPosL == _peakPosR)
            {
                // Identical peak positions usually indicate a measurement problem.
                System.Console.Error.WriteLine();
                System.Console.Error.WriteLine("Zero time difference between channels. Are you sure the recordings are correct?");
            }
        }
        catch (Exception e)
        {
            stderr.WriteLine();
            stderr.WriteLine(e.Message);
            stderr.WriteLine(e.StackTrace);
        }
        finally
        {
            // Best-effort cleanup of intermediate files; failures are ignored.
            foreach (string tempFile in _tempFiles)
            {
                try { File.Delete(tempFile); }
                catch (Exception) { /* ignore */ }
            }
        }
    }
    stderr.Flush();
}
/// <summary>
/// Entry point: runs each sample in sequence.
/// </summary>
static void Main()
{
    Sample1.Run();
    Sample2.Run();
}
/// <summary>
/// Prints the three-operand sum from Sample2, then continues with stage two.
/// </summary>
public static void Run()
{
    var sum = Sample2.add3(1, 2, 3);
    Console.WriteLine(sum);
    Run2();
}
// Exercises MapTo on a deeply nested parameter object (param -> visits ->
// modules, with serialized JSON payloads) and then on a simple
// Sample2 -> Sample mapping with an assertion.
public void TestMapTo2()
{
    // Build a fully populated publish parameter with two structurally
    // identical visits, each carrying one module whose JSON fields are
    // serialized sub-objects.
    var param = new PublishBigBackstageConfigParam();
    param.UniqueId = Guid.NewGuid().ToString("N");
    param.ProjectId = Guid.NewGuid().ToString("N");
    param.RoleId = Guid.NewGuid().ToString("N");
    param.ReleaseTime = DateTime.Now;
    param.Visits = new System.Collections.Generic.List<VisitEntity>();
    param.Visits.Add(new VisitEntity()
    {
        Id = Guid.NewGuid().ToString("N"),
        Name = "访视名称",
        Sort = 1,
        Type = 2,
        TypeName = "随意",
        Modules = new System.Collections.Generic.List<ModuleEntity>()
        {
            new ModuleEntity()
            {
                Id = Guid.NewGuid().ToString("N"),
                HtmlJson = Newtonsoft.Json.JsonConvert.SerializeObject(new PublishBigBackstageConfigParam() { ProjectId = Guid.NewGuid().ToString("N") }),
                ResultJson = Newtonsoft.Json.JsonConvert.SerializeObject(new PublishBigBackstageConfigParam() { ProjectId = Guid.NewGuid().ToString("N") }),
                Sort = 1,
                Name = "这是啥",
                SpecialJson = Newtonsoft.Json.JsonConvert.SerializeObject(new PublishBigBackstageConfigParam() { ProjectId = Guid.NewGuid().ToString("N") })
            }
        }
    });
    param.Visits.Add(new VisitEntity()
    {
        Id = Guid.NewGuid().ToString("N"),
        Name = "访视名称",
        Sort = 1,
        Type = 2,
        TypeName = "随意",
        Modules = new System.Collections.Generic.List<ModuleEntity>()
        {
            new ModuleEntity()
            {
                Id = Guid.NewGuid().ToString("N"),
                HtmlJson = Newtonsoft.Json.JsonConvert.SerializeObject(new PublishBigBackstageConfigParam() { ProjectId = Guid.NewGuid().ToString("N") }),
                ResultJson = Newtonsoft.Json.JsonConvert.SerializeObject(new PublishBigBackstageConfigParam() { ProjectId = Guid.NewGuid().ToString("N") }),
                Sort = 1,
                Name = "这是啥",
                SpecialJson = Newtonsoft.Json.JsonConvert.SerializeObject(new PublishBigBackstageConfigParam() { ProjectId = Guid.NewGuid().ToString("N") })
            }
        }
    });
    // Map the nested parameter onto the entity type; the call itself is the
    // exercise here. NOTE(review): no assertions are made on `entity` —
    // consider asserting nested fields survived the mapping.
    var entity = param.MapTo<BigBackstageConfig>();
    //var config = new MapperConfiguration(cfg => cfg.CreateMap<PublishBigBackstageConfigParam, BigBackstageConfig>());
    //var mapper = config.CreateMapper();
    //var userDTO = mapper.Map<BigBackstageConfig>(param);
    // Simple-mapping sanity check: a single string property round-trips.
    Sample2 sample2 = new Sample2 { StringValue = "a" };
    var sample = sample2.MapTo<Sample>();
    Assert.Equal("a", sample.StringValue);
}
/// <summary>
/// Verifies that JSON serialization omits members holding their default
/// values, and that deserialization restores those defaults.
/// </summary>
public void TestDefault()
{
    var serializer = new JsonSerializer();
    serializer.JsonOptions.Indent = "";
    serializer.JsonOptions.FieldSeparator = " ";

    // Sample1.Y defaults to "zzz", so only X is emitted.
    var sample1 = new Sample1 { X = 6, Y = "ttt" };
    var json1 = serializer.ToString(sample1);
    Assert.AreEqual("{ \"X\":6 }", json1);
    var restored1 = (Sample1)Sample1_JsonDeserializer.Instance.FromString(json1);
    Assert.AreEqual(6, restored1.X);
    Assert.AreEqual("zzz", restored1.Y);

    var sample2 = new Sample2 { X = 5, Y = "5" };
    var json2 = serializer.ToString(sample2);
    Assert.AreEqual("{ \"X\":5 }", json2);

    // A fully defaulted object serializes to an empty JSON body.
    var sampleDefault = new SampleDefault();
    Assert.AreEqual("{ }", serializer.ToString(new SampleDefault()));
    sampleDefault.B = "z";
    var json3 = serializer.ToString(sampleDefault);
    Assert.AreEqual("{ \"B\":\"z\" }", json3);

    var restored3 = new SampleDefault();
    new JsonDeserializer().FromString(restored3, json3);
    Assert.AreEqual(3, restored3.A);
    Assert.AreEqual("z", restored3.B);
    Assert.AreEqual(new SamplePoint { X = 7, Y = 2 }, restored3.P);
}
// Draws two random stratified samples for stratum value `s`: pass 1 fills
// `sample1` from feature class Sample1, pass 2 fills `sample2` from Sample2.
// Also tracks per-variable min/max (minMaxDic1/minMaxDic2) and per-cluster
// row counts (clusCountDic/clusSampleCountDic). Rows are accepted by
// rejection sampling so each sample stays near `maxSample` in size.
private void buildSamples(string s)
{
    Random rn = new Random();
    double mS = maxSample * 1.05; // to make sure we get max sample give random nature of Random function
    //double minS = maxSample / Variables.Length; // to make sure we get min samples given random nature of Random function
    sample1 = new double[Variables.Length][];
    sample2 = new double[Variables.Length][];
    int[] strataCnt = cntDic[s];
    int s1 = strataCnt[0] * Variables.Length;
    int s2 = strataCnt[1] * Variables.Length;
    // r1/r2 = per-row acceptance probability; sr1/sr2 = target-size scale factor.
    double r1 = 1;
    double r2 = 1;
    double sr1 = 1;
    double sr2 = 1;
    if (s1 > mS) { r1 = mS / s1; sr1 = maxSample / s1; }
    if (s2 > mS) { r2 = mS / s2; sr2 = maxSample / s2; }
    int ss1 = System.Convert.ToInt32((sr1 * strataCnt[0])); // / Variables.Length);
    int ss2 = System.Convert.ToInt32((sr2 * strataCnt[1])); // / Variables.Length);
    //Console.WriteLine("ss1,ss2 = " + ss1.ToString() + "," + ss2.ToString());
    // Pre-allocate one column per variable in both sample matrices.
    for (int i = 0; i < Variables.Length; i++)
    {
        double[] vlArr1 = new double[ss1];
        double[] vlArr2 = new double[ss2];
        sample1[i] = vlArr1;
        sample2[i] = vlArr2;
    }
    // Restrict the query to the requested stratum (quoting string-typed fields).
    IQueryFilter qf = new QueryFilterClass();
    if (StrataField == "")
    {
        qf.SubFields = String.Join(",", Variables);
    }
    else
    {
        qf.SubFields = StrataField + "," + String.Join(",", Variables);
        string d = "";
        if (Sample1.Fields.get_Field(Sample1.FindField(StrataField)).Type == esriFieldType.esriFieldTypeString) { d = "'"; }
        qf.WhereClause = StrataField + " = " + d + s + d;
    }
    ICursor cur = Sample1.Search(qf, false);
    int[] fldIndexArr = new int[Variables.Length];
    double[] minArr = new double[Variables.Length];
    double[] maxArr = new double[Variables.Length];
    // NOTE(review): only element 0 is seeded with sentinel extremes; the other
    // elements start at 0, which skews min/max for those variables unless their
    // values straddle zero — confirm whether this is intentional.
    minArr[0] = Double.MaxValue - 100;
    maxArr[0] = Double.MinValue + 100;
    for (int i = 0; i < fldIndexArr.Length; i++) { fldIndexArr[i] = cur.FindField(Variables[i]); }
    IRow rw = cur.NextRow();
    int tCnt = 0;
    double[] vArr = new double[Variables.Length];
    double[] vArr2 = new double[Variables.Length];
    clusCountDic.Add(s, new int[numberOfBins]);
    int[] clusVArr = clusCountDic[s];
    //Console.WriteLine("Start iteration 1");
    // Pass 1: rejection-sample rows from Sample1 until ss1 rows are collected.
    while (rw != null && tCnt < ss1)
    {
        double nRn = rn.NextDouble();
        if (nRn <= r1)
        {
            // Skip rows with any null variable value.
            bool checkVls = true;
            for (int i = 0; i < fldIndexArr.Length; i++)
            {
                object vlObj = rw.get_Value(fldIndexArr[i]);
                if (vlObj == null) { checkVls = false; break; }
                vArr[i] = System.Convert.ToDouble(vlObj);
            }
            if (checkVls)
            {
                // Tally the row's cluster; optionally rotate through the PCA basis.
                int clusCl = cluster.computNew(vArr);
                clusVArr[clusCl] = clusVArr[clusCl] + 1;
                if (Oridinate) { vArr2 = pca.computNew(vArr); }
                else { vArr2 = vArr; }
                for (int i = 0; i < fldIndexArr.Length; i++)
                {
                    double vl = vArr2[i];
                    sample1[i][tCnt] = vl;
                    if (vl < minArr[i]) { minArr[i] = vl; }
                    if (vl > maxArr[i]) { maxArr[i] = vl; }
                }
                tCnt += 1;
            }
        }
        rw = cur.NextRow();
    }
    double[][] minMax = new double[2][];
    minMax[0] = minArr;
    minMax[1] = maxArr;
    //Console.WriteLine(String.Join(",", (from double d in minArr select d.ToString()).ToArray()));
    //Console.WriteLine(String.Join(",", (from double d in maxArr select d.ToString()).ToArray()));
    minMaxDic1.Add(s, minMax);
    // Pass 2: same stratum filter, this time reading from Sample2.
    qf = new QueryFilterClass();
    if (StrataField == "")
    {
        qf.SubFields = String.Join(",", Variables);
    }
    else
    {
        qf.SubFields = StrataField + "," + String.Join(",", Variables);
        string d = "";
        if (Sample1.Fields.get_Field(Sample1.FindField(StrataField)).Type == esriFieldType.esriFieldTypeString) { d = "'"; }
        qf.WhereClause = StrataField + " = " + d + s + d;
    }
    cur = Sample2.Search(qf, false);
    minArr = new double[Variables.Length];
    maxArr = new double[Variables.Length];
    // NOTE(review): as above, only element 0 is given a sentinel value here.
    minArr[0] = Double.MaxValue;
    maxArr[0] = Double.MinValue;
    for (int i = 0; i < fldIndexArr.Length; i++) { fldIndexArr[i] = cur.FindField(Variables[i]); }
    rw = cur.NextRow();
    tCnt = 0;
    clusSampleCountDic.Add(s, new int[numberOfBins]);
    clusVArr = clusSampleCountDic[s];
    //Console.WriteLine("Start iteration2");
    while (rw != null && tCnt < ss2)
    {
        double nRn = rn.NextDouble();
        if (nRn <= r2)
        {
            bool checkVls = true;
            for (int i = 0; i < fldIndexArr.Length; i++)
            {
                object vlObj = rw.get_Value(fldIndexArr[i]);
                if (vlObj == null) { checkVls = false; break; }
                vArr[i] = System.Convert.ToDouble(vlObj);
            }
            if (checkVls)
            {
                int clusCl = cluster.computNew(vArr);
                clusVArr[clusCl] = clusVArr[clusCl] + 1;
                if (Oridinate) { vArr2 = pca.computNew(vArr); }
                else { vArr2 = vArr; }
                for (int i = 0; i < fldIndexArr.Length; i++)
                {
                    double vl = vArr2[i];
                    sample2[i][tCnt] = vl;
                    if (vl < minArr[i]) { minArr[i] = vl; }
                    if (vl > maxArr[i]) { maxArr[i] = vl; }
                }
                tCnt += 1;
            }
        }
        rw = cur.NextRow();
    }
    minMax = new double[2][];
    minMax[0] = minArr;
    minMax[1] = maxArr;
    //Console.WriteLine(String.Join(",", (from double d in minArr select d.ToString()).ToArray()));
    //Console.WriteLine(String.Join(",", (from double d in maxArr select d.ToString()).ToArray()));
    minMaxDic2.Add(s, minMax);
    // Release the COM cursor explicitly (ArcObjects requirement).
    System.Runtime.InteropServices.Marshal.ReleaseComObject(cur);
    //Console.WriteLine("Finished iterations");
}
/// <summary>Generates sample2jpg.pdf embedding a JPEG image.</summary>
public void DoSample2jpg() => Sample2.GetSample("sample2jpg.pdf", imagefile: "v3v.jpg");
/// <summary>Generates sample2.pdf embedding a PNG logo.</summary>
public void DoSample2() => Sample2.GetSample("sample2.pdf", imagefile: "logo.png");
/// <summary>
/// Demo entry point: prints 1 + 2 via Sample1, then 3 * 5 via Sample2.
/// </summary>
static void Main(string[] args)
{
    var sum = Sample1.add(1, 2);
    Console.WriteLine(sum);
    var product = Sample2.multiply(3, 5);
    Console.WriteLine(product);
}
/// <summary>
/// Opens (or focuses) the Sample2 editor window and sets its tab title.
/// </summary>
public static void ShowExample()
{
    var window = GetWindow<Sample2>();
    window.titleContent = new GUIContent("Sample2");
}
/// <summary>Generates sample2png.pdf embedding a PNG image.</summary>
public void DoSample2png() => Sample2.GetSample("sample2png.pdf", imagefile: "v3v.png");
// Exercises default-value handling in the binary serializer/deserializers:
// members equal to their default are omitted from the stream and restored on
// read. Both the reflection-based (bd) and generated (bdg) deserializers are
// checked, including a hand-built stream that omits a member entirely.
public void TestDefault()
{
    var bs = new BinarySerializer();
    var bd = new BinaryDeserializer();
    var bdg = new BinaryDeserializerGen();
    // Sample1.Y defaults to "zzz", so only X appears in the byte dump.
    var v1 = new Sample1 { X = 6, Y = "ttt" };
    var result1 = bs.ToBytes(v1);
    Assert.AreEqual(
        "20 01 00 " + XS(typeof(Sample1)) + " 02 00 " + XS("X", RoughType.Int, "Y", RoughType.String) +
        " 01 00 06 00 00 00 00 00",
        XS(result1));
    var w1 = (Sample1)bd.FromBytes(result1);
    Assert.AreEqual(6, w1.X);
    Assert.AreEqual("zzz", w1.Y);
    var w1g = (Sample1)bdg.FromBytes(result1);
    Assert.AreEqual(6, w1g.X);
    Assert.AreEqual("zzz", w1g.Y);
    var v2 = new Sample2 { X = 5, Y = "5" };
    var result2 = bs.ToBytes(v2);
    Assert.AreEqual(
        "20 02 00 " + XS(typeof(Sample2)) + " 02 00 " + XS("X", RoughType.Int, "Y", RoughType.String) +
        " 01 00 05 00 00 00 00 00",
        XS(result2));
    Assert.IsInstanceOfType(bd.FromBytes(result2), typeof(Sample2));
    // A fully defaulted object serializes to just the class header.
    var v3 = new SampleDefault();
    var result3 = bs.ToBytes(new SampleDefault());
    Assert.AreEqual(
        "20 03 00 " + XS(typeof(SampleDefault)) + " 03 00 " + XS("A", RoughType.Int, "B", RoughType.String, "P", RoughType.Record) +
        " 00 00",
        XS(result3));
    Assert.IsInstanceOfType(bd.FromBytes(result3), typeof(SampleDefault));
    v3.B = "z";
    var result3m = bs.ToBytes(v3);
    Assert.AreEqual("20 03 00 02 00 " + XS("z") + " 00 00", XS(result3m));
    var w3 = new SampleDefault();
    bd.FromBytes(w3, result3m);
    Assert.AreEqual(3, w3.A);
    Assert.AreEqual("z", w3.B);
    Assert.AreEqual(new SamplePoint { X = 7, Y = 2 }, w3.P);
    // Hand-built stream that omits B: the generated deserializer must restore
    // B's default ("default") while reading A and the nested SamplePoint.
    var result4 = SX(
        "20 01 00 " + XS(typeof(SampleDefault)) + " 02 00 " + XS("A", RoughType.Int, "P", RoughType.Record) +
        " 01 00 05 00 00 00 " +
        "02 00 02 00 " + XS(typeof(SamplePoint)) + " 02 00 " + XS("X", RoughType.Int, "Y", RoughType.Int) +
        " 04 00 00 00 06 00 00 00 " +
        "00 00"
    );
    bdg.ClearClassIds();
    var w4 = bdg.FromBytes<SampleDefault>(result4);
    Assert.AreEqual(5, w4.A);
    Assert.AreEqual("default", w4.B);
    Assert.AreEqual(new SamplePoint { X = 4, Y = 6 }, w4.P);
}
/// <summary>Copies the stored point at <paramref name="index"/> into <paramref name="dest"/>.</summary>
public void GetPoint(int index, out Sample2 dest) => dest = _points[index];
/// <summary>Returns true when the supplied point reference is null.</summary>
public static bool IsNull(Sample2 point) => null == point;
/// <summary>
/// Points compare equal when their X coordinates differ by less than 1e-8.
/// </summary>
public bool IsEqual(Sample2 pt) => Math.Abs(pt.X - X) < .00000001;
/// <summary>Null-check helper for point references.</summary>
public static bool IsNull(Sample2 point)
{
    bool isNull = (null == point);
    return isNull;
}
/// <summary>
/// The code-assign serializer must emit an Init method assigning each member,
/// for both Sample1 and Sample2.
/// </summary>
public void TestCodeAssignSimple()
{
    var serializer = new CodeAssignSerializer();

    var first = new Sample1 { X = 150, Y = "test" };
    Assert.AreEqual("void Init(Sample1 obj) {\n\tobj.X = 150;\n\tobj.Y = \"test\";\n}\n", serializer.ToString(first));

    var second = new Sample2 { X = 150, Y = "test" };
    Assert.AreEqual("void Init(Sample2 obj) {\n\tobj.X = 150;\n\tobj.Y = \"test\";\n}\n", serializer.ToString(second));
}
/// <summary>Generates sample2b.pdf embedding the 3d_down.png image.</summary>
public void DoSample2b() => Sample2.GetSample("sample2b.pdf", imagefile: "3d_down.png");
/// <summary>
/// Tolerance-based equality on the X coordinate only (threshold 1e-8).
/// </summary>
public bool IsEqual(Sample2 pt)
{
    double delta = Math.Abs(pt.X - X);
    return delta < .00000001;
}
/// <summary>
/// With SaveRootClass enabled the JSON carries the concrete class name,
/// allowing Sample2Allow to read data serialized from its ancestor Sample2.
/// </summary>
public void TestAllowReadingFromAncestor()
{
    var serializer = new JsonSerializer();
    serializer.JsonOptions.Indent = "";
    serializer.JsonOptions.SaveRootClass = true;

    var source = new Sample2 { X = 83, Y = "83" };
    var json = serializer.ToString(source);
    Assert.AreEqual("{\n\"class\":\"YuzuTest.Sample2, YuzuTest\",\n\"X\":83\n}", json);

    var target = new Sample2Allow();
    var deserializer = new JsonDeserializer();
    deserializer.FromString(target, json);
    Assert.AreEqual(source.X, target.X);
}
/// <summary>
/// Round-trips both properties through JSON, then verifies that a partial
/// document updates only X and leaves Y untouched.
/// </summary>
public void TestSimpleProps()
{
    var serializer = new JsonSerializer();
    var source = new Sample2 { X = 345, Y = "test" };
    serializer.JsonOptions.Indent = "";
    var json = serializer.ToString(source);
    Assert.AreEqual("{\n\"X\":345,\n\"Y\":\"test\"\n}", json);

    var target = new Sample2();
    var deserializer = new JsonDeserializer();
    deserializer.FromString(target, json);
    Assert.AreEqual(source.X, target.X);
    Assert.AreEqual(source.Y, target.Y);

    // Partial update: only X present in the document.
    deserializer.FromString(target, "{\"X\":999}");
    Assert.AreEqual(999, target.X);
    Assert.AreEqual(source.Y, target.Y);
}
// Decode B-Format (ambisonic W/X/Y/Z) input to a binaural stereo stream.
// NOTE(review): the unconditional throw below disables this routine; all code
// after it is unreachable and appears to be kept as a sketch of the intended
// approach — confirm whether it should be removed or completed.
static ISoundObj DecodeBFormatBinaural(ISoundObj source)
{
    throw new NotImplementedException();
    ISoundObj input = source;
    uint sr = input.SampleRate;
    // Convolve the BFormat data with the matrix filter
    if (!String.IsNullOrEmpty(_bformatFilter))
    {
        string ignore;
        WaveReader rdr = GetAppropriateImpulseReader(_bformatFilter, out ignore);
        FastConvolver ambiConvolver = new FastConvolver(source, rdr);
        input = ambiConvolver;
    }
    // Cardioid directed at four (or six) virtual loudspeakers
    IEnumerator<ISample> src = input.Samples;
    CallbackSource bin = new CallbackSource(2, sr, delegate(long j)
    {
        if (src.MoveNext())
        {
            ISample s = src.Current;
            double w = s[0];
            double x = s[1];
            double y = s[2];
            double z = s[3];
            // wFactor scales the omnidirectional W component into each ear.
            double wFactor = -0.5;
            double left = x + y + z + (wFactor * w);
            double right = x - y + z + (wFactor * w);
            ISample sample = new Sample2(left, right);
            return sample;
        }
        // End of input stream.
        return null;
    });
    return bin;
}