public void TestDownloadNew()
{
    // Remove any existing data file so the update has to perform a fresh download.
    if (TestDataFile.Exists)
    {
        TestDataFile.Delete();
    }
    ValidateDownload(Update());
}
public void TestUpgradeLiteOpen()
{
    SetLiteDataFile();
    // Hold the Lite data file open so the updater cannot replace it; the update
    // should report that the master file could not be renamed.
    using (var fileHandle = TestDataFile.OpenRead())
    {
        Assert.AreEqual(AutoUpdate.AutoUpdateStatus.AUTO_UPDATE_MASTER_FILE_CANT_RENAME, Update());
    }
}
/// <summary>
/// Make sure the data file gets replaced with a Lite one to emulate an
/// existing Lite data file.
/// </summary>
protected void SetLiteDataFile()
{
    String templateFile = AppDomain.CurrentDomain.BaseDirectory +
        "\\..\\..\\..\\data\\51Degrees-LiteV3.2.dat";
    // Delete existing file in case it's already of the latest version.
    if (TestDataFile.Exists)
    {
        TestDataFile.Delete();
    }
    File.Copy(templateFile, TestDataFile.FullName);
    Console.WriteLine("Test Data File: {0}", TestDataFile);
}
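// The three tests above share a TestDataFile member exposing Exists, Delete,
// OpenRead and FullName, i.e. a FileInfo pointing at the device data file that
// Update() refreshes. A minimal sketch of that member, assuming it lives on the
// test fixture; the file name and location here are assumptions, not part of
// the original source.
protected static readonly FileInfo TestDataFile = new FileInfo(
    Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "51Degrees.dat"));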
/// <summary>
/// Gets the list of test data files from the database.
/// </summary>
/// <returns>The rows of the TestDataFile table mapped to model objects.</returns>
public List<Models.TestDataFile> GetSampleDataFiles()
{
    string strCon = ConfigurationManager.ConnectionStrings["easyMigCon"].ConnectionString;
    string strPath = ConfigurationManager.AppSettings["filePath"];
    List<Models.TestDataFile> objFile = new List<Models.TestDataFile>();

    using (SqlConnection con = new SqlConnection(strCon))
    using (SqlCommand cmd = new SqlCommand("SELECT * FROM TestDataFile", con))
    using (SqlDataAdapter da = new SqlDataAdapter(cmd))
    {
        DataSet ds = new DataSet();
        // Fill opens and closes the connection itself.
        da.Fill(ds);

        foreach (DataRow row in ds.Tables[0].Rows)
        {
            objFile.Add(new Models.TestDataFile
            {
                Id = Convert.ToInt32(row["Id"]),
                Name = row["Name"].ToString(),
                CreatedOn = Convert.ToDateTime(row["CreatedOn"]),
                DownloadPath = strPath + row["Name"].ToString()
            });
        }
    }
    return objFile;
}
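// A minimal sketch of the Models.TestDataFile shape implied by the mapping in
// GetSampleDataFiles (Id, Name, CreatedOn, DownloadPath); the property types are
// inferred from the conversions used, and any members beyond these four are
// assumptions.
namespace Models
{
    public class TestDataFile
    {
        public int Id { get; set; }
        public string Name { get; set; }
        public DateTime CreatedOn { get; set; }
        public string DownloadPath { get; set; }
    }
}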
public void Test_Replace()
{
    BdsVersion ver1 = new BdsVersion
    {
        Major = 10,
        Minor = 11,
        Revision = 12,
        Build = 13,
        BuildInfo = "build info 1",
        ScmInfo = "scm info 1",
        Description = "description 1",
        UserDescription = "user description 1"
    };
    BdsVersion ver2 = new BdsVersion
    {
        Major = 20,
        Minor = 21,
        Revision = 22,
        Build = 23,
        BuildInfo = "build info 2",
        ScmInfo = "scm info 2",
        Description = "description 2",
        UserDescription = "user description 2"
    };

    TestDataFile file1 = new TestDataFile { Version = ver1, Data = "data" };
    string fileName = Path.Combine(_outDir, "test-file.dat");
    file1.Write(fileName);

    BdsVersion.ReplaceInDataFile(fileName, (ref BdsVersion v) => v = ver2);

    TestDataFile file2 = new TestDataFile();
    file2.Read(fileName);
    Assert.AreNotEqual(file1.Version, file2.Version);
    Assert.AreEqual(ver2, file2.Version);
    Assert.AreEqual(file1.Data, file2.Data);
}
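// Test_Replace writes into a fixture-level _outDir. A minimal sketch of how such
// an output directory might be prepared, assuming an MSTest-style fixture; the
// directory location and the SetUp method name are assumptions, not part of the
// original source.
private string _outDir;

[TestInitialize]
public void SetUp()
{
    _outDir = Path.Combine(Path.GetTempPath(), "TestDataFileTests");
    Directory.CreateDirectory(_outDir);
}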
void AddTestDataCycle(int run, uint run_seconds, double run_count_time, Measurement meas, TestDataFile td, string pivot = "", int cfindex = -1)
{
    Cycle cycle = new Cycle(datalog);
    try
    {
        cycle.UpdateDataSourceId(ConstructedSource.CycleFile, meas.Detector.Id.SRType,
                                 td.DTO.AddSeconds(run_seconds), td.Filename);
        cycle.seq = run;
        cycle.TS = TimeSpan.FromSeconds(run_count_time); // dev note: check if this is always only in seconds, or fractions of a second
        // hn -- 9/4/14 -- not integer for count time. Convert from double seconds here.
        // Joe still has force to int. bleck!

        /* init run tests */
        cycle.SetQCStatus(meas.Detector.MultiplicityParams, QCTestStatus.None); // APluralityOfMultiplicityAnalyzers: creates entry if not found, expand from the single mult key from detector here
        meas.Add(cycle, cfindex);

        /* singles, reals + accidentals, accidentals */
        string l = td.reader.ReadLine();
        string[] zorks = l.Split((char[])null, StringSplitOptions.RemoveEmptyEntries);
        double[] v = new double[5];
        for (int z = 0; z < 5; z++)
        {
            double d;
            bool b = double.TryParse(zorks[z], out d);
            if (b)
            {
                v[z] = d;
            }
        }
        cycle.Totals = (ulong)v[0];

        MultiplicityCountingRes mcr = new MultiplicityCountingRes(meas.Detector.MultiplicityParams.FA, cycle.seq); // APluralityOfMultiplicityAnalyzers: expand when detector has multiple analyzers
        cycle.CountingAnalysisResults.Add(meas.Detector.MultiplicityParams, mcr); // APluralityOfMultiplicityAnalyzers: expand when detector has multiple analyzers
        mcr.AB.TransferIntermediates(meas.Detector.AB); // copy alpha beta onto the cycle's results
        mcr.Totals = cycle.Totals;
        mcr.TS = cycle.TS;
        mcr.ASum = v[4];
        mcr.RASum = v[3];
        mcr.Scaler1.v = v[1];
        mcr.Scaler2.v = v[2];
        cycle.SinglesRate = v[0] / run_count_time;

        // assign the hits to a single channel (0)
        cycle.HitsPerChannel[0] = cycle.Totals;
        mcr.RawSinglesRate.v = cycle.SinglesRate;

        /* number of multiplicity values */
        string mv = td.reader.ReadLine();
        ushort k = 0;
        ushort.TryParse(mv, out k);
        if (k == 0) // test data files require an entry with 1 bin set 0s for the absence of multiplicity, go figure
        {
            ctrllog.TraceEvent(LogLevels.Error, 440, "This" + pivot + " cycle " + run.ToString() + " has no good multiplicity data.");
            return;
        }
        mcr.MinBins = mcr.MaxBins = k;
        mcr.RAMult = new ulong[k];
        mcr.NormedAMult = new ulong[k];
        mcr.UnAMult = new ulong[k]; // todo: compute this

        /* multiplicity values */
        for (ushort j = 0; j < k; j++)
        {
            string ra = td.reader.ReadLine();
            string[] blorks = ra.Split((char[])null, StringSplitOptions.RemoveEmptyEntries);
            double[] ve = new double[2];
            for (int z = 0; z < 2; z++)
            {
                double d;
                bool b = double.TryParse(blorks[z], out d);
                if (b)
                {
                    ve[z] = d;
                }
            }
            mcr.RAMult[j] = (ulong)ve[0];
            mcr.NormedAMult[j] = (ulong)ve[1];
        }
        ctrllog.TraceEvent(LogLevels.Verbose, 5439, "Cycle " + cycle.seq.ToString() + pivot +
            ((mcr.RAMult[0] + mcr.NormedAMult[0]) > 0 ? " max:" + mcr.MaxBins.ToString() : " *"));
    }
    catch (Exception e)
    {
        ctrllog.TraceEvent(LogLevels.Warning, 33085, pivot + "cycle processing error {0} {1} {2}", run, e.Message, pivot);
    }
}
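// Based on the reads in AddTestDataCycle, each cycle block in the test data file
// appears to be: one line with five numbers (totals, scaler 1, scaler 2, R+A sum,
// A sum), one line with the multiplicity bin count, then that many lines of
// "R+A  normed-A" pairs. A minimal sketch that emits one such block for a
// synthetic test file; the method name and the sample values are illustrative,
// not part of the original source.
static void WriteSampleCycleBlock(System.IO.TextWriter w)
{
    // totals  scaler1  scaler2  R+A sum  A sum
    w.WriteLine("125034 0 0 1021.5 987.25");
    // number of multiplicity bins (must be at least 1, per the k == 0 check above)
    w.WriteLine("3");
    // one "R+A  normed A" pair per bin
    w.WriteLine("118000 117500");
    w.WriteLine("6500 6400");
    w.WriteLine("534 520");
}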