internal static void valueComparator(RowSet rawrowset, List<object[]> insertedRows) { List<Row> rowset = rawrowset.GetRows().ToList(); Assert.True(rowset.Count == insertedRows.Count, string.Format( "Returned row count does not equal the number of rows that were inserted! \n Returned: {0} \n Expected: {1} \n", rowset.Count, insertedRows.Count)); int i = 0; foreach (Row row in rowset) { if (row.Any(col => col.GetType() == typeof(byte[]))) for (int j = 0; j < row.Length; j++) { Assert.AreEqual(insertedRows[i][j], row[j]); } else { for (int m = 0; m < row.Length; m++) { if (!row[m].Equals(insertedRows[i][m])) { insertedRows.Reverse(); /* the expected rows may have been captured in reverse order; if the reversed list does not match either, restore the original order before asserting */ if (!row[m].Equals(insertedRows[i][m])) insertedRows.Reverse(); } Assert.AreEqual(insertedRows[i][m], row[m], "Inserted data does not match the returned data."); } } i++; } }
public override IEnumerable<Row> Reduce(RowSet input, Row output, string[] args) { DateTime lastLogin = default(DateTime); int sessionId = 1; foreach (Row row in input.Rows) { DateTime currentLoginTime = (DateTime)row["logintime"].Value; if (lastLogin == default(DateTime)) { lastLogin = currentLoginTime; } else { if (lastLogin.AddMinutes(30) < currentLoginTime) sessionId++; lastLogin = currentLoginTime; } row.CopyTo(output); output["sessionId"].Set(sessionId); yield return output; } }
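// The reducer above implements a simple gap-based sessionization rule: a login more than
// 30 minutes after the previous one starts a new session. A minimal standalone sketch of the
// same rule over plain DateTime values (the type and method names below are illustrative, not
// part of the SCOPE job) makes the increment condition easy to test in isolation:
using System;
using System.Collections.Generic;

static class SessionizerSketch
{
    // Assigns a session id to each login time in chronological order; a new session
    // starts when more than 30 minutes have passed since the previous login.
    public static IEnumerable<(DateTime LoginTime, int SessionId)> Assign(IEnumerable<DateTime> orderedLogins)
    {
        DateTime? lastLogin = null;
        int sessionId = 1;
        foreach (DateTime current in orderedLogins)
        {
            if (lastLogin.HasValue && lastLogin.Value.AddMinutes(30) < current)
            {
                sessionId++;
            }
            lastLogin = current;
            yield return (current, sessionId);
        }
    }
}
// Logins at 09:00, 09:20 and 10:05 come back with session ids 1, 1 and 2, matching the reducer.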
/// <summary> /// Creates a rowset. /// The columns are named: col_0, ..., col_n /// The row values are: row_0_col_0, ..., row_m_col_n /// </summary> public RowSet CreateStringsRowset(int columnLength, int rowLength, string valueModifier = null) { var columns = new List<CqlColumn>(); var columnIndexes = new Dictionary<string, int>(); for (var i = 0; i < columnLength; i++) { var c = new CqlColumn() { Index = i, Name = "col_" + i, TypeCode = ColumnTypeCode.Text, Type = typeof(string) }; columns.Add(c); columnIndexes.Add(c.Name, c.Index); } var rs = new RowSet(); for (var j = 0; j < rowLength; j++) { var rowValues = new List<byte[]>(); foreach (var c in columns) { var value = valueModifier + "row_" + j + "_col_" + c.Index; rowValues.Add(Encoding.UTF8.GetBytes(value)); } rs.AddRow(new Row(1, rowValues.ToArray(), columns.ToArray(), columnIndexes)); } return rs; }
/// <summary> /// Main processing script. /// </summary> /// <param name="input">The input row.</param> /// <param name="outputRow">The output row.</param> /// <param name="args">The arguments.</param> /// <returns>The IEnumerable output row.</returns> public override IEnumerable<Row> Process(RowSet input, Row outputRow, string[] args) { string f0Dir = "f0"; string expandDir = "expand"; string svmDir = "svm"; Directory.CreateDirectory(f0Dir); Directory.CreateDirectory(expandDir); Directory.CreateDirectory(svmDir); foreach (var row in input.Rows) { outputRow["WaveID"].Set(row["WaveID"].String); outputRow["WaveBinary"].Set(row["WaveBinary"].Binary); outputRow["WaveAlignments"].Set(row["WaveAlignments"].String); outputRow["RawF0"].Set(row["RawF0"].String); outputRow["LPCC"].Set(row["LPCC"].Binary); outputRow["OF0"].Set(row["OF0"].String); outputRow["LSP"].Set(row["LSP"].Binary); outputRow["Pow"].Set(row["Pow"].String); outputRow["MBE"].Set(row["MBE"].String); string waveId = row["WaveID"].String; string f0File = JobBase.GenerateLocalFile(waveId, row["RawF0"].String, FileExtensions.F0File, true, f0Dir); string expandFeatureFile = JobBase.GenerateLocalFile(waveId, row["EXP"].String, FileExtensions.Text, false, expandDir); string svmFile = Path.Combine(svmDir, waveId + "." + FileExtensions.Text); string[] argument = { f0File, expandFeatureFile, svmFile }; F0ExtractorCOSMOS.FormatFeaturesOneFile(argument, null); outputRow["SVM"].Set(File.ReadAllText(svmFile)); yield return outputRow; } }
/// <summary> /// Main processing script. /// </summary> /// <param name="input">The input row.</param> /// <param name="outputRow">The output row.</param> /// <param name="args">The arguments.</param> /// <returns>The IEnumerable output row.</returns> public override IEnumerable<Row> Process(RowSet input, Row outputRow, string[] args) { string frameShift = args[0]; string frameLength = args[1]; Directory.CreateDirectory("relatedFeatures"); foreach (var row in input.Rows) { outputRow["WaveID"].Set(row["WaveID"].String); outputRow["WaveBinary"].Set(row["WaveBinary"].Binary); outputRow["WaveAlignments"].Set(row["WaveAlignments"].String); outputRow["RawF0"].Set(row["RawF0"].String); outputRow["LPCC"].Set(row["LPCC"].Binary); outputRow["OF0"].Set(row["OF0"].String); outputRow["LSP"].Set(row["LSP"].Binary); outputRow["Pow"].Set(row["Pow"].String); outputRow["MBE"].Set(row["MBE"].String); outputRow["NCCF"].Set(row["NCCF"].String); string waveId = row["WaveID"].String; string wave = JobBase.GenerateLocalFile(waveId, row["WaveBinary"].Binary, FileExtensions.Waveform); string relatedFeatureFile = Path.Combine("relatedFeatures", waveId + "." + FileExtensions.Text); string[] argument = { wave, relatedFeatureFile, frameShift, frameLength }; F0ExtractorCOSMOS.ExtractRelatedFeaturesOneFile(argument, null); outputRow["RF"].Set(File.ReadAllText(relatedFeatureFile)); yield return outputRow; } }
/// <summary> /// Main processing script. /// </summary> /// <param name="input">The input row.</param> /// <param name="outputRow">The output row.</param> /// <param name="args">The arguments.</param> /// <returns>The IEnumerable output row.</returns> public override IEnumerable<Row> Process(RowSet input, Row outputRow, string[] args) { string svmDir = "svm"; string scaledSVMDir = "scaledSVM"; Directory.CreateDirectory(svmDir); Directory.CreateDirectory(scaledSVMDir); foreach (var row in input.Rows) { outputRow["WaveID"].Set(row["WaveID"].String); outputRow["WaveBinary"].Set(row["WaveBinary"].Binary); outputRow["WaveAlignments"].Set(row["WaveAlignments"].String); outputRow["RawF0"].Set(row["RawF0"].String); outputRow["LPCC"].Set(row["LPCC"].Binary); outputRow["OF0"].Set(row["OF0"].String); outputRow["LSP"].Set(row["LSP"].Binary); outputRow["Pow"].Set(row["Pow"].String); outputRow["MBE"].Set(row["MBE"].String); string waveId = row["WaveID"].String; string svmFile = JobBase.GenerateLocalFile(waveId, row["SVM"].String, FileExtensions.Text, false, svmDir); string scaledSVMFile = Path.Combine(scaledSVMDir, waveId + "." + FileExtensions.Text); string svmRangeFile = Path.GetFileName(this.Job.ReplaceVariable["SVMRANGE"]); // File.WriteAllText(svmRangeFile, TmocFile.NormalizeOutput(svmRangeFile)). string argument = Helper.NeutralFormat(" -r \"{0}\" \"{1}\"", svmRangeFile, svmFile); DelayedLogger logger = new DelayedLogger(new TextLogger(scaledSVMFile)); CommandLine.RunCommand(Path.GetFileName(this.Job.ReplaceVariable["SVMSCALETOOL"]), argument, Environment.CurrentDirectory, logger.Writer, logger.Writer, null); logger.Dispose(); outputRow["SSVM"].Set(File.ReadAllText(scaledSVMFile)); yield return outputRow; } }
/// <summary> /// Main processing script. /// </summary> /// <param name="input">The input row.</param> /// <param name="outputRow">The output row.</param> /// <param name="args">The arguments.</param> /// <returns>The IEnumerable output row.</returns> public override IEnumerable<Row> Process(RowSet input, Row outputRow, string[] args) { float minF0Value = float.Parse(args[0]); float maxF0Value = float.Parse(args[1]); string uvDir = "uv"; string fZeroDir = "f0"; string smoothedFZeroDir = "smoothedF0"; Directory.CreateDirectory(uvDir); Directory.CreateDirectory(fZeroDir); Directory.CreateDirectory(smoothedFZeroDir); foreach (var row in input.Rows) { outputRow["WaveID"].Set(row["WaveID"].String); outputRow["WaveBinary"].Set(row["WaveBinary"].Binary); outputRow["WaveAlignments"].Set(row["WaveAlignments"].String); outputRow["RawF0"].Set(row["RawF0"].String); outputRow["LPCC"].Set(row["LPCC"].Binary); outputRow["OF0"].Set(row["OF0"].String); outputRow["LSP"].Set(row["LSP"].Binary); outputRow["Pow"].Set(row["Pow"].String); outputRow["MBE"].Set(row["MBE"].String); string waveId = row["WaveID"].String; string f0File = JobBase.GenerateLocalFile(waveId, row["RawF0"].String, FileExtensions.F0File, true, fZeroDir); string uvFile = JobBase.GenerateLocalFile(waveId, row["UV"].String, FileExtensions.Text, true, uvDir); string smoothedF0File = Path.Combine(smoothedFZeroDir, waveId + "." + FileExtensions.F0File); string[] argument = { f0File, uvFile, smoothedF0File, minF0Value.ToString(), maxF0Value.ToString() }; F0ExtractorCOSMOS.SmoothOneF0File(argument, null); outputRow["SF0"].Set(JobBase.GetTextFile(smoothedF0File)); yield return outputRow; } }
/// <summary> /// Main processing script. /// </summary> /// <param name="input">The input row.</param> /// <param name="outputRow">The output row.</param> /// <param name="args">The arguments.</param> /// <returns>The IEnumerable output row.</returns> public override IEnumerable<Row> Process(RowSet input, Row outputRow, string[] args) { string uvDir = "uv"; string scaledSVMDir = "scaledSVM"; Directory.CreateDirectory(uvDir); Directory.CreateDirectory(scaledSVMDir); foreach (var row in input.Rows) { outputRow["WaveID"].Set(row["WaveID"].String); outputRow["WaveBinary"].Set(row["WaveBinary"].Binary); outputRow["WaveAlignments"].Set(row["WaveAlignments"].String); outputRow["RawF0"].Set(row["RawF0"].String); outputRow["LPCC"].Set(row["LPCC"].Binary); outputRow["OF0"].Set(row["OF0"].String); outputRow["LSP"].Set(row["LSP"].Binary); outputRow["Pow"].Set(row["Pow"].String); outputRow["MBE"].Set(row["MBE"].String); string waveId = row["WaveID"].String; string scaledSVMFile = JobBase.GenerateLocalFile(waveId, row["SSVM"].String, FileExtensions.Text, false, scaledSVMDir); string uvFile = Path.Combine(uvDir, waveId + "." + FileExtensions.Text); string argument = Helper.NeutralFormat(" \"{0}\" \"{1}\" \"{2}\"", scaledSVMFile, Path.GetFileName(this.Job.ReplaceVariable["UVMODELFILE"]), uvFile); CommandLine.RunCommand(Path.GetFileName(this.Job.ReplaceVariable["SVMPREDICTTOOL"]), argument, "./"); outputRow["UV"].Set(JobBase.GetTextFile(uvFile)); yield return outputRow; } }
private void AddDataToWorksheet(RowSet resultset, Excel.Range target, bool useFormula) { var rgTitles = target.Resize[1, resultset.ColLen]; Utils.AddTitlesToRange(rgTitles, resultset.Titles); var rgData = ((Excel.Range)target.Cells[2, 1]).Resize[resultset.RowLen, resultset.ColLen]; Utils.AddDataToRange(rgData, resultset.Data, useFormula); Utils.AddTagsToRange(rgData, resultset.Diff, ctrlInteriorColor.SelectedColor, ctrlFontColor.SelectedColor); }
internal CqlReader(RowSet rows) { popul = rows; for (int idx = 0; idx < popul.Columns.Length; idx++) colidx.Add(popul.Columns[idx].Name, idx); enumRows = popul.GetRows(); enumerRows = enumRows.GetEnumerator(); }
public void FetchAsync_Pocos_WithCql_Empty() { var rowset = new RowSet(); var mappingClient = GetMappingClient(rowset); var userTask = mappingClient.FetchAsync<PlainUser>("SELECT * FROM users"); var users = userTask.Result; Assert.NotNull(users); Assert.AreEqual(0, users.Count()); }
/// <summary> /// Main processing script. /// </summary> /// <param name="input">The input row.</param> /// <param name="outputRow">The output row.</param> /// <param name="args">The arguments.</param> /// <returns>The IEnumerable output row.</returns> public override IEnumerable<Row> Process(RowSet input, Row outputRow, string[] args) { foreach (var row in input.Rows) { outputRow["WaveID"].Set(row["WaveID"].String); outputRow["SVM"].Set(row["SVM"].String); yield return outputRow; } }
private ISession GetSession(RowSet result) { var sessionMock = new Mock<ISession>(MockBehavior.Strict); sessionMock .Setup(s => s.ExecuteAsync(It.IsAny<IStatement>())) .Returns(TestHelper.DelayedTask(result, 200)) .Verifiable(); sessionMock.Setup(s => s.PrepareAsync(It.IsAny<string>())).Returns(TaskHelper.ToTask(GetPrepared("Mock query"))); sessionMock.Setup(s => s.BinaryProtocolVersion).Returns(2); return sessionMock.Object; }
public void CommandExecuteReaderUsesSyncExecute() { var connection = new CqlConnection(); var sessionMock = new Mock<ISession>(); var rowset = new RowSet(); sessionMock .Setup(s => s.Execute(It.IsAny<string>(), It.IsAny<ConsistencyLevel>())) .Returns(rowset) .Verifiable(); connection.ManagedConnection = sessionMock.Object; var cmd = (CqlCommand) connection.CreateCommand(); cmd.CommandText = "INSERT INTO dummy_cf (a,b) VALUES (1,2)"; var reader = cmd.ExecuteReader(); reader.Dispose(); sessionMock.Verify(); }
private void AddColumnInfoToWorksheet(RowSet resultset, Excel.Range target) { object[,] info = ExcelArray<object>.Create(resultset.RowLen, 1); var rgTitles = (Excel.Range)target.Rows[0]; var rgInfo = (Excel.Range)target.Columns[0]; for (int ri = 1; ri <= resultset.RowLen; ri++) { switch (resultset.Match[ri]) { case XlRowMatch.MatchWithDiff: info[ri, 1] = "'!="; break; case XlRowMatch.NoMatch: info[ri, 1] = "'+"; break; } } Utils.SetCellStyle(rgInfo, 0xFF); rgTitles.Value = resultset.Titles; rgTitles.Font.Bold = true; rgInfo.Value = info; rgInfo.HorizontalAlignment = Excel.Constants.xlCenter; rgInfo.ColumnWidth = 2.5; }
public async Task RetryDuringSplitCell() { var request = new ReadRowsRequest { Rows = RowSet.FromRowRanges(RowRange.Closed("a", "z")) }; var client = Utilities.CreateReadRowsMockClient( request, initialStreamResponse: new[] { new ReadRowsResponse { Chunks = { CreateChunk("a", "cf1", "column1", "valu", valueSize: 6) } } }, responsesForRetryStreams: new[] { new [] { new ReadRowsResponse { Chunks = { CreateChunk("a", "cf1", "column1", "valu", valueSize: 6), CreateContinuationChunk("e1", commitRow: true) } } } }); var rows = await client.ReadRows(request).ToList(); Assert.Equal(1, rows.Count); var row = rows[0]; Assert.Equal("a", row.Key.ToStringUtf8()); Assert.Equal("cf1", row.Families[0].Name); Assert.Equal("column1", row.Families[0].Columns[0].Qualifier.ToStringUtf8()); Assert.Equal("value1", row.Families[0].Columns[0].Cells[0].Value.ToStringUtf8()); }
public override IEnumerable <Row> Reduce(RowSet input, Row outputRow, string[] args) { Dictionary <string, long> columns = new Dictionary <string, long>(); Dictionary <string, string> log = new Dictionary <string, string>(); foreach (Row row in input.Rows) { foreach (ColumnInfo item in row.Schema.Columns) { if (item.Name == "log") { foreach (string i in (row[item.Name].String ?? string.Empty).Split(';')) { List <string> pair = i.Split(':').ToList(); if (pair.Count() > 1) { if (!log.ContainsKey(pair[0])) { log.Add(pair[0], pair[1]); } } } } else { if (!columns.ContainsKey(item.Name)) { columns.Add(item.Name, 0); } columns[item.Name] = columns[item.Name] + (row[item.Name].LongQ ?? 0); /* coalesce before adding: ?? binds more loosely than +, so without the parentheses a null LongQ would reset the running total to 0 */ } } } foreach (string name in columns.Keys) { outputRow[name].Set(columns[name]); } outputRow["log"].Set(string.Join(";", log.Select(x => string.Format("{0}:{1}", x.Key, x.Value)))); yield return(outputRow); }
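// The parenthesization above matters because ?? binds more loosely than +. A small
// self-contained illustration of the difference (the values are made up):
using System;

static class NullCoalescingPrecedence
{
    static void Main()
    {
        long total = 10;
        long? increment = null;

        // Without parentheses this parses as (total + increment) ?? 0, so a null
        // increment throws away the running total and yields 0.
        long wrong = total + increment ?? 0;

        // With parentheses a null increment is treated as 0 and the total is kept.
        long right = total + (increment ?? 0);

        Console.WriteLine("{0} vs {1}", wrong, right); // prints "0 vs 10"
    }
}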
/// <summary> /// Creates a new user account. /// </summary> public override async Task <CreateUserResponse> CreateUser(CreateUserRequest request, ServerCallContext context) { // Hash the user's password string hashedPassword = PasswordHash.CreateHash(request.Password); DateTimeOffset timestamp = DateTimeOffset.UtcNow; PreparedStatement preparedCredentials = await _statementCache.GetOrAddAsync( "INSERT INTO user_credentials (email, password, userid) VALUES (?, ?, ?) IF NOT EXISTS"); // Insert the credentials info (this will return false if a user with that email address already exists) IStatement insertCredentialsStatement = preparedCredentials.Bind(request.Email, hashedPassword, request.UserId.ToGuid()); RowSet credentialsResult = await _session.ExecuteAsync(insertCredentialsStatement).ConfigureAwait(false); // The first column in the row returned will be a boolean indicating whether the change was applied (TODO: Compensating action for user creation failure?) var applied = credentialsResult.Single().GetValue <bool>("[applied]"); if (applied == false) { var status = new Status(StatusCode.AlreadyExists, "A user with that email address already exists"); throw new RpcException(status); } PreparedStatement preparedUser = await _statementCache.GetOrAddAsync( "INSERT INTO users (userid, firstname, lastname, email, created_date) VALUES (?, ?, ?, ?, ?)"); // Insert the "profile" information using a parameterized CQL statement IStatement insertUserStatement = preparedUser.Bind(request.UserId.ToGuid(), request.FirstName, request.LastName, request.Email, timestamp) .SetTimestamp(timestamp); await _session.ExecuteAsync(insertUserStatement).ConfigureAwait(false); // Tell the world about the new user await _bus.Publish(new UserCreated { UserId = request.UserId, FirstName = request.FirstName, LastName = request.LastName, Email = request.Email, Timestamp = timestamp.ToTimestamp() }).ConfigureAwait(false); return(new CreateUserResponse()); }
protected void Page_Load(object sender, EventArgs e) { if (!Page.IsPostBack) { Cluster cluster = Cluster.Builder().AddContactPoint("127.0.0.1").Build(); ISession session = cluster.Connect("cardb"); RowSet result = session.Execute("select * from tbl_order"); int i = 0; TableRow r1 = new TableRow(); TableCell cr1 = new TableCell(); cr1.Text = "Client Name"; TableCell cr2 = new TableCell(); cr2.Text = "Order Date"; TableCell cr3 = new TableCell(); cr3.Text = "Mobile"; TableCell cr4 = new TableCell(); cr4.Text = "billamt"; TableRow rr = new TableRow(); rr.Cells.Add(cr1); rr.Cells.Add(cr2); rr.Cells.Add(cr3); rr.Cells.Add(cr4); Table1.Rows.Add(rr); foreach (Row row in result) { TableRow r = new TableRow(); TableCell c1 = new TableCell(); c1.Text = row["username"].ToString(); r.Cells.Add(c1); TableCell c2 = new TableCell(); c2.Text = row["orddate"].ToString(); r.Cells.Add(c2); TableCell c3 = new TableCell(); c3.Text = row["mobile"].ToString(); r.Cells.Add(c3); TableCell c4 = new TableCell(); c4.Text = row["billamt"].ToString(); r.Cells.Add(c4); Table1.Rows.Add(r); } } }
public void UpdateRowSetWithUDT(RowSet R, string SQL) { IEnumerable <Row> Rows = R.GetRows(); CqlColumn[] Cols = R.Columns; Type TP = null; int i = 0; RowSet s = null; foreach (Row r in Rows) { int j = 0; foreach (CqlColumn col in Cols) { if (col.TypeCode.ToString() == "Udt" || col.TypeCode.ToString() == "Set" || col.TypeCode.ToString() == "Map" || col.TypeCode.ToString() == "List") { dynamic value = r.GetValue(typeof(object), col.Name); if (col.TypeCode.ToString() == "Udt") { UDT(col, R); } if (col.TypeCode.ToString() == "List") { ListUDT(col, R); } if (col.TypeCode.ToString() == "Set") { SetUDT(col, R); } if (col.TypeCode.ToString() == "Map") { MapUDT(col, R); } } j++; } i++; } //TODO: Need to find a way to execute the SQL once, not twice s = session.Execute(SQL); UpdateOutValues(s, TP); }
/// <summary> /// Marks this instance as completed. /// If ex is not null, sets the exception. /// If action is not null, it invokes it using the default task scheduler. /// </summary> private bool SetCompleted(Exception ex, RowSet result, Action action) { var finishedNow = Interlocked.CompareExchange(ref _state, StateCompleted, StateInit) == StateInit; if (!finishedNow) { return(false); } //Cancel the current timer //When the next execution timer is being scheduled at the *same time* //the timer is not going to be cancelled, in that case, this instance is going to stay alive a little longer if (_nextExecutionTimeout != null) { _nextExecutionTimeout.Cancel(); } foreach (var execution in _running) { execution.Cancel(); } if (ex != null) { _tcs.TrySetException(ex); return(true); } if (action != null) { //Create a new Task using the default scheduler, invoke the action and set the result Task.Factory.StartNew(() => { try { action(); _tcs.TrySetResult(result); } catch (Exception actionEx) { _tcs.TrySetException(actionEx); } }); return(true); } _tcs.TrySetResult(result); return(true); }
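// The Interlocked.CompareExchange guard above is what makes completion idempotent when several
// racing executions try to finish first. A stripped-down sketch of the same pattern (class,
// field and constant names here are illustrative, not the driver's):
using System.Threading;
using System.Threading.Tasks;

class CompleteOnce<T>
{
    private const int StateInit = 0;
    private const int StateCompleted = 1;
    private int _state = StateInit;
    private readonly TaskCompletionSource<T> _tcs = new TaskCompletionSource<T>();

    public Task<T> Completion => _tcs.Task;

    // Only the caller that flips the state from Init to Completed gets to set the
    // result; every later caller observes the lost race and returns false.
    public bool TrySetResult(T result)
    {
        if (Interlocked.CompareExchange(ref _state, StateCompleted, StateInit) != StateInit)
        {
            return false;
        }
        return _tcs.TrySetResult(result);
    }
}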
public T FirstOrDefault <T>(Cql cql) { // Get the statement to execute and execute it _cqlGenerator.AddSelect <T>(cql); Statement statement = _statementFactory.GetStatement(cql); RowSet rows = _session.Execute(statement); Row row = rows.FirstOrDefault(); // Map to return type or return default if (row == null) { return(default(T)); } Func <Row, T> mapper = _mapperFactory.GetMapper <T>(cql.Statement, rows); return(mapper(row)); }
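// Hypothetical call site for the FirstOrDefault<T> helper above, assuming a mapped User POCO
// and the driver's Cql helper for building the statement (names are illustrative):
// var user = mapper.FirstOrDefault<User>(Cql.New("SELECT * FROM users WHERE userid = ?", userId));
// Returning default(T) when no row comes back lets callers use a simple null check instead of
// handling an exception for the empty-result case.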
public List <Post> getProductPosts(int idProduct) { ISession session = cluster.Connect("postsdb"); string transaction = "select * from post where " + "productid = " + idProduct + " allow filtering;"; RowSet rows = session.Execute(transaction); List <Post> posts = new List <Post>(); foreach (Row row in rows) { Post temp = new Post(); temp.postid = row.GetValue <int>("postid"); temp.productid = row.GetValue <int>("productid"); temp.userid = row.GetValue <string>("userid"); temp.text = row.GetValue <string>("text"); posts.Add(temp); } return(posts); }
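// The query above splices idProduct into the CQL text. With a recent DataStax C# driver the
// same lookup can be written as a parameterized SimpleStatement, which leaves quoting to the
// driver. This is an alternative sketch, not a change to the method above; it assumes the same
// cluster field and Post type, and a driver version whose SimpleStatement constructor accepts
// positional values:
public List<Post> GetProductPostsParameterized(int idProduct)
{
    ISession session = cluster.Connect("postsdb");
    var statement = new SimpleStatement(
        "select * from post where productid = ? allow filtering", idProduct);
    RowSet rows = session.Execute(statement);

    var posts = new List<Post>();
    foreach (Row row in rows)
    {
        posts.Add(new Post
        {
            postid = row.GetValue<int>("postid"),
            productid = row.GetValue<int>("productid"),
            userid = row.GetValue<string>("userid"),
            text = row.GetValue<string>("text")
        });
    }
    return posts;
}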
public override IEnumerable <Row> Reduce(RowSet input, Row outputRow, string[] args) { int topk = 100; string iid = ""; List <Tuple <string, float> > uid_score_list = new List <Tuple <string, float> >(); foreach (Row row in input.Rows) { iid = row[1].String; string uid = row[0].String; float score = row[2].Float; uid_score_list.Add(new Tuple <string, float>(uid, score)); } uid_score_list.Sort((a, b) => b.Item2.CompareTo(a.Item2)); int k = Math.Min(topk, uid_score_list.Count); string value = ""; for (int i = 0; i < k; i++) { value += "," + uid_score_list[i].Item1; } if (value.Length > 0) { outputRow[0].Set(iid); outputRow[1].Set(value.Substring(1)); } else { outputRow[0].Set(iid); outputRow[1].Set(""); } yield return(outputRow); }
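// The sort-then-take-top-k loop above is equivalent to a short LINQ pipeline; a sketch as a
// standalone helper over the same (uid, score) tuples:
using System;
using System.Collections.Generic;
using System.Linq;

static class TopKSketch
{
    // Returns the uids of the k highest-scoring entries as a comma-separated string.
    public static string TopUids(IEnumerable<Tuple<string, float>> uidScores, int k)
    {
        return string.Join(",",
            uidScores
                .OrderByDescending(t => t.Item2)   // highest score first
                .Take(k)
                .Select(t => t.Item1));
    }
}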
private async Task <List <Guid> > GetUserTweetIds(string user) { List <Guid> tweetIds = new List <Guid>(); if (getUserTweets == null) { getUserTweets = await session.PrepareAsync("select tweets from user_tweets where username = ?"); } var stmt = getUserTweets.Bind(user); RowSet results = await session.ExecuteAsync(stmt); Row row = results.FirstOrDefault(); if (row != null) { tweetIds = ((IEnumerable <Guid>)row["tweets"]).ToList(); } return(tweetIds); }
public List <decimal> GetAnnWeights(string annid, string mktdataid, int version) { if (_session == null) { throw new ApplicationException(EXCEPTION_CONNECTION_CLOSED); } RowSet weights = executeQuery(string.Format("select weights from staticdata.anncalibration where annid='{0}' and mktdataid='{1}' and version={2} ALLOW FILTERING", annid, mktdataid, version)); var weightLst = new List <decimal>(); foreach (var row in weights) { foreach (var weight in row) { weightLst.AddRange((IEnumerable <decimal>)weight); } } return(weightLst); }
public void TestColumnFiltering() { var wrap = new RowSet(GetDataTable001()); var that = new RowSet(GetDataTable002()); var ds = wrap.Join(that, (l, r) => l.FirstName == r.FirstName, ScopedAttribute.Create(new[] { new[] { "age", "firstname", "FirstName" }, new[] { "age", "middlename", "MiddleInitial" }, new[] { "age", "lastname", "LastName" }, new[] { "age", "age", "Age" }, new[] { "hobby", "hobby", "Hobby" } }).ToArray()); Assert.AreEqual(4, ds.Rows.Count); Assert.AreEqual(5, ds.Columns.Count); ds.WriteLine(); }
public static bool ObrisiSvePrijavePoUserimaPoUsername(string username) { try { ISession session = SessionManager.GetSession(); if (session == null) { return(false); } RowSet row = session.Execute("delete from \"Prijava_po_userima\" where username='" + username + "';"); return(true); } catch (Exception e) { return(false); } }
public static void CreateLicniMeni(string restoranID, string korisnikID, string licniMeniID, List <string> lista) { ISession session = SessionManager.GetSession(); if (session == null) { return; } RowSet licniMeniData = session.Execute("insert into \"LicniMeni\" (\"restoranID\", \"korisnikID\", \"licniMeniID\") values ('" + restoranID + "', '" + korisnikID + "', '" + licniMeniID + "')"); RowSet licniMeniData2 = session.Execute("insert into \"LicniMeni_by_korisnikID\" (\"restoranID\", \"korisnikID\", \"licniMeniID\") values ('" + restoranID + "', '" + korisnikID + "', '" + licniMeniID + "')"); RowSet data, data2; foreach (string l in lista) { data = session.Execute("update \"LicniMeni\" set spisak = spisak + { '" + l + "' } where \"restoranID\" = '" + restoranID + "' and \"korisnikID\"= '" + korisnikID + "' and \"licniMeniID\"= '" + licniMeniID + "'"); data2 = session.Execute("update \"LicniMeni_by_korisnikID\" set spisak = spisak + { '" + l + "' } where \"restoranID\" = '" + restoranID + "' and \"korisnikID\"= '" + korisnikID + "' and \"licniMeniID\"= '" + licniMeniID + "'"); } }
public Tuple <string, string> GetHtml(long id) { try { RowSet r = _session.Execute(string.Format("select html, url from html where id = {0}", id)); var row = r.GetRows().ElementAt(0); return(new Tuple <string, string>(row.GetValue(typeof(string), "url").ToString(), row.GetValue(typeof(string), "html").ToString())); } catch (ArgumentOutOfRangeException ex01) { return(null); } catch (Exception ex) { Log.Error(ex); return(null); } }
public override IEnumerable<Row> Reduce(RowSet input, Row outputRow, string[] args) { string uScreenName = null; Dictionary<string, int> sDic = new Dictionary<string, int>(); Dictionary<string, int> mDic = new Dictionary<string, int>(); foreach (Row row in input.Rows) { if (uScreenName == null) { uScreenName = row["uScreenName"].String; } string toname = row["toname"].String; Dictionary<string, int> tmpDic = mDic; if (row["relation"].String.Equals("s")) { tmpDic = sDic; } if (tmpDic.ContainsKey(toname)) { tmpDic[toname] = tmpDic[toname] + 1; } else { tmpDic[toname] = 1; } } sDic = Sorted(sDic); mDic = Sorted(mDic); List<string> res = new List<string>(); foreach (KeyValuePair<string, int> entity in mDic) { if (res.Count == 20) break; res.Add(entity.Key); } outputRow["uScreenName"].Set(uScreenName); outputRow["rNames"].Set(ListToDebugString(res)); yield return outputRow; }
public bool RemoveFailedTagData(IEnumerable <SignalsInfo> signals) { var batchStm = new BatchStatement(); RowSet rs = null; try { foreach (SignalsInfo signal in signals) { batchStm.Add(cassandraSessionMgr.deletePreparedStmt.Bind(signal.ID, Convert.ToInt32(signal.MYear), signal.FTime)); } if (ConnectionState) { rs = cassandraSessionMgr.currentSession.Execute(batchStm); } else { cassandraSessionMgr.StopCassandraSession(); throw new Exception("M:- RemoveFailedTagData | V:- Cassandra Session Down"); } } catch (Cassandra.NoHostAvailableException ex) { cassandraSessionMgr.StopCassandraSession(); log.Error("M:- RemoveFailedTagData | V:- cassandra db no host available | Ex:- ", ex); return(false); } catch (Exception ex) { log.Error("M:- RemoveFailedTagData | V:- error deleting batch [failed data] | Ex:- ", ex); return(false); } if (rs != null) { return(true); } else { return(false); } }
public static bool ObrisiSvePrijavePoPrezentacijamaPoPrezentaciji(string nazivPrezentacije) { try { ISession session = SessionManager.GetSession(); if (session == null) { return(false); } RowSet row = session.Execute("delete from \"Prijava_po_prezentacijama\" where naziv_prezentacije='" + nazivPrezentacije + "';"); return(true); } catch (Exception e) { return(false); } }
public List <HotelDetailsCassandra> GetAllHotels() { List <HotelDetailsCassandra> hotelDetails = new List <HotelDetailsCassandra>(); Cluster cluster = Cluster.Builder().AddContactPoint("127.0.0.1").Build(); ISession session = cluster.Connect("hotel"); string query = "select * from \"HotelDetails\""; RowSet dataReader = session.Execute(query); foreach (Row row in dataReader) { HotelDetailsCassandra hotel = new HotelDetailsCassandra(); hotel.HotelId = Convert.ToInt32(row[0].ToString()); hotel.AvailableFrom = row[1].ToString(); hotel.AvailableTill = row[2].ToString(); hotel.HotelRating = Convert.ToDecimal(row[3].ToString()); hotelDetails.Add(hotel); } return(hotelDetails); }
public bool isUserPost(int idPost, string idUser, int idProduct) { ISession session = cluster.Connect("postsdb"); string transaction = "select * from post where " + "postid = " + idPost + ";"; RowSet results = session.Execute(transaction); foreach (Row row in results.GetRows()) { Post temp = new Post(); temp.productid = row.GetValue <int>("productid"); temp.userid = row.GetValue <string>("userid"); if (temp.userid.Equals(idUser) && temp.productid == idProduct) { return(true); } } return(false); }
public async Task RetryingAfterTotalExpiration() { var settings = new BigtableServiceApiSettings(); // Don't allow for any time to retry. settings.ReadRowsSettings = CallSettings.FromExpiration(Expiration.FromTimeout(TimeSpan.Zero)); var request = new ReadRowsRequest { Rows = RowSet.FromRowKeys("a", "b", "c") }; var client = Utilities.CreateReadRowsMockClient( request, initialStreamResponse: new[] { new ReadRowsResponse { Chunks = { CreateChunk("a", "cf1", "column1", "value1", commitRow: true) } } }, responsesForRetryStreams: new[] { null, // A null entry will throw an Unavailable RpcException new [] { new ReadRowsResponse { Chunks = { CreateChunk("b", "cf1", "column2", "value2", commitRow: true) } } } }, settings: settings); var exception = await Assert.ThrowsAsync <RpcException>(() => client.ReadRows(request).ToListAsync().AsTask()); Assert.Equal(StatusCode.Unavailable, exception.StatusCode); }
private void VerifyRowSet(RowSet rs) { var rows = rs.ToArray(); Assert.AreEqual(1, rows.Length); Row row = rows[0]; var jsonObject = JObject.Parse(row.GetValue <string>("c2")); Assert.AreEqual(1, jsonObject.GetValue <int>("b")); var a1 = jsonObject.GetArray("a1"); Assert.False(a1[3].Value <bool>()); Assert.AreEqual(3.0, a1[2].Value <double>(), 1e-9); Assert.AreEqual(200, a1[5].Value <JObject>().GetArray("k2")[1].Value <int>()); var a = jsonObject.GetObject("a"); Assert.AreEqual(2147483647, a.GetObject("q").GetValue <int>("s")); Assert.AreEqual("hello", a.GetValue <string>("f")); }
public override IEnumerable <Row> Process(RowSet input, Row outputRow, string[] args) { foreach (Row input_row in input.Rows) { try { if (!IsBaiduZhidao(input_row["ClickedUrl"].String)) { continue; } } catch (Exception e) { ScopeRuntime.Diagnostics.DebugStream.WriteLine(e.Message); } input_row.CopyTo(outputRow); yield return(outputRow); } }
public override IEnumerable <Row> Process(RowSet input_rowset, Row output_row, string[] args) { foreach (Row input_row in input_rowset.Rows) { cnt++; if (cnt % 1000 == 0) { Console.WriteLine(cnt); } input_row.CopyTo(output_row); string doc = input_row["tText"].String; string CDSSM = GetCDSSM(doc, args[0], args[1]); if (CDSSM != null) { output_row["tCDSSM"].Set(CDSSM); yield return(output_row); } } }
private static void DisplayKeyspace(RowSet result) { try { foreach (var resKeyspace in result.GetRows()) { Console.WriteLine("durable_writes={0} keyspace_name={1} strategy_Class={2} strategy_options={3}", resKeyspace.GetValue <bool>("durable_writes"), resKeyspace.GetValue <string>("keyspace_name"), resKeyspace.GetValue <string>("strategy_class"), resKeyspace.GetValue <string>("strategy_options")); } Console.WriteLine(); } catch (Exception ex) { Console.WriteLine("Command failed {0}", ex.Message); } }
public static void AddMobile(MobileLog MobileLog) { ISession session = SessionManager.GetSession(); Mobile mobile = new Mobile(Guid.NewGuid().ToString(), MobileLog); var t1 = TimeUuid.NewId((DateTimeOffset)MobileLog.TimeStamp); DateTime t = MobileLog.TimeStamp; DateTimeOffset dto = new DateTimeOffset(t.Year, t.Month, t.Day, t.Hour, t.Minute, t.Second, t.Millisecond, TimeZone.CurrentTimeZone.GetUtcOffset(t)); if (session == null) { return; } var ps = session.Prepare("insert into \"Mobile\" (\"MobileID\", \"TimeStamp\", \"Password\", \"MobileNumber\") VALUES (?, ?, ?, ?)"); var batch = new BatchStatement().Add(ps.Bind(t1, dto.ToUniversalTime(), mobile.Password, mobile.MobileNumber)); RowSet mobileData = session.Execute(batch); }
public override IEnumerable <Row> Combine(RowSet left, RowSet right, Row outputRow, string[] args) { var _rowList = new RowList(); _rowList.Load(right); // Load the right RowSet into memory foreach (Row leftRow in left.Rows) { leftRow.CopyTo(outputRow); // Copy the data from the leftRow to the output // Copy the data from the rightRow to the output foreach (Row rightRow in _rowList.Rows) { for (int i = 0; i < rightRow.Count; ++i) { rightRow[i].CopyTo(outputRow[i + leftRow.Count]); } yield return(outputRow); } } }
public List <Employee> Get() { List <Employee> employees = new List <Employee>(); RowSet rows = _session.Execute("SELECT * FROM employees;"); foreach (Row row in rows) { employees.Add(new Employee() { EmployeeId = (int)row["employeeid"], Department = (string)row["department"], FirstName = (string)row["firstname"], LastName = (string)row["lastname"], Salary = (decimal)row["salary"] }); } return(employees); }
public async Task <UserCountsEntity> GetAsync(string userId) { BoundStatement boundStatement = _getStatement.Value.Bind(userId); RowSet rowset = await _session.Get().ExecuteAsync(boundStatement); List <Row> rows = rowset.ToList(); if (rows.Count == 0) { return(new UserCountsEntity { UserId = userId }); } Row row = rows.First(); return(_mapper.Map <Row, UserCountsEntity>(row)); }
// [END bigtable_reads_row_ranges] // [START bigtable_reads_prefix] /// <summary> /// /// Reads rows starting with a prefix from an existing table. ///</summary> /// <param name="projectId">Your Google Cloud Project ID.</param> /// <param name="instanceId">Your Google Cloud Bigtable Instance ID.</param> /// <param name="tableId">Your Google Cloud Bigtable table ID.</param> public string readPrefix(string projectId = "YOUR-PROJECT-ID", string instanceId = "YOUR-INSTANCE-ID", string tableId = "YOUR-TABLE-ID") { BigtableClient bigtableClient = BigtableClient.Create(); TableName tableName = new TableName(projectId, instanceId, tableId); String prefix = "phone"; Char prefixEndChar = prefix[prefix.Length - 1]; prefixEndChar++; String end = prefix.Substring(0, prefix.Length - 1) + prefixEndChar; RowSet rowSet = RowSet.FromRowRanges(RowRange.Closed(prefix, end)); ReadRowsStream readRowsStream = bigtableClient.ReadRows(tableName, rowSet); string result = ""; readRowsStream.ForEach(row => result += printRow(row)); return(result); }
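// The prefix-to-range trick above (bump the last character of the prefix to get an upper
// bound) is easy to lift into a helper. A minimal sketch; a production version would also
// have to handle an empty prefix and a last character of U+FFFF, which this assumes away:
using System;

static class PrefixRangeSketch
{
    // Returns the first string that sorts after every string starting with prefix,
    // e.g. "phone" -> "phonf".
    public static string EndOfPrefix(string prefix)
    {
        if (string.IsNullOrEmpty(prefix))
        {
            throw new ArgumentException("prefix must be non-empty", nameof(prefix));
        }
        char last = prefix[prefix.Length - 1];
        return prefix.Substring(0, prefix.Length - 1) + (char)(last + 1);
    }
}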
/// <summary> /// Main processing script. /// </summary> /// <param name="input">The input row.</param> /// <param name="outputRow">The output row.</param> /// <param name="args">The arguments.</param> /// <returns>The IEnumerable output row.</returns> public override IEnumerable<Row> Process(RowSet input, Row outputRow, string[] args) { string frameShift = args[0]; string frameLength = args[1]; string waveDir = "wave"; string lpcDir = "lpc"; string residual0Dir = "residual"; Directory.CreateDirectory(waveDir); Directory.CreateDirectory(lpcDir); Directory.CreateDirectory(residual0Dir); foreach (var row in input.Rows) { outputRow["WaveID"].Set(row["WaveID"].String); outputRow["WaveBinary"].Set(row["WaveBinary"].Binary); outputRow["WaveAlignments"].Set(row["WaveAlignments"].String); outputRow["RawF0"].Set(row["RawF0"].String); outputRow["LPCC"].Set(row["LPCC"].Binary); outputRow["OF0"].Set(row["OF0"].String); outputRow["LSP"].Set(row["LSP"].Binary); outputRow["Pow"].Set(row["Pow"].String); outputRow["MBE"].Set(row["MBE"].String); outputRow["NCCF"].Set(row["NCCF"].String); outputRow["RF"].Set(row["RF"].String); string waveId = row["WaveID"].String; string waveFile = JobBase.GenerateLocalFile(waveId, row["WaveBinary"].Binary, FileExtensions.Waveform, waveDir); string lpcFile = JobBase.GenerateLocalFile(waveId, row["LPC"].String, FileExtensions.F0File, false, lpcDir); string errorFile = Path.Combine(residual0Dir, waveId + "." + FileExtensions.Text); string[] argument = { waveFile, lpcFile, errorFile, frameShift, frameLength }; F0ExtractorCOSMOS.ExtractLpcResidualErrorOneFile(argument, null); outputRow["ERR"].Set(File.ReadAllText(errorFile)); yield return outputRow; } }
/// <summary> /// Main processing script. /// </summary> /// <param name="input">The input row.</param> /// <param name="outputRow">The output row.</param> /// <param name="args">The arguments.</param> /// <returns>The IEnumerable output row.</returns> public override IEnumerable<Row> Process(RowSet input, Row outputRow, string[] args) { string relatedFeatureDir = "relatedFeature"; string residualDir = "residual"; string nccfDir = "nccf"; string mergedDir = "merged"; Directory.CreateDirectory(relatedFeatureDir); Directory.CreateDirectory(residualDir); Directory.CreateDirectory(nccfDir); Directory.CreateDirectory(mergedDir); foreach (var row in input.Rows) { outputRow["WaveID"].Set(row["WaveID"].String); outputRow["WaveBinary"].Set(row["WaveBinary"].Binary); outputRow["WaveAlignments"].Set(row["WaveAlignments"].String); outputRow["RawF0"].Set(row["RawF0"].String); outputRow["LPCC"].Set(row["LPCC"].Binary); outputRow["OF0"].Set(row["OF0"].String); outputRow["LSP"].Set(row["LSP"].Binary); outputRow["Pow"].Set(row["Pow"].String); outputRow["MBE"].Set(row["MBE"].String); string waveId = row["WaveID"].String; string relatedFeatureFile = JobBase.GenerateLocalFile(waveId, row["RF"].String, FileExtensions.Text, false, relatedFeatureDir); string residualFile = JobBase.GenerateLocalFile(waveId, row["ERR"].String, FileExtensions.Text, false, residualDir); string nccfFile = JobBase.GenerateLocalFile(waveId, row["NCCF"].String, FileExtensions.F0File, true, nccfDir); string mergedFeatureFile = Path.Combine(mergedDir, waveId + "." + FileExtensions.Text); string[] argument = { relatedFeatureFile, residualFile, nccfFile, mergedFeatureFile }; F0ExtractorCOSMOS.MergeFeaturesOneFile(argument, null); outputRow["MERG"].Set(File.ReadAllText(mergedFeatureFile)); yield return outputRow; } }
/* ** Turn bulk memory into a RowSet object. N bytes of memory ** are available at pSpace. The db pointer is used as a memory context ** for any subsequent allocations that need to occur. ** Return a pointer to the new RowSet object. ** ** It must be the case that N is sufficient to make a Rowset. If not ** an assertion fault occurs. ** ** If N is larger than the minimum, use the surplus as an initial ** allocation of entries available to be filled. */ static RowSet sqlite3RowSetInit( sqlite3 db, object pSpace, u32 N ) { RowSet p = new RowSet( db, (int)N ); //Debug.Assert(N >= ROUND8(sizeof(*p)) ); // p = pSpace; // p.pChunk = 0; // p.db = db; // p.pEntry = 0; // p.pLast = 0; // p.pTree = 0; // p.pFresh =(struct RowSetEntry*)(ROUND8(sizeof(*p)) + (char*)p); // p.nFresh = (u16)((N - ROUND8(sizeof(*p)))/sizeof(struct RowSetEntry)); // p.isSorted = 1; // p.iBatch = 0; return p; }
/* ** Insert a new value into a RowSet. ** ** The mallocFailed flag of the database connection is set if a ** memory allocation fails. */ static void sqlite3RowSetInsert( RowSet p, i64 rowid ) { RowSetEntry pEntry; /* The new entry */ RowSetEntry pLast; /* The last prior entry */ Debug.Assert( p != null ); if ( p.nFresh == 0 ) { RowSetChunk pNew; pNew = new RowSetChunk();//sqlite3DbMallocRaw(p.db, sizeof(*pNew)); if ( pNew == null ) { return; } pNew.pNextChunk = p.pChunk; p.pChunk = pNew; p.pFresh = pNew.aEntry; p.nFresh = ROWSET_ENTRY_PER_CHUNK; } p.pFresh[p.pFresh.Length - p.nFresh] = new RowSetEntry(); pEntry = p.pFresh[p.pFresh.Length - p.nFresh]; p.nFresh--; pEntry.v = rowid; pEntry.pRight = null; pLast = p.pLast; if ( pLast != null ) { if ( p.isSorted && rowid <= pLast.v ) { p.isSorted = false; } pLast.pRight = pEntry; } else { Debug.Assert( p.pEntry == null );/* Fires if INSERT after SMALLEST */ p.pEntry = pEntry; } p.pLast = pEntry; }
/* ** Convert the list in p.pEntry into a sorted list if it is not ** sorted already. If there is a binary tree on p.pTree, then ** convert it into a list too and merge it into the p.pEntry list. */ static void rowSetToList( RowSet p ) { if ( !p.isSorted ) { rowSetSort( p ); } if ( p.pTree != null ) { RowSetEntry pHead = new RowSetEntry(); RowSetEntry pTail = new RowSetEntry(); rowSetTreeToList( p.pTree, ref pHead, ref pTail ); p.pTree = null; p.pEntry = rowSetMerge( p.pEntry, pHead ); } }
/* ** Deallocate all chunks from a RowSet. This frees all memory that ** the RowSet has allocated over its lifetime. This routine is ** the destructor for the RowSet. */ static void sqlite3RowSetClear( RowSet p ) { RowSetChunk pChunk, pNextChunk; for ( pChunk = p.pChunk; pChunk != null; pChunk = pNextChunk ) { pNextChunk = pChunk.pNextChunk; sqlite3DbFree( p.db, ref pChunk ); } p.pChunk = null; p.nFresh = 0; p.pEntry = null; p.pLast = null; p.pTree = null; p.isSorted = true; }
public RowSet[] CompareRowsStatic(ref object[,] dataA, ref object[,] dataB, int[] colKeysA, int[] colKeysB, int[] colValA, int[] colValB, bool multi) { HashRow[] hashTabA = GetRowsKey(dataA, colKeysA); HashRow[] hashTabB = GetRowsKey(dataB, colKeysB); int nbRowA = hashTabA.Length; int nbRowB = hashTabB.Length; int nbValA = colValA.Length; int nbValB = colValB.Length; int[] ptrA = GetSortedHashPtr(ref hashTabA); int[] ptrB = GetSortedHashPtr(ref hashTabB); for (int r1 = 0, r2 = 0, nbMatch; r1 < nbRowA; r1++) { int r1ptr = ptrA[r1]; if (hashTabA[r1ptr].IsNotNull) { nbMatch = 0; while (r2 < nbRowB) { int r2ptr = ptrB[r2]; if (hashTabB[r2ptr].IsNull) r2++; else if (hashTabA[r1ptr] == hashTabB[r2ptr]) { hashTabA[r1ptr].LinkedRow = hashTabB[r2ptr].Row; hashTabB[r2ptr].LinkedRow = hashTabA[r1ptr].Row; r2++; nbMatch = 1; if (multi == false) break; } else if (hashTabA[r1ptr] < hashTabB[r2ptr]) break; else r2++; } if (multi) r2 -= nbMatch; } } var cellsA = new RowSet(dataA); var cellsB = new RowSet(dataB); int nbColA = cellsA.ColLen; int nbColB = cellsB.ColLen; for (int r = 0, indexRowA, indexRowB; r < nbRowA; r++) { indexRowA = hashTabA[r].Row; indexRowB = hashTabA[r].LinkedRow; if (indexRowB == 0) { if (hashTabA[r].IsNotNull) { cellsA.Match[indexRowA] = XlRowMatch.NoMatch; for (int ci = 1; ci <= nbColA; ci++) cellsA.Diff[indexRowA, ci] = true; } } else { cellsA.Match[indexRowA] = cellsB.Match[indexRowB] = XlRowMatch.FullMatch; for (int i = 0, indexColA, indexColB; i < nbValA; i++) { indexColA = colValA[i]; indexColB = colValB[i]; if (ObjectsNotEquals(dataA[indexRowA, indexColA], dataB[indexRowB, indexColB])) { cellsA.Diff[indexRowA, indexColA] = cellsB.Diff[indexRowB, indexColB] = true; cellsA.Match[indexRowA] = cellsB.Match[indexRowB] = XlRowMatch.MatchWithDiff; } } } } for (int r = 0, ri = 1, indexKeyA, indexKeyB; r < nbRowB; r++, ri++) { indexKeyB = hashTabB[r].Row; if (hashTabB[r].LinkedRow == 0) { if (hashTabB[r].IsNotNull) { cellsB.Match[indexKeyB] = XlRowMatch.NoMatch; for (int ci = 1; ci <= nbColB; ci++) cellsB.Diff[indexKeyB, ci] = true; } } else if (cellsB.Match[indexKeyB] == XlRowMatch.Empty) { indexKeyA = hashTabB[r].LinkedRow; cellsB.Match[indexKeyB] = XlRowMatch.FullMatch; for (int i = 0, indexColA, indexColB; i < nbValB; i++) { indexColA = colValA[i]; indexColB = colValB[i]; if (ObjectsNotEquals(dataA[indexKeyA, indexColA], dataB[indexKeyB, indexColB])) { cellsB.Diff[indexKeyB, indexColB] = true; cellsB.Match[indexKeyB] = XlRowMatch.MatchWithDiff; } } } } return new RowSet[] { cellsA, cellsB }; }
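// The hash-and-sort pass above pairs each row of table A with a key-equal row of table B before
// the value columns are compared. A much simpler (if less memory-frugal) illustration of that
// matching step, keyed on the concatenated key columns; types and names are illustrative:
using System;
using System.Collections.Generic;

static class RowMatcherSketch
{
    // Builds a composite key from the selected columns of one row.
    static string KeyOf(object[,] data, int row, int[] keyCols)
    {
        var parts = new string[keyCols.Length];
        for (int i = 0; i < keyCols.Length; i++)
        {
            parts[i] = Convert.ToString(data[row, keyCols[i]]);
        }
        return string.Join("\u0001", parts);
    }

    // Maps each row index of A to the key-equal row index of B; rows with no match are absent.
    public static Dictionary<int, int> MatchRows(object[,] dataA, object[,] dataB, int[] keyColsA, int[] keyColsB)
    {
        var indexOfB = new Dictionary<string, int>();
        for (int r = dataB.GetLowerBound(0); r <= dataB.GetUpperBound(0); r++)
        {
            indexOfB[KeyOf(dataB, r, keyColsB)] = r;
        }
        var links = new Dictionary<int, int>();
        for (int r = dataA.GetLowerBound(0); r <= dataA.GetUpperBound(0); r++)
        {
            if (indexOfB.TryGetValue(KeyOf(dataA, r, keyColsA), out int match))
            {
                links[r] = match;
            }
        }
        return links;
    }
}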
/* ** Sort all elements on the pEntry list of the RowSet into ascending order. */ static void rowSetSort( RowSet p ) { u32 i; RowSetEntry pEntry; RowSetEntry[] aBucket = new RowSetEntry[40]; Debug.Assert( p.isSorted == false ); //memset(aBucket, 0, sizeof(aBucket)); while ( p.pEntry != null ) { pEntry = p.pEntry; p.pEntry = pEntry.pRight; pEntry.pRight = null; for ( i = 0; aBucket[i] != null; i++ ) { pEntry = rowSetMerge( aBucket[i], pEntry ); aBucket[i] = null; } aBucket[i] = pEntry; } pEntry = null; for ( i = 0; i < aBucket.Length; i++ )//sizeof(aBucket)/sizeof(aBucket[0]) { pEntry = rowSetMerge( pEntry, aBucket[i] ); } p.pEntry = pEntry; p.pLast = null; p.isSorted = true; }
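// rowSetSort above is a bottom-up mergesort: each aBucket slot holds a sorted run of doubling
// size, and runs are combined with rowSetMerge (not shown in this excerpt). A generic sketch of
// merging two ascending singly linked lists of rowids, using an illustrative node type rather
// than the port's RowSetEntry:
static class SortedListMergeSketch
{
    class Node
    {
        public long Value;
        public Node Next;
    }

    // Merges two ascending lists into one ascending list, reusing the existing nodes.
    static Node Merge(Node a, Node b)
    {
        var head = new Node();      // dummy head keeps the loop free of special cases
        Node tail = head;
        while (a != null && b != null)
        {
            if (a.Value <= b.Value) { tail.Next = a; a = a.Next; }
            else { tail.Next = b; b = b.Next; }
            tail = tail.Next;
        }
        tail.Next = a ?? b;         // append whichever list still has entries
        return head.Next;
    }
}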
public RowSet CombineColumns(ref object[,] dataA, ref object[,] dataB, ref object[,] titlesA, ref object[,] titlesB, int[] colKeysA, int[] colKeysB, int[] colValsA, int[] colValsB) { HashRow[] hashTabA = GetRowsKey(dataA, colKeysA); HashRow[] hashTabB = GetRowsKey(dataB, colKeysB); int nbRowA = hashTabA.Length; int nbRowB = hashTabB.Length; int[] ptrA = GetSortedHashPtr(ref hashTabA); int[] ptrB = GetSortedHashPtr(ref hashTabB); int nbRow = 0; for (int r1 = 0, r2 = 0, r1ptr, r2ptr, nbMatch; r1 < nbRowA; r1++) { r1ptr = ptrA[r1]; if (hashTabA[r1ptr].IsNotNull) { nbRow++; nbMatch = 0; while (r2 < nbRowB) { r2ptr = ptrB[r2]; if (hashTabB[r2ptr].IsNull) r2++; else if (hashTabA[r1ptr] == hashTabB[r2ptr]) { hashTabA[r1ptr].LinkedRow = hashTabB[r2ptr].Row; hashTabB[r2ptr].LinkedRow = hashTabA[r1ptr].Row; r2++; nbMatch = 1; } else if (hashTabA[r1ptr] < hashTabB[r2ptr]) break; else r2++; } r2 -= nbMatch; } } foreach (HashRow hashrow in hashTabB) if (hashrow.IsNotNull && hashrow.LinkedRow == 0) nbRow++; var nbColA = colKeysA.Length + colValsA.Length; var nbColB = colValsB.Length; var nbCol = nbColA + nbColB; var cells = new RowSet(nbRow, nbCol); int[] colsA = Array<int>.Join(colKeysA, colValsA); Array<int>.Join(colKeysB, colValsB); //Copy titles for (int c = 0; c < colsA.Length; c++) cells.Titles[1, c + 1] = titlesA[1, colsA[c]]; for (int c = 0, ci = nbColA; c < colValsB.Length; c++) cells.Titles[1, ++ci] = titlesB[1, colValsB[c]]; //Copy data int row = 0; for (int r = 0, rowA, rowB; r < nbRowA; r++) { if (hashTabA[r].IsNotNull) { row++; rowA = hashTabA[r].Row; rowB = hashTabA[r].LinkedRow; for (int c = 0; c < colsA.Length; c++) cells.Data[row, c + 1] = dataA[rowA, colsA[c]]; if (rowB != 0) { for (int c = 0, ci = nbColA; c < nbColB; c++) cells.Data[row, ++ci] = dataB[rowB, colValsB[c]]; } else { for (int ci = 1; ci <= nbCol; ci++) cells.Diff[row, ci] = true; } } } for (int r = 0, rowB; r < nbRowB; r++) { if (hashTabB[r].IsNotNull) { if (hashTabB[r].LinkedRow == 0) { rowB = hashTabB[r].Row; row++; for (int c = 0; c < colKeysB.Length; c++) cells.Data[row, c + 1] = dataB[rowB, colKeysB[c]]; for (int c = 0, ci = nbColA; c < nbColB; c++) cells.Data[row, ++ci] = dataB[rowB, colValsB[c]]; for (int ci = 1; ci <= nbCol; ci++) cells.Diff[row, ci] = true; } } } return cells; }
private RowSet[] Build_Aligned_Columns(ref object[,] dataA, ref object[,] dataB, ref object[,] titlesA, ref object[,] titlesB, ref List<Match> listA, ref List<Match> listB, int[] colValsA, int[] colValsB) { int nbRowA = listA.Count, nbRowB = listB.Count; int nbColA = dataA.GetLength(1), nbColB = dataB.GetLength(1); int nbRow = nbRowA + nbRowB; int nbCol = Math.Max(nbColA, nbColB) * 2; int nbValA = colValsA.Length, nbValB = colValsB.Length; int nbColMin = Math.Min(nbColA, nbColB); var cells = new RowSet(nbRow, nbCol); for (int r = 0, ri = 1; r < nbRowA; r++, ri++) { bool nolink = listA[r].IndexB == 0; for (int ci = 1, cc = 1; ci <= nbColA; ci++, cc += 2) { cells.Data[ri, cc] = dataA[listA[r].IndexA, ci]; cells.Diff[ri, cc] = nolink; } if (nolink == false) { cells.Match[ri] = cells.Match[ri] = XlRowMatch.FullMatch; for (int ci = 1, cc = 2; ci <= nbColB; ci++, cc += 2) cells.Data[ri, cc] = dataB[listA[r].IndexB, ci]; for (int i = 0; i < nbValA; i++) { if (ObjectsNotEquals(dataA[listA[r].IndexA, colValsA[i]], dataB[listA[r].IndexB, colValsB[i]])) { cells.Diff[ri, (colValsA[i] * 2) - 1] = cells.Diff[ri, colValsB[i] * 2] = true; cells.Match[ri] = cells.Match[ri] = XlRowMatch.MatchWithDiff; } } } else { cells.Match[ri] = XlRowMatch.NoMatch; } } for (int r = 0, ri = nbRowA + 1; ri <= nbRow; r++, ri++) { cells.Match[ri] = XlRowMatch.NoMatch; for (int ci = 1, cc = 2; ci <= nbColB; ci++, cc += 2) { cells.Data[ri, cc] = dataB[listB[r].IndexB, ci]; cells.Diff[ri, cc] = true; } } for (int ci = 1, cc = 1; ci <= nbColA; ci++, cc += 2) cells.Titles[1, cc] = titlesA[1, ci]; for (int ci = 1, cc = 2; ci <= nbColB; ci++, cc += 2) cells.Titles[1, cc] = titlesB[1, ci]; return new RowSet[] { cells }; }
/* ** Extract the smallest element from the RowSet. ** Write the element into *pRowid. Return 1 on success. Return ** 0 if the RowSet is already empty. ** ** After this routine has been called, the sqlite3RowSetInsert() ** routine may not be called again. */ static int sqlite3RowSetNext( RowSet p, ref i64 pRowid ) { rowSetToList( p ); if ( p.pEntry != null ) { pRowid = p.pEntry.v; p.pEntry = p.pEntry.pRight; if ( p.pEntry == null ) { sqlite3RowSetClear( p ); } return 1; } else { return 0; } }
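// Illustrative consumption loop for the pop-smallest routine above, using the port's own names;
// it drains the RowSet in ascending rowid order until the routine reports empty:
// i64 rowid = 0;
// while ( sqlite3RowSetNext( p, ref rowid ) != 0 )
// {
//   Console.WriteLine( rowid );
// }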
private RowSet[] Build_Aligned_Values(ref object[,] dataA, ref object[,] dataB, ref object[,] titlesA, ref object[,] titlesB, ref List<Match> listA, ref List<Match> listB, int[] colValsA, int[] colValsB) { int nbRowA = listA.Count, nbRowB = listB.Count; int nbColA = dataA.GetLength(1), nbColB = dataB.GetLength(1); int nbValA = colValsA.Length, nbValB = colValsB.Length; int nbColMin = Math.Min(nbColA, nbColB); int nbRow = nbRowA + nbRowB; int nbMaxCol = Math.Max(nbColA, nbColB); int nbMinCol = Math.Min(nbColA, nbColB); var cells = new RowSet(nbRow, nbMaxCol); for (int r = 0, ri = 1; r < nbRowA; r++, ri++) { if (listA[r].IndexB == 0) { cells.Match[ri] = XlRowMatch.NoMatch; for (int ci = 1; ci <= nbColA; ci++) { cells.Data[ri, ci] = "[" + dataA[listA[r].IndexA, ci] + "][]"; cells.Diff[ri, ci] = true; } } else { cells.Match[ri] = XlRowMatch.FullMatch; for (int ci = 1; ci <= nbMinCol; ci++) cells.Data[ri, ci] = "[" + dataA[listA[r].IndexA, ci] + "][" + dataB[listA[r].IndexB, ci] + "]"; for (int ci = nbMinCol + 1; ci <= nbColA; ci++) cells.Data[ri, ci] = "[" + dataA[listA[r].IndexA, ci] + "][]"; for (int ci = nbMinCol + 1; ci <= nbColB; ci++) cells.Data[ri, ci] = "[][" + dataB[listA[r].IndexB, ci] + "]"; for (int i = 0; i < nbValA; i++) { if (ObjectsNotEquals(dataA[listA[r].IndexA, colValsA[i]], dataB[listA[r].IndexB, colValsB[i]])) { cells.Diff[ri, colValsB[i]] = true; cells.Match[ri] = XlRowMatch.MatchWithDiff; } } } } for (int r = 0, ri = nbRowA + 1; ri <= nbRow; r++, ri++) { cells.Match[ri] = XlRowMatch.NoMatch; for (int ci = 1; ci <= nbColB; ci++) { cells.Data[ri, ci] = "[][" + dataB[listB[r].IndexB, ci] + "]"; cells.Diff[ri, ci] = true; } } for (int ci = 1; ci <= nbMinCol; ci++) cells.Titles[1, ci] = "[" + titlesA[1, ci] + "][" + titlesB[1, ci] + "]"; for (int ci = nbMinCol + 1; ci <= nbColA; ci++) cells.Titles[1, ci] = "[" + titlesA[1, ci] + "][]"; for (int ci = nbMinCol + 1; ci <= nbColB; ci++) cells.Titles[1, ci] = "[][" + titlesB[1, ci] + "]"; return new RowSet[] { cells }; }
private RowSet[] Build_Aligned_Tables(ref object[,] dataA, ref object[,] dataB, ref object[,] titlesA, ref object[,] titlesB, ref List<Match> listA, ref List<Match> listB, int[] colValsA, int[] colValsB) { int nbRowLeft = listA.Count, nbRowRight = listB.Count + nbRowLeft; int nbColA = dataA.GetLength(1), nbColB = dataB.GetLength(1); int nbValA = colValsA.Length, nbValB = colValsB.Length; int nbColMin = Math.Min(nbColA, nbColB); var cellsA = new RowSet(nbRowLeft, nbColA); cellsA.Titles = titlesA; var cellsB = new RowSet(nbRowRight, nbColB); cellsB.Titles = titlesB; for (int r = 0, ri = 1; r < nbRowLeft; r++, ri++) { bool nolink = listA[r].IndexB == 0; for (int c = 0, ci = 1; c < nbColA; c++, ci++) { cellsA.Data[ri, ci] = dataA[listA[r].IndexA, ci]; cellsA.Diff[ri, ci] = nolink; } if (nolink == false) { cellsA.Match[ri] = cellsB.Match[ri] = XlRowMatch.FullMatch; for (int ci = 1; ci <= nbColB; ci++) cellsB.Data[ri, ci] = dataB[listA[r].IndexB, ci]; for (int i = 0; i < nbValA; i++) { if (ObjectsNotEquals(dataA[listA[r].IndexA, colValsA[i]], dataB[listA[r].IndexB, colValsB[i]])) { cellsA.Diff[ri, colValsA[i]] = cellsB.Diff[ri, colValsB[i]] = true; cellsA.Match[ri] = cellsB.Match[ri] = XlRowMatch.MatchWithDiff; } } } else { cellsA.Match[ri] = XlRowMatch.NoMatch; //resB.Match[ri] = ERowMatch.NoMatch; } } for (int r = 0, ri = nbRowLeft + 1; ri <= nbRowRight; r++, ri++) { cellsB.Match[ri] = XlRowMatch.NoMatch; for (int ci = 1; ci <= nbColB; ci++) { cellsB.Data[ri, ci] = dataB[listB[r].IndexB, ci]; cellsB.Diff[ri, ci] = true; } } return new RowSet[] { cellsA, cellsB }; }
/* ** Check to see if element iRowid was inserted into the rowset as ** part of any insert batch prior to iBatch. Return 1 or 0. */ static int sqlite3RowSetTest( RowSet pRowSet, u8 iBatch, sqlite3_int64 iRowid ) { RowSetEntry p; if ( iBatch != pRowSet.iBatch ) { if ( pRowSet.pEntry != null ) { rowSetToList( pRowSet ); pRowSet.pTree = rowSetListToTree( pRowSet.pEntry ); pRowSet.pEntry = null; pRowSet.pLast = null; } pRowSet.iBatch = iBatch; } p = pRowSet.pTree; while ( p != null ) { if ( p.v < iRowid ) { p = p.pRight; } else if ( p.v > iRowid ) { p = p.pLeft; } else { return 1; } } return 0; }
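// sqlite3RowSetTest depends on rowSetListToTree (not included in this excerpt) to turn the
// sorted entry list into a search tree before probing it. The usual way to get a balanced tree
// from sorted input is to recurse on the midpoint; a generic sketch of that idea over a sorted
// array of rowids, not the actual SQLite routine:
static class SortedToTreeSketch
{
    class TreeNode
    {
        public long Value;
        public TreeNode Left;
        public TreeNode Right;
    }

    // Builds a height-balanced BST from rowids[lo..hi] (inclusive), already sorted ascending.
    static TreeNode Build(long[] rowids, int lo, int hi)
    {
        if (lo > hi) return null;
        int mid = lo + (hi - lo) / 2;
        return new TreeNode
        {
            Value = rowids[mid],
            Left = Build(rowids, lo, mid - 1),
            Right = Build(rowids, mid + 1, hi),
        };
    }

    // The same probe as the while loop in sqlite3RowSetTest: walk left or right until found.
    static bool Contains(TreeNode root, long rowid)
    {
        while (root != null)
        {
            if (root.Value < rowid) root = root.Right;
            else if (root.Value > rowid) root = root.Left;
            else return true;
        }
        return false;
    }
}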
/// <summary> /// Creates a RowSet with a few rows of text and int columns (null values in the last row) /// </summary> private static RowSet CreateSampleRowSet() { var columns = new List<CqlColumn> { new CqlColumn() { Index = 0, Name = "text_sample", TypeCode = ColumnTypeCode.Text, Type = typeof (string) }, new CqlColumn() { Index = 1, Name = "int_sample", TypeCode = ColumnTypeCode.Int, Type = typeof(int) } }; var columnIndexes = columns.ToDictionary(c => c.Name, c => c.Index); var rs = new RowSet(); var rowValues = new object[] { "text value", 100 }; rs.AddRow(new Row(rowValues, columns.ToArray(), columnIndexes)); rs.AddRow(new Row(new object[] { null, null}, columns.ToArray(), columnIndexes)); return rs; }