/// <summary>
/// Returns the id of the most recently applied migration, or 0 when the
/// __migrations table contains no rows.
/// </summary>
/// <param name="db">Open connection to the database being inspected.</param>
long currentMigration(SQLiteDatabaseConnection db)
{
    const string sql = "SELECT id FROM __migrations ORDER BY id DESC LIMIT 1";

    // FirstOrDefault yields 0L when the table is empty.
    var latestId = db.Query(sql).SelectScalarInt64().FirstOrDefault();
    return latestId;
}
/// <summary>
/// Determines whether the __migrations tracking table exists in the database.
/// </summary>
/// <param name="db">Open connection to the database being inspected.</param>
/// <returns>True when the table is present; otherwise false.</returns>
bool migrationsTableExists(SQLiteDatabaseConnection db)
{
    var sql = "SELECT COUNT(*) FROM sqlite_master WHERE name = '__migrations' AND type = 'table'";

    // sqlite_master lists every schema object; a non-zero count means the table exists.
    var tableCount = db.Query(sql).SelectScalarInt().FirstOrDefault();
    return tableCount > 0;
}
/// <summary>
/// Executes a SQL script and retrieves all data obtained.
/// </summary>
/// <remarks>
/// You must explicitly close the returned DataReader once it's no longer used,
/// otherwise the assigned connection stays open and wastes resources.
/// </remarks>
/// <param name="command">
/// SQL sentence to execute to retrieve data.
/// </param>
/// <returns>
/// A System.Data.DbDataReader with all data obtained.
/// </returns>
public override IDataReader GetDataReader(Command command)
{
    // Rows produced by the query; stays null if a Before handler cancels the call.
    IEnumerable<IReadOnlyList<IResultSetValue>> reader = null;

    // Serialize access so event firing, connection use and query execution
    // happen atomically with respect to other calls on this instance.
    lock (Locker)
    {
        try
        {
            // Raise the pre-execution event; handlers may set e.Cancel.
            CommandEventArgs e = new CommandEventArgs(command);
            OnBeforeGetDataReader(e);

            // Run the script only if no handler cancelled it.
            if (!e.Cancel)
            {
                // Flatten the command parameters into positional values for the query.
                object[] parameters = new object[command.Parameters.Count];

                for (int i = 0; i < command.Parameters.Count; i++)
                {
                    parameters[i] = command.Parameters[i].Value;
                }

                OpenConnection();

                // Load the data reader.
                // NOTE(review): query results are lazily enumerated here, so errors in
                // the SQL may surface later when the DataReader is consumed — confirm.
                reader = Connection.Query(command.Script, parameters);
            }

            // Raise the post-execution event with the (possibly null) result.
            e.Result = reader;
            OnAfterGetDataReader(e);
        }
        catch (System.Exception ex)
        {
            // Release the connection since no reader will be handed to the caller.
            CloseConnection();

            // Re-throw the exception to the caller, wrapped with the failing command.
            throw new SqlException(command, "Error on creating DataReader (" + ex.Message + ")", ex);
        }
    }

    // Return the DataReader wrapper.
    // NOTE(review): if the call was cancelled, reader is null here and is still
    // passed to DataReader — verify DataReader tolerates a null row source.
    return (new DataReader(this, reader));
}
/// <summary>
/// Runs the given SQL text against the underlying connection and streams
/// back the raw result-set rows.
/// </summary>
/// <param name="sql">The SQL statement to execute.</param>
/// <returns>An enumerable sequence of result-set rows.</returns>
public IEnumerable<IReadOnlyList<ResultSetValue>> Query(string sql) => db.Query(sql);
/// <summary>
/// Gets all changes from the SQLite database recorded after a given date.
/// </summary>
/// <param name="schema">Schema listing all synchronized tables.</param>
/// <param name="lastModifiedDate">Only changes after this date are returned.</param>
/// <param name="uploadBatchSize">Maximum number of rows to upload; 0 or less means no limit.</param>
internal IEnumerable<SQLiteOfflineEntity> GetChanges(OfflineSchema schema, DateTime lastModifiedDate, int uploadBatchSize)
{
    List<SQLiteOfflineEntity> lstChanges = new List<SQLiteOfflineEntity>();

    using (SQLiteDatabaseConnection connection = SQLitePCL.pretty.SQLite3.Open(localFilePath))
    {
        try
        {
            foreach (var ty in schema.Collections)
            {
                // Get mapping from my type
                var map = manager.GetMapping(ty);

                // Create query to select changes
                var querySelect = SQLiteConstants.SelectChanges;

                var columnsDcl = new List<String>();
                var columnsPK = new List<String>();

                // For each column, build the select list ([s] = source table,
                // [t] = tracking table) and the PK join predicate.
                foreach (var c in map.Columns)
                {
                    if (!c.IsPK)
                    {
                        columnsDcl.Add("[s].[" + c.Name + "]");
                    }

                    // If it's the PK, take it from tracking, because deleted
                    // items are no longer present in the real table.
                    if (c.IsPK)
                    {
                        columnsDcl.Add("[t].[" + c.Name + "]");
                        columnsPK.Add("[s].[" + c.Name + "] = [t].[" + c.Name + "]");
                    }
                }

                var decl = string.Join(",\n", columnsDcl.ToArray());
                var pk = string.Join(" \nAND ", columnsPK.ToArray());

                querySelect = String.Format(querySelect, map.TableName, pk, decl);

                // add limit if specified
                if (uploadBatchSize > 0)
                {
                    querySelect += $" LIMIT {uploadBatchSize}";
                }

                try
                {
                    // Column mapping, resolved lazily from the first row of the result set.
                    var cols = new TableMapping.Column[map.Columns.Length];
                    bool firstRow = true;

                    // While row is available
                    foreach (var row in connection.Query(querySelect, P(lastModifiedDate)))
                    {
                        if (firstRow)
                        {
                            // For each result column, locate the matching mapped property.
                            for (int i = 0; i < cols.Length; i++)
                            {
                                var name = row[i].ColumnInfo.Name;
                                var c = map.FindColumn(name);

                                if (c != null)
                                {
                                    // Fix: reuse the already-resolved column instead of
                                    // performing a second, redundant FindColumn lookup.
                                    cols[i] = c;
                                }
                            }

                            firstRow = false;
                        }

                        // Create the object
                        SQLiteOfflineEntity obj = (SQLiteOfflineEntity)Activator.CreateInstance(map.MappedType);

                        for (int i = 0; i < cols.Length; i++)
                        {
                            // Skip result columns that have no mapped property.
                            if (cols[i] == null)
                            {
                                continue;
                            }

                            // Read the column
                            var val = ReadCol(row, i, cols[i].ColumnType);

                            // Set the value
                            cols[i].SetValue(obj, val);
                        }

                        // Read the service-metadata columns appended after the mapped ones.
                        var newIndex = map.Columns.Count();

                        obj.ServiceMetadata = new OfflineEntityMetadata();
                        obj.ServiceMetadata.IsTombstone = (bool)ReadCol(row, newIndex, typeof(Boolean));
                        obj.ServiceMetadata.Id = (String)ReadCol(row, newIndex + 1, typeof(String));
                        obj.ServiceMetadata.ETag = (String)ReadCol(row, newIndex + 2, typeof(String));

                        String absoluteUri = (String)ReadCol(row, newIndex + 3, typeof(String));
                        obj.ServiceMetadata.EditUri = String.IsNullOrEmpty(absoluteUri) ? null : new Uri(absoluteUri);

                        lstChanges.Add(obj);
                    }
                }
                catch (Exception ex)
                {
                    Debug.WriteLine(ex.Message);
                    throw;
                }

                // if we are batching uploads and the upload rowcount has been reached,
                // stop scanning further tables.
                if (uploadBatchSize > 0 && lstChanges.Count >= uploadBatchSize)
                {
                    break;
                }
            }
        }
        catch (Exception ex)
        {
            Debug.WriteLine(ex.Message);
            throw;
        }
    }

    // if we are batching uploads, limit the in-memory result set as well
    if (uploadBatchSize > 0)
    {
        return lstChanges.Take(uploadBatchSize);
    }

    return lstChanges;
}