/// <summary>
/// Builds a paged cursor over SEPayload rows for the given signal, reading one
/// Cassandra partition ("line") per combination of the leading dimension ranks.
/// </summary>
/// <param name="signalId">Parent signal id (partition key component).</param>
/// <param name="start">Start coordinate per rank; the last rank is the in-line sample offset.</param>
/// <param name="count">Number of samples to read per rank.</param>
/// <param name="decimationFactor">Optional per-rank decimation; defaults to 1 for every rank.</param>
/// <returns>A cursor wrapping every fetched page plus per-dimension page counts.</returns>
public async Task<ICursor<T>> GetCursorAsync<T>(Guid signalId, List<long> start, List<long> count, List<long> decimationFactor = null)
{
    List<IPage<SEPayload>> listIPage = new List<IPage<SEPayload>>();

    long queryStart = start.LastOrDefault();
    long queryEnd;
    if (decimationFactor == null)
    {
        // No decimation requested: treat every rank as factor 1.
        decimationFactor = new List<long>();
        for (int i = 0; i < start.Count; i++)
        {
            decimationFactor.Add(1);
        }
        queryEnd = queryStart + count.LastOrDefault();
    }
    else
    {
        // Last sampled position = start + factor * (count - 1).
        queryEnd = queryStart + decimationFactor.LastOrDefault() * (count.LastOrDefault() - 1);
    }
    // FIX: removed unused locals "num" and "factor" (the latter also called the
    // Enumerable.Count() extension on a List instead of the Count property).

    // How many lines (combinations of all ranks except the last) must be read.
    long lineCount = 1;
    for (int i = 0; i < count.Count - 1; i++)
    {
        lineCount = lineCount * count[i];
    }

    // lineCursor walks the leading ranks; lineCounter counts down remaining reads per rank.
    // Both carry every rank of the start/count vectors except the last one.
    List<long> lineCursor = new List<long>(start.Count - 1);
    List<long> lineCounter = new List<long>(start.Count - 1);
    lineCursor.AddRange(start.Take(start.Count - 1));
    lineCounter.AddRange(count.Take(count.Count - 1));

    List<long> dimPage = new List<long>();
    for (long line = 0; line < lineCount; line++)
    {
        byte[] pagingState = null;
        string dimension = DimensionsToText(lineCursor);
        Cql cql = Cql.New("SELECT * FROM sepayload where parentid=? and dimensions=? and start<=? ", signalId, dimension, queryEnd);

        IPage<SEPayload> ipagePartialList = await mapper.FetchPageAsync<SEPayload>(cql.WithOptions(opt => opt.SetPageSize(pagesize).SetPagingState(pagingState)));
        listIPage.Add(ipagePartialList);

        // Drain every remaining driver page for this line.
        while (ipagePartialList.PagingState != null)
        {
            pagingState = ipagePartialList.PagingState;
            ipagePartialList = await mapper.FetchPageAsync<SEPayload>(cql.WithOptions(opt => opt.SetPageSize(pagesize).SetPagingState(pagingState)));
            if (ipagePartialList.Count() > 0)
            {
                listIPage.Add(ipagePartialList);
            }
        }

        // Pages fetched for this line = total pages so far minus pages of previous lines.
        long dimPageCount = listIPage.Count() - dimPage.Sum();
        dimPage.Add(dimPageCount);

        // Advance the line cursor like an odometer, least-significant rank first.
        for (var rankIndex = lineCursor.Count - 1; rankIndex >= 0; rankIndex--)
        {
            lineCounter[rankIndex] -= 1;
            if (lineCounter[rankIndex] == 0)
            {
                // Rank exhausted: reset it and carry into the next rank.
                lineCounter[rankIndex] = count[rankIndex];
                lineCursor[rankIndex] = start[rankIndex];
            }
            else
            {
                // Move this rank forward by its decimation stride.
                lineCursor[rankIndex] += decimationFactor[rankIndex];
                break;
            }
        }
    }

    Cursor<T> jdbccursor = new Cursor<T>(listIPage, start.LastOrDefault(), count, dimPage, decimationFactor.LastOrDefault());
    return jdbccursor;
}
/// <summary>
/// Delete all payload of signal by signalid
/// </summary>
/// <param name="signalId">Parent signal whose payload rows are removed.</param>
public async Task DeleteDataAsync(Guid signalId)
{
    // Partition-level delete: every SEPayload row under this parentid goes away.
    var deleteByParent = Cql.New("where parentid=? ", signalId);
    await mapper.DeleteAsync<SEPayload>(deleteByParent);
}
/// <summary>
/// Verifies that when the very first PrepareAsync call fails, the statement
/// factory retries the prepare on a later request and then caches the single
/// successful PreparedStatement for every remaining concurrent caller.
/// </summary>
public async Task GetStatementAsync_Should_PrepareTwiceAndCache_When_FirstPrepareFails()
{
    // Collects every successfully created prepared-statement task for the identity checks below.
    var createdPreparedStatementsBag = new ConcurrentBag<Task<PreparedStatement>>();
    var sessionMock = new Mock<ISession>(MockBehavior.Strict);
    // 1 = the next PrepareAsync call is the first one; flipped atomically below.
    var firstTime = 1;
    var exceptionMessage = "Test exception in prepare.";
    sessionMock.Setup(s => s.Keyspace).Returns<string>(null);
    sessionMock
        .Setup(s => s.PrepareAsync(It.IsAny<string>()))
        .Returns<string>(q =>
        {
            // Exactly one caller observes firstTime == 1 and receives a faulted task.
            if (Interlocked.CompareExchange(ref firstTime, 0, 1) == 1)
            {
                PreparedStatement Func() => throw new Exception(exceptionMessage);
                return (Task.Run((Func<PreparedStatement>)Func));
            }
            // Every later prepare succeeds; remember which task was handed out.
            var task = Task.FromResult(GetPrepared(q));
            createdPreparedStatementsBag.Add(task);
            return (task);
        });
    var cql = Cql.New("Q");
    var sf = new StatementFactory();
    var concurrentTasks = 1000;
    // Hold all workers at the semaphore so they start as close to simultaneously as possible.
    var semaphore = new SemaphoreSlim(0, concurrentTasks);
    var tasks = Enumerable.Range(0, concurrentTasks)
        .Select(_ => Task.Run(
            async () =>
            {
                await semaphore.WaitAsync().ConfigureAwait(false);
                return (await sf.GetStatementAsync(sessionMock.Object, cql).ConfigureAwait(false));
            }))
        .ToArray();
    semaphore.Release(concurrentTasks);
    try
    {
        await Task.WhenAll(tasks).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        // WhenAll surfaces the injected prepare failure here; message must match.
        Assert.AreEqual(exceptionMessage, ex.Message);
    }
    // Exactly one caller saw the injected failure; everyone else completed.
    Assert.AreEqual(1, tasks.Count(t => t.IsFaulted));
    Assert.AreEqual(concurrentTasks - 1, tasks.Count(t => t.IsCompleted && !t.IsFaulted));
    tasks = tasks.Where(t => !t.IsFaulted).ToArray();
    // Reference-equality sets prove caching: many distinct bound statements,
    // but all of them bound from one shared PreparedStatement instance.
    var boundStatementsSet = new HashSet<BoundStatement>(
        tasks.Select(t => (BoundStatement)t.Result),
        new ReferenceEqualityComparer<BoundStatement>());
    var preparedStatementsSet = new HashSet<PreparedStatement>(
        tasks.Select(t => ((BoundStatement)t.Result).PreparedStatement),
        new ReferenceEqualityComparer<PreparedStatement>());
    var createdPreparedStatementsSet = new HashSet<PreparedStatement>(
        createdPreparedStatementsBag.Select(t => t.Result),
        new ReferenceEqualityComparer<PreparedStatement>());
    Assert.AreEqual(1, preparedStatementsSet.Count);
    Assert.AreEqual(concurrentTasks - 1, boundStatementsSet.Count);
    Assert.AreEqual(1, createdPreparedStatementsSet.Count);
}
/// <summary>
/// Persists the given grain state to Cassandra. State types registered in
/// _concurrentStateTypes are written with lightweight transactions (LWTs) and an
/// integer ETag for optimistic concurrency; all other state uses plain upserts.
/// </summary>
/// <param name="grainType">Grain type name, part of the row key.</param>
/// <param name="grainReference">Reference used to derive the row id.</param>
/// <param name="grainState">State holder; its ETag is updated on a concurrent write.</param>
/// <exception cref="CassandraConcurrencyException">Thrown when the LWT update is not applied because the stored ETag changed.</exception>
public async Task WriteStateAsync(string grainType, GrainReference grainReference, IGrainState grainState)
{
    var isConcurrentState = _concurrentStateTypes.Contains(grainState.State.GetType());
    var (id, cassandraState) = await GetCassandraGrainState(grainType, grainReference, isConcurrentState);
    try
    {
        var json = JsonConvert.SerializeObject(grainState.State, _jsonSettings);
        if (isConcurrentState)
        {
            var newEtag = 0;
            if (cassandraState == null)
            {
                // First write: insert with ETag 0 under the serial (LWT) consistency level.
                // NOTE(review): the applied-info returned by InsertIfNotExistsAsync is not
                // checked, so a lost race on first insert appears to go undetected — confirm intent.
                cassandraState = new CassandraGrainState
                {
                    Id = id,
                    GrainType = grainType,
                    State = json,
                    ETag = newEtag.ToString()
                };
                await _mapper.InsertIfNotExistsAsync(cassandraState, CqlQueryOptions.New().SetSerialConsistencyLevel(SerialConsistencyLevel))
                    .ConfigureAwait(false);
            }
            else
            {
                // Optimistic concurrency: bump the ETag and update only if the stored
                // ETag still matches the one this writer last read.
                // NOTE(review): the TryParse result is ignored, so an unparsable ETag
                // silently falls back to stateEtag == 0 — confirm that is acceptable.
                int.TryParse(grainState.ETag, out var stateEtag);
                newEtag = stateEtag;
                newEtag++;
                var appliedInfo =
                    await _mapper.UpdateIfAsync<CassandraGrainState>(
                            Cql.New(
                                    $"SET {nameof(CassandraGrainState.State)} = ?, {nameof(CassandraGrainState.ETag)} = ? " +
                                    $"WHERE {nameof(CassandraGrainState.Id)} = ? AND {nameof(CassandraGrainState.GrainType)} = ? " +
                                    $"IF {nameof(CassandraGrainState.ETag)} = ?",
                                    json, newEtag.ToString(), id, grainType, stateEtag.ToString())
                                .WithOptions(x => x.SetSerialConsistencyLevel(SerialConsistencyLevel)))
                        .ConfigureAwait(false);
                if (!appliedInfo.Applied)
                {
                    // Another writer updated the row first: surface the ETag conflict.
                    throw new CassandraConcurrencyException(cassandraState.Id, stateEtag.ToString(), appliedInfo.Existing.ETag);
                }
            }
            grainState.ETag = newEtag.ToString();
        }
        else
        {
            if (cassandraState == null)
            {
                // Non-concurrent state: plain insert, no ETag tracking.
                cassandraState = new CassandraGrainState
                {
                    Id = id,
                    GrainType = grainType,
                    State = json,
                    ETag = string.Empty
                };
                await _mapper.InsertAsync(cassandraState, CqlQueryOptions.New().SetConsistencyLevel(DefaultConsistencyLevel))
                    .ConfigureAwait(false);
            }
            else
            {
                // Overwrite the existing row with the freshly serialized state.
                cassandraState.State = json;
                await _mapper.UpdateAsync(cassandraState, CqlQueryOptions.New().SetConsistencyLevel(DefaultConsistencyLevel))
                    .ConfigureAwait(false);
            }
        }
    }
    catch (DriverException)
    {
        // Log and rethrow so the caller sees the original driver failure.
        _logger.LogWarning("Cassandra driver error occured while creating grain state for grain {grainId}.", id);
        throw;
    }
}
/// <summary>
/// Fetch with a Cql object plus per-query options should still map every row to a POCO.
/// </summary>
public void Fetch_Pocos_WithCqlAndOptions()
{
    var query = Cql.New("SELECT * FROM users").WithOptions(opt => opt.DoNotPrepare());

    List<PlainUser> users = CqlClient.Fetch<PlainUser>(query);

    users.ShouldAllBeEquivalentTo(TestDataHelper.Users, opt => opt.AccountForTimestampAccuracy());
}
/// <summary>
/// Async Fetch with a Cql object plus per-query options should still map every row to a POCO.
/// </summary>
// FIX: was "async void" — exceptions thrown in an async void test method are
// unobservable by the test runner, so a failing assertion could pass silently
// (or crash the process). "async Task" lets the framework await and report it.
public async Task FetchAsync_Pocos_WithCqlAndOptions()
{
    List<PlainUser> users = await CqlClient.FetchAsync<PlainUser>(Cql.New("SELECT * FROM users").WithOptions(opt => opt.SetConsistencyLevel(ConsistencyLevel.Quorum)));
    users.ShouldAllBeEquivalentTo(TestDataHelper.Users, opt => opt.AccountForTimestampAccuracy());
}
/// <summary>
/// Fetches the transaction read model for the given id, or the type's default
/// when no matching row exists, using the configured read consistency level.
/// </summary>
public Task<TransactionByIdReadModel> GetAsync(Guid transactionId)
{
    var query = Cql
        .New("WHERE transaction_id = ?", transactionId)
        .WithOptions(opt => opt.SetConsistencyLevel(this.settings.TransactionsReadConsistencyLevel));
    return this.connection.Mapper.FirstOrDefaultAsync<TransactionByIdReadModel>(query);
}
/// <summary>
/// Update an inventory item: marks it reserved by writing the item row, an item
/// history row and the by-identifier lookup row in a single logged batch at
/// quorum consistency.
/// </summary>
/// <param name="oItem">Item carrying the keys and identifier values to reserve.</param>
/// <returns>Success result echoing the input item.</returns>
public async Task<ResultModel<item>> UpdateInventoryAsync(item oItem)
{
    // FIX: removed two dead Cql locals ("updateItem", "updateItemByIdentifer")
    // that interpolated values straight into the CQL text — unparameterized
    // (injection-prone, unquoted strings) and never executed anywhere below.

    // Current-state row, marked reserved with fresh created/updated timeuuids.
    item item = new item
    {
        businessUnit = oItem.businessUnit,
        location = oItem.location,
        upc = oItem.upc,
        identifierValue = oItem.identifierValue,
        identifierType = oItem.identifierType,
        serialNumber = oItem.serialNumber,
        receiptId = oItem.receiptId,
        status = ItemStatus.reserved.ToString(),
        dateCreated = TimeUuid.NewId(),
        dateUpdated = TimeUuid.NewId(),
        itemAttributes = null
    };

    // History row with its own generated id.
    itemHistory itemHist = new itemHistory
    {
        businessUnit = oItem.businessUnit,
        location = oItem.location,
        upc = oItem.upc,
        histItemId = Guid.NewGuid(),
        identifierValue = oItem.identifierValue,
        identifierType = oItem.identifierType,
        serialNumber = oItem.serialNumber,
        receiptId = oItem.receiptId,
        status = ItemStatus.reserved.ToString(),
        dateCreated = TimeUuid.NewId(),
        dateUpdated = TimeUuid.NewId(),
        itemAttributes = null
    };

    // Lookup row keyed by identifier value/type.
    itemByIdentifier itemById = new itemByIdentifier
    {
        businessUnit = oItem.businessUnit,
        location = oItem.location,
        upc = oItem.upc,
        identifierValue = oItem.identifierValue,
        identifierType = oItem.identifierType,
        serialNumber = oItem.serialNumber,
        receiptId = oItem.receiptId,
        status = ItemStatus.reserved.ToString(),
        dateCreated = TimeUuid.NewId(),
        dateUpdated = TimeUuid.NewId(),
        itemAttributes = null
    };

    // Logged batch: all three writes succeed or fail together.
    var batch = mapper.CreateBatch(BatchType.Logged);
    batch.Insert<item>(item);
    batch.Insert<itemHistory>(itemHist);
    batch.Insert<itemByIdentifier>(itemById);
    batch.WithOptions(ac => ac.SetConsistencyLevel(ConsistencyLevel.Quorum));
    await mapper.ExecuteAsync(batch);

    return new ResultModel<item> { resultContent = oItem, resultCode = ResultStatus.success };
}
/// <summary>
/// Builds a paged cursor over SEPayload rows for the given signal. Unlike the
/// start/end variant, this overload computes the set of chunk indexes touched by
/// the decimated read and queries them with an IN clause.
/// </summary>
/// <param name="signalId">Parent signal id (partition key component).</param>
/// <param name="start">Start coordinate per rank; the last rank is the in-line sample offset.</param>
/// <param name="count">Number of samples to read per rank.</param>
/// <param name="decimationFactor">Optional per-rank decimation; defaults to 1 for every rank.</param>
/// <returns>A cursor wrapping every fetched page plus per-dimension page counts.</returns>
/// <exception cref="Exception">Thrown with ErrorMessages.DataNotFoundError when a line returns no rows.</exception>
public async Task<ICursor<T>> GetCursorAsync<T>(Guid signalId, List<long> start, List<long> count, List<long> decimationFactor = null)
{
    myCoreService = CoreService.GetInstance();
    var signal = await myCoreService.GetOneByIdAsync(signalId);
    // Samples stored per chunk row; drives the coordinate -> chunk-index mapping.
    int sampleCount = (int)signal.NumberOfSamples;
    int indexStart = (int)start.LastOrDefault();
    int getNumber = (int)count.LastOrDefault();
    List<IPage<SEPayload>> listIPage = new List<IPage<SEPayload>>();

    if (decimationFactor == null)
    {
        // No decimation requested: treat every rank as factor 1.
        decimationFactor = new List<long>();
        for (int i = 0; i < start.Count; i++)
        {
            decimationFactor.Add(1);
        }
    }
    int factor = (int)decimationFactor[decimationFactor.Count - 1];

    // How many lines (combinations of all ranks except the last) must be read.
    long lineCount = 1;
    for (int i = 0; i < count.Count - 1; i++)
    {
        lineCount = lineCount * count[i];
    }

    // lineCursor walks the leading ranks; lineCounter counts down remaining reads per rank.
    List<long> lineCursor = new List<long>(start.Count - 1);
    List<long> lineCounter = new List<long>(start.Count - 1);
    lineCursor.AddRange(start.Take(start.Count - 1));
    lineCounter.AddRange(count.Take(count.Count - 1));

    List<long> dimPage = new List<long>();
    List<long> indexes = new List<long>();
    for (int line = 0; line < lineCount; line++)
    {
        byte[] pagingState = null;
        string dimension = DimensionsToText(lineCursor);

        // FIX: reset the index list for each line; previously it kept growing, so
        // every later line re-queried all earlier lines' chunks (with duplicates).
        indexes.Clear();

        // Compute which chunk indexes the decimated read touches.
        if (factor < sampleCount)
        {
            // Stride smaller than one chunk: every chunk between the first and
            // last sampled point is hit, so add the whole contiguous range.
            var lastPoint = (factor * getNumber + indexStart) - factor;
            var lastIndex = lastPoint / sampleCount + 1;
            var firstIndex = indexStart / sampleCount;
            for (int i = firstIndex; i < lastIndex; i++)
            {
                indexes.Add(i);
            }
        }
        else
        {
            // Stride spans chunks: map each sampled point to its chunk individually.
            int coordinate = 0;
            int variable = 0;
            for (int i = 0; i < getNumber; i++)
            {
                coordinate = indexStart + i * factor;
                variable = coordinate / sampleCount;
                // Consecutive points can land in the same chunk; skip duplicates
                // so the IN list stays minimal (indexes are produced in order).
                if (indexes.Count == 0 || indexes[indexes.Count - 1] != variable)
                {
                    indexes.Add(variable);
                }
            }
        }

        Cql cql = Cql.New("SELECT * FROM sepayload where parentid=? and dimensions=? and indexes in ? ", signalId, dimension, indexes);

        IPage<SEPayload> ipagePartialList = await mapper.FetchPageAsync<SEPayload>(cql.WithOptions(opt => opt.SetPageSize(pagesize).SetPagingState(pagingState)));
        if (ipagePartialList.Count == 0)
        {
            // No rows for this line: the requested range has no stored data.
            throw new Exception(ErrorMessages.DataNotFoundError);
        }
        listIPage.Add(ipagePartialList);

        // Drain every remaining driver page for this line.
        while (ipagePartialList.PagingState != null)
        {
            pagingState = ipagePartialList.PagingState;
            ipagePartialList = await mapper.FetchPageAsync<SEPayload>(cql.WithOptions(opt => opt.SetPageSize(pagesize).SetPagingState(pagingState)));
            if (ipagePartialList.Count() > 0)
            {
                listIPage.Add(ipagePartialList);
            }
        }

        // Pages fetched for this line = total pages so far minus pages of previous lines.
        long dimPageCount = listIPage.Count() - dimPage.Sum();
        dimPage.Add(dimPageCount);

        // Advance the line cursor like an odometer, least-significant rank first.
        for (var rankIndex = lineCursor.Count - 1; rankIndex >= 0; rankIndex--)
        {
            lineCounter[rankIndex] -= 1;
            if (lineCounter[rankIndex] == 0)
            {
                lineCounter[rankIndex] = count[rankIndex];
                lineCursor[rankIndex] = start[rankIndex];
            }
            else
            {
                lineCursor[rankIndex] += decimationFactor[rankIndex];
                break;
            }
        }
    }

    Cursor<T> jdbccursor = new Cursor<T>(listIPage, start.LastOrDefault(), count, dimPage, sampleCount, decimationFactor.LastOrDefault());
    return jdbccursor;
}