/// <summary>
/// Verifies that ScheduledNotifier delivers values reported without an explicit
/// time immediately (virtual tick 1) and values reported with a DateTimeOffset
/// at that offset's tick on the test scheduler.
/// </summary>
public void TestOffset()
{
    var scheduler = new TestScheduler();
    var observer = scheduler.CreateObserver<int>();
    var notifier = new ScheduledNotifier<int>(scheduler);
    notifier.Subscribe(observer);

    var baseTime = new DateTimeOffset(1999, 1, 1, 1, 1, 1, TimeSpan.Zero);

    // Interleave immediate reports with reports scheduled at explicit times.
    notifier.Report(1);
    notifier.Report(2);
    notifier.Report(3, baseTime);
    notifier.Report(4, baseTime.AddDays(10));
    notifier.Report(5, baseTime.AddYears(1));
    notifier.Report(6);

    scheduler.Start();

    // Immediate reports fire at tick 1; scheduled ones at their target ticks.
    observer.Messages.Is(
        OnNext(1, 1),
        OnNext(1, 2),
        OnNext(1, 6),
        OnNext(baseTime.Ticks, 3),
        OnNext(baseTime.AddDays(10).Ticks, 4),
        OnNext(baseTime.AddYears(1).Ticks, 5));
}
/// <summary>
/// Builds the retention-policy fixture: substituted file system, journal and
/// clock frozen at 2015-01-15, plus five journal entries of varying age,
/// success and policy-set, all of which exist on the (substituted) disk.
/// </summary>
public void SetUp()
{
    fileSystem = Substitute.For<ICalamariFileSystem>();
    deploymentJournal = Substitute.For<IDeploymentJournal>();
    clock = Substitute.For<IClock>();
    retentionPolicy = new RetentionPolicy(fileSystem, deploymentJournal, clock);

    now = new DateTimeOffset(new DateTime(2015, 01, 15), new TimeSpan(0, 0, 0));
    clock.GetUtcTime().Returns(now);

    // Builds a journal entry deployed 'daysAgo' days before 'now'.
    Func<string, string, int, string, string, bool, JournalEntry> makeEntry =
        (id, policySet, daysAgo, package, folder, wasSuccessful) =>
            new JournalEntry(id, "blah", "blah", "blah", "blah", policySet,
                now.AddDays(-daysAgo).LocalDateTime, package, folder, null, wasSuccessful);

    // Deployed 4 days prior to 'now'
    fourDayOldDeployment = makeEntry("fourDayOld", policySet1, 4, "C:\\packages\\Acme.1.0.0.nupkg", "C:\\Applications\\Acme.1.0.0", true);
    // Deployed 3 days prior to 'now'
    threeDayOldDeployment = makeEntry("threeDayOld", policySet1, 3, "C:\\packages\\Acme.1.1.0.nupkg", "C:\\Applications\\Acme.1.1.0", true);
    // Deployed 2 days prior to 'now'
    twoDayOldDeployment = makeEntry("twoDayOld", policySet1, 2, "C:\\packages\\Acme.1.2.0.nupkg", "C:\\Applications\\Acme.1.2.0", true);
    // Deployed (unsuccessfully) 1 day prior to 'now'
    oneDayOldUnsuccessfulDeployment = makeEntry("oneDayOldUnsuccessful", policySet1, 1, "C:\\packages\\Acme.1.3.0.nupkg", "C:\\Applications\\Acme.1.3.0", false);
    // Deployed 5 days prior to 'now', but has a different policy-set
    fiveDayOldNonMatchingDeployment = makeEntry("fiveDayOld", policySet2, 5, "C:\\packages\\Beta.1.0.0.nupkg", "C:\\Applications\\Beta.1.0.0", true);

    var journalEntries = new List<JournalEntry>
    {
        fiveDayOldNonMatchingDeployment,
        fourDayOldDeployment,
        threeDayOldDeployment,
        twoDayOldDeployment,
        oneDayOldUnsuccessfulDeployment
    };
    deploymentJournal.GetAllJournalEntries().Returns(journalEntries);

    // Every recorded package file and extraction directory exists on disk.
    foreach (var entry in journalEntries)
    {
        fileSystem.FileExists(entry.ExtractedFrom).Returns(true);
        fileSystem.DirectoryExists(entry.ExtractedTo).Returns(true);
    }
}
/// <summary>
/// Verifies that the fluent From() extensions advance a fixed DateTimeOffset
/// by the same amount as the corresponding DateTimeOffset.AddXxx call.
/// </summary>
/// <param name="value">Number of units to advance by (may be negative).</param>
public void From_FromFixedDateTime_Tests(int value)
{
    var originalPointInTime = new DateTimeOffset(1976, 12, 31, 17, 0, 0, 0, TimeSpan.Zero);

    // NUnit's Assert.AreEqual signature is (expected, actual); the original
    // code passed them reversed, which makes failure messages report the
    // values with swapped roles. The expected value (plain DateTimeOffset
    // arithmetic) goes first.
    Assert.AreEqual(originalPointInTime.AddYears(value), value.Years().From(originalPointInTime));
    Assert.AreEqual(originalPointInTime.AddMonths(value), value.Months().From(originalPointInTime));
    Assert.AreEqual(originalPointInTime.AddDays(value * DaysPerWeek), value.Weeks().From(originalPointInTime));
    Assert.AreEqual(originalPointInTime.AddDays(value), value.Days().From(originalPointInTime));
    Assert.AreEqual(originalPointInTime.AddHours(value), value.Hours().From(originalPointInTime));
    Assert.AreEqual(originalPointInTime.AddMinutes(value), value.Minutes().From(originalPointInTime));
    Assert.AreEqual(originalPointInTime.AddSeconds(value), value.Seconds().From(originalPointInTime));
    Assert.AreEqual(originalPointInTime.AddMilliseconds(value), value.Milliseconds().From(originalPointInTime));
    Assert.AreEqual(originalPointInTime.AddTicks(value), value.Ticks().From(originalPointInTime));
}
/// <summary>
/// When the repository reports a last-synchronization time later than the
/// source file's modification time, no transcoding should be requested.
/// </summary>
public async Task Synchronize_WhenLastSynchronizationTimeIsMoreRecentThanFileModificationDate_ShouldNotCallTranscode(
    double daysToAdd,
    [Frozen]Mock<ISynchronizedFilesRepository> synchronizedFileRepository,
    [Frozen]Mock<IFileTranscoder> fileTranscoder,
    MusicMirrorConfiguration config,
    FileSynchronizer sut,
    SourceFilePath sourceFile,
    TargetFilePath targetFile)
{
    //arrange: the file was written at a fixed instant; the last sync happened
    //daysToAdd days relative to that instant.
    var modificationTime = new DateTimeOffset(2015, 04, 01, 0, 0, 0, TimeSpan.Zero);
    var synchronizationTime = modificationTime.AddDays(daysToAdd);
    sourceFile.LastWriteTime = modificationTime;
    synchronizedFileRepository
        .Setup(s => s.GetMirroredFilePath(It.IsAny<CancellationToken>(), sourceFile.File))
        .ReturnsTask(targetFile.File);
    synchronizedFileRepository
        .Setup(s => s.GetLastSynchronization(It.IsAny<CancellationToken>(), sourceFile.File))
        .ReturnsTask(synchronizationTime);

    //act
    await sut.Synchronize(CancellationToken.None, sourceFile);

    //assert: the transcoder was never invoked for the mirrored directory.
    fileTranscoder.Verify(
        f => f.Transcode(
            It.IsAny<CancellationToken>(),
            sourceFile.File,
            AudioFormat.Flac,
            It.Is((DirectoryInfo d) => d.FullName.Equals(targetFile.File.DirectoryName))),
        Times.Never());
}
/// <summary>
/// Emits three events (two at 2013-05-28+10:00, one two days later) carrying a
/// custom "Song" property, then checks the bulk payload targets the per-day
/// logstash indices and uses the configured field names.
/// </summary>
public async Task UsesCustomPropertyNames()
{
    try
    {
        await this.ThrowAsync();
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var template = new MessageTemplateParser().Parse("{Song}++");
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };

            var firstEvent = new LogEvent(timestamp, LogEventLevel.Information, e, template, properties);
            //one off
            sink.Emit(firstEvent);
            sink.Emit(firstEvent);

            var laterEvent = new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties);
            sink.Emit(laterEvent);
        }

        var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[1].Should().Contain("fields\":{");
        bulkJsonPieces[1].Should().Contain("@timestamp");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");
    }
}
/// <summary>
/// Emits two events (two days apart) with a custom "Song" property and checks
/// that a single bulk HTTP post is made whose payload uses the per-day
/// logstash indices and the configured field names.
/// </summary>
public async Task UsesCustomPropertyNames()
{
    try
    {
        await new HttpClient().GetStringAsync("http://i.do.not.exist");
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var template = new MessageTemplateParser().Parse("{Song}++");
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2 }))
            };

            sink.Emit(new LogEvent(timestamp, LogEventLevel.Information, e, template, properties));
            sink.Emit(new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties));
        }

        // One post carrying two header/payload pairs = four newline-separated pieces.
        _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
        var bulkJsonPieces = _seenHttpPosts.First().Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        bulkJsonPieces.Should().HaveCount(4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[1].Should().Contain("fields\":{");
        bulkJsonPieces[1].Should().Contain("@timestamp");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");
    }
}
/// <summary>
/// Computes the UTC tick count of the next scheduled occurrence based on the
/// configured periodicity and the minute/hour offsets. Any unrecognized
/// periodicity falls back to hourly scheduling (same as the original
/// switch's default case).
/// </summary>
internal long GetNextOccurrence()
{
    DateTimeOffset now = _ClockSource.Now;
    DateTimeOffset next;

    if (this.Periodicity == PeriodicityType.Daily)
    {
        // Today at the configured hour/minute; roll to tomorrow if already past.
        next = new DateTimeOffset(now.Year, now.Month, now.Day, this.HourOffset, this.MinuteOffset, 0, now.Offset);
        if (next <= now)
        {
            next = next.AddDays(1.0);
        }
    }
    else
    {
        // Hourly (and anything unrecognized): this hour at the configured
        // minute; roll to the next hour if already past.
        next = new DateTimeOffset(now.Year, now.Month, now.Day, now.Hour, this.MinuteOffset, 0, now.Offset);
        if (next <= now)
        {
            next = next.AddHours(1.0);
        }
    }

    return next.ToUniversalTime().Ticks;
}
/// <summary>
/// Excluding Fridays should push the next included time from a Friday to the
/// following Monday (+3 days). NOTE(review): this presumes the calendar
/// fixture already excludes weekends — confirm against the test setup.
/// </summary>
public void TestWeekDayExclusion()
{
    // 2007-08-03 is a Friday.
    DateTimeOffset friday = new DateTimeOffset(2007, 8, 3, 0, 0, 0, TimeSpan.Zero);

    cal.SetDayExcluded(DayOfWeek.Friday, true);

    // next monday should be next possible
    Assert.AreEqual(friday.AddDays(3), cal.GetNextIncludedTimeUtc(friday));
}
/// <summary>
/// Fetches the calendar events for the 24-hour window starting at
/// <paramref name="date"/>. Returns an empty list when the service yields
/// no items, so callers never see null.
/// </summary>
public async Task<IEnumerable<Meeting>> GetCalendarEvents(DateTimeOffset date)
{
    var uri = BuildCalendarUri(date, date.AddDays(1));
    var meetings = await new HttpHelper().GetItemsAsync<Meeting>(uri);
    return meetings ?? new List<Meeting>();
}
/// <summary>
/// Returns the id of the first post created in the 24-hour unix-time window
/// starting at <paramref name="day"/>.
/// NOTE(review): blocks on the async Query via .Result, and assumes at least
/// one post exists in the window — an empty result will throw.
/// </summary>
public static int GetIdOfFirstPostOfDay(DateTimeOffset day)
{
    var windowStart = day.ToUnixTimeSeconds();
    var windowEnd = day.AddDays(1).ToUnixTimeSeconds();
    var items = Query(
        $"/2.2/posts",
        $"pagesize=1&order=asc&min={windowStart}&max={windowEnd}&sort=creation").Result;
    return items[0].post_id;
}
/// <summary>
/// Creating an activity that starts before its campaign should return the
/// Edit view with exactly one model-state error describing the violation.
/// </summary>
public async Task CreateReturnsEditView_When_ActivityStartDateBeforeCampaignStartDate()
{
    var campaignStartDate = new DateTimeOffset(new DateTime(1900, 1, 1));
    var campaignEndDate = campaignStartDate.AddDays(4);
    var sut = GetActivityController(campaignStartDate, campaignEndDate);

    // The activity starts one day before the campaign does — invalid.
    var activityModel = new ActivityDetailModel
    {
        EndDateTime = campaignStartDate,
        StartDateTime = campaignStartDate.AddDays(-1)
    };

    var viewResult = (ViewResult)await sut.Create(1, activityModel, null);

    Assert.Equal("Edit", viewResult.ViewName);
    var errorMessages = sut.ModelState.GetErrorMessages();
    Assert.Equal(1, errorMessages.Count);
    Assert.Equal("Start date cannot be earlier than the campaign start date " + campaignStartDate.ToString("d"), errorMessages[0]);
}
/// <summary>
/// AddDays with a time-zone argument should handle a start value whose offset
/// (-4) does not match Pacific time, producing midnight March 9th with the
/// post-spring-forward -7 offset.
/// </summary>
public void CanAddDaysAcrossDstTransition_StartWithMismatchedOffset()
{
    var pacific = TimeZoneInfo.FindSystemTimeZoneById("Pacific Standard Time");
    var start = new DateTimeOffset(2015, 3, 8, 4, 0, 0, TimeSpan.FromHours(-4));

    var actual = start.AddDays(1, pacific);

    var expected = new DateTimeOffset(2015, 3, 9, 0, 0, 0, TimeSpan.FromHours(-7));
    Assert.Equal(expected, actual);
    Assert.Equal(expected.Offset, actual.Offset);
}
/// <summary>
/// Adding a day so the result lands in the ambiguous fall-back hour should
/// keep the daylight (-7) offset rather than switching to standard time.
/// </summary>
public void CanAddDaysAcrossDstTransition_LandInOverlap()
{
    var pacific = TimeZoneInfo.FindSystemTimeZoneById("Pacific Standard Time");
    var start = new DateTimeOffset(2015, 10, 31, 1, 30, 0, TimeSpan.FromHours(-7));

    var actual = start.AddDays(1, pacific);

    var expected = new DateTimeOffset(2015, 11, 1, 1, 30, 0, TimeSpan.FromHours(-7));
    Assert.Equal(expected, actual);
    Assert.Equal(expected.Offset, actual.Offset);
}
// Populates the static _customers list with five deterministic DCustomer rows
// derived from a fixed base timestamp (2015-01-01 01:02:03.004 +00:00).
// Members alternate between two computed values (or null, for the Nullable*
// members) based on id parity (e % 2) or divisibility by three (e % 3) so that
// queries over the data exercise both branches.
private static void InitCustomers()
{
    DateTimeOffset dto = new DateTimeOffset(2015, 1, 1, 1, 2, 3, 4, TimeSpan.Zero);
    _customers = Enumerable.Range(1, 5).Select(e =>
        new DCustomer
        {
            Id = e,
            DateTime = dto.AddYears(e).DateTime,
            // Even ids: month-shifted offset; odd ids: day shift plus 10ms.
            Offset = e % 2 == 0 ? dto.AddMonths(e) : dto.AddDays(e).AddMilliseconds(10),
            Date = e % 2 == 0 ? dto.AddDays(e).Date : dto.AddDays(-e).Date,
            TimeOfDay = e % 3 == 0 ? dto.AddHours(e).TimeOfDay : dto.AddHours(-e).AddMilliseconds(10).TimeOfDay,
            // Nullable scalars: null on alternating rows.
            NullableDateTime = e % 2 == 0 ? (DateTime?)null : dto.AddYears(e).DateTime,
            NullableOffset = e % 3 == 0 ? (DateTimeOffset?)null : dto.AddMonths(e),
            NullableDate = e % 2 == 0 ? (Date?)null : dto.AddDays(e).Date,
            NullableTimeOfDay = e % 3 == 0 ? (TimeOfDay?)null : dto.AddHours(e).TimeOfDay,
            // Collection members; the Nullable* arrays embed a null element.
            DateTimes = new [] { dto.AddYears(e).DateTime, dto.AddMonths(e).DateTime },
            Offsets = new [] { dto.AddMonths(e), dto.AddDays(e) },
            Dates = new [] { (Date)dto.AddYears(e).Date, (Date)dto.AddMonths(e).Date },
            TimeOfDays = new [] { (TimeOfDay)dto.AddHours(e).TimeOfDay, (TimeOfDay)dto.AddMinutes(e).TimeOfDay },
            NullableDateTimes = new [] { dto.AddYears(e).DateTime, (DateTime?)null, dto.AddMonths(e).DateTime },
            NullableOffsets = new [] { dto.AddMonths(e), (DateTimeOffset?)null, dto.AddDays(e) },
            NullableDates = new [] { (Date)dto.AddYears(e).Date, (Date?)null, (Date)dto.AddMonths(e).Date },
            NullableTimeOfDays = new [] { (TimeOfDay)dto.AddHours(e).TimeOfDay, (TimeOfDay?)null, (TimeOfDay)dto.AddMinutes(e).TimeOfDay },
        }).ToList();
}
/// <summary>
/// Exercises each DateTimeOffset.AddXxx method from a fixed base value
/// (1986-08-15 10:20:05.070) and checks the affected date/time component.
/// </summary>
public static void TestAddition()
{
    DateTimeOffset baseValue = new DateTimeOffset(new DateTime(1986, 8, 15, 10, 20, 5, 70));

    // Date components: days, months, years.
    Assert.Equal(17, baseValue.AddDays(2).Day);
    Assert.Equal(13, baseValue.AddDays(-2).Day);
    Assert.Equal(10, baseValue.AddMonths(2).Month);
    Assert.Equal(6, baseValue.AddMonths(-2).Month);
    Assert.Equal(1996, baseValue.AddYears(10).Year);
    Assert.Equal(1976, baseValue.AddYears(-10).Year);

    // Time components: hours, minutes, seconds, milliseconds.
    Assert.Equal(13, baseValue.AddHours(3).Hour);
    Assert.Equal(7, baseValue.AddHours(-3).Hour);
    Assert.Equal(25, baseValue.AddMinutes(5).Minute);
    Assert.Equal(15, baseValue.AddMinutes(-5).Minute);
    Assert.Equal(35, baseValue.AddSeconds(30).Second);
    Assert.Equal(2, baseValue.AddSeconds(-3).Second);
    Assert.Equal(80, baseValue.AddMilliseconds(10).Millisecond);
    Assert.Equal(60, baseValue.AddMilliseconds(-10).Millisecond);
}
/// <summary>
/// DateTimeKey equality under the Time flag should compare only the time of
/// day: the same time on a different date is equal, a different time or a
/// Date-flagged key is not.
/// </summary>
public void Time()
{
    var baseDate = new DateTimeOffset(year: 2016, month: 4, day: 1, hour: 1, minute: 0, second: 0, offset: TimeSpan.Zero);
    EqualityUnit
        .Create(new DateTimeKey(baseDate, DateTimeKeyFlags.Time))
        .WithEqualValues(
            new DateTimeKey(baseDate, DateTimeKeyFlags.Time),
            // Same time on the next day is still equal under the Time flag.
            new DateTimeKey(baseDate.AddDays(1), DateTimeKeyFlags.Time))
        .WithNotEqualValues(
            new DateTimeKey(baseDate, DateTimeKeyFlags.Date),
            new DateTimeKey(baseDate.AddHours(1), DateTimeKeyFlags.Time))
        .RunAll(
            (x, y) => x == y,
            (x, y) => x != y);
}
/// <summary>
/// Computes prayer times for all 365 days of 2015 (ISNA method, Shafii Asr,
/// UTC-7) at a fixed location and spot-checks day index 33 (Feb 3rd).
/// </summary>
public void TestTimesForYear()
{
    var calculator = new PrayerTimesCalculator(47.660918, -122.136371)
    {
        CalculationMethod = CalculationMethods.ISNA,
        AsrJurusticMethod = AsrJuristicMethods.Shafii
    };

    var startOfYear = new DateTimeOffset(new DateTime(2015, 1, 1));
    var times = new Times[365];
    for (int day = 0; day < 365; day++)
    {
        times[day] = calculator.GetPrayerTimes(startOfYear.AddDays(day), -7);
    }

    Assert.Equal(new DateTime(2015, 2, 3), times[33].Date);
    Assert.Equal(new TimeSpan(6, 8, 0), times[33].Fajr);
}
// Tracks an admiral's ranking points; wires up the differences and the timers
// that reload/finalize them once the first port API response arrives.
internal AdmiralRankingPoints(Admiral rpAdmiral)
{
    r_Owner = rpAdmiral;
    r_AdmiralID = rpAdmiral.ID;

    // Defer all initialization until the first "api_port/port" message.
    ApiService.SubscribeOnce("api_port/port", delegate
    {
        ReloadInitialRankingPoints();

        PreviousUpdateDifference = new AdmiralRankingPointsDifference(this, AdmiralRankingPointsDifferenceType.PreviousUpdate);
        DayDifference = new AdmiralRankingPointsDifference(this, AdmiralRankingPointsDifferenceType.Day);
        MonthDifference = new AdmiralRankingPointsDifference(this, AdmiralRankingPointsDifferenceType.Month);

        Update();

        // Today's date stamped with a +06:00 offset. NOTE(review): presumably
        // one of the in-game rank-update times — confirm against server schedule.
        var rRankUpdateTime = new DateTimeOffset(DateTimeOffset.Now.Date, TimeSpan.FromHours(6.0));
        var rDayTimeSpan = TimeSpan.FromDays(1.0);

        // Daily timer starting tomorrow: refresh previous-update and day diffs.
        Observable.Timer(rRankUpdateTime.AddDays(1.0), rDayTimeSpan).Subscribe(delegate
        {
            PreviousUpdateDifference.Reload();
            DayDifference.Reload();
            OnPropertyChanged(nameof(PreviousUpdateDifference));
            OnPropertyChanged(nameof(DayDifference));
        });

        // Second daily timer 12 hours later; push to tomorrow if already past.
        rRankUpdateTime += TimeSpan.FromHours(12.0);
        if (DateTimeOffset.Now > rRankUpdateTime)
            rRankUpdateTime += rDayTimeSpan;

        Observable.Timer(rRankUpdateTime, rDayTimeSpan).Subscribe(delegate
        {
            PreviousUpdateDifference.Reload();
            OnPropertyChanged(nameof(PreviousUpdateDifference));
        });

        // Month finalization: 2 hours before the start of next month (+09:00).
        // Run immediately if that moment has already passed.
        var rNow = DateTimeOffset.Now;
        var rFinalizationTime = new DateTimeOffset(rNow.Year, rNow.Month, 1, 0, 0, 0, TimeSpan.FromHours(9.0)).AddMonths(1).AddHours(-2.0);
        if (rNow >= rFinalizationTime)
            FinalizeThisMonth();
        else
            Observable.Return(Unit.Default).Delay(rFinalizationTime).Subscribe(_ => FinalizeThisMonth());

        // Clear the finalized flag 2 hours after finalization (month rollover).
        Observable.Return(Unit.Default).Delay(rFinalizationTime.AddHours(2.0)).Subscribe(_ => r_IsFinalized = false);

        RecordService.Instance.Update += RecordService_Update;
    });
}
/// <summary>
/// A 90-day repeat interval starting 2005-06-01 09:30:17 UTC should place the
/// fifth fire time exactly 360 days (four intervals) after the start.
/// </summary>
public void TestDailyIntervalGetFireTimeAfter()
{
    DateTimeOffset startTime = new DateTimeOffset(2005, 6, 1, 9, 30, 17, TimeSpan.Zero);

    CalendarIntervalTriggerImpl dailyTrigger = new CalendarIntervalTriggerImpl();
    dailyTrigger.StartTimeUtc = startTime;
    dailyTrigger.RepeatIntervalUnit = IntervalUnit.Day;
    dailyTrigger.RepeatInterval = 90; // every ninety days

    // jump 360 days (4 intervals)
    DateTimeOffset expectedFifthFireTime = startTime.AddDays(360);

    IList<DateTimeOffset> fireTimes = TriggerUtils.ComputeFireTimes(dailyTrigger, null, 6);
    Assert.AreEqual(expectedFifthFireTime, fireTimes[4], "Day increment result not as expected.");
}
/// <summary>
/// An AnnualCalendar configured for Eastern time should evaluate instants by
/// their EST-local date: midnight 11/5 at -04:00 is still 11/4 in EST and is
/// therefore excluded; the next included time is midnight 11/5 EST.
/// </summary>
public void TestAnnualCalendarTimeZone()
{
    TimeZoneInfo eastern = TimeZoneInfo.FindSystemTimeZoneById("Eastern Standard Time");
    AnnualCalendar calendar = new AnnualCalendar { TimeZone = eastern };

    DateTimeOffset excludedDay = new DateTimeOffset(2012, 11, 4, 0, 0, 0, TimeSpan.Zero);
    calendar.SetDayExcluded(excludedDay, true);

    // 11/5/2012 12:00:00 AM -04:00 translate into 11/4/2012 11:00:00 PM -05:00 (EST)
    DateTimeOffset date = new DateTimeOffset(2012, 11, 5, 0, 0, 0, TimeSpan.FromHours(-4));
    Assert.IsFalse(calendar.IsTimeIncluded(date), "date was expected to not be included.");
    Assert.IsTrue(calendar.IsTimeIncluded(date.AddDays(1)));

    DateTimeOffset expectedNextAvailable = new DateTimeOffset(2012, 11, 5, 0, 0, 0, TimeSpan.FromHours(-5));
    DateTimeOffset actualNextAvailable = calendar.GetNextIncludedTimeUtc(date);
    Assert.AreEqual(expectedNextAvailable, actualNextAvailable);
}
/// <summary>
/// With an explicit JSON serializer configured, complex scalar values and
/// exception details should be expanded into real JSON objects in the bulk
/// payload rather than ToString()'d.
/// </summary>
public async Task WhenPassingASerializer_ShouldExpandToJson()
{
    try
    {
        await new HttpClient().GetStringAsync("http://i.do.not.exist");
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var template = new MessageTemplateParser().Parse("{Song}++");
        _options.Serializer = new ElasticsearchJsonNetSerializer();
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2}))
            };

            sink.Emit(new LogEvent(timestamp, LogEventLevel.Information, null, template, properties));
            sink.Emit(new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties));
        }

        _seenHttpPosts.Should().NotBeEmpty().And.HaveCount(1);
        var bulkJsonPieces = _seenHttpPosts.First().Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        bulkJsonPieces.Should().HaveCount(4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");
        //since we pass a serializer objects should serialize as json object and not using their
        //tostring implemenation
        //DO NOTE that you cant send objects as scalar values through Logger.*("{Scalar}", {});
        bulkJsonPieces[3].Should().Contain("Complex\":{");
        bulkJsonPieces[3].Should().Contain("exceptions\":[{");
    }
}
/// <summary>
/// With a serializer configured on the sink, complex scalar values and
/// exception details should appear as expanded JSON objects in the bulk
/// payload rather than ToString() output.
/// </summary>
public async Task WhenPassingASerializer_ShouldExpandToJson()
{
    try
    {
        await this.ThrowAsync();
    }
    catch (Exception e)
    {
        var timestamp = new DateTimeOffset(2013, 05, 28, 22, 10, 20, 666, TimeSpan.FromHours(10));
        var template = new MessageTemplateParser().Parse("{Song}++");
        using (var sink = new ElasticsearchSink(_options))
        {
            var properties = new List<LogEventProperty>
            {
                new LogEventProperty("Song", new ScalarValue("New Macabre")),
                new LogEventProperty("Complex", new ScalarValue(new { A = 1, B = 2}))
            };

            var firstEvent = new LogEvent(timestamp, LogEventLevel.Information, null, template, properties);
            //one off
            sink.Emit(firstEvent);
            sink.Emit(firstEvent);

            sink.Emit(new LogEvent(timestamp.AddDays(2), LogEventLevel.Information, e, template, properties));
        }

        var bulkJsonPieces = this.AssertSeenHttpPosts(_seenHttpPosts, 4);
        bulkJsonPieces[0].Should().Contain(@"""_index"":""logstash-2013.05.28");
        bulkJsonPieces[1].Should().Contain("New Macabre");
        bulkJsonPieces[1].Should().NotContain("Properties\"");
        bulkJsonPieces[2].Should().Contain(@"""_index"":""logstash-2013.05.30");
        //since we pass a serializer objects should serialize as json object and not using their
        //tostring implemenation
        //DO NOTE that you cant send objects as scalar values through Logger.*("{Scalar}", {});
        bulkJsonPieces[3].Should().Contain("Complex\":{");
        bulkJsonPieces[3].Should().Contain("exceptions\":[{");
    }
}
/// <summary>
/// Calendar-interval day increments should produce the expected times across
/// both the spring and fall daylight-savings transitions.
/// </summary>
public void TestDaylightSavingsTransitions()
{
    // Pick a day before a daylight savings transition...
    DateTimeOffset springStart = DateBuilder.DateOf(9, 30, 17, 12, 3, 2010);
    var springTrigger = new CalendarIntervalTriggerImpl
    {
        StartTimeUtc = springStart,
        RepeatIntervalUnit = IntervalUnit.Day,
        RepeatInterval = 5 // every 5 days
    };
    IList<DateTimeOffset> fireTimes = TriggerUtils.ComputeFireTimes(springTrigger, null, 6);
    // The third fire time is two intervals (10 days) after the start.
    Assert.AreEqual(springStart.AddDays(10), fireTimes[2], "Day increment result not as expected over spring daylight savings transition.");

    // Pick a day before a daylight savings transition...
    DateTimeOffset fallStart = new DateTimeOffset(2010, 10, 31, 9, 30, 17, TimeSpan.Zero);
    var fallTrigger = new CalendarIntervalTriggerImpl
    {
        StartTimeUtc = fallStart,
        RepeatIntervalUnit = IntervalUnit.Day,
        RepeatInterval = 5 // every 5 days
    };
    fireTimes = TriggerUtils.ComputeFireTimes(fallTrigger, null, 6);
    // The fourth fire time is three intervals (15 days) after the start.
    Assert.AreEqual(fallStart.AddDays(15), fireTimes[3], "Day increment result not as expected over fall daylight savings transition.");
}
/// <summary>
/// Scans (start, end] in one-day steps for the first change in UTC offset or
/// daylight flag relative to <paramref name="start"/>, then binary-searches
/// the day containing the change down to a single tick. Returns the instant
/// of the transition, or null if none occurs — or if the only transition
/// found lands exactly on <paramref name="end"/>.
/// </summary>
private static DateTimeOffset? GetNextTransition(TimeZoneInfo zone, DateTimeOffset start, DateTimeOffset end)
{
    TimeSpan startOffset = zone.GetUtcOffset(start);
    bool startDaylight = zone.IsDaylightSavingTime(start);

    // True when the zone behaves differently at t than it did at start.
    Func<DateTimeOffset, bool> differsFromStart =
        t => zone.GetUtcOffset(t) != startOffset || zone.IsDaylightSavingTime(t) != startDaylight;

    for (DateTimeOffset probe = start.AddDays(1); probe <= end; probe = probe.AddDays(1))
    {
        if (!differsFromStart(probe))
        {
            continue;
        }

        // The transition lies in (probe - 1 day, probe]; binary search for the
        // earliest tick at which the difference becomes visible.
        long hi = probe.Ticks;              // known to differ from start
        long lo = probe.AddDays(-1).Ticks;  // known to match start
        while (hi > lo + 1)
        {
            long mid = (hi + lo) / 2;
            if (differsFromStart(new DateTimeOffset(mid, TimeSpan.Zero)))
            {
                hi = mid;  // difference already visible: look earlier
            }
            else
            {
                lo = mid;  // still matching start: look later
            }
        }

        // A transition exactly at the end point counts as "no transition".
        return hi == end.Ticks ? (DateTimeOffset?)null : new DateTimeOffset(hi, TimeSpan.Zero);
    }

    return null;
}
/// <summary>
/// DateTimeOffset arithmetic never re-evaluates the time zone: subtracting 60
/// days keeps the original offset even though the result crosses the late
/// October DST boundary in most parts of the world.
/// </summary>
public void ArithmeticAccrossDSTBoudaries ()
{
    DateTime localTime = new DateTime (633954393584177800, DateTimeKind.Local); //Dec 3, 2009, 12:16
    DateTimeOffset original = new DateTimeOffset (localTime);

    DateTimeOffset shifted = original.AddDays (-60);

    Assert.AreEqual (original.Offset, shifted.Offset);
    Assert.AreEqual (localTime.AddDays (-60), shifted.DateTime);
}
/// <summary>
/// Inserting and updating DateTimeOffset values must not depend on the thread
/// culture's date format: force en-US (MDY) while the database expects DMY
/// and verify the round-trip still succeeds.
/// </summary>
public void Can_insert_datetimeoffsets_regardless_of_current_culture()
{
    // datetimeoffset's default .ToString depends on culture, ensure we use one with MDY
    var previousCulture = System.Threading.Thread.CurrentThread.CurrentCulture;
    System.Threading.Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
    try
    {
        using (var db = OpenDbConnection())
        {
            // and set datestyle to DMY, crashing the insert when we're formatting it as the default on en-US
            db.ExecuteNonQuery("SET datestyle TO \"ISO, DMY\"");
            db.CreateTable<ModelWithDateTimeOffset>(true);

            var originalDate = new DateTimeOffset(2010, 11, 29, 1, 2, 3, new TimeSpan(0));
            db.Insert(new ModelWithDateTimeOffset { Id = 1, Value = originalDate });
            db.Update<ModelWithDateTimeOffset>(new { Value = originalDate.AddDays(30) }, r => r.Id == 1);

            var rows = db.Select<ModelWithDateTimeOffset>();
            Assert.That(rows, Has.Count.EqualTo(1));
            Assert.That(rows[0].Value, Is.EqualTo(originalDate.AddDays(30)));
        }
    }
    finally
    {
        // Always restore the culture so other tests are unaffected.
        System.Threading.Thread.CurrentThread.CurrentCulture = previousCulture;
    }
}
/// <summary>
/// Get a <see cref="DateTimeOffset" /> object that represents the given time, on
/// tomorrow's date.
/// </summary>
/// <param name="hour">The hour of day (validated by ValidateHour).</param>
/// <param name="minute">The minute (validated by ValidateMinute).</param>
/// <param name="second">The second (validated by ValidateSecond).</param>
/// <returns>Tomorrow's date at the requested wall-clock time, milliseconds zeroed, using the current local offset.</returns>
public static DateTimeOffset TomorrowAt(int hour, int minute, int second)
{
    ValidateSecond(second);
    ValidateMinute(minute);
    ValidateHour(hour);

    DateTimeOffset now = DateTimeOffset.Now;

    // Build today's date at the requested time, then advance one day.
    return new DateTimeOffset(
        now.Year,
        now.Month,
        now.Day,
        hour,
        minute,
        second,
        0,
        now.Offset).AddDays(1);
}
/// <summary>
/// Adds <paramref name="amountToAdd"/> units of the given interval type to
/// <paramref name="date"/>; weeks translate into 7-day increments.
/// </summary>
/// <exception cref="ArgumentException">Thrown for an unrecognized interval unit.</exception>
private static DateTimeOffset TranslatedAdd(DateTimeOffset date, IntervalUnit unit, int amountToAdd)
{
    // Cases ordered from smallest to largest unit for readability.
    switch (unit)
    {
        case IntervalUnit.Millisecond:
            return date.AddMilliseconds(amountToAdd);
        case IntervalUnit.Second:
            return date.AddSeconds(amountToAdd);
        case IntervalUnit.Minute:
            return date.AddMinutes(amountToAdd);
        case IntervalUnit.Hour:
            return date.AddHours(amountToAdd);
        case IntervalUnit.Day:
            return date.AddDays(amountToAdd);
        case IntervalUnit.Week:
            // A week is exactly seven days for scheduling purposes.
            return date.AddDays(amountToAdd * 7);
        case IntervalUnit.Month:
            return date.AddMonths(amountToAdd);
        case IntervalUnit.Year:
            return date.AddYears(amountToAdd);
        default:
            throw new ArgumentException("Unknown IntervalUnit");
    }
}
/// <summary>
/// Seeds the DateAndTimeOfDay database with five deterministic customers when
/// it is empty (nullable columns alternate null by id parity), then returns OK.
/// </summary>
public IHttpActionResult ResetDataSource()
{
    DateAndTimeOfDayContext db = new DateAndTimeOfDayContext();
    if (!db.Customers.Any())
    {
        DateTimeOffset seed = new DateTimeOffset(2014, 12, 24, 1, 2, 3, 4, new TimeSpan(-8, 0, 0));
        foreach (int e in Enumerable.Range(1, 5))
        {
            db.Customers.Add(new EfCustomer
            {
                Id = e,
                DateTime = seed.AddYears(e).AddHours(e).AddMilliseconds(e).DateTime,
                NullableDateTime = e % 2 == 0 ? (DateTime?)null : seed.AddHours(e * 5).AddMilliseconds(e * 5).DateTime,
                Offset = seed.AddMonths(e).AddHours(e).AddMilliseconds(e),
                NullableOffset = e % 3 == 0 ? (DateTimeOffset?)null : seed.AddDays(e).AddHours(e * 5)
            });
        }
        db.SaveChanges();
    }
    return Ok();
}
/// <summary>
/// Returns an enumerable collection of log blobs containing Analytics log records. The blobs are retrieved lazily.
/// </summary>
/// <param name="service">A <see cref="StorageService"/> enumeration value.</param>
/// <param name="startTime">A <see cref="DateTimeOffset"/> object representing the start of the time range for which logs should be retrieved.</param>
/// <param name="endTime">A <see cref="DateTimeOffset"/> object representing the end of the time range for which logs should be retrieved.</param>
/// <param name="operations">A <see cref="LoggingOperations"/> enumeration value that indicates the types of logging operations on which to filter the log blobs.</param>
/// <param name="details">A <see cref="BlobListingDetails"/> enumeration value that indicates whether or not blob metadata should be returned. Only <c>None</c> and <c>Metadata</c> are valid values. </param>
/// <param name="options">A <see cref="BlobRequestOptions"/> object that specifies additional options for the request.</param>
/// <param name="operationContext">An <see cref="OperationContext"/> object that represents the context for the current operation.</param>
/// <returns>An enumerable collection of objects that implement <see cref="ICloudBlob"/> and are retrieved lazily.</returns>
/// <remarks>Note that specifying a logging operation type for the <paramref name="operations"/> parameter will return any Analytics log blob that contains the specified logging operation,
/// even if that log blob also includes other types of logging operations. Also note that the only currently supported values for the <paramref name="details"/>
/// parameter are <c>None</c> and <c>Metadata</c>.</remarks>
public IEnumerable<ICloudBlob> ListLogs(StorageService service, DateTimeOffset startTime, DateTimeOffset? endTime, LoggingOperations operations, BlobListingDetails details, BlobRequestOptions options, OperationContext operationContext)
{
    CloudBlobDirectory logDirectory = this.GetLogDirectory(service);
    BlobListingDetails metadataDetails = details;

    // Round the start time down to the hour; log blobs are listed under
    // prefixes of increasing granularity, starting at yyyy/MM/dd/HH.
    DateTimeOffset utcStartTime = startTime.ToUniversalTime();
    DateTimeOffset dateCounter = new DateTimeOffset(utcStartTime.Ticks - (utcStartTime.Ticks % TimeSpan.TicksPerHour), utcStartTime.Offset);
    DateTimeOffset? utcEndTime = null;
    string endPrefix = null;

    // Ensure that the date range is correct.
    if (endTime.HasValue)
    {
        utcEndTime = endTime.Value.ToUniversalTime();
        endPrefix = logDirectory.Prefix + utcEndTime.Value.ToString("yyyy/MM/dd/HH", CultureInfo.InvariantCulture);
        if (utcStartTime > utcEndTime.Value)
        {
            string errorString = string.Format(CultureInfo.InvariantCulture, SR.StartTimeExceedsEndTime, startTime, endTime.Value);
            throw new ArgumentException(errorString);
        }
    }

    // Currently only support the ability to retrieve metadata on logs.
    if (details.HasFlag(BlobListingDetails.Copy) || details.HasFlag(BlobListingDetails.Snapshots) || details.HasFlag(BlobListingDetails.UncommittedBlobs))
    {
        throw new ArgumentException(SR.InvalidListingDetails);
    }

    // At least one LogType must be specified.
    if (operations == LoggingOperations.None)
    {
        throw new ArgumentException(SR.InvalidLoggingLevel);
    }

    // If metadata or a specific LogType is specified, metadata should be retrieved.
    if (details.HasFlag(BlobListingDetails.Metadata) || !operations.HasFlag(LoggingOperations.All))
    {
        metadataDetails = BlobListingDetails.Metadata;
    }

    // Check logs using an hour-based prefix until we reach a day boundary.
    while (dateCounter.Hour > 0)
    {
        string currentPrefix = logDirectory.Prefix + dateCounter.ToString("yyyy/MM/dd/HH", CultureInfo.InvariantCulture);
        IEnumerable<IListBlobItem> currentLogs = logDirectory.Container.ListBlobs(currentPrefix, true, metadataDetails, options, operationContext);
        foreach (ICloudBlob log in currentLogs)
        {
            // Stop the whole iteration once a blob past the end prefix appears;
            // otherwise yield only blobs of the requested log type(s).
            if (!utcEndTime.HasValue || string.Compare(log.Parent.Prefix, endPrefix) <= 0)
            {
                if (IsCorrectLogType(log, operations))
                {
                    yield return log;
                }
            }
            else
            {
                yield break;
            }
        }

        dateCounter = dateCounter.AddHours(1);
        // Stop once the counter moves past the current time (one-hour allowance).
        if (dateCounter > DateTimeOffset.UtcNow.AddHours(1))
        {
            yield break;
        }
    }

    // Check logs using a day-based prefix until we reach a month boundary.
    while (dateCounter.Day > 1)
    {
        string currentPrefix = logDirectory.Prefix + dateCounter.ToString("yyyy/MM/dd", CultureInfo.InvariantCulture);
        IEnumerable<IListBlobItem> currentLogs = logDirectory.Container.ListBlobs(currentPrefix, true, metadataDetails, options, operationContext);
        foreach (ICloudBlob log in currentLogs)
        {
            if (!utcEndTime.HasValue || string.Compare(log.Parent.Prefix, endPrefix) <= 0)
            {
                if (IsCorrectLogType(log, operations))
                {
                    yield return log;
                }
            }
            else
            {
                yield break;
            }
        }

        dateCounter = dateCounter.AddDays(1);
        if (dateCounter > DateTimeOffset.UtcNow.AddHours(1))
        {
            yield break;
        }
    }

    // Check logs using a month-based prefix until we reach a year boundary.
    while (dateCounter.Month > 1)
    {
        string currentPrefix = logDirectory.Prefix + dateCounter.ToString("yyyy/MM", CultureInfo.InvariantCulture);
        IEnumerable<IListBlobItem> currentLogs = logDirectory.Container.ListBlobs(currentPrefix, true, metadataDetails, options, operationContext);
        foreach (ICloudBlob log in currentLogs)
        {
            if (!utcEndTime.HasValue || string.Compare(log.Parent.Prefix, endPrefix) <= 0)
            {
                if (IsCorrectLogType(log, operations))
                {
                    yield return log;
                }
            }
            else
            {
                yield break;
            }
        }

        dateCounter = dateCounter.AddMonths(1);
        if (dateCounter > DateTimeOffset.UtcNow.AddHours(1))
        {
            yield break;
        }
    }

    // Continue using a year-based prefix.
    while (true)
    {
        string currentPrefix = logDirectory.Prefix + dateCounter.ToString("yyyy", CultureInfo.InvariantCulture);
        IEnumerable<IListBlobItem> currentLogs = logDirectory.Container.ListBlobs(currentPrefix, true, metadataDetails, options, operationContext);
        foreach (ICloudBlob log in currentLogs)
        {
            if (!utcEndTime.HasValue || string.Compare(log.Parent.Prefix, endPrefix) <= 0)
            {
                if (IsCorrectLogType(log, operations))
                {
                    yield return log;
                }
            }
            else
            {
                yield break;
            }
        }

        dateCounter = dateCounter.AddYears(1);
        if (dateCounter > DateTimeOffset.UtcNow.AddHours(1))
        {
            yield break;
        }
    }
}