/// <summary>
/// Benchmarks a sequential read of <c>Constants.ReadItems</c> items (ids 0..N-1 ascending).
/// </summary>
/// <param name="perfTracker">Collects per-operation latency samples.</param>
/// <returns>The aggregated performance record for the whole read.</returns>
public override PerformanceRecord ReadSequential(PerfTracker perfTracker)
{
    // Fix: removed a stray empty statement ("; ;") left after the LINQ expression.
    IEnumerable<uint> sequentialIds = Enumerable.Range(0, Constants.ReadItems).Select(x => (uint)x);
    return Read(string.Format("[FoundationDB] sequential read ({0} items)", Constants.ReadItems), sequentialIds, perfTracker);
}
/// <summary>
/// Benchmarks random writes performed concurrently on <paramref name="numberOfThreads"/> threads.
/// </summary>
/// <param name="elapsedMilliseconds">Receives the wall-clock time of the whole parallel run.</param>
public override List<PerformanceRecord> WriteParallelRandom(IEnumerable<TestData> data, PerfTracker perfTracker, int numberOfThreads, out long elapsedMilliseconds)
{
    // Delegate to the shared parallel-write helper with the standard transaction sizing.
    var operation = string.Format("[FoundationDB] parallel random write ({0} items)", Constants.ItemsPerTransaction);
    return WriteParallel(operation, data, Constants.ItemsPerTransaction, Constants.WriteTransactions, perfTracker, numberOfThreads, out elapsedMilliseconds);
}
/// <summary>
/// Runs the read benchmark concurrently; every worker re-reads the full id set
/// using the shared connection string.
/// </summary>
private PerformanceRecord ReadParallel(string operation, IEnumerable<uint> ids, PerfTracker perfTracker, int numberOfThreads)
{
    return ExecuteReadWithParallel(
        operation,
        ids,
        numberOfThreads,
        () => ReadInternal(ids, perfTracker, connectionString));
}
/// <summary>
/// Benchmarks a parallel read over ascending ids 0..ReadItems-1.
/// </summary>
public override PerformanceRecord ReadParallelSequential(PerfTracker perfTracker, int numberOfThreads)
{
    IEnumerable<uint> sequentialIds = Enumerable.Range(0, Constants.ReadItems).Select(i => (uint)i);
    var operation = string.Format("[Voron] parallel sequential read ({0} items)", Constants.ReadItems);
    return ReadParallel(operation, sequentialIds, perfTracker, numberOfThreads);
}
/// <summary>
/// Opens a fresh Voron storage environment, flushes the log to the data file so the
/// read measures data-file access, then times a single pass over <paramref name="ids"/>.
/// </summary>
private PerformanceRecord Read(string operation, IEnumerable<uint> ids, PerfTracker perfTracker)
{
    var options = StorageEnvironmentOptions.ForPath(dataPath);
    options.ManualFlushing = true; // flush explicitly below so timing excludes journal replay
    using (var env = new StorageEnvironment(options))
    {
        env.FlushLogToDataFile();
        var timer = Stopwatch.StartNew();
        long bytesRead = ReadInternal(ids, perfTracker, env);
        timer.Stop();
        return new PerformanceRecord
        {
            Bytes = bytesRead,
            Operation = operation,
            Time = DateTime.Now,
            Duration = timer.ElapsedMilliseconds,
            ProcessedItems = ids.Count()
        };
    }
}
/// <summary>
/// Synchronous shim over <see cref="ReadInternalAsync"/>; returns total bytes read.
/// </summary>
private static long ReadInternal(IEnumerable<uint> ids, PerfTracker perfTracker, FdbDatabase db)
{
    // GetAwaiter().GetResult() rethrows the original exception rather than
    // wrapping it in an AggregateException the way .Result does.
    return ReadInternalAsync(ids, perfTracker, db).GetAwaiter().GetResult();
}
/// <summary>
/// Synchronous shim over <see cref="ReadAsync"/>.
/// </summary>
private PerformanceRecord Read(string operation, IEnumerable<uint> ids, PerfTracker perfTracker)
{
    // GetAwaiter().GetResult() surfaces the original exception instead of an AggregateException.
    return ReadAsync(operation, ids, perfTracker).GetAwaiter().GetResult();
}
/// <summary>
/// Synchronous shim over <see cref="WriteParallelAsync"/>; unpacks the (records, elapsed) tuple.
/// </summary>
/// <param name="elapsedMilliseconds">Receives the wall-clock duration of the parallel run.</param>
private List<PerformanceRecord> WriteParallel(string operation, IEnumerable<TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker, int numberOfThreads, out long elapsedMilliseconds)
{
    // GetAwaiter().GetResult() avoids .Result's AggregateException wrapping.
    Tuple<List<PerformanceRecord>, long> result =
        WriteParallelAsync(operation, data, itemsPerTransaction, numberOfTransactions, perfTracker, numberOfThreads)
            .GetAwaiter().GetResult();
    elapsedMilliseconds = result.Item2;
    return result.Item1;
}
/// <summary>
/// Runs the read benchmark on several threads, all sharing one open SQLite connection.
/// </summary>
private PerformanceRecord ReadParallel(string operation, IEnumerable<uint> ids, PerfTracker perfTracker, int numberOfThreads)
{
    using (var connection = new SQLiteConnection(connectionString))
    {
        connection.Open();
        return ExecuteReadWithParallel(
            operation,
            ids,
            numberOfThreads,
            () => ReadInternal(ids, perfTracker, connection));
    }
}
/// <summary>
/// Writes <paramref name="numberOfTransactions"/> batches of
/// <paramref name="itemsPerTransaction"/> rows each into SQLite, timing each transaction.
/// </summary>
/// <returns>One performance record per committed transaction.</returns>
private List <PerformanceRecord> WriteInternal(string operation, IEnumerator <TestData> enumerator, long itemsPerTransaction, long numberOfTransactions, PerfTracker perfTracker)
{
    var sw = new Stopwatch();
    byte[] valueToWrite = null;
    var records = new List <PerformanceRecord>();
    using (var connection = new SQLiteConnection(connectionString))
    {
        connection.Open();
        // Fix: the original allocated a new SQLiteCommand for every row. Prepare the
        // INSERT once and rebind the parameter values per row instead.
        using (var command = new SQLiteCommand("INSERT INTO Items (Id, Value) VALUES (@id, @value)", connection))
        {
            var idParameter = command.Parameters.Add("@id", DbType.Int32, 4);
            var valueParameter = command.Parameters.Add("@value", DbType.Binary);
            for (var transactions = 0; transactions < numberOfTransactions; transactions++)
            {
                sw.Restart();
                using (var tx = connection.BeginTransaction())
                {
                    command.Transaction = tx;
                    for (var i = 0; i < itemsPerTransaction; i++)
                    {
                        enumerator.MoveNext();
                        valueToWrite = GetValueToWrite(valueToWrite, enumerator.Current.ValueSize);
                        idParameter.Value = enumerator.Current.Id;
                        valueParameter.Value = valueToWrite;
                        var affectedRows = command.ExecuteNonQuery();
                        Debug.Assert(affectedRows == 1);
                    }
                    tx.Commit();
                }
                sw.Stop();
                perfTracker.Record(sw.ElapsedMilliseconds);
                records.Add(new PerformanceRecord
                {
                    Operation = operation,
                    Time = DateTime.Now,
                    Duration = sw.ElapsedMilliseconds,
                    ProcessedItems = itemsPerTransaction
                });
            }
        }
    }
    return records;
}
/// <summary>
/// Recreates the database, then runs the write benchmark across several threads
/// via the shared parallel-execution helper.
/// </summary>
private List <PerformanceRecord> WriteParallel(string operation, IEnumerable <TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker, int numberOfThreads, out long elapsedMilliseconds)
{
    // Start from a clean database so every parallel run measures the same baseline.
    NewDatabase();
    return ExecuteWriteWithParallel(
        data,
        numberOfTransactions,
        itemsPerTransaction,
        numberOfThreads,
        (enumerator, itemsPerTx, txCount) => WriteInternal(operation, enumerator, itemsPerTx, txCount, perfTracker),
        out elapsedMilliseconds);
}
/// <summary>
/// Recreates the database and runs a single-threaded write benchmark over <paramref name="data"/>.
/// </summary>
private List <PerformanceRecord> Write(string operation, IEnumerable <TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker)
{
    NewDatabase();
    return WriteInternal(operation, data.GetEnumerator(), itemsPerTransaction, numberOfTransactions, perfTracker);
}
// Draws the ImGui "Inspector" debug window for the objects most recently picked in the
// scene (_hits). Renders, in order: camera/position info, reflected views of pinned
// objects (_fixedObjects), game state, engine stats (perf, audio, device objects, input,
// event contexts, textures), debug/engine flag toggles, cursor/camera positions,
// registered services, the event exchange, and finally one tree node per hit target.
// Tracks whether any reflected node is hovered; if nothing is hovered this frame and a
// previously hovered item has a registered behaviour, fires a Blur callback for it.
// NOTE(review): the ImGui Begin/End, BeginChild/EndChild and TreeNode/TreePop pairing
// is strictly ordered (including the early-return after the Close button) — do not
// reorder statements here without re-checking every pair.
void RenderDialog() { bool anyHovered = false; if (_hits == null) { return; } var state = Resolve <IGameState>(); var window = Resolve <IWindowManager>(); if (state == null) { return; } var scene = Resolve <ISceneManager>().ActiveScene; Vector3 cameraPosition = scene.Camera.Position; Vector3 cameraTilePosition = cameraPosition; var map = Resolve <IMapManager>().Current; if (map != null) { cameraTilePosition /= map.TileSize; } Vector3 cameraDirection = scene.Camera.LookDirection; float cameraMagnification = scene.Camera.Magnification; ImGui.Begin("Inspector"); ImGui.BeginChild("Inspector"); if (ImGui.Button("Close")) { _hits = null; ImGui.EndChild(); ImGui.End(); return; } void BoolOption(string name, Func <bool> getter, Action <bool> setter) { bool value = getter(); bool initialValue = value; ImGui.Checkbox(name, ref value); if (value != initialValue) { setter(value); } } if (ImGui.TreeNode("Fixed")) { for (int i = 0; i < _fixedObjects.Count; i++) { var thing = _fixedObjects[i]; Reflector.ReflectedObject reflected = Reflector.Reflect($"Fixed{i}", thing, null); anyHovered |= RenderNode(reflected, true); } ImGui.TreePop(); } anyHovered |= RenderNode(Reflector.Reflect("State", state, null), false, false); if (ImGui.TreeNode("Stats")) { if (ImGui.Button("Clear")) { PerfTracker.Clear(); } if (ImGui.TreeNode("Perf")) { ImGui.BeginGroup(); ImGui.Text(Resolve <IEngine>().FrameTimeText); var(descriptions, stats) = PerfTracker.GetFrameStats(); ImGui.Columns(2); ImGui.SetColumnWidth(0, 320); foreach (var description in descriptions) { ImGui.Text(description); } ImGui.NextColumn(); foreach (var stat in stats) { ImGui.Text(stat); } ImGui.Columns(1); ImGui.EndGroup(); ImGui.TreePop(); } if (ImGui.TreeNode("Audio")) { var audio = Resolve <IAudioManager>(); if (audio == null) { ImGui.Text("Audio Disabled"); } else { foreach (var sound in audio.ActiveSounds) { ImGui.Text(sound); } } ImGui.TreePop(); } if (ImGui.TreeNode("DeviceObjects")) { ImGui.Text(Resolve 
<IDeviceObjectManager>()?.Stats()); ImGui.TreePop(); } if (ImGui.TreeNode("Input")) { var im = Resolve <IInputManager>(); ImGui.Text($"Input Mode: {im.InputMode}"); ImGui.Text($"Mouse Mode: {im.MouseMode}"); ImGui.Text($"Input Mode Stack: {string.Join(", ", im.InputModeStack)}"); ImGui.Text($"Mouse Mode Stack: {string.Join(", ", im.MouseModeStack)}"); if (ImGui.TreeNode("Bindings")) { var ib = Resolve <IInputBinder>(); foreach (var mode in ib.Bindings) { ImGui.Text(mode.Item1.ToString()); foreach (var binding in mode.Item2) { ImGui.Text($"  {binding.Item1}: {binding.Item2}"); } } ImGui.TreePop(); } ImGui.TreePop(); } if (ImGui.TreeNode("Event Contexts")) { if (Resolve <IEventManager>() is EventChainManager em) { foreach (var context in em.DebugActiveContexts) { ImGui.Text(context.ToString()); } } ImGui.TreePop(); } if (ImGui.TreeNode("Textures")) { ImGui.Text(Resolve <ITextureManager>()?.Stats()); ImGui.TreePop(); } ImGui.TreePop(); } if (ImGui.TreeNode("Settings")) { var settings = Resolve <ISettings>(); ImGui.BeginGroup(); #if DEBUG
if (ImGui.TreeNode("Debug")) { void DebugFlagOption(DebugFlags flag) { BoolOption(flag.ToString(), () => settings.Debug.DebugFlags.HasFlag(flag), x => Raise(new DebugFlagEvent(x ? FlagOperation.Set : FlagOperation.Clear, flag))); } DebugFlagOption(DebugFlags.DrawPositions); DebugFlagOption(DebugFlags.HighlightTile); DebugFlagOption(DebugFlags.HighlightEventChainZones); DebugFlagOption(DebugFlags.HighlightCollision); DebugFlagOption(DebugFlags.ShowPaths); DebugFlagOption(DebugFlags.NoMapTileBoundingBoxes); DebugFlagOption(DebugFlags.ShowCursorHotspot); DebugFlagOption(DebugFlags.TraceAttachment); ImGui.TreePop(); }
#endif
if (ImGui.TreeNode("Engine")) { void EngineFlagOption(EngineFlags flag) { BoolOption(flag.ToString(), () => settings.Engine.Flags.HasFlag(flag), x => Raise(new EngineFlagEvent(x ? 
FlagOperation.Set : FlagOperation.Clear, flag))); } EngineFlagOption(EngineFlags.ShowBoundingBoxes); EngineFlagOption(EngineFlags.ShowCameraPosition); EngineFlagOption(EngineFlags.FlipDepthRange); EngineFlagOption(EngineFlags.FlipYSpace); EngineFlagOption(EngineFlags.VSync); EngineFlagOption(EngineFlags.HighlightSelection); EngineFlagOption(EngineFlags.UseCylindricalBillboards); EngineFlagOption(EngineFlags.RenderDepth); ImGui.TreePop(); } ImGui.EndGroup(); ImGui.TreePop(); } if (ImGui.TreeNode("Positions")) { var normPos = window.PixelToNorm(_mousePosition); var uiPos = window.NormToUi(normPos); uiPos.X = (int)uiPos.X; uiPos.Y = (int)uiPos.Y; Vector3?playerTilePos = Resolve <IParty>()?.WalkOrder.FirstOrDefault()?.GetPosition(); ImGui.Text($"Cursor Pix: {_mousePosition} UI: {uiPos} Scale: {window.GuiScale} PixSize: {window.Size} Norm: {normPos}"); ImGui.Text($"Camera World: {cameraPosition} Tile: {cameraTilePosition} Dir: {cameraDirection} Mag: {cameraMagnification}"); ImGui.Text($"TileSize: {map?.TileSize} PlayerTilePos: {playerTilePos}"); ImGui.TreePop(); } if (ImGui.TreeNode("Services")) { var reflected = Reflector.Reflect(null, _services, null); if (reflected.SubObjects != null) { foreach (var child in reflected.SubObjects.OrderBy(x => x.Name)) { anyHovered |= RenderNode(child, false); } } ImGui.TreePop(); } if (ImGui.TreeNode("Exchange")) { var reflected = Reflector.Reflect(null, Exchange, null); if (reflected.SubObjects != null) { foreach (var child in reflected.SubObjects) { anyHovered |= RenderNode(child, false); } } ImGui.TreePop(); } int hitId = 0; foreach (var hit in _hits) { if (ImGui.TreeNode($"{hitId} {hit.Target}")) { var reflected = Reflector.Reflect(null, hit.Target, null); if (reflected.SubObjects != null) { foreach (var child in reflected.SubObjects) { anyHovered |= RenderNode(child, false); } } ImGui.TreePop(); } hitId++; } ImGui.EndChild(); ImGui.End(); if (!anyHovered && _lastHoveredItem?.Object != null && 
_behaviours.TryGetValue(_lastHoveredItem.Object.GetType(), out var callback)) { callback(DebugInspectorAction.Blur, _lastHoveredItem); } /* * * Window: Begin & End * Menus: BeginMenuBar, MenuItem, EndMenuBar * Colours: ColorEdit4 * Graph: PlotLines * Text: Text, TextColored * ScrollBox: BeginChild, EndChild * */ }
// Static initialiser: registers the code-page encoding provider and caches the
// DOS code page 850 encoding used by the original game's data files.
// (Reformatted: on the collapsed line the trailing "//" comment swallowed the
// remaining statements.)
static FormatUtil()
{
    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); // Required for code page 850 support in .NET Core
    PerfTracker.StartupEvent("Registered encodings");
    AlbionEncoding = Encoding.GetEncoding(850);
}
// Per-frame sprite renderer preparation: for every active MultiSprite this (1) ensures a
// pipeline exists for its shader key, (2) (re)creates its per-instance vertex buffer when
// the required size changed, (3) uploads instance data if dirty, (4) prepares the texture
// and lazily builds a per-sprite ResourceSet, then yields the sprite for rendering.
// Device objects are cached in IDeviceObjectManager keyed on (sprite, sprite) /
// (sprite, textureView); frame counters track each create/update for the perf overlay.
// NOTE(review): this is a lazily-evaluated iterator — the work above only happens when the
// caller enumerates the returned sequence; Cleanup() runs after the final yield.
public IEnumerable <IRenderable> UpdatePerFrameResources(IRendererContext context, IEnumerable <IRenderable> renderables) { var c = (VeldridRendererContext)context; var cl = c.CommandList; var gd = c.GraphicsDevice; var sc = c.SceneContext; ITextureManager textureManager = Resolve <ITextureManager>(); IDeviceObjectManager objectManager = Resolve <IDeviceObjectManager>(); EngineFlags engineFlags = Resolve <IEngineSettings>().Flags; foreach (var renderable in renderables) { var sprite = (MultiSprite)renderable; if (sprite.ActiveInstances == 0) { continue; } var shaderKey = new SpriteShaderKey(sprite, engineFlags); if (!_pipelines.ContainsKey(shaderKey)) { _pipelines.Add(shaderKey, BuildPipeline(gd, sc, shaderKey)); } uint bufferSize = (uint)sprite.Instances.Length * SpriteInstanceData.StructSize; var buffer = objectManager.Get <DeviceBuffer>((sprite, sprite)); if (buffer?.SizeInBytes != bufferSize) { buffer = gd.ResourceFactory.CreateBuffer(new BufferDescription(bufferSize, BufferUsage.VertexBuffer)); buffer.Name = $"B_SpriteInst:{sprite.Name}"; PerfTracker.IncrementFrameCounter("Create InstanceBuffer"); objectManager.Set((sprite, sprite), buffer); } if (sprite.InstancesDirty) { cl.UpdateBuffer(buffer, 0, sprite.Instances); PerfTracker.IncrementFrameCounter("Update InstanceBuffers"); } textureManager?.PrepareTexture(sprite.Key.Texture, context); TextureView textureView = (TextureView)textureManager?.GetTexture(sprite.Key.Texture); var resourceSet = objectManager.Get <ResourceSet>((sprite, textureView)); if (resourceSet == null) { resourceSet = gd.ResourceFactory.CreateResourceSet(new ResourceSetDescription( _perSpriteResourceLayout, gd.PointSampler, textureView, _uniformBuffer)); resourceSet.Name = $"RS_Sprite:{sprite.Key.Texture.Name}"; PerfTracker.IncrementFrameCounter("Create ResourceSet"); objectManager.Set((sprite, textureView), resourceSet); } sprite.InstancesDirty = false; yield return(sprite); } Resolve <ISpriteManager>().Cleanup(); }
/// <summary>
/// Synchronous shim over <see cref="WriteInternalAsync"/>.
/// </summary>
private List <PerformanceRecord> WriteInternal(string operation, IEnumerator <TestData> enumerator, long itemsPerTransaction, long numberOfTransactions, PerfTracker perfTracker, FdbDatabase db)
{
    // GetAwaiter().GetResult() avoids .Result's AggregateException wrapping.
    return WriteInternalAsync(operation, enumerator, itemsPerTransaction, numberOfTransactions, perfTracker, db)
        .GetAwaiter().GetResult();
}
/// <summary>
/// Synchronous shim over <see cref="WriteAsync"/>.
/// </summary>
private List<PerformanceRecord> Write(string operation, IEnumerable<TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker)
{
    // GetAwaiter().GetResult() surfaces the original exception instead of an AggregateException.
    return WriteAsync(operation, data, itemsPerTransaction, numberOfTransactions, perfTracker)
        .GetAwaiter().GetResult();
}
/// <summary>
/// Benchmarks a single-threaded sequential write using the standard transaction sizing.
/// </summary>
public override List <PerformanceRecord> WriteSequential(IEnumerable <TestData> data, PerfTracker perfTracker)
{
    var operation = string.Format("[SQLite] sequential write ({0} items)", Constants.ItemsPerTransaction);
    return Write(operation, data, Constants.ItemsPerTransaction, Constants.WriteTransactions, perfTracker);
}
/// <summary>
/// Writes <paramref name="numberOfTransactions"/> FoundationDB transactions of
/// <paramref name="itemsPerTransaction"/> keys each, recording the latency of every commit.
/// </summary>
/// <returns>One performance record per committed transaction.</returns>
private async Task<List<PerformanceRecord>> WriteInternalAsync(string operation, IEnumerator<TestData> enumerator, long itemsPerTransaction, long numberOfTransactions, PerfTracker perfTracker, FdbDatabase db)
{
    var stopwatch = new Stopwatch();
    byte[] payload = null;
    var records = new List<PerformanceRecord>();
    var location = db.GlobalSpace;
    stopwatch.Restart();
    for (int txIndex = 0; txIndex < numberOfTransactions; txIndex++)
    {
        stopwatch.Restart();
        using (IFdbTransaction tx = db.BeginTransaction())
        {
            for (int item = 0; item < itemsPerTransaction; item++)
            {
                enumerator.MoveNext();
                payload = GetValueToWrite(payload, enumerator.Current.ValueSize);
                tx.Set(location.Pack(enumerator.Current.Id), Slice.Create(payload));
            }
            await tx.CommitAsync();
            // Sample the latency as soon as the commit completes.
            perfTracker.Record(stopwatch.ElapsedMilliseconds);
        }
        stopwatch.Stop();
        records.Add(new PerformanceRecord
        {
            Operation = operation,
            Time = DateTime.Now,
            Duration = stopwatch.ElapsedMilliseconds,
            ProcessedItems = itemsPerTransaction
        });
    }
    stopwatch.Stop();
    return records;
}
/// <summary>
/// Benchmarks a sequential read of ids 0..ReadItems-1 against SQLite.
/// </summary>
public override PerformanceRecord ReadSequential(PerfTracker perfTracker)
{
    // Fix: removed a stray empty statement (";;") after the LINQ expression.
    var sequentialIds = Enumerable.Range(0, Constants.ReadItems).Select(x => (uint)x);
    return Read(string.Format("[SQLite] sequential read ({0} items)", Constants.ReadItems), sequentialIds, perfTracker);
}
/// <summary>
/// Opens the database once, then fans the read benchmark out across
/// <paramref name="numberOfThreads"/> workers sharing that handle.
/// </summary>
private async Task<PerformanceRecord> ReadParallelAsync(string operation, IEnumerable<uint> ids, PerfTracker perfTracker, int numberOfThreads)
{
    using (FdbDatabase db = await OpenDatabaseAsync())
    {
        return ExecuteReadWithParallel(
            operation,
            ids,
            numberOfThreads,
            () => ReadInternal(ids, perfTracker, db));
    }
}
/// <summary>
/// Benchmarks a parallel sequential read (ids 0..ReadItems-1) against SQLite.
/// </summary>
public override PerformanceRecord ReadParallelSequential(PerfTracker perfTracker, int numberOfThreads)
{
    // Fix: removed a stray empty statement (";;") after the LINQ expression.
    var sequentialIds = Enumerable.Range(0, Constants.ReadItems).Select(x => (uint)x);
    return ReadParallel(string.Format("[SQLite] parallel sequential read ({0} items)", Constants.ReadItems), sequentialIds, perfTracker, numberOfThreads);
}
/// <summary>
/// Benchmarks a single-threaded sequential write against FoundationDB.
/// </summary>
public override List<PerformanceRecord> WriteSequential(IEnumerable<TestData> data, PerfTracker perfTracker)
{
    var operation = string.Format("[FoundationDB] sequential write ({0} items)", Constants.ItemsPerTransaction);
    return Write(operation, data, Constants.ItemsPerTransaction, Constants.WriteTransactions, perfTracker);
}
/// <summary>
/// Times a full read of <paramref name="ids"/> (including opening the database)
/// and returns a performance record for the run.
/// </summary>
private async Task <PerformanceRecord> ReadAsync(string operation, IEnumerable <uint> ids, PerfTracker perfTracker)
{
    Stopwatch sw = Stopwatch.StartNew();
    long bytesRead;
    using (FdbDatabase db = await OpenDatabaseAsync())
    {
        // Fix: the byte count returned by ReadInternalAsync was previously discarded,
        // so Bytes was never populated (unlike the other backends' Read methods).
        bytesRead = await ReadInternalAsync(ids, perfTracker, db);
    }
    sw.Stop();
    return new PerformanceRecord
    {
        Bytes = bytesRead,
        Operation = operation,
        Time = DateTime.Now,
        Duration = sw.ElapsedMilliseconds,
        ProcessedItems = ids.Count()
    };
}
/// <summary>
/// Writes <paramref name="numberOfBatches"/> Voron write-batches of
/// <paramref name="itemsPerBatch"/> items each, timing every batch and tracking bytes written.
/// </summary>
private List<PerformanceRecord> WriteInternalBatch(
    string operation,
    IEnumerator<TestData> enumerator,
    long itemsPerBatch,
    long numberOfBatches,
    PerfTracker perfTracker,
    StorageEnvironment env)
{
    byte[] payload = null;
    var results = new List<PerformanceRecord>();
    var timer = new Stopwatch();
    for (var batchIndex = 0; batchIndex < numberOfBatches; batchIndex++)
    {
        timer.Restart();
        long bytesWritten = 0;
        using (var batch = new WriteBatch())
        {
            for (var item = 0; item < itemsPerBatch; item++)
            {
                enumerator.MoveNext();
                payload = GetValueToWrite(payload, enumerator.Current.ValueSize);
                bytesWritten += payload.Length;
                // Keys are the 16-digit zero-padded decimal form of the id.
                batch.Add(enumerator.Current.Id.ToString("0000000000000000"), new MemoryStream(payload), "Root");
            }
            env.Writer.Write(batch);
        }
        timer.Stop();
        perfTracker.Record(timer.ElapsedMilliseconds);
        results.Add(new PerformanceRecord
        {
            Bytes = bytesWritten,
            Operation = operation,
            Time = DateTime.Now,
            Duration = timer.ElapsedMilliseconds,
            ProcessedItems = itemsPerBatch
        });
    }
    return results;
}
/// <summary>
/// Synchronous shim over <see cref="ReadParallelAsync"/>.
/// </summary>
private PerformanceRecord ReadParallel(string operation, IEnumerable <uint> ids, PerfTracker perfTracker, int numberOfThreads)
{
    // GetAwaiter().GetResult() avoids .Result's AggregateException wrapping.
    return ReadParallelAsync(operation, ids, perfTracker, numberOfThreads).GetAwaiter().GetResult();
}
/// <summary>
/// Reads every id inside one Voron read transaction, draining each value through a
/// scratch buffer, and returns the total number of bytes read.
/// </summary>
private static long ReadInternal(IEnumerable<uint> ids, PerfTracker perfTracker, StorageEnvironment env)
{
    // Renamed from "ms": this is a plain scratch byte[], not a MemoryStream.
    var buffer = new byte[4096];
    using (var tx = env.NewTransaction(TransactionFlags.Read))
    {
        var sw = Stopwatch.StartNew();
        long v = 0;
        foreach (var id in ids)
        {
            var key = id.ToString("0000000000000000");
            var readResult = tx.State.Root.Read(tx, key);
            if (readResult == null)
                continue; // Fix: a missing key previously caused a NullReferenceException.
            int reads;
            while ((reads = readResult.Reader.Read(buffer, 0, buffer.Length)) > 0)
            {
                v += reads;
            }
        }
        perfTracker.Record(sw.ElapsedMilliseconds);
        return v;
    }
}
/// <summary>
/// Synchronous shim over <see cref="ReadInternalAsync"/>; returns total bytes read.
/// </summary>
private static long ReadInternal(IEnumerable <uint> ids, PerfTracker perfTracker, FdbDatabase db)
{
    // GetAwaiter().GetResult() avoids .Result's AggregateException wrapping.
    return ReadInternalAsync(ids, perfTracker, db).GetAwaiter().GetResult();
}
/// <summary>
/// Benchmarks a parallel read over caller-supplied random ids.
/// </summary>
public override PerformanceRecord ReadParallelRandom(IEnumerable<uint> randomIds, PerfTracker perfTracker, int numberOfThreads)
{
    var operation = string.Format("[Voron] parallel random read ({0} items)", Constants.ReadItems);
    return ReadParallel(operation, randomIds, perfTracker, numberOfThreads);
}
/// <summary>
/// Benchmarks a single-threaded random write against FoundationDB.
/// </summary>
public override List <PerformanceRecord> WriteRandom(IEnumerable <TestData> data, PerfTracker perfTracker)
{
    var operation = string.Format("[FoundationDB] random write ({0} items)", Constants.ItemsPerTransaction);
    return Write(operation, data, Constants.ItemsPerTransaction, Constants.WriteTransactions, perfTracker);
}
/// <summary>
/// Commits <paramref name="numberOfTransactions"/> write transactions of
/// <paramref name="itemsPerTransaction"/> keys each and reports one record per transaction.
/// </summary>
private async Task <List <PerformanceRecord> > WriteInternalAsync(string operation, IEnumerator <TestData> enumerator, long itemsPerTransaction, long numberOfTransactions, PerfTracker perfTracker, FdbDatabase db)
{
    var timer = new Stopwatch();
    byte[] buffer = null;
    var perfRecords = new List <PerformanceRecord>();
    var location = db.GlobalSpace;
    timer.Restart();
    for (int txn = 0; txn < numberOfTransactions; txn++)
    {
        timer.Restart();
        using (IFdbTransaction tx = db.BeginTransaction())
        {
            for (int n = 0; n < itemsPerTransaction; n++)
            {
                enumerator.MoveNext();
                buffer = GetValueToWrite(buffer, enumerator.Current.ValueSize);
                tx.Set(location.Pack(enumerator.Current.Id), Slice.Create(buffer));
            }
            await tx.CommitAsync();
            perfTracker.Record(timer.ElapsedMilliseconds); // latency sampled right after the commit
        }
        timer.Stop();
        perfRecords.Add(new PerformanceRecord
        {
            Operation = operation,
            Time = DateTime.Now,
            Duration = timer.ElapsedMilliseconds,
            ProcessedItems = itemsPerTransaction
        });
    }
    timer.Stop();
    return perfRecords;
}
/// <summary>
/// Benchmarks random writes on <paramref name="numberOfThreads"/> threads.
/// </summary>
/// <param name="elapsedMilliseconds">Receives the total wall-clock time of the run.</param>
public override List <PerformanceRecord> WriteParallelRandom(IEnumerable <TestData> data, PerfTracker perfTracker, int numberOfThreads, out long elapsedMilliseconds)
{
    var operation = string.Format("[FoundationDB] parallel random write ({0} items)", Constants.ItemsPerTransaction);
    return WriteParallel(operation, data, Constants.ItemsPerTransaction, Constants.WriteTransactions, perfTracker, numberOfThreads, out elapsedMilliseconds);
}
/// <summary>
/// Synchronous shim over <see cref="ReadAsync"/>.
/// </summary>
private PerformanceRecord Read(string operation, IEnumerable <uint> ids, PerfTracker perfTracker)
{
    // GetAwaiter().GetResult() avoids .Result's AggregateException wrapping.
    return ReadAsync(operation, ids, perfTracker).GetAwaiter().GetResult();
}
/// <summary>
/// Benchmarks a single-threaded read over caller-supplied random ids.
/// </summary>
public override PerformanceRecord ReadRandom(IEnumerable <uint> randomIds, PerfTracker perfTracker)
{
    var operation = string.Format("[FoundationDB] random read ({0} items)", Constants.ReadItems);
    return Read(operation, randomIds, perfTracker);
}
/// <summary>
/// Recreates the database, opens it, and runs the single-threaded write benchmark against it.
/// </summary>
private async Task<List<PerformanceRecord>> WriteAsync(string operation, IEnumerable<TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker)
{
    await NewDatabaseAsync();
    using (FdbDatabase db = await OpenDatabaseAsync())
    {
        return WriteInternal(operation, data.GetEnumerator(), itemsPerTransaction, numberOfTransactions, perfTracker, db);
    }
}
// Application entry point: parses the command line, finds the game's base directory,
// builds the asset locator/service containers and the global event exchange, then
// dispatches on the requested execution mode (game, audio slave, editor, saved-game
// tests, or data dump). (Reformatted: on the collapsed source lines several trailing
// "//" comments swallowed the statements that followed them.)
static void Main(string[] args)
{
    PerfTracker.StartupEvent("Entered main");
    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); // Required for code page 850 support in .NET Core
    PerfTracker.StartupEvent("Registered encodings");
    var commandLine = new CommandLineOptions(args);
    if (commandLine.Mode == ExecutionMode.Exit) { return; }
    var baseDir = FormatUtil.FindBasePath();
    if (baseDir == null) { throw new InvalidOperationException("No base directory could be found."); }
    PerfTracker.StartupEvent($"Found base directory {baseDir}");
    PerfTracker.StartupEvent("Registering asset manager");

    var factory = new VeldridCoreFactory();
    using var locatorRegistry = new AssetLocatorRegistry()
        .AddAssetLocator(new StandardAssetLocator())
        .AddAssetLocator(new AssetConfigLocator())
        .AddAssetLocator(new CoreSpriteLocator())
        .AddAssetLocator(new MetaFontLocator(factory))
        .AddAssetLocator(new NewStringLocator())
        .AddAssetLocator(new SoundBankLocator())
        .AddAssetLocator(new SavedGameLocator())
        .AddAssetPostProcessor(new AlbionSpritePostProcessor())
        .AddAssetPostProcessor(new ImageSharpPostProcessor())
        .AddAssetPostProcessor(new InterlacedBitmapPostProcessor())
        .AddAssetPostProcessor(new InventoryPostProcessor())
        ;
    var assets = new AssetManager();
    var services = new Container("Services",
        new StdioConsoleLogger(),
        new ClipboardManager(),
        new ImGuiConsoleLogger(),
        Settings.Load(baseDir), // Need to register settings first, as the AssetConfigLocator relies on it.
        locatorRegistry,
        assets);
    using var exchange = new EventExchange(new LogExchange())
        .Register<ICoreFactory>(factory)
        .Attach(services);
    Engine.GlobalExchange = exchange;
    PerfTracker.StartupEvent("Registered asset manager");
    PerfTracker.StartupEvent($"Running as {commandLine.Mode}");

    switch (commandLine.Mode)
    {
        case ExecutionMode.Game:
        case ExecutionMode.GameWithSlavedAudio:
            Albion.RunGame(exchange, services, baseDir, commandLine);
            break;
        case ExecutionMode.AudioSlave:
            exchange.Attach(new AudioManager(true));
            break;
        case ExecutionMode.Editor:
            break; // TODO
        case ExecutionMode.SavedGameTests:
            SavedGameTests.RoundTripTest(baseDir);
            break;
        case ExecutionMode.DumpData:
            PerfTracker.BeginFrame(); // Don't need to show verbose startup logging while dumping
            var tf = new TextFormatter();
            exchange.Attach(tf);
            // Default to dumping everything; a mode argument narrows it to specific types.
            DumpType dumpTypes = DumpType.All;
            if (commandLine.GameModeArgument != null)
            {
                dumpTypes = 0;
                foreach (var t in commandLine.GameModeArgument.Split(' ', StringSplitOptions.RemoveEmptyEntries))
                {
                    dumpTypes |= Enum.Parse<DumpType>(t);
                }
            }
            if ((dumpTypes & DumpType.Characters) != 0) { Dump.CharacterSheets(assets, tf, baseDir); }
            if ((dumpTypes & DumpType.Chests) != 0) { Dump.Chests(assets, baseDir); }
            if ((dumpTypes & DumpType.CoreSprites) != 0) { Dump.CoreSprites(assets, baseDir); }
            if ((dumpTypes & DumpType.EventSets) != 0) { Dump.EventSets(assets, baseDir); }
            if ((dumpTypes & DumpType.Items) != 0) { Dump.ItemData(assets, baseDir); }
            if ((dumpTypes & DumpType.MapEvents) != 0) { Dump.MapEvents(assets, baseDir); }
            if ((dumpTypes & DumpType.Maps) != 0) { Dump.MapData(assets, tf, baseDir); }
            if ((dumpTypes & DumpType.Spells) != 0) { Dump.Spells(assets, tf, baseDir); }
            if ((dumpTypes & DumpType.ThreeDMaps) != 0) { Dump.ThreeDMapAndLabInfo(assets, baseDir); }
            break;
        case ExecutionMode.Exit:
            break;
    }
}
/// <summary>
/// Recreates and opens the database, runs the parallel write benchmark against it and
/// returns the per-transaction records paired with the total wall-clock time.
/// </summary>
private async Task<Tuple<List<PerformanceRecord>, long>> WriteParallelAsync(string operation, IEnumerable<TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker, int numberOfThreads)
{
    await NewDatabaseAsync();
    using (FdbDatabase db = await OpenDatabaseAsync())
    {
        long elapsed;
        List<PerformanceRecord> records = ExecuteWriteWithParallel(
            data,
            numberOfTransactions,
            itemsPerTransaction,
            numberOfThreads,
            (en, perTx, txCount) => WriteInternal(operation, en, perTx, txCount, perfTracker, db),
            out elapsed);
        return Tuple.Create(records, elapsed);
    }
}
// Test setup, run before each test case: gives every test a fresh logger mock and a
// PerfTracker wired to it with fixed identifying metadata (user/product/location/layer).
public void BeforeEach()
{
    PocedLogger = new Mock <IPocedLogger>();
    perfTracker = new PerfTracker(PocedLogger.Object, "name", "userId", "userName", "product", "location", "layer");
}
/// <summary>
/// Synchronous shim over <see cref="WriteInternalAsync"/>.
/// </summary>
private List<PerformanceRecord> WriteInternal(string operation, IEnumerator<TestData> enumerator, long itemsPerTransaction, long numberOfTransactions, PerfTracker perfTracker, FdbDatabase db)
{
    // GetAwaiter().GetResult() avoids .Result's AggregateException wrapping.
    return WriteInternalAsync(operation, enumerator, itemsPerTransaction, numberOfTransactions, perfTracker, db)
        .GetAwaiter().GetResult();
}
// Builds the final GPU texture for this multi-texture: rebuilds layer metadata if dirty,
// palette-expands every frame of every logical sub-image into a staging texture (one
// array layer per frame), then copies staging -> final texture on a throwaway command
// list and clears the dirty flag. (Reformatted: on the collapsed source line a "//"
// comment swallowed the remainder of the method.)
public Texture CreateDeviceTexture(GraphicsDevice gd, ResourceFactory rf, TextureUsage usage)
{
    using var _ = PerfTracker.FrameEvent("6.1.2.1 Rebuild MultiTextures");
    if (IsMetadataDirty)
    {
        RebuildLayers();
    }
    var palette = PaletteManager.Palette.GetCompletePalette();
    using var staging = rf.CreateTexture(new TextureDescription(Width, Height, Depth, MipLevels, ArrayLayers, Format, TextureUsage.Staging, Type));
    staging.Name = "T_" + Name + "_Staging";
    // Scratch buffer for one frame's worth of RGBA pixels.
    Span <uint> toBuffer = stackalloc uint[(int)(Width * Height)];
    foreach (var lsi in LogicalSubImages)
    {
        //if (!rebuildAll && !lsi.IsPaletteAnimated) // TODO: Requires caching a single Texture and then modifying it
        //    continue;
        for (int i = 0; i < lsi.Frames; i++)
        {
            // Alpha-tested images start transparent; others start opaque black.
            toBuffer.Fill(lsi.IsAlphaTested ? 0 : 0xff000000);
            Rebuild(lsi, i, toBuffer, palette);
            uint destinationLayer = (uint)LayerLookup[new LayerKey(lsi.Id, i)];
            unsafe
            {
                fixed (uint *toBufferPtr = toBuffer)
                {
                    gd.UpdateTexture(
                        staging, (IntPtr)toBufferPtr, Width * Height * sizeof(uint),
                        0, 0, 0, Width, Height, 1,
                        0, destinationLayer);
                }
            }
        }
    }

    /* TODO: Mipmap
     * for (uint level = 1; level < MipLevels; level++)
     * {
     * } //*/

    var texture = rf.CreateTexture(new TextureDescription(Width, Height, Depth, MipLevels, ArrayLayers, Format, usage, Type));
    texture.Name = "T_" + Name;
    using (CommandList cl = rf.CreateCommandList())
    {
        cl.Begin();
        cl.CopyTexture(staging, texture);
        cl.End();
        gd.SubmitCommands(cl);
    }
    IsDirty = false;
    return (texture);
}
/// <summary>
/// Times a full read of <paramref name="ids"/> (including opening the database)
/// and returns a performance record for the run.
/// </summary>
private async Task<PerformanceRecord> ReadAsync(string operation, IEnumerable<uint> ids, PerfTracker perfTracker)
{
    Stopwatch sw = Stopwatch.StartNew();
    long bytesRead;
    using (FdbDatabase db = await OpenDatabaseAsync())
    {
        // Fix: the byte count returned by ReadInternalAsync was previously discarded,
        // so Bytes was never populated on the resulting record.
        bytesRead = await ReadInternalAsync(ids, perfTracker, db);
    }
    sw.Stop();
    return new PerformanceRecord
    {
        Bytes = bytesRead,
        Operation = operation,
        Time = DateTime.Now,
        Duration = sw.ElapsedMilliseconds,
        ProcessedItems = ids.Count()
    };
}
/// <summary>
/// Writes <paramref name="numberOfTransactions"/> LMDB transactions of
/// <paramref name="itemsPerTransaction"/> items each, timing every commit.
/// </summary>
private List<PerformanceRecord> WriteInternal(
    string operation, IEnumerator<TestData> enumerator,
    long itemsPerTransaction, long numberOfTransactions,
    PerfTracker perfTracker, rndseq Rflag,
    LightningEnvironment env, LightningDatabase db)
{
    byte[] payload = null;
    var results = new List<PerformanceRecord>();
    var timer = new Stopwatch();
    // Sequential loads can use AppendData, which skips the page-split key search.
    var putFlags = Rflag == rndseq.SEQ ? LightningDB.PutOptions.AppendData : LightningDB.PutOptions.None;
    for (var t = 0; t < numberOfTransactions; t++)
    {
        timer.Restart();
        using (var tx = env.BeginTransaction())
        {
            for (var i = 0; i < itemsPerTransaction; i++)
            {
                enumerator.MoveNext();
                payload = GetValueToWrite(payload, enumerator.Current.ValueSize);
                // Keys are the 16-digit zero-padded decimal form of the id.
                tx.Put(db, Encoding.UTF8.GetBytes(enumerator.Current.Id.ToString("0000000000000000")), payload, putFlags);
            }
            tx.Commit();
        }
        timer.Stop();
        perfTracker.Record(timer.ElapsedMilliseconds);
        results.Add(new PerformanceRecord
        {
            Operation = operation,
            Time = DateTime.Now,
            Duration = timer.ElapsedMilliseconds,
            ProcessedItems = itemsPerTransaction
        });
    }
    timer.Stop();
    return results;
}
/// <summary>
/// Synchronous shim over <see cref="ReadParallelAsync"/>.
/// </summary>
private PerformanceRecord ReadParallel(string operation, IEnumerable<uint> ids, PerfTracker perfTracker, int numberOfThreads)
{
    // GetAwaiter().GetResult() avoids .Result's AggregateException wrapping.
    return ReadParallelAsync(operation, ids, perfTracker, numberOfThreads).GetAwaiter().GetResult();
}
/// <summary>
/// Opens the LMDB environment and times a single-threaded read of <paramref name="ids"/>.
/// </summary>
private PerformanceRecord Read(string operation, IEnumerable<uint> ids, PerfTracker perfTracker)
{
    LightningDatabase db;
    using (var env = NewEnvironment(out db, delete: false))
    {
        var sw = Stopwatch.StartNew();
        // Fix: ReadInternal's byte count was previously discarded, so Bytes was
        // never populated on the resulting record.
        long bytesRead = ReadInternal(ids, perfTracker, env, db);
        sw.Stop();
        return new PerformanceRecord
        {
            Bytes = bytesRead,
            Operation = operation,
            Time = DateTime.Now,
            Duration = sw.ElapsedMilliseconds,
            ProcessedItems = ids.Count()
        };
    }
}
/// <summary>
/// Reads all ids in batches of up to 1000 keys per read-only transaction and returns
/// the total number of value bytes fetched. Records elapsed time on the tracker.
/// </summary>
private static async Task<long> ReadInternalAsync(IEnumerable<uint> ids, PerfTracker perfTracker, FdbDatabase db)
{
    const int BatchSize = 1000;
    var pending = new List<int>(BatchSize);
    var location = db.GlobalSpace;
    Stopwatch sw = Stopwatch.StartNew();
    long totalBytes = 0;

    // Fetches all currently pending keys in one read-only transaction.
    async Task<long> FetchPendingAsync()
    {
        using (var tx = db.BeginReadOnlyTransaction())
        {
            var slices = await tx.GetValuesAsync(location.PackRange(pending));
            return slices.Sum(s => s.Count);
        }
    }

    foreach (int id in ids)
    {
        pending.Add(id);
        if (pending.Count >= BatchSize)
        {
            totalBytes += await FetchPendingAsync();
            pending.Clear();
        }
    }
    if (pending.Count > 0)
    {
        totalBytes += await FetchPendingAsync();
    }
    perfTracker.Record(sw.ElapsedMilliseconds);
    return totalBytes;
}
/// <summary>
/// Opens the LMDB environment once and fans the read benchmark out across several
/// threads, all sharing the same environment/database handle.
/// </summary>
private PerformanceRecord ReadParallel(string operation, IEnumerable<uint> ids, PerfTracker perfTracker, int numberOfThreads)
{
    LightningDatabase db;
    using (var env = NewEnvironment(out db, delete: false))
    {
        return ExecuteReadWithParallel(
            operation,
            ids,
            numberOfThreads,
            () => ReadInternal(ids, perfTracker, env, db));
    }
}
// Reads every id via a cursor seek inside a single read-only LMDB transaction and
// returns the total number of value bytes found.
// Note: perfTracker is accepted for signature parity with the other backends but is
// not used here — no latency sample is recorded by this method.
private static long ReadInternal(IEnumerable<uint> ids, PerfTracker perfTracker, LightningEnvironment env, LightningDatabase db)
{
    using (var tx = env.BeginTransaction(LightningDB.TransactionBeginFlags.ReadOnly))
    using (var cursor = new LightningCursor(db, tx))
    {
        long v = 0;
        foreach (var id in ids)
        {
            // Keys are the 16-digit zero-padded decimal form of the id, matching the writer.
            var value = cursor.MoveTo(Encoding.UTF8.GetBytes(id.ToString("0000000000000000")));
            // NOTE(review): assumes every id exists — if MoveTo yields a null/missing
            // result, value.Value would throw here (the disabled assert below suggests
            // this was known). Confirm against the benchmark's data population step.
            v += value.Value.Length;
            //Debug.Assert(value != null);
        }
        return v;
    }
}
// Entry point for the asset tool / game launcher. Dispatches on the parsed
// command-line mode: asset conversion, the game itself, isometric baking, or
// data dumping in several formats.
static void Main(string[] args)
{
#if DEBUG
    PerfTracker.IsTracing = true;
#endif
    PerfTracker.StartupEvent("Entered main");
    AssetSystem.LoadEvents();
    PerfTracker.StartupEvent("Built event parsers");

    var commandLine = new CommandLineOptions(args);
    if (commandLine.Mode == ExecutionMode.Exit)
    {
        return; // e.g. --help / bad arguments; nothing to set up
    }
    PerfTracker.StartupEvent($"Running as {commandLine.Mode}");

    var disk = new FileSystem();
    var jsonUtil = new FormatJsonUtil();
    var baseDir = ConfigUtil.FindBasePath(disk);
    if (baseDir == null)
    {
        throw new InvalidOperationException("No base directory could be found.");
    }
    PerfTracker.StartupEvent($"Found base directory {baseDir}");

    // ConvertAssets is handled before the normal asset-system setup because it
    // requires a specialised asset system of its own.
    if (commandLine.Mode == ExecutionMode.ConvertAssets)
    {
        using var converter = new AssetConverter(
            AssetMapping.Global,
            disk,
            jsonUtil,
            commandLine.ConvertFrom,
            commandLine.ConvertTo);
        converter.Convert(
            commandLine.DumpIds,
            commandLine.DumpAssetTypes,
            commandLine.ConvertFilePattern);
        return;
    }

    // NOTE(review): sync-over-async at startup; acceptable in Main.
    var (exchange, services) = AssetSystem.SetupAsync(baseDir, AssetMapping.Global, disk, jsonUtil).Result;

    IRenderPass mainPass = null;
    if (commandLine.NeedsEngine)
    {
        mainPass = BuildEngine(commandLine, exchange);
    }
    services.Add(new StdioConsoleReader());
    var assets = exchange.Resolve<IAssetManager>();
    AutodetectLanguage(exchange, assets);

    switch (commandLine.Mode) // ConvertAssets handled above as it requires a specialised asset system setup
    {
        case ExecutionMode.Game:
            Albion.RunGame(exchange, services, mainPass, baseDir, commandLine);
            break;

        case ExecutionMode.BakeIsometric:
            IsometricTest.Run(exchange, commandLine);
            break;

        case ExecutionMode.DumpData:
            PerfTracker.BeginFrame(); // Don't need to show verbose startup logging while dumping
            var tf = new TextFormatter();
            exchange.Attach(tf);
            var parsedIds = commandLine.DumpIds?.Select(AssetId.Parse).ToArray();

            // Each dumper is attached to the exchange for the duration of its
            // dump, then removed again.
            if ((commandLine.DumpFormats & DumpFormats.Json) != 0)
            {
                var dumper = new DumpJson();
                exchange.Attach(dumper);
                dumper.Dump(baseDir, commandLine.DumpAssetTypes, parsedIds);
                dumper.Remove();
            }
            if ((commandLine.DumpFormats & DumpFormats.Text) != 0)
            {
                var dumper = new DumpText();
                exchange.Attach(dumper);
                dumper.Dump(baseDir, commandLine.DumpAssetTypes, parsedIds);
                dumper.Remove();
            }
            if ((commandLine.DumpFormats & DumpFormats.Png) != 0)
            {
                var dumper = new DumpGraphics(commandLine.DumpFormats);
                exchange.Attach(dumper);
                dumper.Dump(baseDir, commandLine.DumpAssetTypes, parsedIds);
                dumper.Remove();
            }
            if ((commandLine.DumpFormats & DumpFormats.Annotated) != 0)
            {
                var dumper = new DumpAnnotated();
                exchange.Attach(dumper);
                dumper.Dump(baseDir, commandLine.DumpAssetTypes, parsedIds);
                dumper.Remove();
            }
            //if ((commandLine.DumpFormats & DumpFormats.Tiled) != 0)
            //    DumpTiled.Dump(baseDir, assets, commandLine.DumpAssetTypes, parsedIds);
            break;

        case ExecutionMode.Exit: // unreachable: handled by the early return above
            break;
    }

    Console.WriteLine("Exiting");
    exchange.Dispose();
}
// Creates a fresh Voron storage and runs the parallel batched-write benchmark
// against it, reporting total elapsed time through the out parameter.
private List<PerformanceRecord> WriteParallel(string operation, IEnumerable<TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker, int numberOfThreads, out long elapsedMilliseconds)
{
    NewStorage();
    using (var env = new StorageEnvironment(StorageEnvironmentOptions.ForPath(dataPath)))
    {
        return ExecuteWriteWithParallel(
            data,
            numberOfTransactions,
            itemsPerTransaction,
            numberOfThreads,
            (source, txSize, txCount) => WriteInternalBatch(operation, source, txSize, txCount, perfTracker, env),
            out elapsedMilliseconds);
    }
}
// Parallel random-read benchmark entry point for the FoundationDB backend.
public override PerformanceRecord ReadParallelRandom(IEnumerable<uint> randomIds, PerfTracker perfTracker, int numberOfThreads)
{
    var operation = string.Format("[FoundationDB] parallel random read ({0} items)", Constants.ReadItems);
    return ReadParallel(operation, randomIds, perfTracker, numberOfThreads);
}
// Writes numberOfTransactions batches of itemsPerTransaction entries each into
// the Voron root tree, one read-write transaction per batch. Records the
// elapsed time of every committed transaction into perfTracker and returns a
// PerformanceRecord per transaction.
private List<PerformanceRecord> WriteInternal( string operation, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker, StorageEnvironment env, IEnumerator<TestData> enumerator)
{
    var timer = new Stopwatch();
    byte[] buffer = null; // reused between items; resized by GetValueToWrite as needed
    var results = new List<PerformanceRecord>();

    for (var txIndex = 0; txIndex < numberOfTransactions; txIndex++)
    {
        timer.Restart();
        using (var tx = env.NewTransaction(TransactionFlags.ReadWrite))
        {
            for (var item = 0; item < itemsPerTransaction; item++)
            {
                // Assumes the source yields enough items for every
                // transaction — TODO confirm caller sizing.
                enumerator.MoveNext();
                buffer = GetValueToWrite(buffer, enumerator.Current.ValueSize);
                tx.State.Root.Add(tx, enumerator.Current.Id.ToString("0000000000000000"), new MemoryStream(buffer));
            }
            tx.Commit();
            perfTracker.Record(timer.ElapsedMilliseconds);
        }
        timer.Stop();

        results.Add(new PerformanceRecord
        {
            Operation = operation,
            Time = DateTime.Now,
            Duration = timer.ElapsedMilliseconds,
            ProcessedItems = itemsPerTransaction
        });
    }

    return results;
}
// Synchronous facade over the async write benchmark; blocks until complete.
private List<PerformanceRecord> Write(string operation, IEnumerable<TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker)
{
    var pending = WriteAsync(operation, data, itemsPerTransaction, numberOfTransactions, perfTracker);
    return pending.Result;
}
// Parallel read benchmark for the Voron backend. Journal contents are flushed
// to the data file up front so the timed section measures data-file reads only.
private PerformanceRecord ReadParallel(string operation, IEnumerable<uint> ids, PerfTracker perfTracker, int numberOfThreads)
{
    var storageOptions = StorageEnvironmentOptions.ForPath(dataPath);
    storageOptions.ManualFlushing = true; // flushing is driven explicitly below

    using (var environment = new StorageEnvironment(storageOptions))
    {
        environment.FlushLogToDataFile();
        return ExecuteReadWithParallel(
            operation,
            ids,
            numberOfThreads,
            () => ReadInternal(ids, perfTracker, environment));
    }
}
// Recreates the FoundationDB database, opens it, and runs the sequential
// write benchmark against it.
private async Task<List<PerformanceRecord>> WriteAsync(string operation, IEnumerable<TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker)
{
    await NewDatabaseAsync();
    using (FdbDatabase database = await OpenDatabaseAsync())
    {
        IEnumerator<TestData> source = data.GetEnumerator();
        return WriteInternal(operation, source, itemsPerTransaction, numberOfTransactions, perfTracker, database);
    }
}
// Random-write benchmark entry point for the Voron backend.
public override List<PerformanceRecord> WriteRandom(IEnumerable<TestData> data, PerfTracker perfTracker)
{
    var operation = string.Format("[Voron] random write ({0} items)", Constants.ItemsPerTransaction);
    return Write(operation, data, Constants.ItemsPerTransaction, Constants.WriteTransactions, perfTracker);
}
// Synchronous facade over the async parallel-write benchmark. Unpacks the
// (records, elapsed) tuple into a return value and an out parameter.
private List<PerformanceRecord> WriteParallel(string operation, IEnumerable<TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker, int numberOfThreads, out long elapsedMilliseconds)
{
    var outcome = WriteParallelAsync(operation, data, itemsPerTransaction, numberOfTransactions, perfTracker, numberOfThreads).Result;
    elapsedMilliseconds = outcome.Item2;
    return outcome.Item1;
}
// Random-read benchmark entry point for the Voron backend.
public override PerformanceRecord ReadRandom(IEnumerable<uint> randomIds, PerfTracker perfTracker)
{
    var operation = string.Format("[Voron] random read ({0} items)", Constants.ReadItems);
    return Read(operation, randomIds, perfTracker);
}
// Recreates the FoundationDB database and runs the parallel write benchmark,
// returning the per-transaction records together with the total elapsed time.
private async Task<Tuple<List<PerformanceRecord>, long>> WriteParallelAsync(string operation, IEnumerable<TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker, int numberOfThreads)
{
    await NewDatabaseAsync();
    using (FdbDatabase database = await OpenDatabaseAsync())
    {
        long elapsed;
        var records = ExecuteWriteWithParallel(
            data,
            numberOfTransactions,
            itemsPerTransaction,
            numberOfThreads,
            (source, txSize, txCount) => WriteInternal(operation, source, txSize, txCount, perfTracker, database),
            out elapsed);
        return Tuple.Create(records, elapsed);
    }
}
// Creates a fresh Voron storage and runs the single-threaded batched-write
// benchmark. (A non-batched WriteInternal path also exists but is not used here.)
private List<PerformanceRecord> Write(string operation, IEnumerable<TestData> data, int itemsPerTransaction, int numberOfTransactions, PerfTracker perfTracker)
{
    NewStorage();
    var options = StorageEnvironmentOptions.ForPath(dataPath);
    using (var env = new StorageEnvironment(options))
    {
        var source = data.GetEnumerator();
        return WriteInternalBatch(operation, source, itemsPerTransaction, numberOfTransactions, perfTracker, env);
    }
}
/// <summary>
/// Demo driver for the Flogger logging wrappers: creates a customer via the
/// CreateNewCustomer stored procedure three ways (wrapped ADO.NET, wrapped
/// Dapper, Entity Framework), logging diagnostics, usage, and errors along
/// the way. The deliberately over-long customer names trigger SQL errors so
/// the error-logging path is exercised.
/// </summary>
static void Main(string[] args)
{
    var fd = GetFlogDetail("starting application", null);
    Flogger.WriteDiagnostic(fd);
    var tracker = new PerfTracker("FloggerConsole_Execution", "", fd.UserName, fd.Location, fd.Product, fd.Layer);

    var connStr = ConfigurationManager.ConnectionStrings["DefaultConnection"].ToString();
    using (var db = new SqlConnection(connStr))
    {
        db.Open();

        try
        {
            // Wrapped ADO.NET stored-procedure call.
            var sp = new Sproc(db, "CreateNewCustomer");
            sp.SetParam("@Name", "waytoolongforitsowngood");
            sp.SetParam("@TotalPurchases", 12000);
            sp.SetParam("@TotalReturns", 100.50M);
            sp.ExecNonQuery();
        }
        catch (Exception ex)
        {
            var efd = GetFlogDetail("", ex);
            Flogger.WriteError(efd);
        }

        try
        {
            // Wrapped Dapper
            db.DapperProcNonQuery("CreateNewCustomer", new
            {
                Name = "dappernametoolongtowork",
                TotalPurchases = 12000,
                TotalReturns = 100.50M
            });
        }
        catch (Exception ex)
        {
            var efd = GetFlogDetail("", ex);
            Flogger.WriteError(efd);
        }
    }

    // FIX: CustomerDbContext is an EF DbContext (IDisposable) and was never
    // disposed; wrap it in a using so its connection is released.
    using (var ctx = new CustomerDbContext())
    {
        try
        {
            // Entity Framework
            var name = new SqlParameter("@Name", "waytoolongforitsowngood");
            var totalPurchases = new SqlParameter("@TotalPurchases", 12000);
            var totalReturns = new SqlParameter("@TotalReturns", 100.50M);
            ctx.Database.ExecuteSqlCommand("EXEC dbo.CreateNewCustomer @Name, @TotalPurchases, @TotalReturns", name, totalPurchases, totalReturns);
        }
        catch (Exception ex)
        {
            var efd = GetFlogDetail("", ex);
            Flogger.WriteError(efd);
        }

        var customers = ctx.Customers.ToList();
        fd = GetFlogDetail($"{customers.Count} customers in the database", null);
        Flogger.WriteDiagnostic(fd);
    }

    fd = GetFlogDetail("used flogging console", null);
    Flogger.WriteUsage(fd);
    fd = GetFlogDetail("stopping app", null);
    Flogger.WriteDiagnostic(fd);
    tracker.Stop();
}