fix: don't rely on std.time.microTimestamp() for accurate statistics

I don't know how much you care about accuracy in the rendering
statistics. While working on seamstress, I found the `timestamp` calls
in `std.time` so unreliable as to be unusable, sometimes reporting
negative time deltas within a single function. That is expected of a
wall-clock source, which the OS may step backwards (e.g. during an NTP
adjustment). `std.time.Timer`, which is backed by a monotonic clock,
has been rock-solid in my experience.
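
For illustration, here is a minimal sketch (not vaxis code) of the
monotonic-timer pattern this commit adopts. Because `std.time.Timer`
is built on a monotonic clock, successive `read()` calls can never
produce a negative delta; `busyWork` is just a stand-in workload:

```zig
const std = @import("std");

pub fn main() !void {
    // Timer.start() errors only if the platform lacks a monotonic clock.
    var timer = try std.time.Timer.start();

    busyWork();

    // read() returns elapsed nanoseconds since start()/reset(),
    // guaranteed non-negative because the clock is monotonic.
    const elapsed_us = timer.read() / std.time.ns_per_us;
    std.debug.print("busyWork took {d} us\n", .{elapsed_us});
}

fn busyWork() void {
    var i: usize = 0;
    while (i < 1_000_000) : (i += 1) {
        std.mem.doNotOptimizeAway(i);
    }
}
```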
Author:    Rylee Lyman, 2024-03-14 08:16:38 -04:00
Committer: Tim Culverhouse
Parent:    1e5560cc09
Commit:    9496270ecd


```diff
@@ -81,6 +81,7 @@ pub fn Vaxis(comptime T: type) type {
         // statistics
         renders: usize = 0,
         render_dur: i128 = 0,
+        render_timer: std.time.Timer,

         /// Initialize Vaxis with runtime options
         pub fn init(_: Options) !Self {
@@ -89,6 +90,7 @@ pub fn Vaxis(comptime T: type) type {
                 .tty = null,
                 .screen = .{},
                 .screen_last = .{},
+                .render_timer = try std.time.Timer.start(),
             };
         }
@@ -264,9 +266,9 @@ pub fn Vaxis(comptime T: type) type {
         pub fn render(self: *Self) !void {
             var tty = self.tty orelse return;
             self.renders += 1;
-            const timer_start = std.time.microTimestamp();
+            self.render_timer.reset();
             defer {
-                self.render_dur += std.time.microTimestamp() - timer_start;
+                self.render_dur += self.render_timer.read() / std.time.ns_per_us;
             }
             defer self.refresh = false;
```
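
For reference, `Timer.read()` reports nanoseconds, so the division by
`std.time.ns_per_us` keeps `render_dur` in the same microsecond units
the old code accumulated. A consumer of these counters might derive the
mean render time as sketched below; the `avgRenderUs` helper is
illustrative only and not part of vaxis:

```zig
const std = @import("std");

/// Illustrative only: mean render duration in microseconds,
/// computed from the `renders` and `render_dur` counters in the diff.
fn avgRenderUs(renders: usize, render_dur: i128) i128 {
    if (renders == 0) return 0;
    // Signed integer division in Zig must be spelled explicitly.
    return @divTrunc(render_dur, @as(i128, @intCast(renders)));
}

test "average of three renders" {
    // Three renders totalling 4500 us average to 1500 us.
    try std.testing.expectEqual(@as(i128, 1500), avgRenderUs(3, 4500));
}
```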