// Copyright 2020-2022, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
 * @file
 * @brief Shared frame timing code.
 * @author Jakob Bornecrantz <jakob@collabora.com>
 * @ingroup aux_util
 */

#include "os/os_time.h"

#include "util/u_time.h"
#include "util/u_misc.h"
#include "util/u_debug.h"
#include "util/u_pacing.h"
#include "util/u_logging.h"
#include "util/u_trace_marker.h"

#include <stdio.h>
#include <assert.h>
#include <inttypes.h>


DEBUG_GET_ONCE_LOG_OPTION(log_level, "U_PACING_APP_LOG", U_LOGGING_WARN)

#define UPA_LOG_T(...) U_LOG_IFL_T(debug_get_log_option_log_level(), __VA_ARGS__)
#define UPA_LOG_D(...) U_LOG_IFL_D(debug_get_log_option_log_level(), __VA_ARGS__)
#define UPA_LOG_I(...) U_LOG_IFL_I(debug_get_log_option_log_level(), __VA_ARGS__)
#define UPA_LOG_W(...) U_LOG_IFL_W(debug_get_log_option_log_level(), __VA_ARGS__)
#define UPA_LOG_E(...) U_LOG_IFL_E(debug_get_log_option_log_level(), __VA_ARGS__)

/*!
 * Define to validate latched and retired calls. Currently disabled due to
 * simplistic frame allocation code, enable once improved.
 */
#undef VALIDATE_LATCHED_AND_RETIRED


/*
 *
 * Structs, enums, and defines.
 *
 */

#define FRAME_COUNT (8)

enum u_pa_state
{
        U_PA_READY,
        U_RT_WAIT_LEFT,
        U_RT_PREDICTED,
        U_RT_BEGUN,
        U_RT_DELIVERED,
        U_RT_GPU_DONE,
};
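
/*
 * Overview of the per-frame state machine, as implemented by the member
 * functions below (this is a summary of this file's own transitions, not
 * additional API):
 *
 *   U_PA_READY      --pa_predict()--------------------> U_RT_PREDICTED
 *   U_RT_PREDICTED  --pa_mark_point(WAKE_UP)----------> U_RT_WAIT_LEFT
 *   U_RT_WAIT_LEFT  --pa_mark_point(BEGIN)------------> U_RT_BEGUN
 *   U_RT_BEGUN      --pa_mark_delivered()-------------> U_RT_DELIVERED
 *   U_RT_DELIVERED  --pa_mark_gpu_done()--------------> U_RT_GPU_DONE
 *   U_RT_GPU_DONE   --reset in pa_mark_gpu_done(), or
 *                     in pa_retired() when
 *                     VALIDATE_LATCHED_AND_RETIRED-----> U_PA_READY
 *
 * pa_mark_discarded() resets a frame back to U_PA_READY from either
 * U_RT_WAIT_LEFT or U_RT_BEGUN.
 */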

struct u_pa_frame
{
        int64_t frame_id;

        //! When we predicted this frame to be shown.
        uint64_t predicted_display_time_ns;

        //! The selected display period.
        uint64_t predicted_display_period_ns;

        //! When the client's GPU work should have completed.
        uint64_t predicted_gpu_done_time_ns;

        /*!
         * When the app told us to display this frame, can be different
         * than the predicted display time so we track that separately.
         */
        uint64_t display_time_ns;

        //! When something happened.
        struct
        {
                uint64_t predicted_ns;
                uint64_t wait_woke_ns;
                uint64_t begin_ns;
                uint64_t delivered_ns;
                uint64_t gpu_done_ns;
        } when;

        enum u_pa_state state;
};

struct pacing_app
{
        struct u_pacing_app base;

        //! Id for this session.
        int64_t session_id;

        struct u_pa_frame frames[FRAME_COUNT];
        uint32_t current_frame;
        uint32_t next_frame;

        int64_t frame_counter;

        struct
        {
                //! App time between wait returning and begin being called.
                uint64_t cpu_time_ns;
                //! Time between begin and frame data being delivered.
                uint64_t draw_time_ns;
                //! Time between the frame data being delivered and GPU completing.
                uint64_t wait_time_ns;
                //! Extra time between end of draw time and when the compositor wakes up.
                uint64_t margin_ns;
        } app; //!< App statistics.

        struct
        {
                //! The last display time that the thing driving this helper got.
                uint64_t predicted_display_time_ns;
                //! The last display period the hardware is running at.
                uint64_t predicted_display_period_ns;
                //! The extra time needed by the thing driving this helper.
                uint64_t extra_ns;
        } last_input;

        uint64_t last_returned_ns;
};


/*
 *
 * Helpers
 *
 */

static inline struct pacing_app *
pacing_app(struct u_pacing_app *upa)
{
        return (struct pacing_app *)upa;
}

#define DEBUG_PRINT_FRAME_ID() UPA_LOG_T("%" PRIi64, frame_id)
#define GET_INDEX_FROM_ID(RT, ID) ((uint64_t)(ID) % FRAME_COUNT)

#define IIR_ALPHA_LT 0.8
#define IIR_ALPHA_GT 0.8

static void
do_iir_filter(uint64_t *target, double alpha_lt, double alpha_gt, uint64_t sample)
{
        uint64_t t = *target;
        double alpha = t < sample ? alpha_lt : alpha_gt;
        double a = time_ns_to_s(t) * alpha;
        double b = time_ns_to_s(sample) * (1.0 - alpha);
        *target = time_s_to_ns(a + b);
}
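
/*
 * Illustrative numbers only: do_iir_filter() blends the running estimate with
 * the newest sample as new = alpha * old + (1 - alpha) * sample, picking
 * alpha_lt when the old value is below the sample and alpha_gt otherwise
 * (both 0.8 here). For example, with an old cpu_time_ns of 2 ms and a new
 * sample of 4 ms the estimate becomes 0.8 * 2 + 0.2 * 4 = 2.4 ms, so a single
 * slow frame nudges the budget instead of doubling it.
 */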

static uint64_t
min_period(const struct pacing_app *pa)
{
        return pa->last_input.predicted_display_period_ns;
}

static uint64_t
last_sample_displayed(const struct pacing_app *pa)
{
        return pa->last_input.predicted_display_time_ns;
}

static uint64_t
last_return_predicted_display(const struct pacing_app *pa)
{
        return pa->last_returned_ns;
}

static uint64_t
total_app_time_ns(const struct pacing_app *pa)
{
        return pa->app.cpu_time_ns + pa->app.draw_time_ns + pa->app.wait_time_ns;
}

static uint64_t
total_compositor_time_ns(const struct pacing_app *pa)
{
        return pa->app.margin_ns + pa->last_input.extra_ns;
}

static uint64_t
total_app_and_compositor_time_ns(const struct pacing_app *pa)
{
        return total_app_time_ns(pa) + total_compositor_time_ns(pa);
}

static uint64_t
calc_period(const struct pacing_app *pa)
{
        // Error checking.
        uint64_t base_period_ns = min_period(pa);
        if (base_period_ns == 0) {
                assert(false && "Have not yet received any samples from timing driver.");
                base_period_ns = U_TIME_1MS_IN_NS * 16; // Roughly 60Hz fallback.
        }

        // Calculate the period, checking each stage (cpu, draw, wait) separately.
        uint64_t period_ns = base_period_ns;
        while (pa->app.cpu_time_ns > period_ns) {
                period_ns += base_period_ns;
        }

        while (pa->app.draw_time_ns > period_ns) {
                period_ns += base_period_ns;
        }

        while (pa->app.wait_time_ns > period_ns) {
                period_ns += base_period_ns;
        }

        return period_ns;
}
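
/*
 * Worked example with made-up numbers: with a base display period of 16 ms
 * and filtered stage estimates of cpu = 2 ms, draw = 20 ms and wait = 3 ms,
 * the loops in calc_period() leave the period at 16 ms after the cpu check,
 * extend it to 32 ms because draw exceeds 16 ms, and keep 32 ms for the wait
 * check, so the app ends up paced at every second display refresh.
 */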

static uint64_t
predict_display_time(const struct pacing_app *pa, uint64_t now_ns, uint64_t period_ns)
{
        // Total app and compositor time to produce a frame
        uint64_t app_and_compositor_time_ns = total_app_and_compositor_time_ns(pa);

        // Start from the last time that the driver displayed something.
        uint64_t val = last_sample_displayed(pa);

        // Return a time after the last returned display time. Add half the
        // display period to the comparison for robustness when the last display
        // time shifts slightly with respect to the last sample.
        while (val <= last_return_predicted_display(pa) + (period_ns / 2)) {
                val += period_ns;
        }

        // Have to have enough time to perform app work.
        while ((val - app_and_compositor_time_ns) <= now_ns) {
                val += period_ns;
        }

        return val;
}
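
/*
 * Worked example with made-up numbers: say the last known display time is at
 * 100 ms, the period is 16 ms, the last prediction handed out was also 100 ms
 * and the total app + compositor budget is 9 ms. The first loop advances the
 * candidate past 100 + 8 ms, to 116 ms. If now is 104 ms then
 * 116 - 9 = 107 ms is still in the future, so 116 ms is returned; if now were
 * 110 ms the second loop would skip ahead one more period and return 132 ms.
 */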


/*
 *
 * Metrics and tracing.
 *
 */

static void
do_tracing(struct pacing_app *pa, struct u_pa_frame *f)
{
        if (!U_TRACE_CATEGORY_IS_ENABLED(timing)) {
                return;
        }

#ifdef U_TRACE_TRACY // Uses Tracy specific things.
        uint64_t cpu_ns = f->when.begin_ns - f->when.wait_woke_ns;
        TracyCPlot("App CPU(ms)", time_ns_to_ms_f(cpu_ns));

        uint64_t draw_ns = f->when.delivered_ns - f->when.begin_ns;
        TracyCPlot("App Draw(ms)", time_ns_to_ms_f(draw_ns));

        uint64_t gpu_ns = f->when.gpu_done_ns - f->when.delivered_ns;
        TracyCPlot("App GPU(ms)", time_ns_to_ms_f(gpu_ns));

        int64_t diff_ns = (int64_t)f->when.gpu_done_ns - (int64_t)f->predicted_gpu_done_time_ns;
        TracyCPlot("App vs Expected(ms)", time_ns_to_ms_f(diff_ns));
#endif

#ifdef U_TRACE_PERCETTO // Uses Percetto specific things.
#define TE_BEG(TRACK, TIME, NAME) U_TRACE_EVENT_BEGIN_ON_TRACK_DATA(timing, TRACK, TIME, NAME, PERCETTO_I(f->frame_id))
#define TE_END(TRACK, TIME) U_TRACE_EVENT_END_ON_TRACK(timing, TRACK, TIME)

        TE_BEG(pa_cpu, f->when.predicted_ns, "sleep");
        TE_END(pa_cpu, f->when.wait_woke_ns);

        uint64_t cpu_start_ns = f->when.wait_woke_ns + 1;
        TE_BEG(pa_cpu, cpu_start_ns, "cpu");
        TE_END(pa_cpu, f->when.begin_ns);

        TE_BEG(pa_draw, f->when.begin_ns, "draw");
        if (f->when.begin_ns > f->predicted_gpu_done_time_ns) {
                TE_BEG(pa_draw, f->when.begin_ns, "late");
                TE_END(pa_draw, f->when.delivered_ns);
        } else if (f->when.delivered_ns > f->predicted_gpu_done_time_ns) {
                TE_BEG(pa_draw, f->predicted_gpu_done_time_ns, "late");
                TE_END(pa_draw, f->when.delivered_ns);
        }
        TE_END(pa_draw, f->when.delivered_ns);

        TE_BEG(pa_wait, f->when.delivered_ns, "wait");
        if (f->when.delivered_ns > f->predicted_gpu_done_time_ns) {
                TE_BEG(pa_wait, f->when.delivered_ns, "late");
                TE_END(pa_wait, f->when.gpu_done_ns);
        } else if (f->when.gpu_done_ns > f->predicted_gpu_done_time_ns) {
                TE_BEG(pa_wait, f->predicted_gpu_done_time_ns, "late");
                TE_END(pa_wait, f->when.gpu_done_ns);
        }
        TE_END(pa_wait, f->when.gpu_done_ns);

#undef TE_BEG
#undef TE_END
#endif
}


/*
 *
 * Member functions.
 *
 */

static void
pa_predict(struct u_pacing_app *upa,
           uint64_t now_ns,
           int64_t *out_frame_id,
           uint64_t *out_wake_up_time,
           uint64_t *out_predicted_display_time,
           uint64_t *out_predicted_display_period)
{
        struct pacing_app *pa = pacing_app(upa);

        int64_t frame_id = ++pa->frame_counter;
        *out_frame_id = frame_id;

        DEBUG_PRINT_FRAME_ID();

        uint64_t period_ns = calc_period(pa);
        uint64_t predict_ns = predict_display_time(pa, now_ns, period_ns);

        // When should the client wake up.
        uint64_t wake_up_time_ns = predict_ns - total_app_and_compositor_time_ns(pa);

        // When the client's GPU work should have completed.
        uint64_t gpu_done_time_ns = predict_ns - total_compositor_time_ns(pa);

        pa->last_returned_ns = predict_ns;

        *out_wake_up_time = wake_up_time_ns;
        *out_predicted_display_time = predict_ns;
        *out_predicted_display_period = period_ns;

        size_t index = GET_INDEX_FROM_ID(pa, frame_id);
        struct u_pa_frame *f = &pa->frames[index];
        assert(f->frame_id == -1);
        assert(f->state == U_PA_READY);

        f->state = U_RT_PREDICTED;
        f->frame_id = frame_id;
        f->predicted_gpu_done_time_ns = gpu_done_time_ns;
        f->predicted_display_time_ns = predict_ns;
        f->predicted_display_period_ns = period_ns;
        f->when.predicted_ns = now_ns;
}
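
/*
 * Budget layout used by pa_predict() above, working backwards from the
 * predicted display time (names refer to this file's own fields):
 *
 *   wake_up_time_ns  = display - (cpu + draw + wait + margin + extra)
 *   gpu_done_time_ns = display - (margin + extra)
 *
 * i.e. the app gets its filtered cpu + draw + wait estimate, while the margin
 * plus the caller-provided extra_ns are reserved for the compositor.
 */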

static void
pa_mark_point(struct u_pacing_app *upa, int64_t frame_id, enum u_timing_point point, uint64_t when_ns)
{
        struct pacing_app *pa = pacing_app(upa);

        UPA_LOG_T("%" PRIi64 " (%u)", frame_id, point);

        size_t index = GET_INDEX_FROM_ID(pa, frame_id);
        struct u_pa_frame *f = &pa->frames[index];
        assert(f->frame_id == frame_id);

        switch (point) {
        case U_TIMING_POINT_WAKE_UP:
                assert(f->state == U_RT_PREDICTED);

                f->when.wait_woke_ns = when_ns;
                f->state = U_RT_WAIT_LEFT;
                break;
        case U_TIMING_POINT_BEGIN:
                assert(f->state == U_RT_WAIT_LEFT);

                f->when.begin_ns = when_ns;
                f->state = U_RT_BEGUN;
                break;
        case U_TIMING_POINT_SUBMIT:
        default: assert(false);
        }
}

static void
pa_mark_discarded(struct u_pacing_app *upa, int64_t frame_id, uint64_t when_ns)
{
        struct pacing_app *pa = pacing_app(upa);

        DEBUG_PRINT_FRAME_ID();

        size_t index = GET_INDEX_FROM_ID(pa, frame_id);
        struct u_pa_frame *f = &pa->frames[index];
        assert(f->frame_id == frame_id);
        assert(f->state == U_RT_WAIT_LEFT || f->state == U_RT_BEGUN);

        // Update all data.
        f->when.delivered_ns = when_ns;

        // Reset the frame.
        f->state = U_PA_READY;
        f->frame_id = -1;
}

static void
pa_mark_delivered(struct u_pacing_app *upa, int64_t frame_id, uint64_t when_ns, uint64_t display_time_ns)
{
        struct pacing_app *pa = pacing_app(upa);

        DEBUG_PRINT_FRAME_ID();

        size_t index = GET_INDEX_FROM_ID(pa, frame_id);
        struct u_pa_frame *f = &pa->frames[index];
        assert(f->frame_id == frame_id);
        assert(f->state == U_RT_BEGUN);

        // Update all data.
        f->when.delivered_ns = when_ns;
        f->display_time_ns = display_time_ns;
        f->state = U_RT_DELIVERED;
}

static void
pa_mark_gpu_done(struct u_pacing_app *upa, int64_t frame_id, uint64_t when_ns)
{
        struct pacing_app *pa = pacing_app(upa);

        DEBUG_PRINT_FRAME_ID();

        size_t index = GET_INDEX_FROM_ID(pa, frame_id);
        struct u_pa_frame *f = &pa->frames[index];
        assert(f->frame_id == frame_id);
        assert(f->state == U_RT_DELIVERED);

        // Update all data.
        f->when.gpu_done_ns = when_ns;
        f->state = U_RT_GPU_DONE;


        /*
         * Process data.
         */

        int64_t diff_ns = f->predicted_gpu_done_time_ns - when_ns;
        bool late = false;
        if (diff_ns < 0) {
                diff_ns = -diff_ns;
                late = true;
        }

        uint64_t diff_cpu_ns = f->when.begin_ns - f->when.wait_woke_ns;
        uint64_t diff_draw_ns = f->when.delivered_ns - f->when.begin_ns;
        uint64_t diff_wait_ns = f->when.gpu_done_ns - f->when.delivered_ns;

        UPA_LOG_D(
            "Delivered frame %.2fms %s." //
            "\n\tperiod: %.2f" //
            "\n\tcpu o: %.2f, n: %.2f" //
            "\n\tdraw o: %.2f, n: %.2f" //
            "\n\twait o: %.2f, n: %.2f", //
            time_ns_to_ms_f(diff_ns), late ? "late" : "early", //
            time_ns_to_ms_f(f->predicted_display_period_ns), //
            time_ns_to_ms_f(pa->app.cpu_time_ns), time_ns_to_ms_f(diff_cpu_ns), //
            time_ns_to_ms_f(pa->app.draw_time_ns), time_ns_to_ms_f(diff_draw_ns), //
            time_ns_to_ms_f(pa->app.wait_time_ns), time_ns_to_ms_f(diff_wait_ns)); //

        do_iir_filter(&pa->app.cpu_time_ns, IIR_ALPHA_LT, IIR_ALPHA_GT, diff_cpu_ns);
        do_iir_filter(&pa->app.draw_time_ns, IIR_ALPHA_LT, IIR_ALPHA_GT, diff_draw_ns);
        do_iir_filter(&pa->app.wait_time_ns, IIR_ALPHA_LT, IIR_ALPHA_GT, diff_wait_ns);

        // Write out tracing data.
        do_tracing(pa, f);

#ifndef VALIDATE_LATCHED_AND_RETIRED
        // Reset the frame.
        U_ZERO(f);
        f->state = U_PA_READY;
        f->frame_id = -1;
#endif
}

static void
pa_latched(struct u_pacing_app *upa, int64_t frame_id, uint64_t when_ns, int64_t system_frame_id)
{
        struct pacing_app *pa = pacing_app(upa);

#ifdef VALIDATE_LATCHED_AND_RETIRED
        size_t index = GET_INDEX_FROM_ID(pa, frame_id);
        struct u_pa_frame *f = &pa->frames[index];
        assert(f->frame_id == frame_id);
        assert(f->state == U_RT_GPU_DONE);
#else
        (void)pa;
#endif
}

static void
pa_retired(struct u_pacing_app *upa, int64_t frame_id, uint64_t when_ns)
{
        struct pacing_app *pa = pacing_app(upa);

#ifdef VALIDATE_LATCHED_AND_RETIRED
        size_t index = GET_INDEX_FROM_ID(pa, frame_id);
        struct u_pa_frame *f = &pa->frames[index];
        assert(f->frame_id == frame_id);
        assert(f->state == U_RT_GPU_DONE || f->state == U_RT_DELIVERED);

        // Reset the frame.
        U_ZERO(f);
        f->state = U_PA_READY;
        f->frame_id = -1;
#else
        (void)pa;
#endif
}

static void
pa_info(struct u_pacing_app *upa,
        uint64_t predicted_display_time_ns,
        uint64_t predicted_display_period_ns,
        uint64_t extra_ns)
{
        struct pacing_app *pa = pacing_app(upa);

        pa->last_input.predicted_display_time_ns = predicted_display_time_ns;
        pa->last_input.predicted_display_period_ns = predicted_display_period_ns;
        pa->last_input.extra_ns = extra_ns;
}

static void
pa_destroy(struct u_pacing_app *upa)
{
        free(upa);
}

static xrt_result_t
pa_create(int64_t session_id, struct u_pacing_app **out_upa)
{
        struct pacing_app *pa = U_TYPED_CALLOC(struct pacing_app);
        pa->base.predict = pa_predict;
        pa->base.mark_point = pa_mark_point;
        pa->base.mark_discarded = pa_mark_discarded;
        pa->base.mark_delivered = pa_mark_delivered;
        pa->base.mark_gpu_done = pa_mark_gpu_done;
        pa->base.latched = pa_latched;
        pa->base.retired = pa_retired;
        pa->base.info = pa_info;
        pa->base.destroy = pa_destroy;
        pa->session_id = session_id;
        pa->app.cpu_time_ns = U_TIME_1MS_IN_NS * 2;
        pa->app.draw_time_ns = U_TIME_1MS_IN_NS * 2;
        pa->app.margin_ns = U_TIME_1MS_IN_NS * 2;

        for (size_t i = 0; i < ARRAY_SIZE(pa->frames); i++) {
                pa->frames[i].state = U_PA_READY;
                pa->frames[i].frame_id = -1;
        }

        *out_upa = &pa->base;

        return XRT_SUCCESS;
}


/*
 *
 * Factory functions.
 *
 */

static xrt_result_t
paf_create(struct u_pacing_app_factory *upaf, struct u_pacing_app **out_upa)
{
        static int64_t session_id_gen = 0; // For now until global session id is introduced.

        return pa_create(session_id_gen++, out_upa);
}

static void
paf_destroy(struct u_pacing_app_factory *upaf)
{
        free(upaf);
}


/*
 *
 * 'Exported' functions.
 *
 */

xrt_result_t
u_pa_factory_create(struct u_pacing_app_factory **out_upaf)
{
        struct u_pacing_app_factory *upaf = U_TYPED_CALLOC(struct u_pacing_app_factory);
        upaf->create = paf_create;
        upaf->destroy = paf_destroy;

        *out_upaf = upaf;

        return XRT_SUCCESS;
}