xrt: pass at_timestamp_ns to get_face_tracking

Part-of: <https://gitlab.freedesktop.org/monado/monado/-/merge_requests/2310>
galister 2024-08-25 18:23:28 +09:00
parent 6e4a3a4759
commit 8b93ab6c72
7 changed files with 22 additions and 6 deletions


@@ -348,12 +348,15 @@ struct xrt_device
* @param[in] xdev The device.
* @param[in] facial_expression_type The facial expression data type (XR_FB_face_tracking,
* XR_HTC_facial_tracking, etc).
+* @param[in] at_timestamp_ns Timestamp to be optionally used for prediction/history. For OXR extensions
+* that do not pass a timestamp, the current timestamp is used.
* @param[in] out_value Set of requested expression weights & blend shape properties.
*
* @see xrt_input_name
*/
xrt_result_t (*get_face_tracking)(struct xrt_device *xdev,
enum xrt_input_name facial_expression_type,
+int64_t at_timestamp_ns,
struct xrt_facial_expression_set *out_value);
/*!
@@ -580,9 +583,10 @@ xrt_device_get_hand_tracking(struct xrt_device *xdev,
static inline xrt_result_t
xrt_device_get_face_tracking(struct xrt_device *xdev,
enum xrt_input_name facial_expression_type,
+int64_t at_timestamp_ns,
struct xrt_facial_expression_set *out_value)
{
-return xdev->get_face_tracking(xdev, facial_expression_type, out_value);
+return xdev->get_face_tracking(xdev, facial_expression_type, at_timestamp_ns, out_value);
}
/*!
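
For illustration, a minimal driver-side sketch of what an implementation of this updated vtable entry could look like. Only the signature and the xrt_* types/enums come from this commit; struct example_device and the body are hypothetical.

#include "xrt/xrt_device.h"

// Hypothetical driver type; a real driver would keep timestamped face samples here.
struct example_device
{
	struct xrt_device base;
};

static xrt_result_t
example_get_face_tracking(struct xrt_device *xdev,
                          enum xrt_input_name facial_expression_type,
                          int64_t at_timestamp_ns,
                          struct xrt_facial_expression_set *out_value)
{
	struct example_device *ed = (struct example_device *)xdev;
	(void)ed;

	if (facial_expression_type == XRT_INPUT_FB_FACE_TRACKING2_VISUAL) {
		// A real implementation would look up (or predict) the expression
		// weights closest to at_timestamp_ns instead of always returning
		// its newest sample.
		out_value->face_expression_set2_fb.sample_time_ns = at_timestamp_ns;
	}

	return XRT_SUCCESS;
}

// Wired up at device creation, e.g.: ed->base.get_face_tracking = example_get_face_tracking;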


@@ -121,6 +121,7 @@ ipc_client_device_get_hand_tracking(struct xrt_device *xdev,
static xrt_result_t
ipc_client_device_get_face_tracking(struct xrt_device *xdev,
enum xrt_input_name facial_expression_type,
+int64_t at_timestamp_ns,
struct xrt_facial_expression_set *out_value)
{
ipc_client_device_t *icd = ipc_client_device(xdev);
@@ -129,6 +130,7 @@ ipc_client_device_get_face_tracking(struct xrt_device *xdev,
icd->ipc_c, //
icd->device_id, //
facial_expression_type, //
+at_timestamp_ns, //
out_value); //
IPC_CHK_ALWAYS_RET(icd->ipc_c, xret, "ipc_call_device_get_face_tracking");
}


@@ -219,6 +219,7 @@ ipc_client_hmd_get_view_poses(struct xrt_device *xdev,
static xrt_result_t
ipc_client_hmd_get_face_tracking(struct xrt_device *xdev,
enum xrt_input_name facial_expression_type,
+int64_t at_timestamp_ns,
struct xrt_facial_expression_set *out_value)
{
ipc_client_hmd_t *icd = ipc_client_hmd(xdev);
@@ -227,6 +228,7 @@ ipc_client_hmd_get_face_tracking(struct xrt_device *xdev,
icd->ipc_c, //
icd->device_id, //
facial_expression_type, //
+at_timestamp_ns, //
out_value); //
IPC_CHK_ALWAYS_RET(icd->ipc_c, xret, "ipc_call_device_get_face_tracking");
}


@@ -2116,12 +2116,13 @@ xrt_result_t
ipc_handle_device_get_face_tracking(volatile struct ipc_client_state *ics,
uint32_t id,
enum xrt_input_name facial_expression_type,
+int64_t at_timestamp_ns,
struct xrt_facial_expression_set *out_value)
{
const uint32_t device_id = id;
struct xrt_device *xdev = get_xdev(ics, device_id);
// Get facial expression data.
-return xrt_device_get_face_tracking(xdev, facial_expression_type, out_value);
+return xrt_device_get_face_tracking(xdev, facial_expression_type, at_timestamp_ns, out_value);
}
xrt_result_t


@@ -486,7 +486,8 @@
"device_get_face_tracking": {
"in": [
{"name": "id", "type": "uint32_t"},
{"name": "facial_expression_type", "type": "enum xrt_input_name"}
{"name": "facial_expression_type", "type": "enum xrt_input_name"},
{"name": "at_timestamp_ns", "type": "int64_t"}
],
"out": [
{"name": "value", "type": "struct xrt_facial_expression_set"}


@@ -100,7 +100,10 @@ oxr_get_facial_expressions_htc(struct oxr_logger *log,
const enum xrt_input_name ft_input_name =
oxr_facial_tracking_type_htc_to_input_name(facial_tracker_htc->facial_tracking_type);
-xrt_device_get_face_tracking(facial_tracker_htc->xdev, ft_input_name, &facial_expression_set_result);
+int64_t at_timestamp_ns = os_monotonic_get_ns();
+xrt_device_get_face_tracking(facial_tracker_htc->xdev, ft_input_name, at_timestamp_ns,
+&facial_expression_set_result);
facialExpressions->isActive = facial_expression_set_result.base_expression_set_htc.is_active;
if (facialExpressions->isActive == XR_FALSE)


@@ -115,11 +115,15 @@ oxr_get_face_expression_weights2_fb(struct oxr_logger *log,
}
struct xrt_facial_expression_set result = {0};
+const struct oxr_instance *inst = face_tracker2_fb->sess->sys->inst;
+int64_t at_timestamp_ns = time_state_ts_to_monotonic_ns(inst->timekeeping, expression_info->time);
// spec: visual is allowed to use both camera and audio
enum xrt_input_name ft_input_name =
face_tracker2_fb->visual_enabled ? XRT_INPUT_FB_FACE_TRACKING2_VISUAL : XRT_INPUT_FB_FACE_TRACKING2_AUDIO;
-xrt_result_t xres = xrt_device_get_face_tracking(face_tracker2_fb->xdev, ft_input_name, &result);
+xrt_result_t xres =
+xrt_device_get_face_tracking(face_tracker2_fb->xdev, ft_input_name, at_timestamp_ns, &result);
if (xres != XRT_SUCCESS) {
return XR_ERROR_RUNTIME_FAILURE;
}
@@ -132,7 +136,6 @@ oxr_get_face_expression_weights2_fb(struct oxr_logger *log,
expression_weights->isEyeFollowingBlendshapesValid =
result.face_expression_set2_fb.is_eye_following_blendshapes_valid;
-const struct oxr_instance *inst = face_tracker2_fb->sess->sys->inst;
expression_weights->time =
time_state_monotonic_to_ts_ns(inst->timekeeping, result.face_expression_set2_fb.sample_time_ns);
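
Taken together, the two OXR call sites follow the policy described by the new doc comment: the FB2 path converts the XrTime supplied by the application, while the HTC path, whose extension carries no input time, falls back to the current monotonic time. A condensed, hypothetical helper capturing that pattern (the function name and the zero check are illustrative, not part of this commit; it assumes the usual oxr/OpenXR includes of the surrounding files):

#include "os/os_time.h"   // os_monotonic_get_ns()
#include "util/u_time.h"  // time_state_ts_to_monotonic_ns()

// Resolve the timestamp to hand to xrt_device_get_face_tracking():
// convert the app-supplied XrTime when there is one, otherwise use "now".
static int64_t
example_resolve_face_timestamp(const struct oxr_instance *inst, XrTime time)
{
	if (time != 0) {
		return time_state_ts_to_monotonic_ns(inst->timekeeping, time);
	}
	return os_monotonic_get_ns();
}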