d/wmr: Refactor how precomputed transforms work
Hopefully this is an improvement and not just a rewrite. Some of the ideas were:

1. Keep poses in WMR space as much as possible.
2. Add an explicit function that precomputes a handful of transforms for converting between WMR and OpenXR coordinates.
3. Bring variable naming in line with the relatively common `T_A_B` notation.
4. I wasn't able to figure out why `wmr_config_compute_pose` worked before; in any case, the new notation convention should help explain why `P_oxr_{acc,gyr}` work.
parent 326402da2a
commit 008ba2433b
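As a reading aid for ideas 1 and 2 above (a sketch only, not part of the patch; the pose values are placeholders for what the driver actually reads from the headset config): everything is chained inside WMR space first, and the WMR-to-OpenXR change of coordinates is applied exactly once at the end, mirroring what `precompute_sensor_transforms` does further down.

```c
#include "math/m_api.h"      // math_pose_transform()
#include "xrt/xrt_defines.h" // struct xrt_pose, XRT_POSE_IDENTITY, XRT_VEC3_ZERO

static void
wmr_space_chaining_sketch(void)
{
	// 180° rotation about +X; relates the WMR and OpenXR axis conventions.
	struct xrt_pose P_oxr_wmr = {{.x = 1.0, .y = 0.0, .z = 0.0, .w = 0.0}, XRT_VEC3_ZERO};

	// Placeholders: in the driver these come from the parsed headset config.
	struct xrt_pose P_me_ht0 = XRT_POSE_IDENTITY;  // relates eye midpoint and HT0 (WMR space)
	struct xrt_pose P_ht0_acc = XRT_POSE_IDENTITY; // relates HT0 and the accelerometer (WMR space)

	struct xrt_pose P_me_acc = {0};
	struct xrt_pose P_oxr_acc = {0};

	math_pose_transform(&P_me_ht0, &P_ht0_acc, &P_me_acc);  // chain entirely in WMR space
	math_pose_transform(&P_oxr_wmr, &P_me_acc, &P_oxr_acc); // convert to OpenXR once, at the end
	(void)P_oxr_acc;
}
```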
@@ -111,6 +111,14 @@ directed-acyclic-graph.
 - Constants/constexpr values: `kCamelCase`
 - If a header is only usable from C++ code, it should be named with the
   extension `.hpp` to signify this.
+- Math:
+  - For different types of transforms `T` between two entities `A` and `B`, try
+    to use variable names like `T_A_B` to express the transform such that `B =
+    T_A_B * A`. This is equivalent to "`B` expressed w.r.t. `A`" and "the
+    transform that converts a point in `B` coordinates into `A` coordinates".
+    `T` can be used for 4x4 isometry matrices, but you can use others like
+    `P` for poses, `R` for 3x3 rotations, `Q` for quaternion rotations, `t` for
+    translations, etc.
 
 ## Patterns and Idioms
 
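A minimal sketch of the new convention in practice (not part of the patch; the frames `a`, `b`, `c` and the identity values are made up, and the helpers are the Monado math functions already used elsewhere in this diff): inverting a pose flips the two frame indices, and chaining two poses cancels the shared middle index, which is exactly how `precompute_sensor_transforms` below builds `P_me_acc` from `P_me_ht0` and `P_ht0_acc`.

```c
#include "math/m_api.h"      // math_pose_invert(), math_pose_transform()
#include "xrt/xrt_defines.h" // struct xrt_pose, XRT_POSE_IDENTITY

static void
naming_convention_sketch(void)
{
	// Hypothetical frames "a", "b" and "c"; identity poses are placeholders.
	struct xrt_pose P_a_b = XRT_POSE_IDENTITY;
	struct xrt_pose P_b_c = XRT_POSE_IDENTITY;
	struct xrt_pose P_b_a = {0};
	struct xrt_pose P_a_c = {0};

	math_pose_invert(&P_a_b, &P_b_a);            // inversion flips the indices: P_a_b -> P_b_a
	math_pose_transform(&P_a_b, &P_b_c, &P_a_c); // chaining cancels the shared "b": P_a_c
	(void)P_b_a;
	(void)P_a_c;
}
```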
@@ -46,22 +46,19 @@ wmr_hmd_config_init_defaults(struct wmr_hmd_config *c)
 	math_matrix_3x3_identity(&c->sensors.mag.mix_matrix);
 }
 
-static void
-wmr_config_compute_pose(struct xrt_pose *out_pose, const struct xrt_vec3 *tx, const struct xrt_matrix_3x3 *rx)
+static struct xrt_pose
+pose_from_rt(const struct xrt_matrix_3x3 rotation_rm, const struct xrt_vec3 translation)
 {
-	// Adjust the coordinate system / conventions of the raw Tx and Rx config to yield a usable xrt_pose
-	// The config stores a 3x3 rotation matrix and a vec3 translation.
-	// Translation is applied after rotation, and the coordinate system is flipped in YZ.
+	struct xrt_matrix_3x3 rotation_cm;
+	math_matrix_3x3_transpose(&rotation_rm, &rotation_cm);
 
-	struct xrt_matrix_3x3 coordsys = {.v = {1.0, 0.0, 0.0, 0.0, -1.0, 0.0, 0.0, 0.0, -1.0}};
+	struct xrt_matrix_4x4 mat = {0};
+	math_matrix_4x4_isometry_from_rt(&rotation_cm, &translation, &mat);
 
-	struct xrt_matrix_3x3 rx_adj;
-	math_matrix_3x3_multiply(&coordsys, rx, &rx_adj);
-	math_quat_from_matrix_3x3(&rx_adj, &out_pose->orientation);
+	struct xrt_pose pose;
+	math_pose_from_isometry(&mat, &pose);
 
-	struct xrt_vec3 v;
-	math_matrix_3x3_transform_vec3(&coordsys, tx, &v);
-	math_matrix_3x3_transform_vec3(&rx_adj, &v, &out_pose->position);
+	return pose;
 }
 
 static bool
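A small usage sketch (not in the patch; the rotation and translation values are made up, and such a function would have to live inside `wmr_config.c`, where `pose_from_rt` is static and the needed headers are already included): the WMR JSON config stores the 3x3 rotation row by row, and `pose_from_rt` transposes it internally before building the isometry, so the parser can pass the raw values straight through.

```c
static void
pose_from_rt_sketch(void)
{
	// Hypothetical 90° rotation about +Z, written row-major as the parsed
	// JSON stores it, plus a made-up 1 cm translation along X.
	struct xrt_matrix_3x3 rotation = {.v = {
	    0.0f, -1.0f, 0.0f, //
	    1.0f, 0.0f, 0.0f,  //
	    0.0f, 0.0f, 1.0f,  //
	}};
	struct xrt_vec3 translation = {.x = 0.01f, .y = 0.0f, .z = 0.0f};

	// The raw fields are also kept on the config structs (see the header
	// changes below), so the pose can be recomputed or cross-checked later.
	struct xrt_pose pose = pose_from_rt(rotation, translation);
	(void)pose;
}
```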
@@ -120,7 +117,9 @@ wmr_config_parse_display(struct wmr_hmd_config *c, cJSON *display, enum u_loggin
 	if (u_json_get_float_array(rx, rotation.v, 9) != 9)
 		return false;
 
-	wmr_config_compute_pose(&eye->pose, &translation, &rotation);
+	eye->pose = pose_from_rt(rotation, translation);
+	eye->translation = translation;
+	eye->rotation = rotation;
 
 	/* Parse color distortion channels */
 	const char *channel_names[] = {"DistortionRed", "DistortionGreen", "DistortionBlue"};
@@ -192,7 +191,7 @@ wmr_inertial_sensor_config_parse(struct wmr_inertial_sensor_config *c, cJSON *se
 		return false;
 	}
 
-	wmr_config_compute_pose(&c->pose, &translation, &rotation);
+	c->pose = pose_from_rt(rotation, translation);
 	c->translation = translation;
 	c->rotation = rotation;
 
@@ -339,7 +338,7 @@ wmr_config_parse_camera_config(struct wmr_hmd_config *c, cJSON *camera, enum u_l
 		return false;
 	}
 
-	wmr_config_compute_pose(&cam_config->pose, &translation, &rotation);
+	cam_config->pose = pose_from_rt(rotation, translation);
 	cam_config->translation = translation;
 	cam_config->rotation = rotation;
 
@@ -85,8 +85,11 @@ struct wmr_distortion_eye_config
 {
 	/* 3x3 camera matrix that moves from normalised camera coords (X/Z & Y/Z) to undistorted pixels */
 	struct xrt_matrix_3x3 affine_xform;
-	/* Eye pose in world space */
-	struct xrt_pose pose;
+
+	struct xrt_vec3 translation;    //!< Raw translation (to HT0)
+	struct xrt_matrix_3x3 rotation; //!< Raw rotation (to HT0), row major
+	struct xrt_pose pose;           //!< Pose from `translation` and `rotation`
 
 	/* Radius of the (undistorted) visible area from the center (pixels) (I think) */
 	double visible_radius;
 
@@ -108,7 +111,7 @@ struct wmr_camera_config
 
 	struct xrt_vec3 translation;    //!< Raw translation (to HT0)
 	struct xrt_matrix_3x3 rotation; //!< Raw rotation (to HT0), row major
-	struct xrt_pose pose; //!< Corrected pose from translation and rotation fields
+	struct xrt_pose pose; //!< Pose from `translation` and `rotation`
 
 	struct wmr_distortion_6KT distortion6KT;
 };
@@ -118,7 +121,7 @@ struct wmr_inertial_sensor_config
 {
 	struct xrt_vec3 translation;    //!< Raw translation (to HT0). Usually non-zero only on accelerometers.
 	struct xrt_matrix_3x3 rotation; //!< Raw rotation (to HT0), row major
-	struct xrt_pose pose; //!< Corrected pose from translation and rotation fields
+	struct xrt_pose pose; //!< Pose from `translation` and `rotation`
 
 	/* Current bias and mix matrix extracted from
 	 * the configuration, which provides coefficients
@@ -22,10 +22,10 @@
 #include "os/os_time.h"
 #include "os/os_hid.h"
 
-#include "math/m_mathinclude.h"
 #include "math/m_api.h"
-#include "math/m_vec2.h"
+#include "math/m_mathinclude.h"
 #include "math/m_predict.h"
+#include "math/m_vec2.h"
 
 #include "util/u_var.h"
 #include "util/u_misc.h"
@@ -323,14 +323,14 @@ hololens_handle_sensors(struct wmr_hmd *wh, const unsigned char *buffer, int siz
 		vec3_from_hololens_gyro(wh->packet.gyro, i, rg);
 		math_matrix_3x3_transform_vec3(&wh->config.sensors.gyro.mix_matrix, rg, cg);
 		math_vec3_accum(&wh->config.sensors.gyro.bias_offsets, cg);
-		math_quat_rotate_vec3(&wh->gyro_to_centerline.orientation, cg, cg);
+		math_quat_rotate_vec3(&wh->P_oxr_gyr.orientation, cg, cg);
 
 		struct xrt_vec3 *ra = &raw_accel[i];
 		struct xrt_vec3 *ca = &calib_accel[i];
 		vec3_from_hololens_accel(wh->packet.accel, i, ra);
 		math_matrix_3x3_transform_vec3(&wh->config.sensors.accel.mix_matrix, ra, ca);
 		math_vec3_accum(&wh->config.sensors.accel.bias_offsets, ca);
-		math_quat_rotate_vec3(&wh->accel_to_centerline.orientation, ca, ca);
+		math_quat_rotate_vec3(&wh->P_oxr_acc.orientation, ca, ca);
 	}
 
 	// Fusion tracking
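A side note on the two replaced `math_quat_rotate_vec3` calls (sketch only, not part of the patch): gyro and accel samples are free vectors, so only the orientation part of `P_oxr_gyr` / `P_oxr_acc` is applied to them; the position part of those poses is not needed for a direction-like quantity. With `wh` being the driver instance and the sample value made up, the conversion looks like this:

```c
// Hypothetical accelerometer sample in sensor coordinates (roughly gravity).
struct xrt_vec3 sample = {.x = 0.0f, .y = 9.81f, .z = 0.0f};

// Rotating by the precomputed orientation expresses the sample in OpenXR
// coordinates; the translation stored in P_oxr_acc plays no role here.
math_quat_rotate_vec3(&wh->P_oxr_acc.orientation, &sample, &sample);
```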
@@ -1600,6 +1600,59 @@ wmr_hmd_setup_trackers(struct wmr_hmd *wh, struct xrt_slam_sinks *out_sinks, str
 	return true;
 }
 
+/*!
+ * Precompute transforms to convert between OpenXR and WMR coordinate systems.
+ *
+ * OpenXR: X: Right, Y: Up, Z: Backward
+ * WMR: X: Right, Y: Down, Z: Forward
+ * ┌────────────────────┐
+ * │  OXR        WMR    │
+ * │                    │
+ * │ ▲ y                │
+ * │ │          ▲ z     │
+ * │ │    x     │    x  │
+ * │ ├──────►   ├──────►│
+ * │ │          │       │
+ * │ ▼ z        │       │
+ * │            ▼ y     │
+ * └────────────────────┘
+ */
+static void
+precompute_sensor_transforms(struct wmr_hmd *wh)
+{
+	// P_A_B is such that B = P_A_B * A. See conventions.md
+	struct xrt_pose P_oxr_wmr = {{.x = 1.0, .y = 0.0, .z = 0.0, .w = 0.0}, XRT_VEC3_ZERO};
+	struct xrt_pose P_wmr_oxr = {0};
+	struct xrt_pose P_acc_ht0 = wh->config.sensors.accel.pose;
+	struct xrt_pose P_gyr_ht0 = wh->config.sensors.gyro.pose;
+	struct xrt_pose P_ht0_acc = {0};
+	struct xrt_pose P_ht0_gyr = {0};
+	struct xrt_pose P_me_ht0 = {0}; // "me" == "middle of the eyes"
+	struct xrt_pose P_me_acc = {0};
+	struct xrt_pose P_me_gyr = {0};
+	struct xrt_pose P_ht0_me = {0};
+	struct xrt_pose P_acc_me = {0};
+
+	// All of the observed headsets have reported a zero translation for its gyro
+	assert(m_vec3_equal_exact(P_gyr_ht0.position, (struct xrt_vec3){0, 0, 0}));
+
+	// Initialize transforms
+
+	// All of these are in WMR coordinates.
+	math_pose_invert(&P_oxr_wmr, &P_wmr_oxr); // P_wmr_oxr == P_oxr_wmr
+	math_pose_invert(&P_acc_ht0, &P_ht0_acc);
+	math_pose_invert(&P_gyr_ht0, &P_ht0_gyr);
+	math_pose_interpolate(&wh->config.eye_params[0].pose, &wh->config.eye_params[1].pose, 0.5, &P_me_ht0);
+	math_pose_transform(&P_me_ht0, &P_ht0_acc, &P_me_acc);
+	math_pose_transform(&P_me_ht0, &P_ht0_gyr, &P_me_gyr);
+	math_pose_invert(&P_me_ht0, &P_ht0_me);
+	math_pose_invert(&P_me_acc, &P_acc_me);
+
+	// Save transforms
+	math_pose_transform(&P_oxr_wmr, &P_me_acc, &wh->P_oxr_acc);
+	math_pose_transform(&P_oxr_wmr, &P_me_gyr, &wh->P_oxr_gyr);
+}
+
 void
 wmr_hmd_create(enum wmr_headset_type hmd_type,
                struct os_hid_device *hid_holo,
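A quick way to read the `P_oxr_wmr` constant above (sketch, not part of the patch): a unit quaternion with `x = 1, w = 0` is a 180° rotation about +X, which negates Y and Z, and that is exactly the flip between WMR's X-right/Y-down/Z-forward axes and OpenXR's X-right/Y-up/Z-backward axes shown in the comment block.

```c
#include "math/m_api.h"      // math_quat_rotate_vec3()
#include "xrt/xrt_defines.h" // struct xrt_quat, struct xrt_vec3

static void
wmr_to_oxr_axis_sketch(void)
{
	struct xrt_quat Q_oxr_wmr = {.x = 1.0f, .y = 0.0f, .z = 0.0f, .w = 0.0f}; // 180° about +X
	struct xrt_vec3 v_wmr = {.x = 1.0f, .y = 2.0f, .z = 3.0f}; // arbitrary WMR-space vector
	struct xrt_vec3 v_oxr = {0};

	// Y and Z change sign: expect approximately {1, -2, -3}.
	math_quat_rotate_vec3(&Q_oxr_wmr, &v_wmr, &v_oxr);
	(void)v_oxr;
}
```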
@@ -1689,43 +1742,11 @@ wmr_hmd_create(enum wmr_headset_type hmd_type,
 
 	WMR_INFO(wh, "Found WMR headset type: %s", wh->hmd_desc->debug_name);
 
-	// Compute centerline in the HMD's calibration coordinate space as the average of the two display poses,
-	// then rotate around the X axis to convert coordinate system from WMR (X right, Y down, Z away)
-	// to OpenXR (X right, Y up, Z towards)
-	math_quat_slerp(&wh->config.eye_params[0].pose.orientation, &wh->config.eye_params[1].pose.orientation, 0.5f,
-	                &wh->centerline.orientation);
-	wh->centerline.position.x =
-	    (wh->config.eye_params[0].pose.position.x + wh->config.eye_params[1].pose.position.x) * 0.5f;
-	wh->centerline.position.y =
-	    (wh->config.eye_params[0].pose.position.y + wh->config.eye_params[1].pose.position.y) * 0.5f;
-	wh->centerline.position.z =
-	    (wh->config.eye_params[0].pose.position.z + wh->config.eye_params[1].pose.position.z) * 0.5f;
-
-	struct xrt_pose wmr_to_openxr_xform = {
-	    .position = {0.0, 0.0, 0.0},
-	    .orientation = {.x = 1.0, .y = 0.0, .z = 0.0, .w = 0.0},
-	};
-
-	math_pose_transform(&wmr_to_openxr_xform, &wh->centerline, &wh->centerline);
-
-	// Compute display and sensor offsets relative to the centerline
-	for (int dIdx = 0; dIdx < 2; ++dIdx) {
-		math_pose_invert(&wh->config.eye_params[dIdx].pose, &wh->display_to_centerline[dIdx]);
-		math_pose_transform(&wh->centerline, &wh->display_to_centerline[dIdx],
-		                    &wh->display_to_centerline[dIdx]);
-	}
-	math_pose_invert(&wh->config.sensors.accel.pose, &wh->accel_to_centerline);
-	math_pose_transform(&wh->centerline, &wh->accel_to_centerline, &wh->accel_to_centerline);
-	math_pose_invert(&wh->config.sensors.gyro.pose, &wh->gyro_to_centerline);
-	math_pose_transform(&wh->centerline, &wh->gyro_to_centerline, &wh->gyro_to_centerline);
-	math_pose_invert(&wh->config.sensors.mag.pose, &wh->mag_to_centerline);
-	math_pose_transform(&wh->centerline, &wh->mag_to_centerline, &wh->mag_to_centerline);
+	precompute_sensor_transforms(wh);
 
 	struct u_extents_2d exts;
 
 	exts.w_pixels = (uint32_t)wh->config.eye_params[0].display_size.x;
 	exts.h_pixels = (uint32_t)wh->config.eye_params[0].display_size.y;
 
 	u_extents_2d_split_side_by_side(&wh->base, &exts);
 
 	// Fill in blend mode - just opqaue, unless we get Hololens support one day.
@@ -116,12 +116,9 @@ struct wmr_hmd
 	//! Distortion related parameters
 	struct wmr_hmd_distortion_params distortion_params[2];
 
-	// Config-derived poses
-	struct xrt_pose centerline;
-	struct xrt_pose display_to_centerline[2];
-	struct xrt_pose accel_to_centerline;
-	struct xrt_pose gyro_to_centerline;
-	struct xrt_pose mag_to_centerline;
+	//! Precomputed transforms, @see precompute_sensor_transforms.
+	struct xrt_pose P_oxr_acc; //!< Converts accel samples into OpenXR coordinates
+	struct xrt_pose P_oxr_gyr; //!< Converts gyro samples into OpenXR coordinates
 
 	struct hololens_sensors_packet packet;
 