Mirror of https://gitlab.freedesktop.org/monado/monado.git (synced 2024-12-29 11:06:18 +00:00)
d/wmr: Support head tracking with more than two cameras
commit 3ecee6d0ca
parent 629c4b67f1
@@ -16,6 +16,7 @@
 #include <assert.h>
 #include <inttypes.h>
 
+#include "math/m_api.h"
 #include "os/os_threading.h"
 #include "util/u_autoexpgain.h"
 #include "util/u_debug.h"
@@ -88,8 +89,9 @@ struct wmr_camera
    struct os_thread_helper usb_thread;
    int usb_complete;
 
-   const struct wmr_camera_config *configs;
-   int config_count;
+   struct wmr_camera_config tcam_confs[WMR_MAX_CAMERAS]; //!< Configs for tracking cameras
+   int tcam_count;     //!< Number of tracking cameras
+   int slam_cam_count; //!< Number of tracking cameras used for SLAM
 
    size_t xfer_size;
    uint32_t frame_width, frame_height;
@@ -113,8 +115,7 @@ struct wmr_camera
 
    struct u_sink_debug debug_sinks[2];
 
-   struct xrt_frame_sink *left_sink;  //!< Downstream sinks to push left tracking frames to
-   struct xrt_frame_sink *right_sink; //!< Downstream sinks to push right tracking frames to
+   struct xrt_frame_sink *cam_sinks[WMR_MAX_CAMERAS]; //!< Downstream sinks to push tracking frames to
 
    enum u_logging_level log_level;
 };
@@ -166,11 +167,8 @@ compute_frame_size(struct wmr_camera *cam)
 
    F = 26;
 
-   for (i = 0; i < cam->config_count; i++) {
-       const struct wmr_camera_config *config = &cam->configs[i];
-       if (config->purpose != WMR_CAMERA_PURPOSE_HEAD_TRACKING) {
-           continue;
-       }
+   for (i = 0; i < cam->tcam_count; i++) {
+       const struct wmr_camera_config *config = &cam->tcam_confs[i];
 
        WMR_CAM_DEBUG(cam, "Found head tracking camera index %d width %d height %d", i, config->roi.extent.w,
                      config->roi.extent.h);
@@ -373,24 +371,20 @@ img_xfer_cb(struct libusb_transfer *xfer)
    bool tracking_frame = sink_index == 0;
    if (tracking_frame) {
        // Tracking frames usually come at ~30fps
-       struct xrt_frame *xf_left = NULL;
-       struct xrt_frame *xf_right = NULL;
-       u_frame_create_roi(xf, cam->configs[0].roi, &xf_left);
-       u_frame_create_roi(xf, cam->configs[1].roi, &xf_right);
+       struct xrt_frame *frames[WMR_MAX_CAMERAS] = {NULL};
+       for (int i = 0; i < cam->slam_cam_count; i++) {
+           u_frame_create_roi(xf, cam->tcam_confs[i].roi, &frames[i]);
+       }
 
-       struct xrt_frame *frames[WMR_MAX_CAMERAS] = {xf_left, xf_right, NULL, NULL};
        update_expgain(cam, frames);
 
-       if (cam->left_sink != NULL) {
-           xrt_sink_push_frame(cam->left_sink, xf_left);
+       for (int i = 0; i < cam->slam_cam_count; i++) {
+           xrt_sink_push_frame(cam->cam_sinks[i], frames[i]);
        }
 
-       if (cam->right_sink != NULL) {
-           xrt_sink_push_frame(cam->right_sink, xf_right);
+       for (int i = 0; i < cam->slam_cam_count; i++) {
+           xrt_frame_reference(&frames[i], NULL);
        }
-
-       xrt_frame_reference(&xf_left, NULL);
-       xrt_frame_reference(&xf_right, NULL);
    }
 
    xrt_frame_reference(&xf, NULL);
@@ -407,11 +401,7 @@ out:
 */
 
 struct wmr_camera *
-wmr_camera_open(struct xrt_prober_device *dev_holo,
-                struct xrt_frame_sink *left_sink,
-                struct xrt_frame_sink *right_sink,
-                int camera_count,
-                enum u_logging_level log_level)
+wmr_camera_open(struct wmr_camera_open_config *config)
 {
    DRV_TRACE_MARKER();
 
@@ -419,7 +409,14 @@ wmr_camera_open(struct xrt_prober_device *dev_holo,
    int res;
    int i;
 
-   cam->log_level = log_level;
+   cam->tcam_count = config->tcam_count;
+   cam->slam_cam_count = config->slam_cam_count;
+   cam->log_level = config->log_level;
+
+   for (int i = 0; i < cam->tcam_count; i++) {
+       cam->tcam_confs[i] = *config->tcam_confs[i];
+       cam->cam_sinks[i] = config->tcam_sinks[i];
+   }
 
    if (os_thread_helper_init(&cam->usb_thread) != 0) {
        WMR_CAM_ERROR(cam, "Failed to initialise threading");
@@ -432,6 +429,7 @@ wmr_camera_open(struct xrt_prober_device *dev_holo,
        goto fail;
    }
 
+   struct xrt_prober_device *dev_holo = config->dev_holo;
    cam->dev = libusb_open_device_with_vid_pid(cam->ctx, dev_holo->vendor_id, dev_holo->product_id);
    if (cam->dev == NULL) {
        goto fail;
@@ -460,7 +458,7 @@ wmr_camera_open(struct xrt_prober_device *dev_holo,
    int frame_delay = 3; // WMR takes about three frames until the cmd changes the image
    cam->unify_expgains = debug_get_bool_option_wmr_unify_expgain();
 
-   for (int i = 0; i < camera_count; i++) {
+   for (int i = 0; i < cam->tcam_count; i++) {
        struct wmr_camera_expgain *ceg = &cam->ceg[i];
        ceg->manual_control = false;
        ceg->last_exposure = DEFAULT_EXPOSURE;
@@ -487,7 +485,7 @@ wmr_camera_open(struct xrt_prober_device *dev_holo,
    u_var_add_gui_header_begin(cam, NULL, "Exposure and gain control");
    u_var_add_bool(cam, &cam->unify_expgains, "Use same values");
 
-   for (int i = 0; i < camera_count; i++) {
+   for (int i = 0; i < cam->tcam_count; i++) {
        struct wmr_camera_expgain *ceg = &cam->ceg[i];
        char label[256] = {0};
 
@@ -511,9 +509,6 @@ wmr_camera_open(struct xrt_prober_device *dev_holo,
 
    u_var_add_gui_header_end(cam, NULL, "Auto exposure and gain control END");
 
-   cam->left_sink = left_sink;
-   cam->right_sink = right_sink;
-
    return cam;
 
 
@@ -563,14 +558,12 @@ wmr_camera_free(struct wmr_camera *cam)
 }
 
 bool
-wmr_camera_start(struct wmr_camera *cam, const struct wmr_camera_config *cam_configs, int config_count)
+wmr_camera_start(struct wmr_camera *cam)
 {
    DRV_TRACE_MARKER();
 
    int res = 0;
 
-   cam->configs = cam_configs;
-   cam->config_count = config_count;
    if (!compute_frame_size(cam)) {
        WMR_CAM_WARN(cam, "Invalid config or no head tracking cameras found");
        goto fail;
@@ -659,11 +652,8 @@ static int
 update_expgain(struct wmr_camera *cam, struct xrt_frame **frames)
 {
    int res = 0;
-   for (int i = 0; i < cam->config_count; i++) {
-       const struct wmr_camera_config *config = &cam->configs[i];
-       if (config->purpose != WMR_CAMERA_PURPOSE_HEAD_TRACKING) {
-           continue;
-       }
+   for (int i = 0; i < cam->tcam_count; i++) {
+       const struct wmr_camera_config *config = &cam->tcam_confs[i];
 
        struct wmr_camera_expgain *ceg = &cam->ceg[i];
 
@@ -9,10 +9,11 @@
 
 #pragma once
 
-#include "xrt/xrt_config_have.h"
+#include "util/u_debug.h"
 #include "util/u_logging.h"
-#include "xrt/xrt_prober.h"
+#include "xrt/xrt_config_have.h"
 #include "xrt/xrt_frame.h"
+#include "xrt/xrt_prober.h"
 
 #include "wmr_config.h"
 
@@ -22,23 +23,27 @@ extern "C" {
 
 struct wmr_camera;
 
+struct wmr_camera_open_config
+{
+   struct xrt_prober_device *dev_holo;
+   struct wmr_camera_config **tcam_confs; //!< Pointers to tracking cameras. Will be copied.
+   struct xrt_frame_sink **tcam_sinks;    //!< Sinks for tracking cameras
+   int tcam_count;                        //!< Tracking camera count
+   int slam_cam_count;                    //!< Number of tracking cameras to use for SLAM
+   enum u_logging_level log_level;
+};
+
 #ifdef XRT_HAVE_LIBUSB
 struct wmr_camera *
-wmr_camera_open(struct xrt_prober_device *dev_holo,
-                struct xrt_frame_sink *left_sink,
-                struct xrt_frame_sink *right_sink,
-                int camera_count,
-                enum u_logging_level log_level);
+wmr_camera_open(struct wmr_camera_open_config *config);
 void
 wmr_camera_free(struct wmr_camera *cam);
 
 /*!
  * Starts the cameras.
- *
- * The data pointed to by @p configs must be kept alive for as long as the camera is kept alive.
  */
 bool
-wmr_camera_start(struct wmr_camera *cam, const struct wmr_camera_config *configs, int config_count);
+wmr_camera_start(struct wmr_camera *cam);
 bool
 wmr_camera_stop(struct wmr_camera *cam);
 
@@ -56,9 +61,9 @@ wmr_camera_set_exposure_gain(struct wmr_camera *cam, uint8_t camera_id, uint16_t
 #else
 
 /* Stubs to disable camera functions without libusb */
-#define wmr_camera_open(dev_holo, left_sink, right_sink, camera_count, log_level) NULL
+#define wmr_camera_open(config) NULL
 #define wmr_camera_free(cam)
-#define wmr_camera_start(cam, cam_configs, n_configs) false
+#define wmr_camera_start(cam) false
 #define wmr_camera_stop(cam) false
 #define wmr_camera_set_exposure_gain(cam, camera_id, exposure, gain) -1
 
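The header above replaces the old five-argument wmr_camera_open() with a single config struct. A minimal sketch of how a caller is expected to drive the reworked API, mirroring what wmr_source.c does later in this commit; the helper name and its parameters are illustrative, not part of the diff:

static struct wmr_camera *
open_tracking_cameras(struct xrt_prober_device *dev_holo,
                      struct wmr_hmd_config *config,
                      struct xrt_frame_sink **sinks, // one sink per tracking camera
                      enum u_logging_level log_level)
{
    struct wmr_camera_open_config options = {
        .dev_holo = dev_holo,
        .tcam_confs = config->tcams, // copied by wmr_camera_open()
        .tcam_sinks = sinks,
        .tcam_count = config->tcam_count,
        .slam_cam_count = config->slam_cam_count,
        .log_level = log_level,
    };

    struct wmr_camera *cam = wmr_camera_open(&options);
    if (cam != NULL && !wmr_camera_start(cam)) { // start() no longer takes the configs
        wmr_camera_free(cam);
        cam = NULL;
    }
    return cam;
}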
@@ -10,6 +10,7 @@
 #include <string.h>
 #include "math/m_api.h"
 
+#include "util/u_debug.h"
 #include "util/u_misc.h"
 #include "util/u_json.h"
 
@@ -28,6 +29,9 @@
 #define JSON_MATRIX_3X3(a, b, c) u_json_get_matrix_3x3(u_json_get(a, b), c)
 #define JSON_STRING(a, b, c) u_json_get_string_into_array(u_json_get(a, b), c, sizeof(c))
 
+//! Specifies the maximum number of cameras to use for SLAM tracking
+DEBUG_GET_ONCE_NUM_OPTION(wmr_max_slam_cams, "WMR_MAX_SLAM_CAMS", WMR_MAX_CAMERAS)
+
 static void
 wmr_hmd_config_init_defaults(struct wmr_hmd_config *c)
 {
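As a worked example of the new option (assuming a four-camera headset such as the Reverb G2v2, and that all of its calibration blocks are head-tracking cameras): running with WMR_MAX_SLAM_CAMS=2 leaves the parser with cam_count = 4 and tcam_count = 4, but slam_cam_count = MIN(4, 2) = 2, so all four cameras are still opened, shown in the debug UI and auto-exposed, while only the first two are pushed to the SLAM tracker.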
@@ -274,18 +278,18 @@ wmr_inertial_sensors_config_parse(struct wmr_inertial_sensors_config *c, cJSON *
 static bool
 wmr_config_parse_camera_config(struct wmr_hmd_config *c, cJSON *camera, enum u_logging_level log_level)
 {
-   if (c->n_cameras == WMR_MAX_CAMERAS) {
+   if (c->cam_count == WMR_MAX_CAMERAS) {
        WMR_ERROR(log_level, "Too many camera entries. Enlarge WMR_MAX_CAMERAS");
        return false;
    }
 
-   struct wmr_camera_config *cam_config = c->cameras + c->n_cameras;
+   struct wmr_camera_config *cam_config = c->cams + c->cam_count;
 
    /* Camera purpose */
    cJSON *json_purpose = cJSON_GetObjectItem(camera, "Purpose");
    char *json_purpose_name = cJSON_GetStringValue(json_purpose);
    if (json_purpose_name == NULL) {
-       WMR_ERROR(log_level, "Invalid camera calibration block %d - camera purpose not found", c->n_cameras);
+       WMR_ERROR(log_level, "Invalid camera calibration block %d - camera purpose not found", c->cam_count);
        return false;
    }
 
@@ -294,14 +298,14 @@ wmr_config_parse_camera_config(struct wmr_hmd_config *c, cJSON *camera, enum u_l
    } else if (!strcmp(json_purpose_name, "CALIBRATION_CameraPurposeDisplayObserver")) {
        cam_config->purpose = WMR_CAMERA_PURPOSE_DISPLAY_OBSERVER;
    } else {
-       WMR_ERROR(log_level, "Unknown camera purpose: \"%s\" (camera %d)", json_purpose_name, c->n_cameras);
+       WMR_ERROR(log_level, "Unknown camera purpose: \"%s\" (camera %d)", json_purpose_name, c->cam_count);
        return false;
    }
 
    cJSON *json_location = cJSON_GetObjectItem(camera, "Location");
    char *json_location_name = cJSON_GetStringValue(json_location);
    if (json_location_name == NULL) {
-       WMR_ERROR(log_level, "Invalid camera calibration block %d - location", c->n_cameras);
+       WMR_ERROR(log_level, "Invalid camera calibration block %d - location", c->cam_count);
        return false;
    }
 
@@ -318,7 +322,7 @@ wmr_config_parse_camera_config(struct wmr_hmd_config *c, cJSON *camera, enum u_l
    } else if (!strcmp(json_location_name, "CALIBRATION_CameraLocationDO1")) {
        cam_config->location = WMR_CAMERA_LOCATION_DO1;
    } else {
-       WMR_ERROR(log_level, "Unknown camera location: \"%s\" (camera %d)", json_location_name, c->n_cameras);
+       WMR_ERROR(log_level, "Unknown camera location: \"%s\" (camera %d)", json_location_name, c->cam_count);
        return false;
    }
 
@@ -329,12 +333,12 @@ wmr_config_parse_camera_config(struct wmr_hmd_config *c, cJSON *camera, enum u_l
    cJSON *rt = cJSON_GetObjectItem(camera, "Rt");
    cJSON *rx = cJSON_GetObjectItem(rt, "Rotation");
    if (rt == NULL || rx == NULL) {
-       WMR_ERROR(log_level, "Invalid camera calibration block %d - pose", c->n_cameras);
+       WMR_ERROR(log_level, "Invalid camera calibration block %d - pose", c->cam_count);
        return false;
    }
 
    if (!JSON_VEC3(rt, "Translation", &translation) || u_json_get_float_array(rx, rotation.v, 9) != 9) {
-       WMR_ERROR(log_level, "Invalid camera calibration block %d - pose", c->n_cameras);
+       WMR_ERROR(log_level, "Invalid camera calibration block %d - pose", c->cam_count);
        return false;
    }
 
@@ -344,28 +348,28 @@ wmr_config_parse_camera_config(struct wmr_hmd_config *c, cJSON *camera, enum u_l
 
    if (!JSON_INT(camera, "SensorWidth", &cam_config->roi.extent.w) ||
        !JSON_INT(camera, "SensorHeight", &cam_config->roi.extent.h)) {
-       WMR_ERROR(log_level, "Invalid camera calibration block %d - sensor size", c->n_cameras);
+       WMR_ERROR(log_level, "Invalid camera calibration block %d - sensor size", c->cam_count);
        return false;
    }
-   cam_config->roi.offset.w = c->n_ht_cameras * cam_config->roi.extent.w; // Assume all HT cams have same width
-   cam_config->roi.offset.h = 1; // Ignore first metadata row
+   cam_config->roi.offset.w = c->tcam_count * cam_config->roi.extent.w; // Assume all tracking cams have same width
+   cam_config->roi.offset.h = 1;                                        // Ignore first metadata row
 
    /* Distortion information */
    cJSON *dist = cJSON_GetObjectItemCaseSensitive(camera, "Intrinsics");
    if (!dist) {
-       WMR_ERROR(log_level, "Invalid camera calibration block %d - distortion", c->n_cameras);
+       WMR_ERROR(log_level, "Invalid camera calibration block %d - distortion", c->cam_count);
        return false;
    }
 
    const char *model_type = cJSON_GetStringValue(cJSON_GetObjectItemCaseSensitive(dist, "ModelType"));
    if (model_type == NULL) {
-       WMR_ERROR(log_level, "Invalid camera calibration block %d - missing distortion type", c->n_cameras);
+       WMR_ERROR(log_level, "Invalid camera calibration block %d - missing distortion type", c->cam_count);
        return false;
    }
 
    if (!strcmp(model_type, "CALIBRATION_LensDistortionModelRational6KT")) {
    } else {
-       WMR_ERROR(log_level, "Invalid camera calibration block %d - unknown distortion type %s", c->n_cameras,
+       WMR_ERROR(log_level, "Invalid camera calibration block %d - unknown distortion type %s", c->cam_count,
                  model_type);
        return false;
    }
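A small sketch (not part of the diff) of the frame layout the ROI math above produces, assuming the usual 640x480 tracking sensors: camera i is packed side by side at x = i * 640 and every ROI skips the metadata row at y = 0.

#include <stdio.h>

static void
print_assumed_roi_layout(int tcam_count)
{
    const int sensor_w = 640, sensor_h = 480; // assumed; really read from SensorWidth/SensorHeight
    for (int i = 0; i < tcam_count; i++) {
        // Same formula as above: offset.w is the total width of the tracking cameras parsed so far.
        printf("cam%d: %dx%d at (%d, 1)\n", i, sensor_w, sensor_h, i * sensor_w);
    }
    printf("combined frame: %dx%d\n", tcam_count * sensor_w, sensor_h + 1); // assumes one metadata row on top
}

For a four-camera headset this prints ROIs at x = 0, 640, 1280 and 1920 inside one 2560x481 frame.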
@@ -374,12 +378,12 @@ wmr_config_parse_camera_config(struct wmr_hmd_config *c, cJSON *camera, enum u_l
 
    int param_count;
    if (!JSON_INT(dist, "ModelParameterCount", &param_count)) {
-       WMR_ERROR(log_level, "Invalid camera calibration block %d - no ModelParameterCount", c->n_cameras);
+       WMR_ERROR(log_level, "Invalid camera calibration block %d - no ModelParameterCount", c->cam_count);
        return false;
    }
 
    if (param_count != 15) {
-       WMR_ERROR(log_level, "Invalid camera calibration block %d - wrong ModelParameterCount %d", c->n_cameras,
+       WMR_ERROR(log_level, "Invalid camera calibration block %d - wrong ModelParameterCount %d", c->cam_count,
                  param_count);
        return false;
    }
@@ -388,15 +392,16 @@ wmr_config_parse_camera_config(struct wmr_hmd_config *c, cJSON *camera, enum u_l
    if (params_json == NULL ||
        u_json_get_float_array(params_json, distortion6KT->v, param_count) != (size_t)param_count) {
        WMR_ERROR(log_level, "Invalid camera calibration block %d - missing distortion parameters",
-                 c->n_cameras);
+                 c->cam_count);
        return false;
    }
 
    if (cam_config->purpose == WMR_CAMERA_PURPOSE_HEAD_TRACKING) {
-       c->n_ht_cameras++;
+       c->tcams[c->tcam_count] = cam_config;
+       c->tcam_count++;
    }
 
-   c->n_cameras++;
+   c->cam_count++;
    return true;
 }
 
@@ -444,6 +449,7 @@ wmr_config_parse_calibration(struct wmr_hmd_config *c, cJSON *calib_info, enum u
        if (!wmr_config_parse_camera_config(c, item, log_level))
            return false;
    }
+   c->slam_cam_count = MIN(c->tcam_count, (int)debug_get_num_option_wmr_max_slam_cams());
 
    return true;
 }
@@ -158,9 +158,12 @@ struct wmr_hmd_config
 
    struct wmr_inertial_sensors_config sensors;
 
-   int n_cameras;
-   int n_ht_cameras;
-   struct wmr_camera_config cameras[WMR_MAX_CAMERAS];
+   int cam_count;
+   struct wmr_camera_config cams[WMR_MAX_CAMERAS];
+
+   struct wmr_camera_config *tcams[WMR_MAX_CAMERAS]; //!< Pointers to tracking cameras in `cameras`
+   int tcam_count;                                    //!< Number of tracking cameras
+   int slam_cam_count;                                //!< Number of tracking cameras to use for SLAM
 };
 
 bool
@@ -1293,7 +1293,7 @@ XRT_MAYBE_UNUSED static struct t_camera_calibration
 wmr_hmd_get_cam_calib(struct wmr_hmd *wh, int cam_index)
 {
    struct t_camera_calibration res;
-   struct wmr_camera_config *wcalib = &wh->config.cameras[cam_index];
+   struct wmr_camera_config *wcalib = wh->config.tcams[cam_index];
    struct wmr_distortion_6KT *intr = &wcalib->distortion6KT;
 
    res.image_size_pixels.h = wcalib->roi.extent.h;
@@ -1342,14 +1342,14 @@ wmr_hmd_create_stereo_camera_calib(struct wmr_hmd *wh)
 
 
    // Intrinsics
-   for (int i = 0; i < 2; i++) { // Assuming that cameras[0-1] are HT0 and HT1
+   for (int i = 0; i < 2; i++) {
        calib->view[i] = wmr_hmd_get_cam_calib(wh, i);
    }
 
    // Extrinsics
 
    // Compute transform from HT1 to HT0 (HT0 space into HT1 space)
-   struct wmr_camera_config *ht1 = &wh->config.cameras[1];
+   struct wmr_camera_config *ht1 = &wh->config.cams[1];
    calib->camera_translation[0] = ht1->translation.x;
    calib->camera_translation[1] = ht1->translation.y;
    calib->camera_translation[2] = ht1->translation.z;
@@ -1370,33 +1370,44 @@ wmr_hmd_create_stereo_camera_calib(struct wmr_hmd *wh)
 XRT_MAYBE_UNUSED static void
 wmr_hmd_fill_slam_cams_calibration(struct wmr_hmd *wh)
 {
-   struct xrt_pose P_imu_ht0 = wh->config.sensors.accel.pose;
-   struct xrt_pose P_ht1_ht0 = wh->config.cameras[1].pose;
-   struct xrt_pose P_ht0_ht1;
-   math_pose_invert(&P_ht1_ht0, &P_ht0_ht1);
-   struct xrt_pose P_imu_ht1;
-   math_pose_transform(&P_imu_ht0, &P_ht0_ht1, &P_imu_ht1);
+   wh->tracking.slam_calib.cam_count = wh->config.tcam_count;
 
-   struct xrt_matrix_4x4 T_imu_ht0;
-   struct xrt_matrix_4x4 T_imu_ht1;
-   math_matrix_4x4_isometry_from_pose(&P_imu_ht0, &T_imu_ht0);
-   math_matrix_4x4_isometry_from_pose(&P_imu_ht1, &T_imu_ht1);
-
-   struct t_slam_camera_calibration calib0 = {
+   // Fill camera 0
+   struct xrt_pose P_imu_c0 = wh->config.sensors.accel.pose;
+   struct xrt_matrix_4x4 T_imu_c0;
+   math_matrix_4x4_isometry_from_pose(&P_imu_c0, &T_imu_c0);
+   wh->tracking.slam_calib.cams[0] = (struct t_slam_camera_calibration){
        .base = wmr_hmd_get_cam_calib(wh, 0),
-       .T_imu_cam = T_imu_ht0,
+       .T_imu_cam = T_imu_c0,
        .frequency = CAMERA_FREQUENCY,
    };
 
-   struct t_slam_camera_calibration calib1 = {
-       .base = wmr_hmd_get_cam_calib(wh, 1),
-       .T_imu_cam = T_imu_ht1,
-       .frequency = CAMERA_FREQUENCY,
-   };
+   // Fill remaining cameras
+   for (int i = 1; i < wh->config.tcam_count; i++) {
+       struct xrt_pose P_ci_c0 = wh->config.tcams[i]->pose;
 
-   wh->tracking.slam_calib.cam_count = 2;
-   wh->tracking.slam_calib.cams[0] = calib0;
-   wh->tracking.slam_calib.cams[1] = calib1;
+       if (i == 2 || i == 3) {
+           //! @note The calibration json for the reverb G2v2 (the only 4-camera wmr
+           //! headset we know about) has the HT2 and HT3 extrinsics flipped compared
+           //! to the order the third and fourth camera images come from usb.
+           P_ci_c0 = wh->config.tcams[i == 2 ? 3 : 2]->pose;
+       }
+
+       struct xrt_pose P_c0_ci;
+       math_pose_invert(&P_ci_c0, &P_c0_ci);
+
+       struct xrt_pose P_imu_ci;
+       math_pose_transform(&P_imu_c0, &P_c0_ci, &P_imu_ci);
+
+       struct xrt_matrix_4x4 T_imu_ci;
+       math_matrix_4x4_isometry_from_pose(&P_imu_ci, &T_imu_ci);
+
+       wh->tracking.slam_calib.cams[i] = (struct t_slam_camera_calibration){
+           .base = wmr_hmd_get_cam_calib(wh, i),
+           .T_imu_cam = T_imu_ci,
+           .frequency = CAMERA_FREQUENCY,
+       };
+   }
 }
 
 XRT_MAYBE_UNUSED static struct t_imu_calibration
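Following the code's own P_a_b naming, the loop above derives each extra camera's IMU extrinsic from camera 0's: P_imu_ci = P_imu_c0 * P_c0_ci, where P_c0_ci is the inverse of the P_ci_c0 stored in the per-camera config (with the HT2/HT3 poses swapped first on the G2v2), and the result is converted to the 4x4 isometry the SLAM calibration expects.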
@@ -1483,7 +1494,8 @@ wmr_hmd_slam_track(struct wmr_hmd *wh)
 #ifdef XRT_FEATURE_SLAM
    struct t_slam_tracker_config config = {0};
    t_slam_fill_default_config(&config);
-   config.cam_count = 2;
+   config.cam_count = wh->config.slam_cam_count;
+   wh->tracking.slam_calib.cam_count = wh->config.slam_cam_count;
    config.slam_calib = &wh->tracking.slam_calib;
    if (debug_get_option_slam_submit_from_start() == NULL) {
        config.submit_from_start = true;
@@ -1724,16 +1736,15 @@ wmr_hmd_setup_trackers(struct wmr_hmd *wh, struct xrt_slam_sinks *out_sinks, str
    // Setup sinks depending on tracking configuration
    struct xrt_slam_sinks entry_sinks = {0};
    if (slam_enabled && hand_enabled) {
-       struct xrt_frame_sink *entry_left_sink = NULL;
-       struct xrt_frame_sink *entry_right_sink = NULL;
+       struct xrt_frame_sink *entry_cam0_sink = NULL;
+       struct xrt_frame_sink *entry_cam1_sink = NULL;
 
-       u_sink_split_create(&wh->tracking.xfctx, slam_sinks->cams[0], hand_sinks->cams[0], &entry_left_sink);
-       u_sink_split_create(&wh->tracking.xfctx, slam_sinks->cams[1], hand_sinks->cams[1], &entry_right_sink);
+       u_sink_split_create(&wh->tracking.xfctx, slam_sinks->cams[0], hand_sinks->cams[0], &entry_cam0_sink);
+       u_sink_split_create(&wh->tracking.xfctx, slam_sinks->cams[1], hand_sinks->cams[1], &entry_cam1_sink);
 
        entry_sinks = *slam_sinks;
-       entry_sinks.cam_count = 2;
-       entry_sinks.cams[0] = entry_left_sink;
-       entry_sinks.cams[1] = entry_right_sink;
+       entry_sinks.cams[0] = entry_cam0_sink;
+       entry_sinks.cams[1] = entry_cam1_sink;
    } else if (slam_enabled) {
        entry_sinks = *slam_sinks;
    } else if (hand_enabled) {
@@ -61,23 +61,21 @@ struct wmr_source
    struct wmr_hmd_config config;
    struct wmr_camera *camera;
 
-   // Sinks
-   struct xrt_frame_sink left_sink;  //!< Intermediate sink for left camera frames
-   struct xrt_frame_sink right_sink; //!< Intermediate sink for right camera frames
-   struct xrt_imu_sink imu_sink;     //!< Intermediate sink for IMU samples
-   struct xrt_slam_sinks in_sinks;   //!< Pointers to intermediate sinks
-   struct xrt_slam_sinks out_sinks;  //!< Pointers to downstream sinks
+   // Sinks (head tracking)
+   struct xrt_frame_sink cam_sinks[WMR_MAX_CAMERAS]; //!< Intermediate sinks for camera frames
+   struct xrt_imu_sink imu_sink;                     //!< Intermediate sink for IMU samples
+   struct xrt_slam_sinks in_sinks;                   //!< Pointers to intermediate sinks
+   struct xrt_slam_sinks out_sinks;                  //!< Pointers to downstream sinks
 
-   // UI Sinks
-   struct u_sink_debug ui_left_sink;  //!< Sink to display left frames in UI
-   struct u_sink_debug ui_right_sink; //!< Sink to display right frames in UI
-   struct m_ff_vec3_f32 *gyro_ff;     //!< Queue of gyroscope data to display in UI
-   struct m_ff_vec3_f32 *accel_ff;    //!< Queue of accelerometer data to display in UI
+   // UI Sinks (head tracking)
+   struct u_sink_debug ui_cam_sinks[WMR_MAX_CAMERAS]; //!< Sink to display camera frames in UI
+   struct m_ff_vec3_f32 *gyro_ff;                     //!< Queue of gyroscope data to display in UI
+   struct m_ff_vec3_f32 *accel_ff;                    //!< Queue of accelerometer data to display in UI
 
    bool is_running;              //!< Whether the device is streaming
    bool first_imu_received;      //!< Don't send frames until first IMU sample
    time_duration_ns hw2mono;     //!< Estimated offset from IMU to monotonic clock
-   time_duration_ns cam_hw2mono; //!< Cache for hw2mono used in last left frame
+   time_duration_ns cam_hw2mono; //!< Caches hw2mono for use in the full frame bundle
 };
 
 /*
@@ -86,30 +84,34 @@ struct wmr_source
  *
  */
 
-static void
-receive_left_frame(struct xrt_frame_sink *sink, struct xrt_frame *xf)
-{
-   struct wmr_source *ws = container_of(sink, struct wmr_source, left_sink);
-   ws->cam_hw2mono = ws->hw2mono; // We want the right frame to use the same offset
-   xf->timestamp += ws->cam_hw2mono;
-   WMR_TRACE(ws, "left img t=%" PRId64 " source_t=%" PRId64, xf->timestamp, xf->source_timestamp);
-   u_sink_debug_push_frame(&ws->ui_left_sink, xf);
-   if (ws->out_sinks.cams[0] && ws->first_imu_received) {
-       xrt_sink_push_frame(ws->out_sinks.cams[0], xf);
+#define DEFINE_RECEIVE_CAM(cam_id) \
+   static void receive_cam##cam_id(struct xrt_frame_sink *sink, struct xrt_frame *xf) \
+   { \
+       struct wmr_source *ws = container_of(sink, struct wmr_source, cam_sinks[cam_id]); \
+       if (cam_id == 0) { \
+           ws->cam_hw2mono = ws->hw2mono; \
+       } \
+       xf->timestamp += ws->cam_hw2mono; \
+       WMR_TRACE(ws, "cam" #cam_id " img t=%" PRId64 " source_t=%" PRId64, xf->timestamp, \
+                 xf->source_timestamp); \
+       u_sink_debug_push_frame(&ws->ui_cam_sinks[cam_id], xf); \
+       if (ws->out_sinks.cams[cam_id] && ws->first_imu_received) { \
+           xrt_sink_push_frame(ws->out_sinks.cams[cam_id], xf); \
+       } \
    }
-}
 
-static void
-receive_right_frame(struct xrt_frame_sink *sink, struct xrt_frame *xf)
-{
-   struct wmr_source *ws = container_of(sink, struct wmr_source, right_sink);
-   xf->timestamp += ws->cam_hw2mono;
-   WMR_TRACE(ws, "right img t=%" PRId64 " source_t=%" PRId64, xf->timestamp, xf->source_timestamp);
-   u_sink_debug_push_frame(&ws->ui_right_sink, xf);
-   if (ws->out_sinks.cams[1] && ws->first_imu_received) {
-       xrt_sink_push_frame(ws->out_sinks.cams[1], xf);
-   }
-}
+DEFINE_RECEIVE_CAM(0)
+DEFINE_RECEIVE_CAM(1)
+DEFINE_RECEIVE_CAM(2)
+DEFINE_RECEIVE_CAM(3)
+
+//! Define a function for each WMR_MAX_CAMERAS and reference it in this array
+void (*receive_cam[WMR_MAX_CAMERAS])(struct xrt_frame_sink *, struct xrt_frame *) = {
+    receive_cam0, //
+    receive_cam1, //
+    receive_cam2, //
+    receive_cam3, //
+};
 
 static void
 receive_imu_sample(struct xrt_imu_sink *sink, struct xrt_imu_sample *s)
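For reference, a sketch of roughly what DEFINE_RECEIVE_CAM(1) above expands to (whitespace aside). Only cam0 refreshes cam_hw2mono, so the other cameras reuse the offset cached by cam0's frame of the same bundle:

static void receive_cam1(struct xrt_frame_sink *sink, struct xrt_frame *xf)
{
    struct wmr_source *ws = container_of(sink, struct wmr_source, cam_sinks[1]);
    if (1 == 0) { // constant-folded away for every camera except cam0
        ws->cam_hw2mono = ws->hw2mono;
    }
    xf->timestamp += ws->cam_hw2mono;
    WMR_TRACE(ws, "cam" "1" " img t=%" PRId64 " source_t=%" PRId64, xf->timestamp, xf->source_timestamp);
    u_sink_debug_push_frame(&ws->ui_cam_sinks[1], xf);
    if (ws->out_sinks.cams[1] && ws->first_imu_received) {
        xrt_sink_push_frame(ws->out_sinks.cams[1], xf);
    }
}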
@@ -206,7 +208,7 @@ wmr_source_stream_start(struct xrt_fs *xfs,
    if (xs == NULL && capture_type == XRT_FS_CAPTURE_TYPE_TRACKING) {
        WMR_INFO(ws, "Starting WMR stream in tracking mode");
    } else if (xs != NULL && capture_type == XRT_FS_CAPTURE_TYPE_CALIBRATION) {
-       WMR_INFO(ws, "Starting WMR stream in calibration mode, will stream only left frames");
+       WMR_INFO(ws, "Starting WMR stream in calibration mode, will stream only cam0 frames");
        ws->out_sinks.cam_count = 1;
        ws->out_sinks.cams[0] = xs;
    } else {
@@ -214,7 +216,7 @@ wmr_source_stream_start(struct xrt_fs *xfs,
        return false;
    }
 
-   bool started = wmr_camera_start(ws->camera, ws->config.cameras, ws->config.n_cameras);
+   bool started = wmr_camera_start(ws->camera);
    if (!started) {
        WMR_ERROR(ws, "Unable to start WMR cameras");
        WMR_ASSERT_(false);
@@ -259,8 +261,9 @@ wmr_source_node_destroy(struct xrt_frame_node *node)
 
    struct wmr_source *ws = container_of(node, struct wmr_source, node);
    WMR_DEBUG(ws, "Destroying WMR source");
-   u_sink_debug_destroy(&ws->ui_left_sink);
-   u_sink_debug_destroy(&ws->ui_right_sink);
+   for (int i = 0; i < ws->config.tcam_count; i++) {
+       u_sink_debug_destroy(&ws->ui_cam_sinks[i]);
+   }
    m_ff_vec3_f32_free(&ws->gyro_ff);
    m_ff_vec3_f32_free(&ws->accel_ff);
    u_var_remove_root(ws);
@@ -294,35 +297,51 @@ wmr_source_create(struct xrt_frame_context *xfctx, struct xrt_prober_device *dev
    xfs->slam_stream_start = wmr_source_slam_stream_start;
    xfs->stream_stop = wmr_source_stream_stop;
    xfs->is_running = wmr_source_is_running;
-   snprintf(xfs->name, sizeof(xfs->name), WMR_SOURCE_STR);
-   snprintf(xfs->product, sizeof(xfs->product), WMR_SOURCE_STR " Product");
-   snprintf(xfs->manufacturer, sizeof(xfs->manufacturer), WMR_SOURCE_STR " Manufacturer");
-   snprintf(xfs->serial, sizeof(xfs->serial), WMR_SOURCE_STR " Serial");
-   xfs->source_id = 0x574d522d53524300; // WMR_SRC\0 in hex
+   (void)snprintf(xfs->name, sizeof(xfs->name), WMR_SOURCE_STR);
+   (void)snprintf(xfs->product, sizeof(xfs->product), WMR_SOURCE_STR " Product");
+   (void)snprintf(xfs->manufacturer, sizeof(xfs->manufacturer), WMR_SOURCE_STR " Manufacturer");
+   (void)snprintf(xfs->serial, sizeof(xfs->serial), WMR_SOURCE_STR " Serial");
+   xfs->source_id = *((uint64_t *)"WMR_SRC\0");
 
    // Setup sinks
-   ws->left_sink.push_frame = receive_left_frame;
-   ws->right_sink.push_frame = receive_right_frame;
+   for (int i = 0; i < WMR_MAX_CAMERAS; i++) {
+       ws->cam_sinks[i].push_frame = receive_cam[i];
+   }
    ws->imu_sink.push_imu = receive_imu_sample;
-   ws->in_sinks.cam_count = 2;
-   ws->in_sinks.cams[0] = &ws->left_sink;
-   ws->in_sinks.cams[1] = &ws->right_sink;
+
+   ws->in_sinks.cam_count = cfg.tcam_count;
+   for (int i = 0; i < cfg.tcam_count; i++) {
+       ws->in_sinks.cams[i] = &ws->cam_sinks[i];
+   }
    ws->in_sinks.imu = &ws->imu_sink;
-   ws->camera =
-       wmr_camera_open(dev_holo, ws->in_sinks.cams[0], ws->in_sinks.cams[1], cfg.n_cameras, ws->log_level);
+
+   struct wmr_camera_open_config options = {
+       .dev_holo = dev_holo,
+       .tcam_confs = ws->config.tcams,
+       .tcam_sinks = ws->in_sinks.cams,
+       .tcam_count = cfg.tcam_count,
+       .slam_cam_count = cfg.slam_cam_count,
+       .log_level = ws->log_level,
+   };
+
+   ws->camera = wmr_camera_open(&options);
    ws->config = cfg;
 
    // Setup UI
-   u_sink_debug_init(&ws->ui_left_sink);
-   u_sink_debug_init(&ws->ui_right_sink);
+   for (int i = 0; i < cfg.tcam_count; i++) {
+       u_sink_debug_init(&ws->ui_cam_sinks[i]);
+   }
    m_ff_vec3_f32_alloc(&ws->gyro_ff, 1000);
    m_ff_vec3_f32_alloc(&ws->accel_ff, 1000);
    u_var_add_root(ws, WMR_SOURCE_STR, false);
    u_var_add_log_level(ws, &ws->log_level, "Log Level");
    u_var_add_ro_ff_vec3_f32(ws, ws->gyro_ff, "Gyroscope");
    u_var_add_ro_ff_vec3_f32(ws, ws->accel_ff, "Accelerometer");
-   u_var_add_sink_debug(ws, &ws->ui_left_sink, "Left Camera");
-   u_var_add_sink_debug(ws, &ws->ui_right_sink, "Right Camera");
+   for (int i = 0; i < cfg.tcam_count; i++) {
+       char label[] = "Camera NNNNNNNNNNN";
+       (void)snprintf(label, sizeof(label), "Camera %d", i);
+       u_var_add_sink_debug(ws, &ws->ui_cam_sinks[i], label);
+   }
 
    // Setup node
    struct xrt_frame_node *xfn = &ws->node;