d/rift_s: Add 6DOF SLAM tracking and hand tracking

Find and capture input from the cameras, and split the stream according
to frame type. Send long-exposure tracking frames through the AEG module
and on to the SLAM and hand-tracking trackers.

Add controller emulated hand devices.

The native "fisheye62" camera distortion model is
dynamically converted to OpenCV Kannala-Brandt
parameters using a TinyCeres solver.
This commit is contained in:
Jan Schmidt 2022-05-02 21:17:36 +10:00
parent 9fbe81e494
commit 495eecb65f
17 changed files with 2115 additions and 226 deletions

View file

@ -204,6 +204,8 @@ if(XRT_BUILD_DRIVER_RIFT_S)
drv_rift_s STATIC
rift_s/rift_s_builder.c
rift_s/rift_s_interface.h
rift_s/rift_s_camera.c
rift_s/rift_s_camera.h
rift_s/rift_s_controller.c
rift_s/rift_s_controller.h
rift_s/rift_s_firmware.c
@ -214,9 +216,14 @@ if(XRT_BUILD_DRIVER_RIFT_S)
rift_s/rift_s_protocol.h
rift_s/rift_s_radio.c
rift_s/rift_s_radio.h
rift_s/rift_s_tracker.c
rift_s/rift_s_tracker.h
rift_s/rift_s_util.cpp
rift_s/rift_s_util.h
rift_s/rift_s.c
rift_s/rift_s.h
)
target_include_directories(drv_rift_s SYSTEM PRIVATE ${EIGEN3_INCLUDE_DIR})
target_link_libraries(
drv_rift_s
PRIVATE
@ -224,7 +231,8 @@ if(XRT_BUILD_DRIVER_RIFT_S)
aux_util
aux_math
xrt-external-cjson
)
xrt-external-tinyceres
)
list(APPEND ENABLED_HEADSET_DRIVERS rift-s)
endif()

View file

@ -45,14 +45,109 @@
#include "rift_s.h"
#include "rift_s_hmd.h"
#include "rift_s_controller.h"
#include "rift_s_camera.h"
static void *
rift_s_run_thread(void *ptr);
static void
rift_s_system_free(struct rift_s_system *sys);
/*
 * Fetch the camera calibration JSON block from the HMD firmware and
 * parse it into @calibration.
 *
 * Returns 0 on success, or a negative error code from the read or parse
 * step. The temporary JSON buffer is released after a successful read.
 */
static int
read_camera_calibration(struct os_hid_device *hid_hmd, struct rift_s_camera_calibration_block *calibration)
{
	char *json_block = NULL;
	int json_block_len = 0;

	int res = rift_s_read_firmware_block(hid_hmd, RIFT_S_FIRMWARE_BLOCK_CAMERA_CALIB, &json_block, &json_block_len);
	if (res >= 0) {
		res = rift_s_parse_camera_calibration_block(json_block, calibration);
		free(json_block);
	}

	return res;
}
/*
 * Fetch the IMU calibration JSON block from the HMD firmware and parse
 * it into @imu_calibration.
 *
 * Returns 0 on success, or a negative error code from the read or parse
 * step. The temporary JSON buffer is released after a successful read.
 */
static int
read_hmd_fw_imu_calibration(struct os_hid_device *hid_hmd, struct rift_s_imu_calibration *imu_calibration)
{
	char *json_block = NULL;
	int json_block_len = 0;

	int res = rift_s_read_firmware_block(hid_hmd, RIFT_S_FIRMWARE_BLOCK_IMU_CALIB, &json_block, &json_block_len);
	if (res >= 0) {
		res = rift_s_parse_imu_calibration(json_block, imu_calibration);
		free(json_block);
	}

	return res;
}
/*
 * Fetch the proximity sensor threshold JSON block from the HMD firmware
 * and parse it into @proximity_threshold.
 *
 * Returns 0 on success, or a negative error code from the read or parse
 * step. The temporary JSON buffer is released after a successful read.
 */
static int
read_hmd_proximity_threshold(struct os_hid_device *hid_hmd, int *proximity_threshold)
{
	char *json_block = NULL;
	int json_block_len = 0;

	int res = rift_s_read_firmware_block(hid_hmd, RIFT_S_FIRMWARE_BLOCK_THRESHOLD, &json_block, &json_block_len);
	if (res >= 0) {
		res = rift_s_parse_proximity_threshold(json_block, proximity_threshold);
		free(json_block);
	}

	return res;
}
/*!
 * Read all the configuration and calibration the driver needs from the
 * HMD firmware: firmware version, panel info, IMU configuration and
 * calibration, proximity sensor threshold and camera calibration.
 *
 * Each failed step logs a distinct error, so the failing block can be
 * identified from the log.
 *
 * @return 0 on success, negative on any failed read or parse.
 */
static int
read_hmd_config(struct os_hid_device *hid_hmd, struct rift_s_hmd_config *config)
{
	int ret;

	ret = rift_s_read_firmware_version(hid_hmd);
	if (ret < 0) {
		RIFT_S_ERROR("Failed to read Rift S firmware version");
		return ret;
	}

	ret = rift_s_read_panel_info(hid_hmd, &config->panel_info);
	if (ret < 0) {
		RIFT_S_ERROR("Failed to read Rift S device info");
		return ret;
	}

	ret = rift_s_read_imu_config_info(hid_hmd, &config->imu_config_info);
	if (ret < 0) {
		RIFT_S_ERROR("Failed to read IMU configuration block");
		return ret;
	}

	ret = read_hmd_fw_imu_calibration(hid_hmd, &config->imu_calibration);
	if (ret < 0) {
		/* Fixed: this previously duplicated the "IMU configuration
		 * block" message, making the two failures indistinguishable */
		RIFT_S_ERROR("Failed to read IMU calibration block");
		return ret;
	}

	/* Configure the proximity sensor threshold */
	ret = read_hmd_proximity_threshold(hid_hmd, &config->proximity_threshold);
	if (ret < 0) {
		RIFT_S_ERROR("Failed to read proximity sensor firmware block");
		return ret;
	}

	ret = read_camera_calibration(hid_hmd, &config->camera_calibration);
	if (ret < 0) {
		RIFT_S_ERROR("Failed to read HMD camera calibration block");
		return ret;
	}

	return 0;
}
struct rift_s_system *
rift_s_system_create(const unsigned char *hmd_serial_no,
rift_s_system_create(struct xrt_prober *xp,
const unsigned char *hmd_serial_no,
struct os_hid_device *hid_hmd,
struct os_hid_device *hid_status,
struct os_hid_device *hid_controllers)
@ -85,11 +180,22 @@ rift_s_system_create(const unsigned char *hmd_serial_no,
goto cleanup;
}
if (read_hmd_config(hid_hmd, &sys->hmd_config) < 0) {
RIFT_S_ERROR("Failed to read HMD configuration");
goto cleanup;
}
sys->tracker = rift_s_tracker_create(&sys->base, &sys->xfctx, &sys->hmd_config);
if (sys->tracker == NULL) {
RIFT_S_ERROR("Failed to init tracking");
goto cleanup;
}
rift_s_radio_state_init(&sys->radio_state);
/* Create the HMD now. Controllers are created in the
* rift_s_system_get_controller() call later */
struct rift_s_hmd *hmd = rift_s_hmd_create(sys, hmd_serial_no);
struct rift_s_hmd *hmd = rift_s_hmd_create(sys, hmd_serial_no, &sys->hmd_config);
if (hmd == NULL) {
RIFT_S_ERROR("Failed to create Oculus Rift S device.");
goto cleanup;
@ -118,6 +224,20 @@ rift_s_system_create(const unsigned char *hmd_serial_no,
// Two seconds seems to be needed for the display connection to stabilise
os_nanosleep((uint64_t)U_TIME_1S_IN_NS * 2);
// Start the camera input
struct rift_s_camera *cam =
rift_s_camera_create(xp, &sys->xfctx, (const char *)hmd_serial_no, sys->handles[HMD_HID], sys->tracker,
&sys->hmd_config.camera_calibration);
if (cam == NULL) {
RIFT_S_ERROR("Failed to open Rift S camera device");
goto cleanup;
}
os_mutex_lock(&sys->dev_mutex);
sys->cam = cam;
os_mutex_unlock(&sys->dev_mutex);
rift_s_tracker_start(sys->tracker);
RIFT_S_DEBUG("Oculus Rift S driver ready");
return sys;
@ -136,6 +256,10 @@ rift_s_system_free(struct rift_s_system *sys)
/* Stop the packet reading thread */
os_thread_helper_destroy(&sys->oth);
/* Stop all the frame processing (has to happen before the cameras
* and tracker are destroyed) */
xrt_frame_context_destroy_nodes(&sys->xfctx);
rift_s_radio_state_clear(&sys->radio_state);
if (sys->handles[HMD_HID]) {
@ -149,6 +273,15 @@ rift_s_system_free(struct rift_s_system *sys)
os_hid_destroy(sys->handles[i]);
}
/* Free the camera */
if (sys->cam != NULL) {
rift_s_camera_destroy(sys->cam);
}
if (sys->tracker != NULL) {
rift_s_tracker_destroy(sys->tracker);
}
os_mutex_destroy(&sys->dev_mutex);
free(sys);
@ -189,6 +322,12 @@ rift_s_system_radio(struct rift_s_system *sys)
return &sys->radio_state;
}
/*!
 * Get the shared pose tracker owned by this system.
 *
 * The system retains ownership of the returned tracker; callers must
 * not destroy it.
 */
struct rift_s_tracker *
rift_s_system_get_tracker(struct rift_s_system *sys)
{
	return sys->tracker;
}
struct xrt_device *
rift_s_system_get_hmd(struct rift_s_system *sys)
{
@ -235,6 +374,12 @@ rift_s_system_remove_controller(struct rift_s_system *sys, struct rift_s_control
os_mutex_unlock(&sys->dev_mutex);
}
/*!
 * Get the hand tracking xrt_device from the system's tracker.
 *
 * NOTE(review): presumably returns NULL when hand tracking is not
 * available — confirm against rift_s_tracker_get_hand_tracking_device.
 */
struct xrt_device *
rift_s_system_get_hand_tracking_device(struct rift_s_system *sys)
{
	return rift_s_tracker_get_hand_tracking_device(sys->tracker);
}
/* Packet reading / handling */
static int
update_tracked_device_types(struct rift_s_system *sys)
@ -420,6 +565,12 @@ rift_s_run_thread(void *ptr)
if (success) {
rift_s_radio_update(&sys->radio_state, sys->handles[HMD_HID]);
os_mutex_lock(&sys->dev_mutex);
if (sys->cam != NULL) {
rift_s_camera_update(sys->cam, sys->handles[HMD_HID]);
}
os_mutex_unlock(&sys->dev_mutex);
}
os_thread_helper_lock(&sys->oth);

View file

@ -20,16 +20,22 @@
#include "os/os_threading.h"
#include "util/u_logging.h"
#include "xrt/xrt_defines.h"
#include "xrt/xrt_frame.h"
#include "xrt/xrt_frameserver.h"
#include "xrt/xrt_prober.h"
#include "xrt/xrt_tracking.h"
#include "rift_s_firmware.h"
#include "rift_s_protocol.h"
#include "rift_s_radio.h"
#include "rift_s_tracker.h"
#ifndef RIFT_S_H
#define RIFT_S_H
struct rift_s_hmd;
struct rift_s_controller;
struct rift_s_camera;
extern enum u_logging_level rift_s_log_level;
@ -45,6 +51,19 @@ extern enum u_logging_level rift_s_log_level;
#define STATUS_HID 1
#define CONTROLLER_HID 2
/* All HMD Configuration / calibration info */
struct rift_s_hmd_config
{
rift_s_panel_info_t panel_info;
int proximity_threshold;
/* Camera calibration block from firmware */
struct rift_s_camera_calibration_block camera_calibration;
struct rift_s_imu_config_info_t imu_config_info;
struct rift_s_imu_calibration imu_calibration;
};
/* Structure to track online devices and type */
struct rift_s_tracked_device
{
@ -72,15 +91,27 @@ struct rift_s_system
/* Device lock protects device access */
struct os_mutex dev_mutex;
/* All configuration data for the HMD, stored
* here for sharing to child objects */
struct rift_s_hmd_config hmd_config;
/* 3dof/SLAM tracker that provides HMD pose */
struct rift_s_tracker *tracker;
/* HMD device */
struct rift_s_hmd *hmd;
/* Controller devices */
struct rift_s_controller *controllers[MAX_TRACKED_DEVICES];
/* Video feed handling */
struct xrt_frame_context xfctx;
struct rift_s_camera *cam;
};
struct rift_s_system *
rift_s_system_create(const unsigned char *hmd_serial_no,
rift_s_system_create(struct xrt_prober *xp,
const unsigned char *hmd_serial_no,
struct os_hid_device *hid_hmd,
struct os_hid_device *hid_status,
struct os_hid_device *hid_controllers);
@ -90,6 +121,9 @@ rift_s_system_hid_handle(struct rift_s_system *sys);
rift_s_radio_state *
rift_s_system_radio(struct rift_s_system *sys);
struct rift_s_tracker *
rift_s_system_get_tracker(struct rift_s_system *sys);
struct xrt_device *
rift_s_system_get_hmd(struct rift_s_system *sys);
void
@ -100,6 +134,9 @@ rift_s_system_get_controller(struct rift_s_system *sys, int index);
void
rift_s_system_remove_controller(struct rift_s_system *sys, struct rift_s_controller *ctrl);
struct xrt_device *
rift_s_system_get_hand_tracking_device(struct rift_s_system *sys);
void
rift_s_system_reference(struct rift_s_system **dst, struct rift_s_system *src);

View file

@ -14,6 +14,7 @@
#include "os/os_hid.h"
#include "xrt/xrt_config_drivers.h"
#include "xrt/xrt_prober.h"
#include "util/u_builders.h"
@ -23,6 +24,10 @@
#include "util/u_system_helpers.h"
#include "util/u_trace_marker.h"
#ifdef XRT_BUILD_DRIVER_HANDTRACKING
#include "ht_ctrl_emu/ht_ctrl_emu_interface.h"
#endif
#include "rift_s_interface.h"
#include "rift_s.h"
#include "rift_s_hmd.h"
@ -136,17 +141,17 @@ rift_s_open_system(struct xrt_builder *xb, cJSON *config, struct xrt_prober *xp,
goto fail;
}
struct rift_s_system *sys = rift_s_system_create(hmd_serial_no, hid_hmd, hid_status, hid_controllers);
struct rift_s_system *sys = rift_s_system_create(xp, hmd_serial_no, hid_hmd, hid_status, hid_controllers);
if (sys == NULL) {
RIFT_S_ERROR("Failed to initialise Oculus Rift S driver");
goto fail;
}
struct xrt_device *xdev = rift_s_system_get_hmd(sys);
usysd->base.xdevs[usysd->base.xdev_count++] = xdev;
usysd->base.roles.head = xdev;
struct xrt_device *hmd_xdev = rift_s_system_get_hmd(sys);
usysd->base.xdevs[usysd->base.xdev_count++] = hmd_xdev;
usysd->base.roles.head = hmd_xdev;
xdev = rift_s_system_get_controller(sys, 0);
struct xrt_device *xdev = rift_s_system_get_controller(sys, 0);
usysd->base.xdevs[usysd->base.xdev_count++] = xdev;
usysd->base.roles.left = xdev;
@ -154,6 +159,23 @@ rift_s_open_system(struct xrt_builder *xb, cJSON *config, struct xrt_prober *xp,
usysd->base.xdevs[usysd->base.xdev_count++] = xdev;
usysd->base.roles.right = xdev;
#ifdef XRT_BUILD_DRIVER_HANDTRACKING
struct xrt_device *ht_xdev = rift_s_system_get_hand_tracking_device(sys);
if (ht_xdev != NULL) {
// Create hand-tracked controllers
RIFT_S_DEBUG("Creating emulated hand tracking controllers");
struct xrt_device *two_hands[2];
cemu_devices_create(hmd_xdev, ht_xdev, two_hands);
usysd->base.roles.hand_tracking.left = two_hands[0];
usysd->base.roles.hand_tracking.right = two_hands[1];
usysd->base.xdevs[usysd->base.xdev_count++] = two_hands[0];
usysd->base.xdevs[usysd->base.xdev_count++] = two_hands[1];
}
#endif
*out_xsysd = &usysd->base;
return XRT_SUCCESS;

View file

@ -0,0 +1,482 @@
/*
* Copyright 2021, Collabora, Ltd.
* Copyright 2022 Jan Schmidt
* SPDX-License-Identifier: BSL-1.0
*
*/
/*!
* @file
* @brief Oculus Rift S camera handling
*
* The Rift S camera module, handles reception and dispatch
* of camera frames.
*
* @author Jan Schmidt <jan@centricular.com>
* @ingroup drv_rift_s
*/
#include <asm/byteorder.h>
#include <string.h>
#include <inttypes.h>
#include "rift_s.h"
#include "rift_s_camera.h"
#include "os/os_threading.h"
#include "xrt/xrt_defines.h"
#include "xrt/xrt_frame.h"
#include "xrt/xrt_frameserver.h"
#include "util/u_autoexpgain.h"
#include "util/u_debug.h"
#include "util/u_var.h"
#include "util/u_sink.h"
#include "util/u_frame.h"
#include "util/u_trace_marker.h"
#define DEFAULT_EXPOSURE 6000
#define DEFAULT_GAIN 127
#define RIFT_S_MIN_EXPOSURE 38
#define RIFT_S_MAX_EXPOSURE 14022
#define RIFT_S_MIN_GAIN 16
#define RIFT_S_MAX_GAIN 255
//! Specifies whether the user wants to enable autoexposure from the start.
DEBUG_GET_ONCE_BOOL_OPTION(rift_s_autoexposure, "RIFT_S_AUTOEXPOSURE", true)
struct rift_s_camera
{
struct os_mutex lock;
struct rift_s_tracker *tracker;
struct rift_s_camera_calibration_block *camera_calibration;
struct xrt_frame_sink in_sink; // Receive raw frames and split them
struct u_sink_debug debug_sinks[2];
rift_s_camera_report_t camera_report;
uint16_t last_slam_exposure, target_exposure;
uint8_t last_slam_gain, target_gain;
bool manual_control; //!< Whether to control exp/gain manually or with aeg
struct u_var_draggable_u16 exposure_ui; //! Widget to control `exposure` value
struct u_autoexpgain *aeg;
};
struct rift_s_camera_finder
{
const char *hmd_serial_no;
struct xrt_fs *xfs;
struct xrt_frame_context *xfctx;
};
/* 50-byte metadata record that the headset encodes into the top pixel
 * rows of every camera frame (recovered by parse_frame_data()). All
 * multi-byte fields are little-endian on the wire. */
union rift_s_frame_data {
	struct
	{
		uint8_t frame_type;      // 0x06 or 0x86 (controller or SLAM exposure)
		__le16 magic_abcd;       // 0xabcd
		__le16 frame_ctr;        // Increments every exposure
		__le32 const1;           // QHWH
		uint8_t pad1[7];         // all zeroes padding to 16 bytes
		__le64 frame_ts;         // microseconds
		__le32 frame_ctr2;       // Another frame counter, but only increments on alternate frames @ 30Hz
		__le16 slam_exposure[5]; // One 16-bit per camera. Exposure duration?
		uint8_t pad2[2];         // zero padding
		uint8_t slam_gain[5];    // One byte per camera. 0x40 or 0xf0 depending on frame type
		uint8_t pad3;            // zero padding
		__le16 unknown1;         // changes every frame. No clear pattern
		__le16 magic_face;       // 0xface
	} __attribute__((packed)) data;
	uint8_t raw[50];
};
static void
update_expgain(struct rift_s_camera *cam, struct xrt_frame *xf);
static void
receive_cam_frame(struct xrt_frame_sink *sink, struct xrt_frame *xf);
/*
 * Prober enumeration callback: open the first video device that looks
 * like the Rift S camera and stash the frameserver in the finder.
 */
static void
on_video_device(struct xrt_prober *xp,
                struct xrt_prober_device *pdev,
                const char *product,
                const char *manufacturer,
                const char *serial,
                void *ptr)
{
	struct rift_s_camera_finder *finder = (struct rift_s_camera_finder *)ptr;

	/* Stop looking once a device has already been opened */
	if (finder->xfs != NULL) {
		return;
	}

	if (product == NULL || manufacturer == NULL || serial == NULL) {
		return;
	}

	RIFT_S_TRACE("Inspecting video device %s - %s serial %s", manufacturer, product, serial);

	bool matches = (strcmp(product, "Rift S Sensor") == 0) && (strcmp(manufacturer, "Oculus VR") == 0);
	/* The reported serial seems to be all zeros right now, so it is
	 * deliberately not compared against finder->hmd_serial_no */
	if (matches) {
		xrt_prober_open_video_device(xp, pdev, finder->xfctx, &finder->xfs);
	}
}
/*!
 * Find and open the Rift S camera (v4l2) device, configure the default
 * camera settings, set up auto-exposure/gain handling, debug UI and
 * frame sinks, then start the video stream.
 *
 * @param xp                 Prober used to (re-)enumerate and open the video device.
 * @param xfctx              Frame context that owns the frameserver / sink nodes.
 * @param hmd_serial_no      HMD serial (currently unused for matching; the
 *                           camera reports an all-zero serial).
 * @param hid                HID device used to send the initial camera report.
 * @param tracker            Tracker that receives the SLAM frame pairs (borrowed).
 * @param camera_calibration Calibration block, owned by the caller and must
 *                           outlive the returned camera.
 * @return The new camera module, or NULL on failure.
 */
struct rift_s_camera *
rift_s_camera_create(struct xrt_prober *xp,
                     struct xrt_frame_context *xfctx,
                     const char *hmd_serial_no,
                     struct os_hid_device *hid,
                     struct rift_s_tracker *tracker,
                     struct rift_s_camera_calibration_block *camera_calibration)
{
	struct rift_s_camera_finder finder = {
	    0,
	};

	DRV_TRACE_MARKER();

	/* Set up the finder with the HMD serial number and frame server context we want */
	finder.xfctx = xfctx;
	finder.hmd_serial_no = hmd_serial_no;

	/* Re-probe devices. The v4l2 camera device should have appeared by now */
	int retry_count = 5;
	do {
		xrt_result_t xret = xrt_prober_probe(xp);
		if (xret != XRT_SUCCESS) {
			return NULL;
		}

		xrt_prober_list_video_devices(xp, on_video_device, &finder);

		if (finder.xfs != NULL) {
			break;
		}

		/* Sleep 1 second before retry */
		os_nanosleep((uint64_t)U_TIME_1S_IN_NS);
	} while (retry_count-- > 0);

	if (finder.xfs == NULL) {
		RIFT_S_ERROR("Didn't find Rift S camera device");
		return NULL;
	}

	struct rift_s_camera *cam = U_TYPED_CALLOC(struct rift_s_camera);

	if (os_mutex_init(&cam->lock) != 0) {
		RIFT_S_ERROR("Failed to init camera configuration mutex");
		goto cleanup;
	}

	// Store the tracker
	cam->tracker = tracker;
	cam->camera_calibration = camera_calibration;

	/* Configure default camera settings */
	rift_s_protocol_camera_report_init(&cam->camera_report);
	cam->camera_report.uvc_enable = 0x1;
	cam->camera_report.radio_sync_flag = 0x1;

	/* Store the defaults from the init() call into our current settings */
	cam->last_slam_exposure = cam->camera_report.slam_frame_exposures[0];
	cam->last_slam_gain = cam->camera_report.slam_frame_gains[0];
	cam->target_exposure = DEFAULT_EXPOSURE;
	cam->target_gain = DEFAULT_GAIN;

	rift_s_camera_update(cam, hid);

	cam->in_sink.push_frame = receive_cam_frame;

	bool enable_aeg = debug_get_bool_option_rift_s_autoexposure();
	int frame_delay =
	    3; // WMR takes about three frames until the cmd changes the image. TODO: Confirm this for Rift S
	cam->aeg = u_autoexpgain_create(U_AEG_STRATEGY_TRACKING, enable_aeg, frame_delay);

	u_sink_debug_init(&cam->debug_sinks[0]);
	u_sink_debug_init(&cam->debug_sinks[1]);

	struct xrt_frame_sink *tmp = &cam->in_sink;

	struct xrt_fs_mode *modes = NULL;
	uint32_t count;

	xrt_fs_enumerate_modes(finder.xfs, &modes, &count);

	bool found_mode = false;
	uint32_t selected_mode = 0;

	for (; selected_mode < count; selected_mode++) {
		if (modes[selected_mode].format == XRT_FORMAT_YUYV422) {
			found_mode = true;
			break;
		}
		if (modes[selected_mode].format == XRT_FORMAT_MJPEG) {
			/* Insert a converter so the sink still receives L8 data */
			u_sink_create_format_converter(xfctx, XRT_FORMAT_L8, tmp, &tmp);
			found_mode = true;
			break;
		}
	}

	/* The mode list is only needed to pick an index.
	 * Fixed: it was previously leaked when no compatible mode was found. */
	free(modes);

	if (!found_mode) {
		RIFT_S_ERROR("Couldn't find compatible camera input format.");
		goto cleanup;
	}

	u_var_add_root(cam, "Oculus Rift S Cameras", true);
	u_var_add_bool(cam, &cam->manual_control, "Manual exposure and gain control");

	cam->exposure_ui.val = &cam->target_exposure;
	cam->exposure_ui.min = RIFT_S_MIN_EXPOSURE;
	cam->exposure_ui.max = RIFT_S_MAX_EXPOSURE;
	cam->exposure_ui.step = 25;

	u_var_add_draggable_u16(cam, &cam->exposure_ui, "Exposure");
	u_var_add_u8(cam, &cam->target_gain, "Gain");
	u_var_add_gui_header(cam, NULL, "Auto exposure and gain control");
	u_autoexpgain_add_vars(cam->aeg, cam);
	u_var_add_gui_header(cam, NULL, "Camera Streams");
	u_var_add_sink_debug(cam, &cam->debug_sinks[0], "Tracking Streams");
	u_var_add_sink_debug(cam, &cam->debug_sinks[1], "Controller Streams");

	/* Finally, start the video feed */
	xrt_fs_stream_start(finder.xfs, tmp, XRT_FS_CAPTURE_TYPE_TRACKING, selected_mode);

	return cam;

cleanup:
	/* NOTE(review): destroy calls u_var_remove_root() even on paths where
	 * the root was never added — TODO confirm u_var tolerates that */
	rift_s_camera_destroy(cam);
	return NULL;
}
/*!
 * Tear down the camera module and free it.
 *
 * Removes the debug UI root, destroys the settings mutex and frees the
 * struct. The frameserver and sink chain are owned by the frame context
 * passed to rift_s_camera_create(), so they are not released here.
 */
void
rift_s_camera_destroy(struct rift_s_camera *cam)
{
	u_var_remove_root(cam);
	os_mutex_destroy(&cam->lock);
	free(cam);
}
/*!
 * Recover the 50-byte metadata record from the top rows of a camera frame.
 *
 * Each bit of the record is encoded as an 8x8 pixel block (bright = 1),
 * MSB first, 8 blocks per output byte, so the row must be exactly
 * 50 * 8 * 8 = 3200 pixels wide and at least 8 rows tall.
 *
 * @return true if the row decoded and both magic numbers matched.
 */
static bool
parse_frame_data(const struct xrt_frame *xf, union rift_s_frame_data *row_data)
{
	/* Parse out the bits encoded as 8x8 blocks in the top rows */
	unsigned int x, out_x;

	if (xf->width != 50 * 8 * 8 || xf->height < 8)
		return false;

	/* Sample row 4, the vertical middle of the 8-pixel-tall blocks.
	 * NOTE(review): indexes by width, assuming stride == width for this
	 * L8 data — TODO confirm against u_frame ROI layout */
	uint8_t *pix = &xf->data[xf->width * 4];
	int bit = 7;

	/* x starts at 4, the horizontal centre of the first 8-pixel block */
	for (x = 4, out_x = 0; x < xf->width; x += 8) {
		uint8_t val = 0;
		if (pix[x] > 128)
			val = 1 << bit;

		/* First bit of a byte overwrites, later bits OR in */
		if (bit == 7) {
			row_data->raw[out_x] = val;
		} else {
			row_data->raw[out_x] |= val;
		}

		if (bit > 0)
			bit--;
		else {
			bit = 7;
			out_x++;
		}
	}

	/* Check magic numbers */
	if (__le16_to_cpu(row_data->data.magic_abcd) != 0xabcd)
		return false;
	if (__le16_to_cpu(row_data->data.magic_face) != 0xface)
		return false;

	return true;
}
/*
 * Compute the vertical offset of a camera view within the combined frame.
 *
 * There's a magic formula for computing the vertical offset of each camera
 * view based on exposure, due to some internals of the headset. This
 * formula was extracted through trial and error. The result is clamped
 * to the [8, 375] pixel range.
 */
static int
get_y_offset(struct rift_s_camera *cam, enum rift_s_camera_id cam_id, union rift_s_frame_data *row_data)
{
	const int exposure = __le16_to_cpu(row_data->data.slam_exposure[cam_id]);
	const int unclamped = (exposure + 275) / 38;

	if (unclamped > 375) {
		return 375;
	}
	if (unclamped < 8) {
		return 8;
	}
	return unclamped;
}
/*!
 * Crop one camera's view out of the full combined sensor frame.
 *
 * The ROI comes from the camera calibration, but the vertical offset is
 * overridden because it shifts with the frame's exposure duration (see
 * get_y_offset()).
 *
 * @return A new frame reference for the cropped view; the caller owns it
 *         and must release it with xrt_frame_reference().
 *         NOTE(review): assumes u_frame_create_roi always sets xf_crop on
 *         this path — TODO confirm NULL handling in callers.
 */
static struct xrt_frame *
rift_s_camera_extract_frame(struct rift_s_camera *cam,
                            enum rift_s_camera_id cam_id,
                            struct xrt_frame *full_frame,
                            union rift_s_frame_data *row_data)
{
	struct rift_s_camera_calibration *calib = &cam->camera_calibration->cameras[cam_id];
	struct xrt_rect roi = calib->roi;

	roi.offset.h = get_y_offset(cam, cam_id, row_data);

	struct xrt_frame *xf_crop = NULL;
	u_frame_create_roi(full_frame, roi, &xf_crop);

	return xf_crop;
}
/*!
 * Frame sink callback: receive a raw combined camera frame, decode the
 * embedded metadata row, and dispatch the frame either to the SLAM /
 * hand tracking path (long-exposure frames) or the controller tracking
 * debug sink (short-exposure frames).
 */
static void
receive_cam_frame(struct xrt_frame_sink *sink, struct xrt_frame *xf)
{
	struct rift_s_camera *cam = container_of(sink, struct rift_s_camera, in_sink);
	bool release_xf = false;

	RIFT_S_TRACE("cam img t=%" PRIu64 " source_t=%" PRIu64, xf->timestamp, xf->source_timestamp);

	// If the format is YUYV422 we need to override it to L8 and double the width
	// because the v4l2 device provides the wrong format description for the actual video
	// data
	if (xf->format == XRT_FORMAT_YUYV422) {
		struct xrt_rect roi = {.offset = {0, 0}, .extent = {.w = xf->width, .h = xf->height}};
		struct xrt_frame *xf_l8 = NULL;

		u_frame_create_roi(xf, roi, &xf_l8);
		xf_l8->width = 2 * xf->width;
		xf_l8->format = XRT_FORMAT_L8;

		/* We now hold a reference on the wrapper frame */
		xf = xf_l8;
		release_xf = true;
	}

	// Dump mid-row of the 8 pix data line
	union rift_s_frame_data row_data;
	if (!parse_frame_data(xf, &row_data)) {
		RIFT_S_TRACE("Invalid frame top-row data. Skipping");
		/* Fixed: an early return here leaked the xf_l8 wrapper frame
		 * reference taken above */
		goto out;
	}

	RIFT_S_DEBUG("frame ctr %u ts %" PRIu64
	             " µS pair ctr %u "
	             "exposure[0] %u gain[0] %u unk %u",
	             (uint16_t)__le16_to_cpu(row_data.data.frame_ctr), (uint64_t)__le64_to_cpu(row_data.data.frame_ts),
	             (uint32_t)__le32_to_cpu(row_data.data.frame_ctr2),
	             (uint16_t)__le16_to_cpu(row_data.data.slam_exposure[0]), row_data.data.slam_gain[0],
	             (uint16_t)__le16_to_cpu(row_data.data.unknown1));

	// If the top left pixel is > 128, send as SLAM frame else controller
	if (row_data.data.frame_type & 0x80) {
		int y_offset = get_y_offset(cam, 0, &row_data);
		struct xrt_rect roi = {.offset = {0, y_offset}, .extent = {.w = xf->width, .h = 480}};
		struct xrt_frame *xf_crop = NULL;

		u_frame_create_roi(xf, roi, &xf_crop);
		u_sink_debug_push_frame(&cam->debug_sinks[0], xf_crop);
		xrt_frame_reference(&xf_crop, NULL);

		/* Extract left and right frames and push to the tracker */
		struct xrt_frame *left = rift_s_camera_extract_frame(cam, RIFT_S_CAMERA_FRONT_LEFT, xf, &row_data);
		struct xrt_frame *right = rift_s_camera_extract_frame(cam, RIFT_S_CAMERA_FRONT_RIGHT, xf, &row_data);

		/* Update the exposure for all cameras based on the auto exposure for the left camera view */
		update_expgain(cam, left);

		uint64_t frame_ts_ns = (uint64_t)__le64_to_cpu(row_data.data.frame_ts) * OS_NS_PER_USEC;
		rift_s_tracker_push_slam_frames(cam->tracker, frame_ts_ns, left, right);

		xrt_frame_reference(&left, NULL);
		xrt_frame_reference(&right, NULL);
	} else {
		struct xrt_rect roi = {.offset = {0, 40}, .extent = {.w = xf->width, .h = 480}};
		struct xrt_frame *xf_crop = NULL;

		u_frame_create_roi(xf, roi, &xf_crop);
		u_sink_debug_push_frame(&cam->debug_sinks[1], xf_crop);
		xrt_frame_reference(&xf_crop, NULL);
	}

out:
	if (release_xf)
		xrt_frame_reference(&xf, NULL);
}
/*
 * Feed a tracking frame to the auto exposure/gain estimator and, if it
 * suggests new values, record them as the target exposure/gain to be
 * sent out on the next rift_s_camera_update() pass. Does nothing in
 * manual control mode or when no frame is supplied.
 */
static void
update_expgain(struct rift_s_camera *cam, struct xrt_frame *xf)
{
	if (cam->manual_control || xf == NULL) {
		return;
	}

	u_autoexpgain_update(cam->aeg, xf);

	uint16_t wanted_exposure =
	    CLAMP(u_autoexpgain_get_exposure(cam->aeg), RIFT_S_MIN_EXPOSURE, RIFT_S_MAX_EXPOSURE);
	uint8_t wanted_gain = CLAMP(u_autoexpgain_get_gain(cam->aeg), RIFT_S_MIN_GAIN, RIFT_S_MAX_GAIN);

	if (cam->target_exposure == wanted_exposure && cam->target_gain == wanted_gain) {
		return;
	}

	RIFT_S_DEBUG("AEG exposure now %u (cur %u) gain %u (cur %u)", wanted_exposure, cam->target_exposure,
	             wanted_gain, cam->target_gain);

	os_mutex_lock(&cam->lock);
	cam->target_exposure = wanted_exposure;
	cam->target_gain = wanted_gain;
	os_mutex_unlock(&cam->lock);
}
/* Called from the Rift S system device USB loop, so we can check
 * and send an exposure/gain change command if needed.
 *
 * Copies any pending target exposure/gain into the camera report for
 * all 5 cameras (under the settings lock), then sends the report over
 * HID only when something actually changed. */
void
rift_s_camera_update(struct rift_s_camera *cam, struct os_hid_device *hid)
{
	bool report_dirty = false;

	os_mutex_lock(&cam->lock);

	if (cam->last_slam_exposure != cam->target_exposure) {
		for (int i = 0; i < 5; i++) {
			cam->camera_report.slam_frame_exposures[i] = cam->target_exposure;
		}
		cam->last_slam_exposure = cam->target_exposure;
		report_dirty = true;
	}

	if (cam->last_slam_gain != cam->target_gain) {
		for (int i = 0; i < 5; i++) {
			cam->camera_report.slam_frame_gains[i] = cam->target_gain;
		}
		cam->last_slam_gain = cam->target_gain;
		report_dirty = true;
	}

	os_mutex_unlock(&cam->lock);

	if (report_dirty && rift_s_protocol_send_camera_report(hid, &cam->camera_report) < 0) {
		RIFT_S_WARN("Failed to update camera settings");
	}
}

View file

@ -0,0 +1,38 @@
/*
* Copyright 2013, Fredrik Hultin.
* Copyright 2013, Jakob Bornecrantz.
* Copyright 2016 Philipp Zabel
* Copyright 2019-2022 Jan Schmidt
* SPDX-License-Identifier: BSL-1.0
*/
/*!
* @file
* @brief Oculus Rift S camera handling
* @author Jan Schmidt <jan@centricular.com>
* @ingroup drv_rift_s
*/
#pragma once

#include "os/os_hid.h"
#include "xrt/xrt_prober.h"

#include "rift_s_firmware.h"
#include "rift_s_tracker.h"

/* Opaque camera module handle; definition lives in rift_s_camera.c */
struct rift_s_camera;

/*!
 * Find and open the Rift S camera device, configure it and start
 * streaming frames into the given tracker.
 *
 * @return The new camera module, or NULL if the device could not be
 *         found or opened.
 */
struct rift_s_camera *
rift_s_camera_create(struct xrt_prober *xp,
                     struct xrt_frame_context *xfctx,
                     const char *hmd_serial_no,
                     struct os_hid_device *hid,
                     struct rift_s_tracker *tracker,
                     struct rift_s_camera_calibration_block *camera_calibration);

/*! Tear down and free the camera module. */
void
rift_s_camera_destroy(struct rift_s_camera *cam);

/*! Periodic update: send pending exposure/gain changes over HID. */
void
rift_s_camera_update(struct rift_s_camera *cam, struct os_hid_device *hid);

View file

@ -22,6 +22,7 @@
#include "util/u_json.h"
#include "util/u_misc.h"
#include "rift_s.h"
#include "rift_s_firmware.h"
#define JSON_INT(a, b, c) u_json_get_int(u_json_get(a, b), c)

View file

@ -20,8 +20,6 @@
#include "math/m_mathinclude.h"
#include "math/m_api.h"
#include "rift_s.h"
enum rift_s_firmware_block
{
RIFT_S_FIRMWARE_BLOCK_SERIAL_NUM = 0x0B,
@ -32,7 +30,7 @@ enum rift_s_firmware_block
RIFT_S_FIRMWARE_BLOCK_LENS_CALIB = 0x12
};
enum rift_s_camera_ids
enum rift_s_camera_id
{
RIFT_S_CAMERA_TOP = 0x0,
RIFT_S_CAMERA_SIDE_LEFT = 0x1,

View file

@ -62,14 +62,7 @@ rift_s_get_tracked_pose(struct xrt_device *xdev,
U_ZERO(out_relation);
// Estimate pose at timestamp at_timestamp_ns!
os_mutex_lock(&hmd->mutex);
math_quat_normalize(&hmd->pose.orientation);
out_relation->pose = hmd->pose;
out_relation->relation_flags = (enum xrt_space_relation_flags)(XRT_SPACE_RELATION_ORIENTATION_VALID_BIT |
XRT_SPACE_RELATION_POSITION_VALID_BIT |
XRT_SPACE_RELATION_ORIENTATION_TRACKED_BIT);
os_mutex_unlock(&hmd->mutex);
rift_s_tracker_get_tracked_pose(hmd->tracker, RIFT_S_TRACKER_POSE_DEVICE, at_timestamp_ns, out_relation);
}
static void
@ -88,24 +81,28 @@ rift_s_get_view_poses(struct xrt_device *xdev,
void
rift_s_hmd_handle_report(struct rift_s_hmd *hmd, timepoint_ns local_ts, rift_s_hmd_report_t *report)
{
const uint32_t TICK_LEN_US = 1000000 / hmd->imu_config.imu_hz;
struct rift_s_imu_config_info_t *imu_config = &hmd->config->imu_config_info;
struct rift_s_imu_calibration *imu_calibration = &hmd->config->imu_calibration;
const uint32_t TICK_LEN_US = 1000000 / imu_config->imu_hz;
uint32_t dt = TICK_LEN_US;
os_mutex_lock(&hmd->mutex);
/* Check that there's at least 1 valid sample */
if (report->samples[0].marker & 0x80)
return;
if (hmd->last_imu_timestamp_ns != 0) {
/* Avoid wrap-around on 32-bit device times */
dt = report->timestamp - hmd->last_imu_timestamp32;
} else {
hmd->last_imu_timestamp_ns = report->timestamp;
hmd->last_imu_timestamp_ns = (timepoint_ns)(report->timestamp) * OS_NS_PER_USEC;
hmd->last_imu_timestamp32 = report->timestamp;
}
hmd->last_imu_timestamp32 = report->timestamp;
hmd->last_imu_local_timestamp_ns = local_ts;
const float gyro_scale = 1.0 / hmd->imu_config.gyro_scale;
const float accel_scale = MATH_GRAVITY_M_S2 / hmd->imu_config.accel_scale;
const float temperature_scale = 1.0 / hmd->imu_config.temperature_scale;
const float temperature_offset = hmd->imu_config.temperature_offset;
const float gyro_scale = 1.0 / imu_config->gyro_scale;
const float accel_scale = MATH_GRAVITY_M_S2 / imu_config->accel_scale;
const float temperature_scale = 1.0 / imu_config->temperature_scale;
const float temperature_offset = imu_config->temperature_offset;
for (int i = 0; i < 3; i++) {
rift_s_hmd_imu_sample_t *s = report->samples + i;
@ -113,41 +110,43 @@ rift_s_hmd_handle_report(struct rift_s_hmd *hmd, timepoint_ns local_ts, rift_s_h
if (s->marker & 0x80)
break; /* Sample (and remaining ones) are invalid */
struct xrt_vec3 gyro, accel;
struct xrt_vec3 raw_accel, raw_gyro;
struct xrt_vec3 accel, gyro;
gyro.x = DEG_TO_RAD(gyro_scale * s->gyro[0]);
gyro.y = DEG_TO_RAD(gyro_scale * s->gyro[1]);
gyro.z = DEG_TO_RAD(gyro_scale * s->gyro[2]);
raw_gyro.x = DEG_TO_RAD(gyro_scale * s->gyro[0]);
raw_gyro.y = DEG_TO_RAD(gyro_scale * s->gyro[1]);
raw_gyro.z = DEG_TO_RAD(gyro_scale * s->gyro[2]);
accel.x = accel_scale * s->accel[0];
accel.y = accel_scale * s->accel[1];
accel.z = accel_scale * s->accel[2];
raw_accel.x = accel_scale * s->accel[0];
raw_accel.y = accel_scale * s->accel[1];
raw_accel.z = accel_scale * s->accel[2];
/* Apply correction offsets first, then rectify */
accel = m_vec3_sub(accel, hmd->imu_calibration.accel.offset_at_0C);
gyro = m_vec3_sub(gyro, hmd->imu_calibration.gyro.offset);
accel = m_vec3_sub(raw_accel, imu_calibration->accel.offset_at_0C);
gyro = m_vec3_sub(raw_gyro, imu_calibration->gyro.offset);
math_matrix_3x3_transform_vec3(&hmd->imu_calibration.accel.rectification, &accel, &hmd->raw_accel);
math_matrix_3x3_transform_vec3(&hmd->imu_calibration.gyro.rectification, &gyro, &hmd->raw_gyro);
math_matrix_3x3_transform_vec3(&imu_calibration->accel.rectification, &raw_accel, &accel);
math_matrix_3x3_transform_vec3(&imu_calibration->gyro.rectification, &raw_gyro, &gyro);
/* FIXME: This doesn't seem to produce the right numbers, but it's OK - we don't use it anyway */
hmd->temperature = temperature_scale * (s->temperature - temperature_offset) + 25;
#if 0
printf ("Sample %d dt %f accel %f %f %f gyro %f %f %f\n",
i, dt_sec, hmd->raw_accel.x, hmd->raw_accel.y, hmd->raw_accel.z,
hmd->raw_gyro.x, hmd->raw_gyro.y, hmd->raw_gyro.z);
RIFT_S_DEBUG("Sample %d dt %f ts %" PRIu64 " report ts %u "
"accel %f %f %f (len %f) gyro %f %f %f",
i, (double)(dt) / (1000000), hmd->last_imu_timestamp_ns,
report->timestamp,
accel.x, accel.y, accel.z, m_vec3_len(raw_accel),
gyro.x, gyro.y, gyro.z);
#endif
// Do 3DOF fusion
m_imu_3dof_update(&hmd->fusion, hmd->last_imu_timestamp_ns, &hmd->raw_accel, &hmd->raw_gyro);
// Send the sample to the pose tracker
rift_s_tracker_imu_update(hmd->tracker, hmd->last_imu_timestamp_ns, local_ts, &accel, &gyro);
hmd->last_imu_timestamp_ns += (uint64_t)dt * OS_NS_PER_USEC;
hmd->last_imu_timestamp32 += dt;
dt = TICK_LEN_US;
}
hmd->pose.orientation = hmd->fusion.rot;
os_mutex_unlock(&hmd->mutex);
}
static bool
@ -173,48 +172,6 @@ dump_fw_block(struct os_hid_device *handle, uint8_t block_id) {
}
#endif
static int
read_hmd_calibration(struct rift_s_hmd *hmd, struct os_hid_device *hid_hmd)
{
char *json = NULL;
int json_len = 0;
int ret = rift_s_read_firmware_block(hid_hmd, RIFT_S_FIRMWARE_BLOCK_IMU_CALIB, &json, &json_len);
if (ret < 0)
return ret;
ret = rift_s_parse_imu_calibration(json, &hmd->imu_calibration);
free(json);
if (ret < 0)
return ret;
ret = rift_s_read_firmware_block(hid_hmd, RIFT_S_FIRMWARE_BLOCK_CAMERA_CALIB, &json, &json_len);
if (ret < 0)
return ret;
ret = rift_s_parse_camera_calibration_block(json, &hmd->camera_calibration);
free(json);
return ret;
}
static int
rift_s_hmd_read_proximity_threshold(struct rift_s_hmd *hmd, struct os_hid_device *hid_hmd)
{
char *json = NULL;
int json_len = 0;
int ret = rift_s_read_firmware_block(hid_hmd, RIFT_S_FIRMWARE_BLOCK_THRESHOLD, &json, &json_len);
if (ret < 0)
return ret;
ret = rift_s_parse_proximity_threshold(json, &hmd->proximity_threshold);
free(json);
return ret;
}
static void
rift_s_hmd_destroy(struct xrt_device *xdev)
{
@ -230,18 +187,12 @@ rift_s_hmd_destroy(struct xrt_device *xdev)
u_var_remove_root(hmd);
m_imu_3dof_close(&hmd->fusion);
os_mutex_destroy(&hmd->mutex);
u_device_free(&hmd->base);
}
struct rift_s_hmd *
rift_s_hmd_create(struct rift_s_system *sys, const unsigned char *hmd_serial_no)
rift_s_hmd_create(struct rift_s_system *sys, const unsigned char *hmd_serial_no, struct rift_s_hmd_config *config)
{
int ret;
DRV_TRACE_MARKER();
enum u_device_alloc_flags flags =
@ -255,6 +206,8 @@ rift_s_hmd_create(struct rift_s_system *sys, const unsigned char *hmd_serial_no)
/* Take a reference to the rift_s_system */
rift_s_system_reference(&hmd->sys, sys);
hmd->config = config;
hmd->base.tracking_origin = &sys->base;
hmd->base.update_inputs = rift_s_update_inputs;
@ -263,16 +216,8 @@ rift_s_hmd_create(struct rift_s_system *sys, const unsigned char *hmd_serial_no)
hmd->base.destroy = rift_s_hmd_destroy;
hmd->base.name = XRT_DEVICE_GENERIC_HMD;
hmd->base.device_type = XRT_DEVICE_TYPE_HMD;
hmd->pose.orientation.w = 1.0f; // All other values set to zero by U_DEVICE_ALLOCATE (which calls U_CALLOC)
m_imu_3dof_init(&hmd->fusion, M_IMU_3DOF_USE_GRAVITY_DUR_20MS);
// Pose / state lock
ret = os_mutex_init(&hmd->mutex);
if (ret != 0) {
RIFT_S_ERROR("Failed to init mutex!");
goto cleanup;
}
hmd->tracker = rift_s_system_get_tracker(sys);
// Print name.
snprintf(hmd->base.str, XRT_DEVICE_NAME_LEN, "Oculus Rift S");
@ -285,31 +230,9 @@ rift_s_hmd_create(struct rift_s_system *sys, const unsigned char *hmd_serial_no)
struct os_hid_device *hid_hmd = rift_s_system_hid_handle(hmd->sys);
if (rift_s_read_panel_info(hid_hmd, &hmd->panel_info) < 0) {
RIFT_S_ERROR("Failed to read Rift S device info");
goto cleanup;
}
RIFT_S_DEBUG("Configuring firmware provided proximity sensor threshold %u", config->proximity_threshold);
if (rift_s_read_firmware_version(hid_hmd) < 0) {
RIFT_S_ERROR("Failed to read Rift S firmware version");
goto cleanup;
}
if (rift_s_read_imu_config(hid_hmd, &hmd->imu_config) < 0) {
RIFT_S_ERROR("Failed to read IMU configuration block");
goto cleanup;
}
if (read_hmd_calibration(hmd, hid_hmd) < 0)
goto cleanup;
/* Configure the proximity sensor threshold */
if (rift_s_hmd_read_proximity_threshold(hmd, hid_hmd) < 0)
goto cleanup;
RIFT_S_DEBUG("Configuring firmware provided proximity sensor threshold %u", hmd->proximity_threshold);
if (rift_s_protocol_set_proximity_threshold(hid_hmd, (uint16_t)hmd->proximity_threshold) < 0)
if (rift_s_protocol_set_proximity_threshold(hid_hmd, (uint16_t)config->proximity_threshold) < 0)
goto cleanup;
#if 0
@ -402,11 +325,8 @@ rift_s_hmd_create(struct rift_s_system *sys, const unsigned char *hmd_serial_no)
// Setup variable tracker: Optional but useful for debugging
u_var_add_root(hmd, "Oculus Rift S", true);
u_var_add_gui_header(hmd, NULL, "Tracking");
u_var_add_pose(hmd, &hmd->pose, "pose");
u_var_add_gui_header(hmd, NULL, "3DoF Tracking");
m_imu_3dof_add_vars(&hmd->fusion, hmd, "");
/* Add tracker variables to the HMD debug */
rift_s_tracker_add_debug_ui(hmd->tracker, hmd);
u_var_add_gui_header(hmd, NULL, "Misc");
u_var_add_log_level(hmd, &rift_s_log_level, "log_level");

View file

@ -21,8 +21,6 @@
#include "xrt/xrt_device.h"
#include "rift_s.h"
#include "rift_s_protocol.h"
#include "rift_s_firmware.h"
/* Oculus Rift S HMD Internal Interface */
#ifndef RIFT_S_HMD_H
@ -33,33 +31,26 @@ struct rift_s_hmd
struct xrt_device base;
struct rift_s_system *sys;
/* HMD config info (belongs to the system, which we have a ref to */
struct rift_s_hmd_config *config;
/* 3DOF fusion */
struct os_mutex mutex;
/* Pose tracker provided by the system */
struct rift_s_tracker *tracker;
/* Tracking to extend 32-bit HMD time to 64-bit nanoseconds */
uint32_t last_imu_timestamp32; /* 32-bit µS device timestamp */
timepoint_ns last_imu_timestamp_ns;
timepoint_ns last_imu_local_timestamp_ns;
struct m_imu_3dof fusion;
struct xrt_pose pose;
struct xrt_vec3 raw_mag, raw_accel, raw_gyro;
/* Auxiliary state */
float temperature;
bool display_on;
/* Configuration / calibration info */
rift_s_panel_info_t panel_info;
rift_s_imu_config_t imu_config;
struct rift_s_imu_calibration imu_calibration;
int proximity_threshold;
struct rift_s_camera_calibration_block camera_calibration;
/* Temporary distortion values for mesh calc */
struct u_panotools_values distortion_vals[2];
};
struct rift_s_hmd *
rift_s_hmd_create(struct rift_s_system *sys, const unsigned char *hmd_serial_no);
rift_s_hmd_create(struct rift_s_system *sys, const unsigned char *hmd_serial_no, struct rift_s_hmd_config *config);
void
rift_s_hmd_handle_report(struct rift_s_hmd *hmd, timepoint_ns local_ts, rift_s_hmd_report_t *report);
void

View file

@ -317,76 +317,44 @@ rift_s_send_keepalive(struct os_hid_device *hid)
os_hid_set_feature(hid, buf, 6);
}
int
rift_s_send_camera_report(struct os_hid_device *hid, bool enable, bool radio_sync_bit)
void
rift_s_protocol_camera_report_init(rift_s_camera_report_t *camera_report)
{
/*
* 05 O1 O2 P1 P1 P2 P2 P3 P3 P4 P4 P5 P5 E1 E1 E3
* E4 E5 U1 U2 U3 A1 A1 A1 A1 A2 A2 A2 A2 A3 A3 A3
* A3 A4 A4 A4 A4 A5 A5 A5 A5
*
* O1 = Camera stream on (0x00 = off, 0x1 = on)
* O2 = Radio Sync maybe?
* Px = Vertical offset / position of camera x passthrough view
* Ex = Exposure of camera x passthrough view
* Ax = ? of camera x. 4 byte LE, Always seems to take values 0x3f0-0x4ff
* but I can't see the effect on the image
* U1U2U3 = 26 00 40 always?
*/
unsigned char buf[41] = {
#if 0
0x05, 0x01, 0x01, 0xb3, 0x36, 0xb3, 0x36, 0xb3, 0x36, 0xb3, 0x36, 0xb3, 0x36, 0xf0, 0xf0, 0xf0,
0xf0, 0xf0, 0x26, 0x00, 0x40, 0x7a, 0x04, 0x00, 0x00, 0xa7, 0x04, 0x00, 0x00, 0xa7, 0x04, 0x00,
0x00, 0xa5, 0x04, 0x00, 0x00, 0xa8, 0x04, 0x00, 0x00
#else
0x05,
0x01,
0x01,
0xb3,
0x36,
0xb3,
0x36,
0xb3,
0x36,
0xb3,
0x36,
0xb3,
0x36,
0xf0,
0xf0,
0xf0,
0xf0,
0xf0,
0x26,
0x00,
0x40,
0x7a,
0x04,
0x00,
0x00,
0xa7,
0x04,
0x00,
0x00,
0xa7,
0x04,
0x00,
0x00,
0xa5,
0x04,
0x00,
0x00,
0xa8,
0x04,
0x00,
0x00
#endif
};
int i;
buf[1] = enable ? 0x1 : 0x0;
buf[2] = radio_sync_bit ? 0x1 : 0x0;
/* One slot per camera: */
camera_report->id = 0x05;
camera_report->uvc_enable = 0x0;
camera_report->radio_sync_flag = 0x0;
return os_hid_set_feature(hid, buf, 41);
camera_report->marker[0] = 0x26;
camera_report->marker[1] = 0x0;
camera_report->marker[2] = 0x40;
for (i = 0; i < 5; i++) {
camera_report->slam_frame_exposures[i] = 0x36b3;
camera_report->slam_frame_gains[i] = 0xf0;
camera_report->unknown32[i] = 0x04bc;
}
}
/*!
 * Send a camera configuration report (HID report 0x05) to the headset.
 *
 * The struct is sent verbatim as a HID feature report, so its in-memory
 * layout must match the 41-byte wire format documented in the protocol header.
 *
 * @param hid           Open HID handle for the HMD.
 * @param camera_report Report to send, normally set up via
 *                      rift_s_protocol_camera_report_init() first.
 * @return result of os_hid_set_feature() (negative on failure).
 */
int
rift_s_protocol_send_camera_report(struct os_hid_device *hid, rift_s_camera_report_t *camera_report)
{
	return os_hid_set_feature(hid, (uint8_t *)camera_report, sizeof(*camera_report));
}
/*!
 * Enable or disable the camera stream.
 *
 * Builds a default camera report and only overrides the enable and
 * radio-sync flags before sending it to the device.
 *
 * @param hid            Open HID handle for the HMD.
 * @param enable         true to start camera streaming, false to stop it.
 * @param radio_sync_bit value for the radio-sync flag; effect on the device
 *                       is not fully understood (see protocol header notes).
 * @return result of rift_s_protocol_send_camera_report() (negative on failure).
 */
static int
rift_s_enable_camera(struct os_hid_device *hid, bool enable, bool radio_sync_bit)
{
	rift_s_camera_report_t camera_report;

	rift_s_protocol_camera_report_init(&camera_report);

	camera_report.uvc_enable = enable ? 0x1 : 0x0;
	camera_report.radio_sync_flag = radio_sync_bit ? 0x1 : 0x0;

	return rift_s_protocol_send_camera_report(hid, &camera_report);
}
int
@ -433,7 +401,7 @@ rift_s_read_firmware_version(struct os_hid_device *hid)
}
int
rift_s_read_imu_config(struct os_hid_device *hid, rift_s_imu_config_t *imu_config)
rift_s_read_imu_config_info(struct os_hid_device *hid, struct rift_s_imu_config_info_t *imu_config)
{
uint8_t buf[FEATURE_BUFFER_SIZE];
int res;
@ -442,7 +410,7 @@ rift_s_read_imu_config(struct os_hid_device *hid, rift_s_imu_config_t *imu_confi
if (res < 21)
return -1;
*imu_config = *(rift_s_imu_config_t *)buf;
*imu_config = *(struct rift_s_imu_config_info_t *)buf;
return 0;
}
@ -494,7 +462,7 @@ rift_s_hmd_enable(struct os_hid_device *hid, bool enable)
/* Send camera report with enable=true enables the streaming. The
* 2nd byte seems something to do with sync, but doesn't always work,
* not sure why yet. */
return rift_s_send_camera_report(hid, enable, false);
return rift_s_enable_camera(hid, enable, false);
}
/* Read the list of devices on the radio link */

View file

@ -179,6 +179,35 @@ typedef struct
int16_t unknown_zero2;
} rift_s_hmd_report_t;
/* Read/Write using report 5 */
/* Read/Write using report 5 */
/*
 * 05 O1 O2 P1 P1 P2 P2 P3 P3 P4 P4 P5 P5 E1 E1 E3
 * E4 E5 U1 U2 U3 A1 A1 A1 A1 A2 A2 A2 A2 A3 A3 A3
 * A3 A4 A4 A4 A4 A5 A5 A5 A5
 *
 * O1 = Camera stream on (0x00 = off, 0x1 = on)
 * O2 = Radio Sync? (Usage not clear, but seems to sometimes affect sync)
 * Px = Exposure *and* Vertical offset / position of camera x passthrough view
 *   Seems to take values from 0x1db7-0x36b3. Values above 0x36c6 are ignored.
 * Ex = Gain of camera x passthrough view
 * U1U2U3 = 26 00 40 always?
 * Ax = ? of camera x. 4 byte LE, Always seems to take values 0x3b0-0x4ff
 *   but I can't see the effect on the images, either controller or passthrough
 */
/* NOTE(review): this struct is sent as a raw 41-byte HID feature report
 * (see rift_s_protocol_send_camera_report), which only matches the wire
 * layout above if the compiler does not insert padding before the uint16_t
 * and uint32_t members — confirm a packed attribute is applied where this
 * is declared/used. */
typedef struct
{
	uint8_t id;              // Report ID, always 0x05
	uint8_t uvc_enable;      // O1: camera stream on/off
	uint8_t radio_sync_flag; // O2: radio sync (semantics unclear)

	/* One slot per camera: */
	uint16_t slam_frame_exposures[5]; // Px: exposure / vertical offset per camera
	uint8_t slam_frame_gains[5];      // Ex: gain per camera

	uint8_t marker[3]; // 0x26 0x00 0x40

	uint32_t unknown32[5]; // Ax: unknown per-camera value
} rift_s_camera_report_t;
/* Read using report 6 */
typedef struct
{
@ -191,7 +220,7 @@ typedef struct
} rift_s_panel_info_t;
/* Read using report 9 */
typedef struct
struct rift_s_imu_config_info_t
{
uint8_t cmd;
uint32_t imu_hz;
@ -199,7 +228,7 @@ typedef struct
float accel_scale; /* Accel = reading * g / accel_scale */
float temperature_scale; /* Temperature = reading / scale + offset */
float temperature_offset;
} rift_s_imu_config_t;
};
/* Packet read from endpoint 11 (0x0b) */
typedef struct
@ -241,7 +270,7 @@ rift_s_read_firmware_version(struct os_hid_device *hid);
int
rift_s_read_panel_info(struct os_hid_device *hid, rift_s_panel_info_t *panel_info);
int
rift_s_read_imu_config(struct os_hid_device *hid, rift_s_imu_config_t *imu_config);
rift_s_read_imu_config_info(struct os_hid_device *hid, struct rift_s_imu_config_info_t *imu_config);
int
rift_s_read_fw_proximity_threshold(struct os_hid_device *hid, int *proximity_threshold);
int
@ -253,6 +282,12 @@ rift_s_set_screen_enable(struct os_hid_device *hid, bool enable);
void
rift_s_send_keepalive(struct os_hid_device *hid);
void
rift_s_protocol_camera_report_init(rift_s_camera_report_t *camera_report);
int
rift_s_protocol_send_camera_report(struct os_hid_device *hid, rift_s_camera_report_t *camera_report);
bool
rift_s_parse_hmd_report(rift_s_hmd_report_t *report, const unsigned char *buf, int size);
bool

View file

@ -26,8 +26,6 @@
#include "rift_s_radio.h"
#include "rift_s_protocol.h"
#define MIN(a, b) ((a) < (b) ? (a) : (b))
/* Struct that forms a double linked queue of pending commands,
* with the head being the currently active command */
struct rift_s_radio_command

View file

@ -0,0 +1,701 @@
/*
* Copyright 2013, Fredrik Hultin.
* Copyright 2013, Jakob Bornecrantz.
* Copyright 2016 Philipp Zabel
* Copyright 2019-2022 Jan Schmidt
* SPDX-License-Identifier: BSL-1.0
*
*/
/*!
* @file
* @brief Driver code for Oculus Rift S headsets
*
* Implementation for the HMD 3dof and 6dof tracking
*
* @author Jan Schmidt <jan@centricular.com>
* @ingroup drv_rift_s
*/
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <time.h>
#include <assert.h>
#include <inttypes.h>
#include "math/m_api.h"
#include "math/m_space.h"
#include "math/m_vec3.h"
#include "os/os_time.h"
#include "util/u_debug.h"
#include "util/u_device.h"
#include "util/u_sink.h"
#include "util/u_trace_marker.h"
#include "util/u_var.h"
#include "xrt/xrt_config_drivers.h"
#include "xrt/xrt_device.h"
#ifdef XRT_BUILD_DRIVER_HANDTRACKING
#include "../drivers/ht/ht_interface.h"
#include "../multi_wrapper/multi.h"
#endif
#include "rift_s.h"
#include "rift_s_interface.h"
#include "rift_s_util.h"
#include "rift_s_tracker.h"
#ifdef XRT_FEATURE_SLAM
static const bool slam_supported = true;
#else
static const bool slam_supported = false;
#endif
#ifdef XRT_BUILD_DRIVER_HANDTRACKING
static const bool hand_supported = true;
#else
static const bool hand_supported = false;
#endif
//! Specifies whether the user wants to use a SLAM tracker.
DEBUG_GET_ONCE_BOOL_OPTION(rift_s_slam, "RIFT_S_SLAM", true)
//! Specifies whether the user wants to use the hand tracker.
DEBUG_GET_ONCE_BOOL_OPTION(rift_s_handtracking, "RIFT_S_HANDTRACKING", true)
static void
rift_s_tracker_get_tracked_pose_imu(struct xrt_device *xdev,
enum xrt_input_name name,
uint64_t at_timestamp_ns,
struct xrt_space_relation *out_relation);
/*!
 * Debug-UI button callback: toggle between SLAM and 3DoF tracking.
 *
 * The button label is rewritten to advertise the *other* method. When
 * switching back to 3DoF, the IMU fusion state is reset and re-seeded from
 * the last tracked orientation (under the pose mutex) so the reported
 * orientation does not jump.
 *
 * @param t_ptr struct rift_s_tracker pointer, passed as void* by the
 *              u_var button machinery.
 */
static void
rift_s_tracker_switch_method_cb(void *t_ptr)
{
	DRV_TRACE_MARKER();
	struct rift_s_tracker *t = t_ptr;
	t->slam_over_3dof = !t->slam_over_3dof;

	struct u_var_button *btn = &t->gui.switch_tracker_btn;

	if (t->slam_over_3dof) { // Use SLAM
		snprintf(btn->label, sizeof(btn->label), "Switch to 3DoF Tracking");
	} else { // Use 3DoF
		snprintf(btn->label, sizeof(btn->label), "Switch to SLAM Tracking");
		os_mutex_lock(&t->mutex);
		m_imu_3dof_reset(&t->fusion.i3dof);
		t->fusion.i3dof.rot = t->pose.orientation;
		os_mutex_unlock(&t->mutex);
	}
}
/*!
 * Build the IMU calibration block that is handed to the SLAM tracker.
 *
 * The driver passes already-corrected accel and gyro readings to the SLAM
 * tracker, so the transforms are identity matrices and the offsets are zero;
 * only the noise model (bias / noise standard deviations) carries real
 * information.
 *
 * @return a fully-initialised struct t_imu_calibration, by value.
 */
XRT_MAYBE_UNUSED struct t_imu_calibration
rift_s_create_slam_imu_calib(void) /* (void): avoid an unprototyped () declaration */
{
	/* FIXME: Validate these hard coded standard deviations against
	 * some actual at-rest IMU measurements */
	const double a_bias_std = 0.001;
	const double a_noise_std = 0.016;
	const double g_bias_std = 0.0001;
	const double g_noise_std = 0.000282;

	/* we pass already corrected accel and gyro
	 * readings to Basalt, so the transforms and
	 * offsets are just identity / zero matrices */
	struct t_imu_calibration calib = {
	    .accel =
	        {
	            .transform =
	                {
	                    {1.0, 0.0, 0.0},
	                    {0.0, 1.0, 0.0},
	                    {0.0, 0.0, 1.0},
	                },
	            .offset =
	                {
	                    0,
	                },
	            .bias_std = {a_bias_std, a_bias_std, a_bias_std},
	            .noise_std = {a_noise_std, a_noise_std, a_noise_std},
	        },
	    .gyro =
	        {
	            .transform =
	                {
	                    {1.0, 0.0, 0.0},
	                    {0.0, 1.0, 0.0},
	                    {0.0, 0.0, 1.0},
	                },
	            .offset =
	                {
	                    0,
	                },
	            .bias_std = {g_bias_std, g_bias_std, g_bias_std},
	            .noise_std = {g_noise_std, g_noise_std, g_noise_std},
	        },
	};
	return calib;
}
/*!
 * Build the extra SLAM calibration: IMU↔camera extrinsics and sensor
 * frequencies, derived from the firmware-provided device calibration.
 *
 * Computes T_imu_cam for the two front cameras by composing the inverse of
 * the firmware's device_from_imu isometry with each camera's
 * device_from_camera isometry.
 *
 * @param hmd_config HMD configuration holding the firmware camera and IMU
 *                   calibration blocks.
 * @return a fully-initialised struct t_slam_calib_extras, by value.
 */
static struct t_slam_calib_extras
rift_s_create_extra_slam_calib(struct rift_s_hmd_config *hmd_config)
{
	/* SLAM frames are every 2nd frame of 60Hz camera feed */
	const int CAMERA_FREQUENCY = 30;

	struct rift_s_camera_calibration_block *camera_calibration = &hmd_config->camera_calibration;
	struct rift_s_camera_calibration *left = &camera_calibration->cameras[RIFT_S_CAMERA_FRONT_LEFT];
	struct rift_s_camera_calibration *right = &camera_calibration->cameras[RIFT_S_CAMERA_FRONT_RIGHT];

	/* Compute the IMU from cam transform for each cam */
	struct xrt_pose device_from_imu, imu_from_device;
	math_pose_from_isometry(&hmd_config->imu_calibration.device_from_imu, &device_from_imu);
	math_pose_invert(&device_from_imu, &imu_from_device);

	struct xrt_pose device_from_left, device_from_right;
	math_pose_from_isometry(&left->device_from_camera, &device_from_left);
	math_pose_from_isometry(&right->device_from_camera, &device_from_right);

	/* imu_from_device ∘ device_from_camera = imu_from_camera pose per eye */
	struct xrt_pose P_imu_left_cam, P_imu_right_cam;
	math_pose_transform(&imu_from_device, &device_from_left, &P_imu_left_cam);
	math_pose_transform(&imu_from_device, &device_from_right, &P_imu_right_cam);

	/* Convert the poses to the 4x4 matrices the SLAM calib struct wants */
	struct xrt_matrix_4x4 T_imu_left_cam, T_imu_right_cam;
	math_matrix_4x4_isometry_from_pose(&P_imu_left_cam, &T_imu_left_cam);
	math_matrix_4x4_isometry_from_pose(&P_imu_right_cam, &T_imu_right_cam);

	RIFT_S_DEBUG("IMU left cam pose %f %f %f orient %f %f %f %f", P_imu_left_cam.position.x,
	             P_imu_left_cam.position.y, P_imu_left_cam.position.z, P_imu_left_cam.orientation.x,
	             P_imu_left_cam.orientation.y, P_imu_left_cam.orientation.z, P_imu_left_cam.orientation.w);
	RIFT_S_DEBUG("IMU right cam pose %f %f %f orient %f %f %f %f", P_imu_right_cam.position.x,
	             P_imu_right_cam.position.y, P_imu_right_cam.position.z, P_imu_right_cam.orientation.x,
	             P_imu_right_cam.orientation.y, P_imu_right_cam.orientation.z, P_imu_right_cam.orientation.w);

	double imu_frequency = hmd_config->imu_config_info.imu_hz;

	struct t_slam_calib_extras calib = {
	    .imu_frequency = imu_frequency,
	    .cams =
	        {
	            {
	                .frequency = CAMERA_FREQUENCY,
	                .T_imu_cam = T_imu_left_cam,
	                .rpmax = 0.0,
	            },
	            {
	                .frequency = CAMERA_FREQUENCY,
	                .T_imu_cam = T_imu_right_cam,
	                .rpmax = 0.0,
	            },
	        },
	};

	return calib;
}
/*!
 * Create and start the SLAM tracker, if XRT_FEATURE_SLAM was built.
 *
 * The tracker's processing nodes are owned by @p xfctx, so nothing needs
 * explicit teardown here on failure.
 *
 * @param t     Tracker holding the stereo/IMU calibration to pass along.
 * @param xfctx Frame context that owns the created processing nodes.
 * @return the SLAM tracker's input sinks, or NULL if SLAM is not built or
 *         creation/startup failed.
 */
static struct xrt_slam_sinks *
rift_s_create_slam_tracker(struct rift_s_tracker *t, struct xrt_frame_context *xfctx)
{
	DRV_TRACE_MARKER();

	struct xrt_slam_sinks *sinks = NULL;

#ifdef XRT_FEATURE_SLAM
	struct t_slam_tracker_config config = {0};
	t_slam_fill_default_config(&config);

	/* No need to refcount these parameters */
	config.stereo_calib = t->stereo_calib;
	config.imu_calib = &t->slam_imu_calib;
	config.extra_calib = &t->slam_extra_calib;

	int create_status = t_slam_create(xfctx, &config, &t->tracking.slam, &sinks);
	if (create_status != 0) {
		return NULL;
	}

	int start_status = t_slam_start(t->tracking.slam);
	if (start_status != 0) {
		return NULL;
	}

	RIFT_S_DEBUG("Rift S SLAM tracker successfully started");
#endif

	return sinks;
}
/*!
 * Create the hand-tracking device, if XRT_BUILD_DRIVER_HANDTRACKING was built.
 *
 * On success the returned device is wrapped in a tracking override that
 * anchors hand poses to this tracker's pose at the (rotated) left camera,
 * matching the rotated stereo configuration given to the hand tracker.
 *
 * @param t          Tracker providing stereo calibration and the base pose.
 * @param xfctx      Frame context that owns the created processing nodes.
 * @param out_sinks  Receives the hand tracker's frame sinks (NULL if not built).
 * @param out_device Receives the (wrapped) hand-tracking device (NULL if not built).
 * @return 0 on success (including the not-built case), else the
 *         ht_device_create() error code.
 */
static int
rift_s_create_hand_tracker(struct rift_s_tracker *t,
                           struct xrt_frame_context *xfctx,
                           struct xrt_slam_sinks **out_sinks,
                           struct xrt_device **out_device)
{
	DRV_TRACE_MARKER();

	struct xrt_slam_sinks *sinks = NULL;
	struct xrt_device *device = NULL;

#ifdef XRT_BUILD_DRIVER_HANDTRACKING
	//!@todo What's a sensible boundary for Rift S?
	struct t_image_boundary_info boundary_info;
	boundary_info.views[0].type = HT_IMAGE_BOUNDARY_NONE;
	boundary_info.views[1].type = HT_IMAGE_BOUNDARY_NONE;

	int create_status = ht_device_create(xfctx,           //
	                                     t->stereo_calib, //
	                                     HT_ALGORITHM_MERCURY, //
	                                     boundary_info,
	                                     &sinks, //
	                                     &device);
	if (create_status != 0) {
		return create_status;
	}

	if (device != NULL) {
		// Attach tracking override that links hand pose to the SLAM tracked position
		// The hand poses need to be rotated 90° because of the way we passed
		// the stereo camera configuration to the hand tracker.
		struct xrt_pose left_cam_rotated_from_imu;
		// 180° rotation about X (w = 0, x = 1) applied ahead of left_cam_from_imu
		struct xrt_pose cam_rotate = {.orientation = {.x = 1.0, .y = 0.0, .z = 0.0, .w = 0.0},
		                              .position = {0, 0, 0}};
		math_pose_transform(&cam_rotate, &t->left_cam_from_imu, &left_cam_rotated_from_imu);

		device = multi_create_tracking_override(XRT_TRACKING_OVERRIDE_ATTACHED, device, &t->base,
		                                        XRT_INPUT_GENERIC_TRACKER_POSE, &left_cam_rotated_from_imu);
	}

	RIFT_S_DEBUG("Rift S HMD hand tracker successfully created");
#endif

	*out_sinks = sinks;
	*out_device = device;

	return 0;
}
/*!
 * Register the tracker's debug widgets under an existing u_var root.
 *
 * The tracker-method switch button is only shown when SLAM is actually
 * enabled. The order of the calls determines widget order in the UI.
 *
 * @param t    Tracker whose state is exposed.
 * @param root u_var root (created elsewhere, e.g. by the HMD) to attach to.
 */
void
rift_s_tracker_add_debug_ui(struct rift_s_tracker *t, void *root)
{
	u_var_add_gui_header(root, NULL, "Tracking");

	if (t->tracking.slam_enabled) {
		t->gui.switch_tracker_btn.cb = rift_s_tracker_switch_method_cb;
		t->gui.switch_tracker_btn.ptr = t;
		u_var_add_button(root, &t->gui.switch_tracker_btn, "Switch to 3DoF Tracking");
	}
	u_var_add_pose(root, &t->pose, "Tracked Pose");

	u_var_add_gui_header(root, NULL, "3DoF Tracking");
	m_imu_3dof_add_vars(&t->fusion.i3dof, root, "");

	u_var_add_gui_header(root, NULL, "SLAM Tracking");
	u_var_add_ro_text(root, t->gui.slam_status, "Tracker status");

	u_var_add_gui_header(root, NULL, "Hand Tracking");
	u_var_add_ro_text(root, t->gui.hand_status, "Tracker status");
}
/*!
 * Procedure to setup trackers: 3dof, SLAM and hand tracking.
 *
 * Decides which trackers to initialize based on build options and the
 * RIFT_S_SLAM / RIFT_S_HANDTRACKING environment variables, computes the
 * IMU/camera extrinsic poses from firmware calibration, and wires the
 * camera frame sinks so frames reach SLAM and/or hand tracking.
 *
 * @param origin     Tracking origin assigned to the tracker's base device.
 * @param xfctx      The frame server that will own processing nodes.
 * @param hmd_config HMD configuration and firmware info.
 *
 * @return initialised tracker on success, NULL if creation fails.
 */
struct rift_s_tracker *
rift_s_tracker_create(struct xrt_tracking_origin *origin,
                      struct xrt_frame_context *xfctx,
                      struct rift_s_hmd_config *hmd_config)
{
	struct rift_s_tracker *t = U_DEVICE_ALLOCATE(struct rift_s_tracker, U_DEVICE_ALLOC_TRACKING_NONE, 1, 0);
	if (t == NULL) {
		return NULL;
	}

	t->base.tracking_origin = origin;
	t->base.get_tracked_pose = rift_s_tracker_get_tracked_pose_imu;

	// Pose / state lock
	int ret = os_mutex_init(&t->mutex);
	if (ret != 0) {
		RIFT_S_ERROR("Failed to init mutex!");
		rift_s_tracker_destroy(t);
		return NULL;
	}

	// Compute IMU and camera device poses for get_tracked_pose relations
	math_pose_from_isometry(&hmd_config->imu_calibration.device_from_imu, &t->device_from_imu);

	struct xrt_pose device_from_left_cam;
	struct rift_s_camera_calibration *left_cam = &hmd_config->camera_calibration.cameras[RIFT_S_CAMERA_FRONT_LEFT];
	math_pose_from_isometry(&left_cam->device_from_camera, &device_from_left_cam);

	struct xrt_pose left_cam_from_device;
	math_pose_invert(&device_from_left_cam, &left_cam_from_device);
	math_pose_transform(&left_cam_from_device, &t->device_from_imu, &t->left_cam_from_imu);

	// Decide whether to initialize the SLAM tracker
	bool slam_wanted = debug_get_bool_option_rift_s_slam();
	bool slam_enabled = slam_supported && slam_wanted;

	// Decide whether to initialize the hand tracker
	bool hand_wanted = debug_get_bool_option_rift_s_handtracking();
	bool hand_enabled = hand_supported && hand_wanted;

	t->tracking.slam_enabled = slam_enabled;
	t->tracking.hand_enabled = hand_enabled;

	t->slam_over_3dof = slam_enabled; // We prefer SLAM over 3dof tracking if possible

	const char *slam_status = t->tracking.slam_enabled ? "Enabled"
	                          : !slam_wanted           ? "Disabled by the user (envvar set to false)"
	                          : !slam_supported        ? "Unavailable (not built)"
	                                                   : NULL;

	const char *hand_status = t->tracking.hand_enabled ? "Enabled"
	                          : !hand_wanted           ? "Disabled by the user (envvar set to false)"
	                          : !hand_supported        ? "Unavailable (not built)"
	                                                   : NULL;

	assert(slam_status != NULL && hand_status != NULL);

	snprintf(t->gui.slam_status, sizeof(t->gui.slam_status), "%s", slam_status);
	snprintf(t->gui.hand_status, sizeof(t->gui.hand_status), "%s", hand_status);

	// Initialize 3DoF tracker
	m_imu_3dof_init(&t->fusion.i3dof, M_IMU_3DOF_USE_GRAVITY_DUR_20MS);
	t->pose.orientation.w = 1.0f; // All other values set to zero by U_DEVICE_ALLOCATE (which calls U_CALLOC)

	// Construct the stereo camera calibration for the front cameras
	t->stereo_calib = rift_s_create_stereo_camera_calib_rotated(&hmd_config->camera_calibration);
	t->slam_imu_calib = rift_s_create_slam_imu_calib();
	t->slam_extra_calib = rift_s_create_extra_slam_calib(hmd_config);

	// Initialize the input sinks for the camera to send to

	// Initialize SLAM tracker
	struct xrt_slam_sinks *slam_sinks = NULL;
	if (t->tracking.slam_enabled) {
		slam_sinks = rift_s_create_slam_tracker(t, xfctx);
		if (slam_sinks == NULL) {
			RIFT_S_WARN("Unable to setup the SLAM tracker");
			rift_s_tracker_destroy(t);
			return NULL;
		}
	}

	// Initialize hand tracker
	struct xrt_slam_sinks *hand_sinks = NULL;
	struct xrt_device *hand_device = NULL;
	if (t->tracking.hand_enabled) {
		/* Renamed from `hand_status` to avoid shadowing the status
		 * string above (-Wshadow), matching the `create_status`
		 * naming used by the sibling creation helpers. */
		int create_status = rift_s_create_hand_tracker(t, xfctx, &hand_sinks, &hand_device);
		if (create_status != 0 || hand_sinks == NULL || hand_device == NULL) {
			RIFT_S_WARN("Unable to setup the hand tracker");
			rift_s_tracker_destroy(t);
			return NULL;
		}
	}

	// Setup sinks depending on tracking configuration
	struct xrt_slam_sinks entry_sinks = {0};
	if (slam_enabled && hand_enabled) {
		// Tee each camera stream into both the SLAM and hand-tracking sinks
		struct xrt_frame_sink *entry_left_sink = NULL;
		struct xrt_frame_sink *entry_right_sink = NULL;

		u_sink_split_create(xfctx, slam_sinks->left, hand_sinks->left, &entry_left_sink);
		u_sink_split_create(xfctx, slam_sinks->right, hand_sinks->right, &entry_right_sink);

		entry_sinks = (struct xrt_slam_sinks){
		    .left = entry_left_sink,
		    .right = entry_right_sink,
		    .imu = slam_sinks->imu,
		    .gt = slam_sinks->gt,
		};
	} else if (slam_enabled) {
		entry_sinks = *slam_sinks;
	} else if (hand_enabled) {
		entry_sinks = *hand_sinks;
	} else {
		entry_sinks = (struct xrt_slam_sinks){0};
	}

	t->slam_sinks = entry_sinks;
	t->handtracker = hand_device;

	return t;
}
/*!
 * Release the tracker's owned resources: drops the stereo calibration
 * reference, closes the 3DoF fusion state and destroys the mutex.
 *
 * Note: does not free @p t itself here; the tracker memory is released
 * through its device lifecycle (allocated via U_DEVICE_ALLOCATE).
 */
void
rift_s_tracker_destroy(struct rift_s_tracker *t)
{
	t_stereo_camera_calibration_reference(&t->stereo_calib, NULL);

	m_imu_3dof_close(&t->fusion.i3dof);
	os_mutex_destroy(&t->mutex);
}
/*!
 * Accessor for the sinks the camera should push SLAM frames/IMU data into.
 *
 * NOTE(review): this returns `in_slam_sinks`, while the rest of this file
 * assigns and pushes through `t->slam_sinks` — confirm the struct really has
 * both members and that the camera is meant to feed `in_slam_sinks` (the
 * struct definition's tail is not visible here).
 */
struct xrt_slam_sinks *
rift_s_tracker_get_slam_sinks(struct rift_s_tracker *t)
{
	return &t->in_slam_sinks;
}
/*!
 * Accessor for the hand-tracking xrt_device created at tracker setup.
 *
 * @return the (override-wrapped) hand tracking device, or NULL when hand
 *         tracking was not enabled/built.
 */
struct xrt_device *
rift_s_tracker_get_hand_tracking_device(struct rift_s_tracker *t)
{
	return t->handtracker;
}
/*!
 * Convert a hardware timestamp into monotonic clock. Updates offset estimate.
 * @note Only used with IMU samples as they have the smallest USB transmission time.
 *
 * Maintains an exponentially-smoothed estimate of the hardware→monotonic
 * offset (weight `alpha` on the accumulated estimate) and guarantees the
 * mapped output never goes backward.
 *
 * @param t struct rift_s_tracker
 * @param local_timestamp_ns Monotonic timestamp at which the IMU sample was received
 * @param device_ts HMD Hardware timestamp, gets converted to local monotonic clock.
 * @return the device timestamp mapped onto the local monotonic clock.
 */
static timepoint_ns
clock_hw2mono_update(struct rift_s_tracker *t, timepoint_ns local_timestamp_ns, uint64_t device_ts)
{
	const double alpha = 0.9995; // Weight to put on accumulated hw2mono clock offset
	timepoint_ns hw = device_ts;

	/* Only do updates if the monotonic time increased
	 * (otherwise we're processing packets that arrived
	 * at the same time - so only take the earliest) */
	if (local_timestamp_ns > t->last_hw2mono_local_ts) {
		time_duration_ns old_hw2mono = t->hw2mono;
		time_duration_ns got_hw2mono = local_timestamp_ns - hw;
		time_duration_ns new_hw2mono = old_hw2mono * alpha + got_hw2mono * (1.0 - alpha);

		if (old_hw2mono == 0) { // hw2mono was not set for the first time yet
			new_hw2mono = got_hw2mono;
		}

		/* Only accept the new offset if it keeps the mapped time
		 * monotonic; otherwise keep the old one and warn. */
		time_duration_ns new_hw2mono_out = hw + new_hw2mono;
		if (new_hw2mono_out >= t->last_hw2mono_out) {
			t->last_hw2mono_out = new_hw2mono_out;
			t->hw2mono = new_hw2mono;
			t->have_hw2mono = true;
			t->last_hw2mono_local_ts = local_timestamp_ns;
		} else {
			RIFT_S_WARN("Monotonic time map went backward (%" PRIu64 ", %" PRIu64 ") => %" PRIu64
			            " < %" PRIu64 ". Reporting %" PRIu64,
			            hw, local_timestamp_ns, new_hw2mono_out, t->last_hw2mono_out, hw + t->hw2mono);
		}
	} else {
		// Same-arrival-time packet: map with the existing offset, no update
		t->last_hw2mono_out = hw + t->hw2mono;
	}

	return t->last_hw2mono_out;
}
//! Camera specific logic for clock conversion
//! Maps a device timestamp to the monotonic clock using the current hw2mono
//! offset WITHOUT updating the estimate (only IMU samples update it).
static void
clock_hw2mono_get(struct rift_s_tracker *t, uint64_t device_ts, timepoint_ns *out)
{
	*out = t->hw2mono + device_ts;
}
/*!
 * Feed one IMU sample into the tracker.
 *
 * Updates the hw→monotonic clock estimate, runs the 3DoF fusion (skipped if
 * the mapped local time went backward), records the last-sample state under
 * the mutex, and finally pushes the sample to the SLAM tracker's IMU sink
 * (outside the lock) if one is connected.
 *
 * @param t Tracker instance.
 * @param timestamp_ns Device-clock timestamp of the sample.
 * @param local_timestamp_ns_orig Monotonic time the sample was received.
 * @param accel Corrected accelerometer reading.
 * @param gyro  Corrected gyroscope reading.
 */
void
rift_s_tracker_imu_update(struct rift_s_tracker *t,
                          uint64_t timestamp_ns,
                          timepoint_ns local_timestamp_ns_orig,
                          const struct xrt_vec3 *accel,
                          const struct xrt_vec3 *gyro)
{
	os_mutex_lock(&t->mutex);

	/* Ignore packets before we're ready */
	if (!t->ready_for_data) {
		os_mutex_unlock(&t->mutex);
		return;
	}

	/* Get the smoothed monotonic time estimate for this IMU sample */
	timepoint_ns local_timestamp_ns = clock_hw2mono_update(t, local_timestamp_ns_orig, timestamp_ns);

	if (t->fusion.last_imu_local_timestamp_ns != 0 && local_timestamp_ns < t->fusion.last_imu_local_timestamp_ns) {
		RIFT_S_WARN("IMU time went backward by %" PRId64 " ns",
		            local_timestamp_ns - t->fusion.last_imu_local_timestamp_ns);
	} else {
		m_imu_3dof_update(&t->fusion.i3dof, timestamp_ns, accel, gyro);
	}

	RIFT_S_TRACE("IMU timestamp %" PRIu64 " (dt %f) local %" PRIu64 " hw2mono %" PRIu64 " (dt %f) offset %" PRId64,
	             timestamp_ns, (double)(timestamp_ns - t->fusion.last_imu_timestamp_ns) / 1000000000.0,
	             local_timestamp_ns_orig, local_timestamp_ns,
	             (double)(local_timestamp_ns - t->fusion.last_imu_local_timestamp_ns) / 1000000000.0, t->hw2mono);

	t->fusion.last_angular_velocity = *gyro;
	t->fusion.last_imu_timestamp_ns = timestamp_ns;
	t->fusion.last_imu_local_timestamp_ns = local_timestamp_ns;

	t->pose.orientation = t->fusion.i3dof.rot;

	os_mutex_unlock(&t->mutex);

	if (t->slam_sinks.imu) {
		/* Push IMU sample to the SLAM tracker */
		struct xrt_vec3_f64 accel64 = {accel->x, accel->y, accel->z};
		struct xrt_vec3_f64 gyro64 = {gyro->x, gyro->y, gyro->z};
		struct xrt_imu_sample sample = {
		    .timestamp_ns = local_timestamp_ns, .accel_m_s2 = accel64, .gyro_rad_secs = gyro64};
		xrt_sink_push_imu(t->slam_sinks.imu, &sample);
	}
}
#define UPPER_32BITS(x) ((x)&0xffffffff00000000ULL)
/*!
 * Push one stereo frame pair toward the SLAM/hand-tracking sinks.
 *
 * Frames are dropped until the tracker is started and at least one IMU
 * sample has established the hw→mono clock offset. Because camera and IMU
 * timestamps are extended to 64 bits independently, a per-epoch correction
 * (camera_ts_offset) is applied/recomputed whenever the frame time drifts
 * more than 2^31 µs from the last IMU time. Frames whose mapped time goes
 * backward are dropped with a warning.
 *
 * @param t Tracker instance.
 * @param frame_ts_ns Device-clock timestamp of the frame pair.
 * @param left_frame / @param right_frame Frames to push; their timestamp
 *        fields are overwritten with the mapped monotonic time.
 */
void
rift_s_tracker_push_slam_frames(struct rift_s_tracker *t,
                                uint64_t frame_ts_ns,
                                struct xrt_frame *left_frame,
                                struct xrt_frame *right_frame)
{
	timepoint_ns frame_time;

	os_mutex_lock(&t->mutex);
	/* Ignore packets before we're ready */
	if (!t->ready_for_data) {
		os_mutex_unlock(&t->mutex);
		return;
	}

	if (!t->have_hw2mono) {
		/* Drop any frames before we have IMU */
		os_mutex_unlock(&t->mutex);
		return;
	}

	/* Ensure the input timestamp is within 32-bits of the IMU
	 * time, because the timestamps are reported and extended to 64-bits
	 * separately and can end up in different epochs */
	uint64_t adj_frame_ts_ns = frame_ts_ns + t->camera_ts_offset;
	int64_t frame_to_imu_uS = (adj_frame_ts_ns / 1000 - t->fusion.last_imu_timestamp_ns / 1000);
	if (frame_to_imu_uS < -(int64_t)(1ULL << 31) || frame_to_imu_uS > (int64_t)(1ULL << 31)) {
		// Re-anchor the camera epoch to the IMU's upper 32 bits (in µs)
		t->camera_ts_offset =
		    (UPPER_32BITS(t->fusion.last_imu_timestamp_ns / 1000) - UPPER_32BITS(frame_ts_ns / 1000)) * 1000;
		RIFT_S_DEBUG("Applying epoch offset to frame times of %" PRId64 " (frame->imu was %" PRId64 " µS)",
		             t->camera_ts_offset, frame_to_imu_uS);
	}
	frame_ts_ns += t->camera_ts_offset;

	clock_hw2mono_get(t, frame_ts_ns, &frame_time);

	if (frame_time < t->last_frame_time) {
		RIFT_S_WARN("Camera frame time went backward by %" PRId64 " ns", frame_time - t->last_frame_time);
		os_mutex_unlock(&t->mutex);
		return;
	}

	RIFT_S_TRACE("SLAM frame timestamp %" PRIu64 " local %" PRIu64, frame_ts_ns, frame_time);
	t->last_frame_time = frame_time;
	os_mutex_unlock(&t->mutex);

	if (t->slam_sinks.left) {
		left_frame->timestamp = frame_time;
		xrt_sink_push_frame(t->slam_sinks.left, left_frame);
	}
	if (t->slam_sinks.right) {
		right_frame->timestamp = frame_time;
		xrt_sink_push_frame(t->slam_sinks.right, right_frame);
	}
}
//! Specific pose correction for Basalt to OpenXR coordinates
//! Rotates both orientation and position by a fixed quaternion
//! (±90° about X, given x = 0.70710678, w = -0.70710678), in place.
XRT_MAYBE_UNUSED static inline void
rift_s_tracker_correct_pose_from_basalt(struct xrt_pose *pose)
{
	// x, y, z, w ordering — assumes struct xrt_quat member order; TODO confirm
	struct xrt_quat q = {0.70710678, 0, 0, -0.70710678};
	math_quat_rotate(&q, &pose->orientation, &pose->orientation);
	math_quat_rotate_vec3(&q, &pose->position, &pose->position);
}
/*!
 * xrt_device get_tracked_pose entry point for the tracker device itself.
 *
 * Thin adapter: only accepts XRT_INPUT_GENERIC_TRACKER_POSE and forwards to
 * rift_s_tracker_get_tracked_pose() with the raw IMU pose (no device or
 * camera offset applied).
 */
static void
rift_s_tracker_get_tracked_pose_imu(struct xrt_device *xdev,
                                    enum xrt_input_name name,
                                    uint64_t at_timestamp_ns,
                                    struct xrt_space_relation *out_relation)
{
	struct rift_s_tracker *tracker = (struct rift_s_tracker *)(xdev);
	assert(name == XRT_INPUT_GENERIC_TRACKER_POSE);
	rift_s_tracker_get_tracked_pose(tracker, RIFT_S_TRACKER_POSE_IMU, at_timestamp_ns, out_relation);
}
/*!
 * Compute the tracked pose at @p at_timestamp_ns for the requested reference
 * point (IMU, left camera, or device), using SLAM when enabled and selected,
 * otherwise the 3DoF fusion orientation.
 *
 * The requested offset pose (device_from_imu / left_cam_from_imu) is pushed
 * onto a relation chain first, then the IMU-space relation from the active
 * tracker, and the chain is resolved into @p out_relation.
 *
 * On the 3DoF path only orientation is tracked (no position), and the pose
 * is read under the mutex; prediction to at_timestamp_ns is still a TODO.
 */
void
rift_s_tracker_get_tracked_pose(struct rift_s_tracker *t,
                                enum rift_s_tracker_pose pose,
                                uint64_t at_timestamp_ns,
                                struct xrt_space_relation *out_relation)
{
	struct xrt_relation_chain xrc = {0};

	if (pose == RIFT_S_TRACKER_POSE_DEVICE) {
		m_relation_chain_push_pose(&xrc, &t->device_from_imu);
	} else if (pose == RIFT_S_TRACKER_POSE_LEFT_CAMERA) {
		m_relation_chain_push_pose(&xrc, &t->left_cam_from_imu);
	}

	if (t->tracking.slam_enabled && t->slam_over_3dof) {
		struct xrt_space_relation imu_relation = XRT_SPACE_RELATION_ZERO;

		// Get the IMU pose from the SLAM tracker
		xrt_tracked_slam_get_tracked_pose(t->tracking.slam, at_timestamp_ns, &imu_relation);

#if defined(XRT_HAVE_BASALT_SLAM)
		rift_s_tracker_correct_pose_from_basalt(&imu_relation.pose);
#endif
		imu_relation.relation_flags = (enum xrt_space_relation_flags)(
		    XRT_SPACE_RELATION_ORIENTATION_VALID_BIT | XRT_SPACE_RELATION_POSITION_VALID_BIT |
		    XRT_SPACE_RELATION_ORIENTATION_TRACKED_BIT | XRT_SPACE_RELATION_POSITION_TRACKED_BIT);
		m_relation_chain_push_relation(&xrc, &imu_relation);
	} else {
		struct xrt_space_relation imu_relation = XRT_SPACE_RELATION_ZERO;

		os_mutex_lock(&t->mutex);
		// TODO: Estimate pose at timestamp at_timestamp_ns
		math_quat_normalize(&t->pose.orientation);
		imu_relation.pose = t->pose;
		imu_relation.angular_velocity = t->fusion.last_angular_velocity;
		// Position bit is "valid" but not "tracked" on the 3DoF path
		imu_relation.relation_flags = (enum xrt_space_relation_flags)(
		    XRT_SPACE_RELATION_ORIENTATION_VALID_BIT | XRT_SPACE_RELATION_POSITION_VALID_BIT |
		    XRT_SPACE_RELATION_ORIENTATION_TRACKED_BIT);
		m_relation_chain_push_relation(&xrc, &imu_relation);
		os_mutex_unlock(&t->mutex);
	}

	m_relation_chain_resolve(&xrc, out_relation);
}
/*!
 * Mark the tracker ready; until this is called, IMU samples and camera
 * frames pushed into the tracker are silently dropped.
 */
void
rift_s_tracker_start(struct rift_s_tracker *t)
{
	os_mutex_lock(&t->mutex);
	t->ready_for_data = true;
	os_mutex_unlock(&t->mutex);
}

View file

@ -0,0 +1,159 @@
/*
* Copyright 2013, Fredrik Hultin.
* Copyright 2013, Jakob Bornecrantz.
* Copyright 2016 Philipp Zabel
* Copyright 2019-2022 Jan Schmidt
* SPDX-License-Identifier: BSL-1.0
*/
/*!
* @file
* @brief HMD tracker handling
* @author Jan Schmidt <jan@centricular.com>
* @ingroup drv_rift_s
*/
#pragma once
#include "math/m_imu_3dof.h"
#include "os/os_threading.h"
#include "util/u_var.h"
#include "xrt/xrt_defines.h"
#include "xrt/xrt_device.h"
#include "tracking/t_tracking.h"
#include "rift_s_firmware.h"
/* Oculus Rift S HMD Tracking */
#ifndef RIFT_S_TRACKER_H
#define RIFT_S_TRACKER_H
struct rift_s_hmd_config;
//! Reference frame that rift_s_tracker_get_tracked_pose() reports in
enum rift_s_tracker_pose
{
	RIFT_S_TRACKER_POSE_IMU,         //!< Pose of the IMU
	RIFT_S_TRACKER_POSE_LEFT_CAMERA, //!< Pose of the left front camera (see left_cam_from_imu)
	RIFT_S_TRACKER_POSE_DEVICE,      //!< Pose of the HMD device origin (see device_from_imu)
};
/*!
 * Tracking state for the Rift S HMD: 3DoF IMU fusion plus optional
 * camera-based SLAM and hand tracking.
 */
struct rift_s_tracker
{
	struct xrt_device base;

	//! Protects shared access to 3dof and pose storage
	struct os_mutex mutex;

	//! Don't process IMU / video until started (see rift_s_tracker_start())
	bool ready_for_data;

	//! 3DoF orientation fusion state, used when SLAM is not active
	struct
	{
		//! Main fusion calculator.
		struct m_imu_3dof i3dof;
		//! The last angular velocity from the IMU, for prediction.
		struct xrt_vec3 last_angular_velocity;
		//! When did we get the last IMU sample, device clock
		uint64_t last_imu_timestamp_ns;
		//! Last IMU sample local system clock
		timepoint_ns last_imu_local_timestamp_ns;
	} fusion;

	//! Fields related to camera-based tracking (SLAM and hand tracking)
	struct
	{
		//! SLAM tracker.
		//! @todo Right now, we are not consistent in how we interface with
		//! trackers. In particular, we have a @ref xrt_tracked_slam field but not
		//! an equivalent for hand tracking.
		struct xrt_tracked_slam *slam;
		//! Set at start. Whether the SLAM tracker was initialized.
		bool slam_enabled;
		//! Set at start. Whether the hand tracker was initialized.
		bool hand_enabled;
	} tracking;

	//! Correction offset poses from firmware
	struct xrt_pose device_from_imu;
	struct xrt_pose left_cam_from_imu;

	//! Whether the hw2mono clock offset below has been estimated yet
	bool have_hw2mono;
	//! Estimated offset from HMD device timestamp to local monotonic clock
	time_duration_ns hw2mono;
	time_duration_ns last_hw2mono_out;
	timepoint_ns last_hw2mono_local_ts;
	timepoint_ns last_frame_time;

	//! Adjustment to apply to camera timestamps to bring them into the
	//! same 32-bit range as the IMU times
	int64_t camera_ts_offset;

	//! Whether to track the HMD with 6dof SLAM or fallback to the `fusion` 3dof tracker
	bool slam_over_3dof;

	//! Last tracked pose
	struct xrt_pose pose;

	/* Stereo calibration for the front 2 cameras */
	struct t_stereo_camera_calibration *stereo_calib;
	struct t_imu_calibration slam_imu_calib;
	struct t_slam_calib_extras slam_extra_calib;

	/* Input sinks that the camera delivers SLAM frames to */
	struct xrt_slam_sinks in_slam_sinks;
	/* SLAM/HT sinks we deliver imu and frame data to */
	struct xrt_slam_sinks slam_sinks;
	struct xrt_device *handtracker;

	//! Debug UI widgets and status strings
	struct
	{
		struct u_var_button hmd_screen_enable_btn;
		struct u_var_button switch_tracker_btn;
		char hand_status[128];
		char slam_status[128];
	} gui;
};
/*!
 * Create a tracker instance for the headset.
 */
struct rift_s_tracker *
rift_s_tracker_create(struct xrt_tracking_origin *origin,
                      struct xrt_frame_context *xfctx,
                      struct rift_s_hmd_config *hmd_config);

//! Start consuming IMU / video data (sets ready_for_data)
void
rift_s_tracker_start(struct rift_s_tracker *t);

//! Destroy the tracker and release its resources
void
rift_s_tracker_destroy(struct rift_s_tracker *t);

//! Register the tracker's debug widgets under the given u_var root
void
rift_s_tracker_add_debug_ui(struct rift_s_tracker *t, void *root);

//! Get the sinks that camera frames should be delivered to
struct xrt_slam_sinks *
rift_s_tracker_get_slam_sinks(struct rift_s_tracker *t);

//! Get the hand-tracking device (the `handtracker` member), if any
struct xrt_device *
rift_s_tracker_get_hand_tracking_device(struct rift_s_tracker *t);

/*!
 * Feed one IMU sample into tracking.
 *
 * @param timestamp_ns        Sample time on the device clock.
 * @param local_timestamp_ns  Sample arrival time on the local system clock.
 */
void
rift_s_tracker_imu_update(struct rift_s_tracker *t,
                          uint64_t timestamp_ns,
                          timepoint_ns local_timestamp_ns,
                          const struct xrt_vec3 *accel,
                          const struct xrt_vec3 *gyro);

//! Deliver a left/right camera frame pair to the tracker's sinks
void
rift_s_tracker_push_slam_frames(struct rift_s_tracker *t,
                                uint64_t frame_ts_ns,
                                struct xrt_frame *left,
                                struct xrt_frame *right);

//! Get the tracked pose, in the requested reference frame, at the given time
void
rift_s_tracker_get_tracked_pose(struct rift_s_tracker *t,
                                enum rift_s_tracker_pose pose,
                                uint64_t at_timestamp_ns,
                                struct xrt_space_relation *out_relation);
#endif

View file

@ -0,0 +1,352 @@
/*
* Copyright 2022 Jan Schmidt
* SPDX-License-Identifier: BSL-1.0
*
*/
/*!
* @file
* @brief Driver code for Oculus Rift S headsets
*
* Utility functions for generating a stereo camera calibration,
* and converting the Rift S Fisheye62 distortion parameters into
* OpenCV-compatible Kannala-Brandt parameters
*
* @author Jan Schmidt <jan@centricular.com>
* @ingroup drv_rift_s
*/
#include <stdbool.h>
#include <math.h>
#include "tinyceres/tiny_solver.hpp"
#include "tinyceres/tiny_solver_autodiff_function.hpp"
#include "rift_s_util.h"
using ceres::TinySolver;
using ceres::TinySolverAutoDiffFunction;

//! Number of Kannala-Brandt distortion coefficients (k1..k4) we solve for
const int N_KB4_DISTORT_PARAMS = 4;
/*
 * Project a viewplane coordinate through the Rift S "Fisheye62"
 * distortion model (6 radial + 2 tangential coefficients) into pixel
 * coordinates. Templated on T so Ceres can auto-differentiate it with
 * respect to the input point.
 *
 * distortion_params layout: k1..k6 (radial) then p1, p2 (tangential).
 */
template <typename T>
bool
fisheye62_undistort_func(struct t_camera_calibration *calib,
                         const double *distortion_params,
                         const T point[2],
                         T *out_point)
{
	const double fx = calib->intrinsics[0][0];
	const double fy = calib->intrinsics[1][1];
	const double cx = calib->intrinsics[0][2];
	const double cy = calib->intrinsics[1][2];

	const T u = point[0];
	const T v = point[1];
	const T radius_sq = u * u + v * v;
	const T radius = sqrt(radius_sq);

	/* Near the optical axis the distortion is negligible: apply the
	 * plain pinhole projection and avoid dividing by ~0 below. */
	if (radius < 1e-8) {
		out_point[0] = fx * u + cx;
		out_point[1] = fy * v + cy;
		return true;
	}

	const T theta = atan(radius);
	const T theta2 = theta * theta;
	const T xp = u * theta / radius;
	const T yp = v * theta / radius;

	/* Radial term via Horner's rule:
	 * 1 + k1·θ² + k2·θ⁴ + k3·θ⁶ + k4·θ⁸ + k5·θ¹⁰ + k6·θ¹² */
	T r_theta = T(0.0);
	for (int i = 5; i >= 0; i--) {
		r_theta = (r_theta + distortion_params[i]) * theta2;
	}
	r_theta += 1;

	const double p1 = distortion_params[6];
	const double p2 = distortion_params[7];

	/* Tangential distortion terms */
	const T delta_x = 2 * p1 * xp * yp + p2 * (theta2 + xp * xp * 2.0);
	const T delta_y = 2 * p2 * xp * yp + p1 * (theta2 + yp * yp * 2.0);

	const T mx = xp * r_theta + delta_x;
	const T my = yp * r_theta + delta_y;

	out_point[0] = fx * mx + cx;
	out_point[1] = fy * my + cy;
	return true;
}
/*
 * Cost functor for inverting the Fisheye62 model: given a candidate
 * viewplane coordinate, the residual is the pixel-space error of its
 * Fisheye62 projection against a target (distorted) pixel position.
 */
struct UndistortCostFunctor
{
	UndistortCostFunctor(struct t_camera_calibration *calib, double *distortion_params, double point[2])
	    : m_calib(calib), m_distortion_params(distortion_params)
	{
		m_target[0] = point[0];
		m_target[1] = point[1];
	}

	struct t_camera_calibration *m_calib;
	double *m_distortion_params;
	double m_target[2]; //!< target pixel position

	/* Residual = projected pixel - target pixel, per axis */
	template <typename T>
	bool
	operator()(const T *const x, T *residual) const
	{
		T projected[2];

		if (!fisheye62_undistort_func(m_calib, m_distortion_params, x, projected))
			return false;

		residual[0] = projected[0] - m_target[0];
		residual[1] = projected[1] - m_target[1];
		return true;
	}
};
/*
 * Distort a viewplane coordinate with the 4-coefficient Kannala-Brandt
 * model and project it to pixel coordinates. The KB4 coefficients are
 * the templated unknowns, so Ceres can auto-differentiate with respect
 * to them while fitting.
 */
template <typename T>
bool
kb4_distort_func(struct t_camera_calibration *calib, const T *distortion_params, const double point[2], T *out_point)
{
	const double fx = calib->intrinsics[0][0];
	const double fy = calib->intrinsics[1][1];
	const double cx = calib->intrinsics[0][2];
	const double cy = calib->intrinsics[1][2];

	const double u = point[0];
	const double v = point[1];
	const double radius = sqrt(u * u + v * v);

	/* Near the optical axis: plain pinhole projection, avoiding the
	 * division by a near-zero radius below. */
	if (radius < 1e-8) {
		out_point[0] = T(fx * u + cx);
		out_point[1] = T(fy * v + cy);
		return true;
	}

	const double theta = atan(radius);
	const double theta2 = theta * theta;

	/* r(θ) = θ·(1 + k1·θ² + k2·θ⁴ + k3·θ⁶ + k4·θ⁸), via Horner's rule */
	T r_theta = T(0.0);
	for (int i = 3; i >= 0; i--) {
		r_theta = (r_theta + distortion_params[i]) * theta2;
	}
	r_theta += 1;
	r_theta *= theta;

	const T mx = u * r_theta / radius;
	const T my = v * r_theta / radius;

	out_point[0] = fx * mx + cx;
	out_point[1] = fy * my + cy;
	return true;
}
/*
 * One sample of the distortion mapping: an undistorted viewplane
 * coordinate and the pixel position it distorts to.
 */
struct TargetPoint
{
	double point[2];     //!< undistorted viewplane coordinate
	double distorted[2]; //!< corresponding distorted pixel position
};
/*
 * Cost functor for fitting the KB4 coefficients: the residuals are the
 * pixel-space errors of the KB4 projection of each grid point against
 * the Fisheye62 distortion grid sampled earlier.
 */
struct DistortParamKB4CostFunctor
{
	DistortParamKB4CostFunctor(struct t_camera_calibration *calib, int nSteps, TargetPoint *targetPointGrid)
	    : m_calib(calib), m_nSteps(nSteps), m_targetPointGrid(targetPointGrid)
	{}

	struct t_camera_calibration *m_calib;
	int m_nSteps;
	TargetPoint *m_targetPointGrid;

	/* Two residuals (x and y error) per grid point, walked in the
	 * grid's row-major order so the residual layout matches. */
	template <typename T>
	bool
	operator()(const T *const distort_params, T *residual) const
	{
		const int n_points = m_nSteps * m_nSteps;
		T projected[2];

		for (int i = 0; i < n_points; i++) {
			const TargetPoint *p = &m_targetPointGrid[i];

			if (!kb4_distort_func<T>(m_calib, distort_params, p->point, projected))
				return false;

			residual[2 * i + 0] = projected[0] - p->distorted[0];
			residual[2 * i + 1] = projected[1] - p->distorted[1];
		}
		return true;
	}
};
#define STEPS 21
/*
 * Fill in an OpenCV-compatible per-camera calibration from the Rift S
 * config, converting the native "Fisheye62" distortion parameters
 * (6 radial + 2 tangential) into a 4-coefficient Kannala-Brandt fit.
 *
 * Returns true on success.
 */
static bool
convert_camera_calibration(struct rift_s_camera_calibration *rift_s_cam, struct t_camera_calibration *tcc)
{
	tcc->image_size_pixels.h = rift_s_cam->roi.extent.h;
	tcc->image_size_pixels.w = rift_s_cam->roi.extent.w;
	tcc->intrinsics[0][0] = rift_s_cam->projection.fx;
	tcc->intrinsics[1][1] = rift_s_cam->projection.fy;
	tcc->intrinsics[0][2] = rift_s_cam->projection.cx;
	tcc->intrinsics[1][2] = rift_s_cam->projection.cy;
	tcc->intrinsics[2][2] = 1.0;
	tcc->use_fisheye = true;

	TargetPoint xy[STEPS * STEPS];

	/* Gather the Fisheye62 params: 6 radial (k1..k6) then tangential p1, p2 */
	double fisheye62_distort_params[8];
	for (int i = 0; i < 6; i++) {
		fisheye62_distort_params[i] = rift_s_cam->distortion.k[i];
	}
	fisheye62_distort_params[6] = rift_s_cam->distortion.p1;
	fisheye62_distort_params[7] = rift_s_cam->distortion.p2;

	/* Calculate Fisheye62 distortion grid by finding the viewplane coordinates that
	 * project onto the points of grid spaced across the pixel image plane */
	for (int y_index = 0; y_index < STEPS; y_index++) {
		for (int x_index = 0; x_index < STEPS; x_index++) {
			int x = x_index * (tcc->image_size_pixels.w - 1) / (STEPS - 1);
			int y = y_index * (tcc->image_size_pixels.h - 1) / (STEPS - 1);

			TargetPoint *p = &xy[(y_index * STEPS) + x_index];
			p->distorted[0] = x;
			p->distorted[1] = y;

			/* Start the inverse-distortion search at the origin */
			Eigen::Matrix<double, 2, 1> result(0, 0);

			using AutoDiffUndistortFunction = TinySolverAutoDiffFunction<UndistortCostFunctor, 2, 2>;
			UndistortCostFunctor undistort_functor(tcc, fisheye62_distort_params, p->distorted);

			AutoDiffUndistortFunction f(undistort_functor);
			TinySolver<AutoDiffUndistortFunction> solver;
			solver.Solve(f, &result);

			p->point[0] = result[0];
			p->point[1] = result[1];
		}
	}

	/* Use the calculated distortion grid to solve for kb4 params */
	{
		/* Fixed-size Eigen matrices are NOT zero-initialized by default;
		 * start the solver from all-zero coefficients instead of
		 * uninitialized memory, which made the fit non-deterministic. */
		Eigen::Matrix<double, N_KB4_DISTORT_PARAMS, 1> kb4_distort_params =
		    Eigen::Matrix<double, N_KB4_DISTORT_PARAMS, 1>::Zero();

		using AutoDiffDistortParamKB4Function =
		    TinySolverAutoDiffFunction<DistortParamKB4CostFunctor, 2 * STEPS * STEPS, N_KB4_DISTORT_PARAMS>;
		DistortParamKB4CostFunctor distort_param_kb4_functor(tcc, STEPS, xy);

		AutoDiffDistortParamKB4Function f(distort_param_kb4_functor);
		TinySolver<AutoDiffDistortParamKB4Function> solver;
		solver.Solve(f, &kb4_distort_params);

		for (int i = 0; i < 4; i++)
			tcc->distortion_fisheye[i] = kb4_distort_params[i];
	}

	return true;
}
/*!
* Allocate and populate an OpenCV-compatible @ref t_stereo_camera_calibration pointer from
* the Rift S config.
*
* This requires fitting a KB4 fisheye polynomial to the 6 radial + 2 tangential 'Fisheye62'
* parameters provided by the Rift S.
*
*/
struct t_stereo_camera_calibration *
rift_s_create_stereo_camera_calib_rotated(struct rift_s_camera_calibration_block *camera_calibration)
{
struct t_stereo_camera_calibration *calib = NULL;
t_stereo_camera_calibration_alloc(&calib, 8);
struct rift_s_camera_calibration *left = &camera_calibration->cameras[RIFT_S_CAMERA_FRONT_LEFT];
struct rift_s_camera_calibration *right = &camera_calibration->cameras[RIFT_S_CAMERA_FRONT_RIGHT];
// Intrinsics
for (int view = 0; view < 2; view++) {
struct t_camera_calibration *tcc = &calib->view[view];
struct rift_s_camera_calibration *cam_config;
if (view == 0) {
cam_config = left;
} else {
cam_config = right;
}
if (!convert_camera_calibration(cam_config, tcc))
goto fail;
}
struct xrt_pose device_from_left, device_from_right;
struct xrt_pose right_from_device, right_from_left;
struct xrt_matrix_3x3 right_from_left_rot;
/* Compute the transform from the left eye to the right eye
* by using the config provided camera->device transform
*/
math_pose_from_isometry(&left->device_from_camera, &device_from_left);
math_pose_from_isometry(&right->device_from_camera, &device_from_right);
math_pose_invert(&device_from_right, &right_from_device);
math_pose_transform(&device_from_left, &right_from_device, &right_from_left);
math_matrix_3x3_from_quat(&right_from_left.orientation, &right_from_left_rot);
/* Rotate the position in the camera extrinsics 90° to
* compensate for the front cams being rotated. That means hand poses
* are detected and come out rotated too, so need correcting
* in the tracking override offset */
calib->camera_translation[0] = -right_from_left.position.y;
calib->camera_translation[1] = right_from_left.position.x;
calib->camera_translation[2] = right_from_left.position.z;
calib->camera_rotation[0][0] = right_from_left_rot.v[0];
calib->camera_rotation[0][1] = right_from_left_rot.v[1];
calib->camera_rotation[0][2] = right_from_left_rot.v[2];
calib->camera_rotation[1][0] = right_from_left_rot.v[3];
calib->camera_rotation[1][1] = right_from_left_rot.v[4];
calib->camera_rotation[1][2] = right_from_left_rot.v[5];
calib->camera_rotation[2][0] = right_from_left_rot.v[6];
calib->camera_rotation[2][1] = right_from_left_rot.v[7];
calib->camera_rotation[2][2] = right_from_left_rot.v[8];
return calib;
fail:
t_stereo_camera_calibration_reference(&calib, NULL);
return NULL;
}

View file

@ -0,0 +1,28 @@
/*
* Copyright 2022 Jan Schmidt
* SPDX-License-Identifier: BSL-1.0
*/
/*!
* @file
* @brief Oculus Rift S utility functions
* @author Jan Schmidt <jan@centricular.com>
* @ingroup drv_rift_s
*/
#pragma once

#include "tracking/t_tracking.h"

#include "rift_s_firmware.h"

#ifdef __cplusplus
extern "C" {
#endif

/*!
 * Build an OpenCV-compatible stereo camera calibration from the Rift S
 * camera calibration block, with the extrinsic translation rotated 90°
 * to account for the rotated front cameras.
 */
struct t_stereo_camera_calibration *
rift_s_create_stereo_camera_calib_rotated(struct rift_s_camera_calibration_block *camera_calibration);

#ifdef __cplusplus
}
#endif