xrt: Add input, hmd_parts and tracking interface

This patch has been spun off from the rather massive amount of work for adding
input devices into Monado. The interfaces feel somewhat stable now, so they
could do with another pair of eyes before too much work is done on st/oxr.
Jakob Bornecrantz 2019-05-09 11:54:48 +01:00
parent 889c89590c
commit 1f64f714f6
14 changed files with 596 additions and 290 deletions
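For reviewers, a minimal sketch of the driver-side pattern this patch introduces, distilled from the hdk/OpenHMD/PSVR changes below. The struct sample_hmd type and its create function are placeholders, not part of the patch; include paths are assumed to match the rest of the tree.

#include "xrt/xrt_device.h"
#include "util/u_device.h"

struct sample_hmd
{
	struct xrt_device base;
	// Driver-specific fields would follow.
};

static struct sample_hmd *
sample_hmd_create(void)
{
	// One allocation now holds the device, its inputs, the xrt_hmd_parts
	// and the "none" tracking origin (see u_device_allocate below).
	enum u_device_alloc_flags flags = (enum u_device_alloc_flags)(
	    U_DEVICE_ALLOC_HMD | U_DEVICE_ALLOC_TRACKING_NONE);
	struct sample_hmd *sh = U_DEVICE_ALLOCATE(struct sample_hmd, flags, 1);

	// The head pose is exposed as a named input.
	sh->base.inputs[0].name = XRT_INPUT_GENERIC_HEAD_RELATION;

	// Screens, views, distortion and blend mode moved from xrt_device
	// into the new xrt_hmd_parts, reached through the hmd pointer.
	sh->base.hmd->blend_mode = XRT_BLEND_MODE_OPAQUE;

	// update_inputs, get_tracked_pose (which now takes an xrt_input_name),
	// get_view_pose and destroy are hooked up here as in the drivers below.
	return sh;
}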


@@ -89,42 +89,44 @@ u_device_dump_config(struct xrt_device* xdev,
// clang-format off
fprintf(stderr, "%s - device_setup\n", prefix);
PRINT_STR( "prod", prod);
PRINT_INT( "screens[0].w_pixels ", xdev->screens[0].w_pixels);
PRINT_INT( "screens[0].h_pixels ", xdev->screens[0].h_pixels);
if (xdev->hmd != NULL) {
PRINT_INT( "screens[0].w_pixels ", xdev->hmd->screens[0].w_pixels);
PRINT_INT( "screens[0].h_pixels ", xdev->hmd->screens[0].h_pixels);
// PRINT_MM( "info.display.w_meters", info.display.w_meters);
// PRINT_MM( "info.display.h_meters", info.display.h_meters);
PRINT_INT( "views[0].viewport.x_pixels ", xdev->views[0].viewport.x_pixels);
PRINT_INT( "views[0].viewport.y_pixels ", xdev->views[0].viewport.y_pixels);
PRINT_INT( "views[0].viewport.w_pixels ", xdev->views[0].viewport.w_pixels);
PRINT_INT( "views[0].viewport.h_pixels ", xdev->views[0].viewport.h_pixels);
PRINT_INT( "views[0].display.w_pixels ", xdev->views[0].display.w_pixels);
PRINT_INT( "views[0].display.h_pixels ", xdev->views[0].display.h_pixels);
PRINT_MM( "views[0].display.w_meters ", xdev->views[0].display.w_meters);
PRINT_MM( "views[0].display.h_meters ", xdev->views[0].display.h_meters);
PRINT_MM( "views[0].lens_center.x_meters", xdev->views[0].lens_center.x_meters);
PRINT_MM( "views[0].lens_center.y_meters", xdev->views[0].lens_center.y_meters);
PRINT_MAT2X2("views[0].rot ", xdev->views[0].rot);
PRINT_ANGLE( "views[0].fov.angle_left ", xdev->views[0].fov.angle_left);
PRINT_ANGLE( "views[0].fov.angle_right", xdev->views[0].fov.angle_right);
PRINT_ANGLE( "views[0].fov.angle_up ", xdev->views[0].fov.angle_up);
PRINT_ANGLE( "views[0].fov.angle_down ", xdev->views[0].fov.angle_down);
PRINT_INT( "views[0].viewport.x_pixels ", xdev->hmd->views[0].viewport.x_pixels);
PRINT_INT( "views[0].viewport.y_pixels ", xdev->hmd->views[0].viewport.y_pixels);
PRINT_INT( "views[0].viewport.w_pixels ", xdev->hmd->views[0].viewport.w_pixels);
PRINT_INT( "views[0].viewport.h_pixels ", xdev->hmd->views[0].viewport.h_pixels);
PRINT_INT( "views[0].display.w_pixels ", xdev->hmd->views[0].display.w_pixels);
PRINT_INT( "views[0].display.h_pixels ", xdev->hmd->views[0].display.h_pixels);
PRINT_MM( "views[0].display.w_meters ", xdev->hmd->views[0].display.w_meters);
PRINT_MM( "views[0].display.h_meters ", xdev->hmd->views[0].display.h_meters);
PRINT_MM( "views[0].lens_center.x_meters", xdev->hmd->views[0].lens_center.x_meters);
PRINT_MM( "views[0].lens_center.y_meters", xdev->hmd->views[0].lens_center.y_meters);
PRINT_MAT2X2("views[0].rot ", xdev->hmd->views[0].rot);
PRINT_ANGLE( "views[0].fov.angle_left ", xdev->hmd->views[0].fov.angle_left);
PRINT_ANGLE( "views[0].fov.angle_right", xdev->hmd->views[0].fov.angle_right);
PRINT_ANGLE( "views[0].fov.angle_up ", xdev->hmd->views[0].fov.angle_up);
PRINT_ANGLE( "views[0].fov.angle_down ", xdev->hmd->views[0].fov.angle_down);
// PRINT_ANGLE( "info.views[0].fov ", info.views[0].fov);
PRINT_INT( "views[1].viewport.x_pixels ", xdev->views[1].viewport.x_pixels);
PRINT_INT( "views[1].viewport.y_pixels ", xdev->views[1].viewport.y_pixels);
PRINT_INT( "views[1].viewport.w_pixels ", xdev->views[1].viewport.w_pixels);
PRINT_INT( "views[1].viewport.h_pixels ", xdev->views[1].viewport.h_pixels);
PRINT_INT( "views[1].display.w_pixels ", xdev->views[1].display.w_pixels);
PRINT_INT( "views[1].display.h_pixels ", xdev->views[1].display.h_pixels);
PRINT_MM( "views[1].display.w_meters ", xdev->views[1].display.w_meters);
PRINT_MM( "views[1].display.h_meters ", xdev->views[1].display.h_meters);
PRINT_MM( "views[1].lens_center.x_meters", xdev->views[1].lens_center.x_meters);
PRINT_MM( "views[1].lens_center.y_meters", xdev->views[1].lens_center.y_meters);
PRINT_MAT2X2("views[1].rot ", xdev->views[1].rot);
PRINT_ANGLE( "views[1].fov.angle_left ", xdev->views[1].fov.angle_left);
PRINT_ANGLE( "views[1].fov.angle_right", xdev->views[1].fov.angle_right);
PRINT_ANGLE( "views[1].fov.angle_up ", xdev->views[1].fov.angle_up);
PRINT_ANGLE( "views[1].fov.angle_down ", xdev->views[1].fov.angle_down);
PRINT_INT( "views[1].viewport.x_pixels ", xdev->hmd->views[1].viewport.x_pixels);
PRINT_INT( "views[1].viewport.y_pixels ", xdev->hmd->views[1].viewport.y_pixels);
PRINT_INT( "views[1].viewport.w_pixels ", xdev->hmd->views[1].viewport.w_pixels);
PRINT_INT( "views[1].viewport.h_pixels ", xdev->hmd->views[1].viewport.h_pixels);
PRINT_INT( "views[1].display.w_pixels ", xdev->hmd->views[1].display.w_pixels);
PRINT_INT( "views[1].display.h_pixels ", xdev->hmd->views[1].display.h_pixels);
PRINT_MM( "views[1].display.w_meters ", xdev->hmd->views[1].display.w_meters);
PRINT_MM( "views[1].display.h_meters ", xdev->hmd->views[1].display.h_meters);
PRINT_MM( "views[1].lens_center.x_meters", xdev->hmd->views[1].lens_center.x_meters);
PRINT_MM( "views[1].lens_center.y_meters", xdev->hmd->views[1].lens_center.y_meters);
PRINT_MAT2X2("views[1].rot ", xdev->hmd->views[1].rot);
PRINT_ANGLE( "views[1].fov.angle_left ", xdev->hmd->views[1].fov.angle_left);
PRINT_ANGLE( "views[1].fov.angle_right", xdev->hmd->views[1].fov.angle_right);
PRINT_ANGLE( "views[1].fov.angle_up ", xdev->hmd->views[1].fov.angle_up);
PRINT_ANGLE( "views[1].fov.angle_down ", xdev->hmd->views[1].fov.angle_down);
// PRINT_ANGLE( "info.views[1].fov ", info.views[0].fov);
}
// clang-format on
}
@@ -155,55 +157,105 @@ u_device_setup_split_side_by_side(struct xrt_device* xdev,
};
// Common
xdev->blend_mode = XRT_BLEND_MODE_OPAQUE;
xdev->distortion.models = XRT_DISTORTION_MODEL_NONE;
xdev->distortion.preferred = XRT_DISTORTION_MODEL_NONE;
xdev->screens[0].w_pixels = info->display.w_pixels;
xdev->screens[0].h_pixels = info->display.h_pixels;
xdev->hmd->blend_mode = XRT_BLEND_MODE_OPAQUE;
xdev->hmd->distortion.models = XRT_DISTORTION_MODEL_NONE;
xdev->hmd->distortion.preferred = XRT_DISTORTION_MODEL_NONE;
xdev->hmd->screens[0].w_pixels = info->display.w_pixels;
xdev->hmd->screens[0].h_pixels = info->display.h_pixels;
// Left
xdev->views[0].display.w_meters = w_meters;
xdev->views[0].display.h_meters = h_meters;
xdev->views[0].lens_center.x_meters = lens_center_x_meters[0];
xdev->views[0].lens_center.y_meters = lens_center_y_meters[0];
xdev->views[0].display.w_pixels = w_pixels;
xdev->views[0].display.h_pixels = h_pixels;
xdev->views[0].viewport.x_pixels = 0;
xdev->views[0].viewport.y_pixels = 0;
xdev->views[0].viewport.w_pixels = w_pixels;
xdev->views[0].viewport.h_pixels = h_pixels;
xdev->views[0].rot = u_device_rotation_ident;
xdev->hmd->views[0].display.w_meters = w_meters;
xdev->hmd->views[0].display.h_meters = h_meters;
xdev->hmd->views[0].lens_center.x_meters = lens_center_x_meters[0];
xdev->hmd->views[0].lens_center.y_meters = lens_center_y_meters[0];
xdev->hmd->views[0].display.w_pixels = w_pixels;
xdev->hmd->views[0].display.h_pixels = h_pixels;
xdev->hmd->views[0].viewport.x_pixels = 0;
xdev->hmd->views[0].viewport.y_pixels = 0;
xdev->hmd->views[0].viewport.w_pixels = w_pixels;
xdev->hmd->views[0].viewport.h_pixels = h_pixels;
xdev->hmd->views[0].rot = u_device_rotation_ident;
// Right
xdev->views[1].display.w_meters = w_meters;
xdev->views[1].display.h_meters = h_meters;
xdev->views[1].lens_center.x_meters = lens_center_x_meters[1];
xdev->views[1].lens_center.y_meters = lens_center_y_meters[1];
xdev->views[1].display.w_pixels = w_pixels;
xdev->views[1].display.h_pixels = h_pixels;
xdev->views[1].viewport.x_pixels = w_pixels;
xdev->views[1].viewport.y_pixels = 0;
xdev->views[1].viewport.w_pixels = w_pixels;
xdev->views[1].viewport.h_pixels = h_pixels;
xdev->views[1].rot = u_device_rotation_ident;
xdev->hmd->views[1].display.w_meters = w_meters;
xdev->hmd->views[1].display.h_meters = h_meters;
xdev->hmd->views[1].lens_center.x_meters = lens_center_x_meters[1];
xdev->hmd->views[1].lens_center.y_meters = lens_center_y_meters[1];
xdev->hmd->views[1].display.w_pixels = w_pixels;
xdev->hmd->views[1].display.h_pixels = h_pixels;
xdev->hmd->views[1].viewport.x_pixels = w_pixels;
xdev->hmd->views[1].viewport.y_pixels = 0;
xdev->hmd->views[1].viewport.w_pixels = w_pixels;
xdev->hmd->views[1].viewport.h_pixels = h_pixels;
xdev->hmd->views[1].rot = u_device_rotation_ident;
{
/* right eye */
if (!math_compute_fovs(w_meters, lens_center_x_meters[1],
info->views[1].fov, h_meters,
lens_center_y_meters[1], 0,
&xdev->views[1].fov)) {
&xdev->hmd->views[1].fov)) {
return false;
}
}
{
/* left eye - just mirroring right eye now */
xdev->views[0].fov.angle_up = xdev->views[1].fov.angle_up;
xdev->views[0].fov.angle_down = xdev->views[1].fov.angle_down;
xdev->hmd->views[0].fov.angle_up =
xdev->hmd->views[1].fov.angle_up;
xdev->hmd->views[0].fov.angle_down =
xdev->hmd->views[1].fov.angle_down;
xdev->views[0].fov.angle_left = -xdev->views[1].fov.angle_right;
xdev->views[0].fov.angle_right = -xdev->views[1].fov.angle_left;
xdev->hmd->views[0].fov.angle_left =
-xdev->hmd->views[1].fov.angle_right;
xdev->hmd->views[0].fov.angle_right =
-xdev->hmd->views[1].fov.angle_left;
}
return true;
}
void*
u_device_allocate(enum u_device_alloc_flags flags,
size_t size,
size_t num_inputs)
{
bool alloc_hmd = (flags & U_DEVICE_ALLOC_HMD) != 0;
bool alloc_tracking = (flags & U_DEVICE_ALLOC_TRACKING_NONE) != 0;
size_t total_size = size;
// Inputs
size_t offset_inputs = total_size;
total_size += num_inputs * sizeof(struct xrt_input);
// HMD
size_t offset_hmd = total_size;
total_size += alloc_hmd ? sizeof(struct xrt_hmd_parts) : 0;
// Tracking
size_t offset_tracking = total_size;
total_size += alloc_tracking ? sizeof(struct xrt_tracking) : 0;
// Do the allocation
char* ptr = calloc(1, total_size);
struct xrt_device* xdev = (struct xrt_device*)ptr;
if (num_inputs > 0) {
xdev->num_inputs = num_inputs;
xdev->inputs = (struct xrt_input*)(ptr + offset_inputs);
}
if (alloc_hmd) {
xdev->hmd = (struct xrt_hmd_parts*)(ptr + offset_hmd);
}
if (alloc_tracking) {
xdev->tracking = (struct xrt_tracking*)(ptr + offset_tracking);
xdev->tracking->type = XRT_TRACKING_TYPE_NONE;
xdev->tracking->offset.orientation.w = 1.0f;
snprintf(xdev->tracking->name, XRT_TRACKING_NAME_LEN, "%s",
"No tracking");
}
return xdev;
}


@@ -11,6 +11,7 @@
#include "xrt/xrt_compiler.h"
#include "xrt/xrt_device.h"
#include "xrt/xrt_tracking.h"
#ifdef __cplusplus
extern "C" {
@@ -22,6 +23,14 @@ extern const struct xrt_matrix_2x2 u_device_rotation_left;
extern const struct xrt_matrix_2x2 u_device_rotation_ident;
extern const struct xrt_matrix_2x2 u_device_rotation_180;
enum u_device_alloc_flags
{
// clang-format off
U_DEVICE_ALLOC_HMD = 1 << 0,
U_DEVICE_ALLOC_TRACKING_NONE = 1 << 1,
// clang-format on
};
struct u_device_simple_info
{
struct
@@ -61,6 +70,21 @@ u_device_dump_config(struct xrt_device* xdev,
const char* prefix,
const char* prod);
#define U_DEVICE_ALLOCATE(type, flags, num_inputs) \
((type*)u_device_allocate(flags, sizeof(type), num_inputs))
/*!
* Helper function to allocate a device plus inputs in the same allocation
* placed after the device in memory.
*
* Will set up any pointers and num values.
*/
void*
u_device_allocate(enum u_device_alloc_flags flags,
size_t size,
size_t num_inputs);
#ifdef __cplusplus
}
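A hedged note on the allocation helper above: everything lands in one calloc'd block, so drivers keep freeing only the base pointer in their destroy callbacks. The struct my_dev name below is a placeholder used purely to show the layout.

/*
 * u_device_allocate(flags, sizeof(struct my_dev), 2), with flags set to
 * U_DEVICE_ALLOC_HMD | U_DEVICE_ALLOC_TRACKING_NONE, lays memory out as:
 *
 *   [ struct my_dev (begins with struct xrt_device) ]
 *   [ struct xrt_input x 2 ]  <- xdev->inputs, xdev->num_inputs == 2
 *   [ struct xrt_hmd_parts ]  <- xdev->hmd
 *   [ struct xrt_tracking  ]  <- xdev->tracking, type NONE, identity offset
 *
 * All of it comes from the single calloc in u_device_allocate, so freeing
 * the device pointer releases the whole block.
 */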


@@ -544,20 +544,20 @@ comp_distortion_update_uniform_buffer_warp(struct comp_distortion *d,
/*
* VIVE fragment shader
*/
d->ubo_vive.aspect_x_over_y = c->xdev->distortion.vive.aspect_x_over_y;
d->ubo_vive.grow_for_undistort = c->xdev->distortion.vive.grow_for_undistort;
d->ubo_vive.aspect_x_over_y = c->xdev->hmd->distortion.vive.aspect_x_over_y;
d->ubo_vive.grow_for_undistort = c->xdev->hmd->distortion.vive.grow_for_undistort;
for (uint32_t i = 0; i < 2; i++)
d->ubo_vive.undistort_r2_cutoff[i] = c->xdev->distortion.vive.undistort_r2_cutoff[i];
d->ubo_vive.undistort_r2_cutoff[i] = c->xdev->hmd->distortion.vive.undistort_r2_cutoff[i];
for (uint32_t i = 0; i < 2; i++)
for (uint32_t j = 0; j < 2; j++)
d->ubo_vive.center[i][j] = c->xdev->distortion.vive.center[i][j];
d->ubo_vive.center[i][j] = c->xdev->hmd->distortion.vive.center[i][j];
for (uint32_t i = 0; i < 2; i++)
for (uint32_t j = 0; j < 3; j++)
for (uint32_t k = 0; k < 3; k++)
d->ubo_vive.coefficients[i][j][k] = c->xdev->distortion.vive.coefficients[i][j][k];
d->ubo_vive.coefficients[i][j][k] = c->xdev->hmd->distortion.vive.coefficients[i][j][k];
memcpy(d->ubo_handle.mapped, &d->ubo_vive, sizeof(d->ubo_vive));
break;
@@ -566,21 +566,21 @@ comp_distortion_update_uniform_buffer_warp(struct comp_distortion *d,
/*
* Pano vision fragment shader
*/
d->ubo_pano.hmd_warp_param[0] = c->xdev->distortion.pano.distortion_k[0];
d->ubo_pano.hmd_warp_param[1] = c->xdev->distortion.pano.distortion_k[1];
d->ubo_pano.hmd_warp_param[2] = c->xdev->distortion.pano.distortion_k[2];
d->ubo_pano.hmd_warp_param[3] = c->xdev->distortion.pano.distortion_k[3];
d->ubo_pano.aberr[0] = c->xdev->distortion.pano.aberration_k[0];
d->ubo_pano.aberr[1] = c->xdev->distortion.pano.aberration_k[1];
d->ubo_pano.aberr[2] = c->xdev->distortion.pano.aberration_k[2];
d->ubo_pano.aberr[3] = c->xdev->distortion.pano.aberration_k[3];
d->ubo_pano.lens_center[0][0] = c->xdev->views[0].lens_center.x_meters;
d->ubo_pano.lens_center[0][1] = c->xdev->views[0].lens_center.y_meters;
d->ubo_pano.lens_center[1][0] = c->xdev->views[1].lens_center.x_meters;
d->ubo_pano.lens_center[1][1] = c->xdev->views[1].lens_center.y_meters;
d->ubo_pano.viewport_scale[0] = c->xdev->views[0].display.w_meters;
d->ubo_pano.viewport_scale[1] = c->xdev->views[0].display.h_meters;
d->ubo_pano.warp_scale = c->xdev->distortion.pano.warp_scale;
d->ubo_pano.hmd_warp_param[0] = c->xdev->hmd->distortion.pano.distortion_k[0];
d->ubo_pano.hmd_warp_param[1] = c->xdev->hmd->distortion.pano.distortion_k[1];
d->ubo_pano.hmd_warp_param[2] = c->xdev->hmd->distortion.pano.distortion_k[2];
d->ubo_pano.hmd_warp_param[3] = c->xdev->hmd->distortion.pano.distortion_k[3];
d->ubo_pano.aberr[0] = c->xdev->hmd->distortion.pano.aberration_k[0];
d->ubo_pano.aberr[1] = c->xdev->hmd->distortion.pano.aberration_k[1];
d->ubo_pano.aberr[2] = c->xdev->hmd->distortion.pano.aberration_k[2];
d->ubo_pano.aberr[3] = c->xdev->hmd->distortion.pano.aberration_k[3];
d->ubo_pano.lens_center[0][0] = c->xdev->hmd->views[0].lens_center.x_meters;
d->ubo_pano.lens_center[0][1] = c->xdev->hmd->views[0].lens_center.y_meters;
d->ubo_pano.lens_center[1][0] = c->xdev->hmd->views[1].lens_center.x_meters;
d->ubo_pano.lens_center[1][1] = c->xdev->hmd->views[1].lens_center.y_meters;
d->ubo_pano.viewport_scale[0] = c->xdev->hmd->views[0].display.w_meters;
d->ubo_pano.viewport_scale[1] = c->xdev->hmd->views[0].display.h_meters;
d->ubo_pano.warp_scale = c->xdev->hmd->distortion.pano.warp_scale;
memcpy(d->ubo_handle.mapped, &d->ubo_pano, sizeof(d->ubo_pano));
}
@@ -592,9 +592,9 @@ comp_distortion_update_uniform_buffer_warp(struct comp_distortion *d,
// clang-format off
d->ubo_vp_data[0].viewport_id = 0;
d->ubo_vp_data[0].rot = c->xdev->views[0].rot;
d->ubo_vp_data[0].rot = c->xdev->hmd->views[0].rot;
d->ubo_vp_data[1].viewport_id = 1;
d->ubo_vp_data[1].rot = c->xdev->views[1].rot;
d->ubo_vp_data[1].rot = c->xdev->hmd->views[1].rot;
memcpy(d->ubo_viewport_handles[0].mapped, &d->ubo_vp_data[0], sizeof(d->ubo_vp_data[0]));
memcpy(d->ubo_viewport_handles[1].mapped, &d->ubo_vp_data[1], sizeof(d->ubo_vp_data[1]));


@@ -327,9 +327,9 @@ renderer_build_command_buffer(struct comp_renderer *r,
// clang-format off
float scale_x = (float)r->c->current.width /
(float)r->c->xdev->screens[0].w_pixels;
(float)r->c->xdev->hmd->screens[0].w_pixels;
float scale_y = (float)r->c->current.height /
(float)r->c->xdev->screens[0].h_pixels;
(float)r->c->xdev->hmd->screens[0].h_pixels;
// clang-format on
VkViewport viewport = {
@@ -347,7 +347,7 @@ renderer_build_command_buffer(struct comp_renderer *r,
};
renderer_set_viewport_scissor(scale_x, scale_y, &viewport, &scissor,
&r->c->xdev->views[0]);
&r->c->xdev->hmd->views[0]);
vk->vkCmdSetViewport(command_buffer, 0, 1, &viewport);
vk->vkCmdSetScissor(command_buffer, 0, 1, &scissor);
@@ -355,7 +355,7 @@ renderer_build_command_buffer(struct comp_renderer *r,
renderer_set_viewport_scissor(scale_x, scale_y, &viewport, &scissor,
&r->c->xdev->views[1]);
&r->c->xdev->hmd->views[1]);
vk->vkCmdSetViewport(command_buffer, 0, 1, &viewport);
vk->vkCmdSetScissor(command_buffer, 0, 1, &scissor);


@@ -31,11 +31,11 @@ comp_settings_init(struct comp_settings *s, struct xrt_device *xdev)
s->window_type = WINDOW_AUTO;
s->fullscreen = false;
s->flip_y = false;
s->distortion_model = xdev->distortion.preferred;
s->width = xdev->screens[0].w_pixels;
s->height = xdev->screens[0].h_pixels;
s->distortion_model = xdev->hmd->distortion.preferred;
s->width = xdev->hmd->screens[0].w_pixels;
s->height = xdev->hmd->screens[0].h_pixels;
s->nominal_frame_interval_ns =
xdev->screens[0].nominal_frame_interval_ns;
xdev->hmd->screens[0].nominal_frame_interval_ns;
s->print_spew = debug_get_bool_option_print_spew();
s->print_debug = debug_get_bool_option_print_debug();
s->validate_vulkan = debug_get_bool_option_validate_vulkan();


@@ -99,14 +99,27 @@ hdk_device_destroy(struct xrt_device *xdev)
free(hd);
}
static void
hdk_device_update_inputs(struct xrt_device *xdev,
struct time_state *timekeeping)
{
// Empty
}
static void
hdk_device_get_tracked_pose(struct xrt_device *xdev,
enum xrt_input_name name,
struct time_state *timekeeping,
int64_t *out_timestamp,
struct xrt_space_relation *out_relation)
{
struct hdk_device *hd = hdk_device(xdev);
if (name != XRT_INPUT_GENERIC_HEAD_RELATION) {
HDK_ERROR(hd, "unknown input name");
return;
}
uint8_t buffer[32];
int64_t now = time_state_get_now(timekeeping);
auto bytesRead = hid_read(hd->dev, &(buffer[0]), sizeof(buffer));
@@ -249,11 +262,16 @@ hdk_device_create(hid_device *dev,
bool print_spew,
bool print_debug)
{
struct hdk_device *hd = U_TYPED_CALLOC(struct hdk_device);
hd->base.blend_mode = XRT_BLEND_MODE_OPAQUE;
hd->base.destroy = hdk_device_destroy;
enum u_device_alloc_flags flags = (enum u_device_alloc_flags)(
U_DEVICE_ALLOC_HMD | U_DEVICE_ALLOC_TRACKING_NONE);
struct hdk_device *hd = U_DEVICE_ALLOCATE(struct hdk_device, flags, 1);
hd->base.hmd->blend_mode = XRT_BLEND_MODE_OPAQUE;
hd->base.update_inputs = hdk_device_update_inputs;
hd->base.get_tracked_pose = hdk_device_get_tracked_pose;
hd->base.get_view_pose = hdk_device_get_view_pose;
hd->base.destroy = hdk_device_destroy;
hd->base.inputs[0].name = XRT_INPUT_GENERIC_HEAD_RELATION;
hd->dev = dev;
hd->print_spew = print_spew;
hd->print_debug = print_debug;
@@ -296,25 +314,26 @@ hdk_device_create(hid_device *dev,
/* right eye */
math_compute_fovs(1.0, hCOP, hFOV * DEGREES_TO_RADIANS, 1, vCOP,
vFOV * DEGREES_TO_RADIANS,
&hd->base.views[1].fov);
&hd->base.hmd->views[1].fov);
}
{
/* left eye - just mirroring right eye now */
hd->base.views[0].fov.angle_up = hd->base.views[1].fov.angle_up;
hd->base.views[0].fov.angle_down =
hd->base.views[1].fov.angle_down;
hd->base.hmd->views[0].fov.angle_up =
hd->base.hmd->views[1].fov.angle_up;
hd->base.hmd->views[0].fov.angle_down =
hd->base.hmd->views[1].fov.angle_down;
hd->base.views[0].fov.angle_left =
-hd->base.views[1].fov.angle_right;
hd->base.views[0].fov.angle_right =
-hd->base.views[1].fov.angle_left;
hd->base.hmd->views[0].fov.angle_left =
-hd->base.hmd->views[1].fov.angle_right;
hd->base.hmd->views[0].fov.angle_right =
-hd->base.hmd->views[1].fov.angle_left;
}
switch (variant) {
case HDK_UNKNOWN: assert(!"unknown device"); break;
case HDK_VARIANT_2: {
hd->base.screens[0].nominal_frame_interval_ns =
hd->base.hmd->screens[0].nominal_frame_interval_ns =
time_s_to_ns(1.0f / 90.0f);
constexpr int panel_w = 1080;
constexpr int panel_h = 1200;
@@ -324,44 +343,44 @@ hdk_device_create(hid_device *dev,
// clang-format off
// Main display.
hd->base.screens[0].w_pixels = panel_w * 2;
hd->base.screens[0].h_pixels = panel_h;
hd->base.hmd->screens[0].w_pixels = panel_w * 2;
hd->base.hmd->screens[0].h_pixels = panel_h;
#ifndef HDK_DO_NOT_FLIP_HDK2_SCREEN
// Left
hd->base.views[0].display.w_pixels = panel_w;
hd->base.views[0].display.h_pixels = panel_h;
hd->base.views[0].viewport.x_pixels = panel_w; // right half of display
hd->base.views[0].viewport.y_pixels = vert_padding;
hd->base.views[0].viewport.w_pixels = panel_w;
hd->base.views[0].viewport.h_pixels = panel_w;
hd->base.views[0].rot = u_device_rotation_180;
hd->base.hmd->views[0].display.w_pixels = panel_w;
hd->base.hmd->views[0].display.h_pixels = panel_h;
hd->base.hmd->views[0].viewport.x_pixels = panel_w; // right half of display
hd->base.hmd->views[0].viewport.y_pixels = vert_padding;
hd->base.hmd->views[0].viewport.w_pixels = panel_w;
hd->base.hmd->views[0].viewport.h_pixels = panel_w;
hd->base.hmd->views[0].rot = u_device_rotation_180;
// Right
hd->base.views[1].display.w_pixels = panel_w;
hd->base.views[1].display.h_pixels = panel_h;
hd->base.views[1].viewport.x_pixels = 0;
hd->base.views[1].viewport.y_pixels = vert_padding;
hd->base.views[1].viewport.w_pixels = panel_w;
hd->base.views[1].viewport.h_pixels = panel_w;
hd->base.views[1].rot = u_device_rotation_180;
hd->base.hmd->views[1].display.w_pixels = panel_w;
hd->base.hmd->views[1].display.h_pixels = panel_h;
hd->base.hmd->views[1].viewport.x_pixels = 0;
hd->base.hmd->views[1].viewport.y_pixels = vert_padding;
hd->base.hmd->views[1].viewport.w_pixels = panel_w;
hd->base.hmd->views[1].viewport.h_pixels = panel_w;
hd->base.hmd->views[1].rot = u_device_rotation_180;
#else
// Left
hd->base.views[0].display.w_pixels = panel_w;
hd->base.views[0].display.h_pixels = panel_h;
hd->base.views[0].viewport.x_pixels = 0;
hd->base.views[0].viewport.y_pixels = vert_padding;
hd->base.views[0].viewport.w_pixels = panel_w;
hd->base.views[0].viewport.h_pixels = panel_w;
hd->base.views[0].rot = u_device_rotation_ident;
hd->base.hmd->views[0].display.w_pixels = panel_w;
hd->base.hmd->views[0].display.h_pixels = panel_h;
hd->base.hmd->views[0].viewport.x_pixels = 0;
hd->base.hmd->views[0].viewport.y_pixels = vert_padding;
hd->base.hmd->views[0].viewport.w_pixels = panel_w;
hd->base.hmd->views[0].viewport.h_pixels = panel_w;
hd->base.hmd->views[0].rot = u_device_rotation_ident;
// Right
hd->base.views[1].display.w_pixels = panel_w;
hd->base.views[1].display.h_pixels = panel_h;
hd->base.views[1].viewport.x_pixels = panel_w;
hd->base.views[1].viewport.y_pixels = vert_padding;
hd->base.views[1].viewport.w_pixels = panel_w;
hd->base.views[1].viewport.h_pixels = panel_w;
hd->base.views[1].rot = u_device_rotation_ident;
hd->base.hmd->views[1].display.w_pixels = panel_w;
hd->base.hmd->views[1].display.h_pixels = panel_h;
hd->base.hmd->views[1].viewport.x_pixels = panel_w;
hd->base.hmd->views[1].viewport.y_pixels = vert_padding;
hd->base.hmd->views[1].viewport.w_pixels = panel_w;
hd->base.hmd->views[1].viewport.h_pixels = panel_w;
hd->base.hmd->views[1].rot = u_device_rotation_ident;
#endif
// clang-format on
break;
@@ -370,7 +389,7 @@ hdk_device_create(hid_device *dev,
// fallthrough intentional
case HDK_VARIANT_1_2: {
// 1080x1920 screen, with the top at the left.
hd->base.screens[0].nominal_frame_interval_ns =
hd->base.hmd->screens[0].nominal_frame_interval_ns =
time_s_to_ns(1.0f / 60.0f);
constexpr int panel_w = 1080;
@@ -378,26 +397,26 @@ hdk_device_create(hid_device *dev,
constexpr int panel_half_h = panel_h / 2;
// clang-format off
// Main display.
hd->base.screens[0].w_pixels = panel_w;
hd->base.screens[0].h_pixels = panel_h;
hd->base.hmd->screens[0].w_pixels = panel_w;
hd->base.hmd->screens[0].h_pixels = panel_h;
// Left
hd->base.views[0].display.w_pixels = panel_half_h;
hd->base.views[0].display.h_pixels = panel_w;
hd->base.views[0].viewport.x_pixels = 0;
hd->base.views[0].viewport.y_pixels = 0;// top half of display
hd->base.views[0].viewport.w_pixels = panel_w;
hd->base.views[0].viewport.h_pixels = panel_half_h;
hd->base.views[0].rot = u_device_rotation_left;
hd->base.hmd->views[0].display.w_pixels = panel_half_h;
hd->base.hmd->views[0].display.h_pixels = panel_w;
hd->base.hmd->views[0].viewport.x_pixels = 0;
hd->base.hmd->views[0].viewport.y_pixels = 0;// top half of display
hd->base.hmd->views[0].viewport.w_pixels = panel_w;
hd->base.hmd->views[0].viewport.h_pixels = panel_half_h;
hd->base.hmd->views[0].rot = u_device_rotation_left;
// Right
hd->base.views[1].display.w_pixels = panel_half_h;
hd->base.views[1].display.h_pixels = panel_w;
hd->base.views[1].viewport.x_pixels = 0;
hd->base.views[1].viewport.y_pixels = panel_half_h; // bottom half of display
hd->base.views[1].viewport.w_pixels = panel_w;
hd->base.views[1].viewport.h_pixels = panel_half_h;
hd->base.views[1].rot = u_device_rotation_left;
hd->base.hmd->views[1].display.w_pixels = panel_half_h;
hd->base.hmd->views[1].display.h_pixels = panel_w;
hd->base.hmd->views[1].viewport.x_pixels = 0;
hd->base.hmd->views[1].viewport.y_pixels = panel_half_h; // bottom half of display
hd->base.hmd->views[1].viewport.w_pixels = panel_w;
hd->base.hmd->views[1].viewport.h_pixels = panel_half_h;
hd->base.hmd->views[1].rot = u_device_rotation_left;
// clang-format on
break;
}
@@ -407,13 +426,14 @@ hdk_device_create(hid_device *dev,
// "None" is correct or at least acceptable for 1.2.
// We have coefficients for 1.3/1.4, though the mesh is better.
// We only have a mesh for 2, so use "none" there until it's supported.
hd->base.distortion.models = XRT_DISTORTION_MODEL_NONE;
hd->base.distortion.preferred = XRT_DISTORTION_MODEL_NONE;
hd->base.hmd->distortion.models = XRT_DISTORTION_MODEL_NONE;
hd->base.hmd->distortion.preferred = XRT_DISTORTION_MODEL_NONE;
// if (variant == HDK_VARIANT_1_3_1_4) {
// hd->base.distortion.models =
// xrt_distortion_model(hd->base.distortion.models |
// hd->base.hmd->distortion.models =
// xrt_distortion_model(hd->base.hmd->distortion.models |
// XRT_DISTORTION_MODEL_PANOTOOLS);
// hd->base.distortion.preferred = XRT_DISTORTION_MODEL_PANOTOOLS;
// hd->base.hmd->distortion.preferred =
// XRT_DISTORTION_MODEL_PANOTOOLS;
// }


@@ -45,14 +45,27 @@ oh_device_destroy(struct xrt_device *xdev)
free(ohd);
}
static void
oh_device_update_inputs(struct xrt_device *xdev, struct time_state *timekeeping)
{
// Empty
}
static void
oh_device_get_tracked_pose(struct xrt_device *xdev,
enum xrt_input_name name,
struct time_state *timekeeping,
int64_t *out_timestamp,
struct xrt_space_relation *out_relation)
{
struct oh_device *ohd = oh_device(xdev);
struct xrt_quat quat = {0.f, 0.f, 0.f, 1.f};
if (name != XRT_INPUT_GENERIC_HEAD_RELATION) {
OH_ERROR(ohd, "unknown input name");
return;
}
ohmd_ctx_update(ohd->ctx);
int64_t now = time_state_get_now(timekeeping);
//! @todo adjust for latency here
@@ -325,10 +338,14 @@ oh_device_create(ohmd_context *ctx,
bool print_spew,
bool print_debug)
{
struct oh_device *ohd = U_TYPED_CALLOC(struct oh_device);
ohd->base.destroy = oh_device_destroy;
enum u_device_alloc_flags flags =
U_DEVICE_ALLOC_HMD | U_DEVICE_ALLOC_TRACKING_NONE;
struct oh_device *ohd = U_DEVICE_ALLOCATE(struct oh_device, flags, 1);
ohd->base.update_inputs = oh_device_update_inputs;
ohd->base.get_tracked_pose = oh_device_get_tracked_pose;
ohd->base.get_view_pose = oh_device_get_view_pose;
ohd->base.destroy = oh_device_destroy;
ohd->base.inputs[0].name = XRT_INPUT_GENERIC_HEAD_RELATION;
ohd->ctx = ctx;
ohd->dev = dev;
ohd->print_spew = print_spew;
@@ -346,7 +363,7 @@ oh_device_create(ohmd_context *ctx,
info.views[1].fov,
info.views[1].display.h_meters,
info.views[1].lens_center_y_meters, 0,
&ohd->base.views[1].fov)) {
&ohd->base.hmd->views[1].fov)) {
OH_ERROR(
ohd,
"Failed to compute the partial fields of view.");
@@ -356,124 +373,125 @@ oh_device_create(ohmd_context *ctx,
}
{
/* left eye - just mirroring right eye now */
ohd->base.views[0].fov.angle_up =
ohd->base.views[1].fov.angle_up;
ohd->base.views[0].fov.angle_down =
ohd->base.views[1].fov.angle_down;
ohd->base.hmd->views[0].fov.angle_up =
ohd->base.hmd->views[1].fov.angle_up;
ohd->base.hmd->views[0].fov.angle_down =
ohd->base.hmd->views[1].fov.angle_down;
ohd->base.views[0].fov.angle_left =
-ohd->base.views[1].fov.angle_right;
ohd->base.views[0].fov.angle_right =
-ohd->base.views[1].fov.angle_left;
ohd->base.hmd->views[0].fov.angle_left =
-ohd->base.hmd->views[1].fov.angle_right;
ohd->base.hmd->views[0].fov.angle_right =
-ohd->base.hmd->views[1].fov.angle_left;
}
// clang-format off
// Main display.
ohd->base.distortion.models = XRT_DISTORTION_MODEL_PANOTOOLS;
ohd->base.distortion.preferred = XRT_DISTORTION_MODEL_PANOTOOLS;
ohd->base.screens[0].w_pixels = info.display.w_pixels;
ohd->base.screens[0].h_pixels = info.display.h_pixels;
ohd->base.screens[0].nominal_frame_interval_ns = info.display.nominal_frame_interval_ns;
ohd->base.distortion.pano.distortion_k[0] = info.pano_distortion_k[0];
ohd->base.distortion.pano.distortion_k[1] = info.pano_distortion_k[1];
ohd->base.distortion.pano.distortion_k[2] = info.pano_distortion_k[2];
ohd->base.distortion.pano.distortion_k[3] = info.pano_distortion_k[3];
ohd->base.distortion.pano.aberration_k[0] = info.pano_aberration_k[0];
ohd->base.distortion.pano.aberration_k[1] = info.pano_aberration_k[1];
ohd->base.distortion.pano.aberration_k[2] = info.pano_aberration_k[2];
ohd->base.distortion.pano.warp_scale = info.pano_warp_scale;
ohd->base.hmd->distortion.models = XRT_DISTORTION_MODEL_PANOTOOLS;
ohd->base.hmd->distortion.preferred = XRT_DISTORTION_MODEL_PANOTOOLS;
ohd->base.hmd->screens[0].w_pixels = info.display.w_pixels;
ohd->base.hmd->screens[0].h_pixels = info.display.h_pixels;
ohd->base.hmd->screens[0].nominal_frame_interval_ns = info.display.nominal_frame_interval_ns;
ohd->base.hmd->distortion.pano.distortion_k[0] = info.pano_distortion_k[0];
ohd->base.hmd->distortion.pano.distortion_k[1] = info.pano_distortion_k[1];
ohd->base.hmd->distortion.pano.distortion_k[2] = info.pano_distortion_k[2];
ohd->base.hmd->distortion.pano.distortion_k[3] = info.pano_distortion_k[3];
ohd->base.hmd->distortion.pano.aberration_k[0] = info.pano_aberration_k[0];
ohd->base.hmd->distortion.pano.aberration_k[1] = info.pano_aberration_k[1];
ohd->base.hmd->distortion.pano.aberration_k[2] = info.pano_aberration_k[2];
ohd->base.hmd->distortion.pano.warp_scale = info.pano_warp_scale;
// Left
ohd->base.views[0].display.w_meters = info.views[0].display.w_meters;
ohd->base.views[0].display.h_meters = info.views[0].display.h_meters;
ohd->base.views[0].lens_center.x_meters = info.views[0].lens_center_x_meters;
ohd->base.views[0].lens_center.y_meters = info.views[0].lens_center_y_meters;
ohd->base.views[0].display.w_pixels = info.views[0].display.w_pixels;
ohd->base.views[0].display.h_pixels = info.views[0].display.h_pixels;
ohd->base.views[0].viewport.x_pixels = 0;
ohd->base.views[0].viewport.y_pixels = 0;
ohd->base.views[0].viewport.w_pixels = info.views[0].display.w_pixels;
ohd->base.views[0].viewport.h_pixels = info.views[0].display.h_pixels;
ohd->base.views[0].rot = u_device_rotation_ident;
ohd->base.hmd->views[0].display.w_meters = info.views[0].display.w_meters;
ohd->base.hmd->views[0].display.h_meters = info.views[0].display.h_meters;
ohd->base.hmd->views[0].lens_center.x_meters = info.views[0].lens_center_x_meters;
ohd->base.hmd->views[0].lens_center.y_meters = info.views[0].lens_center_y_meters;
ohd->base.hmd->views[0].display.w_pixels = info.views[0].display.w_pixels;
ohd->base.hmd->views[0].display.h_pixels = info.views[0].display.h_pixels;
ohd->base.hmd->views[0].viewport.x_pixels = 0;
ohd->base.hmd->views[0].viewport.y_pixels = 0;
ohd->base.hmd->views[0].viewport.w_pixels = info.views[0].display.w_pixels;
ohd->base.hmd->views[0].viewport.h_pixels = info.views[0].display.h_pixels;
ohd->base.hmd->views[0].rot = u_device_rotation_ident;
// Right
ohd->base.views[1].display.w_meters = info.views[1].display.w_meters;
ohd->base.views[1].display.h_meters = info.views[1].display.h_meters;
ohd->base.views[1].lens_center.x_meters = info.views[1].lens_center_x_meters;
ohd->base.views[1].lens_center.y_meters = info.views[1].lens_center_y_meters;
ohd->base.views[1].display.w_pixels = info.views[1].display.w_pixels;
ohd->base.views[1].display.h_pixels = info.views[1].display.h_pixels;
ohd->base.views[1].viewport.x_pixels = info.views[0].display.w_pixels;
ohd->base.views[1].viewport.y_pixels = 0;
ohd->base.views[1].viewport.w_pixels = info.views[1].display.w_pixels;
ohd->base.views[1].viewport.h_pixels = info.views[1].display.h_pixels;
ohd->base.views[1].rot = u_device_rotation_ident;
ohd->base.hmd->views[1].display.w_meters = info.views[1].display.w_meters;
ohd->base.hmd->views[1].display.h_meters = info.views[1].display.h_meters;
ohd->base.hmd->views[1].lens_center.x_meters = info.views[1].lens_center_x_meters;
ohd->base.hmd->views[1].lens_center.y_meters = info.views[1].lens_center_y_meters;
ohd->base.hmd->views[1].display.w_pixels = info.views[1].display.w_pixels;
ohd->base.hmd->views[1].display.h_pixels = info.views[1].display.h_pixels;
ohd->base.hmd->views[1].viewport.x_pixels = info.views[0].display.w_pixels;
ohd->base.hmd->views[1].viewport.y_pixels = 0;
ohd->base.hmd->views[1].viewport.w_pixels = info.views[1].display.w_pixels;
ohd->base.hmd->views[1].viewport.h_pixels = info.views[1].display.h_pixels;
ohd->base.hmd->views[1].rot = u_device_rotation_ident;
// clang-format on
// Which blend modes does the device support.
ohd->base.blend_mode = XRT_BLEND_MODE_OPAQUE;
ohd->base.hmd->blend_mode = XRT_BLEND_MODE_OPAQUE;
if (info.quirks.video_see_through) {
ohd->base.blend_mode = (enum xrt_blend_mode)(
ohd->base.blend_mode | XRT_BLEND_MODE_ALPHA_BLEND);
ohd->base.hmd->blend_mode = (enum xrt_blend_mode)(
ohd->base.hmd->blend_mode | XRT_BLEND_MODE_ALPHA_BLEND);
}
if (info.quirks.video_distortion_vive) {
ohd->base.distortion.models = (enum xrt_distortion_model)(
ohd->base.distortion.models | XRT_DISTORTION_MODEL_VIVE);
ohd->base.distortion.preferred = XRT_DISTORTION_MODEL_VIVE;
ohd->base.hmd->distortion.models = (enum xrt_distortion_model)(
ohd->base.hmd->distortion.models |
XRT_DISTORTION_MODEL_VIVE);
ohd->base.hmd->distortion.preferred = XRT_DISTORTION_MODEL_VIVE;
// clang-format off
// These need to be acquired from the vive config
ohd->base.distortion.vive.aspect_x_over_y = 0.8999999761581421f;
ohd->base.distortion.vive.grow_for_undistort = 0.6000000238418579f;
ohd->base.distortion.vive.undistort_r2_cutoff[0] = 1.11622154712677f;
ohd->base.distortion.vive.undistort_r2_cutoff[1] = 1.101870775222778f;
ohd->base.distortion.vive.center[0][0] = 0.08946027017045266f;
ohd->base.distortion.vive.center[0][1] = -0.009002181016260827f;
ohd->base.distortion.vive.center[1][0] = -0.08933516629552526f;
ohd->base.distortion.vive.center[1][1] = -0.006014565287238661f;
ohd->base.hmd->distortion.vive.aspect_x_over_y = 0.8999999761581421f;
ohd->base.hmd->distortion.vive.grow_for_undistort = 0.6000000238418579f;
ohd->base.hmd->distortion.vive.undistort_r2_cutoff[0] = 1.11622154712677f;
ohd->base.hmd->distortion.vive.undistort_r2_cutoff[1] = 1.101870775222778f;
ohd->base.hmd->distortion.vive.center[0][0] = 0.08946027017045266f;
ohd->base.hmd->distortion.vive.center[0][1] = -0.009002181016260827f;
ohd->base.hmd->distortion.vive.center[1][0] = -0.08933516629552526f;
ohd->base.hmd->distortion.vive.center[1][1] = -0.006014565287238661f;
// left
// green
ohd->base.distortion.vive.coefficients[0][0][0] = -0.188236068524731f;
ohd->base.distortion.vive.coefficients[0][0][1] = -0.221086205321053f;
ohd->base.distortion.vive.coefficients[0][0][2] = -0.2537849057915209f;
ohd->base.hmd->distortion.vive.coefficients[0][0][0] = -0.188236068524731f;
ohd->base.hmd->distortion.vive.coefficients[0][0][1] = -0.221086205321053f;
ohd->base.hmd->distortion.vive.coefficients[0][0][2] = -0.2537849057915209f;
// blue
ohd->base.distortion.vive.coefficients[0][1][0] = -0.07316590815739493f;
ohd->base.distortion.vive.coefficients[0][1][1] = -0.02332400789561968f;
ohd->base.distortion.vive.coefficients[0][1][2] = 0.02469959434698275f;
ohd->base.hmd->distortion.vive.coefficients[0][1][0] = -0.07316590815739493f;
ohd->base.hmd->distortion.vive.coefficients[0][1][1] = -0.02332400789561968f;
ohd->base.hmd->distortion.vive.coefficients[0][1][2] = 0.02469959434698275f;
// red
ohd->base.distortion.vive.coefficients[0][2][0] = -0.02223805567703767f;
ohd->base.distortion.vive.coefficients[0][2][1] = -0.04931309279533211f;
ohd->base.distortion.vive.coefficients[0][2][2] = -0.07862881939243466f;
ohd->base.hmd->distortion.vive.coefficients[0][2][0] = -0.02223805567703767f;
ohd->base.hmd->distortion.vive.coefficients[0][2][1] = -0.04931309279533211f;
ohd->base.hmd->distortion.vive.coefficients[0][2][2] = -0.07862881939243466f;
// right
// green
ohd->base.distortion.vive.coefficients[1][0][0] = -0.1906209981894497f;
ohd->base.distortion.vive.coefficients[1][0][1] = -0.2248896677207884f;
ohd->base.distortion.vive.coefficients[1][0][2] = -0.2721364516782803f;
ohd->base.hmd->distortion.vive.coefficients[1][0][0] = -0.1906209981894497f;
ohd->base.hmd->distortion.vive.coefficients[1][0][1] = -0.2248896677207884f;
ohd->base.hmd->distortion.vive.coefficients[1][0][2] = -0.2721364516782803f;
// blue
ohd->base.distortion.vive.coefficients[1][1][0] = -0.07346071902951497f;
ohd->base.distortion.vive.coefficients[1][1][1] = -0.02189527566250131f;
ohd->base.distortion.vive.coefficients[1][1][2] = 0.0581378652359256f;
ohd->base.hmd->distortion.vive.coefficients[1][1][0] = -0.07346071902951497f;
ohd->base.hmd->distortion.vive.coefficients[1][1][1] = -0.02189527566250131f;
ohd->base.hmd->distortion.vive.coefficients[1][1][2] = 0.0581378652359256f;
// red
ohd->base.distortion.vive.coefficients[1][2][0] = -0.01755850332081247f;
ohd->base.distortion.vive.coefficients[1][2][1] = -0.04517245633373419f;
ohd->base.distortion.vive.coefficients[1][2][2] = -0.0928909347763f;
ohd->base.hmd->distortion.vive.coefficients[1][2][0] = -0.01755850332081247f;
ohd->base.hmd->distortion.vive.coefficients[1][2][1] = -0.04517245633373419f;
ohd->base.hmd->distortion.vive.coefficients[1][2][2] = -0.0928909347763f;
// clang-format on
}
if (info.quirks.video_distortion_none) {
ohd->base.distortion.models = XRT_DISTORTION_MODEL_NONE;
ohd->base.distortion.preferred = XRT_DISTORTION_MODEL_NONE;
ohd->base.hmd->distortion.models = XRT_DISTORTION_MODEL_NONE;
ohd->base.hmd->distortion.preferred = XRT_DISTORTION_MODEL_NONE;
}
if (info.quirks.left_center_pano_scale) {
ohd->base.distortion.pano.warp_scale =
ohd->base.hmd->distortion.pano.warp_scale =
info.views[0].lens_center_x_meters;
}
@@ -481,30 +499,30 @@ oh_device_create(ohmd_context *ctx,
int w = info.display.w_pixels;
int h = info.display.h_pixels;
ohd->base.views[0].viewport.x_pixels = 0;
ohd->base.views[0].viewport.y_pixels = 0;
ohd->base.views[0].viewport.w_pixels = w;
ohd->base.views[0].viewport.h_pixels = h / 2;
ohd->base.views[0].rot = u_device_rotation_right;
ohd->base.hmd->views[0].viewport.x_pixels = 0;
ohd->base.hmd->views[0].viewport.y_pixels = 0;
ohd->base.hmd->views[0].viewport.w_pixels = w;
ohd->base.hmd->views[0].viewport.h_pixels = h / 2;
ohd->base.hmd->views[0].rot = u_device_rotation_right;
ohd->base.views[1].viewport.x_pixels = 0;
ohd->base.views[1].viewport.y_pixels = h / 2;
ohd->base.views[1].viewport.w_pixels = w;
ohd->base.views[1].viewport.h_pixels = h / 2;
ohd->base.views[1].rot = u_device_rotation_right;
ohd->base.hmd->views[1].viewport.x_pixels = 0;
ohd->base.hmd->views[1].viewport.y_pixels = h / 2;
ohd->base.hmd->views[1].viewport.w_pixels = w;
ohd->base.hmd->views[1].viewport.h_pixels = h / 2;
ohd->base.hmd->views[1].rot = u_device_rotation_right;
}
if (info.quirks.rotate_lenses_inwards) {
int w2 = info.display.w_pixels / 2;
int h = info.display.h_pixels;
ohd->base.views[0].display.w_pixels = h;
ohd->base.views[0].display.h_pixels = w2;
ohd->base.views[0].rot = u_device_rotation_right;
ohd->base.hmd->views[0].display.w_pixels = h;
ohd->base.hmd->views[0].display.h_pixels = w2;
ohd->base.hmd->views[0].rot = u_device_rotation_right;
ohd->base.views[1].display.w_pixels = h;
ohd->base.views[1].display.h_pixels = w2;
ohd->base.views[1].rot = u_device_rotation_left;
ohd->base.hmd->views[1].display.w_pixels = h;
ohd->base.hmd->views[1].display.h_pixels = w2;
ohd->base.hmd->views[1].rot = u_device_rotation_left;
}
if (ohd->print_debug) {


@@ -584,14 +584,27 @@ teardown(struct psvr_device *psvr)
*
*/
static void
psvr_device_update_inputs(struct xrt_device *xdev,
struct time_state *timekeeping)
{
// Empty
}
static void
psvr_device_get_tracked_pose(struct xrt_device *xdev,
enum xrt_input_name name,
struct time_state *timekeeping,
int64_t *out_timestamp,
struct xrt_space_relation *out_relation)
{
struct psvr_device *psvr = psvr_device(xdev);
if (name != XRT_INPUT_GENERIC_HEAD_RELATION) {
PSVR_ERROR(psvr, "unknown input name");
return;
}
// Read all packets.
read_handle_packets(psvr);
read_control_packets(psvr);
@@ -663,14 +676,19 @@ psvr_device_create(struct hid_device_info *hmd_handle_info,
bool print_spew,
bool print_debug)
{
struct psvr_device *psvr = U_TYPED_CALLOC(struct psvr_device);
enum u_device_alloc_flags flags =
U_DEVICE_ALLOC_HMD | U_DEVICE_ALLOC_TRACKING_NONE;
struct psvr_device *psvr =
U_DEVICE_ALLOCATE(struct psvr_device, flags, 1);
int ret;
psvr->print_spew = print_spew;
psvr->print_debug = print_debug;
psvr->base.destroy = psvr_device_destroy;
psvr->base.update_inputs = psvr_device_update_inputs;
psvr->base.get_tracked_pose = psvr_device_get_tracked_pose;
psvr->base.get_view_pose = psvr_device_get_view_pose;
psvr->base.destroy = psvr_device_destroy;
psvr->base.inputs[0].name = XRT_INPUT_GENERIC_HEAD_RELATION;
ret = open_hid(psvr, hmd_handle_info, &psvr->hmd_handle);
if (ret != 0) {


@@ -57,15 +57,13 @@ struct xrt_quat
};
/*!
* A 3 element vector with single floats.
* A 1 element vector with single floats.
*
* @ingroup xrt_iface math
*/
struct xrt_vec3
struct xrt_vec1
{
float x;
float y;
float z;
};
/*!
@@ -79,6 +77,18 @@ struct xrt_vec2
float y;
};
/*!
* A 3 element vector with single floats.
*
* @ingroup xrt_iface math
*/
struct xrt_vec3
{
float x;
float y;
float z;
};
/*!
* A pose composed of a position and orientation.
*
@@ -191,6 +201,61 @@ struct xrt_space_relation
struct xrt_vec3 angular_acceleration;
};
/*
*
* Input related enums and structs.
*
*/
/*!
* Base type of this input.
*
* @ingroup xrt_iface
*/
enum xrt_input_type
{
// clang-format off
XRT_INPUT_TYPE_VEC1_ZERO_TO_ONE = 0x00,
XRT_INPUT_TYPE_VEC1_MINUS_ONE_TO_ONE = 0x01,
XRT_INPUT_TYPE_VEC2_MINUS_ONE_TO_ONE = 0x02,
XRT_INPUT_TYPE_VEC3_MINUS_ONE_TO_ONE = 0x03,
XRT_INPUT_TYPE_BOOLEAN = 0x04,
XRT_INPUT_TYPE_POSE = 0x05,
XRT_INPUT_TYPE_RELATION = 0x06,
// clang-format on
};
#define XRT_INPUT_NAME(id, type) ((id << 8) | XRT_INPUT_TYPE_##type)
/*!
* Name of an input with a baked-in type.
*
* @see xrt_input_type
* @ingroup xrt_iface
*/
enum xrt_input_name
{
// clang-format off
XRT_INPUT_GENERIC_HEAD_RELATION = XRT_INPUT_NAME(0x0000, RELATION),
XRT_INPUT_GENERIC_HEAD_DETECT = XRT_INPUT_NAME(0x0001, BOOLEAN),
// clang-format on
};
/*!
* A union of all input types.
*
* @see xrt_input_type
* @ingroup xrt_iface math
*/
union xrt_input_value {
struct xrt_vec1 vec1;
struct xrt_vec2 vec2;
struct xrt_vec3 vec3;
bool boolean;
};
#ifdef __cplusplus
}
#endif
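Since the type is baked into the low byte of every xrt_input_name, it can be recovered without a lookup table. A small illustration follows; the XRT_GET_INPUT_TYPE macro is not part of this patch.

// Illustration only, not in the patch.
#define XRT_GET_INPUT_TYPE(name) ((enum xrt_input_type)((name) & 0xff))

// XRT_INPUT_GENERIC_HEAD_RELATION == (0x0000 << 8) | XRT_INPUT_TYPE_RELATION
// XRT_GET_INPUT_TYPE(XRT_INPUT_GENERIC_HEAD_RELATION) == XRT_INPUT_TYPE_RELATION
// XRT_GET_INPUT_TYPE(XRT_INPUT_GENERIC_HEAD_DETECT)   == XRT_INPUT_TYPE_BOOLEAN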


@@ -18,6 +18,8 @@ extern "C" {
#endif
struct time_state;
struct xrt_tracking;
/*!
* A per-lens view information.
@@ -82,17 +84,14 @@ struct xrt_view
};
/*!
* A single HMD device.
* All of the device components that deal with interfacing to a user's head.
*
* HMD is probably a bad name for the future, but it will have to do for now.
*
* @ingroup xrt_iface
*/
struct xrt_device
struct xrt_hmd_parts
{
/*!
* A string describing the device.
*/
char name[XRT_DEVICE_NAME_LEN];
/*!
* The hmd screen, right now hardcoded to one.
*/
@@ -152,6 +151,53 @@ struct xrt_device
} vive;
} distortion;
};
/*!
* A single named input that sits on a @ref xrt_device.
*
* @ingroup xrt_iface
*/
struct xrt_input
{
int64_t timestamp;
enum xrt_input_name name;
union xrt_input_value value;
};
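For value inputs, the expectation is that a driver stamps and fills these from its update_inputs callback, roughly as sketched below. This is hypothetical; the drivers touched by this patch leave update_inputs empty for now, and sample_update_inputs is a placeholder that reuses the time_state_get_now helper already used by those drivers.

static void
sample_update_inputs(struct xrt_device *xdev, struct time_state *timekeeping)
{
	// Assume inputs[0] was declared as a BOOLEAN input at create time.
	int64_t now = time_state_get_now(timekeeping);

	xdev->inputs[0].timestamp = now;
	xdev->inputs[0].value.boolean = true; // would be read from hardware
}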
/*!
* A single HMD or input device.
*
* @ingroup xrt_iface
*/
struct xrt_device
{
/*!
* A string describing the device.
*/
char name[XRT_DEVICE_NAME_LEN];
//! Null if this device does not interface with the user's head.
struct xrt_hmd_parts *hmd;
//! Always set, pointing to the tracking system for this device.
struct xrt_tracking *tracking;
//! Number of inputs.
size_t num_inputs;
//! Array of input structs.
struct xrt_input *inputs;
/*!
* Update any attached inputs.
*
* @param[in] xdev The device.
* @param[in] timekeeping Shared time synchronization struct.
*/
void (*update_inputs)(struct xrt_device *xdev,
struct time_state *timekeeping);
/*!
* Get relationship of a tracked device to the device "base space".
@@ -159,8 +205,19 @@ struct xrt_device
* Right now the base space is assumed to be local space.
*
* This is very very WIP and will need to be made a lot more advanced.
*
* @param[in] xdev The device.
* @param[in] name Some devices may have multiple poses on
* them; select the one using this field. For
* HMDs use @p XRT_INPUT_GENERIC_HEAD_RELATION.
* @param[in] timekeeping Shared time synchronization struct.
* @param[out] out_timestamp Timestamp when this relation was captured.
* @param[out] out_relation The relation read from the device.
*
* @see xrt_input_name
*/
void (*get_tracked_pose)(struct xrt_device *xdev,
enum xrt_input_name name,
struct time_state *timekeeping,
int64_t *out_timestamp,
struct xrt_space_relation *out_relation);
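On the consumer side, the new name parameter is threaded through like this; a sketch of a call site with xdev and timekeeping in scope, mirroring the oxr_session change further down.

struct xrt_space_relation relation;
int64_t timestamp;

// Refresh inputs first (exact call ordering is up to the state tracker).
xdev->update_inputs(xdev, timekeeping);

xdev->get_tracked_pose(xdev, XRT_INPUT_GENERIC_HEAD_RELATION,
                       timekeeping, &timestamp, &relation);

if ((relation.relation_flags &
     XRT_SPACE_RELATION_ORIENTATION_VALID_BIT) != 0) {
	// relation.pose.orientation can be used here.
}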


@@ -0,0 +1,51 @@
// Copyright 2019, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Header defining the tracking system integration in Monado.
* @author Jakob Bornecrantz <jakob@collabora.com>
* @ingroup xrt_iface
*/
#pragma once
#define XRT_TRACKING_NAME_LEN 256
#include "xrt/xrt_defines.h"
#ifdef __cplusplus
extern "C" {
#endif
struct time_state;
enum xrt_tracking_type
{
// The device(s) are never tracked.
XRT_TRACKING_TYPE_NONE,
};
/*!
* A tracking system or device origin.
*
* @ingroup xrt_iface
*/
struct xrt_tracking
{
//! For debugging.
char name[XRT_TRACKING_NAME_LEN];
//! What the state tracker can expect from this tracking system.
enum xrt_tracking_type type;
/*!
* Read and written to by the state-tracker using the device(s)
* this tracking system is tracking.
*/
struct xrt_pose offset;
};
#ifdef __cplusplus
}
#endif
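For a driver that does not pass U_DEVICE_ALLOC_TRACKING_NONE, the same "no tracking" origin can be wired up by hand, matching what u_device_allocate does. A sketch only; my_tracking and setup_no_tracking are placeholders.

#include <stdio.h>

#include "xrt/xrt_device.h"
#include "xrt/xrt_tracking.h"

static struct xrt_tracking my_tracking;

static void
setup_no_tracking(struct xrt_device *xdev)
{
	my_tracking.type = XRT_TRACKING_TYPE_NONE;
	my_tracking.offset.orientation.w = 1.0f; // identity orientation
	snprintf(my_tracking.name, XRT_TRACKING_NAME_LEN, "%s", "No tracking");

	xdev->tracking = &my_tracking;
}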


@@ -95,8 +95,8 @@ oxr_instance_create(struct oxr_logger *log,
"and right eye angle_right with %f radians (%i°)\n",
left_override, radtodeg_for_display(left_override),
-left_override, radtodeg_for_display(-left_override));
dev->views[0].fov.angle_left = left_override;
dev->views[1].fov.angle_right = -left_override;
dev->hmd->views[0].fov.angle_left = left_override;
dev->hmd->views[1].fov.angle_right = -left_override;
}
const float right_override = debug_get_float_option_lfov_right();
@@ -106,16 +106,16 @@ oxr_instance_create(struct oxr_logger *log,
"and right eye angle_left with %f radians (%i°)\n",
right_override, radtodeg_for_display(right_override),
-right_override, radtodeg_for_display(-right_override));
dev->views[0].fov.angle_right = right_override;
dev->views[1].fov.angle_left = -right_override;
dev->hmd->views[0].fov.angle_right = right_override;
dev->hmd->views[1].fov.angle_left = -right_override;
}
const float up_override = debug_get_float_option_lfov_up();
if (up_override != 0.0f) {
printf("Overriding both eyes angle_up with %f radians (%i°)\n",
up_override, radtodeg_for_display(up_override));
dev->views[0].fov.angle_up = up_override;
dev->views[1].fov.angle_up = up_override;
dev->hmd->views[0].fov.angle_up = up_override;
dev->hmd->views[1].fov.angle_up = up_override;
}
const float down_override = debug_get_float_option_lfov_down();
@@ -123,8 +123,8 @@ oxr_instance_create(struct oxr_logger *log,
printf(
"Overriding both eyes angle_down with %f radians (%i°)\n",
down_override, radtodeg_for_display(down_override));
dev->views[0].fov.angle_down = down_override;
dev->views[1].fov.angle_down = down_override;
dev->hmd->views[0].fov.angle_down = down_override;
dev->hmd->views[1].fov.angle_down = down_override;
}
oxr_system_fill_in(log, inst, 1, &inst->system, dev);


@@ -148,7 +148,8 @@ oxr_session_get_view_pose_at(struct oxr_logger *log,
struct xrt_device *xdev = sess->sys->device;
struct xrt_space_relation relation;
int64_t timestamp;
xdev->get_tracked_pose(xdev, sess->sys->inst->timekeeping, &timestamp,
xdev->get_tracked_pose(xdev, XRT_INPUT_GENERIC_HEAD_RELATION,
sess->sys->inst->timekeeping, &timestamp,
&relation);
if ((relation.relation_flags &
XRT_SPACE_RELATION_ORIENTATION_VALID_BIT) != 0) {
@@ -296,7 +297,7 @@ oxr_session_views(struct oxr_logger *log,
(struct xrt_pose *)&views[i].pose);
// Copy the fov information directly from the device.
views[i].fov = *(XrFovf *)&xdev->views[i].fov;
views[i].fov = *(XrFovf *)&xdev->hmd->views[i].fov;
print_view_fov(i, (struct xrt_fov *)&views[i].fov);
print_view_pose(i, (struct xrt_pose *)&views[i].pose);
@@ -426,7 +427,7 @@ oxr_session_frame_end(struct oxr_logger *log,
"unknown environment blend mode");
}
if ((blend_mode & sess->sys->device->blend_mode) == 0) {
if ((blend_mode & sess->sys->device->hmd->blend_mode) == 0) {
return oxr_error(log,
XR_ERROR_ENVIRONMENT_BLEND_MODE_UNSUPPORTED,
"(frameEndInfo->environmentBlendMode) "


@@ -78,10 +78,10 @@ oxr_system_fill_in(struct oxr_logger *log,
double scale = debug_get_num_option_scale_percentage() / 100.0;
uint32_t w0 = (uint32_t)(xdev->views[0].display.w_pixels * scale);
uint32_t h0 = (uint32_t)(xdev->views[0].display.w_pixels * scale);
uint32_t w1 = (uint32_t)(xdev->views[1].display.w_pixels * scale);
uint32_t h1 = (uint32_t)(xdev->views[1].display.w_pixels * scale);
uint32_t w0 = (uint32_t)(xdev->hmd->views[0].display.w_pixels * scale);
uint32_t h0 = (uint32_t)(xdev->hmd->views[0].display.w_pixels * scale);
uint32_t w1 = (uint32_t)(xdev->hmd->views[1].display.w_pixels * scale);
uint32_t h1 = (uint32_t)(xdev->hmd->views[1].display.w_pixels * scale);
sys->views[0].recommendedImageRectWidth = w0;
sys->views[0].maxImageRectWidth = w0;
@@ -99,13 +99,13 @@ oxr_system_fill_in(struct oxr_logger *log,
// clang-format on
uint32_t i = 0;
if (xdev->blend_mode & XRT_BLEND_MODE_OPAQUE) {
if (xdev->hmd->blend_mode & XRT_BLEND_MODE_OPAQUE) {
sys->blend_modes[i++] = XR_ENVIRONMENT_BLEND_MODE_OPAQUE;
}
if (xdev->blend_mode & XRT_BLEND_MODE_ADDITIVE) {
if (xdev->hmd->blend_mode & XRT_BLEND_MODE_ADDITIVE) {
sys->blend_modes[i++] = XR_ENVIRONMENT_BLEND_MODE_ADDITIVE;
}
if (xdev->blend_mode & XRT_BLEND_MODE_ALPHA_BLEND) {
if (xdev->hmd->blend_mode & XRT_BLEND_MODE_ALPHA_BLEND) {
sys->blend_modes[i++] = XR_ENVIRONMENT_BLEND_MODE_ALPHA_BLEND;
}
sys->num_blend_modes = i;