h/mercury: Better naming

This commit is contained in:
Moses Turner 2022-06-15 13:14:36 +01:00
parent 8e3a47dd94
commit d7eece4cb5
4 changed files with 161 additions and 161 deletions

View file

@@ -75,8 +75,8 @@ hsv2rgb(float fH, float fS, float fV)
struct xrt_vec2
raycoord(ht_view *htv, struct xrt_vec2 model_out)
{
model_out.x *= htv->htd->multiply_px_coord_for_undistort;
model_out.y *= htv->htd->multiply_px_coord_for_undistort;
model_out.x *= htv->hgt->multiply_px_coord_for_undistort;
model_out.y *= htv->hgt->multiply_px_coord_for_undistort;
cv::Mat in_px_coords(1, 1, CV_32FC2);
float *write_in;
write_in = in_px_coords.ptr<float>(0);
@@ -84,7 +84,7 @@ raycoord(ht_view *htv, struct xrt_vec2 model_out)
write_in[1] = model_out.y;
cv::Mat out_ray(1, 1, CV_32FC2);
if (htv->htd->use_fisheye) {
if (htv->hgt->use_fisheye) {
cv::fisheye::undistortPoints(in_px_coords, out_ray, htv->cameraMatrix, htv->distortion);
} else {
cv::undistortPoints(in_px_coords, out_ray, htv->cameraMatrix, htv->distortion);

View file

@@ -30,7 +30,7 @@ cv::Scalar colors[2] = {YELLOW, RED};
OrtStatus *status = wrap->api->expr; \
if (status != nullptr) { \
const char *msg = wrap->api->GetErrorMessage(status); \
HT_ERROR(htd, "[%s:%d]: %s\n", __FILE__, __LINE__, msg); \
HG_ERROR(hgt, "[%s:%d]: %s\n", __FILE__, __LINE__, msg); \
wrap->api->ReleaseStatus(status); \
assert(false); \
} \
@@ -148,9 +148,9 @@ normalizeGrayscaleImage(cv::Mat &data_in, cv::Mat &data_out)
}
static void
init_hand_detection(HandTracking *htd, onnx_wrap *wrap)
init_hand_detection(HandTracking *hgt, onnx_wrap *wrap)
{
std::filesystem::path path = htd->models_folder;
std::filesystem::path path = hgt->models_folder;
path /= "grayscale_detection.onnx";
@@ -204,7 +204,7 @@ run_hand_detection(void *ptr)
XRT_TRACE_MARKER();
ht_view *view = (ht_view *)ptr;
HandTracking *htd = view->htd;
HandTracking *hgt = view->hgt;
onnx_wrap *wrap = &view->detection;
cv::Mat &data_400x640 = view->run_model_on_this;
@@ -264,7 +264,7 @@ run_hand_detection(void *ptr)
output->center = _pt;
output->size_px = size;
if (htd->debug_scribble) {
if (hgt->debug_scribble) {
cv::Point2i pt(_pt.x, _pt.y);
cv::rectangle(debug_frame,
cv::Rect(pt - cv::Point2i(size / 2, size / 2), cv::Size(size, size)),
@@ -277,10 +277,10 @@ run_hand_detection(void *ptr)
}
static void
init_keypoint_estimation(HandTracking *htd, onnx_wrap *wrap)
init_keypoint_estimation(HandTracking *hgt, onnx_wrap *wrap)
{
std::filesystem::path path = htd->models_folder;
std::filesystem::path path = hgt->models_folder;
path /= "grayscale_keypoint_simdr.onnx";
@@ -337,7 +337,7 @@ run_keypoint_estimation(void *ptr)
keypoint_estimation_run_info *info = (keypoint_estimation_run_info *)ptr;
onnx_wrap *wrap = &info->view->keypoint[info->hand_idx];
struct HandTracking *htd = info->view->htd;
struct HandTracking *hgt = info->view->hgt;
cv::Mat &debug = info->view->debug_out_to_this;
@@ -415,7 +415,7 @@ run_keypoint_estimation(void *ptr)
tan_space.kps[i] = raycoord(info->view, loc);
}
if (htd->debug_scribble) {
if (hgt->debug_scribble) {
for (int finger = 0; finger < 5; finger++) {
cv::Point last = {(int)keypoints_global[0].x, (int)keypoints_global[0].y};
for (int joint = 0; joint < 4; joint++) {
@@ -439,10 +439,10 @@ run_keypoint_estimation(void *ptr)
static void
init_keypoint_estimation_new(HandTracking *htd, onnx_wrap *wrap)
init_keypoint_estimation_new(HandTracking *hgt, onnx_wrap *wrap)
{
std::filesystem::path path = htd->models_folder;
std::filesystem::path path = hgt->models_folder;
path /= "grayscale_keypoint_new.onnx";
@@ -468,7 +468,7 @@ init_keypoint_estimation_new(HandTracking *htd, onnx_wrap *wrap)
ORT(CreateCpuMemoryInfo(OrtArenaAllocator, OrtMemTypeDefault, &wrap->meminfo));
// HT_DEBUG(this->device, "Loading hand detection model from file '%s'", path.c_str());
// HG_DEBUG(this->device, "Loading hand detection model from file '%s'", path.c_str());
ORT(CreateSession(wrap->env, path.c_str(), opts, &wrap->session));
assert(wrap->session != NULL);
@@ -502,7 +502,7 @@ run_keypoint_estimation_new(void *ptr)
keypoint_estimation_run_info *info = (keypoint_estimation_run_info *)ptr;
onnx_wrap *wrap = &info->view->keypoint[info->hand_idx];
struct HandTracking *htd = info->view->htd;
struct HandTracking *hgt = info->view->hgt;
// Factor out starting here
@@ -594,7 +594,7 @@ run_keypoint_estimation_new(void *ptr)
tan_space.kps[i] = raycoord(info->view, loc);
}
if (htd->debug_scribble) {
if (hgt->debug_scribble) {
for (int finger = 0; finger < 5; finger++) {
cv::Point last = {(int)keypoints_global[0].x, (int)keypoints_global[0].y};
for (int joint = 0; joint < 4; joint++) {
@@ -665,7 +665,7 @@ run_hand_detection_ncnn(void *ptr)
{
ht_view *view = (ht_view *)ptr;
HandTracking *htd = view->htd;
HandTracking *hgt = view->hgt;
onnx_wrap *wrap = &view->detection;
cv::Mat &data_400x640 = view->run_model_on_this;
@@ -678,7 +678,7 @@ run_hand_detection_ncnn(void *ptr)
ncnn_option_t opt = ncnn_option_create();
ncnn_option_set_use_vulkan_compute(opt, 1);
ncnn_extractor_t ex = ncnn_extractor_create(htd->net);
ncnn_extractor_t ex = ncnn_extractor_create(hgt->net);
ncnn_extractor_set_option(ex, opt);
@@ -761,7 +761,7 @@ run_keypoint_estimation_new_ncnn(void *ptr)
XRT_TRACE_MARKER();
keypoint_estimation_run_info *info = (keypoint_estimation_run_info *)ptr;
struct HandTracking *htd = info->view->htd;
struct HandTracking *hgt = info->view->hgt;
// Factor out starting here
@@ -816,7 +816,7 @@ run_keypoint_estimation_new_ncnn(void *ptr)
ncnn_option_t opt = ncnn_option_create();
ncnn_option_set_use_vulkan_compute(opt, 1);
ncnn_extractor_t ex = ncnn_extractor_create(htd->net_keypoint);
ncnn_extractor_t ex = ncnn_extractor_create(hgt->net_keypoint);
ncnn_extractor_set_option(ex, opt);
@@ -860,7 +860,7 @@ run_keypoint_estimation_new_ncnn(void *ptr)
tan_space.kps[i] = raycoord(info->view, loc);
}
if (htd->debug_scribble) {
if (hgt->debug_scribble) {
for (int finger = 0; finger < 5; finger++) {
cv::Point last = {(int)keypoints_global[0].x, (int)keypoints_global[0].y};
for (int joint = 0; joint < 4; joint++) {

View file

@@ -21,18 +21,18 @@ static const enum xrt_space_relation_flags valid_flags_ht = (enum xrt_space_rela
XRT_SPACE_RELATION_POSITION_VALID_BIT | XRT_SPACE_RELATION_POSITION_TRACKED_BIT);
static void
hgJointDisparityMath(struct HandTracking *htd, Hand2D *hand_in_left, Hand2D *hand_in_right, Hand3D *out_hand)
hgJointDisparityMath(struct HandTracking *hgt, Hand2D *hand_in_left, Hand2D *hand_in_right, Hand3D *out_hand)
{
for (int i = 0; i < 21; i++) {
// Believe it or not, this is where the 3D stuff happens!
float t = htd->baseline / (hand_in_left->kps[i].x - hand_in_right->kps[i].x);
float t = hgt->baseline / (hand_in_left->kps[i].x - hand_in_right->kps[i].x);
out_hand->kps[i].z = -t;
out_hand->kps[i].x = (hand_in_left->kps[i].x * t);
out_hand->kps[i].y = -hand_in_left->kps[i].y * t;
out_hand->kps[i].x += htd->baseline + (hand_in_right->kps[i].x * t);
out_hand->kps[i].x += hgt->baseline + (hand_in_right->kps[i].x * t);
out_hand->kps[i].y += -hand_in_right->kps[i].y * t;
out_hand->kps[i].x *= .5;
@@ -45,23 +45,23 @@ hgJointDisparityMath(struct HandTracking *htd, Hand2D *hand_in_left, Hand2D *han
*/
static bool
getCalibration(struct HandTracking *htd, t_stereo_camera_calibration *calibration)
getCalibration(struct HandTracking *hgt, t_stereo_camera_calibration *calibration)
{
xrt::auxiliary::tracking::StereoCameraCalibrationWrapper wrap(calibration);
xrt_vec3 trans = {(float)wrap.camera_translation_mat(0, 0), (float)wrap.camera_translation_mat(1, 0),
(float)wrap.camera_translation_mat(2, 0)};
htd->baseline = m_vec3_len(trans);
HT_DEBUG(htd, "I think the baseline is %f meters!", htd->baseline);
hgt->baseline = m_vec3_len(trans);
HG_DEBUG(hgt, "I think the baseline is %f meters!", hgt->baseline);
// Note, this assumes camera 0 is the left camera and camera 1 is the right camera.
// If you find one with the opposite arrangement, you'll need to invert htd->baseline, and look at
// If you find one with the opposite arrangement, you'll need to invert hgt->baseline, and look at
// hgJointDisparityMath
htd->use_fisheye = wrap.view[0].use_fisheye;
hgt->use_fisheye = wrap.view[0].use_fisheye;
if (htd->use_fisheye) {
HT_DEBUG(htd, "I think the cameras are fisheye!");
if (hgt->use_fisheye) {
HG_DEBUG(hgt, "I think the cameras are fisheye!");
} else {
HT_DEBUG(htd, "I think the cameras are not fisheye!");
HG_DEBUG(hgt, "I think the cameras are not fisheye!");
}
cv::Matx34d P1;
@@ -70,7 +70,7 @@ getCalibration(struct HandTracking *htd, t_stereo_camera_calibration *calibratio
cv::Matx44d Q;
// We only want R1 and R2, we don't care about anything else
if (htd->use_fisheye) {
if (hgt->use_fisheye) {
cv::fisheye::stereoRectify(wrap.view[0].intrinsics_mat, // cameraMatrix1
wrap.view[0].distortion_fisheye_mat, // distCoeffs1
wrap.view[1].intrinsics_mat, // cameraMatrix2
@@ -78,8 +78,8 @@ getCalibration(struct HandTracking *htd, t_stereo_camera_calibration *calibratio
wrap.view[0].image_size_pixels_cv, // imageSize*
wrap.camera_rotation_mat, // R
wrap.camera_translation_mat, // T
htd->views[0].rotate_camera_to_stereo_camera, // R1
htd->views[1].rotate_camera_to_stereo_camera, // R2
hgt->views[0].rotate_camera_to_stereo_camera, // R1
hgt->views[1].rotate_camera_to_stereo_camera, // R2
P1, // P1
P2, // P2
Q, // Q
@@ -93,8 +93,8 @@ getCalibration(struct HandTracking *htd, t_stereo_camera_calibration *calibratio
wrap.view[0].image_size_pixels_cv, // imageSize*
wrap.camera_rotation_mat, // R
wrap.camera_translation_mat, // T
htd->views[0].rotate_camera_to_stereo_camera, // R1
htd->views[1].rotate_camera_to_stereo_camera, // R2
hgt->views[0].rotate_camera_to_stereo_camera, // R1
hgt->views[1].rotate_camera_to_stereo_camera, // R2
P1, // P1
P2, // P2
Q, // Q
@@ -107,33 +107,33 @@ getCalibration(struct HandTracking *htd, t_stereo_camera_calibration *calibratio
//* Good enough guess that view 0 and view 1 are the same size.
for (int i = 0; i < 2; i++) {
htd->views[i].cameraMatrix = wrap.view[i].intrinsics_mat;
hgt->views[i].cameraMatrix = wrap.view[i].intrinsics_mat;
if (htd->use_fisheye) {
htd->views[i].distortion = wrap.view[i].distortion_fisheye_mat;
if (hgt->use_fisheye) {
hgt->views[i].distortion = wrap.view[i].distortion_fisheye_mat;
} else {
htd->views[i].distortion = wrap.view[i].distortion_mat;
hgt->views[i].distortion = wrap.view[i].distortion_mat;
}
if (htd->log_level <= U_LOGGING_DEBUG) {
HT_DEBUG(htd, "R%d ->", i);
std::cout << htd->views[i].rotate_camera_to_stereo_camera << std::endl;
if (hgt->log_level <= U_LOGGING_DEBUG) {
HG_DEBUG(hgt, "R%d ->", i);
std::cout << hgt->views[i].rotate_camera_to_stereo_camera << std::endl;
HT_DEBUG(htd, "K%d ->", i);
std::cout << htd->views[i].cameraMatrix << std::endl;
HG_DEBUG(hgt, "K%d ->", i);
std::cout << hgt->views[i].cameraMatrix << std::endl;
HT_DEBUG(htd, "D%d ->", i);
std::cout << htd->views[i].distortion << std::endl;
HG_DEBUG(hgt, "D%d ->", i);
std::cout << hgt->views[i].distortion << std::endl;
}
}
htd->calibration_one_view_size_px.w = wrap.view[0].image_size_pixels.w;
htd->calibration_one_view_size_px.h = wrap.view[0].image_size_pixels.h;
hgt->calibration_one_view_size_px.w = wrap.view[0].image_size_pixels.w;
hgt->calibration_one_view_size_px.h = wrap.view[0].image_size_pixels.h;
htd->last_frame_one_view_size_px = htd->calibration_one_view_size_px;
htd->multiply_px_coord_for_undistort = 1.0f;
hgt->last_frame_one_view_size_px = hgt->calibration_one_view_size_px;
hgt->multiply_px_coord_for_undistort = 1.0f;
cv::Matx33d rotate_stereo_camera_to_left_camera = htd->views[0].rotate_camera_to_stereo_camera.inv();
cv::Matx33d rotate_stereo_camera_to_left_camera = hgt->views[0].rotate_camera_to_stereo_camera.inv();
xrt_matrix_3x3 s;
s.v[0] = rotate_stereo_camera_to_left_camera(0, 0);
@@ -156,46 +156,46 @@ getCalibration(struct HandTracking *htd, t_stereo_camera_calibration *calibratio
// have swapped row-major and col-major - remember, if you transpose a rotation matrix, you get its inverse.
// Doesn't matter that I don't understand - non-inverted looks definitely wrong, inverted looks definitely
// right.
math_quat_invert(&tmp, &htd->stereo_camera_to_left_camera);
math_quat_invert(&tmp, &hgt->stereo_camera_to_left_camera);
return true;
}
static void
getModelsFolder(struct HandTracking *htd)
getModelsFolder(struct HandTracking *hgt)
{
// Please bikeshed me on this! I don't know where is the best place to put this stuff!
#if 0
char exec_location[1024] = {};
readlink("/proc/self/exe", exec_location, 1024);
HT_DEBUG(htd, "Exec at %s\n", exec_location);
HG_DEBUG(hgt, "Exec at %s\n", exec_location);
int end = 0;
while (exec_location[end] != '\0') {
HT_DEBUG(htd, "%d", end);
HG_DEBUG(hgt, "%d", end);
end++;
}
while (exec_location[end] != '/' && end != 0) {
HT_DEBUG(htd, "%d %c", end, exec_location[end]);
HG_DEBUG(hgt, "%d %c", end, exec_location[end]);
exec_location[end] = '\0';
end--;
}
strcat(exec_location, "../share/monado/hand-tracking-models/");
strcpy(htd->startup_config.model_slug, exec_location);
strcpy(hgt->startup_config.model_slug, exec_location);
#else
const char *xdg_home = getenv("XDG_CONFIG_HOME");
const char *home = getenv("HOME");
if (xdg_home != NULL) {
strcpy(htd->models_folder, xdg_home);
strcpy(hgt->models_folder, xdg_home);
} else if (home != NULL) {
strcpy(htd->models_folder, home);
strcpy(hgt->models_folder, home);
} else {
assert(false);
}
strcat(htd->models_folder, "/.local/share/monado/hand-tracking-models/");
strcat(hgt->models_folder, "/.local/share/monado/hand-tracking-models/");
#endif
}
@@ -227,7 +227,7 @@ applyThumbIndexDrag(Hand3D *hand)
}
static void
applyJointWidths(struct HandTracking *htd, struct xrt_hand_joint_set *set)
applyJointWidths(struct HandTracking *hgt, struct xrt_hand_joint_set *set)
{
// Thanks to Nick Klingensmith for this idea
struct xrt_hand_joint_value *gr = set->values.hand_joint_set_default;
@@ -259,13 +259,13 @@ applyJointWidths(struct HandTracking *htd, struct xrt_hand_joint_set *set)
}
static bool
handle_changed_image_size(HandTracking *htd, xrt_size &new_one_view_size)
handle_changed_image_size(HandTracking *hgt, xrt_size &new_one_view_size)
{
int gcd_calib = std::gcd(htd->calibration_one_view_size_px.h, htd->calibration_one_view_size_px.w);
int gcd_calib = std::gcd(hgt->calibration_one_view_size_px.h, hgt->calibration_one_view_size_px.w);
int gcd_new = std::gcd(new_one_view_size.h, new_one_view_size.w);
int lcm_h_calib = htd->calibration_one_view_size_px.h / gcd_calib;
int lcm_w_calib = htd->calibration_one_view_size_px.w / gcd_calib;
int lcm_h_calib = hgt->calibration_one_view_size_px.h / gcd_calib;
int lcm_w_calib = hgt->calibration_one_view_size_px.w / gcd_calib;
int lcm_h_new = new_one_view_size.h / gcd_new;
int lcm_w_new = new_one_view_size.w / gcd_new;
@@ -273,13 +273,13 @@ handle_changed_image_size(HandTracking *htd, xrt_size &new_one_view_size)
bool good = (lcm_h_calib == lcm_h_new) && (lcm_w_calib == lcm_w_new);
if (!good) {
HT_WARN(htd, "Can't process this frame, wrong aspect ratio. What we wanted: %dx%d, what we got: %dx%d",
HG_WARN(hgt, "Can't process this frame, wrong aspect ratio. What we wanted: %dx%d, what we got: %dx%d",
lcm_h_calib, lcm_w_calib, lcm_h_new, lcm_w_new);
return false;
}
htd->multiply_px_coord_for_undistort = (float)htd->calibration_one_view_size_px.h / (float)new_one_view_size.h;
htd->last_frame_one_view_size_px = new_one_view_size;
hgt->multiply_px_coord_for_undistort = (float)hgt->calibration_one_view_size_px.h / (float)new_one_view_size.h;
hgt->last_frame_one_view_size_px = new_one_view_size;
return true;
}
@@ -330,9 +330,9 @@ HandTracking::cCallbackProcess(struct t_hand_tracking_sync *ht_sync,
{
XRT_TRACE_MARKER();
HandTracking *htd = (struct HandTracking *)ht_sync;
HandTracking *hgt = (struct HandTracking *)ht_sync;
htd->current_frame_timestamp = left_frame->timestamp;
hgt->current_frame_timestamp = left_frame->timestamp;
struct xrt_hand_joint_set *out_xrt_hands[2] = {out_left_hand, out_right_hand};
@@ -347,61 +347,61 @@ HandTracking::cCallbackProcess(struct t_hand_tracking_sync *ht_sync,
const int full_height = left_frame->height;
const int full_width = left_frame->width * 2;
if ((left_frame->width != (uint32_t)htd->last_frame_one_view_size_px.w) ||
(left_frame->height != (uint32_t)htd->last_frame_one_view_size_px.h)) {
if ((left_frame->width != (uint32_t)hgt->last_frame_one_view_size_px.w) ||
(left_frame->height != (uint32_t)hgt->last_frame_one_view_size_px.h)) {
xrt_size new_one_view_size;
new_one_view_size.h = left_frame->height;
new_one_view_size.w = left_frame->width;
// Could be an assert, should never happen.
if (!handle_changed_image_size(htd, new_one_view_size)) {
if (!handle_changed_image_size(hgt, new_one_view_size)) {
return;
}
}
const int view_width = htd->last_frame_one_view_size_px.w;
const int view_height = htd->last_frame_one_view_size_px.h;
const int view_width = hgt->last_frame_one_view_size_px.w;
const int view_height = hgt->last_frame_one_view_size_px.h;
const cv::Size full_size = cv::Size(full_width, full_height);
const cv::Size view_size = cv::Size(view_width, view_height);
const cv::Point view_offsets[2] = {cv::Point(0, 0), cv::Point(view_width, 0)};
htd->views[0].run_model_on_this = cv::Mat(view_size, CV_8UC1, left_frame->data, left_frame->stride);
htd->views[1].run_model_on_this = cv::Mat(view_size, CV_8UC1, right_frame->data, right_frame->stride);
hgt->views[0].run_model_on_this = cv::Mat(view_size, CV_8UC1, left_frame->data, left_frame->stride);
hgt->views[1].run_model_on_this = cv::Mat(view_size, CV_8UC1, right_frame->data, right_frame->stride);
*out_timestamp_ns = htd->current_frame_timestamp; // No filtering, fine to do this now. Also just a reminder
*out_timestamp_ns = hgt->current_frame_timestamp; // No filtering, fine to do this now. Also just a reminder
// that this took you 2 HOURS TO DEBUG THAT ONE TIME.
htd->debug_scribble = u_sink_debug_is_active(&htd->debug_sink);
hgt->debug_scribble = u_sink_debug_is_active(&hgt->debug_sink);
cv::Mat debug_output = {};
xrt_frame *debug_frame = nullptr;
if (htd->debug_scribble) {
if (hgt->debug_scribble) {
u_frame_create_one_off(XRT_FORMAT_R8G8B8, full_width, full_height, &debug_frame);
debug_frame->timestamp = htd->current_frame_timestamp;
debug_frame->timestamp = hgt->current_frame_timestamp;
debug_output = cv::Mat(full_size, CV_8UC3, debug_frame->data, debug_frame->stride);
cv::cvtColor(htd->views[0].run_model_on_this, debug_output(cv::Rect(view_offsets[0], view_size)),
cv::cvtColor(hgt->views[0].run_model_on_this, debug_output(cv::Rect(view_offsets[0], view_size)),
cv::COLOR_GRAY2BGR);
cv::cvtColor(htd->views[1].run_model_on_this, debug_output(cv::Rect(view_offsets[1], view_size)),
cv::cvtColor(hgt->views[1].run_model_on_this, debug_output(cv::Rect(view_offsets[1], view_size)),
cv::COLOR_GRAY2BGR);
htd->views[0].debug_out_to_this = debug_output(cv::Rect(view_offsets[0], view_size));
htd->views[1].debug_out_to_this = debug_output(cv::Rect(view_offsets[1], view_size));
hgt->views[0].debug_out_to_this = debug_output(cv::Rect(view_offsets[0], view_size));
hgt->views[1].debug_out_to_this = debug_output(cv::Rect(view_offsets[1], view_size));
}
u_worker_group_push(htd->group, run_hand_detection, &htd->views[0]);
u_worker_group_push(htd->group, run_hand_detection, &htd->views[1]);
u_worker_group_wait_all(htd->group);
u_worker_group_push(hgt->group, run_hand_detection, &hgt->views[0]);
u_worker_group_push(hgt->group, run_hand_detection, &hgt->views[1]);
u_worker_group_wait_all(hgt->group);
for (int hand_idx = 0; hand_idx < 2; hand_idx++) {
if (!(htd->views[0].det_outputs[hand_idx].found && htd->views[1].det_outputs[hand_idx].found)) {
if (!(hgt->views[0].det_outputs[hand_idx].found && hgt->views[1].det_outputs[hand_idx].found)) {
// If we don't find this hand in *both* views
out_xrt_hands[hand_idx]->is_active = false;
continue;
@@ -410,29 +410,29 @@ HandTracking::cCallbackProcess(struct t_hand_tracking_sync *ht_sync,
for (int view_idx = 0; view_idx < 2; view_idx++) {
struct keypoint_estimation_run_info &inf = htd->views[view_idx].run_info[hand_idx];
inf.view = &htd->views[view_idx];
struct keypoint_estimation_run_info &inf = hgt->views[view_idx].run_info[hand_idx];
inf.view = &hgt->views[view_idx];
inf.hand_idx = hand_idx;
u_worker_group_push(htd->group, htd->keypoint_estimation_run_func,
&htd->views[view_idx].run_info[hand_idx]);
u_worker_group_push(hgt->group, hgt->keypoint_estimation_run_func,
&hgt->views[view_idx].run_info[hand_idx]);
}
}
u_worker_group_wait_all(htd->group);
u_worker_group_wait_all(hgt->group);
for (int hand_idx = 0; hand_idx < 2; hand_idx++) {
if (!out_xrt_hands[hand_idx]->is_active) {
htd->last_frame_hand_detected[hand_idx] = false;
hgt->last_frame_hand_detected[hand_idx] = false;
continue;
}
kine::kinematic_hand_4f *hand = htd->kinematic_hands[hand_idx];
if (!htd->last_frame_hand_detected[hand_idx]) {
kine::init_hardcoded_statics(hand, htd->hand_size / 100.0f);
kine::kinematic_hand_4f *hand = hgt->kinematic_hands[hand_idx];
if (!hgt->last_frame_hand_detected[hand_idx]) {
kine::init_hardcoded_statics(hand, hgt->hand_size / 100.0f);
}
Hand2D *hand_in_left_view = &htd->views[0].keypoint_outputs[hand_idx].hand_tan_space;
Hand2D *hand_in_right_view = &htd->views[1].keypoint_outputs[hand_idx].hand_tan_space;
Hand2D *hand_in_left_view = &hgt->views[0].keypoint_outputs[hand_idx].hand_tan_space;
Hand2D *hand_in_right_view = &hgt->views[1].keypoint_outputs[hand_idx].hand_tan_space;
Hand3D hand_3d;
@@ -441,22 +441,22 @@ HandTracking::cCallbackProcess(struct t_hand_tracking_sync *ht_sync,
applyThumbIndexDrag(&hand_3d);
applyJointWidths(htd, put_in_set);
applyJointWidths(hgt, put_in_set);
hgJointDisparityMath(htd, hand_in_left_view, hand_in_right_view, &hand_3d);
hgJointDisparityMath(hgt, hand_in_left_view, hand_in_right_view, &hand_3d);
kine::optimize_new_frame(hand_3d.kps, hand, put_in_set, hand_idx);
math_pose_identity(&put_in_set->hand_pose.pose);
switch (htd->output_space) {
switch (hgt->output_space) {
case HT_OUTPUT_SPACE_LEFT_CAMERA: {
put_in_set->hand_pose.pose.orientation = htd->stereo_camera_to_left_camera;
put_in_set->hand_pose.pose.orientation = hgt->stereo_camera_to_left_camera;
} break;
case HT_OUTPUT_SPACE_CENTER_OF_STEREO_CAMERA: {
put_in_set->hand_pose.pose.orientation.w = 1.0;
put_in_set->hand_pose.pose.position.x = -htd->baseline / 2;
put_in_set->hand_pose.pose.position.x = -hgt->baseline / 2;
} break;
}
@@ -465,10 +465,10 @@ HandTracking::cCallbackProcess(struct t_hand_tracking_sync *ht_sync,
u_frame_times_widget_push_sample(&htd->ft_widget, htd->current_frame_timestamp);
u_frame_times_widget_push_sample(&hgt->ft_widget, hgt->current_frame_timestamp);
if (htd->debug_scribble) {
u_sink_debug_push_frame(&htd->debug_sink, debug_frame);
if (hgt->debug_scribble) {
u_sink_debug_push_frame(&hgt->debug_sink, debug_frame);
xrt_frame_reference(&debug_frame, NULL);
}
}
@@ -495,97 +495,97 @@ t_hand_tracking_sync_mercury_create(struct t_stereo_camera_calibration *calib, h
{
XRT_TRACE_MARKER();
auto htd = new xrt::tracking::hand::mercury::HandTracking();
auto hgt = new xrt::tracking::hand::mercury::HandTracking();
// Setup logging first. We like logging.
htd->log_level = xrt::tracking::hand::mercury::debug_get_log_option_mercury_log();
hgt->log_level = xrt::tracking::hand::mercury::debug_get_log_option_mercury_log();
bool use_simdr = xrt::tracking::hand::mercury::debug_get_bool_option_mercury_use_simdr_keypoint();
htd->output_space = output_space;
hgt->output_space = output_space;
/*
* Get configuration
*/
assert(calib != NULL);
htd->calib = NULL;
hgt->calib = NULL;
// We have to reference it, getCalibration points at it.
t_stereo_camera_calibration_reference(&htd->calib, calib);
getCalibration(htd, calib);
getModelsFolder(htd);
t_stereo_camera_calibration_reference(&hgt->calib, calib);
getCalibration(hgt, calib);
getModelsFolder(hgt);
#ifdef USE_NCNN
{
htd->net = ncnn_net_create();
hgt->net = ncnn_net_create();
ncnn_option_t opt = ncnn_option_create();
ncnn_option_set_use_vulkan_compute(opt, 1);
ncnn_net_set_option(htd->net, opt);
ncnn_net_load_param(htd->net, "/3/clones/ncnn/batch_size_2.param");
ncnn_net_load_model(htd->net, "/3/clones/ncnn/batch_size_2.bin");
ncnn_net_set_option(hgt->net, opt);
ncnn_net_load_param(hgt->net, "/3/clones/ncnn/batch_size_2.param");
ncnn_net_load_model(hgt->net, "/3/clones/ncnn/batch_size_2.bin");
}
{
htd->net_keypoint = ncnn_net_create();
hgt->net_keypoint = ncnn_net_create();
ncnn_option_t opt = ncnn_option_create();
ncnn_option_set_use_vulkan_compute(opt, 1);
ncnn_net_set_option(htd->net_keypoint, opt);
ncnn_net_set_option(hgt->net_keypoint, opt);
ncnn_net_load_param(
htd->net_keypoint,
hgt->net_keypoint,
"/home/moses/.local/share/monado/hand-tracking-models/grayscale_keypoint_new.param");
ncnn_net_load_model(htd->net_keypoint,
ncnn_net_load_model(hgt->net_keypoint,
"/home/moses/.local/share/monado/hand-tracking-models/grayscale_keypoint_new.bin");
}
#endif
htd->views[0].htd = htd;
htd->views[1].htd = htd; // :)
hgt->views[0].hgt = hgt;
hgt->views[1].hgt = hgt; // :)
init_hand_detection(htd, &htd->views[0].detection);
init_hand_detection(htd, &htd->views[1].detection);
init_hand_detection(hgt, &hgt->views[0].detection);
init_hand_detection(hgt, &hgt->views[1].detection);
if (use_simdr) {
init_keypoint_estimation(htd, &htd->views[0].keypoint[0]);
init_keypoint_estimation(htd, &htd->views[0].keypoint[1]);
init_keypoint_estimation(hgt, &hgt->views[0].keypoint[0]);
init_keypoint_estimation(hgt, &hgt->views[0].keypoint[1]);
init_keypoint_estimation(htd, &htd->views[1].keypoint[0]);
init_keypoint_estimation(htd, &htd->views[1].keypoint[1]);
htd->keypoint_estimation_run_func = xrt::tracking::hand::mercury::run_keypoint_estimation;
init_keypoint_estimation(hgt, &hgt->views[1].keypoint[0]);
init_keypoint_estimation(hgt, &hgt->views[1].keypoint[1]);
hgt->keypoint_estimation_run_func = xrt::tracking::hand::mercury::run_keypoint_estimation;
} else {
init_keypoint_estimation_new(htd, &htd->views[0].keypoint[0]);
init_keypoint_estimation_new(htd, &htd->views[0].keypoint[1]);
init_keypoint_estimation_new(hgt, &hgt->views[0].keypoint[0]);
init_keypoint_estimation_new(hgt, &hgt->views[0].keypoint[1]);
init_keypoint_estimation_new(htd, &htd->views[1].keypoint[0]);
init_keypoint_estimation_new(htd, &htd->views[1].keypoint[1]);
htd->keypoint_estimation_run_func = xrt::tracking::hand::mercury::run_keypoint_estimation_new;
init_keypoint_estimation_new(hgt, &hgt->views[1].keypoint[0]);
init_keypoint_estimation_new(hgt, &hgt->views[1].keypoint[1]);
hgt->keypoint_estimation_run_func = xrt::tracking::hand::mercury::run_keypoint_estimation_new;
}
htd->views[0].view = 0;
htd->views[1].view = 1;
hgt->views[0].view = 0;
hgt->views[1].view = 1;
int num_threads = 4;
htd->pool = u_worker_thread_pool_create(num_threads - 1, num_threads);
htd->group = u_worker_group_create(htd->pool);
hgt->pool = u_worker_thread_pool_create(num_threads - 1, num_threads);
hgt->group = u_worker_group_create(hgt->pool);
htd->hand_size = 9.5864;
xrt::tracking::hand::mercury::kine::alloc_kinematic_hand(&htd->kinematic_hands[0]);
xrt::tracking::hand::mercury::kine::alloc_kinematic_hand(&htd->kinematic_hands[1]);
hgt->hand_size = 9.5864;
xrt::tracking::hand::mercury::kine::alloc_kinematic_hand(&hgt->kinematic_hands[0]);
xrt::tracking::hand::mercury::kine::alloc_kinematic_hand(&hgt->kinematic_hands[1]);
u_frame_times_widget_init(&htd->ft_widget, 10.0f, 10.0f);
u_frame_times_widget_init(&hgt->ft_widget, 10.0f, 10.0f);
u_var_add_root(htd, "Camera-based Hand Tracker", true);
u_var_add_root(hgt, "Camera-based Hand Tracker", true);
u_var_add_f32(htd, &htd->hand_size, "Hand size (Centimeters between wrist and middle-proximal joint)");
u_var_add_ro_f32(htd, &htd->ft_widget.fps, "FPS!");
u_var_add_f32_timing(htd, htd->ft_widget.debug_var, "times!");
u_var_add_f32(hgt, &hgt->hand_size, "Hand size (Centimeters between wrist and middle-proximal joint)");
u_var_add_ro_f32(hgt, &hgt->ft_widget.fps, "FPS!");
u_var_add_f32_timing(hgt, hgt->ft_widget.debug_var, "times!");
u_var_add_sink_debug(htd, &htd->debug_sink, "i");
u_var_add_sink_debug(hgt, &hgt->debug_sink, "i");
HT_DEBUG(htd, "Hand Tracker initialized!");
HG_DEBUG(hgt, "Hand Tracker initialized!");
return &htd->base;
return &hgt->base;
}

View file

@@ -51,11 +51,11 @@ using namespace xrt::auxiliary::util;
DEBUG_GET_ONCE_LOG_OPTION(mercury_log, "MERCURY_LOG", U_LOGGING_WARN)
DEBUG_GET_ONCE_BOOL_OPTION(mercury_use_simdr_keypoint, "MERCURY_USE_SIMDR_KEYPOINT", false)
#define HT_TRACE(htd, ...) U_LOG_IFL_T(htd->log_level, __VA_ARGS__)
#define HT_DEBUG(htd, ...) U_LOG_IFL_D(htd->log_level, __VA_ARGS__)
#define HT_INFO(htd, ...) U_LOG_IFL_I(htd->log_level, __VA_ARGS__)
#define HT_WARN(htd, ...) U_LOG_IFL_W(htd->log_level, __VA_ARGS__)
#define HT_ERROR(htd, ...) U_LOG_IFL_E(htd->log_level, __VA_ARGS__)
#define HG_TRACE(hgt, ...) U_LOG_IFL_T(hgt->log_level, __VA_ARGS__)
#define HG_DEBUG(hgt, ...) U_LOG_IFL_D(hgt->log_level, __VA_ARGS__)
#define HG_INFO(hgt, ...) U_LOG_IFL_I(hgt->log_level, __VA_ARGS__)
#define HG_WARN(hgt, ...) U_LOG_IFL_W(hgt->log_level, __VA_ARGS__)
#define HG_ERROR(hgt, ...) U_LOG_IFL_E(hgt->log_level, __VA_ARGS__)
#undef USE_NCNN
@@ -138,7 +138,7 @@ struct keypoint_estimation_run_info
struct ht_view
{
HandTracking *htd;
HandTracking *hgt;
onnx_wrap detection;
onnx_wrap keypoint[2];
int view;