d/vf: Add video file playback driver

Christoph Haag 2020-11-26 20:16:02 +01:00 committed by Jakob Bornecrantz
parent e480352cee
commit f0d21c1dcc
12 changed files with 599 additions and 1 deletion

View file

@@ -93,6 +93,12 @@ if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
find_package(OpenGL COMPONENTS GLX)
pkg_search_module(DBUS dbus-1)
pkg_check_modules(GST
gstreamer-1.0
gstreamer-app-1.0
gstreamer-video-1.0
)
pkg_check_modules(SURVIVE IMPORTED_TARGET survive)
else()
find_package(OpenGL)
@@ -117,6 +123,7 @@ cmake_dependent_option(XRT_HAVE_OPENGL "Enable OpenGL Graphics API support" ON "
cmake_dependent_option(XRT_HAVE_OPENGLES "Enable OpenGL-ES Graphics API support" ON "OpenGLES_FOUND" OFF)
cmake_dependent_option(XRT_HAVE_EGL "Enable OpenGL on EGL Graphics API support" ON "EGL_FOUND; XRT_HAVE_OPENGL OR XRT_HAVE_OPENGLES" OFF)
cmake_dependent_option(XRT_HAVE_DBUS "Enable dbus support (for BLE support)" ON "DBUS_FOUND" OFF)
cmake_dependent_option(XRT_HAVE_VF "Enable GStreamer support (for video file playback)" ON "GST_FOUND" OFF)
cmake_dependent_option(XRT_FEATURE_COMPOSITOR_MAIN "Build main compositor host functionality" ON "XRT_HAVE_VULKAN; XRT_HAVE_WAYLAND OR XRT_HAVE_XCB OR ANDROID OR WIN32" OFF)
cmake_dependent_option(XRT_FEATURE_OPENXR "Build OpenXR runtime target" ON "XRT_FEATURE_COMPOSITOR_MAIN" OFF)
cmake_dependent_option(XRT_FEATURE_SERVICE "Enable separate service module for OpenXR runtime" ON "NOT WIN32" OFF)
@@ -273,6 +280,7 @@ message(STATUS "# OPENGLES: ${XRT_HAVE_OPENGLES}")
message(STATUS "# VULKAN: ${XRT_HAVE_VULKAN}")
message(STATUS "# EGL: ${XRT_HAVE_EGL}")
message(STATUS "# DBUS: ${XRT_HAVE_DBUS}")
message(STATUS "# VF: ${XRT_HAVE_VF}")
message(STATUS "# LIBUSB: ${XRT_HAVE_LIBUSB}")
message(STATUS "# JPEG: ${XRT_HAVE_JPEG}")
message(STATUS "# OPENCV: ${XRT_HAVE_OPENCV}")

View file

@@ -70,6 +70,11 @@ vulkan = dependency('vulkan', required: true)
zlib = dependency('zlib', required: false)
survive = dependency('survive', required: false)
dbus = dependency('dbus-1', required: get_option('dbus'))
gst = dependency('gstreamer-1.0', required: false)
gst_app = dependency('gstreamer-app-1.0', required: false)
gst_video = dependency('gstreamer-video-1.0', required: false)
gst_found = gst.found() and gst_app.found() and gst_video.found()
opencv = dependency('opencv4', required: false)
if not opencv.found()
@@ -191,6 +196,12 @@ if has_v4l2_header and ('auto' in drivers or 'v4l2' in drivers)
endif
endif
if gst_found and ('auto' in drivers or 'vf' in drivers)
if 'vf' not in drivers
drivers += ['vf']
endif
endif
if survive.found() and ('survive' in drivers)
if 'survive' not in drivers
drivers += ['survive']

View file

@@ -3,7 +3,7 @@
option('drivers',
type: 'array',
choices: ['auto', 'dummy', 'hdk', 'hydra', 'ns', 'ohmd', 'psmv', 'psvr', 'rs', 'v4l2', 'vive', 'survive', 'daydream', 'arduino', 'remote', 'handtracking'],
choices: ['auto', 'dummy', 'hdk', 'hydra', 'ns', 'ohmd', 'psmv', 'psvr', 'rs', 'v4l2', 'vf', 'vive', 'survive', 'daydream', 'arduino', 'remote', 'handtracking'],
value: ['auto'],
description: 'Set of drivers to build')

View file

@@ -193,6 +193,18 @@ if(XRT_HAVE_V4L2)
list(APPEND ENABLED_DRIVERS v4l2)
endif()
if(XRT_HAVE_VF)
set(VF_SOURCE_FILES
vf/vf_driver.c
)
add_library(drv_vf STATIC ${VF_SOURCE_FILES})
target_link_libraries(drv_vf PRIVATE xrt-interfaces aux_os ${GST_LIBRARIES})
target_include_directories(drv_vf PRIVATE ${GST_INCLUDE_DIRS})
MESSAGE("GST include ${GST_INCLUDE_DIRS}")
list(APPEND ENABLED_DRIVERS vf)
endif()
if (XRT_BUILD_DRIVER_SURVIVE)
set(SURVIVE_SOURCE_FILES
survive/survive_driver.c

View file

@@ -134,6 +134,16 @@ lib_drv_remote = static_library(
build_by_default: 'remote' in drivers,
)
lib_drv_vf = static_library(
'drv_vf',
files(
'vf/vf_driver.c'
),
include_directories: xrt_include,
dependencies: [aux, gst, gst_app, gst_video],
build_by_default: 'vf' in drivers,
)
lib_drv_v4l2 = static_library(
'drv_v4l2',
files(

View file

@@ -0,0 +1,491 @@
// Copyright 2020, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Video file frameserver implementation
* @author Christoph Haag <christoph.haag@collabora.com>
* @author Pete Black <pblack@collabora.com>
* @author Jakob Bornecrantz <jakob@collabora.com>
* @ingroup drv_vf
*/
#include "os/os_time.h"
#include "os/os_threading.h"
#include "util/u_var.h"
#include "util/u_misc.h"
#include "util/u_debug.h"
#include "util/u_format.h"
#include "util/u_frame.h"
#include <stdio.h>
#include <assert.h>
#include "vf_interface.h"
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/video/video-frame.h>
#include <glib.h>
/*
*
* Defines.
*
*/
/*!
* Spew level logging.
*
* Outputs a line, from the given format string and arguments, only if
* vf_fs::print_spew is true.
* @relates vf_fs
*/
#define V_SPEW(p, ...) \
do { \
if (p->print_spew) { \
fprintf(stderr, "%s - ", __func__); \
fprintf(stderr, __VA_ARGS__); \
fprintf(stderr, "\n"); \
} \
} while (false)
/*!
* Debug level logging.
*
* Outputs a line, from the given format string and arguments, only if
* vf_fs::print_debug is true.
*
* @relates vf_fs
*/
#define V_DEBUG(p, ...) \
do { \
if (p->print_debug) { \
fprintf(stderr, "%s - ", __func__); \
fprintf(stderr, __VA_ARGS__); \
fprintf(stderr, "\n"); \
} \
} while (false)
/*!
* Error level logging.
*
* Outputs a line, from the given format string and arguments.
*
* @relates vf_fs
*/
#define V_ERROR(p, ...) \
do { \
fprintf(stderr, "%s - ", __func__); \
fprintf(stderr, __VA_ARGS__); \
fprintf(stderr, "\n"); \
} while (false)
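/*
 * The two toggles below are read once from the VF_PRINT_SPEW and
 * VF_PRINT_DEBUG environment variables and gate the logging macros above.
 */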
DEBUG_GET_ONCE_BOOL_OPTION(vf_spew, "VF_PRINT_SPEW", false)
DEBUG_GET_ONCE_BOOL_OPTION(vf_debug, "VF_PRINT_DEBUG", false)
/*!
* A frame server operating on a video file.
*
* @implements xrt_frame_node
* @implements xrt_fs
*/
struct vf_fs
{
struct xrt_fs base;
struct os_thread_helper play_thread;
const char *path;
GMainLoop *loop;
GstElement *source;
GstElement *testsink;
bool got_sample;
int width;
int height;
enum xrt_format format;
enum xrt_stereo_format stereo_format;
struct xrt_frame_node node;
struct
{
bool extended_format;
bool timeperframe;
} has;
enum xrt_fs_capture_type capture_type;
struct xrt_frame_sink *sink;
uint32_t selected;
struct xrt_fs_capture_parameters capture_params;
bool is_configured;
bool is_running;
bool print_spew;
bool print_debug;
};
/*!
* Cast to derived type.
*/
static inline struct vf_fs *
vf_fs(struct xrt_fs *xfs)
{
return (struct vf_fs *)xfs;
}
/*
*
* Misc helper functions
*
*/
/*
*
* Exported functions.
*
*/
static bool
vf_fs_enumerate_modes(struct xrt_fs *xfs,
struct xrt_fs_mode **out_modes,
uint32_t *out_count)
{
struct vf_fs *vid = vf_fs(xfs);
struct xrt_fs_mode *modes = U_TYPED_ARRAY_CALLOC(struct xrt_fs_mode, 1);
if (modes == NULL) {
return false;
}
modes[0].width = vid->width;
modes[0].height = vid->height;
modes[0].format = vid->format;
modes[0].stereo_format = vid->stereo_format;
*out_modes = modes;
*out_count = 1;
return true;
}
static bool
vf_fs_configure_capture(struct xrt_fs *xfs,
struct xrt_fs_capture_parameters *cp)
{
// struct vf_fs *vid = vf_fs(xfs);
//! @todo
return false;
}
static bool
vf_fs_stream_start(struct xrt_fs *xfs,
struct xrt_frame_sink *xs,
enum xrt_fs_capture_type capture_type,
uint32_t descriptor_index)
{
struct vf_fs *vid = vf_fs(xfs);
vid->sink = xs;
vid->is_running = true;
vid->capture_type = capture_type;
vid->selected = descriptor_index;
gst_element_set_state(vid->source, GST_STATE_PLAYING);
V_SPEW(vid, "info: Started!");
// we're off to the races!
return true;
}
static bool
vf_fs_stream_stop(struct xrt_fs *xfs)
{
struct vf_fs *vid = vf_fs(xfs);
if (!vid->is_running) {
return true;
}
vid->is_running = false;
gst_element_set_state(vid->source, GST_STATE_PAUSED);
return true;
}
static bool
vf_fs_is_running(struct xrt_fs *xfs)
{
struct vf_fs *vid = vf_fs(xfs);
GstState current = GST_STATE_NULL;
GstState pending;
gst_element_get_state(vid->source, &current, &pending, 0);
return current == GST_STATE_PLAYING;
}
static void
vf_fs_destroy(struct vf_fs *vid)
{
g_main_loop_quit(vid->loop);
os_thread_helper_stop(&vid->play_thread);
os_thread_helper_destroy(&vid->play_thread);
free(vid);
}
static void
vf_fs_node_break_apart(struct xrt_frame_node *node)
{
struct vf_fs *vid = container_of(node, struct vf_fs, node);
vf_fs_stream_stop(&vid->base);
}
static void
vf_fs_node_destroy(struct xrt_frame_node *node)
{
struct vf_fs *vid = container_of(node, struct vf_fs, node);
vf_fs_destroy(vid);
}
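/*
 * Wrap one decoded GStreamer sample in an xrt_frame and push it to the
 * configured sink. The mapped frame is unmapped before this function
 * returns, so sinks must copy any pixel data they want to keep.
 */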
static void
vf_fs_frame(struct vf_fs *vid, GstSample *sample)
{
GstBuffer *buffer;
buffer = gst_sample_get_buffer(sample);
GstCaps *caps = gst_sample_get_caps(sample);
static int seq = 0;
GstVideoFrame frame;
GstVideoInfo info;
gst_video_info_init(&info);
gst_video_info_from_caps(&info, caps);
if (gst_video_frame_map(&frame, &info, buffer, GST_MAP_READ)) {
int plane = 0;
struct xrt_frame *xf = NULL;
u_frame_create_one_off(vid->format, vid->width, vid->height,
&xf);
//! @todo Sequence number and timestamp.
xf->width = vid->width;
xf->height = vid->height;
xf->format = vid->format;
xf->stereo_format = vid->stereo_format;
xf->data = frame.data[plane];
xf->stride = info.stride[plane];
xf->size = info.size;
xf->source_id = vid->base.source_id;
xf->source_sequence = seq;
xf->timestamp = os_monotonic_get_ns();
if (vid->sink) {
vid->sink->push_frame(vid->sink, xf);
// The frame is requeued as soon as the refcount reaches
// zero, this can be done safely from another thread.
// xrt_frame_reference(&xf, NULL);
}
gst_video_frame_unmap(&frame);
} else {
V_ERROR(vid, "Failed to map frame %d", seq);
}
seq++;
}
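/*
 * appsink "new-sample" callback. The very first sample is only used to read
 * the negotiated width and height from the caps; every subsequent sample is
 * handed to vf_fs_frame() for conversion and delivery.
 */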
static GstFlowReturn
on_new_sample_from_sink(GstElement *elt, struct vf_fs *vid)
{
GstSample *sample;
sample = gst_app_sink_pull_sample(GST_APP_SINK(elt));
if (!vid->got_sample) {
gint width;
gint height;
GstCaps *caps = gst_sample_get_caps(sample);
GstStructure *structure = gst_caps_get_structure(caps, 0);
gst_structure_get_int(structure, "width", &width);
gst_structure_get_int(structure, "height", &height);
V_DEBUG(vid, "video size is %dx%d\n", width, height);
vid->got_sample = true;
vid->width = width;
vid->height = height;
// first sample is only used for getting metadata
gst_sample_unref(sample);
return GST_FLOW_OK;
}
vf_fs_frame(vid, sample);
gst_sample_unref(sample);
return GST_FLOW_OK;
}
static void
print_gst_error(GstMessage *message)
{
GError *err = NULL;
gchar *dbg_info = NULL;
gst_message_parse_error(message, &err, &dbg_info);
V_ERROR("ERROR from element %s: %s\n", GST_OBJECT_NAME(message->src),
err->message);
V_ERROR("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
g_error_free(err);
g_free(dbg_info);
}
static gboolean
on_source_message(GstBus *bus, GstMessage *message, struct vf_fs *vid)
{
/* nil */
switch (GST_MESSAGE_TYPE(message)) {
case GST_MESSAGE_EOS:
V_DEBUG(vid, "Finished playback\n");
g_main_loop_quit(vid->loop);
break;
case GST_MESSAGE_ERROR:
V_ERROR(vid, "Received error\n");
print_gst_error(message);
g_main_loop_quit(vid->loop);
break;
default: break;
}
return TRUE;
}
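/*
 * Playback thread entry point: runs the GLib main loop until EOS or an
 * error quits it, then tears the pipeline down.
 */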
static void *
run_play_thread(void *ptr)
{
struct vf_fs *vid = (struct vf_fs *)ptr;
V_DEBUG(vid, "Let's run!\n");
g_main_loop_run(vid->loop);
V_DEBUG(vid, "Going out\n");
gst_object_unref(vid->testsink);
gst_element_set_state(vid->source, GST_STATE_NULL);
gst_object_unref(vid->source);
g_main_loop_unref(vid->loop);
return NULL;
}
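/*
 * Creation plays the pipeline just long enough to see one sample, so that
 * width, height and format are known, then pauses it until stream_start().
 */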
struct xrt_fs *
vf_fs_create(struct xrt_frame_context *xfctx, const char *path)
{
if (path == NULL) {
V_ERROR(NULL, "No path given");
return NULL;
}
struct vf_fs *vid = U_TYPED_CALLOC(struct vf_fs);
vid->path = path;
vid->got_sample = false;
// Read the debug options before they are first used below.
vid->print_spew = debug_get_bool_option_vf_spew();
vid->print_debug = debug_get_bool_option_vf_debug();
const gchar *loop = "false";
gchar *string = NULL;
GstBus *bus = NULL;
gst_init(NULL, NULL);
if (!g_file_test(path, G_FILE_TEST_EXISTS)) {
V_ERROR(vid, "File %s does not exist\n", path);
free(vid);
return NULL;
}
vid->loop = g_main_loop_new(NULL, FALSE);
#if 0
const gchar *caps = "video/x-raw,format=RGB";
vid->format = XRT_FORMAT_R8G8B8;
vid->stereo_format = XRT_STEREO_FORMAT_SBS;
#endif
#if 1
const gchar *caps = "video/x-raw,format=YUY2";
vid->format = XRT_FORMAT_YUYV422;
vid->stereo_format = XRT_STEREO_FORMAT_SBS;
#endif
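// Build a looping playback pipeline:
//   multifilesrc ! decodebin ! videoconvert ! appsink (forced to the raw caps above)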
string = g_strdup_printf(
"multifilesrc location=\"%s\" loop=%s ! decodebin ! videoconvert ! "
"appsink caps=\"%s\" name=testsink",
path, loop, caps);
V_DEBUG(vid, "Pipeline: %s\n", string);
vid->source = gst_parse_launch(string, NULL);
g_free(string);
if (vid->source == NULL) {
V_ERROR(vid, "Bad source\n");
g_main_loop_unref(vid->loop);
free(vid);
return NULL;
}
vid->testsink = gst_bin_get_by_name(GST_BIN(vid->source), "testsink");
g_object_set(G_OBJECT(vid->testsink), "emit-signals", TRUE, "sync",
TRUE, NULL);
g_signal_connect(vid->testsink, "new-sample",
G_CALLBACK(on_new_sample_from_sink), vid);
bus = gst_element_get_bus(vid->source);
gst_bus_add_watch(bus, (GstBusFunc)on_source_message, vid);
gst_object_unref(bus);
os_thread_helper_init(&vid->play_thread);
int ret =
os_thread_helper_start(&vid->play_thread, run_play_thread, vid);
if (ret != 0) {
V_ERROR(vid, "Failed to start thread");
}
// we need one sample to determine frame size
gst_element_set_state(vid->source, GST_STATE_PLAYING);
while (!vid->got_sample) {
os_nanosleep(100 * 1000 * 1000);
}
gst_element_set_state(vid->source, GST_STATE_PAUSED);
vid->base.enumerate_modes = vf_fs_enumerate_modes;
vid->base.configure_capture = vf_fs_configure_capture;
vid->base.stream_start = vf_fs_stream_start;
vid->base.stream_stop = vf_fs_stream_stop;
vid->base.is_running = vf_fs_is_running;
vid->node.break_apart = vf_fs_node_break_apart;
vid->node.destroy = vf_fs_node_destroy;
// It's now safe to add it to the context.
xrt_frame_context_add(xfctx, &vid->node);
// Start the variable tracking after we know what device we have.
// clang-format off
u_var_add_root(vid, "Video File Frameserver", true);
u_var_add_ro_text(vid, vid->base.name, "Card");
u_var_add_bool(vid, &vid->print_debug, "Debug");
u_var_add_bool(vid, &vid->print_spew, "Spew");
// clang-format on
return &(vid->base);
}

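For reference, here is a minimal sketch of how a consumer could drive this frameserver directly, outside the prober path. The counting sink, the hard-coded file path, the XRT_FS_CAPTURE_TYPE_TRACKING value, the xrt/xrt_frame.h include and the xrt_frame_context_destroy_nodes teardown call are illustrative assumptions based on the interfaces used above, not part of this commit:

#include <stdio.h>
#include <inttypes.h>
#include <unistd.h>

#include "xrt/xrt_frame.h" // assumed home of xrt_frame_sink / xrt_frame_context
#include "vf/vf_interface.h"

// Hypothetical sink that only counts the frames pushed to it.
struct counting_sink
{
	struct xrt_frame_sink base;
	uint64_t count;
};

static void
counting_sink_push_frame(struct xrt_frame_sink *xfs, struct xrt_frame *xf)
{
	struct counting_sink *cs = (struct counting_sink *)xfs;
	cs->count++;
	// xf->data is only valid for the duration of this call, copy it if needed.
}

int
main(void)
{
	struct xrt_frame_context xfctx = {0};
	struct counting_sink cs = {.base = {.push_frame = counting_sink_push_frame}};

	struct xrt_fs *xfs = vf_fs_create(&xfctx, "/path/to/video.mp4");
	if (xfs == NULL) {
		return 1;
	}

	xfs->stream_start(xfs, &cs.base, XRT_FS_CAPTURE_TYPE_TRACKING, 0);
	sleep(5); // let some frames flow
	xfs->stream_stop(xfs);

	printf("Received %" PRIu64 " frames\n", cs.count);

	// Destroys every node added to the context, including this driver.
	xrt_frame_context_destroy_nodes(&xfctx);
	return 0;
}

In-tree, the same wiring happens through open_video_device() in the prober when the VF_PATH environment variable is set (see the prober change below).
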
View file

@@ -0,0 +1,36 @@
// Copyright 2020, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
/*!
* @file
* @brief Video file frameserver interface
* @author Christoph Haag <christoph.haag@collabora.com>
* @ingroup drv_vf
*/
#pragma once
#include "xrt/xrt_frameserver.h"
#ifdef __cplusplus
extern "C" {
#endif
/*!
* @defgroup drv_vf Video file frameserver driver
* @ingroup drv
*
* @brief Frameserver using a video file.
*/
/*!
* Create a video file frameserver that plays back the given file.
*
* @ingroup drv_vf
*/
struct xrt_fs *
vf_fs_create(struct xrt_frame_context *xfctx, const char *path);
#ifdef __cplusplus
}
#endif

View file

@@ -69,6 +69,10 @@ if has_v4l2_header and 'v4l2' in drivers
have_conf.set('XRT_HAVE_V4L2', true)
endif
if 'vf' in drivers
have_conf.set('XRT_HAVE_VF', true)
endif
if true
have_conf.set('XRT_HAVE_VULKAN', true)
endif

View file

@@ -10,6 +10,7 @@
#pragma once
#cmakedefine XRT_HAVE_DBUS
#cmakedefine XRT_HAVE_VF
#cmakedefine XRT_HAVE_EGL
#cmakedefine XRT_HAVE_FFMPEG
#cmakedefine XRT_HAVE_JPEG

View file

@@ -20,6 +20,10 @@
#include "v4l2/v4l2_interface.h"
#endif
#ifdef XRT_HAVE_VF
#include "vf/vf_interface.h"
#endif
#ifdef XRT_BUILD_DRIVER_REMOTE
#include "remote/r_interface.h"
#endif
@@ -776,6 +780,8 @@ open_hid_interface(struct xrt_prober *xp,
return -1;
}
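// Read the VF_PATH environment variable once; when it is set, open_video_device()
// below tries the video file frameserver before falling back to V4L2.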
DEBUG_GET_ONCE_OPTION(vf_path, "VF_PATH", NULL)
static int
open_video_device(struct xrt_prober *xp,
struct xrt_prober_device *xpdev,
@@ -785,6 +791,17 @@ open_video_device(struct xrt_prober *xp,
XRT_MAYBE_UNUSED struct prober_device *pdev =
(struct prober_device *)xpdev;
#if defined(XRT_HAVE_VF)
const char *path = debug_get_option_vf_path();
if (path != NULL) {
struct xrt_fs *xfs = vf_fs_create(xfctx, path);
if (xfs) {
*out_xfs = xfs;
return 0;
}
}
#endif
#if defined(XRT_HAVE_V4L2)
if (pdev->num_v4ls == 0) {
return -1;

View file

@@ -72,6 +72,10 @@ if(XRT_HAVE_V4L2)
target_link_libraries(target_lists PRIVATE drv_v4l2)
endif()
if(XRT_HAVE_VF)
target_link_libraries(target_lists PRIVATE drv_vf)
endif()
if(XRT_BUILD_DRIVER_VIVE)
target_link_libraries(target_lists PRIVATE drv_vive)
endif()

View file

@@ -53,6 +53,10 @@ if 'v4l2' in drivers
driver_libs += [lib_drv_v4l2]
endif
if 'vf' in drivers
driver_libs += [lib_drv_vf]
endif
if 'vive' in drivers
driver_libs += [lib_drv_vive]
endif