diff --git a/CMake/realsense.def b/CMake/realsense.def index 86fdf1e193..80b6ccb0d0 100644 --- a/CMake/realsense.def +++ b/CMake/realsense.def @@ -68,7 +68,7 @@ EXPORTS rs2_keep_frame rs2_frame_add_ref rs2_pose_frame_get_pose_data - + rs2_get_option rs2_set_option rs2_supports_option @@ -147,6 +147,7 @@ EXPORTS rs2_clone_stream_profile rs2_allocate_synthetic_video_frame + rs2_allocate_points rs2_allocate_composite_frame rs2_synthetic_frame_ready rs2_create_processing_block diff --git a/CMakeLists.txt b/CMakeLists.txt index de56cb0355..2af224191f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -89,7 +89,7 @@ if(ANDROID_NDK_TOOLCHAIN_INCLUDED) set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fPIC -pedantic -g -D_BSD_SOURCE") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC -pedantic -g -Wno-missing-field-initializers") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-switch -Wno-multichar") - + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fPIE -pie") message(STATUS "Prepare RealSense SDK for Android OS (${ANDROID_NDK_ABI_NAME})") endif() @@ -201,6 +201,22 @@ set(REALSENSE_CPP ## Check for Windows Version ## if( (${CMAKE_SYSTEM_VERSION} EQUAL 6.1) OR (FORCE_WINUSB_UVC) ) # Windows 7 + if (NOT CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR) + set(DRIVERS IntelD415_win7.inf + WdfCoinstaller01011.dll + winusbcoinstaller2.dll + ) + + foreach(item IN LISTS DRIVERS) + add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/${item}" + COMMAND ${CMAKE_COMMAND} -E copy "${CMAKE_CURRENT_SOURCE_DIR}/src/win7/d415_driver/${item}" "${CMAKE_CURRENT_BINARY_DIR}/${item}" + DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${item}" + ) + endforeach() + endif() + add_custom_target(realsense-driver ALL DEPENDS ${DRIVERS}) + list(APPEND REALSENSE_CPP src/win7/win7-helpers.cpp src/win7/win7-uvc.cpp @@ -892,6 +908,12 @@ target_include_directories(realsense2 PRIVATE ${BOOST_INCLUDE_PATH} ${LZ4_INCLUDE_PATH} ) +if( (${CMAKE_SYSTEM_VERSION} EQUAL 6.1) OR (FORCE_WINUSB_UVC) ) # Windows 7 + 
add_dependencies(realsense2 realsense-driver) + set_target_properties (realsense-driver PROPERTIES + FOLDER Library + ) +endif() if (USE_SYSTEM_LIBUSB) target_include_directories(realsense2 PRIVATE ${LIBUSB1_INCLUDE_DIRS}) @@ -942,6 +964,7 @@ option(BUILD_EXAMPLES "Build realsense examples and tools." ON) option(ENFORCE_METADATA "Require WinSDK with Metadata support during compilation. Windows OS Only" OFF) option(BUILD_PYTHON_BINDINGS "Build Python bindings" OFF) option(BUILD_CSHARP_BINDINGS "Build C# bindings" OFF) +#option(BUILD_MATLAB_BINDINGS "Build Matlab bindings" OFF) option(BUILD_UNITY_BINDINGS "Copy the unity project to the build folder with the required dependencies" OFF) option(BUILD_CV_EXAMPLES "Build OpenCV examples" OFF) option(BUILD_PCL_EXAMPLES "Build PCL examples" OFF) @@ -972,6 +995,10 @@ if (BUILD_WITH_CUDA) add_definitions(-DRS2_USE_CUDA) endif() +if (BUILD_MATLAB_BINDINGS) + add_subdirectory(wrappers/matlab) +endif() + # Check for unreferenced files FILE(GLOB_RECURSE AllSources RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} diff --git a/common/model-views.cpp b/common/model-views.cpp index 920c25adf5..9f93f8ee78 100644 --- a/common/model-views.cpp +++ b/common/model-views.cpp @@ -1413,7 +1413,7 @@ namespace rs2 { if (viewer.synchronization_enable && (!viewer.is_3d_view || viewer.is_3d_depth_source(f) || viewer.is_3d_texture_source(f))) { - viewer.s(f); + viewer.s.invoke(f); } else { @@ -2580,7 +2580,7 @@ namespace rs2 float val{}; if (texture->try_pick(x, y, &val)) { - ss << ", *p: 0x" << std::hex << val; + ss << ", *p: 0x" << std::hex << static_cast(round(val)); } if (texture->get_last_frame().is()) @@ -5657,7 +5657,7 @@ namespace rs2 ImGui::SetCursorPos({ rc.x, rc.y + line_h }); } - ImGui::SetCursorPos({ rc.x + 225, rc.y - 127 }); + ImGui::SetCursorPos({ rc.x + 225, rc.y - 107 }); if (fw_version_less_than(fw_version, min_fw_version)) { @@ -6111,7 +6111,7 @@ namespace rs2 } } - void device_model::handle_harware_events(const std::string& 
serialized_data) + void device_model::handle_hardware_events(const std::string& serialized_data) { //TODO: Move under hour glass std::string event_type = get_event_type(serialized_data); diff --git a/common/model-views.h b/common/model-views.h index 324571726c..635b59da79 100644 --- a/common/model-views.h +++ b/common/model-views.h @@ -477,7 +477,7 @@ namespace rs2 bool load_json_if_streaming = false, json_loading_func json_loading = [](std::function load) {load(); }, bool draw_device_outline = true); - void handle_harware_events(const std::string& serialized_data); + void handle_hardware_events(const std::string& serialized_data); std::vector> subdevices; bool is_streaming() const; diff --git a/doc/android/AndroidJavaApp.md b/doc/android/AndroidJavaApp.md index f268b5013b..ea75c9bdcb 100644 --- a/doc/android/AndroidJavaApp.md +++ b/doc/android/AndroidJavaApp.md @@ -47,6 +47,7 @@ Click on `Run` and choose `Run 'app'`. Choose your Android device and click on t 16. Open the Terminal Emulator application and type below lines in order to move to Super User mode and change the USB permissions. ```shell su +setenforce 0 lsusb chmod 0777 /dev/bus/usb// ``` diff --git a/doc/android/AndroidNativeSamples.md b/doc/android/AndroidNativeSamples.md index 675c61e514..82188dfdab 100644 --- a/doc/android/AndroidNativeSamples.md +++ b/doc/android/AndroidNativeSamples.md @@ -14,7 +14,7 @@ This document describes how to build the Intel® RealSense™ SDK 2.0 including 7. Open Terminal on the host machine, navigate to *librealsense* root directory and type the following lines: ```shell mkdir build && cd build -cmake .. -DANDROID_ABI= -DCMAKE_TOOLCHAIN_FILE=/build/cmake/android.toolchain.cmake -DFORCE_LIBUVC=TRUE +cmake .. -DANDROID_ABI= -DCMAKE_TOOLCHAIN_FILE=/build/cmake/android.toolchain.cmake -DFORCE_LIBUVC=TRUE -DBUILD_SHARED_LIBS=false make ``` @@ -23,7 +23,6 @@ make 8. 
When compilation done type the following lines to store the binaries at the same location to easily copy them to your Android device. ```shell mkdir lrs_binaries && cd lrs_binaries -cp ../librealsense2.so ./ cp ../examples/C/color/rs-color ./ cp ../examples/C/depth/rs-depth ./ cp ../examples/C/distance/rs-distance ./ diff --git a/doc/android/native-lib.cpp_ b/doc/android/native-lib.cpp_ index 0f2ed8b4e6..c592aedb58 100644 --- a/doc/android/native-lib.cpp_ +++ b/doc/android/native-lib.cpp_ @@ -8,7 +8,6 @@ #include "librealsense/include/librealsense2/hpp/rs_pipeline.hpp" #include "librealsense/include/librealsense2/hpp/rs_internal.hpp" - static rs2::context ctx; static std::atomic streaming(false); static std::thread frame_thread; @@ -21,24 +20,27 @@ Java_com_example_realsense_1app_MainActivity_startStreaming(JNIEnv *env, jobject return; const char* nativeClassName = env->GetStringUTFChars(className, 0); - jclass handlerClass = env->FindClass(nativeClassName); - env->ReleaseStringUTFChars(className, nativeClassName); - if (handlerClass == NULL) { + jclass localClass = env->FindClass(nativeClassName); + if (localClass == NULL) { throw std::runtime_error("FindClass(...) failed!"); } + jclass handlerClass = reinterpret_cast(env->NewGlobalRef(localClass)); + env->ReleaseStringUTFChars(className, nativeClassName); + jmethodID onFrame = env->GetStaticMethodID(handlerClass, "onFrame", "(Ljava/lang/Object;II)V"); if (onFrame == NULL) { + env->DeleteGlobalRef(handlerClass); throw std::runtime_error("GetStaticMethodID(...) 
failed!"); } auto list = ctx.query_devices(); if (0 == list.size()) { + env->DeleteGlobalRef(handlerClass); throw std::runtime_error("No RealSense devices where found!"); } - JavaVM* vm; env->GetJavaVM(&vm); frame_thread = std::thread([vm, handlerClass, onFrame](){ @@ -76,6 +78,7 @@ Java_com_example_realsense_1app_MainActivity_startStreaming(JNIEnv *env, jobject {} env->DeleteLocalRef(arr); + env->DeleteGlobalRef(handlerClass); vm->DetachCurrentThread(); }); } @@ -93,9 +96,9 @@ Java_com_example_realsense_1app_MainActivity_stopStreaming(JNIEnv *env, jobject if (!streaming) return; - streaming = false; - if (frame_thread.joinable()) - frame_thread.join(); + streaming = false; + if (frame_thread.joinable()) + frame_thread.join(); } catch (const std::exception& ex) diff --git a/doc/distribution_linux.md b/doc/distribution_linux.md index afc89311a5..a68bea01bf 100644 --- a/doc/distribution_linux.md +++ b/doc/distribution_linux.md @@ -1,12 +1,16 @@ # Linux Distribution +#### Using pre-build packages **Intel® RealSense™ SDK 2.0** provides installation packages in [`dpkg`](https://en.wikipedia.org/wiki/Dpkg) format for Ubuntu 16/18 LTS. -\* The Realsense [DKMS](https://en.wikipedia.org/wiki/Dynamic_Kernel_Module_Support) kernel drivers package (`librealsense2-dkms`) supports Ubuntu LTS kernels 4.4, 4.10, 4.13 and 4.15\*. +The Realsense [DKMS](https://en.wikipedia.org/wiki/Dynamic_Kernel_Module_Support) kernel drivers package (`librealsense2-dkms`) supports Ubuntu LTS kernels 4.4, 4.10, 4.13 and 4.15. -**Note** Kernel 4.16 introduced a major change to uvcvideo and media subsystem. While we work to add support v4.16 in future releases, **librealsense** is verified to run correctly with kernels v4.4-v4.15; the affected users are requested to downgrade the kernel version. 
+#### Configuring and building from the source code +While we strongly recommend to use DKMS package whenever possible, there are certain cases where installing and patching the system manually is necessary: + - Using SDK with linux kernel **version 4.16+** + - Integration of user-specific patches/modules with `librealsense` SDK. + - Adjusting the patches for alternative kernels/distributions. - ->To build the project from sources and prepare/patch the OS manually please follow steps described [here](./installation.md). +The steps are described in [Linux manual installation guide](./installation.md) ## Installing the packages: diff --git a/doc/img/matlab_select.png b/doc/img/matlab_select.png new file mode 100644 index 0000000000..a59479f0f1 Binary files /dev/null and b/doc/img/matlab_select.png differ diff --git a/doc/installation.md b/doc/installation.md index 3ad0252bc3..a89ebb18a3 100644 --- a/doc/installation.md +++ b/doc/installation.md @@ -10,17 +10,23 @@ The scripts and commands below invoke `wget, git, add-apt-repository` which may **Important:** Running RealSense Depth Cameras on Linux requires patching and inserting modified kernel drivers. Some OEM/Vendors choose to lock the kernel for modifications. Unlocking this capability may requires to modify BIOS settings **Make Ubuntu Up-to-date:** - 1. Update Ubuntu distribution, including getting the latest stable kernel + * Update Ubuntu distribution, including getting the latest stable kernel: * `sudo apt-get update && sudo apt-get upgrade && sudo apt-get dist-upgrade`
- **Note:** On stock Ubuntu 14 LTS systems and Kernel prior to 4.4.0-04 the standard *apt-get upgrade* command is not sufficient to bring the distribution to the latest recommended baseline. - On those systems the following should be used: - `sudo apt-get install --install-recommends linux-generic-lts-xenial xserver-xorg-core-lts-xenial xserver-xorg-lts-xenial xserver-xorg-video-all-lts-xenial xserver-xorg-input-all-lts-xenial libwayland-egl1-mesa-lts-xenial `
+**Note:** On stock Ubuntu 14 LTS systems and Kernel prior to 4.4.0-04 the standard `apt-get upgrade` command is not sufficient to bring the distribution to the latest recommended baseline. + It is recommended to upgrade the distribution with: + * `sudo apt-get install --install-recommends linux-generic-lts-xenial xserver-xorg-core-lts-xenial xserver-xorg-lts-xenial xserver-xorg-video-all-lts-xenial xserver-xorg-input-all-lts-xenial libwayland-egl1-mesa-lts-xenial `
* Update OS Boot and reboot to enforce the correct kernel selection with
`sudo update-grub && sudo reboot`
 * Interrupt the boot process at Grub2 Boot Menu -> "Advanced Options for Ubuntu" and select the kernel version installed in the previous step. Press and hold SHIFT if the Boot menu is not presented. - * Complete the boot, login and verify that a supported kernel version (4.4.0-.., 4.8.0-.., 4.10.0-.. , 4.13.0-..or 4.15.0-.. as of May 2018) is in place with `uname -r` + * Complete the boot, login and verify that a supported kernel version (4.**[4,8,10,13,15,16]**) is in place with `uname -r` + + + **Download/Clone librealsense github repository:** + 1. Navigate to *librealsense* root directory to run the following scripts.
+ Unplug any connected Intel RealSense camera.
+ **Prepare Linux Backend and the Dev. Environment:** @@ -40,10 +46,9 @@ The scripts and commands below invoke `wget, git, add-apt-repository` which may * Ubuntu 18:
`sudo apt-get install libglfw3-dev libgl1-mesa-dev libglu1-mesa-dev`

- Cmake: *librealsense* requires version 3.8+ which is currently not made available via apt manager for Ubuntu LTS. + Cmake: certain *librealsense* CMAKE flags (e.g. CUDA) require version 3.8+ which is currently not made available via apt manager for Ubuntu LTS. Go to the [official CMake site](https://cmake.org/download/) to download and install the application - **Note** on graphic sub-system utilization:
*glfw3*, *mesa* and *gtk* packages are required if you plan to build the SDK's OpenGl-enabled examples. The *librealsense* core library and a range of demos/tools are designed for headless environment deployment. @@ -53,12 +58,15 @@ The scripts and commands below invoke `wget, git, add-apt-repository` which may
4. Build and apply patched kernel modules for:
- * **Ubuntu 14/16/18 LTS** + * **Ubuntu 14/16/18 with LTS kernel** script will download, patch and build realsense-affected kernel modules (drivers).
Then it will attempt to insert the patched module instead of the active one. If failed the original uvc modules will be restored. `./scripts/patch-realsense-ubuntu-lts.sh`
+ * **Ubuntu with Kernel 4.16** + + `./scripts/patch-ubuntu-kernel-4.16.sh`
* **Intel® Joule™ with Ubuntu** Based on the custom kernel provided by Canonical Ltd. diff --git a/examples/align/rs-align.cpp b/examples/align/rs-align.cpp index 3ac5376207..098d4a503a 100644 --- a/examples/align/rs-align.cpp +++ b/examples/align/rs-align.cpp @@ -103,7 +103,7 @@ int main(int argc, char * argv[]) try pip_stream.y = altered_other_frame_rect.y + altered_other_frame_rect.h - pip_stream.h - (std::max(w, h) / 25); // Render depth (as picture in pipcture) - renderer.upload(c(aligned_depth_frame)); + renderer.upload(c.process(aligned_depth_frame)); renderer.show(pip_stream); // Using ImGui library to provide a slide controller to select the depth clipping distance diff --git a/examples/capture/rs-capture.cpp b/examples/capture/rs-capture.cpp index 95bf29b4ba..c472718770 100644 --- a/examples/capture/rs-capture.cpp +++ b/examples/capture/rs-capture.cpp @@ -26,7 +26,7 @@ int main(int argc, char * argv[]) try { rs2::frameset data = pipe.wait_for_frames(); // Wait for next set of frames from the camera - rs2::frame depth = color_map(data.get_depth_frame()); // Find and colorize the depth data + rs2::frame depth = color_map.process(data.get_depth_frame()); // Find and colorize the depth data rs2::frame color = data.get_color_frame(); // Find the color data // For cameras that don't have RGB sensor, we'll render infrared frames instead of color @@ -50,6 +50,3 @@ catch (const std::exception& e) std::cerr << e.what() << std::endl; return EXIT_FAILURE; } - - - diff --git a/examples/example.hpp b/examples/example.hpp index 32c78bfb9e..aca687bbc7 100644 --- a/examples/example.hpp +++ b/examples/example.hpp @@ -9,6 +9,7 @@ #include #include #include +#include ////////////////////////////// // Basic Data Types // @@ -58,6 +59,31 @@ inline void draw_text(int x, int y, const char * text) class texture { public: + void render(const rs2::frameset& frames, int window_width, int window_height) + { + std::vector supported_frames; + for (auto f : frames) + { + if 
(can_render(f)) + supported_frames.push_back(f); + } + if (supported_frames.empty()) + return; + + std::sort(supported_frames.begin(), supported_frames.end(), [](rs2::frame first, rs2::frame second) + { return first.get_profile().stream_type() < second.get_profile().stream_type(); }); + + auto image_grid = calc_grid(float2{ (float)window_width, (float)window_height }, supported_frames); + + int image_index = 0; + for (auto f : supported_frames) + { + upload(f); + show(image_grid.at(image_index)); + image_index++; + } + } + void render(const rs2::video_frame& frame, const rect& r) { upload(frame); @@ -122,20 +148,80 @@ class texture draw_text((int)r.x + 15, (int)r.y + 20, rs2_stream_to_string(stream)); } + private: GLuint gl_handle = 0; int width = 0; int height = 0; rs2_stream stream = RS2_STREAM_ANY; + + bool can_render(const rs2::frame& f) const + { + auto format = f.get_profile().format(); + switch (format) + { + case RS2_FORMAT_RGB8: + case RS2_FORMAT_RGBA8: + case RS2_FORMAT_Y8: + return true; + default: + return false; + } + } + + rect calc_grid(float2 window, int streams) + { + if (window.x <= 0 || window.y <= 0 || streams <= 0) + throw std::runtime_error("invalid window configuration request, failed to calculate window grid"); + float ratio = window.x / window.y; + auto x = sqrt(ratio * (float)streams); + auto y = (float)streams / x; + auto w = round(x); + auto h = round(y); + if (w == 0 || h == 0) + throw std::runtime_error("invalid window configuration request, failed to calculate window grid"); + while (w*h > streams) + h > w ? h-- : w--; + while (w*h < streams) + h > w ? 
w++ : h++; + auto new_w = round(window.x / w); + auto new_h = round(window.y / h); + return rect{ w, h, new_w, new_h}; //column count, line count, cell width cell height + } + + std::vector calc_grid(float2 window, std::vector& frames) + { + auto grid = calc_grid(window, frames.size()); + + int index = 0; + std::vector rv; + int curr_line = -1; + for (auto f : frames) + { + auto mod = index % (int)grid.x; + auto fw = (float)frames[index].get_width(); + auto fh = (float)frames[index].get_height(); + + float cell_x_postion = (float)(mod * grid.w); + if (mod == 0) curr_line++; + float cell_y_position = curr_line * grid.h; + + auto r = rect{ cell_x_postion, cell_y_position, grid.w, grid.h }; + rv.push_back(r.adjust_ratio(float2{ fw, fh })); + index++; + } + + return rv; + } }; class window { public: - std::function on_left_mouse = [](bool) {}; + std::function on_left_mouse = [](bool) {}; std::function on_mouse_scroll = [](double, double) {}; - std::function on_mouse_move = [](double, double) {}; - std::function on_key_release = [](int) {}; + std::function on_mouse_move = [](double, double) {}; + std::function on_key_release = [](int) {}; window(int width, int height, const char* title) : _width(width), _height(height) @@ -209,7 +295,7 @@ class window operator GLFWwindow*() { return win; } private: - GLFWwindow* win; + GLFWwindow * win; int _width, _height; }; diff --git a/examples/measure/rs-measure.cpp b/examples/measure/rs-measure.cpp index 690d5b0163..16972485e4 100644 --- a/examples/measure/rs-measure.cpp +++ b/examples/measure/rs-measure.cpp @@ -193,32 +193,33 @@ int main(int argc, char * argv[]) try rs2::frame_source& source) // Frame pool that can allocate new frames { // First make the frames spatially aligned - data = align_to.process(data); + data = data.apply_filter(align_to); - // Next, apply depth post-processing - rs2::frame depth = data.get_depth_frame(); // Decimation will reduce the resultion of the depth image, // closing small holes and speeding-up 
the algorithm - depth = dec.process(depth); + data = data.apply_filter(dec); + // To make sure far-away objects are filtered proportionally // we try to switch to disparity domain - depth = depth2disparity.process(depth); + data = data.apply_filter(depth2disparity); + // Apply spatial filtering - depth = spat.process(depth); + data = data.apply_filter(spat); + // Apply temporal filtering - depth = temp.process(depth); + data = data.apply_filter(temp); + // If we are in disparity domain, switch back to depth - depth = disparity2depth.process(depth); + data = data.apply_filter(disparity2depth); + // Send the post-processed depth for path-finding - pathfinding_queue.enqueue(depth); + pathfinding_queue.enqueue(data.get_depth_frame()); + + //Apply color map for visualization of depth + data = data.apply_filter(color_map); - // Apply color map for visualization of depth - auto colorized = color_map(depth); - auto color = data.get_color_frame(); - // Group the two frames together (to make sure they are rendered in sync) - rs2::frameset combined = source.allocate_composite_frame({ colorized, color }); // Send the composite frame for rendering - source.frame_ready(combined); + source.frame_ready(data); }); // Indicate that we want the results of frame_processor // to be pushed into postprocessed_frames queue @@ -329,13 +330,14 @@ int main(int argc, char * argv[]) try { auto depth = current_frameset.get_depth_frame(); auto color = current_frameset.get_color_frame(); + auto colorized_depth = current_frameset.first(RS2_STREAM_DEPTH, RS2_FORMAT_RGB8); glEnable(GL_BLEND); // Use the Alpha channel for blending glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); // First render the colorized depth image - depth_image.render(depth, { 0, 0, app.width(), app.height() }); + depth_image.render(colorized_depth, { 0, 0, app.width(), app.height() }); // Next, set global alpha for the color image to 90% // (to make it slightly translucent) //glColor4f(1.f, 1.f, 1.f, 0.9f); diff --git 
a/examples/multicam/rs-multicam.cpp b/examples/multicam/rs-multicam.cpp index 7df62d94bd..4e359dec36 100644 --- a/examples/multicam/rs-multicam.cpp +++ b/examples/multicam/rs-multicam.cpp @@ -108,7 +108,7 @@ class device_container { rs2::frame new_frame = frameset[i]; int stream_id = new_frame.get_profile().unique_id(); - view.second.frames_per_stream[stream_id] = view.second.colorize_frame(new_frame); //update view port with the new stream + view.second.frames_per_stream[stream_id] = view.second.colorize_frame.process(new_frame); //update view port with the new stream } } } diff --git a/examples/post-processing/rs-post-processing.cpp b/examples/post-processing/rs-post-processing.cpp index 9be38eea27..3bf425f7ad 100644 --- a/examples/post-processing/rs-post-processing.cpp +++ b/examples/post-processing/rs-post-processing.cpp @@ -34,10 +34,10 @@ Class to encapsulate a filter alongside its options class filter_options { public: - filter_options(const std::string name, rs2::process_interface& filter); + filter_options(const std::string name, rs2::processing_block& filter); filter_options(filter_options&& other); std::string filter_name; //Friendly name of the filter - rs2::process_interface& filter; //The filter in use + rs2::processing_block& filter; //The filter in use std::map supported_options; //maps from an option supported by the filter, to the corresponding slider std::atomic_bool is_enabled; //A boolean controlled by the user that determines whether to apply the filter or not }; @@ -232,7 +232,7 @@ void update_data(rs2::frame_queue& data, rs2::frame& colorized_depth, rs2::point if (data.poll_for_frame(&f)) // Try to take the depth and points from the queue { points = pc.calculate(f); // Generate pointcloud from the depth data - colorized_depth = color_map(f); // Colorize the depth frame with a color map + colorized_depth = color_map.process(f); // Colorize the depth frame with a color map pc.map_to(colorized_depth); // Map the colored depth to the point cloud 
view.tex.upload(colorized_depth); // and upload the texture to the view (without this the view will be B&W) } @@ -348,7 +348,7 @@ bool filter_slider_ui::is_all_integers(const rs2::option_range& range) /** Constructor for filter_options, takes a name and a filter. */ -filter_options::filter_options(const std::string name, rs2::process_interface& filter) : +filter_options::filter_options(const std::string name, rs2::processing_block& filter) : filter_name(name), filter(filter), is_enabled(true) diff --git a/examples/record-playback/rs-record-playback.cpp b/examples/record-playback/rs-record-playback.cpp index 05c1752e93..b788828851 100644 --- a/examples/record-playback/rs-record-playback.cpp +++ b/examples/record-playback/rs-record-playback.cpp @@ -69,7 +69,7 @@ int main(int argc, char * argv[]) try if (!device.as()) { frames = pipe->wait_for_frames(); // wait for next set of frames from the camera - depth = color_map(frames.get_depth_frame()); // Find and colorize the depth data + depth = color_map.process(frames.get_depth_frame()); // Find and colorize the depth data } // Set options for the ImGui buttons @@ -164,7 +164,7 @@ int main(int argc, char * argv[]) try rs2::playback playback = device.as(); if (pipe->poll_for_frames(&frames)) // Check if new frames are ready { - depth = color_map(frames.get_depth_frame()); // Find and colorize the depth data for rendering + depth = color_map.process(frames.get_depth_frame()); // Find and colorize the depth data for rendering } // Render a seek bar for the player diff --git a/examples/save-to-disk/rs-save-to-disk.cpp b/examples/save-to-disk/rs-save-to-disk.cpp index 1bad49522c..25ddaa66d8 100644 --- a/examples/save-to-disk/rs-save-to-disk.cpp +++ b/examples/save-to-disk/rs-save-to-disk.cpp @@ -38,7 +38,7 @@ int main(int argc, char * argv[]) try { auto stream = frame.get_profile().stream_type(); // Use the colorizer to get an rgb image for the depth stream - if (vf.is()) vf = color_map(frame); + if (vf.is()) vf = 
color_map.process(frame); // Write images to disk std::stringstream png_file; diff --git a/examples/sensor-control/helper.h b/examples/sensor-control/helper.h index 1c7313c49f..73652bed4a 100644 --- a/examples/sensor-control/helper.h +++ b/examples/sensor-control/helper.h @@ -81,7 +81,7 @@ namespace helper { try { - renderer.render(colorize(decimate.process(frame)), { 0, 0, view_width, view_height }); + renderer.render(colorize.process(decimate.process(frame)), { 0, 0, view_width, view_height }); } catch (const std::exception& e) { diff --git a/include/librealsense2/h/rs_context.h b/include/librealsense2/h/rs_context.h index 077ba6385c..87d375bb35 100644 --- a/include/librealsense2/h/rs_context.h +++ b/include/librealsense2/h/rs_context.h @@ -108,7 +108,7 @@ void rs2_delete_device_hub(const rs2_device_hub* hub); * \param[out] error If non-null, receives any error that occurs during this call, otherwise, errors are ignored. * \return device object */ -rs2_device* rs2_device_hub_wait_for_device(rs2_context* ctx, const rs2_device_hub* hub, rs2_error** error); +rs2_device* rs2_device_hub_wait_for_device(const rs2_device_hub* hub, rs2_error** error); /** * Checks if device is still connected diff --git a/include/librealsense2/h/rs_frame.h b/include/librealsense2/h/rs_frame.h index c346d24aa0..c2d35fce68 100644 --- a/include/librealsense2/h/rs_frame.h +++ b/include/librealsense2/h/rs_frame.h @@ -276,6 +276,17 @@ int rs2_is_frame_extendable_to(const rs2_frame* frame, rs2_extension extension_t rs2_frame* rs2_allocate_synthetic_video_frame(rs2_source* source, const rs2_stream_profile* new_stream, rs2_frame* original, int new_bpp, int new_width, int new_height, int new_stride, rs2_extension frame_type, rs2_error** error); +/** +* Allocate new points frame using a frame-source provided from a processing block +* \param[in] source Frame pool to allocate the frame from +* \param[in] new_stream New stream profile to assign to newly created frame +* \param[in] original A 
reference frame that can be used to fill in auxilary information like format, width, height, bpp, stride (if applicable) +* \param[out] error If non-null, receives any error that occurs during this call, otherwise, errors are ignored +* \return reference to a newly allocated frame, must be released with release_frame +* memory for the frame is likely to be re-used from previous frame, but in lack of available frames in the pool will be allocated from the free store +*/ +rs2_frame* rs2_allocate_points(rs2_source* source, const rs2_stream_profile* new_stream, rs2_frame* original, rs2_error** error); + /** * Allocate new composite frame, aggregating a set of existing frames * \param[in] source Frame pool to allocate the frame from @@ -323,9 +334,6 @@ void rs2_synthetic_frame_ready(rs2_source* source, rs2_frame* frame, rs2_error** */ void rs2_pose_frame_get_pose_data(const rs2_frame* frame, rs2_pose* pose, rs2_error** error); - - - #ifdef __cplusplus } #endif diff --git a/include/librealsense2/h/rs_option.h b/include/librealsense2/h/rs_option.h index 71be14b9b0..215da93dc2 100644 --- a/include/librealsense2/h/rs_option.h +++ b/include/librealsense2/h/rs_option.h @@ -1,5 +1,5 @@ /* License: Apache 2.0. See LICENSE file in root directory. - Copyright(c) 2017 Intel Corporation. All Rights Reserved. */ +Copyright(c) 2017 Intel Corporation. All Rights Reserved. */ /** \file rs_option.h * \brief @@ -16,156 +16,159 @@ extern "C" { #include "rs_types.h" -/** \brief Defines general configuration controls. + /** \brief Defines general configuration controls. These can generally be mapped to camera UVC controls, and unless stated otherwise, can be set/queried at any time. */ -typedef enum rs2_option -{ - RS2_OPTION_BACKLIGHT_COMPENSATION , /**< Enable / disable color backlight compensation*/ - RS2_OPTION_BRIGHTNESS , /**< Color image brightness*/ - RS2_OPTION_CONTRAST , /**< Color image contrast*/ - RS2_OPTION_EXPOSURE , /**< Controls exposure time of color camera. 
Setting any value will disable auto exposure*/ - RS2_OPTION_GAIN , /**< Color image gain*/ - RS2_OPTION_GAMMA , /**< Color image gamma setting*/ - RS2_OPTION_HUE , /**< Color image hue*/ - RS2_OPTION_SATURATION , /**< Color image saturation setting*/ - RS2_OPTION_SHARPNESS , /**< Color image sharpness setting*/ - RS2_OPTION_WHITE_BALANCE , /**< Controls white balance of color image. Setting any value will disable auto white balance*/ - RS2_OPTION_ENABLE_AUTO_EXPOSURE , /**< Enable / disable color image auto-exposure*/ - RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE , /**< Enable / disable color image auto-white-balance*/ - RS2_OPTION_VISUAL_PRESET , /**< Provide access to several recommend sets of option presets for the depth camera */ - RS2_OPTION_LASER_POWER , /**< Power of the F200 / SR300 projector, with 0 meaning projector off*/ - RS2_OPTION_ACCURACY , /**< Set the number of patterns projected per frame. The higher the accuracy value the more patterns projected. Increasing the number of patterns help to achieve better accuracy. Note that this control is affecting the Depth FPS */ - RS2_OPTION_MOTION_RANGE , /**< Motion vs. Range trade-off, with lower values allowing for better motion sensitivity and higher values allowing for better depth range*/ - RS2_OPTION_FILTER_OPTION , /**< Set the filter to apply to each depth frame. Each one of the filter is optimized per the application requirements*/ - RS2_OPTION_CONFIDENCE_THRESHOLD , /**< The confidence level threshold used by the Depth algorithm pipe to set whether a pixel will get a valid range or will be marked with invalid range*/ - RS2_OPTION_EMITTER_ENABLED , /**< Laser Emitter enabled */ - RS2_OPTION_FRAMES_QUEUE_SIZE , /**< Number of frames the user is allowed to keep per stream. 
Trying to hold-on to more frames will cause frame-drops.*/ - RS2_OPTION_TOTAL_FRAME_DROPS , /**< Total number of detected frame drops from all streams */ - RS2_OPTION_AUTO_EXPOSURE_MODE , /**< Auto-Exposure modes: Static, Anti-Flicker and Hybrid */ - RS2_OPTION_POWER_LINE_FREQUENCY , /**< Power Line Frequency control for anti-flickering Off/50Hz/60Hz/Auto */ - RS2_OPTION_ASIC_TEMPERATURE , /**< Current Asic Temperature */ - RS2_OPTION_ERROR_POLLING_ENABLED , /**< disable error handling */ - RS2_OPTION_PROJECTOR_TEMPERATURE , /**< Current Projector Temperature */ - RS2_OPTION_OUTPUT_TRIGGER_ENABLED , /**< Enable / disable trigger to be outputed from the camera to any external device on every depth frame */ - RS2_OPTION_MOTION_MODULE_TEMPERATURE , /**< Current Motion-Module Temperature */ - RS2_OPTION_DEPTH_UNITS , /**< Number of meters represented by a single depth unit */ - RS2_OPTION_ENABLE_MOTION_CORRECTION , /**< Enable/Disable automatic correction of the motion data */ - RS2_OPTION_AUTO_EXPOSURE_PRIORITY , /**< Allows sensor to dynamically ajust the frame rate depending on lighting conditions */ - RS2_OPTION_COLOR_SCHEME , /**< Color scheme for data visualization */ - RS2_OPTION_HISTOGRAM_EQUALIZATION_ENABLED , /**< Perform histogram equalization post-processing on the depth data */ - RS2_OPTION_MIN_DISTANCE , /**< Minimal distance to the target */ - RS2_OPTION_MAX_DISTANCE , /**< Maximum distance to the target */ - RS2_OPTION_TEXTURE_SOURCE , /**< Texture mapping stream unique ID */ - RS2_OPTION_FILTER_MAGNITUDE , /**< The 2D-filter effect. 
The specific interpretation is given within the context of the filter */ - RS2_OPTION_FILTER_SMOOTH_ALPHA , /**< 2D-filter parameter controls the weight/radius for smoothing.*/ - RS2_OPTION_FILTER_SMOOTH_DELTA , /**< 2D-filter range/validity threshold*/ - RS2_OPTION_HOLES_FILL , /**< Enhance depth data post-processing with holes filling where appropriate*/ - RS2_OPTION_STEREO_BASELINE , /**< The distance in mm between the first and the second imagers in stereo-based depth cameras*/ - RS2_OPTION_AUTO_EXPOSURE_CONVERGE_STEP , /**< Allows dynamically ajust the converge step value of the target exposure in Auto-Exposure algorithm*/ - RS2_OPTION_INTER_CAM_SYNC_MODE , /**< Impose Inter-camera HW synchronization mode. Applicable for D400/Rolling Shutter SKUs */ - RS2_OPTION_COUNT /**< Number of enumeration values. Not a valid input: intended to be used in for-loops. */ -} rs2_option; -const char* rs2_option_to_string(rs2_option option); - -/** \brief For SR300 devices: provides optimized settings (presets) for specific types of usage. */ -typedef enum rs2_sr300_visual_preset -{ - RS2_SR300_VISUAL_PRESET_SHORT_RANGE , /**< Preset for short range */ - RS2_SR300_VISUAL_PRESET_LONG_RANGE , /**< Preset for long range */ - RS2_SR300_VISUAL_PRESET_BACKGROUND_SEGMENTATION , /**< Preset for background segmentation */ - RS2_SR300_VISUAL_PRESET_GESTURE_RECOGNITION , /**< Preset for gesture recognition */ - RS2_SR300_VISUAL_PRESET_OBJECT_SCANNING , /**< Preset for object scanning */ - RS2_SR300_VISUAL_PRESET_FACE_ANALYTICS , /**< Preset for face analytics */ - RS2_SR300_VISUAL_PRESET_FACE_LOGIN , /**< Preset for face login */ - RS2_SR300_VISUAL_PRESET_GR_CURSOR , /**< Preset for GR cursor */ - RS2_SR300_VISUAL_PRESET_DEFAULT , /**< Camera default settings */ - RS2_SR300_VISUAL_PRESET_MID_RANGE , /**< Preset for mid-range */ - RS2_SR300_VISUAL_PRESET_IR_ONLY , /**< Preset for IR only */ - RS2_SR300_VISUAL_PRESET_COUNT /**< Number of enumeration values. 
Not a valid input: intended to be used in for-loops. */ -} rs2_sr300_visual_preset; -const char* rs2_sr300_visual_preset_to_string(rs2_sr300_visual_preset preset); - -/** \brief For RS400 devices: provides optimized settings (presets) for specific types of usage. */ -typedef enum rs2_rs400_visual_preset -{ - RS2_RS400_VISUAL_PRESET_CUSTOM, - RS2_RS400_VISUAL_PRESET_DEFAULT, - RS2_RS400_VISUAL_PRESET_HAND, - RS2_RS400_VISUAL_PRESET_HIGH_ACCURACY, - RS2_RS400_VISUAL_PRESET_HIGH_DENSITY, - RS2_RS400_VISUAL_PRESET_MEDIUM_DENSITY, - RS2_RS400_VISUAL_PRESET_REMOVE_IR_PATTERN, - RS2_RS400_VISUAL_PRESET_COUNT -} rs2_rs400_visual_preset; -const char* rs2_rs400_visual_preset_to_string(rs2_rs400_visual_preset preset); - -/** -* check if an option is read-only -* \param[in] sensor the RealSense sensor -* \param[in] option option id to be checked -* \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored -* \return true if option is read-only -*/ -int rs2_is_option_read_only(const rs2_options* options, rs2_option option, rs2_error** error); - -/** -* read option value from the sensor -* \param[in] sensor the RealSense sensor -* \param[in] option option id to be queried -* \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored -* \return value of the option -*/ -float rs2_get_option(const rs2_options* options, rs2_option option, rs2_error** error); - -/** -* write new value to sensor option -* \param[in] sensor the RealSense sensor -* \param[in] option option id to be queried -* \param[in] value new value for the option -* \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored -*/ -void rs2_set_option(const rs2_options* options, rs2_option option, float value, rs2_error** error); - -/** -* check if particular option is supported by a subdevice -* \param[in] sensor the RealSense sensor -* \param[in] option option id to 
be checked -* \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored -* \return true if option is supported -*/ -int rs2_supports_option(const rs2_options* options, rs2_option option, rs2_error** error); - -/** -* retrieve the available range of values of a supported option -* \param[in] sensor the RealSense device -* \param[in] option the option whose range should be queried -* \param[out] min the minimum value which will be accepted for this option -* \param[out] max the maximum value which will be accepted for this option -* \param[out] step the granularity of options which accept discrete values, or zero if the option accepts continuous values -* \param[out] def the default value of the option -* \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored -*/ -void rs2_get_option_range(const rs2_options* sensor, rs2_option option, float* min, float* max, float* step, float* def, rs2_error** error); - -/** -* get option description -* \param[in] sensor the RealSense sensor -* \param[in] option option id to be checked -* \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored -* \return human-readable option description -*/ -const char* rs2_get_option_description(const rs2_options* options, rs2_option option, rs2_error ** error); - -/** -* get option value description (in case specific option value hold special meaning) -* \param[in] device the RealSense device -* \param[in] option option id to be checked -* \param[in] value value of the option -* \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored -* \return human-readable description of a specific value of an option or null if no special meaning -*/ -const char* rs2_get_option_value_description(const rs2_options* options, rs2_option option, float value, rs2_error ** error); + typedef enum 
rs2_option + { + RS2_OPTION_BACKLIGHT_COMPENSATION, /**< Enable / disable color backlight compensation*/ + RS2_OPTION_BRIGHTNESS, /**< Color image brightness*/ + RS2_OPTION_CONTRAST, /**< Color image contrast*/ + RS2_OPTION_EXPOSURE, /**< Controls exposure time of color camera. Setting any value will disable auto exposure*/ + RS2_OPTION_GAIN, /**< Color image gain*/ + RS2_OPTION_GAMMA, /**< Color image gamma setting*/ + RS2_OPTION_HUE, /**< Color image hue*/ + RS2_OPTION_SATURATION, /**< Color image saturation setting*/ + RS2_OPTION_SHARPNESS, /**< Color image sharpness setting*/ + RS2_OPTION_WHITE_BALANCE, /**< Controls white balance of color image. Setting any value will disable auto white balance*/ + RS2_OPTION_ENABLE_AUTO_EXPOSURE, /**< Enable / disable color image auto-exposure*/ + RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE, /**< Enable / disable color image auto-white-balance*/ + RS2_OPTION_VISUAL_PRESET, /**< Provide access to several recommend sets of option presets for the depth camera */ + RS2_OPTION_LASER_POWER, /**< Power of the F200 / SR300 projector, with 0 meaning projector off*/ + RS2_OPTION_ACCURACY, /**< Set the number of patterns projected per frame. The higher the accuracy value the more patterns projected. Increasing the number of patterns help to achieve better accuracy. Note that this control is affecting the Depth FPS */ + RS2_OPTION_MOTION_RANGE, /**< Motion vs. Range trade-off, with lower values allowing for better motion sensitivity and higher values allowing for better depth range*/ + RS2_OPTION_FILTER_OPTION, /**< Set the filter to apply to each depth frame. 
Each one of the filter is optimized per the application requirements*/ + RS2_OPTION_CONFIDENCE_THRESHOLD, /**< The confidence level threshold used by the Depth algorithm pipe to set whether a pixel will get a valid range or will be marked with invalid range*/ + RS2_OPTION_EMITTER_ENABLED, /**< Laser Emitter enabled */ + RS2_OPTION_FRAMES_QUEUE_SIZE, /**< Number of frames the user is allowed to keep per stream. Trying to hold-on to more frames will cause frame-drops.*/ + RS2_OPTION_TOTAL_FRAME_DROPS, /**< Total number of detected frame drops from all streams */ + RS2_OPTION_AUTO_EXPOSURE_MODE, /**< Auto-Exposure modes: Static, Anti-Flicker and Hybrid */ + RS2_OPTION_POWER_LINE_FREQUENCY, /**< Power Line Frequency control for anti-flickering Off/50Hz/60Hz/Auto */ + RS2_OPTION_ASIC_TEMPERATURE, /**< Current Asic Temperature */ + RS2_OPTION_ERROR_POLLING_ENABLED, /**< disable error handling */ + RS2_OPTION_PROJECTOR_TEMPERATURE, /**< Current Projector Temperature */ + RS2_OPTION_OUTPUT_TRIGGER_ENABLED, /**< Enable / disable trigger to be outputed from the camera to any external device on every depth frame */ + RS2_OPTION_MOTION_MODULE_TEMPERATURE, /**< Current Motion-Module Temperature */ + RS2_OPTION_DEPTH_UNITS, /**< Number of meters represented by a single depth unit */ + RS2_OPTION_ENABLE_MOTION_CORRECTION, /**< Enable/Disable automatic correction of the motion data */ + RS2_OPTION_AUTO_EXPOSURE_PRIORITY, /**< Allows sensor to dynamically ajust the frame rate depending on lighting conditions */ + RS2_OPTION_COLOR_SCHEME, /**< Color scheme for data visualization */ + RS2_OPTION_HISTOGRAM_EQUALIZATION_ENABLED, /**< Perform histogram equalization post-processing on the depth data */ + RS2_OPTION_MIN_DISTANCE, /**< Minimal distance to the target */ + RS2_OPTION_MAX_DISTANCE, /**< Maximum distance to the target */ + RS2_OPTION_TEXTURE_SOURCE, /**< Texture mapping stream unique ID */ + RS2_OPTION_FILTER_MAGNITUDE, /**< The 2D-filter effect. 
The specific interpretation is given within the context of the filter */ + RS2_OPTION_FILTER_SMOOTH_ALPHA, /**< 2D-filter parameter controls the weight/radius for smoothing.*/ + RS2_OPTION_FILTER_SMOOTH_DELTA, /**< 2D-filter range/validity threshold*/ + RS2_OPTION_HOLES_FILL, /**< Enhance depth data post-processing with holes filling where appropriate*/ + RS2_OPTION_STEREO_BASELINE, /**< The distance in mm between the first and the second imagers in stereo-based depth cameras*/ + RS2_OPTION_AUTO_EXPOSURE_CONVERGE_STEP, /**< Allows dynamically ajust the converge step value of the target exposure in Auto-Exposure algorithm*/ + RS2_OPTION_INTER_CAM_SYNC_MODE, /**< Impose Inter-camera HW synchronization mode. Applicable for D400/Rolling Shutter SKUs */ + RS2_OPTION_STREAM_FILTER, /**< Select a stream to process */ + RS2_OPTION_STREAM_FORMAT_FILTER, /**< Select a stream format to process */ + RS2_OPTION_STREAM_INDEX_FILTER, /**< Select a stream index to process */ + RS2_OPTION_COUNT /**< Number of enumeration values. Not a valid input: intended to be used in for-loops. */ + } rs2_option; + const char* rs2_option_to_string(rs2_option option); + + /** \brief For SR300 devices: provides optimized settings (presets) for specific types of usage. 
*/ + typedef enum rs2_sr300_visual_preset + { + RS2_SR300_VISUAL_PRESET_SHORT_RANGE, /**< Preset for short range */ + RS2_SR300_VISUAL_PRESET_LONG_RANGE, /**< Preset for long range */ + RS2_SR300_VISUAL_PRESET_BACKGROUND_SEGMENTATION, /**< Preset for background segmentation */ + RS2_SR300_VISUAL_PRESET_GESTURE_RECOGNITION, /**< Preset for gesture recognition */ + RS2_SR300_VISUAL_PRESET_OBJECT_SCANNING, /**< Preset for object scanning */ + RS2_SR300_VISUAL_PRESET_FACE_ANALYTICS, /**< Preset for face analytics */ + RS2_SR300_VISUAL_PRESET_FACE_LOGIN, /**< Preset for face login */ + RS2_SR300_VISUAL_PRESET_GR_CURSOR, /**< Preset for GR cursor */ + RS2_SR300_VISUAL_PRESET_DEFAULT, /**< Camera default settings */ + RS2_SR300_VISUAL_PRESET_MID_RANGE, /**< Preset for mid-range */ + RS2_SR300_VISUAL_PRESET_IR_ONLY, /**< Preset for IR only */ + RS2_SR300_VISUAL_PRESET_COUNT /**< Number of enumeration values. Not a valid input: intended to be used in for-loops. */ + } rs2_sr300_visual_preset; + const char* rs2_sr300_visual_preset_to_string(rs2_sr300_visual_preset preset); + + /** \brief For RS400 devices: provides optimized settings (presets) for specific types of usage. 
*/ + typedef enum rs2_rs400_visual_preset + { + RS2_RS400_VISUAL_PRESET_CUSTOM, + RS2_RS400_VISUAL_PRESET_DEFAULT, + RS2_RS400_VISUAL_PRESET_HAND, + RS2_RS400_VISUAL_PRESET_HIGH_ACCURACY, + RS2_RS400_VISUAL_PRESET_HIGH_DENSITY, + RS2_RS400_VISUAL_PRESET_MEDIUM_DENSITY, + RS2_RS400_VISUAL_PRESET_REMOVE_IR_PATTERN, + RS2_RS400_VISUAL_PRESET_COUNT + } rs2_rs400_visual_preset; + const char* rs2_rs400_visual_preset_to_string(rs2_rs400_visual_preset preset); + + /** + * check if an option is read-only + * \param[in] sensor the RealSense sensor + * \param[in] option option id to be checked + * \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored + * \return true if option is read-only + */ + int rs2_is_option_read_only(const rs2_options* options, rs2_option option, rs2_error** error); + + /** + * read option value from the sensor + * \param[in] sensor the RealSense sensor + * \param[in] option option id to be queried + * \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored + * \return value of the option + */ + float rs2_get_option(const rs2_options* options, rs2_option option, rs2_error** error); + + /** + * write new value to sensor option + * \param[in] sensor the RealSense sensor + * \param[in] option option id to be queried + * \param[in] value new value for the option + * \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored + */ + void rs2_set_option(const rs2_options* options, rs2_option option, float value, rs2_error** error); + + /** + * check if particular option is supported by a subdevice + * \param[in] sensor the RealSense sensor + * \param[in] option option id to be checked + * \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored + * \return true if option is supported + */ + int rs2_supports_option(const rs2_options* options, 
rs2_option option, rs2_error** error); + + /** + * retrieve the available range of values of a supported option + * \param[in] sensor the RealSense device + * \param[in] option the option whose range should be queried + * \param[out] min the minimum value which will be accepted for this option + * \param[out] max the maximum value which will be accepted for this option + * \param[out] step the granularity of options which accept discrete values, or zero if the option accepts continuous values + * \param[out] def the default value of the option + * \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored + */ + void rs2_get_option_range(const rs2_options* sensor, rs2_option option, float* min, float* max, float* step, float* def, rs2_error** error); + + /** + * get option description + * \param[in] sensor the RealSense sensor + * \param[in] option option id to be checked + * \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored + * \return human-readable option description + */ + const char* rs2_get_option_description(const rs2_options* options, rs2_option option, rs2_error ** error); + + /** + * get option value description (in case specific option value hold special meaning) + * \param[in] device the RealSense device + * \param[in] option option id to be checked + * \param[in] value value of the option + * \param[out] error if non-null, receives any error that occurs during this call, otherwise, errors are ignored + * \return human-readable description of a specific value of an option or null if no special meaning + */ + const char* rs2_get_option_value_description(const rs2_options* options, rs2_option option, float value, rs2_error ** error); #ifdef __cplusplus } diff --git a/include/librealsense2/h/rs_sensor.h b/include/librealsense2/h/rs_sensor.h index 9120275838..cb810d9e43 100644 --- a/include/librealsense2/h/rs_sensor.h +++ 
b/include/librealsense2/h/rs_sensor.h @@ -56,7 +56,7 @@ typedef enum rs2_format { RS2_FORMAT_ANY , /**< When passed to enable stream, librealsense will try to provide best suited format */ RS2_FORMAT_Z16 , /**< 16-bit linear depth values. The depth is meters is equal to depth scale * pixel value. */ - RS2_FORMAT_DISPARITY16 , /**< 16-bit linear disparity values. The depth in meters is equal to depth scale / pixel value. */ + RS2_FORMAT_DISPARITY16 , /**< 16-bit float-point disparity values. Depth->Disparity conversion : Disparity = Baseline*FocalLength/Depth. */ RS2_FORMAT_XYZ32F , /**< 32-bit floating point 3D coordinates. */ RS2_FORMAT_YUYV , /**< 32-bit y0, u, y1, v data for every two pixels. Similar to YUV422 but packed in a different order - https://en.wikipedia.org/wiki/YUV */ RS2_FORMAT_RGB8 , /**< 8-bit red, green and blue channels */ diff --git a/include/librealsense2/hpp/rs_context.hpp b/include/librealsense2/hpp/rs_context.hpp index 97dd9917c5..a1b0fc2800 100644 --- a/include/librealsense2/hpp/rs_context.hpp +++ b/include/librealsense2/hpp/rs_context.hpp @@ -207,13 +207,15 @@ namespace rs2 rs2::error::handle(e); } + context(std::shared_ptr ctx) + : _context(ctx) + {} + explicit operator std::shared_ptr() { return _context; }; protected: friend class rs2::pipeline; friend class rs2::device_hub; - context(std::shared_ptr ctx) - : _context(ctx) - {} + std::shared_ptr _context; }; @@ -224,11 +226,10 @@ namespace rs2 { public: explicit device_hub(context ctx) - : _ctx(std::move(ctx)) { rs2_error* e = nullptr; _device_hub = std::shared_ptr( - rs2_create_device_hub(_ctx._context.get(), &e), + rs2_create_device_hub(ctx._context.get(), &e), rs2_delete_device_hub); error::handle(e); } @@ -241,7 +242,7 @@ namespace rs2 { rs2_error* e = nullptr; std::shared_ptr dev( - rs2_device_hub_wait_for_device(_ctx._context.get(), _device_hub.get(), &e), + rs2_device_hub_wait_for_device(_device_hub.get(), &e), rs2_delete_device); error::handle(e); @@ -262,8 +263,10 @@ namespace 
rs2 return res > 0 ? true : false; } + + explicit operator std::shared_ptr() { return _device_hub; } + explicit device_hub(std::shared_ptr hub) : _device_hub(std::move(hub)) {} private: - context _ctx; std::shared_ptr _device_hub; }; diff --git a/include/librealsense2/hpp/rs_device.hpp b/include/librealsense2/hpp/rs_device.hpp index 917450d48b..9c1ed2a71a 100644 --- a/include/librealsense2/hpp/rs_device.hpp +++ b/include/librealsense2/hpp/rs_device.hpp @@ -134,6 +134,9 @@ namespace rs2 virtual ~device() { } + + explicit operator std::shared_ptr() { return _dev; }; + explicit device(std::shared_ptr dev) : _dev(dev) {} protected: friend class rs2::context; friend class rs2::device_list; @@ -141,9 +144,7 @@ namespace rs2 friend class rs2::device_hub; std::shared_ptr _dev; - explicit device(std::shared_ptr dev) : _dev(dev) - { - } + }; class debug_protocol : public device @@ -283,6 +284,8 @@ namespace rs2 return _list.get(); } + operator std::shared_ptr() { return _list; }; + private: std::shared_ptr _list; }; diff --git a/include/librealsense2/hpp/rs_frame.hpp b/include/librealsense2/hpp/rs_frame.hpp index 14f8d00302..18f68e7ac4 100644 --- a/include/librealsense2/hpp/rs_frame.hpp +++ b/include/librealsense2/hpp/rs_frame.hpp @@ -77,10 +77,10 @@ namespace rs2 */ bool operator==(const stream_profile& rhs) { - return stream_index() == rhs.stream_index()&& - stream_type() == rhs.stream_type()&& - format() == rhs.format()&& - fps() == rhs.fps(); + return stream_index() == rhs.stream_index() && + stream_type() == rhs.stream_type() && + format() == rhs.format() && + fps() == rhs.fps(); } /** @@ -139,10 +139,6 @@ namespace rs2 Operator implement, return the internal stream profile instance. * \return rs2_stream_profile* - internal instance to communicate with real implementation. 
*/ - operator const rs2_stream_profile*() - { - return _profile; - } /** * Get the extrinsic transformation between two profiles (representing physical sensors) * \param[in] stream_profile to - the stream profile (another sensor) to be based to return the extrinsic @@ -169,12 +165,7 @@ namespace rs2 error::handle(e); } - protected: - friend class rs2::sensor; - friend class rs2::frame; - friend class rs2::pipeline_profile; - friend class software_sensor; - + bool is_cloned() { return bool(_clone); } explicit stream_profile(const rs2_stream_profile* profile) : _profile(profile) { rs2_error* e = nullptr; @@ -185,6 +176,14 @@ namespace rs2 error::handle(e); } + operator const rs2_stream_profile*() { return _profile; } + explicit operator std::shared_ptr() { return _clone; } + + protected: + friend class rs2::sensor; + friend class rs2::frame; + friend class rs2::pipeline_profile; + friend class software_sensor; const rs2_stream_profile* _profile; std::shared_ptr _clone; @@ -283,6 +282,17 @@ namespace rs2 } }; + /** + Interface for frame processing functionality + */ + class process_interface + { + public: + virtual rs2::frame process(rs2::frame frame) const = 0; + virtual rs2_processing_block* get() const = 0; + virtual ~process_interface() = default; + }; + class frame { public: @@ -335,6 +345,7 @@ namespace rs2 swap(other); return *this; } + /** * Set the internal frame handle to the one in parameter, the function create additional reference if internal reference exist. * \param[in] frame other - another frame instance to be pointed to @@ -345,7 +356,7 @@ namespace rs2 if (frame_ref) add_ref(); #ifdef _DEBUG frame_number = other.frame_number; - profile = other.profile; + profile = other.profile; #endif } /** @@ -493,6 +504,12 @@ namespace rs2 * \return rs2_frame - internal frame handle. 
*/ rs2_frame* get() const { return frame_ref; } + explicit operator rs2_frame*() { return frame_ref; } + + frame apply_filter(process_interface& processing_block) + { + return processing_block.process(*this); + } protected: /** @@ -777,7 +794,7 @@ namespace rs2 rs2_vector get_motion_data() { auto data = reinterpret_cast(get_data()); - return rs2_vector{data[0], data[1], data[2]}; + return rs2_vector{ data[0], data[1], data[2] }; } }; @@ -843,17 +860,18 @@ namespace rs2 } /** - * Retrieve back the first frame of specific stream type, if no frame found, return the default one(frame instance) + * Retrieve back the first frame of specific stream and format types, if no frame found, return the default one(frame instance) * \param[in] rs2_stream s - frame to be retrieved from this stream type. + * \param[in] rs2_format f - frame to be retrieved from this format type. * \return frame - first found frame with s stream type. */ - frame first_or_default(rs2_stream s) const + frame first_or_default(rs2_stream s, rs2_format f = RS2_FORMAT_ANY) const { frame result; - foreach([&result, s](frame f) { - if (!result && f.get_profile().stream_type() == s) + foreach([&result, s, f](frame frame) { + if (!result && frame.get_profile().stream_type() == s && (f == RS2_FORMAT_ANY || f == frame.get_profile().format())) { - result = std::move(f); + result = std::move(frame); } }); return result; @@ -861,13 +879,14 @@ namespace rs2 /** * Retrieve back the first frame of specific stream type, if no frame found, error will be thrown * \param[in] rs2_stream s - frame to be retrieved from this stream type. + * \param[in] rs2_format f - frame to be retrieved from this format type. * \return frame - first found frame with s stream type. 
*/ - frame first(rs2_stream s) const + frame first(rs2_stream s, rs2_format f = RS2_FORMAT_ANY) const { - auto f = first_or_default(s); - if (!f) throw error("Frame of requested stream type was not found!"); - return f; + auto frame = first_or_default(s, f); + if (!frame) throw error("Frame of requested stream type was not found!"); + return frame; } /** @@ -876,7 +895,7 @@ namespace rs2 */ depth_frame get_depth_frame() const { - auto f = first_or_default(RS2_STREAM_DEPTH); + auto f = first_or_default(RS2_STREAM_DEPTH, RS2_FORMAT_Z16); return f.as(); } /** diff --git a/include/librealsense2/hpp/rs_pipeline.hpp b/include/librealsense2/hpp/rs_pipeline.hpp index 120a80c2e2..670957f5f0 100644 --- a/include/librealsense2/hpp/rs_pipeline.hpp +++ b/include/librealsense2/hpp/rs_pipeline.hpp @@ -102,12 +102,11 @@ namespace rs2 return _pipeline_profile != nullptr; } - private: + explicit operator std::shared_ptr() { return _pipeline_profile; } pipeline_profile(std::shared_ptr profile) : - _pipeline_profile(profile) - { - - } + _pipeline_profile(profile){} + private: + std::shared_ptr _pipeline_profile; friend class config; friend class pipeline; @@ -316,12 +315,14 @@ namespace rs2 { return _config; } - private: - config(std::shared_ptr config) : _config(config) + explicit operator std::shared_ptr() const { + return _config; } - std::shared_ptr _config; + config(std::shared_ptr config) : _config(config) {} + private: + std::shared_ptr _config; }; /** @@ -343,7 +344,6 @@ namespace rs2 * \param[in] ctx The context allocated by the application. Using the platform context by default. 
*/ pipeline(context ctx = context()) - : _ctx(ctx) { rs2_error* e = nullptr; _pipeline = std::shared_ptr( @@ -509,9 +509,9 @@ namespace rs2 { return _pipeline; } + explicit pipeline(std::shared_ptr ptr) : _pipeline(ptr) {} private: - context _ctx; std::shared_ptr _pipeline; friend class config; }; diff --git a/include/librealsense2/hpp/rs_processing.hpp b/include/librealsense2/hpp/rs_processing.hpp index 66f5c8cf16..c536a23cbf 100644 --- a/include/librealsense2/hpp/rs_processing.hpp +++ b/include/librealsense2/hpp/rs_processing.hpp @@ -43,6 +43,16 @@ namespace rs2 error::handle(e); return result; } + + frame allocate_points(const stream_profile& profile, + const frame& original) const + { + rs2_error* e = nullptr; + auto result = rs2_allocate_points(_source, profile.get(), original.get(), &e); + error::handle(e); + return result; + } + /** * Allocate composite frame with given params * @@ -101,89 +111,6 @@ namespace rs2 void release() override { delete this; } }; - /** - * Define the processing block flow, inherit this class to generate your own processing_block. Best understanding is to refer to colorizer.h/cpp - */ - class processing_block : public options - { - public: - /** - * Start the processing block with callback function on_frame to inform the application the frame is processed. - * - * \param[in] on_frame callback function for noticing the frame to be processed is ready. - */ - template - void start(S on_frame) - { - rs2_error* e = nullptr; - rs2_start_processing(_block.get(), new frame_callback(on_frame), &e); - error::handle(e); - } - /** - * Does the same thing as "start" function - * - * \param[in] on_frame address of callback function for noticing the frame to be processed is ready. - * \return address of callback function. - */ - template - S& operator>>(S& on_frame) - { - start(on_frame); - return on_frame; - } - /** - * Ask processing block to process the frame - * - * \param[in] on_frame frame to be processed. 
- */ - void invoke(frame f) const - { - rs2_frame* ptr = nullptr; - std::swap(f.frame_ref, ptr); - - rs2_error* e = nullptr; - rs2_process_frame(_block.get(), ptr, &e); - error::handle(e); - } - /** - * Does the same thing as "invoke" function - */ - void operator()(frame f) const - { - invoke(std::move(f)); - } - /** - * constructor with already created low level processing block assigned. - * - * \param[in] block - low level rs2_processing_block created before. - */ - processing_block(std::shared_ptr block) - : options((rs2_options*)block.get()),_block(block) - { - } - - /** - * constructor with callback function on_frame in rs2_frame_processor_callback structure assigned. - * - * \param[in] processing_function - function pointer of on_frame function in rs2_frame_processor_callback structure, which will be called back by invoke function . - */ - template - processing_block(S processing_function) - { - rs2_error* e = nullptr; - _block = std::shared_ptr( - rs2_create_processing_block(new frame_processor_callback(processing_function),&e), - rs2_delete_processing_block); - options::operator=(_block); - error::handle(e); - } - - operator rs2_options*() const { return (rs2_options*)_block.get(); } - - private: - std::shared_ptr _block; - }; - class frame_queue { public: @@ -192,12 +119,12 @@ namespace rs2 * to help developers who are not using async APIs * param[in] capacity size of the frame queue */ - explicit frame_queue(unsigned int capacity): _capacity(capacity) + explicit frame_queue(unsigned int capacity) : _capacity(capacity) { rs2_error* e = nullptr; _queue = std::shared_ptr( - rs2_create_frame_queue(capacity, &e), - rs2_delete_frame_queue); + rs2_create_frame_queue(capacity, &e), + rs2_delete_frame_queue); error::handle(e); } @@ -272,29 +199,118 @@ namespace rs2 }; /** - * Generating the 3D point cloud base on depth frame also create the mapped texture. + * Define the processing block flow, inherit this class to generate your own processing_block. 
Best understanding is to refer to the viewer class in examples.hpp */ - class pointcloud : public options + class processing_block : public process_interface, public options { public: /** - * create pointcloud instance + * Start the processing block with callback function on_frame to inform the application the frame is processed. + * + * \param[in] on_frame callback function for noticing the frame to be processed is ready. */ - pointcloud() : _queue(1) + template + void start(S on_frame) { rs2_error* e = nullptr; + rs2_start_processing(get(), new frame_callback(on_frame), &e); + error::handle(e); + } + /** + * Does the same thing as "start" function + * + * \param[in] on_frame address of callback function for noticing the frame to be processed is ready. + * \return address of callback function. + */ + template + S& operator>>(S& on_frame) + { + start(on_frame); + return on_frame; + } + /** + * Ask processing block to process the frame + * + * \param[in] on_frame frame to be processed. + */ + void invoke(frame f) const + { + rs2_frame* ptr = nullptr; + std::swap(f.frame_ref, ptr); - auto pb = std::shared_ptr( - rs2_create_pointcloud(&e), - rs2_delete_processing_block); - _block = std::make_shared(pb); + rs2_error* e = nullptr; + rs2_process_frame(get(), ptr, &e); + error::handle(e); + } + /** + * Ask processing block to process the frame and poll the processed frame from internal queue + * + * \param[in] on_frame frame to be processed. + * return processed frame + */ + rs2::frame process(rs2::frame frame) const override + { + invoke(frame); + rs2::frame f; + if (!_queue.poll_for_frame(&f)) + throw std::runtime_error("Error occured during execution of the processing block! See the log for more info"); + return f; + } + /** + * constructor with already created low level processing block assigned. + * + * \param[in] block - low level rs2_processing_block created before. 
+ */ + processing_block(std::shared_ptr block) + : _block(block), options((rs2_options*)block.get()) + { + } + /** + * constructor with callback function on_frame in rs2_frame_processor_callback structure assigned. + * + * \param[in] processing_function - function pointer of on_frame function in rs2_frame_processor_callback structure, which will be called back by invoke function . + */ + template + processing_block(S processing_function) + { + rs2_error* e = nullptr; + _block = std::shared_ptr( + rs2_create_processing_block(new frame_processor_callback(processing_function), &e), + rs2_delete_processing_block); + options::operator=(_block); error::handle(e); + } - // Redirect options API to the processing block - options::operator=(pb); + frame_queue get_queue() { return _queue; } + operator rs2_options*() const { return (rs2_options*)get(); } + rs2_processing_block* get() const override { return _block.get(); } + protected: + processing_block(std::shared_ptr block, int queue_size) + : _block(block), _queue(queue_size), options((rs2_options*)block.get()) + { + start(_queue); + } - _block->start(_queue); + std::shared_ptr _block; + frame_queue _queue; + }; + + /** + * Generating the 3D point cloud base on depth frame also create the mapped texture. + */ + class pointcloud : public processing_block + { + public: + /** + * create pointcloud instance + */ + pointcloud() : processing_block(init(), 1) {} + + pointcloud(rs2_stream stream, int index = 0) : processing_block(init(), 1) + { + set_option(RS2_OPTION_STREAM_FILTER, float(stream)); + set_option(RS2_OPTION_STREAM_INDEX_FILTER, float(index)); } /** * Generate the pointcloud and texture mappings of depth map. @@ -304,11 +320,7 @@ namespace rs2 */ points calculate(frame depth) { - _block->invoke(std::move(depth)); - rs2::frame f; - if (!_queue.poll_for_frame(&f)) - throw std::runtime_error("Error occured during execution of the processing block! 
See the log for more info"); - return points(f); + return process(depth); } /** * Map the point cloud to other frame. @@ -317,52 +329,50 @@ namespace rs2 */ void map_to(frame mapped) { - _block->set_option(RS2_OPTION_TEXTURE_SOURCE, float(mapped.get_profile().unique_id())); - _block->invoke(std::move(mapped)); + set_option(RS2_OPTION_STREAM_FILTER, float(mapped.get_profile().stream_type())); + set_option(RS2_OPTION_STREAM_FORMAT_FILTER, float(mapped.get_profile().format())); + set_option(RS2_OPTION_STREAM_INDEX_FILTER, float(mapped.get_profile().stream_index())); + invoke(mapped); } + private: friend class context; - std::shared_ptr _block; - frame_queue _queue; + std::shared_ptr init() + { + rs2_error* e = nullptr; + + auto block = std::shared_ptr( + rs2_create_pointcloud(&e), + rs2_delete_processing_block); + + error::handle(e); + + // Redirect options API to the processing block + //options::operator=(pb); + return block; + } }; - class asynchronous_syncer + class asynchronous_syncer : public processing_block { public: /** * Real asynchronous syncer within syncer class */ - asynchronous_syncer() + asynchronous_syncer() : processing_block(init(), 1) {} + + private: + std::shared_ptr init() { rs2_error* e = nullptr; - _processing_block = std::make_shared( - std::shared_ptr( - rs2_create_sync_processing_block(&e), - rs2_delete_processing_block)); + auto block = std::shared_ptr( + rs2_create_sync_processing_block(&e), + rs2_delete_processing_block); error::handle(e); + return block; } - - /** - * Start and set the callback when the synchronization is done. - * \param[in] on_frame - callback function, will be invoked when synchronization is finished. - */ - template - void start(S on_frame) - { - _processing_block->start(on_frame); - } - /** - * Doing the actual synchronization work for the frame - * \param[in] f - frame to send to processing block to do the synchronization. 
- */ - void operator()(frame f) const - { - _processing_block->operator()(std::move(f)); - } - private: - std::shared_ptr _processing_block; }; class syncer @@ -375,7 +385,6 @@ namespace rs2 :_results(queue_size) { _sync.start(_results); - } /** @@ -423,7 +432,7 @@ namespace rs2 void operator()(frame f) const { - _sync(std::move(f)); + _sync.invoke(std::move(f)); } private: asynchronous_syncer _sync; @@ -431,83 +440,72 @@ namespace rs2 }; /** - Auxiliary processing block that performs image alignment using depth data and camera calibration + Auxiliary processing block that performs image alignment using depth data and camera calibration */ - class align + class align : public processing_block { public: /** - Create align processing block - Alignment is performed between a depth image and another image. - To perform alignment of a depth image to the other, set the align_to parameter with the other stream type. - To perform alignment of a non depth image to a depth image, set the align_to parameter to RS2_STREAM_DEPTH - Camera calibration and frame's stream type are determined on the fly, according to the first valid frameset passed to process() + Create align processing block + Alignment is performed between a depth image and another image. + To perform alignment of a depth image to the other, set the align_to parameter with the other stream type. + To perform alignment of a non depth image to a depth image, set the align_to parameter to RS2_STREAM_DEPTH + Camera calibration and frame's stream type are determined on the fly, according to the first valid frameset passed to process() - * \param[in] align_to The stream type to which alignment should be made. + * \param[in] align_to The stream type to which alignment should be made. 
*/ - align(rs2_stream align_to) :_queue(1) - { - rs2_error* e = nullptr; - _block = std::make_shared( - std::shared_ptr( - rs2_create_align(align_to, &e), - rs2_delete_processing_block)); - error::handle(e); - - _block->start(_queue); - } + align(rs2_stream align_to) : processing_block(init(align_to), 1) {} /** * Run the alignment process on the given frames to get an aligned set of frames * - * \param[in] frame A pair of images, where at least one of which is a depth frame + * \param[in] frames A set of frames, where at least one of which is a depth frame * \return Input frames aligned to one another */ - frameset process(frameset frame) + frameset process(frameset frames) { - (*_block)(frame); - rs2::frame f; - if (!_queue.poll_for_frame(&f)) - throw std::runtime_error("Error occured during execution of the processing block! See the log for more info"); - return frameset(f); - } - /** - * Run the alignment process on the given frame - * - * \param[in] f - A pair of images, where at least one of which is a depth frame - * \return Input frames aligned to one another - */ - void operator()(frame f) const - { - (*_block)(std::move(f)); + return processing_block::process(frames); } + private: friend class context; + std::shared_ptr init(rs2_stream align_to) + { + rs2_error* e = nullptr; + auto block = std::shared_ptr( + rs2_create_align(align_to, &e), + rs2_delete_processing_block); + error::handle(e); - std::shared_ptr _block; - frame_queue _queue; + return block; + } }; - class colorizer : public options + class colorizer : public processing_block { public: /** * Create colorizer processing block * Colorizer generate color image base on input depth frame */ - colorizer() : _queue(1) + colorizer() : processing_block(init(), 1) { } + /** + * Create colorizer processing block + * Colorizer generate color image base on input depth frame + * \param[in] color_scheme - select one of the available color schemes: + * 0 - Jet + * 1 - Classic + * 2 - WhiteToBlack + * 3 - 
BlackToWhite + * 4 - Bio + * 5 - Cold + * 6 - Warm + * 7 - Quantized + * 8 - Pattern + */ + colorizer(float color_scheme) : processing_block(init(), 1) { - rs2_error* e = nullptr; - auto pb = std::shared_ptr( - rs2_create_colorizer(&e), - rs2_delete_processing_block); - _block = std::make_shared(pb); - error::handle(e); - - // Redirect options API to the processing block - options::operator=(pb); - - _block->start(_queue); + set_option(RS2_OPTION_COLOR_SCHEME, float(color_scheme)); } /** * Start to generate color image base on depth frame @@ -516,140 +514,114 @@ namespace rs2 */ video_frame colorize(frame depth) const { - if(depth) - { - _block->invoke(std::move(depth)); - rs2::frame f; - if (!_queue.poll_for_frame(&f)) - throw std::runtime_error("Error occured during execution of the processing block! See the log for more info"); - return video_frame(f); - } - return depth; + return process(depth); } - /** - * Same function as colorize(depth) - * \param[in] depth - depth frame to be processed to generate the color image - * \return video_frame - generated color image - */ - video_frame operator()(frame depth) const { return colorize(depth); } - private: - std::shared_ptr _block; - frame_queue _queue; - }; + private: + std::shared_ptr init() + { + rs2_error* e = nullptr; + auto block = std::shared_ptr( + rs2_create_colorizer(&e), + rs2_delete_processing_block); + error::handle(e); - /** - Interface for frame processing functionality - */ - class process_interface : public options - { - public: - virtual rs2::frame process(rs2::frame frame) = 0; - virtual void operator()(frame f) const = 0; - virtual ~process_interface() = default; + // Redirect options API to the processing block + //options::operator=(pb); + + return block; + } }; - class decimation_filter : public process_interface + class decimation_filter : public processing_block { public: /** * Create decimation filter processing block * decimation filter performing downsampling by using the median with 
specific kernel size */ - decimation_filter() :_queue(1) + decimation_filter() : processing_block(init(), 1) {} + /** + * Create decimation filter processing block + * decimation filter performing downsampling by using the median with specific kernel size + * \param[in] magnitude - number of filter iterations. + */ + decimation_filter(float magnitude) : processing_block(init(), 1) + { + set_option(RS2_OPTION_FILTER_MAGNITUDE, magnitude); + } + + private: + friend class context; + + std::shared_ptr init() { rs2_error* e = nullptr; - auto pb = std::shared_ptr( + auto block = std::shared_ptr( rs2_create_decimation_filter_block(&e), rs2_delete_processing_block); - _block = std::make_shared(pb); error::handle(e); // Redirect options API to the processing block - options::operator=(pb); + //options::operator=(this); - _block->start(_queue); - } - /** - * process the frame AND return the result - * \param[in] frame - depth frame to be processed - * \return rs2::frame - filtered frame - */ - rs2::frame process(rs2::frame frame) override - { - (*_block)(frame); - rs2::frame f; - if (!_queue.poll_for_frame(&f)) - throw std::runtime_error("Error occured during execution of the processing block! See the log for more info"); - return f; + return block; } - /** - * process the frame - * \param[in] frame - depth frame to be processed - */ - void operator()(frame f) const override - { - (*_block)(std::move(f)); - } - private: - friend class context; - - std::shared_ptr _block; - frame_queue _queue; }; - class temporal_filter : public process_interface + class temporal_filter : public processing_block { public: /** - * Create temporal filter processing block + * Create temporal filter processing block with default settings * temporal filter smooth the image by calculating multiple frames with alpha and delta settings * alpha defines the weight of current frame, delta defines threshold for edge classification and preserving. 
* For more information, check the temporal-filter.cpp */ - temporal_filter() :_queue(1) + temporal_filter() : processing_block(init(), 1) {} + /** + * Create temporal filter processing block with user settings + * temporal filter smooth the image by calculating multiple frames with alpha and delta settings + * \param[in] smooth_alpha - defines the weight of current frame. + * \param[in] smooth_delta - delta defines threshold for edge classification and preserving. + * \param[in] persistence_control - A set of predefined rules (masks) that govern when missing pixels will be replace with the last valid value so that the data will remain persistent over time: + * 0 - Disabled - Persistency filter is not activated and no hole filling occurs. + * 1 - Valid in 8/8 - Persistency activated if the pixel was valid in 8 out of the last 8 frames + * 2 - Valid in 2/last 3 - Activated if the pixel was valid in two out of the last 3 frames + * 3 - Valid in 2/last 4 - Activated if the pixel was valid in two out of the last 4 frames + * 4 - Valid in 2/8 - Activated if the pixel was valid in two out of the last 8 frames + * 5 - Valid in 1/last 2 - Activated if the pixel was valid in one of the last two frames + * 6 - Valid in 1/last 5 - Activated if the pixel was valid in one out of the last 5 frames + * 7 - Valid in 1/last 8 - Activated if the pixel was valid in one out of the last 8 frames + * 8 - Persist Indefinitely - Persistency will be imposed regardless of the stored history(most aggressive filtering) + * For more information, check the temporal-filter.cpp + */ + temporal_filter(float smooth_alpha, float smooth_delta, int persistence_control) : processing_block(init(), 1) + { + set_option(RS2_OPTION_HOLES_FILL, float(persistence_control)); + set_option(RS2_OPTION_FILTER_SMOOTH_ALPHA, float(smooth_alpha)); + set_option(RS2_OPTION_FILTER_SMOOTH_DELTA, float(smooth_delta)); + } + + private: + friend class context; + + std::shared_ptr init() { rs2_error* e = nullptr; - auto pb = 
std::shared_ptr( + auto block = std::shared_ptr( rs2_create_temporal_filter_block(&e), rs2_delete_processing_block); - _block = std::make_shared(pb); error::handle(e); // Redirect options API to the processing block - options::operator=(pb); + //options::operator=(pb); - _block->start(_queue); + return block; } - /** - * process the frame AND return the result - * \param[in] frame - depth frame to be processed - * \return rs2::frame - filtered frame - */ - rs2::frame process(rs2::frame frame) override - { - (*_block)(frame); - rs2::frame f; - if (!_queue.poll_for_frame(&f)) - throw std::runtime_error("Error occured during execution of the processing block! See the log for more info"); - return f; - } - /** - * process the frame - * \param[in] frame - depth frame to be processed - */ - void operator()(frame f) const override - { - (*_block)(std::move(f)); - } - private: - friend class context; - - std::shared_ptr _block; - frame_queue _queue; }; - class spatial_filter : public process_interface + class spatial_filter : public processing_block { public: /** @@ -659,148 +631,107 @@ namespace rs2 * delta defines the depth gradient below which the smoothing will occur as number of depth levels * For more information, check the spatial-filter.cpp */ - spatial_filter() :_queue(1) - { - rs2_error* e = nullptr; - auto pb = std::shared_ptr( - rs2_create_spatial_filter_block(&e), - rs2_delete_processing_block); - _block = std::make_shared(pb); - error::handle(e); - - // Redirect options API to the processing block - options::operator=(pb); - - _block->start(_queue); - } + spatial_filter() : processing_block(init(), 1) { } /** - * process the frame AND return the result - * \param[in] frame - depth frame to be processed - * \return rs2::frame - filtered frame + * Create spatial filter processing block + * spatial filter smooth the image by calculating frame with alpha and delta settings + * \param[in] smooth_alpha - defines the weight of the current pixel for smoothing is 
bounded within [25..100]% + * \param[in] smooth_delta - defines the depth gradient below which the smoothing will occur as number of depth levels + * \param[in] magnitude - number of filter iterations. + * \param[in] hole_fill - an in-place heuristic symmetric hole-filling mode applied horizontally during the filter passes. + * Intended to rectify minor artefacts with minimal performance impact */ - rs2::frame process(rs2::frame frame) override + spatial_filter(float smooth_alpha, float smooth_delta, float magnitude, float hole_fill) : processing_block(init(), 1) { - (*_block)(frame); - rs2::frame f; - if (!_queue.poll_for_frame(&f)) - throw std::runtime_error("Error occured during execution of the processing block! See the log for more info"); - return f; + set_option(RS2_OPTION_FILTER_SMOOTH_ALPHA, float(smooth_alpha)); + set_option(RS2_OPTION_FILTER_SMOOTH_DELTA, float(smooth_delta)); + set_option(RS2_OPTION_FILTER_MAGNITUDE, magnitude); + set_option(RS2_OPTION_HOLES_FILL, hole_fill); } - /** - * process the frame - * \param[in] frame - depth frame to be processed - */ - void operator()(frame f) const override - { - (*_block)(std::move(f)); - } private: friend class context; - std::shared_ptr _block; - frame_queue _queue; + std::shared_ptr init() + { + rs2_error* e = nullptr; + auto block = std::shared_ptr( + rs2_create_spatial_filter_block(&e), + rs2_delete_processing_block); + error::handle(e); + + // Redirect options API to the processing block + //options::operator=(pb); + + return block; + } }; - class disparity_transform : public process_interface + class disparity_transform : public processing_block { public: /** * Create disparity transform processing block * the processing convert the depth and disparity from each pixel */ - disparity_transform(bool transform_to_disparity=true) :_queue(1) + disparity_transform(bool transform_to_disparity = true) : processing_block(init(transform_to_disparity), 1) { } + + private: + friend class context; + 
std::shared_ptr init(bool transform_to_disparity) { rs2_error* e = nullptr; - auto pb = std::shared_ptr( - rs2_create_disparity_transform_block(uint8_t(transform_to_disparity),&e), + auto block = std::shared_ptr( + rs2_create_disparity_transform_block(uint8_t(transform_to_disparity), &e), rs2_delete_processing_block); - _block = std::make_shared(pb); error::handle(e); // Redirect options API to the processing block - options::operator=(pb); + //options::operator=(pb); - _block->start(_queue); + return block; } + }; + class hole_filling_filter : public processing_block + { + public: /** - * process the frame AND return the result - * \param[in] frame - depth frame to be processed - * \return rs2::frame - filtered frame + * Create hole filling processing block + * the processing perform the hole filling base on different hole filling mode. */ - rs2::frame process(rs2::frame frame) override - { - (*_block)(frame); - rs2::frame f; - if (!_queue.poll_for_frame(&f)) - throw std::runtime_error("Error occured during execution of the processing block! See the log for more info"); - return f; - } + hole_filling_filter() : processing_block(init(), 1) {} /** - * process the frame - * \param[in] frame - depth frame to be processed + * Create hole filling processing block + * the processing perform the hole filling base on different hole filling mode. 
+ * \param[in] mode - select the hole fill mode: + * 0 - fill_from_left - Use the value from the left neighbor pixel to fill the hole + * 1 - farest_from_around - Use the value from the neighboring pixel which is furthest away from the sensor + * 2 - nearest_from_around - - Use the value from the neighboring pixel closest to the sensor */ - void operator()(frame f) const override + hole_filling_filter(int mode) : processing_block(init(), 1) { - (*_block)(std::move(f)); + set_option(RS2_OPTION_HOLES_FILL, float(mode)); } + private: friend class context; - std::shared_ptr _block; - frame_queue _queue; - }; - - class hole_filling_filter : public process_interface - { - public: - /** - * Create hole filling processing block - * the processing perform the hole filling base on different hole filling mode. - */ - hole_filling_filter() :_queue(1) + std::shared_ptr init() { rs2_error* e = nullptr; - auto pb = std::shared_ptr( + auto block = std::shared_ptr( rs2_create_hole_filling_filter_block(&e), rs2_delete_processing_block); - _block = std::make_shared(pb); error::handle(e); // Redirect options API to the processing block - options::operator=(pb); + //options::operator=(_block); - _block->start(_queue); + return block; } - /** - * process the frame AND return the result - * \param[in] frame - depth frame to be processed - * \return rs2::frame - filtered frame - */ - rs2::frame process(rs2::frame frame) override - { - (*_block)(frame); - rs2::frame f; - if (!_queue.poll_for_frame(&f)) - throw std::runtime_error("Error occured during execution of the processing block! 
See the log for more info"); - return f; - } - /** - * process the frame - * \param[in] frame - depth frame to be processed - */ - void operator()(frame f) const override - { - (*_block)(std::move(f)); - } - private: - friend class context; - - std::shared_ptr _block; - frame_queue _queue; }; } #endif // LIBREALSENSE_RS2_PROCESSING_HPP diff --git a/include/librealsense2/hpp/rs_sensor.hpp b/include/librealsense2/hpp/rs_sensor.hpp index 21e41ec68c..b3995dda9d 100644 --- a/include/librealsense2/hpp/rs_sensor.hpp +++ b/include/librealsense2/hpp/rs_sensor.hpp @@ -9,6 +9,7 @@ namespace rs2 { + class notification { public: @@ -211,6 +212,8 @@ namespace rs2 _options = other._options; return *this; } + // if operator= is ok, this should be ok too + options(const options& other) : _options(other._options) {} virtual ~options() = default; protected: @@ -223,7 +226,7 @@ namespace rs2 return *this; } - options(const options& other) : _options(other._options) {} + private: rs2_options* _options; @@ -411,6 +414,12 @@ namespace rs2 return extension; } + explicit sensor(std::shared_ptr dev) + :options((rs2_options*)dev.get()), _sensor(dev) + { + } + explicit operator std::shared_ptr() { return _sensor; } + protected: friend context; friend device_list; @@ -420,10 +429,7 @@ namespace rs2 std::shared_ptr _sensor; - explicit sensor(std::shared_ptr dev) - :options((rs2_options*)dev.get()), _sensor(dev) - { - } + }; inline bool operator==(const sensor& lhs, const sensor& rhs) @@ -495,6 +501,7 @@ namespace rs2 } operator bool() const { return _sensor.get() != nullptr; } + explicit depth_sensor(std::shared_ptr dev) : depth_sensor(sensor(dev)) {} }; class depth_stereo_sensor : public depth_sensor diff --git a/include/librealsense2/rs.h b/include/librealsense2/rs.h index 3ff761aef8..1f5a95a52c 100644 --- a/include/librealsense2/rs.h +++ b/include/librealsense2/rs.h @@ -23,7 +23,7 @@ extern "C" { #include "h/rs_sensor.h" #define RS2_API_MAJOR_VERSION 2 -#define RS2_API_MINOR_VERSION 15 
+#define RS2_API_MINOR_VERSION 16 #define RS2_API_PATCH_VERSION 0 #define RS2_API_BUILD_VERSION 0 diff --git a/include/librealsense2/rsutil.h b/include/librealsense2/rsutil.h index e6218b1e57..43672d4797 100644 --- a/include/librealsense2/rsutil.h +++ b/include/librealsense2/rsutil.h @@ -6,8 +6,12 @@ #include "h/rs_types.h" #include "h/rs_sensor.h" +#include "h/rs_frame.h" +#include "rs.h" #include "assert.h" - +#include +#include +#include #include @@ -80,4 +84,80 @@ static void rs2_fov(const struct rs2_intrinsics * intrin, float to_fov[2]) to_fov[1] = (atan2f(intrin->ppy + 0.5f, intrin->fy) + atan2f(intrin->height - (intrin->ppy + 0.5f), intrin->fy)) * 57.2957795f; } +static void next_pixel_in_line(float curr[2], const float start[2], const float end[2]) +{ + float line_slope = (end[1] - start[1]) / (end[0] - start[0]); + if (abs(end[0] - curr[0]) > abs(end[1] - curr[1])) + { + curr[0] = end[0] > curr[0] ? curr[0] + 1 : curr[0] - 1; + curr[1] = end[1] - line_slope * (end[0] - curr[0]); + } + else + { + curr[1] = end[1] > curr[1] ? curr[1] + 1 : curr[1] - 1; + curr[0] = end[0] - ((end[1] + curr[1]) / line_slope); + } +} + +static bool is_pixel_in_line(const float curr[2], const float start[2], const float end[2]) +{ + return ((end[0] >= start[0] && end[0] >= curr[0] && curr[0] >= start[0]) || (end[0] <= start[0] && end[0] <= curr[0] && curr[0] <= start[0])) && + ((end[1] >= start[1] && end[1] >= curr[1] && curr[1] >= start[1]) || (end[1] <= start[1] && end[1] <= curr[1] && curr[1] <= start[1])); +} + +static void adjust_2D_point_to_boundary(float p[2], int width, int height) +{ + if (p[0] < 0) p[0] = 0; + if (p[0] > width) p[0] = width; + if (p[1] < 0) p[1] = 0; + if (p[1] > height) p[1] = height; +} + +/* Find projected pixel with unknown depth search along line. 
*/ +static void rs2_project_color_pixel_to_depth_pixel(float to_pixel[2], + const uint16_t* data, float depth_scale, + float depth_min, float depth_max, + const struct rs2_intrinsics* depth_intrin, + const struct rs2_intrinsics* color_intrin, + const struct rs2_extrinsics* color_to_depth, + const struct rs2_extrinsics* depth_to_color, + const float from_pixel[2]) +{ + //Find line start pixel + float start_pixel[2] = { 0 }, min_point[3] = { 0 }, min_transformed_point[3] = { 0 }; + rs2_deproject_pixel_to_point(min_point, color_intrin, from_pixel, depth_min); + rs2_transform_point_to_point(min_transformed_point, color_to_depth, min_point); + rs2_project_point_to_pixel(start_pixel, depth_intrin, min_transformed_point); + adjust_2D_point_to_boundary(start_pixel, depth_intrin->width, depth_intrin->height); + + //Find line end depth pixel + float end_pixel[2] = { 0 }, max_point[3] = { 0 }, max_transformed_point[3] = { 0 }; + rs2_deproject_pixel_to_point(max_point, color_intrin, from_pixel, depth_max); + rs2_transform_point_to_point(max_transformed_point, color_to_depth, max_point); + rs2_project_point_to_pixel(end_pixel, depth_intrin, max_transformed_point); + adjust_2D_point_to_boundary(end_pixel, depth_intrin->width, depth_intrin->height); + + //search along line for the depth pixel that it's projected pixel is the closest to the input pixel + float min_dist = -1; + for (float p[2] = { start_pixel[0], start_pixel[1] }; is_pixel_in_line(p, start_pixel, end_pixel); next_pixel_in_line(p, start_pixel, end_pixel)) + { + float depth = depth_scale * data[(int)p[1] * depth_intrin->width + (int)p[0]]; + if (depth == 0) + continue; + + float projected_pixel[2] = { 0 }, point[3] = { 0 }, transformed_point[3] = { 0 }; + rs2_deproject_pixel_to_point(point, depth_intrin, p, depth); + rs2_transform_point_to_point(transformed_point, depth_to_color, point); + rs2_project_point_to_pixel(projected_pixel, color_intrin, transformed_point); + + float new_dist = pow((projected_pixel[1] - 
from_pixel[1]), 2) + pow((projected_pixel[0] - from_pixel[0]), 2); + if (new_dist < min_dist || min_dist < 0) + { + min_dist = new_dist; + to_pixel[0] = p[0]; + to_pixel[1] = p[1]; + } + } +} + #endif diff --git a/package.xml b/package.xml index 2aea4be6b9..e754d10799 100644 --- a/package.xml +++ b/package.xml @@ -7,7 +7,7 @@ librealsense2 - 2.14.1 + 2.16.0 Library for capturing data from the Intel(R) RealSense(TM) SR300 and D400 cameras. This effort was initiated to better support researchers, creative coders, and app developers in domains such as robotics, virtual reality, and the internet of things. Several often-requested features of RealSense(TM); devices are implemented in this project, including multi-camera capture. diff --git a/readme.md b/readme.md index ecb39adcc2..9ecf721677 100644 --- a/readme.md +++ b/readme.md @@ -9,7 +9,7 @@ Linux/MacOS | Windows | ----------------- ## Overview -**Intel® RealSense™ SDK 2.0** is a cross-platform library for Intel® RealSense™ depth cameras (D400 series and the SR300). +**Intel® RealSense™ SDK 2.0** is a cross-platform library for Intel® RealSense™ depth cameras (D400 series and the SR300). > :pushpin: For other Intel® RealSense™ devices (F200, R200, LR200 and ZR300), please refer to the [latest legacy release](https://github.com/IntelRealSense/librealsense/tree/v1.12.1). @@ -37,7 +37,7 @@ Information about the Intel® RealSense™ technology at [realsense.intel.com](h | **[Depth Quality Tool](./tools/depth-quality)** | This application allows you to test the camera’s depth quality, including: standard deviation from plane fit, normalized RMS – the subpixel accuracy, distance accuracy and fill rate. You should be able to easily get and interpret several of the depth quality metrics and record and save the data for offline analysis. 
|[**Depth.Quality.Tool.exe**](https://github.com/IntelRealSense/librealsense/releases) | | **[Debug Tools](./tools/)** | Device enumeration, FW logger, etc as can be seen at the tools directory | Included in [**Intel.RealSense.SDK.exe**](https://github.com/IntelRealSense/librealsense/releases)| | **[Code Samples](./examples)** |These simple examples demonstrate how to easily use the SDK to include code snippets that access the camera into your applications. Check some of the [**C++ examples**](./examples) including capture, pointcloud and more and basic [**C examples**](./examples/C) | Included in [**Intel.RealSense.SDK.exe**](https://github.com/IntelRealSense/librealsense/releases) | -| **[Wrappers](https://github.com/IntelRealSense/librealsense/tree/development/wrappers)** | [Python](./wrappers/python), [C#/.NET](./wrappers/csharp), [Node.js](./wrappers/nodejs) API, as well as integration with the following 3rd-party technologies: [ROS](https://github.com/intel-ros/realsense/releases), [LabVIEW](./wrappers/labview), [OpenCV](./wrappers/opencv), [PCL](./wrappers/pcl), [Unity](./wrappers/unity), and more to come, including Matlab and OpenNI. | | +| **[Wrappers](https://github.com/IntelRealSense/librealsense/tree/development/wrappers)** | [Python](./wrappers/python), [C#/.NET](./wrappers/csharp), [Node.js](./wrappers/nodejs) API, as well as integration with the following 3rd-party technologies: [ROS](https://github.com/intel-ros/realsense/releases), [LabVIEW](./wrappers/labview), [OpenCV](./wrappers/opencv), [PCL](./wrappers/pcl), [Unity](./wrappers/unity), [Matlab](./wrappers/matlab) and more to come, including OpenNI. | | ## Ready to Hack! 
@@ -55,19 +55,19 @@ p.start(); while (true) { // Block program until frames arrive - rs2::frameset frames = p.wait_for_frames(); - + rs2::frameset frames = p.wait_for_frames(); + // Try to get a frame of a depth image - rs2::depth_frame depth = frames.get_depth_frame(); + rs2::depth_frame depth = frames.get_depth_frame(); // Get the depth frame's dimensions float width = depth.get_width(); float height = depth.get_height(); - + // Query the distance from the camera to the object in the center of the image float dist_to_center = depth.get_distance(width / 2, height / 2); - - // Print the distance + + // Print the distance std::cout << "The camera is facing an object " << dist_to_center << " meters away \r"; } ``` @@ -77,5 +77,5 @@ For more information on the library, please follow our [examples](./examples), a In order to contribute to Intel RealSense SDK, please follow our [contribution guidelines](CONTRIBUTING.md). ## License -This project is licensed under the [Apache License, Version 2.0](LICENSE). +This project is licensed under the [Apache License, Version 2.0](LICENSE). Copyright 2017 Intel Corporation diff --git a/scripts/patch-ubuntu-kernel-4.16.sh b/scripts/patch-ubuntu-kernel-4.16.sh new file mode 100755 index 0000000000..2a0569bd86 --- /dev/null +++ b/scripts/patch-ubuntu-kernel-4.16.sh @@ -0,0 +1,166 @@ +#!/bin/bash + +#Break execution on any error received +set -e + +#trap read debug + +echo -e "\e[36mDevelopment script for kernel 4.16 with metadata node\e[0m" + +#Locally suppress stderr to avoid raising not relevant messages +exec 3>&2 +exec 2> /dev/null +con_dev=$(ls /dev/video* | wc -l) +exec 2>&3 + +if [ $con_dev -ne 0 ]; +then + echo -e "\e[32m" + read -p "Remove all RealSense cameras attached. 
Hit any key when ready" + echo -e "\e[0m" +fi + +#Include usability functions +source ./scripts/patch-utils.sh + +# Get the required tools and headers to build the kernel +sudo apt-get install linux-headers-generic build-essential git +#Packages to build the patched modules / kernel 4.16 +require_package libusb-1.0-0-dev +require_package libssl-dev +require_package bison +require_package flex +require_package libelf-dev + +retpoline_retrofit=0 + +LINUX_BRANCH=$(uname -r) + +# Construct branch name from distribution codename {xenial,bionic,..} and kernel version +ubuntu_codename=`. /etc/os-release; echo ${UBUNTU_CODENAME/*, /}` +if [ -z "${ubuntu_codename}" ]; +then + # Trusty Tahr shall use xenial code base + ubuntu_codename="xenial" + retpoline_retrofit=1 +fi + +minor=$(uname -r | cut -d '.' -f 2) +if [ $minor -ne 16 ]; +then + echo -e "\e[43mThe patch is applicable for kernel version 4.16. \n/ + For earlier kernels please use patch-realsense-ubuntu-lts.sh script \e[0m" + exit 1 +fi + +kernel_branch=$(uname -r | awk -F '[.-]' '{print "v"$1"."$2"."$3}') +kernel_major_minor=$(uname -r | awk -F '[.-]' '{print "v"$1"."$2}') +kernel_name="ubuntu-${ubuntu_codename}-$kernel_branch" + + +#Distribution-specific packages +if [ ${ubuntu_codename} == "bionic" ]; +then + require_package libelf-dev + require_package elfutils +fi + + +# Get the linux kernel and change into source tree +[ ! -d ${kernel_name} ] && git clone -b $kernel_branch git://git.launchpad.net/~ubuntu-kernel-test/ubuntu/+source/linux/+git/mainline-crack --depth 1 ./${kernel_name} + +cd ${kernel_name} + +# Verify that there are no trailing changes., warn the user to make corrective action if needed +if [ $(git status | grep 'modified:' | wc -l) -ne 0 ]; +then + echo -e "\e[36mThe kernel has modified files:\e[0m" + git status | grep 'modified:' + echo -e "\e[36mProceeding will reset all local kernel changes. 
Press 'n' within 10 seconds to abort the operation" + set +e + read -n 1 -t 10 -r -p "Do you want to proceed? [Y/n]" response + set -e + response=${response,,} # tolower + if [[ $response =~ ^(n|N)$ ]]; + then + echo -e "\e[41mScript has been aborted on user request. Please resolve the modified files and rerun\e[0m" + exit 1 + else + echo -e "\e[0m" + echo -e "\e[32mUpdate the folder content with the latest from mainline branch\e[0m" + git fetch origin $kernel_branch --depth 1 + printf "Resetting local changes in %s folder\n " ${kernel_name} + git reset --hard $kernel_branch + fi +fi + +#Check if we need to apply patches or reload stock drivers (Developers' option) +[ "$#" -ne 0 -a "$1" == "reset" ] && reset_driver=1 || reset_driver=0 + +if [ $reset_driver -eq 1 ]; +then + echo -e "\e[43mUser requested to rebuild and reinstall ubuntu-${ubuntu_codename} stock drivers\e[0m" +else + # Patching kernel for RealSense devices + echo -e "\e[32mApplying realsense-uvc patch\e[0m" + patch -p1 < ../scripts/realsense-camera-formats_ubuntu-${ubuntu_codename}-${kernel_major_minor}.patch + echo -e "\e[32mApplying realsense-metadata patch\e[0m" + patch -p1 < ../scripts/realsense-metadata-ubuntu-${ubuntu_codename}-${kernel_major_minor}.patch + echo -e "\e[32mApplying realsense-hid patch\e[0m" + patch -p1 < ../scripts/realsense-hid-ubuntu-${ubuntu_codename}-${kernel_major_minor}.patch + echo -e "\e[32mApplying realsense-powerlinefrequency-fix patch\e[0m" + patch -p1 < ../scripts/realsense-powerlinefrequency-control-fix.patch + #echo -e "\e[32mApplying kernel debug patches\e[0m" + #patch -p1 < ../scripts/0001-Kernel-debugs.patch +fi + +# Copy configuration +sudo cp /usr/src/linux-headers-$(uname -r)/.config . +sudo cp /usr/src/linux-headers-$(uname -r)/Module.symvers . + +# Basic build for kernel modules +echo -e "\e[32mPrepare kernel modules configuration\e[0m" +#Retpoline script manual retrieval.
based on https://github.com/IntelRealSense/librealsense/issues/1493 +#Required since the retpoline patches were introduced into Ubuntu kernels +if [ ! -f scripts/ubuntu-retpoline-extract-one ]; then + pwd + for f in $(find . -name 'retpoline-extract-one'); do cp ${f} scripts/ubuntu-retpoline-extract-one; done; + echo $$$ +fi +sudo make silentoldconfig modules_prepare + +#Vermagic identity is required +IFS='.' read -a kernel_version <<< "$LINUX_BRANCH" +sudo sed -i "s/\".*\"/\"$LINUX_BRANCH\"/g" ./include/generated/utsrelease.h +sudo sed -i "s/.*/$LINUX_BRANCH/g" ./include/config/kernel.release +#Patch for Trusty Tahr (Ubuntu 14.05) with GCC not retrofitted with the retpoline patch. +[ $retpoline_retrofit -eq 1 ] && sudo sed -i "s/#ifdef RETPOLINE/#if (1)/g" ./include/linux/vermagic.h + +# Build the uvc, accel and gyro modules +KBASE=`pwd` +cd drivers/media/usb/uvc +sudo cp $KBASE/Module.symvers . + +echo -e "\e[32mCompiling uvc module\e[0m" +sudo make -j -C $KBASE M=$KBASE/drivers/media/usb/uvc/ modules +#echo -e "\e[32mCompiling accelerometer and gyro modules\e[0m" +sudo make -j -C $KBASE M=$KBASE/drivers/iio/accel modules +sudo make -j -C $KBASE M=$KBASE/drivers/iio/gyro modules +echo -e "\e[32mCompiling v4l2-core modules\e[0m" +sudo make -j -C $KBASE M=$KBASE/drivers/media/v4l2-core modules + +# Copy the patched modules to a sane location +sudo cp $KBASE/drivers/media/usb/uvc/uvcvideo.ko ~/$LINUX_BRANCH-uvcvideo.ko +sudo cp $KBASE/drivers/iio/accel/hid-sensor-accel-3d.ko ~/$LINUX_BRANCH-hid-sensor-accel-3d.ko +sudo cp $KBASE/drivers/iio/gyro/hid-sensor-gyro-3d.ko ~/$LINUX_BRANCH-hid-sensor-gyro-3d.ko +sudo cp $KBASE/drivers/media/v4l2-core/videodev.ko ~/$LINUX_BRANCH-videodev.ko + +echo -e "\e[32mPatched kernels modules were created successfully\n\e[0m" + +# Load the newly-built modules +try_module_insert videodev ~/$LINUX_BRANCH-videodev.ko /lib/modules/`uname -r`/kernel/drivers/media/v4l2-core/videodev.ko +try_module_insert uvcvideo 
~/$LINUX_BRANCH-uvcvideo.ko /lib/modules/`uname -r`/kernel/drivers/media/usb/uvc/uvcvideo.ko +try_module_insert hid_sensor_accel_3d ~/$LINUX_BRANCH-hid-sensor-accel-3d.ko /lib/modules/`uname -r`/kernel/drivers/iio/accel/hid-sensor-accel-3d.ko +try_module_insert hid_sensor_gyro_3d ~/$LINUX_BRANCH-hid-sensor-gyro-3d.ko /lib/modules/`uname -r`/kernel/drivers/iio/gyro/hid-sensor-gyro-3d.ko + +echo -e "\e[92m\n\e[1mScript has completed. Please consult the installation guide for further instruction.\n\e[0m" diff --git a/scripts/patch-utils.sh b/scripts/patch-utils.sh index 1ffd70491a..68867130db 100755 --- a/scripts/patch-utils.sh +++ b/scripts/patch-utils.sh @@ -13,7 +13,7 @@ function require_package { if [ $installed -eq 0 ]; then echo -e "\e[31m - not found, installing now...\e[0m" - sudo apt-get install ${package_name} + sudo apt-get install ${package_name} -y echo -e "\e[32mMissing package installed\e[0m" else echo -e "\e[32m - found\e[0m" diff --git a/scripts/realsense-camera-formats_ubuntu-xenial-Ubuntu-hwe-4.13.0-45.50_16.04.1.patch b/scripts/realsense-camera-formats_ubuntu-xenial-Ubuntu-hwe-4.13.0-45.50_16.04.1.patch index 02b655f2f0..318d95dd7f 100644 --- a/scripts/realsense-camera-formats_ubuntu-xenial-Ubuntu-hwe-4.13.0-45.50_16.04.1.patch +++ b/scripts/realsense-camera-formats_ubuntu-xenial-Ubuntu-hwe-4.13.0-45.50_16.04.1.patch @@ -97,13 +97,14 @@ diff --git a/drivers/media/v4l2-core/v4l2-ioctl.c b/drivers/media/v4l2-core/v4l2 index cab63bb..85935a9 100644 --- a/drivers/media/v4l2-core/v4l2-ioctl.c +++ b/drivers/media/v4l2-core/v4l2-ioctl.c -@@ -1239,6 +1239,9 @@ static void v4l_fill_fmtdesc(struct v4l2_fmtdesc *fmt) +@@ -1239,6 +1239,10 @@ static void v4l_fill_fmtdesc(struct v4l2_fmtdesc *fmt) case V4L2_TCH_FMT_TU08: descr = "8-bit unsigned touch data"; break; case V4L2_META_FMT_VSP1_HGO: descr = "R-Car VSP1 1-D Histogram"; break; case V4L2_META_FMT_VSP1_HGT: descr = "R-Car VSP1 2-D Histogram"; break; + /* Librealsense formats*/ + case V4L2_PIX_FMT_RW16: 
descr = "16-bit Raw data"; break; + case V4L2_PIX_FMT_W10: descr = "10-bit packed 8888[2222]"; break; ++ case V4L2_PIX_FMT_CONFIDENCE_MAP: descr = "4-bit per pixel packed into 8 bit"; break; default: /* Compressed formats */ diff --git a/scripts/realsense-camera-formats_ubuntu-xenial-hwe-zesty.patch b/scripts/realsense-camera-formats_ubuntu-xenial-hwe-zesty.patch index bec177383d..aee26c27a7 100644 --- a/scripts/realsense-camera-formats_ubuntu-xenial-hwe-zesty.patch +++ b/scripts/realsense-camera-formats_ubuntu-xenial-hwe-zesty.patch @@ -1,26 +1,20 @@ -From 6fa3dfdaff8443b4882411ac4def091c3c5eb64e Mon Sep 17 00:00:00 2001 -From: icarpis -Date: Mon, 30 Oct 2017 13:30:01 +0200 -Subject: [PATCH] Added W10 format +From ceb29b08b480a2a6a5e9dccf396a284378268037 Mon Sep 17 00:00:00 2001 +From: aangerma +Date: Wed, 22 Aug 2018 13:08:03 +0300 +Subject: [PATCH] C -From edf79915b7af157ddb4f9c77ca8abd8e5c2ede18 Mon Sep 17 00:00:00 2001 -From: Evgeni Raikhel -Date: Mon, 10 Jul 2017 10:44:13 +0300 -Subject: [PATCH] [PATCH] RS$xx and SR300 pixel formats - -Signed-off-by: Evgeni Raikhel --- - drivers/media/usb/uvc/uvc_driver.c | 50 ++++++++++++++++++++++++++++++++++++ - drivers/media/usb/uvc/uvcvideo.h | 31 ++++++++++++++++++++++ - drivers/media/v4l2-core/v4l2-ioctl.c | 3 +++ - include/uapi/linux/videodev2.h | 4 +++ - 4 files changed, 88 insertions(+) + drivers/media/usb/uvc/uvc_driver.c | 55 ++++++++++++++++++++++++++++++++++++ + drivers/media/usb/uvc/uvcvideo.h | 33 ++++++++++++++++++++++ + drivers/media/v4l2-core/v4l2-ioctl.c | 5 +++- + include/uapi/linux/videodev2.h | 6 +++- + 4 files changed, 97 insertions(+), 2 deletions(-) diff --git a/drivers/media/usb/uvc/uvc_driver.c b/drivers/media/usb/uvc/uvc_driver.c -index 04bf350..fa2eb1c 100644 +index 04bf350..5df98e5 100644 --- a/drivers/media/usb/uvc/uvc_driver.c +++ b/drivers/media/usb/uvc/uvc_driver.c -@@ -188,6 +188,56 @@ static struct uvc_format_desc uvc_fmts[] = { +@@ -188,6 +188,61 @@ static struct uvc_format_desc 
uvc_fmts[] = { .guid = UVC_GUID_FORMAT_GR16, .fcc = V4L2_PIX_FMT_SGRBG16, }, @@ -73,15 +67,20 @@ index 04bf350..fa2eb1c 100644 + .name = "Packed raw data 10-bit", + .guid = UVC_GUID_FORMAT_W10, + .fcc = V4L2_PIX_FMT_W10, ++ }, ++ { ++ .name = "Confidence data (C )", ++ .guid = UVC_GUID_FORMAT_CONFIDENCE_MAP, ++ .fcc = V4L2_PIX_FMT_CONFIDENCE_MAP, + }, }; /* ------------------------------------------------------------------------ diff --git a/drivers/media/usb/uvc/uvcvideo.h b/drivers/media/usb/uvc/uvcvideo.h -index 3d6cc62..6ab5034 100644 +index 3d6cc62..575c1ea 100644 --- a/drivers/media/usb/uvc/uvcvideo.h +++ b/drivers/media/usb/uvc/uvcvideo.h -@@ -143,6 +143,37 @@ +@@ -143,6 +143,39 @@ #define UVC_GUID_FORMAT_RW10 \ { 'R', 'W', '1', '0', 0x00, 0x00, 0x10, 0x00, \ 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} @@ -115,39 +114,45 @@ index 3d6cc62..6ab5034 100644 +#define UVC_GUID_FORMAT_W10 \ + { 'W', '1', '0', ' ', 0x00, 0x00, 0x10, 0x00, \ + 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} -+ ++#define UVC_GUID_FORMAT_CONFIDENCE_MAP \ ++ { 'C', ' ', ' ', ' ', 0x00, 0x00, 0x10, 0x00, \ ++ 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} /* ------------------------------------------------------------------------ * Driver specific constants. 
diff --git a/drivers/media/v4l2-core/v4l2-ioctl.c b/drivers/media/v4l2-core/v4l2-ioctl.c -index 0c3f238..221494d 100644 +index 0c3f238..09fbc6d 100644 --- a/drivers/media/v4l2-core/v4l2-ioctl.c +++ b/drivers/media/v4l2-core/v4l2-ioctl.c -@@ -1217,6 +1217,9 @@ static void v4l_fill_fmtdesc(struct v4l2_fmtdesc *fmt) +@@ -1217,7 +1217,10 @@ static void v4l_fill_fmtdesc(struct v4l2_fmtdesc *fmt) case V4L2_TCH_FMT_DELTA_TD08: descr = "8-bit signed deltas"; break; case V4L2_TCH_FMT_TU16: descr = "16-bit unsigned touch data"; break; case V4L2_TCH_FMT_TU08: descr = "8-bit unsigned touch data"; break; +- + case V4L2_PIX_FMT_RW16: descr = "16-bit Raw data"; break; + case V4L2_PIX_FMT_INZI: descr = "32-bit IR:Depth 10:16"; break; + case V4L2_PIX_FMT_W10: descr = "10-bit packed 8888[2222]"; break; - ++ case V4L2_PIX_FMT_CONFIDENCE_MAP: descr = "4-bit per pixel packed into 8 bit"; break; default: /* Compressed formats */ + flags = V4L2_FMT_FLAG_COMPRESSED; diff --git a/include/uapi/linux/videodev2.h b/include/uapi/linux/videodev2.h -index 45184a2..3f247ca 100644 +index 45184a2..48c3592 100644 --- a/include/uapi/linux/videodev2.h +++ b/include/uapi/linux/videodev2.h -@@ -661,6 +661,10 @@ struct v4l2_pix_format { +@@ -661,7 +661,11 @@ struct v4l2_pix_format { #define V4L2_PIX_FMT_Y12I v4l2_fourcc('Y', '1', '2', 'I') /* Greyscale 12-bit L/R interleaved */ #define V4L2_PIX_FMT_Z16 v4l2_fourcc('Z', '1', '6', ' ') /* Depth data 16-bit */ #define V4L2_PIX_FMT_MT21C v4l2_fourcc('M', 'T', '2', '1') /* Mediatek compressed block mode */ +- +#define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') /* Greyscale 16-bit */ +#define V4L2_PIX_FMT_RW16 v4l2_fourcc('R', 'W', '1', '6') /* Raw data 16-bit */ +#define V4L2_PIX_FMT_INZI v4l2_fourcc('I', 'N', 'Z', 'I') /* 24 Depth/IR 16:8 */ +#define V4L2_PIX_FMT_W10 v4l2_fourcc('W', '1', '0', ' ') /* Packed raw data 10-bit */ - ++#define V4L2_PIX_FMT_CONFIDENCE_MAP v4l2_fourcc('C', ' ', ' ', ' ') /* Two pixels in one byte */ /* SDR formats - used only 
for Software Defined Radio devices */ #define V4L2_SDR_FMT_CU8 v4l2_fourcc('C', 'U', '0', '8') /* IQ u8 */ + #define V4L2_SDR_FMT_CU16LE v4l2_fourcc('C', 'U', '1', '6') /* IQ u16le */ -- 2.7.4 diff --git a/scripts/realsense-camera-formats_ubuntu-xenial-master.patch b/scripts/realsense-camera-formats_ubuntu-xenial-master.patch index 19abed2dd9..8184b3b18d 100644 --- a/scripts/realsense-camera-formats_ubuntu-xenial-master.patch +++ b/scripts/realsense-camera-formats_ubuntu-xenial-master.patch @@ -1,21 +1,20 @@ -From 0473259d92fe1a908ca4305de3fad371279de320 Mon Sep 17 00:00:00 2001 +From e5d0e1f6e3a35ea469d41aaeccecc69bcda5d37f Mon Sep 17 00:00:00 2001 +From: aangerma From: icarpis +From: administrator Date: Mon, 30 Oct 2017 10:14:04 +0200 -Subject: [PATCH] Added W10 format -From 2fb29ac45b298b1dbea7fca174b6e8b7d22e4fb3 Mon Sep 17 00:00:00 2001 -From: administrator -Date: Sun, 4 Jun 2017 17:30:59 +0300 -Subject: [PATCH] RS4xx and ZR300 Pixel formats patch +Subject: [PATCH] RS4xx, ZR300, RS5 Pixel formats patch +Date: Wed, 22 Aug 2018 10:02:43 +0300 + -Signed-off-by: administrator --- drivers/media/usb/uvc/Makefile | 1 + - drivers/media/usb/uvc/uvc_driver.c | 65 ++++++++++++++++++++++++++++++++++++ - drivers/media/usb/uvc/uvcvideo.h | 41 ++++++++++++++++++++++- + drivers/media/usb/uvc/uvc_driver.c | 70 ++++++++++++++++++++++++++++++++++++ + drivers/media/usb/uvc/uvcvideo.h | 43 +++++++++++++++++++++- drivers/media/v4l2-core/v4l2-ioctl.c | 6 ++++ - include/uapi/linux/videodev2.h | 7 ++++ - 5 files changed, 119 insertions(+), 1 deletion(-) + include/uapi/linux/videodev2.h | 9 ++++- + 5 files changed, 127 insertions(+), 2 deletions(-) diff --git a/drivers/media/usb/uvc/Makefile b/drivers/media/usb/uvc/Makefile index c26d12f..d86cf22 100644 @@ -27,10 +26,10 @@ index c26d12f..d86cf22 100644 uvc_status.o uvc_isight.o uvc_debugfs.o ifeq ($(CONFIG_MEDIA_CONTROLLER),y) diff --git a/drivers/media/usb/uvc/uvc_driver.c b/drivers/media/usb/uvc/uvc_driver.c -index 9b8ac20..92eded4 
100644 +index 4a07535..4888495 100644 --- a/drivers/media/usb/uvc/uvc_driver.c +++ b/drivers/media/usb/uvc/uvc_driver.c -@@ -168,6 +168,71 @@ static struct uvc_format_desc uvc_fmts[] = { +@@ -168,6 +168,76 @@ static struct uvc_format_desc uvc_fmts[] = { .guid = UVC_GUID_FORMAT_RW10, .fcc = V4L2_PIX_FMT_SRGGB10P, }, @@ -98,12 +97,17 @@ index 9b8ac20..92eded4 100644 + .name = "Packed raw data 10-bit", + .guid = UVC_GUID_FORMAT_W10, + .fcc = V4L2_PIX_FMT_W10, ++ }, ++ { ++ .name = "Confidence data (C )", ++ .guid = UVC_GUID_FORMAT_CONFIDENCE_MAP, ++ .fcc = V4L2_PIX_FMT_CONFIDENCE_MAP, + }, }; /* ------------------------------------------------------------------------ diff --git a/drivers/media/usb/uvc/uvcvideo.h b/drivers/media/usb/uvc/uvcvideo.h -index 7e4d3ee..1bc3bc3 100644 +index 7e4d3ee..96d9f2b 100644 --- a/drivers/media/usb/uvc/uvcvideo.h +++ b/drivers/media/usb/uvc/uvcvideo.h @@ -115,7 +115,6 @@ @@ -114,7 +118,7 @@ index 7e4d3ee..1bc3bc3 100644 #define UVC_GUID_FORMAT_H264 \ { 'H', '2', '6', '4', 0x00, 0x00, 0x10, 0x00, \ 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} -@@ -131,6 +130,46 @@ +@@ -131,6 +130,48 @@ #define UVC_GUID_FORMAT_RW10 \ { 'R', 'W', '1', '0', 0x00, 0x00, 0x10, 0x00, \ 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} @@ -157,15 +161,17 @@ index 7e4d3ee..1bc3bc3 100644 +#define UVC_GUID_FORMAT_W10 \ + { 'W', '1', '0', ' ', 0x00, 0x00, 0x10, 0x00, \ + 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} -+ ++#define UVC_GUID_FORMAT_CONFIDENCE_MAP \ ++ { 'C', ' ', ' ', ' ', 0x00, 0x00, 0x10, 0x00, \ ++ 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} /* ------------------------------------------------------------------------ * Driver specific constants. 
diff --git a/drivers/media/v4l2-core/v4l2-ioctl.c b/drivers/media/v4l2-core/v4l2-ioctl.c -index 7486af2..8c500e1 100644 +index 5e2a7e5..7c2714b 100644 --- a/drivers/media/v4l2-core/v4l2-ioctl.c +++ b/drivers/media/v4l2-core/v4l2-ioctl.c -@@ -1229,6 +1229,12 @@ static void v4l_fill_fmtdesc(struct v4l2_fmtdesc *fmt) +@@ -1229,6 +1229,13 @@ static void v4l_fill_fmtdesc(struct v4l2_fmtdesc *fmt) case V4L2_SDR_FMT_CS8: descr = "Complex S8"; break; case V4L2_SDR_FMT_CS14LE: descr = "Complex S14LE"; break; case V4L2_SDR_FMT_RU12LE: descr = "Real U12LE"; break; @@ -175,17 +181,19 @@ index 7486af2..8c500e1 100644 + case V4L2_PIX_FMT_RW16: descr = "16-bit Raw data"; break; + case V4L2_PIX_FMT_INZI: descr = "32-bit IR:Depth 10:16"; break; + case V4L2_PIX_FMT_W10: descr = "10-bit packed 8888[2222]"; break; ++ case V4L2_PIX_FMT_CONFIDENCE_MAP: descr = "4-bit per pixel packed into 8 bit"; break; default: /* Compressed formats */ diff --git a/include/uapi/linux/videodev2.h b/include/uapi/linux/videodev2.h -index 421d274..374ca95 100644 +index 421d274..89f20fa 100644 --- a/include/uapi/linux/videodev2.h +++ b/include/uapi/linux/videodev2.h -@@ -624,6 +624,13 @@ struct v4l2_pix_format { +@@ -624,7 +624,14 @@ struct v4l2_pix_format { #define V4L2_PIX_FMT_Y8I v4l2_fourcc('Y', '8', 'I', ' ') /* Greyscale 8-bit L/R interleaved */ #define V4L2_PIX_FMT_Y12I v4l2_fourcc('Y', '1', '2', 'I') /* Greyscale 12-bit L/R interleaved */ #define V4L2_PIX_FMT_Z16 v4l2_fourcc('Z', '1', '6', ' ') /* Depth data 16-bit */ +- +#define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') /* Greyscale 16-bit */ +#define V4L2_PIX_FMT_RW16 v4l2_fourcc('R', 'W', '1', '6') /* Raw data 16-bit */ +#define V4L2_PIX_FMT_INZI v4l2_fourcc('I', 'N', 'Z', 'I') /* 24 Depth/IR 16:8 */ @@ -193,9 +201,10 @@ index 421d274..374ca95 100644 +#define V4L2_PIX_FMT_INRI v4l2_fourcc('I', 'N', 'R', 'I') /* 24 Depth/IR 16:8 */ +#define V4L2_PIX_FMT_RELI v4l2_fourcc('R', 'E', 'L', 'I') /* 8 IR alternating on off illumination */ +#define 
V4L2_PIX_FMT_W10 v4l2_fourcc('W', '1', '0', ' ') /* Packed raw data 10-bit */ - ++#define V4L2_PIX_FMT_CONFIDENCE_MAP v4l2_fourcc('C', ' ', ' ', ' ') /* Two pixels in one byte */ /* SDR formats - used only for Software Defined Radio devices */ #define V4L2_SDR_FMT_CU8 v4l2_fourcc('C', 'U', '0', '8') /* IQ u8 */ + #define V4L2_SDR_FMT_CU16LE v4l2_fourcc('C', 'U', '1', '6') /* IQ u16le */ -- 2.7.4 diff --git a/scripts/realsense-camera-formats_ubuntu-xenial-v4.16.patch b/scripts/realsense-camera-formats_ubuntu-xenial-v4.16.patch new file mode 100644 index 0000000000..4bb340319a --- /dev/null +++ b/scripts/realsense-camera-formats_ubuntu-xenial-v4.16.patch @@ -0,0 +1,160 @@ +From 574991645764df30366f9f2c319d5d9529eed5ba Mon Sep 17 00:00:00 2001 +From: icarpis +Date: Tue, 14 Aug 2018 10:18:09 +0300 +Date: Tue, 14 Aug 2018 10:54:48 +0300 +Subject: [PATCH] Patch D400/SR300/RS5 formats + +--- + drivers/media/usb/uvc/Makefile | 2 ++ + drivers/media/usb/uvc/uvc_driver.c | 37 ++++++++++++++++++++++++++++++++++++ + drivers/media/usb/uvc/uvcvideo.h | 28 ++++++++++++++++++++++++++- + drivers/media/v4l2-core/v4l2-dev.c | 1 + + drivers/media/v4l2-core/v4l2-ioctl.c | 3 +++ + include/uapi/linux/videodev2.h | 3 +++ + 6 files changed, 73 insertions(+), 1 deletion(-) + +diff --git a/drivers/media/usb/uvc/Makefile b/drivers/media/usb/uvc/Makefile +index 4f9eee4..71e0988 100644 +--- a/drivers/media/usb/uvc/Makefile ++++ b/drivers/media/usb/uvc/Makefile +@@ -1,4 +1,6 @@ + # SPDX-License-Identifier: GPL-2.0 ++CONFIG_MODULE_SIG=n ++ccflags-y := -DDRIVER_VERSION_SUFFIX=\".librealsense2-patch\" + uvcvideo-objs := uvc_driver.o uvc_queue.o uvc_v4l2.o uvc_video.o uvc_ctrl.o \ + uvc_status.o uvc_isight.o uvc_debugfs.o uvc_metadata.o + ifeq ($(CONFIG_MEDIA_CONTROLLER),y) +diff --git a/drivers/media/usb/uvc/uvc_driver.c b/drivers/media/usb/uvc/uvc_driver.c +index fd387bf..90c2bc9 100644 +--- a/drivers/media/usb/uvc/uvc_driver.c ++++ b/drivers/media/usb/uvc/uvc_driver.c +@@ -209,6 +209,43 @@ static 
struct uvc_format_desc uvc_fmts[] = { + .guid = UVC_GUID_FORMAT_INZI, + .fcc = V4L2_PIX_FMT_INZI, + }, ++ { ++ .name = "Luminosity data 8-bit (L8)", ++ .guid = UVC_GUID_FORMAT_L8, ++ .fcc = V4L2_PIX_FMT_GREY, ++ }, ++ { ++ .name = "Depth data 16-bit (D16)", ++ .guid = UVC_GUID_FORMAT_D16, ++ .fcc = V4L2_PIX_FMT_Z16, ++ }, ++ { ++ .name = "Packed raw data 10-bit", ++ .guid = UVC_GUID_FORMAT_W10, ++ .fcc = V4L2_PIX_FMT_W10, ++ }, ++ { ++ .name = "Confidence data (C )", ++ .guid = UVC_GUID_FORMAT_CONFIDENCE_MAP, ++ .fcc = V4L2_PIX_FMT_CONFIDENCE_MAP, ++ }, ++ /* FishEye 8-bit monochrome */ ++ { ++ .name = "Raw data 8-bit (RAW8)", ++ .guid = UVC_GUID_FORMAT_RAW8, ++ .fcc = V4L2_PIX_FMT_GREY, ++ }, ++ /* Legacy formats for backward-compatibility*/ ++ { ++ .name = "Raw data 16-bit (RW16)", ++ .guid = UVC_GUID_FORMAT_RW16, ++ .fcc = V4L2_PIX_FMT_RW16, ++ }, ++ { ++ .name = "16-bit Bayer BGBG/GRGR", ++ .guid = UVC_GUID_FORMAT_BAYER16, ++ .fcc = V4L2_PIX_FMT_SBGGR16, ++ }, + }; + + /* ------------------------------------------------------------------------ +diff --git a/drivers/media/usb/uvc/uvcvideo.h b/drivers/media/usb/uvc/uvcvideo.h +index d9e7c70..4d51e80 100644 +--- a/drivers/media/usb/uvc/uvcvideo.h ++++ b/drivers/media/usb/uvc/uvcvideo.h +@@ -153,6 +153,28 @@ + #define UVC_GUID_FORMAT_INVI \ + { 'I', 'N', 'V', 'I', 0xdb, 0x57, 0x49, 0x5e, \ + 0x8e, 0x3f, 0xf4, 0x79, 0x53, 0x2b, 0x94, 0x6f} ++#define UVC_GUID_FORMAT_L8 \ ++ { '2', 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, \ ++ 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} ++#define UVC_GUID_FORMAT_D16 \ ++ { 'P', 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, \ ++ 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} ++#define UVC_GUID_FORMAT_W10 \ ++ { 'W', '1', '0', ' ', 0x00, 0x00, 0x10, 0x00, \ ++ 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} ++#define UVC_GUID_FORMAT_RAW8 \ ++ { 'R', 'A', 'W', '8', 0x66, 0x1a, 0x42, 0xa2, \ ++ 0x90, 0x65, 0xd0, 0x18, 0x14, 0xa8, 0xef, 0x8a} ++#define UVC_GUID_FORMAT_CONFIDENCE_MAP \ ++ 
{ 'C', ' ', ' ', ' ', 0x00, 0x00, 0x10, 0x00, \ ++ 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} ++/* Legacy formats */ ++#define UVC_GUID_FORMAT_RW16 \ ++ { 'R', 'W', '1', '6', 0x00, 0x00, 0x10, 0x00, \ ++ 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71} ++#define UVC_GUID_FORMAT_BAYER16 \ ++ { 'R', 'W', '1', '6', 0x66, 0x1a, 0x42, 0xa2, \ ++ 0x90, 0x65, 0xd0, 0x18, 0x14, 0xa8, 0xef, 0x8a} + + #define UVC_GUID_FORMAT_D3DFMT_L8 \ + {0x32, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, \ +@@ -163,7 +185,11 @@ + * Driver specific constants. + */ + +-#define DRIVER_VERSION "1.1.1" ++#ifndef DRIVER_VERSION_SUFFIX ++#define DRIVER_VERSION_SUFFIX ++#endif ++ ++#define DRIVER_VERSION "1.1.1" DRIVER_VERSION_SUFFIX + + /* Number of isochronous URBs. */ + #define UVC_URBS 5 +diff --git a/drivers/media/v4l2-core/v4l2-dev.c b/drivers/media/v4l2-core/v4l2-dev.c +index 1d0b220..56df662 100644 +--- a/drivers/media/v4l2-core/v4l2-dev.c ++++ b/drivers/media/v4l2-core/v4l2-dev.c +@@ -1076,3 +1076,4 @@ MODULE_AUTHOR("Alan Cox, Mauro Carvalho Chehab "); + MODULE_DESCRIPTION("Device registrar for Video4Linux drivers v2"); + MODULE_LICENSE("GPL"); + MODULE_ALIAS_CHARDEV_MAJOR(VIDEO_MAJOR); ++MODULE_VERSION("librealsense2-patch"); +diff --git a/drivers/media/v4l2-core/v4l2-ioctl.c b/drivers/media/v4l2-core/v4l2-ioctl.c +index 260288c..53c66dc 100644 +--- a/drivers/media/v4l2-core/v4l2-ioctl.c ++++ b/drivers/media/v4l2-core/v4l2-ioctl.c +@@ -1251,6 +1251,9 @@ static void v4l_fill_fmtdesc(struct v4l2_fmtdesc *fmt) + case V4L2_META_FMT_VSP1_HGO: descr = "R-Car VSP1 1-D Histogram"; break; + case V4L2_META_FMT_VSP1_HGT: descr = "R-Car VSP1 2-D Histogram"; break; + case V4L2_META_FMT_UVC: descr = "UVC payload header metadata"; break; ++ case V4L2_PIX_FMT_RW16: descr = "16-bit Raw data"; break; ++ case V4L2_PIX_FMT_W10: descr = "10-bit packed 8888[2222]"; break; ++ case V4L2_PIX_FMT_CONFIDENCE_MAP: descr = "4-bit/pixel 8-bit pack"; break; + + default: + /* Compressed formats */ +diff --git 
a/include/uapi/linux/videodev2.h b/include/uapi/linux/videodev2.h +index 9827189..b6ce918 100644 +--- a/include/uapi/linux/videodev2.h ++++ b/include/uapi/linux/videodev2.h +@@ -668,6 +668,9 @@ struct v4l2_pix_format { + #define V4L2_PIX_FMT_Z16 v4l2_fourcc('Z', '1', '6', ' ') /* Depth data 16-bit */ + #define V4L2_PIX_FMT_MT21C v4l2_fourcc('M', 'T', '2', '1') /* Mediatek compressed block mode */ + #define V4L2_PIX_FMT_INZI v4l2_fourcc('I', 'N', 'Z', 'I') /* Intel Planar Greyscale 10-bit and Depth 16-bit */ ++#define V4L2_PIX_FMT_RW16 v4l2_fourcc('R', 'W', '1', '6') /* Raw data 16-bit */ ++#define V4L2_PIX_FMT_W10 v4l2_fourcc('W', '1', '0', ' ') /* Packed raw data 10-bit */ ++#define V4L2_PIX_FMT_CONFIDENCE_MAP v4l2_fourcc('C', ' ', ' ', ' ') /* Two pixels in one byte */ + + /* 10bit raw bayer packed, 32 bytes for every 25 pixels, last LSB 6 bits unused */ + #define V4L2_PIX_FMT_IPU3_SBGGR10 v4l2_fourcc('i', 'p', '3', 'b') /* IPU3 packed 10-bit BGGR bayer */ +-- +2.7.4 + diff --git a/scripts/realsense-hid-ubuntu-xenial-v4.16.patch b/scripts/realsense-hid-ubuntu-xenial-v4.16.patch new file mode 100644 index 0000000000..66a74d557f --- /dev/null +++ b/scripts/realsense-hid-ubuntu-xenial-v4.16.patch @@ -0,0 +1,92 @@ +From 3cd71dfdb89050b17a5e5e7c80c3d8e0cd02b196 Mon Sep 17 00:00:00 2001 +From: Srinivas Pandruvada +From: Evgeni +Date: Sun, 13 May 2018 10:49:26 -0400 + +Subject: [PATCH] Adding missing HID timestamp patch for Gyro sensor. 
+ Ubuntu 18.04 Bionic Beaver with KErnel 4.15 + A symmetric patch for Accelerator was already upstreamed, + this patch was skipped from upstream + The patch was written by Srinivas Pandruvada +--- + drivers/iio/gyro/hid-sensor-gyro-3d.c | 28 ++++++++++++++++++++------- + 1 file changed, 21 insertions(+), 7 deletions(-) + +diff --git a/drivers/iio/gyro/hid-sensor-gyro-3d.c b/drivers/iio/gyro/hid-sensor-gyro-3d.c +index f59995a90..e3f2a8357 100644 +--- a/drivers/iio/gyro/hid-sensor-gyro-3d.c ++++ b/drivers/iio/gyro/hid-sensor-gyro-3d.c +@@ -42,11 +42,13 @@ struct gyro_3d_state { + struct hid_sensor_hub_callbacks callbacks; + struct hid_sensor_common common_attributes; + struct hid_sensor_hub_attribute_info gyro[GYRO_3D_CHANNEL_MAX]; +- u32 gyro_val[GYRO_3D_CHANNEL_MAX]; ++ /* Reserve for 3 channels + padding + timestamp */ ++ u32 gyro_val[GYRO_3D_CHANNEL_MAX + 3]; + int scale_pre_decml; + int scale_post_decml; + int scale_precision; + int value_offset; ++ int64_t timestamp; + }; + + static const u32 gyro_3d_addresses[GYRO_3D_CHANNEL_MAX] = { +@@ -87,7 +89,8 @@ static const struct iio_chan_spec gyro_3d_channels[] = { + BIT(IIO_CHAN_INFO_SAMP_FREQ) | + BIT(IIO_CHAN_INFO_HYSTERESIS), + .scan_index = CHANNEL_SCAN_INDEX_Z, +- } ++ }, ++ IIO_CHAN_SOFT_TIMESTAMP(3) + }; + + /* Adjust channel real bits based on report descriptor */ +@@ -191,11 +194,11 @@ static const struct iio_info gyro_3d_info = { + }; + + /* Function to push data to buffer */ +-static void hid_sensor_push_data(struct iio_dev *indio_dev, const void *data, +- int len) ++static void hid_sensor_push_data(struct iio_dev *indio_dev, void *data, ++ int len, int64_t timestamp) + { + dev_dbg(&indio_dev->dev, "hid_sensor_push_data\n"); +- iio_push_to_buffers(indio_dev, data); ++ iio_push_to_buffers_with_timestamp(indio_dev, data, timestamp); + } + + /* Callback handler to send event after all samples are received and captured */ +@@ -207,10 +210,17 @@ static int gyro_3d_proc_event(struct hid_sensor_hub_device 
*hsdev, + struct gyro_3d_state *gyro_state = iio_priv(indio_dev); + + dev_dbg(&indio_dev->dev, "gyro_3d_proc_event\n"); +- if (atomic_read(&gyro_state->common_attributes.data_ready)) ++ if (atomic_read(&gyro_state->common_attributes.data_ready)) { ++ if (!gyro_state->timestamp) ++ gyro_state->timestamp = iio_get_time_ns(indio_dev); ++ + hid_sensor_push_data(indio_dev, + gyro_state->gyro_val, +- sizeof(gyro_state->gyro_val)); ++ sizeof(gyro_state->gyro_val), ++ gyro_state->timestamp); ++ ++ gyro_state->timestamp = 0; ++ } + + return 0; + } +@@ -235,6 +245,10 @@ static int gyro_3d_capture_sample(struct hid_sensor_hub_device *hsdev, + *(u32 *)raw_data; + ret = 0; + break; ++ case HID_USAGE_SENSOR_TIME_TIMESTAMP: ++ gyro_state->timestamp = *(int64_t *)raw_data; ++ ret = 0; ++ break; + default: + break; + } +-- +2.17.0 + diff --git a/scripts/realsense-metadata-ubuntu-xenial-v4.16.patch b/scripts/realsense-metadata-ubuntu-xenial-v4.16.patch new file mode 100644 index 0000000000..bfac4f96ca --- /dev/null +++ b/scripts/realsense-metadata-ubuntu-xenial-v4.16.patch @@ -0,0 +1,201 @@ +From af04383d9816d971c7f11d99ab6e96fd946c4f22 Mon Sep 17 00:00:00 2001 +From: Evgeni Raikhel +From: Elad Zucker +From: icarpis +From: Guennadi Liakhovetski +Date: Tue, 14 Aug 2018 15:51:51 +0300 +Subject: [PATCH] D4M metadata enablement w/metanode kernel 4.16 + Enabling UVC Metadata D4M format according to a + pending patch https://patchwork.kernel.org/patch/10131589/ + Also extending it to additoinal SKUs. 
+ This patchs drops the documentation chapter as unnecessary + +--- + drivers/media/usb/uvc/uvc_driver.c | 137 +++++++++++++++++++++++++++++++++++++ + drivers/media/usb/uvc/uvcvideo.h | 2 +- + include/uapi/linux/videodev2.h | 1 + + 3 files changed, 139 insertions(+), 1 deletion(-) + +diff --git a/drivers/media/usb/uvc/uvc_driver.c b/drivers/media/usb/uvc/uvc_driver.c +index fd387bf..3219519 100644 +--- a/drivers/media/usb/uvc/uvc_driver.c ++++ b/drivers/media/usb/uvc/uvc_driver.c +@@ -2346,6 +2346,8 @@ static const struct uvc_device_info uvc_quirk_force_y8 = { + }; + + #define UVC_QUIRK_INFO(q) (kernel_ulong_t)&(struct uvc_device_info){.quirks = q} ++#define UVC_QUIRK_META(m) (kernel_ulong_t)&(struct uvc_device_info) \ ++ {.meta_format = m} + + /* + * The Logitech cameras listed below have their interface class set to +@@ -2819,6 +2821,141 @@ static const struct usb_device_id uvc_ids[] = { + .bInterfaceSubClass = 1, + .bInterfaceProtocol = 0, + .driver_info = (kernel_ulong_t)&uvc_quirk_force_y8 }, ++ /* Intel D400/PSR depth camera*/ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0ad1, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D410/ASR depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0ad2, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D415/ASRC depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0ad3, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D430/AWG depth camera 
*/ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0ad4, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D450/AWGT depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0ad5, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D420/PWG depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0af6, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D420_MM/PWGT depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0afe, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D410_MM/ASRT depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0aff, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D400_MM/PSRT depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0b00, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D430_MM/AWGCT depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | 
USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0b01, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D460/DS5U depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0b03, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D435/AWGC depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0b07, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel SR300 depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0aa5, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel D405 S depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0b0c, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, ++ /* Intel L500 depth camera */ ++ { .match_flags = USB_DEVICE_ID_MATCH_DEVICE ++ | USB_DEVICE_ID_MATCH_INT_INFO, ++ .idVendor = 0x8086, ++ .idProduct = 0x0b0d, ++ .bInterfaceClass = USB_CLASS_VIDEO, ++ .bInterfaceSubClass = 1, ++ .bInterfaceProtocol = 0, ++ .driver_info = UVC_QUIRK_META(V4L2_META_FMT_D4XX) }, + /* Generic USB Video Class */ + { USB_INTERFACE_INFO(USB_CLASS_VIDEO, 1, UVC_PC_PROTOCOL_UNDEFINED) }, + { USB_INTERFACE_INFO(USB_CLASS_VIDEO, 1, UVC_PC_PROTOCOL_15) }, +diff --git 
a/drivers/media/usb/uvc/uvcvideo.h b/drivers/media/usb/uvc/uvcvideo.h +index d9e7c70..8470b7f 100644 +--- a/drivers/media/usb/uvc/uvcvideo.h ++++ b/drivers/media/usb/uvc/uvcvideo.h +@@ -170,7 +170,7 @@ + /* Maximum number of packets per URB. */ + #define UVC_MAX_PACKETS 32 + /* Maximum status buffer size in bytes of interrupt URB. */ +-#define UVC_MAX_STATUS_SIZE 16 ++#define UVC_MAX_STATUS_SIZE 32 + + #define UVC_CTRL_CONTROL_TIMEOUT 500 + #define UVC_CTRL_STREAMING_TIMEOUT 5000 +diff --git a/include/uapi/linux/videodev2.h b/include/uapi/linux/videodev2.h +index 9827189..d947f9a 100644 +--- a/include/uapi/linux/videodev2.h ++++ b/include/uapi/linux/videodev2.h +@@ -695,6 +695,7 @@ struct v4l2_pix_format { + #define V4L2_META_FMT_VSP1_HGO v4l2_fourcc('V', 'S', 'P', 'H') /* R-Car VSP1 1-D Histogram */ + #define V4L2_META_FMT_VSP1_HGT v4l2_fourcc('V', 'S', 'P', 'T') /* R-Car VSP1 2-D Histogram */ + #define V4L2_META_FMT_UVC v4l2_fourcc('U', 'V', 'C', 'H') /* UVC Payload Header metadata */ ++#define V4L2_META_FMT_D4XX v4l2_fourcc('D', '4', 'X', 'X') /* D4XX Payload Header metadata */ + + /* priv field value to indicates that subsequent fields are valid. 
*/ + #define V4L2_PIX_FMT_PRIV_MAGIC 0xfeedcafe +-- +2.7.4 + diff --git a/src/archive.cpp b/src/archive.cpp index e5cea668ed..7e23aa34f2 100644 --- a/src/archive.cpp +++ b/src/archive.cpp @@ -1,6 +1,7 @@ #include "metadata-parser.h" #include "archive.h" #include +#include "core/processing.h" #define MIN_DISTANCE 1e-6 @@ -264,7 +265,7 @@ namespace librealsense public: explicit frame_archive(std::atomic* in_max_frame_queue_size, std::shared_ptr ts, - std::shared_ptr parsers) + std::shared_ptr parsers) : max_frame_queue_size(in_max_frame_queue_size), mutex(), recycle_frames(true), _time_service(ts), _metadata_parsers(parsers) @@ -477,5 +478,4 @@ namespace librealsense << "ms, FPS: " << get_stream()->get_framerate() << ", Max Duration: " << callback_warning_duration << "ms)"); } } - } diff --git a/src/backend.h b/src/backend.h index 5ab4158141..2c0388a9b0 100644 --- a/src/backend.h +++ b/src/backend.h @@ -171,7 +171,6 @@ namespace librealsense const void * pixels; const void * metadata; rs2_time_t backend_time; - }; typedef std::function)> frame_callback; @@ -208,6 +207,9 @@ namespace librealsense std::string unique_id = ""; std::string device_path = ""; usb_spec conn_spec = usb_undefined; + uint32_t uvc_capabilities = 0; + bool has_metadata_node = false; + std::string metadata_node_id = ""; operator std::string() { @@ -218,7 +220,8 @@ namespace librealsense "\nmi- " << mi << "\nunique_id- " << unique_id << "\npath- " << device_path << - "\nsusb specification- " << std::hex << (uint16_t)conn_spec << std::dec; + "\nsusb specification- " << std::hex << (uint16_t)conn_spec << std::dec << + (has_metadata_node ? 
( "\nmetadata node-" + metadata_node_id) : ""); return s.str(); } @@ -227,6 +230,7 @@ namespace librealsense { return (std::make_tuple(id, vid, pid, mi, unique_id, device_path) < std::make_tuple(obj.id, obj.vid, obj.pid, obj.mi, obj.unique_id, obj.device_path)); } + }; inline bool operator==(const uvc_device_info& a, diff --git a/src/core/streaming.h b/src/core/streaming.h index c161cafc75..4d0668d33d 100644 --- a/src/core/streaming.h +++ b/src/core/streaming.h @@ -17,6 +17,7 @@ namespace librealsense class sensor_interface; class archive_interface; class device_interface; + class processing_block_interface; class sensor_part { diff --git a/src/device_hub.cpp b/src/device_hub.cpp index fdc73699a0..e3b0bd258c 100644 --- a/src/device_hub.cpp +++ b/src/device_hub.cpp @@ -128,5 +128,10 @@ namespace librealsense std::unique_lock lock(_mutex); return dev.is_valid(); } + + std::shared_ptr device_hub::get_context() + { + return _ctx; + } } diff --git a/src/device_hub.h b/src/device_hub.h index 60ddc7a318..77defa0c00 100644 --- a/src/device_hub.h +++ b/src/device_hub.h @@ -41,6 +41,8 @@ namespace librealsense */ bool is_connected(const device_interface& dev); + std::shared_ptr get_context(); + ~device_hub() { _ctx->stop(); diff --git a/src/ds5/ds5-device.cpp b/src/ds5/ds5-device.cpp index 1265cca45a..72180566c9 100644 --- a/src/ds5/ds5-device.cpp +++ b/src/ds5/ds5-device.cpp @@ -289,7 +289,6 @@ namespace librealsense for (auto&& info : filter_by_mi(all_device_infos, 0)) // Filter just mi=0, DEPTH depth_devices.push_back(backend.create_uvc_device(info)); - std::unique_ptr ds5_timestamp_reader_backup(new ds5_timestamp_reader(backend.create_time_service())); auto depth_ep = std::make_shared(this, std::make_shared(depth_devices), std::unique_ptr(new ds5_timestamp_reader_from_metadata(std::move(ds5_timestamp_reader_backup)))); @@ -506,7 +505,7 @@ namespace librealsense depth_ep.register_metadata((rs2_frame_metadata_value)RS2_FRAME_METADATA_WIDTH, 
make_attribute_parser(&md_configuration::width, md_configuration_attributes::width_attribute, md_prop_offset)); depth_ep.register_metadata((rs2_frame_metadata_value)RS2_FRAME_METADATA_HEIGHT, make_attribute_parser(&md_configuration::height, md_configuration_attributes::height_attribute, md_prop_offset)); depth_ep.register_metadata((rs2_frame_metadata_value)RS2_FRAME_METADATA_ACTUAL_FPS, std::make_shared ()); - + register_info(RS2_CAMERA_INFO_NAME, device_name); register_info(RS2_CAMERA_INFO_SERIAL_NUMBER, serial); register_info(RS2_CAMERA_INFO_FIRMWARE_VERSION, _fw_version); diff --git a/src/ds5/ds5-factory.cpp b/src/ds5/ds5-factory.cpp index 3610c3a836..7f6c8b0c84 100644 --- a/src/ds5/ds5-factory.cpp +++ b/src/ds5/ds5-factory.cpp @@ -432,18 +432,15 @@ namespace librealsense auto valid_pid = filter_by_product(group.uvc_devices, ds::rs400_sku_pid); auto group_devices = group_devices_and_hids_by_unique_id(group_devices_by_unique_id(valid_pid), group.hid_devices); + for (auto& g : group_devices) { auto& devices = g.first; auto& hids = g.second; - if((devices[0].pid == ds::RS430_MM_PID || devices[0].pid == ds::RS420_MM_PID) && hids.size()==0) - continue; - bool all_sensors_present = mi_present(devices, 0); - constexpr std::array multi_sensors = { ds::RS415_PID, ds::RS430_MM_RGB_PID, ds::RS435_RGB_PID}; - auto is_pid_of_multisensor_device = [&multi_sensors](int pid) { return std::find(std::begin(multi_sensors), std::end(multi_sensors), pid) != std::end(multi_sensors); }; + auto is_pid_of_multisensor_device = [](int pid) { return std::find(std::begin(ds::multi_sensors_pid), std::end(ds::multi_sensors_pid), pid) != std::end(ds::multi_sensors_pid); }; bool is_device_multisensor = false; for (auto&& uvc : devices) { diff --git a/src/ds5/ds5-motion.cpp b/src/ds5/ds5-motion.cpp index 1ac9e7c57d..1f036077a1 100644 --- a/src/ds5/ds5-motion.cpp +++ b/src/ds5/ds5-motion.cpp @@ -174,7 +174,8 @@ namespace librealsense { if (all_hid_infos.empty()) { - throw std::runtime_error("HID 
device is missing!"); + LOG_WARNING("HID device is missing!"); + return nullptr; } static const char* custom_sensor_fw_ver = "5.6.0.0"; @@ -393,25 +394,27 @@ namespace librealsense register_stream_to_extrinsic_group(*_gpio_streams[i], 0); } - // Add hid endpoint + // Try to add hid endpoint auto hid_ep = create_hid_device(ctx, group.hid_devices, _fw_version); - _motion_module_device_idx = add_sensor(hid_ep); - - try - { - hid_ep->register_option(RS2_OPTION_ENABLE_MOTION_CORRECTION, - std::make_shared(hid_ep.get(), - *_accel_intrinsics, - *_gyro_intrinsics, - option_range{0, 1, 1, 1})); - } - catch (const std::exception& ex) + if (hid_ep) { - LOG_ERROR("Motion Device is not calibrated! Motion Data Correction will not be available! Error: " << ex.what()); - } + _motion_module_device_idx = add_sensor(hid_ep); - if (!motion_module_fw_version.empty()) - register_info(RS2_CAMERA_INFO_FIRMWARE_VERSION, motion_module_fw_version); + try + { + hid_ep->register_option(RS2_OPTION_ENABLE_MOTION_CORRECTION, + std::make_shared(hid_ep.get(), + *_accel_intrinsics, + *_gyro_intrinsics, + option_range{ 0, 1, 1, 1 })); + } + catch (const std::exception& ex) + { + LOG_ERROR("Motion Device is not calibrated! Motion Data Correction will not be available! 
Error: " << ex.what()); + } + if (!motion_module_fw_version.empty()) + register_info(RS2_CAMERA_INFO_FIRMWARE_VERSION, motion_module_fw_version); + } } } diff --git a/src/ds5/ds5-private.h b/src/ds5/ds5-private.h index 80b05171cc..714eaa7df0 100644 --- a/src/ds5/ds5-private.h +++ b/src/ds5/ds5-private.h @@ -57,6 +57,16 @@ namespace librealsense ds::RS_USB2_PID }; + static const std::set multi_sensors_pid = { + ds::RS400_MM_PID, + ds::RS410_MM_PID, + ds::RS415_PID, + ds::RS420_MM_PID, + ds::RS430_MM_PID, + ds::RS430_MM_RGB_PID, + ds::RS435_RGB_PID + }; + static const std::map rs400_sku_names = { { RS400_PID, "Intel RealSense D400"}, { RS400_MM_PID, "Intel RealSense D400 with Tracking Module"}, @@ -77,6 +87,7 @@ namespace librealsense // DS5 fisheye XU identifiers const uint8_t FISHEYE_EXPOSURE = 1; + // subdevice[h] unit[fw], node[h] guid[fw] const platform::extension_unit depth_xu = { 0, 3, 2, { 0xC9606CCB, 0x594C, 0x4D25,{ 0xaf, 0x47, 0xcc, 0xc4, 0x96, 0x43, 0x59, 0x95 } } }; diff --git a/src/image.cpp b/src/image.cpp index f119cc4bc3..89b08d0e3c 100644 --- a/src/image.cpp +++ b/src/image.cpp @@ -312,6 +312,7 @@ namespace librealsense #ifndef ANDROID #ifdef __SSSE3__ static bool do_avx = has_avx(); + #ifdef __AVX2__ if (do_avx) { @@ -323,6 +324,7 @@ namespace librealsense if (FORMAT == RS2_FORMAT_BGRA8) unpack_yuy2_avx_bgra8(d, s, n); } else + #endif { auto src = reinterpret_cast(s); auto dst = reinterpret_cast<__m128i *>(d[0]); @@ -982,7 +984,6 @@ namespace librealsense { requires_processing, ©_pixels<2>, { { RS2_STREAM_DEPTH, RS2_FORMAT_Z16 } } } } }; const native_pixel_format pf_y8_l500 = { 'GREY', 1, 1, { { true, &rotate_270_degrees_clockwise<1>, { { RS2_STREAM_INFRARED, RS2_FORMAT_Y8, rotate_resolution } } }, { requires_processing, ©_pixels<1>, { { RS2_STREAM_INFRARED, RS2_FORMAT_Y8 } } } } }; - const native_pixel_format pf_y8 = { 'GREY', 1, 1, { { requires_processing, ©_pixels<1>, { { { RS2_STREAM_INFRARED, 1 }, RS2_FORMAT_Y8 } } } } }; const 
native_pixel_format pf_y16 = { 'Y16 ', 1, 2, { { true, &unpack_y16_from_y16_10, { { { RS2_STREAM_INFRARED, 1 }, RS2_FORMAT_Y16 } } } } }; const native_pixel_format pf_y8i = { 'Y8I ', 1, 2, { { true, &unpack_y8_y8_from_y8i, { { { RS2_STREAM_INFRARED, 1 }, RS2_FORMAT_Y8 }, diff --git a/src/image_avx.cpp b/src/image_avx.cpp index 86438ecc0b..91a746eeb8 100644 --- a/src/image_avx.cpp +++ b/src/image_avx.cpp @@ -8,7 +8,7 @@ //#include "../include/librealsense2/rsutil.h" // For projection/deprojection logic #ifndef ANDROID - #ifdef __SSSE3__ + #if defined(__SSSE3__) && defined(__AVX2__) #include // For SSE3 intrinsic used in unpack_yuy2_sse #include diff --git a/src/image_avx.h b/src/image_avx.h index 0ae2f44673..037fdff6c5 100644 --- a/src/image_avx.h +++ b/src/image_avx.h @@ -10,7 +10,7 @@ namespace librealsense { #ifndef ANDROID - #ifdef __SSSE3__ + #if defined(__SSSE3__) && defined(__AVX2__) void unpack_yuy2_avx_y8(byte * const d[], const byte * s, int n); void unpack_yuy2_avx_y16(byte * const d[], const byte * s, int n); void unpack_yuy2_avx_rgb8(byte * const d[], const byte * s, int n); diff --git a/src/libuvc/libuvc.cpp b/src/libuvc/libuvc.cpp index 21fcb050b3..b45377d371 100644 --- a/src/libuvc/libuvc.cpp +++ b/src/libuvc/libuvc.cpp @@ -30,6 +30,7 @@ #include #include #include +#include #include @@ -53,6 +54,13 @@ namespace librealsense { namespace platform { + // the table provides the substitution 4CC used when firmware-published formats + // differ from the recognized scheme + const std::unordered_map fourcc_map = { + { 0x32000000, 0x47524559 }, /* 'GREY' from 'L8 ' */ + { 0x52415738, 0x47524559 }, /* 'GREY' from 'RAW8' */ + }; + static void internal_uvc_callback(uvc_frame_t *frame, void *ptr); static std::tuple get_usb_descriptors(libusb_device* usb_device) @@ -297,6 +305,10 @@ namespace librealsense uvc_error_t res; uvc_stream_ctrl_t ctrl; + // Assume that the base and the substituted codes do not co-exist + if (_substitute_4cc.count(profile.format)) + 
profile.format = _substitute_4cc.at(profile.format); + // request all formats for all pins in the device. res = uvc_get_stream_ctrl_format_size_all( _device_handle, &ctrl, @@ -346,6 +358,9 @@ namespace librealsense void stop_callbacks() override { _is_started = false; + _stream_ctrls.clear(); + _profiles.clear(); + _callbacks.clear(); } void close(stream_profile) override @@ -356,6 +371,7 @@ namespace librealsense _is_capturing = false; _is_started = false; } + uvc_stop_streaming(_device_handle); } void power_D0() { @@ -398,8 +414,7 @@ namespace librealsense } void power_D3() { - - uvc_unref_device(_device); + uvc_unref_device(_device); //uvc_stop_streaming(_device_handle); _profiles.clear(); uvc_close(_device_handle); @@ -407,7 +422,7 @@ namespace librealsense _device_handle = NULL; _real_state = D3; } - + void set_power_state(power_state state) override { std::lock_guard lock(_power_mutex); @@ -629,8 +644,16 @@ namespace librealsense uvc_format_t *cur_format = formats; // build a list of all stream profiles and return to the caller. 
while ( cur_format != NULL) { + // Substitude HW profiles with 4CC codes recognized by the core + uint32_t device_fourcc = cur_format->fourcc; + if (fourcc_map.count(device_fourcc)) + { + _substitute_4cc[fourcc_map.at(device_fourcc)] = device_fourcc; + device_fourcc = fourcc_map.at(device_fourcc); + } + stream_profile p{}; - p.format = cur_format->fourcc; + p.format = device_fourcc; p.fps = cur_format->fps; p.width = cur_format->width; p.height = cur_format->height; @@ -706,6 +729,7 @@ namespace librealsense std::vector _profiles; std::vector _callbacks; std::vector _stream_ctrls; + mutable std::unordered_map _substitute_4cc; std::atomic _is_capturing; std::atomic _is_alive; std::atomic _is_started; diff --git a/src/libuvc/libuvc_internal.h b/src/libuvc/libuvc_internal.h index 54ea61eb65..e32d41b47b 100644 --- a/src/libuvc/libuvc_internal.h +++ b/src/libuvc/libuvc_internal.h @@ -261,6 +261,7 @@ struct uvc_stream_handle { uint8_t *outbuf, *holdbuf; std::mutex cb_mutex; std::condition_variable cb_cond; + std::condition_variable cb_cancel; std::thread cb_thread; uint32_t last_polled_seq; uvc_frame_callback_t *user_cb; diff --git a/src/libuvc/stream.cpp b/src/libuvc/stream.cpp index 5a55b63fce..77bce7d0e4 100644 --- a/src/libuvc/stream.cpp +++ b/src/libuvc/stream.cpp @@ -699,10 +699,7 @@ uvc_error_t uvc_probe_stream_ctrl( */ void _uvc_swap_buffers(uvc_stream_handle_t *strmh) { uint8_t *tmp_buf; - { - std::unique_lock lock(strmh->cb_mutex); - /* swap the buffers */ tmp_buf = strmh->holdbuf; strmh->hold_bytes = strmh->got_bytes; @@ -840,9 +837,14 @@ void LIBUSB_CALL _uvc_stream_callback(struct libusb_transfer *transfer) { switch (transfer->status) { case LIBUSB_TRANSFER_COMPLETED: if (transfer->num_iso_packets == 0) { + std::unique_lock lock(strmh->cb_mutex); /* This is a bulk mode transfer, so it just has one payload transfer */ _uvc_process_payload(strmh, transfer->buffer, transfer->actual_length); - } else { + + + } + else + { /* This is an isochronous mode transfer, 
so each packet has a payload transfer */ int packet_id; @@ -887,8 +889,9 @@ void LIBUSB_CALL _uvc_stream_callback(struct libusb_transfer *transfer) { } resubmit = 0; + strmh->cb_cancel.notify_all(); } - strmh->cb_cond.notify_all(); + break; } @@ -896,13 +899,14 @@ void LIBUSB_CALL _uvc_stream_callback(struct libusb_transfer *transfer) { case LIBUSB_TRANSFER_STALL: case LIBUSB_TRANSFER_OVERFLOW: case LIBUSB_TRANSFER_ERROR: - UVC_DEBUG("retrying transfer, status = %d", transfer->status); break; } - if ( strmh->running && resubmit ) - libusb_submit_transfer(transfer); + if (strmh && strmh->running && resubmit ) + { + auto res = libusb_submit_transfer(transfer); + } } /** Begin streaming video from the camera into the callback function. @@ -1194,7 +1198,6 @@ void *_uvc_user_caller(void *arg) { _uvc_populate_frame(strmh); } - strmh->user_cb(&strmh->frame, strmh->user_ptr); } while(1); @@ -1381,32 +1384,21 @@ uvc_error_t uvc_stream_stop(uvc_stream_handle_t *strmh) { if (!strmh->running) return UVC_ERROR_INVALID_PARAM; - strmh->running = 0; - { - std::unique_lock lock(strmh->cb_mutex); - - for (i = 0; i < LIBUVC_NUM_TRANSFER_BUFS; i++) { + std::unique_lock lock(strmh->cb_mutex); + strmh->running = 0; + for (i = 0; i < LIBUVC_NUM_TRANSFER_BUFS; i++) { if (strmh->transfers[i] != NULL) { int res = libusb_cancel_transfer(strmh->transfers[i]); - if (res < 0 && res != LIBUSB_ERROR_NOT_FOUND) { + if (res < 0) { free(strmh->transfers[i]->buffer); libusb_free_transfer(strmh->transfers[i]); strmh->transfers[i] = NULL; } + else + strmh->cb_cancel.wait(lock); } } - - /* Wait for transfers to complete/cancel */ - do { - for (i = 0; i < LIBUVC_NUM_TRANSFER_BUFS; i++) { - if (strmh->transfers[i] != NULL) - break; - } - if (i == LIBUVC_NUM_TRANSFER_BUFS) - break; - strmh->cb_cond.wait(lock); - } while (1); } // Kick the user thread awake strmh->cb_cond.notify_all(); @@ -1418,6 +1410,7 @@ uvc_error_t uvc_stream_stop(uvc_stream_handle_t *strmh) { * LIBUSB_TRANSFER_CANCELLED transfer) */ 
strmh->cb_thread.join(); } + auto res = libusb_clear_halt(strmh->devh->usb_devh,strmh->stream_if->bEndpointAddress); return UVC_SUCCESS; } diff --git a/src/linux/backend-hid.cpp b/src/linux/backend-hid.cpp index a0b2d32d87..bcdd26c0f7 100644 --- a/src/linux/backend-hid.cpp +++ b/src/linux/backend-hid.cpp @@ -325,7 +325,10 @@ namespace librealsense void hid_custom_sensor::stop_capture() { if (!_is_capturing) + { + enable(false); return; + } _is_capturing = false; signal_stop(); @@ -1101,7 +1104,7 @@ namespace librealsense LOG_WARNING("Failed to read busnum/devnum. Device Path: " << elem); } #else - LOG_WARNING("Failed to read busnum/devnum. Device Path: " << elem); + LOG_INFO("Failed to read busnum/devnum. Device Path: " << elem); #endif continue; } diff --git a/src/linux/backend-v4l2.cpp b/src/linux/backend-v4l2.cpp index 291d228b56..036bb237bc 100644 --- a/src/linux/backend-v4l2.cpp +++ b/src/linux/backend-v4l2.cpp @@ -179,19 +179,21 @@ namespace librealsense return r; } - - buffer::buffer(int fd, bool use_memory_map, int index) - : _use_memory_map(use_memory_map), _index(index) + buffer::buffer(int fd, v4l2_buf_type type, bool use_memory_map, int index) + : _type(type), _use_memory_map(use_memory_map), _index(index) { v4l2_buffer buf = {}; - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.type = _type; buf.memory = use_memory_map ? V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR; buf.index = index; if(xioctl(fd, VIDIOC_QUERYBUF, &buf) < 0) throw linux_backend_exception("xioctl(VIDIOC_QUERYBUF) failed"); + // Prior to kernel 4.16 metadata payload was attached to the end of the video payload + auto md_extra = (V4L2_BUF_TYPE_VIDEO_CAPTURE==type) ? 
MAX_META_DATA_SIZE : 0; _original_length = buf.length; - _length = buf.length; + _length = _original_length + md_extra; + if (use_memory_map) { _start = static_cast(mmap(NULL, buf.length, @@ -202,8 +204,8 @@ namespace librealsense } else { - _length += MAX_META_DATA_SIZE; - _start = static_cast(malloc( buf.length + MAX_META_DATA_SIZE)); + //_length += (V4L2_BUF_TYPE_VIDEO_CAPTURE==type) ? MAX_META_DATA_SIZE : 0; + _start = static_cast(malloc( _length)); if (!_start) throw linux_backend_exception("User_p allocation failed!"); memset(_start, 0, _length); } @@ -212,7 +214,7 @@ namespace librealsense void buffer::prepare_for_streaming(int fd) { v4l2_buffer buf = {}; - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.type = _type; buf.memory = _use_memory_map ? V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR; buf.index = _index; buf.length = _length; @@ -257,19 +259,55 @@ namespace librealsense if (_must_enqueue) { - if (V4L2_MEMORY_USERPTR == _use_memory_map) + if (!_use_memory_map) { auto metadata_offset = get_full_length() - MAX_META_DATA_SIZE; memset((byte*)(get_frame_start()) + metadata_offset, 0, MAX_META_DATA_SIZE); } + LOG_DEBUG("Enqueue buf " << _buf.index << " for fd " << fd); if (xioctl(fd, VIDIOC_QBUF, &_buf) < 0) - LOG_ERROR("xioctl(VIDIOC_QBUF) failed when requesting new frame! Last-error: " << strerror(errno)); + { + LOG_ERROR("xioctl(VIDIOC_QBUF) failed when requesting new frame! 
fd: " << fd << " error: " << strerror(errno)); + } _must_enqueue = false; } } + void buffers_mgr::handle_buffer(supported_kernel_buf_types buf_type, + int file_desc, + v4l2_buffer v4l_buf, + std::shared_ptr data_buf + ) + { + if (e_max_kernel_buf_type <=buf_type) + throw linux_backend_exception("invalid kernel buffer type request"); + + if (file_desc < 1) + { + // QBUF to be performed by a 3rd party + this->buffers.at(buf_type)._managed = true; + } + else + { + buffers.at(buf_type)._file_desc = file_desc; + buffers.at(buf_type)._managed = false; + buffers.at(buf_type)._data_buf = data_buf; + buffers.at(buf_type)._dq_buf = v4l_buf; + } + } + + void buffers_mgr::request_next_frame() + { + for (auto& buf : buffers) + { + if (buf._data_buf && (buf._file_desc >= 0)) + buf._data_buf->request_next_frame(buf._file_desc); + }; + } + + static std::tuple get_usb_descriptors(libusb_device* usb_device) { auto usb_bus = std::to_string(libusb_get_bus_number(usb_device)); @@ -315,13 +353,61 @@ namespace librealsense return res; } + // Retrieve device video capabilities to discriminate video capturing and metadata nodes + static uint32_t get_dev_capabilities(const std::string dev_name) + { + // RAII to handle exceptions + std::unique_ptr > fd( + new int (open(dev_name.c_str(), O_RDWR | O_NONBLOCK, 0)), + [](int* d){ if (d && (*d)) ::close(*d);}); + + if(*fd < 0) + throw linux_backend_exception(to_string() << __FUNCTION__ << ": Cannot open '" << dev_name); + + v4l2_capability cap = {}; + if(xioctl(*fd, VIDIOC_QUERYCAP, &cap) < 0) + { + if(errno == EINVAL) + throw linux_backend_exception(to_string() << __FUNCTION__ << " " << dev_name << " is no V4L2 device"); + else + throw linux_backend_exception(to_string() <<__FUNCTION__ << " xioctl(VIDIOC_QUERYCAP) failed"); + } + + return cap.device_caps; + } + + void stream_ctl_on(int fd, v4l2_buf_type type=V4L2_BUF_TYPE_VIDEO_CAPTURE) + { + if(xioctl(fd, VIDIOC_STREAMON, &type) < 0) + throw linux_backend_exception(to_string() << 
"xioctl(VIDIOC_STREAMON) failed for buf_type=" << type); + } + + void stream_off(int fd, v4l2_buf_type type=V4L2_BUF_TYPE_VIDEO_CAPTURE) + { + if(xioctl(fd, VIDIOC_STREAMOFF, &type) < 0) + throw linux_backend_exception(to_string() << "xioctl(VIDIOC_STREAMOFF) failed for buf_type=" << type); + } + + void req_io_buff(int fd, uint32_t count, std::string dev_name, + v4l2_memory mem_type, v4l2_buf_type type) + { + struct v4l2_requestbuffers req = { count, type, mem_type}; + + if(xioctl(fd, VIDIOC_REQBUFS, &req) < 0) + { + if(errno == EINVAL) + LOG_ERROR(dev_name + " does not support memory mapping"); + else + throw linux_backend_exception("xioctl(VIDIOC_REQBUFS) failed"); + } + } + v4l_usb_device::v4l_usb_device(const usb_device_info& info) { int status = libusb_init(&_usb_context); if(status < 0) throw linux_backend_exception(to_string() << "libusb_init(...) returned " << libusb_error_name(status)); - std::vector results; v4l_usb_device::foreach_usb_device(_usb_context, [&results, info, this](const usb_device_info& i, libusb_device* dev) @@ -432,6 +518,11 @@ namespace librealsense LOG_ERROR("Cannot access /sys/class/video4linux"); return; } + + // Collect UVC nodes info to bundle metadata and video + typedef std::pair node_info; + std::vector uvc_nodes,uvc_devices; + while (dirent * entry = readdir(dir)) { std::string name = entry->d_name; @@ -451,7 +542,6 @@ namespace librealsense try { int vid, pid, mi; - usb_spec usb_specification{usb_undefined}; std::string busnum, devnum, devpath; auto dev_name = "/dev/" + name; @@ -488,7 +578,7 @@ namespace librealsense #ifndef RS2_USE_CUDA /* On the Jetson TX, the camera module is CSI & I2C and does not report as this code expects Patch suggested by JetsonHacks: https://github.com/jetsonhacks/buildLibrealsense2TX */ - LOG_WARNING("Failed to read busnum/devnum. Device Path: " << path); + LOG_INFO("Failed to read busnum/devnum. 
Device Path: " << path); #endif continue; } @@ -512,7 +602,7 @@ namespace librealsense // /sys/devices/pci0000:00/0000:00:xx.0/ABC/M-N/3-6:1.0/video4linux/video0 // to // /sys/devices/pci0000:00/0000:00:xx.0/ABC/M-N/version - usb_specification = get_usb_connection_type(real_path + "/../../../"); + usb_spec usb_specification = get_usb_connection_type(real_path + "/../../../"); uvc_device_info info{}; info.pid = pid; @@ -522,7 +612,9 @@ namespace librealsense info.device_path = std::string(buff); info.unique_id = busnum + "-" + devpath + "-" + devnum; info.conn_spec = usb_specification; - action(info, dev_name); + info.uvc_capabilities = get_dev_capabilities(dev_name); + + uvc_nodes.emplace_back(info, dev_name); } catch(const std::exception & e) { @@ -530,15 +622,75 @@ namespace librealsense } } closedir(dir); + + // Differenciate and merge video and metadata nodes + // UVC nodes shall be traversed in ascending order for metadata nodes assignment ("dev/video1, Video2.. + std::sort(begin(uvc_nodes),end(uvc_nodes), + [](const node_info& lhs, const node_info& rhs){ return lhs.first.id < rhs.first.id; }); + + // Assume for each metadata node with index N there is a origin streaming node with index (N-1) + for (auto&& cur_node : uvc_nodes) + { + try + { + if (!(cur_node.first.uvc_capabilities & V4L2_CAP_META_CAPTURE)) + uvc_devices.emplace_back(cur_node); + else + { + if (uvc_devices.empty()) + { + LOG_ERROR("uvc meta-node with no video streaming node encountered: " << std::string(cur_node.first)); + continue; + } + + // Update the preceding uvc item with metadata node info + auto uvc_node = uvc_devices.back(); + + if (uvc_node.first.uvc_capabilities & V4L2_CAP_META_CAPTURE) + { + LOG_ERROR("Consequtive uvc meta-nodes encountered: " << std::string(uvc_node.first) << " and " << std::string(cur_node.first) ); + continue; + } + + if (uvc_node.first.has_metadata_node) + { + LOG_ERROR( "Metadata node for uvc device: " << std::string(uvc_node.first) << " was already been 
assigned "); + continue; + } + + // modify the last element with metadata node info + uvc_node.first.has_metadata_node = true; + uvc_node.first.metadata_node_id = cur_node.first.id; + uvc_devices.back() = uvc_node; + } + } + catch(const std::exception & e) + { + LOG_ERROR("Failed to map meta-node " << std::string(cur_node.first) <<", error encountered: " << e.what()); + } + } + + try + { + // Dispatch registration for enumerated uvc devices + for (auto&& dev : uvc_devices) + action(dev.first, dev.second); + } + catch(const std::exception & e) + { + LOG_ERROR("Registration of UVC device failed: " << e.what()); + } } v4l_uvc_device::v4l_uvc_device(const uvc_device_info& info, bool use_memory_map) : _name(""), _info(), _is_capturing(false), _is_alive(true), + _is_started(false), _thread(nullptr), + _named_mtx(nullptr), _use_memory_map(use_memory_map), - _is_started(false), + _fd(-1), _stop_pipe_fd{} { foreach_uvc_device([&info, this](const uvc_device_info& i, const std::string& name) @@ -561,6 +713,7 @@ namespace librealsense { _is_capturing = false; if (_thread) _thread->join(); + _fds.clear(); } void v4l_uvc_device::probe_and_commit(stream_profile profile, frame_callback callback, int buffers) @@ -569,6 +722,7 @@ namespace librealsense { v4l2_fmtdesc pixel_format = {}; pixel_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + while (ioctl(_fd, VIDIOC_ENUM_FMT, &pixel_format) == 0) { v4l2_frmsizeenum frame_size = {}; @@ -615,19 +769,7 @@ namespace librealsense ++pixel_format.index; } - - v4l2_format fmt = {}; - fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - fmt.fmt.pix.width = profile.width; - fmt.fmt.pix.height = profile.height; - fmt.fmt.pix.pixelformat = (const big_endian &)profile.format; - fmt.fmt.pix.field = V4L2_FIELD_NONE; - if(xioctl(_fd, VIDIOC_S_FMT, &fmt) < 0) - { - throw linux_backend_exception("xioctl(VIDIOC_S_FMT) failed"); - } - - LOG_INFO("Trying to configure fourcc " << fourcc_to_string(fmt.fmt.pix.pixelformat)); + set_format(profile); v4l2_streamparm parm = 
{}; parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; @@ -640,22 +782,8 @@ namespace librealsense throw linux_backend_exception("xioctl(VIDIOC_S_PARM) failed"); // Init memory mapped IO - v4l2_requestbuffers req = {}; - req.count = buffers; - req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - req.memory = _use_memory_map ? V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR; - if(xioctl(_fd, VIDIOC_REQBUFS, &req) < 0) - { - if(errno == EINVAL) - throw linux_backend_exception(_name + " does not support memory mapping"); - else - throw linux_backend_exception("xioctl(VIDIOC_REQBUFS) failed"); - } - - for(size_t i = 0; i < buffers; ++i) - { - _buffers.push_back(std::make_shared(_fd, _use_memory_map, i)); - } + negotiate_kernel_buffers(buffers); + allocate_io_buffers(buffers); _profile = profile; _callback = callback; @@ -673,17 +801,36 @@ namespace librealsense _error_handler = error_handler; // Start capturing - for (auto&& buf : _buffers) buf->prepare_for_streaming(_fd); + prepare_capture_buffers(); - v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if(xioctl(_fd, VIDIOC_STREAMON, &type) < 0) - throw linux_backend_exception("xioctl(VIDIOC_STREAMON) failed"); + // Synchronise stream requests for meta and video data. 
+ streamon(); _is_capturing = true; _thread = std::unique_ptr(new std::thread([this](){ capture_loop(); })); } } + void v4l_uvc_device::prepare_capture_buffers() + { + for (auto&& buf : _buffers) buf->prepare_for_streaming(_fd); + } + + void v4l_uvc_device::stop_data_capture() + { + _is_capturing = false; + _is_started = false; + + // Stop nn-demand frames polling + signal_stop(); + + _thread->join(); + _thread.reset(); + + // Notify kernel + streamoff(); + } + void v4l_uvc_device::start_callbacks() { _is_started = true; @@ -698,40 +845,16 @@ namespace librealsense { if(_is_capturing) { - _is_capturing = false; - _is_started = false; - signal_stop(); - - _thread->join(); - _thread.reset(); - - - // Stop streamining - v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if(xioctl(_fd, VIDIOC_STREAMOFF, &type) < 0) - throw linux_backend_exception("xioctl(VIDIOC_STREAMOFF) failed"); + stop_data_capture(); } if (_callback) { - for(size_t i = 0; i < _buffers.size(); i++) - { - _buffers[i]->detach_buffer(); - } - _buffers.resize(0); + // Release allocated buffers + allocate_io_buffers(0); - // Close memory mapped IO - struct v4l2_requestbuffers req = {}; - req.count = 0; - req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - req.memory = _use_memory_map ? V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR; - if(xioctl(_fd, VIDIOC_REQBUFS, &req) < 0) - { - if(errno == EINVAL) - LOG_ERROR(_name + " does not support memory mapping"); - else - throw linux_backend_exception("xioctl(VIDIOC_REQBUFS) failed"); - } + // Release IO + negotiate_kernel_buffers(0); _callback = nullptr; } @@ -748,40 +871,37 @@ namespace librealsense void v4l_uvc_device::signal_stop() { - char buff[1]; - buff[0] = 0; + char buff[1]={}; if (write(_stop_pipe_fd[1], buff, 1) < 0) { throw linux_backend_exception("Could not signal video capture thread to stop. 
Error write to pipe."); } } - void v4l_uvc_device::poll() { - fd_set fds{}; - FD_ZERO(&fds); - FD_SET(_fd, &fds); - FD_SET(_stop_pipe_fd[0], &fds); - FD_SET(_stop_pipe_fd[1], &fds); - - int max_fd = std::max(std::max(_stop_pipe_fd[0], _stop_pipe_fd[1]), _fd); + fd_set fds{}; + FD_ZERO(&fds); + for (auto fd : _fds) + { + FD_SET(fd, &fds); + } struct timespec mono_time; - int r = clock_gettime(CLOCK_MONOTONIC, &mono_time); - if (r) throw linux_backend_exception("could not query time!"); + int ret = clock_gettime(CLOCK_MONOTONIC, &mono_time); + if (ret) throw linux_backend_exception("could not query time!"); struct timeval expiration_time = { mono_time.tv_sec + 5, mono_time.tv_nsec / 1000 }; int val = 0; do { struct timeval remaining; - r = clock_gettime(CLOCK_MONOTONIC, &mono_time); - if (r) throw linux_backend_exception("could not query time!"); + ret = clock_gettime(CLOCK_MONOTONIC, &mono_time); + if (ret) throw linux_backend_exception("could not query time!"); struct timeval current_time = { mono_time.tv_sec, mono_time.tv_nsec / 1000 }; timersub(&expiration_time, ¤t_time, &remaining); if (timercmp(¤t_time, &expiration_time, <)) { - val = select(max_fd + 1, &fds, NULL, NULL, &remaining); + val = select(_max_fd + 1, &fds, NULL, NULL, &remaining); } else { val = 0; @@ -790,166 +910,132 @@ namespace librealsense if(val < 0) { - _is_capturing = false; - _is_started = false; - signal_stop(); - - _thread->join(); - _thread.reset(); - - // Stop streamining - v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if(xioctl(_fd, VIDIOC_STREAMOFF, &type) < 0) - throw linux_backend_exception("xioctl(VIDIOC_STREAMOFF) failed"); + stop_data_capture(); } - else if(val > 0) + else { - if(FD_ISSET(_stop_pipe_fd[0], &fds) || FD_ISSET(_stop_pipe_fd[1], &fds)) - { - if(!_is_capturing) - { - LOG_INFO("Stream finished"); - return; - } - } - else if(FD_ISSET(_fd, &fds)) + if(val > 0) { - FD_ZERO(&fds); - FD_SET(_fd, &fds); - v4l2_buffer buf = {}; - buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - 
buf.memory = _use_memory_map ? V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR; - if(xioctl(_fd, VIDIOC_DQBUF, &buf) < 0) + if(FD_ISSET(_stop_pipe_fd[0], &fds) || FD_ISSET(_stop_pipe_fd[1], &fds)) { - if(errno == EAGAIN) + if(!_is_capturing) + { + LOG_INFO("Stream finished"); return; - - throw linux_backend_exception("xioctl(VIDIOC_DQBUF) failed"); + } + else + { + LOG_ERROR("Stop pipe was signalled during streaming"); + return; + } } - - bool moved_qbuff = false; - auto buffer = _buffers[buf.index]; - - if (_is_started) + else // Check and acquire data buffers from kernel { - if((buf.bytesused < buffer->get_full_length() - MAX_META_DATA_SIZE) && - buf.bytesused > 0) - { - auto percentage = (100 * buf.bytesused) / buffer->get_full_length(); - std::stringstream s; - s << "Incomplete frame detected!\nSize " << buf.bytesused - << " out of " << buffer->get_full_length() << " bytes (" << percentage << "%)"; - librealsense::notification n = { RS2_NOTIFICATION_CATEGORY_FRAME_CORRUPTED, 0, RS2_LOG_SEVERITY_WARN, s.str()}; + buffers_mgr buf_mgr(_use_memory_map); + // Read metadata from a node + acquire_metadata(buf_mgr,fds); - _error_handler(n); - } - else if (buf.bytesused > 0) + if(FD_ISSET(_fd, &fds)) { - void* md_start = nullptr; - uint8_t md_size = 0; - if (has_metadata()) + FD_CLR(_fd,&fds); + v4l2_buffer buf = {}; + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = _use_memory_map ? 
V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR; + if(xioctl(_fd, VIDIOC_DQBUF, &buf) < 0) { - md_start = buffer->get_frame_start() + buffer->get_length_frame_only(); - md_size = (*(uint8_t*)md_start); + LOG_DEBUG("Dequeued empty buf for fd " << _fd); + if(errno == EAGAIN) + return; + + throw linux_backend_exception(to_string() << "xioctl(VIDIOC_DQBUF) failed for fd: " << _fd); } + LOG_DEBUG("Dequeued buf " << buf.index << " for fd " << _fd); - auto timestamp = (double)buf.timestamp.tv_sec*1000.f + (double)buf.timestamp.tv_usec/1000.f; - timestamp = monotonic_to_realtime(timestamp); + auto buffer = _buffers[buf.index]; + buf_mgr.handle_buffer(e_video_buf,_fd, buf,buffer); - frame_object fo{ buffer->get_length_frame_only(), md_size, - buffer->get_frame_start(), md_start, timestamp }; + if (_is_started) + { + if((buf.bytesused < buffer->get_full_length() - MAX_META_DATA_SIZE) && + buf.bytesused > 0) + { + auto percentage = (100 * buf.bytesused) / buffer->get_full_length(); + std::stringstream s; + s << "Incomplete video frame detected!\nSize " << buf.bytesused + << " out of " << buffer->get_full_length() << " bytes (" << percentage << "%)"; + librealsense::notification n = { RS2_NOTIFICATION_CATEGORY_FRAME_CORRUPTED, 0, RS2_LOG_SEVERITY_WARN, s.str()}; - buffer->attach_buffer(buf); - moved_qbuff = true; - auto fd = _fd; - _callback(_profile, fo, - [fd, buffer]() mutable { - buffer->request_next_frame(fd); - }); + _error_handler(n); + } + else + { + if (buf.bytesused > 0) + { + auto timestamp = (double)buf.timestamp.tv_sec*1000.f + (double)buf.timestamp.tv_usec/1000.f; + timestamp = monotonic_to_realtime(timestamp); + + // read metadata from the frame appendix + acquire_metadata(buf_mgr,fds); + + if (val > 1) + LOG_INFO("Frame buf ready, md size: " << std::dec << (int)buf_mgr.metadata_size() << " seq. 
id: " << buf.sequence); + frame_object fo{ buffer->get_length_frame_only(), buf_mgr.metadata_size(), + buffer->get_frame_start(), buf_mgr.metadata_start(), timestamp }; + + buffer->attach_buffer(buf); + buf_mgr.handle_buffer(e_video_buf,-1); // transfer new buffer request to the frame callback + + //Invoke user callback and enqueue next frame + _callback(_profile, fo, + [buf_mgr]() mutable { + buf_mgr.request_next_frame(); + }); + } + else + { + LOG_INFO("Empty video frame arrived"); + } + } + } + else + { + LOG_ERROR("Video frame arrived in idle mode."); // TODO - verification + } } else { - LOG_WARNING("Empty frame has arrived."); + LOG_WARNING("FD_ISSET returned false - video node is not signalled (md only)"); } } - - if (!moved_qbuff) - { - if (xioctl(_fd, VIDIOC_QBUF, &buf) < 0) - throw linux_backend_exception("xioctl(VIDIOC_QBUF) failed"); - } } - else + else // (val==0) { - throw linux_backend_exception("FD_ISSET returned false"); + LOG_WARNING("Frames didn't arrived within 5 seconds"); + librealsense::notification n = {RS2_NOTIFICATION_CATEGORY_FRAMES_TIMEOUT, 0, RS2_LOG_SEVERITY_WARN, "Frames didn't arrived within 5 seconds"}; + + _error_handler(n); } } - else - { - LOG_WARNING("Frames didn't arrived within 5 seconds"); - librealsense::notification n = {RS2_NOTIFICATION_CATEGORY_FRAMES_TIMEOUT, 0, RS2_LOG_SEVERITY_WARN, "Frames didn't arrived within 5 seconds"}; + } - _error_handler(n); - } + void v4l_uvc_device::acquire_metadata(buffers_mgr & buf_mgr,fd_set &fds) + { + if (has_metadata()) + buf_mgr.set_md_from_video_node(); + else + buf_mgr.set_md_attributes(0, nullptr); } void v4l_uvc_device::set_power_state(power_state state) { if (state == D0 && _state == D3) { - _fd = open(_name.c_str(), O_RDWR | O_NONBLOCK, 0); - if(_fd < 0) - throw linux_backend_exception(to_string() << "Cannot open '" << _name); - - - if (pipe(_stop_pipe_fd) < 0) - throw linux_backend_exception("v4l_uvc_device: Cannot create pipe!"); - - v4l2_capability cap = {}; - if(xioctl(_fd, 
VIDIOC_QUERYCAP, &cap) < 0) - { - if(errno == EINVAL) - throw linux_backend_exception(_name + " is no V4L2 device"); - else - throw linux_backend_exception("xioctl(VIDIOC_QUERYCAP) failed"); - } - if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) - throw linux_backend_exception(_name + " is no video capture device"); - - if(!(cap.capabilities & V4L2_CAP_STREAMING)) - throw linux_backend_exception(_name + " does not support streaming I/O"); - - // Select video input, video standard and tune here. - v4l2_cropcap cropcap = {}; - cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - if(xioctl(_fd, VIDIOC_CROPCAP, &cropcap) == 0) - { - v4l2_crop crop = {}; - crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; - crop.c = cropcap.defrect; // reset to default - if(xioctl(_fd, VIDIOC_S_CROP, &crop) < 0) - { - switch (errno) - { - case EINVAL: break; // Cropping not supported - default: break; // Errors ignored - } - } - } else {} // Errors ignored + map_device_descriptor(); } if (state == D3 && _state == D0) { close(_profile); - if(::close(_fd) < 0) - throw linux_backend_exception("v4l_uvc_device: close(_fd) failed"); - - if(::close(_stop_pipe_fd[0]) < 0) - throw linux_backend_exception("v4l_uvc_device: close(_stop_pipe_fd[0]) failed"); - if(::close(_stop_pipe_fd[1]) < 0) - throw linux_backend_exception("v4l_uvc_device: close(_stop_pipe_fd[1]) failed"); - - _fd = 0; - _stop_pipe_fd[0] = _stop_pipe_fd[1] = 0; + unmap_device_descriptor(); } _state = state; } @@ -1254,25 +1340,352 @@ namespace librealsense } } - bool v4l_uvc_device::has_metadata() + bool v4l_uvc_device::has_metadata() const { return !_use_memory_map; } + void v4l_uvc_device::streamon() const + { + stream_ctl_on(_fd); + } + + void v4l_uvc_device::streamoff() const + { + stream_off(_fd); + } + + void v4l_uvc_device::negotiate_kernel_buffers(size_t num) const + { + req_io_buff(_fd, num, _name, + _use_memory_map ? 
V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR, + V4L2_BUF_TYPE_VIDEO_CAPTURE); + } + + void v4l_uvc_device::allocate_io_buffers(size_t buffers) + { + if (buffers) + { + for(size_t i = 0; i < buffers; ++i) + { + _buffers.push_back(std::make_shared(_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, _use_memory_map, i)); + } + } + else + { + for(size_t i = 0; i < _buffers.size(); i++) + { + _buffers[i]->detach_buffer(); + } + _buffers.resize(0); + } + } + + void v4l_uvc_device::map_device_descriptor() + { + _fd = open(_name.c_str(), O_RDWR | O_NONBLOCK, 0); + if(_fd < 0) + throw linux_backend_exception(to_string() <<__FUNCTION__ << " Cannot open '" << _name); + + if (pipe(_stop_pipe_fd) < 0) + throw linux_backend_exception(to_string() <<__FUNCTION__ << " Cannot create pipe!"); + + if (_fds.size()) + throw linux_backend_exception(to_string() <<__FUNCTION__ << " Device descriptor is already allocated"); + + _fds.insert(_fds.end(),{_fd,_stop_pipe_fd[0],_stop_pipe_fd[1]}); + _max_fd = *std::max_element(_fds.begin(),_fds.end()); + + v4l2_capability cap = {}; + if(xioctl(_fd, VIDIOC_QUERYCAP, &cap) < 0) + { + if(errno == EINVAL) + throw linux_backend_exception(_name + " is not V4L2 device"); + else + throw linux_backend_exception("xioctl(VIDIOC_QUERYCAP) failed"); + } + if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) + throw linux_backend_exception(_name + " is no video capture device"); + + if(!(cap.capabilities & V4L2_CAP_STREAMING)) + throw linux_backend_exception(_name + " does not support streaming I/O"); + + // Select video input, video standard and tune here. 
+ v4l2_cropcap cropcap = {}; + cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if(xioctl(_fd, VIDIOC_CROPCAP, &cropcap) == 0) + { + v4l2_crop crop = {}; + crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + crop.c = cropcap.defrect; // reset to default + if(xioctl(_fd, VIDIOC_S_CROP, &crop) < 0) + { + switch (errno) + { + case EINVAL: break; // Cropping not supported + default: break; // Errors ignored + } + } + } else {} // Errors ignored + } + + void v4l_uvc_device::unmap_device_descriptor() + { + if(::close(_fd) < 0) + throw linux_backend_exception("v4l_uvc_device: close(_fd) failed"); + + if(::close(_stop_pipe_fd[0]) < 0) + throw linux_backend_exception("v4l_uvc_device: close(_stop_pipe_fd[0]) failed"); + if(::close(_stop_pipe_fd[1]) < 0) + throw linux_backend_exception("v4l_uvc_device: close(_stop_pipe_fd[1]) failed"); + + _fd = 0; + _stop_pipe_fd[0] = _stop_pipe_fd[1] = 0; + _fds.clear(); + } + + void v4l_uvc_device::set_format(stream_profile profile) + { + v4l2_format fmt = {}; + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + fmt.fmt.pix.width = profile.width; + fmt.fmt.pix.height = profile.height; + fmt.fmt.pix.pixelformat = (const big_endian &)profile.format; + fmt.fmt.pix.field = V4L2_FIELD_NONE; + if(xioctl(_fd, VIDIOC_S_FMT, &fmt) < 0) + { + throw linux_backend_exception("xioctl(VIDIOC_S_FMT) failed"); + } + + LOG_INFO("Trying to configure fourcc " << fourcc_to_string(fmt.fmt.pix.pixelformat)); + } + + v4l_uvc_meta_device::v4l_uvc_meta_device(const uvc_device_info& info, bool use_memory_map): + v4l_uvc_device(info,use_memory_map), + _md_fd(0), + _md_name(info.metadata_node_id) + { + LOG_INFO(__FUNCTION__); + } + + v4l_uvc_meta_device::~v4l_uvc_meta_device() + { + LOG_INFO(__FUNCTION__); + } + + void v4l_uvc_meta_device::streamon() const + { + // Metadata stream shall be configured first to allow sync with video node + stream_ctl_on(_md_fd,LOCAL_V4L2_BUF_TYPE_META_CAPTURE); + + // Invoke UVC streaming request + v4l_uvc_device::streamon(); + + } + + void 
v4l_uvc_meta_device::streamoff() const + { + v4l_uvc_device::streamoff(); + + stream_off(_md_fd,LOCAL_V4L2_BUF_TYPE_META_CAPTURE); + } + + void v4l_uvc_meta_device::negotiate_kernel_buffers(size_t num) const + { + v4l_uvc_device::negotiate_kernel_buffers(num); + + req_io_buff(_md_fd, num, _name, + _use_memory_map ? V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR, + LOCAL_V4L2_BUF_TYPE_META_CAPTURE); + } + + void v4l_uvc_meta_device::allocate_io_buffers(size_t buffers) + { + v4l_uvc_device::allocate_io_buffers(buffers); + + if (buffers) + { + for(size_t i = 0; i < buffers; ++i) + { + _md_buffers.push_back(std::make_shared(_md_fd, LOCAL_V4L2_BUF_TYPE_META_CAPTURE, _use_memory_map, i)); + } + } + else + { + for(size_t i = 0; i < _buffers.size(); i++) + { + _md_buffers[i]->detach_buffer(); + } + _md_buffers.resize(0); + } + } + + void v4l_uvc_meta_device::map_device_descriptor() + { + v4l_uvc_device::map_device_descriptor(); + + if (_md_fd>0) + throw linux_backend_exception(to_string() << _md_name << " descriptor is already opened"); + + _md_fd = open(_md_name.c_str(), O_RDWR | O_NONBLOCK, 0); + if(_md_fd < 0) + throw linux_backend_exception(to_string() << "Cannot open '" << _md_name); + + _fds.push_back(_md_fd); + _max_fd = *std::max_element(_fds.begin(),_fds.end()); + + v4l2_capability cap = {}; + if(xioctl(_md_fd, VIDIOC_QUERYCAP, &cap) < 0) + { + if(errno == EINVAL) + throw linux_backend_exception(_md_name + " is no V4L2 device"); + else + throw linux_backend_exception(_md_name + " xioctl(VIDIOC_QUERYCAP) for metadata failed"); + } + + if(!(cap.capabilities & V4L2_CAP_META_CAPTURE)) + throw linux_backend_exception(_md_name + " is not metadata capture device"); + + if(!(cap.capabilities & V4L2_CAP_STREAMING)) + throw linux_backend_exception(_md_name + " does not support metadata streaming I/O"); + } + + + void v4l_uvc_meta_device::unmap_device_descriptor() + { + v4l_uvc_device::unmap_device_descriptor(); + + if(::close(_md_fd) < 0) + throw 
linux_backend_exception("v4l_uvc_meta_device: close(_md_fd) failed"); + + _md_fd = 0; + } + + void v4l_uvc_meta_device::set_format(stream_profile profile) + { + // Select video node streaming format + v4l_uvc_device::set_format(profile); + + // Configure metadata node stream format + v4l2_format fmt{ }; + fmt.type = LOCAL_V4L2_BUF_TYPE_META_CAPTURE; + + if (xioctl(_md_fd, VIDIOC_G_FMT, &fmt)) + throw linux_backend_exception(_md_name + " ioctl(VIDIOC_G_FMT) for metadata node failed"); + + if (fmt.type != LOCAL_V4L2_BUF_TYPE_META_CAPTURE) + throw linux_backend_exception("ioctl(VIDIOC_G_FMT): " + _md_name + " node is not metadata capture"); + + bool success = false; + for (const uint32_t& request : { V4L2_META_FMT_D4XX, V4L2_META_FMT_UVC}) + { + // Configure metadata format - try d4xx, then fallback to currently retrieve UVC default header of 12 bytes + memcpy(fmt.fmt.raw_data,&request,sizeof(request)); + + if(xioctl(_md_fd, VIDIOC_S_FMT, &fmt) >= 0) + { + LOG_DEBUG("Metadata node was successfully configured to " << fourcc_to_string(request) << " format"); + success =true; + break; + } + else + { + LOG_INFO("Metadata configuration failed for " << fourcc_to_string(request)); + } + } + + if (!success) + throw linux_backend_exception(_md_name + " ioctl(VIDIOC_S_FMT) for metadata node failed"); + + } + + void v4l_uvc_meta_device::prepare_capture_buffers() + { + // Meta node to be initialized first to enforce initial sync + for (auto&& buf : _md_buffers) buf->prepare_for_streaming(_md_fd); + + // Request streaming for video node + v4l_uvc_device::prepare_capture_buffers(); + + } + + // retrieve metadata from a dedicated UVC node + void v4l_uvc_meta_device::acquire_metadata(buffers_mgr & buf_mgr,fd_set &fds) + { + // Metadata is calculated once per frame + if (buf_mgr.metadata_size()) + return; + + if(FD_ISSET(_md_fd, &fds)) + { + FD_CLR(_md_fd,&fds); + //FD_ZERO(&fds); + v4l2_buffer buf{}; + buf.type = LOCAL_V4L2_BUF_TYPE_META_CAPTURE; + buf.memory = _use_memory_map ? 
V4L2_MEMORY_MMAP : V4L2_MEMORY_USERPTR; + + if(xioctl(_md_fd, VIDIOC_DQBUF, &buf) < 0) + { + if(errno == EAGAIN) + return; + + throw linux_backend_exception(to_string() << "xioctl(VIDIOC_DQBUF) failed for metadata fd: " << _md_fd); + } + LOG_DEBUG("Dequeued buf " << buf.index << " for fd " << _md_fd); + + auto buffer = _md_buffers[buf.index]; + buf_mgr.handle_buffer(e_metadata_buf,_md_fd, buf,buffer); + + if (_is_started) + { + static const size_t uvc_md_start_offset = sizeof(uvc_meta_buffer::ns) + sizeof(uvc_meta_buffer::sof); + + if(buf.bytesused > uvc_md_start_offset ) + { + // The first uvc_md_start_offset bytes of metadata buffer are generated by host driver + buf_mgr.set_md_attributes(buf.bytesused - uvc_md_start_offset, + buffer->get_frame_start() + uvc_md_start_offset); + + buffer->attach_buffer(buf); + buf_mgr.handle_buffer(e_metadata_buf,-1); // transfer new buffer request to the frame callback + } + else + { + // Zero-size buffers generate empty md. Non-zero partial bufs handled as errors + if(buf.bytesused > 0) + { + std::stringstream s; + s << "Invalid metadata payload, size " << buf.bytesused; + LOG_INFO(s.str()); + _error_handler({ RS2_NOTIFICATION_CATEGORY_FRAME_CORRUPTED, 0, RS2_LOG_SEVERITY_WARN, s.str()}); + } + } + } + else + { + LOG_WARNING("Metadata frame arrived in idle mode."); + } + } + } + std::shared_ptr v4l_backend::create_uvc_device(uvc_device_info info) const { - return std::make_shared( - std::make_shared(info)); + auto v4l_uvc_dev = (!info.has_metadata_node) ? 
std::make_shared(info) : + std::make_shared(info); + + return std::make_shared(v4l_uvc_dev); } + std::vector v4l_backend::query_uvc_devices() const { - std::vector results; + std::vector uvc_nodes; v4l_uvc_device::foreach_uvc_device( - [&results](const uvc_device_info& i, const std::string&) + [&uvc_nodes](const uvc_device_info& i, const std::string&) { - results.push_back(i); + uvc_nodes.push_back(i); }); - return results; + + return uvc_nodes; } std::shared_ptr v4l_backend::create_usb_device(usb_device_info info) const diff --git a/src/linux/backend-v4l2.h b/src/linux/backend-v4l2.h index 292faad18a..baed10e251 100644 --- a/src/linux/backend-v4l2.h +++ b/src/linux/backend-v4l2.h @@ -43,6 +43,43 @@ #include "../third-party/libusb/libusb/libusb.h" #pragma GCC diagnostic pop +// Metadata streaming nodes are available with kernels 4.16+ +#ifdef V4L2_META_FMT_UVC +constexpr bool metadata_node = true; +#else +#pragma message ( "\nLibrealsense notification: V4L2_META_FMT_UVC was not defined, adding metadata constructs") + +constexpr bool metadata_node = false; + +// Providing missing parts from videodev2.h +// V4L2_META_FMT_UVC >> V4L2_CAP_META_CAPTURE is also defined, but the opposite does not hold +#define V4L2_META_FMT_UVC v4l2_fourcc('U', 'V', 'C', 'H') /* UVC Payload Header */ + +#ifndef V4L2_CAP_META_CAPTURE +#define V4L2_CAP_META_CAPTURE 0x00800000 /* Specified in kernel header v4.16 */ +#endif // V4L2_CAP_META_CAPTURE + +#endif // V4L2_META_FMT_UVC + +#ifndef V4L2_META_FMT_D4XX +#define V4L2_META_FMT_D4XX v4l2_fourcc('D', '4', 'X', 'X') /* D400 Payload Header metadata */ +#endif + +// Use local definition of buf type to resolve for kernel versions +constexpr auto LOCAL_V4L2_BUF_TYPE_META_CAPTURE = (v4l2_buf_type)(13); + +#pragma pack(push, 1) +// The struct definition is identical to uvc_meta_buf defined uvcvideo.h/ kernel 4.16 headers, and is provided to allow for cross-kernel compilation +struct uvc_meta_buffer { + __u64 ns; // system timestamp of the payload 
in nanoseconds + __u16 sof; // USB Frame Number + __u8 length; // length of the payload metadata header + __u8 flags; // payload header flags + __u8* buf; //device-specific metadata payload data +}; +#pragma pack(pop) + + namespace librealsense { namespace platform @@ -75,7 +112,7 @@ namespace librealsense class buffer { public: - buffer(int fd, bool use_memory_map, int index); + buffer(int fd, v4l2_buf_type type, bool use_memory_map, int index); void prepare_for_streaming(int fd); @@ -92,7 +129,10 @@ namespace librealsense uint8_t* get_frame_start() const { return _start; } + bool use_memory_map() const { return _use_memory_map; } + private: + v4l2_buf_type _type; uint8_t* _start; size_t _length; size_t _original_length; @@ -103,6 +143,80 @@ namespace librealsense bool _must_enqueue = false; }; + enum supported_kernel_buf_types : uint8_t + { + e_video_buf, + e_metadata_buf, + e_max_kernel_buf_type + }; + + + // RAII handling of kernel buffers interchanges + class buffers_mgr + { + public: + buffers_mgr(bool memory_mapped_buf) : + _md_start(nullptr), + _md_size(0), + _mmap_bufs(memory_mapped_buf) + {}; + + ~buffers_mgr(){}; + + void request_next_frame(); + void handle_buffer(supported_kernel_buf_types buf_type, int file_desc, + v4l2_buffer buf= v4l2_buffer(), + std::shared_ptr data_buf=nullptr); + + uint8_t metadata_size() const { return _md_size; }; + void* metadata_start() const { return _md_start; }; + + void set_md_attributes(uint8_t md_size, void* md_start) + { _md_start = md_start; _md_size = md_size; } + + void set_md_from_video_node() + { + void* start = nullptr; + auto size = 0; + + if (buffers.at(e_video_buf)._file_desc >=0) + { + auto buffer = buffers.at(e_video_buf)._data_buf; + start = buffer->get_frame_start() + buffer->get_length_frame_only(); + size = (*(uint8_t*)start); + } + set_md_attributes(size,start); + } + + private: + void* _md_start; // marks the address of metadata blob + uint8_t _md_size; // metadata size is bounded by 255 bytes by design 
+ bool _mmap_bufs; + + // RAII for buffer exchange with kernel + struct kernel_buf_guard + { + ~kernel_buf_guard() + { + if (_data_buf && (!_managed)) + { + LOG_DEBUG("Enqueue buf " << _dq_buf.index << " for fd " << _file_desc); + if (xioctl(_file_desc, (int)VIDIOC_QBUF, &_dq_buf) < 0) + { + LOG_ERROR("xioctl(VIDIOC_QBUF) guard failed"); + } + } + } + + int _file_desc=-1; + bool _managed=false; + std::shared_ptr _data_buf=nullptr; + v4l2_buffer _dq_buf{}; + }; + + std::array buffers; + }; + class v4l_usb_device : public usb_device { public: @@ -125,7 +239,26 @@ namespace librealsense int _mi; }; - class v4l_uvc_device : public uvc_device + class v4l_uvc_interface + { + virtual void capture_loop() = 0; + + virtual bool has_metadata() const = 0; + + virtual void streamon() const = 0; + virtual void streamoff() const = 0; + virtual void negotiate_kernel_buffers(size_t num) const = 0; + + virtual void allocate_io_buffers(size_t num) = 0; + virtual void map_device_descriptor() = 0; + virtual void unmap_device_descriptor() = 0; + virtual void set_format(stream_profile profile) = 0; + virtual void prepare_capture_buffers() = 0; + virtual void stop_data_capture() = 0; + virtual void acquire_metadata(buffers_mgr & buf_mgr,fd_set &fds) = 0; + }; + + class v4l_uvc_device : public uvc_device, public v4l_uvc_interface { public: static void foreach_uvc_device( @@ -174,20 +307,30 @@ namespace librealsense std::string get_device_location() const override { return _device_path; } usb_spec get_usb_specification() const override { return _device_usb_spec; } - private: + protected: static uint32_t get_cid(rs2_option option); - void capture_loop(); + virtual void capture_loop(); + + virtual bool has_metadata() const; - bool has_metadata(); + virtual void streamon() const; + virtual void streamoff() const; + virtual void negotiate_kernel_buffers(size_t num) const; + + virtual void allocate_io_buffers(size_t num); + virtual void map_device_descriptor(); + virtual void 
unmap_device_descriptor(); + virtual void set_format(stream_profile profile); + virtual void prepare_capture_buffers(); + virtual void stop_data_capture(); + virtual void acquire_metadata(buffers_mgr & buf_mgr,fd_set &fds); power_state _state = D3; std::string _name = ""; std::string _device_path = ""; usb_spec _device_usb_spec = usb_undefined; uvc_device_info _info; - int _fd = 0; - int _stop_pipe_fd[2]; // write to _stop_pipe_fd[1] and read from _stop_pipe_fd[0] std::vector> _buffers; stream_profile _profile; @@ -198,6 +341,40 @@ namespace librealsense std::unique_ptr _thread; std::unique_ptr _named_mtx; bool _use_memory_map; + int _max_fd = 0; // specifies the maximal pipe number the polling process will monitor + std::vector _fds; // list the file descriptors to be monitored during frames polling + + private: + int _fd = 0; // prevent unintentional abuse in derived class + int _stop_pipe_fd[2]; // write to _stop_pipe_fd[1] and read from _stop_pipe_fd[0] + + }; + + // Composition layer for uvc/metadata split nodes introduced with kernel 4.16 + class v4l_uvc_meta_device : public v4l_uvc_device + { + public: + v4l_uvc_meta_device(const uvc_device_info& info, bool use_memory_map = false); + + ~v4l_uvc_meta_device(); + + protected: + + void streamon() const; + void streamoff() const; + void negotiate_kernel_buffers(size_t num) const; + void allocate_io_buffers(size_t num); + void map_device_descriptor(); + void unmap_device_descriptor(); + void set_format(stream_profile profile); + void prepare_capture_buffers(); + virtual void acquire_metadata(buffers_mgr & buf_mgr,fd_set &fds); + + int _md_fd = -1; + std::string _md_name = ""; + + std::vector> _md_buffers; + stream_profile _md_profile; }; class v4l_backend : public backend diff --git a/src/metadata-parser.h b/src/metadata-parser.h index 74993e1c9d..5d9d4b615e 100644 --- a/src/metadata-parser.h +++ b/src/metadata-parser.h @@ -147,9 +147,10 @@ namespace librealsense if ((s->header.md_type_id != expected_type) || 
(s->header.md_size !=sizeof(*s))) { std::string type = (md_type_desc.count(s->header.md_type_id) > 0) ? - md_type_desc.at(s->header.md_type_id) : (to_string() << "0x" << static_cast(s->header.md_type_id)); + md_type_desc.at(s->header.md_type_id) : (to_string() + << "0x" << std::hex << static_cast(s->header.md_type_id) << std::dec); LOG_DEBUG("Metadata mismatch - actual: " << type - << ", expected: " << md_type_desc.at(expected_type) << "(0x" << std::hex << (uint32_t)expected_type << std::dec << ")"); + << ", expected: 0x" << std::hex << (uint32_t)expected_type << std::dec << " (" << md_type_desc.at(expected_type) << ")"); return false; } diff --git a/src/pipeline.cpp b/src/pipeline.cpp index 22194d92d2..70179ab54c 100644 --- a/src/pipeline.cpp +++ b/src/pipeline.cpp @@ -602,6 +602,7 @@ namespace librealsense } catch (const std::exception& e) { + LOG_INFO(e.what()); return false; } } diff --git a/src/pipeline.h b/src/pipeline.h index 03321f8bfe..144694b25f 100644 --- a/src/pipeline.h +++ b/src/pipeline.h @@ -96,9 +96,6 @@ namespace librealsense pipeline_config(const pipeline_config& other) { - if (this == &other) - return; - _device_request = other._device_request; _stream_requests = other._stream_requests; _enable_all_streams = other._enable_all_streams; diff --git a/src/proc/align.cpp b/src/proc/align.cpp index cb0cb939c3..b1b9a17b30 100644 --- a/src/proc/align.cpp +++ b/src/proc/align.cpp @@ -72,7 +72,6 @@ namespace librealsense const __m128i mask1 = _mm_set_epi8((char)0xff, (char)0xff, (char)15, (char)14, (char)0xff, (char)0xff, (char)13, (char)12, (char)0xff, (char)0xff, (char)11, (char)10, (char)0xff, (char)0xff, (char)9, (char)8); - auto zerro = _mm_set_ps1(0); auto scale = _mm_set_ps1(depth_scale); auto mapx = pre_compute_x; @@ -360,6 +359,9 @@ namespace librealsense case 3: move_other_to_depth(z_pixels, reinterpret_cast*>(source), reinterpret_cast*>(dest), to, _pixel_top_left_int, bottom_right); + case 4: + move_other_to_depth(z_pixels, 
reinterpret_cast*>(source), reinterpret_cast*>(dest), to, + _pixel_top_left_int, bottom_right); break; default: break; @@ -503,49 +505,47 @@ namespace librealsense align_other_to_depth(other_aligned_to_z, [z_pixels, z_scale](int z_pixel_index) { return z_scale * z_pixels[z_pixel_index]; }, z_intrin, z_to_other, other_intrin, other_pixels, other_format); } - int align::get_unique_id(const std::shared_ptr& original_profile, - const std::shared_ptr& to_profile, - const std::shared_ptr& aligned_profile) + int align::get_unique_id(rs2::video_stream_profile& original_profile, + rs2::video_stream_profile& to_profile, + rs2::video_stream_profile& aligned_profile) { //align_stream_unique_ids holds a cache of mapping between the 2 streams that created the new aligned stream // to it stream id. //When an aligned frame is created from other streams (but with the same instance of this class) - // the from_to pair will be different so a new id will be added to the cache. + // the from-to pair will be different so a new id will be added to the cache. //This allows the user to pass different streams to this class and for every pair of from_to //the user will always get the same stream id for the aligned stream. 
- auto from_to = std::make_pair(original_profile->get_unique_id(), to_profile->get_unique_id()); - auto it = align_stream_unique_ids.find(from_to); - if (it != align_stream_unique_ids.end()) + auto from_to = std::make_pair(original_profile.get()->profile->get_unique_id(), to_profile.get()->profile->get_unique_id()); + auto it = _align_stream_unique_ids.find(from_to); + if (it != _align_stream_unique_ids.end()) { return it->second; } else { - int new_id = aligned_profile->get_unique_id(); - align_stream_unique_ids[from_to] = new_id; + int new_id = aligned_profile.get()->profile->get_unique_id(); + _align_stream_unique_ids[from_to] = new_id; return new_id; } } - std::shared_ptr align::create_aligned_profile( - const std::shared_ptr& original_profile, - const std::shared_ptr& to_profile) + + std::shared_ptr align::create_aligned_profile( + rs2::video_stream_profile& original_profile, + rs2::video_stream_profile& to_profile) { - auto aligned_profile = original_profile->clone(); - int aligned_unique_id = get_unique_id(original_profile, to_profile, aligned_profile); - aligned_profile->set_unique_id(aligned_unique_id); - environment::get_instance().get_extrinsics_graph().register_same_extrinsics(*aligned_profile, *original_profile); - aligned_profile->set_stream_index(original_profile->get_stream_index()); - aligned_profile->set_stream_type(original_profile->get_stream_type()); - aligned_profile->set_format(original_profile->get_format()); - aligned_profile->set_framerate(original_profile->get_framerate()); - if (auto original_video_profile = As(original_profile)) + auto aligned_profile = std::make_shared(original_profile.clone(original_profile.stream_type(), original_profile.stream_index(), original_profile.format())); + int aligned_unique_id = get_unique_id(original_profile, to_profile, *aligned_profile); + aligned_profile->get()->profile->set_unique_id(aligned_unique_id); + 
environment::get_instance().get_extrinsics_graph().register_same_extrinsics(*aligned_profile.get()->get()->profile, *original_profile.get()->profile); + aligned_profile->get()->profile->set_framerate(original_profile.fps()); + if (auto original_video_profile = As(original_profile.get()->profile)) { - if (auto to_video_profile = As(to_profile)) + if (auto to_video_profile = As(to_profile.get()->profile)) { - if (auto aligned_video_profile = As(aligned_profile)) + if (auto aligned_video_profile = As(aligned_profile->get()->profile)) { aligned_video_profile->set_dims(to_video_profile->get_width(), to_video_profile->get_height()); - auto aligned_intrinsics = original_video_profile->get_intrinsics(); + auto aligned_intrinsics = to_video_profile->get_intrinsics(); aligned_intrinsics.width = to_video_profile->get_width(); aligned_intrinsics.height = to_video_profile->get_height(); aligned_video_profile->set_intrinsics([aligned_intrinsics]() { return aligned_intrinsics; }); @@ -554,148 +554,129 @@ namespace librealsense } return aligned_profile; } - void align::on_frame(frame_holder frameset, librealsense::synthetic_source_interface* source) + + bool align::should_process(const rs2::frame& frame) { - auto composite = As(frameset.frame); - if (composite == nullptr) - { - LOG_WARNING("Trying to align a non composite frame"); - return; - } + if (!frame) + return false; + + auto set = frame.as(); + if (!set) + return false; + + auto profile = frame.get_profile(); + rs2_stream stream = profile.stream_type(); + rs2_format format = profile.format(); + int index = profile.stream_index(); + + //process composite frame only if it contains both a depth frame and the requested texture frame + bool has_tex = false, has_depth = false; + set.foreach([this, &has_tex](const rs2::frame& frame) { if (frame.get_profile().stream_type() == _to_stream_type) has_tex = true; }); + set.foreach([&has_depth](const rs2::frame& frame) + { if (frame.get_profile().stream_type() == RS2_STREAM_DEPTH && 
frame.get_profile().format() == RS2_FORMAT_Z16) has_depth = true; }); + if (!has_tex || !has_depth) + return false; + + return true; + } - if (composite->get_embedded_frames_count() < 2) - { - LOG_WARNING("Trying to align a single frame"); - return; - } + rs2::frame align::process_frame(const rs2::frame_source& source, const rs2::frame& f) + { + rs2::frame rv; + auto frames = f.as(); + auto depth_frame = frames.first_or_default(RS2_STREAM_DEPTH, RS2_FORMAT_Z16).as(); - librealsense::video_frame* depth_frame = nullptr; - std::vector other_frames; - //Find the depth frame - for (int i = 0; i < composite->get_embedded_frames_count(); i++) + auto curr_depth = depth_frame.get_profile().as(); + // TODO: use sensor/profile id, to enable handle a case that two different depth streams with similar resolution being proceesed by the same "align" object + if (_prev_depth_res.first != curr_depth.width() || _prev_depth_res.second != curr_depth.height()) { - frame_interface* f = composite->get_frame(i); - if (f->get_stream()->get_stream_type() == RS2_STREAM_DEPTH) - { - assert(depth_frame == nullptr); // Trying to align multiple depth frames is not supported, in release we take the last one - depth_frame = As(f); - if (depth_frame == nullptr) - { - LOG_ERROR("Given depth frame is not a librealsense::video_frame"); - return; - } - } - else - { - auto other_video_frame = As(f); - auto other_stream_profile = f->get_stream(); - assert(other_stream_profile != nullptr); - - if (other_video_frame == nullptr) - { - LOG_ERROR("Given frame of type " << other_stream_profile->get_stream_type() << ", is not a librealsense::video_frame, ignoring it"); - return; - } - - if (_to_stream_type == RS2_STREAM_DEPTH) - { - //In case of alignment to depth, we will align any image given in the frameset to the depth one - other_frames.push_back(other_video_frame); - } - else - { - //In case of alignment from depth to other, we only want the other frame with the stream type that was requested to align to 
- if (other_stream_profile->get_stream_type() == _to_stream_type) - { - assert(other_frames.size() == 0); // Trying to align depth to multiple frames is not supported, in release we take the last one - other_frames.push_back(other_video_frame); - } - } - } +#ifdef __SSSE3__ + _stream_transform = nullptr; +#endif + _prev_depth_res.first = curr_depth.width(); + _prev_depth_res.second = curr_depth.height(); } - if (depth_frame == nullptr) - { - LOG_WARNING("No depth frame provided to align"); - return; - } + std::vector other_frames; - if (other_frames.empty()) - { - LOG_WARNING("Only depth frame provided to align"); - return; - } + //In case of alignment to depth, we will align any image given in the frameset to the depth one + //In case of alignment from depth to other, we only want the other frame with the stream type that was requested to align to - auto depth_profile = As(depth_frame->get_stream()); - if (depth_profile == nullptr) + if(_to_stream_type == RS2_STREAM_DEPTH) + frames.foreach([&other_frames](const rs2::frame& f) {if (f.get_profile().stream_type() != RS2_STREAM_DEPTH) other_frames.push_back(f); }); + else + frames.foreach([this, &other_frames](const rs2::frame& f) {if (f.get_profile().stream_type() == _to_stream_type) other_frames.push_back(f); }); + + auto depth_profile = depth_frame.get_profile().as(); + if (!depth_profile) { LOG_ERROR("Depth profile is not a video stream profile"); - return; + return rv; } - rs2_intrinsics depth_intrinsics = depth_profile->get_intrinsics(); - std::vector output_frames; + rs2_intrinsics depth_intrinsics = depth_profile.get_intrinsics(); + std::vector output_frames; if (_to_stream_type == RS2_STREAM_DEPTH) { //Storing the original depth frame for output frameset - depth_frame->acquire(); + depth_frame.keep(); output_frames.push_back(depth_frame); } - for (librealsense::video_frame* other_frame : other_frames) + for (rs2::video_frame other_frame : other_frames) { - auto other_profile = As(other_frame->get_stream()); + 
auto other_profile = other_frame.get_profile().as(); if (other_profile == nullptr) { - LOG_WARNING("Other frame with type " << other_frame->get_stream()->get_stream_type() << ", is not a video stream profile. Ignoring it"); + LOG_WARNING("Other frame with type " << other_frame.get_profile().stream_name() << ", is not a video stream profile. Ignoring it"); continue; } - rs2_intrinsics other_intrinsics = other_profile->get_intrinsics(); + rs2_intrinsics other_intrinsics = other_profile.get_intrinsics(); rs2_extrinsics depth_to_other_extrinsics{}; - if (!environment::get_instance().get_extrinsics_graph().try_fetch_extrinsics(*depth_profile, *other_profile, &depth_to_other_extrinsics)) + if (!environment::get_instance().get_extrinsics_graph().try_fetch_extrinsics(*depth_profile.get()->profile, *other_profile.get()->profile, &depth_to_other_extrinsics)) { - LOG_WARNING("Failed to get extrinsics from depth to " << other_profile->get_stream_type() << ", ignoring it"); + LOG_WARNING("Failed to get extrinsics from depth to " << other_profile.stream_name() << ", ignoring it"); continue; } - auto sensor = depth_frame->get_sensor(); + auto sensor = ((frame_interface*)depth_frame.get())->get_sensor().get(); if (sensor == nullptr) { LOG_ERROR("Failed to get sensor from depth frame"); - return; + return rv; } if (sensor->supports_option(RS2_OPTION_DEPTH_UNITS) == false) { LOG_ERROR("Sensor of depth frame does not provide depth units"); - return; + return rv; } float depth_scale = sensor->get_option(RS2_OPTION_DEPTH_UNITS).query(); - frame_holder aligned_frame{ nullptr }; + rs2::frame aligned_frame{ nullptr }; if (_to_stream_type == RS2_STREAM_DEPTH) { //Align a stream to depth - auto aligned_bytes_per_pixel = other_frame->get_bpp() / 8; + auto aligned_bytes_per_pixel = other_frame.get_bytes_per_pixel(); auto aligned_profile = create_aligned_profile(other_profile, depth_profile); - aligned_frame = source->allocate_video_frame( - aligned_profile, + aligned_frame = 
source.allocate_video_frame( + *aligned_profile, other_frame, aligned_bytes_per_pixel, - depth_frame->get_width(), - depth_frame->get_height(), - depth_frame->get_width() * aligned_bytes_per_pixel, + depth_frame.get_width(), + depth_frame.get_height(), + depth_frame.get_width() * aligned_bytes_per_pixel, RS2_EXTENSION_VIDEO_FRAME); - if (aligned_frame == nullptr) + if (!aligned_frame) { LOG_ERROR("Failed to allocate frame for aligned output"); - return; + return rv; } - byte* other_aligned_to_depth = const_cast(aligned_frame.frame->get_frame_data()); + byte* other_aligned_to_depth = reinterpret_cast(const_cast(aligned_frame.get_data())); memset(other_aligned_to_depth, 0, depth_intrinsics.height * depth_intrinsics.width * aligned_bytes_per_pixel); #ifdef __SSSE3__ if (_stream_transform == nullptr) @@ -706,29 +687,29 @@ namespace librealsense _stream_transform->pre_compute_x_y_map_corners(); } - _stream_transform->align_other_to_depth(reinterpret_cast(depth_frame->get_frame_data()), - reinterpret_cast(other_frame->get_frame_data()), - other_aligned_to_depth, other_frame->get_bpp()/8, + _stream_transform->align_other_to_depth(reinterpret_cast(depth_frame.get_data()), + reinterpret_cast(other_frame.get_data()), + other_aligned_to_depth, other_frame.get_bytes_per_pixel(), other_intrinsics, depth_to_other_extrinsics); #else align_other_to_z(other_aligned_to_depth, - reinterpret_cast(depth_frame->get_frame_data()), + reinterpret_cast(depth_frame.get_data()), depth_scale, depth_intrinsics, depth_to_other_extrinsics, other_intrinsics, - other_frame->get_frame_data(), - other_profile->get_format()); + reinterpret_cast(other_frame.get_data()), + other_profile.format()); #endif } else { //Align depth to some stream - auto aligned_bytes_per_pixel = depth_frame->get_bpp() / 8; + auto aligned_bytes_per_pixel = depth_frame.get_bytes_per_pixel(); auto aligned_profile = create_aligned_profile(depth_profile, other_profile); - aligned_frame = source->allocate_video_frame( - 
aligned_profile, + aligned_frame = source.allocate_video_frame( + *aligned_profile, depth_frame, aligned_bytes_per_pixel, other_intrinsics.width, @@ -736,14 +717,14 @@ namespace librealsense other_intrinsics.width * aligned_bytes_per_pixel, RS2_EXTENSION_DEPTH_FRAME); - if (aligned_frame == nullptr) + if (!aligned_frame) { LOG_ERROR("Failed to allocate frame for aligned output"); - return; + return rv; } - byte* z_aligned_to_other = const_cast(aligned_frame.frame->get_frame_data()); + byte* z_aligned_to_other = reinterpret_cast(const_cast(aligned_frame.get_data())); memset(z_aligned_to_other, 0, other_intrinsics.height * other_intrinsics.width * aligned_bytes_per_pixel); - auto data = (int16_t*)depth_frame->get_frame_data(); + auto data = (int16_t*)depth_frame.get_data(); #ifdef __SSSE3__ if (_stream_transform == nullptr) @@ -753,34 +734,24 @@ namespace librealsense _stream_transform->pre_compute_x_y_map_corners(); } - _stream_transform->align_depth_to_other(reinterpret_cast(depth_frame->get_frame_data()), - reinterpret_cast(z_aligned_to_other), depth_frame->get_bpp() / 8, - depth_intrinsics, other_intrinsics, - depth_to_other_extrinsics); - + _stream_transform->align_depth_to_other(reinterpret_cast(depth_frame.get_data()), + reinterpret_cast(z_aligned_to_other), depth_frame.get_bytes_per_pixel(), + depth_intrinsics, other_intrinsics, depth_to_other_extrinsics); #else align_z_to_other(z_aligned_to_other, - reinterpret_cast(depth_frame->get_frame_data()), + reinterpret_cast(depth_frame.get_data()), depth_scale, depth_intrinsics, depth_to_other_extrinsics, other_intrinsics); #endif assert(output_frames.size() == 0); //When aligning depth to other, only 2 frames are expected in the output. 
- other_frame->acquire(); + other_frame.keep(); output_frames.push_back(other_frame); } output_frames.push_back(std::move(aligned_frame)); } - auto new_composite = source->allocate_composite_frame(std::move(output_frames)); - source->frame_ready(std::move(new_composite)); + auto new_composite = source.allocate_composite_frame(std::move(output_frames)); + return new_composite; } - - align::align(rs2_stream to_stream) : _to_stream_type(to_stream) - { - auto cb = [this](frame_holder frameset, librealsense::synthetic_source_interface* source) { on_frame(std::move(frameset), source); }; - auto callback = new internal_frame_processor_callback(cb); - processing_block::set_processing_callback(std::shared_ptr(callback)); - } - - } \ No newline at end of file +} diff --git a/src/proc/align.h b/src/proc/align.h index 6f9097e610..72d22fcfe3 100644 --- a/src/proc/align.h +++ b/src/proc/align.h @@ -77,21 +77,25 @@ namespace librealsense }; #endif - class align : public processing_block + class align : public generic_processing_block { public: - align(rs2_stream align_to); + align(rs2_stream to_stream) : _to_stream_type(to_stream){} + + protected: + bool should_process(const rs2::frame& frame) override; + rs2::frame process_frame(const rs2::frame_source& source, const rs2::frame& f) override; private: - void on_frame(frame_holder frameset, librealsense::synthetic_source_interface* source); - std::shared_ptr create_aligned_profile( - const std::shared_ptr& original_profile, - const std::shared_ptr& to_profile); - int get_unique_id(const std::shared_ptr& original_profile, - const std::shared_ptr& to_profile, - const std::shared_ptr& aligned_profile); + std::shared_ptr create_aligned_profile( + rs2::video_stream_profile& original_profile, + rs2::video_stream_profile& to_profile); + int get_unique_id(rs2::video_stream_profile& original_profile, + rs2::video_stream_profile& to_profile, + rs2::video_stream_profile& aligned_profile); rs2_stream _to_stream_type; - std::map, int> 
align_stream_unique_ids; + std::map, int> _align_stream_unique_ids; + std::pair _prev_depth_res; #ifdef __SSSE3__ std::shared_ptr _stream_transform; diff --git a/src/proc/colorizer.cpp b/src/proc/colorizer.cpp index 8dfc653a6c..4d46ac8278 100644 --- a/src/proc/colorizer.cpp +++ b/src/proc/colorizer.cpp @@ -12,15 +12,15 @@ namespace librealsense { - static color_map jet {{ - { 0, 0, 255 }, - { 0, 255, 255 }, - { 255, 255, 0 }, - { 255, 0, 0 }, - { 50, 0, 0 }, - }}; - - static color_map classic { { + static color_map jet{ { + { 0, 0, 255 }, + { 0, 255, 255 }, + { 255, 255, 0 }, + { 255, 0, 0 }, + { 50, 0, 0 }, + } }; + + static color_map classic{ { { 30, 77, 203 }, { 25, 60, 192 }, { 45, 117, 220 }, @@ -39,16 +39,16 @@ namespace librealsense { 255, 255, 255 }, } }; - static color_map biomes { { + static color_map biomes{ { { 0, 0, 204 }, { 204, 230, 255 }, { 255, 255, 153 }, { 170, 255, 128 }, { 0, 153, 0 }, { 230, 242, 255 }, - } }; + } }; - static color_map cold { { + static color_map cold{ { { 230, 247, 255 }, { 0, 92, 230 }, { 0, 179, 179 }, @@ -56,21 +56,21 @@ namespace librealsense { 0, 5, 15 } } }; - static color_map warm { { + static color_map warm{ { { 255, 255, 230 }, { 255, 204, 0 }, { 255, 136, 77 }, { 255, 51, 0 }, { 128, 0, 0 }, - { 10, 0, 0} + { 10, 0, 0 } } }; - static color_map quantized { { + static color_map quantized{ { { 255, 255, 255 }, { 0, 0, 0 }, }, 6 }; - static color_map pattern { { + static color_map pattern{ { { 255, 255, 255 }, { 0, 0, 0 }, { 255, 255, 255 }, @@ -126,6 +126,9 @@ namespace librealsense colorizer::colorizer() : _min(0.f), _max(6.f), _equalize(true), _stream() { + _stream_filter = RS2_STREAM_DEPTH; + _stream_format_filter = RS2_FORMAT_Z16; + _maps = { &jet, &classic, &grayscale, &inv_grayscale, &biomes, &cold, &warm, &quantized, &pattern }; auto min_opt = std::make_shared>(0.f, 16.f, 0.1f, 0.f, &_min, "Min range in meters"); @@ -189,106 +192,93 @@ namespace librealsense auto hist_opt = std::make_shared>(false, true, true, 
true, &_equalize, "Perform histogram equalization"); register_option(RS2_OPTION_HISTOGRAM_EQUALIZATION_ENABLED, hist_opt); + } - auto on_frame = [this](rs2::frame f, const rs2::frame_source& source) + rs2::frame colorizer::process_frame(const rs2::frame_source& source, const rs2::frame& f) + { + if (!_stream || (f.get_profile().get() != _stream->get())) + { + _stream = std::make_shared(f.get_profile().clone(RS2_STREAM_DEPTH, 0, RS2_FORMAT_RGB8)); + environment::get_instance().get_extrinsics_graph().register_same_extrinsics(*_stream->get()->profile, *f.get_profile().get()->profile); + } + auto make_equalized_histogram = [this](const rs2::video_frame& depth, rs2::video_frame rgb) { - auto process_frame = [this, &source](const rs2::frame f) + const auto max_depth = 0x10000; + static uint32_t histogram[max_depth]; + memset(histogram, 0, sizeof(histogram)); + + const auto w = depth.get_width(), h = depth.get_height(); + const auto depth_data = reinterpret_cast(depth.get_data()); + auto rgb_data = reinterpret_cast(const_cast(rgb.get_data())); + + for (auto i = 0; i < w*h; ++i) ++histogram[depth_data[i]]; + for (auto i = 2; i < max_depth; ++i) histogram[i] += histogram[i - 1]; // Build a cumulative histogram for the indices in [1,0xFFFF] + auto cm = _maps[_map_index]; + for (auto i = 0; i < w*h; ++i) { - { - std::lock_guard lock(_mutex); - if (!_stream || (f.get_profile().get() != _stream->get())) - { - _stream = std::make_shared(f.get_profile().clone(RS2_STREAM_DEPTH, 0, RS2_FORMAT_RGB8)); - environment::get_instance().get_extrinsics_graph().register_same_extrinsics(*_stream->get()->profile, *f.get_profile().get()->profile); - } - } + auto d = depth_data[i]; - auto make_equalized_histogram = [this](const rs2::video_frame& depth, rs2::video_frame rgb) + if (d) { - const auto max_depth = 0x10000; - static uint32_t histogram[max_depth]; - memset(histogram, 0, sizeof(histogram)); - - const auto w = depth.get_width(), h = depth.get_height(); - const auto depth_data = 
reinterpret_cast(depth.get_data()); - auto rgb_data = reinterpret_cast(const_cast(rgb.get_data())); - - for (auto i = 0; i < w*h; ++i) ++histogram[depth_data[i]]; - for (auto i = 2; i < max_depth; ++i) histogram[i] += histogram[i - 1]; // Build a cumulative histogram for the indices in [1,0xFFFF] - auto cm = _maps[_map_index]; - for (auto i = 0; i < w*h; ++i) - { - auto d = depth_data[i]; - - if (d) - { - auto f = histogram[d] / (float)histogram[0xFFFF]; // 0-255 based on histogram location - - auto c = cm->get(f); - rgb_data[i * 3 + 0] = (uint8_t)c.x; - rgb_data[i * 3 + 1] = (uint8_t)c.y; - rgb_data[i * 3 + 2] = (uint8_t)c.z; - } - else - { - rgb_data[i * 3 + 0] = 0; - rgb_data[i * 3 + 1] = 0; - rgb_data[i * 3 + 2] = 0; - } - } - }; - auto make_value_cropped_frame = [this](const rs2::video_frame& depth, rs2::video_frame rgb) + auto f = histogram[d] / (float)histogram[0xFFFF]; // 0-255 based on histogram location + + auto c = cm->get(f); + rgb_data[i * 3 + 0] = (uint8_t)c.x; + rgb_data[i * 3 + 1] = (uint8_t)c.y; + rgb_data[i * 3 + 2] = (uint8_t)c.z; + } + else { - const auto max_depth = 0x10000; - const auto w = depth.get_width(), h = depth.get_height(); - const auto depth_data = reinterpret_cast(depth.get_data()); - auto rgb_data = reinterpret_cast(const_cast(rgb.get_data())); - - auto fi = (frame_interface*)depth.get(); - auto df = dynamic_cast(fi); - auto depth_units = df->get_units(); - - for (auto i = 0; i < w*h; ++i) - { - auto d = depth_data[i]; - - if (d) - { - auto f = (d * depth_units - _min) / (_max - _min); - - auto c = _maps[_map_index]->get(f); - rgb_data[i * 3 + 0] = (uint8_t)c.x; - rgb_data[i * 3 + 1] = (uint8_t)c.y; - rgb_data[i * 3 + 2] = (uint8_t)c.z; - } - else - { - rgb_data[i * 3 + 0] = 0; - rgb_data[i * 3 + 1] = 0; - rgb_data[i * 3 + 2] = 0; - } - } - }; - rs2::frame ret = f; - - if (f.get_profile().stream_type() == RS2_STREAM_DEPTH) + rgb_data[i * 3 + 0] = 0; + rgb_data[i * 3 + 1] = 0; + rgb_data[i * 3 + 2] = 0; + } + } + }; + + auto 
make_value_cropped_frame = [this](const rs2::video_frame& depth, rs2::video_frame rgb) + { + const auto max_depth = 0x10000; + const auto w = depth.get_width(), h = depth.get_height(); + const auto depth_data = reinterpret_cast(depth.get_data()); + auto rgb_data = reinterpret_cast(const_cast(rgb.get_data())); + + auto fi = (frame_interface*)depth.get(); + auto df = dynamic_cast(fi); + auto depth_units = df->get_units(); + + for (auto i = 0; i < w*h; ++i) + { + auto d = depth_data[i]; + + if (d) { - auto vf = f.as(); - rs2_extension ext = f.is() ? RS2_EXTENSION_DISPARITY_FRAME : RS2_EXTENSION_DEPTH_FRAME; - ret = source.allocate_video_frame(*_stream, f, 3, vf.get_width(), vf.get_height(), vf.get_width() * 3, ext); + auto f = (d * depth_units - _min) / (_max - _min); - if (_equalize) make_equalized_histogram(f, ret); - else make_value_cropped_frame(f, ret); + auto c = _maps[_map_index]->get(f); + rgb_data[i * 3 + 0] = (uint8_t)c.x; + rgb_data[i * 3 + 1] = (uint8_t)c.y; + rgb_data[i * 3 + 2] = (uint8_t)c.z; + } + else + { + rgb_data[i * 3 + 0] = 0; + rgb_data[i * 3 + 1] = 0; + rgb_data[i * 3 + 2] = 0; } + } + }; + rs2::frame ret; - source.frame_ready(ret); - }; + auto vf = f.as(); + //rs2_extension ext = f.is() ? 
RS2_EXTENSION_DISPARITY_FRAME : RS2_EXTENSION_DEPTH_FRAME; + ret = source.allocate_video_frame(*_stream, f, 3, vf.get_width(), vf.get_height(), vf.get_width() * 3, RS2_EXTENSION_VIDEO_FRAME); - if (auto composite = f.as()) composite.foreach(process_frame); - else process_frame(f); - }; + if (_equalize) + make_equalized_histogram(f, ret); + else + make_value_cropped_frame(f, ret); - auto callback = new rs2::frame_processor_callback(on_frame); - processing_block::set_processing_callback(std::shared_ptr(callback)); + return ret; } } diff --git a/src/proc/colorizer.h b/src/proc/colorizer.h index 00e04704e5..7f75f77f1b 100644 --- a/src/proc/colorizer.h +++ b/src/proc/colorizer.h @@ -93,18 +93,20 @@ namespace librealsense { size_t _size; float3* _data; }; - class colorizer : public processing_block + class colorizer : public stream_filter_processing_block { public: colorizer(); + protected: + rs2::frame process_frame(const rs2::frame_source& source, const rs2::frame& f) override; + private: float _min, _max; bool _equalize; std::vector _maps; int _map_index = 0; int _preset = 0; - std::mutex _mutex; std::shared_ptr _stream; }; } diff --git a/src/proc/decimation-filter.cpp b/src/proc/decimation-filter.cpp index 1019b1e3ab..6a11f5c96c 100644 --- a/src/proc/decimation-filter.cpp +++ b/src/proc/decimation-filter.cpp @@ -192,10 +192,10 @@ namespace librealsense return PIX_MIN(p[4], p[2]); } - const uint8_t decimation_min_val = 1; - const uint8_t decimation_max_val = 8; // Decimation levels according to the reference design - const uint8_t decimation_default_val = 2; - const uint8_t decimation_step = 1; // Linear decimation + const uint8_t decimation_min_val = 1; + const uint8_t decimation_max_val = 8; // Decimation levels according to the reference design + const uint8_t decimation_default_val = 2; + const uint8_t decimation_step = 1; // Linear decimation decimation_filter::decimation_filter() : _decimation_factor(decimation_default_val), @@ -209,6 +209,9 @@ namespace 
librealsense _recalc_profile(false), _options_changed(false) { + _stream_filter = RS2_STREAM_DEPTH; + _stream_format_filter = RS2_FORMAT_Z16; + auto decimation_control = std::make_shared>( decimation_min_val, decimation_max_val, @@ -227,119 +230,45 @@ namespace librealsense if (_control_val != _decimation_factor) { _patch_size = _decimation_factor = _control_val; - _kernel_size = _patch_size*_patch_size; + _kernel_size = _patch_size * _patch_size; _options_changed = true; } }); register_option(RS2_OPTION_FILTER_MAGNITUDE, decimation_control); + } - auto on_frame = [this](rs2::frame f, const rs2::frame_source& source) - { - std::lock_guard lock(_mutex); - - rs2::frame out = f, tgt; - bool composite = f.is(); - - if (composite) - { - rs2::frame depth = f.as().first_or_default(RS2_STREAM_DEPTH); - if (depth) - { - update_output_profile(depth); - rs2_extension tgt_type = depth.is() ? RS2_EXTENSION_DISPARITY_FRAME : RS2_EXTENSION_DEPTH_FRAME; - if (tgt = prepare_target_frame(depth, source, tgt_type)) - { - auto src = depth.as(); - decimate_depth(static_cast(src.get_data()), - static_cast(const_cast(tgt.get_data())), - src.get_width(), src.get_height(), this->_patch_size); - } - } - - rs2::frame color = f.as().first_or_default(RS2_STREAM_COLOR); - if (color) - { - update_output_profile(color); - if (tgt = prepare_target_frame(color, source, RS2_EXTENSION_VIDEO_FRAME)) - { - auto src = color.as(); - decimate_others(color.get_profile().format(), src.get_data(), - const_cast(tgt.get_data()), - src.get_width(), src.get_height(), this->_patch_size); - } - } + rs2::frame decimation_filter::process_frame(const rs2::frame_source& source, const rs2::frame& f) + { + update_output_profile(f); - rs2::frame infared_0 = f.as().get_infrared_frame(0); - if (infared_0) - { - update_output_profile(infared_0); - if (tgt = prepare_target_frame(infared_0, source, RS2_EXTENSION_VIDEO_FRAME)) - { - auto src = infared_0.as(); - decimate_others(infared_0.get_profile().format(), src.get_data(), - 
const_cast(tgt.get_data()), - src.get_width(), src.get_height(), this->_patch_size); - } - } + auto src = f.as(); + rs2::stream_profile profile = f.get_profile(); + rs2_format format = profile.format(); + rs2_stream type = profile.stream_type(); - rs2::frame infared_1 = f.as().get_infrared_frame(1); - if (infared_1) - { - update_output_profile(infared_1); - if (tgt = prepare_target_frame(infared_1, source, RS2_EXTENSION_VIDEO_FRAME)) - { - auto src = infared_1.as(); - decimate_others(infared_1.get_profile().format(), src.get_data(), - const_cast(tgt.get_data()), - src.get_width(), src.get_height(), this->_patch_size); - } - } + rs2_extension tgt_type; + if (type == RS2_STREAM_COLOR || type == RS2_STREAM_INFRARED) + tgt_type = RS2_EXTENSION_VIDEO_FRAME; + else + tgt_type = f.is() ? RS2_EXTENSION_DISPARITY_FRAME : RS2_EXTENSION_DEPTH_FRAME; - out = source.allocate_composite_frame({ tgt }); + if (auto tgt = prepare_target_frame(f, source, tgt_type)) + { + if (format == RS2_FORMAT_Z16) + { + decimate_depth(static_cast(src.get_data()), + static_cast(const_cast(tgt.get_data())), + src.get_width(), src.get_height(), this->_patch_size); } else { - if (f) - { - update_output_profile(f); - - auto src = f.as(); - rs2::stream_profile profile = f.get_profile(); - rs2_format format = profile.format(); - rs2_stream type = profile.stream_type(); - - rs2_extension tgt_type; - if (type == RS2_STREAM_COLOR || type == RS2_STREAM_INFRARED) - tgt_type = RS2_EXTENSION_VIDEO_FRAME; - else - tgt_type = f.is() ? 
RS2_EXTENSION_DISPARITY_FRAME : RS2_EXTENSION_DEPTH_FRAME; - - if (tgt = prepare_target_frame(f, source, tgt_type)) - { - if (format == RS2_FORMAT_Z16) - { - decimate_depth(static_cast(src.get_data()), - static_cast(const_cast(tgt.get_data())), - src.get_width(), src.get_height(), this->_patch_size); - } - else - { - decimate_others(format, src.get_data(), - const_cast(tgt.get_data()), - src.get_width(), src.get_height(), this->_patch_size); - } - } - } - - out = tgt; + decimate_others(format, src.get_data(), + const_cast(tgt.get_data()), + src.get_width(), src.get_height(), this->_patch_size); } - - source.frame_ready(out); - }; - - auto callback = new rs2::frame_processor_callback(on_frame); - processing_block::set_processing_callback(std::shared_ptr(callback)); + return tgt; + } } void decimation_filter::update_output_profile(const rs2::frame& f) @@ -377,11 +306,11 @@ namespace librealsense environment::get_instance().get_extrinsics_graph().register_same_extrinsics(*(stream_interface*)(_source_stream_profile.get()->profile), *(stream_interface*)(tmp_profile.get()->profile)); auto src_vspi = dynamic_cast(_source_stream_profile.get()->profile); auto tgt_vspi = dynamic_cast(tmp_profile.get()->profile); - rs2_intrinsics src_intrin = src_vspi->get_intrinsics(); - rs2_intrinsics tgt_intrin = tgt_vspi->get_intrinsics(); + rs2_intrinsics src_intrin = src_vspi->get_intrinsics(); + rs2_intrinsics tgt_intrin = tgt_vspi->get_intrinsics(); // recalculate real/padded output frame size based on new input porperties - _real_width = src_vspi->get_width() / _patch_size; + _real_width = src_vspi->get_width() / _patch_size; _real_height = src_vspi->get_height() / _patch_size; // The resulted frame dimension will be dividible by 4; @@ -393,17 +322,17 @@ namespace librealsense _padded_height /= 4; _padded_height *= 4; - tgt_intrin.width = _padded_width; - tgt_intrin.height = _padded_height; - tgt_intrin.fx = src_intrin.fx/_patch_size; - tgt_intrin.fy = src_intrin.fy/_patch_size; - 
tgt_intrin.ppx = src_intrin.ppx/_patch_size; - tgt_intrin.ppy = src_intrin.ppy/_patch_size; + tgt_intrin.width = _padded_width; + tgt_intrin.height = _padded_height; + tgt_intrin.fx = src_intrin.fx / _patch_size; + tgt_intrin.fy = src_intrin.fy / _patch_size; + tgt_intrin.ppx = src_intrin.ppx / _patch_size; + tgt_intrin.ppy = src_intrin.ppy / _patch_size; tgt_vspi->set_intrinsics([tgt_intrin]() { return tgt_intrin; }); tgt_vspi->set_dims(tgt_intrin.width, tgt_intrin.height); - _registered_profiles[std::make_tuple(_source_stream_profile.get(),_decimation_factor)]= _target_stream_profile = tmp_profile; + _registered_profiles[std::make_tuple(_source_stream_profile.get(), _decimation_factor)] = _target_stream_profile = tmp_profile; _recalc_profile = false; } @@ -501,7 +430,7 @@ namespace librealsense *frame_data_out++ = 0; // Skip N lines to the beginnig of the next processing segment - block_start += width_in*scale; + block_start += width_in * scale; } } else @@ -541,7 +470,7 @@ namespace librealsense *frame_data_out++ = 0; // Skip N lines to the beginnig of the next processing segment - block_start += width_in*scale; + block_start += width_in * scale; } } diff --git a/src/proc/decimation-filter.h b/src/proc/decimation-filter.h index 49fd0c17f0..ddbf7fb1aa 100644 --- a/src/proc/decimation-filter.h +++ b/src/proc/decimation-filter.h @@ -9,7 +9,7 @@ namespace librealsense { - class decimation_filter : public processing_block + class decimation_filter : public stream_filter_processing_block { public: decimation_filter(); @@ -22,6 +22,7 @@ namespace librealsense void decimate_others(rs2_format format, const void * frame_data_in, void * frame_data_out, size_t width_in, size_t height_in, size_t scale); + rs2::frame process_frame(const rs2::frame_source& source, const rs2::frame& f) override; private: void update_output_profile(const rs2::frame& f); diff --git a/src/proc/disparity-transform.cpp b/src/proc/disparity-transform.cpp index eb9d3cd32d..b0afbea102 100644 --- 
a/src/proc/disparity-transform.cpp +++ b/src/proc/disparity-transform.cpp @@ -41,42 +41,47 @@ namespace librealsense unregister_option(RS2_OPTION_FRAMES_QUEUE_SIZE); - auto on_frame = [this](rs2::frame f, const rs2::frame_source& source) - { - std::lock_guard lock(_mutex); + on_set_mode(_transform_to_disparity); + } - rs2::frame out = f; - rs2::frame tgt, depth_data; + bool disparity_transform::should_process(const rs2::frame& frame) + { + if (!frame) + return false; - bool composite = f.is(); + if (frame.is()) + return false; + + if (_transform_to_disparity && (frame.get_profile().stream_type() != RS2_STREAM_DEPTH || frame.get_profile().format() != RS2_FORMAT_Z16)) + return false; - tgt = depth_data = (composite) ? f.as().first_or_default(RS2_STREAM_DEPTH) : f; + if (!_transform_to_disparity && (frame.get_profile().stream_type() != RS2_STREAM_DEPTH || + (frame.get_profile().format() != RS2_FORMAT_DISPARITY16 && frame.get_profile().format() != RS2_FORMAT_DISPARITY32))) + return false; - // Verify that the input depth format is aligned with the block's configuration - if (depth_data && (f.is() != _transform_to_disparity)) - { - update_transformation_profile(depth_data); + if (frame.is() == _transform_to_disparity) + return false; - if (_stereoscopic_depth && (tgt = prepare_target_frame(depth_data, source))) - { - auto src = depth_data.as(); + return true; + } - if (_transform_to_disparity) - convert(src.get_data(), const_cast(tgt.get_data())); - else - convert(src.get_data(), const_cast(tgt.get_data())); - } - } + rs2::frame disparity_transform::process_frame(const rs2::frame_source& source, const rs2::frame& f) + { + rs2::frame tgt; - out = composite ? 
source.allocate_composite_frame({ tgt }) : tgt; + update_transformation_profile(f); - source.frame_ready(out); - }; + if (_stereoscopic_depth && (tgt = prepare_target_frame(f, source))) + { + auto src = f.as(); - auto callback = new rs2::frame_processor_callback(on_frame); - processing_block::set_processing_callback(std::shared_ptr(callback)); + if (_transform_to_disparity) + convert(src.get_data(), const_cast(tgt.get_data())); + else + convert(src.get_data(), const_cast(tgt.get_data())); + } - on_set_mode(_transform_to_disparity); + return tgt; } void disparity_transform::on_set_mode(bool to_disparity) diff --git a/src/proc/disparity-transform.h b/src/proc/disparity-transform.h index 3fff9deeb9..6d82775896 100644 --- a/src/proc/disparity-transform.h +++ b/src/proc/disparity-transform.h @@ -11,10 +11,12 @@ namespace librealsense { - class disparity_transform : public processing_block + class disparity_transform : public generic_processing_block { public: disparity_transform(bool transform_to_disparity); + bool should_process(const rs2::frame& frame) override; + rs2::frame process_frame(const rs2::frame_source& source, const rs2::frame& f) override; protected: rs2::frame prepare_target_frame(const rs2::frame& f, const rs2::frame_source& source); diff --git a/src/proc/hole-filling-filter.cpp b/src/proc/hole-filling-filter.cpp index 5299813d8f..a2b75eee40 100644 --- a/src/proc/hole-filling-filter.cpp +++ b/src/proc/hole-filling-filter.cpp @@ -15,7 +15,7 @@ namespace librealsense { // The holes filling mode const uint8_t hole_fill_min = hf_fill_from_left; - const uint8_t hole_fill_max = hf_max_value-1; + const uint8_t hole_fill_max = hf_max_value - 1; const uint8_t hole_fill_step = 1; const uint8_t hole_fill_def = hf_farest_from_around; @@ -25,6 +25,9 @@ namespace librealsense _current_frm_size_pixels(0), _hole_filling_mode(hole_fill_def) { + _stream_filter = RS2_STREAM_DEPTH; + _stream_format_filter = RS2_FORMAT_Z16; + auto hole_filling_mode = std::make_shared>( 
hole_fill_min, hole_fill_max, @@ -32,8 +35,8 @@ namespace librealsense hole_fill_def, &_hole_filling_mode, "Hole Filling mode"); - hole_filling_mode->set_description(hf_fill_from_left, "Fill from Left"); - hole_filling_mode->set_description(hf_farest_from_around, "Farest from around"); + hole_filling_mode->set_description(hf_fill_from_left, "Fill from Left"); + hole_filling_mode->set_description(hf_farest_from_around, "Farest from around"); hole_filling_mode->set_description(hf_nearest_from_around, "Nearest from around"); hole_filling_mode->on_set([this, hole_filling_mode](float val) @@ -49,35 +52,20 @@ namespace librealsense register_option(RS2_OPTION_HOLES_FILL, hole_filling_mode); - auto on_frame = [this](rs2::frame f, const rs2::frame_source& source) - { - std::lock_guard lock(_mutex); - - rs2::frame out = f; - rs2::frame tgt, depth; - - bool composite = f.is(); - - depth = (composite) ? f.as().first_or_default(RS2_STREAM_DEPTH) : f; - if (depth) // Processing required - { - update_configuration(f); - tgt = prepare_target_frame(depth, source); - - // Hole filling pass - if (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) - apply_hole_filling(const_cast(tgt.get_data())); - else - apply_hole_filling(const_cast(tgt.get_data())); - } + } - out = composite ? 
source.allocate_composite_frame({ tgt }) : tgt; + rs2::frame hole_filling_filter::process_frame(const rs2::frame_source& source, const rs2::frame& f) + { + update_configuration(f); + auto tgt = prepare_target_frame(f, source); - source.frame_ready(out); - }; + // Hole filling pass + if (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) + apply_hole_filling(const_cast(tgt.get_data())); + else + apply_hole_filling(const_cast(tgt.get_data())); - auto callback = new rs2::frame_processor_callback(on_frame); - processing_block::set_processing_callback(std::shared_ptr(callback)); + return tgt; } void hole_filling_filter::update_configuration(const rs2::frame& f) @@ -92,12 +80,12 @@ namespace librealsense *(stream_interface*)(_target_stream_profile.get()->profile)); _extension_type = f.is() ? RS2_EXTENSION_DISPARITY_FRAME : RS2_EXTENSION_DEPTH_FRAME; - _bpp = (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) ? sizeof(float) : sizeof(uint16_t); + _bpp = (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) ? 
sizeof(float) : sizeof(uint16_t); auto vp = _target_stream_profile.as(); - _width = vp.width(); - _height = vp.height(); - _stride = _width*_bpp; - _current_frm_size_pixels = _width * _height; + _width = vp.width(); + _height = vp.height(); + _stride = _width * _bpp; + _current_frm_size_pixels = _width * _height; } } diff --git a/src/proc/hole-filling-filter.h b/src/proc/hole-filling-filter.h index bcfe816320..c60ee1a773 100644 --- a/src/proc/hole-filling-filter.h +++ b/src/proc/hole-filling-filter.h @@ -13,13 +13,14 @@ namespace librealsense hf_max_value }; - class hole_filling_filter : public processing_block + class hole_filling_filter : public depth_processing_block { public: hole_filling_filter(); protected: - void update_configuration(const rs2::frame& f); + void update_configuration(const rs2::frame& f); + rs2::frame process_frame(const rs2::frame_source& source, const rs2::frame& f) override; rs2::frame prepare_target_frame(const rs2::frame& f, const rs2::frame_source& source); diff --git a/src/proc/pointcloud.cpp b/src/proc/pointcloud.cpp index dc389ecdbd..6ac49ce0b0 100644 --- a/src/proc/pointcloud.cpp +++ b/src/proc/pointcloud.cpp @@ -9,6 +9,8 @@ #include "proc/occlusion-filter.h" #include "proc/pointcloud.h" #include "option.h" +#include "environment.h" +#include "context.h" #include @@ -35,7 +37,7 @@ namespace librealsense } } - const float3 * depth_to_points( uint8_t* image, const rs2_intrinsics &depth_intrinsics, const uint16_t * depth_image, float depth_scale) + const float3 * depth_to_points(uint8_t* image, const rs2_intrinsics &depth_intrinsics, const uint16_t * depth_image, float depth_scale) { #ifdef RS2_USE_CUDA rscuda::deproject_depth_cuda(reinterpret_cast(image), depth_intrinsics, depth_image, depth_scale); @@ -50,27 +52,27 @@ namespace librealsense float2 pixel_to_texcoord(const rs2_intrinsics *intrin, const float2 & pixel) { return{ pixel.x / (intrin->width), pixel.y / (intrin->height) }; } float2 project_to_texcoord(const rs2_intrinsics 
*intrin, const float3 & point) { return pixel_to_texcoord(intrin, project(intrin, point)); } - bool pointcloud::stream_changed(stream_profile_interface* old, stream_profile_interface* curr) + bool pointcloud::stream_changed(const rs2::stream_profile& old, const rs2::stream_profile& curr) { - auto v_old = dynamic_cast(old); - auto v_curr = dynamic_cast(curr); + auto v_old = old.as(); + auto v_curr = curr.as(); - return v_old->get_height() != v_curr->get_height(); + return v_old.height() != v_curr.height(); } - void pointcloud::inspect_depth_frame(const rs2::frame& depth) + void pointcloud::inspect_depth_frame(const rs2::frame& depth) { - auto depth_frame = (frame_interface*)depth.get(); - std::lock_guard lock(_mutex); - - if (!_output_stream.get() || _depth_stream != depth_frame->get_stream().get() || stream_changed(_depth_stream, depth_frame->get_stream().get())) + if (!_output_stream.get() || _depth_stream->unique_id() != depth.get_profile().unique_id() || + stream_changed(*_depth_stream, depth.get_profile())) { - _output_stream = depth_frame->get_stream()->clone(); - _depth_stream = depth_frame->get_stream().get(); - environment::get_instance().get_extrinsics_graph().register_same_extrinsics(*_output_stream, *depth_frame->get_stream()); + _output_stream = std::make_shared(depth.get_profile().as().clone( + RS2_STREAM_DEPTH, depth.get_profile().stream_index(), RS2_FORMAT_XYZ32F)); + _depth_stream = std::make_shared(depth.get_profile().as()); + environment::get_instance().get_extrinsics_graph().register_same_extrinsics(*_output_stream->get()->profile, *depth.get_profile().get()->profile); _depth_intrinsics = optional_value(); _depth_units = optional_value(); _extrinsics = optional_value(); + _other_stream = nullptr; } bool found_depth_intrinsics = false; @@ -78,10 +80,10 @@ namespace librealsense if (!_depth_intrinsics) { - auto stream_profile = depth_frame->get_stream(); - if (auto video = dynamic_cast(stream_profile.get())) + auto stream_profile = 
depth.get_profile(); + if (auto video = stream_profile.as()) { - _depth_intrinsics = video->get_intrinsics(); + _depth_intrinsics = video.get_intrinsics(); _pixels_map.resize(_depth_intrinsics->height*_depth_intrinsics->width); _occlusion_filter->set_depth_intrinsics(_depth_intrinsics.value()); #ifdef __SSSE3__ @@ -93,7 +95,7 @@ namespace librealsense if (!_depth_units) { - auto sensor = depth_frame->get_sensor(); + auto sensor = ((frame_interface*)depth.get())->get_sensor().get(); _depth_units = sensor->get_option(RS2_OPTION_DEPTH_UNITS).query(); found_depth_units = true; } @@ -101,8 +103,10 @@ namespace librealsense if (_other_stream && !_extrinsics) { rs2_extrinsics ex; + const rs2_stream_profile* d = _depth_stream->get(); + const rs2_stream_profile* o = _other_stream->get(); if (environment::get_instance().get_extrinsics_graph().try_fetch_extrinsics( - *_depth_stream, *_other_stream, &ex)) + *d->profile, *o->profile, &ex)) { _extrinsics = ex; } @@ -111,27 +115,22 @@ namespace librealsense void pointcloud::inspect_other_frame(const rs2::frame& other) { - auto other_frame = (frame_interface*)other.get(); - std::lock_guard lock(_mutex); - - if (_other_stream && _invalidate_mapped) - { - _other_stream = nullptr; - _invalidate_mapped = false; - } + if (_other_stream != nullptr && other.get_profile().as() == *_other_stream.get()) + return; if (!_other_stream.get()) { - _other_stream = other_frame->get_stream(); + auto osp = other.get_profile().as(); + _other_stream = std::make_shared(osp.clone(osp.stream_type(), osp.stream_index(), osp.format())); _other_intrinsics = optional_value(); _extrinsics = optional_value(); } if (!_other_intrinsics) { - if (auto video = dynamic_cast(_other_stream.get())) + if (auto video = _other_stream->as()) { - _other_intrinsics = video->get_intrinsics(); + _other_intrinsics = video.get_intrinsics(); _occlusion_filter->set_texel_intrinsics(_other_intrinsics.value()); } } @@ -139,8 +138,10 @@ namespace librealsense if (_output_stream && 
!_extrinsics) { rs2_extrinsics ex; + const rs2_stream_profile* os = _output_stream->get(); + const rs2_stream_profile* of = other.get_profile().get(); if (environment::get_instance().get_extrinsics_graph().try_fetch_extrinsics( - *_output_stream, *other_frame->get_stream(), &ex + *os->profile, *of->profile, &ex )) { _extrinsics = ex; @@ -148,7 +149,6 @@ namespace librealsense } } - void pointcloud::pre_compute_x_y_map() { _pre_compute_map_x.resize(_depth_intrinsics->width*_depth_intrinsics->height); @@ -166,10 +166,10 @@ namespace librealsense if (_depth_intrinsics->model == RS2_DISTORTION_INVERSE_BROWN_CONRADY) { - float r2 = x*x + y*y; + float r2 = x * x + y * y; float f = 1 + _depth_intrinsics->coeffs[0] * r2 + _depth_intrinsics->coeffs[1] * r2*r2 + _depth_intrinsics->coeffs[4] * r2*r2*r2; - float ux = x*f + 2 * _depth_intrinsics->coeffs[2] * x*y + _depth_intrinsics->coeffs[3] * (r2 + 2 * x*x); - float uy = y*f + 2 * _depth_intrinsics->coeffs[3] * x*y + _depth_intrinsics->coeffs[2] * (r2 + 2 * y*y); + float ux = x * f + 2 * _depth_intrinsics->coeffs[2] * x*y + _depth_intrinsics->coeffs[3] * (r2 + 2 * x*x); + float uy = y * f + 2 * _depth_intrinsics->coeffs[3] * x*y + _depth_intrinsics->coeffs[2] * (r2 + 2 * y*y); x = ux; y = uy; } @@ -196,7 +196,6 @@ namespace librealsense const __m128i mask1 = _mm_set_epi8((char)0xff, (char)0xff, (char)15, (char)14, (char)0xff, (char)0xff, (char)13, (char)12, (char)0xff, (char)0xff, (char)11, (char)10, (char)0xff, (char)0xff, (char)9, (char)8); - auto zerro = _mm_set_ps1(0); auto scale = _mm_set_ps1(depth_scale); auto mapx = pre_compute_x; @@ -212,7 +211,7 @@ namespace librealsense __m128i d = _mm_load_si128((__m128i const*)(depth + i)); //d7 d7 d6 d6 d5 d5 d4 d4 d3 d3 d2 d2 d1 d1 d0 d0 - //split the depth pixel to 2 registers of 4 floats each + //split the depth pixel to 2 registers of 4 floats each __m128i d0 = _mm_shuffle_epi8(d, mask0); // 00 00 d3 d3 00 00 d2 d2 00 00 d1 d1 00 00 d0 d0 __m128i d1 = _mm_shuffle_epi8(d, 
mask1); // 00 00 d7 d7 00 00 d6 d6 00 00 d5 d5 00 00 d4 d4 @@ -413,11 +412,10 @@ namespace librealsense } } - void pointcloud::process_depth_frame(const rs2::depth_frame& depth) + rs2::frame pointcloud::process_depth_frame(const rs2::frame_source& source, const rs2::depth_frame& depth) { - frame_holder res = get_source().allocate_points(_output_stream, (frame_interface*)depth.get()); - - auto pframe = (points*)(res.frame); + auto res = source.allocate_points(*_output_stream, depth); + auto pframe = (librealsense::points*)(res.get()); auto depth_data = (const uint16_t*)depth.get_data(); @@ -436,7 +434,6 @@ namespace librealsense rs2_extrinsics extr; bool map_texture = false; { - std::lock_guard lock(_mutex); if (_extrinsics && _other_intrinsics) { mapped_intr = *_other_intrinsics; @@ -460,25 +457,14 @@ namespace librealsense { _occlusion_filter->process(pframe->get_vertices(), pframe->get_texture_coordinates(), _pixels_map); } - } - get_source().frame_ready(std::move(res)); + return res; } pointcloud::pointcloud() : - _other_stream(nullptr), - _invalidate_mapped(false) + _other_stream(nullptr) { - auto mapped_opt = std::make_shared>(0, std::numeric_limits::max(), 1, -1, &_other_stream_id, "Mapped stream ID"); - register_option(RS2_OPTION_TEXTURE_SOURCE, mapped_opt); - float old_value = static_cast(_other_stream_id); - mapped_opt->on_set([this, old_value](float x) mutable { - if (fabs(old_value - x) > 1e-6) - { - _invalidate_mapped = true; - old_value = x; - } - }); + _stream_filter = RS2_STREAM_ANY; _occlusion_filter = std::make_shared(); @@ -502,39 +488,74 @@ namespace librealsense occlusion_invalidation->set_description(1.f, "Heuristic"); occlusion_invalidation->set_description(2.f, "Exhaustive"); register_option(RS2_OPTION_FILTER_MAGNITUDE, occlusion_invalidation); + } - auto on_frame = [this](rs2::frame f, const rs2::frame_source& source) + bool pointcloud::should_process(const rs2::frame& frame) + { + if (!frame) + return false; + + auto set = frame.as(); + + 
if (set) { - if (auto composite = f.as()) - { - auto depth = composite.first_or_default(RS2_STREAM_DEPTH); - if (depth) - { - inspect_depth_frame(depth); - process_depth_frame(depth); - } + //process composite frame only if it contains both a depth frame and the requested texture frame + if (_stream_filter == RS2_STREAM_ANY) + return false; + + auto tex = set.first_or_default(_stream_filter, _stream_format_filter); + if (!tex) + return false; + auto depth = set.first_or_default(RS2_STREAM_DEPTH, RS2_FORMAT_Z16); + if (!depth) + return false; + } + else + { + if (frame.get_profile().stream_type() == RS2_STREAM_DEPTH && frame.get_profile().format() == RS2_FORMAT_Z16) + return true; + + auto p = frame.get_profile(); + if (p.stream_type() == _stream_filter && p.format() == _stream_format_filter && p.stream_index() == _stream_index_filter) + return true; + return false; + + //TODO: switch to this code when map_to api is removed + //if (_stream_filter != RS2_STREAM_ANY) + // return false; + //process single frame only if it is a depth frame + //if (frame.get_profile().stream_type() != RS2_STREAM_DEPTH || frame.get_profile().format() != RS2_FORMAT_Z16) + // return false; + } + + return true; + } + + rs2::frame pointcloud::process_frame(const rs2::frame_source& source, const rs2::frame& f) + { + rs2::frame rv; + if (auto composite = f.as()) + { + auto depth = composite.first(RS2_STREAM_DEPTH, RS2_FORMAT_Z16); + inspect_depth_frame(depth); + rv = process_depth_frame(source, depth); - composite.foreach([&](const rs2::frame& f) { - if (f.get_profile().unique_id() == _other_stream_id) - { - inspect_other_frame(f); - } - }); + auto texture = composite.first(_stream_filter); + inspect_other_frame(texture); + } + else + { + + if (f.is()) + { + inspect_depth_frame(f); + rv = process_depth_frame(source, f); } - else + if (f.get_profile().stream_type() == _stream_filter && f.get_profile().format() == _stream_format_filter) { - if (f.get_profile().stream_type() == RS2_STREAM_DEPTH && 
f.get_profile().format() == RS2_FORMAT_Z16) - { - inspect_depth_frame(f); - process_depth_frame(f); - } - if (f.get_profile().unique_id() == _other_stream_id) - { - inspect_other_frame(f); - } + inspect_other_frame(f); } - }; - auto callback = new rs2::frame_processor_callback(on_frame); - processing_block::set_processing_callback(std::shared_ptr(callback)); + } + return rv; } } diff --git a/src/proc/pointcloud.h b/src/proc/pointcloud.h index c7f11307ed..84f753f7b4 100644 --- a/src/proc/pointcloud.h +++ b/src/proc/pointcloud.h @@ -6,31 +6,32 @@ namespace librealsense { class occlusion_filter; - class pointcloud : public processing_block + class pointcloud : public stream_filter_processing_block { public: pointcloud(); - + protected: + bool should_process(const rs2::frame& frame) override; + rs2::frame process_frame(const rs2::frame_source& source, const rs2::frame& f) override; private: optional_value _depth_intrinsics; optional_value _other_intrinsics; optional_value _depth_units; optional_value _extrinsics; - std::atomic_bool _invalidate_mapped; std::shared_ptr _occlusion_filter; // Intermediate translation table of (depth_x*depth_y) with actual texel coordinates per depth pixel std::vector _pixels_map; - std::shared_ptr _output_stream, _other_stream; - int _other_stream_id = -1; - stream_profile_interface* _depth_stream = nullptr; + std::shared_ptr _output_stream; + std::shared_ptr _other_stream; + std::shared_ptr _depth_stream; void inspect_depth_frame(const rs2::frame& depth); void inspect_other_frame(const rs2::frame& other); - void process_depth_frame(const rs2::depth_frame& depth); + rs2::frame process_depth_frame(const rs2::frame_source& source, const rs2::depth_frame& depth); - bool stream_changed(stream_profile_interface* old, stream_profile_interface* curr); + bool stream_changed(const rs2::stream_profile& old, const rs2::stream_profile& curr); std::vector _pre_compute_map_x; std::vector _pre_compute_map_y; diff --git a/src/proc/spatial-filter.cpp 
b/src/proc/spatial-filter.cpp index 96120b9677..ddc09d2e77 100644 --- a/src/proc/spatial-filter.cpp +++ b/src/proc/spatial-filter.cpp @@ -25,16 +25,16 @@ namespace librealsense }; // The weight of the current pixel for smoothing is bounded within [25..100]% - const float alpha_min_val = 0.25f; - const float alpha_max_val = 1.f; - const float alpha_default_val = 0.5f; - const float alpha_step = 0.01f; + const float alpha_min_val = 0.25f; + const float alpha_max_val = 1.f; + const float alpha_default_val = 0.5f; + const float alpha_step = 0.01f; // The depth gradient below which the smoothing will occur as number of depth levels - const uint8_t delta_min_val = 1; - const uint8_t delta_max_val = 50; - const uint8_t delta_default_val = 20; - const uint8_t delta_step = 1; + const uint8_t delta_min_val = 1; + const uint8_t delta_max_val = 50; + const uint8_t delta_default_val = 20; + const uint8_t delta_step = 1; // the number of passes used in the iterative smoothing approach const uint8_t filter_iter_min = 1; @@ -44,7 +44,7 @@ namespace librealsense // The holes filling mode const uint8_t holes_fill_min = sp_hf_disabled; - const uint8_t holes_fill_max = sp_hf_max_value-1; + const uint8_t holes_fill_max = sp_hf_max_value - 1; const uint8_t holes_fill_step = 1; const uint8_t holes_fill_def = sp_hf_disabled; @@ -61,6 +61,9 @@ namespace librealsense _holes_filling_mode(holes_fill_def), _holes_filling_radius(0) { + _stream_filter = RS2_STREAM_DEPTH; + _stream_format_filter = RS2_FORMAT_Z16; + auto spatial_filter_alpha = std::make_shared>( alpha_min_val, alpha_max_val, @@ -100,12 +103,12 @@ namespace librealsense holes_fill_def, &_holes_filling_mode, "Holes filling mode"); - holes_filling_mode->set_description(sp_hf_disabled, "Disabled"); - holes_filling_mode->set_description(sp_hf_2_pixel_radius, "2-pixel radius"); - holes_filling_mode->set_description(sp_hf_4_pixel_radius, "4-pixel radius"); - holes_filling_mode->set_description(sp_hf_8_pixel_radius, "8-pixel radius"); - 
holes_filling_mode->set_description(sp_hf_16_pixel_radius, "16-pixel radius"); - holes_filling_mode->set_description(sp_hf_unlimited_radius, "Unlimited"); + holes_filling_mode->set_description(sp_hf_disabled, "Disabled"); + holes_filling_mode->set_description(sp_hf_2_pixel_radius, "2-pixel radius"); + holes_filling_mode->set_description(sp_hf_4_pixel_radius, "4-pixel radius"); + holes_filling_mode->set_description(sp_hf_8_pixel_radius, "8-pixel radius"); + holes_filling_mode->set_description(sp_hf_16_pixel_radius, "16-pixel radius"); + holes_filling_mode->set_description(sp_hf_unlimited_radius, "Unlimited"); holes_filling_mode->on_set([this, holes_filling_mode](float val) { @@ -141,36 +144,22 @@ namespace librealsense register_option(RS2_OPTION_FILTER_SMOOTH_DELTA, spatial_filter_delta); register_option(RS2_OPTION_FILTER_MAGNITUDE, spatial_filter_iterations); register_option(RS2_OPTION_HOLES_FILL, holes_filling_mode); + } - auto on_frame = [this](rs2::frame f, const rs2::frame_source& source) - { - std::lock_guard lock(_mutex); - - rs2::frame out = f; - rs2::frame tgt, depth; - - bool composite = f.is(); - - depth = (composite) ? f.as().first_or_default(RS2_STREAM_DEPTH) : f; - if (depth) // Processing required - { - update_configuration(f); - tgt = prepare_target_frame(depth, source); - - // Spatial domain transform edge-preserving filter - if (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) - dxf_smooth(const_cast(tgt.get_data()), _spatial_alpha_param, _spatial_edge_threshold, _spatial_iterations); - else - dxf_smooth(const_cast(tgt.get_data()), _spatial_alpha_param, _spatial_edge_threshold, _spatial_iterations); - } + rs2::frame spatial_filter::process_frame(const rs2::frame_source& source, const rs2::frame& f) + { + rs2::frame tgt; - out = composite ? 
source.allocate_composite_frame({ tgt }) : tgt; + update_configuration(f); + tgt = prepare_target_frame(f, source); - source.frame_ready(out); - }; + // Spatial domain transform edge-preserving filter + if (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) + dxf_smooth(const_cast(tgt.get_data()), _spatial_alpha_param, _spatial_edge_threshold, _spatial_iterations); + else + dxf_smooth(const_cast(tgt.get_data()), _spatial_alpha_param, _spatial_edge_threshold, _spatial_iterations); - auto callback = new rs2::frame_processor_callback(on_frame); - processing_block::set_processing_callback(std::shared_ptr(callback)); + return tgt; } void spatial_filter::update_configuration(const rs2::frame& f) @@ -185,13 +174,13 @@ namespace librealsense *(stream_interface*)(_target_stream_profile.get()->profile)); _extension_type = f.is() ? RS2_EXTENSION_DISPARITY_FRAME : RS2_EXTENSION_DEPTH_FRAME; - _bpp = (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) ? sizeof(float) : sizeof(uint16_t); + _bpp = (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) ? sizeof(float) : sizeof(uint16_t); auto vp = _target_stream_profile.as(); - _focal_lenght_mm = vp.get_intrinsics().fx; - _width = vp.width(); - _height = vp.height(); - _stride = _width*_bpp; - _current_frm_size_pixels = _width * _height; + _focal_lenght_mm = vp.get_intrinsics().fx; + _width = vp.width(); + _height = vp.height(); + _stride = _width * _bpp; + _current_frm_size_pixels = _width * _height; // Check if the new frame originated from stereo-based depth sensor // retrieve the stereo baseline parameter @@ -227,7 +216,7 @@ namespace librealsense } _spatial_edge_threshold = _spatial_delta_param;// (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) ? 
- // (_focal_lenght_mm * _stereo_baseline_mm) / float(_spatial_delta_param) : _spatial_delta_param; + // (_focal_lenght_mm * _stereo_baseline_mm) / float(_spatial_delta_param) : _spatial_delta_param; } } diff --git a/src/proc/spatial-filter.h b/src/proc/spatial-filter.h index d567f47ba7..57dd0b1d68 100644 --- a/src/proc/spatial-filter.h +++ b/src/proc/spatial-filter.h @@ -15,7 +15,7 @@ namespace librealsense { - class spatial_filter : public processing_block + class spatial_filter : public depth_processing_block { public: spatial_filter(); @@ -24,6 +24,7 @@ namespace librealsense void update_configuration(const rs2::frame& f); rs2::frame prepare_target_frame(const rs2::frame& f, const rs2::frame_source& source); + rs2::frame process_frame(const rs2::frame_source& source, const rs2::frame& f) override; template void dxf_smooth(void *frame_data, float alpha, float delta, int iterations) @@ -78,7 +79,7 @@ namespace librealsense T val0 = im[0]; cur_fill = 0; - for (u = 1; u < _width-1; u++) + for (u = 1; u < _width - 1; u++) { T val1 = im[1]; @@ -204,7 +205,7 @@ namespace librealsense if ((fabs(im0) >= valid_threshold) && (fabs(imw) >= valid_threshold)) { T diff = static_cast(fabs(im0 - imw)); - if ( diff < delta_z) + if (diff < delta_z) { float filtered = im0 * alpha + imw * (1.f - alpha); im[0] = static_cast(filtered + round); diff --git a/src/proc/syncer-processing-block.h b/src/proc/syncer-processing-block.h index c7f6118407..bc8b54e80b 100644 --- a/src/proc/syncer-processing-block.h +++ b/src/proc/syncer-processing-block.h @@ -25,6 +25,5 @@ namespace librealsense } private: std::unique_ptr _matcher; - std::mutex _mutex; }; } diff --git a/src/proc/synthetic-stream.cpp b/src/proc/synthetic-stream.cpp index 5c9077e00d..da5d9774a5 100644 --- a/src/proc/synthetic-stream.cpp +++ b/src/proc/synthetic-stream.cpp @@ -3,6 +3,7 @@ #include "core/video.h" #include "proc/synthetic-stream.h" +#include "option.h" namespace librealsense { @@ -17,8 +18,8 @@ namespace librealsense 
_source.set_callback(callback); } - processing_block::processing_block() - : _source_wrapper(_source) + processing_block::processing_block() : + _source_wrapper(_source) { register_option(RS2_OPTION_FRAMES_QUEUE_SIZE, _source.get_published_size_option()); _source.init(std::shared_ptr()); @@ -37,12 +38,197 @@ namespace librealsense _callback->on_frame((rs2_frame*)ptr, _source_wrapper.get_c_wrapper()); } } - catch(...) + catch (...) { LOG_ERROR("Exception was thrown during user processing callback!"); } } + generic_processing_block::generic_processing_block() + { + auto on_frame = [this](rs2::frame f, const rs2::frame_source& source) + { + std::lock_guard lock(_mutex); + + std::vector frames_to_process; + + frames_to_process.push_back(f); + if (auto composite = f.as()) + for (auto f : composite) + frames_to_process.push_back(f); + + std::vector results; + for (auto f : frames_to_process) + { + if (should_process(f)) + { + auto res = process_frame(source, f); + if (!res) continue; + if (auto composite = res.as()) + { + for (auto f : composite) + if (f) + results.push_back(f); + } + else + { + results.push_back(res); + } + } + } + + auto out = prepare_output(source, f, results); + if(out) + source.frame_ready(out); + }; + + auto callback = new rs2::frame_processor_callback(on_frame); + processing_block::set_processing_callback(std::shared_ptr(callback)); + } + + rs2::frame generic_processing_block::prepare_output(const rs2::frame_source& source, rs2::frame input, std::vector results) + { + if (results.empty()) + { + return input; + } + std::vector original_set; + if (auto composite = input.as()) + composite.foreach([&original_set](const rs2::frame& frame) { original_set.push_back(frame); }); + else + { + return results[0]; + } + original_set.push_back(input); + + for (auto s : original_set) + { + auto curr_profile = s.get_profile(); + //if the processed frames doesn't match any of the original frames add the original frame to the results queue + if 
(find_if(results.begin(), results.end(), [&curr_profile](rs2::frame& frame) { + auto processed_profile = frame.get_profile(); + return curr_profile.stream_type() == processed_profile.stream_type() && + curr_profile.format() == processed_profile.format() && + curr_profile.stream_index() == processed_profile.stream_index(); }) == results.end()) + { + results.push_back(s); + } + } + + return source.allocate_composite_frame(results); + } + + stream_filter_processing_block::stream_filter_processing_block() : + _stream_filter(RS2_STREAM_ANY), + _stream_format_filter(RS2_FORMAT_ANY), + _stream_index_filter(-1) + { + register_option(RS2_OPTION_FRAMES_QUEUE_SIZE, _source.get_published_size_option()); + _source.init(std::shared_ptr()); + + auto stream_selector = std::make_shared>(RS2_STREAM_ANY, RS2_STREAM_FISHEYE, 1, RS2_STREAM_ANY, (int*)&_stream_filter, "Stream type"); + for (int s = RS2_STREAM_ANY; s < RS2_STREAM_COUNT; s++) + { + stream_selector->set_description(s, "Process - " + std::string (rs2_stream_to_string((rs2_stream)s))); + } + stream_selector->on_set([this, stream_selector](float val) + { + std::lock_guard lock(_mutex); + + if (!stream_selector->is_valid(val)) + throw invalid_value_exception(to_string() + << "Unsupported stream filter, " << val << " is out of range."); + + _stream_filter = static_cast((int)val); + }); + + auto format_selector = std::make_shared>(RS2_FORMAT_ANY, RS2_FORMAT_DISPARITY32, 1, RS2_FORMAT_ANY, (int*)&_stream_format_filter, "Stream format"); + for (int f = RS2_FORMAT_ANY; f < RS2_FORMAT_COUNT; f++) + { + format_selector->set_description(f, "Process - " + std::string(rs2_format_to_string((rs2_format)f))); + } + format_selector->on_set([this, format_selector](float val) + { + std::lock_guard lock(_mutex); + + if (!format_selector->is_valid(val)) + throw invalid_value_exception(to_string() + << "Unsupported stream format filter, " << val << " is out of range."); + + _stream_format_filter = static_cast((int)val); + }); + + auto 
index_selector = std::make_shared>(0, std::numeric_limits::max(), 1, -1, &_stream_index_filter, "Stream index"); + index_selector->on_set([this, index_selector](float val) + { + std::lock_guard lock(_mutex); + + if (!index_selector->is_valid(val)) + throw invalid_value_exception(to_string() + << "Unsupported stream index filter, " << val << " is out of range."); + + _stream_index_filter = (int)val; + }); + + register_option(RS2_OPTION_STREAM_FILTER, stream_selector); + register_option(RS2_OPTION_STREAM_FORMAT_FILTER, format_selector); + register_option(RS2_OPTION_STREAM_INDEX_FILTER, index_selector); + } + + bool is_z_or_disparity(rs2_format format) + { + if (format == RS2_FORMAT_Z16 || format == RS2_FORMAT_DISPARITY16 || format == RS2_FORMAT_DISPARITY32) + return true; + return false; + } + + bool depth_processing_block::should_process(const rs2::frame& frame) + { + if (!frame || frame.is()) + return false; + auto profile = frame.get_profile(); + rs2_stream stream = profile.stream_type(); + rs2_format format = profile.format(); + int index = profile.stream_index(); + + if (_stream_filter != RS2_STREAM_ANY && _stream_filter != stream) + return false; + if (is_z_or_disparity(_stream_format_filter)) + { + if (_stream_format_filter != RS2_FORMAT_ANY && !is_z_or_disparity(format)) + return false; + } + else + { + if (_stream_format_filter != RS2_FORMAT_ANY && _stream_format_filter != format) + return false; + } + + if (_stream_index_filter != -1 && _stream_index_filter != index) + return false; + return true; + } + + bool stream_filter_processing_block::should_process(const rs2::frame& frame) + { + if (!frame || frame.is()) + return false; + auto profile = frame.get_profile(); + rs2_stream stream = profile.stream_type(); + rs2_format format = profile.format(); + int index = profile.stream_index(); + + if (_stream_filter != RS2_STREAM_ANY && _stream_filter != stream) + return false; + + if (_stream_format_filter != RS2_FORMAT_ANY && _stream_format_filter != format) + 
return false; + + if (_stream_index_filter != -1 && _stream_index_filter != index) + return false; + return true; + } + void synthetic_source::frame_ready(frame_holder result) { _actual_source.invoke_callback(std::move(result)); @@ -71,12 +257,12 @@ namespace librealsense frame_interface* synthetic_source::allocate_video_frame(std::shared_ptr stream, - frame_interface* original, - int new_bpp, - int new_width, - int new_height, - int new_stride, - rs2_extension frame_type) + frame_interface* original, + int new_bpp, + int new_width, + int new_height, + int new_stride, + rs2_extension frame_type) { video_frame* vf = nullptr; @@ -118,7 +304,7 @@ namespace librealsense { height = vf->get_height(); } - + auto of = dynamic_cast(original); frame_additional_data data = of->additional_data; auto res = _actual_source.alloc_frame(frame_type, stride * height, data, true); @@ -167,7 +353,7 @@ namespace librealsense frame_interface* synthetic_source::allocate_composite_frame(std::vector holders) { - frame_additional_data d {}; + frame_additional_data d{}; auto req_size = 0; for (auto&& f : holders) diff --git a/src/proc/synthetic-stream.h b/src/proc/synthetic-stream.h index 7feb5698e7..8714b5e7f5 100644 --- a/src/proc/synthetic-stream.h +++ b/src/proc/synthetic-stream.h @@ -6,6 +6,8 @@ #include "core/processing.h" #include "image.h" #include "source.h" +#include "../include/librealsense2/hpp/rs_frame.hpp" +#include "../include/librealsense2/hpp/rs_processing.hpp" namespace librealsense { @@ -13,17 +15,17 @@ namespace librealsense { public: synthetic_source(frame_source& actual) - : _actual_source(actual), _c_wrapper(new rs2_source { this }) + : _actual_source(actual), _c_wrapper(new rs2_source{ this }) { } frame_interface* allocate_video_frame(std::shared_ptr stream, - frame_interface* original, - int new_bpp = 0, - int new_width = 0, - int new_height = 0, - int new_stride = 0, - rs2_extension frame_type = RS2_EXTENSION_VIDEO_FRAME) override; + frame_interface* original, + int 
new_bpp = 0, + int new_width = 0, + int new_height = 0, + int new_stride = 0, + rs2_extension frame_type = RS2_EXTENSION_VIDEO_FRAME) override; frame_interface* allocate_composite_frame(std::vector frames) override; @@ -34,7 +36,7 @@ namespace librealsense rs2_source* get_c_wrapper() override { return _c_wrapper.get(); } private: - frame_source& _actual_source; + frame_source & _actual_source; std::shared_ptr _c_wrapper; }; @@ -46,14 +48,50 @@ namespace librealsense void set_processing_callback(frame_processor_callback_ptr callback) override; void set_output_callback(frame_callback_ptr callback) override; void invoke(frame_holder frames) override; - synthetic_source_interface& get_source() override { return _source_wrapper; } - virtual ~processing_block(){_source.flush();} + virtual ~processing_block() { _source.flush(); } protected: frame_source _source; std::mutex _mutex; frame_processor_callback_ptr _callback; synthetic_source _source_wrapper; }; + + class generic_processing_block : public processing_block + { + public: + generic_processing_block(); + virtual ~generic_processing_block() { _source.flush(); } + + protected: + rs2::frame prepare_output(const rs2::frame_source& source, rs2::frame input, std::vector results); + + virtual bool should_process(const rs2::frame& frame) = 0; + virtual rs2::frame process_frame(const rs2::frame_source& source, const rs2::frame& f) = 0; + }; + + class stream_filter_processing_block : public generic_processing_block + { + public: + stream_filter_processing_block(); + virtual ~stream_filter_processing_block() { _source.flush(); } + + protected: + rs2_stream _stream_filter; + rs2_format _stream_format_filter; + int _stream_index_filter; + + bool should_process(const rs2::frame& frame) override; + }; + + class depth_processing_block : public stream_filter_processing_block + { + public: + depth_processing_block() {} + virtual ~depth_processing_block() { _source.flush(); } + + protected: + bool should_process(const rs2::frame& 
frame) override; + }; } diff --git a/src/proc/temporal-filter.cpp b/src/proc/temporal-filter.cpp index c5d33685ec..345d0390cc 100644 --- a/src/proc/temporal-filter.cpp +++ b/src/proc/temporal-filter.cpp @@ -40,6 +40,9 @@ namespace librealsense _extension_type(RS2_EXTENSION_DEPTH_FRAME), _current_frm_size_pixels(0) { + _stream_filter = RS2_STREAM_DEPTH; + _stream_format_filter = RS2_FORMAT_Z16; + auto temporal_persistence_control = std::make_shared>( persistence_min, persistence_max, @@ -98,39 +101,26 @@ namespace librealsense register_option(RS2_OPTION_FILTER_SMOOTH_ALPHA, temporal_filter_alpha); register_option(RS2_OPTION_FILTER_SMOOTH_DELTA, temporal_filter_delta); - auto on_frame = [this](rs2::frame f, const rs2::frame_source& source) - { - std::lock_guard lock(_mutex); - rs2::frame res = f, tgt, depth; - - bool composite = f.is(); - - depth = (composite) ? f.as().first_or_default(RS2_STREAM_DEPTH) : f; - if (depth) // Processing required - { - update_configuration(f); - tgt = prepare_target_frame(depth, source); - - // Temporal filter execution - if (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) - temp_jw_smooth(const_cast(tgt.get_data()), _last_frame.data(), _history.data()); - else - temp_jw_smooth(const_cast(tgt.get_data()), _last_frame.data(), _history.data()); - } - - res = composite ? 
source.allocate_composite_frame({ tgt }) : tgt; - - source.frame_ready(res); - }; - - auto callback = new rs2::frame_processor_callback(on_frame); - processing_block::set_processing_callback(std::shared_ptr(callback)); - on_set_persistence_control(_persistence_param); on_set_delta(_delta_param); on_set_alpha(_alpha_param); } + rs2::frame temporal_filter::process_frame(const rs2::frame_source& source, const rs2::frame& f) + { + update_configuration(f); + auto tgt = prepare_target_frame(f, source); + + // Temporal filter execution + if (_extension_type == RS2_EXTENSION_DISPARITY_FRAME) + temp_jw_smooth(const_cast(tgt.get_data()), _last_frame.data(), _history.data()); + else + temp_jw_smooth(const_cast(tgt.get_data()), _last_frame.data(), _history.data()); + + return tgt; + } + + void temporal_filter::on_set_persistence_control(uint8_t val) { std::lock_guard lock(_mutex); diff --git a/src/proc/temporal-filter.h b/src/proc/temporal-filter.h index 83e932ee16..611802b182 100644 --- a/src/proc/temporal-filter.h +++ b/src/proc/temporal-filter.h @@ -8,13 +8,14 @@ namespace librealsense { const size_t PRESISTENCY_LUT_SIZE = 256; - class temporal_filter : public processing_block + class temporal_filter : public depth_processing_block { public: temporal_filter(); protected: void update_configuration(const rs2::frame& f); + rs2::frame process_frame(const rs2::frame_source& source, const rs2::frame& f) override; rs2::frame prepare_target_frame(const rs2::frame& f, const rs2::frame_source& source); diff --git a/src/rs.cpp b/src/rs.cpp index edf44d708b..4548d5ac9a 100644 --- a/src/rs.cpp +++ b/src/rs.cpp @@ -218,14 +218,13 @@ void rs2_delete_device_hub(const rs2_device_hub* hub) BEGIN_API_CALL } NOEXCEPT_RETURN(, hub) -rs2_device* rs2_device_hub_wait_for_device(rs2_context* ctx, const rs2_device_hub* hub, rs2_error** error) BEGIN_API_CALL +rs2_device* rs2_device_hub_wait_for_device(const rs2_device_hub* hub, rs2_error** error) BEGIN_API_CALL { VALIDATE_NOT_NULL(hub); - 
VALIDATE_NOT_NULL(ctx); auto dev = hub->hub->wait_for_device(); - return new rs2_device{ ctx->ctx, std::make_shared(dev), dev }; + return new rs2_device{ hub->hub->get_context(), std::make_shared(dev), dev }; } -HANDLE_EXCEPTIONS_AND_RETURN(nullptr, hub, ctx) +HANDLE_EXCEPTIONS_AND_RETURN(nullptr, hub) int rs2_device_hub_is_device_connected(const rs2_device_hub* hub, const rs2_device* device, rs2_error** error) BEGIN_API_CALL { @@ -1347,6 +1346,19 @@ rs2_frame* rs2_allocate_synthetic_video_frame(rs2_source* source, const rs2_stre } HANDLE_EXCEPTIONS_AND_RETURN(nullptr, source, new_stream, original, new_bpp, new_width, new_height, new_stride, frame_type) +rs2_frame* rs2_allocate_points(rs2_source* source, const rs2_stream_profile* new_stream, rs2_frame* original, rs2_error** error) BEGIN_API_CALL +{ + VALIDATE_NOT_NULL(source); + VALIDATE_NOT_NULL(original); + VALIDATE_NOT_NULL(new_stream); + + auto recovered_profile = std::dynamic_pointer_cast(new_stream->profile->shared_from_this()); + + return (rs2_frame*)source->source->allocate_points(recovered_profile, + (frame_interface*)original); +} +HANDLE_EXCEPTIONS_AND_RETURN(nullptr, source, new_stream, original) + void rs2_synthetic_frame_ready(rs2_source* source, rs2_frame* frame, rs2_error** error) BEGIN_API_CALL { VALIDATE_NOT_NULL(frame); diff --git a/src/types.cpp b/src/types.cpp index d553819859..de0d673873 100644 --- a/src/types.cpp +++ b/src/types.cpp @@ -261,6 +261,9 @@ namespace librealsense CASE(HOLES_FILL) CASE(AUTO_EXPOSURE_CONVERGE_STEP) CASE(INTER_CAM_SYNC_MODE) + CASE(STREAM_FILTER) + CASE(STREAM_FORMAT_FILTER) + CASE(STREAM_INDEX_FILTER) default: assert(!is_valid(value)); return UNKNOWN_VALUE; } #undef CASE diff --git a/src/win/win-helpers.cpp b/src/win/win-helpers.cpp index 2c6ff8b8c4..7e979a9ab5 100644 --- a/src/win/win-helpers.cpp +++ b/src/win/win-helpers.cpp @@ -167,6 +167,13 @@ namespace librealsense if (tokens.size() < 1 || tokens[0] != R"(usb)") return false; // Not a USB device auto ids = 
tokenize(tokens[1], '&'); + + if (ids.size() < 3) + { + LOG_ERROR("incomplete device id"); + return false; + } + if (ids[0].size() != 8 || ids[0].substr(0, 4) != "vid_" || !(std::istringstream(ids[0].substr(4, 4)) >> std::hex >> vid)) { LOG_ERROR("malformed vid string: " << tokens[1]); diff --git a/src/win/win-hid.cpp b/src/win/win-hid.cpp index 4d67334db0..7132923420 100644 --- a/src/win/win-hid.cpp +++ b/src/win/win-hid.cpp @@ -238,6 +238,7 @@ namespace librealsense void wmf_hid_device::foreach_hid_device(std::function)> action) { + return; // HID devices aren't supported on Windows OS try { CComPtr pSensorManager = nullptr; diff --git a/src/win/win-usb.cpp b/src/win/win-usb.cpp index 29a3112858..c98e039048 100644 --- a/src/win/win-usb.cpp +++ b/src/win/win-usb.cpp @@ -54,7 +54,6 @@ namespace librealsense } open(_lp_device_path); - _usb_interface = std::make_unique(_device_handle); } void winusb_device::release() diff --git a/src/win7/d415_driver/IntelD415_win7.inf b/src/win7/d415_driver/IntelD415_win7.inf index c28865aceb..1cf123c7c7 100644 Binary files a/src/win7/d415_driver/IntelD415_win7.inf and b/src/win7/d415_driver/IntelD415_win7.inf differ diff --git a/src/win7/d415_driver/WdfCoInstaller01011.dll b/src/win7/d415_driver/WdfCoInstaller01011.dll new file mode 100644 index 0000000000..e943ea45ef Binary files /dev/null and b/src/win7/d415_driver/WdfCoInstaller01011.dll differ diff --git a/src/win7/d415_driver/WdfCoinstaller01011.dll b/src/win7/d415_driver/WdfCoinstaller01011.dll deleted file mode 100644 index e8d53314ba..0000000000 Binary files a/src/win7/d415_driver/WdfCoinstaller01011.dll and /dev/null differ diff --git a/src/win7/d415_driver/inteld415_win7.cat b/src/win7/d415_driver/inteld415_win7.cat deleted file mode 100644 index 475f38997a..0000000000 Binary files a/src/win7/d415_driver/inteld415_win7.cat and /dev/null differ diff --git a/src/win7/d415_driver/winusbcoinstaller2.dll b/src/win7/d415_driver/winusbcoinstaller2.dll index 
4b2136efc2..fc450d2b25 100644 Binary files a/src/win7/d415_driver/winusbcoinstaller2.dll and b/src/win7/d415_driver/winusbcoinstaller2.dll differ diff --git a/src/win7/winusb_uvc/winusb_uvc.cpp b/src/win7/winusb_uvc/winusb_uvc.cpp index 13662e4b38..e8305f3de2 100644 --- a/src/win7/winusb_uvc/winusb_uvc.cpp +++ b/src/win7/winusb_uvc/winusb_uvc.cpp @@ -1,5 +1,8 @@ // winusb_uvc.cpp : Defines the entry point for the console application. // + +#ifdef RS2_USE_WINUSB_UVC_BACKEND + #define NOMINMAX #include "windows.h" #include "SETUPAPI.H" @@ -82,16 +85,19 @@ void winusb_uvc_free_device_info(winusb_uvc_device_info_t *info) { free(stream_if); } - for (int i = 0; i < MAX_USB_INTERFACES; i++) + if (info->interfaces) { - if (info->interfaces->iface[i].extra != NULL) + for (int i = 0; i < MAX_USB_INTERFACES; i++) { - free(info->interfaces->iface[i].extra); - info->interfaces->iface[i].extra = NULL; + if (info->interfaces->iface[i].extra != NULL) + { + free(info->interfaces->iface[i].extra); + info->interfaces->iface[i].extra = NULL; + } } - } - free(info->interfaces); + free(info->interfaces); + } } @@ -860,7 +866,7 @@ void winusb_uvc_process_payload(winusb_uvc_stream_handle_t *strmh, if ((data_len > 0) && (strmh->cur_ctrl.dwMaxVideoFrameSize == (data_len))) { - if (header_info & (1 << 1)) { + //if (header_info & (1 << 1)) { // Temp patch to allow old firmware /* The EOF bit is set, so publish the complete frame */ winusb_uvc_swap_buffers(strmh); @@ -882,7 +888,7 @@ void winusb_uvc_process_payload(winusb_uvc_stream_handle_t *strmh, { LOG_INFO("WinUSB backend is dropping a frame because librealsense wasn't fast enough"); } - } + //} } } @@ -927,10 +933,11 @@ void stream_thread(winusb_uvc_stream_context *strctx) buffer, strctx->maxPayloadTransferSize, &transferred, - NULL)) { - printf("error : %d\n" + GetLastError()); - return; - } + NULL)) + { + printf("WinUsb_ReadPipe Error: %d\n" + GetLastError()); + break; + } LOG_DEBUG("Packet received with size " << transferred); 
winusb_uvc_process_payload(strctx->stream, buffer, transferred, &archive, &queue); @@ -2123,6 +2130,11 @@ uvc_error_t winusb_open(winusb_uvc_device *device) device->associateHandle = NULL; device->streams = NULL; + // Start by clearing deviceData, otherwise + // winusb_uvc_free_device_info(&device->deviceData); + // will do something not good + memset(&device->deviceData, 0, sizeof(winusb_uvc_device_info_t)); + // Create a handle for I/O operations to the IVCAM device device->deviceHandle = CreateFile(device->devPath, GENERIC_READ | GENERIC_WRITE, @@ -2164,7 +2176,6 @@ uvc_error_t winusb_open(winusb_uvc_device *device) ret = UVC_ERROR_INVALID_PARAM; goto fail; } - memset(&device->deviceData, 0, sizeof(winusb_uvc_device_info_t)); device->deviceData.config = cfgDesc; // Iterate over all descriptors and parse all Interface and Endpoint descriptors @@ -2287,3 +2298,5 @@ bool read_all_uvc_descriptors(winusb_uvc_device *device, PUCHAR buffer, ULONG bu return 0; } + +#endif diff --git a/tools/convert/converters/converter-png.hpp b/tools/convert/converters/converter-png.hpp index ee51ffe1f7..81782b1e76 100644 --- a/tools/convert/converters/converter-png.hpp +++ b/tools/convert/converters/converter-png.hpp @@ -46,7 +46,7 @@ namespace rs2 { } if (frame.get_profile().stream_type() == rs2_stream::RS2_STREAM_DEPTH) { - frame = _colorizer(frame); + frame = _colorizer.process(frame); } std::stringstream filename; diff --git a/tools/depth-quality/depth-quality-model.cpp b/tools/depth-quality/depth-quality-model.cpp index 5b8f93450b..5e2d1ee091 100644 --- a/tools/depth-quality/depth-quality-model.cpp +++ b/tools/depth-quality/depth-quality-model.cpp @@ -899,8 +899,9 @@ namespace rs2 { auto profile = f.get_profile(); auto stream_type = profile.stream_type(); + auto stream_format = profile.format(); - if (RS2_STREAM_DEPTH == stream_type) + if ((RS2_STREAM_DEPTH == stream_type) && (RS2_FORMAT_Z16 == stream_format)) { float su = 0, baseline = -1.f; rs2_intrinsics intrin{}; diff --git 
a/tools/realsense-viewer/realsense-viewer.cpp b/tools/realsense-viewer/realsense-viewer.cpp index 15f0f68e99..c954733c45 100644 --- a/tools/realsense-viewer/realsense-viewer.cpp +++ b/tools/realsense-viewer/realsense-viewer.cpp @@ -24,7 +24,7 @@ using namespace rs2; using namespace rs400; -void add_playback_device(context& ctx, std::vector& device_models, std::string& error_message, viewer_model& viewer_model, const std::string& file) +void add_playback_device(context& ctx, std::shared_ptr> device_models, std::string& error_message, viewer_model& viewer_model, const std::string& file) { bool was_loaded = false; bool failed = false; @@ -32,21 +32,21 @@ void add_playback_device(context& ctx, std::vector& device_models, { auto dev = ctx.load_device(file); was_loaded = true; - device_models.emplace_back(dev, error_message, viewer_model); //Will cause the new device to appear in the left panel + device_models->emplace_back(dev, error_message, viewer_model); //Will cause the new device to appear in the left panel if (auto p = dev.as()) { auto filename = p.file_name(); - p.set_status_changed_callback([&viewer_model, &device_models, filename](rs2_playback_status status) + p.set_status_changed_callback([&viewer_model, device_models, filename](rs2_playback_status status) { if (status == RS2_PLAYBACK_STATUS_STOPPED) { - auto it = std::find_if(device_models.begin(), device_models.end(), + auto it = std::find_if(device_models->begin(), device_models->end(), [&](const device_model& dm) { if (auto p = dm.dev.as()) return p.file_name() == filename; return false; }); - if (it != device_models.end()) + if (it != device_models->end()) { auto subs = it->subdevices; if (it->_playback_repeat) @@ -104,7 +104,7 @@ void refresh_devices(std::mutex& m, device_changes& devices_connection_changes, std::vector& current_connected_devices, std::vector>& device_names, - std::vector& device_models, + std::shared_ptr> device_models, viewer_model& viewer_model, std::string& error_message) { @@ 
-132,15 +132,15 @@ void refresh_devices(std::mutex& m, viewer_model.ppf.stop(); //Remove from devices - auto dev_model_itr = std::find_if(begin(device_models), end(device_models), + auto dev_model_itr = std::find_if(begin(*device_models), end(*device_models), [&](const device_model& other) { return get_device_name(other.dev) == get_device_name(dev); }); - if (dev_model_itr != end(device_models)) + if (dev_model_itr != end(*device_models)) { for (auto&& s : dev_model_itr->subdevices) s->streaming = false; - device_models.erase(dev_model_itr); + device_models->erase(dev_model_itr); } auto dev_name_itr = std::find(begin(device_names), end(device_names), get_device_name(dev)); if (dev_name_itr != end(device_names)) @@ -172,24 +172,24 @@ void refresh_devices(std::mutex& m, auto data = n.get_serialized_data(); if (!data.empty()) { - auto dev_model_itr = std::find_if(begin(device_models), end(device_models), + auto dev_model_itr = std::find_if(begin(*device_models), end(*device_models), [&](const device_model& other) { return get_device_name(other.dev) == dev_descriptor; }); - if (dev_model_itr == end(device_models)) + if (dev_model_itr == end(*device_models)) return; - dev_model_itr->handle_harware_events(data); + dev_model_itr->handle_hardware_events(data); } } viewer_model.not_model.add_notification({ n.get_description(), n.get_timestamp(), n.get_severity(), n.get_category() }); }); } - if (device_models.size() == 0 && + if (device_models->size() == 0 && dev.supports(RS2_CAMERA_INFO_NAME) && std::string(dev.get_info(RS2_CAMERA_INFO_NAME)) != "Platform Camera") { - device_models.emplace_back(dev, error_message, viewer_model); - viewer_model.not_model.add_log(to_string() << device_models.rbegin()->dev.get_info(RS2_CAMERA_INFO_NAME) << " was selected as a default device"); + device_models->emplace_back(dev, error_message, viewer_model); + viewer_model.not_model.add_log(to_string() << device_models->rbegin()->dev.get_info(RS2_CAMERA_INFO_NAME) << " was selected as a 
default device"); } } initial_refresh = false; @@ -223,7 +223,7 @@ int main(int argv, const char** argc) try std::string error_message{ "" }; std::string label{ "" }; - std::vector device_models; + std::shared_ptr> device_models = std::make_shared>(); device_model* device_to_remove = nullptr; viewer_model viewer_model; @@ -307,7 +307,7 @@ int main(int argv, const char** argc) try ImGui::OpenPopup("select"); auto new_devices_count = device_names.size() + 1; - for (auto&& dev_model : device_models) + for (auto&& dev_model : *device_models) { auto connected_devs_itr = std::find_if(begin(connected_devs), end(connected_devs), [&](const device& d) { return get_device_name(d) == get_device_name(dev_model.dev); }); @@ -326,7 +326,7 @@ int main(int argv, const char** argc) try for (size_t i = 0; i < device_names.size(); i++) { bool skip = false; - for (auto&& dev_model : device_models) + for (auto&& dev_model : *device_models) if (get_device_name(dev_model.dev) == device_names[i]) skip = true; if (skip) continue; @@ -335,7 +335,7 @@ int main(int argv, const char** argc) try try { auto dev = connected_devs[i]; - device_models.emplace_back(dev, error_message, viewer_model); + device_models->emplace_back(dev, error_message, viewer_model); } catch (const error& e) { @@ -408,12 +408,12 @@ int main(int argv, const char** argc) try // ********************* ImGui::Begin("Control Panel", nullptr, flags | ImGuiWindowFlags_AlwaysVerticalScrollbar); - if (device_models.size() > 0) + if (device_models->size() > 0) { std::vector> draw_later; auto windows_width = ImGui::GetContentRegionMax().x; - for (auto&& dev_model : device_models) + for (auto&& dev_model : *device_models) { dev_model.draw_controls(viewer_model.panel_width, viewer_model.panel_y, window, @@ -423,7 +423,7 @@ int main(int argv, const char** argc) try } if (viewer_model.ppf.is_rendering()) { - if (!std::any_of(device_models.begin(), device_models.end(), + if (!std::any_of(device_models->begin(), device_models->end(), 
[](device_model& dm) { return dm.is_streaming(); @@ -441,7 +441,7 @@ int main(int argv, const char** argc) try ctx.unload_device(p.file_name()); } - device_models.erase(std::find_if(begin(device_models), end(device_models), + device_models->erase(std::find_if(begin(*device_models), end(*device_models), [&](const device_model& other) { return get_device_name(other.dev) == get_device_name(device_to_remove->dev); })); device_to_remove = nullptr; } @@ -487,14 +487,14 @@ int main(int argv, const char** argc) try ImGui::PopStyleColor(); // Fetch and process frames from queue - viewer_model.handle_ready_frames(viewer_rect, window, static_cast(device_models.size()), error_message); + viewer_model.handle_ready_frames(viewer_rect, window, static_cast(device_models->size()), error_message); } // Stopping post processing filter rendering thread viewer_model.ppf.stop(); // Stop all subdevices - for (auto&& device_model : device_models) + for (auto&& device_model : *device_models) for (auto&& sub : device_model.subdevices) { if (sub->streaming) diff --git a/unit-tests/CMakeLists.txt b/unit-tests/CMakeLists.txt index 81c2def878..c9afbe96a7 100644 --- a/unit-tests/CMakeLists.txt +++ b/unit-tests/CMakeLists.txt @@ -38,7 +38,7 @@ install( TARGETS live-test - + RUNTIME DESTINATION ${CMAKE_INSTALL_PREFIX}/bin ) @@ -52,6 +52,12 @@ else() # Data shall be preserved between reboots. 
For Linux distributions/ ANDRO set(Deployment_Location /tmp/) endif() +add_custom_command(TARGET live-test POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_directory + ${CMAKE_CURRENT_SOURCE_DIR}/resources + ${Deployment_Location} +) + #Post-Processing data set for unit-tests #message(STATUS "Post processing deployment directory=${Deployment_Location}") list(APPEND PP_Tests_List 1525186403504 # D415_DS(2) diff --git a/unit-tests/resources/single_depth_color_640x480.bag b/unit-tests/resources/single_depth_color_640x480.bag new file mode 100644 index 0000000000..3f0c5ba1d1 Binary files /dev/null and b/unit-tests/resources/single_depth_color_640x480.bag differ diff --git a/unit-tests/unit-tests-live.cpp b/unit-tests/unit-tests-live.cpp index 103568e856..7d139359f6 100644 --- a/unit-tests/unit-tests-live.cpp +++ b/unit-tests/unit-tests-live.cpp @@ -13,6 +13,7 @@ #include #include #include +#include using namespace rs2; @@ -4811,3 +4812,94 @@ TEST_CASE("Syncer try wait for frames", "[live][software-device]") { } } } + +TEST_CASE("Projection from recording", "[software-device][using_pipeline][projection]") { + rs2::context ctx; + if (!make_context(SECTION_FROM_TEST_NAME, &ctx, "2.13.0")) + return; + std::string folder_name = get_folder_path(special_folder::temp_folder); + const std::string filename = folder_name + "single_depth_color_640x480.bag"; + REQUIRE(file_exists(filename)); + auto dev = ctx.load_device(filename); + + syncer sync; + std::vector sensors = dev.query_sensors(); + REQUIRE(sensors.size() == 2); + for (auto s : sensors) + { + REQUIRE_NOTHROW(s.open(s.get_stream_profiles().front())); + REQUIRE_NOTHROW(s.start(sync)); + } + + rs2::frame depth; + rs2::stream_profile depth_profile; + rs2::stream_profile color_profile; + + while (!depth_profile || !color_profile) + { + frameset frames = sync.wait_for_frames(200); + REQUIRE(frames.size() > 0); + if (frames.size() == 1) + { + if (frames.get_profile().stream_type() == RS2_STREAM_DEPTH) + { + depth = 
frames.get_depth_frame(); + depth_profile = depth.get_profile(); + } + else + { + color_profile = frames.get_color_frame().get_profile(); + } + } + else + { + depth = frames.get_depth_frame(); + depth_profile = depth.get_profile(); + color_profile = frames.get_color_frame().get_profile(); + } + } + + auto depth_intrin = depth_profile.as().get_intrinsics(); + auto color_intrin = color_profile.as().get_intrinsics(); + auto depth_extrin_to_color = depth_profile.as().get_extrinsics_to(color_profile); + auto color_extrin_to_depth = color_profile.as().get_extrinsics_to(depth_profile); + + float depth_scale = 0; + for (auto s : sensors) + { + auto depth_sensor = s.is(); + if (s.is()) + { + REQUIRE_NOTHROW(depth_scale = s.as().get_depth_scale()); + } + } + + int count = 0; + for (float i = 0; i < depth_intrin.width; i++) + { + for (float j = 0; j < depth_intrin.height; j++) + { + float depth_pixel[2] = { i, j }; + auto udist = depth.as().get_distance(depth_pixel[0], depth_pixel[1]); + if (udist == 0) continue; + + float from_pixel[2] = { 0 }, to_pixel[2] = { 0 }, point[3] = { 0 }, other_point[3] = { 0 }; + rs2_deproject_pixel_to_point(point, &depth_intrin, depth_pixel, udist); + rs2_transform_point_to_point(other_point, &depth_extrin_to_color, point); + rs2_project_point_to_pixel(from_pixel, &color_intrin, other_point); + + rs2_project_color_pixel_to_depth_pixel(to_pixel, reinterpret_cast(depth.get_data()), depth_scale, 0.1, 10, + &depth_intrin, &color_intrin, + &color_extrin_to_depth, &depth_extrin_to_color, from_pixel); + + float dist = sqrt(pow((depth_pixel[1] - to_pixel[1]), 2) + pow((depth_pixel[0] - to_pixel[0]), 2)); + if (dist > 1) + count++; + if (dist > 2) + printf("%f\n", dist); + } + } + const double MAX_ERROR_PERCENTAGE = 0.1; + REQUIRE(count * 100 / (depth_intrin.width * depth_intrin.height) < MAX_ERROR_PERCENTAGE); + CAPTURE(count); +} \ No newline at end of file diff --git a/unit-tests/unit-tests-post-processing.cpp 
b/unit-tests/unit-tests-post-processing.cpp index 7a9d3ea3ff..d76e33b673 100644 --- a/unit-tests/unit-tests-post-processing.cpp +++ b/unit-tests/unit-tests-post-processing.cpp @@ -338,3 +338,172 @@ TEST_CASE("Post-Processing Filters metadata validation", "[software-device][post } } } + +bool is_subset(rs2::frameset full, rs2::frameset sub) +{ + if (!sub.is()) + return false; + if (full.size() == 0 && sub.size() == 0) + return false; + for (auto f : full) + { + if (!sub.first(f.get_profile().stream_type(), f.get_profile().format())) + return false; + } + return true; +} + +bool is_equal(rs2::frameset org, rs2::frameset processed) +{ + if (!org.is() || !processed.is()) + return false; + if (org.size() != processed.size() || org.size() == 0) + return false; + for (auto o : org) + { + auto curr_profile = o.get_profile(); + bool found = false; + processed.foreach([&curr_profile, &found](const rs2::frame& f) + { + auto processed_profile = f.get_profile(); + if (curr_profile.unique_id() == processed_profile.unique_id()) + found = true; + }); + if(!found) + return false; + } + return true; +} + +TEST_CASE("Post-Processing expected output", "[post-processing-filters]") +{ + rs2::context ctx; + + if (!make_context(SECTION_FROM_TEST_NAME, &ctx)) + return; + + rs2::temporal_filter temporal; + rs2::hole_filling_filter hole_filling; + rs2::spatial_filter spatial; + rs2::decimation_filter decimation(4); + rs2::align aligner(RS2_STREAM_COLOR); + rs2::colorizer depth_colorizer; + rs2::disparity_transform to_disp; + rs2::disparity_transform from_disp(false); + + rs2::config cfg; + cfg.enable_all_streams(); + + rs2::pipeline pipe(ctx); + auto profile = pipe.start(cfg); + + bool supports_disparity = false; + for (auto s : profile.get_device().query_sensors()) + { + if (s.supports(RS2_OPTION_STEREO_BASELINE)) + { + supports_disparity = true; + break; + } + } + + rs2::frameset original = pipe.wait_for_frames(); + + //set to set + rs2::frameset temp_processed_set = 
original.apply_filter(temporal); + REQUIRE(is_subset(original, temp_processed_set)); + REQUIRE(is_subset(temp_processed_set, original)); + + rs2::frameset hole_processed_set = original.apply_filter(hole_filling); + REQUIRE(is_subset(original, hole_processed_set)); + REQUIRE(is_subset(hole_processed_set, original)); + + rs2::frameset spatial_processed_set = original.apply_filter(spatial); + REQUIRE(is_subset(original, spatial_processed_set)); + REQUIRE(is_subset(spatial_processed_set, original)); + + rs2::frameset decimation_processed_set = original.apply_filter(decimation); + REQUIRE(is_subset(original, decimation_processed_set)); + REQUIRE(is_subset(decimation_processed_set, original)); + + rs2::frameset align_processed_set = original.apply_filter(aligner); + REQUIRE(is_subset(original, align_processed_set)); + REQUIRE(is_subset(align_processed_set, original)); + + rs2::frameset colorizer_processed_set = original.apply_filter(depth_colorizer); + REQUIRE(is_subset(original, colorizer_processed_set)); + REQUIRE_THROWS(is_subset(colorizer_processed_set, original)); + + rs2::frameset to_disp_processed_set = original.apply_filter(to_disp); + REQUIRE(is_subset(original, to_disp_processed_set)); + if(supports_disparity) + REQUIRE_THROWS(is_subset(to_disp_processed_set, original)); + + rs2::frameset from_disp_processed_set = original.apply_filter(from_disp);//should bypass + REQUIRE(is_equal(original, from_disp_processed_set)); + + rs2::frameset full_pipe = original. + apply_filter(decimation). + apply_filter(to_disp). + apply_filter(spatial). + apply_filter(temporal). + apply_filter(from_disp). + apply_filter(aligner). + apply_filter(hole_filling). 
+ apply_filter(depth_colorizer); + + REQUIRE(is_subset(original, full_pipe)); + REQUIRE_THROWS(is_subset(full_pipe, original)); + + //single to single + rs2::video_frame org_depth = original.get_depth_frame(); + + rs2::video_frame temp_processed_frame = org_depth.apply_filter(temporal); + REQUIRE_FALSE(temp_processed_frame.is()); + REQUIRE(temp_processed_frame.get_profile().stream_type() == RS2_STREAM_DEPTH); + REQUIRE(temp_processed_frame.get_profile().format() == RS2_FORMAT_Z16); + REQUIRE(org_depth.get_width() == temp_processed_frame.get_width()); + + rs2::video_frame hole_processed_frame = org_depth.apply_filter(hole_filling); + REQUIRE_FALSE(hole_processed_frame.is()); + REQUIRE(hole_processed_frame.get_profile().stream_type() == RS2_STREAM_DEPTH); + REQUIRE(hole_processed_frame.get_profile().format() == RS2_FORMAT_Z16); + REQUIRE(org_depth.get_width() == hole_processed_frame.get_width()); + + rs2::video_frame spatial_processed_frame = org_depth.apply_filter(spatial); + REQUIRE_FALSE(spatial_processed_frame.is()); + REQUIRE(spatial_processed_frame.get_profile().stream_type() == RS2_STREAM_DEPTH); + REQUIRE(spatial_processed_frame.get_profile().format() == RS2_FORMAT_Z16); + REQUIRE(org_depth.get_width() == spatial_processed_frame.get_width()); + + rs2::video_frame decimation_processed_frame = org_depth.apply_filter(decimation); + REQUIRE_FALSE(decimation_processed_frame.is()); + REQUIRE(decimation_processed_frame.get_profile().stream_type() == RS2_STREAM_DEPTH); + REQUIRE(decimation_processed_frame.get_profile().format() == RS2_FORMAT_Z16); + REQUIRE(org_depth.get_width() > decimation_processed_frame.get_width()); + + rs2::video_frame colorizer_processed_frame = org_depth.apply_filter(depth_colorizer); + REQUIRE_FALSE(colorizer_processed_frame.is()); + REQUIRE(colorizer_processed_frame.get_profile().stream_type() == RS2_STREAM_DEPTH); + REQUIRE(colorizer_processed_frame.get_profile().format() == RS2_FORMAT_RGB8); + REQUIRE(org_depth.get_width() == 
colorizer_processed_frame.get_width()); + + rs2::video_frame to_disp_processed_frame = org_depth.apply_filter(to_disp); + REQUIRE_FALSE(to_disp_processed_frame.is()); + REQUIRE(to_disp_processed_frame.get_profile().stream_type() == RS2_STREAM_DEPTH); + bool is_disp = to_disp_processed_frame.get_profile().format() == RS2_FORMAT_DISPARITY16 || + to_disp_processed_frame.get_profile().format() == RS2_FORMAT_DISPARITY32; + if (supports_disparity) + { + REQUIRE(is_disp); + REQUIRE(org_depth.get_width() == to_disp_processed_frame.get_width()); + } + + rs2::video_frame from_disp_processed_frame = org_depth.apply_filter(from_disp);//should bypass + REQUIRE_FALSE(from_disp_processed_frame.is()); + REQUIRE(from_disp_processed_frame.get_profile().stream_type() == RS2_STREAM_DEPTH); + REQUIRE(from_disp_processed_frame.get_profile().format() == RS2_FORMAT_Z16); + REQUIRE(org_depth.get_width() == from_disp_processed_frame.get_width()); + + pipe.stop(); +} diff --git a/wrappers/CMakeLists.txt b/wrappers/CMakeLists.txt index dd88e02a00..073d013864 100644 --- a/wrappers/CMakeLists.txt +++ b/wrappers/CMakeLists.txt @@ -32,6 +32,10 @@ if (BUILD_CV_EXAMPLES) add_subdirectory(opencv) endif() +if(BUILD_MATLAB_BINDINGS) + add_subdirectory(matlab) +endif() + if (BUILD_PCL_EXAMPLES) add_subdirectory(pcl) endif() @@ -63,4 +67,3 @@ if(BUILD_CSHARP_BINDINGS) add_subdirectory(csharp) endif() - diff --git a/wrappers/csharp/Intel.RealSense/CMakeLists.txt b/wrappers/csharp/Intel.RealSense/CMakeLists.txt index b3d0ac3698..b22e9ce71b 100644 --- a/wrappers/csharp/Intel.RealSense/CMakeLists.txt +++ b/wrappers/csharp/Intel.RealSense/CMakeLists.txt @@ -17,11 +17,12 @@ add_library(${PROJECT_NAME} Types.cs SoftwareDevice.cs .nuget/Intel.RealSense.targets + + Properties/AssemblyInfo.cs ) csharp_set_designer_cs_properties( .nuget/Intel.RealSense.targets - Properties/AssemblyInfo.cs ) if(BUILD_UNITY_BINDINGS) diff --git a/wrappers/csharp/Intel.RealSense/FrameSet.cs 
b/wrappers/csharp/Intel.RealSense/FrameSet.cs index efb94edc53..5c69e0d464 100644 --- a/wrappers/csharp/Intel.RealSense/FrameSet.cs +++ b/wrappers/csharp/Intel.RealSense/FrameSet.cs @@ -42,11 +42,11 @@ internal static Frame CreateFrame(IntPtr ptr) return new Frame(ptr); } - public T FirstOrDefault(Stream stream) where T : Frame + public T FirstOrDefault(Stream stream, Format format = Format.Any) where T : Frame { foreach (Frame frame in this) { - if (frame.Profile.Stream == stream) + if (frame.Profile.Stream == stream && (Format.Any == format || frame.Profile.Format == format)) return frame as T; frame.Dispose(); } diff --git a/wrappers/csharp/Intel.RealSense/NativeMethods.cs b/wrappers/csharp/Intel.RealSense/NativeMethods.cs index 33c7a0049c..1bab567133 100644 --- a/wrappers/csharp/Intel.RealSense/NativeMethods.cs +++ b/wrappers/csharp/Intel.RealSense/NativeMethods.cs @@ -243,9 +243,11 @@ internal static extern int rs2_poll_for_frame(IntPtr queue, internal static extern void rs2_keep_frame(IntPtr frame); [DllImport(dllName, CallingConvention = CallingConvention.Cdecl)] - internal static extern IntPtr rs2_get_frame_vertices(IntPtr frame, [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(Helpers.ErrorMarshaler))] out object error); - + internal static extern IntPtr rs2_frame_apply_filter(IntPtr frame, IntPtr block, [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(Helpers.ErrorMarshaler))] out object error); + [DllImport(dllName, CallingConvention = CallingConvention.Cdecl)] + internal static extern IntPtr rs2_get_frame_vertices(IntPtr frame, [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(Helpers.ErrorMarshaler))] out object error); + [DllImport(dllName, CallingConvention = CallingConvention.Cdecl)] internal static extern IntPtr rs2_get_frame_texture_coordinates(IntPtr frame, [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(Helpers.ErrorMarshaler))] out object error); diff --git 
a/wrappers/csharp/Intel.RealSense/Processing.cs b/wrappers/csharp/Intel.RealSense/Processing.cs index b35e20cf7f..0320ab2ea3 100644 --- a/wrappers/csharp/Intel.RealSense/Processing.cs +++ b/wrappers/csharp/Intel.RealSense/Processing.cs @@ -264,7 +264,9 @@ public Points Calculate(Frame original, FramesReleaser releaser = null) public void MapTexture(VideoFrame texture) { object error; - Options[Option.TextureSource].Value = Convert.ToSingle(texture.Profile.UniqueID); + Options[Option.StreamFilter].Value = Convert.ToSingle(texture.Profile.Stream); + Options[Option.StreamFormatFilter].Value = Convert.ToSingle(texture.Profile.Format); + Options[Option.StreamIndexFilter].Value = Convert.ToSingle(texture.Profile.Index); NativeMethods.rs2_frame_add_ref(texture.m_instance.Handle, out error); NativeMethods.rs2_process_frame(m_instance.Handle, texture.m_instance.Handle, out error); } diff --git a/wrappers/csharp/Intel.RealSense/Properties/AssemblyInfo.cs b/wrappers/csharp/Intel.RealSense/Properties/AssemblyInfo.cs index ab1f498c09..6578dbf3e0 100644 --- a/wrappers/csharp/Intel.RealSense/Properties/AssemblyInfo.cs +++ b/wrappers/csharp/Intel.RealSense/Properties/AssemblyInfo.cs @@ -5,13 +5,13 @@ // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. 
-[assembly: AssemblyTitle("Intel.RealSense")] -[assembly: AssemblyDescription(".NET Wrapper for Intel RealSense SDK 2.0")] +[assembly: AssemblyTitle("Intel(R) RealSense(TM) SDK C# Wrapper")] +[assembly: AssemblyDescription("Intel(R) RealSense(TM) SDK C# Wrapper")] [assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("Intel")] -[assembly: AssemblyProduct("Intel.RealSense")] -[assembly: AssemblyCopyright("Copyright � 2017")] -[assembly: AssemblyTrademark("")] +[assembly: AssemblyCompany("Intel(R) Corporation")] +[assembly: AssemblyProduct("Intel(R) RealSense(TM) SDK C# Wrapper")] +[assembly: AssemblyCopyright("Copyright � 2017, Intel Corporation. All rights reserved")] +[assembly: AssemblyTrademark("Intel(R) RealSense(TM)")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible @@ -20,7 +20,7 @@ [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("e488694f-f2fa-45ad-b1b8-8d4fabe34e86")] +[assembly: Guid("71c85ee0-c7c2-4e2e-8020-6af818537a14")] // Version information for an assembly consists of the following four values: // @@ -29,8 +29,5 @@ // Build Number // Revision // -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] \ No newline at end of file +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/wrappers/csharp/Intel.RealSense/Sensor.cs b/wrappers/csharp/Intel.RealSense/Sensor.cs index cdcd59251f..0af259f940 100644 --- a/wrappers/csharp/Intel.RealSense/Sensor.cs +++ b/wrappers/csharp/Intel.RealSense/Sensor.cs @@ -95,6 +95,11 @@ public void Dispose() public class Sensor : IDisposable { protected readonly IntPtr m_instance; + + public IntPtr Instance + { + get { return m_instance; } + } internal Sensor(IntPtr sensor) { diff 
--git a/wrappers/csharp/Intel.RealSense/Types.cs b/wrappers/csharp/Intel.RealSense/Types.cs index c53e4b8efb..1f3f62905e 100644 --- a/wrappers/csharp/Intel.RealSense/Types.cs +++ b/wrappers/csharp/Intel.RealSense/Types.cs @@ -210,6 +210,9 @@ public enum Option StereoBaseline = 40, AutoExposureConvergeStep = 41, InterCamSyncMode = 42, + StreamFilter = 43, + StreamFormatFilter = 44, + StreamIndexFilter = 45 } public enum Sr300VisualPreset diff --git a/wrappers/csharp/cs-tutorial-1-depth/CMakeLists.txt b/wrappers/csharp/cs-tutorial-1-depth/CMakeLists.txt index b2b7a781a8..9ff12c03c2 100644 --- a/wrappers/csharp/cs-tutorial-1-depth/CMakeLists.txt +++ b/wrappers/csharp/cs-tutorial-1-depth/CMakeLists.txt @@ -2,7 +2,10 @@ cmake_minimum_required( VERSION 3.8.0 ) project(cs-tutorial-1-depth) -add_executable(${PROJECT_NAME} Program.cs) +add_executable(${PROJECT_NAME} + Program.cs + Properties/AssemblyInfo.cs +) set_property(TARGET ${PROJECT_NAME} PROPERTY VS_DOTNET_TARGET_FRAMEWORK_VERSION "v${DOTNET_VERSION_EXAMPLES}") # set_property(TARGET ${PROJECT_NAME} PROPERTY WIN32_EXECUTABLE TRUE) diff --git a/wrappers/csharp/cs-tutorial-1-depth/Properties/AssemblyInfo.cs b/wrappers/csharp/cs-tutorial-1-depth/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000..9b38be4820 --- /dev/null +++ b/wrappers/csharp/cs-tutorial-1-depth/Properties/AssemblyInfo.cs @@ -0,0 +1,33 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. 
+[assembly: AssemblyTitle("Intel(R) RealSense(TM) SDK C# Wrapper Tutorial-1")] +[assembly: AssemblyDescription("Intel(R) RealSense(TM) SDK C# Wrapper Examples")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("Intel(R) Corporation")] +[assembly: AssemblyProduct("Intel(R) RealSense(TM) SDK C# Wrapper")] +[assembly: AssemblyCopyright("Copyright � 2017, Intel Corporation. All rights reserved")] +[assembly: AssemblyTrademark("Intel(R) RealSense(TM)")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("71c85ee0-c7c2-4e2e-8020-6af818537a14")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/wrappers/csharp/cs-tutorial-2-capture/CMakeLists.txt b/wrappers/csharp/cs-tutorial-2-capture/CMakeLists.txt index b5471cbf5f..cc40741c3c 100644 --- a/wrappers/csharp/cs-tutorial-2-capture/CMakeLists.txt +++ b/wrappers/csharp/cs-tutorial-2-capture/CMakeLists.txt @@ -6,6 +6,8 @@ add_executable(${PROJECT_NAME} Program.cs Window.xaml Window.xaml.cs + + Properties/AssemblyInfo.cs ) set_property(TARGET ${PROJECT_NAME} PROPERTY VS_DOTNET_TARGET_FRAMEWORK_VERSION "v${DOTNET_VERSION_EXAMPLES}") diff --git a/wrappers/csharp/cs-tutorial-2-capture/Properties/AssemblyInfo.cs b/wrappers/csharp/cs-tutorial-2-capture/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000..7be5514d75 --- /dev/null +++ b/wrappers/csharp/cs-tutorial-2-capture/Properties/AssemblyInfo.cs @@ -0,0 +1,33 @@ +using System.Reflection; +using 
System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Intel(R) RealSense(TM) SDK C# Wrapper Tutorial-2")] +[assembly: AssemblyDescription("Intel(R) RealSense(TM) SDK C# Wrapper Examples")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("Intel(R) Corporation")] +[assembly: AssemblyProduct("Intel(R) RealSense(TM) SDK C# Wrapper")] +[assembly: AssemblyCopyright("Copyright � 2017, Intel Corporation. All rights reserved")] +[assembly: AssemblyTrademark("Intel(R) RealSense(TM)")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("71c85ee0-c7c2-4e2e-8020-6af818537a14")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/wrappers/csharp/cs-tutorial-2-capture/Window.xaml.cs b/wrappers/csharp/cs-tutorial-2-capture/Window.xaml.cs index 6b6f037036..f6d01646f5 100644 --- a/wrappers/csharp/cs-tutorial-2-capture/Window.xaml.cs +++ b/wrappers/csharp/cs-tutorial-2-capture/Window.xaml.cs @@ -23,6 +23,7 @@ public partial class CaptureWindow : Window { private Pipeline pipeline; private Colorizer colorizer; + private CancellationTokenSource tokenSource = new CancellationTokenSource(); private void UploadImage(Image img, VideoFrame frame) @@ -68,19 +69,17 @@ public CaptureWindow() { var frames = 
pipeline.WaitForFrames(); - var depth_frame = frames.DepthFrame; + var colorized_depth_frame = colorizer.Colorize(frames.DepthFrame); var color_frame = frames.ColorFrame; - var colorized_depth = colorizer.Colorize(depth_frame); - UploadImage(imgDepth, colorized_depth); + UploadImage(imgDepth, colorized_depth_frame); UploadImage(imgColor, color_frame); // It is important to pre-emptively dispose of native resources // to avoid creating bottleneck at finalization stage after GC // (Also see FrameReleaser helper object in next tutorial) frames.Dispose(); - depth_frame.Dispose(); - colorized_depth.Dispose(); + colorized_depth_frame.Dispose(); color_frame.Dispose(); } }, token); diff --git a/wrappers/csharp/cs-tutorial-3-processing/CMakeLists.txt b/wrappers/csharp/cs-tutorial-3-processing/CMakeLists.txt index 9b38ef98b6..cd0b2f61ea 100644 --- a/wrappers/csharp/cs-tutorial-3-processing/CMakeLists.txt +++ b/wrappers/csharp/cs-tutorial-3-processing/CMakeLists.txt @@ -6,6 +6,8 @@ add_executable(${PROJECT_NAME} Program.cs Window.xaml Window.xaml.cs + + Properties/AssemblyInfo.cs ) set_property(TARGET ${PROJECT_NAME} PROPERTY VS_DOTNET_TARGET_FRAMEWORK_VERSION "v${DOTNET_VERSION_EXAMPLES}") diff --git a/wrappers/csharp/cs-tutorial-3-processing/Properties/AssemblyInfo.cs b/wrappers/csharp/cs-tutorial-3-processing/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000..5c6da73968 --- /dev/null +++ b/wrappers/csharp/cs-tutorial-3-processing/Properties/AssemblyInfo.cs @@ -0,0 +1,33 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. 
+[assembly: AssemblyTitle("Intel(R) RealSense(TM) SDK C# Wrapper Tutorial-3")] +[assembly: AssemblyDescription("Intel(R) RealSense(TM) SDK C# Wrapper Examples")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("Intel(R) Corporation")] +[assembly: AssemblyProduct("Intel(R) RealSense(TM) SDK C# Wrapper")] +[assembly: AssemblyCopyright("Copyright � 2017, Intel Corporation. All rights reserved")] +[assembly: AssemblyTrademark("Intel(R) RealSense(TM)")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("71c85ee0-c7c2-4e2e-8020-6af818537a14")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/wrappers/csharp/cs-tutorial-4-software-dev/CMakeLists.txt b/wrappers/csharp/cs-tutorial-4-software-dev/CMakeLists.txt index e31d32aa50..818dba09e2 100644 --- a/wrappers/csharp/cs-tutorial-4-software-dev/CMakeLists.txt +++ b/wrappers/csharp/cs-tutorial-4-software-dev/CMakeLists.txt @@ -6,6 +6,8 @@ add_executable(${PROJECT_NAME} Program.cs Window.xaml Window.xaml.cs + + Properties/AssemblyInfo.cs ) set_property(TARGET ${PROJECT_NAME} PROPERTY VS_DOTNET_TARGET_FRAMEWORK_VERSION "v${DOTNET_VERSION_EXAMPLES}") diff --git a/wrappers/csharp/cs-tutorial-4-software-dev/Properties/AssemblyInfo.cs b/wrappers/csharp/cs-tutorial-4-software-dev/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000..4606fd43c7 --- /dev/null +++ b/wrappers/csharp/cs-tutorial-4-software-dev/Properties/AssemblyInfo.cs @@ -0,0 +1,33 @@ +using System.Reflection; 
+using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Intel(R) RealSense(TM) SDK C# Wrapper Tutorial-4")] +[assembly: AssemblyDescription("Intel(R) RealSense(TM) SDK C# Wrapper Examples")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("Intel(R) Corporation")] +[assembly: AssemblyProduct("Intel(R) RealSense(TM) SDK C# Wrapper")] +[assembly: AssemblyCopyright("Copyright � 2017, Intel Corporation. All rights reserved")] +[assembly: AssemblyTrademark("Intel(R) RealSense(TM)")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("71c85ee0-c7c2-4e2e-8020-6af818537a14")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/wrappers/matlab/CMakeLists.txt b/wrappers/matlab/CMakeLists.txt new file mode 100644 index 0000000000..95f0a2352f --- /dev/null +++ b/wrappers/matlab/CMakeLists.txt @@ -0,0 +1,32 @@ +# minimum required cmake version: 3.1.0 +cmake_minimum_required(VERSION 3.1.0) + +project(RealsenseMatlabWrappers) + +# Save the command line compile commands in the build output +set(CMAKE_EXPORT_COMPILE_COMMANDS 1) +# View the makefile commands during build +#set(CMAKE_VERBOSE_MAKEFILE on) + +include(CheckCXXCompilerFlag) +CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) 
+CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) +if(COMPILER_SUPPORTS_CXX11) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -pthread") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") +elseif(COMPILER_SUPPORTS_CXX0X) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") +endif() + +set(DEPENDENCIES realsense2) + +find_package(Matlab COMPONENTS MX_LIBRARY REQUIRED) + +set(MATLAB_CPP librealsense_mex.cpp Factory.cpp) +set(MATLAB_H Factory.h MatlabParamParser.h rs2_type_traits.h types.h) +set(MATLAB_M context.m device.m roi_sensor.m sensor.m) + +#TODO: There has to be more that needs to be done to make this work +matlab_add_mex(librealsense_mex ${MATLAB_CPP} ${MATLAB_H} ${DEPENDENCIES}) + +#TODO: additionally, the compiled mex file and all the .m files need to be copied into a folder called `+realsense` at the install site \ No newline at end of file diff --git a/wrappers/matlab/Factory.cpp b/wrappers/matlab/Factory.cpp new file mode 100644 index 0000000000..2f6bdb8aa4 --- /dev/null +++ b/wrappers/matlab/Factory.cpp @@ -0,0 +1,2 @@ +#include "Factory.h" + diff --git a/wrappers/matlab/Factory.h b/wrappers/matlab/Factory.h new file mode 100644 index 0000000000..f995971a23 --- /dev/null +++ b/wrappers/matlab/Factory.h @@ -0,0 +1,59 @@ +#ifndef __FACTORY_H__ +#define __FACTORY_H__ + +#include +#include +#include +#include + +typedef void mxFunc(int, mxArray*[], int, const mxArray*[]); + +struct func_data { + std::function f; + int out, in_min, in_max; + func_data() : f(), out(0), in_min(0), in_max(0) {}; + func_data(std::function function, int out_args, int in_args) : func_data(function, out_args, in_args, in_args) {} + func_data(std::function function, int out_args, int in_min_args, int in_max_args) : f(function), out(out_args), in_min(in_min_args), in_max(in_max_args) {} +}; + +class ClassFactory +{ +private: + std::string name; + std::map funcs; +public: + ClassFactory(std::string n) : name(n), funcs() {} + void record(std::string fname, int out, int in, 
std::function func) + { + funcs.emplace(fname, func_data(func, out, in)); + } + void record(std::string fname, int out, int in_min, int in_max, std::function func) + { + funcs.emplace(fname, func_data(func, out, in_min, in_max)); + } + + std::string get_name() { return name; } + + func_data get(std::string f){ + auto func = funcs.find(f); + if (func == funcs.end()) return func_data(); + return func->second; + } +}; + +class Factory +{ +private: + std::map classes; +public: + Factory() = default; + void record(ClassFactory cls) { classes.emplace(cls.get_name(), cls); } + + func_data get(std::string c, std::string f){ + auto cls = classes.find(c); + if (cls == classes.end()) return func_data(); + return cls->second.get(f); + } +}; + +#endif \ No newline at end of file diff --git a/wrappers/matlab/MatlabParamParser.h b/wrappers/matlab/MatlabParamParser.h new file mode 100644 index 0000000000..61e1cd7499 --- /dev/null +++ b/wrappers/matlab/MatlabParamParser.h @@ -0,0 +1,341 @@ +#pragma once +#include "mex.h" +#include "matrix.h" +#include +#include +#include +#include +#include + +template using is_basic_type = std::bool_constant::value || std::is_pointer::value || std::is_enum::value>; +template struct is_array_type : std::false_type {}; +template struct is_array_type> : std::true_type { using inner_t = T; }; + +// TODO: consider turning MatlabParamParser into a namespace. Might help make lots of things cleaner. 
can keep things hidden via nested namespace MatlabParamParser::detail +class MatlabParamParser +{ +private: + // in charge of which overloads to use + + // helper information for overloaded functions + template struct mx_wrapper_fns + { + static T parse(const mxArray* cell); + static mxArray* wrap(T&& var); + static void destroy(const mxArray* cell); + }; + template struct mx_wrapper; + // enums are exposed as 64bit integers, using the underlying type to determine signedness + template struct mx_wrapper::value>::type> + { + private: + using signed_t = std::integral_constant; + using unsigned_t = std::integral_constant; + using value_t = typename std::conditional::type>::value), signed_t, unsigned_t>::type; + public: +// static const mxClassID value = /*value_t::value*/ signed_t::value; + using value = signed_t; + using type = typename std::conditional::value, int64_t, uint64_t>::type; + }; + // pointers are cast to uint64_t because matlab doesn't have a concept of pointers + template struct mx_wrapper::value>::type> + { + // static const mxClassID value = mxUINT64_CLASS; + using value = std::integral_constant; + using type = uint64_t; + }; + // bools are exposed as matlab's native logical type + template <> struct mx_wrapper + { +// static const mxClassID value = mxLOGICAL_CLASS; + using value = std::integral_constant; + using type = mxLogical; + }; + // floating points are exposed as matlab's native double type + template struct mx_wrapper::value>::type> + { +// static const mxClassID value = mxDOUBLE_CLASS; + using value = std::integral_constant; + using type = double; + }; + // integral types are exposed like enums + template struct mx_wrapper::value>::type> + { + private: + using signed_t = std::integral_constant; + using unsigned_t = std::integral_constant; + using value_t = typename std::conditional::value, signed_t, unsigned_t>::type; + public: +// static const mxClassID value = value_t::value; + using value = value_t; + using type = typename 
std::conditional::value, int64_t, uint64_t>::type; + }; + // uint8_t, uint16_t, uint32_t, uint64_t, int8_t, int16_t, int32_t, and int64_t + // are exposed as the relevant native Matlab type + template <> struct mx_wrapper { + using value = std::integral_constant; + using type = uint8_t; + }; + template <> struct mx_wrapper { + using value = std::integral_constant; + using type = uint16_t; + }; + template <> struct mx_wrapper { + using value = std::integral_constant; + using type = uint32_t; + }; + template <> struct mx_wrapper { + using value = std::integral_constant; + using type = uint64_t; + }; + template <> struct mx_wrapper { + using value = std::integral_constant; + using type = int8_t; + }; + template <> struct mx_wrapper { + using value = std::integral_constant; + using type = int16_t; + }; + template <> struct mx_wrapper { + using value = std::integral_constant; + using type = int32_t; + }; + template <> struct mx_wrapper { + using value = std::integral_constant; + using type = int64_t; + }; + // by default non-basic types are wrapped as pointers + template struct mx_wrapper::value>::type> : mx_wrapper {}; +public: + template struct type_traits { + // KEEP THESE COMMENTED. 
They are only here to show the signature should you choose + // to declare these functions + // static rs2_internal_t* to_internal(T&& val); + // static T from_internal(rs2_internal_t* ptr); + }; +private: + struct traits_trampoline { + private: + template struct detector { + struct fallback { int to_internal, from_internal, use_cells; }; + struct derived : type_traits, fallback {}; + template struct checker; + typedef char ao1[1]; + typedef char ao2[2]; + template static ao1& check_to(checker *); + template static ao2& check_to(...); + template static ao1& check_from(checker *); + template static ao2& check_from(...); + template static ao1& check_cells(checker *); + template static ao2& check_cells(...); +// template struct use_cells_t : false_type {}; +// template struct use_cells_t(0)) == 2>::type> +// : T::use_cells {}; + + enum { has_to = sizeof(check_to(0)) == 2 }; + enum { has_from = sizeof(check_from(0)) == 2 }; +// enum { use_cells = use_cells_t::value }; + enum { use_cells = sizeof(check_cells(0)) == 2 }; + }; + template using internal_t = typename type_traits::rs2_internal_t; + public: + // selected if type_traits::to_internal exists + template static typename std::enable_if::has_to, internal_t*>::type + to_internal(T&& val) { return type_traits::to_internal(std::move(val)); } + // selected if it doesnt + template static typename std::enable_if::has_to, internal_t*>::type + to_internal(T&& val) { mexLock(); return new internal_t(val); } + + // selected if type_traits::from_internal exists + template static typename std::enable_if::has_from, T>::type + from_internal(typename internal_t* ptr) { return type_traits::from_internal(ptr); } + // selected if it doesnt + template static typename std::enable_if::has_from, T>::type + from_internal(typename internal_t* ptr) { return T(*ptr); } + template using use_cells = std::integral_constant::use_cells>; + }; +public: + MatlabParamParser() {}; + ~MatlabParamParser() {}; + + // TODO: try/catch->err msg? 
+ template static T parse(const mxArray* cell) { return mx_wrapper_fns::parse(cell); } + template static mxArray* wrap(T&& var) { return mx_wrapper_fns::wrap(std::move(var)); }; + template static void destroy(const mxArray* cell) { return mx_wrapper_fns::destroy(cell); } + + template static typename std::enable_if::value, std::vector>::type parse_array(const mxArray* cells); + template static typename std::enable_if::value, std::vector>::type parse_array(const mxArray* cells); + + template static typename std::enable_if::value && !traits_trampoline::use_cells::value, mxArray*>::type wrap_array(const T* var, size_t length); + template static typename std::enable_if::value && traits_trampoline::use_cells::value, mxArray*>::type wrap_array(const T* var, size_t length); + template static typename std::enable_if::value, mxArray*>::type wrap_array(const T* var, size_t length); +}; + +#include "rs2_type_traits.h" + +// for basic types (aka arithmetic, pointer, and enum types) +template struct MatlabParamParser::mx_wrapper_fns::value && !extra_checks::value && !is_array_type::value>::type> +{ + // Use full width types when converting integers. 
+ // Always using full width makes some things easier, and doing it this way + // still allows us to use the mx_wrapper class to send more exact types + // for frame data arrays + using type = typename std::conditional::value, typename std::conditional::value, int64_t, uint64_t>::type, T>::type; + using wrapper = mx_wrapper; + static T parse(const mxArray* cell) + { + // obtain pointer to data, cast to proper matlab type + auto *p = static_cast(mxGetData(cell)); + // dereference pointer and cast to correct C++ type + return T(*p); + } + static mxArray* wrap(T&& var) + { + // request 1x1 matlab matrix of correct type + mxArray* cell = mxCreateNumericMatrix(1, 1, wrapper::value::value, mxREAL); + // access matrix's data as pointer to correct C++ type + auto *outp = static_cast(mxGetData(cell)); + // cast object to correct C++ type and then store it in the matrix + *outp = wrapper::type(var); + return cell; + } + static void destroy(const mxArray* cell) + { + static_assert(!is_basic_type::value, "Trying to destroy basic type. This shouldn't happen."); + static_assert(is_basic_type::value, "Non-basic type ended up in basic type's destroy function. How?"); + } +}; + +// default for non-basic types (eg classes) +template struct MatlabParamParser::mx_wrapper_fns::value && !extra_checks::value && !is_array_type::value>::type> +{ + // librealsense types are sent to matlab using a pointer to the internal type. 
+ // to get it back from matlab we first parse that pointer and then reconstruct the C++ wrapper + static T parse(const mxArray* cell) + { + using internal_t = typename type_traits::rs2_internal_t; + return traits_trampoline::from_internal(mx_wrapper_fns::parse(cell)); + } + + static mxArray* wrap(T&& var) + { + using internal_t = typename type_traits::rs2_internal_t; + return mx_wrapper_fns::wrap(traits_trampoline::to_internal(std::move(var))); + } + + static void destroy(const mxArray* cell) + { + using internal_t = typename type_traits::rs2_internal_t; + // get pointer to the internal type we put on the heap + auto ptr = mx_wrapper_fns::parse(cell); + delete ptr; + // signal to matlab that the wrapper owns one fewer objects + mexUnlock(); + } +}; + +// simple helper overload to refer std::array and std::vector to wrap_array +template struct MatlabParamParser::mx_wrapper_fns::value>::type> +{ + static T parse(const mxArray* cell) + { + return MatlabParamParser::parse_array::inner_t>(cell); + } + static mxArray* wrap(T&& var) + { + return MatlabParamParser::wrap_array(var.data(), var.size()); + } +}; + +// overload for wrapping C-strings. TODO: do we need special parsing too? 
+template<> static mxArray* MatlabParamParser::mx_wrapper_fns::wrap(const char*&& str) +{ + return mxCreateString(str); +} + +template<> static std::string MatlabParamParser::mx_wrapper_fns::parse(const mxArray* cell) +{ + auto str = mxArrayToString(cell); + auto ret = std::string(str); + mxFree(str); + return ret; +} +template<> static mxArray* MatlabParamParser::mx_wrapper_fns::wrap(std::string&& str) +{ + return mx_wrapper_fns::wrap(str.c_str()); +} + +template<> struct MatlabParamParser::mx_wrapper_fns +{ + static std::chrono::nanoseconds parse(const mxArray* cell) + { + auto ptr = static_cast(mxGetData(cell)); + return std::chrono::nanoseconds{ static_cast(*ptr * 1e6) }; // convert from milliseconds, smallest time unit that's easy to work with in matlab + } + static mxArray* wrap(std::chrono::nanoseconds&& dur) + { + auto cell = mxCreateNumericMatrix(1, 1, mxDOUBLE_CLASS, mxREAL); + auto ptr = static_cast(mxGetData(cell)); + *ptr = dur.count() / 1.e6; // convert to milliseconds, smallest time unit that's easy to work with in matlab + return cell; + } +}; + +template static typename std::enable_if::value, std::vector>::type MatlabParamParser::parse_array(const mxArray* cells) +{ + using wrapper_t = mx_wrapper; + using internal_t = typename type_traits::rs2_internal_t; + + std::vector ret; + auto length = mxGetNumberOfElements(cells); + ret.reserve(length); + auto ptr = static_cast(mxGetData(cells)); // array of uint64_t's + for (int i = 0; i < length; ++i) { + ret.push_back(traits_trampoline::from_internal(reinterpret_cast(ptr[i]))); + } + return ret; +} +template static typename std::enable_if::value, std::vector>::type MatlabParamParser::parse_array(const mxArray* cells) +{ + using wrapper_t = mx_wrapper; + + std::vector ret; + auto length = mxGetNumberOfElements(cells); + ret.reserve(length); + auto ptr = static_cast(mxGetData(cells)); // array of uint64_t's + for (int i = 0; i < length; ++i) { + ret.push_back(ptr[i]); + } + return ret; +} +template static 
typename std::enable_if::value && !MatlabParamParser::traits_trampoline::use_cells::value, mxArray*>::type +MatlabParamParser::wrap_array(const T* var, size_t length) +{ + auto cells = mxCreateNumericMatrix(1, length, MatlabParamParser::mx_wrapper::value::value, mxREAL); + auto ptr = static_cast::type*>(mxGetData(cells)); + for (int x = 0; x < length; ++x) + ptr[x] = reinterpret_cast::type>(traits_trampoline::to_internal(T(var[x]))); + + return cells; +} + +template static typename std::enable_if::value && MatlabParamParser::traits_trampoline::use_cells::value, mxArray*>::type +MatlabParamParser::wrap_array(const T* var, size_t length) +{ + auto cells = mxCreateCellMatrix(1, length); + for (int x = 0; x < length; ++x) + mxSetCell(cells, x, wrap(T(var[x]))); + + return cells; +} + +template static typename std::enable_if::value, mxArray*>::type MatlabParamParser::wrap_array(const T* var, size_t length) +{ + auto cells = mxCreateNumericMatrix(1, length, MatlabParamParser::mx_wrapper::value::value, mxREAL); + auto ptr = static_cast::type*>(mxGetData(cells)); + for (int x = 0; x < length; ++x) + ptr[x] = mx_wrapper::type(var[x]); + + return cells; +} +#include "types.h" \ No newline at end of file diff --git a/wrappers/matlab/align.m b/wrappers/matlab/align.m new file mode 100644 index 0000000000..ffd3b40222 --- /dev/null +++ b/wrappers/matlab/align.m @@ -0,0 +1,32 @@ +% Wraps librealsense2 align class +classdef align < handle + properties (SetAccess = private, Hidden = true) + objectHandle; + end + methods + % Constructor + function this = align(align_to) + narginchk(1, 1); + validateattributes(align_to, {'realsense.stream', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.stream.count}); + this.objectHandle = realsense.librealsense_mex('rs2::align', 'new', int64(align_to)); + end + + % Destructor + function delete(this) + if (this.objectHandle ~= 0) + realsense.librealsense_mex('rs2::align', 'delete', this.objectHandle); + end + end + + % 
Functions + function frames = process(this, frame) + narginchk(2, 2); + validateattributes(frame, {'realsense.frame'}, {'scalar'}, '', 'frame', 2); + if ~frame.is('frameset') + error('Expected input number 2, frame, to be a frameset'); + end + out = realsense.librealsense_mex('rs2::align', 'process', this.objectHandle, frame.objectHandle); + frames = realsense.frame(out); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/camera_info.m b/wrappers/matlab/camera_info.m new file mode 100644 index 0000000000..071bc3cf25 --- /dev/null +++ b/wrappers/matlab/camera_info.m @@ -0,0 +1,15 @@ +classdef camera_info < int64 + enumeration + name ( 0) + serial_number ( 1) + firmware_version ( 2) + recommended_firmware_version ( 3) + physical_port ( 4) + debug_op_code ( 5) + advanced_mode ( 6) + product_id ( 7) + camera_locked ( 8) + usb_type_descriptor ( 9) + count (10) + end +end \ No newline at end of file diff --git a/wrappers/matlab/colorizer.m b/wrappers/matlab/colorizer.m new file mode 100644 index 0000000000..bc4ed97fba --- /dev/null +++ b/wrappers/matlab/colorizer.m @@ -0,0 +1,23 @@ +% Wraps librealsense2 colorizer class +classdef colorizer < realsense.options + methods + % Constructor + function this = colorizer() + out = realsense.librealsense_mex('rs2::colorizer', 'new'); + this = this@realsense.options(out); + end + + % Destructor (uses base class destructor) + + % Functions + function video_frame = colorize(this, depth) + narginchk(2, 2) + validateattributes(depth, {'realsense.frame'}, {'scalar'}, '', 'depth', 2); + if ~depth.is('depth_frame') + error('Expected input number 2, depth, to be a depth_frame'); + end + out = realsense.librealsense_mex('rs2::colorizer', 'colorize', this.objectHandle, depth.objectHandle); + video_frame = realsense.video_frame(out); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/config.m b/wrappers/matlab/config.m new file mode 100644 index 0000000000..4df0fe091c --- /dev/null +++ 
b/wrappers/matlab/config.m @@ -0,0 +1,127 @@ +% Wraps librealsense2 config class +classdef config < handle + properties (SetAccess = private, Hidden = true) + objectHandle; + end + methods + % Constructor + function this = config() + this.objectHandle = realsense.librealsense_mex('rs2::config', 'new'); + end + % Destructor + function delete(this) + if (this.objectHandle ~= 0) + realsense.librealsense_mex('rs2::config', 'delete', this.objectHandle); + end + end + + % Functions + function enable_stream(this, varargin) + narginchk(2, 7); + validateattributes(varargin{1}, {'realsense.stream', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.stream.count}, '', 'stream_type', 2); + which = 'none' + switch nargin + case 2 + which = 'enable_stream#stream'; + case 3 + if isa(varargin{2}, 'realsense.format') + validateattributes(varargin{2}, {'realsense.format'}, {'scalar'}, '', 'format', 3); + which = 'enable_stream#format'; + else + validateattributes(varargin{2}, {'numeric'}, {'scalar', 'real', 'integer'}, '', 'stream_index', 3); + which = 'enable_stream#stream'; + end + case 4 + if isa(varargin{2}, 'realsense.format') + validateattributes(varargin{2}, {'realsense.format'}, {'scalar'}, '', 'format', 3); + validateattributes(varargin{3}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'framerate', 4); + which = 'enable_stream#format'; + elseif isa(varargin{3}, 'realsense.format') + validateattributes(varargin{2}, {'numeric'}, {'scalar', 'real', 'integer'}, '', 'stream_index', 3); + validateattributes(varargin{3}, {'realsense.format'}, {'scalar'}, '', 'format', 4); + which = 'enable_stream#extended'; + else + validateattributes(varargin{2}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'width', 3); + validateattributes(varargin{3}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'height', 4); + which = 'enable_stream#size'; + end + case 5 + if isa(varargin{3}, 'realsense.format') + 
validateattributes(varargin{2}, {'numeric'}, {'scalar', 'real', 'integer'}, '', 'stream_index', 3); + validateattributes(varargin{3}, {'realsense.format'}, {'scalar'}, '', 'format', 4); + validateattributes(varargin{4}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'framerate', 5); + which = 'enable_stream#extended'; + elseif isa(varargin{4}, 'realsense.format') + validateattributes(varargin{2}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'width', 3); + validateattributes(varargin{3}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'height', 4); + validateattributes(varargin{4}, {'realsense.format'}, {'scalar'}, '', 'format', 5); + which = 'enable_stream#size'; + else + validateattributes(varargin{2}, {'numeric'}, {'scalar', 'real', 'integer'}, '', 'stream_index', 3); + validateattributes(varargin{3}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'width', 4); + validateattributes(varargin{4}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'height', 5); + which = 'enable_stream#full'; + end + case 6 + if isa(varargin{4}, 'realsense.format') + validateattributes(varargin{2}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'width', 3); + validateattributes(varargin{3}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'height', 4); + validateattributes(varargin{4}, {'realsense.format'}, {'scalar'}, '', 'format', 5); + validateattributes(varargin{5}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'framerate', 6); + which = 'enable_stream#size'; + else + validateattributes(varargin{2}, {'numeric'}, {'scalar', 'real', 'integer'}, '', 'stream_index', 3); + validateattributes(varargin{3}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'width', 4); + validateattributes(varargin{4}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'height', 5); + validateattributes(varargin{5}, {'realsense.format', 'numeric'}, 
{'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.format.count}, '', 'format', 6); + which = 'enable_stream#full'; + end + case 7 + validateattributes(varargin{2}, {'numeric'}, {'scalar', 'real', 'integer'}, '', 'stream_index', 3); + validateattributes(varargin{3}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'width', 4); + validateattributes(varargin{4}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'height', 5); + validateattributes(varargin{5}, {'realsense.format', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.format.count}, '', 'format', 6); + validateattributes(varargin{6}, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'framerate', 7); + end + args = num2cell(int64([varargin{:}])); + realsense.librealsense_mex('rs2::config', which, this.objectHandle, args{:}); + end + function enable_all_streams(this) + realsense.librealsense_mex('rs2::config', 'enable_all_streams', this.objectHandle); + end + function enable_device(this, serial) + narginchk(2, 2); + validateattributes(serial, {'char', 'string'}, {'scalartext'}, '', 'serial', 2); + realsense.librealsense_mex('rs2::config', 'enable_device', this.objectHandle, serial); + end + function enable_device_from_file(this, file_name, repeat_playback) + narginchk(2, 3); + validateattributes(file_name, {'char', 'string'}, {'scalartext'}, '', 'file_name', 2); + if nargin==2 + realsense.librealsense_mex('rs2::config', 'enable_device_from_file', this.objectHandle, file_name); + else + validateattributes(repeat_playback, {'logical', 'numeric'}, {'scalar', 'real'}, '', 'repeat_playback', 3); + realsense.librealsense_mex('rs2::config', 'enable_device_from_file', this.objectHandle, file_name, logical(repeat_playback)); + end + end + function enable_record_to_file(this, file_name) + narginchk(2, 2); + validateattributes(file_name, {'char', 'string'}, {'scalartext'}, '', 'file_name', 2); + realsense.librealsense_mex('rs2::config', 
'enable_record_to_file', this.objectHandle, file_name); + end + function disable_stream(this, stream, index) + narginchk(2, 3); + validateattributes(stream, {'realsense.stream', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.stream.count}, '', 'stream', 2); + if nargin == 2 + out = realsense.librealsense_mex('rs2::config', 'disable_stream', this.objectHandle, int64(stream)); + else + validateattributes(index, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'index', 3); + out = realsense.librealsense_mex('rs2::config', 'disable_stream', this.objectHandle, int64(stream), int64(index)); + end + stream = realsense.stream_profile(out{:}); + end + function disable_all_streams(this) + realsense.librealsense_mex('rs2::config', 'disable_all_streams', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/context.m b/wrappers/matlab/context.m new file mode 100644 index 0000000000..f35f85e326 --- /dev/null +++ b/wrappers/matlab/context.m @@ -0,0 +1,47 @@ +% Wraps librealsense2 context class +classdef context < handle + properties (SetAccess = private, Hidden = true) + objectHandle; + end + methods + % Constructor + function this = context() + this.objectHandle = realsense.librealsense_mex('rs2::context', 'new'); + end + + % Destructor + function delete(this) + if (this.objectHandle ~= 0) + realsense.librealsense_mex('rs2::context', 'delete', this.objectHandle); + end + end + + % Functions + function device_array = query_devices(this) + arr = realsense.librealsense_mex('rs2::context', 'query_devices', this.objectHandle); + % TODO: Might be cell array + device_array = arrayfun(@(x) realsense.device(x{:}{:}), arr, 'UniformOutput', false); + end + function sensor_array = query_all_sensors(this) + arr = realsense.librealsense_mex('rs2::context', 'query_all_sensors', this.objectHandle); + sensor_array = arrayfun(@realsense.sensor, arr, 'UniformOutput', false); + end + function device = 
get_sensor_parent(this, sensor) + narginchk(2, 2); + validateattributes(sensor, {'realsense.sensor'}, {'scalar'}, '', 'sensor', 2); + out = realsense.librealsense_mex('rs2::context', 'get_sensor_parent', this.objectHandle, sensor.objectHandle); + device = realsense.device(out); + end + function playback = load_device(this, file) + narginchk(2, 2); + validateattributes(file, {'string', 'char'}, {'scalartext', 'nonempty'}, '', 'file', 2); + out = realsense.librealsense_mex('rs2::context', 'load_device', this.objectHandle, file); + playback = realsense.playback(out(1), out(2)); + end + function unload_device(this, file) + narginchk(2, 2); + validateattributes(file, {'string', 'char'}, {'scalartext', 'nonempty'}, '', 'file', 2); + realsense.librealsense_mex('rs2::context', 'unload_device', this.objectHandle, file); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/decimation_filter.m b/wrappers/matlab/decimation_filter.m new file mode 100644 index 0000000000..f48264c985 --- /dev/null +++ b/wrappers/matlab/decimation_filter.m @@ -0,0 +1,14 @@ +% Wraps librealsense2 decimation_filter class +classdef decimation_filter < realsense.process_interface + methods + % Constructor + function this = decimation_filter() + out = realsense.librealsense_mex('rs2::decimation_filter', 'new'); + this = this@realsense.process_interface(out); + end + + % Destructor (uses base class destructor) + + % Functions + end +end \ No newline at end of file diff --git a/wrappers/matlab/depth_example.m b/wrappers/matlab/depth_example.m new file mode 100644 index 0000000000..afc45f64f8 --- /dev/null +++ b/wrappers/matlab/depth_example.m @@ -0,0 +1,36 @@ +function depth_example() + % Make Pipeline object to manage streaming + pipe = realsense.pipeline(); + % Make Colorizer object to prettify depth output + colorizer = realsense.colorizer(); + + % Start streaming on an arbitrary camera with default settings + profile = pipe.start(); + + % Get streaming device's name + dev = 
profile.get_device(); + name = dev.get_info(realsense.camera_info.name); + + % Get frames. We discard the first couple to allow + % the camera time to settle + for i = 1:5 + fs = pipe.wait_for_frames(); + end + + % Stop streaming + pipe.stop(); + + % Select depth frame + depth = fs.get_depth_frame(); + % Colorize depth frame + color = colorizer.colorize(depth); + + % Get actual data and convert into a format imshow can use + % (Color data arrives as [R, G, B, R, G, B, ...] vector) + data = color.get_data(); + img = permute(reshape(data',[3,color.get_width(),color.get_height()]),[3 2 1]); + + % Display image + imshow(img); + title(sprintf("Colorized depth frame from %s", name)); +end \ No newline at end of file diff --git a/wrappers/matlab/depth_frame.m b/wrappers/matlab/depth_frame.m new file mode 100644 index 0000000000..61af9ae734 --- /dev/null +++ b/wrappers/matlab/depth_frame.m @@ -0,0 +1,19 @@ +% Wraps librealsense2 depth_frame class +classdef depth_frame < realsense.video_frame + methods + % Constructor + function this = depth_frame(handle) + this = this@realsense.video_frame(handle); + end + + % Destructor (uses base class destructor) + + % Functions + function distance = get_distance(this, x, y) + narginchk(3, 3); + validateattributes(x, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'x', 2); + validateattributes(y, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'y', 2); + distance = realsense.librealsense_mex('rs2::depth_frame', 'get_distance', this.objectHandle, int64(x), int64(y)); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/depth_sensor.m b/wrappers/matlab/depth_sensor.m new file mode 100644 index 0000000000..67cc5968f2 --- /dev/null +++ b/wrappers/matlab/depth_sensor.m @@ -0,0 +1,16 @@ +% Wraps librealsense2 depth_sensor class +classdef depth_sensor < realsense.sensor + methods + % Constructor + function this = depth_sensor(handle) + this = this@realsense.sensor(handle); + end + + % Destructor (uses
base class destructor) + + % Functions + function scale = get_depth_scale(this) + scale = realsense.librealsense_mex('rs2::depth_sensor', 'get_depth_scale', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/depth_stereo_sensor.m b/wrappers/matlab/depth_stereo_sensor.m new file mode 100644 index 0000000000..2533798d73 --- /dev/null +++ b/wrappers/matlab/depth_stereo_sensor.m @@ -0,0 +1,16 @@ +% Wraps librealsense2 depth_stereo_sensor class +classdef depth_stereo_sensor < realsense.depth_sensor + methods + % Constructor + function this = depth_stereo_sensor(handle) + this = this@realsense.depth_sensor(handle); + end + + % Destructor (uses base class destructor) + + % Functions + function scale = get_stereo_baseline(this) + scale = realsense.librealsense_mex('rs2::depth_stereo_sensor', 'get_stereo_baseline', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/device.m b/wrappers/matlab/device.m new file mode 100644 index 0000000000..73332b77a1 --- /dev/null +++ b/wrappers/matlab/device.m @@ -0,0 +1,100 @@ +% Wraps librealsense2 device class +classdef device < handle + properties (SetAccess = private, Hidden = true) + objectHandle; + id; + end + methods (Access = protected) + function do_init(this) + if (this.id >= 0) + this.objectHandle = realsense.librealsense_mex('rs2::device', 'init', this.objectHandle, uint64(this.id)); + this.id = -1; + end + end + end + methods + % Constructor + function this = device(handle, index) + narginchk(1, 2); + validateattributes(handle, {'uint64'}, {'scalar'}); + this.objectHandle = handle; + if (nargin == 1) % constructed from device + this.id = -1; + else % lazily "constructed" from device_list + validateattributes(index, {'numeric'}, {'scalar', 'real', 'integer'}); + this.id = int64(index); + end + end + % Destructor + function delete(this) + if (this.objectHandle ~= 0) + if (this.id < 0) % still device list + realsense.librealsense_mex('rs2::device',
'delete#uninit', this.objectHandle); + else + realsense.librealsense_mex('rs2::device', 'delete#init', this.objectHandle); + end + end + end + + % Functions + function sensor_array = query_sensors(this) + this.do_init(); + arr = realsense.librealsense_mex('rs2::device', 'query_sensors', this.objectHandle); + [sensor_array{1:nargout}] = arrayfun(@realsense.sensor, arr, 'UniformOutput', false); + end + function sensor = first(this, type) + narginchk(2, 2); + % C++ function validates contents of type + validateattributes(type, {'char', 'string'}, {'scalartext'}, '', 'type', 2); + this.do_init(); + out = realsense.librealsense_mex('rs2::device', 'first', this.objectHandle, type); + sensor = realsense.sensor(out).as(type); + end + function value = supports(this, info) + narginchk(2, 2); + validateattributes(info, {'realsense.camera_info', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.camera_info.count}, '', 'info', 2); + this.do_init(); + value = realsense.librealsense_mex('rs2::device', 'supports', this.objectHandle, int64(info)); + end + function info = get_info(this, info) + narginchk(2, 2); + validateattributes(info, {'realsense.camera_info', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.camera_info.count}, '', 'info', 2); + this.do_init(); + info = realsense.librealsense_mex('rs2::device', 'get_info', this.objectHandle, int64(info)); + end + function hardware_reset(this) + this.do_init(); + realsense.librealsense_mex('rs2::device', 'hardware_reset', this.objectHandle); + end + function l = logical(this) + l = realsense.librealsense_mex('rs2::device', 'operator bool', this.objectHandle); + end + function value = is(this, type) + narginchk(2, 2); + % C++ function validates contents of type + validateattributes(type, {'char', 'string'}, {'scalartext'}, '', 'type', 2); + this.do_init(); + out = realsense.librealsense_mex('rs2::device', 'is', this.objectHandle, type); + value = logical(out); + end + function dev = 
as(this, type) + narginchk(2, 2); + % C++ function validates contents of type + validateattributes(type, {'char', 'string'}, {'scalartext'}, '', 'type', 2); + this.do_init(); + out = realsense.librealsense_mex('rs2::device', 'as', this.objectHandle, type); + switch type + case 'device' + dev = realsense.device(out{:}); + case 'debug_protocol' + error('debug_protocol is not supported in Matlab'); + case 'advanced_mode' + error('advanced_mode is not supported in Matlab'); + case 'recorder' + dev = realsense.recorder(out{:}); + case 'playback' + dev = realsense.playback(out{:}); + end + end + end +end diff --git a/wrappers/matlab/device_hub.m b/wrappers/matlab/device_hub.m new file mode 100644 index 0000000000..15b84532a7 --- /dev/null +++ b/wrappers/matlab/device_hub.m @@ -0,0 +1,31 @@ +% Wraps librealsense2 device_hub class +classdef device_hub < handle + properties (SetAccess = private, Hidden = true) + objectHandle; + end + methods + % Constructor + function this = device_hub(context) + narginchk(1, 1); + validateattributes(context, {'realsense.context'}, {'scalar'}); + this.objectHandle = realsense.librealsense_mex('rs2::device_hub', 'new', context.objectHandle); + end + % Destructor + function delete(this) + if (this.objectHandle ~= 0) + realsense.librealsense_mex('rs2::device_hub', 'delete', this.objectHandle); + end + end + + % Functions + function device = wait_for_device(this) + out = realsense.librealsense_mex('rs2::device_hub', 'wait_for_device', this.objectHandle) + device = realsense.device(out(1), out(2)); + end + function value = is_connected(this, dev) + narginchk(2, 2); + validateattributes(dev, {'realsense.device'}, {'scalar'}, '', 'dev', 2); + value = realsense.librealsense_mex('rs2::device_hub', 'is_connected', this.objectHandle, dev.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/disparity_frame.m b/wrappers/matlab/disparity_frame.m new file mode 100644 index 0000000000..5d30bae0ac --- /dev/null +++ 
b/wrappers/matlab/disparity_frame.m @@ -0,0 +1,16 @@ +% Wraps librealsense2 disparity_frame class +classdef disparity_frame < realsense.depth_frame + methods + % Constructor + function this = disparity_frame(handle) + this = this@realsense.depth_frame(handle); + end + + % Destructor (uses base class destructor) + + % Functions + function baseline = get_baseline(this) + baseline = realsense.librealsense_mex('rs2::disparity_frame', 'get_baseline', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/disparity_transform.m b/wrappers/matlab/disparity_transform.m new file mode 100644 index 0000000000..0d1b1844d5 --- /dev/null +++ b/wrappers/matlab/disparity_transform.m @@ -0,0 +1,18 @@ +% Wraps librealsense2 disparity_transform class +classdef disparity_transform < realsense.process_interface + methods + % Constructor + function this = disparity_transform(transform_to_disparity) + if nargin == 0 + out = realsense.librealsense_mex('rs2::disparity_transform', 'new'); + else + validateattributes(transform_to_disparity, {'logical', 'numeric'}, {'scalar', 'real'}); + out = realsense.librealsense_mex('rs2::disparity_transform', 'new', logical(transform_to_disparity)); + end + this = this@realsense.process_interface(out); + end + + % Destructor (uses base class destructor) + + % Functions + end +end \ No newline at end of file diff --git a/wrappers/matlab/format.m b/wrappers/matlab/format.m new file mode 100644 index 0000000000..55ac4324ec --- /dev/null +++ b/wrappers/matlab/format.m @@ -0,0 +1,25 @@ +classdef format < int64 + enumeration + any_format ( 0) + z16 ( 1) + disparity16 ( 2) + xyz32f ( 3) + yuyv ( 4) + rgb8 ( 5) + bgr8 ( 6) + rgba8 ( 7) + bgra8 ( 8) + y8 ( 9) + y16 (10) + raw10 (11) + raw16 (12) + raw8 (13) + uyvy (14) + motion_raw (15) + motion_xyz32f (16) + gpio_raw (17) + six_dof (18) + disparity32 (19) + count (20) + end +end \ No newline at end of file diff --git a/wrappers/matlab/frame.m b/wrappers/matlab/frame.m new file 
mode 100644 index 0000000000..d6492ed17d --- /dev/null +++ b/wrappers/matlab/frame.m @@ -0,0 +1,83 @@ +% Wraps librealsense2 frame class +classdef frame < handle + properties (SetAccess = protected, Hidden = true) + objectHandle; + end + methods + % Constructor + function this = frame(handle) + validateattributes(handle, {'uint64'}, {'scalar'}); + this.objectHandle = handle; + end + % Destructor + function delete(this) + if this.objectHandle ~= 0 + realsense.librealsense_mex('rs2::frame', 'delete', this.objectHandle); + end + end + + % Functions + % TODO: keep + function l = logical(this) + l = realsense.librealsense_mex('rs2::frame', 'operator bool', this.objectHandle); + end + function timestamp = get_timestamp(this) + timestamp = realsense.librealsense_mex('rs2::frame', 'get_timestamp', this.objectHandle); + end + function domain = get_frame_timestamp_domain(this) + ret = realsense.librealsense_mex('rs2::frame', 'get_frame_timestamp_domain', this.objectHandle); + domain = realsense.timestamp_domain(ret); + end + function metadata = get_frame_metadata(this, frame_metadata) + narginchk(2, 2); + validateattributes(frame_metadata, {'realsense.frame_metadata_value', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.frame_metadata_value.count}, '', 'frame_metadata', 2); + metadata = realsense.librealsense_mex('rs2::frame', 'get_frame_metadata', this.objectHandle, int64(frame_metadata)); + end + function value = supports_frame_metadata(this, frame_metadata) + narginchk(2, 2); + validateattributes(frame_metadata, {'realsense.frame_metadata_value', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.frame_metadata_value.count}, '', 'frame_metadata', 2); + value = realsense.librealsense_mex('rs2::frame', 'supports_frame_metadata', this.objectHandle, int64(frame_metadata)); + end + function frame_number = get_frame_number(this) + frame_number = realsense.librealsense_mex('rs2::frame', 'get_frame_number', this.objectHandle); + end 
+ function data = get_data(this) + data = realsense.librealsense_mex('rs2::frame', 'get_data', this.objectHandle); + end + function profile = get_profile(this) + ret = realsense.librealsense_mex('rs2::frame', 'get_profile', this.objectHandle); + profile = realsense.stream_profile(ret{:}); + end + function value = is(this, type) + narginchk(2, 2); + % C++ function validates contents of type + validateattributes(type, {'char', 'string'}, {'scalartext'}, '', 'type', 2); + out = realsense.librealsense_mex('rs2::frame', 'is', this.objectHandle, type); + value = logical(out); + end + function frame = as(this, type) + narginchk(2, 2); + % C++ function validates contents of type + validateattributes(type, {'char', 'string'}, {'scalartext'}, '', 'type', 2); + out = realsense.librealsense_mex('rs2::frame', 'as', this.objectHandle, type); + switch type + case 'frame' + frame = realsense.frame(out); + case 'video_frame' + frame = realsense.video_frame(out); + case 'points' + frame = realsense.points(out); + case 'depth_frame' + frame = realsense.depth_frame(out); + case 'disparity_frame' + frame = realsense.disparity_frame(out); + case 'motion_frame' + frame = realsense.motion_frame(out); + case 'pose_frame' + frame = realsense.pose_frame(out); + case 'frameset' + frame = realsense.frameset(out); + end + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/frame_metadata_value.m b/wrappers/matlab/frame_metadata_value.m new file mode 100644 index 0000000000..d86ccafc73 --- /dev/null +++ b/wrappers/matlab/frame_metadata_value.m @@ -0,0 +1,34 @@ +classdef frame_metadata_value < int64 + enumeration + frame_counter ( 0) + frame_timestamp ( 1) + sensor_timestamp ( 2) + actual_exposure ( 3) + gain_level ( 4) + auto_exposure ( 5) + white_balance ( 6) + time_of_arrival ( 7) + temperature ( 8) + backend_timestamp ( 9) + actual_fps (10) + frame_laser_power (11) + frame_laser_power_mode (12) + exposure_priority (13) + exposure_roi_left (14) + exposure_roi_right (15) + 
exposure_roi_top (16) + exposure_roi_bottom (17) + brightness (18) + contrast (19) + saturation (20) + sharpness (21) + auto_white_balance_temperature (22) + backlight_compensation (23) + hue (24) + gamma (25) + manual_white_balance (26) + power_line_frequency (27) + low_light_compensation (28) + count (29) + end +end \ No newline at end of file diff --git a/wrappers/matlab/frame_queue.m b/wrappers/matlab/frame_queue.m new file mode 100644 index 0000000000..b85dc58813 --- /dev/null +++ b/wrappers/matlab/frame_queue.m @@ -0,0 +1,43 @@ +% Wraps librealsense2 frame_queue class +classdef frame_queue < handle + properties (SetAccess = private, Hidden = true) + objectHandle; + end + methods + % Constructor + function this = frame_queue(capacity) + if nargin == 0 + this.objectHandle = realsense.librealsense_mex('rs2::frame_queue', 'new'); + else + validateattributes(capacity, {'numeric'}, {'scalar', 'positive', 'real', 'integer'}); + this.objectHandle = realsense.librealsense_mex('rs2::frame_queue', 'new', uint64(capacity)); + end + end + + % Destructor + function delete(this) + if (this.objectHandle ~= 0) + realsense.librealsense_mex('rs2::frame_queue', 'delete', this.objectHandle); + end + end + + % Functions + function frame = wait_for_frame(this, timeout_ms) + narginchk(1, 2); + if nargin == 1 + out = realsense.librealsense_mex('rs2::frame_queue', 'wait_for_frame', this.objectHandle); + else + if isduration(timeout_ms) + timeout_ms = milliseconds(timeout_ms); + end + validateattributes(timeout_ms, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'timeout_ms', 2); + out = realsense.librealsense_mex('rs2::frame_queue', 'wait_for_frame', this.objectHandle, double(timeout_ms)); + end + frame = realsense.frame(out); + end + % TODO: poll_for_frame [frame, video_frame, points, depth_frame, disparity_frame, motion_frame, pose_frame, frameset] + function cap = capacity(this) + cap = realsense.librealsense_mex('rs2::frame_queue', 'capacity', this.objectHandle); + 
end + end +end \ No newline at end of file diff --git a/wrappers/matlab/frameset.m b/wrappers/matlab/frameset.m new file mode 100644 index 0000000000..d3d8037056 --- /dev/null +++ b/wrappers/matlab/frameset.m @@ -0,0 +1,41 @@ +% Wraps librealsense2 frameset class +classdef frameset < realsense.frame + methods + % Constructor + function this = frameset(handle) + this = this@realsense.frame(handle); + end + + % Destructor (uses base class destructor) + + % Functions + function frame = first_or_default(this, s) + narginchk(2, 2); + validateattributes(s, {'realsense.stream', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.stream.count}, '', 's', 2); + ret = realsense.librealsense_mex('rs2::frameset', 'first_or_default', this.objectHandle, int64(s)); + frame = realsense.frame(ret); + end + function frame = first(this, s) + narginchk(2, 2); + validateattributes(s, {'realsense.stream', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.stream.count}, '', 's', 2); + ret = realsense.librealsense_mex('rs2::frameset', 'first', this.objectHandle, int64(s)); + frame = realsense.frame(ret); + end + function depth_frame = get_depth_frame(this) + ret = realsense.librealsense_mex('rs2::frameset', 'get_depth_frame', this.objectHandle); + depth_frame = realsense.depth_frame(ret); + end + function color_frame = get_color_frame(this) + ret = realsense.librealsense_mex('rs2::frameset', 'get_color_frame', this.objectHandle); + color_frame = realsense.video_frame(ret); + end + function infrared_frame = get_infrared_frame(this) + ret = realsense.librealsense_mex('rs2::frameset', 'get_infrared_frame', this.objectHandle); + infrared_frame = realsense.video_frame(ret); + end + function size = get_size(this) + size = realsense.librealsense_mex('rs2::frameset', 'get_size', this.objectHandle); + end + % TODO: iterator protocol? 
+ end +end \ No newline at end of file diff --git a/wrappers/matlab/hole_filling_filter.m b/wrappers/matlab/hole_filling_filter.m new file mode 100644 index 0000000000..a888994a74 --- /dev/null +++ b/wrappers/matlab/hole_filling_filter.m @@ -0,0 +1,14 @@ +% Wraps librealsense2 hole_filling_filter class +classdef hole_filling_filter < realsense.process_interface + methods + % Constructor + function this = hole_filling_filter() + out = realsense.librealsense_mex('rs2::hole_filling_filter', 'new'); + this = this@realsense.process_interface(out); + end + + % Destructor (uses base class destructor) + + % Functions + end +end \ No newline at end of file diff --git a/wrappers/matlab/librealsense_mex.cpp b/wrappers/matlab/librealsense_mex.cpp new file mode 100644 index 0000000000..b13d45e48a --- /dev/null +++ b/wrappers/matlab/librealsense_mex.cpp @@ -0,0 +1,1396 @@ +#include "MatlabParamParser.h" +#include "Factory.h" +#include "librealsense2/rs.hpp" + +#pragma comment(lib, "libmx.lib") +#pragma comment(lib, "libmex.lib") +#pragma comment(lib, "libmat.lib") +#pragma comment(lib, "realsense2.lib") + +Factory *factory; + +void make_factory(){ + factory = new Factory(); + + // rs_frame.hpp + { + ClassFactory stream_profile_factory("rs2::stream_profile"); + stream_profile_factory.record("new", 1, 0, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + outv[0] = MatlabParamParser::wrap(rs2::stream_profile()); + }); + stream_profile_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + stream_profile_factory.record("stream_index", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.stream_index()); + }); + stream_profile_factory.record("stream_type", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + 
outv[0] = MatlabParamParser::wrap(thiz.stream_type()); + }); + stream_profile_factory.record("format", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.format()); + }); + stream_profile_factory.record("fps", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.fps()); + }); + stream_profile_factory.record("unique_id", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.unique_id()); + }); + stream_profile_factory.record("clone", 1, 4, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto type = MatlabParamParser::parse(inv[1]); + auto index = MatlabParamParser::parse(inv[2]); + auto format = MatlabParamParser::parse(inv[3]); + outv[0] = MatlabParamParser::wrap(thiz.clone(type, index, format)); + }); + stream_profile_factory.record("operator==", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto rhs = MatlabParamParser::parse(inv[0]); + auto lhs = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(rhs == lhs); + }); + stream_profile_factory.record("is", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + // TODO: something more maintainable? 
+ auto thiz = MatlabParamParser::parse(inv[0]); + auto type = MatlabParamParser::parse(inv[1]); + if (type == "stream_profile") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "video_stream_profile") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "motion_stream_profile") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else { + mexWarnMsgTxt("rs2::stream_profile::is: invalid type parameter"); + outv[0] = MatlabParamParser::wrap(false); + } + }); + stream_profile_factory.record("as", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + // TODO: something more maintainable? + auto thiz = MatlabParamParser::parse(inv[0]); + auto type = MatlabParamParser::parse(inv[1]); + if (type == "stream_profile") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "video_stream_profile") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "motion_stream_profile") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else { + mexWarnMsgTxt("rs2::stream_profile::as: invalid type parameter"); + outv[0] = MatlabParamParser::wrap(uint64_t(0)); + } + }); + stream_profile_factory.record("stream_name", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.stream_name()); + }); + stream_profile_factory.record("is_default", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.is_default()); + }); + stream_profile_factory.record("operator bool", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(bool(thiz)); + }); + stream_profile_factory.record("get_extrinsics_to", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto to = 
MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.get_extrinsics_to(to)); + }); + // rs2::stream_profile::register_extrinsics_to [?] + stream_profile_factory.record("is_cloned", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.is_cloned()); + }); + factory->record(stream_profile_factory); + } + { + ClassFactory video_stream_profile_factory("rs2::video_stream_profile"); + // rs2::video_stream_profile::constructor(rs2::stream_profile) [?] + video_stream_profile_factory.record("width", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.width()); + }); + video_stream_profile_factory.record("height", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.height()); + }); + video_stream_profile_factory.record("get_intrinsics", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_intrinsics()); + }); + factory->record(video_stream_profile_factory); + } + { + ClassFactory motion_stream_profile_factory("rs2::motion_stream_profile"); + // rs2::motion_stream_profile::constructor(rs2::stream_profile) [?] + motion_stream_profile_factory.record("get_motion_intrinsics", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_motion_intrinsics()); + }); + factory->record(motion_stream_profile_factory); + } + { + ClassFactory frame_factory("rs2::frame"); + // rs2::frame::constructor() [?] 
+ frame_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + // rs2::frame::operator= [?/HOW] + // rs2::frame::copy constructor [?/HOW] + // rs2::frame::swap [?/HOW] + // rs2::frame::keep [TODO/HOW] + frame_factory.record("operator bool", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(bool(thiz)); + }); + frame_factory.record("get_timestamp", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_timestamp()); + }); + frame_factory.record("get_frame_timestamp_domain", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_frame_timestamp_domain()); + }); + frame_factory.record("get_frame_metadata", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto frame_metadata = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.get_frame_metadata(frame_metadata)); + }); + frame_factory.record("supports_frame_metadata", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto frame_metadata = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.supports_frame_metadata(frame_metadata)); + }); + frame_factory.record("get_frame_number", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_frame_number()); + }); + frame_factory.record("get_data", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + size_t n_bytes 
= 0; + if (auto vf = thiz.as()) { + n_bytes = vf.get_height() * vf.get_stride_in_bytes(); + } // TODO: support more frame types? + + switch (thiz.get_profile().format()) { + case RS2_FORMAT_RAW10: + // TODO: Do the bit hackery ourselves? + mexWarnMsgTxt("Raw10 data provided as unsigned byte array."); + case RS2_FORMAT_RGB8: case RS2_FORMAT_RGBA8: + case RS2_FORMAT_BGR8: case RS2_FORMAT_BGRA8: + case RS2_FORMAT_Y8: case RS2_FORMAT_RAW8: + if (n_bytes == 0) { + n_bytes = 1; + mexWarnMsgTxt("Can't detect frame dims, sending only first pixel"); + } + outv[0] = MatlabParamParser::wrap_array(reinterpret_cast(thiz.get_data()), n_bytes); + break; + case RS2_FORMAT_Z16: case RS2_FORMAT_DISPARITY16: + case RS2_FORMAT_Y16: case RS2_FORMAT_RAW16: + if (n_bytes == 0) { + n_bytes = 2; + mexWarnMsgTxt("Can't detect frame dims, sending only first pixel"); + } + outv[0] = MatlabParamParser::wrap_array(reinterpret_cast(thiz.get_data()), n_bytes / 2); + break; + case RS2_FORMAT_XYZ32F: case RS2_FORMAT_DISPARITY32: + case RS2_FORMAT_MOTION_XYZ32F: + if (n_bytes == 0) { + n_bytes = 4; + mexWarnMsgTxt("Can't detect frame dims, sending only first pixel"); + } + outv[0] = MatlabParamParser::wrap_array(reinterpret_cast(thiz.get_data()), n_bytes / 4); + break; + case RS2_FORMAT_UYVY: case RS2_FORMAT_YUYV: + if (n_bytes == 0) { + n_bytes = 4; + mexWarnMsgTxt("Can't detect frame dims, sending only first pixel"); + } + outv[0] = MatlabParamParser::wrap_array(reinterpret_cast(thiz.get_data()), n_bytes / 4); + break; + default: + mexWarnMsgTxt("This format isn't supported yet. 
Sending unsigned byte stream"); + if (n_bytes == 0) { + n_bytes = 1; + mexWarnMsgTxt("Can't detect frame dims, sending only first pixel"); + } + outv[0] = MatlabParamParser::wrap_array(reinterpret_cast(thiz.get_data()), n_bytes); + } + }); + frame_factory.record("get_profile", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_profile()); + }); + frame_factory.record("is", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + // TODO: something more maintainable? + auto thiz = MatlabParamParser::parse(inv[0]); + auto type = MatlabParamParser::parse(inv[1]); + if (type == "frame") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "video_frame") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "points") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "depth_frame") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "disparity_frame") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "motion_frame") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "pose_frame") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "frameset") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else { + mexWarnMsgTxt("rs2::frame::is: invalid type parameter"); + outv[0] = MatlabParamParser::wrap(false); + } + }); + frame_factory.record("as", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + // TODO: something more maintainable? 
+ auto thiz = MatlabParamParser::parse(inv[0]); + auto type = MatlabParamParser::parse(inv[1]); + if (type == "frame") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "video_frame") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "points") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "depth_frame") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "disparity_frame") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "motion_frame") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "pose_frame") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "frameset") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else { + mexWarnMsgTxt("rs2::frame::as: invalid type parameter"); + outv[0] = MatlabParamParser::wrap(uint64_t(0)); + } + }); + factory->record(frame_factory); + } + { + ClassFactory video_frame_factory("rs2::video_frame"); + // rs2::video_frame::constructor() [?] 
+ video_frame_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + video_frame_factory.record("get_width", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_width()); + }); + video_frame_factory.record("get_height", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_height()); + }); + video_frame_factory.record("get_stride_in_bytes", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_stride_in_bytes()); + }); + video_frame_factory.record("get_bits_per_pixel", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_bits_per_pixel()); + }); + video_frame_factory.record("get_bytes_per_pixel", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_bytes_per_pixel()); + }); + factory->record(video_frame_factory); + } + { + ClassFactory points_factory("rs2::points"); + // rs2::points::constructor() [?] + // rs2::points::constrcutor(rs2::frame) [?] + points_factory.record("get_vertices", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + // TODO: turn into matrix instead of column? 
+ outv[0] = MatlabParamParser::wrap_array(thiz.get_vertices(), thiz.size()); + }); + points_factory.record("export_to_ply", 0, 3, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto fname = MatlabParamParser::parse(inv[1]); + auto texture = MatlabParamParser::parse(inv[2]); + thiz.export_to_ply(fname, texture); + }); + points_factory.record("get_texture_coordinates", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + // TODO: turn into matrix instead of column? + outv[0] = MatlabParamParser::wrap_array(thiz.get_texture_coordinates(), thiz.size()); + }); + points_factory.record("size", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.size()); + }); + factory->record(points_factory); + } + { + ClassFactory depth_frame_factory("rs2::depth_frame"); + // rs2::depth_frame::constructor() [?] + depth_frame_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + depth_frame_factory.record("get_distance", 1, 3, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto x = MatlabParamParser::parse(inv[1]); + auto y = MatlabParamParser::parse(inv[2]); + outv[0] = MatlabParamParser::wrap(thiz.get_distance(x, y)); + }); + factory->record(depth_frame_factory); + } + { + ClassFactory disparity_frame_factory("rs2::disparity_frame"); + // rs2::disparity_frame::constructor(rs2::frame) [?] 
+ disparity_frame_factory.record("get_baseline", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_baseline()); + }); + factory->record(disparity_frame_factory); + } + { + ClassFactory motion_frame_factory("rs2::motion_frame"); + // rs2::motion_frame::constructor(rs2::frame) [?] + motion_frame_factory.record("get_motion_data", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_motion_data()); + }); + factory->record(motion_frame_factory); + } + { + ClassFactory pose_frame_factory("rs2::pose_frame"); + // rs2::pose_frame::constructor(rs2::frame) [?] + pose_frame_factory.record("get_pose_data", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_pose_data()); + }); + factory->record(pose_frame_factory); + } + { + ClassFactory frameset_factory("rs2::frameset"); + // rs2::frameset::constructor() [?] + // rs2::frameset::constructor(frame) [?] 
+ frameset_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + frameset_factory.record("first_or_default", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto s = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.first_or_default(s)); + }); + frameset_factory.record("first", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto s = MatlabParamParser::parse(inv[1]); + // try/catch moved to outer framework + outv[0] = MatlabParamParser::wrap(thiz.first(s)); + }); + frameset_factory.record("get_depth_frame", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + // try/catch moved to outer framework + outv[0] = MatlabParamParser::wrap(thiz.get_depth_frame()); + }); + frameset_factory.record("get_color_frame", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + // try/catch moved to outer framework + outv[0] = MatlabParamParser::wrap(thiz.get_color_frame()); + }); + frameset_factory.record("get_infrared_frame", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + // try/catch moved to outer framework + outv[0] = MatlabParamParser::wrap(thiz.get_infrared_frame()); + }); + frameset_factory.record("size", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.size()); + }); + // rs2::frameset::foreach [?/Callbacks] + // rs2::frameset::operator[] [?/HOW] + // rs2::frameset::iterator+begin+end [Pure Matlab?] + factory->record(frameset_factory); + } + + // rs_sensor.hpp + // rs2::notification [?] 
+ { + ClassFactory options_factory("rs2::options"); + options_factory.record("supports#rs2_option", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto option = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.supports(option)); + }); + options_factory.record("get_option_description", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto option = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.get_option_description(option)); + }); + options_factory.record("get_option_value_description", 1, 3, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto option = MatlabParamParser::parse(inv[1]); + auto val = MatlabParamParser::parse(inv[2]); + outv[0] = MatlabParamParser::wrap(thiz.get_option_value_description(option, val)); + }); + options_factory.record("get_option", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto option = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.get_option(option)); + }); + options_factory.record("get_option_range", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto option = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.get_option_range(option)); + }); + options_factory.record("set_option", 0, 3, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto option = MatlabParamParser::parse(inv[1]); + auto val = MatlabParamParser::parse(inv[2]); + thiz.set_option(option, val); + }); + options_factory.record("is_option_read_only", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = 
MatlabParamParser::parse(inv[0]); + auto option = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.is_option_read_only(option)); + }); + // rs2::options::operator= [?/HOW] + options_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + factory->record(options_factory); + } + { + ClassFactory sensor_factory("rs2::sensor"); + // rs2::sensor::constructor() [?] + sensor_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + sensor_factory.record("open#stream_profile", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto profile = MatlabParamParser::parse(inv[1]); + thiz.open(profile); + }); + sensor_factory.record("supports#rs2_camera_info", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto info = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.supports(info)); + }); + sensor_factory.record("get_info", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto info = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.get_info(info)); + }); + sensor_factory.record("open#vec_stream_profile", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto profiles = MatlabParamParser::parse>(inv[1]); + thiz.open(profiles); + }); + sensor_factory.record("close", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.close(); + }); + // rs2::sensor::start(*) [?/Which/Callbacks] + sensor_factory.record("start#frame_queue", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* 
inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto queue = MatlabParamParser::parse(inv[1]); + thiz.start(queue); + }); + sensor_factory.record("stop", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.stop(); + }); + sensor_factory.record("get_stream_profiles", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + MatlabParamParser::wrap(thiz.get_stream_profiles()); + }); + // rs2::sensor::operator= [?] + sensor_factory.record("operator bool", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(bool(thiz)); + }); + sensor_factory.record("is", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + // TODO: something more maintainable? + auto thiz = MatlabParamParser::parse(inv[0]); + auto type = MatlabParamParser::parse(inv[1]); + if (type == "sensor") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "roi_sensor") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "depth_sensor") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "depth_stereo_sensor") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else { + mexWarnMsgTxt("rs2::sensor::is: invalid type parameter"); + outv[0] = MatlabParamParser::wrap(false); + } + }); + sensor_factory.record("as", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + // TODO: something more maintainable? 
+ auto thiz = MatlabParamParser::parse(inv[0]); + auto type = MatlabParamParser::parse(inv[1]); + if (type == "sensor") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "roi_sensor") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "depth_sensor") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "depth_stereo_sensor") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else { + mexWarnMsgTxt("rs2::sensor::as: invalid type parameter"); + outv[0] = MatlabParamParser::wrap(uint64_t(0)); + } + }); + sensor_factory.record("operator==", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto rhs = MatlabParamParser::parse(inv[0]); + auto lhs = MatlabParamParser::parse(inv[1]); + MatlabParamParser::wrap(rhs == lhs); + }); + factory->record(sensor_factory); + } + { + ClassFactory roi_sensor_factory("rs2::roi_sensor"); + // rs2::roi_sensor::constructor(rs2::sensor) [?] + roi_sensor_factory.record("set_region_of_interest", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto roi = MatlabParamParser::parse(inv[1]); + thiz.set_region_of_interest(roi); + }); + roi_sensor_factory.record("get_region_of_interest", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_region_of_interest()); + }); + // rs2::roi_sensor::operator bool [?] + factory->record(roi_sensor_factory); + } + { + ClassFactory depth_sensor_factory("rs2::depth_sensor"); + // rs2::depth_sensor::constructor(rs2::sensor) [?] + depth_sensor_factory.record("get_depth_scale", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_depth_scale()); + }); + // rs2::depth_sensor::operator bool [?] 
+ factory->record(depth_sensor_factory); + } + { + ClassFactory depth_stereo_sensor_factory("rs2::depth_stereo_sensor"); + // rs2::depth_stereo_sensor::constructor(rs2::sensor) [?] + depth_stereo_sensor_factory.record("get_stereo_baseline", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_stereo_baseline()); + }); + factory->record(depth_stereo_sensor_factory); + } + + // rs_device.hpp + { + ClassFactory device_factory("rs2::device"); + // rs2::device::constructor() [?] + // extra helper function for constructing device from device_list + device_factory.record("init", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto list = MatlabParamParser::parse(inv[0]); + auto idx = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(list[idx]); + MatlabParamParser::destroy(inv[0]); + }); + // destructor in case device was never initialized + device_factory.record("delete#uninit", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + // destructor in case device was initialized + device_factory.record("delete#init", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + device_factory.record("query_sensors", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.query_sensors()); + }); + device_factory.record("first", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + // TODO: better, more maintainable implementation? 
+ auto thiz = MatlabParamParser::parse(inv[0]); + auto type = MatlabParamParser::parse(inv[1]); + try { + if (type == "sensor") + outv[0] = MatlabParamParser::wrap(thiz.first()); + else if (type == "roi_sensor") + outv[0] = MatlabParamParser::wrap(thiz.first()); + else if (type == "depth_sensor") + outv[0] = MatlabParamParser::wrap(thiz.first()); + else if (type == "depth_stereo_sensor") + outv[0] = MatlabParamParser::wrap(thiz.first()); + else mexErrMsgTxt("rs2::device::first: Could not find requested sensor type!"); + } + catch (rs2::error) { + mexErrMsgTxt("rs2::device::first: Could not find requested sensor type!"); + } + }); + device_factory.record("supports", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto info = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.supports(info)); + }); + device_factory.record("get_info", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto info = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.get_info(info)); + }); + device_factory.record("hardware_reset", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.hardware_reset(); + }); + // rs2::device::operator= [?] + device_factory.record("operator bool", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(bool(thiz)); + }); + device_factory.record("is", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + // TODO: something more maintainable? 
+ auto thiz = MatlabParamParser::parse(inv[0]); + auto type = MatlabParamParser::parse(inv[1]); + if (type == "device") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "debug_protocol") { + mexWarnMsgTxt("rs2::device::is: Debug Protocol not supported in MATLAB"); + outv[0] = MatlabParamParser::wrap(false); + } + else if (type == "advanced_mode") { + mexWarnMsgTxt("rs2::device::is: Advanced Mode not supported in MATLAB"); + outv[0] = MatlabParamParser::wrap(false); + } + else if (type == "recorder") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else if (type == "playback") + outv[0] = MatlabParamParser::wrap(thiz.is()); + else { + mexWarnMsgTxt("rs2::device::is: invalid type parameter"); + outv[0] = MatlabParamParser::wrap(false); + } + }); + device_factory.record("as", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + // TODO: something more maintainable? + auto thiz = MatlabParamParser::parse(inv[0]); + auto type = MatlabParamParser::parse(inv[1]); + if (type == "device") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "debug_protocol") { + mexErrMsgTxt("rs2::device::as: Debug Protocol not supported in MATLAB"); +// outv[0] = MatlabParamParser::wrap(thiz.as()); + } + else if (type == "advanced_mode") { + mexErrMsgTxt("rs2::device::as: Advanced Mode not supported in MATLAB"); +// outv[0] = MatlabParamParser::wrap(false); + } + else if (type == "recorder") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else if (type == "playback") + outv[0] = MatlabParamParser::wrap(thiz.as()); + else { + mexWarnMsgTxt("rs2::device::as: invalid type parameter"); + outv[0] = MatlabParamParser::wrap(false); + } + }); + factory->record(device_factory); + } + // rs2::debug_protocol [?] + // rs2::device_list [Pure Matlab] + + // rs2_record_playback.hpp + { + ClassFactory playback_factory("rs2::playback"); + // rs2::playback::constructor(rs2::device) [?] 
+ playback_factory.record("pause", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.pause(); + }); + playback_factory.record("resume", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.resume(); + }); + playback_factory.record("file_name", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.file_name()); + }); + playback_factory.record("get_position", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_position()); + }); + playback_factory.record("get_duration", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_duration()); + }); + playback_factory.record("seek", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto time = MatlabParamParser::parse(inv[1]); + thiz.seek(time); + }); + playback_factory.record("is_real_time", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.is_real_time()); + }); + playback_factory.record("set_real_time", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto real_time = MatlabParamParser::parse(inv[1]); + thiz.set_real_time(real_time); + }); + playback_factory.record("set_playback_speed", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto speed = MatlabParamParser::parse(inv[1]); + thiz.set_playback_speed(speed); + }); + // 
rs2::playback::set_status_changed_callback() [?/Callbacks] + playback_factory.record("current_status", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.current_status()); + }); + playback_factory.record("stop", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.stop(); + }); + factory->record(playback_factory); + } + { + ClassFactory recorder_factory("rs2::recorder"); + // rs2::recorder::constructor(rs2::device) [?] + recorder_factory.record("new#string_device", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto file = MatlabParamParser::parse(inv[0]); + auto device = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(rs2::recorder(file, device)); + }); + recorder_factory.record("pause", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.pause(); + }); + recorder_factory.record("resume", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.resume(); + }); + recorder_factory.record("filename", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.filename()); + }); + factory->record(recorder_factory); + } + + // rs2_processing.hpp + // rs2::processing_block [?] 
+ { + ClassFactory frame_queue_factory("rs2::frame_queue"); + frame_queue_factory.record("new", 1, 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + if (inc == 0) { + outv[0] = MatlabParamParser::wrap(rs2::frame_queue()); + } + else if (inc == 1) { + auto capacity = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(rs2::frame_queue(capacity)); + } + }); + // rs2::frame_queue::enqueue(frame) [?] + frame_queue_factory.record("wait_for_frame", 1, 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + if (inc == 0) { + outv[0] = MatlabParamParser::wrap(thiz.wait_for_frame()); + } + else if (inc == 1) { + auto timeout_ms = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.wait_for_frame(timeout_ms)); + } + }); + // rs2::frame_queue::poll_for_frame(T*) [TODO] [T = {frame, video_frame, points, depth_frame, disparity_frame, motion_frame, pose_frame, frameset}] + frame_queue_factory.record("capacity", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.capacity()); + }); + factory->record(frame_queue_factory); + } + { + ClassFactory pointcloud_factory("rs2::pointcloud"); + pointcloud_factory.record("new", 1, 0, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + outv[0] = MatlabParamParser::wrap(rs2::pointcloud()); + }); + pointcloud_factory.record("calculate", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto depth = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.calculate(depth)); + }); + pointcloud_factory.record("map_to", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto mapped = MatlabParamParser::parse(inv[1]); + thiz.map_to(mapped); + }); 
+ factory->record(pointcloud_factory); + } + { + ClassFactory syncer_factory("rs2::syncer"); + syncer_factory.record("new", 1, 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + if (inc == 0) { + outv[0] = MatlabParamParser::wrap(rs2::syncer()); + } + else if (inc == 1) { + auto queue_size = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(rs2::syncer(queue_size)); + } + }); + syncer_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + syncer_factory.record("wait_for_frames", 1, 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + if (inc == 0) { + outv[0] = MatlabParamParser::wrap(thiz.wait_for_frames()); + } + else if (inc == 1) { + auto timeout_ms = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.wait_for_frames(timeout_ms)); + } + }); + // rs2::syncer::poll_for_frames(frameset*) [?] 
+ factory->record(syncer_factory); + } + { + ClassFactory align_factory("rs2::align"); + align_factory.record("new", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto align_to = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(rs2::align(align_to)); + }); + align_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + align_factory.record("process", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto frame = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.process(frame)); + }); + factory->record(align_factory); + } + { + ClassFactory colorizer_factory("rs2::colorizer"); + colorizer_factory.record("new", 1, 0, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + outv[0] = MatlabParamParser::wrap(rs2::colorizer()); + }); + colorizer_factory.record("colorize", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto depth = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.colorize(depth)); + }); + factory->record(colorizer_factory); + } + { + ClassFactory process_interface_factory("rs2::process_interface"); + process_interface_factory.record("process", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + // TODO: will this work? 
+ auto *thiz = MatlabParamParser::parse(inv[0]); + auto frame = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz->process(frame)); + }); + factory->record(process_interface_factory); + } + { + ClassFactory decimation_filter_factory("rs2::decimation_filter"); + decimation_filter_factory.record("new", 1, 0, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + outv[0] = MatlabParamParser::wrap(rs2::decimation_filter()); + }); + factory->record(decimation_filter_factory); + } + { + ClassFactory temporal_filter_factory("rs2::temporal_filter"); + temporal_filter_factory.record("new", 1, 0, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + outv[0] = MatlabParamParser::wrap(rs2::temporal_filter()); + }); + factory->record(temporal_filter_factory); + } + { + ClassFactory spatial_filter_factory("rs2::spatial_filter"); + spatial_filter_factory.record("new", 1, 0, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + outv[0] = MatlabParamParser::wrap(rs2::spatial_filter()); + }); + factory->record(spatial_filter_factory); + } + { + ClassFactory disparity_transform_factory("rs2::disparity_transform"); + disparity_transform_factory.record("new", 1, 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + if (inc == 0) { + outv[0] = MatlabParamParser::wrap(rs2::disparity_transform()); + } + else if (inc == 1) { + auto transform_to_disparity = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(rs2::disparity_transform(transform_to_disparity)); + } + }); + factory->record(disparity_transform_factory); + } + { + ClassFactory hole_filling_filter_factory("rs2::hole_filling_filter"); + hole_filling_filter_factory.record("new", 1, 0, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + outv[0] = MatlabParamParser::wrap(rs2::hole_filling_filter()); + }); + factory->record(hole_filling_filter_factory); + } + + // rs_context.hpp + // rs2::event_information [?] 
+ { + ClassFactory context_factory("rs2::context"); + // This lambda feels like it should be possible to generate automatically with templates + context_factory.record("new", 1, 0, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + outv[0] = MatlabParamParser::wrap(rs2::context()); + }); + context_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + context_factory.record("query_devices", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.query_devices()); + }); + context_factory.record("query_all_sensors", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.query_all_sensors()); + }); + context_factory.record("get_sensor_parent", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto sensor = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.get_sensor_parent(sensor)); + }); + context_factory.record("load_device", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto file = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.load_device(file)); + }); + context_factory.record("unload_device", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto file = MatlabParamParser::parse(inv[1]); + thiz.unload_device(file); + }); + factory->record(context_factory); + } + { + ClassFactory device_hub_factory("rs2::device_hub"); + device_hub_factory.record("new", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto ctx = MatlabParamParser::parse(inv[0]); + outv[0] = 
MatlabParamParser::wrap(rs2::device_hub(ctx)); + }); + device_hub_factory.record("delete", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + device_hub_factory.record("wait_for_device", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.wait_for_device()); + }); + device_hub_factory.record("is_connected", 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto dev = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.is_connected(dev)); + }); + factory->record(device_hub_factory); + } + + // rs_pipeline.hpp + { + ClassFactory pipeline_profile_factory("rs2::pipeline_profile"); + // rs2::pipeline_profile::constructor() [?] + pipeline_profile_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + pipeline_profile_factory.record("get_streams", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_streams()); + }); + pipeline_profile_factory.record("get_stream", 1, 2, 3, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto stream_type = MatlabParamParser::parse(inv[1]); + if (inc == 2) + outv[0] = MatlabParamParser::wrap(thiz.get_stream(stream_type)); + else { + auto stream_index = MatlabParamParser::parse(inv[2]); + outv[0] = MatlabParamParser::wrap(thiz.get_stream(stream_type, stream_index)); + } + }); + pipeline_profile_factory.record("get_device", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_device()); + }); + // 
rs2::pipeline_profile::bool() [?] + factory->record(pipeline_profile_factory); + } + { + ClassFactory config_factory("rs2::config"); + config_factory.record("new", 1, 0, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + outv[0] = MatlabParamParser::wrap(rs2::config()); + }); + config_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + config_factory.record("enable_stream#full", 0, 5, 7, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto stream_type = MatlabParamParser::parse(inv[1]); + auto stream_index = MatlabParamParser::parse(inv[2]); + auto width = MatlabParamParser::parse(inv[3]); + auto height = MatlabParamParser::parse(inv[4]); + if (inc == 5) { + thiz.enable_stream(stream_type, stream_index, width, height); + } else if (inc == 6) { + auto format = MatlabParamParser::parse(inv[5]); + thiz.enable_stream(stream_type, stream_index, width, height, format); + } else if (inc == 7) { + auto format = MatlabParamParser::parse(inv[5]); + auto framerate = MatlabParamParser::parse(inv[6]); + thiz.enable_stream(stream_type, stream_index, width, height, format, framerate); + } + }); + config_factory.record("enable_stream#stream", 0, 2, 3, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto stream_type = MatlabParamParser::parse(inv[1]); + if (inc == 2) { + thiz.enable_stream(stream_type); + } else if (inc == 3){ + auto stream_index = MatlabParamParser::parse(inv[2]); + thiz.enable_stream(stream_type, stream_index); + } + }); + config_factory.record("enable_stream#size", 0, 4, 6, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto stream_type = MatlabParamParser::parse(inv[1]); + auto width = MatlabParamParser::parse(inv[2]); + auto height = 
MatlabParamParser::parse(inv[3]); + if (inc == 4) { + thiz.enable_stream(stream_type, width, height); + } else if (inc == 5) { + auto format = MatlabParamParser::parse(inv[4]); + thiz.enable_stream(stream_type, width, height, format); + } else if (inc == 6) { + auto format = MatlabParamParser::parse(inv[4]); + auto framerate = MatlabParamParser::parse(inv[5]); + thiz.enable_stream(stream_type, width, height, format, framerate); + } + }); + config_factory.record("enable_stream#format", 0, 3, 4, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto stream_type = MatlabParamParser::parse(inv[1]); + auto format = MatlabParamParser::parse(inv[2]); + if (inc == 3) { + thiz.enable_stream(stream_type, format); + } else if (inc == 4) { + auto framerate = MatlabParamParser::parse(inv[3]); + thiz.enable_stream(stream_type, format, framerate); + } + }); + config_factory.record("enable_stream#extended", 0, 4, 5, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto stream_type = MatlabParamParser::parse(inv[1]); + auto stream_index = MatlabParamParser::parse(inv[2]); + auto format = MatlabParamParser::parse(inv[3]); + if (inc == 4) { + thiz.enable_stream(stream_type, stream_index, format); + } else if (inc == 5) { + auto framerate = MatlabParamParser::parse(inv[4]); + thiz.enable_stream(stream_type, stream_index, format, framerate); + } + }); + config_factory.record("enable_all_streams", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.enable_all_streams(); + }); + config_factory.record("enable_device", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto serial = MatlabParamParser::parse(inv[1]); + thiz.enable_device(serial); + }); + config_factory.record("enable_device_from_file", 0, 2, 3, [](int 
outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto file_name = MatlabParamParser::parse(inv[1]); + if (inc == 2) + thiz.enable_device_from_file(file_name); + else if (inc == 3) { + auto repeat_playback = MatlabParamParser::parse(inv[2]); + thiz.enable_device_from_file(file_name, repeat_playback); + } + }); + config_factory.record("enable_record_to_file", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto file_name = MatlabParamParser::parse(inv[1]); + thiz.enable_record_to_file(file_name); + }); + config_factory.record("disable_stream", 0, 2, 3, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + auto stream = MatlabParamParser::parse(inv[1]); + if (inc == 2) + thiz.disable_stream(stream); + else if (inc == 3) { + auto index = MatlabParamParser::parse(inv[2]); + thiz.disable_stream(stream, index); + } + }); + config_factory.record("disable_all_streams", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.disable_all_streams(); + }); + factory->record(config_factory); + } + { + ClassFactory pipeline_factory("rs2::pipeline"); + pipeline_factory.record("new", 1, 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + if (inc == 0) { + outv[0] = MatlabParamParser::wrap(rs2::pipeline()); + } else if (inc == 1) { + auto ctx = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(rs2::pipeline(ctx)); + } + }); + pipeline_factory.record("delete", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + MatlabParamParser::destroy(inv[0]); + }); + pipeline_factory.record("start", 1, 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + if (inc == 1) + outv[0] = 
MatlabParamParser::wrap(thiz.start()); + else if (inc == 2) { + auto config = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.start(config)); + } + }); + pipeline_factory.record("stop", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + thiz.stop(); + }); + pipeline_factory.record("wait_for_frames", 1, 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + if (inc == 1) { + outv[0] = MatlabParamParser::wrap(thiz.wait_for_frames()); + } else if (inc == 2) { + auto timeout_ms = MatlabParamParser::parse(inv[1]); + outv[0] = MatlabParamParser::wrap(thiz.wait_for_frames(timeout_ms)); + } + }); + // rs2::pipeline::poll_for_frames [TODO/HOW] [multi-output?] + pipeline_factory.record("get_active_profile", 1, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) + { + auto thiz = MatlabParamParser::parse(inv[0]); + outv[0] = MatlabParamParser::wrap(thiz.get_active_profile()); + }); + factory->record(pipeline_factory); + } + + // rs.hpp + { + ClassFactory free_funcs_factory("rs2"); + free_funcs_factory.record("log_to_console", 0, 1, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) { + auto min_severity = MatlabParamParser::parse(inv[0]); + rs2::log_to_console(min_severity); + }); + free_funcs_factory.record("log_to_file", 0, 1, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) { + auto min_severity = MatlabParamParser::parse(inv[0]); + if (inc == 1) + rs2::log_to_file(min_severity); + else if (inc == 2) { + auto file_path = MatlabParamParser::parse(inv[1]); + rs2::log_to_file(min_severity, file_path); + } + }); + free_funcs_factory.record("log", 0, 2, [](int outc, mxArray* outv[], int inc, const mxArray* inv[]) { + auto severity = MatlabParamParser::parse(inv[0]); + auto message = MatlabParamParser::parse(inv[1]); + rs2::log(severity, message); + }); + factory->record(free_funcs_factory); + } + + mexAtExit([]() { 
delete factory; }); +} + +void mexFunction(int nOutParams, mxArray *outParams[], int nInParams, const mxArray *inParams[]) +{ + // does this need to be made threadsafe? also maybe better idea than global object? + if (!factory) make_factory(); + + if (nInParams < 2) { + mexErrMsgTxt("At least class and command name are needed."); + return; + } + + auto cname = MatlabParamParser::parse(inParams[0]); + auto fname = MatlabParamParser::parse(inParams[1]); + + auto f_data = factory->get(cname, fname); + if (!f_data.f) { + mexErrMsgTxt("Unknown Command received."); + return; + } + + if (f_data.out != nOutParams) { + std::string errmsg = cname + "::" + fname.substr(0, fname.find("#", 0)) + ": Wrong number of outputs"; + mexErrMsgTxt(errmsg.c_str()); + } + + if (f_data.in_min > nInParams - 2 || f_data.in_max < nInParams - 2) { + std::string errmsg = cname + "::" + fname.substr(0, fname.find("#", 0)) + ": Wrong number of inputs"; + mexErrMsgTxt(errmsg.c_str()); + } + + try { + f_data.f(nOutParams, outParams, nInParams - 2, inParams + 2); // "eat" the two function specifiers + } catch (std::exception &e) { + mexErrMsgTxt(e.what()); + } catch (...) 
{ + mexErrMsgTxt("An unknown error occured"); + } +} \ No newline at end of file diff --git a/wrappers/matlab/librealsense_mex.def b/wrappers/matlab/librealsense_mex.def new file mode 100644 index 0000000000..080c522134 --- /dev/null +++ b/wrappers/matlab/librealsense_mex.def @@ -0,0 +1,2 @@ +LIBRARY MATLABWRAPPER +EXPORTS mexFunction \ No newline at end of file diff --git a/wrappers/matlab/librealsense_mex.sln b/wrappers/matlab/librealsense_mex.sln new file mode 100644 index 0000000000..b1397b9f26 --- /dev/null +++ b/wrappers/matlab/librealsense_mex.sln @@ -0,0 +1,28 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 14 +VisualStudioVersion = 14.0.25420.1 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "librealsense_mex", "librealsense_mex.vcxproj", "{C5DA93FC-DBBC-49FC-B138-F6B0476EC92C}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {C5DA93FC-DBBC-49FC-B138-F6B0476EC92C}.Debug|x64.ActiveCfg = Debug|x64 + {C5DA93FC-DBBC-49FC-B138-F6B0476EC92C}.Debug|x64.Build.0 = Debug|x64 + {C5DA93FC-DBBC-49FC-B138-F6B0476EC92C}.Debug|x86.ActiveCfg = Debug|Win32 + {C5DA93FC-DBBC-49FC-B138-F6B0476EC92C}.Debug|x86.Build.0 = Debug|Win32 + {C5DA93FC-DBBC-49FC-B138-F6B0476EC92C}.Release|x64.ActiveCfg = Release|x64 + {C5DA93FC-DBBC-49FC-B138-F6B0476EC92C}.Release|x64.Build.0 = Release|x64 + {C5DA93FC-DBBC-49FC-B138-F6B0476EC92C}.Release|x86.ActiveCfg = Release|Win32 + {C5DA93FC-DBBC-49FC-B138-F6B0476EC92C}.Release|x86.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/wrappers/matlab/librealsense_mex.vcxproj b/wrappers/matlab/librealsense_mex.vcxproj new file mode 100644 index 
0000000000..d5dad74245 --- /dev/null +++ b/wrappers/matlab/librealsense_mex.vcxproj @@ -0,0 +1,222 @@ + + + + + Debug + Win32 + + + Release + Win32 + + + Debug + x64 + + + Release + x64 + + + + 15.0 + {C5DA93FC-DBBC-49FC-B138-F6B0476EC92C} + librealsensematlab + + + + DynamicLibrary + true + v140 + MultiByte + + + DynamicLibrary + false + v141 + true + MultiByte + + + DynamicLibrary + true + MultiByte + v140 + + + DynamicLibrary + false + true + MultiByte + v140 + + + + + + + + + + + + + + + + + + + + + .mexw64 + + + .mexw64 + + + .mexw64 + $(SolutionDir)$(Configuration)\+realsense\ + + + .mexw64 + $(SolutionDir)$(Platform)\$(Configuration)\+realsense\ + + + + Level3 + Disabled + true + true + MATLAB_MEX_FILE;%(PreprocessorDefinitions) + $(ProjectDir)\..\..\include\;C:\MATLAB\R2017a\extern\include\;C:\MATLAB\R2017b\extern\include\;%(AdditionalIncludeDirectories) + MultiThreadedDebug + + + C:\MATLAB\R2017a\extern\lib\win64\microsoft;C:\MATLAB\R2017b\extern\lib\win64\microsoft;$(ProjectDir)..\..\build\third-party\realsense-file\$(Configuration);$(ProjectDir)..\..\build\$(Configuration);$(ProjectDir);%(AdditionalLibraryDirectories) + libmx.lib;libmex.lib;libmat.lib;realsense2.lib;realsense-file.lib;%(AdditionalDependencies) + librealsense_mex.def + + + xcopy /y $(ProjectDir)*.m $(OutDir) + + + Copy Matlab files to output dir + + + + + Level3 + Disabled + true + true + $(SolutionDir)\..\..\include\;C:\MATLAB\R2017b\extern\include\;%(AdditionalIncludeDirectories) + + + libmx.lib;libmex.lib;libmat.lib;realsense.lib;%(AdditionalDependencies) + + + librealsense_mex.def + + + + + Level3 + MaxSpeed + true + true + true + true + $(SolutionDir)\..\..\include\;C:\MATLAB\R2017b\extern\include\;%(AdditionalIncludeDirectories) + + + true + true + + + + + Level3 + MaxSpeed + true + true + true + true + MATLAB_MEX_FILE;%(PreprocessorDefinitions) + $(ProjectDir)\..\..\include\;C:\MATLAB\R2017a\extern\include\;C:\MATLAB\R2017b\extern\include\;%(AdditionalIncludeDirectories) + 
MultiThreaded + + + true + true + C:\MATLAB\R2017a\extern\lib\win64\microsoft;C:\MATLAB\R2017b\extern\lib\win64\microsoft;$(ProjectDir)..\..\build\third-party\realsense-file\$(Configuration);$(ProjectDir)..\..\build\$(Configuration);$(ProjectDir);%(AdditionalLibraryDirectories) + libmx.lib;libmex.lib;libmat.lib;realsense2.lib;realsense-file.lib;%(AdditionalDependencies) + librealsense_mex.def + + + xcopy /y $(ProjectDir)*.m $(OutDir) + + + Copy Matlab files to output dir + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/wrappers/matlab/librealsense_mex.vcxproj.filters b/wrappers/matlab/librealsense_mex.vcxproj.filters new file mode 100644 index 0000000000..0ec340fc33 --- /dev/null +++ b/wrappers/matlab/librealsense_mex.vcxproj.filters @@ -0,0 +1,185 @@ + + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hh;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + {e4083e9e-7e54-4832-bb0a-fcebcd093156} + + + {4cc58058-58a5-4048-aa49-15694d6af042} + + + {553cfcb3-67a4-4921-b078-af6a45f177dd} + + + {3cfdaa73-0cb8-41b6-aac1-b3d21d752105} + + + + + Source Files + + + Source Files + + + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + + + Matlab Files\Enums + + + Matlab Files\Enums + + + Matlab Files\Enums + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Enums + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + 
Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Enums + + + Matlab Files\Enums + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Enums + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Classes + + + Matlab Files\Examples + + + Matlab Files\Examples + + + \ No newline at end of file diff --git a/wrappers/matlab/librealsense_mex.vcxproj.user b/wrappers/matlab/librealsense_mex.vcxproj.user new file mode 100644 index 0000000000..be25078707 --- /dev/null +++ b/wrappers/matlab/librealsense_mex.vcxproj.user @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/wrappers/matlab/motion_frame.m b/wrappers/matlab/motion_frame.m new file mode 100644 index 0000000000..57ab5bbaf0 --- /dev/null +++ b/wrappers/matlab/motion_frame.m @@ -0,0 +1,16 @@ +% Wraps librealsense2 motion_frame class +classdef motion_frame < realsense.frame + methods + % Constructor + function this = motion_frame(handle) + this = this@realsense.frame(handle); + end + + % Destructor (uses base class destructor) + + % Functions + function motion_data = get_motion_data(this) + motion_data = realsense.librealsense_mex('rs2::motion_frame', 'get_motion_data', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/motion_stream_profile.m b/wrappers/matlab/motion_stream_profile.m new file mode 100644 index 0000000000..cb16e11b08 --- /dev/null +++ b/wrappers/matlab/motion_stream_profile.m @@ -0,0 +1,16 @@ +% Wraps librealsense2 motion_stream_profile class +classdef motion_stream_profile < realsense.stream_profile + methods + % Constructor + function this = 
motion_stream_profile(ownHandle, handle) + this = this@realsense.stream_profile(ownHandle, handle); + end + + % Destructor (uses base class destructor) + + % Functions + function motion_intrinsics = get_motion_intrinsics(this) + motion_intrinsics = realsense.librealsense_mex('rs2::motion_stream_profile', 'get_motion_intrinsics', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/option.m b/wrappers/matlab/option.m new file mode 100644 index 0000000000..ed91a14c6f --- /dev/null +++ b/wrappers/matlab/option.m @@ -0,0 +1,48 @@ +classdef option < int64 + enumeration + backlight_compensation ( 0) + brightness ( 1) + contrast ( 2) + exposure ( 3) + gain ( 4) + gamma ( 5) + hue ( 6) + saturation ( 7) + sharpness ( 8) + white_balance ( 9) + enable_auto_exposure (10) + enable_auto_white_balance (11) + visual_preset (12) + laser_power (13) + accuracy (14) + motion_range (15) + filter_option (16) + confidence_threshold (17) + emitter_enabled (18) + frames_queue_size (19) + total_frame_drops (20) + auto_exposure_mode (21) + power_line_frequency (22) + asic_temperature (23) + error_polling_enabled (24) + projector_temperature (25) + output_trigger_enabled (26) + motion_module_temperature (27) + depth_units (28) + enable_motion_correction (29) + auto_exposure_priority (30) + color_scheme (31) + histogram_equalization_enabled (32) + min_distance (33) + max_distance (34) + texture_source (35) + filter_magnitude (36) + filter_smooth_alpha (37) + filter_smooth_delta (38) + holes_fill (39) + stereo_baseline (40) + auto_exposure_converge_step (41) + inter_cam_sync_mode (42) + count (43) + end +end \ No newline at end of file diff --git a/wrappers/matlab/options.m b/wrappers/matlab/options.m new file mode 100644 index 0000000000..f53867aad0 --- /dev/null +++ b/wrappers/matlab/options.m @@ -0,0 +1,59 @@ +% Wraps librealsense2 options class +classdef options < handle + properties (SetAccess = protected, Hidden = true) + objectHandle; + end + methods + 
% Constructor + function this = options(handle) + narginchk(1, 1); + validateattributes(handle, {'uint64'}, {'scalar'}); + this.objectHandle = handle; + end + % Destructor + function delete(this) + if this.objectHandle ~= 0 + realsense.librealsense_mex('rs2::options', 'delete', this.objectHandle); + end + end + + % Functions + function value = supports_option(this, option) + narginchk(2, 2); + validateattributes(option, {'realsense.option', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.option.count}, '', 'option', 2); + value = realsense.librealsense_mex('rs2::options', 'supports#rs2_option', this.objectHandle, int64(option)); + end + function option_description = get_option_description(this, option) + narginchk(2, 2); + validateattributes(option, {'realsense.option', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.option.count}, '', 'option', 2); + option_description = realsense.librealsense_mex('rs2::options', 'get_option_description', this.objectHandle, int64(option)); + end + function option_value_description = get_option_value_description(this, option, val) + narginchk(3, 3); + validateattributes(option, {'realsense.option', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.option.count}, '', 'option', 2); + validateattributes(val, {'numeric'}, {'scalar', 'real'}, '', 'val', 3); + option_value_description = realsense.librealsense_mex('rs2::options', 'get_option_value_description', this.objectHandle, int64(option), double(val)); + end + function value = get_option(this, option) + narginchk(2, 2); + validateattributes(option, {'realsense.option', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.option.count}, '', 'option', 2); + value = realsense.librealsense_mex('rs2::options', 'get_option', this.objectHandle, int64(option)); + end + function option_range = get_option_range(this, option) + narginchk(2, 2); + validateattributes(option, {'realsense.option', 
'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.option.count}, '', 'option', 2); + option_range = realsense.librealsense_mex('rs2::options', 'get_option_range', this.objectHandle, int64(option)); + end + function set_option(this, option, val) + narginchk(3, 3); + validateattributes(option, {'realsense.option', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.option.count}, '', 'option', 2); + validateattributes(val, {'numeric'}, {'scalar', 'real'}, '', 'val', 3); + realsense.librealsense_mex('rs2::options', 'set_option', this.objectHandle, int64(option), double(val)); + end + function value = is_option_read_only(this, option) + narginchk(2, 2); + validateattributes(option, {'realsense.option', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.option.count}, '', 'option', 2); + value = realsense.librealsense_mex('rs2::options', 'is_option_read_only', this.objectHandle, int64(option)); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/pipeline.m b/wrappers/matlab/pipeline.m new file mode 100644 index 0000000000..f13f9c6928 --- /dev/null +++ b/wrappers/matlab/pipeline.m @@ -0,0 +1,56 @@ +% Wraps librealsense2 pipeline class +classdef pipeline < handle + properties (SetAccess = protected, Hidden = true) + objectHandle; + end + methods + % Constructor + function this = pipeline(ctx) + if nargin == 0 + this.objectHandle = realsense.librealsense_mex('rs2::pipeline', 'new'); + else + validateattributes(ctx, {'realsense.context'}, {'scalar'}); + this.objectHandle = realsense.librealsense_mex('rs2::pipeline', 'new', ctx.objectHandle); + end + end + % Destructor + function delete(this) + if (this.objectHandle ~= 0) + realsense.librealsense_mex('rs2::pipeline', 'delete', this.objectHandle); + end + end + + % Functions + function pipeline_profile = start(this, config) + narginchk(1, 2); + if nargin == 1 + out = realsense.librealsense_mex('rs2::pipeline', 'start', this.objectHandle); 
+ else + validateattributes(config, {'realsense.config'}, {'scalar'}, '', 'config', 2); + out = realsense.librealsense_mex('rs2::pipeline', 'start', this.objectHandle, config.objectHandle); + end + pipeline_profile = realsense.pipeline_profile(out); + end + function stop(this) + realsense.librealsense_mex('rs2::pipeline', 'stop', this.objectHandle); + end + function frames = wait_for_frames(this, timeout_ms) + narginchk(1, 2); + if nargin == 1 + out = realsense.librealsense_mex('rs2::pipeline', 'wait_for_frames', this.objectHandle); + else + if isduration(timeout_ms) + timeout_ms = milliseconds(timeout_ms); + end + validateattributes(timeout_ms, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'timeout_ms', 2); + out = realsense.librealsense_mex('rs2::pipeline', 'wait_for_frames', this.objectHandle, double(timeout_ms)); + end + frames = realsense.frameset(out); + end + % TODO: poll_for_frames + function profile = get_active_profile(this) + out = realsense.librealsense_mex('rs2::pipeline', 'get_active_profile', this.objectHandle); + profile = realsense.pipeline_profile(out); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/pipeline_profile.m b/wrappers/matlab/pipeline_profile.m new file mode 100644 index 0000000000..2acf449ec3 --- /dev/null +++ b/wrappers/matlab/pipeline_profile.m @@ -0,0 +1,42 @@ +% Wraps librealsense2 pipeline_profile class +classdef pipeline_profile < handle + properties (SetAccess = private, Hidden = true) + objectHandle; + end + methods + % Constructor + function this = pipeline_profile(handle) + narginchk(1, 1); + validateattributes(handle, {'uint64'}, {'scalar'}); + this.objectHandle = handle; + end + % Destructor + function delete(this) + if (this.objectHandle ~= 0) + realsense.librealsense_mex('rs2::pipeline_profile', 'delete', this.objectHandle); + end + end + + % Functions + function streams = get_streams(this) + arr = realsense.librealsense_mex('rs2::pipeline_profile', 'get_streams', this.objectHandle); + % 
TODO: Might be cell array + streams = arrayfun(@(x) realsense.stream_profile(x{:}{:}), arr, 'UniformOutput', false); + end + function stream = get_stream(this, stream_type, stream_index) + narginchk(2, 3); + validateattributes(stream_type, {'realsense.stream', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.stream.count}, '', 'stream_type', 2); + if nargin == 2 + out = realsense.librealsense_mex('rs2::pipeline_profile', 'get_stream', this.objectHandle, int64(stream_type)); + else + validateattributes(stream_index, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'stream_index', 3); + out = realsense.librealsense_mex('rs2::pipeline_profile', 'get_stream', this.objectHandle, int64(stream_type), int64(stream_index)); + end + stream = realsense.stream_profile(out{:}); + end + function dev = get_device(this) + out = realsense.librealsense_mex('rs2::pipeline_profile', 'get_device', this.objectHandle); + dev = realsense.device(out); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/playback.m b/wrappers/matlab/playback.m new file mode 100644 index 0000000000..e9b4614f15 --- /dev/null +++ b/wrappers/matlab/playback.m @@ -0,0 +1,65 @@ +% Wraps librealsense2 playback class +classdef playback < realsense.device + methods + % Constructor + function this = playback(handle, index) + this = this@realsense.device(handle, index); + end + + % Destructor (uses base class destructor) + + % Functions + function pause(this) + this.do_init(); + realsense.librealsense_mex('rs2::playback', 'pause', this.objectHandle); + end + function resume(this) + this.do_init(); + realsense.librealsense_mex('rs2::playback', 'resume', this.objectHandle); + end + function fname = file_name(this) + this.do_init(); + fname = realsense.librealsense_mex('rs2::playback', 'file_name', this.objectHandle); + end + function pos = get_position(this) + this.do_init(); + pos = realsense.librealsense_mex('rs2::playback', 'get_position', 
this.objectHandle); + end + function dur = get_duration(this) + this.do_init(); + out = realsense.librealsense_mex('rs2::playback', 'get_duration', this.objectHandle); + dur = milliseconds(out); + end + function seek(this, time) + narginchk(2, 2); + validateattributes(time, {'duration'}, {'scalar', 'nonnegative'}, '', 'time', 2); + this.do_init(); + realsense.librealsense_mex('rs2::playback', 'seek', this.objectHandle, milliseconds(time)); + end + function value = is_real_time(this) + this.do_init(); + value = realsense.librealsense_mex('rs2::playback', 'is_real_time', this.objectHandle); + end + function set_real_time(this, real_time) + narginchk(2, 2); + validateattributes(real_time, {'logical', 'numeric'}, {'scalar', 'real'}, '', 'real_time', 2); + this.do_init(); + realsense.librealsense_mex('rs2::playback', 'set_real_time', this.objectHandle, logical(real_time)); + end + function set_playback_speed(this, speed) + narginchk(2, 2); + validateattributes(speed, {'numeric'}, {'scalar', 'real'}, '', 'speed', 2); + this.do_init(); + realsense.librealsense_mex('rs2::playback', 'set_playback_speed', this.objectHandle, double(speed)); + end + function status = current_status(this) + this.do_init(); + out = realsense.librealsense_mex('rs2::playback', 'current_status', this.objectHandle); + status = realsense.playback_status(out); + end + function stop(this) + this.do_init(); + realsense.librealsense_mex('rs2::playback', 'stop', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/playback_status.m b/wrappers/matlab/playback_status.m new file mode 100644 index 0000000000..627af3f58a --- /dev/null +++ b/wrappers/matlab/playback_status.m @@ -0,0 +1,9 @@ +classdef playback_status < int64 + enumeration + unknown (0) + playing (1) + paused (2) + stopped (3) + count (4) + end +end \ No newline at end of file diff --git a/wrappers/matlab/pointcloud.m b/wrappers/matlab/pointcloud.m new file mode 100644 index 
0000000000..508d26649e --- /dev/null +++ b/wrappers/matlab/pointcloud.m @@ -0,0 +1,28 @@ +% Wraps librealsense2 pointcloud class +classdef pointcloud < realsense.options + methods + % Constructor + function this = pointcloud() + out = realsense.librealsense_mex('rs2::pointcloud', 'new'); + this = this@realsense.options(out); + end + + % Destructor (uses base class destructor) + + % Functions + function points = calculate(this, depth) + narginchk(2, 2) + validateattributes(depth, {'realsense.frame'}, {'scalar'}, '', 'depth', 2); + if ~depth.is('depth_frame') + error('Expected input number 2, depth, to be a depth_frame'); + end + out = realsense.librealsense_mex('rs2::pointcloud', 'calculate', this.objectHandle, depth.objectHandle); + points = realsense.points(out); + end + function map_to(this, mapped) + narginchk(2, 2) + validateattributes(mapped, {'realsense.frame'}, {'scalar'}, '', 'mapped', 2); + realsense.librealsense_mex('rs2::pointcloud', 'map_to', this.objectHandle, mapped.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/points.m b/wrappers/matlab/points.m new file mode 100644 index 0000000000..03e33c79ac --- /dev/null +++ b/wrappers/matlab/points.m @@ -0,0 +1,31 @@ +% Wraps librealsense2 points class +classdef points < realsense.frame + methods + % Constructor + function this = points(handle) + this = this@realsense.frame(handle); + end + + % Destructor (uses base class destructor) + + % Functions + function vertices = get_vertices(this) + vertices = realsense.librealsense_mex('rs2::points', 'get_vertices', this.objectHandle); + end + function export_to_ply(this, fname, texture) + narginchk(3, 3) + validateattributes(fname, {'char'}, {'scalartext', 'nonempty'}, '', 'fname', 2); + validateattributes(texture, {'realsense.frame'}, {'scalar'}, '', 'texture', 3); + if ~texture.is('video_frame') + error('Expected input number 3, texture, to be a video_frame'); + end + realsense.librealsense_mex('rs2::points', 
'export_to_ply', this.objectHandle, fname, texture.objectHandle); + end + function texture_coordinates = get_texture_coordinates(this) + texture_coordinates = realsense.librealsense_mex('rs2::points', 'get_texture_coordinates', this.objectHandle); + end + function s = size(this) + s = realsense.librealsense_mex('rs2::points', 'size', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/pose_frame.m b/wrappers/matlab/pose_frame.m new file mode 100644 index 0000000000..a53b6ef4ee --- /dev/null +++ b/wrappers/matlab/pose_frame.m @@ -0,0 +1,16 @@ +% Wraps librealsense2 pose_frame class +classdef pose_frame < realsense.frame + methods + % Constructor + function this = pose_frame(handle) + this = this@realsense.frame(handle); + end + + % Destructor (uses base class destructor) + + % Functions + function pose_data = get_pose_data(this) + pose_data = realsense.librealsense_mex('rs2::pose_frame', 'get_pose_data', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/process_interface.m b/wrappers/matlab/process_interface.m new file mode 100644 index 0000000000..0ba9e589b2 --- /dev/null +++ b/wrappers/matlab/process_interface.m @@ -0,0 +1,19 @@ +% Wraps librealsense2 process_interface class +classdef process_interface < realsense.options + methods + % Constructor + function this = process_interface(handle) + this = this@realsense.options(handle); + end + + % Destructor (uses base class destructor) + + % Functions + function value = process(this, frame) + narginchk(2, 2) + validateattributes(frame, {'realsense.frame'}, {'scalar'}, '', 'frame', 2); + out = realsense.librealsense_mex('rs2::process_interface', 'process', this.objectHandle, frame.objectHandle); + value = realsense.frame(out); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/readme.md b/wrappers/matlab/readme.md new file mode 100644 index 0000000000..7a1ee45e8d --- /dev/null +++ b/wrappers/matlab/readme.md @@ -0,0 
+1,68 @@ +# Getting Started with RealSense™ SDK2.0 for Matlab® + +## Introduction: +The latest generation of RealSense Depth Cameras can be controlled via the RealSense SDK2.0, also referred to as libRealSense 2.x.x, or LibRS for short. This includes stereo depth cameras D415 and D435 as well as their corresponding modules, such as D410 and D430. + +To get started controlling the RealSense Cameras with Matlab® in Windows 10, we have created a package that wraps some of the core functions of the realsense2.dll, and we have also created a simple “Hello World” function to get started capturing depth data. This uses Matlab R2017b and requires a Windows 10 laptop with a USB3 port. + +## Getting Started +### Building from Source +1. Download the Git repository and run CMake, specifying the x64 toolchain and static linkage (`-DBUILD_SHARED_LIBS:BOOL=OFF`). +2. Open Visual Studio and import the project file located [here](./librealsense_mex.vcxproj). +3. Make sure that the project settings are consistent with the instructions listed on the [MathWorks website](https://www.mathworks.com/help/matlab/matlab_external/compiling-mex-files-with-the-microsoft-visual-c-ide.html) and your system. +4. Make sure that in the project's settings `Target Platform Version` and `Platform Toolset` match the values set for the `realsense2` target. +5. Build the `realsense2` target. +6. Build the `librealsense_mex` target. +7. After compiling the project, set [build/Debug](../../build/Debug) or [build/Release](../../build/Release) as your Matlab working directory. Alternatively copy the `+realsense` folder from there to a place where Matlab can find it. +8. Our simple example can be run by typing [`realsense.depth_example`](./depth_example.m) at the Matlab prompt. + +### Windows Installer +1. Run the Windows Installer and select the Matlab Developer Package checkbox. + + ![Image of Installer](../../doc/img/matlab_select.png) +2. Allow the installer to complete +3. 
The package will be installed to `C:\Program Files (x86)\Intel RealSense SDK 2.0\matlab\+realsense\`. You can copy it from here to a place where Matlab can find it or add it to Matlab's path + +## Examples +#### Displaying a frame using _realsense.pipeline_ +```Matlab +function depth_example() + % Make Pipeline object to manage streaming + pipe = realsense.pipeline(); + % Make Colorizer object to prettify depth output + colorizer = realsense.colorizer(); + + % Start streaming on an arbitrary camera with default settings + profile = pipe.start(); + + % Get streaming device's name + dev = profile.get_device(); + name = dev.get_info(realsense.camera_info.name); + + % Get frames. We discard the first couple to allow + % the camera time to settle + for i = 1:5 + fs = pipe.wait_for_frames(); + end + + % Stop streaming + pipe.stop(); + + % Select depth frame + depth = fs.get_depth_frame(); + % Colorize depth frame + color = colorizer.colorize(depth); + + % Get actual data and convert into a format imshow can use + % (Color data arrives as [R, G, B, R, G, B, ...] 
vector) + data = color.get_data(); + channels = vec2mat(data, 3); + img(:,:,1) = vec2mat(channels(:,1), color.get_width()); + img(:,:,2) = vec2mat(channels(:,2), color.get_width()); + img(:,:,3) = vec2mat(channels(:,3), color.get_width()); + + % Display image + imshow(img); + title(sprintf("Colorized depth frame from %s", name)); +end +``` diff --git a/wrappers/matlab/recorder.m b/wrappers/matlab/recorder.m new file mode 100644 index 0000000000..34405687ab --- /dev/null +++ b/wrappers/matlab/recorder.m @@ -0,0 +1,33 @@ +% Wraps librealsense2 recorder class +classdef recorder < realsense.device + methods + % Constructor + function this = recorder(device, other) + narginchk(2, 2); + validateattributes(device, {'uint64', 'realsense.device'}, {'scalar'}); + if isa(device, 'realsense.device') + validateattributes(other, {'string', 'char'}, {'scalartext', 'nonempty'}); + out = realsense.librealsense_mex('rs2::recorder', 'new#string_device', other, device.objectHandle); + this = this@realsense.device(out{:}); + else + this = this@realsense.device(device, other); + end + end + + % Destructor (uses base class destructor) + + % Functions + function pause(this) + this.do_init(); + realsense.librealsense_mex('rs2::recorder', 'pause', this.objectHandle); + end + function resume(this) + this.do_init(); + realsense.librealsense_mex('rs2::recorder', 'resume', this.objectHandle); + end + function fname = filename(this) + this.do_init(); + fname = realsense.librealsense_mex('rs2::recorder', 'filename', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/roi_sensor.m b/wrappers/matlab/roi_sensor.m new file mode 100644 index 0000000000..864b03fd9c --- /dev/null +++ b/wrappers/matlab/roi_sensor.m @@ -0,0 +1,38 @@ +% Wraps librealsense2 roi_sensor class +classdef roi_sensor < realsense.sensor + methods + % Constructor + function this = roi_sensor(handle) + this = this@realsense.sensor(handle); + end + + % Destructor (uses base class destructor) + + % 
Functions + function set_region_of_interest(this, roi) + narginchk(2, 2) + validateattributes(roi, {'struct'}, {'scalar'}, '', 'roi', 2); + if ~isfield(roi, 'min_x') + error('Expected input number 2, roi, to have a min_x field'); + end + validateattributes(roi.min_x, {'numeric'}, {'scalar', 'real', 'integer'}, '', 'roi.min_x', 2); + if ~isfield(roi, 'min_y') + error('Expected input number 2, roi, to have a min_y field'); + end + validateattributes(roi.min_y, {'numeric'}, {'scalar', 'real', 'integer'}, '', 'roi.min_y', 2); + if ~isfield(roi, 'max_x') + error('Expected input number 2, roi, to have a max_x field'); + end + validateattributes(roi.max_x, {'numeric'}, {'scalar', 'real', 'integer'}, '', 'roi.max_x', 2); + if ~isfield(roi, 'max_y') + error('Expected input number 2, roi, to have a max_y field'); + end + validateattributes(roi.max_y, {'numeric'}, {'scalar', 'real', 'integer'}, '', 'roi.max_y', 2); + input = struct('min_x', int64(roi.min_x), 'min_y', int64(roi.min_y), 'max_x', int64(roi.max_x), 'max_y', int64(roi.max_y)); + realsense.librealsense_mex('rs2::roi_sensor', 'set_region_of_interest', this.objectHandle, input); + end + function roi = get_region_of_interest(this) + roi = realsense.librealsense_mex('rs2::roi_sensor', 'get_region_of_interest', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/rosbag_example.m b/wrappers/matlab/rosbag_example.m new file mode 100644 index 0000000000..8965d772ca --- /dev/null +++ b/wrappers/matlab/rosbag_example.m @@ -0,0 +1,45 @@ +function rosbag_example(filename) + % Make Config object to manage pipeline settings + cfg = realsense.config(); + validateattributes(filename, {'char','string'}, {'scalartext', 'nonempty'}, '', 'filename', 1); + % Tell pipeline to stream from the given rosbag file + cfg.enable_device_from_file(filename) + + % Make Pipeline object to manage streaming + pipe = realsense.pipeline(); + % Make Colorizer object to prettify depth output + colorizer = 
realsense.colorizer(); + + % Start streaming from the rosbag with default settings + profile = pipe.start(cfg); + + % Get streaming device's name + dev = profile.get_device(); + name = dev.get_info(realsense.camera_info.name); + + % Get frames. We discard the first couple to allow + % the camera time to settle + for i = 1:5 + fs = pipe.wait_for_frames(); + end + + % Stop streaming + pipe.stop(); + + % Select depth frame + depth = fs.get_depth_frame(); + % Colorize depth frame + color = colorizer.colorize(depth); + + % Get actual data and convert into a format imshow can use + % (Color data arrives as [R, G, B, R, G, B, ...] vector) + data = color.get_data(); + channels = vec2mat(data, 3); + img(:,:,1) = vec2mat(channels(:,1), color.get_width()); + img(:,:,2) = vec2mat(channels(:,2), color.get_width()); + img(:,:,3) = vec2mat(channels(:,3), color.get_width()); + + % Display image + imshow(img); + title(sprintf("Colorized depth frame from %s", name)); +end \ No newline at end of file diff --git a/wrappers/matlab/rs2_type_traits.h b/wrappers/matlab/rs2_type_traits.h new file mode 100644 index 0000000000..b1ebe0a7d8 --- /dev/null +++ b/wrappers/matlab/rs2_type_traits.h @@ -0,0 +1,124 @@ +#pragma once +#include "librealsense2/rs.hpp" +#include + +// extra checks to help specialization priority resoultion +template using extra_checks = std::bool_constant::value +// || std::is_base_of::value + || std::is_base_of::value>; + +// C API + +// rs_types.hpp + +// rs_frame.hpp +template struct MatlabParamParser::type_traits::value>::type> { + using rs2_internal_t = const rs2_stream_profile*; + using use_cells = std::true_type; + static T from_internal(rs2_internal_t * ptr) { return T(rs2::stream_profile(*ptr)); } +}; +template struct MatlabParamParser::type_traits::value>::type> { using rs2_internal_t = rs2_frame * ; }; + +// rs_sensor.hpp +template<> struct MatlabParamParser::type_traits { + struct carrier { + void * ptr; + enum class types { rs2_sensor, rs2_process_interface, 
rs2_colorizer, rs2_pointcloud } type; + carrier(void *ptr_, types t) : ptr(ptr_), type(t) {} + ~carrier(); // implemented at the bottom with an explanation as to why + }; + using rs2_internal_t = carrier; + static rs2::options from_internal(rs2_internal_t * ptr); +}; +template struct MatlabParamParser::type_traits::value>::type> + : MatlabParamParser::type_traits { + using carrier_t = std::shared_ptr; + using carrier_enum = std::integral_constant; + static T from_internal(rs2_internal_t * ptr) { + if (ptr->type == carrier_enum::value) return T(rs2::sensor(*reinterpret_cast(ptr->ptr))); + mexErrMsgTxt("Error parsing argument, object is not a sensor"); + } + static rs2_internal_t* to_internal(T&& var) { mexLock(); return new rs2_internal_t(new carrier_t(var), carrier_enum::value); } +}; + +// rs_device.hpp +template<> struct MatlabParamParser::type_traits { using rs2_internal_t = std::shared_ptr; }; +template<> struct MatlabParamParser::type_traits { + using rs2_internal_t = std::shared_ptr; + using use_cells = std::true_type; +}; + +// rs_record_playback.hpp +template<> struct MatlabParamParser::type_traits : MatlabParamParser::type_traits { + static rs2::playback from_internal(rs2_internal_t * ptr) { return traits_trampoline::from_internal(ptr); } +}; +template<> struct MatlabParamParser::type_traits : MatlabParamParser::type_traits { + static rs2::recorder from_internal(rs2_internal_t * ptr) { return traits_trampoline::from_internal(ptr); } +}; + +// rs_processing.hpp +template struct over_wrapper { + using rs2_internal_t = std::shared_ptr; + static T from_internal(rs2_internal_t * ptr) { return T(**ptr); } + static rs2_internal_t* to_internal(T&& val) { mexLock(); return new rs2_internal_t(new T(val)); } +}; +template ::carrier::types E> struct options_over_wrapper + : MatlabParamParser::type_traits { + using carrier_t = std::shared_ptr; + using carrier_enum = std::integral_constant; + static T from_internal(rs2_internal_t * ptr) { + if (ptr->type == 
carrier_enum::value) return T(**static_cast(ptr->ptr)); + mexErrMsgTxt("Error parsing argument, wrong branch of rs2::options inheritance"); + } + static rs2_internal_t* to_internal(T&& var) { mexLock(); return new rs2_internal_t(new carrier_t(new T(var)), carrier_enum::value); } +}; +template<> struct MatlabParamParser::type_traits : over_wrapper {}; +template<> struct MatlabParamParser::type_traits + : options_over_wrapper::carrier::types::rs2_colorizer> {}; +template<> struct MatlabParamParser::type_traits : over_wrapper {}; +template<> struct MatlabParamParser::type_traits : over_wrapper {}; +template<> struct MatlabParamParser::type_traits + : options_over_wrapper::carrier::types::rs2_pointcloud> {}; + + +template<> struct MatlabParamParser::type_traits : type_traits { + using carrier_t = std::shared_ptr; + using carrier_enum = std::integral_constant; +}; +template struct MatlabParamParser::type_traits::value>::type> + : MatlabParamParser::type_traits { + static T from_internal(rs2_internal_t * ptr) { mexErrMsgTxt("from_internal(): This shouldn't happen"); } + static rs2_internal_t* to_internal(T&& var) { mexLock(); return new rs2_internal_t(new carrier_t(new T(var)), carrier_enum::value); } +}; + +// rs_context.hpp +// rs2::event_information [?] +template<> struct MatlabParamParser::type_traits { using rs2_internal_t = std::shared_ptr; }; +template<> struct MatlabParamParser::type_traits { using rs2_internal_t = std::shared_ptr; }; + +// rs_pipeline.hpp +template<> struct MatlabParamParser::type_traits { using rs2_internal_t = std::shared_ptr; }; +template<> struct MatlabParamParser::type_traits { using rs2_internal_t = std::shared_ptr; }; +template<> struct MatlabParamParser::type_traits { using rs2_internal_t = std::shared_ptr; }; + +// This needs to go at the bottom so that all the relevant type_traits specializations will have already occured. 
+MatlabParamParser::type_traits::carrier::~carrier() { + switch (type) { + case types::rs2_sensor: delete reinterpret_cast::carrier_t*>(ptr); break; + case types::rs2_process_interface: delete reinterpret_cast::carrier_t*>(ptr); break; + case types::rs2_colorizer: delete reinterpret_cast::carrier_t*>(ptr); break; + case types::rs2_pointcloud: delete reinterpret_cast::carrier_t*>(ptr); break; + } +} + +rs2::options MatlabParamParser::type_traits::from_internal(rs2_internal_t * ptr) { + switch (ptr->type) { + case carrier::types::rs2_sensor: return traits_trampoline::from_internal(ptr).as(); + // TODO: Fix + //case carrier::types::rs2_process_interface: return *std::shared_ptr(*static_cast::carrier_t*>(ptr->ptr)); + case carrier::types::rs2_colorizer: return *std::shared_ptr(*static_cast::carrier_t*>(ptr->ptr)); + case carrier::types::rs2_pointcloud: return *std::shared_ptr(*static_cast::carrier_t*>(ptr->ptr)); + default: mexErrMsgTxt("Error parsing argument of type rs2::options: unrecognized carrier type"); + } + +} diff --git a/wrappers/matlab/sensor.m b/wrappers/matlab/sensor.m new file mode 100644 index 0000000000..baf0f9162e --- /dev/null +++ b/wrappers/matlab/sensor.m @@ -0,0 +1,78 @@ +% Wraps librealsense2 sensor class +classdef sensor < realsense.options + methods + % Constructor + function this = sensor(handle) + this = this@realsense.options(handle); + end + + % Destructor (uses base class destructor) + + % Functions + function open(this, profiles) + narginchk(2, 2) + validateattributes(profiles, {'realsense.stream_profile'}, {'nonempty', 'vector'}, '', 'profiles', 2); + if isscalar(profiles) + realsense.librealsense_mex('rs2::sensor', 'open#stream_profile', this.objectHandle, profiles.objectHandle); + else + realsense.librealsense_mex('rs2::sensor', 'open#vec_stream_profile', this.objectHandle, profiles); + end + end + function value = supports_camera_info(this, info) + narginchk(2, 2) + validateattributes(info, {'realsense.camera_info', 
'numeric'},{'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.camera_info.count}, '', 'info', 2); + value = realsense.librealsense_mex('rs2::sensor', 'supports#rs2_camera_info', this.objectHandle, int64(info)); + end + function value = get_info(this, info) + narginchk(2, 2) + validateattributes(info, {'realsense.camera_info', 'numeric'},{'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.camera_info.count}, '', 'info', 2); + value = realsense.librealsense_mex('rs2::sensor', 'get_info', this.objectHandle, int64(info)); + end + function close(this) + realsense.librealsense_mex('rs2::sensor', 'close', this.objectHandle); + end + % TODO: start [etc?] + function start(this, queue) + narginchk(2, 2); + validateattributes(queue, {'realsense.frame_queue'}, {'scalar'}, '', 'queue', 2); + realsense.librealsense_mex('rs2::sensor', 'start', this.objectHandle, queue.objectHandle); + end + function stop(this) + realsense.librealsense_mex('rs2::sensor', 'stop', this.objectHandle); + end + function profiles = get_stream_profiles(this) + arr = realsense.librealsense_mex('rs2::sensor', 'get_stream_profiles', this.objectHandle); + % TODO: Might be cell array + profiles = arrayfun(@(x) realsense.stream_profile(x{:}{:}), arr, 'UniformOutput', false); + end + function l = logical(this) + l = realsense.librealsense_mex('rs2::sensor', 'operator bool', this.objectHandle); + end + function value = is(this, type) + narginchk(2, 2); + % C++ function validates contents of type + validateattributes(type, {'char', 'string'}, {'scalartext'}, '', 'type', 2); + out = realsense.librealsense_mex('rs2::sensor', 'is', this.objectHandle, type); + value = logical(out); + end + function sensor = as(this, type) + narginchk(2, 2); + % C++ function validates contents of type + validateattributes(type, {'char', 'string'}, {'scalartext'}, '', 'type', 2); + out = realsense.librealsense_mex('rs2::sensor', 'as', this.objectHandle, type); + switch type + case 'sensor' + sensor = 
realsense.sensor(out); + case 'roi_sensor' + sensor = realsense.roi_sensor(out); + case 'depth_sensor' + sensor = realsense.depth_sensor(out); + case 'depth_stereo_sensor' + sensor = realsense.depth_stereo_sensor(out);; + end + end + + % Operators + % TODO: operator== + end +end diff --git a/wrappers/matlab/spatial_filter.m b/wrappers/matlab/spatial_filter.m new file mode 100644 index 0000000000..3149089312 --- /dev/null +++ b/wrappers/matlab/spatial_filter.m @@ -0,0 +1,14 @@ +% Wraps librealsense2 spatial_filter class +classdef spatial_filter < realsense.process_interface + methods + % Constructor + function this = spatial_filter() + out = realsense.librealsense_mex('rs2::spatial_filter', 'new'); + this = this@realsense.process_interface(out); + end + + % Destructor (uses base class destructor) + + % Functions + end +end \ No newline at end of file diff --git a/wrappers/matlab/stream.m b/wrappers/matlab/stream.m new file mode 100644 index 0000000000..08aa5f0f06 --- /dev/null +++ b/wrappers/matlab/stream.m @@ -0,0 +1,15 @@ +classdef stream < int64 + enumeration + any_stream ( 0) + depth ( 1) + color ( 2) + infrared ( 3) + fisheye ( 4) + gyro ( 5) + accel ( 6) + gpio ( 7) + pose ( 8) + confidence ( 9) + count (10) + end +end \ No newline at end of file diff --git a/wrappers/matlab/stream_profile.m b/wrappers/matlab/stream_profile.m new file mode 100644 index 0000000000..4453d445f9 --- /dev/null +++ b/wrappers/matlab/stream_profile.m @@ -0,0 +1,89 @@ +% Wraps librealsense2 stream_profile class +classdef stream_profile < handle + properties (SetAccess = protected, Hidden = true) + objectOwnHandle; + objectHandle; + end + methods + % Constructor + function this = stream_profile(handle, ownHandle) + narginchk(2, 2); + validateattributes(handle, {'uint64'}, {'scalar'}); + validateattributes(ownHandle, {'uint64'}, {'scalar'}); + this.objectHandle = handle; + this.objectOwnHandle = ownHandle; + end + % Destructor + function delete(this) + if (this.objectHandle ~= 0) + 
realsense.librealsense_mex('rs2::stream_profile', 'delete', this.objectOwnHandle); + end + end + + % Functions + function index = stream_index(this) + index = realsense.librealsense_mex('rs2::stream_profile', 'stream_index', this.objectHandle); + end + function type = stream_type(this) + type = realsense.stream(realsense.librealsense_mex('rs2::stream_profile', 'stream_type', this.objectHandle)); + end + function fmt = format(this) + fmt = realsense.format(realsense.librealsense_mex('rs2::stream_profile', 'format', this.objectHandle)); + end + function f = fps(this) + f = realsense.librealsense_mex('rs2::stream_profile', 'fps', this.objectHandle); + end + function id = unique_id(this) + id = realsense.librealsense_mex('rs2::stream_profile', 'unique_id', this.objectHandle); + end + function profile = clone(this, type, index, fmt) + narginchk(4, 4); + validateattributes(type, {'realsense.stream', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.stream.count}, '', 'type', 2); + validateattributes(index, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'index', 3); + validateattributes(fmt, {'realsense.format', 'numeric'}, {'scalar', 'nonnegative', 'real', 'integer', '<=', realsense.format.count}, '', 'fmt', 4); + out = realsense.librealsense_mex('rs2::stream_profile', 'clone', this.objectHandle, int64(type), int64(index), int64(fmt)); + profile = realsense.stream_profile(out{:}); + end + function value = is(this, type) + narginchk(2, 2); + % C++ function validates contents of type + validateattributes(type, {'char', 'string'}, {'scalartext'}, '', 'type', 2); + out = realsense.librealsense_mex('rs2::stream_profile', 'is', this.objectHandle, type); + value = logical(out); + end + function profile = as(this, type) + narginchk(2, 2); + % C++ function validates contents of type + validateattributes(type, {'char', 'string'}, {'scalartext'}, '', 'type', 2); + out = realsense.librealsense_mex('rs2::stream_profile', 'as', this.objectHandle, type); + 
switch type + case 'stream_profile' + profile = realsense.stream_profile(out); + case 'video_stream_profile' + profile = realsense.video_stream_profile(out); + case 'motion_stream_profile' + profile = realsense.motion_stream_profile(out); + end + end + function name = stream_name(this) + name = realsense.librealsense_mex('rs2::stream_profile', 'stream_name', this.objectHandle); + end + function value = is_default(this) + value = realsense.librealsense_mex('rs2::stream_profile', 'is_default', this.objectHandle); + end + function l = logical(this) + l = realsense.librealsense_mex('rs2::stream_profile', 'operator bool', this.objectHandle); + end + function extrinsics = get_extrinsics_to(this, to) + narginchk(2, 2); + validateattributes(to, {'realsense.stream_profile'}, {'scalar'}, '', 'to', 2); + extrinsics = realsense.librealsense_mex('rs2::stream_profile', 'get_extrinsics_to', this.objectHandle, to.objectHandle); + end + function value = is_cloned(this) + value = realsense.librealsense_mex('rs2::stream_profile', 'is_cloned', this.objectHandle); + end + + % Operators + % TODO: operator== + end +end \ No newline at end of file diff --git a/wrappers/matlab/syncer.m b/wrappers/matlab/syncer.m new file mode 100644 index 0000000000..af0fa3f827 --- /dev/null +++ b/wrappers/matlab/syncer.m @@ -0,0 +1,37 @@ +% Wraps librealsense2 syncer class +classdef syncer < handle + properties (SetAccess = private, Hidden = true) + objectHandle; + end + methods + % Constructor + function this = syncer(queue_size) + if nargin == 0 + this.objectHandle = realsense.librealsense_mex('rs2::syncer', 'new'); + else + validateattributes(queue_size, {'numeric'}, {'scalar', 'positive', 'real', 'integer'}); + this.objectHandle = realsense.librealsense_mex('rs2::syncer', 'new', uint64(queue_size)); + end + end + + % Destructor + function delete(this) + if (this.objectHandle ~= 0) + realsense.librealsense_mex('rs2::syncer', 'delete', this.objectHandle); + end + end + + % Functions + function frames = 
wait_for_frames(this, timeout_ms) + narginchk(1, 2); + if nargin == 1 + out = realsense.librealsense_mex('rs2::syncer', 'wait_for_frames', this.objectHandle); + else + if isduration(timeout_ms) + timeout_ms = milliseconds(timeout_ms); + end + validateattributes(timeout_ms, {'numeric'}, {'scalar', 'nonnegative', 'real', 'integer'}, '', 'timeout_ms', 2); + out = realsense.librealsense_mex('rs2::syncer', 'wait_for_frames', this.objectHandle, double(timeout_ms)); + end + frames = realsense.frameset(out); + end \ No newline at end of file diff --git a/wrappers/matlab/temporal_filter.m b/wrappers/matlab/temporal_filter.m new file mode 100644 index 0000000000..cd53c35364 --- /dev/null +++ b/wrappers/matlab/temporal_filter.m @@ -0,0 +1,14 @@ +% Wraps librealsense2 temporal_filter class +classdef temporal_filter < realsense.process_interface + methods + % Constructor + function this = temporal_filter() + out = realsense.librealsense_mex('rs2::temporal_filter', 'new'); + this = this@realsense.process_interface(out); + end + + % Destructor (uses base class destructor) + + % Functions + end +end \ No newline at end of file diff --git a/wrappers/matlab/timestamp_domain.m b/wrappers/matlab/timestamp_domain.m new file mode 100644 index 0000000000..37a4fc39c3 --- /dev/null +++ b/wrappers/matlab/timestamp_domain.m @@ -0,0 +1,7 @@ +classdef timestamp_domain < int64 + enumeration + hardware_clock (0) + system_time (1) + count (2) + end +end \ No newline at end of file diff --git a/wrappers/matlab/types.h b/wrappers/matlab/types.h new file mode 100644 index 0000000000..eec7f3b628 --- /dev/null +++ b/wrappers/matlab/types.h @@ -0,0 +1,246 @@ +#pragma once +#include "librealsense2/rs.hpp" +#include +#include + +// C API +template<> static mxArray* MatlabParamParser::mx_wrapper_fns::wrap(rs2_extrinsics&& val) +{ + const char* fnames[] = { "rotation", "translation" }; + mxArray* cell = mxCreateStructMatrix(1, 1, 2, fnames); + mxSetField(cell, 0, "rotation", 
MatlabParamParser::wrap_array(val.rotation, 9)); + mxSetField(cell, 0, "translation", MatlabParamParser::wrap_array(val.translation, 3)); + return cell; +} +template<> static mxArray* MatlabParamParser::mx_wrapper_fns::wrap(rs2_intrinsics&& val) +{ + const char* fnames[] = { "width", "height", "ppx", "ppy", "fx", "fy", "model", "coeffs" }; + mxArray* cell = mxCreateStructMatrix(1, 1, 8, fnames); + mxSetField(cell, 0, "width", MatlabParamParser::wrap(std::move(val.width))); + mxSetField(cell, 0, "height", MatlabParamParser::wrap(std::move(val.height))); + mxSetField(cell, 0, "ppx", MatlabParamParser::wrap(std::move(val.ppx))); + mxSetField(cell, 0, "ppy", MatlabParamParser::wrap(std::move(val.ppy))); + mxSetField(cell, 0, "fx", MatlabParamParser::wrap(std::move(val.fx))); + mxSetField(cell, 0, "fy", MatlabParamParser::wrap(std::move(val.fy))); + mxSetField(cell, 0, "model", MatlabParamParser::wrap(std::move(val.model))); + mxSetField(cell, 0, "coeffs", MatlabParamParser::wrap_array(val.coeffs, 5)); + return cell; +} +template<> static mxArray* MatlabParamParser::mx_wrapper_fns::wrap(rs2_motion_device_intrinsic&& val) +{ + const char* fnames[] = { "data", "noise_variances", "bias_variances"}; + mxArray* cell = mxCreateStructMatrix(1, 1, 3, fnames); + mxSetField(cell, 0, "noise_variances", MatlabParamParser::wrap_array(val.noise_variances, 3)); + mxSetField(cell, 0, "bias_variances", MatlabParamParser::wrap_array(val.bias_variances, 3)); + + // have to do data field manually for now because of multidimensional array. 
hope to extend make_array to cover this case + using data_wrapper_t = mx_wrapper::type>; + mxArray* data_cell = mxCreateNumericMatrix(3, 4, data_wrapper_t::value::value, mxREAL); + auto data_ptr = static_cast(mxGetData(data_cell)); + for (int y = 0; y < 3; ++y) for (int x = 0; x < 4; ++x) data_ptr[y + 3*x] = val.data[y][x]; + mxSetField(cell, 0, "data", data_cell); + return cell; +} +template<> static mxArray* MatlabParamParser::mx_wrapper_fns::wrap(rs2_vector&& val) +{ + using wrapper_t = mx_wrapper; + auto cells = mxCreateNumericMatrix(1, 3, wrapper_t::value::value, mxREAL); + auto ptr = static_cast(mxGetData(cells)); + ptr[0] = wrapper_t::type(val.x); + ptr[1] = wrapper_t::type(val.y); + ptr[2] = wrapper_t::type(val.z); + return cells; +} +template<> static mxArray* MatlabParamParser::mx_wrapper_fns::wrap(rs2_quaternion&& val) +{ + using wrapper_t = mx_wrapper; + auto cells = mxCreateNumericMatrix(1, 4, wrapper_t::value::value, mxREAL); + auto ptr = static_cast(mxGetData(cells)); + ptr[0] = wrapper_t::type(val.x); + ptr[1] = wrapper_t::type(val.y); + ptr[2] = wrapper_t::type(val.z); + ptr[3] = wrapper_t::type(val.w); + return cells; +} +template<> static mxArray* MatlabParamParser::mx_wrapper_fns::wrap(rs2_pose&& val) +{ + const char* fnames[] = { "translation", "velocity", "acceleration", "rotation", "angular_velocity", "angular_acceleration", "tracker_confidence", "mapper_confidence" }; + mxArray* cell = mxCreateStructMatrix(1, 1, 8, fnames); + mxSetField(cell, 0, "translation", MatlabParamParser::wrap(std::move(val.translation))); + mxSetField(cell, 0, "velocity", MatlabParamParser::wrap(std::move(val.velocity))); + mxSetField(cell, 0, "acceleration", MatlabParamParser::wrap(std::move(val.acceleration))); + mxSetField(cell, 0, "rotation", MatlabParamParser::wrap(std::move(val.rotation))); + mxSetField(cell, 0, "angular_velocity", MatlabParamParser::wrap(std::move(val.angular_velocity))); + mxSetField(cell, 0, "angular_acceleration", 
MatlabParamParser::wrap(std::move(val.angular_acceleration))); + mxSetField(cell, 0, "tracker_confidence", MatlabParamParser::wrap(std::move(val.tracker_confidence))); + mxSetField(cell, 0, "mapper_confidence", MatlabParamParser::wrap(std::move(val.mapper_confidence))); + return cell; +} + +// rs_types.hpp +template<> static mxArray* MatlabParamParser::mx_wrapper_fns::wrap(rs2::option_range&& val) +{ + const char* fnames[] = { "min", "max", "step", "def" }; + mxArray* cell = mxCreateStructMatrix(1, 1, 4, fnames); + mxSetField(cell, 0, "min", MatlabParamParser::wrap(std::move(val.min))); + mxSetField(cell, 0, "max", MatlabParamParser::wrap(std::move(val.max))); + mxSetField(cell, 0, "step", MatlabParamParser::wrap(std::move(val.step))); + mxSetField(cell, 0, "def", MatlabParamParser::wrap(std::move(val.def))); + return cell; +} +template<> struct MatlabParamParser::mx_wrapper_fns +{ + static rs2::region_of_interest parse(const mxArray* cell) + { + rs2::region_of_interest ret; + ret.min_x = MatlabParamParser::parse(mxGetField(cell, 0, "min_x")); + ret.min_y = MatlabParamParser::parse(mxGetField(cell, 0, "min_y")); + ret.max_x = MatlabParamParser::parse(mxGetField(cell, 0, "max_x")); + ret.max_y = MatlabParamParser::parse(mxGetField(cell, 0, "max_y")); + return ret; + } + static mxArray* wrap(rs2::region_of_interest&& val) + { + const char* fnames[] = { "min_x", "min_y", "max_x", "max_y" }; + mxArray* cell = mxCreateStructMatrix(1, 1, 4, fnames); + mxSetField(cell, 0, "min_x", MatlabParamParser::wrap(std::move(val.min_x))); + mxSetField(cell, 0, "min_y", MatlabParamParser::wrap(std::move(val.min_y))); + mxSetField(cell, 0, "max_x", MatlabParamParser::wrap(std::move(val.max_x))); + mxSetField(cell, 0, "max_y", MatlabParamParser::wrap(std::move(val.max_y))); + return cell; + } +}; + +// rs_context.hpp + +// rs_record_playback.hpp + +// rs_device.hpp +template<> static mxArray* MatlabParamParser::mx_wrapper_fns::wrap(rs2::device_list&& var) +{ + // Device list is sent as 
a native array of (ptr, id) pairs to preserve lazy instantiation of devices + size_t len = var.size(); + + mxArray* vec = mxCreateCellMatrix(1, len); + for (uint32_t i = 0; i < len; ++i) + { + using dl_wrap_t = mx_wrapper; + using idx_wrap_t = mx_wrapper; + auto cells = mxCreateCellMatrix(1, 2); + auto dl_cell = mxCreateNumericMatrix(1, 1, dl_wrap_t::value::value, mxREAL); + mexLock(); // lock once for each created pointer + *static_cast(mxGetData(dl_cell)) = reinterpret_cast(new type_traits::rs2_internal_t(var)); + auto idx_cell = mxCreateNumericMatrix(1, 1, idx_wrap_t::value::value, mxREAL); + *static_cast(mxGetData(idx_cell)) = static_cast(i); + mxSetCell(cells, 0, dl_cell); + mxSetCell(cells, 1, dl_cell); + mxSetCell(vec, i, cells); + } + + return vec; +} + +// rs_sensor.hpp + +// rs_frame.hpp +// TODO: Is there really not a cleaner way to deal with cloned stream_profiles? +template struct MatlabParamParser::mx_wrapper_fns::value>::type> +{ + static T parse(const mxArray* cell) + { + using internal_t = typename type_traits::rs2_internal_t; + return traits_trampoline::from_internal(mx_wrapper_fns::parse(cell)); + } + static mxArray* wrap(T&& val) + { + auto cells = mxCreateCellMatrix(1, 2); + auto handle_cell = mxCreateNumericMatrix(1, 1, mxUINT64_CLASS, mxREAL); + auto handle_ptr = static_cast(mxGetData(cells)); + *handle_ptr = reinterpret_cast(type_traits::rs2_internal_t(val)); + + auto own_cell = mxCreateNumericMatrix(1, 1, mxUINT64_CLASS, mxREAL); + auto own_ptr = static_cast(mxGetData(cells)); + // if its cloned, give the wrapper ownership of the stream_profile + if (val.is_cloned()) + { + mexLock(); + *own_ptr = reinterpret_cast(new std::shared_ptr(val)); + } + else *own_ptr = reinterpret_cast(nullptr); + + mxSetCell(cells, 0, handle_cell); + mxSetCell(cells, 1, own_cell); + return cells; + } + static void destroy(const mxArray* cell) + { + // we parse a std::shard_ptr* because that is how the ownership + // pointer is stored, which is the one we need to 
destroy (assuming it exists) + auto ptr = mx_wrapper_fns*>::parse(cell); + if (!ptr) return; // only destroy if wrapper owns the profile + delete ptr; + mexUnlock(); + } +}; + +// rs2::frame does its own internal refcounting, so this interfaces with it properly +template struct MatlabParamParser::mx_wrapper_fns::value>::type> +{ + static mxArray* wrap(T&& var) + { + using wrapper_t = mx_wrapper; + mxArray *cell = mxCreateNumericMatrix(1, 1, wrapper_t::value::value, mxREAL); + auto *outp = static_cast(mxGetData(cell)); + auto inptr = type_traits::rs2_internal_t(var); + rs2_frame_add_ref(inptr, nullptr); + mexLock(); // Wrapper holds a reference to the internal type, which won't get destroyed until the wrapper lets go + *outp = reinterpret_cast(inptr); + return cell; + } + static T parse(const mxArray* cell) + { + auto inptr = mx_wrapper_fns::rs2_internal_t>::parse(cell); + rs2_frame_add_ref(inptr, nullptr); + return T(inptr); + } + static void destroy(const mxArray* cell) + { + auto inptr = mx_wrapper_fns::rs2_internal_t>::parse(cell); + rs2_release_frame(inptr); + mexUnlock(); // Wrapper holds a reference to the internal type, which won't get destroyed until the wrapper lets go + } +}; +template <> static mxArray* MatlabParamParser::wrap_array(const rs2::vertex* var, size_t length) +{ + using wrapper_t = mx_wrapper; + auto cells = mxCreateNumericMatrix(length, 3, wrapper_t::value::value, mxREAL); + auto ptr = static_cast(mxGetData(cells)); + for (int x = 0; x < length; ++x) { + ptr[0 * length + x] = wrapper_t::type(var[x].x); + ptr[1 * length + x] = wrapper_t::type(var[x].y); + ptr[2 * length + x] = wrapper_t::type(var[x].z); + } + return cells; +} +template <> static mxArray* MatlabParamParser::wrap_array(const rs2::texture_coordinate* var, size_t length) +{ + using wrapper_t = mx_wrapper; + auto cells = mxCreateNumericMatrix(length, 2, wrapper_t::value::value, mxREAL); + auto ptr = static_cast(mxGetData(cells)); + for (int x = 0; x < length; ++x) { + ptr[0 * 
length + x] = wrapper_t::type(var[x].u); + ptr[1 * length + x] = wrapper_t::type(var[x].v); + } + return cells; +} + +// rs_processing.hpp +template<> static rs2::process_interface* MatlabParamParser::mx_wrapper_fns::parse(const mxArray* cell) +{ + using traits_t = type_traits; + auto ptr = static_cast(mxGetData(cell)); + if (ptr->type == traits_t::carrier_enum::value) return reinterpret_cast(ptr->ptr)->get(); + mexErrMsgTxt("Error parsing argument, object is not a process_interface"); +} + +// rs_pipeline.hpp diff --git a/wrappers/matlab/video_frame.m b/wrappers/matlab/video_frame.m new file mode 100644 index 0000000000..f8a7b133b9 --- /dev/null +++ b/wrappers/matlab/video_frame.m @@ -0,0 +1,28 @@ +% Wraps librealsense2 video_frame class +classdef video_frame < realsense.frame + methods + % Constructor + function this = video_frame(handle) + this = this@realsense.frame(handle); + end + + % Destructor (uses base class destructor) + + % Functions + function width = get_width(this) + width = realsense.librealsense_mex('rs2::video_frame', 'get_width', this.objectHandle); + end + function height = get_height(this) + height = realsense.librealsense_mex('rs2::video_frame', 'get_height', this.objectHandle); + end + function stride = get_stride_in_bytes(this) + stride = realsense.librealsense_mex('rs2::video_frame', 'get_stride_in_bytes', this.objectHandle); + end + function bipp = get_bits_per_pixel(this) + bipp = realsense.librealsense_mex('rs2::video_frame', 'get_bits_per_pixel', this.objectHandle); + end + function bpp = get_bytes_per_pixel(this) + bpp = realsense.librealsense_mex('rs2::video_frame', 'get_bytes_per_pixel', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/matlab/video_stream_profile.m b/wrappers/matlab/video_stream_profile.m new file mode 100644 index 0000000000..227a607567 --- /dev/null +++ b/wrappers/matlab/video_stream_profile.m @@ -0,0 +1,22 @@ +% Wraps librealsense2 video_stream_profile class +classdef 
video_stream_profile < realsense.stream_profile + methods + % Constructor + function this = video_stream_profile(ownHandle, handle) + this = this@realsense.stream_profile(ownHandle, handle); + end + + % Destructor (uses base class destructor) + + % Functions + function w = width(this) + w = realsense.librealsense_mex('rs2::video_stream_profile', 'width', this.objectHandle); + end + function h = heigh(this) + h = realsense.librealsense_mex('rs2::video_stream_profile', 'height', this.objectHandle); + end + function intrinsics = get_intrinsics(this) + intrinsics = realsense.librealsense_mex('rs2::video_stream_profile', 'get_intrinsics', this.objectHandle); + end + end +end \ No newline at end of file diff --git a/wrappers/nodejs/examples/realsense_viewer/index.js b/wrappers/nodejs/examples/realsense_viewer/index.js index a38358c66b..06a9a85476 100644 --- a/wrappers/nodejs/examples/realsense_viewer/index.js +++ b/wrappers/nodejs/examples/realsense_viewer/index.js @@ -77,10 +77,11 @@ class Realsense { } let options = { tag: ResponseTag.options, - data: [], + data: {}, }; this.sensors.forEach((s) => { - options.data.push(this._getSensorOptions(s)); + let sensorName = s.getCameraInfo(this.wrapper.camera_info.camera_info_name); + options.data[sensorName] = this._getSensorOptions(s); }); return options; } @@ -109,6 +110,11 @@ class Realsense { console.log(cmd); this._handleStart(cmd); break; + case CommandTag.setOption: + console.log('Set option command'); + console.log(cmd); + this._handleSetOption(cmd); + break; default: console.log(`Unrecognized command ${cmd}`); break; @@ -310,6 +316,11 @@ class Realsense { }); } } + // process set option command + _handleSetOption(cmd) { + let sensor = this._findSensorByName(cmd.data.sensor); + sensor.setOption(cmd.data.option, Number(cmd.data.value)); + } } class ConnectionManager { diff --git a/wrappers/nodejs/examples/realsense_viewer/public/app.js b/wrappers/nodejs/examples/realsense_viewer/public/app.js index 
dbc867ea02..cae8c48326 100644 --- a/wrappers/nodejs/examples/realsense_viewer/public/app.js +++ b/wrappers/nodejs/examples/realsense_viewer/public/app.js @@ -89,6 +89,18 @@ class RealsenseProxy { console.log('send start cmd:'); this._sendCmd(JSON.stringify(cmd)); } + setOption(sensor, option, value) { + let cmd = { + tag: CommandTag.setOption, + data: { + sensor: sensor, + option: option, + value: value, + }, + }; + console.log('send set option cmd:'); + this._sendCmd(JSON.stringify(cmd)); + } _setupConnectionCallback(streamVal) { let inst = this; this.dataSockets[streamVal] = new WebSocket(this.url); @@ -141,7 +153,6 @@ const vue = new Vue({ sensorInfo: [], sensorInfoStr: '', presets: [], - options: '', selection: { selectedPreset: '', selectedResolutions: {}, @@ -171,6 +182,10 @@ const vue = new Vue({ 'infrared2': false, }, }, + controls: { + enabled: {}, + options: {}, + }, }, mounted: function() { proxy.connect(); @@ -215,6 +230,9 @@ const vue = new Vue({ console.log('selected Formats:', vue.selection.selectedFormats); console.log('selected Preset:', vue.selection.selectedPreset); }, + onOptionChanged: function(sensorName, optionName, value) { + proxy.setOption(sensorName, optionName, value); + }, }, }); @@ -225,7 +243,7 @@ function cmdResponseCallback(response) { vue.presets = response.data; break; case ResponseTag.options: - vue.options = JSON.stringify(response); + vue.controls.options =response.data; break; case ResponseTag.sensorInfo: vue.sensorInfo = response.data; diff --git a/wrappers/nodejs/examples/realsense_viewer/public/common.js b/wrappers/nodejs/examples/realsense_viewer/public/common.js index f98d4fab1e..c0efba6ac8 100644 --- a/wrappers/nodejs/examples/realsense_viewer/public/common.js +++ b/wrappers/nodejs/examples/realsense_viewer/public/common.js @@ -109,6 +109,7 @@ const CommandTag = { { tag: 'set-option', data: { + sensor: sensor name, option: option name, value: value } @@ -123,7 +124,7 @@ const CommandTag = { } structure of startDataType: 
{ - sensor: 'color' or 'stereo' + sensor: sensor name streams: array of streamConfigType: } structure of streamConfigType: diff --git a/wrappers/nodejs/examples/realsense_viewer/public/index.html b/wrappers/nodejs/examples/realsense_viewer/public/index.html index 7c2a65a551..2296e4316b 100644 --- a/wrappers/nodejs/examples/realsense_viewer/public/index.html +++ b/wrappers/nodejs/examples/realsense_viewer/public/index.html @@ -11,46 +11,63 @@
-
- -

-
-
- -
- -
- Availables streams:   - - - -      - - - - - - - -
-
+
+
+ +

+
+
+ {{info.name}} + +
+ +
+ +
+ + + +

+ +
+
+
+
+ +
+
+

+ + + + + + +
+
+
+
-
-
- > -
- - - - +
+ > +
+ + + + + + +
diff --git a/wrappers/nodejs/src/addon.cpp b/wrappers/nodejs/src/addon.cpp index bd4c2194cd..9066dd19b9 100644 --- a/wrappers/nodejs/src/addon.cpp +++ b/wrappers/nodejs/src/addon.cpp @@ -3514,7 +3514,7 @@ class RSDeviceHub : public Nan::ObjectWrap { if (!me) return; auto dev = GetNativeResult(rs2_device_hub_wait_for_device, - &me->error_, me->ctx_, me->hub_, &me->error_); + &me->error_, me->hub_, &me->error_); if (!dev) return; info.GetReturnValue().Set(RSDevice::NewInstance(dev)); diff --git a/wrappers/python/CMakeLists.txt b/wrappers/python/CMakeLists.txt index d2e35b5532..b5140629d5 100644 --- a/wrappers/python/CMakeLists.txt +++ b/wrappers/python/CMakeLists.txt @@ -32,13 +32,32 @@ install(TARGETS pyrealsense2 EXPORT realsense2Targets ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} ) -set(RAW_RS_CPP ../../src/backend.cpp ../../src/win/win-helpers.cpp ../../src/win/win-uvc.cpp pybackend_extras.cpp pybackend.cpp - ../../src/win/win-usb.cpp ../../src/win/win-backend.cpp ../../src/linux/backend-v4l2.cpp ../../src/linux/backend-hid.cpp ../../src/types.cpp - ../../src/win/win-hid.cpp ../../src/archive.cpp ../../src/log.cpp ../../src/types.cpp ../../third-party/easyloggingpp/src/easylogging++.cc) -set(RAW_RS_HPP ../../src/backend.h ../../src/win/win-helpers.h ../../src/win/win-uvc.h ../../src/linux/backend-v4l2.h ../../src/linux/backend-hid.h - ../../src/win/win-usb.h ../../src/win/win-hid.h ../../src/win/win-backend.h ../../src/archive.h pybackend_extras.h +set(RAW_RS_CPP ../../src/backend.cpp pybackend_extras.cpp pybackend.cpp + ../../src/linux/backend-v4l2.cpp ../../src/linux/backend-hid.cpp ../../src/types.cpp + ../../src/archive.cpp ../../src/log.cpp ../../src/types.cpp ../../third-party/easyloggingpp/src/easylogging++.cc) +set(RAW_RS_HPP ../../src/backend.h ../../src/linux/backend-v4l2.h ../../src/linux/backend-hid.h + ../../src/archive.h pybackend_extras.h ../../src/types.h ../../third-party/easyloggingpp/src/easylogging++.h) +if( (${CMAKE_SYSTEM_VERSION} EQUAL 6.1) 
OR (FORCE_WINUSB_UVC) ) # Windows 7 + list(APPEND RAW_RS_CPP + ../../src/win7/win7-helpers.cpp + ../../src/win7/win7-uvc.cpp + ../../src/win7/win7-usb.cpp + ../../src/win7/win7-hid.cpp + ../../src/win7/win7-backend.cpp + ../../src/win7/winusb_uvc/winusb_uvc.cpp + ) +else() # Some other windows version + list(APPEND RAW_RS_CPP + ../../src/win/win-helpers.cpp + ../../src/win/win-uvc.cpp + ../../src/win/win-usb.cpp + ../../src/win/win-hid.cpp + ../../src/win/win-backend.cpp + ) +endif() + pybind11_add_module(pybackend2 SHARED ${RAW_RS_CPP} ${RAW_RS_HPP}) target_link_libraries(pybackend2 PRIVATE ${LIBUSB1_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT}) set_target_properties(pybackend2 PROPERTIES diff --git a/wrappers/python/python.cpp b/wrappers/python/python.cpp index bbbeccc983..1ebe9559c7 100644 --- a/wrappers/python/python.cpp +++ b/wrappers/python/python.cpp @@ -114,8 +114,14 @@ PYBIND11_MODULE(NAME, m) { public: BufData(void *ptr, size_t itemsize, const std::string& format, size_t ndim, const std::vector &shape, const std::vector &strides) : _ptr(ptr), _itemsize(itemsize), _format(format), _ndim(ndim), _shape(shape), _strides(strides) {} - BufData(void *ptr, size_t itemsize, const std::string &format, size_t size) + BufData(void *ptr, size_t itemsize, const std::string& format, size_t size) : BufData(ptr, itemsize, format, 1, std::vector { size }, std::vector { itemsize }) { } + BufData(void *ptr, // Raw data pointer + size_t itemsize, // Size of the type in bytes + const std::string& format, // Data type's format descriptor (e.g. "@f" for float xyz) + size_t dim, // number of data elements per group (e.g. 
3 for float xyz) + size_t count) // Number of groups + : BufData( ptr, itemsize, format, 2, std::vector { count, dim }, std::vector { itemsize*dim, itemsize }) { } }; py::class_ BufData_py(m, "BufData", py::buffer_protocol()); @@ -130,30 +136,30 @@ PYBIND11_MODULE(NAME, m) { ); /** - Binding of rs2_ enums + Binding of rs2_ enums */ BIND_ENUM(m, rs2_camera_info, RS2_CAMERA_INFO_COUNT) - BIND_ENUM(m, rs2_frame_metadata_value, RS2_FRAME_METADATA_COUNT) - BIND_ENUM(m, rs2_stream, RS2_STREAM_COUNT) - BIND_ENUM(m, rs2_extension, RS2_EXTENSION_COUNT) - BIND_ENUM(m, rs2_format, RS2_FORMAT_COUNT) - BIND_ENUM(m, rs2_notification_category, RS2_NOTIFICATION_CATEGORY_COUNT) - BIND_ENUM(m, rs2_log_severity, RS2_LOG_SEVERITY_COUNT) - BIND_ENUM(m, rs2_option, RS2_OPTION_COUNT) - BIND_ENUM(m, rs2_timestamp_domain, RS2_TIMESTAMP_DOMAIN_COUNT) - BIND_ENUM(m, rs2_distortion, RS2_DISTORTION_COUNT) - BIND_ENUM(m, rs2_playback_status, RS2_PLAYBACK_STATUS_COUNT) - - py::class_ extrinsics(m, "extrinsics"); + BIND_ENUM(m, rs2_frame_metadata_value, RS2_FRAME_METADATA_COUNT) + BIND_ENUM(m, rs2_stream, RS2_STREAM_COUNT) + BIND_ENUM(m, rs2_extension, RS2_EXTENSION_COUNT) + BIND_ENUM(m, rs2_format, RS2_FORMAT_COUNT) + BIND_ENUM(m, rs2_notification_category, RS2_NOTIFICATION_CATEGORY_COUNT) + BIND_ENUM(m, rs2_log_severity, RS2_LOG_SEVERITY_COUNT) + BIND_ENUM(m, rs2_option, RS2_OPTION_COUNT) + BIND_ENUM(m, rs2_timestamp_domain, RS2_TIMESTAMP_DOMAIN_COUNT) + BIND_ENUM(m, rs2_distortion, RS2_DISTORTION_COUNT) + BIND_ENUM(m, rs2_playback_status, RS2_PLAYBACK_STATUS_COUNT) + + py::class_ extrinsics(m, "extrinsics"); extrinsics.def(py::init<>()) .def_property(BIND_RAW_ARRAY_PROPERTY(rs2_extrinsics, rotation, float, 9)) .def_property(BIND_RAW_ARRAY_PROPERTY(rs2_extrinsics, translation, float, 3)) .def("__repr__", [](const rs2_extrinsics &e) { - std::stringstream ss; - ss << "rotation: " << array_to_string(e.rotation); - ss << "\ntranslation: " << array_to_string(e.translation); - return ss.str(); - }); 
+ std::stringstream ss; + ss << "rotation: " << array_to_string(e.rotation); + ss << "\ntranslation: " << array_to_string(e.translation); + return ss.str(); + }); py::class_ intrinsics(m, "intrinsics"); intrinsics.def(py::init<>()) @@ -210,7 +216,7 @@ PYBIND11_MODULE(NAME, m) { context.def(py::init<>()) .def("query_devices", (rs2::device_list(rs2::context::*)() const) &rs2::context::query_devices, "Create a static" " snapshot of all connected devices a the time of the call.") - .def_property_readonly("devices", (rs2::device_list (rs2::context::*)() const) &rs2::context::query_devices, + .def_property_readonly("devices", (rs2::device_list(rs2::context::*)() const) &rs2::context::query_devices, "Create a static snapshot of all connected devices a the time of the call.") .def("query_all_sensors", &rs2::context::query_all_sensors, "Generate a flat list of " "all available sensors from all RealSense devices.") @@ -290,13 +296,13 @@ PYBIND11_MODULE(NAME, m) { py::class_ event_information(m, "event_information"); event_information.def("was_removed", &rs2::event_information::was_removed, "Check if " - "specific device was disconnected.", "dev"_a) - .def("was_added", &rs2::event_information::was_added, "Check if " - "specific device was added.", "dev"_a) - .def("get_new_devices", &rs2::event_information::get_new_devices, "Returns a " - "list of all newly connected devices") - .def("get_removed_devices", &rs2::event_information::get_removed_devices, "Returns a " - "list of all newly removed devices"); + "specific device was disconnected.", "dev"_a) + .def("was_added", &rs2::event_information::was_added, "Check if " + "specific device was added.", "dev"_a) + .def("get_new_devices", &rs2::event_information::get_new_devices, "Returns a " + "list of all newly connected devices") + .def("get_removed_devices", &rs2::event_information::get_removed_devices, "Returns a " + "list of all newly removed devices"); py::class_ tm2(m, "tm2"); tm2.def(py::init(), "device"_a) @@ -316,18 
+322,18 @@ PYBIND11_MODULE(NAME, m) { switch (vf.get_profile().format()) { case RS2_FORMAT_RGB8: case RS2_FORMAT_BGR8: return BufData(const_cast(vf.get_data()), 1, bytes_per_pixel_to_format[1], 3, - { static_cast(vf.get_height()), static_cast(vf.get_width()), 3 }, - { static_cast(vf.get_stride_in_bytes()), static_cast(vf.get_bytes_per_pixel()), 1 }); + { static_cast(vf.get_height()), static_cast(vf.get_width()), 3 }, + { static_cast(vf.get_stride_in_bytes()), static_cast(vf.get_bytes_per_pixel()), 1 }); break; case RS2_FORMAT_RGBA8: case RS2_FORMAT_BGRA8: return BufData(const_cast(vf.get_data()), 1, bytes_per_pixel_to_format[1], 3, - { static_cast(vf.get_height()), static_cast(vf.get_width()), 4 }, - { static_cast(vf.get_stride_in_bytes()), static_cast(vf.get_bytes_per_pixel()), 1 }); + { static_cast(vf.get_height()), static_cast(vf.get_width()), 4 }, + { static_cast(vf.get_stride_in_bytes()), static_cast(vf.get_bytes_per_pixel()), 1 }); break; default: return BufData(const_cast(vf.get_data()), static_cast(vf.get_bytes_per_pixel()), bytes_per_pixel_to_format[vf.get_bytes_per_pixel()], 2, - { static_cast(vf.get_height()), static_cast(vf.get_width()) }, - { static_cast(vf.get_stride_in_bytes()), static_cast(vf.get_bytes_per_pixel()) }); + { static_cast(vf.get_height()), static_cast(vf.get_width()) }, + { static_cast(vf.get_stride_in_bytes()), static_cast(vf.get_bytes_per_pixel()) }); } } else @@ -335,29 +341,29 @@ PYBIND11_MODULE(NAME, m) { py::class_ frame(m, "frame"); frame.def(py::init<>()) -// .def(py::self = py::self) // can't overload assignment in python - .def(py::init()) - .def("swap", &rs2::frame::swap, "other"_a) - .def("__nonzero__", &rs2::frame::operator bool) - .def("get_timestamp", &rs2::frame::get_timestamp, "Retrieve the time at which the frame was captured") - .def_property_readonly("timestamp", &rs2::frame::get_timestamp, "Retrieve the time at which the frame was captured") - .def("get_frame_timestamp_domain", 
&rs2::frame::get_frame_timestamp_domain, "Retrieve the timestamp domain.") - .def_property_readonly("frame_timestamp_domain", &rs2::frame::get_frame_timestamp_domain, "Retrieve the timestamp domain.") - .def("get_frame_metadata", &rs2::frame::get_frame_metadata, "Retrieve the current value of a single frame_metadata.", "frame_metadata"_a) - .def("supports_frame_metadata", &rs2::frame::supports_frame_metadata, "Determine if the device allows a specific metadata to be queried.", "frame_metadata"_a) - .def("get_frame_number", &rs2::frame::get_frame_number, "Retrieve the frame number.") - .def_property_readonly("frame_number", &rs2::frame::get_frame_number, "Retrieve the frame number.") - .def("get_data", get_frame_data,"retrieve data from the frame handle.", py::keep_alive<0, 1>()) - .def_property_readonly("data", get_frame_data, "retrieve data from the frame handle.", py::keep_alive<0, 1>()) - .def("get_profile", &rs2::frame::get_profile) - .def("keep", &rs2::frame::keep) - .def_property_readonly("profile", &rs2::frame::get_profile) - .def(BIND_DOWNCAST(frame, frame)) - .def(BIND_DOWNCAST(frame, points)) - .def(BIND_DOWNCAST(frame, frameset)) - .def(BIND_DOWNCAST(frame, video_frame)) - .def(BIND_DOWNCAST(frame, depth_frame)); - + // .def(py::self = py::self) // can't overload assignment in python + .def(py::init()) + .def("swap", &rs2::frame::swap, "other"_a) + .def("__nonzero__", &rs2::frame::operator bool) + .def("get_timestamp", &rs2::frame::get_timestamp, "Retrieve the time at which the frame was captured") + .def_property_readonly("timestamp", &rs2::frame::get_timestamp, "Retrieve the time at which the frame was captured") + .def("get_frame_timestamp_domain", &rs2::frame::get_frame_timestamp_domain, "Retrieve the timestamp domain.") + .def_property_readonly("frame_timestamp_domain", &rs2::frame::get_frame_timestamp_domain, "Retrieve the timestamp domain.") + .def("get_frame_metadata", &rs2::frame::get_frame_metadata, "Retrieve the current value of a single 
frame_metadata.", "frame_metadata"_a) + .def("supports_frame_metadata", &rs2::frame::supports_frame_metadata, "Determine if the device allows a specific metadata to be queried.", "frame_metadata"_a) + .def("get_frame_number", &rs2::frame::get_frame_number, "Retrieve the frame number.") + .def_property_readonly("frame_number", &rs2::frame::get_frame_number, "Retrieve the frame number.") + .def("get_data", get_frame_data, "retrieve data from the frame handle.", py::keep_alive<0, 1>()) + .def_property_readonly("data", get_frame_data, "retrieve data from the frame handle.", py::keep_alive<0, 1>()) + .def("get_profile", &rs2::frame::get_profile) + .def("keep", &rs2::frame::keep) + .def_property_readonly("profile", &rs2::frame::get_profile) + //.def("apply_filter", &rs2::frame::apply_filter, "processing_block"_a) + .def(BIND_DOWNCAST(frame, frame)) + .def(BIND_DOWNCAST(frame, points)) + .def(BIND_DOWNCAST(frame, frameset)) + .def(BIND_DOWNCAST(frame, video_frame)) + .def(BIND_DOWNCAST(frame, depth_frame)); py::class_ video_frame(m, "video_frame"); video_frame.def(py::init()) @@ -394,26 +400,39 @@ PYBIND11_MODULE(NAME, m) { oss << t.u << ", " << t.v; return oss.str(); }); + py::class_ points(m, "points"); points.def(py::init<>()) .def(py::init()) - .def("get_vertices", [](rs2::points& self) -> BufData - { - return BufData(const_cast(self.get_vertices()), - sizeof(rs2::vertex), std::string("@fff"), self.size()); - }, py::keep_alive<0, 1>()) - .def("get_texture_coordinates", [](rs2::points& self) -> BufData - { - return BufData(const_cast(self.get_texture_coordinates()), - sizeof(rs2::texture_coordinate), std::string("@ff"), self.size()); - }, py::keep_alive<0, 1>()) + .def("get_vertices", [](rs2::points& self, int dims) -> BufData + { + auto verts = const_cast(self.get_vertices()); + switch (dims) { + case 1: + return BufData(verts, sizeof(rs2::vertex), "@fff", self.size()); + case 2: + return BufData(verts, sizeof(float), "@f", 3, self.size()); + } + }, py::keep_alive<0, 
1>(), "dims"_a=1) + .def("get_texture_coordinates", [](rs2::points& self, int dims) -> BufData + { + auto tex = const_cast(self.get_texture_coordinates()); + switch (dims) { + case 1: + return BufData(tex, sizeof(rs2::texture_coordinate), "@ff", self.size()); + case 2: + return BufData(tex, sizeof(float), "@f", 2, self.size()); + } + }, py::keep_alive<0, 1>(), "dims"_a=1) .def("export_to_ply", &rs2::points::export_to_ply) .def("size", &rs2::points::size); py::class_ frameset(m, "composite_frame"); frameset.def(py::init()) - .def("first_or_default", &rs2::frameset::first_or_default, "s"_a) - .def("first", &rs2::frameset::first, "s"_a) + .def("first_or_default", &rs2::frameset::first_or_default, "s"_a, "f"_a) + .def("first_or_default", &rs2::frameset::first_or_default, "s"_a, "f"_a = RS2_FORMAT_ANY) + .def("first", &rs2::frameset::first, "s"_a, "f"_a) + .def("first", &rs2::frameset::first, "s"_a, "f"_a = RS2_FORMAT_ANY) .def("size", &rs2::frameset::size) .def("foreach", [](const rs2::frameset& self, std::function callable) { @@ -442,10 +461,10 @@ PYBIND11_MODULE(NAME, m) { depth_frame.def(py::init()) .def("get_distance", &rs2::depth_frame::get_distance, "x"_a, "y"_a); - - - /* rs2_processing.hpp */ + py::class_ process_interface(m, "process_interface"); + process_interface.def("process", &rs2::process_interface::process, "frame"_a); + // Base class for options interface. 
Should be used via sensor py::class_ options(m, "options"); options.def("is_option_read_only", &rs2::options::is_option_read_only, "Check if particular option " @@ -461,7 +480,7 @@ PYBIND11_MODULE(NAME, m) { "(In case a specific option value holds special meaning)", "option"_a, "value"_a); // Not binding frame_processor_callback, templated - py::class_ processing_block(m, "processing_block"); + py::class_ processing_block(m, "processing_block"); processing_block.def("start", [](rs2::processing_block& self, std::function f) { self.start(f); @@ -486,7 +505,7 @@ PYBIND11_MODULE(NAME, m) { }, "Poll if a new frame is available and dequeue it if it is") .def("__call__", &rs2::frame_queue::operator()); - py::class_ pointcloud(m, "pointcloud"); + py::class_ pointcloud(m, "pointcloud"); pointcloud.def(py::init<>()) .def("calculate", &rs2::pointcloud::calculate, "depth"_a) .def("map_to", &rs2::pointcloud::map_to, "mapped"_a); @@ -503,32 +522,28 @@ PYBIND11_MODULE(NAME, m) { }, "Check if a coherent set of frames is available"); /*.def("__call__", &rs2::syncer::operator(), "frame"_a)*/; - py::class_ colorizer(m, "colorizer"); + py::class_ colorizer(m, "colorizer"); colorizer.def(py::init<>()) .def("colorize", &rs2::colorizer::colorize, "depth"_a) /*.def("__call__", &rs2::colorizer::operator())*/; - py::class_ align(m, "align"); + py::class_ align(m, "align"); align.def(py::init(), "align_to"_a) - .def("process", &rs2::align::process, "depth"_a); - - // Do we need this? 
- py::class_ process_interface(m, "process_interface"); - process_interface.def("process", &rs2::process_interface::process, "frame"_a); + .def("process", &rs2::align::process, "frames"_a); - py::class_ decimation_filter(m, "decimation_filter"); + py::class_ decimation_filter(m, "decimation_filter"); decimation_filter.def(py::init<>()); - py::class_ temporal_filter(m, "temporal_filter"); + py::class_ temporal_filter(m, "temporal_filter"); temporal_filter.def(py::init<>()); - py::class_ spatial_filter(m, "spatial_filter"); + py::class_ spatial_filter(m, "spatial_filter"); spatial_filter.def(py::init<>()); - py::class_ hole_filling_filter(m, "hole_filling_filter"); + py::class_ hole_filling_filter(m, "hole_filling_filter"); hole_filling_filter.def(py::init<>()); - py::class_ disparity_transform(m, "disparity_transform"); + py::class_ disparity_transform(m, "disparity_transform"); disparity_transform.def(py::init(), "transform_to_disparity"_a=true); /* rs2_record_playback.hpp */ diff --git a/wrappers/python/readme.md b/wrappers/python/readme.md index 9f51456171..60e9d688e5 100644 --- a/wrappers/python/readme.md +++ b/wrappers/python/readme.md @@ -71,8 +71,7 @@ try: pipeline.start() while True: - # This call waits until a new coherent set of frames is available on a device - # Calls to get_frame_data(...) and get_frame_timestamp(...) on a device will return stable values until wait_for_frames(...) is called + # Create a pipeline object. 
This object configures the streaming camera and owns its handle frames = pipeline.wait_for_frames() depth = frames.get_depth_frame() if not depth: continue diff --git a/wrappers/readme.md b/wrappers/readme.md index 7e49916a2f..64c9343624 100644 --- a/wrappers/readme.md +++ b/wrappers/readme.md @@ -8,4 +8,4 @@ * [PCL](./pcl) - Basic integration with the Point-Cloud Library * [.NET](./csharp) - Support for .NET languages with examples in C# * [Unity](./unity) - Unity assets for using RealSense SDK 2.0 -* Matlab - TBD +* [Matlab](./matlab) - Supports 64-bit Windows installations diff --git a/wrappers/unity/ProjectSettings/PresetManager.asset b/wrappers/unity/ProjectSettings/PresetManager.asset new file mode 100644 index 0000000000..636a595b32 --- /dev/null +++ b/wrappers/unity/ProjectSettings/PresetManager.asset @@ -0,0 +1,6 @@ +%YAML 1.1 +%TAG !u! tag:unity3d.com,2011: +--- !u!1386491679 &1 +PresetManager: + m_ObjectHideFlags: 0 + m_DefaultList: [] diff --git a/wrappers/unity/readme.md b/wrappers/unity/readme.md index 67f98c5f17..dae313cbd8 100644 --- a/wrappers/unity/readme.md +++ b/wrappers/unity/readme.md @@ -2,7 +2,7 @@

-> [Download **realsense.unitypackage**](http://realsense-hw-public.s3.amazonaws.com/rs-tests/realsense.unitypackage) and go to `Assets > Scenes > Start Here` to see the home screen above +> [Download **realsense.unitypackage**](https://github.com/IntelRealSense/librealsense/releases/download/v2.16.0/Intel.RealSense.unitypackage) and go to `Assets > Scenes > Start Here` to see the home screen above ## Overview @@ -28,7 +28,7 @@ At the end of this step, the Unity project will be available in the CMake output - Generate the VS solution using cmake (run from librealsense root dir): - `mkdir build` - `cd build` - - `cmake .. -DBUILD_CSHARP_BINDINGS=ON -DBUILD_UNITY_BINDINGS=ON -DBUILD_SHARED_LIBS=ON -DDOTNET_VERSION_LIBRARY=3.5` + - `cmake .. -DBUILD_CSHARP_BINDINGS=ON -DBUILD_UNITY_BINDINGS=ON -DBUILD_SHARED_LIBS=ON -DDOTNET_VERSION_LIBRARY=3.5 -DCMAKE_GENERATOR_PLATFORM=x64` - The file 'realsense2.sln' should be created in 'build' folder, open the file with Visual Studio, C# examples and library will be available in the solution under 'Wrappers/csharp'. - Build Intel.RealSense project, this step will build both the native library and the .NET wrapper and will copy both DLLs to the Unity project Plugins folder. - In case Unity is installed and activated, a Unity package should be created at build/\/realsense.unitypackage. This file can be imported by the Unity editor.