pull/3471/merge
Amir Hassan, committed via GitHub 1 month ago
commit a87cc21f11
Files changed (changed-line counts in parentheses; BIN = binary):

  1. .gitignore (1)
  2. .gitmodules (12)
  3. README.md (103)
  4. debug-env.sh (24)
  5. modules/v4d/.gitignore (13)
  6. modules/v4d/CMakeLists.txt (229)
  7. modules/v4d/doc/custom_source_and_sink.gif (BIN)
  8. modules/v4d/doc/display_image.png (BIN)
  9. modules/v4d/doc/display_image_fb.png (BIN)
  10. modules/v4d/doc/font_rendering.png (BIN)
  11. modules/v4d/doc/font_with_gui.png (BIN)
  12. modules/v4d/doc/lena.png (BIN)
  13. modules/v4d/doc/render_opengl.png (BIN)
  14. modules/v4d/doc/v4d.bib (0)
  15. modules/v4d/doc/vector_graphics.png (BIN)
  16. modules/v4d/doc/vector_graphics_and_fb.png (BIN)
  17. modules/v4d/doc/video_editing.png (BIN)
  18. modules/v4d/include/opencv2/v4d/detail/cl.hpp (13)
  19. modules/v4d/include/opencv2/v4d/detail/context.hpp (44)
  20. modules/v4d/include/opencv2/v4d/detail/framebuffercontext.hpp (352)
  21. modules/v4d/include/opencv2/v4d/detail/gl.hpp (19)
  22. modules/v4d/include/opencv2/v4d/detail/glcontext.hpp (43)
  23. modules/v4d/include/opencv2/v4d/detail/imguicontext.hpp (35)
  24. modules/v4d/include/opencv2/v4d/detail/nanovgcontext.hpp (81)
  25. modules/v4d/include/opencv2/v4d/detail/resequence.hpp (38)
  26. modules/v4d/include/opencv2/v4d/detail/sinkcontext.hpp (56)
  27. modules/v4d/include/opencv2/v4d/detail/sourcecontext.hpp (56)
  28. modules/v4d/include/opencv2/v4d/detail/timetracker.hpp (139)
  29. modules/v4d/include/opencv2/v4d/detail/transaction.hpp (112)
  30. modules/v4d/include/opencv2/v4d/events.hpp (470)
  31. modules/v4d/include/opencv2/v4d/nvg.hpp (509)
  32. modules/v4d/include/opencv2/v4d/scene.hpp (198)
  33. modules/v4d/include/opencv2/v4d/sink.hpp (61)
  34. modules/v4d/include/opencv2/v4d/source.hpp (74)
  35. modules/v4d/include/opencv2/v4d/threadsafemap.hpp (124)
  36. modules/v4d/include/opencv2/v4d/util.hpp (531)
  37. modules/v4d/include/opencv2/v4d/v4d.hpp (872)
  38. modules/v4d/samples/beauty-demo.cpp (399)
  39. modules/v4d/samples/bgfx-demo.cpp (75)
  40. modules/v4d/samples/bgfx-demo2.cpp (287)
  41. modules/v4d/samples/cube-demo.cpp (248)
  42. modules/v4d/samples/custom_source_and_sink.cpp (62)
  43. modules/v4d/samples/display_image.cpp (32)
  44. modules/v4d/samples/display_image_fb.cpp (40)
  45. modules/v4d/samples/display_image_nvg.cpp (59)
  46. modules/v4d/samples/example_v4d_beauty-demo.html (264)
  47. modules/v4d/samples/example_v4d_capture.sh (271)
  48. modules/v4d/samples/example_v4d_cube-demo.html (210)
  49. modules/v4d/samples/example_v4d_custom_source_and_sink.html (210)
  50. modules/v4d/samples/example_v4d_display_image.html (210)
  51. modules/v4d/samples/example_v4d_display_image_fb.html (210)
  52. modules/v4d/samples/example_v4d_display_image_nvg.html (210)
  53. modules/v4d/samples/example_v4d_font-demo.html (210)
  54. modules/v4d/samples/example_v4d_font_rendering.html (210)
  55. modules/v4d/samples/example_v4d_font_with_gui.html (210)
  56. modules/v4d/samples/example_v4d_many_cubes-demo.html (210)
  57. modules/v4d/samples/example_v4d_nanovg-demo.html (264)
  58. modules/v4d/samples/example_v4d_nocapture.sh (218)
  59. modules/v4d/samples/example_v4d_optflow-demo.html (264)
  60. modules/v4d/samples/example_v4d_pedestrian-demo.html (264)
  61. modules/v4d/samples/example_v4d_render_opengl.html (210)
  62. modules/v4d/samples/example_v4d_shader-demo.html (264)
  63. modules/v4d/samples/example_v4d_vector_graphics.html (210)
  64. modules/v4d/samples/example_v4d_vector_graphics_and_fb.html (210)
  65. modules/v4d/samples/example_v4d_video-demo.html (264)
  66. modules/v4d/samples/example_v4d_video_editing.html (264)
  67. modules/v4d/samples/font-demo.cpp (243)
  68. modules/v4d/samples/font_rendering.cpp (33)
  69. modules/v4d/samples/font_with_gui.cpp (53)
  70. modules/v4d/samples/fonts/Roboto-Bold.ttf (BIN)
  71. modules/v4d/samples/fonts/Roboto-Light.ttf (BIN)
  72. modules/v4d/samples/fonts/Roboto-Regular.ttf (BIN)
  73. modules/v4d/samples/fonts/entypo.ttf (BIN)
  74. modules/v4d/samples/make_example_html.sh (23)
  75. modules/v4d/samples/many_cubes-demo.cpp (269)
  76. modules/v4d/samples/montage-demo.cpp (137)
  77. modules/v4d/samples/nanovg-demo.cpp (195)
  78. modules/v4d/samples/optflow-demo.cpp (472)
  79. modules/v4d/samples/pedestrian-demo.cpp (292)
  80. modules/v4d/samples/render_opengl.cpp (30)
  81. modules/v4d/samples/scene-demo.cpp (67)
  82. modules/v4d/samples/shader-demo.cpp (348)
  83. modules/v4d/samples/vector_graphics.cpp (111)
  84. modules/v4d/samples/vector_graphics_and_fb.cpp (110)
  85. modules/v4d/samples/video-demo.cpp (228)
  86. modules/v4d/samples/video_editing.cpp (53)
  87. modules/v4d/src/detail/framebuffercontext.cpp (793)
  88. modules/v4d/src/detail/glcontext.cpp (42)
  89. modules/v4d/src/detail/imguicontext.cpp (99)
  90. modules/v4d/src/detail/nanovgcontext.cpp (82)
  91. modules/v4d/src/detail/nvg.cpp (45)
  92. modules/v4d/src/detail/sinkcontext.cpp (48)
  93. modules/v4d/src/detail/sourcecontext.cpp (76)
  94. modules/v4d/src/detail/timetracker.cpp (16)
  95. modules/v4d/src/nvg.cpp (716)
  96. modules/v4d/src/resequence.cpp (33)
  97. modules/v4d/src/scene.cpp (480)
  98. modules/v4d/src/sink.cpp (60)
  99. modules/v4d/src/source.cpp (45)
  100. modules/v4d/src/util.cpp (432)

Some files were not shown because too many files have changed in this diff.

.gitignore (1)

@ -10,3 +10,4 @@ tags
tegra/
*.i
.download*
/Debug/

.gitmodules (12)

@ -0,0 +1,12 @@
[submodule "modules/v4d/third/imgui"]
path = modules/v4d/third/imgui
url = https://github.com/kallaballa/imgui.git
[submodule "modules/v4d/third/doxygen-bootstrapped"]
path = modules/v4d/third/doxygen-bootstrapped
url = https://github.com/kallaballa/doxygen-bootstrapped.git
[submodule "modules/v4d/third/nanovg"]
path = modules/v4d/third/nanovg
url = https://github.com/kallaballa/nanovg
[submodule "bgfx.cmake"]
path = modules/v4d/third/bgfx.cmake
url = https://github.com/bkaradzic/bgfx.cmake.git

README.md
@ -1,60 +1,85 @@
## Introduction to "Plan" and "V4D"

### Overview of "Plan"
**Plan** is a computational graph engine built with C++20 templates, enabling developers to construct directed acyclic graphs (DAGs) from fragments of algorithms. By leveraging these graphs, Plan facilitates the optimization of parallel and concurrent algorithms, ensuring efficient resource utilization. The framework divides the lifetime of an algorithm into two distinct phases: **inference** and **execution**.

- **Inference Phase:** During this phase, the computational graph is constructed by running the Plan implementation. This process organizes the algorithm's fragments and binds them to data, which may be classified as:
  - **Safe Data:** Member variables of the Plan.
  - **Shared Data:** External variables (e.g., global or static data).

  Functions and data are explicitly flagged as shared when necessary, adhering to Plan's transparent approach to state management. The framework discourages hidden states, as they impede program integrity and graph optimization.

- **Execution Phase:** This phase executes the constructed graph using the defined nodes and edges. Nodes typically represent algorithmic fragments such as functions or lambdas, while edges define data flow, supporting various access patterns (e.g., read, write, copy).

Plan also allows hierarchical composition, where one Plan may be composed of other sub-Plans. Special rules govern data sharing in such compositions to maintain performance and correctness. Currently, optimizations are limited to "best-effort" pipelining, with plans for more sophisticated enhancements. A minimal Plan is sketched below.
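The following is a hedged sketch of a minimal Plan, modeled on `modules/v4d/samples/display_image.cpp` from this PR. The names (`Plan`, `V4D::make`, `run`, and the `plain`/`fb` context calls) come from the samples, but the exact signatures are approximated here; treat it as an illustration of the two phases, not as reference documentation.

```cpp
#include <opencv2/v4d/v4d.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>

using namespace cv::v4d;

// Illustrative only; names follow samples/display_image.cpp, signatures approximated.
class DisplayImagePlan : public Plan {
    cv::UMat image_;  // "safe data": a member variable of the Plan
public:
    void setup(cv::Ptr<V4D> win) override {
        // A plain (CPU) node; the lambda is bound to image_ as an edge.
        win->plain([](cv::UMat& image) {
            cv::imread(cv::samples::findFile("lena.jpg")).copyTo(image);
        }, image_);
    }

    void infer(cv::Ptr<V4D> win) override {
        // A framebuffer node; the framebuffer UMat is acquired for the lambda.
        win->fb([](cv::UMat& framebuffer, const cv::UMat& image) {
            cv::resize(image, framebuffer, framebuffer.size());
        }, image_);
    }
};

int main() {
    cv::Ptr<V4D> window = V4D::make(cv::Size(960, 960), "Display Image");
    window->run<DisplayImagePlan>(0);  // inference builds the graph, then it executes
}
```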
### Overview of "V4D"
**V4D** is a versatile 2D/3D graphics runtime designed to integrate seamlessly with Plan. Built atop OpenGL (3.2 Core or ES 3.0), V4D extends its functionality through bindings to prominent libraries:

- **NanoVG:** For 2D vector and raster graphics, including font rendering.
- **bgfx:** A 3D engine modified to defer its concurrency model to Plan for optimal parallel execution.
- **ImGui:** A lightweight GUI overlay.

V4D encourages direct OpenGL usage and external API integrations via **context sharing**, which is implemented using shared textures. Each external API operates within its isolated OpenGL state machine, maintaining thread safety and modularity.

The runtime's capabilities are further augmented by its integration with OpenCV, providing:

- **Hardware Acceleration:** Utilizing OpenGL for graphics, VAAPI and NVENC for video, and OpenCL-OpenGL interop for compute tasks.
- **Data Sharing on GPU:** Depending on hardware and software features, V4D can directly share or copy data within GPU memory for efficient processing.

### Integration and Platform Support
V4D and Plan share a tightly bonded design, simplifying combined use cases. However, plans are underway to decouple them, enabling the adoption of alternative runtimes. V4D is actively developed for Linux (X11 and Wayland via EGL or GLX), with auto-detection of supported backends. While macOS support lags slightly, Windows compatibility remains untested but is considered during development. The sketch below shows how the different rendering contexts are combined inside a single Plan.
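As an illustration of context sharing, a single Plan can mix raw OpenGL and NanoVG nodes; each runs against its own isolated context while rendering to the shared framebuffer texture. The `gl` and `nvg` call names and the `cv::v4d::nvg` wrapper functions follow the samples (e.g. `render_opengl.cpp`, `vector_graphics.cpp`); the class name `MixedContextPlan` is hypothetical and the snippet is a hedged sketch, not a verbatim excerpt.

```cpp
#include <opencv2/v4d/v4d.hpp>

using namespace cv::v4d;

// Hedged sketch; names follow samples/render_opengl.cpp and samples/vector_graphics.cpp.
class MixedContextPlan : public Plan {
public:
    void infer(cv::Ptr<V4D> win) override {
        // Raw OpenGL node: runs in its own GL context, drawing into the shared texture.
        win->gl([]() {
            glClearColor(0.0f, 0.0f, 1.0f, 1.0f);  // a plain blue screen
            glClear(GL_COLOR_BUFFER_BIT);
        });

        // NanoVG node: vector graphics composited on top, in an isolated NanoVG context.
        win->nvg([]() {
            using namespace cv::v4d::nvg;
            beginPath();
            circle(480.0f, 480.0f, 100.0f);         // cx, cy, radius
            fillColor(cv::Scalar(255, 0, 0, 255));  // color as cv::Scalar
            fill();
        });
    }
};
```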
### Key Principles and Features
1. **Fine-Grained Edge Calls:** Plan introduces specialized edge calls (e.g., `R`, `RW`, `V`) to define data access patterns, supporting smart pointers and OpenCV `UMat` objects. This granularity allows better graph optimization.
2. **State and Data Transparency:** Functions and data in a Plan must avoid introducing hidden states unless explicitly marked as shared. This principle ensures the integrity of the graph and its optimizations.
3. **Parallelism and Pipelining:** Multiple OpenGL contexts can be created and utilized in parallel, making V4D a robust solution for high-performance graphics applications.
4. **Algorithm Modularity:** By structuring algorithms into smaller, reusable fragments or sub-Plans, Plan fosters modular development and scalability.

### Selected Commented Examples (read sequentially)
The following examples have been selected to deepen your understanding of Plan-V4D. There are many more.

#### Blue Screen using OpenGL
[source](modules/v4d/samples/render_opengl.cpp)

#### Displaying an Image using NanoVG
[source](modules/v4d/samples/display_image_nvg.cpp)

#### A realtime beauty filter (using sub-plans)
[source](modules/v4d/samples/beauty-demo.cpp)
## Why Plan-V4D?

* Computation Graph Engine: Fast parallel code.
* OpenGL: Easy access to OpenGL.
* GUI: Simple yet powerful user interfaces through ImGui.
* Vector graphics: Elegant and fast vector graphics through NanoVG.
* 3D graphics: Powerful 3D graphics through bgfx.
* Font rendering: Loading of fonts and sophisticated rendering options.
* Video pipeline: Through a simple source/sink system, videos can be efficiently read, displayed, edited and saved.
* Hardware acceleration: Transparent hardware acceleration usage where possible (e.g. OpenGL, OpenCL, CL-GL interop, VAAPI and CL-VAAPI interop, NVENC).
* No more highgui with its heavy dependencies, licenses and limitations.

Please refer to the examples and demos as well as [this OpenCV issue](https://github.com/opencv/opencv/issues/22923) to find out exactly what it can do for you.
## GPU Support
* Intel Gen 8+ (tested: Gen 11 and Gen 13)
* NVIDIA Ada Lovelace (tested: RTX 4070 Ti) with proprietary drivers (535.104.05) and CUDA toolkit (12.2)
* Intel Arc A770 (Mesa 24.3.1) tested
* AMD: never tested

## Requirements
* C++20 (at the moment)
* OpenGL 3.2 Core (optionally Compat) / OpenGL ES 3.0 / WebGL2

## Optional requirements
* Support for OpenCL 1.2
* Support for the cl_khr_gl_sharing and cl_intel_va_api_media_sharing OpenCL extensions

## Dependencies
* My OpenCV 4.x fork (it works with mainline OpenCV 4.x as well, but using the fork is highly recommended because it features several improvements and fixes)
* GLEW
* GLFW3
* NanoVG (included as a sub-repo)
* ImGui (included as a sub-repo)
* bgfx (included as a sub-repo)
* Glad (included)

debug-env.sh
@ -0,0 +1,24 @@
#OPENCV_LOC=/home/elchaschab/devel/opencv/
#FFMPEG_LOC=/home/elchaschab/devel/cartwheel-ffmpeg/ffmpeg/
#export LD_LIBRARY_PATH="$OPENCV_LOC/build/lib/:$FFMPEG_LOC/libavcodec/:$FFMPEG_LOC/libavutil/:$FFMPEG_LOC/libavdevice/:$FFMPEG_LOC/libavformat/:$FFMPEG_LOC/libavfilter/:$FFMPEG_LOC/libpostproc/:$FFMPEG_LOC/libswresample/:$FFMPEG_LOC/libswscale/:$LD_LIBRARY_PATH"
export OPENCV_LOG_LEVEL=VERBOSE
export OPENCV_FFMPEG_LOGLEVEL=56
export OPENCV_VIDEOIO_DEBUG=1
export OPENCV_VIDEOWRITER_DEBUG=1
export OPENCV_VIDEOCAPTURE_DEBUG=1
export OPENCV_FFMPEG_DEBUG=1
export OPENCV_OPENCL_RAISE_ERROR=1
export OPENCV_OPENCL_ABORT_ON_BUILD_ERROR=1
export OPENCV_DUMP_ERRORS=1
export OPENCV_DUMP_CONFIG=1
export OPENCV_TRACE=1
export OPENCV_TRACE_DEPTH_OPENCV=1
export OPENCV_TRACE_SYNC_OPENCL=1
#export OPENCV_CPU_DISABLE=
#export OPENCV_OPENCL_ENABLE_MEM_USE_HOST_PTR=1
#export OPENCV_OPENCL_ALIGNMENT_MEM_USE_HOST_PTR=1
#export OPENCV_OPENCL_RUNTIME=
#export OPENCV_OPENCL_DEVICE=
#export OPENCV_OPENCL_SVM_DISABLE=1

modules/v4d/.gitignore
@ -0,0 +1,13 @@
.project
build/
samples/*/*.dep
samples/*/*.o
samples/beauty/beauty-demo
samples/font/font-demo
samples/nanovg/nanovg-demo
samples/optflow/optflow-demo
samples/pedestrian/pedestrian-demo
samples/shader/shader-demo
samples/tetra/tetra-demo
samples/video/video-demo

modules/v4d/CMakeLists.txt
@ -0,0 +1,229 @@
cmake_policy(SET CMP0079 NEW)
OCV_OPTION(OPENCV_V4D_ENABLE_ES3 "Enable OpenGL ES 3.0 backend for V4D" OFF
VERIFY HAVE_OPENGL)
include(FetchContent)
if(NOT EMSCRIPTEN)
find_package(glfw3 3 REQUIRED)
find_package(OpenCL REQUIRED)
find_package(GLEW REQUIRED)
include("FindOpenGL")
endif()
set(the_description "V4D Visualization Module")
set(OPENCV_MODULE_IS_PART_OF_WORLD OFF)
# Check CXX Features
get_property(known_features GLOBAL PROPERTY CMAKE_CXX_KNOWN_FEATURES)
list (FIND known_features "cxx_std_20" idx)
if (${idx} LESS 0)
message(STATUS "Module opencv_v4d disabled because it requires C++20")
ocv_module_disable(v4d)
endif()
# Update submodules
find_package(Git QUIET)
if(GIT_FOUND AND EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/../../.git")
# Update submodules as needed
message(STATUS "Submodule update")
execute_process(COMMAND ${GIT_EXECUTABLE} submodule update --init --recursive
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/../../"
RESULT_VARIABLE GIT_SUBMOD_RESULT)
if(NOT GIT_SUBMOD_RESULT EQUAL "0")
message(FATAL_ERROR "git submodule update --init --recursive failed with ${GIT_SUBMOD_RESULT}, please checkout submodules")
endif()
endif()
if(NOT EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/third/imgui/")
message(FATAL_ERROR "The submodules were not downloaded! GIT_SUBMODULE was turned off or failed. Please update submodules and try again.")
endif()
# Macro to download a file
macro(fetch_file download_name url hash)
FetchContent_Declare(${download_name}
URL ${url}
URL_HASH SHA256=${hash}
DOWNLOAD_NO_EXTRACT true
TLS_VERIFY true
)
FetchContent_MakeAvailable(${download_name})
endmacro(fetch_file)
# Macro to add a native sample
macro(add_binary_sample sample source)
if(NOT (TARGET ${sample}))
ocv_add_executable(${sample} ${source})
endif()
ocv_target_link_libraries(${sample} OpenGL GLEW glfw X11 nanovg bgfx)
target_compile_features(${sample} PRIVATE cxx_std_20)
# set_property(TARGET ${sample} PROPERTY POSITION_INDEPENDENT_CODE ON)
target_link_directories(${sample} PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/../../lib")
target_include_directories(${sample} PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/include/" "${CMAKE_CURRENT_SOURCE_DIR}/third/glad/include" "${CMAKE_CURRENT_SOURCE_DIR}/third/imgui" "${CMAKE_CURRENT_SOURCE_DIR}/third/imgui/backends/" "${CMAKE_CURRENT_SOURCE_DIR}/third/nanovg/src/" "${CMAKE_CURRENT_SOURCE_DIR}/third/bgfx.cmake/bgfx/include/" "${CMAKE_CURRENT_SOURCE_DIR}/third/bgfx.cmake/bx/include/" "${CMAKE_CURRENT_SOURCE_DIR}/third/bgfx.cmake/bimg/include/")
endmacro()
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address")
# set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -fsanitize=address -static-libasan")
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined")
# set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -fsanitize=undefined -static-libasan")
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=thread")
# set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -fsanitize=thread -static-libasan")
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror -Wno-sign-promo")
if (NOT (TARGET nanovg))
#Configure NanoVG build options
if(OPENCV_V4D_ENABLE_ES3)
add_definitions(-DNANOVG_GLES3=1 )
else()
add_definitions(-DNANOVG_GL3=1 )
endif()
add_subdirectory("${CMAKE_CURRENT_SOURCE_DIR}/third/nanovg/")
target_compile_options(nanovg PUBLIC -Wno-error)
target_compile_options(nanovg PUBLIC -pthread)
# # target_include_directories(nanovg PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/third/nanovg/src/")
# # include_directories("${CMAKE_CURRENT_SOURCE_DIR}/third/nanovg/src/")
# if(OPENCV_V4D_ENABLE_ES3)
# target_link_libraries(nanovg OpenGL::GLES3)
# else()
# target_link_libraries(nanovg OpenGL::OpenGL)
# endif()
# target_compile_features(nanovg PRIVATE cxx_std_20)
install(TARGETS nanovg EXPORT OpenCVModules)
endif()
if (NOT (TARGET bgfx))
set(BGFX_BUILD_EXAMPLES OFF)
set(BGFX_LIBRARY_TYPE "SHARED")
set(BGFX_INSTALL OFF)
if(OPENCV_V4D_ENABLE_ES3)
set(BGFX_OPENGLES_VERSION "30")
else()
set(BGFX_OPENGL_VERSION "32")
endif()
#-DBGFX_CONFIG_MULTITHREADED=0
add_definitions(-DBGFX_CONFIG_PROFILER=0 -DBGFX_CONFIG_PASSIVE=1)
add_subdirectory("${CMAKE_CURRENT_SOURCE_DIR}/third/bgfx.cmake")
target_compile_features(bgfx PRIVATE cxx_std_20)
target_compile_options(bgfx PUBLIC -Wno-error)
target_include_directories(bgfx PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/third/glad/include")
# target_link_libraries(bgfx PUBLIC glfw)
install(TARGETS bgfx EXPORT OpenCVModules)
install(TARGETS bimg EXPORT OpenCVModules)
install(TARGETS bx EXPORT OpenCVModules)
endif()
# Add the opencv module
if(NOT (TARGET ${the_module}))
ocv_add_module(v4d opencv_core opencv_imgproc opencv_videoio opencv_video)
file(GLOB imgui_sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/third/imgui/*.cpp" "${CMAKE_CURRENT_SOURCE_DIR}/src/detail/imguicontext.cpp")
file(GLOB imgui_backend_sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/third/imgui/backends/imgui_impl_opengl3*.cpp")
file(GLOB imgui_glfw_sources CONFIGURE_DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/third/imgui/backends/imgui_impl_glfw.cpp")
ocv_glob_module_sources("${CMAKE_CURRENT_SOURCE_DIR}/src" "${CMAKE_CURRENT_SOURCE_DIR}/src/detail/" ${imgui_sources} ${imgui_backend_sources} ${imgui_glfw_sources})
ocv_module_include_directories("${CMAKE_CURRENT_SOURCE_DIR}/include/" "${CMAKE_CURRENT_SOURCE_DIR}/third/glad/include" "${CMAKE_CURRENT_SOURCE_DIR}/third/imgui" "${CMAKE_CURRENT_SOURCE_DIR}/third/imgui/backends/" "${CMAKE_CURRENT_SOURCE_DIR}/third/nanovg/src/" "${CMAKE_CURRENT_SOURCE_DIR}/third/bgfx.cmake/bgfx/include/" "${CMAKE_CURRENT_SOURCE_DIR}/third/bgfx.cmake/bx/include/" "${CMAKE_CURRENT_SOURCE_DIR}/third/bgfx.cmake/bimg/include/")
ocv_create_module()
set_target_properties(${the_module} PROPERTIES LINKER_LANGUAGE CXX)
ocv_add_samples(opencv_v4d opencv_core opencv_imgproc opencv_videoio opencv_video opencv_imgcodecs opencv_face opencv_tracking opencv_objdetect opencv_stitching opencv_optflow opencv_imgcodecs opencv_features2d opencv_dnn opencv_flann)
# Populate assets
fetch_file("LBFMODEL" "https://github.com/kurnianggoro/GSOC2017/raw/master/data/lbfmodel.yaml" "70dd8b1657c42d1595d6bd13d97d932877b3bed54a95d3c4733a0f740d1fd66b")
fetch_file("YUNET" "https://github.com/opencv/opencv_zoo/raw/main/models/face_detection_yunet/face_detection_yunet_2023mar.onnx" "8f2383e4dd3cfbb4553ea8718107fc0423210dc964f9f4280604804ed2552fa4")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E make_directory
"${CMAKE_CURRENT_BINARY_DIR}/assets")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E make_directory
"${CMAKE_CURRENT_BINARY_DIR}/assets/doxygen")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E make_directory
"${CMAKE_CURRENT_BINARY_DIR}/assets/models")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E make_directory
"${CMAKE_CURRENT_BINARY_DIR}/assets/fonts")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E copy
"${CMAKE_CURRENT_SOURCE_DIR}/samples/fonts/*.ttf"
"${CMAKE_CURRENT_BINARY_DIR}/assets/fonts/")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E copy
"${CMAKE_CURRENT_LIST_DIR}/doc/lena.png"
"${CMAKE_CURRENT_BINARY_DIR}/doc/lena.png")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E copy
"${lbfmodel_SOURCE_DIR}/lbfmodel.yaml"
"${CMAKE_CURRENT_BINARY_DIR}/assets/models/")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E copy
"${yunet_SOURCE_DIR}/face_detection_yunet_2023mar.onnx"
"${CMAKE_CURRENT_BINARY_DIR}/assets/models/")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E copy
"${CMAKE_CURRENT_SOURCE_DIR}/third/doxygen-bootstrapped/customdoxygen.css"
"${CMAKE_SOURCE_DIR}/doc/stylesheet.css")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E copy
"${CMAKE_CURRENT_SOURCE_DIR}/third/doxygen-bootstrapped/example-site/header.html"
"${CMAKE_SOURCE_DIR}/doc/")
add_custom_command(TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E copy
"${CMAKE_CURRENT_SOURCE_DIR}/third/doxygen-bootstrapped/example-site/footer.html"
"${CMAKE_SOURCE_DIR}/doc/")
list(APPEND CMAKE_DOXYGEN_HTML_FILES "${CMAKE_CURRENT_SOURCE_DIR}/third/doxygen-bootstrapped/doxy-boot.js")
#Add sample targets
if(BUILD_EXAMPLES)
add_binary_sample(example_v4d_display_image_fb samples/display_image_fb.cpp)
add_binary_sample(example_v4d_display_image_nvg samples/display_image_nvg.cpp)
add_binary_sample(example_v4d_vector_graphics samples/vector_graphics.cpp)
add_binary_sample(example_v4d_vector_graphics_and_fb samples/vector_graphics_and_fb.cpp)
add_binary_sample(example_v4d_render_opengl samples/render_opengl.cpp)
add_binary_sample(example_v4d_custom_source_and_sink samples/custom_source_and_sink.cpp)
add_binary_sample(example_v4d_font_rendering samples/font_rendering.cpp)
add_binary_sample(example_v4d_font_with_gui samples/font_with_gui.cpp)
add_binary_sample(example_v4d_video_editing samples/video_editing.cpp)
add_binary_sample(example_v4d_cube-demo samples/cube-demo.cpp)
add_binary_sample(example_v4d_many_cubes-demo samples/many_cubes-demo.cpp)
add_binary_sample(example_v4d_video-demo samples/video-demo.cpp)
add_binary_sample(example_v4d_nanovg-demo samples/nanovg-demo.cpp)
add_binary_sample(example_v4d_font-demo samples/font-demo.cpp)
add_binary_sample(example_v4d_shader-demo samples/shader-demo.cpp)
add_binary_sample(example_v4d_pedestrian-demo samples/pedestrian-demo.cpp)
add_binary_sample(example_v4d_optflow-demo samples/optflow-demo.cpp)
add_binary_sample(example_v4d_beauty-demo samples/beauty-demo.cpp)
add_binary_sample(example_v4d_bgfx-demo samples/bgfx-demo.cpp)
add_binary_sample(example_v4d_bgfx-demo2 samples/bgfx-demo2.cpp)
add_binary_sample(example_v4d_montage-demo samples/montage-demo.cpp)
endif()
if(OPENCV_V4D_ENABLE_ES3)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DOPENCV_V4D_USE_ES3=1")
endif()
target_compile_features(${the_module} PRIVATE cxx_std_20)
ocv_warnings_disable(CMAKE_CXX_FLAGS -Wdeprecated-enum-enum-conversion)
target_link_directories(${the_module} PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/../../lib")
ocv_target_link_libraries(${the_module} OpenCL OpenGL::OpenGL glfw -lnanovg -lbgfx -lbimg -lbx)
endif()

Binary files added (not shown in the diff; sizes as reported by the viewer, mapped to the doc images in file-list order):

modules/v4d/doc/custom_source_and_sink.gif (1.7 MiB)
modules/v4d/doc/display_image.png (685 KiB)
modules/v4d/doc/display_image_fb.png (993 KiB)
modules/v4d/doc/font_rendering.png (3.2 KiB)
modules/v4d/doc/font_with_gui.png (39 KiB)
modules/v4d/doc/lena.png (463 KiB)
modules/v4d/doc/render_opengl.png (267 B)
modules/v4d/doc/vector_graphics.png (1.4 KiB)
modules/v4d/doc/vector_graphics_and_fb.png (3.3 KiB)
modules/v4d/doc/video_editing.png (993 KiB)

modules/v4d/include/opencv2/v4d/detail/cl.hpp
@ -0,0 +1,13 @@
#ifndef MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_CL_HPP_
#define MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_CL_HPP_
#ifndef CL_TARGET_OPENCL_VERSION
# define CL_TARGET_OPENCL_VERSION 120
#endif
#ifdef __APPLE__
# include <OpenCL/cl_gl_ext.h>
#else
# include <CL/cl_gl.h>
#endif
#endif /* MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_CL_HPP_ */

modules/v4d/include/opencv2/v4d/detail/context.hpp
@ -0,0 +1,44 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <functional>
#include "../../../../include/opencv2/v4d/util.hpp"
#ifndef MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_V4DCONTEXT_HPP_
#define MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_V4DCONTEXT_HPP_
namespace cv {
namespace v4d {
namespace detail {
class V4DContext {
public:
virtual ~V4DContext() {}
virtual void execute(std::function<void()> fn) = 0;
};
class OnceContext : public V4DContext {
inline static std::once_flag flag_;
public:
virtual ~OnceContext() {}
virtual void execute(std::function<void()> fn) override {
std::call_once(flag_, fn);
}
};
class PlainContext : public V4DContext {
public:
virtual ~PlainContext() {}
virtual void execute(std::function<void()> fn) override {
fn();
}
};
}
}
}
#endif /* MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_V4DCONTEXT_HPP_ */
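A small sketch of the behavioral difference between these two contexts (the include path is assumed from this module's layout): `OnceContext::execute` funnels every call through a single static `std::once_flag`, so the callable runs at most once per process, while `PlainContext::execute` simply invokes it each time.

```cpp
#include <iostream>
#include <opencv2/v4d/detail/context.hpp>

int main() {
    cv::v4d::detail::OnceContext once;    // shares one std::once_flag process-wide
    cv::v4d::detail::PlainContext plain;  // executes unconditionally
    for (int i = 0; i < 3; ++i) {
        once.execute([] { std::cout << "init (printed once)\n"; });
        plain.execute([] { std::cout << "frame work (printed three times)\n"; });
    }
}
```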

modules/v4d/include/opencv2/v4d/detail/framebuffercontext.hpp
@ -0,0 +1,352 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_FRAMEBUFFERCONTEXT_HPP_
#define SRC_OPENCV_FRAMEBUFFERCONTEXT_HPP_
#include "cl.hpp"
#include "context.hpp"
#include <opencv2/core.hpp>
#include <opencv2/core/ocl.hpp>
#include "opencv2/v4d/util.hpp"
#include <iostream>
#include <map>
#include <vector>
#define GLFW_INCLUDE_NONE
#include <GLFW/glfw3.h>
typedef unsigned int GLenum;
#define GL_FRAMEBUFFER 0x8D40
namespace cv {
namespace v4d {
class V4D;
namespace detail {
#ifdef HAVE_OPENCL
typedef cv::ocl::OpenCLExecutionContext CLExecContext_t;
class CLExecScope_t
{
CLExecContext_t ctx_;
public:
inline CLExecScope_t(const CLExecContext_t& ctx)
{
if(ctx.empty())
return;
ctx_ = CLExecContext_t::getCurrentRef();
ctx.bind();
}
inline ~CLExecScope_t()
{
if (!ctx_.empty())
{
ctx_.bind();
}
}
};
#else
struct CLExecContext_t {
bool empty() {
return true;
}
static CLExecContext_t getCurrent() {
return CLExecContext_t();
}
};
class CLExecScope_t
{
CLExecContext_t ctx_;
public:
inline CLExecScope_t(const CLExecContext_t& ctx)
{
}
inline ~CLExecScope_t()
{
}
};
#endif
/*!
* The FrameBufferContext acquires the framebuffer from OpenGL (either by up-/download or by cl-gl sharing)
*/
class CV_EXPORTS FrameBufferContext : public V4DContext {
typedef unsigned int GLuint;
typedef signed int GLint;
friend class SourceContext;
friend class SinkContext;
friend class GLContext;
friend class NanoVGContext;
friend class ImGuiContextImpl;
friend class cv::v4d::V4D;
cv::Ptr<FrameBufferContext> self_ = this;
V4D* v4d_ = nullptr;
bool offscreen_;
string title_;
int major_;
int minor_;
int samples_;
bool debug_;
GLFWwindow* glfwWindow_ = nullptr;
bool clglSharing_ = true;
bool isVisible_;
GLuint onscreenTextureID_ = 0;
GLuint onscreenRenderBufferID_ = 0;
GLuint frameBufferID_ = 0;
GLuint textureID_ = 0;
GLuint renderBufferID_ = 0;
GLint viewport_[4];
cl_mem clImage_ = nullptr;
CLExecContext_t context_;
const cv::Size framebufferSize_;
bool hasParent_ = false;
GLFWwindow* rootWindow_;
cv::Ptr<FrameBufferContext> parent_;
bool isRoot_ = true;
//data and handles for webgl copying
std::map<size_t, GLint> texture_hdls_;
std::map<size_t, GLint> resolution_hdls_;
std::map<size_t, GLuint> shader_program_hdls_;
//gl object maps
std::map<size_t, GLuint> copyVaos, copyVbos, copyEbos;
// vertex position, color
const float copyVertices[12] = {
// x y z
-1.0f, -1.0f, -0.0f,
1.0f, 1.0f, -0.0f,
-1.0f, 1.0f, -0.0f,
1.0f, -1.0f, -0.0f };
const unsigned int copyIndices[6] = {
// 2---,1
// | .' |
// 0'---3
0, 1, 2, 0, 3, 1 };
std::map<size_t, GLuint> copyFramebuffers_;
std::map<size_t, GLuint> copyTextures_;
int index_;
void* currentSyncObject_ = 0;
static bool firstSync_;
public:
/*!
* Acquires and releases the framebuffer from and to OpenGL.
*/
class CV_EXPORTS FrameBufferScope {
cv::Ptr<FrameBufferContext> ctx_;
cv::UMat& m_;
#ifdef HAVE_OPENCL
std::shared_ptr<CLExecContext_t> pExecCtx;
#endif
public:
/*!
Acquires the framebuffer via cl-gl sharing.
* @param ctx The corresponding #FrameBufferContext.
* @param m The UMat to bind the OpenGL framebuffer to.
*/
CV_EXPORTS FrameBufferScope(cv::Ptr<FrameBufferContext> ctx, cv::UMat& m) :
ctx_(ctx), m_(m)
#ifdef HAVE_OPENCL
, pExecCtx(std::static_pointer_cast<CLExecContext_t>(m.u->allocatorContext))
#endif
{
CV_Assert(!m.empty());
#ifdef HAVE_OPENCL
if(pExecCtx) {
CLExecScope_t execScope(*pExecCtx.get());
ctx_->acquireFromGL(m_);
} else {
#endif
ctx_->acquireFromGL(m_);
#ifdef HAVE_OPENCL
}
#endif
}
/*!
* Releases the framebuffer via cl-gl sharing.
*/
CV_EXPORTS virtual ~FrameBufferScope() {
#ifdef HAVE_OPENCL
if (pExecCtx) {
CLExecScope_t execScope(*pExecCtx.get());
ctx_->releaseToGL(m_);
}
else {
#endif
ctx_->releaseToGL(m_);
#ifdef HAVE_OPENCL
}
#endif
}
};
/*!
* Setups and tears-down OpenGL states.
*/
class CV_EXPORTS GLScope {
cv::Ptr<FrameBufferContext> ctx_;
public:
/*!
* Setup OpenGL states.
* @param ctx The corresponding #FrameBufferContext.
*/
CV_EXPORTS GLScope(cv::Ptr<FrameBufferContext> ctx, GLenum framebufferTarget = GL_FRAMEBUFFER) :
ctx_(ctx) {
ctx_->begin(framebufferTarget);
}
/*!
* Tear-down OpenGL states.
*/
CV_EXPORTS ~GLScope() {
ctx_->end();
}
};
/*!
* Create a FrameBufferContext with given size.
* @param frameBufferSize The frame buffer size.
*/
FrameBufferContext(V4D& v4d, const cv::Size& frameBufferSize, bool offscreen,
const string& title, int major, int minor, int samples, bool debug, GLFWwindow* rootWindow, cv::Ptr<FrameBufferContext> parent, bool root);
FrameBufferContext(V4D& v4d, const string& title, cv::Ptr<FrameBufferContext> other);
/*!
* Default destructor.
*/
virtual ~FrameBufferContext();
cv::Ptr<FrameBufferContext> self() {
return self_;
}
GLuint getFramebufferID();
GLuint getTextureID();
/*!
* Get the framebuffer size.
* @return The framebuffer size.
*/
const cv::Size& size() const;
void copyTo(cv::UMat& dst);
void copyFrom(const cv::UMat& src);
void copyToRootWindow();
/*!
* Execute function object fn inside a framebuffer context.
* The context acquires the framebuffer from OpenGL (either by up-/download or by cl-gl sharing)
and provides it to the function object. This is a good place to use OpenCL
* directly on the framebuffer.
* @param fn A function object that is passed the framebuffer to be read/manipulated.
*/
virtual void execute(std::function<void()> fn) override {
if(!getCLExecContext().empty()) {
CLExecScope_t clExecScope(getCLExecContext());
FrameBufferContext::GLScope glScope(self(), GL_FRAMEBUFFER);
FrameBufferContext::FrameBufferScope fbScope(self(), framebuffer_);
fn();
} else {
FrameBufferContext::GLScope glScope(self(), GL_FRAMEBUFFER);
FrameBufferContext::FrameBufferScope fbScope(self(), framebuffer_);
fn();
}
}
cv::Vec2f position();
float pixelRatioX();
float pixelRatioY();
void makeCurrent();
void makeNoneCurrent();
bool isResizable();
void setResizable(bool r);
void setWindowSize(const cv::Size& sz);
cv::Size getWindowSize();
bool isFullscreen();
void setFullscreen(bool f);
cv::Size getNativeFrameBufferSize();
void setVisible(bool v);
bool isVisible();
void close();
bool isClosed();
bool isRoot();
bool hasParent();
bool hasRootWindow();
/*!
* Blit the framebuffer to the screen
* @param viewport ROI to blit
* @param windowSize The size of the window to blit to
* @param stretch if true stretch the framebuffer to window size
*/
void blitFrameBufferToFrameBuffer(const cv::Rect& srcViewport, const cv::Size& targetFbSize,
GLuint targetFramebufferID = 0, bool stretch = true, bool flipY = false);
protected:
void fence();
bool wait(const uint64_t& timeout = 0);
CLExecContext_t& getCLExecContext();
cv::Ptr<V4D> getV4D();
int getIndex();
void setup();
void teardown();
/*!
* The internal framebuffer exposed as OpenGL Texture2D.
* @return The texture object.
*/
cv::ogl::Texture2D& getTexture2D();
GLFWwindow* getGLFWWindow() const;
private:
void loadBuffers(const size_t& index);
void loadShader(const size_t& index);
void init();
CV_EXPORTS cv::UMat& fb();
/*!
* Setup OpenGL states.
*/
CV_EXPORTS void begin(GLenum framebufferTarget);
/*!
* Tear-down OpenGL states.
*/
CV_EXPORTS void end();
/*!
* Download the framebuffer to UMat m.
* @param m The target UMat.
*/
void download(cv::UMat& m);
/*!
Upload UMat m to the framebuffer.
* @param m The UMat to upload.
*/
void upload(const cv::UMat& m);
/*!
* Acquire the framebuffer using cl-gl sharing.
* @param m The UMat the framebuffer will be bound to.
*/
void acquireFromGL(cv::UMat& m);
/*!
* Release the framebuffer using cl-gl sharing.
* @param m The UMat the framebuffer is bound to.
*/
void releaseToGL(cv::UMat& m);
void toGLTexture2D(cv::UMat& u, cv::ogl::Texture2D& texture);
void fromGLTexture2D(const cv::ogl::Texture2D& texture, cv::UMat& u);
/*!
The UMat used to copy or bind (depending on cl-gl interop capability) the OpenGL framebuffer.
*/
cv::UMat framebuffer_;
/*!
* The texture bound to the OpenGL framebuffer.
*/
cv::ogl::Texture2D* texture_ = nullptr;
};
}
}
}
#endif /* SRC_OPENCV_FRAMEBUFFERCONTEXT_HPP_ */
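The two scope classes are intended to nest exactly as `execute()` above does: `GLScope` binds the framebuffer object, `FrameBufferScope` acquires its contents as a `cv::UMat` (via CL-GL sharing where available, otherwise by download/upload), and both release in reverse order on destruction. A hedged sketch of direct use, assuming a valid `ctx` and `fb`:

```cpp
// Sketch: mirrors the nesting inside FrameBufferContext::execute() above.
// Assumes cv::Ptr<FrameBufferContext> ctx and cv::UMat fb are valid.
{
    FrameBufferContext::GLScope glScope(ctx, GL_FRAMEBUFFER);  // bind the FBO
    FrameBufferContext::FrameBufferScope fbScope(ctx, fb);     // acquire as cv::UMat
    cv::flip(fb, fb, 0);  // ordinary OpenCV (possibly OpenCL) work on the framebuffer
}  // scopes unwind: UMat released back to OpenGL, then GL state torn down
```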

modules/v4d/include/opencv2/v4d/detail/gl.hpp
@ -0,0 +1,19 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_GL_HPP_
#define MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_GL_HPP_
# if !defined(OPENCV_V4D_USE_ES3)
# include "GL/glew.h"
# define GLFW_INCLUDE_NONE
# else
# define GLFW_INCLUDE_ES3
# define GLFW_INCLUDE_GLEXT
# endif
# include <GLFW/glfw3.h>
#endif /* MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_GL_HPP_ */

modules/v4d/include/opencv2/v4d/detail/glcontext.hpp
@ -0,0 +1,43 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_GLCONTEXT_HPP_
#define SRC_OPENCV_GLCONTEXT_HPP_
#include "opencv2/v4d/detail/framebuffercontext.hpp"
#include "opencv2/v4d/detail/gl.hpp"
struct NVGcontext;
namespace cv {
namespace v4d {
namespace detail {
/*!
Used to set up an OpenGL context
*/
class CV_EXPORTS GLContext : public V4DContext {
const int32_t idx_;
cv::Ptr<FrameBufferContext> mainFbContext_;
cv::Ptr<FrameBufferContext> glFbContext_;
public:
/*!
Creates an OpenGL context
@param idx The index of the context
@param fbContext The framebuffer context
*/
GLContext(const int32_t& idx, cv::Ptr<FrameBufferContext> fbContext);
virtual ~GLContext() {};
/*!
Execute function object fn inside a GL context.
The context takes care of setting up OpenGL states.
@param fn A function object that performs drawing using OpenGL
*/
virtual void execute(std::function<void()> fn) override;
const int32_t& getIndex() const;
cv::Ptr<FrameBufferContext> fbCtx();
};
}
}
}
#endif /* SRC_OPENCV_GLCONTEXT_HPP_ */

modules/v4d/include/opencv2/v4d/detail/imguicontext.hpp
@ -0,0 +1,35 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_IMGUIContext_HPP_
#define SRC_OPENCV_IMGUIContext_HPP_
#include "opencv2/v4d/detail/framebuffercontext.hpp"
#include "imgui.h"
#include "opencv2/v4d/detail/imguicontext.hpp"
namespace cv {
namespace v4d {
namespace detail {
class CV_EXPORTS ImGuiContextImpl {
friend class cv::v4d::V4D;
cv::Ptr<FrameBufferContext> mainFbContext_;
ImGuiContext* context_;
std::function<void(ImGuiContext*)> renderCallback_;
bool firstFrame_ = true;
public:
CV_EXPORTS ImGuiContextImpl(cv::Ptr<FrameBufferContext> fbContext);
CV_EXPORTS void build(std::function<void(ImGuiContext*)> fn);
protected:
CV_EXPORTS void makeCurrent();
CV_EXPORTS void render(bool displayFPS);
};
}
}
}
#endif /* SRC_OPENCV_IMGUIContext_HPP_ */

modules/v4d/include/opencv2/v4d/detail/nanovgcontext.hpp
@ -0,0 +1,81 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_NANOVGCONTEXT_HPP_
#define SRC_OPENCV_NANOVGCONTEXT_HPP_
#include "framebuffercontext.hpp"
struct NVGcontext;
namespace cv {
namespace v4d {
namespace detail {
/*!
* Used to setup a nanovg context
*/
class CV_EXPORTS NanoVGContext : public V4DContext {
cv::Ptr<FrameBufferContext> mainFbContext_;
cv::Ptr<FrameBufferContext> nvgFbContext_;
NVGcontext* context_;
cv::Size_<float> scale_ = {1.0f, 1.0f};
public:
/*!
* Makes sure #NanoVGContext::begin and #NanoVGContext::end are both called
*/
class Scope {
NanoVGContext& ctx_;
public:
/*!
* Setup NanoVG rendering
* @param ctx The corresponding #NanoVGContext
*/
Scope(NanoVGContext& ctx) :
ctx_(ctx) {
ctx_.begin();
}
/*!
* Tear-down NanoVG rendering
*/
~Scope() {
ctx_.end();
}
};
/*!
Creates a NanoVGContext
@param fbContext The framebuffer context
*/
NanoVGContext(cv::Ptr<FrameBufferContext> fbContext);
virtual ~NanoVGContext() {};
/*!
Execute function object fn inside a NanoVG context.
The context takes care of setting up OpenGL and NanoVG states.
A function object passed like that can use the functions in cv::v4d::nvg.
@param fn A function object that performs drawing using cv::v4d::nvg
*/
virtual void execute(std::function<void()> fn) override;
void setScale(const cv::Size_<float>& scale);
cv::Ptr<FrameBufferContext> fbCtx();
private:
/*!
* Setup NanoVG context
*/
void begin();
/*!
* Tear down NanoVG context
*/
void end();
};
}
}
}
#endif /* SRC_OPENCV_NANOVGCONTEXT_HPP_ */

modules/v4d/include/opencv2/v4d/detail/resequence.hpp
@ -0,0 +1,38 @@
#ifndef MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_RESEQUENCE_HPP_
#define MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_RESEQUENCE_HPP_
#include <functional>
#include <set>
#include <opencv2/core/cvdef.h>
#include <opencv2/core/mat.hpp>
#include <mutex>
#include <semaphore>
#include <condition_variable>
namespace cv {
namespace v4d {
class Resequence {
bool finish_ = false;
std::mutex putMtx_;
std::mutex waitMtx_;
std::condition_variable cv_;
uint64_t nextSeq_ = 0;
public:
Resequence() {
}
virtual ~Resequence() {}
void finish();
void notify();
void waitFor(const uint64_t& seq);
};
} /* namespace v4d */
} /* namespace cv */
#endif /* MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_RESEQUENCE_HPP_ */
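The interface suggests the classic resequencing pattern: parallel workers finish out of order, but each blocks in `waitFor()` until its sequence number is due, then wakes the others via `notify()`. A speculative sketch, with semantics inferred from the interface only (the member function bodies live in src/resequence.cpp, not shown here):

```cpp
#include <opencv2/v4d/detail/resequence.hpp>

cv::v4d::Resequence reseq;

// Called by parallel workers, each holding a frame sequence number `seq`.
void emitInOrder(uint64_t seq) {
    reseq.waitFor(seq);   // block until it is this frame's turn
    // ... write the frame to the sink in correct order ...
    reseq.notify();       // wake waiters so the next sequence number can proceed
}
```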

modules/v4d/include/opencv2/v4d/detail/sinkcontext.hpp
@ -0,0 +1,56 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_SINKCONTEXT_HPP_
#define SRC_OPENCV_SINKCONTEXT_HPP_
#include "framebuffercontext.hpp"
namespace cv {
namespace v4d {
class V4D;
namespace detail {
/*!
* Provides a context for writing to a Sink
*/
class CV_EXPORTS SinkContext : public V4DContext {
friend class cv::v4d::V4D;
CLExecContext_t context_;
cv::UMat sinkBuffer_;
bool hasContext_ = false;
cv::Ptr<FrameBufferContext> mainFbContext_;
public:
/*!
Create the SinkContext
* @param fbContext The corresponding framebuffer context
*/
SinkContext(cv::Ptr<FrameBufferContext> fbContext);
virtual ~SinkContext() {};
/*!
Execute a function object inside the sink context.
The framebuffer content is passed on, via the sink buffer, to a functor that consumes it (e.g. writes it to a video file).
@param fn The functor that consumes the data.
*/
virtual void execute(std::function<void()> fn) override;
/*FIXME only public till https://github.com/opencv/opencv/pull/22780 is resolved.
* required for manual initialization of VideoCapture/VideoWriter
*/
bool hasContext();
void copyContext();
CLExecContext_t getCLExecContext();
cv::UMat& sinkBuffer();
};
}
}
}
#endif /* SRC_OPENCV_SINKCONTEXT_HPP_ */

modules/v4d/include/opencv2/v4d/detail/sourcecontext.hpp
@ -0,0 +1,56 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_CLVACONTEXT_HPP_
#define SRC_OPENCV_CLVACONTEXT_HPP_
#include "framebuffercontext.hpp"
namespace cv {
namespace v4d {
class V4D;
namespace detail {
/*!
Provides a context for reading from a Source
*/
class CV_EXPORTS SourceContext : public V4DContext {
friend class cv::v4d::V4D;
CLExecContext_t context_;
cv::UMat captureBuffer_;
cv::UMat captureBufferRGB_;
bool hasContext_ = false;
cv::Ptr<FrameBufferContext> mainFbContext_;
uint64_t currentSeqNr_ = 0;
public:
/*!
Create the SourceContext
* @param fbContext The corresponding framebuffer context
*/
SourceContext(cv::Ptr<FrameBufferContext> fbContext);
virtual ~SourceContext() {};
/*!
Execute a function object inside the source context.
The functor fn fills the capture buffer, whose content is in turn passed on to the framebuffer.
@param fn The functor that provides the data.
*/
virtual void execute(std::function<void()> fn) override;
uint64_t sequenceNumber();
/*FIXME only public till https://github.com/opencv/opencv/pull/22780 is resolved.
* required for manual initialization of VideoCapture/VideoWriter
*/
bool hasContext();
void copyContext();
CLExecContext_t getCLExecContext();
cv::UMat& sourceBuffer();
};
}
}
}
#endif /* SRC_OPENCV_CLVACONTEXT_HPP_ */
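SourceContext and SinkContext are fed by the user-facing Source and Sink objects. A hedged sketch of wiring them up, modeled on samples/video_editing.cpp; the function names `makeCaptureSource`/`makeWriterSink` come from the samples, but the exact signatures are approximated and should be treated as assumptions:

```cpp
#include <opencv2/v4d/v4d.hpp>

using namespace cv::v4d;

void wireUp() {
    cv::Ptr<V4D> window = V4D::make(cv::Size(960, 960), "Video Editing");
    auto src = makeCaptureSource(window, "input.mp4");      // frames flow into SourceContext
    auto sink = makeWriterSink(window, "output.mkv",
                               src->fps(), cv::Size(960, 960));  // drained via SinkContext
    window->setSource(src);
    window->setSink(sink);
}
```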

modules/v4d/include/opencv2/v4d/detail/timetracker.hpp
@ -0,0 +1,139 @@
#ifndef TIME_TRACKER_HPP_
#define TIME_TRACKER_HPP_
#include <chrono>
#include <map>
#include <string>
#include <sstream>
#include <ostream>
#include <limits>
#include <mutex>
#include <opencv2/core/cvdef.h>
using std::ostream;
using std::stringstream;
using std::string;
using std::map;
using std::chrono::microseconds;
using std::mutex;
struct CV_EXPORTS TimeInfo {
long totalCnt_ = 0;
long totalTime_ = 0;
long iterCnt_ = 0;
long iterTime_ = 0;
long last_ = 0;
void add(size_t t) {
last_ = t;
totalTime_ += t;
iterTime_ += t;
++totalCnt_;
++iterCnt_;
if (totalCnt_ == std::numeric_limits<long>::max() || totalTime_ == std::numeric_limits<long>::max()) {
totalCnt_ = 0;
totalTime_ = 0;
}
if (iterCnt_ == std::numeric_limits<long>::max() || iterTime_ == std::numeric_limits<long>::max()) {
iterCnt_ = 0;
iterTime_ = 0;
}
}
void newCount() {
iterCnt_ = 0;
iterTime_ = 0;
}
string str() const {
stringstream ss;
ss << (totalTime_ / 1000.0) / totalCnt_ << "ms = (" << totalTime_ / 1000.0 << '\\' << totalCnt_ << ")\t";
ss << (iterTime_ / 1000.0) / iterCnt_ << "ms = (" << iterTime_ / 1000.0 << '\\' << iterCnt_ << ")\t";
return ss.str();
}
};
inline std::ostream& operator<<(ostream &os, TimeInfo &ti) {
os << (ti.totalTime_ / 1000.0) / ti.totalCnt_ << "ms = (" << ti.totalTime_ / 1000.0 << '\\' << ti.totalCnt_ << ")\t";
os << (ti.iterTime_ / 1000.0) / ti.iterCnt_ << "ms = (" << ti.iterTime_ / 1000.0 << '\\' << ti.iterCnt_ << ")";
return os;
}
class CV_EXPORTS TimeTracker {
private:
static TimeTracker *instance_;
mutex mapMtx_;
map<string, TimeInfo> tiMap_;
bool enabled_;
TimeTracker();
public:
virtual ~TimeTracker();
map<string, TimeInfo>& getMap() {
return tiMap_;
}
template<typename F> void execute(const string &name, F const &func) {
auto start = std::chrono::system_clock::now();
func();
auto duration = std::chrono::duration_cast<microseconds>(std::chrono::system_clock::now() - start);
std::unique_lock lock(mapMtx_);
tiMap_[name].add(duration.count());
}
template<typename F> size_t measure(F const &func) {
auto start = std::chrono::system_clock::now();
func();
auto duration = std::chrono::duration_cast<microseconds>(std::chrono::system_clock::now() - start);
return duration.count();
}
bool isEnabled() {
return enabled_;
}
void setEnabled(bool e) {
enabled_ = e;
}
void print(ostream &os) {
std::unique_lock lock(mapMtx_);
stringstream ss;
ss << "Time tracking info: " << std::endl;
for (auto it : tiMap_) {
ss << "\t" << it.first << ": " << it.second << std::endl;
}
os << ss.str();
}
void reset() {
std::unique_lock lock(mapMtx_);
tiMap_.clear();
}
static TimeTracker* getInstance() {
if (instance_ == NULL)
instance_ = new TimeTracker();
return instance_;
}
static void destroy() {
if (instance_)
delete instance_;
instance_ = NULL;
}
void newCount() {
std::unique_lock lock(mapMtx_);
for (auto& pair : getMap()) {
pair.second.newCount();
}
}
};
#endif /* TIME_TRACKER_HPP_ */
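Usage follows directly from the header: wrap a section in `execute()` to accumulate per-name statistics (in microseconds), then `print()` them. A minimal sketch:

```cpp
#include <iostream>
#include <opencv2/imgproc.hpp>
#include <opencv2/v4d/detail/timetracker.hpp>

void profiled(cv::UMat& src, cv::UMat& dst) {
    TimeTracker::getInstance()->execute("blur", [&] {
        cv::GaussianBlur(src, dst, cv::Size(9, 9), 0);  // timed via system_clock
    });
    TimeTracker::getInstance()->print(std::cout);  // per-name average/total stats
}
```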

modules/v4d/include/opencv2/v4d/detail/transaction.hpp
@ -0,0 +1,112 @@
#ifndef MODULES_V4D_SRC_BACKEND_HPP_
#define MODULES_V4D_SRC_BACKEND_HPP_
#include "context.hpp"
#include <tuple>
#include <functional>
#include <utility>
#include <type_traits>
#include <opencv2/core.hpp>
namespace cv {
namespace v4d {
class Transaction {
private:
cv::Ptr<cv::v4d::detail::V4DContext> ctx_;
public:
virtual ~Transaction() {}
virtual void perform() = 0;
virtual bool enabled() = 0;
virtual bool isPredicate() = 0;
virtual bool lock() = 0;
void setContext(cv::Ptr<cv::v4d::detail::V4DContext> ctx) {
ctx_ = ctx;
}
cv::Ptr<cv::v4d::detail::V4DContext> getContext() {
return ctx_;
}
};
namespace detail {
template <typename F, typename... Ts>
class TransactionImpl : public Transaction
{
static_assert(sizeof...(Ts) == 0 || (!(std::is_rvalue_reference_v<Ts> && ...)));
private:
bool lock_;
F f;
std::tuple<Ts...> args;
public:
template <typename FwdF, typename... FwdTs,
typename = std::enable_if_t<sizeof...(Ts) == 0 || ((std::is_convertible_v<FwdTs&&, Ts> && ...))>>
TransactionImpl(bool lock, FwdF&& func, FwdTs&&... fwdArgs)
: lock_(lock),
f(std::forward<FwdF>(func)),
args{std::forward_as_tuple(fwdArgs...)}
{}
virtual ~TransactionImpl() override
{}
virtual void perform() override
{
std::apply(f, args);
}
template<bool b>
typename std::enable_if<b, bool>::type enabled() {
return std::apply(f, args);
}
template<bool b>
typename std::enable_if<!b, bool>::type enabled() {
return false;
}
virtual bool enabled() override {
return enabled<std::is_same_v<std::remove_cv_t<typename decltype(f)::result_type>, bool>>();
}
template<bool b>
typename std::enable_if<b, bool>::type isPredicate() {
return true;
}
template<bool b>
typename std::enable_if<!b, bool>::type isPredicate() {
return false;
}
virtual bool isPredicate() override {
return isPredicate<std::is_same_v<std::remove_cv_t<typename decltype(f)::result_type>, bool>>();
}
virtual bool lock() override {
return lock_;
}
};
}
template <typename F, typename... Args>
cv::Ptr<Transaction> make_transaction(bool lock, F f, Args&&... args) {
return cv::Ptr<Transaction>(dynamic_cast<Transaction*>(new detail::TransactionImpl<std::decay_t<F>, std::remove_cv_t<Args>...>
(lock, std::forward<F>(f), std::forward<Args>(args)...)));
}
template <typename F, typename Tfb, typename... Args>
cv::Ptr<Transaction> make_transaction(bool lock, F f, Tfb&& fb, Args&&... args) {
return cv::Ptr<Transaction>(dynamic_cast<Transaction*>(new detail::TransactionImpl<std::decay_t<F>, std::remove_cv_t<Tfb>, std::remove_cv_t<Args>...>
(lock, std::forward<F>(f), std::forward<Tfb>(fb), std::forward<Args>(args)...)));
}
}
}
#endif /* MODULES_V4D_SRC_BACKEND_HPP_ */
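A transaction simply freezes a callable together with its arguments for later `perform()`. Note that `enabled()` inspects `decltype(f)::result_type`, so in this sketch the callable is wrapped in `std::function` (which provides `result_type`); a bare lambda would not compile here.

```cpp
#include <functional>
#include <iostream>

void demo() {
    std::function<void(int, int)> add = [](int a, int b) {
        std::cout << a + b << std::endl;
    };
    // lock = false: no implicit locking requested for this node
    cv::Ptr<cv::v4d::Transaction> tx = cv::v4d::make_transaction(false, add, 2, 3);
    tx->perform();                  // applies the stored arguments: prints 5
    bool pred = tx->isPredicate();  // false: result_type is void, not bool
}
```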

modules/v4d/include/opencv2/v4d/events.hpp
@ -0,0 +1,470 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_EVENTS_HPP_
#define MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_EVENTS_HPP_
#include <GLFW/glfw3.h>
#include <opencv2/core.hpp>
namespace cv {
namespace v4d {
namespace event {
inline static thread_local GLFWwindow* current_window = nullptr;
struct WindowState {
cv::Size size;
cv::Point position;
bool focused;
};
inline static thread_local WindowState window_state;
static GLFWwindow* get_current_glfw_window() {
if(current_window == nullptr)
CV_Error(cv::Error::StsBadArg, "No current glfw window set for event handling. You probably tried to call one of the cv::v4d::event functions outside a context-call.");
return current_window;
}
static void set_current_glfw_window(GLFWwindow* window) {
current_window = window;
}
// Define an enum class for the V4D keys
enum class Key {
KEY_A,
KEY_B,
KEY_C,
KEY_D,
KEY_E,
KEY_F,
KEY_G,
KEY_H,
KEY_I,
KEY_J,
KEY_K,
KEY_L,
KEY_M,
KEY_N,
KEY_O,
KEY_P,
KEY_Q,
KEY_R,
KEY_S,
KEY_T,
KEY_U,
KEY_V,
KEY_W,
KEY_X,
KEY_Y,
KEY_Z,
KEY_0,
KEY_1,
KEY_2,
KEY_3,
KEY_4,
KEY_5,
KEY_6,
KEY_7,
KEY_8,
KEY_9,
KEY_SPACE,
KEY_ENTER,
KEY_BACKSPACE,
KEY_TAB,
KEY_ESCAPE,
KEY_UP,
KEY_DOWN,
KEY_LEFT,
KEY_RIGHT,
KEY_HOME,
KEY_END,
KEY_PAGE_UP,
KEY_PAGE_DOWN,
KEY_INSERT,
KEY_DELETE,
KEY_F1,
KEY_F2,
KEY_F3,
KEY_F4,
KEY_F5,
KEY_F6,
KEY_F7,
KEY_F8,
KEY_F9,
KEY_F10,
KEY_F11,
KEY_F12
};
enum class KeyEventType {
NONE,
PRESS,
RELEASE,
REPEAT,
HOLD
};
inline static thread_local std::map<Key, bool> key_states;
constexpr Key get_v4d_key(int glfw_key) {
switch (glfw_key) {
case GLFW_KEY_A: return Key::KEY_A;
case GLFW_KEY_B: return Key::KEY_B;
case GLFW_KEY_C: return Key::KEY_C;
case GLFW_KEY_D: return Key::KEY_D;
case GLFW_KEY_E: return Key::KEY_E;
case GLFW_KEY_F: return Key::KEY_F;
case GLFW_KEY_G: return Key::KEY_G;
case GLFW_KEY_H: return Key::KEY_H;
case GLFW_KEY_I: return Key::KEY_I;
case GLFW_KEY_J: return Key::KEY_J;
case GLFW_KEY_K: return Key::KEY_K;
case GLFW_KEY_L: return Key::KEY_L;
case GLFW_KEY_M: return Key::KEY_M;
case GLFW_KEY_N: return Key::KEY_N;
case GLFW_KEY_O: return Key::KEY_O;
case GLFW_KEY_P: return Key::KEY_P;
case GLFW_KEY_Q: return Key::KEY_Q;
case GLFW_KEY_R: return Key::KEY_R;
case GLFW_KEY_S: return Key::KEY_S;
case GLFW_KEY_T: return Key::KEY_T;
case GLFW_KEY_U: return Key::KEY_U;
case GLFW_KEY_V: return Key::KEY_V;
case GLFW_KEY_W: return Key::KEY_W;
case GLFW_KEY_X: return Key::KEY_X;
case GLFW_KEY_Y: return Key::KEY_Y;
case GLFW_KEY_Z: return Key::KEY_Z;
case GLFW_KEY_0: return Key::KEY_0;
case GLFW_KEY_1: return Key::KEY_1;
case GLFW_KEY_2: return Key::KEY_2;
case GLFW_KEY_3: return Key::KEY_3;
case GLFW_KEY_4: return Key::KEY_4;
case GLFW_KEY_5: return Key::KEY_5;
case GLFW_KEY_6: return Key::KEY_6;
case GLFW_KEY_7: return Key::KEY_7;
case GLFW_KEY_8: return Key::KEY_8;
case GLFW_KEY_9: return Key::KEY_9;
case GLFW_KEY_SPACE: return Key::KEY_SPACE;
case GLFW_KEY_ENTER: return Key::KEY_ENTER;
case GLFW_KEY_BACKSPACE: return Key::KEY_BACKSPACE;
case GLFW_KEY_TAB: return Key::KEY_TAB;
case GLFW_KEY_ESCAPE: return Key::KEY_ESCAPE;
case GLFW_KEY_UP: return Key::KEY_UP;
case GLFW_KEY_DOWN: return Key::KEY_DOWN;
case GLFW_KEY_LEFT: return Key::KEY_LEFT;
case GLFW_KEY_RIGHT: return Key::KEY_RIGHT;
case GLFW_KEY_HOME: return Key::KEY_HOME;
case GLFW_KEY_END: return Key::KEY_END;
case GLFW_KEY_PAGE_UP: return Key::KEY_PAGE_UP;
case GLFW_KEY_PAGE_DOWN: return Key::KEY_PAGE_DOWN;
case GLFW_KEY_INSERT: return Key::KEY_INSERT;
case GLFW_KEY_DELETE: return Key::KEY_DELETE;
case GLFW_KEY_F1: return Key::KEY_F1;
case GLFW_KEY_F2: return Key::KEY_F2;
case GLFW_KEY_F3: return Key::KEY_F3;
case GLFW_KEY_F4: return Key::KEY_F4;
case GLFW_KEY_F5: return Key::KEY_F5;
case GLFW_KEY_F6: return Key::KEY_F6;
case GLFW_KEY_F7: return Key::KEY_F7;
case GLFW_KEY_F8: return Key::KEY_F8;
case GLFW_KEY_F9: return Key::KEY_F9;
case GLFW_KEY_F10: return Key::KEY_F10;
case GLFW_KEY_F11: return Key::KEY_F11;
case GLFW_KEY_F12: return Key::KEY_F12;
default:
CV_Error_(cv::Error::StsBadArg, ("Invalid key: %d. Please ensure the key is within the valid range.", glfw_key));
return Key::KEY_F12;
}
}
static KeyEventType get_key_event_type(int key) {
Key v4d_key = get_v4d_key(key);
int state = glfwGetKey(get_current_glfw_window(), key);
switch (state) {
case GLFW_PRESS:
key_states[v4d_key] = true;
return KeyEventType::PRESS;
case GLFW_RELEASE:
key_states[v4d_key] = false;
return KeyEventType::RELEASE;
case GLFW_REPEAT:
return KeyEventType::REPEAT;
default:
return KeyEventType::NONE;
}
}
static KeyEventType get_key_hold_event(Key key) {
if (key_states[key]) {
return KeyEventType::HOLD;
} else {
return KeyEventType::NONE;
}
}
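// Usage sketch (assumption: called from inside a V4D context call, where the
// current GLFW window has been set; names are taken from this header):
//
//   using namespace cv::v4d::event;
//   if (get_key_event_type(GLFW_KEY_SPACE) == KeyEventType::PRESS) {
//       // react to the space key being pressed this frame
//   }
//   if (get_key_hold_event(Key::KEY_SPACE) == KeyEventType::HOLD) {
//       // the key is still held down
//   }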
// Define an enum class for the V4D mouse buttons
enum class MouseButton {
LEFT,
RIGHT,
MIDDLE,
BUTTON_4,
BUTTON_5,
BUTTON_6,
BUTTON_7,
BUTTON_8
};
enum class MouseEventType {
NONE,
PRESS,
RELEASE,
MOVE,
SCROLL,
DRAG_START,
DRAG,
DRAG_END,
HOVER_ENTER,
HOVER_EXIT,
DOUBLE_CLICK
};
// Define a static function that returns the mouse position as a cv::Point2d
static cv::Point2d get_mouse_position() {
// Declare variables to store the mouse position
double x, y;
// Get the mouse position using glfwGetCursorPos
glfwGetCursorPos(get_current_glfw_window(), &x, &y);
// Return the mouse position as a cv::Point2d
return cv::Point2d(x, y);
}
inline static thread_local std::map<MouseButton, bool> button_states;
inline static thread_local cv::Point2d last_position = get_mouse_position();
inline static thread_local cv::Point2d scroll_offset(0, 0);
inline static thread_local cv::Point2d last_scroll_offset(0, 0);
static void scroll_callback(GLFWwindow* window, double xoffset, double yoffset)
{
// Update the scroll offset
scroll_offset = cv::Point2d(xoffset, yoffset);
}
constexpr static MouseButton get_v4d_mouse_button(int glfw_button) {
switch (glfw_button) {
case GLFW_MOUSE_BUTTON_LEFT: return MouseButton::LEFT;
case GLFW_MOUSE_BUTTON_RIGHT: return MouseButton::RIGHT;
case GLFW_MOUSE_BUTTON_MIDDLE: return MouseButton::MIDDLE;
case GLFW_MOUSE_BUTTON_4: return MouseButton::BUTTON_4;
case GLFW_MOUSE_BUTTON_5: return MouseButton::BUTTON_5;
case GLFW_MOUSE_BUTTON_6: return MouseButton::BUTTON_6;
case GLFW_MOUSE_BUTTON_7: return MouseButton::BUTTON_7;
case GLFW_MOUSE_BUTTON_8: return MouseButton::BUTTON_8;
default: CV_Error_(cv::Error::StsBadArg, ("Invalid mouse button: %d. Please ensure the button is within the valid range.", glfw_button));
}
}
static MouseEventType get_mouse_event_type(int button) {
MouseButton v4d_button = get_v4d_mouse_button(button);
int state = glfwGetMouseButton(get_current_glfw_window(), button);
switch (state) {
case GLFW_PRESS:
button_states[v4d_button] = true;
return MouseEventType::PRESS;
case GLFW_RELEASE:
button_states[v4d_button] = false;
return MouseEventType::RELEASE;
default:
return MouseEventType::NONE;
}
}
static cv::Point2d get_mouse_scroll_offset() {
return scroll_offset;
}
static MouseEventType get_mouse_scroll_event() {
cv::Point2d current_offset = get_mouse_scroll_offset();
// Compare against the last scroll offset (not the cursor position) to detect new scroll input.
if (current_offset != last_scroll_offset) {
last_scroll_offset = current_offset;
return MouseEventType::SCROLL;
} else {
return MouseEventType::NONE;
}
}
static MouseEventType get_mouse_move_event() {
cv::Point2d current_position = get_mouse_position();
if (current_position != last_position) {
last_position = current_position;
return MouseEventType::MOVE;
} else {
return MouseEventType::NONE;
}
}
static MouseEventType get_mouse_drag_event(MouseButton button) {
cv::Point2d current_position = get_mouse_position();
if (button_states[button] && current_position != last_position) {
last_position = current_position;
return MouseEventType::DRAG;
} else {
return MouseEventType::NONE;
}
}
static MouseEventType get_mouse_hover_event() {
// Note: this reports cursor movement (HOVER_ENTER) vs. stillness (HOVER_EXIT);
// it does not track the cursor entering or leaving the window.
cv::Point2d current_position = get_mouse_position();
if (current_position != last_position) {
last_position = current_position;
return MouseEventType::HOVER_ENTER;
} else {
return MouseEventType::HOVER_EXIT;
}
}
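// Usage sketch (illustrative only; poll once per frame):
//
//   if (get_mouse_event_type(GLFW_MOUSE_BUTTON_LEFT) == MouseEventType::PRESS) {
//       cv::Point2d clickPos = get_mouse_position();
//   }
//   if (get_mouse_drag_event(MouseButton::LEFT) == MouseEventType::DRAG) {
//       // the left button is down and the cursor moved since the last poll
//   }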
enum class WindowEvent {
NONE,
RESIZE,
MOVE,
FOCUS,
UNFOCUS,
CLOSE
};
static WindowEvent get_window_resize_event() {
static WindowState last_state = window_state;
if (window_state.size != last_state.size) {
last_state.size = window_state.size;
return WindowEvent::RESIZE;
} else {
return WindowEvent::NONE;
}
}
static WindowEvent get_window_move_event() {
static WindowState last_state = window_state;
if (window_state.position != last_state.position) {
last_state.position = window_state.position;
return WindowEvent::MOVE;
} else {
return WindowEvent::NONE;
}
}
static WindowEvent get_window_focus_event() {
static WindowState last_state = window_state;
if (window_state.focused && !last_state.focused) {
last_state.focused = window_state.focused;
return WindowEvent::FOCUS;
} else if (!window_state.focused && last_state.focused) {
last_state.focused = window_state.focused;
return WindowEvent::UNFOCUS;
} else {
return WindowEvent::NONE;
}
}
static cv::Size get_window_size() {
int width, height;
glfwGetWindowSize(get_current_glfw_window(), &width, &height);
return cv::Size(width, height);
}
static cv::Point get_window_position() {
int x, y;
glfwGetWindowPos(get_current_glfw_window(), &x, &y);
return cv::Point(x, y);
}
static bool get_window_focus() {
return glfwGetWindowAttrib(get_current_glfw_window(), GLFW_FOCUSED) == GLFW_TRUE;
}
static void initialize_callbacks(GLFWwindow* window) {
glfwSetScrollCallback(window, scroll_callback);
}
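// Note: scroll_callback() is only registered once initialize_callbacks() has been
// called for the window; until then get_mouse_scroll_event() will report NONE.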
// Define an enum class for the V4D joystick buttons
enum class JoystickButton {
BUTTON_A,
BUTTON_B,
BUTTON_X,
BUTTON_Y,
BUTTON_LB,
BUTTON_RB,
BUTTON_BACK,
BUTTON_START,
BUTTON_GUIDE,
BUTTON_LEFT_THUMB,
BUTTON_RIGHT_THUMB,
BUTTON_DPAD_UP,
BUTTON_DPAD_RIGHT,
BUTTON_DPAD_DOWN,
BUTTON_DPAD_LEFT
};
// Define an enum class for the V4D joystick axes
enum class JoystickAxis {
AXIS_LEFT_X,
AXIS_LEFT_Y,
AXIS_RIGHT_X,
AXIS_RIGHT_Y,
AXIS_LEFT_TRIGGER,
AXIS_RIGHT_TRIGGER
};
// Define a static function that returns the state of a joystick button
static bool get_joystick_button_state(int joystick, JoystickButton button) {
int count;
const unsigned char* buttons = glfwGetJoystickButtons(joystick, &count);
if (buttons == nullptr) {
CV_Error(cv::Error::StsBadArg, "Failed to get joystick buttons. Please ensure the joystick is connected and working properly.");
}
if (static_cast<int>(button) >= count) {
CV_Error(cv::Error::StsBadArg, "Joystick button index out of range for this device.");
}
return buttons[static_cast<int>(button)];
}
// Define a static function that returns the name of a joystick
static const char* get_joystick_name(int joystick) {
const char* name = glfwGetJoystickName(joystick);
if (name == nullptr) {
CV_Error(cv::Error::StsBadArg, "Failed to get joystick name. Please ensure the joystick is connected and working properly.");
}
return name;
}
// Define a static function that returns whether a joystick is present
static bool is_joystick_present(int joystick) {
// glfwJoystickPresent() only ever returns GLFW_TRUE or GLFW_FALSE.
return glfwJoystickPresent(joystick) == GLFW_TRUE;
}
// Define a static function that sets the clipboard string
static void set_clipboard_string(const char* string) {
if (string == nullptr) {
CV_Error(cv::Error::StsNullPtr, "Cannot set clipboard string to null. Please provide a valid string.");
}
glfwSetClipboardString(get_current_glfw_window(), string);
}
// Define a static function that gets the clipboard string
static const char* get_clipboard_string() {
const char* string = glfwGetClipboardString(get_current_glfw_window());
if (string == nullptr) {
CV_Error(cv::Error::StsNullPtr, "Failed to get clipboard string. Please ensure there is a string in the clipboard.");
}
return string;
}
}
}
}
#endif // MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_EVENTS_HPP_

@@ -0,0 +1,509 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_V4D_NVG_HPP_
#define SRC_OPENCV_V4D_NVG_HPP_
#include "opencv2/v4d/v4d.hpp"
#include <stdio.h>
#include <opencv2/core.hpp>
#include "nanovg.h"
struct NVGcontext;
namespace cv {
namespace v4d {
/*!
* In general, please refer to https://github.com/memononen/nanovg/blob/master/src/nanovg.h for reference.
*/
namespace nvg {
/*!
* Equivalent of a NVGtextRow.
*/
struct CV_EXPORTS TextRow: public NVGtextRow {
};
/*!
* Equivalent of a NVGglyphPosition.
*/
struct CV_EXPORTS GlyphPosition: public NVGglyphPosition {
};
/*!
* Equivalent of a NVGPaint. Converts back and forth between the two representations (Paint/NVGPaint).
*/
struct CV_EXPORTS Paint {
Paint() {
}
Paint(const NVGpaint& np);
NVGpaint toNVGpaint();
float xform[6];
float extent[2];
float radius = 0;
float feather = 0;
cv::Scalar innerColor;
cv::Scalar outerColor;
int image = 0;
};
/*!
* Internals of the NanoVG wrapper
*/
namespace detail {
/*!
* Internal NanoVG singleton that wraps all NanoVG functions.
*/
class NVG {
private:
friend class V4D;
static thread_local NVG* nvg_instance_;
NVGcontext* ctx_ = nullptr;
NVG(NVGcontext* ctx) :
ctx_(ctx) {
}
public:
/*!
* Initialize the current NVG object.
* @param ctx The NVGcontext to create the NVG object from.
*/
static void initializeContext(NVGcontext* ctx);
/*!
* Get the current NVG instance.
* @return The current NVG instance wrapping the active NVGcontext.
*/
static NVG* getCurrentContext();
/*!
* Get the underlying NVGcontext.
* @return The underlying NVGcontext.
*/
NVGcontext* getContext() {
assert(ctx_ != nullptr);
return ctx_;
}
public:
int createFont(const char* name, const char* filename);
int createFontMem(const char* name, unsigned char* data, int ndata, int freeData);
int findFont(const char* name);
int addFallbackFontId(int baseFont, int fallbackFont);
int addFallbackFont(const char* baseFont, const char* fallbackFont);
void fontSize(float size);
void fontBlur(float blur);
void textLetterSpacing(float spacing);
void textLineHeight(float lineHeight);
void textAlign(int align);
void fontFaceId(int font);
void fontFace(const char* font);
float text(float x, float y, const char* string, const char* end);
void textBox(float x, float y, float breakRowWidth, const char* string, const char* end);
float textBounds(float x, float y, const char* string, const char* end, float* bounds);
void textBoxBounds(float x, float y, float breakRowWidth, const char* string, const char* end,
float* bounds);
int textGlyphPositions(float x, float y, const char* string, const char* end,
GlyphPosition* positions, int maxPositions);
void textMetrics(float* ascender, float* descender, float* lineh);
int textBreakLines(const char* string, const char* end, float breakRowWidth, TextRow* rows,
int maxRows);
void save();
void restore();
void reset();
// void shapeAntiAlias(int enabled);
void strokeColor(const cv::Scalar& bgra);
void strokePaint(Paint paint);
void fillColor(const cv::Scalar& bgra);
void fillPaint(Paint paint);
void miterLimit(float limit);
void strokeWidth(float size);
void lineCap(int cap);
void lineJoin(int join);
void globalAlpha(float alpha);
void resetTransform();
void transform(float a, float b, float c, float d, float e, float f);
void translate(float x, float y);
void rotate(float angle);
void skewX(float angle);
void skewY(float angle);
void scale(float x, float y);
void currentTransform(float* xform);
void transformIdentity(float* dst);
void transformTranslate(float* dst, float tx, float ty);
void transformScale(float* dst, float sx, float sy);
void transformRotate(float* dst, float a);
void transformSkewX(float* dst, float a);
void transformSkewY(float* dst, float a);
void transformMultiply(float* dst, const float* src);
void transformPremultiply(float* dst, const float* src);
int transformInverse(float* dst, const float* src);
void transformPoint(float* dstx, float* dsty, const float* xform, float srcx, float srcy);
float degToRad(float deg);
float radToDeg(float rad);
int createImage(const char* filename, int imageFlags);
int createImageMem(int imageFlags, unsigned char* data, int ndata);
int createImageRGBA(int w, int h, int imageFlags, const unsigned char* data);
void updateImage(int image, const unsigned char* data);
void imageSize(int image, int* w, int* h);
void deleteImage(int image);
void beginPath();
void moveTo(float x, float y);
void lineTo(float x, float y);
void bezierTo(float c1x, float c1y, float c2x, float c2y, float x, float y);
void quadTo(float cx, float cy, float x, float y);
void arcTo(float x1, float y1, float x2, float y2, float radius);
void closePath();
void pathWinding(int dir);
void arc(float cx, float cy, float r, float a0, float a1, int dir);
void rect(float x, float y, float w, float h);
void roundedRect(float x, float y, float w, float h, float r);
void roundedRectVarying(float x, float y, float w, float h, float radTopLeft, float radTopRight,
float radBottomRight, float radBottomLeft);
void ellipse(float cx, float cy, float rx, float ry);
void circle(float cx, float cy, float r);
void fill();
void stroke();
Paint linearGradient(float sx, float sy, float ex, float ey, const cv::Scalar& icol,
const cv::Scalar& ocol);
Paint boxGradient(float x, float y, float w, float h, float r, float f, const cv::Scalar& icol,
const cv::Scalar& ocol);
Paint radialGradient(float cx, float cy, float inr, float outr, const cv::Scalar& icol,
const cv::Scalar& ocol);
Paint imagePattern(float ox, float oy, float ex, float ey, float angle, int image, float alpha);
void scissor(float x, float y, float w, float h);
void intersectScissor(float x, float y, float w, float h);
void resetScissor();
};
} // namespace detail
/*!
* A forward to nvgCreateFont. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS int createFont(const char* name, const char* filename);
/*!
* A forward to nvgCreateFontMem. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS int createFontMem(const char* name, unsigned char* data, int ndata, int freeData);
/*!
* A forward to nvgFindFont. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS int findFont(const char* name);
/*!
* A forward to nvgAddFallbackFontId. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS int addFallbackFontId(int baseFont, int fallbackFont);
/*!
* A forward to nvgAddFallbackFont. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS int addFallbackFont(const char* baseFont, const char* fallbackFont);
/*!
* A forward to nvgFontSize. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void fontSize(float size);
/*!
* A forward to nvgFontBlur. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void fontBlur(float blur);
/*!
* A forward to nvgTextLetterSpacing. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void textLetterSpacing(float spacing);
/*!
* A forward to nvgTextLineHeight. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void textLineHeight(float lineHeight);
/*!
* A forward to nvgTextAlign. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void textAlign(int align);
/*!
* A forward to nvgFontFaceId. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void fontFaceId(int font);
/*!
* A forward to nvgFontFace. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void fontFace(const char* font);
/*!
* A forward to nvgText. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS float text(float x, float y, const char* string, const char* end);
/*!
* A forward to nvgTextBox. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void textBox(float x, float y, float breakRowWidth, const char* string, const char* end);
/*!
* A forward to nvgTextBounds. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS float textBounds(float x, float y, const char* string, const char* end, float* bounds);
/*!
* A forward to nvgTextBoxBounds. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void textBoxBounds(float x, float y, float breakRowWidth, const char* string, const char* end,
float* bounds);
/*!
* A forward to nvgTextGlyphPositions. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS int textGlyphPositions(float x, float y, const char* string, const char* end,
GlyphPosition* positions, int maxPositions);
/*!
* A forward to nvgTextMetrics. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void textMetrics(float* ascender, float* descender, float* lineh);
/*!
* A forward to nvgTextBreakLines. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS int textBreakLines(const char* string, const char* end, float breakRowWidth, TextRow* rows,
int maxRows);
/*!
* A forward to nvgSave. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void save();
/*!
* A forward to nvgRestore. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void restore();
/*!
* A forward to nvgReset. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void reset();
///*!
// * A forward to nvgShapeAntiAlias. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
// */
//CV_EXPORTS void shapeAntiAlias(int enabled);
/*!
* A forward to nvgStrokeColor. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void strokeColor(const cv::Scalar& bgra);
/*!
* A forward to nvgStrokePaint. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void strokePaint(Paint paint);
/*!
* A forward to nvgFillColor. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void fillColor(const cv::Scalar& color);
/*!
* A forward to nvgFillPaint. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void fillPaint(Paint paint);
/*!
* A forward to nvgMiterLimit. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void miterLimit(float limit);
/*!
* A forward to nvgStrokeWidth. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void strokeWidth(float size);
/*!
* A forward to nvgLineCap. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void lineCap(int cap);
/*!
* A forward to nvgLineJoin. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void lineJoin(int join);
/*!
* A forward to nvgGlobalAlpha. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void globalAlpha(float alpha);
/*!
* A forward to nvgResetTransform. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void resetTransform();
/*!
* A forward to nvgTransform. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void transform(float a, float b, float c, float d, float e, float f);
/*!
* A forward to nvgTranslate. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void translate(float x, float y);
/*!
* A forward to nvgRotate. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void rotate(float angle);
/*!
* A forward to nvgSkewX. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void skewX(float angle);
/*!
* A forward to nvgSkewY. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void skewY(float angle);
/*!
* A forward to nvgScale. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void scale(float x, float y);
/*!
* A forward to nvgCurrentTransform. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void currentTransform(float* xform);
/*!
* A forward to nvgTransformIdentity. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void transformIdentity(float* dst);
/*!
* A forward to nvgTransformTranslate. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void transformTranslate(float* dst, float tx, float ty);
/*!
* A forward to nvgTransformScale. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void transformScale(float* dst, float sx, float sy);
/*!
* A forward to nvgTransformRotate. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void transformRotate(float* dst, float a);
/*!
* A forward to nvgTransformSkewX. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void transformSkewX(float* dst, float a);
/*!
* A forward to nvgTransformSkewY. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void transformSkewY(float* dst, float a);
/*!
* A forward to nvgTransformMultiply. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void transformMultiply(float* dst, const float* src);
/*!
* A forward to nvgTransformPremultiply. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void transformPremultiply(float* dst, const float* src);
/*!
* A forward to nvgTransformInverse. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS int transformInverse(float* dst, const float* src);
/*!
* A forward to nvgTransformPoint. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void transformPoint(float* dstx, float* dsty, const float* xform, float srcx, float srcy);
/*!
* A forward to nvgDegToRad. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS float degToRad(float deg);
/*!
* A forward to nvgRadToDeg. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS float radToDeg(float rad);
/*!
* Forwards to nvgCreateImage, nvgCreateImageMem, nvgCreateImageRGBA, nvgUpdateImage,
* nvgImageSize and nvgDeleteImage respectively. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS int createImage(const char* filename, int imageFlags);
CV_EXPORTS int createImageMem(int imageFlags, unsigned char* data, int ndata);
CV_EXPORTS int createImageRGBA(int w, int h, int imageFlags, const unsigned char* data);
CV_EXPORTS void updateImage(int image, const unsigned char* data);
CV_EXPORTS void imageSize(int image, int* w, int* h);
CV_EXPORTS void deleteImage(int image);
/*!
* A forward to nvgBeginPath. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void beginPath();
/*!
* A forward to nvgMoveTo. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void moveTo(float x, float y);
/*!
* A forward to nvgLineTo. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void lineTo(float x, float y);
/*!
* A forward to nvgBezierTo. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void bezierTo(float c1x, float c1y, float c2x, float c2y, float x, float y);
/*!
* A forward to nvgQuadTo. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void quadTo(float cx, float cy, float x, float y);
/*!
* A forward to nvgArcTo. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void arcTo(float x1, float y1, float x2, float y2, float radius);
/*!
* A forward to nvgClosePath. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void closePath();
/*!
* A forward to nvgPathWinding. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void pathWinding(int dir);
/*!
* A forward to nvgArc. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void arc(float cx, float cy, float r, float a0, float a1, int dir);
/*!
* A forward to nvgRect. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void rect(float x, float y, float w, float h);
/*!
* A forward to nvgRoundedRect. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void roundedRect(float x, float y, float w, float h, float r);
/*!
* A forward to nvgRoundedRectVarying. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void roundedRectVarying(float x, float y, float w, float h, float radTopLeft, float radTopRight,
float radBottomRight, float radBottomLeft);
/*!
* A forward to nvgEllipse. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void ellipse(float cx, float cy, float rx, float ry);
/*!
* A forward to nvgCircle. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void circle(float cx, float cy, float r);
/*!
* A forward to nvgFill. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void fill();
/*!
* A forward to nvgStroke. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void stroke();
/*!
* A forward to nvgLinearGradient. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS Paint linearGradient(float sx, float sy, float ex, float ey, const cv::Scalar& icol,
const cv::Scalar& ocol);
/*!
* A forward to nvgBoxGradient. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS Paint boxGradient(float x, float y, float w, float h, float r, float f, const cv::Scalar& icol,
const cv::Scalar& ocol);
/*!
* A forward to nvgRadialGradient. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS Paint radialGradient(float cx, float cy, float inr, float outr, const cv::Scalar& icol,
const cv::Scalar& ocol);
/*!
* A forward to nvgImagePattern. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS Paint imagePattern(float ox, float oy, float ex, float ey, float angle, int image, float alpha);
/*!
* A forward to nvgScissor. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void scissor(float x, float y, float w, float h);
/*!
* A forward to nvgIntersectScissor. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void intersectScissor(float x, float y, float w, float h);
/*!
* A forward to nvgResetScissor. See https://github.com/memononen/nanovg/blob/master/src/nanovg.h
*/
CV_EXPORTS void resetScissor();
/*!
* Clears the surface with the given color (BGRA).
*/
CV_EXPORTS void clear(const cv::Scalar& bgra = cv::Scalar(0, 0, 0, 255));
}
}
}
#endif /* SRC_OPENCV_V4D_NVG_HPP_ */
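// Usage sketch (illustrative only): these forwards are meant to be called while a
// NanoVG rendering context is current, e.g. from a V4D NanoVG drawing function.
// Assuming such a context is active:
//
//   using namespace cv::v4d::nvg;
//   beginPath();
//   roundedRect(10.0f, 10.0f, 200.0f, 100.0f, 8.0f);
//   fillColor(cv::Scalar(0, 0, 255, 255)); // BGRA
//   fill();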

@@ -0,0 +1,198 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef MODULES_V4D_SRC_SCENE_HPP_
#define MODULES_V4D_SRC_SCENE_HPP_
#include "v4d.hpp"
#include <assimp/scene.h>
#include <assimp/Importer.hpp>
#include <string>
namespace cv {
namespace v4d {
namespace gl {
cv::Vec3f rotate3D(const cv::Vec3f& point, const cv::Vec3f& center, const cv::Vec3f& rotation);
cv::Matx44f perspective(float fov, float aspect, float zNear, float zFar);
cv::Matx44f lookAt(cv::Vec3f eye, cv::Vec3f center, cv::Vec3f up);
cv::Matx44f modelView(const cv::Vec3f& translation, const cv::Vec3f& rotationVec, const cv::Vec3f& scaleVec);
class Scene {
public:
enum RenderMode {
DEFAULT = 0,
WIREFRAME = 1,
POINTCLOUD = 2,
};
private:
Assimp::Importer importer_;
const aiScene* scene_ = nullptr;
RenderMode mode_ = DEFAULT;
GLuint shaderHandles_[3] = {0, 0, 0};
cv::Vec3f lightPos_ = {1.2f, 1.0f, 2.0f};
cv::Vec3f viewPos_ = {0.0, 0.0, 0.0};
cv::Vec3f autoCenter_, size_;
float autoScale_ = 1;
const string vertexShaderSource_ = R"(
#version 300 es
layout(location = 0) in vec3 aPos;
out vec3 fragPos;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
void main() {
gl_Position = projection * view * model * vec4(aPos, 1.0);
fragPos = vec3(model * vec4(aPos, 1.0));
gl_PointSize = 3.0; // Set the size of the rendered points
}
)";
const string fragmentShaderSource_ = R"(
#version 300 es
#define RENDER_MODE_WIREFRAME 1
#define RENDER_MODE_POINTCLOUD 2
#define AMBIENT_COLOR vec3(0.95, 0.95, 0.95)
#define DIFFUSE_COLOR vec3(0.8, 0.8, 0.8)
#define SPECULAR_COLOR vec3(0.7, 0.7, 0.7)
// Control defines for effects. The ENABLE_* switches are tested via #ifdef,
// so comment a define out to disable the corresponding effect.
#define ENABLE_HDR true
#define HDR_EXPOSURE 1.0
#define ENABLE_BLOOM true
#define BLOOM_INTENSITY 1.0
#define ENABLE_SHADOWS true
precision highp float;
in vec3 fragPos;
out vec4 fragColor;
uniform vec3 lightPos;
uniform vec3 viewPos;
uniform int renderMode;
// Function to check ray-sphere intersection
bool intersectSphere(vec3 rayOrigin, vec3 rayDir, vec3 sphereCenter, float sphereRadius) {
vec3 oc = rayOrigin - sphereCenter;
float a = dot(rayDir, rayDir);
float b = 2.0 * dot(oc, rayDir);
float c = dot(oc, oc) - sphereRadius * sphereRadius;
float discriminant = b * b - 4.0 * a * c;
return (discriminant > 0.0);
}
// Function to check if a point is in shadow
bool isInShadow(vec3 fragPos, vec3 lightDir) {
// Use ray tracing to check for shadows (sphere example)
vec3 rayOrigin = fragPos + 0.001 * normalize(lightDir); // Slightly offset to avoid self-intersection
vec3 sphereCenter = vec3(0.0, 1.0, 0.0); // Example sphere center
float sphereRadius = 0.5; // Example sphere radius
if (intersectSphere(rayOrigin, lightDir, sphereCenter, sphereRadius)) {
return true; // Point is in shadow
}
return false; // Point is illuminated
}
// HDR tone mapping function
vec3 toneMap(vec3 color, float exposure) {
return 1.0 - exp(-color * exposure);
}
void main() {
vec4 attuned;
if (renderMode == RENDER_MODE_WIREFRAME) {
attuned = vec4(1.0, 0.0, 0.0, 1.0);
} else if (renderMode == RENDER_MODE_POINTCLOUD) {
float distance = length(fragPos - viewPos);
float attenuation = pow(1.0 / distance, 16.0);
vec3 color = vec3(1.0, 1.0, 1.0);
attuned = vec4(color, attenuation);
} else {
attuned = vec4(0.8, 0.8, 0.8, 1.0);
}
vec3 ambient = 0.7 * attuned.xyz * AMBIENT_COLOR;
vec3 lightDir = normalize(lightPos - fragPos);
// Check if the point is in shadow
#ifdef ENABLE_SHADOWS
if (isInShadow(fragPos, lightDir)) {
fragColor = vec4(ambient, 1.0); // Point is in shadow
return;
}
#endif
float diff = max(dot(normalize(fragPos), lightDir), 0.0);
vec3 diffuse = diff * attuned.xyz * DIFFUSE_COLOR;
vec3 viewDir = normalize(viewPos - fragPos);
vec3 reflectDir = reflect(-lightDir, normalize(fragPos));
float spec = pow(max(dot(viewDir, reflectDir), 0.0), 32.0);
vec3 specular = spec * SPECULAR_COLOR;
// Combine ambient, diffuse, and specular components
vec3 finalColor = ambient + diffuse + specular;
// Apply HDR tone mapping
#ifdef ENABLE_HDR
finalColor = toneMap(finalColor, HDR_EXPOSURE);
#endif
// Bloom effect
#ifdef ENABLE_BLOOM
vec3 brightColor = finalColor - ambient;
finalColor += BLOOM_INTENSITY * brightColor;
#endif
fragColor = vec4(finalColor, 1.0);
}
)";
public:
Scene();
virtual ~Scene();
void reset();
bool load(const std::vector<Point3f>& points);
bool load(const std::string& filename);
void render(const cv::Rect& viewport, const cv::Matx44f& projection, const cv::Matx44f& view, const cv::Matx44f& modelView);
cv::Mat_<float> pointCloudAsMat();
std::vector<cv::Point3f> pointCloudAsVector();
float autoScale() {
return autoScale_;
}
cv::Vec3f autoCenter() {
return autoCenter_;
}
void setMode(RenderMode mode) {
mode_ = mode;
}
cv::Vec3f lightPosition() {
return lightPos_;
}
void setLightPosition(cv::Vec3f pos) {
lightPos_ = pos;
}
};
} /* namespace gl */
} /* namespace v4d */
} /* namespace cv */
#endif /* MODULES_V4D_SRC_SCENE_HPP_ */
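// Usage sketch (illustrative only; assumes a current OpenGL context and an
// existing model file named "model.obj"):
//
//   using namespace cv::v4d::gl;
//   Scene scene;
//   if (scene.load("model.obj")) {
//       scene.setMode(Scene::WIREFRAME);
//       cv::Matx44f proj = perspective(45.0f, 16.0f / 9.0f, 0.1f, 100.0f);
//       cv::Matx44f view = lookAt({0, 0, 3}, {0, 0, 0}, {0, 1, 0});
//       float s = scene.autoScale();
//       cv::Matx44f model = modelView({0, 0, 0}, {0, 0, 0}, {s, s, s});
//       scene.render(cv::Rect(0, 0, 1280, 720), proj, view, model);
//   }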

@@ -0,0 +1,61 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_V4D_SINK_HPP_
#define SRC_OPENCV_V4D_SINK_HPP_
#include <functional>
#include <map>
#include <opencv2/core/cvdef.h>
#include <opencv2/core/mat.hpp>
#include <mutex>
namespace cv {
namespace v4d {
/*!
* A Sink object represents a way to write data produced by V4D (e.g. to a video file).
*/
class CV_EXPORTS Sink {
std::mutex mtx_;
bool open_ = true;
uint64_t nextSeq_ = 0;
std::map<uint64_t, cv::UMat> buffer_;
std::function<bool(const uint64_t&, const cv::UMat&)> consumer_;
public:
/*!
* Constructs the Sink object from a consumer functor.
* @param consumer A function object that consumes a UMat frame (e.g. writes it to a video file).
*/
CV_EXPORTS Sink(std::function<bool(const uint64_t&, const cv::UMat&)> consumer);
/*!
* Constructs a null Sink that is never open or ready.
*/
CV_EXPORTS Sink();
/*!
* Default destructor
*/
CV_EXPORTS virtual ~Sink();
/*!
* Signals if the sink is ready to consume data.
* @return true if the sink is ready.
*/
CV_EXPORTS bool isReady();
/*!
* Determines if the sink is open.
* @return true if the sink is open.
*/
CV_EXPORTS bool isOpen();
/*!
* The sink operator. It accepts a sequence number and a UMat frame to pass to the consumer.
* @param seq The sequence number of the frame.
* @param frame The frame to pass to the consumer (e.g. a VideoWriter).
*/
CV_EXPORTS void operator()(const uint64_t& seq, const cv::UMat& frame);
};
} /* namespace v4d */
} /* namespace cv */
#endif /* SRC_OPENCV_V4D_SINK_HPP_ */
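// Usage sketch (illustrative only; `writer` is a hypothetical, already-opened
// cv::VideoWriter captured by the consumer):
//
//   cv::v4d::Sink sink([&writer](const uint64_t& seq, const cv::UMat& frame) {
//       CV_UNUSED(seq); // seq identifies the frame's position in the stream
//       writer.write(frame);
//       return writer.isOpened();
//   });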

@@ -0,0 +1,74 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_V4D_SOURCE_HPP_
#define SRC_OPENCV_V4D_SOURCE_HPP_
#include <functional>
#include <opencv2/core/cvdef.h>
#include <opencv2/core/mat.hpp>
#include <mutex>
namespace cv {
namespace v4d {
/*!
* A Source object represents a way to provide data to V4D by using
* a generator functor.
*/
class CV_EXPORTS Source {
bool open_ = true;
std::function<bool(cv::UMat&)> generator_;
uint64_t count_ = 0;
float fps_;
bool threadSafe_ = false;
std::mutex mtx_;
public:
/*!
* Constructs the Source object from a generator functor.
* @param generator A function object that accepts a reference to a UMat frame
* that it manipulates. This is ultimately used to provide video data to #cv::v4d::V4D.
* @param fps The fps the Source object provides data with.
*/
CV_EXPORTS Source(std::function<bool(cv::UMat&)> generator, float fps);
/*!
* Constructs a null Source that is never open or ready.
*/
CV_EXPORTS Source();
/*!
* Default destructor.
*/
CV_EXPORTS virtual ~Source();
/*!
* Signals if the source is ready to provide data.
* @return true if the source is ready.
*/
CV_EXPORTS bool isReady();
CV_EXPORTS bool isThreadSafe();
CV_EXPORTS void setThreadSafe(bool ts);
/*!
* Determines if the source is open.
* @return true if the source is open.
*/
CV_EXPORTS bool isOpen();
/*!
* Returns the fps the underlying generator provides data with.
* @return The fps of the Source object.
*/
CV_EXPORTS float fps();
/*!
* The source operator. It returns the frame count and the frame generated
* (e.g. by VideoCapture) in a pair.
* @return A pair containing the frame count and the generated frame.
*/
CV_EXPORTS std::pair<uint64_t, cv::UMat> operator()();
};
} /* namespace v4d */
} /* namespace cv */
#endif /* SRC_OPENCV_V4D_SOURCE_HPP_ */
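// Usage sketch (illustrative only; `capture` is a hypothetical, already-opened
// cv::VideoCapture captured by the generator):
//
//   cv::v4d::Source source([&capture](cv::UMat& frame) {
//       return capture.read(frame);
//   }, static_cast<float>(capture.get(cv::CAP_PROP_FPS)));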

@@ -0,0 +1,124 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_THREADSAFEMAP_HPP_
#define MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_THREADSAFEMAP_HPP_
#include <any>
#include <concepts>
#include <mutex>
#include <unordered_map>
#include <shared_mutex>
namespace cv {
namespace v4d {
// A concept to check if a type is hashable
template<typename T>
concept Hashable = requires(T a) {
{ std::hash<T>{}(a) } -> std::convertible_to<std::size_t>;
};
// A concept to check if a type can be stored in an std::unordered_map as value
template<typename T>
concept Mappable = requires(T a) {
{ std::any_cast<T>(std::any{}) } -> std::same_as<T>;
};
// A class that can set and get values in a thread-safe manner (per-key locking)
template<Hashable K>
class ThreadSafeMap {
private:
// A map from keys to values
std::unordered_map<K, std::any> map;
// A map from keys to mutexes
std::unordered_map<K, std::shared_mutex> mutexes;
// A mutex to lock the map
std::shared_mutex map_mutex;
public:
// A method to set a value for a given key
template<Mappable V>
void set(K key, V value) {
// Lock the map mutex for writing
std::unique_lock<std::shared_mutex> map_lock(map_mutex);
// Check if the key exists in the map
if (map.find(key) == map.end()) {
// If the key does not exist, insert it into the map and the mutexes
map[key] = value;
mutexes[key];
} else {
// If the key exists, lock the mutex for the key for writing
std::unique_lock<std::shared_mutex> key_lock(mutexes[key]);
// Set the value for the key
map[key] = value;
}
}
// A method to get a value for a given key
template<Mappable V>
V get(K key) {
// Lock the map mutex for reading
std::shared_lock<std::shared_mutex> map_lock(map_mutex);
// Check if the key exists in the map
if (map.find(key) == map.end()) {
CV_Error(Error::StsError, "Key not found in map");
}
// Lock the mutex for the key for reading; at() avoids accidental
// insertion through operator[] while only a shared lock is held
std::shared_lock<std::shared_mutex> key_lock(mutexes.at(key));
// Get the value for the key
return std::any_cast<V>(map.at(key));
}
template<typename F> void on(K key, F func) {
// Lock the map mutex for reading
std::shared_lock<std::shared_mutex> map_lock(map_mutex);
// Check if the key exists in the map
if (map.find(key) == map.end()) {
CV_Error(Error::StsError, "Key not found in map");
}
// Lock the mutex for the key for writing
std::unique_lock<std::shared_mutex> key_lock(mutexes.at(key));
// Get the value for the key
std::any value = map.at(key);
// Apply the functor to the value
func(value);
// Set the value for the key
map.at(key) = value;
}
// A method to get a pointer to the value for a given key
// Note: This function is not thread-safe
template<Mappable V>
V* ptr(K key) {
return std::any_cast<V>(&map[key]);
}
};
}
}
#endif /* MODULES_V4D_INCLUDE_OPENCV2_V4D_DETAIL_THREADSAFEMAP_HPP_ */
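// Usage sketch (illustrative only):
//
//   cv::v4d::ThreadSafeMap<std::string> settings;
//   settings.set(std::string("fps"), 60.0);
//   double fps = settings.get<double>(std::string("fps"));
//   settings.on(std::string("fps"), [](std::any& v) {
//       v = std::any_cast<double>(v) * 0.5;
//   });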

@@ -0,0 +1,531 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_V4D_UTIL_HPP_
#define SRC_OPENCV_V4D_UTIL_HPP_
#include "source.hpp"
#include "sink.hpp"
#include <filesystem>
#include <string>
#include <iostream>
#ifdef __GNUG__
#include <cstdlib>
#include <memory>
#include <cxxabi.h>
#endif
#include <opencv2/core/ocl.hpp>
#include <opencv2/imgproc.hpp>
#include <unistd.h>
#include <mutex>
#include <functional>
#include <iostream>
#include <cmath>
#include <thread>
namespace cv {
namespace v4d {
namespace detail {
using std::cout;
using std::endl;
inline uint64_t get_epoch_nanos() {
return std::chrono::duration_cast<std::chrono::nanoseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
}
static thread_local std::mutex mtx_;
class CV_EXPORTS ThreadLocal {
public:
CV_EXPORTS static std::mutex& mutex() {
return mtx_;
}
};
class CV_EXPORTS Global {
inline static std::mutex global_mtx_;
inline static std::mutex frame_cnt_mtx_;
inline static uint64_t frame_cnt_ = 0;
inline static std::mutex start_time_mtx_;
inline static uint64_t start_time_ = get_epoch_nanos();
inline static std::mutex fps_mtx_;
inline static double fps_ = 0;
inline static std::mutex thread_id_mtx_;
inline static const std::thread::id default_thread_id_;
inline static std::thread::id main_thread_id_;
inline static thread_local bool is_main_;
inline static uint64_t run_cnt_ = 0;
inline static bool first_run_ = true;
inline static size_t workers_ready_ = 0;
inline static size_t workers_started_ = 0;
inline static size_t next_worker_idx_ = 0;
inline static std::mutex sharedMtx_;
inline static std::map<size_t, std::mutex*> shared_;
typedef typename std::map<size_t, std::mutex*>::iterator Iterator;
public:
template <typename T>
class Scope {
private:
const T& t_;
// ocl::OpenCLExecutionContext* pSavedExecCtx_ = nullptr;
// ocl::OpenCLExecutionContext* pExecCtx_ = nullptr;
//
// template<typename Tunused> void bind(const Tunused& t) {
// //do nothing for all other types the UMat
// CV_UNUSED(t);
// }
//
// void bind(const cv::UMat& t) {
//#ifdef HAVE_OPENCL
// if(ocl::useOpenCL()) {
// pExecCtx_ = (t.u && t.u->allocatorContext) ? static_cast<ocl::OpenCLExecutionContext*>(t.u->allocatorContext.get()) : nullptr;
// if(pExecCtx_ && !pExecCtx_->empty()) {
// pSavedExecCtx_ = &ocl::OpenCLExecutionContext::getCurrentRef();
// pExecCtx_->bind();
// } else {
// pSavedExecCtx_ = nullptr;
// }
// }
//#endif
// }
//
// template<typename Tunused> void unbind(const Tunused& t) {
// //do nothing for all other types the UMat
// CV_UNUSED(t);
// }
//
// void unbind(const cv::UMat& t) {
// CV_UNUSED(t);
//#ifdef HAVE_OPENCL
// if(ocl::useOpenCL() && pSavedExecCtx_ && !pSavedExecCtx_->empty()) {
// pSavedExecCtx_->bind();
// }
//#endif
// }
public:
Scope(const T& t) : t_(t) {
lock(t_);
// bind(t_);
}
~Scope() {
unlock(t_);
// unbind(t_);
}
};
CV_EXPORTS static std::mutex& mutex() {
return global_mtx_;
}
CV_EXPORTS static uint64_t next_frame_cnt() {
std::unique_lock<std::mutex> lock(frame_cnt_mtx_);
return frame_cnt_++;
}
CV_EXPORTS static uint64_t frame_cnt() {
std::unique_lock<std::mutex> lock(frame_cnt_mtx_);
return frame_cnt_;
}
CV_EXPORTS static void mul_frame_cnt(const double& factor) {
std::unique_lock<std::mutex> lock(frame_cnt_mtx_);
frame_cnt_ *= factor;
}
CV_EXPORTS static void add_to_start_time(const size_t& st) {
std::unique_lock<std::mutex> lock(start_time_mtx_);
start_time_ += st;
}
CV_EXPORTS static uint64_t start_time() {
std::unique_lock<std::mutex> lock(start_time_mtx_);
return start_time_;
}
CV_EXPORTS static double fps() {
std::unique_lock<std::mutex> lock(fps_mtx_);
return fps_;
}
CV_EXPORTS static void set_fps(const double& f) {
std::unique_lock<std::mutex> lock(fps_mtx_);
fps_ = f;
}
CV_EXPORTS static void set_main_id(const std::thread::id& id) {
std::unique_lock<std::mutex> lock(thread_id_mtx_);
main_thread_id_ = id;
}
CV_EXPORTS static bool is_main() {
std::unique_lock<std::mutex> lock(thread_id_mtx_);
return (main_thread_id_ == default_thread_id_ || main_thread_id_ == std::this_thread::get_id());
}
CV_EXPORTS static bool is_first_run() {
static std::mutex mtx;
std::unique_lock<std::mutex> lock(mtx);
bool f = first_run_;
first_run_ = false;
return f;
}
CV_EXPORTS static uint64_t next_run_cnt() {
static std::mutex mtx;
std::unique_lock<std::mutex> lock(mtx);
return run_cnt_++;
}
CV_EXPORTS static void set_workers_started(const size_t& ws) {
static std::mutex mtx;
std::unique_lock<std::mutex> lock(mtx);
workers_started_ = ws;
}
CV_EXPORTS static size_t workers_started() {
static std::mutex mtx;
std::unique_lock<std::mutex> lock(mtx);
return workers_started_;
}
CV_EXPORTS static size_t next_worker_ready() {
static std::mutex mtx;
std::unique_lock<std::mutex> lock(mtx);
return ++workers_ready_;
}
CV_EXPORTS static size_t next_worker_idx() {
static std::mutex mtx;
std::unique_lock<std::mutex> lock(mtx);
return next_worker_idx_++;
}
template<typename T>
static bool isShared(const T& shared) {
std::lock_guard<std::mutex> guard(sharedMtx_);
std::cerr << "shared:" << reinterpret_cast<size_t>(&shared) << std::endl;
return shared_.find(reinterpret_cast<size_t>(&shared)) != shared_.end();
}
template<typename T>
static void registerShared(const T& shared) {
std::lock_guard<std::mutex> guard(sharedMtx_);
std::cerr << "register:" << reinterpret_cast<size_t>(&shared) << std::endl;
shared_.insert(std::make_pair(reinterpret_cast<size_t>(&shared), new std::mutex()));
}
template<typename T>
static void lock(const T& shared) {
Iterator it, end;
std::mutex* mtx = nullptr;
{
std::lock_guard<std::mutex> guard(sharedMtx_);
it = shared_.find(reinterpret_cast<size_t>(&shared));
end = shared_.end();
if(it != end) {
mtx = (*it).second;
}
}
if(mtx != nullptr) {
mtx->lock();
return;
}
CV_Assert(!"You are trying to lock a non-shared variable");
}
template<typename T>
static void unlock(const T& shared) {
Iterator it, end;
std::mutex* mtx = nullptr;
{
std::lock_guard<std::mutex> guard(sharedMtx_);
it = shared_.find(reinterpret_cast<size_t>(&shared));
end = shared_.end();
if(it != end) {
mtx = (*it).second;
}
}
if(mtx != nullptr) {
mtx->unlock();
return;
}
CV_Assert(!"You are trying to unlock a non-shared variable");
}
template<typename T>
static T safe_copy(const T& shared) {
std::lock_guard<std::mutex> guard(sharedMtx_);
auto it = shared_.find(reinterpret_cast<size_t>(&shared));
if(it != shared_.end()) {
std::lock_guard<std::mutex> valueGuard(*(*it).second);
return shared;
} else {
CV_Assert(!"You are unnecessarily safe copying a variable");
//unreachable
return shared;
}
}
static cv::UMat safe_copy(const cv::UMat& shared) {
std::lock_guard<std::mutex> guard(sharedMtx_);
cv::UMat copy;
auto it = shared_.find(reinterpret_cast<size_t>(&shared));
if(it != shared_.end()) {
std::lock_guard<std::mutex> valueGuard(*(*it).second);
//workaround for context conflicts
shared.getMat(cv::ACCESS_READ).copyTo(copy);
return copy;
} else {
CV_Assert(!"You are unnecessarily safe copying a variable");
//unreachable
shared.getMat(cv::ACCESS_READ).copyTo(copy);
return copy;
}
}
};
//https://stackoverflow.com/a/27885283/1884837
template<class T>
struct function_traits : function_traits<decltype(&T::operator())> {
};
// partial specialization for function type
template<class R, class... Args>
struct function_traits<R(Args...)> {
using result_type = R;
using argument_types = std::tuple<std::remove_reference_t<Args>...>;
};
// partial specialization for function pointer
template<class R, class... Args>
struct function_traits<R (*)(Args...)> {
using result_type = R;
using argument_types = std::tuple<std::remove_reference_t<Args>...>;
};
// partial specialization for std::function
template<class R, class... Args>
struct function_traits<std::function<R(Args...)>> {
using result_type = R;
using argument_types = std::tuple<std::remove_reference_t<Args>...>;
};
// partial specialization for pointer-to-member-function (i.e., operator()'s)
template<class T, class R, class... Args>
struct function_traits<R (T::*)(Args...)> {
using result_type = R;
using argument_types = std::tuple<std::remove_reference_t<Args>...>;
};
template<class T, class R, class... Args>
struct function_traits<R (T::*)(Args...) const> {
using result_type = R;
using argument_types = std::tuple<std::remove_reference_t<Args>...>;
};
//https://stackoverflow.com/questions/281818/unmangling-the-result-of-stdtype-infoname
CV_EXPORTS std::string demangle(const char* name);
template <const size_t _UniqueId, typename _Res, typename... _ArgTypes>
struct fun_ptr_helper
{
public:
typedef std::function<_Res(_ArgTypes...)> function_type;
static void bind(function_type&& f)
{ instance().fn_.swap(f); }
static void bind(const function_type& f)
{ instance().fn_=f; }
static _Res invoke(_ArgTypes... args)
{ return instance().fn_(args...); }
typedef decltype(&fun_ptr_helper::invoke) pointer_type;
static pointer_type ptr()
{ return &invoke; }
private:
static fun_ptr_helper& instance()
{
static fun_ptr_helper inst_;
return inst_;
}
fun_ptr_helper() {}
function_type fn_;
};
template <const size_t _UniqueId, typename _Res, typename... _ArgTypes>
typename fun_ptr_helper<_UniqueId, _Res, _ArgTypes...>::pointer_type
get_fn_ptr(const std::function<_Res(_ArgTypes...)>& f)
{
fun_ptr_helper<_UniqueId, _Res, _ArgTypes...>::bind(f);
return fun_ptr_helper<_UniqueId, _Res, _ArgTypes...>::ptr();
}
template<typename T>
std::function<typename std::enable_if<std::is_function<T>::value, T>::type>
make_function(T *t)
{
return {t};
}
//https://stackoverflow.com/a/33047781/1884837
struct Lambda {
template<typename Tret, typename T>
static Tret lambda_ptr_exec() {
return (Tret) (*(T*)fn<T>());
}
template<typename Tret = void, typename Tfp = Tret(*)(), typename T>
static Tfp ptr(T& t) {
fn<T>(&t);
return (Tfp) lambda_ptr_exec<Tret, T>;
}
template<typename T>
static const void* fn(const void* new_fn = nullptr) {
// Statically store the lambda address on first use so that
// lambda_ptr_exec() can retrieve it on later calls.
static const void* stored = nullptr;
if (new_fn != nullptr)
stored = new_fn;
CV_Assert(stored != nullptr);
return stored;
}
};
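// Usage sketch (illustrative only): obtain a plain function pointer for a
// capturing lambda, e.g. for C-style callback APIs. Note that only one lambda
// per distinct closure type can be bound at a time.
//
//   int counter = 0;
//   auto l = [&counter]() { ++counter; };
//   void (*fp)() = Lambda::ptr(l);
//   fp(); // increments counter through the stored lambda address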
CV_EXPORTS size_t cnz(const cv::UMat& m);
}
using std::string;
class V4D;
CV_EXPORTS void copy_shared(const cv::UMat& src, cv::UMat& dst);
/*!
* Convenience function to color convert from Scalar to Scalar
* @param src The scalar to color convert
* @param code The color conversion code
* @return The color converted scalar
*/
CV_EXPORTS cv::Scalar colorConvert(const cv::Scalar& src, cv::ColorConversionCodes code);
/*!
* Convenience function to check for OpenGL errors. Should only be used via the macro #GL_CHECK.
* @param file The file path of the error.
* @param line The file line of the error.
* @param expression The expression that failed.
*/
CV_EXPORTS void gl_check_error(const std::filesystem::path& file, unsigned int line, const char* expression);
/*!
* Convenience macro to check for OpenGL errors.
*/
#ifndef NDEBUG
#define GL_CHECK(expr) \
expr; \
cv::v4d::gl_check_error(__FILE__, __LINE__, #expr);
#else
#define GL_CHECK(expr) \
expr;
#endif
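// Usage sketch (illustrative only): wrap individual GL calls so that failures
// are reported with file and line in debug builds:
//
//   GL_CHECK(glClearColor(0.0f, 0.0f, 0.0f, 1.0f));
//   GL_CHECK(glClear(GL_COLOR_BUFFER_BIT));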
CV_EXPORTS void initShader(unsigned int handles[3], const char* vShader, const char* fShader, const char* outputAttributeName);
/*!
* Returns the OpenGL vendor string
* @return a string object with the OpenGL vendor information
*/
CV_EXPORTS std::string getGlVendor();
/*!
* Returns the OpenGL Version information.
* @return a string object with the OpenGL version information
*/
CV_EXPORTS std::string getGlInfo();
/*!
* Returns the OpenCL Version information.
* @return a string object with the OpenCL version information
*/
CV_EXPORTS std::string getClInfo();
/*!
* Determines if Intel VAAPI is supported
* @return true if it is supported
*/
CV_EXPORTS bool isIntelVaSupported();
/*!
* Determines if cl_khr_gl_sharing is supported
* @return true if it is supported
*/
CV_EXPORTS bool isClGlSharingSupported();
/*!
* Tells the application if it's alright to keep on running.
* Note: If you use this mechanism, signal handlers are installed.
* @return true if the program should keep on running
*/
CV_EXPORTS bool keepRunning();
CV_EXPORTS void requestFinish();
/*!
* Creates an Intel VAAPI enabled VideoWriter sink object to use in conjunction with #V4D::setSink().
* Usually you would call #makeWriterSink() and let it automatically decide if VAAPI is available.
* @param window The V4D window the sink is created for.
* @param outputFilename The filename to write the video to.
* @param fourcc The fourcc code of the codec to use.
* @param fps The fps of the target video.
* @param frameSize The frame size of the target video.
* @param vaDeviceIndex The VAAPI device index to use.
* @return A VAAPI enabled sink object.
*/
CV_EXPORTS cv::Ptr<Sink> makeVaSink(cv::Ptr<V4D> window, const string& outputFilename, const int fourcc, const float fps,
const cv::Size& frameSize, const int vaDeviceIndex);
/*!
* Creates an Intel VAAPI enabled VideoCapture source object to use in conjunction with #V4D::setSource().
* Usually you would call #makeCaptureSource() and let it automatically decide if VAAPI is available.
* @param window The V4D window the source is created for.
* @param inputFilename The file to read from.
* @param vaDeviceIndex The VAAPI device index to use.
* @return A VAAPI enabled source object.
*/
CV_EXPORTS cv::Ptr<Source> makeVaSource(cv::Ptr<V4D> window, const string& inputFilename, const int vaDeviceIndex);
/*!
* Creates a VideoWriter sink object to use in conjunction with #V4D::setSink().
* This function automatically determines if Intel VAAPI is available and enables it if so.
* @param outputFilename The filename to write the video to.
* @param fourcc The fourcc code of the codec to use.
* @param fps The fps of the target video.
* @param frameSize The frame size of the target video.
* @return A (optionally VAAPI enabled) VideoWriter sink object.
*/
CV_EXPORTS cv::Ptr<Sink> makeWriterSink(cv::Ptr<V4D> window, const string& outputFilename, const float fps,
const cv::Size& frameSize);
CV_EXPORTS cv::Ptr<Sink> makeWriterSink(cv::Ptr<V4D> window, const string& outputFilename, const float fps,
const cv::Size& frameSize, const int fourcc);
/*!
* Creates a VideoCapture source object to use in conjunction with #V4D::setSource().
* This function automatically determines if Intel VAAPI is available and enables it if so.
* @param inputFilename The file to read from.
* @return A (optionally VAAPI enabled) VideoCapture enabled source object.
*/
CV_EXPORTS cv::Ptr<Source> makeCaptureSource(cv::Ptr<V4D> window, const string& inputFilename);
void resizePreserveAspectRatio(const cv::UMat& src, cv::UMat& output, const cv::Size& dstSize, const cv::Scalar& bgcolor = {0,0,0,255});
}
}
#endif /* SRC_OPENCV_V4D_UTIL_HPP_ */

@@ -0,0 +1,872 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#ifndef SRC_OPENCV_V4D_V4D_HPP_
#define SRC_OPENCV_V4D_V4D_HPP_
#include "source.hpp"
#include "sink.hpp"
#include "util.hpp"
#include "nvg.hpp"
#include "threadsafemap.hpp"
#include "detail/transaction.hpp"
#include "detail/framebuffercontext.hpp"
#include "detail/nanovgcontext.hpp"
#include "detail/imguicontext.hpp"
#include "detail/timetracker.hpp"
#include "detail/glcontext.hpp"
#include "detail/sourcecontext.hpp"
#include "detail/sinkcontext.hpp"
#include "detail/resequence.hpp"
#include "events.hpp"
#include <type_traits>
#include <shared_mutex>
#include <iostream>
#include <future>
#include <set>
#include <map>
#include <string>
#include <memory>
#include <vector>
#include <type_traits>
#include <stdio.h>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/core/utils/logger.hpp>
using std::cout;
using std::cerr;
using std::endl;
using std::string;
using namespace std::chrono_literals;
/*!
* OpenCV namespace
*/
namespace cv {
/*!
* V4D namespace
*/
namespace v4d {
enum AllocateFlags {
NONE = 0,
NANOVG = 1,
IMGUI = 2,
ALL = NANOVG | IMGUI
};
class Plan {
const cv::Size sz_;
const cv::Rect vp_;
public:
//predefined branch predicates
constexpr static auto always_ = []() { return true; };
constexpr static auto isTrue_ = [](const bool& b) { return b; };
constexpr static auto isFalse_ = [](const bool& b) { return !b; };
constexpr static auto and_ = [](const bool& a, const bool& b) { return a && b; };
constexpr static auto or_ = [](const bool& a, const bool& b) { return a || b; };
explicit Plan(const cv::Rect& vp) : sz_(cv::Size(vp.width, vp.height)), vp_(vp){};
explicit Plan(const cv::Size& sz) : sz_(sz), vp_(0, 0, sz.width, sz.height){};
virtual ~Plan() {};
virtual void gui(cv::Ptr<V4D> window) { CV_UNUSED(window); };
virtual void setup(cv::Ptr<V4D> window) { CV_UNUSED(window); };
virtual void infer(cv::Ptr<V4D> window) = 0;
virtual void teardown(cv::Ptr<V4D> window) { CV_UNUSED(window); };
const cv::Size& size() {
return sz_;
}
const cv::Rect& viewport() {
return vp_;
}
};
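// Illustrative sketch of a minimal Plan subclass (names are hypothetical):
//
//   class MyPlan : public cv::v4d::Plan {
//   public:
//       using Plan::Plan; // inherit the Rect/Size constructors
//       void setup(cv::Ptr<V4D> window) override { /* allocate per-run state */ }
//       void infer(cv::Ptr<V4D> window) override { /* per-frame pipeline */ }
//       void teardown(cv::Ptr<V4D> window) override { /* release state */ }
//   };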
/*!
* Private namespace
*/
namespace detail {
template <typename T> using static_not = std::integral_constant<bool, !T::value>;
template<typename T, typename ... Args>
struct is_function
{
static const bool value = std::is_constructible<T,std::function<void(Args...)>>::value;
};
//https://stackoverflow.com/a/34873353/1884837
template<class T>
struct is_stateless_lambda : std::integral_constant<bool, sizeof(T) == sizeof(std::true_type)>{};
template<typename T> std::string int_to_hex( T i )
{
std::stringstream stream;
stream << "0x"
<< std::setfill ('0') << std::setw(sizeof(T) * 2)
<< std::hex << i;
return stream.str();
}
template<typename Tlamba> std::string lambda_ptr_hex(Tlamba&& l) {
return int_to_hex((size_t)Lambda::ptr(l));
}
static std::size_t index(const std::thread::id id)
{
static std::size_t nextindex = 0;
static std::mutex my_mutex;
static std::unordered_map<std::thread::id, std::size_t> ids;
std::lock_guard<std::mutex> lock(my_mutex);
auto iter = ids.find(id);
if(iter == ids.end())
return ids[id] = nextindex++;
return iter->second;
}
template<typename Tfn, typename ... Args>
const string make_id(const string& name, Tfn&& fn, Args&& ... args) {
std::stringstream ss;
ss << name << "(" << index(std::this_thread::get_id()) << "-" << detail::lambda_ptr_hex(std::forward<Tfn>(fn)) << ")";
((ss << ',' << int_to_hex((long)&args)), ...);
return ss.str();
}
}
using namespace cv::v4d::detail;
class CV_EXPORTS V4D {
friend class detail::FrameBufferContext;
friend class HTML5Capture;
int32_t workerIdx_ = -1;
cv::Ptr<V4D> self_;
cv::Ptr<Plan> plan_;
const cv::Size initialSize_;
AllocateFlags flags_;
bool debug_;
cv::Rect viewport_;
bool stretching_;
int samples_;
bool focused_ = false;
cv::Ptr<FrameBufferContext> mainFbContext_ = nullptr;
cv::Ptr<SourceContext> sourceContext_ = nullptr;
cv::Ptr<SinkContext> sinkContext_ = nullptr;
cv::Ptr<NanoVGContext> nvgContext_ = nullptr;
cv::Ptr<ImGuiContextImpl> imguiContext_ = nullptr;
cv::Ptr<OnceContext> onceContext_ = nullptr;
cv::Ptr<PlainContext> plainContext_ = nullptr;
std::mutex glCtxMtx_;
std::map<int32_t,cv::Ptr<GLContext>> glContexts_;
bool closed_ = false;
cv::Ptr<Source> source_;
cv::Ptr<Sink> sink_;
cv::UMat captureFrame_;
cv::UMat writerFrame_;
std::function<bool(int key, int scancode, int action, int modifiers)> keyEventCb_;
std::function<void(int button, int action, int modifiers)> mouseEventCb_;
cv::Point2f mousePos_;
uint64_t frameCnt_ = 0;
bool showFPS_ = true;
bool printFPS_ = false;
bool showTracking_ = true;
std::vector<std::tuple<std::string,bool,long>> accesses_;
std::map<std::string, cv::Ptr<Transaction>> transactions_;
bool disableIO_ = false;
public:
/*!
* Creates a V4D object which is the central object to perform visualizations with.
* @param size The initial size of the heavy-weight window.
* @param fbsize The initial size of the framebuffer backing the window (needs to be equal to or greater than the window size).
* @param title The window title.
* @param flags Which optional subsystems (NanoVG, ImGui) to allocate.
* @param offscreen Don't create a window and rather render offscreen.
* @param debug Create a debug OpenGL context.
* @param samples MSAA samples.
*/
CV_EXPORTS static cv::Ptr<V4D> make(const cv::Size& size, const string& title, AllocateFlags flags = ALL, bool offscreen = false, bool debug = false, int samples = 0);
CV_EXPORTS static cv::Ptr<V4D> make(const cv::Size& size, const cv::Size& fbsize, const string& title, AllocateFlags flags = ALL, bool offscreen = false, bool debug = false, int samples = 0);
CV_EXPORTS static cv::Ptr<V4D> make(const V4D& v4d, const string& title);
/*!
* Default destructor
*/
CV_EXPORTS virtual ~V4D();
CV_EXPORTS const int32_t& workerIndex() const;
CV_EXPORTS size_t workers_running();
/*!
* The internal framebuffer exposed as OpenGL Texture2D.
* @return The texture object.
*/
CV_EXPORTS cv::ogl::Texture2D& texture();
CV_EXPORTS std::string title() const;
struct Node {
string name_;
std::set<long> read_deps_;
std::set<long> write_deps_;
cv::Ptr<Transaction> tx_ = nullptr;
bool initialized() {
return tx_ != nullptr;
}
};
std::vector<cv::Ptr<Node>> nodes_;
void findNode(const string& name, cv::Ptr<Node>& found) {
CV_Assert(!name.empty());
if(nodes_.empty())
return;
if(nodes_.back()->name_ == name)
found = nodes_.back();
}
void makeGraph() {
// cout << std::this_thread::get_id() << " ### MAKE PLAN ### " << endl;
for(const auto& t : accesses_) {
const string& name = std::get<0>(t);
const bool& read = std::get<1>(t);
const long& dep = std::get<2>(t);
cv::Ptr<Node> n;
findNode(name, n);
if(!n) {
n = new Node();
n->name_ = name;
n->tx_ = transactions_[name];
CV_Assert(!n->name_.empty());
CV_Assert(n->tx_);
nodes_.push_back(n);
// cout << "make: " << std::this_thread::get_id() << " " << n->name_ << endl;
}
if(read) {
n->read_deps_.insert(dep);
} else {
n->write_deps_.insert(dep);
}
}
}
void runGraph() {
bool isEnabled = true;
for (auto& n : nodes_) {
if (n->tx_->isPredicate()) {
isEnabled = n->tx_->enabled();
} else if (isEnabled) {
if(n->tx_->lock()) {
std::lock_guard<std::mutex> guard(Global::mutex());
n->tx_->getContext()->execute([n]() {
TimeTracker::getInstance()->execute(n->name_, [n](){
n->tx_->perform();
});
});
} else {
n->tx_->getContext()->execute([n]() {
TimeTracker::getInstance()->execute(n->name_, [n](){
n->tx_->perform();
});
});
}
}
}
}
void clearGraph() {
nodes_.clear();
accesses_.clear();
}
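//The graph phases are always used together (see run() below): the context calls record
//accesses and transactions while the Plan methods execute, makeGraph() folds the recorded
//accesses into dependency nodes, runGraph() performs the transactions in order (honoring
//branch predicates) and clearGraph() resets the state for the next phase.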
template<typename Tenabled, typename T, typename ...Args>
typename std::enable_if<std::is_same<Tenabled, std::false_type>::value, void>::type
emit_access(const string& context, bool read, const T* tp) {
//disabled
}
template<typename Tenabled, typename T, typename ...Args>
typename std::enable_if<std::is_same<Tenabled, std::true_type>::value, void>::type
emit_access(const string& context, bool read, const T* tp) {
// cout << "access: " << std::this_thread::get_id() << " " << context << string(read ? " <- " : " -> ") << demangle(typeid(std::remove_const_t<T>).name()) << "(" << (long)tp << ") " << endl;
accesses_.push_back(std::make_tuple(context, read, (long)tp));
}
template<typename Tfn, typename ...Args>
void add_transaction(bool lock, cv::Ptr<V4DContext> ctx, const string& invocation, Tfn fn, Args&& ...args) {
auto it = transactions_.find(invocation);
if(it == transactions_.end()) {
auto tx = make_transaction(lock, fn, std::forward<Args>(args)...);
tx->setContext(ctx);
transactions_.insert({invocation, tx});
}
}
template <typename Tfn, typename ... Args>
void init_context_call(Tfn fn, Args&& ... args) {
static_assert(detail::is_stateless_lambda<std::remove_cv_t<std::remove_reference_t<decltype(fn)>>>::value, "All passed functors must be stateless lambdas");
static_assert(std::conjunction<std::is_lvalue_reference<Args>...>::value, "All arguments must be l-value references");
cv::v4d::event::set_current_glfw_window(getGLFWWindow());
}
template <typename Tfn, typename ... Args>
typename std::enable_if<std::is_invocable_v<Tfn, Args...>, void>::type
gl(Tfn fn, Args&& ... args) {
init_context_call(fn, args...);
const string id = make_id("gl-1", fn, args...);
emit_access<std::true_type, cv::UMat, Args...>(id, true, &fbCtx()->fb());
(emit_access<std::true_type, std::remove_reference_t<Args>, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
emit_access<std::true_type, cv::UMat, Args...>(id, false, &fbCtx()->fb());
std::function functor(fn);
add_transaction(false, glCtx(-1), id, std::forward<decltype(functor)>(functor), std::forward<Args>(args)...);
}
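//Usage sketch (cf. the cube-demo sample further below): run raw OpenGL calls against the
//shared framebuffer; the lambda must be stateless and all arguments l-value references:
//
//  window->gl([]() {
//      glClearColor(0.2f, 0.24f, 0.4f, 1.0f);
//      glClear(GL_COLOR_BUFFER_BIT);
//  });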
template <typename Tfn, typename ... Args>
void gl(int32_t idx, Tfn fn, Args&& ... args) {
init_context_call(fn, args...);
const string id = make_id("gl" + std::to_string(idx), fn, args...);
emit_access<std::true_type, cv::UMat, Args...>(id, true, &fbCtx()->fb());
(emit_access<std::true_type, std::remove_reference_t<Args>, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
emit_access<std::true_type, cv::UMat, Args...>(id, false, &fbCtx()->fb());
std::function<void((const int32_t&,Args...))> functor(fn);
add_transaction<decltype(functor),const int32_t&>(false, glCtx(idx),id, std::forward<decltype(functor)>(functor), glCtx(idx)->getIndex(), std::forward<Args>(args)...);
}
template <typename Tfn>
void branch(Tfn fn) {
init_context_call(fn);
const string id = make_id("branch", fn);
std::function functor = fn;
emit_access<std::true_type, decltype(fn)>(id, true, &fn);
add_transaction(true, plainCtx(), id, functor);
}
template <typename Tfn, typename ... Args>
void branch(Tfn fn, Args&& ... args) {
init_context_call(fn, args...);
const string id = make_id("branch", fn, args...);
(emit_access<std::true_type, std::remove_reference_t<Args>, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
std::function functor = fn;
add_transaction(true, plainCtx(), id, functor, std::forward<Args>(args)...);
}
template <typename Tfn, typename ... Args>
void branch(int workerIdx, Tfn fn, Args&& ... args) {
init_context_call(fn, args...);
const string id = make_id("branch-pin" + std::to_string(workerIdx), fn, args...);
(emit_access<std::true_type, std::remove_reference_t<Args>, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
std::function<bool(Args...)> functor = fn;
std::function<bool(Args...)> wrap = [this, workerIdx, functor](Args&& ... args){
return this->workerIndex() == workerIdx && functor(args...);
};
add_transaction(true, plainCtx(), id, wrap, std::forward<Args>(args)...);
}
template <typename Tfn>
void endbranch(Tfn fn) {
init_context_call(fn);
const string id = make_id("endbranch", fn);
std::function functor = fn;
emit_access<std::true_type, decltype(fn)>(id, true, &fn);
add_transaction(true, plainCtx(), id, functor);
}
template <typename Tfn, typename ... Args>
void endbranch(Tfn fn, Args&& ... args) {
init_context_call(fn, args...);
const string id = make_id("endbranch", fn, args...);
(emit_access<std::true_type, std::remove_reference_t<Args>, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
std::function<bool(Args...)> functor = [this](Args&& ... args){
return true;
};
add_transaction(true, plainCtx(), id, functor, std::forward<Args>(args)...);
}
template <typename Tfn, typename ... Args>
void endbranch(int workerIdx, Tfn fn, Args&& ... args) {
init_context_call(fn, args...);
const string id = make_id("endbranch-pin" + std::to_string(workerIdx), fn, args...);
(emit_access<std::true_type, std::remove_reference_t<Args>, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
std::function<bool(Args...)> functor = [this, workerIdx](Args&& ... args){
return this->workerIndex() == workerIdx;
};
add_transaction(true, plainCtx(), id, functor, std::forward<Args>(args)...);
}
template <typename Tfn, typename ... Args>
void fb(Tfn fn, Args&& ... args) {
init_context_call(fn, args...);
const string id = make_id("fb", fn, args...);
using Tfb = std::add_lvalue_reference_t<typename std::tuple_element<0, typename function_traits<Tfn>::argument_types>::type>;
using Tfbbase = typename std::remove_cv<Tfb>::type;
static_assert((std::is_same<Tfb, cv::UMat&>::value || std::is_same<Tfb, const cv::UMat&>::value) || !"The first argument must be either of type 'cv::UMat&' or 'const cv::UMat&'");
emit_access<std::true_type, cv::UMat, Tfb, Args...>(id, true, &fbCtx()->fb());
(emit_access<std::true_type, std::remove_reference_t<Args>, Tfb, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
emit_access<static_not<typename std::is_const<Tfbbase>::type>, cv::UMat, Tfb, Args...>(id, false, &fbCtx()->fb());
std::function<void((Tfb,Args...))> functor(fn);
add_transaction<decltype(functor),Tfb>(false, fbCtx(),id, std::forward<decltype(functor)>(functor), fbCtx()->fb(), std::forward<Args>(args)...);
}
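//Usage sketch (cf. the display_image_fb sample further below): the first lambda parameter
//receives the framebuffer as 'cv::UMat&' (or 'const cv::UMat&' for read-only access):
//
//  window->fb([](cv::UMat& framebuffer, const cv::UMat& img) {
//      img.copyTo(framebuffer);
//  }, image_);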
void capture() {
if(disableIO_)
return;
capture([](const cv::UMat& inputFrame, cv::UMat& f){
if(!inputFrame.empty())
inputFrame.copyTo(f);
}, captureFrame_);
fb([](cv::UMat& frameBuffer, const cv::UMat& f) {
if(!f.empty())
f.copyTo(frameBuffer);
}, captureFrame_);
}
template <typename Tfn, typename ... Args>
void capture(Tfn fn, Args&& ... args) {
init_context_call(fn, args...);
if(disableIO_)
return;
const string id = make_id("capture", fn, args...);
using Tfb = std::add_lvalue_reference_t<typename std::tuple_element<0, typename function_traits<Tfn>::argument_types>::type>;
static_assert((std::is_same<Tfb,const cv::UMat&>::value) || !"The first argument must be of type 'const cv::UMat&'");
emit_access<std::true_type, cv::UMat, Tfb, Args...>(id, true, &sourceCtx()->sourceBuffer());
(emit_access<std::true_type, std::remove_reference_t<Args>, Tfb, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
std::function<void((Tfb,Args...))> functor(fn);
add_transaction<decltype(functor),Tfb>(false, std::dynamic_pointer_cast<V4DContext>(sourceCtx()),id, std::forward<decltype(functor)>(functor), sourceCtx()->sourceBuffer(), std::forward<Args>(args)...);
}
void write() {
if(disableIO_)
return;
fb([](const cv::UMat& frameBuffer, cv::UMat& f) {
frameBuffer.copyTo(f);
}, writerFrame_);
write([](cv::UMat& outputFrame, const cv::UMat& f){
f.copyTo(outputFrame);
}, writerFrame_);
}
template <typename Tfn, typename ... Args>
void write(Tfn fn, Args&& ... args) {
init_context_call(fn, args...);
if(disableIO_)
return;
const string id = make_id("write", fn, args...);
using Tfb = std::add_lvalue_reference_t<typename std::tuple_element<0, typename function_traits<Tfn>::argument_types>::type>;
static_assert((std::is_same<Tfb,cv::UMat&>::value) || !"The first argument must be of type 'cv::UMat&'");
emit_access<std::true_type, cv::UMat, Tfb, Args...>(id, true, &sinkCtx()->sinkBuffer());
(emit_access<std::true_type, std::remove_reference_t<Args>, Tfb, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
emit_access<std::true_type, cv::UMat, Tfb, Args...>(id, false, &sinkCtx()->sinkBuffer());
std::function<void((Tfb,Args...))> functor(fn);
add_transaction<decltype(functor),Tfb>(false, std::dynamic_pointer_cast<V4DContext>(sinkCtx()),id, std::forward<decltype(functor)>(functor), sinkCtx()->sinkBuffer(), std::forward<Args>(args)...);
}
template <typename Tfn, typename ... Args>
void nvg(Tfn fn, Args&&... args) {
init_context_call(fn, args...);
const string id = make_id("nvg", fn, args...);
emit_access<std::true_type, cv::UMat, Args...>(id, true, &fbCtx()->fb());
(emit_access<std::true_type, std::remove_reference_t<Args>, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
emit_access<std::true_type, cv::UMat, Args...>(id, false, &fbCtx()->fb());
std::function functor(fn);
add_transaction<decltype(functor)>(false, nvgCtx(), id, std::forward<decltype(functor)>(functor), std::forward<Args>(args)...);
}
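//Usage sketch (cf. the custom_source_and_sink sample further below): the wrapped NanoVG
//C-functions live in the namespace cv::v4d::nvg and draw into the framebuffer:
//
//  window->nvg([](const cv::Size& sz) {
//      using namespace cv::v4d::nvg;
//      fontSize(40.0f);
//      fontFace("sans-bold");
//      fillColor(cv::Scalar(255, 0, 0, 255));
//      text(sz.width / 2.0, sz.height / 2.0, "Hello", nullptr);
//  }, window->fbSize());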
template <typename Tfn, typename ... Args>
void once(Tfn fn, Args&&... args) {
CV_Assert(detail::is_stateless_lambda<std::remove_cv_t<std::remove_reference_t<decltype(fn)>>>::value);
const string id = make_id("once", fn, args...);
(emit_access<std::true_type, std::remove_reference_t<Args>, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
std::function functor(fn);
add_transaction<decltype(functor)>(false, onceCtx(), id, std::forward<decltype(functor)>(functor), std::forward<Args>(args)...);
}
template <typename Tfn, typename ... Args>
void plain(Tfn fn, Args&&... args) {
init_context_call(fn, args...);
const string id = make_id("plain", fn, args...);
(emit_access<std::true_type, std::remove_reference_t<Args>, Args...>(id, std::is_const_v<std::remove_reference_t<Args>>, &args),...);
std::function functor(fn);
add_transaction<decltype(functor)>(false, plainCtx(), id, std::forward<decltype(functor)>(functor), std::forward<Args>(args)...);
}
template<typename Tfn, typename ... Args>
void imgui(Tfn fn, Args&& ... args) {
init_context_call(fn, args...);
if(!hasImguiCtx())
return;
auto s = self();
imguiCtx()->build([s, fn, &args...](ImGuiContext* ctx) {
fn(s, ctx, args...);
});
}
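//Usage sketch (cf. the beauty-demo sample further below): build ImGui widgets; the lambda
//receives the window, the ImGui context and the forwarded arguments:
//
//  window->imgui([](cv::Ptr<V4D> win, ImGuiContext* ctx, Params& params) {
//      using namespace ImGui;
//      SetCurrentContext(ctx);
//      Begin("Settings");
//      Checkbox("Side by side", &params.sideBySide_);
//      End();
//  }, params_);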
/*!
* Copy the framebuffer contents to an OutputArray.
* @param arr The array to copy to.
*/
CV_EXPORTS void copyTo(cv::UMat& arr);
/*!
* Copy the InputArray contents to the framebuffer.
* @param arr The array to copy.
*/
CV_EXPORTS void copyFrom(const cv::UMat& arr);
template<typename Tplan>
void run(cv::Ptr<Tplan> plan, int32_t workers = -1) {
plan_ = std::static_pointer_cast<Plan>(plan);
static Resequence reseq;
//for now, if automatic determination of the number of workers is requested,
//set workers always to 2
CV_Assert(workers > -2);
if(workers == -1) {
workers = 2;
} else {
++workers;
}
std::vector<std::thread*> threads;
{
static std::mutex runMtx;
std::unique_lock<std::mutex> lock(runMtx);
cerr << "run plan: " << std::this_thread::get_id() << " workers: " << workers << endl;
if(Global::is_first_run()) {
Global::set_main_id(std::this_thread::get_id());
cerr << "Starting with " << workers - 1<< " extra workers" << endl;
cv::utils::logging::setLogLevel(cv::utils::logging::LOG_LEVEL_SILENT);
}
if(workers > 1) {
cv::setNumThreads(0);
}
if(Global::is_main()) {
cv::Size sz = this->initialSize();
const string title = this->title();
bool debug = this->debug_;
auto src = this->getSource();
auto sink = this->getSink();
Global::set_workers_started(workers);
std::vector<cv::Ptr<Tplan>> plans;
//make sure all Plans are constructed before starting the workers
for (size_t i = 0; i < workers; ++i) {
plans.push_back(new Tplan(plan->size()));
}
for (size_t i = 0; i < workers; ++i) {
threads.push_back(
new std::thread(
[this, i, src, sink, plans] {
cv::utils::logging::setLogLevel(cv::utils::logging::LOG_LEVEL_SILENT);
cv::Ptr<cv::v4d::V4D> worker = V4D::make(*this, this->title() + "-worker-" + std::to_string(i));
if (src) {
worker->setSource(src);
}
if (sink) {
worker->setSink(sink);
}
cv::Ptr<Tplan> newPlan = plans[i];
worker->run(newPlan, 0);
}
)
);
}
}
}
CLExecScope_t scope(this->fbCtx()->getCLExecContext());
this->fbCtx()->makeCurrent();
if(Global::is_main()) {
this->printSystemInfo();
} else {
try {
plan->setup(self());
this->makeGraph();
this->runGraph();
this->clearGraph();
if(!Global::is_main() && Global::workers_started() == Global::next_worker_ready()) {
cv::utils::logging::setLogLevel(cv::utils::logging::LOG_LEVEL_INFO);
}
} catch(std::exception& ex) {
CV_Error_(cv::Error::StsError, ("pipeline setup failed: %s", ex.what()));
}
}
if(Global::is_main()) {
try {
plan->gui(self());
} catch(std::exception& ex) {
CV_Error_(cv::Error::StsError, ("GUI setup failed: %s", ex.what()));
}
} else {
plan->infer(self());
this->makeGraph();
}
try {
if(Global::is_main()) {
do {
//refresh-rate depends on swap interval (1) for sync
} while(keepRunning() && this->display());
requestFinish();
reseq.finish();
} else {
cerr << "Starting pipeling with " << this->nodes_.size() << " nodes." << endl;
static std::mutex seqMtx;
do {
reseq.notify();
uint64_t seq;
{
std::unique_lock<std::mutex> lock(seqMtx);
seq = Global::next_run_cnt();
}
this->runGraph();
reseq.waitFor(seq);
} while(keepRunning() && this->display());
}
} catch(std::exception& ex) {
requestFinish();
reseq.finish();
CV_LOG_WARNING(nullptr, "-> pipeline terminated: " << ex.what());
}
if(!Global::is_main()) {
this->clearGraph();
try {
plan->teardown(self());
this->makeGraph();
this->runGraph();
this->clearGraph();
} catch(std::exception& ex) {
CV_Error_(cv::Error::StsError, ("pipeline tear-down failed: %s", ex.what()));
}
} else {
for(auto& t : threads)
t->join();
}
}
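//Usage sketch (cf. the sample mains further below): construct a Plan and a window, then run.
//The default workers value of -1 currently resolves to two workers:
//
//  cv::Ptr<MyPlan> plan = new MyPlan(cv::Size(1280, 720));
//  cv::Ptr<V4D> window = V4D::make(plan->size(), "My Plan", ALL);
//  window->run(plan);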
/*!
* Called to feed an image directly to the framebuffer
*/
void feed(cv::UMat& in);
/*!
* Fetches a copy of the framebuffer.
* @return a copy of the framebuffer.
*/
CV_EXPORTS cv::UMat fetch();
/*!
* Set the current #cv::v4d::Source object. Usually created using #makeCaptureSource().
* @param src A #cv::v4d::Source object.
*/
CV_EXPORTS void setSource(cv::Ptr<Source> src);
CV_EXPORTS cv::Ptr<Source> getSource();
CV_EXPORTS bool hasSource();
/*!
* Set the current #cv::v4d::Sink object. Usually created using #makeWriterSink().
* @param sink A #cv::v4d::Sink object.
*/
CV_EXPORTS void setSink(cv::Ptr<Sink> sink);
CV_EXPORTS cv::Ptr<Sink> getSink();
CV_EXPORTS bool hasSink();
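//Usage sketch (cf. the beauty-demo main further below): attach a capture source and a
//writer sink created by the respective factory functions:
//
//  auto src = makeCaptureSource(window, "input.mkv");
//  auto sink = makeWriterSink(window, "output.mkv", src->fps(), plan->size());
//  window->setSource(src);
//  window->setSink(sink);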
/*!
* Get the window position.
* @return The window position.
*/
CV_EXPORTS cv::Vec2f position();
/*!
* Get the current viewport reference.
* @return The current viewport reference.
*/
CV_EXPORTS cv::Rect& viewport();
/*!
* Get the pixel ratio of the display x-axis.
* @return The pixel ratio of the display x-axis.
*/
CV_EXPORTS float pixelRatioX();
/*!
* Get the pixel ratio of the display y-axis.
* @return The pixel ratio of the display y-axis.
*/
CV_EXPORTS float pixelRatioY();
CV_EXPORTS const cv::Size& initialSize() const;
/*!
* Get the framebuffer size.
* @return The framebuffer size.
*/
CV_EXPORTS const cv::Size& fbSize() const;
/*!
* Set the window size
* @param sz The future size of the window.
*/
CV_EXPORTS void setSize(const cv::Size& sz);
/*!
* Get the window size.
* @return The window size.
*/
CV_EXPORTS cv::Size size();
CV_EXPORTS bool getShowFPS();
CV_EXPORTS void setShowFPS(bool s);
CV_EXPORTS bool getPrintFPS();
CV_EXPORTS void setPrintFPS(bool p);
CV_EXPORTS bool getShowTracking();
CV_EXPORTS void setShowTracking(bool st);
CV_EXPORTS void setDisableIO(bool d);
CV_EXPORTS bool isFullscreen();
/*!
* Enable or disable fullscreen mode.
* @param f if true enable fullscreen mode else disable.
*/
CV_EXPORTS void setFullscreen(bool f);
/*!
* Determines if the window is resizeable.
* @return true if the window is resizeable.
*/
CV_EXPORTS bool isResizable();
/*!
* Set the window resizable.
* @param r if r is true set the window resizable.
*/
CV_EXPORTS void setResizable(bool r);
/*!
* Determine if the window is visible.
* @return true if the window is visible.
*/
CV_EXPORTS bool isVisible();
/*!
* Set the window visible or invisible.
* @param v if v is true set the window visible.
*/
CV_EXPORTS void setVisible(bool v);
/*!
* Enable/Disable scaling the framebuffer during blitting.
* @param s if true enable scaling.
*/
CV_EXPORTS void setStretching(bool s);
/*!
* Determine if framebuffer is scaled during blitting.
* @return true if framebuffer is scaled during blitting.
*/
CV_EXPORTS bool isStretching();
/*!
* Determine if the V4D object is marked as focused.
* @return true if the V4D object is marked as focused.
*/
CV_EXPORTS bool isFocused();
/*!
* Mark the V4D object as focused.
* @param f if true mark as focused.
*/
CV_EXPORTS void setFocused(bool f);
/*!
* Every time a frame is displayed this count is incremented.
* @return the current frame count.
*/
CV_EXPORTS const uint64_t& frameCount() const;
/*!
* Determine if the window is closed.
* @return true if the window is closed.
*/
CV_EXPORTS bool isClosed();
/*!
* Close the window.
*/
CV_EXPORTS void close();
/*!
* Display the framebuffer in the native window by blitting.
* @return false if the window is closed.
*/
CV_EXPORTS bool display();
/*!
* Print basic system information to stderr.
*/
CV_EXPORTS void printSystemInfo();
CV_EXPORTS GLFWwindow* getGLFWWindow() const;
CV_EXPORTS cv::Ptr<FrameBufferContext> fbCtx() const;
CV_EXPORTS cv::Ptr<SourceContext> sourceCtx();
CV_EXPORTS cv::Ptr<SinkContext> sinkCtx();
CV_EXPORTS cv::Ptr<NanoVGContext> nvgCtx();
CV_EXPORTS cv::Ptr<OnceContext> onceCtx();
CV_EXPORTS cv::Ptr<PlainContext> plainCtx();
CV_EXPORTS cv::Ptr<ImGuiContextImpl> imguiCtx();
CV_EXPORTS cv::Ptr<GLContext> glCtx(int32_t idx = 0);
CV_EXPORTS bool hasFbCtx();
CV_EXPORTS bool hasSourceCtx();
CV_EXPORTS bool hasSinkCtx();
CV_EXPORTS bool hasNvgCtx();
CV_EXPORTS bool hasOnceCtx();
CV_EXPORTS bool hasParallelCtx();
CV_EXPORTS bool hasImguiCtx();
CV_EXPORTS bool hasGlCtx(uint32_t idx = 0);
CV_EXPORTS size_t numGlCtx();
private:
V4D(const V4D& v4d, const string& title);
V4D(const cv::Size& size, const cv::Size& fbsize,
const string& title, AllocateFlags flags, bool offscreen, bool debug, int samples);
cv::Point2f getMousePosition();
void setMousePosition(const cv::Point2f& pt);
void swapContextBuffers();
protected:
AllocateFlags flags();
cv::Ptr<V4D> self();
void fence();
bool wait(uint64_t timeout = 0);
};
}
} /* namespace cv */
#endif /* SRC_OPENCV_V4D_V4D_HPP_ */

@ -0,0 +1,399 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/face.hpp>
#include <opencv2/stitching/detail/blenders.hpp>
#include <opencv2/tracking.hpp>
#include <vector>
#include <string>
using std::vector;
using std::string;
/*!
* Data structure holding the points for all face landmarks
*/
struct FaceFeatures {
cv::Rect faceRect_;
vector<cv::Point2f> chin_;
vector<cv::Point2f> top_nose_;
vector<cv::Point2f> bottom_nose_;
vector<cv::Point2f> left_eyebrow_;
vector<cv::Point2f> right_eyebrow_;
vector<cv::Point2f> left_eye_;
vector<cv::Point2f> right_eye_;
vector<cv::Point2f> outer_lips_;
vector<cv::Point2f> inside_lips_;
FaceFeatures() {};
FaceFeatures(const cv::Rect &faceRect, const vector<cv::Point2f> &shape, double local_scale) {
//calculate the face rectangle
faceRect_ = cv::Rect(faceRect.x / local_scale, faceRect.y / local_scale, faceRect.width / local_scale, faceRect.height / local_scale);
/** Copy all features **/
size_t i = 0;
// Around Chin. Ear to Ear
for (i = 0; i <= 16; ++i)
chin_.push_back(shape[i] / local_scale);
// left eyebrow
for (; i <= 21; ++i)
left_eyebrow_.push_back(shape[i] / local_scale);
// Right eyebrow
for (; i <= 26; ++i)
right_eyebrow_.push_back(shape[i] / local_scale);
// Line on top of nose
for (; i <= 30; ++i)
top_nose_.push_back(shape[i] / local_scale);
// Bottom part of the nose
for (; i <= 35; ++i)
bottom_nose_.push_back(shape[i] / local_scale);
// Left eye
for (; i <= 41; ++i)
left_eye_.push_back(shape[i] / local_scale);
// Right eye
for (; i <= 47; ++i)
right_eye_.push_back(shape[i] / local_scale);
// Lips outer part
for (; i <= 59; ++i)
outer_lips_.push_back(shape[i] / local_scale);
// Lips inside part
for (; i <= 67; ++i)
inside_lips_.push_back(shape[i] / local_scale);
}
//Concatenates all feature points
vector<cv::Point2f> points() const {
vector<cv::Point2f> allPoints;
allPoints.insert(allPoints.begin(), chin_.begin(), chin_.end());
allPoints.insert(allPoints.begin(), top_nose_.begin(), top_nose_.end());
allPoints.insert(allPoints.begin(), bottom_nose_.begin(), bottom_nose_.end());
allPoints.insert(allPoints.begin(), left_eyebrow_.begin(), left_eyebrow_.end());
allPoints.insert(allPoints.begin(), right_eyebrow_.begin(), right_eyebrow_.end());
allPoints.insert(allPoints.begin(), left_eye_.begin(), left_eye_.end());
allPoints.insert(allPoints.begin(), right_eye_.begin(), right_eye_.end());
allPoints.insert(allPoints.begin(), outer_lips_.begin(), outer_lips_.end());
allPoints.insert(allPoints.begin(), inside_lips_.begin(), inside_lips_.end());
return allPoints;
}
//Returns all feature points in fixed order
vector<vector<cv::Point2f>> features() const {
return {chin_,
top_nose_,
bottom_nose_,
left_eyebrow_,
right_eyebrow_,
left_eye_,
right_eye_,
outer_lips_,
inside_lips_};
}
bool empty() const {
return points().empty();
}
};
using namespace cv::v4d;
class BeautyDemoPlan : public Plan {
public:
using Plan::Plan;
private:
cv::Size downSize_;
static struct Params {
int blurSkinKernelSize_ = 0;
//Saturation boost factor for eyes and lips
float eyesAndLipsSaturation_ = 1.8f;
//Saturation boost factor for skin
float skinSaturation_ = 1.4f;
//Contrast factor skin
float skinContrast_ = 0.7f;
//Show input and output side by side
bool sideBySide_ = false;
//Scale the video to the window size
bool stretch_ = true;
} params_;
struct Cache {
vector<cv::UMat> channels_;
cv::UMat hls_;
cv::UMat blur_;
cv::UMat frameOutFloat_;
cv::UMat bgra_;
} cache_;
struct Frames {
//BGR
cv::UMat orig_, down_, contrast_, faceOval_, eyesAndLips_, skin_;
cv::UMat lhalf_;
cv::UMat rhalf_;
//GREY
cv::UMat faceSkinMaskGrey_, eyesAndLipsMaskGrey_, backgroundMaskGrey_;
} frames_;
//results of face detection and facemark
struct Face {
vector<vector<cv::Point2f>> shapes_;
std::vector<cv::Rect> faceRects_;
bool found_ = false;
FaceFeatures features_;
} face_;
//the frame holding the final composed image
cv::UMat frameOut_;
cv::Ptr<cv::face::Facemark> facemark_ = cv::face::createFacemarkLBF();
//Blender (used to put the different face parts back together)
cv::Ptr<cv::detail::MultiBandBlender> blender_ = new cv::detail::MultiBandBlender(true, 5);
//Face detector
cv::Ptr<cv::FaceDetectorYN> detector_;
//based on the detected FaceFeatures it guesses a decent face oval and draws a mask for it.
static void draw_face_oval_mask(const FaceFeatures &ff) {
using namespace cv::v4d::nvg;
clear();
cv::RotatedRect rotRect = cv::fitEllipse(ff.points());
beginPath();
fillColor(cv::Scalar(255, 255, 255, 255));
ellipse(rotRect.center.x, rotRect.center.y * 0.875, rotRect.size.width / 2, rotRect.size.height / 1.75);
rotate(rotRect.angle);
fill();
}
//Draws a mask consisting of eyes and lips areas (deduced from FaceFeatures)
static void draw_face_eyes_and_lips_mask(const FaceFeatures &ff) {
using namespace cv::v4d::nvg;
clear();
vector<vector<cv::Point2f>> features = ff.features();
for (size_t j = 5; j < 8; ++j) {
beginPath();
fillColor(cv::Scalar(255, 255, 255, 255));
moveTo(features[j][0].x, features[j][0].y);
for (size_t k = 1; k < features[j].size(); ++k) {
lineTo(features[j][k].x, features[j][k].y);
}
closePath();
fill();
}
beginPath();
fillColor(cv::Scalar(0, 0, 0, 255));
moveTo(features[8][0].x, features[8][0].y);
for (size_t k = 1; k < features[8].size(); ++k) {
lineTo(features[8][k].x, features[8][k].y);
}
closePath();
fill();
}
//adjusts the saturation of a UMat
static void adjust_saturation(const cv::UMat &srcBGR, cv::UMat &dstBGR, float factor, Cache& cache) {
cvtColor(srcBGR, cache.hls_, cv::COLOR_BGR2HLS);
split(cache.hls_, cache.channels_);
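//in HLS channel order (hue, lightness, saturation) index 2 is the saturation plane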
cv::multiply(cache.channels_[2], factor, cache.channels_[2]);
merge(cache.channels_, cache.hls_);
cvtColor(cache.hls_, dstBGR, cv::COLOR_HLS2BGR);
}
public:
void gui(cv::Ptr<V4D> window) override {
window->imgui([](cv::Ptr<V4D> win, ImGuiContext* ctx, Params& params){
using namespace ImGui;
SetCurrentContext(ctx);
Begin("Effect");
Text("Display");
Checkbox("Side by side", &params.sideBySide_);
if(Checkbox("Stetch", &params.stretch_)) {
win->setStretching(true);
} else
win->setStretching(false);
if(Button("Fullscreen")) {
win->setFullscreen(!win->isFullscreen());
};
if(Button("Offscreen")) {
win->setVisible(!win->isVisible());
};
Text("Face Skin");
SliderInt("Blur", &params.blurSkinKernelSize_, 1, 128);
SliderFloat("Saturation", &params.skinSaturation_, 0.0f, 100.0f);
SliderFloat("Contrast", &params.skinContrast_, 0.0f, 1.0f);
Text("Eyes and Lips");
SliderFloat("Saturation ", &params.eyesAndLipsSaturation_, 0.0f, 100.0f);
End();
}, params_);
}
void setup(cv::Ptr<V4D> window) override {
int w = size().width;
int h = size().height;
downSize_ = { std::min(w, std::max(640, int(round(w / 2.0)))), std::min(h, std::max(360, int(round(h / 2.0)))) };
detector_ = cv::FaceDetectorYN::create("modules/v4d/assets/models/face_detection_yunet_2023mar.onnx", "", downSize_, 0.9, 0.3, 5000, cv::dnn::DNN_BACKEND_OPENCV, cv::dnn::DNN_TARGET_OPENCL);
int diag = hypot(double(size().width), double(size().height));
params_.blurSkinKernelSize_ = std::max(int(diag / 2000 % 2 == 0 ? diag / 2000 + 1 : diag / 2000), 1);
window->setStretching(params_.stretch_);
window->plain([](cv::Ptr<cv::face::Facemark>& facemark){
facemark->loadModel("modules/v4d/assets/models/lbfmodel.yaml");
}, facemark_);
}
void infer(cv::Ptr<V4D> window) override {
try {
window->branch(always_);
{
window->capture();
//Save the video frame as BGR
window->fb([](const cv::UMat &framebuffer, const cv::Rect& viewport, const cv::Size& downSize, Frames& frames) {
cvtColor(framebuffer(viewport), frames.orig_, cv::COLOR_BGRA2BGR);
//Downscale the video frame for face detection
cv::resize(frames.orig_, frames.down_, downSize);
}, viewport(), downSize_, frames_);
window->plain([](const cv::Size sz, cv::Ptr<cv::FaceDetectorYN>& detector, cv::Ptr<cv::face::Facemark>& facemark, const cv::UMat& down, Face& face) {
face.shapes_.clear();
cv::Mat faces;
//Detect faces in the down-scaled image
detector->detect(down, faces);
//Only add the first face
cv::Rect faceRect;
if(!faces.empty())
faceRect = cv::Rect(int(faces.at<float>(0, 0)), int(faces.at<float>(0, 1)), int(faces.at<float>(0, 2)), int(faces.at<float>(0, 3)));
face.faceRects_ = {faceRect};
//find landmarks if faces have been detected
face.found_ = !faceRect.empty() && facemark->fit(down, face.faceRects_, face.shapes_);
if(face.found_)
face.features_ = FaceFeatures(face.faceRects_[0], face.shapes_[0], float(down.size().width) / sz.width);
}, size(), detector_, facemark_, frames_.down_, face_);
}
window->endbranch(always_);
window->branch(isTrue_, face_.found_);
{
window->nvg([](const FaceFeatures& features) {
//Draw the face oval of the first face
draw_face_oval_mask(features);
}, face_.features_);
window->fb([](const cv::UMat& framebuffer, const cv::Rect& viewport, cv::UMat& faceOval) {
//Convert/Copy the mask
cvtColor(framebuffer(viewport), faceOval, cv::COLOR_BGRA2GRAY);
}, viewport(), frames_.faceOval_);
window->nvg([](const FaceFeatures& features) {
//Draw the eyes and lips areas of the first face
draw_face_eyes_and_lips_mask(features);
}, face_.features_);
window->fb([](const cv::UMat &framebuffer, const cv::Rect& viewport, cv::UMat& eyesAndLipsMaskGrey) {
//Convert/Copy the mask
cvtColor(framebuffer(viewport), eyesAndLipsMaskGrey, cv::COLOR_BGRA2GRAY);
}, viewport(), frames_.eyesAndLipsMaskGrey_);
window->plain([](Frames& frames, const Params& params, Cache& cache) {
//Create the skin mask
cv::subtract(frames.faceOval_, frames.eyesAndLipsMaskGrey_, frames.faceSkinMaskGrey_);
//Create the background mask
cv::bitwise_not(frames.faceOval_, frames.backgroundMaskGrey_);
//boost saturation of eyes and lips
adjust_saturation(frames.orig_, frames.eyesAndLips_, params.eyesAndLipsSaturation_, cache);
//reduce skin contrast
multiply(frames.orig_, cv::Scalar::all(params.skinContrast_), frames.contrast_);
//fix skin brightness
add(frames.contrast_, cv::Scalar::all((1.0 - params.skinContrast_) / 2.0) * 255.0, frames.contrast_);
//blur the skin_
cv::boxFilter(frames.contrast_, cache.blur_, -1, cv::Size(params.blurSkinKernelSize_, params.blurSkinKernelSize_), cv::Point(-1, -1), true, cv::BORDER_REPLICATE);
//boost skin saturation
adjust_saturation(cache.blur_, frames.skin_, params.skinSaturation_, cache);
}, frames_, params_, cache_);
window->plain([](cv::Ptr<cv::detail::MultiBandBlender>& bl, Frames& frames, cv::UMat& frameOut, Cache& cache) {
CV_Assert(!frames.skin_.empty());
CV_Assert(!frames.eyesAndLips_.empty());
//piece it all together
bl->prepare(cv::Rect(0, 0, frames.skin_.cols, frames.skin_.rows));
bl->feed(frames.skin_, frames.faceSkinMaskGrey_, cv::Point(0, 0));
bl->feed(frames.orig_, frames.backgroundMaskGrey_, cv::Point(0, 0));
bl->feed(frames.eyesAndLips_, frames.eyesAndLipsMaskGrey_, cv::Point(0, 0));
bl->blend(cache.frameOutFloat_, cv::UMat());
CV_Assert(!cache.frameOutFloat_.empty());
cache.frameOutFloat_.convertTo(frameOut, CV_8U, 1.0);
}, blender_, frames_, frameOut_, cache_);
window->plain([](const cv::Size& sz, const cv::UMat& orig, cv::UMat& frameOut, cv::UMat lhalf, cv::UMat rhalf, const Params& params) {
if (params.sideBySide_) {
//create side-by-side view with a result
cv::resize(orig, lhalf, cv::Size(0, 0), 0.5, 0.5);
cv::resize(frameOut, rhalf, cv::Size(0, 0), 0.5, 0.5);
frameOut = cv::Scalar::all(0);
lhalf.copyTo(frameOut(cv::Rect(0, sz.height / 2.0, lhalf.size().width, lhalf.size().height)));
rhalf.copyTo(frameOut(cv::Rect(sz.width / 2.0, sz.height / 2.0, lhalf.size().width, lhalf.size().height)));
}
}, size(), frames_.orig_, frameOut_, frames_.lhalf_, frames_.rhalf_, params_);
}
window->endbranch(isTrue_, face_.found_);
window->branch(isFalse_, face_.found_);
{
window->plain([](const cv::Size& sz, const cv::UMat& orig, cv::UMat& frameOut, cv::UMat lhalf, const Params& params) {
if (params.sideBySide_) {
//create side-by-side view without a result (using the input image for both sides)
frameOut = cv::Scalar::all(0);
cv::resize(orig, lhalf, cv::Size(0, 0), 0.5, 0.5);
lhalf.copyTo(frameOut(cv::Rect(0, sz.height / 2.0, lhalf.size().width, lhalf.size().height)));
lhalf.copyTo(frameOut(cv::Rect(sz.width / 2.0, sz.height / 2.0, lhalf.size().width, lhalf.size().height)));
} else {
orig.copyTo(frameOut);
}
}, size(), frames_.orig_, frameOut_, frames_.lhalf_, params_);
}
window->endbranch(isFalse_, face_.found_);
window->branch(always_);
{
//write the result to the framebuffer
window->fb([](cv::UMat& framebuffer, const cv::Rect& viewport, const cv::UMat& f, Cache& cache) {
cvtColor(f, cache.bgra_, cv::COLOR_BGR2BGRA);
cv::resize(cache.bgra_, framebuffer(viewport), viewport.size());
}, viewport(), frameOut_, cache_);
//write the current framebuffer to video
window->write();
}
window->endbranch(always_);
} catch (std::exception &ex) {
cerr << ex.what() << endl;
}
}
};
BeautyDemoPlan::Params BeautyDemoPlan::params_;
int main(int argc, char **argv) {
if (argc != 2) {
cerr << "Usage: beauty-demo <input-video-file>" << endl;
exit(1);
}
cv::Ptr<BeautyDemoPlan> plan = new BeautyDemoPlan(cv::Size(1920, 1080));
cv::Ptr<V4D> window = V4D::make(plan->size(), "Beautification Demo", ALL);
auto src = makeCaptureSource(window, argv[1]);
auto sink = makeWriterSink(window, "beauty-demo.mkv", src->fps(), plan->size());
window->setSource(src);
window->setSink(sink);
window->run(plan);
return 0;
}

@ -0,0 +1,75 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
using namespace cv::v4d;
class DisplayImageBgfx : public Plan {
Property<cv::Rect> vp_ = P<cv::Rect>(V4D::Keys::VIEWPORT);
public:
void setup() override {
bgfx([](const cv::Rect& vp) {
// Set view 0 clear state.
bgfx::setViewClear(0
, BGFX_CLEAR_COLOR|BGFX_CLEAR_DEPTH
, 0x303030ff
, 1.0f
, 0
);
// Set view 0 default viewport.
bgfx::setViewRect(0, vp.x, vp.y, uint16_t(vp.width), uint16_t(vp.height));
}, vp_);
}
void infer() override {
bgfx([](const cv::Rect& vp) {
// This dummy draw call is here to make sure that view 0 is cleared
// if no other draw calls are submitted to view 0.
bgfx::touch(0);
// Use debug font to print information about this example.
bgfx::dbgTextClear();
const bgfx::Stats* stats = bgfx::getStats();
bgfx::dbgTextPrintf(
bx::max<uint16_t>(uint16_t(stats->textWidth/2), 20)-20
, bx::max<uint16_t>(uint16_t(stats->textHeight/2), 6)-6
, 40
, "Hello %s"
, "World"
);
bgfx::dbgTextPrintf(0, 1, 0x0f, "Color can be changed with ANSI \x1b[9;me\x1b[10;ms\x1b[11;mc\x1b[12;ma\x1b[13;mp\x1b[14;me\x1b[0m code too.");
bgfx::dbgTextPrintf(80, 1, 0x0f, "\x1b[;0m \x1b[;1m \x1b[; 2m \x1b[; 3m \x1b[; 4m \x1b[; 5m \x1b[; 6m \x1b[; 7m \x1b[0m");
bgfx::dbgTextPrintf(80, 2, 0x0f, "\x1b[;8m \x1b[;9m \x1b[;10m \x1b[;11m \x1b[;12m \x1b[;13m \x1b[;14m \x1b[;15m \x1b[0m");
bgfx::dbgTextPrintf(0, 2, 0x0f, "Backbuffer %dW x %dH in pixels, debug text %dW x %dH in characters."
, stats->width
, stats->height
, stats->textWidth
, stats->textHeight
);
// Advance to next frame. Rendering thread will be kicked to
// process submitted rendering primitives.
bgfx::frame();
}, vp_);
}
};
int main(int argc, char** argv) {
cv::Rect viewport(0,0, 1280, 720);
cv::Ptr<V4D> runtime = V4D::init(viewport, "Display an image using bgfx", AllocateFlags::BGFX | AllocateFlags::IMGUI);
Plan::run<DisplayImageBgfx>(0);
return 0;
}

@ -0,0 +1,287 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
using namespace cv::v4d;
// based on: https://github.com/bkaradzic/bgfx/blob/07be0f213acd73a4f6845dc8f7b20b93f66b7cc4/examples/01-cubes/cubes.cpp
class BgfxDemoPlan : public Plan {
struct PosColorVertex
{
float x_;
float y_;
float z_;
uint32_t abgr_;
static void init()
{
layout
.begin()
.add(bgfx::Attrib::Position, 3, bgfx::AttribType::Float)
.add(bgfx::Attrib::Color0, 4, bgfx::AttribType::Uint8, true)
.end();
};
inline static bgfx::VertexLayout layout;
};
inline static const PosColorVertex CUBE_VERTICES[] =
{
{-0.30f, 0.30f, 0.30f, 0xaa000000 },
{ 0.30f, 0.30f, 0.30f, 0xaa0000ff },
{-0.30f, -0.30f, 0.30f, 0xaa00ff00 },
{ 0.30f, -0.30f, 0.30f, 0xaa00ffff },
{-0.30f, 0.30f, -0.30f, 0xaaff0000 },
{ 0.30f, 0.30f, -0.30f, 0xaaff00ff },
{-0.30f, -0.30f, -0.30f, 0xaaffff00 },
{ 0.30f, -0.30f, -0.30f, 0xaaffffff },
};
inline static const uint16_t CUBE_TRI_LIST[] =
{
0, 1, 2, // 0
1, 3, 2,
4, 6, 5, // 2
5, 6, 7,
0, 2, 4, // 4
4, 2, 6,
1, 5, 3, // 6
5, 7, 3,
0, 4, 1, // 8
4, 5, 1,
2, 3, 6, // 10
6, 3, 7,
};
inline static const uint16_t CUBE_TRI_STRIP[] =
{
0, 1, 2,
3,
7,
1,
5,
0,
4,
2,
6,
7,
4,
5,
};
inline static const uint16_t CUBE_LINE_LIST[] =
{
0, 1,
0, 2,
0, 4,
1, 3,
1, 5,
2, 3,
2, 6,
3, 7,
4, 5,
4, 6,
5, 7,
6, 7,
};
inline static const uint16_t CUBE_LINE_STRIP[] =
{
0, 2, 3, 1, 5, 7, 6, 4,
0, 2, 6, 4, 5, 7, 3, 1,
0,
};
inline static const uint16_t CUBE_POINTS[] =
{
0, 1, 2, 3, 4, 5, 6, 7
};
inline static const char* PT_NAMES[]
{
"Triangle List",
"Triangle Strip",
"Lines",
"Line Strip",
"Points",
};
inline static const uint64_t PT_STATE[]
{
UINT64_C(0),
BGFX_STATE_PT_TRISTRIP,
BGFX_STATE_PT_LINES,
BGFX_STATE_PT_LINESTRIP,
BGFX_STATE_PT_POINTS,
};
struct Params {
uint32_t width_;
uint32_t height_;
bgfx::VertexBufferHandle vbh_;
bgfx::IndexBufferHandle ibh_[BX_COUNTOF(PT_STATE)];
bgfx::ProgramHandle program_;
int32_t pt_ = 0;
bool red_ = true;
bool green_ = true;
bool blue_ = true;
bool alpha_ = true;
} params_;
inline static int64_t time_offset_;
Property<cv::Rect> vp_ = P<cv::Rect>(V4D::Keys::VIEWPORT);
public:
BgfxDemoPlan(){
}
void setup() override {
branch(BranchType::ONCE, always_)
->plain([](int64_t& timeOffset) {
timeOffset = bx::getHPCounter();
}, RWS(time_offset_))
->endBranch();
bgfx([](const cv::Rect& vp, Params& params){
params.width_ = vp.width;
params.height_ = vp.height;
// Set view 0 clear state.
bgfx::setViewClear(0
, BGFX_CLEAR_COLOR|BGFX_CLEAR_DEPTH
, 0x00000000
, 1.0f
, 0
);
PosColorVertex::init();
// Set view 0 default viewport.
bgfx::setViewRect(0, vp.x, vp.y, uint16_t(vp.width), uint16_t(vp.height));
// Create static vertex buffer.
params.vbh_ = bgfx::createVertexBuffer(
// Static data can be passed with bgfx::makeRef
bgfx::makeRef(CUBE_VERTICES, sizeof(CUBE_VERTICES) )
, PosColorVertex::layout
);
// Create static index buffer for triangle list rendering.
params.ibh_[0] = bgfx::createIndexBuffer(
// Static data can be passed with bgfx::makeRef
bgfx::makeRef(CUBE_TRI_LIST, sizeof(CUBE_TRI_LIST) )
);
// Create static index buffer for triangle strip rendering.
params.ibh_[1] = bgfx::createIndexBuffer(
// Static data can be passed with bgfx::makeRef
bgfx::makeRef(CUBE_TRI_STRIP, sizeof(CUBE_TRI_STRIP) )
);
// Create static index buffer for line list rendering.
params.ibh_[2] = bgfx::createIndexBuffer(
// Static data can be passed with bgfx::makeRef
bgfx::makeRef(CUBE_LINE_LIST, sizeof(CUBE_LINE_LIST) )
);
// Create static index buffer for line strip rendering.
params.ibh_[3] = bgfx::createIndexBuffer(
// Static data can be passed with bgfx::makeRef
bgfx::makeRef(CUBE_LINE_STRIP, sizeof(CUBE_LINE_STRIP) )
);
// Create static index buffer for point list rendering.
params.ibh_[4] = bgfx::createIndexBuffer(
// Static data can be passed with bgfx::makeRef
bgfx::makeRef(CUBE_POINTS, sizeof(CUBE_POINTS) )
);
// Create program from shaders.
params.program_ = util::load_program("vs_cubes", "fs_cubes");
}, vp_, RW(params_));
}
void infer() override {
bgfx([](const Params& params, const int64_t timeOffset) {
float time = (float)( (bx::getHPCounter()-timeOffset)/double(bx::getHPFrequency()));
const bx::Vec3 at = { 0.0f, 0.0f, 0.0f };
const bx::Vec3 eye = { 0.0f, 0.0f, -35.0f };
// Set view and projection matrix for view 0.
{
float view[16];
bx::mtxLookAt(view, eye, at);
float proj[16];
bx::mtxProj(proj, 60.0f, float(params.width_)/float(params.height_), 0.1f, 100.0f, bgfx::getCaps()->homogeneousDepth);
bgfx::setViewTransform(0, view, proj);
// Set view 0 default viewport.
bgfx::setViewRect(0, 0, 0, uint16_t(params.width_), uint16_t(params.height_) );
}
// This dummy draw call is here to make sure that view 0 is cleared
// if no other draw calls are submitted to view 0.
bgfx::touch(0);
bgfx::IndexBufferHandle ibh = params.ibh_[params.pt_];
uint64_t state = 0
| (params.red_ ? BGFX_STATE_WRITE_R : 0)
| (params.green_ ? BGFX_STATE_WRITE_G : 0)
| (params.blue_ ? BGFX_STATE_WRITE_B : 0)
| (params.alpha_ ? BGFX_STATE_WRITE_A : 0)
| BGFX_STATE_WRITE_Z
| BGFX_STATE_DEPTH_TEST_LESS
| BGFX_STATE_CULL_CW
| BGFX_STATE_MSAA
| PT_STATE[params.pt_]
;
// Submit 11x11 cubes.
for (uint32_t yy = 0; yy < 100; ++yy)
{
for (uint32_t xx = 0; xx < 100; ++xx)
{
float mtx[16];
float angle = fmod(float(time) + sin((float(xx * yy / pow(170.0f, 2.0f)) * 2.0f - 1.0f) * CV_PI), 2.0f * CV_PI);
bx::mtxRotateXYZ(mtx, angle, angle, angle);
mtx[12] = ((xx / 100.0) * 2.0 - 1.0) * 30.0;
mtx[13] = ((yy / 100.0) * 2.0 - 1.0) * 30.0;
mtx[14] = 0.0f;
// Set model matrix for rendering.
bgfx::setTransform(mtx);
// Set vertex and index buffer.
bgfx::setVertexBuffer(0, params.vbh_);
bgfx::setIndexBuffer(ibh);
// Set render states.
bgfx::setState(state);
// Submit primitive for rendering to view 0.
bgfx::submit(0, params.program_);
}
}
// Advance to next frame. Rendering thread will be kicked to
// process submitted rendering primitives.
bgfx::frame();
}, R(params_), CS(time_offset_));
}
};
int main(int argc, char** argv) {
cv::Ptr<V4D> runtime = V4D::init(cv::Rect(0,0, 1280, 720), "Bgfx Demo", AllocateFlags::BGFX | AllocateFlags::IMGUI);
Plan::run<BgfxDemoPlan>(std::stoi(argv[1]));
return 0;
}

@ -0,0 +1,248 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
//adapted from https://gitlab.com/wikibooks-opengl/modern-tutorials/-/blob/master/tut05_cube/cube.cpp
using namespace cv::v4d;
class CubeDemoPlan : public Plan {
public:
using Plan::Plan;
/* Demo Parameters */
int glowKernelSize_ = 0;
/* OpenGL constants */
constexpr static GLuint TRIANGLES_ = 12;
constexpr static GLuint VERTICES_INDEX_ = 0;
constexpr static GLuint COLOR_INDEX_ = 1;
//Cube vertices, colors and indices
constexpr static float VERTICES[24] = {
// Front face
0.5, 0.5, 0.5, -0.5, 0.5, 0.5, -0.5, -0.5, 0.5, 0.5, -0.5, 0.5,
// Back face
0.5, 0.5, -0.5, -0.5, 0.5, -0.5, -0.5, -0.5, -0.5, 0.5, -0.5, -0.5
};
constexpr static float VERTEX_COLORS_[24] = {
1.0, 0.4, 0.6, 1.0, 0.9, 0.2, 0.7, 0.3, 0.8, 0.5, 0.3, 1.0,
0.2, 0.6, 1.0, 0.6, 1.0, 0.4, 0.6, 0.8, 0.8, 0.4, 0.8, 0.8
};
constexpr static unsigned short TRIANGLE_INDICES_[36] = {
// Front
0, 1, 2, 2, 3, 0,
// Right
0, 3, 7, 7, 4, 0,
// Bottom
2, 6, 7, 7, 3, 2,
// Left
1, 5, 6, 6, 2, 1,
// Back
4, 7, 6, 6, 5, 4,
// Top
5, 1, 0, 0, 4, 5
};
private:
struct Cache {
cv::UMat down_;
cv::UMat up_;
cv::UMat blur_;
cv::UMat dst16_;
} cache_;
GLuint vao_ = 0;
GLuint shaderProgram_ = 0;
GLuint uniformTransform_= 0;
//Simple transform & pass-through shaders
static GLuint load_shader() {
//Shader versions "330" and "300 es" are very similar.
//If you are careful you can write the same code for both versions.
#if !defined(OPENCV_V4D_USE_ES3)
const string shaderVersion = "330";
#else
const string shaderVersion = "300 es";
#endif
const string vert =
" #version " + shaderVersion
+ R"(
precision lowp float;
layout(location = 0) in vec3 pos;
layout(location = 1) in vec3 vertex_color;
uniform mat4 transform;
out vec3 color;
void main() {
gl_Position = transform * vec4(pos, 1.0);
color = vertex_color;
}
)";
const string frag =
" #version " + shaderVersion
+ R"(
precision lowp float;
in vec3 color;
out vec4 frag_color;
void main() {
frag_color = vec4(color, 1.0);
}
)";
//Initializes the shaders and returns the program handle
unsigned int handles[3];
cv::v4d::initShader(handles, vert.c_str(), frag.c_str(), "fragColor");
return handles[0];
}
//Initializes objects, buffers, shaders and uniforms
static void init_scene(const cv::Size& sz, GLuint& vao, GLuint& shaderProgram, GLuint& uniformTransform) {
glEnable (GL_DEPTH_TEST);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
unsigned int triangles_ebo;
glGenBuffers(1, &triangles_ebo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, triangles_ebo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof TRIANGLE_INDICES_, TRIANGLE_INDICES_,
GL_STATIC_DRAW);
unsigned int vertices_vbo;
glGenBuffers(1, &vertices_vbo);
glBindBuffer(GL_ARRAY_BUFFER, vertices_vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof VERTICES, VERTICES, GL_STATIC_DRAW);
glVertexAttribPointer(VERTICES_INDEX_, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(VERTICES_INDEX_);
unsigned int colors_vbo;
glGenBuffers(1, &colors_vbo);
glBindBuffer(GL_ARRAY_BUFFER, colors_vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof VERTEX_COLORS_, VERTEX_COLORS_, GL_STATIC_DRAW);
glVertexAttribPointer(COLOR_INDEX_, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(COLOR_INDEX_);
glBindVertexArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
shaderProgram = load_shader();
uniformTransform = glGetUniformLocation(shaderProgram, "transform");
glViewport(0,0, sz.width, sz.height);
}
//Renders a rotating rainbow-colored cube on a blueish background
static void render_scene(GLuint &vao, GLuint &shaderProgram,
GLuint &uniformTransform) {
//Clear the background
glClearColor(0.2, 0.24, 0.4, 1);
glClear(GL_COLOR_BUFFER_BIT);
//Use the prepared shader program
glUseProgram(shaderProgram);
//Scale and rotate the cube depending on the current time.
float angle = fmod(
double(cv::getTickCount()) / double(cv::getTickFrequency()),
2 * M_PI);
float scale = 0.25;
cv::Matx44f scaleMat(scale, 0.0, 0.0, 0.0, 0.0, scale, 0.0, 0.0, 0.0, 0.0,
scale, 0.0, 0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotXMat(1.0, 0.0, 0.0, 0.0, 0.0, cos(angle), -sin(angle), 0.0,
0.0, sin(angle), cos(angle), 0.0, 0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotYMat(cos(angle), 0.0, sin(angle), 0.0, 0.0, 1.0, 0.0, 0.0,
-sin(angle), 0.0, cos(angle), 0.0, 0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotZMat(cos(angle), -sin(angle), 0.0, 0.0, sin(angle),
cos(angle), 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0);
//calculate the transform
cv::Matx44f transform = scaleMat * rotXMat * rotYMat * rotZMat;
//set the corresponding uniform
glUniformMatrix4fv(uniformTransform, 1, GL_FALSE, transform.val);
//Bind the prepared vertex array object
glBindVertexArray(vao);
//Draw
glDrawElements(GL_TRIANGLES, TRIANGLES_ * 3, GL_UNSIGNED_SHORT, NULL);
}
//applies a glow effect to an image
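//It inverts the image, multiplies the inverted image with a downscaled, box-blurred copy
//of itself (roughly: dst = 255 - ((255 - src) * blur(255 - src)) / 255) and inverts the
//result back.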
static void glow_effect(const cv::UMat& src, cv::UMat& dst, const int ksize, Cache& cache) {
cv::bitwise_not(src, dst);
//Resize for some extra performance
cv::resize(dst, cache.down_, cv::Size(), 0.5, 0.5);
//Cheap blur
cv::boxFilter(cache.down_, cache.blur_, -1, cv::Size(ksize, ksize), cv::Point(-1, -1), true,
cv::BORDER_REPLICATE);
//Back to original size
cv::resize(cache.blur_, cache.up_, src.size());
//Multiply the src image with a blurred version of itself
cv::multiply(dst, cache.up_, cache.dst16_, 1, CV_16U);
//Normalize and convert back to CV_8U
cv::divide(cache.dst16_, cv::Scalar::all(255.0), dst, 1, CV_8U);
cv::bitwise_not(dst, dst);
}
public:
void setup(cv::Ptr<V4D> window) override {
int diag = hypot(double(size().width), double(size().height));
glowKernelSize_ = std::max(int(diag / 138 % 2 == 0 ? diag / 138 + 1 : diag / 138), 1);
window->gl([](const cv::Size& sz, GLuint& v, GLuint& sp, GLuint& ut){
init_scene(sz, v, sp, ut);
}, size(), vao_, shaderProgram_, uniformTransform_);
}
void infer(cv::Ptr<V4D> window) override {
window->gl([](){
//Clear the background
glClearColor(0.2f, 0.24f, 0.4f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
});
//Render using multiple OpenGL contexts
window->gl([](GLuint& v, GLuint& sp, GLuint& ut){
render_scene(v, sp, ut);
}, vao_, shaderProgram_, uniformTransform_);
//Acquire the framebuffer for use by OpenCV
window->fb([](cv::UMat& framebuffer, const cv::Rect& viewport, int glowKernelSize, Cache& cache) {
cv::UMat roi = framebuffer(viewport);
glow_effect(roi, roi, glowKernelSize, cache);
}, viewport(), glowKernelSize_, cache_);
window->write();
}
};
int main() {
cv::Ptr<CubeDemoPlan> plan = new CubeDemoPlan(cv::Size(1280, 720));
cv::Ptr<V4D> window = V4D::make(plan->size(), "Cube Demo", ALL);
//Creates a writer sink (which might be hardware accelerated)
auto sink = makeWriterSink(window, "cube-demo.mkv", 60, plan->size());
window->setSink(sink);
window->run(plan);
return 0;
}

@ -0,0 +1,62 @@
#include <opencv2/v4d/v4d.hpp>
#include <opencv2/imgcodecs.hpp>
using namespace cv;
using namespace cv::v4d;
class CustomSourceAndSinkPlan : public Plan {
string hr_ = "Hello Rainbow!";
public:
CustomSourceAndSinkPlan(const cv::Size& sz) : Plan(sz) {
}
void infer(cv::Ptr<V4D> win) override {
win->capture();
//Render "Hello Rainbow!" over the video
win->nvg([](const Size& sz, const string& str) {
using namespace cv::v4d::nvg;
fontSize(40.0f);
fontFace("sans-bold");
fillColor(Scalar(255, 0, 0, 255));
textAlign(NVG_ALIGN_CENTER | NVG_ALIGN_TOP);
text(sz.width / 2.0, sz.height / 2.0, str.c_str(), str.c_str() + str.size());
}, win->fbSize(), hr_);
win->write();
}
};
int main() {
Ptr<CustomSourceAndSinkPlan> plan = new CustomSourceAndSinkPlan(cv::Size(960, 960));
Ptr<V4D> window = V4D::make(plan->size(), "Custom Source/Sink");
//Make a source that generates rainbow frames.
cv::Ptr<Source> src = new Source([](cv::UMat& frame){
static long cnt = 0;
//The source is responsible for initializing the frame.
if(frame.empty())
frame.create(Size(960, 960), CV_8UC3);
frame = colorConvert(Scalar(++cnt % 180, 128, 128, 255), COLOR_HLS2BGR);
return true;
}, 60.0f);
//Make a sink that saves each frame to a PNG file (does nothing in case of WebAssembly).
cv::Ptr<Sink> sink = new Sink([](const uint64_t& seq, const cv::UMat& frame){
try {
imwrite(std::to_string(seq) + ".png", frame);
} catch(std::exception& ex) {
cerr << "Unable to write frame: " << ex.what() << endl;
return false;
}
return true;
});
//Attach source and sink
window->setSource(src);
window->setSink(sink);
window->run(plan);
}

@ -0,0 +1,32 @@
#include <opencv2/v4d/v4d.hpp>
#include <opencv2/imgcodecs.hpp>
using namespace cv;
using namespace cv::v4d;
class DisplayImagePlan : public Plan {
UMat image_;
public:
DisplayImagePlan(const cv::Size& sz) : Plan(sz) {
}
void setup(Ptr<V4D> win) override {
win->plain([](cv::UMat& image){
image = imread(samples::findFile("lena.jpg")).getUMat(ACCESS_READ);
}, image_);
}
void infer(Ptr<V4D> win) override {
//Feeds the image to the video pipeline
win->feed(image_);
}
};
int main() {
cv::Ptr<DisplayImagePlan> plan = new DisplayImagePlan(cv::Size(960,960));
//Creates a V4D window for on screen rendering with a window size of 960x960 and a framebuffer of the same size.
//Please note that while the window size may change, the framebuffer size may not. If you need multiple framebuffer
//sizes you need multiple V4D objects.
cv::Ptr<V4D> window = V4D::make(plan->size(), "Display an Image");
window->run(plan);
}

@ -0,0 +1,40 @@
#include <opencv2/v4d/v4d.hpp>
#include <opencv2/imgcodecs.hpp>
using namespace cv;
using namespace cv::v4d;
class DisplayImageFB : public Plan {
UMat image_;
UMat converted_;
public:
DisplayImageFB(const cv::Size& sz) : Plan(sz) {
}
void setup(cv::Ptr<V4D> win) override {
win->plain([](cv::UMat& image, cv::UMat& converted, const cv::Size& sz){
//Loads an image as a UMat (just in case we have hardware acceleration available)
image = imread(samples::findFile("lena.jpg")).getUMat(ACCESS_READ);
//We have to manually resize and color convert the image when using direct framebuffer access.
resize(image, converted, sz);
cvtColor(converted, converted, COLOR_RGB2BGRA);
}, image_, converted_, win->fbSize());
}
void infer(Ptr<V4D> win) override {
//Create a fb context and copy the prepared image to the framebuffer. The fb context
//takes care of retrieving and storing the data on the graphics card (using CL-GL
//interop if available), ready for other contexts to use
win->fb([](UMat& framebuffer, const cv::UMat& c){
c.copyTo(framebuffer);
}, converted_);
}
};
int main() {
Ptr<DisplayImageFB> plan = new DisplayImageFB(cv::Size(960,960));
//Creates a V4D object
Ptr<V4D> window = V4D::make(plan->size(), "Display an Image through direct FB access");
window->run(plan);
}

@ -0,0 +1,59 @@
#include <opencv2/v4d/v4d.hpp>
#include <opencv2/imgcodecs.hpp>
using namespace cv;
using namespace cv::v4d;
class DisplayImageNVG : public Plan {
//A simple struct to hold our image variables
struct Image_t {
std::string filename_;
nvg::Paint paint_;
int w_;
int h_;
} image_;
public:
DisplayImageNVG(const cv::Size& sz) : Plan(sz) {
}
void setup(Ptr<V4D> win) override{
//Set the filename
image_.filename_ = samples::findFile("lena.jpg");
//Creates a NanoVG context. The wrapped C-functions of NanoVG are available in the namespace cv::v4d::nvg;
win->nvg([](Image_t& img) {
using namespace cv::v4d::nvg;
//Create the image_ and receive a handle.
int handle = createImage(img.filename_.c_str(), NVG_IMAGE_NEAREST);
//Make sure it was created successfully
CV_Assert(handle > 0);
//Query the image_ size
imageSize(handle, &img.w_, &img.h_);
//Create a simple image_ pattern with the image dimensions
img.paint_ = imagePattern(0, 0, img.w_, img.h_, 0.0f/180.0f*NVG_PI, handle, 1.0);
}, image_);
}
void infer(Ptr<V4D> win) override{
//Creates a NanoVG context to draw the loaded image_ over again to the screen.
win->nvg([](const Image_t& img, const cv::Size& sz) {
using namespace cv::v4d::nvg;
beginPath();
//Scale all further calls to window size
scale(double(sz.width)/img.w_, double(sz.height)/img.h_);
//Create a rounded rectangle with the image's dimensions.
//Note that this rectangle will actually have the size of the window
//because of the previous scale call.
roundedRect(0,0, img.w_, img.h_, 50);
//Fill the rounded rectangle with our picture
fillPaint(img.paint_);
fill();
}, image_, win->fbSize());
}
};
int main() {
Ptr<DisplayImageNVG> plan = new DisplayImageNVG(cv::Size(960,960));
Ptr<V4D> window = V4D::make(plan->size(), "Display an Image using NanoVG");
window->run(plan);
}

@ -0,0 +1,264 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Beauty Demo</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
#emscripten_logo {
display: inline-block;
margin: 0;
}
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
/* #controls {
display: inline-block;
float: right;
vertical-align: top;
margin-top: 30px;
margin-right: 20px;
}*/
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<video id="v4dVideoElement" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var captureBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#v4dVideoElement");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
videoElement.style.width = (width / window.devicePixelRatio) + "px";
videoElement.style.height = (height / window.devicePixelRatio) + "px";
videoElement.width = width;
videoElement.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
globalThis.doCapture = true;
}
}
captureBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: { width: width, height: height } , audio: false });
var settings = stream.getVideoTracks()[0].getSettings();
var aspectRatio = settings.width / settings.height;
Module._v4dInitCapture(width, width / aspectRatio);
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
});
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script async type="text/javascript" src="example_v4d_beauty-demo.js"></script>
</body>
</html>
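
The capture flow in this page is a small handshake: getUserMedia provides the camera stream, the script derives the stream's aspect ratio and passes the capture dimensions to the WebAssembly side via Module._v4dInitCapture, and only once the hidden video element is both playing and delivering frames (timeupdate) does checkReady() set globalThis.doCapture. For v4dInitCapture to be callable as Module._v4dInitCapture it must be exported as a C symbol; a hedged sketch of such an export (only the name and signature are inferred from the JavaScript above, the body is an assumption):

#include <emscripten.h>
extern "C" {
//Hypothetical export, reachable from JS as Module._v4dInitCapture.
//EMSCRIPTEN_KEEPALIVE keeps the symbol from being dead-stripped at link time.
EMSCRIPTEN_KEEPALIVE void v4dInitCapture(int width, int height) {
    //...configure the capture source to width x height here...
}
}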

@@ -0,0 +1,271 @@
#!/bin/bash
title=$1
name=$2
cat << EOF
<!doctype html>
<html lang="en-us">
<head>
<title>${title}</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
#emscripten_logo {
display: inline-block;
margin: 0;
}
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
/* #controls {
display: inline-block;
float: right;
vertical-align: top;
margin-top: 30px;
margin-right: 20px;
}*/
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<video id="v4dVideoElement" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var captureBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#v4dVideoElement");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
videoElement.style.width = (width / window.devicePixelRatio) + "px";
videoElement.style.height = (height / window.devicePixelRatio) + "px";
videoElement.width = width;
videoElement.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
globalThis.doCapture = true;
}
}
captureBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: { width: width, height: height } , audio: false });
var settings = stream.getVideoTracks()[0].getSettings();
var aspectRatio = settings.width / settings.height;
Module._v4dInitCapture(width, width / aspectRatio);
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
});
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script async type="text/javascript" src="example_v4d_${name}.js"></script>
</body>
</html>
EOF
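
This script prints the capture-enabled HTML shell above to stdout, substituting ${title} and ${name}; presumably it is run once per demo, e.g. ./example_v4d_capture.sh "Beauty Demo" beauty-demo > example_v4d_beauty-demo.html, which reproduces the beauty-demo page shown earlier.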

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Cube Demo</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_cube-demo.js" defer></script>
</body>
</html>

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Custom Source and Sink</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_custom_source_and_sink.js" defer></script>
</body>
</html>

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Display an Image through the Video-Pipeline</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_display_image.js" defer></script>
</body>
</html>

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Display an Image through the FB Context</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_display_image_fb.js" defer></script>
</body>
</html>

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Display an Image through NanoVG</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_display_image_nvg.js" defer></script>
</body>
</html>

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Font Demo</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_font-demo.js" defer></script>
</body>
</html>

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Font rendering</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_font_rendering.js" defer></script>
</body>
</html>

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Font rendering with Form-based GUI</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear the output area
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
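// Called by the Emscripten runtime whenever the number of outstanding
// run dependencies changes; track the peak so progress can be reported.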
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false);
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_font_with_gui.js" defer></script>
</body>
</html>

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Many Cubes Demo</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear the output area
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
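// After an uncaught exception, route any further status text to the error
// log instead of the status element.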
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false);
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_many_cubes-demo.js" defer></script>
</body>
</html>

@@ -0,0 +1,264 @@
<!doctype html>
<html lang="en-us">
<head>
<title>NanoVG Demo</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
#emscripten_logo {
display: inline-block;
margin: 0;
}
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
/* #controls {
display: inline-block;
float: right;
vertical-align: top;
margin-top: 30px;
margin-right: 20px;
}*/
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<video id="v4dVideoElement" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var captureBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#v4dVideoElement");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
videoElement.style.width = (width / window.devicePixelRatio) + "px";
videoElement.style.height = (height / window.devicePixelRatio) + "px";
videoElement.width = width;
videoElement.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear the output area
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
var playing = false;
var timeupdate = false;
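// Only signal the capture loop (via the global doCapture flag, which the
// v4d side appears to poll) once the video is playing and delivering frames.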
function checkReady() {
if (playing && timeupdate) {
globalThis.doCapture = true;
}
}
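// Ask for a webcam stream, then hand the effective capture size to the
// WASM module via the exported _v4dInitCapture, preserving the camera's
// aspect ratio.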
captureBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: { width: width, height: height } , audio: false });
var settings = stream.getVideoTracks()[0].getSettings();
var aspectRatio = settings.width / settings.height;
Module._v4dInitCapture(width, width / aspectRatio);
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
});
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false);
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script async type="text/javascript" src="example_v4d_nanovg-demo.js"></script>
</body>
</html>

@@ -0,0 +1,218 @@
#!/bin/bash
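# Emits an Emscripten HTML shell (without webcam capture) for a v4d sample.
# Usage: example_v4d_nocapture.sh <page-title> <sample-name>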
title=$1
name=$2
cat << EOF
<!doctype html>
<html lang="en-us">
<head>
<title>${title}</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear the output area
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false);
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_${name}.js" defer></script>
</body>
</html>
EOF

@@ -0,0 +1,264 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Sparse Optical Flow Demo</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
#emscripten_logo {
display: inline-block;
margin: 0;
}
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
/* #controls {
display: inline-block;
float: right;
vertical-align: top;
margin-top: 30px;
margin-right: 20px;
}*/
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<video id="v4dVideoElement" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var captureBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#v4dVideoElement");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
videoElement.style.width = (width / window.devicePixelRatio) + "px";
videoElement.style.height = (height / window.devicePixelRatio) + "px";
videoElement.width = width;
videoElement.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear the output area
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
globalThis.doCapture = true;
}
}
captureBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: { width: width, height: height } , audio: false });
var settings = stream.getVideoTracks()[0].getSettings();
var aspectRatio = settings.width / settings.height;
Module._v4dInitCapture(width, width / aspectRatio);
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
});
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false);
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script async type="text/javascript" src="example_v4d_optflow-demo.js"></script>
</body>
</html>

@@ -0,0 +1,264 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Pedestrian Demo</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
#emscripten_logo {
display: inline-block;
margin: 0;
}
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
/* #controls {
display: inline-block;
float: right;
vertical-align: top;
margin-top: 30px;
margin-right: 20px;
}*/
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<video id="v4dVideoElement" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var captureBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#v4dVideoElement");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
videoElement.style.width = (width / window.devicePixelRatio) + "px";
videoElement.style.height = (height / window.devicePixelRatio) + "px";
videoElement.width = width;
videoElement.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
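// Mirror stdout to the console and, if an #output element exists, to the page.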
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear the output area
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
globalThis.doCapture = true;
}
}
captureBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: { width: width, height: height } , audio: false });
var settings = stream.getVideoTracks()[0].getSettings();
var aspectRatio = settings.width / settings.height;
Module._v4dInitCapture(width, width / aspectRatio);
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
});
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false);
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script async type="text/javascript" src="example_v4d_pedestrian-demo.js"></script>
</body>
</html>

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Render OpenGL Blue Screen</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear the output area
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
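// requestFullscreen(lockPointer, resizeCanvas): don't grab the pointer and
// don't resize the canvas backing store.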
Module.requestFullscreen(false, false);
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_render_opengl.js" defer></script>
</body>
</html>

@@ -0,0 +1,264 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Mandelbrot Shader Demo</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
#emscripten_logo {
display: inline-block;
margin: 0;
}
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
/* #controls {
display: inline-block;
float: right;
vertical-align: top;
margin-top: 30px;
margin-right: 20px;
}*/
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<video id="v4dVideoElement" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var captureBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#v4dVideoElement");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
videoElement.style.width = (width / window.devicePixelRatio) + "px";
videoElement.style.height = (height / window.devicePixelRatio) + "px";
videoElement.width = width;
videoElement.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear the output area
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
globalThis.doCapture = true;
}
}
captureBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: { width: width, height: height } , audio: false });
var settings = stream.getVideoTracks()[0].getSettings();
var aspectRatio = settings.width / settings.height;
Module._v4dInitCapture(width, width / aspectRatio);
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
});
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false);
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
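<!-- Loaded with async (not defer) so the runtime starts as soon as the script arrives. -->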
<script async type="text/javascript" src="example_v4d_shader-demo.js"></script>
</body>
</html>

@@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Vector Graphics</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
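// Emscripten status strings look like "Downloading data... (3/10)";
// the regex below extracts the current count (m[2]) and the total (m[4]).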
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_vector_graphics.js" defer></script>
</body>
</html>

@ -0,0 +1,210 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Vector Graphics and Framebuffer access</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span>
<button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script type="text/javascript" src="example_v4d_vector_graphics_and_fb.js" defer></script>
</body>
</html>

@ -0,0 +1,264 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Video Demo</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
#emscripten_logo {
display: inline-block;
margin: 0;
}
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
/* #controls {
display: inline-block;
float: right;
vertical-align: top;
margin-top: 30px;
margin-right: 20px;
}*/
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<video id="v4dVideoElement" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var captureBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#v4dVideoElement");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
videoElement.style.width = (width / window.devicePixelRatio) + "px";
videoElement.style.height = (height / window.devicePixelRatio) + "px";
videoElement.width = width;
videoElement.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
var playing = false;
var timeupdate = false;
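// Only enable capture once the video element both reports "playing" and
// fires "timeupdate", i.e. it is actually delivering frames.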
function checkReady() {
if (playing && timeupdate) {
globalThis.doCapture = true;
}
}
captureBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: { width: width, height: height } , audio: false });
var settings = stream.getVideoTracks()[0].getSettings();
var aspectRatio = settings.width / settings.height;
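// e.g. a 1280x720 camera yields an aspectRatio of ~1.78, so capture is initialized at 960x540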
Module._v4dInitCapture(width, width / aspectRatio);
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
});
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script async type="text/javascript" src="example_v4d_video-demo.js"></script>
</body>
</html>

@ -0,0 +1,264 @@
<!doctype html>
<html lang="en-us">
<head>
<title>Video Editing</title>
<style>
body {
font-family: arial;
margin: 0;
padding: 0;
}
.emscripten { padding-right: 0; margin-left: auto; margin-right: auto; display: block; }
div.emscripten { text-align: center; }
div.emscripten_border { border: 1px solid black; }
/* the canvas *must not* have any border or padding, or mouse coords will be wrong */
canvas.emscripten { border: 0px none; background-color: black; }
#emscripten_logo {
display: inline-block;
margin: 0;
}
.spinner {
height: 30px;
width: 30px;
margin: 0;
margin-top: 20px;
margin-left: 20px;
display: inline-block;
vertical-align: top;
-webkit-animation: rotation .8s linear infinite;
-moz-animation: rotation .8s linear infinite;
-o-animation: rotation .8s linear infinite;
animation: rotation 0.8s linear infinite;
border-left: 5px solid rgb(235, 235, 235);
border-right: 5px solid rgb(235, 235, 235);
border-bottom: 5px solid rgb(235, 235, 235);
border-top: 5px solid rgb(120, 120, 120);
border-radius: 100%;
background-color: rgb(189, 215, 46);
}
@-webkit-keyframes rotation {
from {-webkit-transform: rotate(0deg);}
to {-webkit-transform: rotate(360deg);}
}
@-moz-keyframes rotation {
from {-moz-transform: rotate(0deg);}
to {-moz-transform: rotate(360deg);}
}
@-o-keyframes rotation {
from {-o-transform: rotate(0deg);}
to {-o-transform: rotate(360deg);}
}
@keyframes rotation {
from {transform: rotate(0deg);}
to {transform: rotate(360deg);}
}
#status {
display: inline-block;
vertical-align: top;
margin-top: 30px;
margin-left: 20px;
font-weight: bold;
color: rgb(120, 120, 120);
}
#progress {
height: 20px;
width: 300px;
}
/* #controls {
display: inline-block;
float: right;
vertical-align: top;
margin-top: 30px;
margin-right: 20px;
}*/
#output {
width: 100%;
height: 200px;
margin: 0 auto;
margin-top: 10px;
border-left: 0px;
border-right: 0px;
padding-left: 0px;
padding-right: 0px;
display: block;
color: white;
font-family: 'Lucida Console', Monaco, monospace;
outline: none;
}
</style>
</head>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="v4dOffscreenCanvas" style="display:none;"></canvas>
<video id="v4dVideoElement" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
<div class="emscripten">
<progress value="0" max="100" id="progress" hidden=1></progress>
</div>
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var captureBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#v4dVideoElement");
var offscreenCanvas = document.querySelector("#v4dOffscreenCanvas");
var width = 960;
var height = 960;
function fixCanvasSize() {
Module.canvas.style.width = (width / window.devicePixelRatio) + "px";
Module.canvas.style.height = (height / window.devicePixelRatio) + "px";
Module.canvas.width = width;
Module.canvas.height = height;
videoElement.style.width = (width / window.devicePixelRatio) + "px";
videoElement.style.height = (height / window.devicePixelRatio) + "px";
videoElement.width = width;
videoElement.height = height;
offscreenCanvas.style.width = (width / window.devicePixelRatio) + "px";
offscreenCanvas.style.height = (height / window.devicePixelRatio) + "px";
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
},
preRun: [],
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
return function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
// These replacements are necessary if you render to raw HTML
//text = text.replace(/&/g, "&amp;");
//text = text.replace(/</g, "&lt;");
//text = text.replace(/>/g, "&gt;");
//text = text.replace('\n', '<br>', 'g');
console.log(text);
if (element) {
element.value += text + "\n";
element.scrollTop = element.scrollHeight; // focus on bottom
}
};
})(),
printErr: function(text) {
if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
console.error(text);
},
canvas: (function() {
var canvas = document.getElementById('canvas');
// As a default initial behavior, pop up an alert when webgl context is lost. To make your
// application robust, you may want to override this behavior before shipping!
// See http://www.khronos.org/registry/webgl/specs/latest/1.0/#5.15.2
canvas.addEventListener("webglcontextlost", function(e) { alert('WebGL context lost. You will need to reload the page.'); e.preventDefault(); }, false);
return canvas;
})(),
setStatus: function(text) {
if (!Module.setStatus.last) Module.setStatus.last = { time: Date.now(), text: '' };
if (text === Module.setStatus.last.text) return;
var m = text.match(/([^(]+)\((\d+(\.\d+)?)\/(\d+)\)/);
var now = Date.now();
if (m && now - Module.setStatus.last.time < 30) return; // if this is a progress update, skip it if too soon
Module.setStatus.last.time = now;
Module.setStatus.last.text = text;
if (m) {
text = m[1];
progressElement.value = parseInt(m[2])*100;
progressElement.max = parseInt(m[4])*100;
progressElement.hidden = false;
} else {
progressElement.value = null;
progressElement.max = null;
progressElement.hidden = true;
}
statusElement.innerHTML = text;
},
totalDependencies: 0,
monitorRunDependencies: function(left) {
this.totalDependencies = Math.max(this.totalDependencies, left);
Module.setStatus(left ? 'Preparing... (' + (this.totalDependencies-left) + '/' + this.totalDependencies + ')' : 'All downloads complete.');
}
};
Module.setStatus('Downloading...');
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
globalThis.doCapture = true;
}
}
captureBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: { width: width, height: height } , audio: false });
var settings = stream.getVideoTracks()[0].getSettings();
var aspectRatio = settings.width / settings.height;
Module._v4dInitCapture(width, width / aspectRatio);
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
});
window.onerror = function(event) {
// TODO: do not warn on ok events like simulating an infinite loop or exitStatus
Module.setStatus('Exception thrown, see JavaScript console');
//spinnerElement.style.display = 'none';
Module.setStatus = function(text) {
if (text) Module.printErr('[post-exception status] ' + text);
};
};
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log("Element: " + document.fullscreenElement.id + " entered fullscreen mode.");
} else {
console.log('Leaving fullscreen mode.');
}
});
</script>
<script async type="text/javascript" src="example_v4d_video_editing.js"></script>
</body>
</html>

@ -0,0 +1,243 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
#include <string>
#include <algorithm>
#include <vector>
#include <sstream>
#include <limits>
using std::string;
using std::vector;
using std::istringstream;
using namespace cv::v4d;
class FontDemoPlan : public Plan {
static struct Params {
const cv::Scalar_<float> INITIAL_COLOR = cv::v4d::colorConvert(cv::Scalar(0.15 * 180.0, 128, 255, 255), cv::COLOR_HLS2RGB);
float minStarSize_ = 0.5f;
float maxStarSize_ = 1.5f;
int minStarCount_ = 1000;
int maxStarCount_ = 3000;
float starAlpha_ = 0.3f;
float fontSize_ = 0.0f;
cv::Scalar_<float> textColor_ = INITIAL_COLOR / 255.0;
float warpRatio_ = 1.0f/3.0f;
bool updateStars_ = true;
bool updatePerspective_ = true;
} params_;
//BGRA
inline static cv::UMat stars_;
cv::UMat warped_;
//transformation matrix
inline static cv::Mat tm_;
static struct TextVars {
//the text to display
vector<string> lines_;
//global frame count
uint32_t global_cnt_ = 0;
//Total number of lines in the text
int32_t numLines_ = 0;
//Height of the text in pixels
int32_t textHeight_ = 0;
} textVars_;
//the sequence number of the current frame
uint32_t seqNum_ = 0;
//y-value of the current line
int32_t y_ = 0;
int32_t translateY_ = 0;
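//seed the RNG with the current tick count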
cv::RNG rng_ = cv::getTickCount();
public:
using Plan::Plan;
FontDemoPlan(const cv::Size& sz) : FontDemoPlan(cv::Rect(0, 0, sz.width, sz.height)) {
Global::registerShared(params_);
Global::registerShared(textVars_);
Global::registerShared(tm_);
Global::registerShared(stars_);
}
FontDemoPlan(const cv::Rect& vp) : Plan(vp) {
}
void gui(cv::Ptr<V4D> window) override {
window->imgui([](cv::Ptr<V4D> win, ImGuiContext* ctx, Params& params){
CV_UNUSED(win);
using namespace ImGui;
SetCurrentContext(ctx);
Begin("Effect");
Text("Text Crawl");
SliderFloat("Font Size", &params.fontSize_, 1.0f, 100.0f);
if(SliderFloat("Warp Ratio", &params.warpRatio_, 0.1f, 1.0f))
params.updatePerspective_ = true;
ColorPicker4("Text Color", params.textColor_.val);
Text("Stars");
if(SliderFloat("Min Star Size", &params.minStarSize_, 0.5f, 1.0f))
params.updateStars_ = true;
if(SliderFloat("Max Star Size", &params.maxStarSize_, 1.0f, 10.0f))
params.updateStars_ = true;
if(SliderInt("Min Star Count", &params.minStarCount_, 1, 1000))
params.updateStars_ = true;
if(SliderInt("Max Star Count", &params.maxStarCount_, 1000, 5000))
params.updateStars_ = true;
if(SliderFloat("Min Star Alpha", &params.starAlpha_, 0.2f, 1.0f))
params.updateStars_ = true;
End();
}, params_);
}
void setup(cv::Ptr<V4D> window) override {
window->once([](const cv::Size& sz, TextVars& textVars, Params& params){
//The text to display
string txt = cv::getBuildInformation();
//Save the text to a vector
std::istringstream iss(txt);
int fontSize = hypot(sz.width, sz.height) / 60.0;
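//e.g. for a 1280x720 frame: hypot(1280, 720) is ~1468, so fontSize is ~24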
{
Global::Scope scope(textVars);
for (std::string line; std::getline(iss, line); ) {
textVars.lines_.push_back(line);
}
textVars.numLines_ = textVars.lines_.size();
textVars.textHeight_ = (textVars.numLines_ * fontSize);
}
{
Global::Scope scope(params);
params.fontSize_ = fontSize;
}
}, size(), textVars_, params_);
}
void infer(cv::Ptr<V4D> window) override {
window->branch(0, isTrue_, params_.updateStars_);
{
window->nvg([](const cv::Size& sz, cv::RNG& rng, const Params& params) {
Params p = Global::safe_copy(params);
using namespace cv::v4d::nvg;
clear();
//draw stars
int numStars = rng.uniform(p.minStarCount_, p.maxStarCount_);
for(int i = 0; i < numStars; ++i) {
beginPath();
const auto& size = rng.uniform(p.minStarSize_, p.maxStarSize_);
strokeWidth(size);
strokeColor(cv::Scalar(255, 255, 255, p.starAlpha_ * 255.0f));
circle(rng.uniform(0, sz.width) , rng.uniform(0, sz.height), size / 2.0);
stroke();
}
}, size(), rng_, params_);
window->fb([](const cv::UMat& framebuffer, const cv::Rect& viewport, cv::UMat& stars, Params& params){
{
Global::Scope scope(stars);
framebuffer(viewport).copyTo(stars);
}
{
Global::Scope scope(params);
params.updateStars_ = false;
}
}, viewport(), stars_, params_);
}
window->endbranch(0, isTrue_, params_.updateStars_);
window->branch(0, isTrue_, params_.updatePerspective_);
{
window->plain([](const cv::Size& sz, cv::Mat& tm, Params& params){
Params p = Global::safe_copy(params);
//Derive the transformation matrix tm for the pseudo 3D effect from quad1 and quad2.
vector<cv::Point2f> quad1 = {cv::Point2f(0,0),cv::Point2f(sz.width,0),
cv::Point2f(sz.width,sz.height),cv::Point2f(0,sz.height)};
float l = (sz.width - (sz.width * p.warpRatio_)) / 2.0;
float r = sz.width - l;
vector<cv::Point2f> quad2 = {cv::Point2f(l, 0.0f),cv::Point2f(r, 0.0f),
cv::Point2f(sz.width,sz.height), cv::Point2f(0,sz.height)};
Global::Scope scope(tm);
tm = cv::getPerspectiveTransform(quad1, quad2);
}, size(), tm_, params_);
}
window->endbranch(0, isTrue_, params_.updatePerspective_);
window->branch(always_);
{
window->nvg([](const cv::Size& sz, int32_t& ty, const int32_t& seqNum, int32_t& y, const TextVars& textVars, const Params& params) {
Params p = Global::safe_copy(params);
TextVars txt = Global::safe_copy(textVars);
//How many pixels to translate the text up.
ty = sz.height - seqNum;
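//seqNum increases once per frame, so the crawl starts at the bottom edge and scrolls up one pixel per frame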
using namespace cv::v4d::nvg;
clear();
fontSize(p.fontSize_);
fontFace("sans-bold");
fillColor(p.textColor_ * 255);
textAlign(NVG_ALIGN_CENTER | NVG_ALIGN_TOP);
/** only draw lines that are visible **/
translate(0, ty);
for (size_t i = 0; i < txt.lines_.size(); ++i) {
y = (i * p.fontSize_);
if (y + ty < txt.textHeight_ && y + ty + p.fontSize_ > 0) {
text(sz.width / 2.0, y, txt.lines_[i].c_str(), txt.lines_[i].c_str() + txt.lines_[i].size());
}
}
}, size(), translateY_, seqNum_, y_, textVars_, params_);
window->fb([](cv::UMat& framebuffer, const cv::Rect& viewport, cv::UMat& warped, cv::UMat& stars, cv::Mat& tm) {
{
Global::Scope scope(tm);
cv::warpPerspective(framebuffer(viewport), warped, tm, viewport.size(), cv::INTER_LINEAR, cv::BORDER_CONSTANT, cv::Scalar());
}
{
Global::Scope scope(stars);
cv::add(stars.clone(), warped, framebuffer(viewport));
}
}, viewport(), warped_, stars_, tm_);
window->write();
window->plain([](const int32_t& translateY, TextVars& textVars, uint32_t& seqNum) {
Global::Scope scope(textVars);
if(-translateY > textVars.textHeight_) {
//reset the scroll once the text is out of the picture
textVars.global_cnt_ = 0;
}
++textVars.global_cnt_;
//Wrap the count around if it becomes too big.
if(textVars.global_cnt_ > std::numeric_limits<uint32_t>().max() / 2.0)
textVars.global_cnt_ = 0;
seqNum = textVars.global_cnt_;
}, translateY_, textVars_, seqNum_);
}
window->endbranch(always_);
}
};
FontDemoPlan::Params FontDemoPlan::params_;
FontDemoPlan::TextVars FontDemoPlan::textVars_;
int main() {
cv::Ptr<FontDemoPlan> plan = new FontDemoPlan(cv::Size(1280, 720));
cv::Ptr<V4D> window = V4D::make(plan->size(), "Font Demo", ALL);
auto sink = makeWriterSink(window, "font-demo.mkv", 60, plan->size());
window->setSink(sink);
window->run(plan);
return 0;
}

@ -0,0 +1,33 @@
#include <opencv2/v4d/v4d.hpp>
using namespace cv;
using namespace cv::v4d;
class FontRenderingPlan: public Plan {
//The text to render
string hw_ = "Hello World";
public:
FontRenderingPlan(const cv::Size& sz) : Plan(sz) {
}
void infer(Ptr<V4D> win) override {
//Render the text at the center of the screen. Note that you can load your own fonts.
win->nvg([](const Size &sz, const string &str) {
using namespace cv::v4d::nvg;
clear();
fontSize(40.0f);
fontFace("sans-bold");
fillColor(Scalar(255, 0, 0, 255));
textAlign(NVG_ALIGN_CENTER | NVG_ALIGN_TOP);
text(sz.width / 2.0, sz.height / 2.0, str.c_str(),
str.c_str() + str.size());
}, win->fbSize(), hw_);
}
};
int main() {
cv::Ptr<FontRenderingPlan> plan = new FontRenderingPlan(cv::Size(960,960));
cv::Ptr<V4D> window = V4D::make(plan->size(), "Font Rendering");
window->run(plan);
}

@ -0,0 +1,53 @@
#include <opencv2/v4d/v4d.hpp>
using namespace cv;
using namespace cv::v4d;
class FontWithGuiPlan: public Plan {
enum Names {
SIZE,
COLOR
};
using Params = ThreadSafeMap<Names>;
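//Thread-safe parameter map shared between the GUI callback and the render path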
inline static Params params_;
//The text
string hw_ = "hello world";
public:
FontWithGuiPlan(const cv::Size& sz) : Plan(sz) {
params_.set(SIZE, 40.0f);
params_.set(COLOR, cv::Scalar_<float>(1.0f, 0.0f, 0.0f, 1.0f));
}
void gui(Ptr<V4D> window) override {
window->imgui([](Ptr<V4D> win, ImGuiContext* ctx, Params& params) {
CV_UNUSED(win);
using namespace ImGui;
SetCurrentContext(ctx);
Begin("Settings");
SliderFloat("Font Size", params.ptr<float>(SIZE), 1.0f, 100.0f);
ColorPicker4("Text Color", params.ptr<cv::Scalar_<float>>(COLOR)->val);
End();
}, params_);
}
void infer(Ptr<V4D> window) override {
//Render the text at the center of the screen using parameters from the GUI.
window->nvg([](const Size& sz, const string& str, Params& params) {
using namespace cv::v4d::nvg;
clear();
fontSize(params.get<float>(SIZE));
fontFace("sans-bold");
fillColor(params.get<cv::Scalar_<float>>(COLOR) * 255.0);
textAlign(NVG_ALIGN_CENTER | NVG_ALIGN_TOP);
text(sz.width / 2.0, sz.height / 2.0, str.c_str(), str.c_str() + str.size());
}, window->fbSize(), hw_, params_);
}
};
int main() {
Ptr<FontWithGuiPlan> plan = new FontWithGuiPlan(cv::Size(960,960));
Ptr<V4D> window = V4D::make(plan->size(), "Font Rendering with GUI");
window->run(plan);
}

@ -0,0 +1,23 @@
#!/bin/bash
set -e
./example_v4d_capture.sh "Beauty Demo" beauty-demo > example_v4d_beauty-demo.html
./example_v4d_nocapture.sh "Cube Demo" cube-demo > example_v4d_cube-demo.html
./example_v4d_nocapture.sh "Custom Source and Sink" custom_source_and_sink > example_v4d_custom_source_and_sink.html
./example_v4d_nocapture.sh "Display an Image through the FB Context" display_image_fb > example_v4d_display_image_fb.html
./example_v4d_nocapture.sh "Display an Image through the Video-Pipeline" display_image > example_v4d_display_image.html
./example_v4d_nocapture.sh "Display an Image through NanoVG" display_image_nvg > example_v4d_display_image_nvg.html
./example_v4d_nocapture.sh "Font Demo" font-demo > example_v4d_font-demo.html
./example_v4d_nocapture.sh "Font rendering with Form-based GUI" font_with_gui > example_v4d_font_with_gui.html
./example_v4d_nocapture.sh "Font rendering" font_rendering > example_v4d_font_rendering.html
./example_v4d_nocapture.sh "Many Cubes Demo" many_cubes-demo > example_v4d_many_cubes-demo.html
./example_v4d_capture.sh "NanoVG Demo" nanovg-demo > example_v4d_nanovg-demo.html
./example_v4d_capture.sh "Sparse Optical Flow Demo" optflow-demo > example_v4d_optflow-demo.html
./example_v4d_capture.sh "Pedestrian Demo" pedestrian-demo > example_v4d_pedestrian-demo.html
./example_v4d_nocapture.sh "Render OpenGL Blue Screen" render_opengl > example_v4d_render_opengl.html
./example_v4d_capture.sh "Mandelbrot Shader Demo" shader-demo > example_v4d_shader-demo.html
./example_v4d_nocapture.sh "Vector Graphics and Frambuffer access" vector_graphics_and_fb > example_v4d_vector_graphics_and_fb.html
./example_v4d_nocapture.sh "Vector Graphics" vector_graphics > example_v4d_vector_graphics.html
./example_v4d_capture.sh "Video Demo" video-demo > example_v4d_video-demo.html
./example_v4d_capture.sh "Video Editing" video_editing > example_v4d_video_editing.html

@ -0,0 +1,269 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
//adapted from https://gitlab.com/wikibooks-opengl/modern-tutorials/-/blob/master/tut05_cube/cube.cpp
using namespace cv::v4d;
class ManyCubesDemoPlan : public Plan {
public:
using Plan::Plan;
/* Demo Parameters */
constexpr static size_t NUMBER_OF_CUBES_ = 10;
int glowKernelSize_;
/* OpenGL constants and variables */
constexpr static GLuint TRIANGLES_ = 12;
constexpr static GLuint VERTICES_INDEX_ = 0;
constexpr static GLuint COLORS_INDEX_ = 1;
//Cube vertices, colors and indices
constexpr static float VERTICES_[24] = {
// Front face
0.5, 0.5, 0.5, -0.5, 0.5, 0.5, -0.5, -0.5, 0.5, 0.5, -0.5, 0.5,
// Back face
0.5, 0.5, -0.5, -0.5, 0.5, -0.5, -0.5, -0.5, -0.5, 0.5, -0.5, -0.5
};
constexpr static float VERTEX_COLORS_[24] = {
1.0, 0.4, 0.6, 1.0, 0.9, 0.2, 0.7, 0.3, 0.8, 0.5, 0.3, 1.0,
0.2, 0.6, 1.0, 0.6, 1.0, 0.4, 0.6, 0.8, 0.8, 0.4, 0.8, 0.8
};
constexpr static unsigned short TRIANGLE_INDICES_[36] = {
// Front
0, 1, 2, 2, 3, 0,
// Right
0, 3, 7, 7, 4, 0,
// Bottom
2, 6, 7, 7, 3, 2,
// Left
1, 5, 6, 6, 2, 1,
// Back
4, 7, 6, 6, 5, 4,
// Top
5, 1, 0, 0, 4, 5
};
private:
struct Cache {
cv::UMat down_;
cv::UMat up_;
cv::UMat blur_;
cv::UMat dst16_;
} cache_;
GLuint vao_[NUMBER_OF_CUBES_];
GLuint shaderProgram_[NUMBER_OF_CUBES_];
GLuint uniformTransform_[NUMBER_OF_CUBES_];
//Simple transform & pass-through shaders
static GLuint load_shader() {
//Shader versions "330" and "300 es" are very similar.
//If you are careful you can write the same code for both versions.
#if !defined(OPENCV_V4D_USE_ES3)
const string shaderVersion = "330";
#else
const string shaderVersion = "300 es";
#endif
const string vert =
" #version " + shaderVersion
+ R"(
precision lowp float;
layout(location = 0) in vec3 pos;
layout(location = 1) in vec3 vertex_color;
uniform mat4 transform;
out vec3 color;
void main() {
gl_Position = transform * vec4(pos, 1.0);
color = vertex_color;
}
)";
const string frag =
" #version " + shaderVersion
+ R"(
precision lowp float;
in vec3 color;
out vec4 frag_color;
void main() {
frag_color = vec4(color, 1.0);
}
)";
//Initializes the shaders and returns the program
unsigned int handles[3];
cv::v4d::initShader(handles, vert.c_str(), frag.c_str(), "fragColor");
return handles[0];
}
//Initializes objects, buffers, shaders and uniforms
static void init_scene(const cv::Size& sz, GLuint& vao, GLuint& shaderProgram, GLuint& uniformTransform) {
glEnable (GL_DEPTH_TEST);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
unsigned int triangles_ebo;
glGenBuffers(1, &triangles_ebo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, triangles_ebo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof TRIANGLE_INDICES_, TRIANGLE_INDICES_,
GL_STATIC_DRAW);
unsigned int vertices_vbo;
glGenBuffers(1, &vertices_vbo);
glBindBuffer(GL_ARRAY_BUFFER, vertices_vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof VERTICES_, VERTICES_, GL_STATIC_DRAW);
glVertexAttribPointer(VERTICES_INDEX_, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(VERTICES_INDEX_);
unsigned int colors_vbo;
glGenBuffers(1, &colors_vbo);
glBindBuffer(GL_ARRAY_BUFFER, colors_vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof VERTEX_COLORS_, VERTEX_COLORS_, GL_STATIC_DRAW);
glVertexAttribPointer(COLORS_INDEX_, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(COLORS_INDEX_);
glBindVertexArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
shaderProgram = load_shader();
uniformTransform = glGetUniformLocation(shaderProgram, "transform");
glViewport(0,0, sz.width, sz.height);
}
//Renders a rotating rainbow-colored cube on a blueish background
static void render_scene(const cv::Size& sz, const double& x, const double& y, const double& angleMod, GLuint& vao, GLuint& shaderProgram, GLuint& uniformTransform) {
glViewport(0,0, sz.width, sz.height);
//Use the prepared shader program
glUseProgram(shaderProgram);
//Scale and rotate the cube depending on the current time.
float angle = fmod(double(cv::getTickCount()) / double(cv::getTickFrequency()) + angleMod, 2 * M_PI);
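//ticks divided by tick frequency is wall-clock seconds, so each cube completes a full rotation every 2*pi seconds, phase-shifted by angleMod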
double scale = 0.25;
cv::Matx44f scaleMat(
scale, 0.0, 0.0, 0.0,
0.0, scale, 0.0, 0.0,
0.0, 0.0, scale, 0.0,
0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotXMat(
1.0, 0.0, 0.0, 0.0,
0.0, cos(angle), -sin(angle), 0.0,
0.0, sin(angle), cos(angle), 0.0,
0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotYMat(
cos(angle), 0.0, sin(angle), 0.0,
0.0, 1.0, 0.0, 0.0,
-sin(angle), 0.0,cos(angle), 0.0,
0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotZMat(
cos(angle), -sin(angle), 0.0, 0.0,
sin(angle), cos(angle), 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0);
cv::Matx44f translateMat(
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
x, y, 0.0, 1.0);
//calculate the transform
cv::Matx44f transform = scaleMat * rotXMat * rotYMat * rotZMat * translateMat;
//set the corresponding uniform
glUniformMatrix4fv(uniformTransform, 1, GL_FALSE, transform.val);
//Bind our vertex array
glBindVertexArray(vao);
//Draw
glDrawElements(GL_TRIANGLES, TRIANGLES_ * 3, GL_UNSIGNED_SHORT, NULL);
}
//applies a glow effect to an image
static void glow_effect(const cv::UMat& src, cv::UMat& dst, const int ksize, Cache& cache) {
cv::bitwise_not(src, dst);
//Resize for some extra performance
cv::resize(dst, cache.down_, cv::Size(), 0.5, 0.5);
//Cheap blur
cv::boxFilter(cache.down_, cache.blur_, -1, cv::Size(ksize, ksize), cv::Point(-1, -1), true,
cv::BORDER_REPLICATE);
//Back to original size
cv::resize(cache.blur_, cache.up_, src.size());
//Multiply the src image with a blurred version of itself
cv::multiply(dst, cache.up_, cache.dst16_, 1, CV_16U);
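//(the CV_16U intermediate avoids overflow, since the product of two 8-bit values exceeds 8 bits)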
//Normalize and convert back to CV_8U
cv::divide(cache.dst16_, cv::Scalar::all(255.0), dst, 1, CV_8U);
cv::bitwise_not(dst, dst);
}
public:
void setup(cv::Ptr<V4D> window) override {
int diag = hypot(double(size().width), double(size().height));
glowKernelSize_ = std::max(int(diag / 138 % 2 == 0 ? diag / 138 + 1 : diag / 138), 1);
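//e.g. for 1280x720: diag = 1468 and 1468 / 138 = 10 (even), so the kernel size becomes 11 (always odd, at least 1)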
for(size_t i = 0; i < NUMBER_OF_CUBES_; ++i) {
window->gl(i, [](const size_t& ctxIdx, const cv::Size& sz, GLuint& vao, GLuint& shader, GLuint& uniformTrans){
CV_UNUSED(ctxIdx);
init_scene(sz, vao, shader, uniformTrans);
}, size(), vao_[i], shaderProgram_[i], uniformTransform_[i]);
}
}
void infer(cv::Ptr<V4D> window) override {
window->gl([](){
//Clear the background
glClearColor(0.2, 0.24, 0.4, 1);
glClear(GL_COLOR_BUFFER_BIT);
});
//Render using multiple OpenGL contexts
for(size_t i = 0; i < NUMBER_OF_CUBES_; ++i) {
window->gl(i, [](const int32_t& ctxIdx, const cv::Size& sz, GLuint& vao, GLuint& shader, GLuint& uniformTrans){
double x = sin((double(ctxIdx) / NUMBER_OF_CUBES_) * 2 * M_PI) / 1.5;
double y = cos((double(ctxIdx) / NUMBER_OF_CUBES_) * 2 * M_PI) / 1.5;
double angle = sin((double(ctxIdx) / NUMBER_OF_CUBES_) * 2 * M_PI);
render_scene(sz, x, y, angle, vao, shader, uniformTrans);
}, size(), vao_[i], shaderProgram_[i], uniformTransform_[i]);
}
//Acquire the framebuffer for use by OpenCV
window->fb([](cv::UMat& framebuffer, const cv::Rect& viewport, int glowKernelSize, Cache& cache) {
cv::UMat roi = framebuffer(viewport);
glow_effect(roi, roi, glowKernelSize, cache);
}, viewport(), glowKernelSize_, cache_);
window->write();
}
};
int main() {
cv::Ptr<ManyCubesDemoPlan> plan = new ManyCubesDemoPlan(cv::Size(1280, 720));
cv::Ptr<V4D> window = V4D::make(plan->size(), "Many Cubes Demo", IMGUI);
//Creates a writer sink (which might be hardware accelerated)
auto sink = makeWriterSink(window, "many_cubes-demo.mkv", 60, plan->size());
window->setSink(sink);
window->run(plan, 1);
return 0;
}

@ -0,0 +1,137 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
int v4d_cube_main();
int v4d_many_cubes_main();
int v4d_video_main(int argc, char **argv);
int v4d_nanovg_main(int argc, char **argv);
int v4d_shader_main(int argc, char **argv);
int v4d_font_main();
int v4d_pedestrian_main(int argc, char **argv);
int v4d_optflow_main(int argc, char **argv);
int v4d_beauty_main(int argc, char **argv);
#define main v4d_cube_main
#include "cube-demo.cpp"
#undef main
#define main v4d_many_cubes_main
#include "many_cubes-demo.cpp"
#undef main
#define main v4d_video_main
#include "video-demo.cpp"
#undef main
#define main v4d_nanovg_main
#include "nanovg-demo.cpp"
#undef main
#define main v4d_shader_main
#include "shader-demo.cpp"
#undef main
#define main v4d_font_main
#include "font-demo.cpp"
#undef main
#define main v4d_pedestrian_main
#include "pedestrian-demo.cpp"
#undef main
#define main v4d_optflow_main
#include "optflow-demo.cpp"
#undef main
#define main v4d_beauty_main
#include "beauty-demo.cpp"
#undef main
class MontageDemoPlan : public Plan {
const cv::Size tiling_ = cv::Size(3, 3);
const cv::Size tileSz_ = cv::Size(640, 360);
const cv::Rect viewport_ = cv::Rect(0, 720, 640, 360);
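//All sub-plans render into this shared bottom-left tile (column 0, row 2); infer() copies each result out and composites them into a 3x3 grid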
std::vector<Plan*> plans_ = {
new CubeDemoPlan(viewport_),
new ManyCubesDemoPlan(viewport_),
new VideoDemoPlan(viewport_),
new NanoVGDemoPlan(viewport_),
new ShaderDemoPlan(viewport_),
new FontDemoPlan(viewport_),
new PedestrianDemoPlan(viewport_),
new BeautyDemoPlan(viewport_),
new OptflowDemoPlan(viewport_)
};
struct Frames {
std::vector<cv::UMat> results_ = std::vector<cv::UMat>(9);
cv::UMat captured;
} frames_;
cv::Size_<float> scale_;
public:
MontageDemoPlan(const cv::Size& sz) : Plan(sz) {
CV_Assert(plans_.size() == frames_.results_.size() && plans_.size() == size_t(tiling_.width * tiling_.height));
scale_ = cv::Size_<float>(float(size().width) / tileSz_.width, float(size().height) / tileSz_.height);
}
virtual void setup(cv::Ptr<V4D> window) override {
for(auto* plan : plans_) {
plan->setup(window);
}
}
virtual void infer(cv::Ptr<V4D> window) override {
window->nvgCtx()->setScale(scale_);
window->capture();
window->setDisableIO(true);
window->fb([](cv::UMat& framebuffer, const cv::Size& tileSize, cv::UMat& captured){
cv::resize(framebuffer, captured, tileSize);
}, tileSz_, frames_.captured);
for(size_t i = 0; i < plans_.size(); ++i) {
auto* plan = plans_[i];
window->fb([](cv::UMat& framebuffer, const cv::Size& tileSize, const cv::UMat& captured){
framebuffer = cv::Scalar::all(0);
captured.copyTo(framebuffer(cv::Rect(0, tileSize.height * 2, tileSize.width, tileSize.height)));
}, tileSz_, frames_.captured);
plan->infer(window);
window->fb([](const cv::UMat& framebuffer, cv::UMat& result){
framebuffer.copyTo(result);
}, frames_.results_[i]);
}
window->fb([](cv::UMat& framebuffer, const cv::Size& tileSz, const Frames& frames){
int w = tileSz.width;
int h = tileSz.height;
framebuffer = cv::Scalar::all(0);
for(size_t x = 0; x < 3; ++x)
for(size_t y = 0; y < 3; ++y)
frames.results_[x * 3 + y](cv::Rect(0, h * 2, w, h)).copyTo(framebuffer(cv::Rect(w * x, h * y, w, h)));
}, tileSz_, frames_);
window->setDisableIO(false);
window->write();
}
virtual void teardown(cv::Ptr<V4D> window) override {
for(auto* plan : plans_) {
plan->teardown(window);
}
}
};
int main(int argc, char** argv) {
if (argc != 3) {
cerr << "Usage: montage-demo <video-file> <number of extra workers>" << endl;
exit(1);
}
cv::Ptr<MontageDemoPlan> plan = new MontageDemoPlan(cv::Size(1920, 1080));
cv::Ptr<V4D> window = V4D::make(plan->size(), "Montage Demo", ALL);
//Creates a source from a file or a device
auto src = makeCaptureSource(window, argv[1]);
window->setSource(src);
//Creates a writer sink (which might be hardware accelerated)
auto sink = makeWriterSink(window, "montage-demo.mkv", 60, plan->size());
window->setSink(sink);
window->run(plan, atoi(argv[2]));
return 0;
}

@ -0,0 +1,195 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
static void draw_color_wheel(float x, float y, float w, float h, double hue) {
//color wheel drawing code taken from https://github.com/memononen/nanovg/blob/master/example/demo.c
using namespace cv::v4d::nvg;
int i;
float r0, r1, ax, ay, bx, by, cx, cy, aeps, r;
Paint paint;
save();
cx = x + w * 0.5f;
cy = y + h * 0.5f;
r1 = (w < h ? w : h) * 0.5f - 5.0f;
r0 = r1 - 20.0f;
aeps = 0.5f / r1; // half a pixel arc length in radians (2pi cancels out).
for (i = 0; i < 6; i++) {
float a0 = (float) i / 6.0f * CV_PI * 2.0f - aeps;
float a1 = (float) (i + 1.0f) / 6.0f * CV_PI * 2.0f + aeps;
beginPath();
arc(cx, cy, r0, a0, a1, NVG_CW);
arc(cx, cy, r1, a1, a0, NVG_CCW);
closePath();
ax = cx + cosf(a0) * (r0 + r1) * 0.5f;
ay = cy + sinf(a0) * (r0 + r1) * 0.5f;
bx = cx + cosf(a1) * (r0 + r1) * 0.5f;
by = cy + sinf(a1) * (r0 + r1) * 0.5f;
paint = linearGradient(ax, ay, bx, by,
cv::v4d::colorConvert(cv::Scalar((a0 / (CV_PI * 2.0)) * 180.0, 0.55 * 255.0, 255.0, 255.0), cv::COLOR_HLS2BGR),
cv::v4d::colorConvert(cv::Scalar((a1 / (CV_PI * 2.0)) * 180.0, 0.55 * 255, 255, 255), cv::COLOR_HLS2BGR));
fillPaint(paint);
fill();
}
beginPath();
circle(cx, cy, r0 - 0.5f);
circle(cx, cy, r1 + 0.5f);
strokeColor(cv::Scalar(0, 0, 0, 64));
strokeWidth(1.0f);
stroke();
// Selector
save();
translate(cx, cy);
rotate((hue/255.0) * CV_PI * 2);
// Marker on
strokeWidth(2.0f);
beginPath();
rect(r0 - 1, -3, r1 - r0 + 2, 6);
strokeColor(cv::Scalar(255, 255, 255, 192));
stroke();
paint = boxGradient(r0 - 3, -5, r1 - r0 + 6, 10, 2, 4, cv::Scalar(0, 0, 0, 128), cv::Scalar(0, 0, 0, 0));
beginPath();
rect(r0 - 2 - 10, -4 - 10, r1 - r0 + 4 + 20, 8 + 20);
rect(r0 - 2, -4, r1 - r0 + 4, 8);
pathWinding(NVG_HOLE);
fillPaint(paint);
fill();
// Center triangle
r = r0 - 6;
ax = cosf(120.0f / 180.0f * NVG_PI) * r;
ay = sinf(120.0f / 180.0f * NVG_PI) * r;
bx = cosf(-120.0f / 180.0f * NVG_PI) * r;
by = sinf(-120.0f / 180.0f * NVG_PI) * r;
beginPath();
moveTo(r, 0);
lineTo(ax, ay);
lineTo(bx, by);
closePath();
paint = linearGradient(r, 0, ax, ay, cv::v4d::colorConvert(cv::Scalar(hue, 128.0, 255.0, 255.0), cv::COLOR_HLS2BGR_FULL), cv::Scalar(255, 255, 255, 255));
fillPaint(paint);
fill();
paint = linearGradient((r + ax) * 0.5f, (0 + ay) * 0.5f, bx, by, cv::Scalar(0, 0, 0, 0), cv::Scalar(0, 0, 0, 255));
fillPaint(paint);
fill();
strokeColor(cv::Scalar(0, 0, 0, 64));
stroke();
// Select circle on triangle
ax = cosf(120.0f / 180.0f * NVG_PI) * r * 0.3f;
ay = sinf(120.0f / 180.0f * NVG_PI) * r * 0.4f;
strokeWidth(2.0f);
beginPath();
circle(ax, ay, 5);
strokeColor(cv::Scalar(255, 255, 255, 192));
stroke();
paint = radialGradient(ax, ay, 7, 9, cv::Scalar(0, 0, 0, 64), cv::Scalar(0, 0, 0, 0));
beginPath();
rect(ax - 20, ay - 20, 40, 40);
circle(ax, ay, 7);
pathWinding(NVG_HOLE);
fillPaint(paint);
fill();
restore();
restore();
}
using namespace cv::v4d;
class NanoVGDemoPlan : public Plan {
std::vector<cv::UMat> hsvChannels_;
cv::UMat rgb_;
cv::UMat bgra_;
cv::UMat hsv_;
cv::UMat hueChannel_;
inline static long cnt_ = 0;
double hue_ = 0;
public:
using Plan::Plan;
NanoVGDemoPlan(const cv::Rect& vp) : Plan(vp) {
Global::registerShared(cnt_);
}
NanoVGDemoPlan(const cv::Size& sz) : NanoVGDemoPlan(cv::Rect(0, 0, sz.width, sz.height)) {
}
void infer(cv::Ptr<V4D> window) override {
window->plain([](long& cnt, double& hue){
Global::lock(cnt);
//we use the shared frame count to calculate the current hue
double t = ++cnt / 60.0;
//nanovg hue fading depending on t
hue = (sinf(t * 0.12) + 1.0) * 127.5;
Global::unlock(cnt);
}, cnt_, hue_);
window->capture();
//Acquire the framebuffer and convert it to RGB
window->fb([](const cv::UMat &framebuffer, const cv::Rect& viewport, cv::UMat& rgb) {
cvtColor(framebuffer(viewport), rgb, cv::COLOR_BGRA2RGB);
}, viewport(), rgb_);
window->plain([](cv::UMat& rgb, cv::UMat& hsv, std::vector<cv::UMat>& hsvChannels, double& hue){
//Color-conversion from RGB to HSV
cv::cvtColor(rgb, hsv, cv::COLOR_RGB2HSV_FULL);
//Split the channels
split(hsv,hsvChannels);
//Set the current hue
hsvChannels[0].setTo(std::round(hue));
//Merge the channels back
merge(hsvChannels,hsv);
//Color-conversion from HSV to RGB
cv::cvtColor(hsv, rgb, cv::COLOR_HSV2RGB_FULL);
}, rgb_, hsv_, hsvChannels_, hue_);
//Acquire the framebuffer and convert rgb_ back into it
window->fb([](cv::UMat &framebuffer, const cv::Rect& viewport, const cv::UMat& rgb) {
cv::cvtColor(rgb, framebuffer(viewport), cv::COLOR_BGR2BGRA);
}, viewport(), rgb_);
//Render using nanovg
window->nvg([](const cv::Size &sz, const double& h) {
draw_color_wheel(sz.width - (sz.width / 5), sz.height - (sz.width / 5), sz.width / 6, sz.width / 6, h);
}, size(), hue_);
window->write();
}
};
int main(int argc, char **argv) {
if (argc != 2) {
cerr << "Usage: nanovg-demo <video-file>" << endl;
exit(1);
}
cv::Ptr<NanoVGDemoPlan> plan = new NanoVGDemoPlan(cv::Size(1280, 960));
cv::Ptr<V4D> window = V4D::make(plan->size(), "NanoVG Demo", NANOVG);
window->printSystemInfo();
auto src = makeCaptureSource(window, argv[1]);
auto sink = makeWriterSink(window, "nanovg-demo.mkv", src->fps(), plan->size());
window->setSource(src);
window->setSink(sink);
window->run(plan);
return 0;
}

@ -0,0 +1,472 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/optflow.hpp>
#include <cmath>
#include <vector>
#include <set>
#include <string>
#include <random>
#include <tuple>
#include <array>
#include <utility>
using std::vector;
using std::string;
using namespace cv::v4d;
class OptflowDemoPlan : public Plan {
public:
using Plan::Plan;
private:
//How the background will be visualized
enum BackgroundModes {
GREY,
COLOR,
VALUE,
BLACK
};
//Post-processing modes for the foreground
enum PostProcModes {
GLOW,
BLOOM,
DISABLED
};
static struct Params {
// Generate the foreground at this scale.
float fgScale_ = 0.5f;
// On every frame the foreground loses on brightness. Specifies the loss in percent.
float fgLoss_ = 1;
//Convert the background to greyscale
BackgroundModes backgroundMode_ = GREY;
// Peak thresholds for the scene change detection. Lowering them makes the detection more sensitive but
// the default should be fine.
float sceneChangeThresh_ = 0.29f;
float sceneChangeThreshDiff_ = 0.1f;
// The theoretical maximum number of points to track which is scaled by the density of detected points
// and therefore is usually much smaller.
int maxPoints_ = 300000;
// How many of the tracked points to lose intentionally, in percent.
float pointLoss_ = 20;
// The theoretical maximum size of the drawing stroke which is scaled by the area of the convex hull
// of tracked points and therefore is usually much smaller.
int maxStroke_ = 6;
// Blue, green, red and alpha. All from 0.0f to 1.0f
cv::Scalar_<float> effectColor_ = {0.4f, 0.75f, 1.0f, 0.15f};
//display on-screen FPS
bool showFps_ = true;
//Stretch frame buffer to window size
bool stretch_ = false;
//The post processing mode
PostProcModes postProcMode_ = GLOW;
// Intensity of glow or bloom defined by kernel size. The default scales with the image diagonal.
int glowKernelSize_ = 0;
//The lightness selection threshold
int bloomThresh_ = 210;
//The intensity of the bloom filter
float bloomGain_ = 3;
} params_;
struct Cache {
cv::Mat element_ = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3), cv::Point(1, 1));
vector<cv::KeyPoint> tmpKeyPoints_;
float last_movement_ = 0;
vector<cv::Point2f> hull_, prevPoints_, nextPoints_, newPoints_;
vector<cv::Point2f> upPrevPoints_, upNextPoints_;
std::vector<uchar> status_;
std::vector<float> err_;
std::random_device rd_;
std::mt19937 rng_;
cv::UMat bgr_;
cv::UMat hls_;
cv::UMat ls16_;
cv::UMat ls_;
cv::UMat bblur_;
std::vector<cv::UMat> hlsChannels_;
cv::UMat high_;
cv::UMat low_;
cv::UMat gblur_;
cv::UMat dst16_;
cv::UMat tmp_;
cv::UMat post_;
cv::UMat backgroundGrey_;
vector<cv::UMat> channels_;
cv::UMat localFg_;
} cache_;
//BGRA
cv::UMat background_, down_, frame_;
inline static cv::UMat foreground_;
//BGR
cv::UMat result_;
//GREY
cv::UMat downPrevGrey_, downNextGrey_, downMotionMaskGrey_;
vector<cv::Point2f> detectedPoints_;
cv::Ptr<cv::BackgroundSubtractor> bg_subtractor_ = cv::createBackgroundSubtractorMOG2(100, 16.0, false);
cv::Ptr<cv::FastFeatureDetector> detector_ = cv::FastFeatureDetector::create(1, false);
//Uses background subtraction to generate a "motion mask"
static void prepare_motion_mask(const cv::UMat& srcGrey, cv::UMat& motionMaskGrey, cv::Ptr<cv::BackgroundSubtractor> bg_subtractor, Cache& cache) {
bg_subtractor->apply(srcGrey, motionMaskGrey);
//Suppress speckles
cv::morphologyEx(motionMaskGrey, motionMaskGrey, cv::MORPH_OPEN, cache.element_, cv::Point(cache.element_.cols >> 1, cache.element_.rows >> 1), 2, cv::BORDER_CONSTANT, cv::morphologyDefaultBorderValue());
}
//Detect points to track
static void detect_points(const cv::UMat& srcMotionMaskGrey, vector<cv::Point2f>& points, cv::Ptr<cv::FastFeatureDetector> detector, Cache& cache) {
detector->detect(srcMotionMaskGrey, cache.tmpKeyPoints_);
points.clear();
for (const auto &kp : cache.tmpKeyPoints_) {
points.push_back(kp.pt);
}
}
//Detect extreme changes in scene content and report them
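// Note: the ratio-based heuristic below can be illustrated with example numbers:
// with movement = 0.02 and last_movement_ = 0.01, relation = 2.0,
// relM = 2.0 * log10(1.18) ~= 0.14 and relLM = 2.0 * log10(1.09) ~= 0.07.
// Both stay below sceneChangeThresh_ and their difference (~0.07) stays below
// sceneChangeThreshDiff_, so no scene change is reported. A sudden jump in
// movement inflates 'relation' and trips one of the thresholds.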
static bool detect_scene_change(const cv::UMat& srcMotionMaskGrey, const Params& params, Cache& cache) {
float movement = cv::countNonZero(srcMotionMaskGrey) / float(srcMotionMaskGrey.cols * srcMotionMaskGrey.rows);
float relation = movement > 0 && cache.last_movement_ > 0 ? std::max(movement, cache.last_movement_) / std::min(movement, cache.last_movement_) : 0;
float relM = relation * log10(1.0f + (movement * 9.0));
float relLM = relation * log10(1.0f + (cache.last_movement_ * 9.0));
bool result = !((movement > 0 && cache.last_movement_ > 0 && relation > 0)
&& (relM < params.sceneChangeThresh_ && relLM < params.sceneChangeThresh_ && fabs(relM - relLM) < params.sceneChangeThreshDiff_));
cache.last_movement_ = (cache.last_movement_ + movement) / 2.0f;
return result;
}
//Visualize the sparse optical flow
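// In outline: derive a stroke size and point budget from the convex hull of the detections,
// randomly drop a share of the previously tracked points, top the set up with new detections,
// run pyramidal Lucas-Kanade (cv::calcOpticalFlowPyrLK) between the two grey frames and
// draw a line for every point that passes the status, error, bounds and length filters.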
static void visualize_sparse_optical_flow(const cv::UMat &prevGrey, const cv::UMat &nextGrey, const vector<cv::Point2f> &detectedPoints, const Params& params, Cache& cache) {
//fewer than 5 points is a degenerate case (e.g. the corners of a video frame)
if (detectedPoints.size() > 4) {
cv::convexHull(detectedPoints, cache.hull_);
float area = cv::contourArea(cache.hull_);
//make sure the area of the point cloud is positive
if (area > 0) {
float density = (detectedPoints.size() / area);
//stroke size is biased by the area of the point cloud
float strokeSize = params.maxStroke_ * pow(area / (nextGrey.cols * nextGrey.rows), 0.33f);
//max points is biased by the density of the point cloud
size_t currentMaxPoints = ceil(density * params.maxPoints_);
//lose a percentage of random points as specified by pointLoss_
std::shuffle(cache.prevPoints_.begin(), cache.prevPoints_.end(), cache.rng_);
cache.prevPoints_.resize(ceil(cache.prevPoints_.size() * (1.0f - (params.pointLoss_ / 100.0f))));
//calculate how many newly detected points to add
size_t copyn = std::min(detectedPoints.size(), (size_t(std::ceil(currentMaxPoints)) - cache.prevPoints_.size()));
if (cache.prevPoints_.size() < currentMaxPoints) {
std::copy(detectedPoints.begin(), detectedPoints.begin() + copyn, std::back_inserter(cache.prevPoints_));
}
//calculate the sparse optical flow
cv::calcOpticalFlowPyrLK(prevGrey, nextGrey, cache.prevPoints_, cache.nextPoints_, cache.status_, cache.err_);
cache.newPoints_.clear();
if (cache.prevPoints_.size() > 1 && cache.nextPoints_.size() > 1) {
//scale the points to original size
cache.upNextPoints_.clear();
cache.upPrevPoints_.clear();
for (cv::Point2f pt : cache.prevPoints_) {
cache.upPrevPoints_.push_back(pt / params.fgScale_);
}
for (cv::Point2f pt : cache.nextPoints_) {
cache.upNextPoints_.push_back(pt / params.fgScale_);
}
using namespace cv::v4d::nvg;
//start drawing
beginPath();
strokeWidth(strokeSize);
strokeColor(params.effectColor_ * 255.0);
for (size_t i = 0; i < cache.prevPoints_.size(); i++) {
if (cache.status_[i] == 1 //point was found in prev and new set
&& cache.err_[i] < (1.0 / density) //with a higher density be more sensitive to the feature error
&& cache.upNextPoints_[i].y >= 0 && cache.upNextPoints_[i].x >= 0 //check bounds
&& cache.upNextPoints_[i].y < nextGrey.rows / params.fgScale_ && cache.upNextPoints_[i].x < nextGrey.cols / params.fgScale_ //check bounds
) {
float len = hypot(fabs(cache.upPrevPoints_[i].x - cache.upNextPoints_[i].x), fabs(cache.upPrevPoints_[i].y - cache.upNextPoints_[i].y));
//upper and lower bound of the flow vector length
if (len > 0 && len < sqrt(area)) {
//collect new points
cache.newPoints_.push_back(cache.nextPoints_[i]);
//the actual drawing operations
moveTo(cache.upNextPoints_[i].x, cache.upNextPoints_[i].y);
lineTo(cache.upPrevPoints_[i].x, cache.upPrevPoints_[i].y);
}
}
}
//end drawing
stroke();
}
cache.prevPoints_ = cache.newPoints_;
}
}
}
//Bloom post-processing effect
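// In effect: build a brightness mask from the product of lightness and inverted saturation
// (bright, desaturated pixels score highest), threshold and blur the mask, then blend it
// back additively, i.e. roughly dst = src + gain * blur(threshold((L * (255 - S)) / 255)).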
static void bloom(const cv::UMat& src, cv::UMat &dst, Cache& cache, int ksize = 3, int threshValue = 235, float gain = 4) {
//remove the alpha channel
cv::cvtColor(src, cache.bgr_, cv::COLOR_BGRA2BGR);
//convert to hls
cv::cvtColor(cache.bgr_, cache.hls_, cv::COLOR_BGR2HLS);
//split channels
cv::split(cache.hls_, cache.hlsChannels_);
//invert the saturation channel
cv::bitwise_not(cache.hlsChannels_[2], cache.hlsChannels_[2]);
//multiply lightness and inverted saturation
cv::multiply(cache.hlsChannels_[1], cache.hlsChannels_[2], cache.ls16_, 1, CV_16U);
//normalize
cv::divide(cache.ls16_, cv::Scalar(255.0), cache.ls_, 1, CV_8U);
//binary threshold at threshValue
cv::threshold(cache.ls_, cache.bblur_, threshValue, 255, cv::THRESH_BINARY);
//blur
cv::boxFilter(cache.bblur_, cache.bblur_, -1, cv::Size(ksize, ksize), cv::Point(-1,-1), true, cv::BORDER_REPLICATE);
//convert to BGRA
cv::cvtColor(cache.bblur_, cache.bblur_, cv::COLOR_GRAY2BGRA);
//add src and the blurred L-S-product according to gain
addWeighted(src, 1.0, cache.bblur_, gain, 0, dst);
}
//Glow post-processing effect
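// In effect a "screen" blend of the image with a blurred copy of itself, computed on
// inverted values: dst = 255 - ((255 - src) * blur(255 - src)) / 255.
// This brightens highlights while leaving dark areas mostly intact.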
static void glow_effect(const cv::UMat &src, cv::UMat &dst, const int ksize, Cache& cache) {
cv::bitwise_not(src, dst);
//Resize for some extra performance
cv::resize(dst, cache.low_, cv::Size(), 0.5, 0.5);
//Cheap blur
cv::boxFilter(cache.low_, cache.gblur_, -1, cv::Size(ksize, ksize), cv::Point(-1,-1), true, cv::BORDER_REPLICATE);
//Back to original size
cv::resize(cache.gblur_, cache.high_, src.size());
//Multiply the src image with a blurred version of itself
cv::multiply(dst, cache.high_, cache.dst16_, 1, CV_16U);
//Normalize and convert back to CV_8U
cv::divide(cache.dst16_, cv::Scalar::all(255.0), dst, 1, CV_8U);
cv::bitwise_not(dst, dst);
}
//Compose the different layers into the final image
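// The shared foreground acts as an accumulation buffer: each frame it is darkened by
// fgLoss_ percent of full brightness before the fresh flow lines are added, so old
// strokes fade out over roughly 100 / fgLoss_ frames.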
static void composite_layers(cv::UMat& background, cv::UMat& foreground, const cv::UMat& frameBuffer, cv::UMat& dst, const Params& params, Cache& cache) {
//Lose a bit of foreground brightness based on fgLoss_
cv::subtract(foreground, cv::Scalar::all(255.0f * (params.fgLoss_ / 100.0f)), foreground);
//Add the current framebuffer on top of the foreground
cv::add(foreground, frameBuffer, foreground);
//Depending on backgroundMode_ prepare the background in different ways
switch (params.backgroundMode_) {
case GREY:
cv::cvtColor(background, cache.backgroundGrey_, cv::COLOR_BGRA2GRAY);
cv::cvtColor(cache.backgroundGrey_, background, cv::COLOR_GRAY2BGRA);
break;
case VALUE:
cv::cvtColor(background, cache.tmp_, cv::COLOR_BGRA2BGR);
cv::cvtColor(cache.tmp_, cache.tmp_, cv::COLOR_BGR2HSV);
split(cache.tmp_, cache.channels_);
cv::cvtColor(cache.channels_[2], background, cv::COLOR_GRAY2BGRA);
break;
case COLOR:
break;
case BLACK:
background = cv::Scalar::all(0);
break;
default:
break;
}
//Depending on postProcMode_ perform post-processing
switch (params.postProcMode_) {
case GLOW:
glow_effect(foreground, cache.post_, params.glowKernelSize_, cache);
break;
case BLOOM:
bloom(foreground, cache.post_, cache, params.glowKernelSize_, params.bloomThresh_, params.bloomGain_);
break;
case DISABLED:
foreground.copyTo(cache.post_);
break;
default:
break;
}
//Add background and post-processed foreground into dst
cv::add(background, cache.post_, dst);
}
public:
OptflowDemoPlan(const cv::Rect& viewport) : Plan(viewport) {
Global::registerShared(params_);
Global::registerShared(foreground_);
}
OptflowDemoPlan(const cv::Size& sz) : OptflowDemoPlan(cv::Rect(0,0, sz.width, sz.height)) {
}
virtual void gui(cv::Ptr<V4D> window) override {
window->imgui([](cv::Ptr<V4D> win, ImGuiContext* ctx, Params& params){
using namespace ImGui;
SetCurrentContext(ctx);
Begin("Effects");
Text("Foreground");
SliderFloat("Scale", &params.fgScale_, 0.1f, 4.0f);
SliderFloat("Loss", &params.fgLoss_, 0.1f, 99.9f);
Text("Background");
thread_local const char* bgm_items[4] = {"Grey", "Color", "Value", "Black"};
thread_local int* bgm = (int*)&params.backgroundMode_;
ListBox("Mode", bgm, bgm_items, 4, 4);
Text("Points");
SliderInt("Max. Points", &params.maxPoints_, 10, 1000000);
SliderFloat("Point Loss", &params.pointLoss_, 0.0f, 100.0f);
Text("Optical flow");
SliderInt("Max. Stroke Size", &params.maxStroke_, 1, 100);
ColorPicker4("Color", params.effectColor_.val);
End();
Begin("Post Processing");
thread_local const char* ppm_items[3] = {"Glow", "Bloom", "None"};
thread_local int* ppm = (int*)&params.postProcMode_;
ListBox("Effect",ppm, ppm_items, 3, 3);
SliderInt("Kernel Size",&params.glowKernelSize_, 1, 63);
SliderFloat("Gain", &params.bloomGain_, 0.1f, 20.0f);
End();
Begin("Settings");
Text("Scene Change Detection");
SliderFloat("Threshold", &params.sceneChangeThresh_, 0.1f, 1.0f);
SliderFloat("Threshold Diff", &params.sceneChangeThreshDiff_, 0.1f, 1.0f);
End();
Begin("Window");
if(Checkbox("Show FPS", &params.showFps_)) {
win->setShowFPS(params.showFps_);
}
if(Checkbox("Stretch", &params.stretch_)) {
win->setStretching(params.stretch_);
}
if(Button("Fullscreen")) {
win->setFullscreen(!win->isFullscreen());
};
if(Button("Offscreen")) {
win->setVisible(!win->isVisible());
};
End();
}, params_);
}
virtual void setup(cv::Ptr<V4D> window) override {
cache_.rng_ = std::mt19937(cache_.rd_());
window->setStretching(params_.stretch_);
window->once([](const cv::Size& sz, Params& params, cv::UMat& foreground){
int diag = hypot(double(sz.width), double(sz.height));
params.glowKernelSize_ = std::max(int(diag / 150 % 2 == 0 ? diag / 150 + 1 : diag / 150), 1);
params.effectColor_[3] /= (Global::workers_started() - 1);
foreground.create(sz, CV_8UC4);
foreground = cv::Scalar::all(0);
}, size(), params_, foreground_);
}
virtual void infer(cv::Ptr<V4D> window) override {
window->capture();
window->fb([](const cv::UMat& framebuffer, const cv::Rect& viewport, cv::UMat& frame) {
framebuffer(viewport).copyTo(frame);
}, viewport(), frame_);
window->plain([](const cv::UMat& frame, cv::UMat& background) {
frame.copyTo(background);
}, frame_, background_);
window->fb([](const cv::UMat& framebuffer, const cv::Rect& viewport, cv::UMat& d, cv::UMat& b, const Params& params) {
Params p = Global::safe_copy(params);
//resize to foreground scale
cv::resize(framebuffer(viewport), d, cv::Size(viewport.width * p.fgScale_, viewport.height * p.fgScale_));
//save video background
framebuffer(viewport).copyTo(b);
}, viewport(), down_, background_, params_);
window->plain([](const cv::UMat& d, cv::UMat& dng, cv::UMat& dmmg, std::vector<cv::Point2f>& dp, cv::Ptr<cv::BackgroundSubtractor>& bg_subtractor, cv::Ptr<cv::FastFeatureDetector>& detector, Cache& cache){
cv::cvtColor(d, dng, cv::COLOR_BGRA2GRAY);
//Subtract the background to create a motion mask
prepare_motion_mask(dng, dmmg, bg_subtractor, cache);
//Detect trackable points in the motion mask
detect_points(dmmg, dp, detector, cache);
}, down_, downNextGrey_, downMotionMaskGrey_, detectedPoints_, bg_subtractor_, detector_, cache_);
window->nvg([](const cv::UMat& dmmg, const cv::UMat& dpg, const cv::UMat& dng, const std::vector<cv::Point2f>& dp, const Params& params, Cache& cache) {
const Params p = Global::safe_copy(params);
cv::v4d::nvg::clear();
if (!dpg.empty()) {
//We don't want the algorithm to get out of hand when there is a scene change, so we suppress it when we detect one.
if (!detect_scene_change(dmmg, p, cache)) {
//Visualize the sparse optical flow using nanovg
visualize_sparse_optical_flow(dpg, dng, dp, p, cache);
}
}
}, downMotionMaskGrey_, downPrevGrey_, downNextGrey_, detectedPoints_, params_, cache_);
window->plain([](cv::UMat& dpg, const cv::UMat& dng) {
dpg = dng.clone();
}, downPrevGrey_, downNextGrey_);
window->fb([](const cv::UMat& framebuffer, const cv::Rect& viewport, cv::UMat& frame) {
framebuffer(viewport).copyTo(frame);
}, viewport(), frame_);
window->plain([](cv::UMat& frame, cv::UMat& background, cv::UMat& foreground, const Params& params, Cache& cache) {
//Put it all together (OpenCL)
Global::Scope scope(foreground);
copy_shared(foreground, cache.localFg_);
composite_layers(background, cache.localFg_, frame, frame, params, cache);
copy_shared(cache.localFg_, foreground);
}, frame_, background_, foreground_, params_, cache_);
window->fb([](cv::UMat& framebuffer, const cv::Rect& viewport, const cv::UMat& frame) {
frame.copyTo(framebuffer(viewport));
}, viewport(), frame_);
window->write();
}
};
OptflowDemoPlan::Params OptflowDemoPlan::params_;
int main(int argc, char **argv) {
if (argc != 2) {
std::cerr << "Usage: optflow-demo <input-video-file>" << endl;
exit(1);
}
cv::Ptr<OptflowDemoPlan> plan = new OptflowDemoPlan(cv::Size(1280, 720));
cv::Ptr<V4D> window = V4D::make(plan->size(), "Sparse Optical Flow Demo", ALL);
auto src = makeCaptureSource(window, argv[1]);
auto sink = makeWriterSink(window, "optflow-demo.mkv", src->fps(), plan->size());
window->setSource(src);
window->setSink(sink);
window->run(plan, 5);
return 0;
}

@@ -0,0 +1,292 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
#include <opencv2/tracking.hpp>
#include <opencv2/objdetect.hpp>
#include <string>
using std::vector;
using std::string;
using namespace cv::v4d;
class PedestrianDemoPlan : public Plan {
public:
using Plan::Plan;
private:
unsigned long diag_ = 0;
cv::Size downSize_;
cv::Size_<float> scale_;
int blurKernelSize_ = 0;
struct Cache {
cv::UMat blur_;
cv::UMat local_;
uint64_t fps_;
} cache_;
//BGRA
cv::UMat background_;
//RGB
cv::UMat videoFrame_, videoFrameDown_;
//GREY
cv::UMat videoFrameDownGrey_;
struct Detection {
//detected pedestrian locations rectangles
std::vector<cv::Rect> locations_;
//detected pedestrian locations as boxes
vector<vector<double>> boxes_;
//probability of detected object being a pedestrian - currently always set to 1.0
vector<double> probs_;
//Parameters for the KCF tracker
cv::TrackerKCF::Params params_;
//KCF tracker used instead of continuous detection
cv::Ptr<cv::Tracker> tracker_;
bool trackerInitialized_ = false;
//If tracking fails re-detect
bool redetect_ = true;
//Descriptor used for pedestrian detection
cv::HOGDescriptor hog_;
} detection_;
inline static cv::Rect tracked_ = cv::Rect(0,0,1,1);
constexpr static auto doRedetect_ = [](const Detection& detection){ return !detection.trackerInitialized_ || detection.redetect_; };
constexpr static auto dontRedetect_ = [](const Detection& detection){ return detection.trackerInitialized_ && !detection.redetect_; };
//adapted from cv::dnn_objdetect::InferBbox
static inline bool pair_comparator(std::pair<double, size_t> l1, std::pair<double, size_t> l2) {
return l1.first > l2.first;
}
//adapted from cv::dnn_objdetect::InferBbox
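// IoU = intersection / union for boxes given as {xmin, ymin, xmax, ymax}.
// For example, {0,0,10,10} and {5,5,15,15} intersect in a 5x5 patch,
// so IoU = 25 / (100 + 100 - 25) = 1/7 ~= 0.14.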
static void intersection_over_union(std::vector<std::vector<double> > *boxes, std::vector<double> *base_box, std::vector<double> *iou) {
double g_xmin = (*base_box)[0];
double g_ymin = (*base_box)[1];
double g_xmax = (*base_box)[2];
double g_ymax = (*base_box)[3];
double base_box_w = g_xmax - g_xmin;
double base_box_h = g_ymax - g_ymin;
for (size_t b = 0; b < (*boxes).size(); ++b) {
double xmin = std::max((*boxes)[b][0], g_xmin);
double ymin = std::max((*boxes)[b][1], g_ymin);
double xmax = std::min((*boxes)[b][2], g_xmax);
double ymax = std::min((*boxes)[b][3], g_ymax);
// Intersection
double w = std::max(static_cast<double>(0.0), xmax - xmin);
double h = std::max(static_cast<double>(0.0), ymax - ymin);
// Union
double test_box_w = (*boxes)[b][2] - (*boxes)[b][0];
double test_box_h = (*boxes)[b][3] - (*boxes)[b][1];
double inter_ = w * h;
double union_ = test_box_h * test_box_w + base_box_h * base_box_w - inter_;
(*iou)[b] = inter_ / (union_ + 1e-7);
}
}
//adapted from cv::dnn_objdetect::InferBbox
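// Greedy non-maximum suppression: visit the boxes in descending score order and, for each
// one, mark every lower-scored box whose IoU with it exceeds 'threshold' as suppressed.
// The default threshold of 0.1 is deliberately aggressive - almost any overlap suppresses
// the lower-scored box - which suits this demo since only one pedestrian is tracked.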
static std::vector<bool> non_maximal_suppression(std::vector<std::vector<double> > *boxes, std::vector<double> *probs, const double threshold = 0.1) {
std::vector<bool> keep(((*probs).size()));
std::fill(keep.begin(), keep.end(), true);
std::vector<size_t> prob_args_sorted((*probs).size());
std::vector<std::pair<double, size_t> > temp_sort((*probs).size());
for (size_t tidx = 0; tidx < (*probs).size(); ++tidx) {
temp_sort[tidx] = std::make_pair((*probs)[tidx], static_cast<size_t>(tidx));
}
std::sort(temp_sort.begin(), temp_sort.end(), pair_comparator);
for (size_t idx = 0; idx < temp_sort.size(); ++idx) {
prob_args_sorted[idx] = temp_sort[idx].second;
}
for (std::vector<size_t>::iterator itr = prob_args_sorted.begin(); itr != prob_args_sorted.end() - 1; ++itr) {
size_t idx = itr - prob_args_sorted.begin();
std::vector<double> iou_(prob_args_sorted.size() - idx - 1);
std::vector<std::vector<double> > temp_boxes(iou_.size());
for (size_t bb = 0; bb < temp_boxes.size(); ++bb) {
std::vector<double> temp_box(4);
for (size_t b = 0; b < 4; ++b) {
temp_box[b] = (*boxes)[prob_args_sorted[idx + bb + 1]][b];
}
temp_boxes[bb] = temp_box;
}
intersection_over_union(&temp_boxes, &(*boxes)[prob_args_sorted[idx]], &iou_);
for (std::vector<double>::iterator _itr = iou_.begin(); _itr != iou_.end(); ++_itr) {
size_t iou_idx = _itr - iou_.begin();
if (*_itr > threshold) {
keep[prob_args_sorted[idx + iou_idx + 1]] = false;
}
}
}
return keep;
}
//post process and add layers together
static void composite_layers(const cv::UMat background, const cv::UMat foreground, cv::UMat dst, int blurKernelSize, Cache& cache) {
cv::boxFilter(foreground, cache.blur_, -1, cv::Size(blurKernelSize, blurKernelSize), cv::Point(-1,-1), true, cv::BORDER_REPLICATE);
cv::add(background, cache.blur_, dst);
}
public:
PedestrianDemoPlan(const cv::Rect& viewport) : Plan(viewport) {
Global::registerShared(tracked_);
}
PedestrianDemoPlan(const cv::Size& sz) : PedestrianDemoPlan(cv::Rect(0,0,sz.width, sz.height)) {
}
void setup(cv::Ptr<V4D> window) override {
int w = size().width;
int h = size().height;
diag_ = hypot(w, h);
downSize_ = { 640 , 360 };
scale_ = { float(w) / downSize_.width, float(h) / downSize_.height };
blurKernelSize_ = std::max(int(diag_ / 200 % 2 == 0 ? diag_ / 200 + 1 : diag_ / 200), 1);
window->plain([](Detection& detection){
detection.params_.desc_pca = cv::TrackerKCF::GRAY;
detection.params_.compress_feature = false;
detection.params_.compressed_size = 1;
detection.tracker_ = cv::TrackerKCF::create(detection.params_);
detection.hog_.setSVMDetector(cv::HOGDescriptor::getDefaultPeopleDetector());
}, detection_);
}
void infer(cv::Ptr<V4D> window) override {
window->branch(always_);
{
window->capture();
window->fb([](const cv::UMat& frameBuffer, const cv::Rect& viewport, cv::UMat& videoFrame){
//copy video frame
cvtColor(frameBuffer(viewport), videoFrame, cv::COLOR_BGRA2RGB);
}, viewport(), videoFrame_);
window->plain([](const cv::Size downSize, const cv::UMat& videoFrame, cv::UMat& videoFrameDown, cv::UMat& videoFrameDownGrey, cv::UMat& background){
//downsample the video frame for HOG detection
cv::resize(videoFrame, videoFrameDown, downSize);
cv::cvtColor(videoFrameDown, videoFrameDownGrey, cv::COLOR_RGB2GRAY);
cv::cvtColor(videoFrame, background, cv::COLOR_RGB2BGRA);
}, downSize_, videoFrame_, videoFrameDown_, videoFrameDownGrey_, background_);
}
window->endbranch(always_);
//Try to track the pedestrian (if we currently are tracking one), else re-detect using HOG descriptor
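// The two predicates split the pipeline into mutually exclusive paths: the expensive HOG
// detection only runs while the tracker is uninitialized or has flagged a failure,
// otherwise the cheap KCF update runs. The surrounding always_ branches execute on
// every iteration regardless.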
window->branch(doRedetect_, detection_);
{
window->plain([](cv::UMat& videoFrameDownGrey, Detection& detection, cv::Rect& tracked, Cache& cache){
detection.redetect_ = false;
//Detect pedestrians
detection.hog_.detectMultiScale(videoFrameDownGrey, detection.locations_, 0, cv::Size(), cv::Size(), 1.15, 2.0, true);
if (!detection.locations_.empty()) {
detection.boxes_.clear();
detection.probs_.clear();
//collect all found boxes
for (const auto &rect : detection.locations_) {
detection.boxes_.push_back( { double(rect.x), double(rect.y), double(rect.x + rect.width), double(rect.y + rect.height) });
detection.probs_.push_back(1.0);
}
//use nms to filter overlapping boxes (https://medium.com/analytics-vidhya/non-max-suppression-nms-6623e6572536)
vector<bool> keep = non_maximal_suppression(&detection.boxes_, &detection.probs_, 0.1);
for (size_t i = 0; i < keep.size(); ++i) {
if (keep[i]) {
Global::Scope scope(tracked);
//only track the first pedestrian found
tracked = detection.locations_[i];
break;
}
}
if(!detection.trackerInitialized_) {
Global::Scope scope(tracked);
//initialize the tracker once
detection.tracker_->init(videoFrameDownGrey, tracked);
detection.trackerInitialized_ = true;
}
}
}, videoFrameDownGrey_, detection_, tracked_, cache_);
}
window->endbranch(doRedetect_, detection_);
window->branch(dontRedetect_, detection_);
{
window->plain([](cv::UMat& videoFrameDownGrey, Detection& detection, const uint64_t& frameCnt, cv::Rect& tracked, Cache& cache){
Global::Scope scope(tracked);
cv::Rect oldTracked = tracked;
if((cache.fps_ == 0 || frameCnt % cache.fps_ == 0) || !detection.tracker_->update(videoFrameDownGrey, tracked)) {
cache.fps_ = uint64_t(std::ceil(Global::fps()));
//re-detect periodically (about once per second) or when tracking fails
detection.redetect_ = true;
}
tracked.x = (oldTracked.x + tracked.x) / 2.0;
tracked.y = (oldTracked.y + tracked.y) / 2.0;
tracked.width = (oldTracked.width + tracked.width) / 2.0;
tracked.height = (oldTracked.height+ tracked.height) / 2.0;
}, videoFrameDownGrey_, detection_, window->frameCount(), tracked_, cache_);
}
window->endbranch(dontRedetect_, detection_);
window->branch(always_);
{
//Draw an ellipse around the tracked pedestrian
window->nvg([](const cv::Size& sz, const cv::Size_<float> scale, cv::Rect& tracked) {
using namespace cv::v4d::nvg;
float width;
float height;
float cx;
float cy;
{
Global::Scope scope(tracked);
width = tracked.width * scale.width;
height = tracked.height * scale.height;
cx = (scale.width * tracked.x + (width / 2.0));
cy = (scale.height * tracked.y + ((height) / 2.0));
}
clear();
beginPath();
strokeWidth(std::fmax(5, sz.width / 960.0));
strokeColor(cv::v4d::colorConvert(cv::Scalar(0, 127, 255, 200), cv::COLOR_HLS2BGR));
ellipse(cx, cy, (width), (height));
stroke();
}, size(), scale_, tracked_);
//Put it all together
window->fb([](cv::UMat& frameBuffer, const cv::Rect& viewport, cv::UMat& bg, int blurKernelSize, Cache& cache){
composite_layers(bg, frameBuffer(viewport), frameBuffer(viewport), blurKernelSize, cache);
}, viewport(), background_, blurKernelSize_, cache_);
window->write();
}
window->endbranch(always_);
}
};
int main(int argc, char **argv) {
if (argc != 2) {
std::cerr << "Usage: pedestrian-demo <video-input>" << endl;
exit(1);
}
cv::Ptr<PedestrianDemoPlan> plan = new PedestrianDemoPlan(cv::Size(1280, 720));
cv::Ptr<V4D> window = V4D::make(plan->size(), "Pedestrian Demo", ALL);
window->printSystemInfo();
auto src = makeCaptureSource(window, argv[1]);
auto sink = makeWriterSink(window, "pedestrian-demo.mkv", src->fps(), plan->size());
window->setSource(src);
window->setSink(sink);
window->run(plan);
return 0;
}

@@ -0,0 +1,30 @@
#include <opencv2/v4d/v4d.hpp>
using namespace cv;
using namespace cv::v4d;
class RenderOpenGLPlan : public Plan {
public:
RenderOpenGLPlan(const cv::Size& sz) : Plan(sz) {
}
void setup(Ptr<V4D> window) override {
window->gl([]() {
//Sets the clear color to blue
glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
});
}
void infer(Ptr<V4D> window) override {
window->gl([]() {
//Clears the screen. The clear color and other GL-states are preserved between context-calls.
glClear(GL_COLOR_BUFFER_BIT);
});
}
};
int main() {
Ptr<RenderOpenGLPlan> plan = new RenderOpenGLPlan(cv::Size(960, 960));
Ptr<V4D> window = V4D::make(plan->size(), "GL Blue Screen");
window->run(plan);
}

@@ -0,0 +1,67 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
#include <opencv2/v4d/scene.hpp>
using namespace cv::v4d;
class SceneDemoPlan : public Plan {
const string filename_ = "gear.glb";
gl::Scene scene_;
gl::Scene pcaScene_;
std::vector<cv::Point3f> pointCloud_;
struct Transform {
cv::Vec3f translate_;
cv::Vec3f rotation_;
cv::Vec3f scale_;
cv::Matx44f projection_;
cv::Matx44f view_;
cv::Matx44f model_;
} transform_;
public:
using Plan::Plan;
void setup(cv::Ptr<V4D> window) override {
window->gl([](gl::Scene& scene, const string& filename){
CV_Assert(scene.load(filename));
}, scene_, filename_);
}
void infer(cv::Ptr<V4D> window) override {
window->gl(0,[](const int32_t& ctx, const cv::Rect& viewport, gl::Scene& scene, std::vector<cv::Point3f>& pointCloud, Transform& transform){
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
double progress = (cv::getTickCount() / cv::getTickFrequency()) / 5.0;
float angle = fmod(double(cv::getTickCount()) / double(cv::getTickFrequency()), 2 * M_PI);
int m = int(progress) % 3;
float scale = scene.autoScale();
cv::Vec3f center = scene.autoCenter();
transform.rotation_ = {0, angle, 0};
transform.translate_ = {-center[0], -center[1], -center[2]};
transform.scale_ = { scale, scale, scale };
transform.projection_ = gl::perspective(45.0f * (CV_PI/180), float(viewport.width) / viewport.height, 0.1f, 100.0f);
transform.view_ = gl::lookAt(cv::Vec3f(0.0f, 0.0f, 3.0f), cv::Vec3f(0.0f, 0.0f, 0.0f), cv::Vec3f(0.0f, 1.0f, 0.0f));
transform.model_ = gl::modelView(transform.translate_, transform.rotation_, transform.scale_);
scene.setMode(static_cast<gl::Scene::RenderMode>(m));
scene.render(viewport, transform.projection_, transform.view_, transform.model_);
}, viewport(), scene_, pointCloud_, transform_);
window->write();
}
};
int main() {
cv::Ptr<V4D> window = V4D::make(cv::Size(1280, 720), "Scene Demo", IMGUI);
cv::Ptr<SceneDemoPlan> plan = new SceneDemoPlan(cv::Size(1280, 720));
//Creates a writer sink (which might be hardware accelerated)
auto sink = makeWriterSink(window, "scene-demo.mkv", 60, plan->size());
window->setSink(sink);
window->run(plan, 3);
return 0;
}

@@ -0,0 +1,348 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/v4d/v4d.hpp>
using namespace cv::v4d;
class ShaderDemoPlan : public Plan {
public:
using Plan::Plan;
//A value greater than 1 enables experimental tiling with one context per tile.
constexpr static size_t TILING_ = 1;
constexpr static size_t NUM_CONTEXTS_ = TILING_ * TILING_;
private:
// vertex positions (x, y, z)
constexpr static float vertices[12] = {
// x y z
-1.0f, -1.0f, -0.0f, 1.0f, 1.0f, -0.0f, -1.0f, 1.0f, -0.0f, 1.0f, -1.0f, -0.0f };
constexpr static unsigned int indices[6] = {
// 2---,1
// | .' |
// 0'---3
0, 1, 2, 0, 3, 1 };
static struct Params {
/* Mandelbrot control parameters */
// Red, green, blue and alpha. All from 0.0f to 1.0f
float baseColorVal_[4] = {0.2, 0.6, 1.0, 0.8};
//contrast boost
int contrastBoost_ = 255; //0.0-255
//max fractal iterations
int maxIterations_ = 50000;
//center x coordinate
float centerX_ = -0.466;
//center y coordinate
float centerY_ = 0.57052;
float zoomFactor_ = 1.0;
float currentZoom_ = 4.0;
bool zoomIn = true;
float zoomIncr_ = -currentZoom_ / 1000;
bool manualNavigation_ = false;
} params_;
struct Handles {
/* GL uniform handles */
GLint baseColorHdl_;
GLint contrastBoostHdl_;
GLint maxIterationsHdl_;
GLint centerXHdl_;
GLint centerYHdl_;
GLint offsetXHdl_;
GLint offsetYHdl_;
GLint currentZoomHdl_;
GLint resolutionHdl_;
/* Shader program handle */
GLuint shaderHdl_;
/* Object handles */
GLuint vao_;
GLuint vbo_, ebo_;
} handles_[NUM_CONTEXTS_];
cv::Rect viewports_[NUM_CONTEXTS_];
struct Cache {
cv::UMat down;
cv::UMat up;
cv::UMat blur;
cv::UMat dst16;
} cache_;
//easing function for the bungee zoom
static float easeInOutQuint(float x) {
return x < 0.5f ? 16.0f * x * x * x * x * x : 1.0f - std::pow(-2.0f * x + 2.0f, 5.0f) / 2.0f;
}
//Load objects and buffers
static void load_buffers(Handles& handles) {
GL_CHECK(glGenVertexArrays(1, &handles.vao_));
GL_CHECK(glBindVertexArray(handles.vao_));
GL_CHECK(glGenBuffers(1, &handles.vbo_));
GL_CHECK(glGenBuffers(1, &handles.ebo_));
GL_CHECK(glBindBuffer(GL_ARRAY_BUFFER, handles.vbo_));
GL_CHECK(glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW));
GL_CHECK(glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, handles.ebo_));
GL_CHECK(glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW));
GL_CHECK(glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*) 0));
GL_CHECK(glEnableVertexAttribArray(0));
GL_CHECK(glBindBuffer(GL_ARRAY_BUFFER, 0));
GL_CHECK(glBindVertexArray(0));
}
//mandelbrot shader code adapted from my own project: https://github.com/kallaballa/FractalDive#after
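// The fragment shader below uses the classic escape-time algorithm: iterate
// z_{n+1} = z_n^2 + c, with c derived from the fragment coordinate, and count the steps
// until |z|^2 exceeds 4. Points that never escape within max_iterations are treated as
// inside the set and rendered transparent.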
static GLuint load_shader() {
#if !defined(OPENCV_V4D_USE_ES3)
const string shaderVersion = "330";
#else
const string shaderVersion = "300 es";
#endif
const string vert =
" #version " + shaderVersion
+ R"(
in vec4 position;
void main()
{
gl_Position = vec4(position.xyz, 1.0);
})";
const string frag =
" #version " + shaderVersion
+ R"(
precision highp float;
out vec4 outColor;
uniform vec4 base_color;
uniform int contrast_boost;
uniform int max_iterations;
uniform float current_zoom;
uniform float center_y;
uniform float center_x;
uniform float offset_y;
uniform float offset_x;
uniform vec2 resolution;
int get_iterations()
{
float pointr = (((gl_FragCoord.x / resolution[0]) - 0.5f) * current_zoom + center_x);
float pointi = (((gl_FragCoord.y / resolution[1]) - 0.5f) * current_zoom + center_y);
const float four = 4.0f;
int iterations = 0;
float zi = 0.0f;
float zr = 0.0f;
float zrsqr = 0.0f;
float zisqr = 0.0f;
while (iterations < max_iterations && zrsqr + zisqr < four) {
//equals following line as a consequence of binomial expansion: zi = (((zr + zi)*(zr + zi)) - zrsqr) - zisqr
zi = (zr + zr) * zi;
zi += pointi;
zr = (zrsqr - zisqr) + pointr;
zrsqr = zr * zr;
zisqr = zi * zi;
++iterations;
}
return iterations;
}
void mandelbrot()
{
int iter = get_iterations();
if (iter < max_iterations) {
float iterations = float(iter) / float(max_iterations);
float cb = float(contrast_boost);
float logBase;
if(iter % 2 == 0)
logBase = 25.0f;
else
logBase = 50.0f;
float logDiv = log2(logBase);
float colorBoost = iterations * cb;
outColor = vec4(log2((logBase - 1.0f) * base_color[0] * colorBoost + 1.0f)/logDiv,
log2((logBase - 1.0f) * base_color[1] * colorBoost + 1.0f)/logDiv,
log2((logBase - 1.0f) * base_color[2] * colorBoost + 1.0f)/logDiv,
base_color[3]);
} else {
outColor = vec4(0,0,0,0);
}
}
void main()
{
mandelbrot();
})";
unsigned int handles[3];
cv::v4d::initShader(handles, vert.c_str(), frag.c_str(), "fragColor");
return handles[0];
}
//Initialize shaders, objects, buffers and uniforms
static void init_scene(const cv::Rect& viewport, Handles& handles) {
GL_CHECK(glEnable(GL_BLEND));
GL_CHECK(glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA));
handles.shaderHdl_ = load_shader();
load_buffers(handles);
handles.baseColorHdl_ = glGetUniformLocation(handles.shaderHdl_, "base_color");
handles.contrastBoostHdl_ = glGetUniformLocation(handles.shaderHdl_, "contrast_boost");
handles.maxIterationsHdl_ = glGetUniformLocation(handles.shaderHdl_, "max_iterations");
handles.currentZoomHdl_ = glGetUniformLocation(handles.shaderHdl_, "current_zoom");
handles.centerXHdl_ = glGetUniformLocation(handles.shaderHdl_, "center_x");
handles.centerYHdl_ = glGetUniformLocation(handles.shaderHdl_, "center_y");
handles.offsetXHdl_ = glGetUniformLocation(handles.shaderHdl_, "offset_x");
handles.offsetYHdl_ = glGetUniformLocation(handles.shaderHdl_, "offset_y");
handles.resolutionHdl_ = glGetUniformLocation(handles.shaderHdl_, "resolution");
GL_CHECK(glViewport(viewport.x, viewport.y, viewport.width, viewport.height));
}
//Free OpenGL resources
static void destroy_scene(Handles& handles) {
glDeleteShader(handles.shaderHdl_);
glDeleteBuffers(1, &handles.vbo_);
glDeleteBuffers(1, &handles.ebo_);
glDeleteVertexArrays(1, &handles.vao_);
}
//Render the mandelbrot fractal on top of a video
static void render_scene(const cv::Size& sz, const cv::Rect& viewport, Params& params, Handles& handles) {
GL_CHECK(glViewport(viewport.x, viewport.y, viewport.width, viewport.height));
//bungee zoom
if (params.currentZoom_ >= 3) {
params.zoomIn = true;
} else if (params.currentZoom_ < 0.05) {
params.zoomIn = false;
}
params.zoomIncr_ = (params.currentZoom_ / 100);
if(params.zoomIn)
params.zoomIncr_ = -params.zoomIncr_;
GL_CHECK(glUseProgram(handles.shaderHdl_));
GL_CHECK(glUniform4f(handles.baseColorHdl_, params.baseColorVal_[0], params.baseColorVal_[1], params.baseColorVal_[2], params.baseColorVal_[3]));
GL_CHECK(glUniform1i(handles.contrastBoostHdl_, params.contrastBoost_));
GL_CHECK(glUniform1i(handles.maxIterationsHdl_, params.maxIterations_));
GL_CHECK(glUniform1f(handles.centerYHdl_, params.centerY_));
GL_CHECK(glUniform1f(handles.centerXHdl_, params.centerX_));
GL_CHECK(glUniform1f(handles.offsetXHdl_, viewport.x));
GL_CHECK(glUniform1f(handles.offsetYHdl_, viewport.y));
if (!params.manualNavigation_) {
params.currentZoom_ += params.zoomIncr_;
GL_CHECK(glUniform1f(handles.currentZoomHdl_, easeInOutQuint(params.currentZoom_)));
} else {
params.currentZoom_ = 1.0 / pow(params.zoomFactor_, 5.0f);
GL_CHECK(glUniform1f(handles.currentZoomHdl_, params.currentZoom_));
}
float res[2] = {float(sz.width), float(sz.height)};
GL_CHECK(glUniform2fv(handles.resolutionHdl_, 1, res));
GL_CHECK(glBindVertexArray(handles.vao_));
GL_CHECK(glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0));
}
public:
ShaderDemoPlan(const cv::Rect& viewport) : Plan(viewport) {
Global::registerShared(params_);
}
ShaderDemoPlan(const cv::Size& sz) : ShaderDemoPlan(cv::Rect(0,0,sz.width, sz.height)) {
}
void gui(cv::Ptr<V4D> window) override {
window->imgui([](cv::Ptr<V4D> win, ImGuiContext* ctx, Params& params) {
CV_UNUSED(win);
using namespace ImGui;
SetCurrentContext(ctx);
Begin("Fractal");
Text("Navigation");
SliderInt("Iterations", &params.maxIterations_, 3, 100000);
DragFloat("X", &params.centerX_, 0.000001, -1.0f, 1.0f);
DragFloat("Y", &params.centerY_, 0.000001, -1.0f, 1.0f);
if(SliderFloat("Zoom", &params.zoomFactor_, 0.0001f, 10.0f))
params.manualNavigation_ = true;
Text("Color");
ColorPicker4("Color", params.baseColorVal_);
SliderInt("Contrast boost", &params.contrastBoost_, 1, 255);
End();
}, params_);
}
void setup(cv::Ptr<V4D> window) override {
float w = size().width;
float h = size().height;
float tw = w / TILING_;
float th = h / TILING_;
for(size_t i = 0; i < TILING_; ++i) {
for(size_t j = 0; j < TILING_; ++j) {
viewports_[i * TILING_ + j] = cv::Rect(tw * i, th * j, tw - 1, th - 1);
}
}
for(size_t i = 0; i < NUM_CONTEXTS_; ++i) {
window->gl(i, [](const int32_t& ctxID, const cv::Rect& viewport, Handles& handles) {
init_scene(viewport, handles);
}, viewports_[i], handles_[i]);
}
}
void infer(cv::Ptr<V4D> window) override {
window->capture();
for(size_t i = 0; i < NUM_CONTEXTS_; ++i) {
window->gl(i,[](const int32_t& ctxID, const cv::Size& sz, const cv::Rect& viewport, Params& params, Handles& handles) {
Params p = Global::safe_copy(params);
render_scene(sz, viewport, p, handles);
}, size(), viewports_[i], params_, handles_[i]);
}
window->write();
}
void teardown(cv::Ptr<V4D> window) override {
for(size_t i = 0; i < NUM_CONTEXTS_; ++i) {
window->gl(i, [](const int32_t& ctxID, Handles& handles) {
destroy_scene(handles);
}, handles_[i]);
}
}
};
ShaderDemoPlan::Params ShaderDemoPlan::params_;
int main(int argc, char** argv) {
if (argc != 2) {
cerr << "Usage: shader-demo <video-file>" << endl;
exit(1);
}
cv::Ptr<ShaderDemoPlan> plan = new ShaderDemoPlan(cv::Size(1280, 720));
cv::Ptr<V4D> window = V4D::make(plan->size(), "Mandelbrot Shader Demo", IMGUI);
auto src = makeCaptureSource(window, argv[1]);
auto sink = makeWriterSink(window, "shader-demo.mkv", src->fps(), plan->size());
window->setSource(src);
window->setSink(sink);
window->run(plan);
return 0;
}

@@ -0,0 +1,111 @@
#include <opencv2/v4d/v4d.hpp>
using namespace cv;
using namespace cv::v4d;
class VectorGraphicsPlan: public Plan {
public:
VectorGraphicsPlan(const cv::Size& sz) : Plan(sz) {
}
void infer(Ptr<V4D> win) override {
//Creates a NanoVG context and draws googly eyes that occasionally blink.
win->nvg([](const Size &sz) {
//Calls from this namespace may only be used inside a nvg context.
//Nvg calls work exactly like their C-function counterparts.
//Please refer to the NanoVG documentation for details.
using namespace cv::v4d::nvg;
clear();
static long start = cv::getTickCount() / cv::getTickFrequency();
float t = cv::getTickCount() / cv::getTickFrequency() - start;
float x = 0;
float y = 0;
float w = sz.width / 4;
float h = sz.height / 4;
translate((sz.width / 2.0f) - (w / 2.0f), (sz.height / 2.0f) - (h / 2.0f));
float mx = w / 2.0;
float my = h / 2.0;
Paint gloss, bg;
float ex = w * 0.23f;
float ey = h * 0.5f;
float lx = x + ex;
float ly = y + ey;
float rx = x + w - ex;
float ry = y + ey;
float dx, dy, d;
float br = (ex < ey ? ex : ey) * 0.5f;
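//Raising sin(t) to the 200th power yields a value that is ~0 almost everywhere and
//briefly spikes to 1, so blink sits near 1.0 (eyes open) and periodically dips to 0.2.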
float blink = 1 - pow(sinf(t * 0.5f), 200) * 0.8f;
bg = linearGradient(x, y + h * 0.5f, x + w * 0.1f, y + h,
cv::Scalar(0, 0, 0, 32), cv::Scalar(0, 0, 0, 16));
beginPath();
ellipse(lx + 3.0f, ly + 16.0f, ex, ey);
ellipse(rx + 3.0f, ry + 16.0f, ex, ey);
fillPaint(bg);
fill();
bg = linearGradient(x, y + h * 0.25f, x + w * 0.1f, y + h,
cv::Scalar(220, 220, 220, 255),
cv::Scalar(128, 128, 128, 255));
beginPath();
ellipse(lx, ly, ex, ey);
ellipse(rx, ry, ex, ey);
fillPaint(bg);
fill();
dx = (mx - rx) / (ex * 10);
dy = (my - ry) / (ey * 10);
d = sqrtf(dx * dx + dy * dy);
if (d > 1.0f) {
dx /= d;
dy /= d;
}
dx *= ex * 0.4f;
dy *= ey * 0.5f;
beginPath();
ellipse(lx + dx, ly + dy + ey * 0.25f * (1 - blink), br,
br * blink);
fillColor(cv::Scalar(32, 32, 32, 255));
fill();
dx = (mx - rx) / (ex * 10);
dy = (my - ry) / (ey * 10);
d = sqrtf(dx * dx + dy * dy);
if (d > 1.0f) {
dx /= d;
dy /= d;
}
dx *= ex * 0.4f;
dy *= ey * 0.5f;
beginPath();
ellipse(rx + dx, ry + dy + ey * 0.25f * (1 - blink), br,
br * blink);
fillColor(cv::Scalar(32, 32, 32, 255));
fill();
gloss = radialGradient(lx - ex * 0.25f, ly - ey * 0.5f,
ex * 0.1f, ex * 0.75f, cv::Scalar(255, 255, 255, 128),
cv::Scalar(255, 255, 255, 0));
beginPath();
ellipse(lx, ly, ex, ey);
fillPaint(gloss);
fill();
gloss = radialGradient(rx - ex * 0.25f, ry - ey * 0.5f,
ex * 0.1f, ex * 0.75f, cv::Scalar(255, 255, 255, 128),
cv::Scalar(255, 255, 255, 0));
beginPath();
ellipse(rx, ry, ex, ey);
fillPaint(gloss);
fill();
}, win->fbSize());
}
};
int main() {
Ptr<VectorGraphicsPlan> plan = new VectorGraphicsPlan(cv::Size(960, 960));
Ptr<V4D> window = V4D::make(plan->size(), "Vector Graphics");
window->run(plan);
}

@@ -0,0 +1,110 @@
#include <opencv2/v4d/v4d.hpp>
#include <opencv2/v4d/util.hpp>
using namespace cv;
using namespace cv::v4d;
class VectorGraphicsAndFBPlan : public Plan {
public:
VectorGraphicsAndFBPlan(const cv::Size& sz) : Plan(sz) {
}
void infer(Ptr<V4D> window) override {
//Again creates a NanoVG context and draws googly eyes
window->nvg([](const Size& sz) {
//Calls from this namespace may only be used inside a nvg context
using namespace cv::v4d::nvg;
clear();
static long start = cv::getTickCount() / cv::getTickFrequency();
float t = cv::getTickCount() / cv::getTickFrequency() - start;
float x = 0;
float y = 0;
float w = sz.width / 4;
float h = sz.height / 4;
translate((sz.width / 2.0f) - (w / 2.0f), (sz.height / 2.0f) - (h / 2.0f));
float mx = w / 2.0;
float my = h / 2.0;
Paint gloss, bg;
float ex = w * 0.23f;
float ey = h * 0.5f;
float lx = x + ex;
float ly = y + ey;
float rx = x + w - ex;
float ry = y + ey;
float dx, dy, d;
float br = (ex < ey ? ex : ey) * 0.5f;
float blink = 1 - pow(sinf(t * 0.5f), 200) * 0.8f;
bg = linearGradient(x, y + h * 0.5f, x + w * 0.1f, y + h, cv::Scalar(0, 0, 0, 32), cv::Scalar(0,0,0,16));
beginPath();
ellipse(lx + 3.0f, ly + 16.0f, ex, ey);
ellipse(rx + 3.0f, ry + 16.0f, ex, ey);
fillPaint(bg);
fill();
bg = linearGradient(x, y + h * 0.25f, x + w * 0.1f, y + h,
cv::Scalar(220, 220, 220, 255), cv::Scalar(128, 128, 128, 255));
beginPath();
ellipse(lx, ly, ex, ey);
ellipse(rx, ry, ex, ey);
fillPaint(bg);
fill();
dx = (mx - rx) / (ex * 10);
dy = (my - ry) / (ey * 10);
d = sqrtf(dx * dx + dy * dy);
if (d > 1.0f) {
dx /= d;
dy /= d;
}
dx *= ex * 0.4f;
dy *= ey * 0.5f;
beginPath();
ellipse(lx + dx, ly + dy + ey * 0.25f * (1 - blink), br, br * blink);
fillColor(cv::Scalar(32, 32, 32, 255));
fill();
dx = (mx - rx) / (ex * 10);
dy = (my - ry) / (ey * 10);
d = sqrtf(dx * dx + dy * dy);
if (d > 1.0f) {
dx /= d;
dy /= d;
}
dx *= ex * 0.4f;
dy *= ey * 0.5f;
beginPath();
ellipse(rx + dx, ry + dy + ey * 0.25f * (1 - blink), br, br * blink);
fillColor(cv::Scalar(32, 32, 32, 255));
fill();
gloss = radialGradient(lx - ex * 0.25f, ly - ey * 0.5f, ex * 0.1f, ex * 0.75f,
cv::Scalar(255, 255, 255, 128), cv::Scalar(255, 255, 255, 0));
beginPath();
ellipse(lx, ly, ex, ey);
fillPaint(gloss);
fill();
gloss = radialGradient(rx - ex * 0.25f, ry - ey * 0.5f, ex * 0.1f, ex * 0.75f,
cv::Scalar(255, 255, 255, 128), cv::Scalar(255, 255, 255, 0));
beginPath();
ellipse(rx, ry, ex, ey);
fillPaint(gloss);
fill();
}, window->fbSize());
//Provides the framebuffer as left-off by the nvg context.
window->fb([](UMat& framebuffer) {
//Heavily blurs the eyes using a cheap boxFilter
boxFilter(framebuffer, framebuffer, -1, Size(15, 15), Point(-1,-1), true, BORDER_REPLICATE);
});
}
};
int main() {
Ptr<VectorGraphicsAndFBPlan> plan = new VectorGraphicsAndFBPlan(cv::Size(960, 960));
Ptr<V4D> window = V4D::make(plan->size(), "Vector Graphics and Framebuffer");
window->run(plan);
}

@@ -0,0 +1,228 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
/*
* Based on cube-demo. Differs in two ways:
* - Uses a source to read a video.
* - Doesn't clear the background, so the cube is rendered on top of the video.
*/
#include <opencv2/v4d/v4d.hpp>
using std::cerr;
using std::endl;
using namespace cv::v4d;
class VideoDemoPlan: public Plan {
public:
using Plan::Plan;
/* Demo Parameters */
int glowKernelSize_ = 0;
private:
struct Cache {
cv::UMat up_;
cv::UMat down_;
cv::UMat blur_;
cv::UMat dst16_;
} cache_;
/* OpenGL constants */
constexpr static GLuint TRIANGLES_ = 12;
constexpr static GLuint VERTICES_INDEX_ = 0;
constexpr static GLuint COLOR_INDEX_ = 1;
constexpr static float VERTICES_[24] = {
// Front face
0.5, 0.5, 0.5, -0.5, 0.5, 0.5, -0.5, -0.5, 0.5, 0.5, -0.5, 0.5,
// Back face
0.5, 0.5, -0.5, -0.5, 0.5, -0.5, -0.5, -0.5, -0.5, 0.5, -0.5, -0.5, };
constexpr static float VERTEX_COLORS[24] = { 1.0, 0.4, 0.6, 1.0, 0.9, 0.2, 0.7, 0.3, 0.8, 0.5, 0.3, 1.0,
0.2, 0.6, 1.0, 0.6, 1.0, 0.4, 0.6, 0.8, 0.8, 0.4, 0.8, 0.8, };
constexpr static unsigned short TRIANGLE_INDICES_[36] = {
// Front
0, 1, 2, 2, 3, 0,
// Right
0, 3, 7, 7, 4, 0,
// Bottom
2, 6, 7, 7, 3, 2,
// Left
1, 5, 6, 6, 2, 1,
// Back
4, 7, 6, 6, 5, 4,
// Top
5, 1, 0, 0, 4, 5, };
/* OpenGL variables */
GLuint vao_ = 0;
GLuint shader_ = 0;
GLuint uniform_transform_ = 0;
static GLuint load_shader() {
#if !defined(OPENCV_V4D_USE_ES3)
const string shaderVersion = "330";
#else
const string shaderVersion = "300 es";
#endif
const string vert =
" #version " + shaderVersion
+ R"(
precision lowp float;
layout(location = 0) in vec3 pos;
layout(location = 1) in vec3 vertex_color;
uniform mat4 transform;
out vec3 color;
void main() {
gl_Position = transform * vec4(pos, 1.0);
color = vertex_color;
}
)";
const string frag =
" #version " + shaderVersion
+ R"(
precision lowp float;
in vec3 color;
out vec4 frag_color;
void main() {
frag_color = vec4(color, 1.0);
}
)";
unsigned int handles[3];
cv::v4d::initShader(handles, vert.c_str(), frag.c_str(), "fragColor");
return handles[0];
}
static void init_scene(GLuint& vao, GLuint& shader, GLuint& uniformTrans) {
glEnable (GL_DEPTH_TEST);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
unsigned int triangles_ebo;
glGenBuffers(1, &triangles_ebo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, triangles_ebo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof TRIANGLE_INDICES_, TRIANGLE_INDICES_,
GL_STATIC_DRAW);
unsigned int vertices_vbo;
glGenBuffers(1, &vertices_vbo);
glBindBuffer(GL_ARRAY_BUFFER, vertices_vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof VERTICES_, VERTICES_, GL_STATIC_DRAW);
glVertexAttribPointer(VERTICES_INDEX_, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(VERTICES_INDEX_);
unsigned int colors_vbo;
glGenBuffers(1, &colors_vbo);
glBindBuffer(GL_ARRAY_BUFFER, colors_vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof VERTEX_COLORS, VERTEX_COLORS, GL_STATIC_DRAW);
glVertexAttribPointer(COLOR_INDEX_, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(COLOR_INDEX_);
glBindVertexArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
shader = load_shader();
uniformTrans = glGetUniformLocation(shader, "transform");
}
static void render_scene(GLuint& vao, GLuint& shader, GLuint& uniformTrans) {
glUseProgram(shader);
float angle = fmod(double(cv::getTickCount()) / double(cv::getTickFrequency()), 2 * M_PI);
float scale = 0.25;
cv::Matx44f scaleMat(scale, 0.0, 0.0, 0.0, 0.0, scale, 0.0, 0.0, 0.0, 0.0, scale, 0.0, 0.0, 0.0,
0.0, 1.0);
cv::Matx44f rotXMat(1.0, 0.0, 0.0, 0.0, 0.0, cos(angle), -sin(angle), 0.0, 0.0, sin(angle),
cos(angle), 0.0, 0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotYMat(cos(angle), 0.0, sin(angle), 0.0, 0.0, 1.0, 0.0, 0.0, -sin(angle), 0.0,
cos(angle), 0.0, 0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotZMat(cos(angle), -sin(angle), 0.0, 0.0, sin(angle), cos(angle), 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0);
cv::Matx44f transform = scaleMat * rotXMat * rotYMat * rotZMat;
glUniformMatrix4fv(uniformTrans, 1, GL_FALSE, transform.val);
glBindVertexArray(vao);
glDrawElements(GL_TRIANGLES, TRIANGLES_ * 3, GL_UNSIGNED_SHORT, NULL);
}
static void glow_effect(const cv::UMat& src, cv::UMat& dst, const int ksize, Cache& cache) {
cv::bitwise_not(src, dst);
cv::resize(dst, cache.down_, cv::Size(), 0.5, 0.5);
cv::boxFilter(cache.down_, cache.blur_, -1, cv::Size(ksize, ksize), cv::Point(-1, -1), true,
cv::BORDER_REPLICATE);
cv::resize(cache.blur_, cache.up_, src.size());
cv::multiply(dst, cache.up_, cache.dst16_, 1, CV_16U);
cv::divide(cache.dst16_, cv::Scalar::all(255.0), dst, 1, CV_8U);
cv::bitwise_not(dst, dst);
}
public:
void setup(cv::Ptr<V4D> window) override {
int diag = hypot(double(size().width), double(size().height));
glowKernelSize_ = std::max(int(diag / 138 % 2 == 0 ? diag / 138 + 1 : diag / 138), 1);
window->gl([](GLuint& vao, GLuint& shader, GLuint& uniformTrans) {
init_scene(vao, shader, uniformTrans);
}, vao_, shader_, uniform_transform_);
}
void infer(cv::Ptr<V4D> window) override {
window->capture();
window->gl([](GLuint& vao, GLuint& shader, GLuint& uniformTrans) {
render_scene(vao, shader, uniformTrans);
}, vao_, shader_, uniform_transform_);
window->fb([](cv::UMat& framebuffer, const cv::Rect& viewport, int glowKernelSize, Cache& cache) {
cv::UMat roi = framebuffer(viewport);
glow_effect(roi, roi, glowKernelSize, cache);
}, viewport(), glowKernelSize_, cache_);
window->write();
}
};
int main(int argc, char** argv) {
if (argc != 2) {
cerr << "Usage: video-demo <video-file>" << endl;
exit(1);
}
cv::Ptr<VideoDemoPlan> plan = new VideoDemoPlan(cv::Size(1280,720));
cv::Ptr<V4D> window = V4D::make(plan->size(), "Video Demo", NONE);
auto src = makeCaptureSource(window, argv[1]);
auto sink = makeWriterSink(window, "video-demo.mkv", src->fps(), plan->size());
window->setSource(src);
window->setSink(sink);
window->run(plan);
return 0;
}

@@ -0,0 +1,53 @@
#include <opencv2/v4d/v4d.hpp>
using namespace cv;
using namespace cv::v4d;
class VideoEditingPlan : public Plan {
cv::UMat frame_;
const string hv_ = "Hello Video!";
public:
VideoEditingPlan(const cv::Size& sz) : Plan(sz) {
}
void infer(Ptr<V4D> win) override {
//Capture video from the source
win->capture();
//Render on top of the video
win->nvg([](const Size& sz, const string& str) {
using namespace cv::v4d::nvg;
fontSize(40.0f);
fontFace("sans-bold");
fillColor(Scalar(255, 0, 0, 255));
textAlign(NVG_ALIGN_CENTER | NVG_ALIGN_TOP);
text(sz.width / 2.0, sz.height / 2.0, str.c_str(), str.c_str() + str.size());
}, win->fbSize(), hv_);
//Write video to the sink (do nothing in case of WebAssembly)
win->write();
}
};
int main(int argc, char** argv) {
if (argc != 3) {
cerr << "Usage: video_editing <input-video-file> <output-video-file>" << endl;
exit(1);
}
Ptr<VideoEditingPlan> plan = new VideoEditingPlan(cv::Size(960,960));
Ptr<V4D> window = V4D::make(plan->size(), "Video Editing");
//Make the video source
auto src = makeCaptureSource(window, argv[1]);
//Make the video sink
auto sink = makeWriterSink(window, argv[2], src->fps(), plan->size());
//Attach source and sink
window->setSource(src);
window->setSink(sink);
window->run(plan);
}

@@ -0,0 +1,793 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "opencv2/v4d/detail/framebuffercontext.hpp"
#include "opencv2/v4d/v4d.hpp"
#include "opencv2/v4d/util.hpp"
#include "opencv2/core/ocl.hpp"
#include "opencv2/v4d/detail/gl.hpp"
#include "opencv2/core/opengl.hpp"
#include <opencv2/core/utils/logger.hpp>
#include <exception>
#include <iostream>
#include "imgui_impl_glfw.h"
#define GLFW_INCLUDE_NONE
#include <GLFW/glfw3.h>
using std::cerr;
using std::cout;
using std::endl;
namespace cv {
namespace v4d {
namespace detail {
static void glfw_error_callback(int error, const char* description) {
#ifndef NDEBUG
fprintf(stderr, "GLFW Error: (%d) %s\n", error, description);
#endif
}
bool FrameBufferContext::firstSync_ = true;
int frameBufferContextCnt = 0;
FrameBufferContext::FrameBufferContext(V4D& v4d, const string& title, cv::Ptr<FrameBufferContext> other) :
FrameBufferContext(v4d, other->framebufferSize_, !other->debug_, title, other->major_, other->minor_, other->samples_, other->debug_, other->rootWindow_, other, false) {
}
FrameBufferContext::FrameBufferContext(V4D& v4d, const cv::Size& framebufferSize, bool offscreen,
const string& title, int major, int minor, int samples, bool debug, GLFWwindow* rootWindow, cv::Ptr<FrameBufferContext> parent, bool root) :
v4d_(&v4d), offscreen_(offscreen), title_(title), major_(major), minor_(
minor), samples_(samples), debug_(debug), isVisible_(offscreen), viewport_(0, 0, framebufferSize.width, framebufferSize.height), framebufferSize_(framebufferSize), hasParent_(false), rootWindow_(rootWindow), parent_(parent), framebuffer_(), isRoot_(root) {
init();
index_ = ++frameBufferContextCnt;
}
FrameBufferContext::~FrameBufferContext() {
teardown();
}
GLuint FrameBufferContext::getFramebufferID() {
return frameBufferID_;
}
GLuint FrameBufferContext::getTextureID() {
return textureID_;
}
void FrameBufferContext::loadShader(const size_t& index) {
#if !defined(OPENCV_V4D_USE_ES3)
const string shaderVersion = "330";
#else
const string shaderVersion = "300 es";
#endif
const string vert =
" #version " + shaderVersion
+ R"(
layout (location = 0) in vec3 aPos;
void main()
{
gl_Position = vec4(aPos, 1.0);
}
)";
const string frag =
" #version " + shaderVersion
+ R"(
precision mediump float;
out vec4 FragColor;
uniform sampler2D texture0;
uniform vec2 resolution;
void main()
{
//translate screen coordinates to texture coordinates and flip the y-axis.
vec4 texPos = gl_FragCoord / vec4(resolution.x, resolution.y * -1.0f, 1.0, 1.0);
vec4 texColor0 = texture(texture0, texPos.xy);
if(texColor0.a == 0.0)
discard;
else
FragColor = texColor0;
}
)";
unsigned int handles[3];
cv::v4d::initShader(handles, vert.c_str(), frag.c_str(), "fragColor");
shader_program_hdls_[index] = handles[0];
}
void FrameBufferContext::loadBuffers(const size_t& index) {
glGenVertexArrays(1, &copyVaos[index]);
glBindVertexArray(copyVaos[index]);
glGenBuffers(1, &copyVbos[index]);
glGenBuffers(1, &copyEbos[index]);
glBindBuffer(GL_ARRAY_BUFFER, copyVbos[index]);
glBufferData(GL_ARRAY_BUFFER, sizeof(copyVertices), copyVertices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, copyEbos[index]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(copyIndices), copyIndices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*) 0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
}
void FrameBufferContext::init() {
static std::mutex initMtx;
std::unique_lock<std::mutex> lock(initMtx);
if(parent_) {
hasParent_ = true;
if(isRoot()) {
textureID_ = 0;
renderBufferID_ = 0;
onscreenTextureID_ = parent_->textureID_;
onscreenRenderBufferID_ = parent_->renderBufferID_;
} else {
textureID_ = parent_->textureID_;
renderBufferID_ = parent_->renderBufferID_;
onscreenTextureID_ = parent_->onscreenTextureID_;
onscreenRenderBufferID_ = parent_->onscreenRenderBufferID_;
}
} else if (glfwInit() != GLFW_TRUE) {
cerr << "Can't init GLFW" << endl;
exit(1);
}
glfwSetErrorCallback(cv::v4d::detail::glfw_error_callback);
if (debug_)
glfwWindowHint(GLFW_OPENGL_DEBUG_CONTEXT, GLFW_TRUE);
glfwSetTime(0);
#ifdef __APPLE__
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
#elif defined(OPENCV_V4D_USE_ES3)
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
glfwWindowHint(GLFW_CONTEXT_CREATION_API, GLFW_EGL_CONTEXT_API);
glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_ES_API);
#else
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, major_);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, minor_);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_CONTEXT_CREATION_API, GLFW_EGL_CONTEXT_API);
glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_API) ;
#endif
glfwWindowHint(GLFW_SAMPLES, samples_);
glfwWindowHint(GLFW_RED_BITS, 8);
glfwWindowHint(GLFW_GREEN_BITS, 8);
glfwWindowHint(GLFW_BLUE_BITS, 8);
glfwWindowHint(GLFW_ALPHA_BITS, 8);
glfwWindowHint(GLFW_STENCIL_BITS, 8);
glfwWindowHint(GLFW_DEPTH_BITS, 24);
glfwWindowHint(GLFW_RESIZABLE, GLFW_TRUE);
glfwWindowHint(GLFW_VISIBLE, offscreen_ ? GLFW_FALSE : GLFW_TRUE );
glfwWindowHint(GLFW_DOUBLEBUFFER, GLFW_TRUE);
glfwWindow_ = glfwCreateWindow(framebufferSize_.width, framebufferSize_.height, title_.c_str(), nullptr, rootWindow_);
if (glfwWindow_ == nullptr) {
//retry with native api
glfwWindowHint(GLFW_CONTEXT_CREATION_API, GLFW_NATIVE_CONTEXT_API);
glfwWindow_ = glfwCreateWindow(framebufferSize_.width, framebufferSize_.height, title_.c_str(), nullptr,
rootWindow_);
if (glfwWindow_ == nullptr) {
CV_Error(Error::StsError, "Unable to initialize window.");
}
}
this->makeCurrent();
if(isRoot()) {
rootWindow_ = glfwWindow_;
glfwSwapInterval(1);
} else {
glfwSwapInterval(0);
}
#if !defined(OPENCV_V4D_USE_ES3)
if (!parent_) {
GLenum err = glewInit();
if (err != GLEW_OK && err != GLEW_ERROR_NO_GLX_DISPLAY) {
CV_Error(Error::StsError, "Could not initialize GLEW!");
}
}
#endif
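// When CL-GL sharing is available, initializing the OpenCL context from the current GL
// context lets UMats operate on the framebuffer texture without a round-trip through
// host memory; otherwise clglSharing_ is disabled and pixels are copied instead.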
try {
if (isRoot() && isClGlSharingSupported())
cv::ogl::ocl::initializeContextFromGL();
else
clglSharing_ = false;
} catch (std::exception& ex) {
CV_LOG_WARNING(nullptr, "CL-GL sharing failed: " << ex.what());
clglSharing_ = false;
} catch (...) {
CV_LOG_WARNING(nullptr, "CL-GL sharing failed with unknown error");
clglSharing_ = false;
}
context_ = CLExecContext_t::getCurrent();
setup();
if(isRoot()) {
glfwSetWindowUserPointer(getGLFWWindow(), getV4D().get());
glfwSetCursorPosCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, double x, double y) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
if(v4d->hasImguiCtx()) {
ImGui_ImplGlfw_CursorPosCallback(glfwWin, x, y);
if (!ImGui::GetIO().WantCaptureMouse) {
v4d->setMousePosition(cv::Point2f(float(x), float(y)));
}
}
});
glfwSetMouseButtonCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, int button, int action, int modifiers) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
if(v4d->hasImguiCtx()) {
ImGui_ImplGlfw_MouseButtonCallback(glfwWin, button, action, modifiers);
if (!ImGui::GetIO().WantCaptureMouse) {
// Pass event further
} else {
// Do nothing, since imgui already reacted to mouse click. It would be weird if unrelated things started happening when you click something on UI.
}
}
});
glfwSetKeyCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, int key, int scancode, int action, int mods) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
if(v4d->hasImguiCtx()) {
ImGui_ImplGlfw_KeyCallback(glfwWin, key, scancode, action, mods);
if (!ImGui::GetIO().WantCaptureKeyboard) {
// Pass event further
} else {
// Do nothing: imgui already consumed the key event. It would be weird if unrelated things started happening when you type into the UI.
}
}
});
glfwSetCharCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, unsigned int codepoint) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
if(v4d->hasImguiCtx()) {
ImGui_ImplGlfw_CharCallback(glfwWin, codepoint);
}
});
//// glfwSetDropCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, int count, const char** filenames) {
//// V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
//// });
//
// glfwSetScrollCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, double x, double y) {
// V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
// if (v4d->hasImguiCtx()) {
// ImGui_ImplGlfw_ScrollCallback(glfwWin, x, y);
// }
// });
//
// glfwSetWindowSizeCallback(getGLFWWindow(),
// [](GLFWwindow* glfwWin, int width, int height) {
// cerr << "glfwSetWindowSizeCallback: " << width << endl;
// run_sync_on_main<23>([glfwWin, width, height]() {
// V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
// cv::Rect& vp = v4d->viewport();
// cv::Size fbsz = v4d->framebufferSize();
// vp.x = 0;
// vp.y = 0;
// vp.width = fbsz.width;
// vp.height = fbsz.height;
// });
// });
//
// glfwSetFramebufferSizeCallback(getGLFWWindow(),
// [](GLFWwindow* glfwWin, int width, int height) {
//// cerr << "glfwSetFramebufferSizeCallback: " << width << endl;
//// run_sync_on_main<22>([glfwWin, width, height]() {
//// V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
////// v4d->makeCurrent();
//// cv::Rect& vp = v4d->viewport();
//// cv::Size fbsz = v4d->framebufferSize();
//// vp.x = 0;
//// vp.y = 0;
//// vp.width = fbsz.width;
//// vp.height = fbsz.height;
////
//// if(v4d->hasNguiCtx())
//// v4d->nguiCtx().screen().resize_callback_event(width, height);
//// });
//// if(v4d->isResizable()) {
//// v4d->nvgCtx().fbCtx()->teardown();
//// v4d->glCtx().fbCtx()->teardown();
//// v4d->fbCtx()->teardown();
//// v4d->fbCtx()->setup(cv::Size(width, height));
//// v4d->glCtx().fbCtx()->setup(cv::Size(width, height));
//// v4d->nvgCtx().fbCtx()->setup(cv::Size(width, height));
//// }
// });
glfwSetWindowFocusCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, int i) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
if(v4d->getGLFWWindow() == glfwWin) {
v4d->setFocused(i == 1);
}
});
}
}
cv::Ptr<V4D> FrameBufferContext::getV4D() {
return v4d_->self();
}
int FrameBufferContext::getIndex() {
return index_;
}
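// Creates the framebuffer object backing this context. The root context owns
// freshly generated texture and renderbuffer storage; child contexts appear to
// reuse texture/renderbuffer IDs shared from their parent (set up in the
// constructor) and only generate their own FBO handle.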
void FrameBufferContext::setup() {
cv::Size sz = framebufferSize_;
CLExecScope_t clExecScope(getCLExecContext());
framebuffer_.create(sz, CV_8UC4);
if(isRoot()) {
GL_CHECK(glGenFramebuffers(1, &frameBufferID_));
GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, frameBufferID_));
GL_CHECK(glGenRenderbuffers(1, &renderBufferID_));
GL_CHECK(glGenTextures(1, &textureID_));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, textureID_));
texture_ = new cv::ogl::Texture2D(sz, cv::ogl::Texture2D::RGBA, textureID_);
GL_CHECK(glPixelStorei(GL_UNPACK_ALIGNMENT, 1));
GL_CHECK(
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, sz.width, sz.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0));
GL_CHECK(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR));
GL_CHECK(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR));
GL_CHECK(
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID_, 0));
GL_CHECK(glBindRenderbuffer(GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, sz.width, sz.height));
GL_CHECK(
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, renderBufferID_));
assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
} else if(hasParent()) {
GL_CHECK(glGenFramebuffers(1, &frameBufferID_));
GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, frameBufferID_));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, textureID_));
texture_ = new cv::ogl::Texture2D(sz, cv::ogl::Texture2D::RGBA, textureID_);
GL_CHECK(glPixelStorei(GL_UNPACK_ALIGNMENT, 1));
GL_CHECK(
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, sz.width, sz.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0));
GL_CHECK(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR));
GL_CHECK(glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR));
GL_CHECK(
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID_, 0));
GL_CHECK(glBindRenderbuffer(GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, sz.width, sz.height));
GL_CHECK(
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, renderBufferID_));
assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
} else
CV_Assert(false);
}
void FrameBufferContext::teardown() {
using namespace cv::ocl;
this->makeCurrent();
#ifdef HAVE_OPENCL
if(cv::ocl::useOpenCL() && clImage_ != nullptr && !getCLExecContext().empty()) {
CLExecScope_t clExecScope(getCLExecContext());
cl_int status = 0;
cl_command_queue q = (cl_command_queue) Queue::getDefault().ptr();
status = clEnqueueReleaseGLObjects(q, 1, &clImage_, 0, NULL, NULL);
if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, ("OpenCL: clEnqueueReleaseGLObjects failed: %d", status));
status = clFinish(q); // TODO Use events
if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, ("OpenCL: clFinish failed: %d", status));
status = clReleaseMemObject(clImage_); // TODO RAII
if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, ("OpenCL: clReleaseMemObject failed: %d", status));
clImage_ = nullptr;
}
#endif
glBindTexture(GL_TEXTURE_2D, 0);
glGetError();
glBindRenderbuffer(GL_RENDERBUFFER, 0);
glGetError();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glGetError();
CV_Assert(texture_ != nullptr);
delete texture_;
GL_CHECK(glDeleteTextures(1, &textureID_));
GL_CHECK(glDeleteRenderbuffers(1, &renderBufferID_));
GL_CHECK(glDeleteFramebuffers(1, &frameBufferID_));
this->makeNoneCurrent();
}
#ifdef HAVE_OPENCL
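// Copies the UMat's OpenCL buffer into the GL texture via CL-GL interop.
// Assumes clImage_ was already created and acquired (see fromGLTexture2D
// below); it releases the GL object again once the copy has been enqueued.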
void FrameBufferContext::toGLTexture2D(cv::UMat& u, cv::ogl::Texture2D& texture) {
CV_Assert(clImage_ != nullptr);
using namespace cv::ocl;
cl_int status = 0;
cl_command_queue q = (cl_command_queue) context_.getQueue().ptr();
cl_mem clBuffer = (cl_mem) u.handle(ACCESS_READ);
size_t offset = 0;
size_t dst_origin[3] = { 0, 0, 0 };
size_t region[3] = { (size_t) u.cols, (size_t) u.rows, 1 };
status = clEnqueueCopyBufferToImage(q, clBuffer, clImage_, offset, dst_origin, region, 0, NULL,
NULL);
if (status != CL_SUCCESS)
throw std::runtime_error("OpenCL: clEnqueueCopyBufferToImage failed: " + std::to_string(status));
status = clEnqueueReleaseGLObjects(q, 1, &clImage_, 0, NULL, NULL);
if (status != CL_SUCCESS)
throw std::runtime_error("OpenCL: clEnqueueReleaseGLObjects failed: " + std::to_string(status));
}
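// Copies the GL texture into the UMat's OpenCL buffer. The shared CL image is
// created lazily from the texture on first use and then acquired for the copy;
// it is left acquired so that toGLTexture2D can release it later.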
void FrameBufferContext::fromGLTexture2D(const cv::ogl::Texture2D& texture, cv::UMat& u) {
using namespace cv::ocl;
const int dtype = CV_8UC4;
int textureType = dtype;
if (u.size() != texture.size() || u.type() != textureType) {
u.create(texture.size(), textureType);
}
cl_command_queue q = (cl_command_queue) context_.getQueue().ptr();
cl_int status = 0;
if (clImage_ == nullptr) {
Context& ctx = context_.getContext();
cl_context context = (cl_context) ctx.ptr();
clImage_ = clCreateFromGLTexture(context, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texture.texId(),
&status);
if (status != CL_SUCCESS)
throw std::runtime_error("OpenCL: clCreateFromGLTexture failed: " + std::to_string(status));
}
status = clEnqueueAcquireGLObjects(q, 1, &clImage_, 0, NULL, NULL);
if (status != CL_SUCCESS)
throw std::runtime_error("OpenCL: clEnqueueAcquireGLObjects failed: " + std::to_string(status));
cl_mem clBuffer = (cl_mem) u.handle(ACCESS_WRITE); // the buffer is the copy destination
size_t offset = 0;
size_t src_origin[3] = { 0, 0, 0 };
size_t region[3] = { (size_t) u.cols, (size_t) u.rows, 1 };
status = clEnqueueCopyImageToBuffer(q, clImage_, clBuffer, src_origin, region, offset, 0, NULL,
NULL);
if (status != CL_SUCCESS)
throw std::runtime_error("OpenCL: clEnqueueCopyImageToBuffer failed: " + std::to_string(status));
}
#endif
const cv::Size& FrameBufferContext::size() const {
return framebufferSize_;
}
void FrameBufferContext::copyTo(cv::UMat& dst) {
if(!getCLExecContext().empty()) {
CLExecScope_t clExecScope(getCLExecContext());
FrameBufferContext::GLScope glScope(this, GL_FRAMEBUFFER);
FrameBufferContext::FrameBufferScope fbScope(this, framebuffer_);
framebuffer_.copyTo(dst);
} else {
FrameBufferContext::GLScope glScope(this, GL_FRAMEBUFFER);
FrameBufferContext::FrameBufferScope fbScope(this, framebuffer_);
framebuffer_.copyTo(dst);
}
}
void FrameBufferContext::copyFrom(const cv::UMat& src) {
if(!getCLExecContext().empty()) {
CLExecScope_t clExecScope(getCLExecContext());
FrameBufferContext::GLScope glScope(this, GL_FRAMEBUFFER);
FrameBufferContext::FrameBufferScope fbScope(this, framebuffer_);
src.copyTo(framebuffer_);
} else {
FrameBufferContext::GLScope glScope(this, GL_FRAMEBUFFER);
FrameBufferContext::FrameBufferScope fbScope(this, framebuffer_);
src.copyTo(framebuffer_);
}
}
void FrameBufferContext::copyToRootWindow() {
GLScope scope(self_, GL_READ_FRAMEBUFFER);
GL_CHECK(glReadBuffer(GL_COLOR_ATTACHMENT0));
GL_CHECK(glActiveTexture(GL_TEXTURE0));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, onscreenTextureID_));
GL_CHECK(glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, 0, 0, size().width, size().height));
}
cv::ogl::Texture2D& FrameBufferContext::getTexture2D() {
return *texture_;
}
GLFWwindow* FrameBufferContext::getGLFWWindow() const {
return glfwWindow_;
}
CLExecContext_t& FrameBufferContext::getCLExecContext() {
return context_;
}
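// Blits this context's framebuffer into the target framebuffer. The scale
// factor f = min(width ratio, height ratio) preserves the aspect ratio when
// stretching; the margins center the (possibly scaled) image, and flipY
// accounts for OpenGL's bottom-left origin.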
void FrameBufferContext::blitFrameBufferToFrameBuffer(const cv::Rect& srcViewport,
const cv::Size& targetFbSize, GLuint targetFramebufferID, bool stretch, bool flipY) {
double hf = double(targetFbSize.height) / framebufferSize_.height;
double wf = double(targetFbSize.width) / framebufferSize_.width;
double f = hf > wf ? wf : hf;
double fbws = framebufferSize_.width * f;
double fbhs = framebufferSize_.height * f;
double marginw = (targetFbSize.width - framebufferSize_.width) / 2.0;
double marginh = (targetFbSize.height - framebufferSize_.height) / 2.0;
double marginws = (targetFbSize.width - fbws) / 2.0;
double marginhs = (targetFbSize.height - fbhs) / 2.0;
GLint srcX0 = srcViewport.x;
GLint srcY0 = srcViewport.y;
GLint srcX1 = srcViewport.x + srcViewport.width;
GLint srcY1 = srcViewport.y + srcViewport.height;
GLint dstX0 = stretch ? marginws : marginw;
GLint dstY0 = stretch ? marginhs : marginh;
GLint dstX1 = stretch ? marginws + fbws : marginw + framebufferSize_.width;
GLint dstY1 = stretch ? marginhs + fbhs : marginh + framebufferSize_.height;
if(flipY) {
GLint tmp = dstY0;
dstY0 = dstY1;
dstY1 = tmp;
}
GL_CHECK(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, targetFramebufferID));
GL_CHECK(glBlitFramebuffer( srcX0, srcY0, srcX1, srcY1,
dstX0, dstY0, dstX1, dstY1,
GL_COLOR_BUFFER_BIT, GL_NEAREST));
}
cv::UMat& FrameBufferContext::fb() {
return framebuffer_;
}
void FrameBufferContext::begin(GLenum framebufferTarget) {
this->makeCurrent();
GL_CHECK(glBindFramebuffer(framebufferTarget, frameBufferID_));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, textureID_));
GL_CHECK(glBindRenderbuffer(GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, size().width, size().height));
GL_CHECK(
glFramebufferRenderbuffer(framebufferTarget, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
glFramebufferTexture2D(framebufferTarget, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID_, 0));
assert(glCheckFramebufferStatus(framebufferTarget) == GL_FRAMEBUFFER_COMPLETE);
}
void FrameBufferContext::end() {
this->makeNoneCurrent();
}
void FrameBufferContext::download(cv::UMat& m) {
cv::Mat tmp = m.getMat(cv::ACCESS_WRITE);
assert(tmp.data != nullptr);
GL_CHECK(glReadPixels(0, 0, tmp.cols, tmp.rows, GL_RGBA, GL_UNSIGNED_BYTE, tmp.data));
tmp.release();
}
void FrameBufferContext::upload(const cv::UMat& m) {
cv::Mat tmp = m.getMat(cv::ACCESS_READ);
assert(tmp.data != nullptr);
GL_CHECK(
glTexSubImage2D( GL_TEXTURE_2D, 0, 0, 0, tmp.cols, tmp.rows, GL_RGBA, GL_UNSIGNED_BYTE, tmp.data));
tmp.release();
}
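// Fetches the framebuffer contents into m: via CL-GL sharing when available,
// falling back to a glReadPixels download (and disabling sharing) if the
// interop path fails. The flip converts between OpenGL's bottom-left origin
// and OpenCV's top-left origin.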
void FrameBufferContext::acquireFromGL(cv::UMat& m) {
#ifdef HAVE_OPENCL
if (cv::ocl::useOpenCL() && clglSharing_) {
try {
GL_CHECK(fromGLTexture2D(getTexture2D(), m));
} catch(...) {
clglSharing_ = false;
download(m);
}
return;
}
#endif
{
download(m);
}
//FIXME: avoid the extra flip; the GL framebuffer origin is bottom-left while cv::Mat is top-left.
cv::flip(m, m, 0);
}
void FrameBufferContext::releaseToGL(cv::UMat& m) {
//FIXME: avoid the extra flip; see acquireFromGL().
cv::flip(m, m, 0);
#ifdef HAVE_OPENCL
if (cv::ocl::useOpenCL() && clglSharing_) {
try {
GL_CHECK(toGLTexture2D(m, getTexture2D()));
} catch(...) {
clglSharing_ = false;
upload(m);
}
return;
}
#endif
{
upload(m);
}
}
cv::Vec2f FrameBufferContext::position() {
int x, y;
glfwGetWindowPos(getGLFWWindow(), &x, &y);
return cv::Vec2f(x, y);
}
float FrameBufferContext::pixelRatioX() {
float xscale, yscale;
glfwGetWindowContentScale(getGLFWWindow(), &xscale, &yscale);
return xscale;
}
float FrameBufferContext::pixelRatioY() {
float xscale, yscale;
glfwGetWindowContentScale(getGLFWWindow(), &xscale, &yscale);
return yscale;
}
void FrameBufferContext::makeCurrent() {
assert(getGLFWWindow() != nullptr);
glfwMakeContextCurrent(getGLFWWindow());
}
void FrameBufferContext::makeNoneCurrent() {
glfwMakeContextCurrent(nullptr);
}
bool FrameBufferContext::isResizable() {
return glfwGetWindowAttrib(getGLFWWindow(), GLFW_RESIZABLE) == GLFW_TRUE;
}
void FrameBufferContext::setResizable(bool r) {
glfwSetWindowAttrib(getGLFWWindow(), GLFW_RESIZABLE, r ? GLFW_TRUE : GLFW_FALSE);
}
void FrameBufferContext::setWindowSize(const cv::Size& sz) {
glfwSetWindowSize(getGLFWWindow(), sz.width, sz.height);
}
//FIXME cache window size
cv::Size FrameBufferContext::getWindowSize() {
cv::Size sz;
glfwGetWindowSize(getGLFWWindow(), &sz.width, &sz.height);
return sz;
}
bool FrameBufferContext::isFullscreen() {
return glfwGetWindowMonitor(getGLFWWindow()) != nullptr;
}
void FrameBufferContext::setFullscreen(bool f) {
auto monitor = glfwGetPrimaryMonitor();
const GLFWvidmode* mode = glfwGetVideoMode(monitor);
if (f) {
glfwSetWindowMonitor(getGLFWWindow(), monitor, 0, 0, mode->width, mode->height,
mode->refreshRate);
setWindowSize(getNativeFrameBufferSize());
} else {
glfwSetWindowMonitor(getGLFWWindow(), nullptr, 0, 0, size().width,
size().height, 0);
setWindowSize(size());
}
}
cv::Size FrameBufferContext::getNativeFrameBufferSize() {
int w, h;
glfwGetFramebufferSize(getGLFWWindow(), &w, &h);
return cv::Size{w, h};
}
// Cache window visibility instead of performing a heavy window-attribute query each time.
bool FrameBufferContext::isVisible() {
return isVisible_;
}
void FrameBufferContext::setVisible(bool v) {
isVisible_ = v;
if (isVisible_)
glfwShowWindow(getGLFWWindow());
else
glfwHideWindow(getGLFWWindow());
}
bool FrameBufferContext::isClosed() {
return glfwWindow_ == nullptr;
}
void FrameBufferContext::close() {
teardown();
glfwDestroyWindow(getGLFWWindow());
glfwWindow_ = nullptr;
}
bool FrameBufferContext::isRoot() {
return isRoot_;
}
bool FrameBufferContext::hasParent() {
return hasParent_;
}
bool FrameBufferContext::hasRootWindow() {
return rootWindow_ != nullptr;
}
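// GPU synchronization: fence() inserts a sync object after the commands issued
// so far; a later wait() blocks (up to `timeout` nanoseconds) until the GPU has
// passed it. A sketch of the intended call pattern (hypothetical usage, not
// from the sources):
//
//   fbCtx->fence();              // after submitting GL work for a frame
//   ...                          // do CPU-side work in the meantime
//   if (!fbCtx->wait(1000000)) {
//       // the GPU did not finish within the timeout
//   }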
void FrameBufferContext::fence() {
CV_Assert(currentSyncObject_ == 0);
currentSyncObject_ = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
CV_Assert(currentSyncObject_ != 0);
}
bool FrameBufferContext::wait(const uint64_t& timeout) {
if(firstSync_) {
currentSyncObject_ = 0;
firstSync_ = false;
return true;
}
CV_Assert(currentSyncObject_ != 0);
GLenum ret = glClientWaitSync(static_cast<GLsync>(currentSyncObject_),
GL_SYNC_FLUSH_COMMANDS_BIT, timeout);
GL_CHECK();
CV_Assert(GL_WAIT_FAILED != ret);
// Sync objects are single-use: delete the object before resetting the handle to avoid leaking it.
glDeleteSync(static_cast<GLsync>(currentSyncObject_));
currentSyncObject_ = 0;
return GL_CONDITION_SATISFIED == ret || GL_ALREADY_SIGNALED == ret;
}
}
}
}

@ -0,0 +1,42 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "opencv2/v4d/detail/glcontext.hpp"
namespace cv {
namespace v4d {
namespace detail {
GLContext::GLContext(const int32_t& idx, cv::Ptr<FrameBufferContext> fbContext) :
idx_(idx), mainFbContext_(fbContext), glFbContext_(new FrameBufferContext(*fbContext->getV4D(), "OpenGL" + std::to_string(idx), fbContext)) {
}
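// Runs custom GL code against this context's framebuffer. If the context does
// not share GL objects with the main context (no parent), the framebuffer
// contents are copied in before and copied back out after fn() runs; with
// sharing, both contexts render into the same storage and no copy is needed.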
void GLContext::execute(std::function<void()> fn) {
if(!fbCtx()->hasParent()) {
UMat tmp;
mainFbContext_->copyTo(tmp);
fbCtx()->copyFrom(tmp);
}
{
FrameBufferContext::GLScope glScope(fbCtx(), GL_FRAMEBUFFER);
GL_CHECK(glClear(GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT));
fn();
}
if(!fbCtx()->hasParent()) {
UMat tmp;
fbCtx()->copyTo(tmp);
mainFbContext_->copyFrom(tmp);
}
}
const int32_t& GLContext::getIndex() const {
return idx_;
}
cv::Ptr<FrameBufferContext> GLContext::fbCtx() {
return glFbContext_;
}
}
}
}

@ -0,0 +1,99 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "opencv2/v4d/v4d.hpp"
#if defined(OPENCV_V4D_USE_ES3) || defined(EMSCRIPTEN)
# define IMGUI_IMPL_OPENGL_ES3
#endif
#define IMGUI_IMPL_OPENGL_LOADER_CUSTOM
#include "imgui_impl_glfw.h"
#include "imgui_impl_opengl3.h"
namespace cv {
namespace v4d {
namespace detail {
ImGuiContextImpl::ImGuiContextImpl(cv::Ptr<FrameBufferContext> fbContext) :
mainFbContext_(fbContext) {
FrameBufferContext::GLScope glScope(mainFbContext_, GL_FRAMEBUFFER);
IMGUI_CHECKVERSION();
context_ = ImGui::CreateContext();
ImGui::SetCurrentContext(context_);
ImGuiIO& io = ImGui::GetIO();
(void)io;
io.ConfigFlags |= ImGuiConfigFlags_NavEnableKeyboard;
io.ConfigFlags |= ImGuiConfigFlags_NavEnableGamepad;
ImGui::StyleColorsDark();
ImGui_ImplGlfw_InitForOpenGL(mainFbContext_->getGLFWWindow(), false);
ImGui_ImplGlfw_SetCallbacksChainForAllWindows(true);
#if !defined(OPENCV_V4D_USE_ES3)
ImGui_ImplOpenGL3_Init("#version 330");
#else
ImGui_ImplOpenGL3_Init("#version 300 es");
#endif
}
void ImGuiContextImpl::build(std::function<void(ImGuiContext*)> fn) {
renderCallback_ = fn;
}
void ImGuiContextImpl::makeCurrent() {
ImGui::SetCurrentContext(context_);
}
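// Renders the ImGui overlay directly into the onscreen window (framebuffer 0),
// on top of whatever has already been blitted there, rather than into the V4D
// framebuffer texture.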
void ImGuiContextImpl::render(bool showFPS) {
mainFbContext_->makeCurrent();
ImGui::SetCurrentContext(context_);
GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, 0));
#if !defined(OPENCV_V4D_USE_ES3)
GL_CHECK(glDrawBuffer(GL_BACK));
#endif
ImGui_ImplOpenGL3_NewFrame();
ImGui_ImplGlfw_NewFrame();
ImGui::NewFrame();
if (showFPS) {
static bool open_ptr[1] = { true };
static ImGuiWindowFlags window_flags = 0;
// window_flags |= ImGuiWindowFlags_NoBackground;
window_flags |= ImGuiWindowFlags_NoBringToFrontOnFocus;
window_flags |= ImGuiWindowFlags_NoMove;
window_flags |= ImGuiWindowFlags_NoScrollWithMouse;
window_flags |= ImGuiWindowFlags_AlwaysAutoResize;
window_flags |= ImGuiWindowFlags_NoSavedSettings;
window_flags |= ImGuiWindowFlags_NoFocusOnAppearing;
window_flags |= ImGuiWindowFlags_NoNav;
window_flags |= ImGuiWindowFlags_NoDecoration;
window_flags |= ImGuiWindowFlags_NoInputs;
static ImVec2 pos(0, 0);
ImGui::SetNextWindowPos(pos, ImGuiCond_Once);
ImGui::PushStyleColor(ImGuiCol_WindowBg, ImVec4(0.0f, 0.0f, 0.0f, 0.5f));
ImGui::Begin("Display", open_ptr, window_flags);
ImGui::Text("%.3f ms/frame (%.1f FPS)", (1000.0f / Global::fps()) , Global::fps());
ImGui::End();
ImGui::PopStyleColor(1);
std::stringstream ss;
TimeTracker::getInstance()->print(ss);
std::string line;
ImGui::PushStyleColor(ImGuiCol_WindowBg, ImVec4(0.0f, 0.0f, 0.0f, 0.5f));
ImGui::Begin("Time Tracking");
while(getline(ss, line)) {
ImGui::Text("%s", line.c_str());
}
ImGui::End();
ImGui::PopStyleColor(1);
}
if (renderCallback_)
renderCallback_(context_);
ImGui::Render();
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
mainFbContext_->makeNoneCurrent();
}
}
}
}

@ -0,0 +1,82 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "opencv2/v4d/detail/nanovgcontext.hpp"
#include "opencv2/v4d/nvg.hpp"
#include "nanovg_gl.h"
namespace cv {
namespace v4d {
namespace detail {
NanoVGContext::NanoVGContext(cv::Ptr<FrameBufferContext> fbContext) :
mainFbContext_(fbContext), nvgFbContext_(new FrameBufferContext(*fbContext->getV4D(), "NanoVG", fbContext)), context_(
nullptr) {
FrameBufferContext::GLScope glScope(fbCtx(), GL_FRAMEBUFFER);
#if defined(OPENCV_V4D_USE_ES3)
context_ = nvgCreateGLES3(NVG_ANTIALIAS | NVG_STENCIL_STROKES);
#else
context_ = nvgCreateGL3(NVG_ANTIALIAS | NVG_STENCIL_STROKES);
#endif
if (!context_)
CV_Error(Error::StsError, "Could not initialize NanoVG!");
nvgCreateFont(context_, "icons", "modules/v4d/assets/fonts/entypo.ttf");
nvgCreateFont(context_, "sans", "modules/v4d/assets/fonts/Roboto-Regular.ttf");
nvgCreateFont(context_, "sans-bold", "modules/v4d/assets/fonts/Roboto-Bold.ttf");
}
void NanoVGContext::execute(std::function<void()> fn) {
if (!fbCtx()->hasParent()) {
UMat tmp;
mainFbContext_->copyTo(tmp);
fbCtx()->copyFrom(tmp);
}
{
FrameBufferContext::GLScope glScope(fbCtx(), GL_FRAMEBUFFER);
glClear(GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
NanoVGContext::Scope nvgScope(*this);
cv::v4d::nvg::detail::NVG::initializeContext(context_);
fn();
}
if (!fbCtx()->hasParent()) {
UMat tmp;
fbCtx()->copyTo(tmp);
mainFbContext_->copyFrom(tmp);
}
}
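// Starts a NanoVG frame covering the whole framebuffer. The pixel ratio from
// pixelRatioX() keeps strokes and fonts crisp on HiDPI displays; the translate
// by (0, h - hs) appears to compensate for the configured scale (it is zero at
// scale 1.0).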
void NanoVGContext::begin() {
float w = fbCtx()->size().width;
float h = fbCtx()->size().height;
float ws = w / scale_.width;
float hs = h / scale_.height;
float r = fbCtx()->pixelRatioX();
CV_UNUSED(ws);
nvgSave(context_);
nvgBeginFrame(context_, w, h, r);
nvgTranslate(context_, 0, h - hs);
}
void NanoVGContext::end() {
//FIXME make nvgCancelFrame possible
nvgEndFrame(context_);
nvgRestore(context_);
}
void NanoVGContext::setScale(const cv::Size_<float>& scale) {
scale_ = scale;
}
cv::Ptr<FrameBufferContext> NanoVGContext::fbCtx() {
return nvgFbContext_;
}
}
}
}

@ -0,0 +1,45 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "opencv2/v4d/nvg.hpp"
#include <nanovg.h>
#include "opencv2/core.hpp"
#include <cstring>
namespace cv {
namespace v4d {
/*!
* In general, please refer to https://github.com/memononen/nanovg/blob/master/src/nanovg.h for reference.
*/
namespace nvg {
Paint::Paint(const NVGpaint& np) {
memcpy(this->xform, np.xform, sizeof(this->xform));
memcpy(this->extent, np.extent, sizeof(this->extent));
this->radius = np.radius;
this->feather = np.feather;
this->innerColor = cv::Scalar(np.innerColor.rgba[2] * 255, np.innerColor.rgba[1] * 255,
np.innerColor.rgba[0] * 255, np.innerColor.rgba[3] * 255);
this->outerColor = cv::Scalar(np.outerColor.rgba[2] * 255, np.outerColor.rgba[1] * 255,
np.outerColor.rgba[0] * 255, np.outerColor.rgba[3] * 255);
this->image = np.image;
}
NVGpaint Paint::toNVGpaint() {
NVGpaint np;
memcpy(np.xform, this->xform, sizeof(this->xform));
memcpy(np.extent, this->extent, sizeof(this->extent));
np.radius = this->radius;
np.feather = this->feather;
np.innerColor = nvgRGBA(this->innerColor[2], this->innerColor[1], this->innerColor[0],
this->innerColor[3]);
np.outerColor = nvgRGBA(this->outerColor[2], this->outerColor[1], this->outerColor[0],
this->outerColor[3]);
np.image = this->image;
return np;
}
}
}
}

@ -0,0 +1,48 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "../../include/opencv2/v4d/detail/sinkcontext.hpp"
#include "../../include/opencv2/v4d/v4d.hpp"
#include <opencv2/imgproc.hpp>
namespace cv {
namespace v4d {
namespace detail {
SinkContext::SinkContext(cv::Ptr<FrameBufferContext> mainFbContext) : mainFbContext_(mainFbContext) {
}
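// Runs fn() (which is expected to fill sinkBuffer()) under the captured CL
// context if one was copied in, then hands the buffer to the sink together
// with the source's sequence number so frames can be written out in order.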
void SinkContext::execute(std::function<void()> fn) {
if (hasContext()) {
CLExecScope_t scope(getCLExecContext());
fn();
} else {
fn();
}
auto v4d = mainFbContext_->getV4D();
if(v4d->hasSink()) {
v4d->getSink()->operator ()(v4d->sourceCtx()->sequenceNumber(), sinkBuffer());
}
}
bool SinkContext::hasContext() {
return !context_.empty();
}
void SinkContext::copyContext() {
context_ = CLExecContext_t::getCurrent();
}
CLExecContext_t SinkContext::getCLExecContext() {
return context_;
}
cv::UMat& SinkContext::sinkBuffer() {
return sinkBuffer_;
}
}
}
}

@ -0,0 +1,76 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "../../include/opencv2/v4d/detail/sourcecontext.hpp"
#include "../../include/opencv2/v4d/v4d.hpp"
#include <opencv2/imgproc.hpp>
namespace cv {
namespace v4d {
namespace detail {
SourceContext::SourceContext(cv::Ptr<FrameBufferContext> mainFbContext) : mainFbContext_(mainFbContext) {
}
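// Grabs the next frame from the source (if any), records its sequence number,
// letterbox-resizes it to the framebuffer size and converts it to BGRA into
// sourceBuffer(), then runs fn(). The work is wrapped in the captured CL
// context when one is available.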
void SourceContext::execute(std::function<void()> fn) {
auto capture = [this]() {
if (mainFbContext_->getV4D()->hasSource()) {
auto src = mainFbContext_->getV4D()->getSource();
if(src->isOpen()) {
auto p = src->operator ()();
currentSeqNr_ = p.first;
if(p.second.empty()) {
CV_Error(cv::Error::StsError, "End of stream");
}
resizePreserveAspectRatio(p.second, captureBufferRGB_, mainFbContext_->size());
cv::cvtColor(captureBufferRGB_, sourceBuffer(), cv::COLOR_RGB2BGRA);
}
}
};
if (hasContext()) {
CLExecScope_t scope(getCLExecContext());
capture();
fn();
} else {
capture();
fn();
}
}
uint64_t SourceContext::sequenceNumber() {
return currentSeqNr_;
}
bool SourceContext::hasContext() {
return !context_.empty();
}
void SourceContext::copyContext() {
context_ = CLExecContext_t::getCurrent();
}
CLExecContext_t SourceContext::getCLExecContext() {
return context_;
}
cv::UMat& SourceContext::sourceBuffer() {
return captureBuffer_;
}
}
}
}

@ -0,0 +1,16 @@
/*
* time_tracker.cpp
*
* Created on: Mar 22, 2014
* Author: elchaschab
*/
#include "opencv2/v4d/detail/timetracker.hpp"
TimeTracker* TimeTracker::instance_;
TimeTracker::TimeTracker() : enabled_(false) {
}
TimeTracker::~TimeTracker() {
}

@ -0,0 +1,716 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "opencv2/v4d/nvg.hpp"
#include "opencv2/v4d/v4d.hpp"
namespace cv {
namespace v4d {
namespace nvg {
namespace detail {
class NVG;
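// The NVG facade is a thread-local singleton: each rendering thread gets its
// own instance, (re)bound to the active NanoVG context at the start of every
// NanoVG frame via initializeContext().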
thread_local NVG* NVG::nvg_instance_ = nullptr;
void NVG::initializeContext(NVGcontext* ctx) {
if (nvg_instance_ != nullptr)
delete nvg_instance_;
nvg_instance_ = new NVG(ctx);
}
NVG* NVG::getCurrentContext() {
assert(nvg_instance_ != nullptr);
return nvg_instance_;
}
int NVG::createFont(const char* name, const char* filename) {
return nvgCreateFont(getContext(), name, filename);
}
int NVG::createFontMem(const char* name, unsigned char* data, int ndata, int freeData) {
return nvgCreateFontMem(getContext(), name, data, ndata, freeData);
}
int NVG::findFont(const char* name) {
return nvgFindFont(getContext(), name);
}
int NVG::addFallbackFontId(int baseFont, int fallbackFont) {
return nvgAddFallbackFontId(getContext(), baseFont, fallbackFont);
}
int NVG::addFallbackFont(const char* baseFont, const char* fallbackFont) {
return nvgAddFallbackFont(getContext(), baseFont, fallbackFont);
}
void NVG::fontSize(float size) {
nvgFontSize(getContext(), size);
}
void NVG::fontBlur(float blur) {
nvgFontBlur(getContext(), blur);
}
void NVG::textLetterSpacing(float spacing) {
nvgTextLetterSpacing(getContext(), spacing);
}
void NVG::textLineHeight(float lineHeight) {
nvgTextLineHeight(getContext(), lineHeight);
}
void NVG::textAlign(int align) {
nvgTextAlign(getContext(), align);
}
void NVG::fontFaceId(int font) {
nvgFontFaceId(getContext(), font);
}
void NVG::fontFace(const char* font) {
nvgFontFace(getContext(), font);
}
float NVG::text(float x, float y, const char* string, const char* end) {
return nvgText(getContext(), x, y, string, end);
}
void NVG::textBox(float x, float y, float breakRowWidth, const char* string, const char* end) {
nvgTextBox(getContext(), x, y, breakRowWidth, string, end);
}
float NVG::textBounds(float x, float y, const char* string, const char* end, float* bounds) {
return nvgTextBounds(getContext(), x, y, string, end, bounds);
}
void NVG::textBoxBounds(float x, float y, float breakRowWidth, const char* string, const char* end,
float* bounds) {
nvgTextBoxBounds(getContext(), x, y, breakRowWidth, string, end, bounds);
}
int NVG::textGlyphPositions(float x, float y, const char* string, const char* end,
GlyphPosition* positions, int maxPositions) {
return nvgTextGlyphPositions(getContext(), x, y, string, end, positions, maxPositions);
}
void NVG::textMetrics(float* ascender, float* descender, float* lineh) {
nvgTextMetrics(getContext(), ascender, descender, lineh);
}
int NVG::textBreakLines(const char* string, const char* end, float breakRowWidth, TextRow* rows,
int maxRows) {
return nvgTextBreakLines(getContext(), string, end, breakRowWidth, rows, maxRows);
}
void NVG::save() {
nvgSave(getContext());
}
void NVG::restore() {
nvgRestore(getContext());
}
void NVG::reset() {
nvgReset(getContext());
}
//void NVG::shapeAntiAlias(int enabled) {
// nvgShapeAntiAlias(getContext(), enabled);
//}
void NVG::strokeColor(const cv::Scalar& bgra) {
nvgStrokeColor(getContext(), nvgRGBA(bgra[2], bgra[1], bgra[0], bgra[3]));
}
void NVG::strokePaint(Paint paint) {
NVGpaint np = paint.toNVGpaint();
nvgStrokePaint(getContext(), np);
}
void NVG::fillColor(const cv::Scalar& bgra) {
nvgFillColor(getContext(), nvgRGBA(bgra[2], bgra[1], bgra[0], bgra[3]));
}
void NVG::fillPaint(Paint paint) {
NVGpaint np = paint.toNVGpaint();
nvgFillPaint(getContext(), np);
}
void NVG::miterLimit(float limit) {
nvgMiterLimit(getContext(), limit);
}
void NVG::strokeWidth(float size) {
nvgStrokeWidth(getContext(), size);
}
void NVG::lineCap(int cap) {
nvgLineCap(getContext(), cap);
}
void NVG::lineJoin(int join) {
nvgLineJoin(getContext(), join);
}
void NVG::globalAlpha(float alpha) {
nvgGlobalAlpha(getContext(), alpha);
}
void NVG::resetTransform() {
nvgResetTransform(getContext());
}
void NVG::transform(float a, float b, float c, float d, float e, float f) {
nvgTransform(getContext(), a, b, c, d, e, f);
}
void NVG::translate(float x, float y) {
nvgTranslate(getContext(), x, y);
}
void NVG::rotate(float angle) {
nvgRotate(getContext(), angle);
}
void NVG::skewX(float angle) {
nvgSkewX(getContext(), angle);
}
void NVG::skewY(float angle) {
nvgSkewY(getContext(), angle);
}
void NVG::scale(float x, float y) {
nvgScale(getContext(), x, y);
}
void NVG::currentTransform(float* xform) {
nvgCurrentTransform(getContext(), xform);
}
void NVG::transformIdentity(float* dst) {
nvgTransformIdentity(dst);
}
void NVG::transformTranslate(float* dst, float tx, float ty) {
nvgTransformTranslate(dst, tx, ty);
}
void NVG::transformScale(float* dst, float sx, float sy) {
nvgTransformScale(dst, sx, sy);
}
void NVG::transformRotate(float* dst, float a) {
nvgTransformRotate(dst, a);
}
void NVG::transformSkewX(float* dst, float a) {
nvgTransformSkewX(dst, a);
}
void NVG::transformSkewY(float* dst, float a) {
nvgTransformSkewY(dst, a);
}
void NVG::transformMultiply(float* dst, const float* src) {
nvgTransformMultiply(dst, src);
}
void NVG::transformPremultiply(float* dst, const float* src) {
nvgTransformPremultiply(dst, src);
}
int NVG::transformInverse(float* dst, const float* src) {
return nvgTransformInverse(dst, src);
}
void NVG::transformPoint(float* dstx, float* dsty, const float* xform, float srcx, float srcy) {
nvgTransformPoint(dstx, dsty, xform, srcx, srcy);
}
float NVG::degToRad(float deg) {
return nvgDegToRad(deg);
}
float NVG::radToDeg(float rad) {
return nvgRadToDeg(rad);
}
int NVG::createImage(const char* filename, int imageFlags) {
return nvgCreateImage(getContext(), filename, imageFlags);
}
int NVG::createImageMem(int imageFlags, unsigned char* data, int ndata) {
return nvgCreateImageMem(getContext(), imageFlags, data, ndata);
}
int NVG::createImageRGBA(int w, int h, int imageFlags, const unsigned char* data) {
return nvgCreateImageRGBA(getContext(), w, h, imageFlags, data);
}
void NVG::updateImage(int image, const unsigned char* data) {
nvgUpdateImage(getContext(), image, data);
}
void NVG::imageSize(int image, int* w, int* h) {
nvgImageSize(getContext(), image, w, h);
}
void NVG::deleteImage(int image) {
nvgDeleteImage(getContext(), image);
}
void NVG::beginPath() {
nvgBeginPath(getContext());
}
void NVG::moveTo(float x, float y) {
nvgMoveTo(getContext(), x, y);
}
void NVG::lineTo(float x, float y) {
nvgLineTo(getContext(), x, y);
}
void NVG::bezierTo(float c1x, float c1y, float c2x, float c2y, float x, float y) {
nvgBezierTo(getContext(), c1x, c1y, c2x, c2y, x, y);
}
void NVG::quadTo(float cx, float cy, float x, float y) {
nvgQuadTo(getContext(), cx, cy, x, y);
}
void NVG::arcTo(float x1, float y1, float x2, float y2, float radius) {
nvgArcTo(getContext(), x1, y1, x2, y2, radius);
}
void NVG::closePath() {
nvgClosePath(getContext());
}
void NVG::pathWinding(int dir) {
nvgPathWinding(getContext(), dir);
}
void NVG::arc(float cx, float cy, float r, float a0, float a1, int dir) {
nvgArc(getContext(), cx, cy, r, a0, a1, dir);
}
void NVG::rect(float x, float y, float w, float h) {
nvgRect(getContext(), x, y, w, h);
}
void NVG::roundedRect(float x, float y, float w, float h, float r) {
nvgRoundedRect(getContext(), x, y, w, h, r);
}
void NVG::roundedRectVarying(float x, float y, float w, float h, float radTopLeft,
float radTopRight, float radBottomRight, float radBottomLeft) {
nvgRoundedRectVarying(getContext(), x, y, w, h, radTopLeft, radTopRight, radBottomRight,
radBottomLeft);
}
void NVG::ellipse(float cx, float cy, float rx, float ry) {
nvgEllipse(getContext(), cx, cy, rx, ry);
}
void NVG::circle(float cx, float cy, float r) {
nvgCircle(getContext(), cx, cy, r);
}
void NVG::fill() {
nvgFill(getContext());
}
void NVG::stroke() {
nvgStroke(getContext());
}
Paint NVG::linearGradient(float sx, float sy, float ex, float ey, const cv::Scalar& icol,
const cv::Scalar& ocol) {
NVGpaint np = nvgLinearGradient(getContext(), sx, sy, ex, ey,
nvgRGBA(icol[2], icol[1], icol[0], icol[3]),
nvgRGBA(ocol[2], ocol[1], ocol[0], ocol[3]));
return Paint(np);
}
Paint NVG::boxGradient(float x, float y, float w, float h, float r, float f, const cv::Scalar& icol,
const cv::Scalar& ocol) {
NVGpaint np = nvgBoxGradient(getContext(), x, y, w, h, r, f,
nvgRGBA(icol[2], icol[1], icol[0], icol[3]),
nvgRGBA(ocol[2], ocol[1], ocol[0], ocol[3]));
return Paint(np);
}
Paint NVG::radialGradient(float cx, float cy, float inr, float outr, const cv::Scalar& icol,
const cv::Scalar& ocol) {
NVGpaint np = nvgRadialGradient(getContext(), cx, cy, inr, outr,
nvgRGBA(icol[2], icol[1], icol[0], icol[3]),
nvgRGBA(ocol[2], ocol[1], ocol[0], ocol[3]));
return Paint(np);
}
Paint NVG::imagePattern(float ox, float oy, float ex, float ey, float angle, int image,
float alpha) {
NVGpaint np = nvgImagePattern(getContext(), ox, oy, ex, ey, angle, image, alpha);
return Paint(np);
}
void NVG::scissor(float x, float y, float w, float h) {
nvgScissor(getContext(), x, y, w, h);
}
void NVG::intersectScissor(float x, float y, float w, float h) {
nvgIntersectScissor(getContext(), x, y, w, h);
}
void NVG::resetScissor() {
nvgResetScissor(getContext());
}
}
int createFont(const char* name, const char* filename) {
return detail::NVG::getCurrentContext()->createFont(name, filename);
}
int createFontMem(const char* name, unsigned char* data, int ndata, int freeData) {
return detail::NVG::getCurrentContext()->createFontMem(name, data, ndata, freeData);
}
int findFont(const char* name) {
return detail::NVG::getCurrentContext()->findFont(name);
}
int addFallbackFontId(int baseFont, int fallbackFont) {
return detail::NVG::getCurrentContext()->addFallbackFontId(baseFont, fallbackFont);
}
int addFallbackFont(const char* baseFont, const char* fallbackFont) {
return detail::NVG::getCurrentContext()->addFallbackFont(baseFont, fallbackFont);
}
void fontSize(float size) {
detail::NVG::getCurrentContext()->fontSize(size);
}
void fontBlur(float blur) {
detail::NVG::getCurrentContext()->fontBlur(blur);
}
void textLetterSpacing(float spacing) {
detail::NVG::getCurrentContext()->textLetterSpacing(spacing);
}
void textLineHeight(float lineHeight) {
detail::NVG::getCurrentContext()->textLineHeight(lineHeight);
}
void textAlign(int align) {
detail::NVG::getCurrentContext()->textAlign(align);
}
void fontFaceId(int font) {
detail::NVG::getCurrentContext()->fontFaceId(font);
}
void fontFace(const char* font) {
detail::NVG::getCurrentContext()->fontFace(font);
}
float text(float x, float y, const char* string, const char* end) {
return detail::NVG::getCurrentContext()->text(x, y, string, end);
}
void textBox(float x, float y, float breakRowWidth, const char* string, const char* end) {
detail::NVG::getCurrentContext()->textBox(x, y, breakRowWidth, string, end);
}
float textBounds(float x, float y, const char* string, const char* end, float* bounds) {
return detail::NVG::getCurrentContext()->textBounds(x, y, string, end, bounds);
}
void textBoxBounds(float x, float y, float breakRowWidth, const char* string, const char* end,
float* bounds) {
detail::NVG::getCurrentContext()->textBoxBounds(x, y, breakRowWidth, string, end, bounds);
}
int textGlyphPositions(float x, float y, const char* string, const char* end,
GlyphPosition* positions, int maxPositions) {
return detail::NVG::getCurrentContext()->textGlyphPositions(x, y, string, end, positions,
maxPositions);
}
void textMetrics(float* ascender, float* descender, float* lineh) {
detail::NVG::getCurrentContext()->textMetrics(ascender, descender, lineh);
}
int textBreakLines(const char* string, const char* end, float breakRowWidth, TextRow* rows,
int maxRows) {
return detail::NVG::getCurrentContext()->textBreakLines(string, end, breakRowWidth, rows,
maxRows);
}
void save() {
detail::NVG::getCurrentContext()->save();
}
void restore() {
detail::NVG::getCurrentContext()->restore();
}
void reset() {
detail::NVG::getCurrentContext()->reset();
}
//void shapeAntiAlias(int enabled) {
// detail::NVG::getCurrentContext()->shapeAntiAlias(enabled);
//}
void strokeColor(const cv::Scalar& bgra) {
detail::NVG::getCurrentContext()->strokeColor(bgra);
}
void strokePaint(Paint paint) {
detail::NVG::getCurrentContext()->strokePaint(paint);
}
void fillColor(const cv::Scalar& bgra) {
detail::NVG::getCurrentContext()->fillColor(bgra);
}
void fillPaint(Paint paint) {
detail::NVG::getCurrentContext()->fillPaint(paint);
}
void miterLimit(float limit) {
detail::NVG::getCurrentContext()->miterLimit(limit);
}
void strokeWidth(float size) {
detail::NVG::getCurrentContext()->strokeWidth(size);
}
void lineCap(int cap) {
detail::NVG::getCurrentContext()->lineCap(cap);
}
void lineJoin(int join) {
detail::NVG::getCurrentContext()->lineJoin(join);
}
void globalAlpha(float alpha) {
detail::NVG::getCurrentContext()->globalAlpha(alpha);
}
void resetTransform() {
detail::NVG::getCurrentContext()->resetTransform();
}
void transform(float a, float b, float c, float d, float e, float f) {
detail::NVG::getCurrentContext()->transform(a, b, c, d, e, f);
}
void translate(float x, float y) {
detail::NVG::getCurrentContext()->translate(x, y);
}
void rotate(float angle) {
detail::NVG::getCurrentContext()->rotate(angle);
}
void skewX(float angle) {
detail::NVG::getCurrentContext()->skewX(angle);
}
void skewY(float angle) {
detail::NVG::getCurrentContext()->skewY(angle);
}
void scale(float x, float y) {
detail::NVG::getCurrentContext()->scale(x, y);
}
void currentTransform(float* xform) {
detail::NVG::getCurrentContext()->currentTransform(xform);
}
void transformIdentity(float* dst) {
detail::NVG::getCurrentContext()->transformIdentity(dst);
}
void transformTranslate(float* dst, float tx, float ty) {
detail::NVG::getCurrentContext()->transformTranslate(dst, tx, ty);
}
void transformScale(float* dst, float sx, float sy) {
detail::NVG::getCurrentContext()->transformScale(dst, sx, sy);
}
void transformRotate(float* dst, float a) {
detail::NVG::getCurrentContext()->transformRotate(dst, a);
}
void transformSkewX(float* dst, float a) {
detail::NVG::getCurrentContext()->transformSkewX(dst, a);
}
void transformSkewY(float* dst, float a) {
detail::NVG::getCurrentContext()->transformSkewY(dst, a);
}
void transformMultiply(float* dst, const float* src) {
detail::NVG::getCurrentContext()->transformMultiply(dst, src);
}
void transformPremultiply(float* dst, const float* src) {
detail::NVG::getCurrentContext()->transformPremultiply(dst, src);
}
int transformInverse(float* dst, const float* src) {
return detail::NVG::getCurrentContext()->transformInverse(dst, src);
}
void transformPoint(float* dstx, float* dsty, const float* xform, float srcx, float srcy) {
return detail::NVG::getCurrentContext()->transformPoint(dstx, dsty, xform, srcx, srcy);
}
float degToRad(float deg) {
return detail::NVG::getCurrentContext()->degToRad(deg);
}
float radToDeg(float rad) {
return detail::NVG::getCurrentContext()->radToDeg(rad);
}
int createImage(const char* filename, int imageFlags) {
return detail::NVG::getCurrentContext()->createImage(filename, imageFlags);
}
int createImageMem(int imageFlags, unsigned char* data, int ndata) {
return detail::NVG::getCurrentContext()->createImageMem(imageFlags, data, ndata);
}
int createImageRGBA(int w, int h, int imageFlags, const unsigned char* data) {
return detail::NVG::getCurrentContext()->createImageRGBA(w, h, imageFlags, data);
}
void updateImage(int image, const unsigned char* data) {
detail::NVG::getCurrentContext()->updateImage(image, data);
}
void imageSize(int image, int* w, int* h) {
detail::NVG::getCurrentContext()->imageSize(image, w, h);
}
void deleteImage(int image) {
detail::NVG::getCurrentContext()->deleteImage(image);
}
void beginPath() {
detail::NVG::getCurrentContext()->beginPath();
}
void moveTo(float x, float y) {
detail::NVG::getCurrentContext()->moveTo(x, y);
}
void lineTo(float x, float y) {
detail::NVG::getCurrentContext()->lineTo(x, y);
}
void bezierTo(float c1x, float c1y, float c2x, float c2y, float x, float y) {
detail::NVG::getCurrentContext()->bezierTo(c1x, c1y, c2x, c2y, x, y);
}
void quadTo(float cx, float cy, float x, float y) {
detail::NVG::getCurrentContext()->quadTo(cx, cy, x, y);
}
void arcTo(float x1, float y1, float x2, float y2, float radius) {
detail::NVG::getCurrentContext()->arcTo(x1, y1, x2, y2, radius);
}
void closePath() {
detail::NVG::getCurrentContext()->closePath();
}
void pathWinding(int dir) {
detail::NVG::getCurrentContext()->pathWinding(dir);
}
void arc(float cx, float cy, float r, float a0, float a1, int dir) {
detail::NVG::getCurrentContext()->arc(cx, cy, r, a0, a1, dir);
}
void rect(float x, float y, float w, float h) {
detail::NVG::getCurrentContext()->rect(x, y, w, h);
}
void roundedRect(float x, float y, float w, float h, float r) {
detail::NVG::getCurrentContext()->roundedRect(x, y, w, h, r);
}
void roundedRectVarying(float x, float y, float w, float h, float radTopLeft, float radTopRight,
float radBottomRight, float radBottomLeft) {
detail::NVG::getCurrentContext()->roundedRectVarying(x, y, w, h, radTopLeft, radTopRight,
radBottomRight, radBottomLeft);
}
void ellipse(float cx, float cy, float rx, float ry) {
detail::NVG::getCurrentContext()->ellipse(cx, cy, rx, ry);
}
void circle(float cx, float cy, float r) {
detail::NVG::getCurrentContext()->circle(cx, cy, r);
}
void fill() {
detail::NVG::getCurrentContext()->fill();
}
void stroke() {
detail::NVG::getCurrentContext()->stroke();
}
Paint linearGradient(float sx, float sy, float ex, float ey, const cv::Scalar& icol,
const cv::Scalar& ocol) {
return detail::NVG::getCurrentContext()->linearGradient(sx, sy, ex, ey, icol, ocol);
}
Paint boxGradient(float x, float y, float w, float h, float r, float f, const cv::Scalar& icol,
const cv::Scalar& ocol) {
return detail::NVG::getCurrentContext()->boxGradient(x, y, w, h, r, f, icol, ocol);
}
Paint radialGradient(float cx, float cy, float inr, float outr, const cv::Scalar& icol,
const cv::Scalar& ocol) {
return detail::NVG::getCurrentContext()->radialGradient(cx, cy, inr, outr, icol, ocol);
}
Paint imagePattern(float ox, float oy, float ex, float ey, float angle, int image, float alpha) {
return detail::NVG::getCurrentContext()->imagePattern(ox, oy, ex, ey, angle, image, alpha);
}
void scissor(float x, float y, float w, float h) {
detail::NVG::getCurrentContext()->scissor(x, y, w, h);
}
void intersectScissor(float x, float y, float w, float h) {
detail::NVG::getCurrentContext()->intersectScissor(x, y, w, h);
}
void resetScissor() {
detail::NVG::getCurrentContext()->resetScissor();
}
void clear(const cv::Scalar& bgra) {
const float b = bgra[0] / 255.0f;
const float g = bgra[1] / 255.0f;
const float r = bgra[2] / 255.0f;
const float a = bgra[3] / 255.0f;
GL_CHECK(glClearColor(r, g, b, a));
GL_CHECK(glClear(GL_COLOR_BUFFER_BIT | GL_STENCIL_BUFFER_BIT | GL_DEPTH_BUFFER_BIT));
}
}
}
}

@ -0,0 +1,33 @@
#include "../include/opencv2/v4d/detail/resequence.hpp"
#include <opencv2/core/utils/logger.hpp>
namespace cv {
namespace v4d {
void Resequence::finish() {
std::unique_lock<std::mutex> lock(putMtx_);
finish_ = true;
notify();
}
void Resequence::notify() {
cv_.notify_all();
}
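// Blocks the calling thread until its sequence number becomes the next one to
// be emitted (or finish() is called), then claims it by incrementing nextSeq_.
// Waking threads re-check the condition in a loop before going back to sleep.
// A hypothetical usage sketch from N worker threads:
//
//   resequence.waitFor(mySeq);  // blocks until mySeq is next in line
//   writeFrame(mySeq);          // emit work strictly in sequence order
//   resequence.notify();        // wake the other waiters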
void Resequence::waitFor(const uint64_t& seq) {
while(true) {
{
std::unique_lock<std::mutex> lock(putMtx_);
if(finish_)
break;
if(seq == nextSeq_) {
++nextSeq_;
break;
}
}
std::unique_lock<std::mutex> lock(waitMtx_);
cv_.wait(lock);
}
}
} /* namespace v4d */
} /* namespace cv */

@ -0,0 +1,480 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "../include/opencv2/v4d/scene.hpp"
#include <iostream>
#include <assimp/postprocess.h>
#include <opencv2/calib3d.hpp>
#include <functional>
#include <opencv2/core.hpp>
namespace cv {
namespace v4d {
namespace gl {
cv::Vec3f cross(const cv::Vec3f& v1, const cv::Vec3f& v2) {
return cv::Vec3f(v1[1] * v2[2] - v1[2] * v2[1],
v1[2] * v2[0] - v1[0] * v2[2],
v1[0] * v2[1] - v1[1] * v2[0]);
}
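// Manual cleanup for scenes assembled by createAssimpScene() below. Caution:
// aiScene's own destructor also releases attached meshes, so deleting the
// members here and then deleting the scene likely double-frees; presumably
// that is why the call site in Scene::reset() is commented out (see the FIXME
// there).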
void releaseAssimpScene(const aiScene* scene) {
if (scene) {
for (unsigned int i = 0; i < scene->mNumMeshes; ++i) {
delete[] scene->mMeshes[i]->mVertices;
delete[] scene->mMeshes[i]->mNormals;
for (unsigned int j = 0; j < scene->mMeshes[i]->mNumFaces; ++j) {
delete[] scene->mMeshes[i]->mFaces[j].mIndices;
}
delete[] scene->mMeshes[i]->mFaces;
delete scene->mMeshes[i];
}
delete[] scene->mMeshes;
delete scene->mRootNode;
delete scene;
}
}
aiScene* createAssimpScene(std::vector<cv::Point3f>& vertices) {
if (vertices.size() % 3 != 0) {
// Trim the trailing vertices so the count is a whole number of triangles.
vertices.resize(vertices.size() - (vertices.size() % 3));
}
aiScene* scene = new aiScene();
aiMesh* mesh = new aiMesh();
// Set vertices
mesh->mVertices = new aiVector3D[vertices.size()];
for (size_t i = 0; i < vertices.size(); ++i) {
mesh->mVertices[i] = aiVector3D(vertices[i].x, vertices[i].y, vertices[i].z);
}
mesh->mNumVertices = static_cast<unsigned int>(vertices.size());
// Generate normals
mesh->mNormals = new aiVector3D[mesh->mNumVertices];
std::fill(mesh->mNormals, mesh->mNormals + mesh->mNumVertices, aiVector3D(0.0f, 0.0f, 0.0f));
size_t numFaces = vertices.size() / 3; // Assuming each face has 3 vertices
mesh->mFaces = new aiFace[numFaces];
mesh->mNumFaces = static_cast<unsigned int>(numFaces);
for (size_t i = 0; i < numFaces; ++i) {
aiFace& face = mesh->mFaces[i];
face.mIndices = new unsigned int[3]; // Assuming each face has 3 vertices
face.mIndices[0] = static_cast<unsigned int>(3 * i);
face.mIndices[1] = static_cast<unsigned int>(3 * i + 1);
face.mIndices[2] = static_cast<unsigned int>(3 * i + 2);
face.mNumIndices = 3;
// Calculate normal for this face
aiVector3D edge1 = mesh->mVertices[face.mIndices[1]] - mesh->mVertices[face.mIndices[0]];
aiVector3D edge2 = mesh->mVertices[face.mIndices[2]] - mesh->mVertices[face.mIndices[0]];
aiVector3D normal = edge1 ^ edge2; // Cross product
normal.Normalize();
// Assign the computed normal to all three vertices of the triangle
mesh->mNormals[face.mIndices[0]] = normal;
mesh->mNormals[face.mIndices[1]] = normal;
mesh->mNormals[face.mIndices[2]] = normal;
}
// Attach the mesh to the scene
scene->mMeshes = new aiMesh*[1];
scene->mMeshes[0] = mesh;
scene->mNumMeshes = 1;
// Create a root node and attach the mesh
scene->mRootNode = new aiNode();
scene->mRootNode->mMeshes = new unsigned int[1]{0};
scene->mRootNode->mNumMeshes = 1;
return scene;
}
cv::Vec3f rotate3D(const cv::Vec3f& point, const cv::Vec3f& center, const cv::Vec3f& rotation)
{
// Convert rotation vector to rotation matrix
cv::Matx33f rotationMatrix;
cv::Rodrigues(rotation, rotationMatrix);
// Subtract center from point
cv::Vec3f translatedPoint = point - center;
// Rotate the point using the rotation matrix
cv::Vec3f rotatedPoint = rotationMatrix * translatedPoint;
// Translate the point back
rotatedPoint += center;
return rotatedPoint;
}
cv::Matx44f perspective(float fov, float aspect, float zNear, float zFar) {
float tanHalfFovy = tan(fov / 2.0f);
// Standard perspective projection, laid out in the same transposed
// (row-vector) convention as lookAt() and modelView() below.
cv::Matx44f projection = cv::Matx44f::eye();
projection(0, 0) = 1.0f / (aspect * tanHalfFovy);
projection(1, 1) = 1.0f / tanHalfFovy;
projection(2, 2) = -(zFar + zNear) / (zFar - zNear);
projection(2, 3) = -1.0f;
projection(3, 2) = -(2.0f * zFar * zNear) / (zFar - zNear);
projection(3, 3) = 0.0f;
return projection;
}
cv::Matx44f lookAt(cv::Vec3f eye, cv::Vec3f center, cv::Vec3f up) {
cv::Vec3f f = cv::normalize(center - eye);
cv::Vec3f s = cv::normalize(f.cross(up));
cv::Vec3f u = s.cross(f);
cv::Matx44f view = cv::Matx44f::eye();
view(0, 0) = s[0];
view(0, 1) = u[0];
view(0, 2) = -f[0];
view(0, 3) = 0.0f;
view(1, 0) = s[1];
view(1, 1) = u[1];
view(1, 2) = -f[1];
view(1, 3) = 0.0f;
view(2, 0) = s[2];
view(2, 1) = u[2];
view(2, 2) = -f[2];
view(2, 3) = 0.0f;
view(3, 0) = -s.dot(eye);
view(3, 1) = -u.dot(eye);
view(3, 2) = f.dot(eye);
view(3, 3) = 1.0f;
return view;
}
cv::Matx44f modelView(const cv::Vec3f& translation, const cv::Vec3f& rotationVec, const cv::Vec3f& scaleVec) {
cv::Matx44f scaleMat(
scaleVec[0], 0.0, 0.0, 0.0,
0.0, scaleVec[1], 0.0, 0.0,
0.0, 0.0, scaleVec[2], 0.0,
0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotXMat(
1.0, 0.0, 0.0, 0.0,
0.0, cos(rotationVec[0]), -sin(rotationVec[0]), 0.0,
0.0, sin(rotationVec[0]), cos(rotationVec[0]), 0.0,
0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotYMat(
cos(rotationVec[1]), 0.0, sin(rotationVec[1]), 0.0,
0.0, 1.0, 0.0, 0.0,
-sin(rotationVec[1]), 0.0,cos(rotationVec[1]), 0.0,
0.0, 0.0, 0.0, 1.0);
cv::Matx44f rotZMat(
cos(rotationVec[2]), -sin(rotationVec[2]), 0.0, 0.0,
sin(rotationVec[2]), cos(rotationVec[2]), 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0);
cv::Matx44f translateMat(
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
translation[0], translation[1], translation[2], 1.0);
return translateMat * rotXMat * rotYMat * rotZMat * scaleMat;
}
static void calculateBoundingBox(const aiMesh* mesh, cv::Vec3f& min, cv::Vec3f& max) {
for (unsigned int i = 0; i < mesh->mNumVertices; ++i) {
aiVector3D vertex = mesh->mVertices[i];
if (i == 0) {
min = max = cv::Vec3f(vertex.x, vertex.y, vertex.z);
} else {
min[0] = std::min(min[0], vertex.x);
min[1] = std::min(min[1], vertex.y);
min[2] = std::min(min[2], vertex.z);
max[0] = std::max(max[0], vertex.x);
max[1] = std::max(max[1], vertex.y);
max[2] = std::max(max[2], vertex.z);
}
}
}
static void calculateBoundingBoxInfo(const aiMesh* mesh, cv::Vec3f& center, cv::Vec3f& size) {
cv::Vec3f min, max;
calculateBoundingBox(mesh, min, max);
center = (min + max) / 2.0f;
size = max - min;
}
static float calculateAutoScale(const aiMesh* mesh) {
cv::Vec3f center, size;
calculateBoundingBoxInfo(mesh, center, size);
float maxDimension = std::max(size[0], std::max(size[1], size[2]));
return 1.0f / maxDimension;
}
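// Uploads the mesh to freshly created GL buffer objects and draws it, then
// deletes the buffers again. Simple and stateless, but it re-uploads the
// geometry on every call; caching the VAO/VBO/EBO per mesh would be the
// obvious optimization.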
static void drawMesh(aiMesh* mesh, Scene::RenderMode mode) {
// Generate and bind VAO
GLuint VAO;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
// Load vertex data
GLuint VBO;
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, mesh->mNumVertices * 3 * sizeof(float), mesh->mVertices, GL_STATIC_DRAW);
// Specify vertex attributes
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// Load index data, if present
if (mesh->HasFaces()) {
std::vector<unsigned int> indices;
for (unsigned int i = 0; i < mesh->mNumFaces; i++) {
aiFace face = mesh->mFaces[i];
for (unsigned int j = 0; j < face.mNumIndices; j++)
indices.push_back(face.mIndices[j]);
}
if (mode != Scene::RenderMode::DEFAULT) {
// Duplicate vertices for wireframe rendering or point rendering
std::vector<unsigned int> modifiedIndices;
for (size_t i = 0; i < indices.size(); i += 3) {
if (mode == Scene::RenderMode::WIREFRAME) {
// Duplicate vertices for wireframe rendering
modifiedIndices.push_back(indices[i]);
modifiedIndices.push_back(indices[i + 1]);
modifiedIndices.push_back(indices[i + 1]);
modifiedIndices.push_back(indices[i + 2]);
modifiedIndices.push_back(indices[i + 2]);
modifiedIndices.push_back(indices[i]);
}
if (mode == Scene::RenderMode::POINTCLOUD) {
// Duplicate vertices for point rendering
modifiedIndices.push_back(indices[i]);
modifiedIndices.push_back(indices[i + 1]);
modifiedIndices.push_back(indices[i + 2]);
}
}
GLuint EBO;
glGenBuffers(1, &EBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, modifiedIndices.size() * sizeof(unsigned int), &modifiedIndices[0], GL_STATIC_DRAW);
// Draw as lines or points
if (mode == Scene::RenderMode::WIREFRAME) {
glDrawElements(GL_LINES, modifiedIndices.size(), GL_UNSIGNED_INT, 0);
} else if (mode == Scene::RenderMode::POINTCLOUD) {
glDrawElements(GL_POINTS, modifiedIndices.size(), GL_UNSIGNED_INT, 0);
}
// Cleanup
glDeleteBuffers(1, &EBO);
} else {
GLuint EBO;
glGenBuffers(1, &EBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), &indices[0], GL_STATIC_DRAW);
// Draw as triangles
glDrawElements(GL_TRIANGLES, indices.size(), GL_UNSIGNED_INT, 0);
// Cleanup
glDeleteBuffers(1, &EBO);
}
} else {
glDrawArrays(GL_TRIANGLES, 0, mesh->mNumVertices);
}
// Cleanup
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
}
// Function to recursively draw a node and its children
static void drawNode(aiNode* node, const aiScene* scene, Scene::RenderMode mode) {
// Draw all meshes at this node
for(unsigned int i = 0; i < node->mNumMeshes; i++) {
aiMesh* mesh = scene->mMeshes[node->mMeshes[i]];
drawMesh(mesh, mode);
}
// Recurse for all children
for(unsigned int i = 0; i < node->mNumChildren; i++) {
drawNode(node->mChildren[i], scene, mode);
}
}
// Function to draw a model
static void drawModel(const aiScene* scene, Scene::RenderMode mode) {
// Draw the root node
drawNode(scene->mRootNode, scene, mode);
}
static void applyModelView(cv::Mat_<float>& points, const cv::Matx44f& transformation) {
// Ensure the input points matrix has the correct dimensions (3 columns for x, y, z)
CV_Assert(points.cols == 3);
// Convert points to homogeneous coordinates (add a column of ones)
cv::hconcat(points, cv::Mat::ones(points.rows, 1, CV_32F), points);
// Transpose the points matrix for multiplication
cv::Mat pointsTransposed = points.t();
// Apply the transformation
cv::Mat transformedPoints = transformation * pointsTransposed;
// Transpose back to the original orientation
transformedPoints = transformedPoints.t();
// Extract the transformed 3D points (excluding the fourth homogeneous coordinate)
points = transformedPoints(cv::Rect(0, 0, 3, transformedPoints.rows)).clone();
}
static void applyModelView(std::vector<cv::Point3f>& points, const cv::Matx44f& transformation) {
// Ensure the input points vector is not empty
if (points.empty()) {
std::cerr << "Error: Input points vector is empty.\n";
return;
}
// Apply the model-view transformation to each point
for (auto& point : points) {
// Convert the point to a homogeneous 4x1 column vector (the transformation is 4x4)
cv::Mat pointMat = (cv::Mat_<float>(4, 1) << point.x, point.y, point.z, 1.0f);
pointMat = cv::Mat(transformation) * pointMat;
// Update the point with the transformed values, dropping the homogeneous coordinate
point = cv::Point3f(pointMat.at<float>(0, 0), pointMat.at<float>(1, 0), pointMat.at<float>(2, 0));
}
}
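// A minimal usage sketch (assumption: only the overloads above are in scope):
// applying a pure translation to a point set. cv::Matx44f is row-major, so
// the translation lives in the last column.
static void exampleApplyModelView() {
    std::vector<cv::Point3f> pts = {{0.f, 0.f, 0.f}, {1.f, 2.f, 3.f}};
    cv::Matx44f t = cv::Matx44f::eye();
    t(0, 3) = 1.0f; // translate by +1 along x
    applyModelView(pts, t);
    // pts is now {(1,0,0), (2,2,3)}
}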
static void processNode(const aiNode* node, const aiScene* scene, cv::Mat_<float>& allVertices) {
// Process all meshes in the current node
for (unsigned int i = 0; i < node->mNumMeshes; ++i) {
const aiMesh* mesh = scene->mMeshes[node->mMeshes[i]];
// Process all vertices in the current mesh
for (unsigned int j = 0; j < mesh->mNumVertices; ++j) {
aiVector3D aiVertex = mesh->mVertices[j];
cv::Mat_<float> vertex = (cv::Mat_<float>(1, 3) << aiVertex.x, aiVertex.y, aiVertex.z);
allVertices.push_back(vertex);
}
}
// Recursively process child nodes
for (unsigned int i = 0; i < node->mNumChildren; ++i) {
processNode(node->mChildren[i], scene, allVertices);
}
}
static void processNode(const aiNode* node, const aiScene* scene, std::vector<cv::Point3f>& allVertices) {
// Process all meshes in the current node
for (unsigned int i = 0; i < node->mNumMeshes; ++i) {
const aiMesh* mesh = scene->mMeshes[node->mMeshes[i]];
// Process all vertices in the current mesh
for (unsigned int j = 0; j < mesh->mNumVertices; ++j) {
aiVector3D aiVertex = mesh->mVertices[j];
cv::Point3f vertex(aiVertex.x, aiVertex.y, aiVertex.z);
allVertices.push_back(vertex);
}
}
// Recursively process child nodes
for (unsigned int i = 0; i < node->mNumChildren; ++i) {
processNode(node->mChildren[i], scene, allVertices);
}
}
Scene::Scene() {
}
Scene::~Scene() {
}
void Scene::reset() {
if(shaderHandles_[0] > 0)
glDeleteProgram(shaderHandles_[0]);
if(shaderHandles_[1] > 0)
glDeleteShader(shaderHandles_[1]);
if(shaderHandles_[2] > 0)
glDeleteShader(shaderHandles_[2]);
// Zero the handles so a repeated reset() does not delete them twice
shaderHandles_[0] = shaderHandles_[1] = shaderHandles_[2] = 0;
//FIXME how to cleanup a scene?
// releaseAssimpScene(scene_);
}
bool Scene::load(const std::vector<Point3f>& points) {
reset();
std::vector<Point3f> copy = points;
scene_ = createAssimpScene(copy);
cv::v4d::initShader(shaderHandles_, vertexShaderSource_.c_str(), fragmentShaderSource_.c_str(), "fragColor");
calculateBoundingBoxInfo(scene_->mMeshes[0], autoCenter_, size_);
autoScale_ = calculateAutoScale(scene_->mMeshes[0]);
return true;
}
bool Scene::load(const std::string& filename) {
reset();
scene_ = importer_.ReadFile(filename, aiProcess_Triangulate | aiProcess_GenNormals);
if (!scene_ || (scene_->mFlags & AI_SCENE_FLAGS_INCOMPLETE) || !scene_->mRootNode) {
return false;
}
cv::v4d::initShader(shaderHandles_, vertexShaderSource_.c_str(), fragmentShaderSource_.c_str(), "fragColor");
calculateBoundingBoxInfo(scene_->mMeshes[0], autoCenter_, size_);
autoScale_ = calculateAutoScale(scene_->mMeshes[0]);
return true;
}
cv::Mat_<float> Scene::pointCloudAsMat() {
cv::Mat_<float> allVertices;
processNode(scene_->mRootNode, scene_, allVertices);
return allVertices;
}
std::vector<cv::Point3f> Scene::pointCloudAsVector() {
std::vector<cv::Point3f> allVertices;
processNode(scene_->mRootNode, scene_, allVertices);
return allVertices;
}
void Scene::render(const cv::Rect& viewport, const cv::Matx44f& projection, const cv::Matx44f& view, const cv::Matx44f& modelView) {
glViewport(viewport.x, viewport.y, viewport.width, viewport.height);
glEnable(GL_DEPTH_TEST);
glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
// Bind the program first: glUniform* calls affect the currently bound program
glUseProgram(shaderHandles_[0]);
glUniformMatrix4fv(glGetUniformLocation(shaderHandles_[0], "projection"), 1, GL_FALSE, projection.val);
glUniformMatrix4fv(glGetUniformLocation(shaderHandles_[0], "view"), 1, GL_FALSE, view.val);
glUniformMatrix4fv(glGetUniformLocation(shaderHandles_[0], "model"), 1, GL_FALSE, modelView.val);
glUniform3fv(glGetUniformLocation(shaderHandles_[0], "lightPos"), 1, lightPos_.val);
glUniform3fv(glGetUniformLocation(shaderHandles_[0], "viewPos"), 1, viewPos_.val);
glUniform1i(glGetUniformLocation(shaderHandles_[0], "renderMode"), mode_);
drawModel(scene_, mode_);
}
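// A minimal usage sketch (not part of the original file): loading a model and
// rendering it with identity matrices. Requires a current GL context; the
// asset path and viewport are illustrative only.
static void exampleSceneUsage() {
    Scene scene;
    if (scene.load("model.obj")) { // hypothetical asset path
        const cv::Matx44f I = cv::Matx44f::eye();
        scene.render(cv::Rect(0, 0, 1280, 720), I, I, I);
    }
}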
} /* namespace gl */
} /* namespace v4d */
} /* namespace cv */

@@ -0,0 +1,60 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "opencv2/v4d/sink.hpp"
#include <opencv2/core/utils/logger.hpp>
namespace cv {
namespace v4d {
Sink::Sink(std::function<bool(const uint64_t&, const cv::UMat&)> consumer) :
consumer_(consumer) {
}
Sink::Sink() {
}
Sink::~Sink() {
}
bool Sink::isReady() {
std::lock_guard<std::mutex> lock(mtx_);
return consumer_ != nullptr;
}
bool Sink::isOpen() {
std::lock_guard<std::mutex> lock(mtx_);
return open_;
}
void Sink::operator()(const uint64_t& seq, const cv::UMat& frame) {
std::lock_guard<std::mutex> lock(mtx_);
if(seq == nextSeq_) {
uint64_t currentSeq = seq;
cv::UMat currentFrame = frame;
buffer_[seq] = frame;
do {
open_ = consumer_(currentSeq, currentFrame);
++nextSeq_;
buffer_.erase(buffer_.begin());
if(buffer_.empty())
break;
auto pair = (*buffer_.begin());
currentSeq = pair.first;
currentFrame = pair.second;
} while(currentSeq == nextSeq_);
} else {
buffer_[seq] = frame;
}
if(buffer_.size() > 240) {
CV_LOG_WARNING(nullptr, "Buffer overrun in sink.");
buffer_.clear();
}
}
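// Illustrative sketch (assumption: nextSeq_ starts at 0): the sink reorders
// out-of-order frames by sequence number. Passing seq 1 first only buffers
// it; once seq 0 arrives, both are consumed in order.
static void exampleSinkReordering() {
    Sink sink([](const uint64_t& seq, const cv::UMat&) {
        CV_LOG_INFO(nullptr, "consumed seq " << seq);
        return true; // keep the sink open
    });
    cv::UMat frame(1, 1, CV_8UC1);
    sink(1, frame); // buffered, consumer not called yet
    sink(0, frame); // consumes seq 0, then the buffered seq 1
}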
} /* namespace v4d */
} /* namespace cv */

@@ -0,0 +1,45 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include "opencv2/v4d/source.hpp"
namespace cv {
namespace v4d {
Source::Source(std::function<bool(cv::UMat&)> generator, float fps) :
generator_(generator), fps_(fps) {
}
Source::Source() :
open_(false), fps_(0) {
}
Source::~Source() {
}
bool Source::isOpen() {
std::lock_guard<std::mutex> guard(mtx_);
return generator_ && open_;
}
float Source::fps() {
return fps_;
}
std::pair<uint64_t, cv::UMat> Source::operator()() {
std::lock_guard<std::mutex> guard(mtx_);
static thread_local cv::UMat frame;
if(threadSafe_) {
// Serialize across all Source instances, not just this one
static std::mutex globalMtx;
std::unique_lock<std::mutex> lock(globalMtx);
open_ = generator_(frame);
return {count_++, frame};
} else {
open_ = generator_(frame);
return {count_++, frame};
}
}
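// A minimal usage sketch (not part of the original file): a generator-backed
// Source producing solid-gray frames at a nominal 30 FPS.
static void exampleGraySource() {
    Source src([](cv::UMat& frame) {
        frame.create(cv::Size(640, 480), CV_8UC3);
        frame.setTo(cv::Scalar::all(128));
        return true; // keep the source open
    }, 30.0f);
    std::pair<uint64_t, cv::UMat> p = src(); // p.first counts up from 0
}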
} /* namespace v4d */
} /* namespace cv */

@@ -0,0 +1,432 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright Amir Hassan (kallaballa) <amir@viel-zu.org>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/core/ocl.hpp>
#include "../include/opencv2/v4d/v4d.hpp"
#include "../include/opencv2/v4d/util.hpp"
#include <csignal>
#ifdef __GNUG__
#include <cxxabi.h> // needed for abi::__cxa_demangle below
#endif
#include <unistd.h>
#include <chrono>
#include <mutex>
#include <functional>
#include <iostream>
#include <cmath>
using std::cerr;
using std::endl;
namespace cv {
namespace v4d {
namespace detail {
#ifdef __GNUG__
std::string demangle(const char* name) {
int status = -4; // some arbitrary value to eliminate the compiler warning
std::unique_ptr<char, void(*)(void*)> res {
abi::__cxa_demangle(name, NULL, NULL, &status),
std::free
};
return (status==0) ? res.get() : name ;
}
#else
// does nothing if not g++
std::string demangle(const char* name) {
return name;
}
#endif
size_t cnz(const cv::UMat& m) {
cv::UMat grey;
if(m.channels() == 1) {
grey = m;
} else if(m.channels() == 3) {
cvtColor(m, grey, cv::COLOR_BGR2GRAY);
} else if(m.channels() == 4) {
cvtColor(m, grey, cv::COLOR_BGRA2GRAY);
} else {
assert(false);
}
return cv::countNonZero(grey);
}
}
CV_EXPORTS void copy_shared(const cv::UMat& src, cv::UMat& dst) {
if(dst.empty())
dst.create(src.size(), src.type());
// Map dst for writing: the copy must go into dst's buffer
src.copyTo(dst.getMat(cv::ACCESS_WRITE));
}
cv::Scalar colorConvert(const cv::Scalar& src, cv::ColorConversionCodes code) {
cv::Mat tmpIn(1, 1, CV_8UC3);
cv::Mat tmpOut(1, 1, CV_8UC3);
tmpIn.at<cv::Vec3b>(0, 0) = cv::Vec3b(src[0], src[1], src[2]);
cvtColor(tmpIn, tmpOut, code);
const cv::Vec3b& vdst = tmpOut.at<cv::Vec3b>(0, 0);
cv::Scalar dst(vdst[0], vdst[1], vdst[2], src[3]);
return dst;
}
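// Example (illustrative): converting a single BGR color to HSV through the
// 1x1-pixel round trip above. The alpha channel passes through untouched.
static void exampleColorConvert() {
    cv::Scalar bgrRed(0, 0, 255, 255);
    cv::Scalar hsv = colorConvert(bgrRed, cv::COLOR_BGR2HSV);
    // hsv is (0, 255, 255, 255): hue 0, full saturation and value
}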
void gl_check_error(const std::filesystem::path& file, unsigned int line, const char* expression) {
GLenum errorCode = glGetError();
// cerr << "TRACE: " << file.filename() << " (" << line << ") : " << expression << " => code: " << errorCode << endl;
if (errorCode != GL_NO_ERROR) {
std::stringstream ss;
ss << "GL failed in " << file.filename() << " (" << line << ") : " << "\nExpression:\n "
<< expression << "\nError code:\n " << errorCode;
CV_LOG_WARNING(nullptr, ss.str());
}
}
void initShader(unsigned int handles[3], const char* vShader, const char* fShader, const char* outputAttributeName) {
struct Shader {
GLenum type;
const char* source;
} shaders[2] = { { GL_VERTEX_SHADER, vShader }, { GL_FRAGMENT_SHADER, fShader } };
GLuint program = glCreateProgram();
handles[0] = program;
for (int i = 0; i < 2; ++i) {
Shader& s = shaders[i];
GLuint shader = glCreateShader(s.type);
handles[i + 1] = shader;
glShaderSource(shader, 1, (const GLchar**) &s.source, NULL);
glCompileShader(shader);
GLint compiled;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled) {
std::cerr << " failed to compile:" << std::endl;
GLint logSize;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logSize);
char* logMsg = new char[logSize];
glGetShaderInfoLog(shader, logSize, NULL, logMsg);
std::cerr << logMsg << std::endl;
delete[] logMsg;
exit (EXIT_FAILURE);
}
glAttachShader(program, shader);
}
#if !defined(OPENCV_V4D_USE_ES3)
/* Link output */
glBindFragDataLocation(program, 0, outputAttributeName);
#else
CV_UNUSED(outputAttributeName);
#endif
/* link and error check */
glLinkProgram(program);
GLint linked;
glGetProgramiv(program, GL_LINK_STATUS, &linked);
if (!linked) {
std::cerr << "Shader program failed to link" << std::endl;
GLint logSize;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logSize);
char* logMsg = new char[logSize];
glGetProgramInfoLog(program, logSize, NULL, logMsg);
std::cerr << logMsg << std::endl;
delete[] logMsg;
exit (EXIT_FAILURE);
}
}
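// A minimal sketch (assumption: a current desktop GL context and GLSL 3.30):
// the kind of shader pair initShader() accepts. handles[0] receives the
// program, handles[1] and handles[2] the vertex and fragment shader objects.
static void exampleInitShader() {
    const char* vsh =
        "#version 330\n"
        "layout(location = 0) in vec3 pos;\n"
        "void main() { gl_Position = vec4(pos, 1.0); }\n";
    const char* fsh =
        "#version 330\n"
        "out vec4 fragColor;\n"
        "void main() { fragColor = vec4(1.0); }\n";
    unsigned int handles[3];
    initShader(handles, vsh, fsh, "fragColor");
}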
std::string getGlVendor() {
std::ostringstream oss;
oss << reinterpret_cast<const char*>(glGetString(GL_VENDOR));
return oss.str();
}
std::string getGlInfo() {
std::ostringstream oss;
oss << "\n\t" << reinterpret_cast<const char*>(glGetString(GL_VERSION))
<< "\n\t" << reinterpret_cast<const char*>(glGetString(GL_RENDERER)) << endl;
return oss.str();
}
std::string getClInfo() {
std::stringstream ss;
#ifdef HAVE_OPENCL
if(cv::ocl::useOpenCL()) {
std::vector<cv::ocl::PlatformInfo> plt_info;
cv::ocl::getPlatfomsInfo(plt_info);
const cv::ocl::Device& defaultDevice = cv::ocl::Device::getDefault();
cv::ocl::Device current;
ss << endl;
for (const auto& info : plt_info) {
for (int i = 0; i < info.deviceNumber(); ++i) {
ss << "\t";
info.getDevice(current, i);
if (defaultDevice.name() == current.name())
ss << "* ";
else
ss << " ";
ss << info.version() << " = " << info.name() << endl;
ss << "\t\t GL sharing: "
<< (current.isExtensionSupported("cl_khr_gl_sharing") ? "true" : "false")
<< endl;
ss << "\t\t VAAPI media sharing: "
<< (current.isExtensionSupported("cl_intel_va_api_media_sharing") ?
"true" : "false") << endl;
}
}
}
#endif
return ss.str();
}
bool isIntelVaSupported() {
#ifdef HAVE_OPENCL
if(cv::ocl::useOpenCL()) {
try {
std::vector<cv::ocl::PlatformInfo> plt_info;
cv::ocl::getPlatfomsInfo(plt_info);
cv::ocl::Device current;
for (const auto& info : plt_info) {
for (int i = 0; i < info.deviceNumber(); ++i) {
info.getDevice(current, i);
// Report support if any device exposes the extension
if (current.isExtensionSupported("cl_intel_va_api_media_sharing"))
return true;
}
}
} catch (std::exception& ex) {
cerr << "Intel VAAPI query failed: " << ex.what() << endl;
} catch (...) {
cerr << "Intel VAAPI query failed" << endl;
}
}
#endif
return false;
}
bool isClGlSharingSupported() {
#ifdef HAVE_OPENCL
if(cv::ocl::useOpenCL()) {
try {
std::vector<cv::ocl::PlatformInfo> plt_info;
cv::ocl::getPlatfomsInfo(plt_info);
cv::ocl::Device current;
for (const auto& info : plt_info) {
for (int i = 0; i < info.deviceNumber(); ++i) {
info.getDevice(current, i);
// Report support if any device exposes the extension
if (current.isExtensionSupported("cl_khr_gl_sharing"))
return true;
}
}
} catch (std::exception& ex) {
cerr << "CL-GL sharing query failed: " << ex.what() << endl;
} catch (...) {
cerr << "CL-GL sharing query failed with unknown error." << endl;
}
}
#endif
return false;
}
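// Illustrative: gating an acceleration path on the capability queries above.
static void exampleCapabilityGate() {
    if (isIntelVaSupported())
        CV_LOG_INFO(nullptr, "VAAPI media sharing available");
    if (isClGlSharingSupported())
        CV_LOG_INFO(nullptr, "CL-GL sharing available");
}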
static std::mutex finish_mtx;
/*!
* Internal variable that signals that finishing all operation is requested
*/
static bool finish_requested = false;
/*!
* Internal variable that tracks if signal handlers have already been installed
*/
static bool signal_handlers_installed = false;
/*!
* Signal handler callback that signals the application to terminate.
* @param ignore We ignore the signal number
*/
static void request_finish(int ignore) {
std::lock_guard guard(finish_mtx);
CV_UNUSED(ignore);
finish_requested = true;
}
/*!
* Installs #request_finish() as signal handler for SIGINT and SIGTERM
*/
static void install_signal_handlers() {
signal(SIGINT, request_finish);
signal(SIGTERM, request_finish);
// Mark installation so keepRunning() doesn't reinstall on every call
signal_handlers_installed = true;
}
bool keepRunning() {
std::lock_guard guard(finish_mtx);
if (!signal_handlers_installed) {
install_signal_handlers();
}
return !finish_requested;
}
void requestFinish() {
request_finish(0);
}
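// Typical use (illustrative): a processing loop that exits cleanly on
// SIGINT/SIGTERM, because keepRunning() lazily installs the handlers.
static void exampleMainLoop() {
    while (keepRunning()) {
        // ... produce/consume frames; requestFinish() also ends the loop
    }
}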
cv::Ptr<Sink> makeVaSink(cv::Ptr<V4D> window, const string& outputFilename, const int fourcc, const float fps,
const cv::Size& frameSize, const int vaDeviceIndex) {
cv::Ptr<cv::VideoWriter> writer = new cv::VideoWriter(outputFilename, cv::CAP_FFMPEG,
fourcc, fps, frameSize, {
cv::VIDEOWRITER_PROP_HW_DEVICE, vaDeviceIndex,
cv::VIDEOWRITER_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_VAAPI,
cv::VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL, 1 });
if(isIntelVaSupported())
window->sourceCtx()->copyContext();
cerr << "Using a VA sink" << endl;
if(writer->isOpened()) {
return new Sink([=](const uint64_t& seq, const cv::UMat& frame) {
CV_UNUSED(seq);
CLExecScope_t scope(window->sourceCtx()->getCLExecContext());
//FIXME cache it
cv::UMat converted;
cv::resize(frame, converted, frameSize);
cvtColor(converted, converted, cv::COLOR_BGRA2RGB);
(*writer) << converted;
return writer->isOpened();
});
} else {
return new Sink();
}
}
cv::Ptr<Source> makeVaSource(cv::Ptr<V4D> window, const string& inputFilename, const int vaDeviceIndex) {
cv::Ptr<cv::VideoCapture> capture = new cv::VideoCapture(inputFilename, cv::CAP_FFMPEG, {
cv::CAP_PROP_HW_DEVICE, vaDeviceIndex, cv::CAP_PROP_HW_ACCELERATION,
cv::VIDEO_ACCELERATION_VAAPI, cv::CAP_PROP_HW_ACCELERATION_USE_OPENCL, 1 });
float fps = capture->get(cv::CAP_PROP_FPS);
cerr << "Using a VA source" << endl;
if(isIntelVaSupported())
window->sourceCtx()->copyContext();
return new Source([=](cv::UMat& frame) {
CLExecScope_t scope(window->sourceCtx()->getCLExecContext());
(*capture) >> frame;
return !frame.empty();
}, fps);
}
static cv::Ptr<Sink> makeAnyHWSink(const string& outputFilename, const int fourcc, const float fps,
const cv::Size& frameSize) {
cv::Ptr<cv::VideoWriter> writer = new cv::VideoWriter(outputFilename, cv::CAP_FFMPEG,
fourcc, fps, frameSize, { cv::VIDEOWRITER_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_ANY });
if(writer->isOpened()) {
return new Sink([=](const uint64_t& seq, const cv::UMat& frame) {
CV_UNUSED(seq);
cv::UMat converted;
cv::UMat context_corrected;
frame.copyTo(context_corrected);
cv::resize(context_corrected, converted, frameSize);
cvtColor(converted, converted, cv::COLOR_BGRA2RGB);
(*writer) << converted;
return writer->isOpened();
});
} else {
return new Sink();
}
}
static cv::Ptr<Source> makeAnyHWSource(const string& inputFilename) {
cv::Ptr<cv::VideoCapture> capture = new cv::VideoCapture(inputFilename, cv::CAP_FFMPEG, {
cv::CAP_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_ANY });
float fps = capture->get(cv::CAP_PROP_FPS);
return new Source([=](cv::UMat& frame) {
(*capture) >> frame;
return !frame.empty();
}, fps);
}
cv::Ptr<Sink> makeWriterSink(cv::Ptr<V4D> window, const string& outputFilename, const float fps, const cv::Size& frameSize) {
int fourcc = 0;
//FIXME find a cleverer way to guess a decent codec
if(getGlVendor() == "NVIDIA Corporation") {
fourcc = cv::VideoWriter::fourcc('H', '2', '6', '4');
} else {
fourcc = cv::VideoWriter::fourcc('V', 'P', '9', '0');
}
return makeWriterSink(window, outputFilename, fps, frameSize, fourcc);
}
cv::Ptr<Sink> makeWriterSink(cv::Ptr<V4D> window, const string& outputFilename, const float fps,
const cv::Size& frameSize, int fourcc) {
if (isIntelVaSupported()) {
return makeVaSink(window, outputFilename, fourcc, fps, frameSize, 0);
} else {
try {
return makeAnyHWSink(outputFilename, fourcc, fps, frameSize);
} catch(...) {
cerr << "Failed creating hardware source" << endl;
}
}
cv::Ptr<cv::VideoWriter> writer = new cv::VideoWriter(outputFilename, cv::CAP_FFMPEG,
fourcc, fps, frameSize);
if(writer->isOpened()) {
return new Sink([=](const uint64_t& seq, const cv::UMat& frame) {
CV_UNUSED(seq);
cv::UMat converted;
cv::resize(frame, converted, frameSize);
cvtColor(converted, converted, cv::COLOR_BGRA2RGB);
(*writer) << converted;
return writer->isOpened();
});
} else {
return new Sink();
}
}
cv::Ptr<Source> makeCaptureSource(cv::Ptr<V4D> window, const string& inputFilename) {
if (isIntelVaSupported()) {
return makeVaSource(window, inputFilename, 0);
} else {
try {
return makeAnyHWSource(inputFilename);
} catch(...) {
cerr << "Failed creating hardware source" << endl;
}
}
cv::Ptr<cv::VideoCapture> capture = new cv::VideoCapture(inputFilename, cv::CAP_FFMPEG);
float fps = capture->get(cv::CAP_PROP_FPS);
return new Source([=](cv::UMat& frame) {
(*capture) >> frame;
return !frame.empty();
}, fps);
}
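// A minimal wiring sketch (assumption: `window` is a valid cv::Ptr<V4D>; the
// file names are placeholders). Shows how the factory functions above are
// meant to be combined; attaching the source and sink to the window is left
// to the V4D API.
static void exampleWireSourceAndSink(cv::Ptr<V4D> window) {
    cv::Ptr<Source> src = makeCaptureSource(window, "in.mp4");
    cv::Ptr<Sink> sink = makeWriterSink(window, "out.mkv", src->fps(),
            cv::Size(1280, 720));
    CV_UNUSED(sink);
}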
void resizePreserveAspectRatio(const cv::UMat& src, cv::UMat& output, const cv::Size& dstSize, const cv::Scalar& bgcolor) {
cv::UMat tmp;
double hf = double(dstSize.height) / src.size().height;
double wf = double(dstSize.width) / src.size().width;
// Uniform scale that fits src into dstSize (both ratios are positive)
double f = std::min(hf, wf);
cv::resize(src, tmp, cv::Size(), f, f);
int top = (dstSize.height - tmp.rows) / 2;
int down = (dstSize.height - tmp.rows + 1) / 2;
int left = (dstSize.width - tmp.cols) / 2;
int right = (dstSize.width - tmp.cols + 1) / 2;
cv::copyMakeBorder(tmp, output, top, down, left, right, cv::BORDER_CONSTANT, bgcolor);
}
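// Worked example (illustrative): letterboxing 1920x1080 into 640x640.
// f = min(640/1080, 640/1920) = 1/3, so the frame becomes 640x360 and is
// centered with 140-pixel bars on top and bottom.
static void exampleLetterbox() {
    cv::UMat src(1080, 1920, CV_8UC3, cv::Scalar::all(255));
    cv::UMat dst;
    resizePreserveAspectRatio(src, dst, cv::Size(640, 640), cv::Scalar::all(0));
    // dst.size() == cv::Size(640, 640)
}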
}
}
