Emscripten adaptations and ImGui porting

pull/3471/head
kallaballa 2 years ago
parent e93aca25d7
commit 68a74d8a04
Changed files (lines changed):
  1. modules/v4d/CMakeLists.txt (16)
  2. modules/v4d/include/opencv2/v4d/detail/imguicontext.hpp (1)
  3. modules/v4d/include/opencv2/v4d/util.hpp (10)
  4. modules/v4d/include/opencv2/v4d/v4d.hpp (6)
  5. modules/v4d/samples/beauty-demo.cpp (6)
  6. modules/v4d/samples/cube-demo.cpp (12)
  7. modules/v4d/samples/custom_source_and_sink.cpp (2)
  8. modules/v4d/samples/font-demo.cpp (2)
  9. modules/v4d/samples/many_cubes-demo.cpp (9)
  10. modules/v4d/samples/nanovg-demo.cpp (2)
  11. modules/v4d/samples/optflow-demo.cpp (194)
  12. modules/v4d/samples/pedestrian-demo.cpp (3)
  13. modules/v4d/samples/shader-demo.cpp (3)
  14. modules/v4d/samples/video-demo.cpp (7)
  15. modules/v4d/samples/video_editing.cpp (3)
  16. modules/v4d/src/detail/framebuffercontext.cpp (40)
  17. modules/v4d/src/detail/imguicontext.cpp (36)
  18. modules/v4d/src/detail/nanovgcontext.cpp (6)
  19. modules/v4d/src/util.cpp (22)
  20. modules/v4d/src/v4d.cpp (5)

@ -9,7 +9,7 @@ endif()
set(the_description "V4D Visualization Module") set(the_description "V4D Visualization Module")
set(OPENCV_MODULE_IS_PART_OF_WORLD OFF) set(OPENCV_MODULE_IS_PART_OF_WORLD OFF)
macro(add_emscripten_sample sample source assets) macro(add_emscripten_sample sample source models)
ocv_add_executable(${sample} ${source}) ocv_add_executable(${sample} ${source})
ocv_target_include_modules(${sample} opencv_core opencv_imgproc opencv_videoio opencv_video opencv_imgcodecs opencv_v4d opencv_face opencv_tracking opencv_objdetect opencv_stitching opencv_optflow opencv_imgcodecs opencv_features2d opencv_dnn opencv_flann) ocv_target_include_modules(${sample} opencv_core opencv_imgproc opencv_videoio opencv_video opencv_imgcodecs opencv_v4d opencv_face opencv_tracking opencv_objdetect opencv_stitching opencv_optflow opencv_imgcodecs opencv_features2d opencv_dnn opencv_flann)
ocv_target_link_libraries(${sample} opencv_v4d opencv_core opencv_imgproc opencv_videoio opencv_video opencv_imgcodecs opencv_v4d opencv_face opencv_tracking opencv_objdetect opencv_stitching opencv_optflow opencv_imgcodecs opencv_features2d opencv_dnn opencv_flann) ocv_target_link_libraries(${sample} opencv_v4d opencv_core opencv_imgproc opencv_videoio opencv_video opencv_imgcodecs opencv_v4d opencv_face opencv_tracking opencv_objdetect opencv_stitching opencv_optflow opencv_imgcodecs opencv_features2d opencv_dnn opencv_flann)
@ -22,8 +22,8 @@ macro(add_emscripten_sample sample source assets)
COMMAND ${CMAKE_COMMAND} -E copy COMMAND ${CMAKE_COMMAND} -E copy
"${CMAKE_CURRENT_LIST_DIR}/samples/${sample}.html" "${CMAKE_CURRENT_LIST_DIR}/samples/${sample}.html"
"${CMAKE_CURRENT_BINARY_DIR}/../../bin/${sample}.html") "${CMAKE_CURRENT_BINARY_DIR}/../../bin/${sample}.html")
if(${assets}) if(${models})
target_link_options(${sample} PRIVATE --preload-file "${CMAKE_BINARY_DIR}/assets") target_link_options(${sample} PRIVATE --preload-file "${CMAKE_BINARY_DIR}/assets/models/")
endif() endif()
endmacro() endmacro()
@ -36,7 +36,7 @@ endmacro()
if(EMSCRIPTEN) if(EMSCRIPTEN)
# set(EM_LINKER_FLAGS "-sENVIRONMENT=web,worker -sWASM_BIGINT=1 -sOFFSCREENCANVAS_SUPPORT -sOFFSCREENCANVASES_TO_PTHREAD=#offscreenCanvas -sEXPORTED_FUNCTIONS=_main,_v4dInitCapture -sEXPORTED_RUNTIME_METHODS=ccall -sPROXY_TO_PTHREAD=1 --use-preload-plugins --preload-file '${CMAKE_CURRENT_BINARY_DIR}/doc/lena.png' -sINITIAL_MEMORY=128MB -sALLOW_MEMORY_GROWTH=1 -sUSE_GLFW=3 -sMIN_WEBGL_VERSION=2 -sMAX_WEBGL_VERSION=2 --bind") # set(EM_LINKER_FLAGS "-sENVIRONMENT=web,worker -sWASM_BIGINT=1 -sOFFSCREENCANVAS_SUPPORT -sOFFSCREENCANVASES_TO_PTHREAD=#offscreenCanvas -sEXPORTED_FUNCTIONS=_main,_v4dInitCapture -sEXPORTED_RUNTIME_METHODS=ccall -sPROXY_TO_PTHREAD=1 --use-preload-plugins --preload-file '${CMAKE_CURRENT_BINARY_DIR}/doc/lena.png' -sINITIAL_MEMORY=128MB -sALLOW_MEMORY_GROWTH=1 -sUSE_GLFW=3 -sMIN_WEBGL_VERSION=2 -sMAX_WEBGL_VERSION=2 --bind")
set(EM_LINKER_FLAGS "-sENVIRONMENT=web,worker -sWASM_BIGINT=1 -sOFFSCREENCANVAS_SUPPORT -sOFFSCREENCANVASES_TO_PTHREAD=#v4dOffscreenCanvas -sEXPORTED_FUNCTIONS=_main,_v4dInitCapture -sEXPORTED_RUNTIME_METHODS=ccall -sPROXY_TO_PTHREAD=1 --use-preload-plugins --preload-file '${CMAKE_CURRENT_BINARY_DIR}/doc/lena.png' -sINITIAL_MEMORY=128MB -sALLOW_MEMORY_GROWTH=1 -sUSE_GLFW=3 -sMIN_WEBGL_VERSION=2 -sMAX_WEBGL_VERSION=2 --bind") set(EM_LINKER_FLAGS "-sENVIRONMENT=web,worker -sWASM_BIGINT=1 -sOFFSCREENCANVAS_SUPPORT -sOFFSCREENCANVASES_TO_PTHREAD=#v4dOffscreenCanvas -sEXPORTED_FUNCTIONS=_main,_v4dInitCapture -sEXPORTED_RUNTIME_METHODS=ccall -sPROXY_TO_PTHREAD=1 --use-preload-plugins --preload-file \"${CMAKE_BINARY_DIR}/assets/fonts/\" --preload-file \"${CMAKE_CURRENT_BINARY_DIR}/doc/lena.png\" -sINITIAL_MEMORY=128MB -sALLOW_MEMORY_GROWTH=1 -sUSE_GLFW=3 -sMIN_WEBGL_VERSION=2 -sMAX_WEBGL_VERSION=2 --bind")
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${EM_LINKER_FLAGS}") set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${EM_LINKER_FLAGS}")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${EM_LINKER_FLAGS}") set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${EM_LINKER_FLAGS}")
@ -44,16 +44,16 @@ if(EMSCRIPTEN)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug") if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
set(EM_DEBUG_FLAGS "-s GL_DEBUG=1 -sSTACK_OVERFLOW_CHECK=2 -sASSERTIONS=2 -sNO_DISABLE_EXCEPTION_CATCHING -sEXCEPTION_DEBUG=1") set(EM_DEBUG_FLAGS "")
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${EM_DEBUG_FLAGS}") set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${EM_DEBUG_FLAGS}")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${EM_DEBUG_FLAGS}") set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${EM_DEBUG_FLAGS}")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${EM_DEBUG_FLAGS}") set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${EM_DEBUG_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
endif() endif()
else() else()
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fsanitize=undefined") set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS}")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fsanitize=undefined") set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS}")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined") set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
endif() endif()
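
Note (not part of the commit): the --preload-file options above bundle assets/fonts/ and assets/models/ into Emscripten's in-memory filesystem, so the samples keep using ordinary relative paths at runtime. A minimal sketch, assuming the model file name used by the samples below:

    #include <opencv2/dnn.hpp>

    // The preloaded directory shows up in MEMFS under the same relative path,
    // so this line behaves the same in the native and the wasm build.
    cv::dnn::Net net = cv::dnn::readNet("assets/models/face_detection_yunet_2023mar.onnx");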

@ -26,6 +26,7 @@ public:
CV_EXPORTS ImGuiContextImpl(FrameBufferContext& fbContext); CV_EXPORTS ImGuiContextImpl(FrameBufferContext& fbContext);
CV_EXPORTS void build(std::function<void(ImGuiContext*)> fn); CV_EXPORTS void build(std::function<void(ImGuiContext*)> fn);
protected: protected:
CV_EXPORTS void makeCurrent();
CV_EXPORTS void render(); CV_EXPORTS void render();
}; };
} }

@ -120,6 +120,12 @@ CV_EXPORTS void gl_check_error(const std::filesystem::path& file, unsigned int l
expr; expr;
#endif #endif
CV_EXPORTS unsigned int initShader(const char* vShader, const char* fShader, const char* outputAttributeName); CV_EXPORTS unsigned int initShader(const char* vShader, const char* fShader, const char* outputAttributeName);
/*!
* Returns the OpenGL vendor string
* @return a string object with the OpenGL vendor information
*/
CV_EXPORTS std::string getGlVendor();
/*! /*!
* Returns the OpenGL Version information. * Returns the OpenGL Version information.
* @return a string object with the OpenGL version information * @return a string object with the OpenGL version information
@ -178,8 +184,10 @@ CV_EXPORTS Source makeVaSource(const string& inputFilename, const int vaDeviceIn
* @param frameSize The frame size of the target video. * @param frameSize The frame size of the target video.
* @return A (optionally VAAPI enabled) VideoWriter sink object. * @return A (optionally VAAPI enabled) VideoWriter sink object.
*/ */
CV_EXPORTS Sink makeWriterSink(const string& outputFilename, const int fourcc, const float fps, CV_EXPORTS Sink makeWriterSink(const string& outputFilename, const float fps,
const cv::Size& frameSize); const cv::Size& frameSize);
CV_EXPORTS Sink makeWriterSink(const string& outputFilename, const float fps,
const cv::Size& frameSize, const int fourcc);
/*! /*!
* Creates a VideoCapture source object to use in conjunction with #V4D::setSource(). * Creates a VideoCapture source object to use in conjunction with #V4D::setSource().
* This function automatically determines if Intel VAAPI is available and enables it if so. * This function automatically determines if Intel VAAPI is available and enables it if so.
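
Note (not part of the commit): the reordered signature leaves two writer-sink overloads, one that lets the implementation guess a codec and one that pins a FOURCC explicitly. A usage sketch against the declarations above (file name, fps and size are placeholders):

    #include <opencv2/v4d/util.hpp>
    #include <opencv2/videoio.hpp>

    int main() {
        using namespace cv::v4d;
        const cv::Size frameSize(1280, 720);

        // Let the implementation guess (util.cpp below picks H264 on NVIDIA, VP9 otherwise).
        Sink autoSink = makeWriterSink("out.mkv", 60.0f, frameSize);

        // Or force a codec through the explicit-FOURCC overload.
        Sink vp9Sink = makeWriterSink("out.mkv", 60.0f, frameSize,
                                      cv::VideoWriter::fourcc('V', 'P', '9', '0'));
        return 0;
    }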

@ -154,9 +154,7 @@ public:
*/ */
CV_EXPORTS void nvg(std::function<void(const cv::Size&)> fn); CV_EXPORTS void nvg(std::function<void(const cv::Size&)> fn);
CV_EXPORTS void nvg(std::function<void()> fn); CV_EXPORTS void nvg(std::function<void()> fn);
CV_EXPORTS void imgui(std::function<void(const cv::Size&)> fn);
CV_EXPORTS void imgui(std::function<void(ImGuiContext* ctx)> fn); CV_EXPORTS void imgui(std::function<void(ImGuiContext* ctx)> fn);
CV_EXPORTS void imgui(std::function<void(cv::Ptr<V4D>)> fn);
/*! /*!
* Copy the framebuffer contents to an OutputArray. * Copy the framebuffer contents to an OutputArray.
@ -177,12 +175,12 @@ public:
/*! /*!
* Called to feed an image directly to the framebuffer * Called to feed an image directly to the framebuffer
*/ */
CV_EXPORTS void feed(const cv::UMat& in); CV_EXPORTS void feed(cv::InputArray in);
/*! /*!
* Fetches a copy of frambuffer * Fetches a copy of frambuffer
* @return a copy of the framebuffer * @return a copy of the framebuffer
*/ */
CV_EXPORTS cv::UMat fetch(); CV_EXPORTS cv::_InputArray fetch();
/*! /*!
* Called to capture to the framebuffer from a #cv::viz::Source object provided via #V4D::setSource(). * Called to capture to the framebuffer from a #cv::viz::Source object provided via #V4D::setSource().
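
Note (not part of the commit): with the nanogui-era overloads removed, imgui() takes a single ImGuiContext* callback and feed()/fetch() go through InputArray. A sketch of that surface, assuming <imgui.h> is reachable from the sample and that the callback runs once per rendered frame:

    #include <opencv2/v4d/v4d.hpp>
    #include <imgui.h>

    int main() {
        using namespace cv::v4d;
        cv::Ptr<V4D> window = V4D::make(1280, 720, "Example", false, false, 0);

        // Register the GUI once; the render pass hands the context pointer back in.
        window->imgui([](ImGuiContext* ctx) {
            ImGui::SetCurrentContext(ctx);
            ImGui::Begin("Stats");
            ImGui::Text("Hello from Dear ImGui");
            ImGui::End();
        });

        // Feed an image straight into the framebuffer (normally done inside the run loop).
        cv::UMat frame(720, 1280, CV_8UC4, cv::Scalar::all(127));
        window->feed(frame);
        window->display();
        return 0;
    }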

@ -201,9 +201,9 @@ static bool iteration(cv::Ptr<V4D> window) {
try { try {
//Face detector //Face detector
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
static cv::Ptr<cv::FaceDetectorYN> detector = cv::FaceDetectorYN::create("assets/face_detection_yunet_2023mar.onnx", "", cv::Size(DOWNSIZE_WIDTH, DOWNSIZE_HEIGHT), 0.9, 0.3, 5000, cv::dnn::DNN_BACKEND_OPENCV, cv::dnn::DNN_TARGET_OPENCL); static cv::Ptr<cv::FaceDetectorYN> detector = cv::FaceDetectorYN::create("assets/models/face_detection_yunet_2023mar.onnx", "", cv::Size(DOWNSIZE_WIDTH, DOWNSIZE_HEIGHT), 0.9, 0.3, 5000, cv::dnn::DNN_BACKEND_OPENCV, cv::dnn::DNN_TARGET_OPENCL);
#else #else
static cv::Ptr<cv::FaceDetectorYN> detector = cv::FaceDetectorYN::create("assets/face_detection_yunet_2023mar.onnx", "", cv::Size(DOWNSIZE_WIDTH, DOWNSIZE_HEIGHT), 0.9, 0.3, 5000, cv::dnn::DNN_BACKEND_OPENCV, cv::dnn::DNN_TARGET_CPU); static cv::Ptr<cv::FaceDetectorYN> detector = cv::FaceDetectorYN::create("assets/models/face_detection_yunet_2023mar.onnx", "", cv::Size(DOWNSIZE_WIDTH, DOWNSIZE_HEIGHT), 0.9, 0.3, 5000, cv::dnn::DNN_BACKEND_OPENCV, cv::dnn::DNN_TARGET_CPU);
#endif #endif
//BGR //BGR
static cv::UMat input, down, blurred, contrast, faceOval, eyesAndLips, skin; static cv::UMat input, down, blurred, contrast, faceOval, eyesAndLips, skin;
@ -373,7 +373,7 @@ int main() {
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
Source src = makeCaptureSource(argv[1]); Source src = makeCaptureSource(argv[1]);
window->setSource(src); window->setSource(src);
Sink sink = makeWriterSink(OUTPUT_FILENAME, cv::VideoWriter::fourcc('V', 'P', '9', '0'), src.fps(), cv::Size(WIDTH, HEIGHT)); Sink sink = makeWriterSink(OUTPUT_FILENAME, src.fps(), cv::Size(WIDTH, HEIGHT));
window->setSink(sink); window->setSink(sink);
#else #else
Source src = makeCaptureSource(WIDTH, HEIGHT, window); Source src = makeCaptureSource(WIDTH, HEIGHT, window);

@ -20,7 +20,7 @@ constexpr double FPS = 60;
constexpr const char* OUTPUT_FILENAME = "cube-demo.mkv"; constexpr const char* OUTPUT_FILENAME = "cube-demo.mkv";
#endif #endif
const unsigned long DIAG = hypot(double(WIDTH), double(HEIGHT)); const unsigned long DIAG = hypot(double(WIDTH), double(HEIGHT));
const int GLOW_KERNEL_SIZE = std::max(int(DIAG / 138 % 2 == 0 ? DIAG / 138 + 1 : DIAG / 138), 1); const int glow_kernel_size = std::max(int(DIAG / 138 % 2 == 0 ? DIAG / 138 + 1 : DIAG / 138), 1);
using std::cerr; using std::cerr;
using std::endl; using std::endl;
@ -232,7 +232,7 @@ static bool iteration(cv::Ptr<V4D> window) {
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
//Aquire the frame buffer for use by OpenCV //Aquire the frame buffer for use by OpenCV
window->fb([&](cv::UMat& framebuffer) { window->fb([&](cv::UMat& framebuffer) {
glow_effect(framebuffer, framebuffer, GLOW_KERNEL_SIZE); glow_effect(framebuffer, framebuffer, glow_kernel_size);
}); });
#endif #endif
@ -244,15 +244,11 @@ static bool iteration(cv::Ptr<V4D> window) {
int main() { int main() {
cv::Ptr<V4D> window = V4D::make(WIDTH, HEIGHT, "Cube Demo", false, false, 0); cv::Ptr<V4D> window = V4D::make(WIDTH, HEIGHT, "Cube Demo", false, false, 0);
window->printSystemInfo(); window->printSystemInfo();
cerr << "\n\t" << reinterpret_cast<const char*>(glGetString(GL_VERSION))
<< "\n\t" << reinterpret_cast<const char*>(glGetString(GL_RENDERER)) << endl;
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
//Creates a writer sink using the VP9 codec (which might be hardware accelerated) //Creates a writer sink (which might be hardware accelerated)
Sink sink = makeWriterSink(OUTPUT_FILENAME, cv::VideoWriter::fourcc('V', 'P', '9', '0'), FPS, Sink sink = makeWriterSink(OUTPUT_FILENAME, FPS, cv::Size(WIDTH, HEIGHT));
cv::Size(WIDTH, HEIGHT));
window->setSink(sink); window->setSink(sink);
#endif #endif
cerr << glGetError() << endl;
window->gl(init_scene); window->gl(init_scene);
window->run(iteration); window->run(iteration);
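
Note (not part of the commit): the raw glGetString()/glGetError() prints were dropped from main(); printSystemInfo() already reports the GL strings, and ad-hoc GL queries are better issued inside a context scope such as V4D::gl(). A sketch of the latter, should the per-window values still be wanted:

    window->gl([]() {
        // Runs with the window's GL context current.
        std::cerr << reinterpret_cast<const char*>(glGetString(GL_VERSION)) << "\n"
                  << reinterpret_cast<const char*>(glGetString(GL_RENDERER)) << std::endl;
    });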

@ -15,7 +15,7 @@ int main() {
static long cnt = 0; static long cnt = 0;
//The source is responsible for initializing the frame.. //The source is responsible for initializing the frame..
if(frame.empty()) if(frame.empty())
frame.create(Size(1280, 720), CV_8UC3); frame.create(Size(960, 960), CV_8UC3);
frame = colorConvert(Scalar(++cnt % 180, 128, 128, 255), COLOR_HLS2BGR); frame = colorConvert(Scalar(++cnt % 180, 128, 128, 255), COLOR_HLS2BGR);
return true; return true;
}, 60.0f); }, 60.0f);

@ -221,7 +221,7 @@ int main() {
} }
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
Sink sink = makeWriterSink(OUTPUT_FILENAME, cv::VideoWriter::fourcc('V', 'P', '9', '0'), FPS, cv::Size(WIDTH, HEIGHT)); Sink sink = makeWriterSink(OUTPUT_FILENAME, FPS, cv::Size(WIDTH, HEIGHT));
window->setSink(sink); window->setSink(sink);
#endif #endif

@ -21,7 +21,7 @@ constexpr double FPS = 60;
constexpr const char* OUTPUT_FILENAME = "many_cubes-demo.mkv"; constexpr const char* OUTPUT_FILENAME = "many_cubes-demo.mkv";
#endif #endif
const unsigned long DIAG = hypot(double(WIDTH), double(HEIGHT)); const unsigned long DIAG = hypot(double(WIDTH), double(HEIGHT));
const int GLOW_KERNEL_SIZE = std::max(int(DIAG / 138 % 2 == 0 ? DIAG / 138 + 1 : DIAG / 138), 1); const int glow_kernel_size = std::max(int(DIAG / 138 % 2 == 0 ? DIAG / 138 + 1 : DIAG / 138), 1);
using std::cerr; using std::cerr;
using std::endl; using std::endl;
@ -244,7 +244,7 @@ static bool iteration(cv::Ptr<V4D> window) {
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
//Aquire the frame buffer for use by OpenCV //Aquire the frame buffer for use by OpenCV
window->fb([&](cv::UMat& framebuffer) { window->fb([&](cv::UMat& framebuffer) {
glow_effect(framebuffer, framebuffer, GLOW_KERNEL_SIZE); glow_effect(framebuffer, framebuffer, glow_kernel_size);
}); });
#endif #endif
@ -258,9 +258,8 @@ int main() {
window->printSystemInfo(); window->printSystemInfo();
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
//Creates a writer sink using the VP9 codec (which might be hardware accelerated) //Creates a writer sink (which might be hardware accelerated)
Sink sink = makeWriterSink(OUTPUT_FILENAME, cv::VideoWriter::fourcc('V', 'P', '9', '0'), FPS, Sink sink = makeWriterSink(OUTPUT_FILENAME, FPS, cv::Size(WIDTH, HEIGHT));
cv::Size(WIDTH, HEIGHT));
window->setSink(sink); window->setSink(sink);
#endif #endif
for(size_t i = 0; i < NUMBER_OF_CUBES; ++i) for(size_t i = 0; i < NUMBER_OF_CUBES; ++i)

@ -189,7 +189,7 @@ int main() {
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
Source src = makeCaptureSource(argv[1]); Source src = makeCaptureSource(argv[1]);
Sink sink = makeWriterSink(OUTPUT_FILENAME, cv::VideoWriter::fourcc('V', 'P', '9', '0'), src.fps(), cv::Size(WIDTH, HEIGHT)); Sink sink = makeWriterSink(OUTPUT_FILENAME, src.fps(), cv::Size(WIDTH, HEIGHT));
window->setSource(src); window->setSource(src);
window->setSink(sink); window->setSink(sink);
#else #else

@ -39,6 +39,10 @@ constexpr const char* OUTPUT_FILENAME = "optflow-demo.mkv";
#endif #endif
constexpr bool OFFSCREEN = false; constexpr bool OFFSCREEN = false;
#ifndef __EMSCRIPTEN__
//the second window
static cv::Ptr<cv::v4d::V4D> menuWindow;
#endif
/* Visualization parameters */ /* Visualization parameters */
//How the background will be visualized //How the background will be visualized
@ -86,11 +90,9 @@ float point_loss = 10;
// The theoretical maximum size of the drawing stroke which is scaled by the area of the convex hull // The theoretical maximum size of the drawing stroke which is scaled by the area of the convex hull
// of tracked points and therefor is usually much smaller. // of tracked points and therefor is usually much smaller.
int max_stroke = 10; int max_stroke = 10;
// Keep alpha separate for the GUI
float alpha = 0.1f;
// Red, green, blue and alpha. All from 0.0f to 1.0f // Red, green, blue and alpha. All from 0.0f to 1.0f
//nanogui::Color effect_color(1.0f, 0.75f, 0.4f, 1.0f); float effect_color[4] = {1.0f, 0.75f, 0.4f, 1.0f};
//display on-screen FPS //display on-screen FPS
bool show_fps = true; bool show_fps = true;
//Stretch frame buffer to window size //Stretch frame buffer to window size
@ -104,7 +106,7 @@ PostProcModes post_proc_mode = GLOW;
PostProcModes post_proc_mode = NONE; PostProcModes post_proc_mode = NONE;
#endif #endif
// Intensity of glow or bloom defined by kernel size. The default scales with the image diagonal. // Intensity of glow or bloom defined by kernel size. The default scales with the image diagonal.
int GLOW_KERNEL_SIZE = std::max(int(DIAG / 100 % 2 == 0 ? DIAG / 100 + 1 : DIAG / 100), 1); int glow_kernel_size = std::max(int(DIAG / 100 % 2 == 0 ? DIAG / 100 + 1 : DIAG / 100), 1);
//The lightness selection threshold //The lightness selection threshold
int bloom_thresh = 210; int bloom_thresh = 210;
//The intensity of the bloom filter //The intensity of the bloom filter
@ -336,103 +338,71 @@ static void composite_layers(cv::UMat& background, const cv::UMat& foreground, c
using namespace cv::v4d; using namespace cv::v4d;
//Build the GUI //Build the GUI
//static void setup_gui(cv::Ptr<V4D> main, cv::Ptr<V4D> menu) { static void setup_gui(cv::Ptr<V4D> main, cv::Ptr<V4D> menu) {
// main->nanogui([&](cv::v4d::FormHelper& form){ main->imgui([main, menu](ImGuiContext* ctx){
// form.makeDialog(5, 30, "Effects"); using namespace ImGui;
// SetCurrentContext(ctx);
// form.makeGroup("Foreground"); Begin("Effects");
// form.makeFormVariable("Scale", fg_scale, 0.1f, 4.0f, true, "", "Generate the foreground at this scale"); Text("Foreground");
// form.makeFormVariable("Loss", fg_loss, 0.1f, 99.9f, true, "%", "On every frame the foreground loses on brightness"); SliderFloat("Scale", &fg_scale, 0.1f, 4.0f);
// SliderFloat("Loss", &fg_loss, 0.1f, 99.9f);
// form.makeGroup("Background");
// form.makeComboBox("Mode",background_mode, {"Grey", "Color", "Value", "Black"}); Text("Background");
// static const char* bgm_items[4] = {"Grey", "Color", "Value", "Black"};
// form.makeGroup("Points"); static int* bgm = (int*)&background_mode;
// form.makeFormVariable("Max. Points", max_points, 10, 1000000, true, "", "The theoretical maximum number of points to track which is scaled by the density of detected points and therefor is usually much smaller"); ListBox("Mode", bgm, bgm_items, 4, 4);
// form.makeFormVariable("Point Loss", point_loss, 0.0f, 100.0f, true, "%", "How many of the tracked points to lose intentionally");
// Text("Points");
// form.makeGroup("Optical flow"); SliderInt("Max. Points", &max_points, 10, 1000000);
// form.makeFormVariable("Max. Stroke Size", max_stroke, 1, 100, true, "px", "The theoretical maximum size of the drawing stroke which is scaled by the area of the convex hull of tracked points and therefor is usually much smaller"); SliderFloat("Point Loss", &point_loss, 0.0f, 100.0f);
// form.makeColorPicker("Color", effect_color, "The primary effect color",[&](const nanogui::Color &c) {
// effect_color[0] = c[0]; Text("Optical flow");
// effect_color[1] = c[1]; SliderInt("Max. Stroke Size", &max_stroke, 1, 100);
// effect_color[2] = c[2]; ColorPicker4("Color", effect_color);
// }); End();
// form.makeFormVariable("Alpha", alpha, 0.0f, 1.0f, true, "", "The opacity of the effect");
// Begin("Post Processing");
// form.makeDialog(220, 30, "Post Processing"); static const char* ppm_items[3] = {"Glow", "Bloom", "None"};
// auto* postPocMode = form.makeComboBox("Mode",post_proc_mode, {"Glow", "Bloom", "None"}); static int* ppm = (int*)&post_proc_mode;
// auto* kernelSize = form.makeFormVariable("Kernel Size", GLOW_KERNEL_SIZE, 1, 63, true, "", "Intensity of glow defined by kernel size"); ListBox("Effect",ppm, ppm_items, 3, 3);
// kernelSize->set_callback([=](const int& k) { SliderInt("Kernel Size",&glow_kernel_size, 1, 63);
// static int lastKernelSize = GLOW_KERNEL_SIZE; SliderFloat("Gain", &bloom_gain, 0.1f, 20.0f);
// End();
// if(k == lastKernelSize)
// return; Begin("Settings");
// Text("Scene Change Detection");
// if(k <= lastKernelSize) { SliderFloat("Threshold", &scene_change_thresh, 0.1f, 1.0f);
// GLOW_KERNEL_SIZE = std::max(int(k % 2 == 0 ? k - 1 : k), 1); SliderFloat("Threshold Diff", &scene_change_thresh_diff, 0.1f, 1.0f);
// } else if(k > lastKernelSize) End();
// GLOW_KERNEL_SIZE = std::max(int(k % 2 == 0 ? k + 1 : k), 1); });
// #ifndef __EMSCRIPTEN__
// lastKernelSize = k; menu->imgui([main](ImGuiContext* ctx){
// kernelSize->set_value(GLOW_KERNEL_SIZE); using namespace ImGui;
// }); SetCurrentContext(ctx);
// auto* thresh = form.makeFormVariable("Threshold", bloom_thresh, 1, 255, true, "", "The lightness selection threshold", true, false); Begin("Display");
// auto* gain = form.makeFormVariable("Gain", bloom_gain, 0.1f, 20.0f, true, "", "Intensity of the effect defined by gain", true, false); Checkbox("Show FPS", &show_fps);
// postPocMode->set_callback([=](const int& m) { if(Checkbox("Scale", &scale)) {
// switch(m) { main->setScaling(scale);
// case GLOW: }
// kernelSize->set_enabled(true);
// thresh->set_enabled(false); if(Button("Fullscreen")) {
// gain->set_enabled(false); main->setFullscreen(!main->isFullscreen());
// break; };
// case BLOOM:
// kernelSize->set_enabled(true); if(Button("Offscreen")) {
// thresh->set_enabled(true); main->setVisible(!main->isVisible());
// gain->set_enabled(true); };
// break; End();
// case NONE: });
// kernelSize->set_enabled(false); #endif
// thresh->set_enabled(false); }
// gain->set_enabled(false);
// break;
//
// }
// postPocMode->set_selected_index(m);
// });
//
// form.makeDialog(220, 175, "Settings");
//
// form.makeGroup("Scene Change Detection");
// form.makeFormVariable("Threshold", scene_change_thresh, 0.1f, 1.0f, true, "", "Peak threshold. Lowering it makes detection more sensitive");
// form.makeFormVariable("Threshold Diff", scene_change_thresh_diff, 0.1f, 1.0f, true, "", "Difference of peak thresholds. Lowering it makes detection more sensitive");
// });
//
// menu->nanogui([&](cv::v4d::FormHelper& form){
// form.makeDialog(8, 16, "Display");
//
// form.makeGroup("Display");
// form.makeFormVariable("Show FPS", show_fps, "Enable or disable the On-screen FPS display");
// form.makeFormVariable("Scale", scale, "Scale the frame buffer to the window size")->set_callback([=](const bool &s) {
// main->setScaling(s);
// });
//
//#ifndef __EMSCRIPTEN__
// form.makeButton("Fullscreen", [=]() {
// main->setFullscreen(!main->isFullscreen());
// });
//
// form.makeButton("Offscreen", [=]() {
// main->setVisible(!main->isVisible());
// });
//#endif
// });
//}
static bool iteration(cv::Ptr<V4D> window) { static bool iteration(cv::Ptr<V4D> window) {
//BGRA //BGRA
static cv::UMat background, down; static cv::UMat background, down;
static cv::UMat foreground(window->framebufferSize(), CV_8UC4, cv::Scalar::all(0)); static cv::UMat foreground(window->framebufferSize(), CV_8UC4, cv::Scalar::all(0));
//BGR
static cv::UMat menuFrame;
//GREY //GREY
static cv::UMat downPrevGrey, downNextGrey, downMotionMaskGrey; static cv::UMat downPrevGrey, downNextGrey, downMotionMaskGrey;
static vector<cv::Point2f> detectedPoints; static vector<cv::Point2f> detectedPoints;
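
Note (not part of the commit): setup_gui() now binds the demo's globals straight to immediate-mode widgets. A reduced sketch of the same pattern with just a few of the parameters above (the enum-to-int cast mirrors what the full version does for ListBox):

    main->imgui([](ImGuiContext* ctx) {
        ImGui::SetCurrentContext(ctx);   // the callback may arrive on a foreign context
        ImGui::Begin("Effects");
        ImGui::SliderFloat("Scale", &fg_scale, 0.1f, 4.0f);  // writes straight into the global
        ImGui::ColorPicker4("Color", effect_color);          // float[4], RGBA in 0..1
        static const char* items[4] = {"Grey", "Color", "Value", "Black"};
        ImGui::ListBox("Mode", reinterpret_cast<int*>(&background_mode), items, 4, 4);
        ImGui::End();
    });
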
@ -459,7 +429,7 @@ static bool iteration(cv::Ptr<V4D> window) {
//We don't want the algorithm to get out of hand when there is a scene change, so we suppress it when we detect one. //We don't want the algorithm to get out of hand when there is a scene change, so we suppress it when we detect one.
if (!detect_scene_change(downMotionMaskGrey, scene_change_thresh, scene_change_thresh_diff)) { if (!detect_scene_change(downMotionMaskGrey, scene_change_thresh, scene_change_thresh_diff)) {
//Visualize the sparse optical flow using nanovg //Visualize the sparse optical flow using nanovg
cv::Scalar color = cv::Scalar(255, 0, 0, 255); cv::Scalar color = cv::Scalar(effect_color[2] * 255, effect_color[1] * 255, effect_color[0] * 255, effect_color[3] * 255);
visualize_sparse_optical_flow(downPrevGrey, downNextGrey, detectedPoints, fg_scale, max_stroke, color, max_points, point_loss); visualize_sparse_optical_flow(downPrevGrey, downNextGrey, detectedPoints, fg_scale, max_stroke, color, max_points, point_loss);
} }
} }
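
Note (not part of the commit): the hard-coded blue stroke gives way to the GUI-controlled effect_color, converting ImGui's RGBA floats in [0,1] into the BGRA cv::Scalar in [0,255] that the flow visualization takes. The same conversion as a small helper, for clarity:

    // effect_color is RGBA as edited by ColorPicker4; the cv::Scalar here is BGRA.
    static inline cv::Scalar to_bgra_scalar(const float rgba[4]) {
        return cv::Scalar(rgba[2] * 255, rgba[1] * 255, rgba[0] * 255, rgba[3] * 255);
    }
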
@ -469,33 +439,47 @@ static bool iteration(cv::Ptr<V4D> window) {
window->fb([&](cv::UMat& framebuffer){ window->fb([&](cv::UMat& framebuffer){
//Put it all together (OpenCL) //Put it all together (OpenCL)
composite_layers(background, foreground, framebuffer, framebuffer, GLOW_KERNEL_SIZE, fg_loss, background_mode, post_proc_mode); composite_layers(background, foreground, framebuffer, framebuffer, glow_kernel_size, fg_loss, background_mode, post_proc_mode);
cvtColor(framebuffer, menuFrame, cv::COLOR_BGRA2BGR);
}); });
#ifndef __EMSCRIPTEN__
menuWindow->feed(menuFrame.getMat(cv::ACCESS_READ));
#endif
window->write(); window->write();
//If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed. //If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed.
#ifndef __EMSCRIPTEN__
return menuWindow->display() && window->display();
#else
return window->display(); return window->display();
#endif
} }
#ifndef __EMSCRIPTEN__
int main(int argc, char **argv) { int main(int argc, char **argv) {
CV_UNUSED(argc);
CV_UNUSED(argv);
#ifndef __EMSCRIPTEN__
if (argc != 2) { if (argc != 2) {
std::cerr << "Usage: optflow <input-video-file>" << endl; std::cerr << "Usage: optflow <input-video-file>" << endl;
exit(1); exit(1);
} }
#else
int main() {
#endif #endif
try { try {
using namespace cv::v4d; using namespace cv::v4d;
cv::Ptr<V4D> window = V4D::make(WIDTH, HEIGHT, "Sparse Optical Flow Demo", false, false, 0); cv::Ptr<V4D> window = V4D::make(WIDTH, HEIGHT, "Sparse Optical Flow Demo", false, false, 0);
#ifndef __EMSCRIPTEN__
menuWindow = V4D::make(240, 135, "Menu", false, false, 0);
#endif
window->printSystemInfo(); window->printSystemInfo();
if (!OFFSCREEN) { if (!OFFSCREEN) {
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
// setup_gui(window, menuWindow); setup_gui(window, menuWindow);
#else #else
// setup_gui(window, window); setup_gui(window, window);
#endif #endif
} }
@ -503,8 +487,8 @@ int main() {
Source src = makeCaptureSource(argv[1]); Source src = makeCaptureSource(argv[1]);
window->setSource(src); window->setSource(src);
Sink sink = makeWriterSink(OUTPUT_FILENAME, cv::VideoWriter::fourcc('V', 'P', '9', '0'), //Creates a writer sink (which might be hardware accelerated)
src.fps(), cv::Size(WIDTH, HEIGHT)); Sink sink = makeWriterSink(OUTPUT_FILENAME, src.fps(), cv::Size(WIDTH, HEIGHT));
window->setSink(sink); window->setSink(sink);
#else #else
Source src = makeCaptureSource(WIDTH, HEIGHT, window); Source src = makeCaptureSource(WIDTH, HEIGHT, window);

@ -244,8 +244,7 @@ int main() {
Source src = makeCaptureSource(argv[1]); Source src = makeCaptureSource(argv[1]);
window->setSource(src); window->setSource(src);
Sink sink = makeWriterSink(OUTPUT_FILENAME, cv::VideoWriter::fourcc('V', 'P', '9', '0'), Sink sink = makeWriterSink(OUTPUT_FILENAME, src.fps(), cv::Size(WIDTH, HEIGHT));
src.fps(), cv::Size(WIDTH, HEIGHT));
window->setSink(sink); window->setSink(sink);
#else #else
Source src = makeCaptureSource(WIDTH, HEIGHT, window); Source src = makeCaptureSource(WIDTH, HEIGHT, window);

@ -352,8 +352,7 @@ int main() {
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
Source src = makeCaptureSource(argv[1]); Source src = makeCaptureSource(argv[1]);
window->setSource(src); window->setSource(src);
Sink sink = makeWriterSink(OUTPUT_FILENAME, cv::VideoWriter::fourcc('V', 'P', '9', '0'), Sink sink = makeWriterSink(OUTPUT_FILENAME, FPS, cv::Size(WIDTH, HEIGHT));
FPS, cv::Size(WIDTH, HEIGHT));
window->setSink(sink); window->setSink(sink);
#else #else
Source src = makeCaptureSource(WIDTH, HEIGHT, window); Source src = makeCaptureSource(WIDTH, HEIGHT, window);

@ -24,7 +24,7 @@ constexpr long unsigned int HEIGHT = 960;
#endif #endif
constexpr bool OFFSCREEN = false; constexpr bool OFFSCREEN = false;
const unsigned long DIAG = hypot(double(WIDTH), double(HEIGHT)); const unsigned long DIAG = hypot(double(WIDTH), double(HEIGHT));
const int GLOW_KERNEL_SIZE = std::max(int(DIAG / 138 % 2 == 0 ? DIAG / 138 + 1 : DIAG / 138), 1); const int glow_kernel_size = std::max(int(DIAG / 138 % 2 == 0 ? DIAG / 138 + 1 : DIAG / 138), 1);
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
constexpr double FPS = 60; constexpr double FPS = 60;
constexpr const char* OUTPUT_FILENAME = "video-demo.mkv"; constexpr const char* OUTPUT_FILENAME = "video-demo.mkv";
@ -198,7 +198,7 @@ static bool iteration(cv::Ptr<V4D> window) {
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
window->fb([&](cv::UMat& frameBuffer) { window->fb([&](cv::UMat& frameBuffer) {
glow_effect(frameBuffer, frameBuffer, GLOW_KERNEL_SIZE); glow_effect(frameBuffer, frameBuffer, glow_kernel_size);
}); });
#endif #endif
@ -226,8 +226,7 @@ int main() {
Source src = makeCaptureSource(argv[1]); Source src = makeCaptureSource(argv[1]);
window->setSource(src); window->setSource(src);
Sink sink = makeWriterSink(OUTPUT_FILENAME, cv::VideoWriter::fourcc('V', 'P', '9', '0'), Sink sink = makeWriterSink(OUTPUT_FILENAME, src.fps(), cv::Size(WIDTH, HEIGHT));
src.fps(), cv::Size(WIDTH, HEIGHT));
window->setSink(sink); window->setSink(sink);
#else #else
//Creates a webcam source is available //Creates a webcam source is available

@ -17,8 +17,7 @@ int main(int argc, char** argv) {
Source src = makeCaptureSource(argv[1]); Source src = makeCaptureSource(argv[1]);
//Make the video sink //Make the video sink
Sink sink = makeWriterSink(argv[2], VideoWriter::fourcc('V', 'P', '9', '0'), src.fps(), window->framebufferSize()); Sink sink = makeWriterSink(argv[2], src.fps(), cv::Size(960, 960));
//Attach source and sink //Attach source and sink
window->setSource(src); window->setSource(src);
window->setSink(sink); window->setSink(sink);
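
Note (not part of the commit): with source and sink attached, the editing loop stays a plain capture/write/display iteration. A sketch, assuming V4D::capture() returns false once the source is exhausted, as the other samples' iteration functions rely on:

    window->run([](cv::Ptr<V4D> win) {
        if (!win->capture())   // pull the next source frame into the framebuffer
            return false;
        // ... edit the frame via win->fb(), win->nvg(), etc. ...
        win->write();          // hand the framebuffer to the writer sink
        return win->display(); // false once the window is closed
    });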

@ -297,7 +297,7 @@ void FrameBufferContext::init() {
#endif #endif
setup(framebufferSize_); setup(framebufferSize_);
// glfwSetWindowUserPointer(getGLFWWindow(), &getV4D()); glfwSetWindowUserPointer(getGLFWWindow(), getV4D().get());
// //
// glfwSetCursorPosCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, double x, double y) { // glfwSetCursorPosCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, double x, double y) {
// V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin)); // V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
@ -365,6 +365,10 @@ void FrameBufferContext::init() {
//// } //// }
//// #endif //// #endif
// }); // });
glfwSetWindowFocusCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, int i) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
v4d->makeCurrent();
});
} }
cv::Ptr<V4D> FrameBufferContext::getV4D() { cv::Ptr<V4D> FrameBufferContext::getV4D() {
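
Note (not part of the commit): the focus callback recovers the owning V4D through GLFW's per-window user pointer, which init() now actually sets. The underlying GLFW pattern, stripped of the V4D specifics:

    #include <GLFW/glfw3.h>

    struct Owner { /* whatever state the callbacks need */ };

    static void install_callbacks(GLFWwindow* win, Owner* owner) {
        glfwSetWindowUserPointer(win, owner);            // stash the owner on the window
        glfwSetWindowFocusCallback(win, [](GLFWwindow* w, int focused) {
            Owner* self = static_cast<Owner*>(glfwGetWindowUserPointer(w));
            if (focused) {
                // e.g. re-bind the owner's GL/ImGui state, as V4D::makeCurrent() does
            }
            (void)self;
        });
    }
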
@ -491,13 +495,11 @@ void FrameBufferContext::toGLTexture2D(cv::UMat& u, cv::ogl::Texture2D& texture)
clImage_ = clCreateFromGLTexture(context, CL_MEM_WRITE_ONLY, 0x0DE1, 0, texture.texId(), clImage_ = clCreateFromGLTexture(context, CL_MEM_WRITE_ONLY, 0x0DE1, 0, texture.texId(),
&status); &status);
if (status != CL_SUCCESS) if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, throw std::runtime_error("OpenCL: clCreateFromGLTexture failed: " + std::to_string(status));
("OpenCL: clCreateFromGLTexture failed: %d", status));
status = clEnqueueAcquireGLObjects(q, 1, &clImage_, 0, NULL, NULL); status = clEnqueueAcquireGLObjects(q, 1, &clImage_, 0, NULL, NULL);
if (status != CL_SUCCESS) if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, throw std::runtime_error("OpenCL: clEnqueueAcquireGLObjects failed: " + std::to_string(status));
("OpenCL: clEnqueueAcquireGLObjects failed: %d", status));
} }
cl_mem clBuffer = (cl_mem) u.handle(ACCESS_READ); cl_mem clBuffer = (cl_mem) u.handle(ACCESS_READ);
@ -508,12 +510,11 @@ void FrameBufferContext::toGLTexture2D(cv::UMat& u, cv::ogl::Texture2D& texture)
status = clEnqueueCopyBufferToImage(q, clBuffer, clImage_, offset, dst_origin, region, 0, NULL, status = clEnqueueCopyBufferToImage(q, clBuffer, clImage_, offset, dst_origin, region, 0, NULL,
NULL); NULL);
if (status != CL_SUCCESS) if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, throw std::runtime_error("OpenCL: clEnqueueCopyBufferToImage failed: " + std::to_string(status));
("OpenCL: clEnqueueCopyBufferToImage failed: %d", status));
status = clFinish(q); status = clFinish(q);
if (status != CL_SUCCESS) if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, ("OpenCL: clFinish failed: %d", status)); throw std::runtime_error("OpenCL: clFinish failed: " + std::to_string(status));
#endif #endif
} }
@ -540,13 +541,11 @@ void FrameBufferContext::fromGLTexture2D(const cv::ogl::Texture2D& texture, cv::
clImage_ = clCreateFromGLTexture(context, CL_MEM_READ_ONLY, 0x0DE1, 0, texture.texId(), clImage_ = clCreateFromGLTexture(context, CL_MEM_READ_ONLY, 0x0DE1, 0, texture.texId(),
&status); &status);
if (status != CL_SUCCESS) if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, throw std::runtime_error("OpenCL: clCreateFromGLTexture failed: " + std::to_string(status));
("OpenCL: clCreateFromGLTexture failed: %d", status));
status = clEnqueueAcquireGLObjects(q, 1, &clImage_, 0, NULL, NULL); status = clEnqueueAcquireGLObjects(q, 1, &clImage_, 0, NULL, NULL);
if (status != CL_SUCCESS) if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, throw std::runtime_error("OpenCL: clEnqueueAcquireGLObjects failed: " + std::to_string(status));
("OpenCL: clEnqueueAcquireGLObjects failed: %d", status));
} }
cl_mem clBuffer = (cl_mem) u.handle(ACCESS_WRITE); cl_mem clBuffer = (cl_mem) u.handle(ACCESS_WRITE);
@ -557,12 +556,11 @@ void FrameBufferContext::fromGLTexture2D(const cv::ogl::Texture2D& texture, cv::
status = clEnqueueCopyImageToBuffer(q, clImage_, clBuffer, src_origin, region, offset, 0, NULL, status = clEnqueueCopyImageToBuffer(q, clImage_, clBuffer, src_origin, region, offset, 0, NULL,
NULL); NULL);
if (status != CL_SUCCESS) if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, throw std::runtime_error("OpenCL: clEnqueueCopyImageToBuffer failed: " + std::to_string(status));
("OpenCL: clEnqueueCopyImageToBuffer failed: %d", status));
status = clFinish(q); status = clFinish(q);
if (status != CL_SUCCESS) if (status != CL_SUCCESS)
CV_Error_(cv::Error::OpenCLApiCallError, ("OpenCL: clFinish failed: %d", status)); throw std::runtime_error("OpenCL: clFinish failed: " + std::to_string(status));
#endif #endif
} }
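
Note (not part of the commit): the CV_Error_ macros are replaced by std::runtime_error with the numeric status appended. The repeated checks could be folded into one helper of the same shape, e.g.:

    #include <CL/cl.h>      // cl_int, CL_SUCCESS
    #include <stdexcept>
    #include <string>

    // Throws on any non-CL_SUCCESS status, naming the call that failed.
    static void cl_check(cl_int status, const char* call) {
        if (status != CL_SUCCESS)
            throw std::runtime_error(std::string("OpenCL: ") + call +
                                     " failed: " + std::to_string(status));
    }

    // usage: cl_check(clFinish(q), "clFinish");
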
@ -668,11 +666,13 @@ void FrameBufferContext::blitFrameBufferToScreen(const cv::Rect& viewport,
GLint dstY0 = scale ? marginhs : marginh; GLint dstY0 = scale ? marginhs : marginh;
GLint dstX1 = scale ? marginws + fbws : marginw + framebufferSize_.width; GLint dstX1 = scale ? marginws + fbws : marginw + framebufferSize_.width;
GLint dstY1 = scale ? marginhs + fbhs : marginh + framebufferSize_.height; GLint dstY1 = scale ? marginhs + fbhs : marginh + framebufferSize_.height;
{ //#ifdef __EMSCRIPTEN__
//FIXME WebGL2 workaround for webkit. without we have flickering // {
cv::UMat tmp; // //FIXME WebGL2 workaround for webkit. without we have flickering
FrameBufferContext::FrameBufferScope fbScope(*this, tmp); // cv::UMat tmp(size(), CV_8UC4);
} // FrameBufferContext::FrameBufferScope fbScope(*this, tmp);
// }
//#endif
GL_CHECK(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, drawFramebufferID)); GL_CHECK(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, drawFramebufferID));
GL_CHECK(glBlitFramebuffer( srcX0, srcY0, srcX1, srcY1, GL_CHECK(glBlitFramebuffer( srcX0, srcY0, srcX1, srcY1,
dstX0, dstY0, dstX1, dstY1, dstX0, dstY0, dstX1, dstY1,

@ -24,6 +24,7 @@ ImGuiContextImpl::ImGuiContextImpl(FrameBufferContext& fbContext) :
FrameBufferContext::GLScope glScope(mainFbContext_, GL_FRAMEBUFFER); FrameBufferContext::GLScope glScope(mainFbContext_, GL_FRAMEBUFFER);
IMGUI_CHECKVERSION(); IMGUI_CHECKVERSION();
context_ = ImGui::CreateContext(); context_ = ImGui::CreateContext();
ImGui::SetCurrentContext(context_);
ImGuiIO& io = ImGui::GetIO(); ImGuiIO& io = ImGui::GetIO();
(void)io; (void)io;
io.ConfigFlags |= ImGuiConfigFlags_NavEnableKeyboard; io.ConfigFlags |= ImGuiConfigFlags_NavEnableKeyboard;
@ -42,31 +43,48 @@ void ImGuiContextImpl::build(std::function<void(ImGuiContext*)> fn) {
renderCallback_ = fn; renderCallback_ = fn;
} }
void ImGuiContextImpl::makeCurrent() {
ImGui::SetCurrentContext(context_);
}
void ImGuiContextImpl::render() { void ImGuiContextImpl::render() {
run_sync_on_main<25>([&,this](){
{ {
mainFbContext_.makeCurrent(); mainFbContext_.makeCurrent();
ImGui::SetCurrentContext(context_);
GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, 0)); GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, 0));
#if !defined(OPENCV_V4D_USE_ES3) && !defined(EMSCRIPTEN) #if !defined(OPENCV_V4D_USE_ES3) && !defined(EMSCRIPTEN)
GL_CHECK(glDrawBuffer(GL_BACK)); GL_CHECK(glDrawBuffer(GL_BACK));
#endif #endif
GL_CHECK(glViewport(0, 0, mainFbContext_.getWindowSize().width, mainFbContext_.getWindowSize().height)); GL_CHECK(
glViewport(0, 0, mainFbContext_.getWindowSize().width,
mainFbContext_.getWindowSize().height));
ImGui_ImplOpenGL3_NewFrame(); ImGui_ImplOpenGL3_NewFrame();
ImGui_ImplGlfw_NewFrame(); ImGui_ImplGlfw_NewFrame();
ImGui::NewFrame(); ImGui::NewFrame();
ImGui::Begin("Display"); \ static bool open_ptr[1] = { true };
ImGuiIO& io = ImGui::GetIO(); \ static ImGuiWindowFlags window_flags = 0;
ImGui::Text("%.3f ms/frame (%.1f FPS)", 1000.0f / io.Framerate, io.Framerate); \ window_flags |= ImGuiWindowFlags_NoBackground;
window_flags |= ImGuiWindowFlags_NoBringToFrontOnFocus;
window_flags |= ImGuiWindowFlags_NoMove;
window_flags |= ImGuiWindowFlags_NoScrollWithMouse;
window_flags |= ImGuiWindowFlags_AlwaysAutoResize;
window_flags |= ImGuiWindowFlags_NoSavedSettings;
window_flags |= ImGuiWindowFlags_NoFocusOnAppearing;
window_flags |= ImGuiWindowFlags_NoNav;
window_flags |= ImGuiWindowFlags_NoDecoration;
window_flags |= ImGuiWindowFlags_NoInputs;
ImGui::Begin("Display", open_ptr, window_flags);
ImGuiIO& io = ImGui::GetIO();
ImGui::Text("%.3f ms/frame (%.1f FPS)", 1000.0f / io.Framerate, io.Framerate);
ImGui::End(); ImGui::End();
if (renderCallback_)
if(renderCallback_) renderCallback_(context_);
renderCallback_(ImGui::GetCurrentContext());
ImGui::Render(); ImGui::Render();
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData()); ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
} }
GL_CHECK(glFinish()); GL_CHECK(glFinish());
});
} }
} }
} }
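
Note (not part of the commit): the FPS readout is now drawn as a borderless, input-transparent overlay by OR-ing window flags before Begin(). The same flag set as a single expression, for reference:

    static const ImGuiWindowFlags overlayFlags =
            ImGuiWindowFlags_NoBackground | ImGuiWindowFlags_NoBringToFrontOnFocus
          | ImGuiWindowFlags_NoMove | ImGuiWindowFlags_NoScrollWithMouse
          | ImGuiWindowFlags_AlwaysAutoResize | ImGuiWindowFlags_NoSavedSettings
          | ImGuiWindowFlags_NoFocusOnAppearing | ImGuiWindowFlags_NoNav
          | ImGuiWindowFlags_NoDecoration | ImGuiWindowFlags_NoInputs;

    bool open = true;
    ImGui::Begin("Display", &open, overlayFlags);
    ImGuiIO& io = ImGui::GetIO();
    ImGui::Text("%.3f ms/frame (%.1f FPS)", 1000.0f / io.Framerate, io.Framerate);
    ImGui::End();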

@ -30,9 +30,9 @@ NanoVGContext::NanoVGContext(FrameBufferContext& fbContext) :
if (!context_) if (!context_)
throw std::runtime_error("Could not initialize NanoVG!"); throw std::runtime_error("Could not initialize NanoVG!");
} }
int font = nvgCreateFont(context_, "icons", "assets/entypo.ttf"); int font = nvgCreateFont(context_, "icons", "assets/fonts/entypo.ttf");
font = nvgCreateFont(context_, "sans", "assets/Roboto-Regular.ttf"); font = nvgCreateFont(context_, "sans", "assets/fonts/Roboto-Regular.ttf");
font = nvgCreateFont(context_, "sans-bold", "assets/Roboto-Bold.ttf"); font = nvgCreateFont(context_, "sans-bold", "assets/fonts/Roboto-Bold.ttf");
#ifdef __EMSCRIPTEN__ #ifdef __EMSCRIPTEN__
mainFbContext_.initWebGLCopy(fbCtx().getIndex()); mainFbContext_.initWebGLCopy(fbCtx().getIndex());
#endif #endif
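
Note (not part of the commit): the fonts now load from assets/fonts/, which the CMake changes above preload into the Emscripten filesystem. nvgCreateFont() returns -1 when a font cannot be loaded, so a check along these lines would surface a missing or mis-preloaded file early:

    int sans = nvgCreateFont(context_, "sans", "assets/fonts/Roboto-Regular.ttf");
    if (sans == -1)
        throw std::runtime_error("Could not load font: assets/fonts/Roboto-Regular.ttf");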

@ -170,6 +170,12 @@ unsigned int initShader(const char* vShader, const char* fShader, const char* ou
return program; return program;
} }
std::string getGlVendor() {
std::ostringstream oss;
oss << reinterpret_cast<const char*>(glGetString(GL_VENDOR));
return oss.str();
}
std::string getGlInfo() { std::string getGlInfo() {
std::ostringstream oss; std::ostringstream oss;
oss << "\n\t" << reinterpret_cast<const char*>(glGetString(GL_VERSION)) oss << "\n\t" << reinterpret_cast<const char*>(glGetString(GL_VERSION))
@ -344,8 +350,20 @@ static Source makeAnyHWSource(const string& inputFilename) {
#endif #endif
#ifndef __EMSCRIPTEN__ #ifndef __EMSCRIPTEN__
Sink makeWriterSink(const string& outputFilename, const int fourcc, const float fps, Sink makeWriterSink(const string& outputFilename, const float fps, const cv::Size& frameSize) {
const cv::Size& frameSize) { int fourcc = 0;
cerr << getGlVendor() << endl;
//FIXME find a cleverer way to guess a decent codec
if(getGlVendor() == "NVIDIA Corporation") {
fourcc = cv::VideoWriter::fourcc('H', '2', '6', '4');
} else {
fourcc = cv::VideoWriter::fourcc('V', 'P', '9', '0');
}
return makeWriterSink(outputFilename, fps, frameSize, fourcc);
}
Sink makeWriterSink(const string& outputFilename, const float fps,
const cv::Size& frameSize, int fourcc) {
if (isIntelVaSupported()) { if (isIntelVaSupported()) {
return makeVaSink(outputFilename, fourcc, fps, frameSize, 0); return makeVaSink(outputFilename, fourcc, fps, frameSize, 0);
} else { } else {

@ -241,7 +241,7 @@ void V4D::setSource(const Source& src) {
source_ = src; source_ = src;
} }
void V4D::feed(const cv::UMat& in) { void V4D::feed(cv::InputArray in) {
TimeTracker::getInstance()->execute("feed", [&](){ TimeTracker::getInstance()->execute("feed", [&](){
cv::UMat frame; cv::UMat frame;
clvaCtx().capture([&](cv::UMat& videoFrame) { clvaCtx().capture([&](cv::UMat& videoFrame) {
@ -254,7 +254,7 @@ void V4D::feed(const cv::UMat& in) {
}); });
} }
cv::UMat V4D::fetch() { cv::_InputArray V4D::fetch() {
cv::UMat frame; cv::UMat frame;
TimeTracker::getInstance()->execute("copyTo", [&](){ TimeTracker::getInstance()->execute("copyTo", [&](){
fb([frame](cv::UMat& framebuffer){ fb([frame](cv::UMat& framebuffer){
@ -528,6 +528,7 @@ void V4D::printSystemInfo() {
void V4D::makeCurrent() { void V4D::makeCurrent() {
fbCtx().makeCurrent(); fbCtx().makeCurrent();
imguiCtx().makeCurrent();
} }
cv::Ptr<V4D> V4D::self() { cv::Ptr<V4D> V4D::self() {
