emscripten adaptions

pull/3471/head
kallaballa 2 years ago
parent cbc6082fcf
commit 15a792fdd1
  1. modules/v4d/include/opencv2/v4d/detail/framebuffercontext.hpp (2 changed lines)
  2. modules/v4d/include/opencv2/v4d/util.hpp (7 changed lines)
  3. modules/v4d/include/opencv2/v4d/v4d.hpp (80 changed lines)
  4. modules/v4d/samples/beauty-demo.cpp (357 changed lines)
  5. modules/v4d/samples/cube-demo.cpp (19 changed lines)
  6. modules/v4d/samples/custom_source_and_sink.cpp (4 changed lines)
  7. modules/v4d/samples/display_image.cpp (12 changed lines)
  8. modules/v4d/samples/display_image_fb.cpp (6 changed lines)
  9. modules/v4d/samples/display_image_nvg.cpp (2 changed lines)
  10. modules/v4d/samples/font-demo.cpp (2 changed lines)
  11. modules/v4d/samples/many_cubes-demo.cpp (7 changed lines)
  12. modules/v4d/samples/nanovg-demo.cpp (12 changed lines)
  13. modules/v4d/samples/optflow-demo.cpp (20 changed lines)
  14. modules/v4d/samples/render_opengl.cpp (3 changed lines)
  15. modules/v4d/samples/shader-demo.cpp (15 changed lines)
  16. modules/v4d/samples/vector_graphics_and_fb.cpp (4 changed lines)
  17. modules/v4d/samples/video-demo.cpp (16 changed lines)
  18. modules/v4d/samples/video_editing.cpp (9 changed lines)
  19. modules/v4d/src/detail/framebuffercontext.cpp (17 changed lines)
  20. modules/v4d/src/detail/glcontext.cpp (4 changed lines)
  21. modules/v4d/src/detail/nanovgcontext.cpp (4 changed lines)
  22. modules/v4d/src/detail/sourcecontext.cpp (32 changed lines)
  23. modules/v4d/src/source.cpp (9 changed lines)
  24. modules/v4d/src/util.cpp (10 changed lines)
  25. modules/v4d/src/v4d.cpp (71 changed lines)

modules/v4d/include/opencv2/v4d/detail/framebuffercontext.hpp
@@ -286,7 +286,7 @@ protected:
     void setup(const cv::Size& sz);
     void teardown();
     void initWebGLCopy(const size_t& index);
-    void doWebGLCopy(FrameBufferContext& other);
+    void doWebGLCopy(cv::Ptr<FrameBufferContext> other);
     /*!
      * The UMat used to copy or bind (depending on cl-gl interop capability) the OpenGL framebuffer.
      */

modules/v4d/include/opencv2/v4d/util.hpp
@@ -38,9 +38,10 @@ namespace detail {
 using std::cout;
 using std::endl;
+static thread_local std::mutex mtx_;
+static thread_local bool sync_run_ = false;
 class ThreadLocal {
-    static thread_local std::mutex mtx_;
-    static thread_local bool sync_run_;
 public:
     static bool& sync_run() {
         return sync_run_;
@@ -342,7 +343,7 @@ CV_EXPORTS cv::Ptr<Source> makeCaptureSource(cv::Ptr<V4D> window, const string&
  * @param height The frame height to capture (usually the initial height of the V4D object)
  * @return A WebCam source object.
  */
-CV_EXPORTS cv::Ptr<Source> makeCaptureSource(int width, int height, cv::Ptr<V4D> window);
+CV_EXPORTS cv::Ptr<Source> makeCaptureSource(cv::Ptr<V4D> window);
 #endif
 void resizePreserveAspectRatio(const cv::UMat& src, cv::UMat& output, const cv::Size& dstSize, const cv::Scalar& bgcolor = {0,0,0,255});
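For orientation, a hedged sketch of how the two makeCaptureSource() overloads above get wired into a sample's main() after this change (names follow the samples later in this diff; the command-line argument is illustrative):

#include <opencv2/v4d/v4d.hpp>
using namespace cv::v4d;

int main(int argc, char** argv) {
    cv::Ptr<V4D> window = V4D::make(960, 960, "Capture example");
#ifndef __EMSCRIPTEN__
    auto src = makeCaptureSource(window, argv[1]);   // native builds: file or device path
#else
    auto src = makeCaptureSource(window);            // WebAssembly builds: webcam, size taken from the window
#endif
    window->setSource(src);
    return 0;
}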

modules/v4d/include/opencv2/v4d/v4d.hpp
@@ -477,17 +477,22 @@ public:
     }
     void capture(cv::UMat& frame) {
+        auto isTrue = [](const bool& b){ return b; };
         capture([](const cv::UMat& inputFrame, cv::UMat& f){
-            inputFrame.copyTo(f);
+            if(!inputFrame.empty())
+                inputFrame.copyTo(f);
         }, frame);
+        graph(isTrue, frame.empty());
         fb([](cv::UMat& frameBuffer, const cv::UMat& f) {
-            f.copyTo(frameBuffer);
+            if(!f.empty())
+                f.copyTo(frameBuffer);
         }, frame);
+        endgraph(isTrue, frame.empty());
     }
     void capture() {
-        static thread_local cv::UMat tmp(fbSize(), CV_8UC3);
+        static thread_local cv::UMat tmp;
         capture(tmp);
     }
@@ -507,7 +512,7 @@ public:
     }
     void write() {
-        static thread_local cv::UMat frame(fbSize(), CV_8UC3);
+        static thread_local cv::UMat frame(fbSize(), CV_8UC4);
         fb([](const cv::UMat& frameBuffer, cv::UMat& f) {
             frameBuffer.copyTo(f);
@@ -594,12 +599,9 @@ public:
 #ifdef __EMSCRIPTEN__
-    bool first = true;
     static void do_frame(void* void_fn_ptr) {
-        if(first) {
-            glfwSwapInterval(0);
-            first = false;
-        }
+        glfwSwapInterval(0);
         auto* fn_ptr = reinterpret_cast<std::function<bool()>*>(void_fn_ptr);
         if (fn_ptr) {
             auto& fn = *fn_ptr;
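The do_frame() change above leans on Emscripten's main-loop trampoline. As a self-contained illustration of that pattern, independent of V4D (the frame lambda body is a placeholder):

#include <emscripten.h>
#include <functional>

// Trampoline: Emscripten calls this once per animation frame with the user pointer.
static void do_frame(void* void_fn_ptr) {
    auto* fn_ptr = reinterpret_cast<std::function<bool()>*>(void_fn_ptr);
    if (fn_ptr && !(*fn_ptr)())
        emscripten_cancel_main_loop();   // stop looping once the callback returns false
}

int main() {
    static std::function<bool()> frame([]() {
        // ... render one frame here (placeholder) ...
        return true;                     // keep the loop running
    });
    // fps = -1 lets the browser pace the loop via requestAnimationFrame;
    // the final 'true' means this call never returns (simulate_infinite_loop).
    emscripten_set_main_loop_arg(do_frame, &frame, -1, true);
}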
@@ -671,37 +673,41 @@ public:
 //        if(this->isMain())
 //            this->makeCurrent();
 #ifndef __EMSCRIPTEN__
-        bool success = true;
         CLExecScope_t scope(this->fbCtx()->getCLExecContext());
+#endif
         plan->setup(self());
         this->makePlan();
         this->runPlan();
-        this->display();
         this->clearPlan();
         plan->infer(self());
         this->makePlan();
-        do {
-            this->runPlan();
-        } while(this->display());
-        plan->teardown(self());
-        this->makePlan();
-        this->runPlan();
-        this->display();
-        this->clearPlan();
+#ifndef __EMSCRIPTEN__
+        if(this->isMain())
+            this->printSystemInfo();
+        do {
+            this->runPlan();
+        } while(this->display());
 #else
         if(this->isMain()) {
-            std::function<bool()> fnFrame([=,this](){
-                return fn(self());
+            std::function<bool()> fnFrame([this](){
+                this->printSystemInfo();
+                do {
+                    this->runPlan();
+                } while(this->display());
+                return false;
             });
             emscripten_set_main_loop_arg(do_frame, &fnFrame, -1, true);
         } else {
-            while (true) {
-                fn(self());
-            }
+            do {
+                this->runPlan();
+            } while(this->display());
         }
 #endif
+        plan->teardown(self());
+        this->makePlan();
+        this->runPlan();
+        this->clearPlan();
         if(this->isMain()) {
             for(auto& t : threads)
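Taken together, the reordered run loop above drives a Plan through setup, a displayed infer loop, and teardown. A minimal, hedged Plan skeleton in the style of the samples in this commit (assuming Plan exposes these three virtuals, as the calls in run() suggest):

#include <opencv2/v4d/v4d.hpp>
using namespace cv::v4d;

class MinimalPlan : public Plan {
public:
    void setup(cv::Ptr<V4D> window) override {
        // one-time initialization before the frame loop
    }
    void infer(cv::Ptr<V4D> window) override {
        // per-frame work: the samples typically chain capture(), gl(), nvg(), fb(), write()
        window->capture();
        window->write();
    }
    void teardown(cv::Ptr<V4D> window) override {
        // one-time cleanup after the loop
    }
};

int main() {
    cv::Ptr<V4D> window = V4D::make(960, 960, "Minimal Plan");
    window->run<MinimalPlan>(0);   // 0 extra worker threads, as the samples now pass
    return 0;
}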
@@ -711,12 +717,12 @@ public:
     /*!
      * Called to feed an image directly to the framebuffer
      */
-    CV_EXPORTS void feed(cv::InputArray in);
+    void feed(cv::UMat& in);
     /*!
      * Fetches a copy of frambuffer
      * @return a copy of the framebuffer
      */
-    CV_EXPORTS cv::_InputArray fetch();
+    CV_EXPORTS cv::UMat fetch();
     /*!
      * Set the current #cv::viz::Source object. Usually created using #makeCaptureSource().
@@ -865,8 +871,6 @@ public:
      */
     CV_EXPORTS void printSystemInfo();
-    CV_EXPORTS void makeCurrent();
     CV_EXPORTS GLFWwindow* getGLFWWindow();
     CV_EXPORTS cv::Ptr<FrameBufferContext> fbCtx();

modules/v4d/samples/beauty-demo.cpp
@@ -215,154 +215,159 @@ class BeautyDemoPlan : public Plan {
     bool faceFound_ = false;
     FaceFeatures features_;
 public:
+    void setup(cv::Ptr<V4D> window) override {
+        window->parallel([](cv::Ptr<cv::face::Facemark>& facemark){
+#ifndef __EMSCRIPTEN__
+            facemark->loadModel("modules/v4d/assets/models/lbfmodel.yaml");
+#else
+            facemark->loadModel("assets/models/lbfmodel.yaml");
+#endif
+            cerr << "Loading finished" << endl;
+        }, facemark_);
+    }
     void infer(cv::Ptr<V4D> window) override {
         auto always = [](){ return true; };
         auto isTrue = [](bool& ff){ return ff; };
         auto isFalse = [](bool& ff){ return !ff; };
-        //Face landmark detector
         try {
-#ifndef __EMSCRIPTEN__
-            facemark_->loadModel("modules/v4d/assets/models/lbfmodel.yaml");
-#else
-            facemark_->loadModel("assets/models/lbfmodel.yaml");
-#endif
             window->graph(always);
             {
+                window->capture();
                 //Save the video frame as BGR
-                window->capture([](const cv::UMat &videoFrame, cv::UMat& in, cv::UMat& d) {
-                    cvtColor(videoFrame, in, cv::COLOR_BGRA2BGR);
+                window->fb([](const cv::UMat &framebuffer, cv::UMat& in, cv::UMat& d) {
+                    cvtColor(framebuffer, in, cv::COLOR_BGRA2BGR);
                     //Downscale the video frame for face detection
                    cv::resize(in, d, cv::Size(DOWNSIZE_WIDTH, DOWNSIZE_HEIGHT));
                 }, input_, down_);

                 window->parallel([](cv::Ptr<cv::FaceDetectorYN>& de, cv::Ptr<cv::face::Facemark>& fm, vector<vector<cv::Point2f>>& sh, const cv::UMat& d, std::vector<cv::Rect>& fr, bool& ff, FaceFeatures& ft) {
                     sh.clear();
                     cv::Mat faces;
                     //Detect faces in the down-scaled image
                     de->detect(d, faces);
                     //Only add the first face
                     cv::Rect faceRect;
                     if(!faces.empty())
                         faceRect = cv::Rect(int(faces.at<float>(0, 0)), int(faces.at<float>(0, 1)), int(faces.at<float>(0, 2)), int(faces.at<float>(0, 3)));
                     fr = {faceRect};
                     //find landmarks if faces have been detected
                     ff = !faceRect.empty() && fm->fit(d, fr, sh);
                     if(ff)
                         ft = FaceFeatures(fr[0], sh[0], float(d.size().width) / WIDTH);
                 }, detector_, facemark_, shapes_, down_, faceRects_, faceFound_, features_);
             }
             window->endgraph(always);

             window->graph(isTrue, faceFound_);
             {
                 window->nvg([](const FaceFeatures& f) {
                     //Draw the face oval of the first face
                     draw_face_oval_mask(f);
                 }, features_);
                 window->fb([](const cv::UMat& frameBuffer, cv::UMat& fo) {
                     //Convert/Copy the mask
                     cvtColor(frameBuffer, fo, cv::COLOR_BGRA2GRAY);
                 }, faceOval_);
                 window->nvg([](const FaceFeatures& f) {
                     //Draw eyes eyes and lips areas of the first face
                     draw_face_eyes_and_lips_mask(f);
                 }, features_);
                 window->fb([](const cv::UMat &frameBuffer, cv::UMat& ealmg) {
                     //Convert/Copy the mask
                     cvtColor(frameBuffer, ealmg, cv::COLOR_BGRA2GRAY);
                 }, eyesAndLipsMaskGrey_);
                 window->parallel([](const cv::UMat& fo, const cv::UMat& ealmg, cv::UMat& fsmg, cv::UMat& bmg) {
                     //Create the skin mask
                     cv::subtract(fo, ealmg, fsmg);
                     //Create the background mask
                     cv::bitwise_not(ealmg, bmg);
                 }, faceOval_, eyesAndLipsMaskGrey_, faceSkinMaskGrey_, backgroundMaskGrey_);
                 window->parallel([](const cv::UMat& in, cv::UMat& eal, float& eals, cv::UMat& c, cv::UMat& s) {
                     //boost saturation of eyes and lips
                     adjust_saturation(in, eal, eals);
                     //reduce skin contrast
                     multiply(in, cv::Scalar::all(skin_contrast), c);
                     //fix skin brightness
                     add(c, cv::Scalar::all((1.0 - skin_contrast) / 2.0) * 255.0, c);
                     //blur the skin_
                     cv::boxFilter(c, c, -1, cv::Size(blur_skin_kernel_size, blur_skin_kernel_size), cv::Point(-1, -1), true, cv::BORDER_REPLICATE);
                     //boost skin saturation
                     adjust_saturation(c, s, skin_saturation);
                 }, input_, eyesAndLips_, eyes_and_lips_saturation, contrast_, skin_);
                 window->parallel([](cv::Ptr<cv::detail::MultiBandBlender>& bl,
                         const cv::UMat& s, const cv::UMat& fsmg,
                         const cv::UMat& in, const cv::UMat& bmg,
                         const cv::UMat& eal, const cv::UMat& ealmg,
                         cv::UMat& fout) {
                     cv::UMat foFloat;
                     //FIXME do it once?
                     CV_Assert(!s.empty());
                     CV_Assert(!in.empty());
                     CV_Assert(!eal.empty());
                     //piece it all together
                     //FIXME prepare only once?
                     bl->prepare(cv::Rect(0, 0, WIDTH, HEIGHT));
                     bl->feed(s, fsmg, cv::Point(0, 0));
                     bl->feed(in, bmg, cv::Point(0, 0));
                     bl->feed(eal, ealmg, cv::Point(0, 0));
                     bl->blend(foFloat, cv::UMat());
                     CV_Assert(!foFloat.empty());
                     foFloat.convertTo(fout, CV_8U, 1.0);
                 }, blender_, skin_, faceSkinMaskGrey_, input_, backgroundMaskGrey_, eyesAndLips_, eyesAndLipsMaskGrey_, frameOut_);
                 window->parallel([](cv::UMat& fout, const cv::UMat& in, cv::UMat& lh, cv::UMat& rh) {
                     if (side_by_side) {
                         //create side-by-side view with a result
                         cv::resize(in, lh, cv::Size(0, 0), 0.5, 0.5);
                         cv::resize(fout, rh, cv::Size(0, 0), 0.5, 0.5);
                         fout = cv::Scalar::all(0);
                         lh.copyTo(fout(cv::Rect(0, 0, lh.size().width, lh.size().height)));
                         rh.copyTo(fout(cv::Rect(rh.size().width, 0, rh.size().width, rh.size().height)));
                     }
                 }, frameOut_, input_, lhalf_, rhalf_);
             }
             window->endgraph(isTrue, faceFound_);

             window->graph(isFalse, faceFound_);
             {
                 window->parallel([](cv::UMat& fout, const cv::UMat& in, cv::UMat& lh) {
                     if (side_by_side) {
                         //create side-by-side view without a result (using the input image for both sides)
                         fout = cv::Scalar::all(0);
                         cv::resize(in, lh, cv::Size(0, 0), 0.5, 0.5);
                         lh.copyTo(fout(cv::Rect(0, 0, lh.size().width, lh.size().height)));
                         lh.copyTo(fout(cv::Rect(lh.size().width, 0, lh.size().width, lh.size().height)));
                     } else {
                         in.copyTo(fout);
                     }
                 }, frameOut_, input_, lhalf_);
             }
             window->endgraph(isFalse, faceFound_);

             window->graph(always);
             {
                 //write the result to the framebuffer
                 window->fb([](cv::UMat &frameBuffer, const cv::UMat& f) {
                     cvtColor(f, frameBuffer, cv::COLOR_BGR2BGRA);
                 }, frameOut_);
             }
             window->endgraph(always);
         } catch (std::exception &ex) {
             cerr << ex.what() << endl;
         }
     }
 };
 #ifndef __EMSCRIPTEN__
@@ -376,51 +381,51 @@ int main() {
 #endif
     using namespace cv::v4d;
     cv::Ptr<V4D> window = V4D::make(WIDTH, HEIGHT, "Beautification Demo", ALL, OFFSCREEN);
-    window->printSystemInfo();
+//    window->printSystemInfo();
     window->setStretching(stretch);
-    if (!OFFSCREEN) {
-        window->imgui([window](ImGuiContext* ctx){
-            using namespace ImGui;
-            SetCurrentContext(ctx);
-            Begin("Effect");
-            Text("Display");
-            Checkbox("Side by side", &side_by_side);
-            if(Checkbox("Stetch", &stretch)) {
-                window->setStretching(true);
-            } else
-                window->setStretching(false);
-
-#ifndef __EMSCRIPTEN__
-            if(Button("Fullscreen")) {
-                window->setFullscreen(!window->isFullscreen());
-            };
-#endif
-
-            if(Button("Offscreen")) {
-                window->setVisible(!window->isVisible());
-            };
-
-            Text("Face Skin");
-            SliderInt("Blur", &blur_skin_kernel_size, 0, 128);
-            SliderFloat("Saturation", &skin_saturation, 0.0f, 100.0f);
-            SliderFloat("Contrast", &skin_contrast, 0.0f, 1.0f);
-            Text("Eyes and Lips");
-            SliderFloat("Saturation ", &eyes_and_lips_saturation, 0.0f, 100.0f);
-            End();
-        });
-    }
+//    if (!OFFSCREEN) {
+//        window->imgui([window](ImGuiContext* ctx){
+//            using namespace ImGui;
+//            SetCurrentContext(ctx);
+//            Begin("Effect");
+//            Text("Display");
+//            Checkbox("Side by side", &side_by_side);
+//            if(Checkbox("Stetch", &stretch)) {
+//                window->setStretching(true);
+//            } else
+//                window->setStretching(false);
+//
+//#ifndef __EMSCRIPTEN__
+//            if(Button("Fullscreen")) {
+//                window->setFullscreen(!window->isFullscreen());
+//            };
+//#endif
+//
+//            if(Button("Offscreen")) {
+//                window->setVisible(!window->isVisible());
+//            };
+//
+//            Text("Face Skin");
+//            SliderInt("Blur", &blur_skin_kernel_size, 0, 128);
+//            SliderFloat("Saturation", &skin_saturation, 0.0f, 100.0f);
+//            SliderFloat("Contrast", &skin_contrast, 0.0f, 1.0f);
+//            Text("Eyes and Lips");
+//            SliderFloat("Saturation ", &eyes_and_lips_saturation, 0.0f, 100.0f);
+//            End();
+//        });
+//    }
 #ifndef __EMSCRIPTEN__
     auto src = makeCaptureSource(window, argv[1]);
     window->setSource(src);
 //    Sink sink = makeWriterSink(window, OUTPUT_FILENAME, src.fps(), cv::Size(WIDTH, HEIGHT));
 //    window->setSink(sink);
 #else
-    Source src = makeCaptureSource(WIDTH, HEIGHT, window);
+    auto src = makeCaptureSource(window);
     window->setSource(src);
 #endif
-    window->run<BeautyDemoPlan>(2);
+    window->run<BeautyDemoPlan>(0);
     return 0;
 }

modules/v4d/samples/cube-demo.cpp
@@ -12,7 +12,6 @@
 constexpr long unsigned int WIDTH = 1280;
 constexpr long unsigned int HEIGHT = 720;
 #else
-constexpr size_t NUMBER_OF_CUBES = 5;
 constexpr long unsigned int WIDTH = 960;
 constexpr long unsigned int HEIGHT = 960;
 #endif
@@ -210,30 +209,32 @@ static void glow_effect(const cv::UMat& src, cv::UMat& dst, const int ksize) {
 using namespace cv::v4d;
 class CubeDemoPlan : public Plan {
     cv::UMat frame_;
-    GLuint vao;
-    GLuint shaderProgram;
-    GLuint uniformTransform;
+    GLuint vao_;
+    GLuint shaderProgram_;
+    GLuint uniformTransform_;
 public:
     void setup(cv::Ptr<V4D> window) {
         window->gl([](const cv::Size& sz, GLuint& v, GLuint& sp, GLuint& ut){
             init_scene(sz, v, sp, ut);
-        }, window->fbSize(), vao, shaderProgram, uniformTransform);
+        }, window->fbSize(), vao_, shaderProgram_, uniformTransform_);
     }
     void infer(cv::Ptr<V4D> window) {
         window->gl([](){
             //Clear the background
-            glClearColor(0.2, 0.24, 0.4, 1);
+            glClearColor(0.2f, 0.24f, 0.4f, 1.0f);
             glClear(GL_COLOR_BUFFER_BIT);
         });
         //Render using multiple OpenGL contexts
         window->gl([](GLuint& v, GLuint& sp, GLuint& ut){
             render_scene(v, sp, ut);
-        }, vao, shaderProgram, uniformTransform);
+        }, vao_, shaderProgram_, uniformTransform_);
         //Aquire the frame buffer for use by OpenCV
         window->fb([](cv::UMat& framebuffer, cv::UMat& f) {
+#ifndef __EMSCRIPTEN__
             glow_effect(framebuffer, framebuffer, glow_kernel_size);
+#endif
             framebuffer.copyTo(f);
         }, frame_);
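The #ifndef above skips the sample's glow_effect() post-processing in WebAssembly builds. For context, a generic CPU bloom of the same flavor in plain OpenCV (an illustrative stand-in, not the sample's glow_effect implementation; kernel size and blend weight are arbitrary):

#include <opencv2/imgproc.hpp>

// Illustrative bloom: blur a downscaled copy and add it back onto the original as a halo.
static void simple_glow(const cv::UMat& src, cv::UMat& dst, int ksize) {
    cv::UMat down, blurred, halo;
    cv::resize(src, down, cv::Size(), 0.5, 0.5);            // work at half resolution
    cv::boxFilter(down, blurred, -1, cv::Size(ksize, ksize));
    cv::resize(blurred, halo, src.size());                   // upscale the blurred copy
    cv::addWeighted(src, 1.0, halo, 0.8, 0.0, dst);          // blend the halo back in
}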
@@ -244,9 +245,7 @@ public:
 };
 int main() {
-    cv::Ptr<V4D> window = V4D::make(WIDTH, HEIGHT, "Many Cubes Demo", IMGUI, OFFSCREEN);
-    window->printSystemInfo();
+    cv::Ptr<V4D> window = V4D::make(WIDTH, HEIGHT, "Cube Demo", ALL, OFFSCREEN);
 #ifndef __EMSCRIPTEN__
     //Creates a writer sink (which might be hardware accelerated)
     auto sink = makeWriterSink(window, OUTPUT_FILENAME, FPS, cv::Size(WIDTH, HEIGHT));

modules/v4d/samples/custom_source_and_sink.cpp
@@ -40,7 +40,7 @@ int main() {
 class CustomSourceAndSinkPlan : public Plan {
     string hr_ = "Hello Rainbow!";
+public:
     void infer(cv::Ptr<V4D> win) override {
         win->capture();
@@ -58,5 +58,7 @@ int main() {
         win->write();
     }
 };
+    window->run<CustomSourceAndSinkPlan>(0);
 }

modules/v4d/samples/display_image.cpp
@@ -11,14 +11,16 @@ int main() {
     cv::Ptr<V4D> window = V4D::make(960, 960, "Display an Image");
     class DisplayImagePlan : public Plan {
-        UMat image_ = imread(samples::findFile("lena.jpg")).getUMat(ACCESS_READ);
+        UMat image_;
+    public:
         void setup(Ptr<V4D> win) override {
+            win->parallel([](cv::UMat& image){
 #ifdef __EMSCRIPTEN__
-            image_ = read_embedded_image("doc/lena.png").getUMat(ACCESS_READ);
+                image = read_embedded_image("doc/lena.png").getUMat(ACCESS_READ);
 #else
-            image_ = imread(samples::findFile("lena.jpg")).getUMat(ACCESS_READ);
+                image = imread(samples::findFile("lena.jpg")).getUMat(ACCESS_READ);
 #endif
+            }, image_);
         }
         //Display the framebuffer in the native window in an endless loop.
         void infer(Ptr<V4D> win) override {
@@ -26,4 +28,6 @@ int main() {
             win->feed(image_);
         }
     };
+    window->run<DisplayImagePlan>(0);
 }

modules/v4d/samples/display_image_fb.cpp
@@ -8,11 +8,11 @@ int main() {
     //Creates a V4D object
     Ptr<V4D> window = V4D::make(960, 960, "Display an Image through direct FB access");
-    class DislayImageFB : public Plan {
+    class DisplayImageFB : public Plan {
         UMat image_;
         UMat resized_;
         UMat converted_;
+    public:
         void setup(cv::Ptr<V4D> win) {
             win->parallel([](cv::UMat& image, cv::UMat& resized, cv::UMat& converted, const cv::Size& sz){
                 //Loads an image as a UMat (just in case we have hardware acceleration available)
@@ -36,4 +36,6 @@ int main() {
             }, converted_);
         }
     };
+    window->run<DisplayImageFB>(0);
 }

modules/v4d/samples/display_image_nvg.cpp
@@ -13,7 +13,7 @@ struct Image_t {
 };
 int main() {
-    cv::Ptr<V4D> window = V4D::make(960, 960, "Display an Image using NanoVG");
+    cv::Ptr<V4D> window = V4D::make(960, 960, "Display an Image using NanoVG", ALL, false, true);
     class DisplayImageNVG : public Plan {
         Image_t image_;

modules/v4d/samples/font-demo.cpp
@@ -198,7 +198,7 @@ int main() {
         setup_gui(window);
     }
-    window->printSystemInfo();
+//    window->printSystemInfo();
     //The text to display
     string txt = cv::getBuildInformation();

modules/v4d/samples/many_cubes-demo.cpp
@@ -194,6 +194,7 @@ static void render_scene(const double& x, const double& y, const double& angleMod
     glDrawElements(GL_TRIANGLES, triangles * 3, GL_UNSIGNED_SHORT, NULL);
 }
+#ifndef __EMSCRIPTEN__
 //applies a glow effect to an image
 static void glow_effect(const cv::UMat& src, cv::UMat& dst, const int ksize) {
     thread_local cv::UMat resize;
@@ -217,6 +218,7 @@ static void glow_effect(const cv::UMat& src, cv::UMat& dst, const int ksize) {
     cv::bitwise_not(dst, dst);
 }
+#endif
 using namespace cv::v4d;
 class ManyCubesDemoPlan : public Plan {
@@ -250,7 +252,9 @@ public:
         //Aquire the frame buffer for use by OpenCV
         window->fb([](cv::UMat& framebuffer, cv::UMat& f) {
+#ifndef __EMSCRIPTEN__
             glow_effect(framebuffer, framebuffer, glow_kernel_size);
+#endif
             framebuffer.copyTo(f);
         }, frame_);
@@ -262,14 +266,13 @@ public:
 int main() {
     cv::Ptr<V4D> window = V4D::make(WIDTH, HEIGHT, "Many Cubes Demo", IMGUI, OFFSCREEN);
-    window->printSystemInfo();
 #ifndef __EMSCRIPTEN__
     //Creates a writer sink (which might be hardware accelerated)
     auto sink = makeWriterSink(window, OUTPUT_FILENAME, FPS, cv::Size(WIDTH, HEIGHT));
     window->setSink(sink);
 #endif
-    window->run<ManyCubesDemoPlan>(9);
+    window->run<ManyCubesDemoPlan>(0);
     return 0;
 }

modules/v4d/samples/nanovg-demo.cpp
@@ -131,16 +131,17 @@ class NanoVGDemoPlan : public Plan {
     cv::UMat bgra_;
     cv::UMat hsv_;
     cv::UMat hueChannel_;
+    long cnt_ = 0;
     double hue_;
 public:
     void infer(cv::Ptr<V4D> window) override {
-        window->parallel([](const uint64_t& frameCount, double& hue){
+        window->parallel([](long& cnt, double& hue){
             //we use frame count to calculate the current hue
-            float t = frameCount / 60.0;
+            double t = ++cnt / 60.0;
             //nanovg hue fading depending on t
             hue = (sinf(t * 0.12) + 1.0) * 127.5;
-        }, window->frameCount(), hue_);
+        }, cnt_, hue_);
         window->capture();
@@ -149,7 +150,7 @@ public:
             cvtColor(framebuffer, rgb, cv::COLOR_BGRA2RGB);
         }, rgb_);
-        window->parallel([](cv::UMat& rgb, cv::UMat& hsv, std::vector<cv::UMat>& hsvChannels, double hue){
+        window->parallel([](cv::UMat& rgb, cv::UMat& hsv, std::vector<cv::UMat>& hsvChannels, double& hue){
             //Color-conversion from RGB to HSV
             cv::cvtColor(rgb, hsv, cv::COLOR_RGB2HSV_FULL);
@@ -171,6 +172,7 @@ public:
         //Render using nanovg
         window->nvg([](const cv::Size &sz, const double& h) {
+            cerr << "HUE: " << h << endl;
             draw_color_wheel(sz.width - 300, sz.height - 300, 250.0f, 250.0f, h);
         }, window->fbSize(), hue_);
@@ -196,7 +198,7 @@ int main() {
     window->setSource(src);
     window->setSink(sink);
 #else
-    Source src = makeCaptureSource(WIDTH, HEIGHT, window);
+    auto src = makeCaptureSource(window);
     window->setSource(src);
 #endif

modules/v4d/samples/optflow-demo.cpp
@@ -404,11 +404,13 @@ class OptflowPlan : public Plan {
 public:
     virtual ~OptflowPlan() override {};
     virtual void infer(cv::Ptr<V4D> window) override {
-        window->capture([](const cv::UMat& videoFrame, cv::UMat& d, cv::UMat& b) {
+        window->capture();
+        window->fb([](const cv::UMat& framebuffer, cv::UMat& d, cv::UMat& b) {
             //resize to foreground scale
-            cv::resize(videoFrame, d, cv::Size(videoFrame.size().width * fg_scale, videoFrame.size().height * fg_scale));
+            cv::resize(framebuffer, d, cv::Size(framebuffer.size().width * fg_scale, framebuffer.size().height * fg_scale));
             //save video background
-            videoFrame.copyTo(b);
+            framebuffer.copyTo(b);
         }, down, background);
         window->parallel([](const cv::UMat& d, cv::UMat& dng, cv::UMat& dmmg, std::vector<cv::Point2f>& dp){
@@ -435,15 +437,12 @@ public:
             dpg = dng.clone();
         }, downPrevGrey, downNextGrey);
-        window->fb([](cv::UMat& framebuffer, cv::UMat& b, cv::UMat& f, cv::UMat& r) {
+        window->fb([](cv::UMat& framebuffer, cv::UMat& b, cv::UMat& f) {
             //Put it all together (OpenCL)
             composite_layers(b, f, framebuffer, framebuffer, glow_kernel_size, fg_loss, background_mode, post_proc_mode);
-            framebuffer.copyTo(r);
-        }, background, foreground, result);
-        window->write([](cv::UMat& videoFrame, cv::UMat& r) {
-            r.copyTo(videoFrame);
-        }, result);
+        }, background, foreground);
+        window->write();
     }
 };
@@ -461,7 +460,6 @@ int main(int argc, char **argv) {
     try {
         using namespace cv::v4d;
         cv::Ptr<V4D> window = V4D::make(WIDTH, HEIGHT, "Sparse Optical Flow Demo", ALL, OFFSCREEN);
-        window->printSystemInfo();
         window->setStretching(stretch);
         if (!OFFSCREEN) {
 #ifndef __EMSCRIPTEN__
@@ -477,7 +475,7 @@ int main(int argc, char **argv) {
         auto sink = makeWriterSink(window, OUTPUT_FILENAME, src->fps(), cv::Size(WIDTH, HEIGHT));
         window->setSink(sink);
 #else
-        cv::Ptr<Source> src = makeCaptureSource(WIDTH, HEIGHT, window);
+        cv::Ptr<Source> src = makeCaptureSource(window);
         window->setSource(src);
 #endif

modules/v4d/samples/render_opengl.cpp
@@ -5,8 +5,6 @@ using namespace cv;
 using namespace cv::v4d;
 int main() {
-    Ptr<V4D> window = V4D::make(960, 960, "GL Blue Screen");
     class RenderOpenGLPlan : public Plan {
     public:
         void setup(Ptr<V4D> win) override {
@@ -23,6 +21,7 @@ int main() {
         }
     };
+    Ptr<V4D> window = V4D::make(960, 960, "GL Blue Screen");
     window->run<RenderOpenGLPlan>(0);
 }

modules/v4d/samples/shader-demo.cpp
@@ -268,7 +268,6 @@ static void setup_gui(cv::Ptr<V4D> window) {
 }
 class ShaderDemoPlan : public Plan {
-    cv::UMat frame_;
 public:
     void setup(cv::Ptr<V4D> window) override {
         window->gl([](const cv::Size &sz) {
@@ -277,20 +276,19 @@ void setup(cv::Ptr<V4D> window) override {
     }
     void infer(cv::Ptr<V4D> window) override {
-        window->capture(frame_);
+        window->capture();
         window->gl([](const cv::Size &sz) {
             render_scene(sz);
         }, window->fbSize());
 #ifndef __EMSCRIPTEN__
-        window->fb([](cv::UMat& framebuffer, cv::UMat& f) {
+        window->fb([](cv::UMat& framebuffer) {
             glow_effect(framebuffer, framebuffer, glow_kernel_size);
-            framebuffer.copyTo(f);
-        }, frame_);
+        });
 #endif
-        window->write(frame_);
+        window->write();
     }
 };
@@ -309,7 +307,6 @@ int main() {
         setup_gui(window);
     }
-    window->printSystemInfo();
 #ifndef __EMSCRIPTEN__
     auto src = makeCaptureSource(window, argv[1]);
@@ -317,11 +314,11 @@ int main() {
     auto sink = makeWriterSink(window, OUTPUT_FILENAME, src->fps(), cv::Size(WIDTH, HEIGHT));
     window->setSink(sink);
 #else
-    Source src = makeCaptureSource(WIDTH, HEIGHT, window);
+    auto src = makeCaptureSource(window);
     window->setSource(src);
 #endif
-    window->run<ShaderDemoPlan>(3);
+    window->run<ShaderDemoPlan>(0);
 } catch (std::exception& ex) {
     cerr << "Exception: " << ex.what() << endl;
 }

modules/v4d/samples/vector_graphics_and_fb.cpp
@@ -5,8 +5,6 @@ using namespace cv;
 using namespace cv::v4d;
 int main() {
-    Ptr<V4D> window = V4D::make(960, 960, "Vector Graphics and Framebuffer");
     class VectorGraphicsAndFBPlan : public Plan {
     public:
         void infer(Ptr<V4D> win) override {
@@ -101,7 +99,7 @@ int main() {
             });
         }
     };
+    Ptr<V4D> window = V4D::make(960, 960, "Vector Graphics and Framebuffer");
     window->run<VectorGraphicsAndFBPlan>(0);
 }

modules/v4d/samples/video-demo.cpp
@@ -190,7 +190,6 @@ static void glow_effect(const cv::UMat& src, cv::UMat& dst, const int ksize) {
 using namespace cv::v4d;
 class VideoDemoPlan: public Plan {
-    cv::UMat frame_;
 public:
     void setup(cv::Ptr<V4D> window) override {
         window->gl([]() {
@@ -198,19 +197,21 @@ public:
         });
     }
     void infer(cv::Ptr<V4D> window) override {
-        window->capture(frame_);
+        window->capture();
         window->gl([]() {
             render_scene();
         });
-        window->fb([](cv::UMat &framebuffer, cv::UMat& f) {
+#ifndef __EMSCRIPTEN__
+        window->fb([](cv::UMat &framebuffer) {
             glow_effect(framebuffer, framebuffer, glow_kernel_size);
-            framebuffer.copyTo(f);
-        }, frame_);
+        });
+#endif
         //Ignored in WebAssmebly builds because there is no sink set.
-        window->write(frame_);
+        window->write();
     }
 };
@@ -225,7 +226,6 @@ int main() {
 #endif
     using namespace cv::v4d;
     cv::Ptr<V4D> window = V4D::make(WIDTH, HEIGHT, "Video Demo", NONE, OFFSCREEN, false, 0);
-    window->printSystemInfo();
 #ifndef __EMSCRIPTEN__
     //Creates a source from a file or a device
@@ -236,7 +236,7 @@ int main() {
     window->setSink(sink);
 #else
     //Creates a webcam source is available
-    auto src = makeCaptureSource(WIDTH, HEIGHT, window);
+    auto src = makeCaptureSource(window);
     window->setSource(src);
 #endif

modules/v4d/samples/video_editing.cpp
@@ -13,7 +13,12 @@ int main(int argc, char** argv) {
     class VideoEditingPlan : public Plan {
         cv::UMat frame_;
         const string hv_ = "Hello Video!";
+        cv::Size fbSz_;
     public:
+        void setup(Ptr<V4D> win) override {
+            fbSz_ = win->fbSize();
+        }
         void infer(Ptr<V4D> win) override {
             //Capture video from the source
             win->capture();
@@ -27,7 +32,7 @@ int main(int argc, char** argv) {
                 fillColor(Scalar(255, 0, 0, 255));
                 textAlign(NVG_ALIGN_CENTER | NVG_ALIGN_TOP);
                 text(sz.width / 2.0, sz.height / 2.0, str.c_str(), str.c_str() + str.size());
-            }, win->fbSize(), hv_);
+            }, fbSz_, hv_);
             //Write video to the sink (do nothing in case of WebAssembly)
             win->write();
@@ -45,7 +50,7 @@ int main(int argc, char** argv) {
     window->setSink(sink);
 #else
     //Make a webcam Source
-    Source src = makeCaptureSource(960, 960, window);
+    auto src = makeCaptureSource(window);
     //Attach webcam source
     window->setSource(src);
 #endif

modules/v4d/src/detail/framebuffercontext.cpp
@@ -133,16 +133,17 @@ void FrameBufferContext::initWebGLCopy(const size_t& index) {
 #endif
 }
-void FrameBufferContext::doWebGLCopy(FrameBufferContext& other) {
+void FrameBufferContext::doWebGLCopy(cv::Ptr<FrameBufferContext> other) {
 #ifdef __EMSCRIPTEN__
-    size_t index = other.getIndex();
+    cerr << "do copy" << endl;
+    size_t index = other->getIndex();
     this->makeCurrent();
     int width = getWindowSize().width;
     int height = getWindowSize().height;
     {
-        FrameBufferContext::GLScope glScope(*this, GL_READ_FRAMEBUFFER);
-        other.blitFrameBufferToFrameBuffer(
-                cv::Rect(0,0, other.size().width, other.size().height),
+        FrameBufferContext::GLScope glScope(self(), GL_READ_FRAMEBUFFER);
+        other->blitFrameBufferToFrameBuffer(
+                cv::Rect(0,0, other->size().width, other->size().height),
                 this->getWindowSize(),
                 0, false);
         emscripten_webgl_commit_frame();
@@ -703,7 +704,7 @@ void FrameBufferContext::begin(GLenum framebufferTarget) {
 }
 void FrameBufferContext::end() {
-    GL_CHECK(glFlush());
+    GL_CHECK(glFinish());
     this->makeNoneCurrent();
     // this->fence();
 }
@@ -793,7 +794,9 @@ void FrameBufferContext::makeCurrent() {
 }
 void FrameBufferContext::makeNoneCurrent() {
+#ifndef __EMSCRIPTEN__
     glfwMakeContextCurrent(nullptr);
+#endif
 }

modules/v4d/src/detail/glcontext.cpp
@@ -13,7 +13,7 @@ GLContext::GLContext(cv::Ptr<FrameBufferContext> fbContext) :
         mainFbContext_(fbContext), glFbContext_(new FrameBufferContext(*fbContext->getV4D(), "OpenGL", *fbContext)) {
 #ifdef __EMSCRIPTEN__
     run_sync_on_main<19>([&,this](){
-        mainFbContext_.initWebGLCopy(fbCtx()->getIndex());
+        mainFbContext_->initWebGLCopy(fbCtx()->getIndex());
     });
 #endif
 }
@@ -42,7 +42,7 @@ void GLContext::execute(std::function<void()> fn) {
     }
     if(!fbCtx()->isShared()) {
 #ifdef __EMSCRIPTEN__
-        mainFbContext_.doWebGLCopy(fbCtx());
+        mainFbContext_->doWebGLCopy(fbCtx());
 #else
         UMat tmp;
         fbCtx()->copyTo(tmp);

modules/v4d/src/detail/nanovgcontext.cpp
@@ -35,7 +35,7 @@ NanoVGContext::NanoVGContext(cv::Ptr<FrameBufferContext> fbContext) :
         nvgCreateFont(context_, "sans-bold", "modules/v4d/assets/fonts/Roboto-Bold.ttf");
 #endif
 #ifdef __EMSCRIPTEN__
-        mainFbContext_.initWebGLCopy(fbCtx()->getIndex());
+        mainFbContext_->initWebGLCopy(fbCtx()->getIndex());
 #endif
     }
     });
@@ -63,7 +63,7 @@ void NanoVGContext::execute(std::function<void()> fn) {
     }
     if (!fbCtx()->isShared()) {
 #ifdef __EMSCRIPTEN__
-        mainFbContext_.doWebGLCopy(fbCtx());
+        mainFbContext_->doWebGLCopy(fbCtx());
 #else
         UMat tmp;
         fbCtx()->copyTo(tmp);

modules/v4d/src/detail/sourcecontext.cpp
@@ -23,20 +23,36 @@ void SourceContext::execute(std::function<void()> fn) {
         CLExecScope_t scope(getCLExecContext());
 #endif
         if (mainFbContext_->getV4D()->hasSource()) {
-            auto p = mainFbContext_->getV4D()->getSource()->operator ()();
-            currentSeqNr_ = p.first;
-            resizePreserveAspectRatio(p.second, captureBufferRGB_, mainFbContext_->size());
-            cv::cvtColor(captureBufferRGB_, sourceBuffer(), cv::COLOR_RGB2BGRA);
+            auto src = mainFbContext_->getV4D()->getSource();
+
+            if(src->isOpen()) {
+                auto p = src->operator ()();
+                currentSeqNr_ = p.first;
+
+                if(p.second.empty())
+                    p.second.create(mainFbContext_->size(), CV_8UC3);
+
+                resizePreserveAspectRatio(p.second, captureBufferRGB_, mainFbContext_->size());
+                cv::cvtColor(captureBufferRGB_, sourceBuffer(), cv::COLOR_RGB2BGRA);
+            }
         }
         fn();
 #ifndef __EMSCRIPTEN__
     } else {
         if (mainFbContext_->getV4D()->hasSource()) {
-            auto p = mainFbContext_->getV4D()->getSource()->operator ()();
-            currentSeqNr_ = p.first;
-            resizePreserveAspectRatio(p.second, captureBufferRGB_, mainFbContext_->size());
-            cv::cvtColor(captureBufferRGB_, sourceBuffer(), cv::COLOR_RGB2BGRA);
+            auto src = mainFbContext_->getV4D()->getSource();
+
+            if(src->isOpen()) {
+                auto p = src->operator ()();
+                currentSeqNr_ = p.first;
+
+                if(p.second.empty())
+                    p.second.create(mainFbContext_->size(), CV_8UC3);
+
+                resizePreserveAspectRatio(p.second, captureBufferRGB_, mainFbContext_->size());
+                cv::cvtColor(captureBufferRGB_, sourceBuffer(), cv::COLOR_RGB2BGRA);
+            }
         }
         fn();
     }
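The fallback above substitutes a blank RGB frame when an open source yields nothing. In isolation the guard looks roughly like this (plain OpenCV; size and helper name are illustrative, not part of the commit):

#include <opencv2/core.hpp>

// If the producer returned no data, allocate a placeholder RGB buffer of the
// target size so the downstream resize/convert steps always have valid input.
static void ensure_frame(cv::UMat& frame, const cv::Size& targetSize) {
    if (frame.empty())
        frame.create(targetSize, CV_8UC3);   // note: contents are left uninitialized
}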

modules/v4d/src/source.cpp
@@ -19,15 +19,8 @@ Source::Source() :
 Source::~Source() {
 }
-bool Source::isReady() {
-    if (generator_)
-        return true;
-    else
-        return false;
-}
 bool Source::isOpen() {
-    return open_;
+    return generator_ && open_;
 }
 bool Source::isAsync() {

modules/v4d/src/util.cpp
@@ -39,8 +39,6 @@ namespace cv {
 namespace v4d {
 namespace detail {
-thread_local std::mutex ThreadLocal::mtx_;
-thread_local bool ThreadLocal::sync_run_;
 std::mutex Global::mtx_;
 uint64_t Global::frame_cnt_ = 0;
 uint64_t Global::start_time_ = get_epoch_nanos();
@@ -509,8 +507,8 @@ public:
 };
 cv::Ptr<HTML5Capture> capture = nullptr;
-int capture_width = 0;
-int capture_height = 0;
+static thread_local int capture_width = 0;
+static thread_local int capture_height = 0;
 extern "C" {
@@ -522,10 +520,10 @@ void v4dInitCapture(int width, int height) {
 }
-cv::Ptr<Source> makeCaptureSource(int width, int height, cv::Ptr<V4D> window) {
+cv::Ptr<Source> makeCaptureSource(cv::Ptr<V4D> window) {
     using namespace std;
-    return new Source([=](cv::UMat& frame) {
+    return new Source([window](cv::UMat& frame) {
         if(capture_width > 0 && capture_height > 0) {
             try {
                 run_sync_on_main<17>([&]() {

modules/v4d/src/v4d.cpp
@@ -204,31 +204,26 @@ bool V4D::hasSource() {
     return source_ != nullptr;
 }
-void V4D::feed(cv::InputArray in) {
-    CV_Assert(false);
-//#ifndef __EMSCRIPTEN__
-//    CLExecScope_t scope(fbCtx()->getCLExecContext());
-//#endif
-//    TimeTracker::getInstance()->execute("feed", [this, &in](){
-//        cv::UMat frame;
-//        captureCtx().capture([&](cv::UMat& videoFrame) {
-//            in.copyTo(videoFrame);
-//        }, frame);
-//
-//        fb([](cv::UMat& fb, const cv::UMat& f) {
-//            f.copyTo(fb);
-//        }, frame);
-//    });
-}
-cv::_InputArray V4D::fetch() {
-    CV_Assert(false);
+void V4D::feed(cv::UMat& in) {
+    static thread_local cv::UMat frame;
+
+    parallel([](cv::UMat& src, cv::UMat& f, const cv::Size sz) {
+        cv::UMat rgb;
+        resizePreserveAspectRatio(src, rgb, sz);
+        cv::cvtColor(rgb, f, cv::COLOR_RGB2BGRA);
+    }, in, frame, mainFbContext_->size());
+
+    fb([](cv::UMat& frameBuffer, const cv::UMat& f) {
+        f.copyTo(frameBuffer);
+    }, frame);
+}
+
+cv::UMat V4D::fetch() {
     cv::UMat frame;
-//    TimeTracker::getInstance()->execute("copyTo", [this, &frame](){
-//        fb([](const cv::UMat& fb, cv::UMat& f) {
-//            fb.copyTo(f);
-//        }, frame);
-//    });
+    fb([](const cv::UMat& fb, cv::UMat& f) {
+        fb.copyTo(f);
+    }, frame);
     return frame;
 }
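A hedged usage sketch of the reworked feed()/fetch() pair (method names and types come from the hunk above; the image loading is illustrative and reuses the lena.jpg convention from the samples):

// Inside application code that owns a cv::Ptr<V4D> window:
cv::UMat image = cv::imread(cv::samples::findFile("lena.jpg")).getUMat(cv::ACCESS_READ);

window->feed(image);                   // letterbox, convert to BGRA and copy into the framebuffer
cv::UMat snapshot = window->fetch();   // copy the current framebuffer contents back out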
@@ -351,9 +346,21 @@ bool V4D::isFocused() {
 }
 void V4D::swapContextBuffers() {
+    {
+        FrameBufferContext::GLScope glScope(glCtx(-1)->fbCtx(), GL_READ_FRAMEBUFFER);
+        glCtx(-1)->fbCtx()->blitFrameBufferToFrameBuffer(viewport(), glCtx(-1)->fbCtx()->getWindowSize(), 0, isStretching());
+        // GL_CHECK(glFinish());
+#ifndef __EMSCRIPTEN__
+        glfwSwapBuffers(glCtx(-1)->fbCtx()->getGLFWWindow());
+#else
+        emscripten_webgl_commit_frame();
+#endif
+    }
     for(size_t i = 0; i < numGlCtx(); ++i) {
         FrameBufferContext::GLScope glScope(glCtx(i)->fbCtx(), GL_READ_FRAMEBUFFER);
         glCtx(i)->fbCtx()->blitFrameBufferToFrameBuffer(viewport(), glCtx(i)->fbCtx()->getWindowSize(), 0, isStretching());
+        // GL_CHECK(glFinish());
 #ifndef __EMSCRIPTEN__
         glfwSwapBuffers(glCtx(i)->fbCtx()->getGLFWWindow());
 #else
@@ -364,6 +371,7 @@ void V4D::swapContextBuffers() {
     if(hasNvgCtx()) {
         FrameBufferContext::GLScope glScope(nvgCtx()->fbCtx(), GL_READ_FRAMEBUFFER);
         nvgCtx()->fbCtx()->blitFrameBufferToFrameBuffer(viewport(), nvgCtx()->fbCtx()->getWindowSize(), 0, isStretching());
+        // GL_CHECK(glFinish());
 #ifndef __EMSCRIPTEN__
         glfwSwapBuffers(nvgCtx()->fbCtx()->getGLFWWindow());
 #else
@@ -395,16 +403,21 @@ bool V4D::display() {
             Global::fps() = (fcnt / diff_seconds);
             cerr << "\rFPS:" << Global::fps() << endl;
         }
+#ifndef __EMSCRIPTEN__
+        if(debug_) {
+            swapContextBuffers();
+        }
+#else
+        swapContextBuffers();
+#endif
         {
             FrameBufferContext::GLScope glScope(fbCtx(), GL_READ_FRAMEBUFFER);
             fbCtx()->blitFrameBufferToFrameBuffer(viewport(), fbCtx()->getWindowSize(), 0, isStretching());
         }
         if(hasImguiCtx())
             imguiCtx()->render(getShowFPS());
-#ifndef __EMSCRIPTEN__
-        if(debug_)
-            swapContextBuffers();
-#endif
+        fbCtx()->makeCurrent();
 #ifndef __EMSCRIPTEN__
         glfwSwapBuffers(fbCtx()->getGLFWWindow());
 #else
@@ -416,7 +429,7 @@ bool V4D::display() {
         {
             FrameBufferContext::GLScope glScope(fbCtx(), GL_DRAW_FRAMEBUFFER);
             GL_CHECK(glViewport(0, 0, fbCtx()->size().width, fbCtx()->size().height));
-            GL_CHECK(glClearColor(0,0,0,255));
+            GL_CHECK(glClearColor(0,0,0,0));
             GL_CHECK(glClear(GL_COLOR_BUFFER_BIT));
         }
 #ifndef __EMSCRIPTEN__
