implemented time tracker

pull/3471/head
kallaballa 2 years ago
parent 49619e65a9
commit f68c1a9695
  1. 47
      modules/v4d/include/opencv2/v4d/v4d.hpp
  2. 11
      modules/v4d/samples/beauty-demo.cpp
  3. 4
      modules/v4d/samples/cube-demo.cpp
  4. 87
      modules/v4d/src/detail/framebuffercontext.cpp
  5. 144
      modules/v4d/src/detail/nanoguicontext.cpp
  6. 3
      modules/v4d/src/detail/nanoguicontext.hpp
  7. 0
      modules/v4d/src/detail/timetracker.cpp
  8. 51
      modules/v4d/src/detail/timetracker.hpp
  9. 231
      modules/v4d/src/v4d.cpp

@ -83,6 +83,19 @@ template<typename T> void find_widgets(const nanogui::Widget* parent, std::vecto
}
}
}
// Render `i` as a fixed-width, zero-padded hexadecimal string, e.g. 0x0000002a.
// The width is twice sizeof(T), i.e. two hex digits per byte of the value type.
template<typename T> std::string int_to_hex( T i )
{
std::stringstream ss;
ss << "0x" << std::setw(sizeof(T) * 2) << std::setfill('0') << std::hex << i;
return ss.str();
}
// Produce a stable hexadecimal identifier for the callable wrapped by `f`,
// used by the time tracker to key per-function timing entries.
//
// The original implementation returned int_to_hex((size_t) &f), i.e. the
// address of the by-value parameter itself — a transient stack address that
// changes between calls and can collide across different functions, so it is
// useless as a map key. Hashing the wrapped callable's dynamic type instead
// yields the same id for the same callable type on every call and distinct
// ids for distinct lambda/function types.
template<typename T, typename... U> std::string func_id(std::function<T (U ...)> f) {
std::stringstream stream;
stream << "0x"
<< std::setfill ('0') << std::setw(sizeof(size_t) * 2)
<< std::hex << f.target_type().hash_code();
return stream.str();
}
}
using namespace cv::v4d::detail;
@ -96,8 +109,6 @@ class CV_EXPORTS V4D {
int samples_;
bool debug_;
cv::Rect viewport_;
float zoomScale_;
cv::Vec2f mousePos_;
bool scaling_;
FrameBufferContext* mainFbContext_ = nullptr;
CLVAContext* clvaContext_ = nullptr;
@ -105,7 +116,6 @@ class CV_EXPORTS V4D {
NanoguiContext* nguiContext_ = nullptr;
std::map<int64_t,GLContext*> glContexts_;
bool closed_ = false;
bool mouseDrag_ = false;
Source source_;
Sink sink_;
concurrent::threadpool pool_;
@ -118,6 +128,7 @@ class CV_EXPORTS V4D {
uint64_t frameCnt_ = 0;
bool showFPS_ = true;
bool printFPS_ = true;
bool showTracking_ = true;
public:
/*!
* Creates a V4D object which is the central object to perform visualizations with.
@ -192,12 +203,12 @@ public:
/*!
* Called to feed an image directly to the framebuffer
*/
CV_EXPORTS void feed(cv::InputArray& in);
CV_EXPORTS void feed(cv::InputArray in);
/*!
* Fetches a copy of the framebuffer
* @return a copy of the framebuffer
*/
CV_EXPORTS InputOutputArray fetch();
CV_EXPORTS _InputOutputArray fetch();
/*!
* Called to capture to the framebuffer from a #cv::viz::Source object provided via #V4D::setSource().
@ -245,30 +256,14 @@ public:
* @param s if true show the GUI.
*/
CV_EXPORTS void showGui(bool s);
/*!
* if zoomed in, move the content by x and y
* @param x The amount on the x-axis to move
* @param y The amount on the y-axis to move
*/
CV_EXPORTS void pan(int x, int y);
/*!
* Zoom by factor.
* @param factor The zoom factor.
*/
CV_EXPORTS void zoom(float factor);
/*!
* Get the window position.
* @return The window position.
*/
CV_EXPORTS cv::Vec2f position();
/*!
* Get current zoom scale.
* @return The zoom scale.
*/
CV_EXPORTS float zoomScale();
/*!
* Get the current viewport.
* @return The current viewport.
* Get the current viewport reference.
* @return The current viewport reference.
*/
CV_EXPORTS cv::Rect& viewport();
/*!
@ -310,6 +305,8 @@ public:
CV_EXPORTS void setShowFPS(bool s);
CV_EXPORTS bool getPrintFPS();
CV_EXPORTS void setPrintFPS(bool p);
CV_EXPORTS bool getShowTracking();
CV_EXPORTS void setShowTracking(bool st);
CV_EXPORTS bool isFullscreen();
/*!
@ -382,11 +379,7 @@ private:
void init();
void setMouseDrag(bool d);
bool isMouseDrag();
cv::Vec2f getMousePosition();
bool keyboard_event(int key, int scancode, int action, int modifiers);
void setMousePosition(int x, int y);
FrameBufferContext& fbCtx();
CLVAContext& clvaCtx();

@ -276,8 +276,8 @@ static bool iteration() {
shapes.clear();
cv::Mat faces;
//Detect faces in the down-scaled image
detector->detect(down, faces);
cv::Mat m = down.getMat(cv::ACCESS_RW);
detector->detect(m, faces);
//Only add the first face
cv::Rect faceRect;
if(!faces.empty())
@ -285,7 +285,6 @@ static bool iteration() {
std::vector<cv::Rect>faceRects = {faceRect};
//find landmarks if faces have been detected
if (!faceRect.empty() && facemark->fit(down, faceRects, shapes)) {
//a FaceFeatures instance for each face
FaceFeatures features(faceRect, shapes[0], float(down.size().width) / WIDTH);
window->nvg([&]() {
@ -311,10 +310,10 @@ static bool iteration() {
//Create the skin mask
cv::subtract(faceOval, eyesAndLipsMaskGrey, faceSkinMaskGrey);
//Create the background mask
cv::bitwise_not(eyesAndLipsMaskGrey,backgroundMaskGrey);
cv::bitwise_not(eyesAndLipsMaskGrey, backgroundMaskGrey);
//boost saturation of eyes and lips
adjust_saturation(input,eyesAndLips, eyes_and_lips_saturation);
adjust_saturation(input, eyesAndLips, eyes_and_lips_saturation);
//reduce skin contrast
multiply(input, cv::Scalar::all(skin_contrast), contrast);
//fix skin brightness
@ -322,7 +321,7 @@ static bool iteration() {
//blur the skin
cv::boxFilter(contrast, blurred, -1, cv::Size(blur_skin_kernel_size, blur_skin_kernel_size), cv::Point(-1, -1), true, cv::BORDER_REPLICATE);
//boost skin saturation
adjust_saturation(blurred,skin, skin_saturation);
adjust_saturation(blurred, skin, skin_saturation);
blender.prepare(cv::Rect(0, 0, WIDTH, HEIGHT));

@ -232,8 +232,8 @@ static bool iteration() {
//Too slow for WASM
#ifndef __EMSCRIPTEN__
//Acquire the frame buffer for use by OpenCV
window->fb([&](cv::UMat& frameBuffer) {
glow_effect(frameBuffer, frameBuffer, GLOW_KERNEL_SIZE);
window->fb([&](cv::UMat& framebuffer) {
glow_effect(framebuffer, framebuffer, GLOW_KERNEL_SIZE);
});
#endif

@ -258,8 +258,15 @@ void FrameBufferContext::init() {
glfwWindow_ = glfwCreateWindow(framebufferSize_.width, framebufferSize_.height, title_.c_str(), nullptr,
sharedWindow_);
if (glfwWindow_ == NULL) {
assert(false);
if (glfwWindow_ == nullptr) {
//retry with native api
glfwWindowHint(GLFW_CONTEXT_CREATION_API, GLFW_NATIVE_CONTEXT_API);
glfwWindow_ = glfwCreateWindow(framebufferSize_.width, framebufferSize_.height, title_.c_str(), nullptr,
sharedWindow_);
if (glfwWindow_ == nullptr) {
throw std::runtime_error("Unable to initialize window.");
}
}
this->makeCurrent();
#ifndef __EMSCRIPTEN__
@ -292,33 +299,33 @@ void FrameBufferContext::init() {
glfwSetWindowUserPointer(getGLFWWindow(), &getV4D());
glfwSetCursorPosCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, double x, double y) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
#ifdef __EMSCRIPTEN__
x *= v4d->pixelRatioX();
y *= v4d->pixelRatioY();
#endif
if(v4d->hasNguiCtx()) {
v4d->nguiCtx().screen().cursor_pos_callback_event(x, y);
}
#ifndef __EMSCRIPTEN__
auto cursor = v4d->getMousePosition();
auto diff = cursor - cv::Vec2f(x, y);
if (v4d->isMouseDrag()) {
v4d->pan(diff[0], -diff[1]);
}
#endif
v4d->setMousePosition(x, y);
// V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
//#ifdef __EMSCRIPTEN__
// x *= v4d->pixelRatioX();
// y *= v4d->pixelRatioY();
//#endif
//
// if(v4d->hasNguiCtx()) {
// v4d->nguiCtx().screen().cursor_pos_callback_event(x, y);
// }
//#ifndef __EMSCRIPTEN__
// auto cursor = v4d->getMousePosition();
// auto diff = cursor - cv::Vec2f(x, y);
// if (v4d->isMouseDrag()) {
// v4d->pan(diff[0], -diff[1]);
// }
//#endif
// v4d->setMousePosition(x, y);
}
);
glfwSetMouseButtonCallback(getGLFWWindow(),
[](GLFWwindow* glfwWin, int button, int action, int modifiers) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
if(v4d->hasNguiCtx())
v4d->nguiCtx().screen().mouse_button_callback_event(button, action, modifiers);
if (button == GLFW_MOUSE_BUTTON_RIGHT) {
v4d->setMouseDrag(action == GLFW_PRESS);
}
// V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
// if(v4d->hasNguiCtx())
// v4d->nguiCtx().screen().mouse_button_callback_event(button, action, modifiers);
// if (button == GLFW_MOUSE_BUTTON_RIGHT) {
// v4d->setMouseDrag(action == GLFW_PRESS);
// }
}
);
glfwSetKeyCallback(getGLFWWindow(),
@ -343,21 +350,21 @@ void FrameBufferContext::init() {
);
glfwSetScrollCallback(getGLFWWindow(),
[](GLFWwindow* glfwWin, double x, double y) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
std::vector<nanogui::Widget*> widgets;
if(v4d->hasNguiCtx()) {
for (auto* w : v4d->nguiCtx().screen().children()) {
auto pt = v4d->getMousePosition();
auto mousePos = nanogui::Vector2i(pt[0], pt[1]);
if(cv::v4d::detail::contains_absolute(w, mousePos)) {
v4d->nguiCtx().screen().scroll_callback_event(x, y);
return;
}
}
}
#ifndef __EMSCRIPTEN__
v4d->zoom(y < 0 ? 1.1 : 0.9);
#endif
// V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
// std::vector<nanogui::Widget*> widgets;
// if(v4d->hasNguiCtx()) {
// for (auto* w : v4d->nguiCtx().screen().children()) {
// auto pt = v4d->getMousePosition();
// auto mousePos = nanogui::Vector2i(pt[0], pt[1]);
// if(cv::v4d::detail::contains_absolute(w, mousePos)) {
// v4d->nguiCtx().screen().scroll_callback_event(x, y);
// return;
// }
// }
// }
//#ifndef __EMSCRIPTEN__
// v4d->zoom(y < 0 ? 1.1 : 0.9);
//#endif
}
);

@ -5,6 +5,7 @@
#include "opencv2/v4d/v4d.hpp"
#include "nanoguicontext.hpp"
#include "timetracker.hpp"
namespace cv {
namespace v4d {
@ -41,90 +42,111 @@ NanoguiContext::NanoguiContext(FrameBufferContext& fbContext) :
});
}
void NanoguiContext::render(bool print, bool graphical) {
if (!first_) {
tick_.stop();
void NanoguiContext::render(bool printFPS, bool showFPS, bool showTracking) {
tick_.stop();
if (tick_.getTimeMilli() > 100) {
if(print) {
cerr << "FPS : " << (fps_ = tick_.getFPS());
if (tick_.getTimeMilli() > 100) {
if(printFPS) {
cerr << "FPS : " << (fps_ = tick_.getFPS()) << endl;
#ifndef __EMSCRIPTEN__
cerr << '\r';
cerr << '\r';
#else
cerr << endl;
cerr << endl;
#endif
}
tick_.reset();
}
tick_.reset();
}
run_sync_on_main<4>([this, graphical](){
string txt = "FPS: " + std::to_string(fps_);
run_sync_on_main<4>([this, showFPS, showTracking](){
string txt = "FPS: " + std::to_string(fps_);
#ifndef __EMSCRIPTEN__
if(!fbCtx().isShared()) {
mainFbContext_.copyTo(copyBuffer_);
fbCtx().copyFrom(copyBuffer_);
}
if(!fbCtx().isShared()) {
mainFbContext_.copyTo(copyBuffer_);
fbCtx().copyFrom(copyBuffer_);
}
#endif
{
{
#ifndef __EMSCRIPTEN__
mainFbContext_.makeCurrent();
GL_CHECK(glFinish());
GL_CHECK(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0));
GL_CHECK(glViewport(0, 0, mainFbContext_.getWindowSize().width, mainFbContext_.getWindowSize().height));
glClear(GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
mainFbContext_.makeCurrent();
GL_CHECK(glFinish());
GL_CHECK(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0));
GL_CHECK(glViewport(0, 0, mainFbContext_.getWindowSize().width, mainFbContext_.getWindowSize().height));
glClear(GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
#else
FrameBufferContext::GLScope glScope(fbCtx(), GL_FRAMEBUFFER);
GL_CHECK(glClearColor(0,0,0,0));
GL_CHECK(glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT));
GL_CHECK(glViewport(0, 0, mainFbContext_.getWindowSize().width, mainFbContext_.getWindowSize().height));
FrameBufferContext::GLScope glScope(fbCtx(), GL_FRAMEBUFFER);
GL_CHECK(glClearColor(0,0,0,0));
GL_CHECK(glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT));
GL_CHECK(glViewport(0, 0, mainFbContext_.getWindowSize().width, mainFbContext_.getWindowSize().height));
#endif
if (graphical) {
float w = mainFbContext_.size().width;
float h = mainFbContext_.size().height;
float r = mainFbContext_.pixelRatioX();
nvgSave(context_);
nvgBeginFrame(context_, w, h, r);
cv::v4d::nvg::detail::NVG::initializeContext(context_);
using namespace cv::v4d::nvg;
beginPath();
roundedRect(5, 5, 15 * txt.size() + 5, 30, 5);
fillColor(cv::Scalar(255, 255, 255, 180));
fill();
float w = mainFbContext_.size().width;
float h = mainFbContext_.size().height;
float r = mainFbContext_.pixelRatioX();
if(showFPS || showTracking) {
nvgSave(context_);
nvgBeginFrame(context_, w, h, r);
cv::v4d::nvg::detail::NVG::initializeContext(context_);
}
if (showFPS) {
using namespace cv::v4d::nvg;
beginPath();
roundedRect(3.75, 3.75, 10 * txt.size(), 22.5, 3.75);
fillColor(cv::Scalar(255, 255, 255, 180));
fill();
#ifdef __EMSCRIPTEN__
fbCtx().makeCurrent();
fbCtx().makeCurrent();
#endif
fontSize(30.0f);
fontFace("mono");
fillColor(cv::Scalar(90, 90, 90, 255));
textAlign(NVG_ALIGN_LEFT | NVG_ALIGN_MIDDLE);
text(10, 20, txt.c_str(), nullptr);
nvgEndFrame(context_);
nvgRestore(context_);
fontSize(20.0f);
fontFace("mono");
fillColor(cv::Scalar(90, 90, 90, 255));
textAlign(NVG_ALIGN_LEFT | NVG_ALIGN_MIDDLE);
text(7.5, 15, txt.c_str(), nullptr);
nvgEndFrame(context_);
nvgRestore(context_);
}
if(showTracking) {
using namespace cv::v4d::nvg;
std::stringstream ss;
auto& tiMap = TimeTracker::getInstance()->getMap();
size_t cnt = 0;
beginPath();
fontSize(20.0f);
fontFace("mono");
fillColor(cv::Scalar(200, 200, 200, 200));
textAlign(NVG_ALIGN_LEFT | NVG_ALIGN_MIDDLE);
for (auto& it : tiMap) {
ss.str("");
ss << it.first << ": " << it.second << std::endl;
text(7.5, 15 * (cnt + 3), ss.str().c_str(), nullptr);
++cnt;
}
nvgEndFrame(context_);
nvgRestore(context_);
}
{
}
{
#ifdef __EMSCRIPTEN__
FrameBufferContext::GLScope glScope(fbCtx(), GL_FRAMEBUFFER);
FrameBufferContext::GLScope glScope(fbCtx(), GL_FRAMEBUFFER);
#endif
screen().draw_widgets();
screen().draw_widgets();
#ifndef __EMSCRIPTEN__
GL_CHECK(glFinish());
GL_CHECK(glFinish());
#endif
}
}
if(!fbCtx().isShared()) {
if(!fbCtx().isShared()) {
#ifdef __EMSCRIPTEN__
mainFbContext_.doWebGLCopy(fbCtx());
mainFbContext_.doWebGLCopy(fbCtx());
#else
fbCtx().copyTo(copyBuffer_);
mainFbContext_.copyFrom(copyBuffer_);
fbCtx().copyTo(copyBuffer_);
mainFbContext_.copyFrom(copyBuffer_);
#endif
}
});
}
first_ = false;
}
});
tick_.start();
}

@ -32,11 +32,10 @@ class NanoguiContext {
NVGcontext* context_;
cv::TickMeter tick_;
float fps_ = 0;
bool first_ = true;
cv::UMat copyBuffer_;
public:
NanoguiContext(FrameBufferContext& fbContext);
void render(bool print, bool graphical);
void render(bool print, bool graphical, bool show);
void build(std::function<void(cv::v4d::FormHelper&)> fn);
nanogui::Screen& screen();
cv::v4d::FormHelper& form();

@ -19,45 +19,44 @@ using std::mutex;
struct TimeInfo {
long totalCnt_ = 0;
long totalTime_ = 0;
long gameCnt_ = 0;
long gameTime_ = 0;
long iterCnt_ = 0;
long iterTime_ = 0;
long last_ = 0;
map<string, TimeInfo> children_;
void add(size_t t) {
last_ = t;
totalTime_ += t;
gameTime_ += t;
iterTime_ += t;
++totalCnt_;
++gameCnt_;
++iterCnt_;
if (totalCnt_ == std::numeric_limits<long>::max() || totalTime_ == std::numeric_limits<long>::max()) {
totalCnt_ = 0;
totalTime_ = 0;
}
if (gameCnt_ == std::numeric_limits<long>::max() || gameTime_ == std::numeric_limits<long>::max()) {
gameCnt_ = 0;
gameTime_ = 0;
if (iterCnt_ == std::numeric_limits<long>::max() || iterTime_ == std::numeric_limits<long>::max()) {
iterCnt_ = 0;
iterTime_ = 0;
}
}
void newCount() {
gameCnt_ = 0;
gameTime_ = 0;
iterCnt_ = 0;
iterTime_ = 0;
}
string str() const {
stringstream ss;
ss << (totalTime_ / 1000.0) / totalCnt_ << "ms = (" << totalTime_ / 1000.0 << '\\' << totalCnt_ << ")\t";
ss << (gameTime_ / 1000.0) / gameCnt_ << "ms = (" << gameTime_ / 1000.0 << '\\' << gameCnt_ << ")\t";
ss << (iterTime_ / 1000.0) / iterCnt_ << "ms = (" << iterTime_ / 1000.0 << '\\' << iterCnt_ << ")\t";
return ss.str();
}
};
inline std::ostream& operator<<(ostream &os, TimeInfo &ti) {
os << (ti.totalTime_ / 1000.0) / ti.totalCnt_ << "ms = (" << ti.totalTime_ / 1000.0 << '\\' << ti.totalCnt_ << ")\t";
os << (ti.gameTime_ / 1000.0) / ti.gameCnt_ << "ms = (" << ti.gameTime_ / 1000.0 << '\\' << ti.gameCnt_ << ")";
os << (ti.iterTime_ / 1000.0) / ti.iterCnt_ << "ms = (" << ti.iterTime_ / 1000.0 << '\\' << ti.iterCnt_ << ")";
return os;
}
@ -68,12 +67,12 @@ private:
map<string, TimeInfo> tiMap_;
bool enabled_;
TimeTracker();
public:
virtual ~TimeTracker();
map<string, TimeInfo>& getMap() {
return tiMap_;
}
public:
virtual ~TimeTracker();
template<typename F> void execute(const string &name, F const &func) {
auto start = std::chrono::system_clock::now();
@ -83,14 +82,6 @@ public:
tiMap_[name].add(duration.count());
}
template<typename F> void execute(const string &parentName, const string &name, F const &func) {
auto start = std::chrono::system_clock::now();
func();
auto duration = std::chrono::duration_cast<microseconds>(std::chrono::system_clock::now() - start);
std::unique_lock lock(mapMtx_);
tiMap_[parentName].children_[name].add(duration.count());
}
template<typename F> size_t measure(F const &func) {
auto start = std::chrono::system_clock::now();
func();
@ -112,22 +103,19 @@ public:
ss << "Time tracking info: " << std::endl;
for (auto it : tiMap_) {
ss << "\t" << it.first << ": " << it.second << std::endl;
for (auto itc : it.second.children_) {
ss << "\t\t" << itc.first << ": " << itc.second << std::endl;
}
}
long totalTime = 0;
long totalGameTime = 0;
long totalIterTime = 0;
long totalCnt = 0;
long gameCnt = 0;
long iterCnt = 0;
for (auto& pair : getMap()) {
totalTime += pair.second.totalTime_;
totalGameTime += pair.second.gameTime_;
totalIterTime += pair.second.iterTime_;
totalCnt = pair.second.totalCnt_;
gameCnt = pair.second.gameCnt_;
iterCnt = pair.second.iterCnt_;
}
ss << std::endl << "FPS: " << (float(totalCnt) / float(totalTime / 1000000.0f)) << " / " << (float(gameCnt) / float(totalGameTime / 1000000.0f)) << std::endl;
ss << std::endl << "FPS: " << (float(totalCnt) / float(totalTime / 1000000.0f)) << " / " << (float(iterCnt) / float(totalIterTime / 1000000.0f)) << std::endl;
os << ss.str();
}
@ -154,9 +142,6 @@ public:
std::unique_lock lock(mapMtx_);
for (auto& pair : getMap()) {
pair.second.newCount();
for (auto& pairc : pair.second.children_) {
pairc.second.newCount();
}
}
}
};

@ -9,6 +9,7 @@
#include "detail/nanovgcontext.hpp"
#include "detail/nanoguicontext.hpp"
#include "detail/glcontext.hpp"
#include "detail/timetracker.hpp"
#include "opencv2/v4d/dialog.hpp"
#include "opencv2/v4d/formhelper.hpp"
#include <sstream>
@ -24,8 +25,7 @@ cv::Ptr<V4D> V4D::make(const cv::Size& size, const cv::Size& fbsize, const strin
V4D::V4D(const cv::Size& size, const cv::Size& fbsize, const string& title, bool offscreen, bool debug, bool compat, int samples) :
initialSize_(size), title_(title), compat_(
compat), samples_(samples), debug_(debug), viewport_(0, 0, size.width, size.height), zoomScale_(
1), mousePos_(0, 0), scaling_(true), pool_(2) {
compat), samples_(samples), debug_(debug), viewport_(0, 0, size.width, size.height), scaling_(true), pool_(2) {
#ifdef __EMSCRIPTEN__
printf(""); //makes sure we have FS as a dependency
#endif
@ -122,31 +122,41 @@ bool V4D::hasGlCtx(uint32_t idx) {
}
void V4D::gl(std::function<void()> fn, uint32_t idx) {
glCtx(idx).render([=](const cv::Size& sz) {
CV_UNUSED(sz);
fn();
TimeTracker::getInstance()->execute("gl(" + detail::func_id(fn) + ")/" + std::to_string(idx), [&](){
glCtx(idx).render([=](const cv::Size& sz) {
CV_UNUSED(sz);
fn();
});
});
}
void V4D::gl(std::function<void(const cv::Size&)> fn, uint32_t idx) {
glCtx(idx).render(fn);
TimeTracker::getInstance()->execute("gl(" + detail::func_id(fn) + ")/" + std::to_string(idx), [&](){
glCtx(idx).render(fn);
});
}
void V4D::fb(std::function<void(cv::UMat&)> fn) {
fbCtx().execute(fn);
TimeTracker::getInstance()->execute("fb(" + detail::func_id(fn) + ")", [&](){
fbCtx().execute(fn);
});
}
void V4D::nvg(std::function<void()> fn) {
nvgCtx().render([fn](const cv::Size& sz) {
CV_UNUSED(sz);
fn();
TimeTracker::getInstance()->execute("nvg(" + detail::func_id(fn) + ")", [&](){
nvgCtx().render([fn](const cv::Size& sz) {
CV_UNUSED(sz);
fn();
});
});
}
void V4D::nvg(std::function<void(const cv::Size&)> fn) {
nvgCtx().render(fn);
TimeTracker::getInstance()->execute("nvg(" + detail::func_id(fn) + ")", [&](){
nvgCtx().render(fn);
});
}
void V4D::nanogui(std::function<void(cv::v4d::FormHelper& form)> fn) {
@ -154,13 +164,17 @@ void V4D::nanogui(std::function<void(cv::v4d::FormHelper& form)> fn) {
}
void V4D::copyTo(cv::OutputArray m) {
UMat um = m.getUMat();
fbCtx().copyTo(um);
TimeTracker::getInstance()->execute("copyTo", [&](){
UMat um = m.getUMat();
fbCtx().copyTo(um);
});
}
void V4D::copyFrom(cv::InputArray m) {
UMat um = m.getUMat();
fbCtx().copyFrom(um);
TimeTracker::getInstance()->execute("copyTo", [&](){
UMat um = m.getUMat();
fbCtx().copyFrom(um);
});
}
#ifdef __EMSCRIPTEN__
@ -194,7 +208,8 @@ void V4D::setSource(const Source& src) {
source_ = src;
}
void V4D::feed(cv::InputArray& in) {
void V4D::feed(cv::InputArray in) {
TimeTracker::getInstance()->execute("feed", [&](){
cv::UMat frame;
clvaCtx().capture([&](cv::UMat& videoFrame) {
in.copyTo(videoFrame);
@ -203,15 +218,17 @@ void V4D::feed(cv::InputArray& in) {
fb([frame](cv::UMat& frameBuffer){
frame.copyTo(frameBuffer);
});
});
}
InputOutputArray V4D::fetch() {
cv::UMat frame;
_InputOutputArray V4D::fetch() {
cv::UMat frame;
TimeTracker::getInstance()->execute("copyTo", [&](){
fb([frame](cv::UMat& framebuffer){
framebuffer.copyTo(frame);
});
return frame;
});
return frame;
}
bool V4D::capture() {
@ -222,42 +239,42 @@ bool V4D::capture() {
}
bool V4D::capture(std::function<void(cv::UMat&)> fn) {
if (!source_.isReady() || !source_.isOpen()) {
bool res = true;
TimeTracker::getInstance()->execute("capture", [&, this](){
if (!source_.isReady() || !source_.isOpen()) {
#ifndef __EMSCRIPTEN__
return false;
#else
return true;
res = false;
#endif
}
if (futureReader_.valid()) {
if (!futureReader_.get()) {
return;
}
if (futureReader_.valid()) {
if (!futureReader_.get()) {
#ifndef __EMSCRIPTEN__
return false;
#else
return true;
res = false;
#endif
return;
}
}
}
if(nextReaderFrame_.empty()) {
if (!clvaCtx().capture(fn, nextReaderFrame_)) {
if(nextReaderFrame_.empty()) {
if (!clvaCtx().capture(fn, nextReaderFrame_)) {
#ifndef __EMSCRIPTEN__
return false;
#else
return true;
res = false;
#endif
return;
}
}
}
currentReaderFrame_ = nextReaderFrame_.clone();
futureReader_ = pool_.enqueue(
[](V4D* v, std::function<void(UMat&)> func, cv::UMat& frame) {
return v->clvaCtx().capture(func, frame);
}, this, fn, nextReaderFrame_);
fb([this](cv::UMat& frameBuffer){
currentReaderFrame_.copyTo(frameBuffer);
currentReaderFrame_ = nextReaderFrame_.clone();
futureReader_ = pool_.enqueue(
[](V4D* v, std::function<void(UMat&)> func, cv::UMat& frame) {
return v->clvaCtx().capture(func, frame);
}, this, fn, nextReaderFrame_);
fb([this](cv::UMat& frameBuffer){
currentReaderFrame_.copyTo(frameBuffer);
});
});
return true;
return res;
}
bool V4D::isSourceReady() {
@ -278,19 +295,21 @@ void V4D::write() {
}
void V4D::write(std::function<void(const cv::UMat&)> fn) {
if (!sink_.isReady() || !sink_.isOpen())
return;
TimeTracker::getInstance()->execute("write", [&, this](){
if (!sink_.isReady() || !sink_.isOpen())
return;
if (futureWriter_.valid())
futureWriter_.get();
if (futureWriter_.valid())
futureWriter_.get();
fb([this](cv::UMat& frameBuffer){
frameBuffer.copyTo(currentWriterFrame_);
});
fb([this](cv::UMat& frameBuffer){
frameBuffer.copyTo(currentWriterFrame_);
});
futureWriter_ = pool_.enqueue([](V4D* v, std::function<void(const UMat&)> func, cv::UMat& frame) {
v->clvaCtx().write(func, frame);
}, this, fn, currentWriterFrame_);
futureWriter_ = pool_.enqueue([](V4D* v, std::function<void(const UMat&)> func, cv::UMat& frame) {
v->clvaCtx().write(func, frame);
}, this, fn, currentWriterFrame_);
});
}
bool V4D::isSinkReady() {
@ -304,100 +323,10 @@ void V4D::showGui(bool s) {
}
}
// Enable or disable mouse-drag panning mode.
// NOTE(review): the GLFW mouse-button callback that used to call this is
// commented out elsewhere in this change — confirm this is still reachable.
void V4D::setMouseDrag(bool d) {
mouseDrag_ = d;
}
// Whether mouse-drag panning mode is currently active.
bool V4D::isMouseDrag() {
return mouseDrag_;
}
// Translate the viewport by the given delta, scaled by the current zoom
// so panning speed matches the visible magnification.
void V4D::pan(int x, int y) {
const float scaledX = x * zoomScale_;
const float scaledY = y * zoomScale_;
viewport_.x += scaledX;
viewport_.y += scaledY;
}
// Zoom the viewport by `factor` (factor > 1 moves toward the full view,
// factor < 1 zooms in), keeping the zoom roughly anchored at the mouse
// position while zooming in and at the viewport center while zooming out.
void V4D::zoom(float factor) {
// Already showing the full view at the origin and zooming out further: no-op.
if (zoomScale_ == 1 && viewport_.x == 0 && viewport_.y == 0 && factor > 1)
return;
double oldScale = zoomScale_;
double origW = framebufferSize().width;
double origH = framebufferSize().height;
zoomScale_ *= factor;
// Clamp the minimum zoom scale.
if (zoomScale_ <= 0.025) {
zoomScale_ = 0.025;
return;
} else if (zoomScale_ > 1) {
// Overshot full view: snap the scale back to 1, restore the full-size
// viewport, and ease the origin back toward (0,0) on a log10 curve.
zoomScale_ = 1;
viewport_.width = origW;
viewport_.height = origH;
if (factor > 1) {
viewport_.x += log10(((viewport_.x * (1.0 - factor)) / viewport_.width) * 9 + 1.0)
* viewport_.width;
viewport_.y += log10(((viewport_.y * (1.0 - factor)) / viewport_.height) * 9 + 1.0)
* viewport_.height;
} else {
viewport_.x += log10(((-viewport_.x * (1.0 - factor)) / viewport_.width) * 9 + 1.0)
* viewport_.width;
viewport_.y += log10(((-viewport_.y * (1.0 - factor)) / viewport_.height) * 9 + 1.0)
* viewport_.height;
}
return;
}
cv::Vec2f offset;
double oldW = (origW * oldScale);
double oldH = (origH * oldScale);
// New viewport size at the updated scale, clamped to the framebuffer size.
viewport_.width = std::min(zoomScale_ * origW, origW);
viewport_.height = std::min(zoomScale_ * origH, origH);
float delta_x;
float delta_y;
if (factor < 1.0) {
// Zooming in: anchor relative to the mouse cursor
// (y is flipped against the framebuffer height).
offset = cv::Vec2f(viewport_.x, viewport_.y)
- cv::Vec2f(mousePos_[0], origH - mousePos_[1]);
delta_x = offset[0] / oldW;
delta_y = offset[1] / oldH;
} else {
// Zooming out: anchor relative to the viewport center.
offset = cv::Vec2f(viewport_.x - (viewport_.width / 2.0),
viewport_.y - (viewport_.height / 2.0)) - cv::Vec2f(viewport_.x, viewport_.y);
delta_x = offset[0] / oldW;
delta_y = offset[1] / oldH;
}
float x_offset;
float y_offset;
// Shift the origin proportionally to the size change so the anchor stays put.
x_offset = delta_x * (viewport_.width - oldW);
y_offset = delta_y * (viewport_.height - oldH);
// NOTE(review): both branches below are identical — the if/else is redundant;
// confirm whether a direction-dependent adjustment was intended here.
if (factor < 1.0) {
viewport_.x += x_offset;
viewport_.y += y_offset;
} else {
viewport_.x += x_offset;
viewport_.y += y_offset;
}
}
// Window position as reported by the framebuffer context.
cv::Vec2f V4D::position() {
return fbCtx().position();
}
// Last mouse position recorded via setMousePosition().
cv::Vec2f V4D::getMousePosition() {
return mousePos_;
}
// Record the latest mouse cursor position.
void V4D::setMousePosition(int x, int y) {
mousePos_ = { float(x), float(y) };
}
// Current zoom scale (1 = full, unzoomed view).
float V4D::zoomScale() {
return zoomScale_;
}
// Mutable reference to the current viewport rectangle.
cv::Rect& V4D::viewport() {
return viewport_;
}
@ -434,6 +363,10 @@ bool V4D::getPrintFPS() {
return printFPS_;
}
bool V4D::getShowTracking() {
return showTracking_;
}
void V4D::setShowFPS(bool s) {
showFPS_ = s;
}
@ -442,6 +375,10 @@ void V4D::setPrintFPS(bool p) {
printFPS_ = p;
}
void V4D::setShowTracking(bool st) {
showTracking_ = st;
}
bool V4D::isFullscreen() {
return fbCtx().isFullscreen();
}
@ -545,7 +482,7 @@ bool V4D::display() {
fbCtx().blitFrameBufferToScreen(viewport(), fbCtx().getWindowSize(), isScaling());
}
#ifndef __EMSCRIPTEN__
nguiCtx().render(printFPS_, showFPS_);
nguiCtx().render(printFPS_, showFPS_, showTracking_);
#endif
fbCtx().makeCurrent();
#ifndef __EMSCRIPTEN__

Loading…
Cancel
Save