api adjustments and gpu-video-capture experiments

pull/3471/head
kallaballa 2 years ago
parent ef7361a0db
commit 3c75dca102
  1. 2
      modules/v4d/CMakeLists.txt
  2. 12
      modules/v4d/include/opencv2/v4d/util.hpp
  3. 19
      modules/v4d/include/opencv2/v4d/v4d.hpp
  4. 2
      modules/v4d/samples/beauty-demo.cpp
  5. 3
      modules/v4d/samples/cube-demo.cpp
  6. 5
      modules/v4d/samples/custom_source_and_sink.cpp
  7. 2
      modules/v4d/samples/font-demo.cpp
  8. 2
      modules/v4d/samples/nanovg-demo.cpp
  9. 4
      modules/v4d/samples/optflow-demo.cpp
  10. 2
      modules/v4d/samples/pedestrian-demo.cpp
  11. 2
      modules/v4d/samples/shader-demo.cpp
  12. 4
      modules/v4d/samples/video-demo.cpp
  13. 22
      modules/v4d/src/detail/framebuffercontext.cpp
  14. 6
      modules/v4d/src/detail/framebuffercontext.hpp
  15. 2
      modules/v4d/src/detail/glcontext.cpp
  16. 8
      modules/v4d/src/detail/nanoguicontext.cpp
  17. 7
      modules/v4d/src/detail/nanovgcontext.cpp
  18. 88
      modules/v4d/src/util.cpp
  19. 31
      modules/v4d/src/v4d.cpp

@ -49,7 +49,7 @@ macro(add_binary_sample sample)
endmacro()
if(EMSCRIPTEN)
set(EM_LINKER_FLAGS "-sOFFSCREENCANVAS_SUPPORT -sOFFSCREENCANVASES_TO_PTHREAD=#offscreenCanvas -sEXPORTED_FUNCTIONS=_malloc,_main,_v4dSetVideoFramePointer -sEXPORTED_RUNTIME_METHODS=ccall,setValue -sPROXY_TO_PTHREAD=1 --use-preload-plugins --preload-file doc/lena.png -sINITIAL_MEMORY=128MB -sALLOW_MEMORY_GROWTH=1 -sUSE_GLFW=3 -sMIN_WEBGL_VERSION=2 -sMAX_WEBGL_VERSION=2 --bind")
set(EM_LINKER_FLAGS "-sENVIRONMENT=web,worker -sOFFSCREENCANVAS_SUPPORT -sOFFSCREENCANVASES_TO_PTHREAD=#offscreenCanvas -sEXPORTED_FUNCTIONS=_malloc,_main,_v4dSetVideoFramePointer -sEXPORTED_RUNTIME_METHODS=ccall,setValue -sPROXY_TO_PTHREAD=1 --use-preload-plugins --preload-file doc/lena.png -sINITIAL_MEMORY=128MB -sALLOW_MEMORY_GROWTH=1 -sUSE_GLFW=3 -sMIN_WEBGL_VERSION=2 -sMAX_WEBGL_VERSION=2 --bind")
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${EM_LINKER_FLAGS}")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${EM_LINKER_FLAGS}")

@ -75,7 +75,15 @@ make_function(T *t)
return {t};
}
void run_sync_on_main(std::function<void()> fn);
/*!
 * Runs the given function synchronously on the main runtime thread.
 *
 * Under Emscripten the callable is proxied to the main browser/runtime thread
 * via emscripten_sync_run_in_main_runtime_thread and this call blocks until it
 * has executed. On all other platforms the callable is simply invoked inline
 * on the current thread.
 *
 * @tparam Tid Compile-time identifier forwarded to cv::v4d::detail::get_fn_ptr.
 *             NOTE(review): get_fn_ptr is not visible here — presumably it
 *             produces one static trampoline/function pointer per distinct Tid,
 *             which would mean each call site must use a unique Tid or the
 *             stored std::function is silently overwritten; confirm against
 *             its definition before reusing an id.
 * @param fn   The function to execute on the main thread.
 */
template<std::size_t Tid>
void run_sync_on_main(std::function<void()> fn) {
#ifdef __EMSCRIPTEN__
emscripten_sync_run_in_main_runtime_thread(EM_FUNC_SIG_V, cv::v4d::detail::get_fn_ptr<Tid>(fn));
#else
fn();
#endif
}
size_t cnz(const cv::UMat& m);
}
using std::string;
@ -160,7 +168,7 @@ CV_EXPORTS Source makeCaptureSource(const string& inputFilename);
* @param height The frame height to capture (usually the initial height of the V4D object)
* @return A WebCam source object.
*/
CV_EXPORTS Source makeCaptureSource(int width, int height);
CV_EXPORTS Source makeCaptureSource(int width, int height, cv::Ptr<V4D> window);
#endif
void resizePreserveAspectRatio(const cv::UMat& src, cv::UMat& output, const cv::Size& dstSize, const cv::Scalar& bgcolor = {0,0,0,255});

@ -149,15 +149,6 @@ class CV_EXPORTS V4D {
cv::TickMeter tick_;
float fps_ = 0;
public:
/*!
* Creates a V4D object which is the central object to perform visualizations with.
* @param size The window and framebuffer size
* @param title The window title.
* @param debug Create a debug OpenGL context.
*/
CV_EXPORTS static cv::Ptr<V4D> make(const cv::Size& size, const string& title, bool debug =
false);
/*!
* Creates a V4D object which is the central object to perform visualizations with.
* @param initialSize The initial size of the heavy-weight window.
@ -170,8 +161,8 @@ public:
* @param samples MSAA samples.
* @param debug Create a debug OpenGL context.
*/
CV_EXPORTS static cv::Ptr<V4D> make(const cv::Size& initialSize, bool offscreen, const string& title, int major = 3,
int minor = 2, bool compat = false, int samples = 0, bool debug = true);
CV_EXPORTS static cv::Ptr<V4D> make(const cv::Size& size, const string& title, bool offscreen = false, bool debug = false, int major = 3,
int minor = 2, bool compat = false, int samples = 0);
/*!
* Default destructor
*/
@ -401,9 +392,10 @@ public:
CV_EXPORTS bool display();
CV_EXPORTS void printSystemInfo();
CV_EXPORTS void updateFps(bool graphical = true);
FrameBufferContext& fbCtx();
private:
V4D(const cv::Size& initialSize, bool offscreen,
const string& title, int major = 3, int minor = 2, bool compat = false, int samples = 0, bool debug = false);
V4D(const cv::Size& initialSize,
const string& title, bool offscreen, bool debug, int major, int minor, bool compat, int samples);
void setDefaultKeyboardEventCallback();
void setKeyboardEventCallback(
std::function<bool(int key, int scancode, int action, int modifiers)> fn);
@ -412,7 +404,6 @@ private:
cv::Vec2f getMousePosition();
bool keyboard_event(int key, int scancode, int action, int modifiers);
void setMousePosition(int x, int y);
FrameBufferContext& fbCtx();
CLVAContext& clvaCtx();
NanoVGContext& nvgCtx();
NanoguiContext& nguiCtx();

@ -48,7 +48,7 @@ bool side_by_side = false;
bool stretch = false;
#endif
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), OFFSCREEN, "Beauty Demo");
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), "Beauty Demo", OFFSCREEN);
static cv::Ptr<cv::face::Facemark> facemark = cv::face::createFacemarkLBF(); //Face landmark detection
#ifdef USE_TRACKER
static cv::Ptr<cv::Tracker> tracker = cv::TrackerKCF::create(); //Instead of continues face detection we can use a tracker

@ -27,8 +27,7 @@ unsigned int shader_program;
unsigned int vao;
unsigned int uniform_transform;
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT),
OFFSCREEN, "Cube Demo");
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), "Cube Demo", OFFSCREEN);
static GLuint load_shader() {
#ifndef OPENCV_V4D_USE_ES3

@ -13,8 +13,9 @@ int main() {
//Make a Source that generates rainbow frames.
Source src([](cv::UMat& frame){
static long cnt = 0;
//The source is responsible for initializing the frame. The frame stays allocated, which makes create() have no effect in further iterations.
frame.create(Size(1280, 720), CV_8UC3);
// The source is responsible for initializing the frame.
if(frame.empty())
frame.create(Size(1280, 720), CV_8UC3);
frame = colorConvert(Scalar(++cnt % 180, 128, 128, 255), COLOR_HLS2BGR);
return true;
}, 60.0f);

@ -45,7 +45,7 @@ using std::string;
using std::vector;
using std::istringstream;
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), OFFSCREEN, "Font Demo");
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), "Font Demo", OFFSCREEN);
vector<string> lines;
static bool update_stars = true;
static bool update_perspective = true;

@ -15,7 +15,7 @@ constexpr const char *OUTPUT_FILENAME = "nanovg-demo.mkv";
using std::cerr;
using std::endl;
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), OFFSCREEN, "NanoVG Demo");
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), "NanoVG Demo", OFFSCREEN);
static void draw_color_wheel(float x, float y, float w, float h, float hue) {
//color wheel drawing code taken from https://github.com/memononen/nanovg/blob/master/example/demo.c

@ -47,9 +47,9 @@ constexpr const char* OUTPUT_FILENAME = "optflow-demo.mkv";
#endif
constexpr bool OFFSCREEN = false;
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), OFFSCREEN, "Sparse Optical Flow Demo");
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT) ,"Sparse Optical Flow Demo", OFFSCREEN);
#ifndef __EMSCRIPTEN__
static cv::Ptr<cv::v4d::V4D> v4d2 = cv::v4d::V4D::make(cv::Size(240, 360), false, "Display Settings");
static cv::Ptr<cv::v4d::V4D> v4d2 = cv::v4d::V4D::make(cv::Size(240, 360), "Display Settings", OFFSCREEN);
#endif
/** Visualization parameters **/

@ -29,7 +29,7 @@ using std::endl;
using std::vector;
using std::string;
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), OFFSCREEN, "Pedestrian Demo");
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), "Pedestrian Demo", OFFSCREEN);
static cv::HOGDescriptor hog;
//adapted from cv::dnn_objdetect::InferBbox

@ -18,7 +18,7 @@ constexpr double FPS = 60;
constexpr const char* OUTPUT_FILENAME = "shader-demo.mkv";
#endif
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), OFFSCREEN, "Shader Demo");
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT), "Shader Demo", OFFSCREEN);
int glow_kernel_size = std::max(int(DIAG / 200 % 2 == 0 ? DIAG / 200 + 1 : DIAG / 200), 1);

@ -26,7 +26,7 @@ unsigned int vao;
unsigned int uniform_transform;
static cv::Ptr<cv::v4d::V4D> v4d = cv::v4d::V4D::make(cv::Size(WIDTH, HEIGHT),
OFFSCREEN, "Video Demo");
"Video Demo", OFFSCREEN, true);
static GLuint load_shader() {
#ifndef OPENCV_V4D_USE_ES3
@ -236,7 +236,7 @@ int main() {
src.fps(), cv::Size(WIDTH, HEIGHT));
v4d->setSink(sink);
#else
Source src = makeCaptureSource(WIDTH, HEIGHT);
Source src = makeCaptureSource(WIDTH, HEIGHT, v4d);
v4d->setSource(src);
#endif

@ -27,7 +27,7 @@ FrameBufferContext::FrameBufferContext(V4D& v4d, const cv::Size& frameBufferSize
const string& title, int major, int minor, bool compat, int samples, bool debug, GLFWwindow* sharedWindow, const FrameBufferContext* parent) :
v4d_(&v4d), offscreen_(offscreen), title_(title), major_(major), minor_(
minor), compat_(compat), samples_(samples), debug_(debug), viewport_(0, 0, frameBufferSize.width, frameBufferSize.height), windowSize_(frameBufferSize), frameBufferSize_(frameBufferSize), isShared_(false), sharedWindow_(sharedWindow), parent_(parent) {
run_sync_on_main([this](){ init(); });
run_sync_on_main<1>([this](){ init(); });
}
FrameBufferContext::~FrameBufferContext() {
@ -126,7 +126,7 @@ void FrameBufferContext::init() {
glfwSetCursorPosCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, double x, double y) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
if(v4d->hasNguiCtx())
v4d->nguiCtx().screen().cursor_pos_callback_event(x * v4d->getXPixelRatio(), y * v4d->getXPixelRatio());
v4d->nguiCtx().screen().cursor_pos_callback_event(x, y);
#ifndef __EMSCRIPTEN__
auto cursor = v4d->getMousePosition();
auto diff = cursor - cv::Vec2f(x, y);
@ -257,6 +257,8 @@ void FrameBufferContext::setup(const cv::Size& sz) {
GL_CHECK(
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, sz.width, sz.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0));
GL_CHECK(glBindRenderbuffer(GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, sz.width, sz.height));
GL_CHECK(
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
@ -391,7 +393,7 @@ cv::Size FrameBufferContext::size() {
}
void FrameBufferContext::execute(std::function<void(cv::UMat&)> fn) {
run_sync_on_main([&,this](){
run_sync_on_main<2>([&,this](){
frameBuffer_.create(size(), CV_8UC4);
#ifndef __EMSCRIPTEN__
CLExecScope_t clExecScope(getCLExecContext());
@ -418,8 +420,6 @@ CLExecContext_t& FrameBufferContext::getCLExecContext() {
void FrameBufferContext::blitFrameBufferToScreen(const cv::Rect& viewport,
const cv::Size& windowSize, bool stretch) {
glFinish();
GL_CHECK(glBindFramebuffer(GL_READ_FRAMEBUFFER, frameBufferID_));
GL_CHECK(glReadBuffer(GL_COLOR_ATTACHMENT0));
GL_CHECK(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0));
glClearColor(0, 0, 0, 0);
@ -448,19 +448,21 @@ void FrameBufferContext::blitFrameBufferToScreen(const cv::Rect& viewport,
GL_COLOR_BUFFER_BIT, GL_NEAREST);
}
void FrameBufferContext::begin() {
void FrameBufferContext::begin(GLenum framebufferTarget) {
this->makeCurrent();
glGetIntegerv( GL_VIEWPORT, viewport_ );
glGetError();
GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, frameBufferID_));
GL_CHECK(glBindFramebuffer(framebufferTarget, frameBufferID_));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, textureID_));
GL_CHECK(glBindRenderbuffer(GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, renderBufferID_));
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, size().width, size().height));
GL_CHECK(
glFramebufferRenderbuffer(framebufferTarget, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID_, 0));
assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
glFramebufferTexture2D(framebufferTarget, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID_, 0));
assert(glCheckFramebufferStatus(framebufferTarget) == GL_FRAMEBUFFER_COMPLETE);
glViewport(0, 0, frameBufferSize_.width, frameBufferSize_.height);
glGetError();
}

@ -98,9 +98,9 @@ public:
* Setup OpenGL states.
* @param ctx The corresponding #FrameBufferContext.
*/
GLScope(FrameBufferContext& ctx) :
GLScope(FrameBufferContext& ctx, GLenum framebufferTarget = GL_FRAMEBUFFER) :
ctx_(ctx) {
ctx_.begin();
ctx_.begin(framebufferTarget);
}
/*!
* Tear-down OpenGL states.
@ -165,7 +165,7 @@ protected:
/*!
* Setup OpenGL states.
*/
void begin();
void begin(GLenum framebufferTarget);
/*!
* Tear-down OpenGL states.
*/

@ -13,7 +13,7 @@ GLContext::GLContext(V4D& v4d, FrameBufferContext& fbContext) :
}
void GLContext::render(std::function<void(const cv::Size&)> fn) {
run_sync_on_main([&,this](){
run_sync_on_main<15>([&,this](){
#ifdef __EMSCRIPTEN__
// fb_.create(mainFbContext_.size(), CV_8UC4);
// preFB_.create(mainFbContext_.size(), CV_8UC4);

@ -11,11 +11,11 @@ namespace detail {
NanoguiContext::NanoguiContext(V4D& v4d, FrameBufferContext& fbContext) :
mainFbContext_(fbContext), nguiFbContext_(v4d, "NanoGUI", fbContext) {
run_sync_on_main([this](){ init(); });
run_sync_on_main<3>([this](){ init(); });
}
void NanoguiContext::init() {
FrameBufferContext::GLScope glScope(fbCtx());
FrameBufferContext::GLScope glScope(fbCtx(), GL_DRAW_FRAMEBUFFER);
screen_ = new nanogui::Screen();
screen_->initialize(nguiFbContext_.getGLFWWindow(), false);
fbCtx().setWindowSize(fbCtx().size());
@ -23,7 +23,7 @@ void NanoguiContext::init() {
}
void NanoguiContext::render() {
run_sync_on_main([&,this](){
run_sync_on_main<4>([&,this](){
#ifdef __EMSCRIPTEN__
// fb_.create(mainFbContext_.size(), CV_8UC4);
// preFB_.create(mainFbContext_.size(), CV_8UC4);
@ -59,7 +59,7 @@ void NanoguiContext::render() {
}
void NanoguiContext::build(std::function<void(cv::v4d::FormHelper&)> fn) {
run_sync_on_main([fn,this](){
run_sync_on_main<5>([fn,this](){
FrameBufferContext::GLScope glScope(fbCtx());
fn(form());
screen().perform_layout();

@ -23,11 +23,11 @@ namespace detail {
NanoVGContext::NanoVGContext(V4D& v4d, FrameBufferContext& fbContext) :
v4d_(v4d), context_(nullptr), mainFbContext_(fbContext), nvgFbContext_(v4d, "NanoVG", fbContext) {
run_sync_on_main([this](){ init(); });
run_sync_on_main<13>([this](){ init(); });
}
void NanoVGContext::init() {
FrameBufferContext::GLScope glScope(fbCtx());
FrameBufferContext::GLScope glScope(fbCtx(), GL_DRAW_FRAMEBUFFER);
screen_ = new nanogui::Screen();
screen_->initialize(fbCtx().getGLFWWindow(), false);
fbCtx().setWindowSize(fbCtx().size());
@ -56,7 +56,7 @@ void NanoVGContext::init() {
}
void NanoVGContext::render(std::function<void(const cv::Size&)> fn) {
run_sync_on_main([&,this](){
run_sync_on_main<14>([&,this](){
#ifdef __EMSCRIPTEN__
// {
// FrameBufferContext::GLScope mainGlScope(mainFbContext_);
@ -71,7 +71,6 @@ void NanoVGContext::render(std::function<void(const cv::Size&)> fn) {
#endif
{
FrameBufferContext::GLScope glScope(fbCtx());
glClear(GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
NanoVGContext::Scope nvgScope(*this);
cv::v4d::nvg::detail::NVG::initializeContext(context_);
fn(fbCtx().size());

@ -31,14 +31,6 @@
namespace cv {
namespace v4d {
namespace detail {
void run_sync_on_main(std::function<void()> fn) {
#ifdef __EMSCRIPTEN__
emscripten_sync_run_in_main_runtime_thread(EM_FUNC_SIG_V, cv::v4d::detail::get_fn_ptr<__COUNTER__>(fn));
#else
fn();
#endif
}
size_t cnz(const cv::UMat& m) {
cv::UMat grey;
if(m.channels() == 1) {
@ -331,25 +323,24 @@ void v4dSetVideoFramePointer(uint8_t* frame, int width, int height) {
}
}
//EM_JS(void,JScopyVideoFrameGPU,(int width, int height), {
//
//EM_JS(void,copyVideoFrameGPU,(int width, int height), {
// function initFramebuffer(gl) {
// console.log("init fb: " + width + "/" + height);
// if(typeof globalThis.v4dVideoFrameBuffer === 'undefined' || globalThis.v4dVideoFrameBuffer === null) {
// console.log("CREATE FRAMEBUFFER");
// globalThis.v4dVideoFrameBuffer = gl.createFramebuffer();
// }
// console.error("init fb: " + width + "/" + height);
// if(typeof globalThis.v4dVideoFrameBuffer === 'undefined' || globalThis.v4dVideoFrameBuffer === null) {
// console.error("CREATE FRAMEBUFFER");
// globalThis.v4dVideoFrameBuffer = gl.createFramebuffer();
// }
// if(typeof globalThis.v4dVideoFrameTexture === 'undefined' || globalThis.v4dVideoFrameTexture === null) {
// console.log("CREATE TEXTURE");
// console.error("CREATE TEXTURE");
// globalThis.v4dVideoFrameTexture = gl.createTexture();
// }
//
// if(typeof globalThis.v4dVideoElement === 'undefined' || globalThis.v4dVideoElement === null) {
// console.log("CREATE VIDEO ELEMENT");
// console.error("CREATE VIDEO ELEMENT");
// globalThis.v4dVideoElement = document.querySelector("#video");
// }
//
// gl.bindFramebuffer(gl.READ_FRAMEBUFFER, globalThis.v4dVideoFrameBuffer);
// gl.bindFramebuffer(gl.FRAMEBUFFER, globalThis.v4dVideoFrameBuffer);
// gl.bindTexture(gl.TEXTURE_2D, globalThis.v4dVideoFrameTexture);
// const level = 0;
// const internalFormat = gl.RGBA;
@ -365,46 +356,47 @@ void v4dSetVideoFramePointer(uint8_t* frame, int width, int height) {
// globalThis.v4dVideoElement
// );
//
// gl.framebufferTexture2D(gl.READ_FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, globalThis.v4dVideoFrameTexture, 0);
// return gl.checkFramebufferStatus(gl.READ_FRAMEBUFFER) == gl.FRAMEBUFFER_COMPLETE;
// gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, globalThis.v4dVideoFrameTexture, 0);
// return gl.checkFramebufferStatus(gl.FRAMEBUFFER) == gl.FRAMEBUFFER_COMPLETE;
// }
//
// function updateTexture(gl) {
// if(initFramebuffer(gl)) {
// console.log("video texture copy: " + globalThis.v4dVideoFrameTexture + " -> " + mainTexture);
// gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, mainFrameBuffer);
// if(initFramebuffer(gl)) {
// console.error("video texture copy: " + globalThis.v4dVideoFrameTexture + " -> " + mainTexture);
//// gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, mainFrameBuffer);
// gl.bindTexture(gl.TEXTURE_2D, mainTexture);
// gl.framebufferTexture2D(gl.DRAW_FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, mainTexture, 0);
//
// gl.blitFramebuffer( 0, 0, globalThis.v4dVideoElement.width, globalThis.v4dVideoElement.height,
// 0, 0, width, height,
// gl.COLOR_BUFFER_BIT, gl.NEAREST);
//// gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl["RGBA"], 0, 0, width, height, 0);
//// gl.framebufferTexture2D(gl.DRAW_FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, mainTexture, 0);
//// gl.blitFramebuffer( 0, 0, globalThis.v4dVideoElement.width, globalThis.v4dVideoElement.height,
//// 0, 0, width, height,
//// gl.COLOR_BUFFER_BIT, gl.NEAREST);
// gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, globalThis.v4dVideoElement.width, globalThis.v4dVideoElement.height, 0);
// } else {
// console.log("frambuffer incomplete");
// console.error("frambuffer incomplete");
// }
// }
//
// var ctx;
// if (typeof GL !== 'undefined' && typeof Module.ctx !== 'undefined') {
// console.error("start");
//
// gl = Module.ctx;
// mainFrameBuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING);
// mainTexture = gl.getFramebufferAttachmentParameter(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.FRAMEBUFFER_ATTACHMENT_OBJECT_NAME);
// console.log(mainFrameBuffer + ":" + mainTexture);
//
// if(Module.doCapture) {
// console.error("capture");
// gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
// updateTexture(gl, globalThis.v4dVideoFrameTexture, globalThis.v4dVideoElement);
// } else {
// console.log("video not playing");
// console.error("video not playing");
// }
// console.log("6");
// console.error("6");
// } else {
// console.log("GL unavailable");
// console.error("GL unavailable");
// }
//
// gl.flush();
// gl.finish();
////
//// gl.flush();
//// gl.finish();
//});
EM_JS(void,copyVideoFrame,(int p), {
@ -420,29 +412,25 @@ EM_JS(void,copyVideoFrame,(int p), {
if(typeof cameraArrayBuffer !== 'undefined') {
Module.HEAPU8.set(cameraArrayBuffer.data, p);
}
} else {
console.log("Camery not ready");
}
});
long acc = 0;
Source makeCaptureSource(int width, int height) {
Source makeCaptureSource(int width, int height, cv::Ptr<V4D> window) {
using namespace std;
return Source([=](cv::UMat& frame) {
try {
frame.create(cv::Size(width, height), CV_8UC3);
if(frame.empty())
frame.create(cv::Size(width, height), CV_8UC3);
if (current_frame != nullptr) {
cv::Mat tmp(cv::Size(width, height), CV_8UC4, current_frame);
//we have to read from the array to make sure the data "appears". this seems to be some kind of caching problem.
//i tried volatile statements but that didn't work.
cerr << tmp.data[0] << tmp.data[width * height - 1];
cv::Mat v = frame.getMat(cv::ACCESS_WRITE);
cvtColor(tmp, v, cv::COLOR_BGRA2RGB);
v.release();
run_sync_on_main([=](){
run_sync_on_main<6>([&](){
cv::Mat tmp(cv::Size(width, height), CV_8UC4, current_frame);
cv::UMat utmp = tmp.getUMat(ACCESS_READ);
cvtColor(utmp, frame, cv::COLOR_BGRA2RGB);
utmp.release();
tmp.release();
copyVideoFrame(reinterpret_cast<int>(current_frame));
});
} else {

@ -45,19 +45,15 @@ cv::Scalar colorConvert(const cv::Scalar& src, cv::ColorConversionCodes code) {
return dst;
}
cv::Ptr<V4D> V4D::make(const cv::Size& size, const string& title, bool debug) {
cv::Ptr<V4D> v4d = new V4D(size, false, title, 4, 6, true, 0, debug);
cv::Ptr<V4D> V4D::make(const cv::Size& size, const string& title, bool offscreen, bool debug, int major,
int minor, bool compat, int samples) {
cv::Ptr<V4D> v4d = new V4D(size, title, offscreen, debug, major, minor, false, 0);
v4d->setVisible(true);
return v4d;
}
cv::Ptr<V4D> V4D::make(const cv::Size& initialSize, bool offscreen, const string& title, int major,
int minor, bool compat, int samples, bool debug) {
return new V4D(initialSize, offscreen, title, major, minor, compat, samples, debug);
}
V4D::V4D(const cv::Size& size, bool offscreen, const string& title, int major, int minor,
bool compat, int samples, bool debug) :
V4D::V4D(const cv::Size& size, const string& title, bool offscreen, bool debug, int major, int minor,
bool compat, int samples) :
initialSize_(size), offscreen_(offscreen), title_(title), major_(major), minor_(minor), compat_(
compat), samples_(samples), debug_(debug), viewport_(0, 0, size.width, size.height), scale_(
1), mousePos_(0, 0), stretch_(true), pool_(2) {
@ -542,25 +538,25 @@ void V4D::setDefaultKeyboardEventCallback() {
bool V4D::display() {
bool result = true;
if (!offscreen_) {
// run_sync_on_main([this](){
// run_sync_on_main<9>([this](){
// FrameBufferContext::GLScope glScope(clvaCtx().fbCtx());
// clvaCtx().fbCtx().blitFrameBufferToScreen(viewport(), clvaCtx().fbCtx().getWindowSize(), isStretching());
// clvaCtx().fbCtx().makeCurrent();
// glfwSwapBuffers(clvaCtx().fbCtx().getGLFWWindow());
// });
// run_sync_on_main([this](){
// run_sync_on_main<10>([this](){
// FrameBufferContext::GLScope glScope(glCtx().fbCtx());
// glCtx().fbCtx().blitFrameBufferToScreen(viewport(), glCtx().fbCtx().getWindowSize(), isStretching());
// glCtx().fbCtx().makeCurrent();
// glfwSwapBuffers(glCtx().fbCtx().getGLFWWindow());
// });
// run_sync_on_main([this](){
// run_sync_on_main<11>([this](){
// FrameBufferContext::GLScope glScope(nvgCtx().fbCtx());
// nvgCtx().fbCtx().blitFrameBufferToScreen(viewport(), nvgCtx().fbCtx().getWindowSize(), isStretching());
// nvgCtx().fbCtx().makeCurrent();
// glfwSwapBuffers(nvgCtx().fbCtx().getGLFWWindow());
// });
// run_sync_on_main([this](){
// run_sync_on_main<12>([this](){
// FrameBufferContext::GLScope glScope(nguiCtx().fbCtx());
// nguiCtx().fbCtx().blitFrameBufferToScreen(viewport(), nguiCtx().fbCtx().getWindowSize(), isStretching());
// nguiCtx().fbCtx().makeCurrent();
@ -569,8 +565,8 @@ bool V4D::display() {
nguiCtx().render();
run_sync_on_main([&, this](){
FrameBufferContext::GLScope glScope(fbCtx());
run_sync_on_main<6>([&, this](){
FrameBufferContext::GLScope glScope(fbCtx(), GL_READ_FRAMEBUFFER);
fbCtx().blitFrameBufferToScreen(viewport(), fbCtx().getWindowSize(), isStretching());
#ifndef __EMSCRIPTEN__
glfwSwapBuffers(fbCtx().getGLFWWindow());
@ -581,7 +577,7 @@ bool V4D::display() {
result = !glfwWindowShouldClose(getGLFWWindow());
});
#ifdef __EMSCRIPTEN__
run_sync_on_main([this](){
run_sync_on_main<7>([this](){
cv::UMat tmp;
cv::v4d::detail::FrameBufferContext::GLScope glScope(fbCtx());
cv::v4d::detail::FrameBufferContext::FrameBufferScope fbScope(fbCtx(), tmp);
@ -614,7 +610,7 @@ GLFWwindow* V4D::getGLFWWindow() {
}
void V4D::printSystemInfo() {
run_sync_on_main([this](){
run_sync_on_main<8>([this](){
fbCtx().makeCurrent();
cerr << "OpenGL Version: " << getGlInfo() << endl;
cerr << "OpenCL Platforms: " << getClInfo() << endl;
@ -637,6 +633,7 @@ void V4D::updateFps(bool graphical) {
if (graphical) {
this->nvg([this]() {
glClear(GL_DEPTH_BUFFER_BIT);
using namespace cv::v4d::nvg;
string txt = "FPS: " + std::to_string(fps_);
beginPath();

Loading…
Cancel
Save