implemented gpu camera capture in the browser

pull/3471/head
kallaballa 2 years ago
parent eb65ea00b2
commit 5915da21a1
12 changed files (lines changed):

    2  modules/v4d/include/opencv2/v4d/util.hpp
    3  modules/v4d/samples/example_v4d_video-demo.html
    2  modules/v4d/samples/nanovg-demo.cpp
    2  modules/v4d/samples/optflow-demo.cpp
    2  modules/v4d/samples/pedestrian-demo.cpp
    2  modules/v4d/samples/shader-demo.cpp
    2  modules/v4d/samples/video-demo.cpp
    2  modules/v4d/samples/video_editing.cpp
   19  modules/v4d/src/detail/framebuffercontext.cpp
    3  modules/v4d/src/detail/framebuffercontext.hpp
  167  modules/v4d/src/util.cpp
    2  modules/v4d/src/v4d.cpp

--- a/modules/v4d/include/opencv2/v4d/util.hpp
+++ b/modules/v4d/include/opencv2/v4d/util.hpp
@@ -167,7 +167,7 @@ CV_EXPORTS Source makeCaptureSource(const string& inputFilename);
  * @param height The frame height to capture (usually the initial height of the V4D object)
  * @return A WebCam source object.
  */
-CV_EXPORTS Source makeCaptureSource(int width, int height);
+CV_EXPORTS Source makeCaptureSource(int width, int height, cv::Ptr<V4D> window);
 #endif
 void resizePreserveAspectRatio(const cv::UMat& src, cv::UMat& output, const cv::Size& dstSize, const cv::Scalar& bgcolor = {0,0,0,255});
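The extra cv::Ptr<V4D> parameter gives the capture source access to the window's FrameBufferContext, so the camera frame can be moved on the GPU instead of through a CPU-side pixel buffer. A minimal sketch of the updated call pattern in a WASM build, mirroring the sample changes below (WIDTH, HEIGHT and the v4d window are assumed to exist in the caller):

    #ifdef __EMSCRIPTEN__
        // The source now needs the window whose framebuffer receives the camera frame.
        Source src = makeCaptureSource(WIDTH, HEIGHT, v4d);
        v4d->setSource(src);
    #endif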

--- a/modules/v4d/samples/example_v4d_video-demo.html
+++ b/modules/v4d/samples/example_v4d_video-demo.html
@@ -225,7 +225,8 @@
 }
 cameraBtn.addEventListener('click', async function() {
-    let stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
+    let stream = await navigator.mediaDevices.getUserMedia({ video: { width: 1280, height: 720 }, audio: false });
     videoElement.addEventListener(
         "playing",
         () => {

--- a/modules/v4d/samples/nanovg-demo.cpp
+++ b/modules/v4d/samples/nanovg-demo.cpp
@@ -192,7 +192,7 @@ int main() {
     v4d->setSource(src);
     v4d->setSink(sink);
 #else
-    Source src = makeCaptureSource(WIDTH, HEIGHT);
+    Source src = makeCaptureSource(WIDTH, HEIGHT, v4d);
     v4d->setSource(src);
 #endif

--- a/modules/v4d/samples/optflow-demo.cpp
+++ b/modules/v4d/samples/optflow-demo.cpp
@@ -493,7 +493,7 @@ int main() {
         src.fps(), cv::Size(WIDTH, HEIGHT));
     v4d->setSink(sink);
 #else
-    Source src = makeCaptureSource(WIDTH, HEIGHT);
+    Source src = makeCaptureSource(WIDTH, HEIGHT, v4d);
     v4d->setSource(src);
 #endif

--- a/modules/v4d/samples/pedestrian-demo.cpp
+++ b/modules/v4d/samples/pedestrian-demo.cpp
@@ -229,7 +229,7 @@ int main() {
         src.fps(), cv::Size(WIDTH, HEIGHT));
     v4d->setSink(sink);
 #else
-    Source src = makeCaptureSource(WIDTH, HEIGHT);
+    Source src = makeCaptureSource(WIDTH, HEIGHT, v4d);
     v4d->setSource(src);
 #endif

--- a/modules/v4d/samples/shader-demo.cpp
+++ b/modules/v4d/samples/shader-demo.cpp
@@ -365,7 +365,7 @@ int main() {
         FPS, cv::Size(WIDTH, HEIGHT));
     v4d->setSink(sink);
 #else
-    Source src = makeCaptureSource(WIDTH, HEIGHT);
+    Source src = makeCaptureSource(WIDTH, HEIGHT, v4d);
     v4d->setSource(src);
 #endif

--- a/modules/v4d/samples/video-demo.cpp
+++ b/modules/v4d/samples/video-demo.cpp
@@ -236,7 +236,7 @@ int main() {
         src.fps(), cv::Size(WIDTH, HEIGHT));
     v4d->setSink(sink);
 #else
-    Source src = makeCaptureSource(WIDTH, HEIGHT);
+    Source src = makeCaptureSource(WIDTH, HEIGHT, v4d);
     v4d->setSource(src);
 #endif

--- a/modules/v4d/samples/video_editing.cpp
+++ b/modules/v4d/samples/video_editing.cpp
@@ -25,7 +25,7 @@ int main(int argc, char** argv) {
     window->setSink(sink);
 #else
     //Make a webcam Source
-    Source src = makeCaptureSource(1280,720);
+    Source src = makeCaptureSource(1280, 720, window);
     //Attach web source
     window->setSource(src);
 #endif

--- a/modules/v4d/src/detail/framebuffercontext.cpp
+++ b/modules/v4d/src/detail/framebuffercontext.cpp
@@ -395,15 +395,28 @@ cv::Size FrameBufferContext::size() {
     return frameBufferSize_;
 }
 
+void FrameBufferContext::copyTo(cv::UMat& dst) {
+    run_sync_on_main<18>([&,this](){
+        if(framebuffer_.empty())
+            framebuffer_.create(size(), CV_8UC4);
+#ifndef __EMSCRIPTEN__
+        CLExecScope_t clExecScope(getCLExecContext());
+#endif
+        FrameBufferContext::GLScope glScope(*this);
+        FrameBufferContext::FrameBufferScope fbScope(*this, framebuffer_);
+        framebuffer_.copyTo(dst);
+    });
+}
+
 void FrameBufferContext::execute(std::function<void(cv::UMat&)> fn) {
     run_sync_on_main<2>([&,this](){
-        frameBuffer_.create(size(), CV_8UC4);
+        framebuffer_.create(size(), CV_8UC4);
 #ifndef __EMSCRIPTEN__
         CLExecScope_t clExecScope(getCLExecContext());
 #endif
         FrameBufferContext::GLScope glScope(*this);
-        FrameBufferContext::FrameBufferScope fbScope(*this, frameBuffer_);
-        fn(frameBuffer_);
+        FrameBufferContext::FrameBufferScope fbScope(*this, framebuffer_);
+        fn(framebuffer_);
     });
 }
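The new copyTo() is a read-only counterpart to execute(): it acquires the framebuffer under the same GL (and, natively, CL) scopes but only copies it out instead of handing it to a callback. A hedged usage sketch, assuming a valid V4D window whose fbCtx() accessor is used elsewhere in this commit:

    // Snapshot the current framebuffer contents without mutating them.
    cv::UMat snapshot;
    window->fbCtx().copyTo(snapshot);   // yields the CV_8UC4 (BGRA) framebuffer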

--- a/modules/v4d/src/detail/framebuffercontext.hpp
+++ b/modules/v4d/src/detail/framebuffercontext.hpp
@@ -132,6 +132,7 @@ public:
      * @return The framebuffer size.
      */
     cv::Size size();
+    void copyTo(cv::UMat& dst);
     /*!
      * Execute function object fn inside a framebuffer context.
      * The context acquires the framebuffer from OpenGL (either by up-/download or by cl-gl sharing)
@@ -231,7 +232,7 @@ protected:
     void toGLTexture2D(cv::UMat& u, cv::ogl::Texture2D& texture);
     void fromGLTexture2D(const cv::ogl::Texture2D& texture, cv::UMat& u);
-    cv::UMat frameBuffer_;
+    cv::UMat framebuffer_;
     /*!
      * The texture bound to the OpenGL framebuffer.
     */

--- a/modules/v4d/src/util.cpp
+++ b/modules/v4d/src/util.cpp
@@ -7,6 +7,7 @@
 #include <opencv2/imgcodecs.hpp>
 #include "opencv2/v4d/util.hpp"
 #include "opencv2/v4d/nvg.hpp"
+#include "detail/framebuffercontext.hpp"
 #ifdef __EMSCRIPTEN__
@@ -323,83 +324,69 @@ void v4dSetVideoFramePointer(uint8_t* frame, int width, int height) {
     }
 }
 
-//EM_JS(void,copyVideoFrameGPU,(int width, int height), {
-//    function initFramebuffer(gl) {
-//        console.error("init fb: " + width + "/" + height);
-//        if(typeof globalThis.v4dVideoFrameBuffer === 'undefined' || globalThis.v4dVideoFrameBuffer === null) {
-//            console.error("CREATE FRAMEBUFFER");
-//            globalThis.v4dVideoFrameBuffer = gl.createFramebuffer();
-//        }
-//        if(typeof globalThis.v4dVideoFrameTexture === 'undefined' || globalThis.v4dVideoFrameTexture === null) {
-//            console.error("CREATE TEXTURE");
-//            globalThis.v4dVideoFrameTexture = gl.createTexture();
-//        }
-//
-//        if(typeof globalThis.v4dVideoElement === 'undefined' || globalThis.v4dVideoElement === null) {
-//            console.error("CREATE VIDEO ELEMENT");
-//            globalThis.v4dVideoElement = document.querySelector("#video");
-//        }
-//
-//        gl.bindFramebuffer(gl.FRAMEBUFFER, globalThis.v4dVideoFrameBuffer);
-//        gl.bindTexture(gl.TEXTURE_2D, globalThis.v4dVideoFrameTexture);
-//        const level = 0;
-//        const internalFormat = gl.RGBA;
-//        const border = 0;
-//        const srcFormat = gl.RGBA;
-//        const srcType = gl.UNSIGNED_BYTE;
-//        gl.texImage2D(
-//            gl.TEXTURE_2D,
-//            level,
-//            internalFormat,
-//            srcFormat,
-//            srcType,
-//            globalThis.v4dVideoElement
-//        );
-//
-//        gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, globalThis.v4dVideoFrameTexture, 0);
-//        return gl.checkFramebufferStatus(gl.FRAMEBUFFER) == gl.FRAMEBUFFER_COMPLETE;
-//    }
-//
-//    function updateTexture(gl) {
-//        if(initFramebuffer(gl)) {
-//            console.error("video texture copy: " + globalThis.v4dVideoFrameTexture + " -> " + mainTexture);
-////            gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, mainFrameBuffer);
-//            gl.bindTexture(gl.TEXTURE_2D, mainTexture);
-////            gl.framebufferTexture2D(gl.DRAW_FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, mainTexture, 0);
-////            gl.blitFramebuffer( 0, 0, globalThis.v4dVideoElement.width, globalThis.v4dVideoElement.height,
-////                0, 0, width, height,
-////                gl.COLOR_BUFFER_BIT, gl.NEAREST);
-//            gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, globalThis.v4dVideoElement.width, globalThis.v4dVideoElement.height, 0);
-//        } else {
-//            console.error("frambuffer incomplete");
-//        }
-//    }
-//    var ctx;
-//    if (typeof GL !== 'undefined' && typeof Module.ctx !== 'undefined') {
-//        console.error("start");
-//
-//        gl = Module.ctx;
-//        mainFrameBuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING);
-//        mainTexture = gl.getFramebufferAttachmentParameter(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.FRAMEBUFFER_ATTACHMENT_OBJECT_NAME);
-//        console.log(mainFrameBuffer + ":" + mainTexture);
-//
-//        if(Module.doCapture) {
-//            console.error("capture");
-//            gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
-//            updateTexture(gl, globalThis.v4dVideoFrameTexture, globalThis.v4dVideoElement);
-//        } else {
-//            console.error("video not playing");
-//        }
-//        console.error("6");
-//    } else {
-//        console.error("GL unavailable");
-//    }
-////
-////    gl.flush();
-////    gl.finish();
-//});
-EM_JS(void,copyVideoFrame,(int p), {
+GLuint framebuffer = 0;
+GLuint texture = 0;
+
+bool captureVideoFrameGPU(int width, int height) {
+    int ret = EM_ASM_INT(
+        if(typeof Module.ctx !== 'undefined' && Module.ctx !== null && Module.doCapture) {
+            globalThis.gl = Module.ctx;
+            globalThis.v4dMainFrameBuffer = globalThis.gl.getParameter(globalThis.gl.FRAMEBUFFER_BINDING);
+            globalThis.v4dMainTexture = globalThis.gl.getFramebufferAttachmentParameter(globalThis.gl.FRAMEBUFFER, globalThis.gl.COLOR_ATTACHMENT0, globalThis.gl.FRAMEBUFFER_ATTACHMENT_OBJECT_NAME);
+            return 1;
+        } else {
+            return 0;
+        }
+    );
+
+    if(ret) {
+        EM_ASM(
+            if(typeof globalThis.v4dVideoElement === 'undefined' || globalThis.v4dVideoElement === null) {
+                globalThis.v4dVideoElement = document.querySelector("#video");
+            }
+        );
+
+        if(framebuffer == 0) {
+            GL_CHECK(glGenFramebuffers(1, &framebuffer));
+        }
+
+        GL_CHECK(glBindFramebuffer(GL_READ_FRAMEBUFFER, framebuffer));
+
+        if(texture == 0) {
+            GL_CHECK(glGenTextures(1, &texture));
+        }
+
+        GL_CHECK(glBindTexture(GL_TEXTURE_2D, texture));
+
+        EM_ASM(
+            const level = 0;
+            const internalFormat = globalThis.gl.RGBA;
+            const border = 0;
+            const srcFormat = globalThis.gl.RGBA;
+            const srcType = globalThis.gl.UNSIGNED_BYTE;
+            globalThis.gl.texImage2D(
+                globalThis.gl.TEXTURE_2D,
+                level,
+                internalFormat,
+                srcFormat,
+                srcType,
+                globalThis.v4dVideoElement
+            );
+        );
+
+        GL_CHECK(glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0));
+        EM_ASM(
+            globalThis.gl.bindFramebuffer(globalThis.gl.DRAW_FRAMEBUFFER, globalThis.v4dMainFrameBuffer);
+            globalThis.gl.bindTexture(globalThis.gl.TEXTURE_2D, globalThis.v4dMainTexture);
+            globalThis.gl.pixelStorei(globalThis.gl.UNPACK_FLIP_Y_WEBGL, true);
+            globalThis.gl.framebufferTexture2D(globalThis.gl.DRAW_FRAMEBUFFER, globalThis.gl.COLOR_ATTACHMENT0, globalThis.gl.TEXTURE_2D, globalThis.v4dMainTexture, 0);
+        );
+        return true;
+    }
+    return false;
+}
+
+EM_JS(void,copyVideoFrameCPU,(int p), {
 	if(Module.doCapture) {
 		if(typeof Module.cameraCtx === 'undefined' || Module.cameraCtx === null)
 			Module.cameraCtx = document.querySelector("#cameraCanvas").getContext('2d', { willReadFrequently: true });
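Reading the added code, the GPU path works in three steps: (1) an EM_ASM_INT block checks for a live GL context plus Module.doCapture and stashes V4D's currently bound framebuffer and color attachment; (2) the current <video> frame is uploaded into a texture via texImage2D, which on most browsers is a GPU-side copy; (3) that texture is attached to GL_READ_FRAMEBUFFER while V4D's own framebuffer and texture are re-bound on the DRAW side, so the subsequent framebuffer readback picks up the camera frame without a trip through JS-side pixel buffers. A hedged sketch of the intended call pattern, mirroring the makeCaptureSource() change in the next hunk (window, tmp and frame are assumed to exist in the caller):

    // Must run on the main thread with the window's GL context current.
    FrameBufferContext::GLScope scope(window->fbCtx());    // bind V4D's FBO
    if (captureVideoFrameGPU(width, height)) {             // pull in the <video> frame
        FrameBufferContext::FrameBufferScope fb(window->fbCtx(), tmp); // map FBO to a UMat
        cv::cvtColor(tmp, frame, cv::COLOR_BGRA2RGB);      // framebuffer is BGRA
    }

The old copyVideoFrame EM_JS, renamed copyVideoFrameCPU, stays as the canvas-based fallback.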
@@ -415,23 +402,33 @@ EM_JS(void,copyVideoFrame,(int p), {
     }
 });
 
-Source makeCaptureSource(int width, int height) {
+Source makeCaptureSource(int width, int height, cv::Ptr<V4D> window) {
     using namespace std;
 
     return Source([=](cv::UMat& frame) {
+        //FIXME
+        static cv::UMat tmp(cv::Size(width, height), CV_8UC4);
         try {
             if(frame.empty())
                 frame.create(cv::Size(width, height), CV_8UC3);
             if (current_frame != nullptr) {
-                run_sync_on_main<16>([&](){
-                    copyVideoFrame(reinterpret_cast<int>(current_frame));
-                    cv::Mat tmp(cv::Size(width, height), CV_8UC4, current_frame);
-                    cv::UMat utmp = tmp.getUMat(ACCESS_READ);
-                    cvtColor(utmp, frame, cv::COLOR_BGRA2RGB);
-                    utmp.release();
-                    tmp.release();
-                });
+                run_sync_on_main<17>([&](){
+                    FrameBufferContext::GLScope scope(window->fbCtx());
+                    if(captureVideoFrameGPU(width, height)) {
+                        FrameBufferContext::FrameBufferScope fbScope(window->fbCtx(), tmp);
+                        cvtColor(tmp, frame, COLOR_BGRA2RGB);
+                    }
+                });
+//                run_sync_on_main<16>([&](){
+//                    copyVideoFrameCPU(reinterpret_cast<int>(current_frame));
+//                    cv::Mat tmp(cv::Size(width, height), CV_8UC4, current_frame);
+//                    cv::UMat utmp = tmp.getUMat(ACCESS_READ);
+//                    cvtColor(utmp, frame, cv::COLOR_BGRA2RGB);
+//                    utmp.release();
+//                    tmp.release();
+//                });
             } else {
                 std::cerr << "Nothing captured" << endl;
             }

--- a/modules/v4d/src/v4d.cpp
+++ b/modules/v4d/src/v4d.cpp
@@ -47,7 +47,7 @@ cv::Scalar colorConvert(const cv::Scalar& src, cv::ColorConversionCodes code) {
 cv::Ptr<V4D> V4D::make(const cv::Size& size, const cv::Size& fbsize, const string& title, bool offscreen, bool debug, int major,
         int minor, bool compat, int samples) {
-    cv::Ptr<V4D> v4d = new V4D(size, fbsize, title, offscreen, debug, major, minor, false, 0);
+    cv::Ptr<V4D> v4d = new V4D(size, fbsize, title, offscreen, debug, major, minor, compat, samples);
     v4d->setVisible(true);
     return v4d;
 }
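This last hunk also fixes the factory, which previously discarded its compat and samples parameters in favor of hard-coded false and 0. With the fix, a caller's values actually reach the V4D constructor; a sketch with hypothetical argument values, following the signature shown above:

    // Requesting a compatibility profile and 4x MSAA now takes effect.
    cv::Ptr<V4D> v4d = V4D::make(cv::Size(1280, 720), cv::Size(1280, 720),
            "camera-demo", false, false, 3, 2, true, 4);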
