optimize wasm/opengl rendering and video capturing

pull/3471/head
kallaballa 2 years ago
parent 4a50541743
commit ef7361a0db
  1. 17
      modules/v4d/CMakeLists.txt
  2. 8
      modules/v4d/include/opencv2/v4d/v4d.hpp
  3. 77
      modules/v4d/samples/example_v4d_beauty-demo.html
  4. 5
      modules/v4d/samples/example_v4d_cube-demo.html
  5. 5
      modules/v4d/samples/example_v4d_custom_source_and_sink.html
  6. 3
      modules/v4d/samples/example_v4d_display_image.html
  7. 3
      modules/v4d/samples/example_v4d_display_image_fb.html
  8. 3
      modules/v4d/samples/example_v4d_font-demo.html
  9. 3
      modules/v4d/samples/example_v4d_font_rendering.html
  10. 3
      modules/v4d/samples/example_v4d_font_with_gui.html
  11. 62
      modules/v4d/samples/example_v4d_nanovg-demo.html
  12. 57
      modules/v4d/samples/example_v4d_optflow-demo.html
  13. 64
      modules/v4d/samples/example_v4d_pedestrian-demo.html
  14. 5
      modules/v4d/samples/example_v4d_render_opengl.html
  15. 63
      modules/v4d/samples/example_v4d_shader-demo.html
  16. 3
      modules/v4d/samples/example_v4d_vector_graphics.html
  17. 5
      modules/v4d/samples/example_v4d_vector_graphics_and_fb.html
  18. 63
      modules/v4d/samples/example_v4d_video-demo.html
  19. 72
      modules/v4d/samples/example_v4d_video_editing.html
  20. 20
      modules/v4d/samples/render_opengl.cpp
  21. 61
      modules/v4d/src/detail/framebuffercontext.cpp
  22. 3
      modules/v4d/src/detail/framebuffercontext.hpp
  23. 46
      modules/v4d/src/detail/glcontext.cpp
  24. 47
      modules/v4d/src/detail/nanoguicontext.cpp
  25. 42
      modules/v4d/src/detail/nanovgcontext.cpp
  26. 146
      modules/v4d/src/util.cpp
  27. 52
      modules/v4d/src/v4d.cpp

@ -49,18 +49,19 @@ macro(add_binary_sample sample)
endmacro()
if(EMSCRIPTEN)
set(EM_LINKER_FLAGS "-sPROXY_TO_PTHREAD=1 --use-preload-plugins --preload-file doc/lena.png -sINITIAL_MEMORY=128MB -sALLOW_MEMORY_GROWTH=1 -sUSE_GLFW=3 -sMIN_WEBGL_VERSION=2 -sMAX_WEBGL_VERSION=2")
set(EM_LINKER_FLAGS "-sOFFSCREENCANVAS_SUPPORT -sOFFSCREENCANVASES_TO_PTHREAD=#offscreenCanvas -sEXPORTED_FUNCTIONS=_malloc,_main,_v4dSetVideoFramePointer -sEXPORTED_RUNTIME_METHODS=ccall,setValue -sPROXY_TO_PTHREAD=1 --use-preload-plugins --preload-file doc/lena.png -sINITIAL_MEMORY=128MB -sALLOW_MEMORY_GROWTH=1 -sUSE_GLFW=3 -sMIN_WEBGL_VERSION=2 -sMAX_WEBGL_VERSION=2 --bind")
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${EM_LINKER_FLAGS}")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${EM_LINKER_FLAGS}")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${EM_LINKER_FLAGS}")
# if(NOT "${CMAKE_BUILD_TYPE}" STREQUAL "Release")
# set(EM_DEBUG_FLAGS "-fsanitize=address -sSTACK_OVERFLOW_CHECK=2 -sASSERTIONS=2 -sNO_DISABLE_EXCEPTION_CATCHING -sEXCEPTION_DEBUG=1")
# set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${EM_DEBUG_FLAGS}")
# set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${EM_DEBUG_FLAGS}")
# set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${EM_DEBUG_FLAGS}")
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address")
# endif()
if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug")
set(EM_DEBUG_FLAGS "-sSTACK_OVERFLOW_CHECK=2 -sASSERTIONS=2 -sNO_DISABLE_EXCEPTION_CATCHING -sEXCEPTION_DEBUG=1")
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${EM_DEBUG_FLAGS}")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${EM_DEBUG_FLAGS}")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${EM_DEBUG_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
endif()
else()
if(NOT GLEW_FOUND)
message(STATUS "Module opencv_v4d disabled because GLEW was not found")

@ -407,7 +407,6 @@ private:
void setDefaultKeyboardEventCallback();
void setKeyboardEventCallback(
std::function<bool(int key, int scancode, int action, int modifiers)> fn);
bool initializeGUI();
void setMouseDrag(bool d);
bool isMouseDrag();
cv::Vec2f getMousePosition();
@ -418,6 +417,13 @@ private:
NanoVGContext& nvgCtx();
NanoguiContext& nguiCtx();
GLContext& glCtx();
bool hasFbCtx();
bool hasClvaCtx();
bool hasNvgCtx();
bool hasNguiCtx();
bool hasGlCtx();
GLFWwindow* getGLFWWindow();
};
}

@ -4,7 +4,7 @@
<meta charset="utf-8">
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="viewport" content="width=device-width, initial-scale=2.0, user-scalable=no">
<title>Beauty Demo</title>
<title>Beautification Demo</title>
<style>
body {
font-family: arial;
@ -103,13 +103,14 @@
}
</style>
</head>
<body>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="cameraCanvas" width="1920" height="1080" style="display: none;"></canvas>
<video id="video" width="1920" height="1080" autoplay style="display: none;"></video>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
@ -121,20 +122,29 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var cameraBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#video");
var cameraCanvas = document.querySelector("#cameraCanvas");
function fixCanvasSize() {
Module.canvas.style.width = (1920 / window.devicePixelRatio)+ "px";
Module.canvas.style.height = (1080 / window.devicePixelRatio) + "px";
Module.canvas.width = 1920;
Module.canvas.height = 1080;
Module.canvas.style.width = (1280 / window.devicePixelRatio)+ "px";
Module.canvas.style.height = (720 / window.devicePixelRatio) + "px";
Module.canvas.width = 1280;
Module.canvas.height = 720;
}
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
Module.videoBuffer = Module._malloc(1280 * 720 * 4);
Module.cameraCtx = null;
Module.ccall('v4dSetVideoFramePointer', 'void', ['number', 'number', 'number'], [Module.videoBuffer, 1280, 720]);
},
preRun: [],
postRun: [],
@ -206,33 +216,34 @@
};
};
let fsButton = document.querySelector("#fullscreenBtn");
let cameraBtn = document.querySelector("#captureBtn");
let videoElement = document.querySelector("#video");
let cameraCanvas = document.querySelector("#cameraCanvas");
function capture() {
let ctx = cameraCanvas.getContext('2d', { willReadFrequently: true });
ctx.drawImage(videoElement, 0, 0, cameraCanvas.width, cameraCanvas.height);
var imageData = ctx.getImageData(0, 0, cameraCanvas.width, cameraCanvas.height);
let filename = 'v4d_rgba_canvas.raw';
let stream = FS.open(filename, 'w+');
if(imageData) {
FS.write(stream, imageData.data, 0, imageData.data.length, 0);
FS.close(stream);
}
}
function runCapture() {
capture();
requestAnimationFrame(runCapture);
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
Module.doCapture = true;
}
}
cameraBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
runCapture();
});
fsButton.addEventListener('click', async function () {
@ -248,9 +259,7 @@
}
});
</script>
<script async type="text/javascript" src="/example_v4d_beauty-demo/get.php?res=example_v4d_beauty-demo.js"></script>
<script async type="text/javascript" src="example_v4d_beauty-demo.js"></script>
</body>
<script async type="text/javascript" src="/example_v4d_beauty-demo/get.php?res=example_v4d_beauty-demo.js"></script>
</body>
</html>

@ -108,7 +108,8 @@
<span><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
@ -121,6 +122,8 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');

@ -108,7 +108,8 @@
<span><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
@ -121,6 +122,8 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');

@ -108,6 +108,7 @@
<span><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -121,6 +122,8 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');

@ -108,6 +108,7 @@
<span><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -121,6 +122,8 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');

@ -108,6 +108,7 @@
<span><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -121,6 +122,8 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');

@ -108,6 +108,7 @@
<span><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -121,6 +122,8 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');

@ -109,6 +109,7 @@
<span><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -122,6 +123,8 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');

@ -103,11 +103,12 @@
}
</style>
</head>
<body>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -121,9 +122,15 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var cameraBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#video");
var cameraCanvas = document.querySelector("#cameraCanvas");
function fixCanvasSize() {
Module.canvas.style.width = (1280 / window.devicePixelRatio)+ "px";
@ -135,6 +142,9 @@
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
Module.videoBuffer = Module._malloc(1280 * 720 * 4);
Module.cameraCtx = null;
Module.ccall('v4dSetVideoFramePointer', 'void', ['number', 'number', 'number'], [Module.videoBuffer, 1280, 720]);
},
preRun: [],
postRun: [],
@ -206,33 +216,34 @@
};
};
let fsButton = document.querySelector("#fullscreenBtn");
let cameraBtn = document.querySelector("#captureBtn");
let videoElement = document.querySelector("#video");
let cameraCanvas = document.querySelector("#cameraCanvas");
function capture() {
let ctx = cameraCanvas.getContext('2d', { willReadFrequently: true });
ctx.drawImage(videoElement, 0, 0, cameraCanvas.width, cameraCanvas.height);
var imageData = ctx.getImageData(0, 0, cameraCanvas.width, cameraCanvas.height);
let filename = 'v4d_rgba_canvas.raw';
let stream = FS.open(filename, 'w+');
if(imageData) {
FS.write(stream, imageData.data, 0, imageData.data.length, 0);
FS.close(stream);
}
}
function runCapture() {
capture();
requestAnimationFrame(runCapture);
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
Module.doCapture = true;
}
}
cameraBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
runCapture();
});
fsButton.addEventListener('click', async function () {
@ -248,8 +259,7 @@
}
});
</script>
<script async type="text/javascript" src="example_v4d_nanovg-demo.js"></script>
<script async type="text/javascript" src="/example_v4d_nanovg-demo/get.php?res=example_v4d_nanovg-demo.js"></script>
</body>
</body>
</html>

@ -108,7 +108,8 @@
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
@ -121,9 +122,15 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var cameraBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#video");
var cameraCanvas = document.querySelector("#cameraCanvas");
function fixCanvasSize() {
Module.canvas.style.width = (1280 / window.devicePixelRatio)+ "px";
@ -135,6 +142,8 @@
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
Module.videoBuffer = Module._malloc(1280 * 720 * 4);
Module.ccall('v4dSetVideoFramePointer', 'void', ['number', 'number', 'number'], [Module.videoBuffer, 1280, 720]);
},
preRun: [],
postRun: [],
@ -206,33 +215,35 @@
};
};
let fsButton = document.querySelector("#fullscreenBtn");
let cameraBtn = document.querySelector("#captureBtn");
let videoElement = document.querySelector("#video");
let cameraCanvas = document.querySelector("#cameraCanvas");
function capture() {
let ctx = cameraCanvas.getContext('2d', { willReadFrequently: true });
ctx.drawImage(videoElement, 0, 0, cameraCanvas.width, cameraCanvas.height);
var imageData = ctx.getImageData(0, 0, cameraCanvas.width, cameraCanvas.height);
let filename = 'v4d_rgba_canvas.raw';
let stream = FS.open(filename, 'w+');
if(imageData) {
FS.write(stream, imageData.data, 0, imageData.data.length, 0);
FS.close(stream);
}
}
function runCapture() {
capture();
requestAnimationFrame(runCapture);
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
Module.doCapture = true;
}
}
cameraBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
runCapture();
// runCapture();
});
fsButton.addEventListener('click', async function () {

@ -4,7 +4,7 @@
<meta charset="utf-8">
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="viewport" content="width=device-width, initial-scale=2.0, user-scalable=no">
<title>Pedestrian Demo</title>
<title>Pedestrian Detection Demo</title>
<style>
body {
font-family: arial;
@ -103,11 +103,12 @@
}
</style>
</head>
<body>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -121,9 +122,15 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var cameraBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#video");
var cameraCanvas = document.querySelector("#cameraCanvas");
function fixCanvasSize() {
Module.canvas.style.width = (1280 / window.devicePixelRatio)+ "px";
@ -135,6 +142,9 @@
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
Module.videoBuffer = Module._malloc(1280 * 720 * 4);
Module.cameraCtx = null;
Module.ccall('v4dSetVideoFramePointer', 'void', ['number', 'number', 'number'], [Module.videoBuffer, 1280, 720]);
},
preRun: [],
postRun: [],
@ -206,33 +216,34 @@
};
};
let fsButton = document.querySelector("#fullscreenBtn");
let cameraBtn = document.querySelector("#captureBtn");
let videoElement = document.querySelector("#video");
let cameraCanvas = document.querySelector("#cameraCanvas");
function capture() {
let ctx = cameraCanvas.getContext('2d', { willReadFrequently: true });
ctx.drawImage(videoElement, 0, 0, cameraCanvas.width, cameraCanvas.height);
var imageData = ctx.getImageData(0, 0, cameraCanvas.width, cameraCanvas.height);
let filename = 'v4d_rgba_canvas.raw';
let stream = FS.open(filename, 'w+');
if(imageData) {
FS.write(stream, imageData.data, 0, imageData.data.length, 0);
FS.close(stream);
}
}
function runCapture() {
capture();
requestAnimationFrame(runCapture);
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
Module.doCapture = true;
}
}
cameraBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
runCapture();
});
fsButton.addEventListener('click', async function () {
@ -248,8 +259,7 @@
}
});
</script>
<script async type="text/javascript" src="example_v4d_pedestrian-demo.js"></script>
<script async type="text/javascript" src="/example_v4d_pedestrian-demo/get.php?res=example_v4d_pedestrian-demo.js"></script>
</body>
</body>
</html>

@ -108,6 +108,7 @@
<span><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -121,6 +122,8 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
@ -244,7 +247,7 @@
</script>
<script async type="text/javascript" src="/example_v4d_render_opengl/get.php?res=example_v4d_render_opengl.js"></script>
<script async type="text/javascript" src="example_v4d_render_opengl.js"></script>
</body>
</body>
</html>

@ -103,11 +103,12 @@
}
</style>
</head>
<body>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -121,9 +122,15 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var cameraBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#video");
var cameraCanvas = document.querySelector("#cameraCanvas");
function fixCanvasSize() {
Module.canvas.style.width = (1280 / window.devicePixelRatio)+ "px";
@ -135,6 +142,9 @@
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
Module.videoBuffer = Module._malloc(1280 * 720 * 4);
Module.cameraCtx = null;
Module.ccall('v4dSetVideoFramePointer', 'void', ['number', 'number', 'number'], [Module.videoBuffer, 1280, 720]);
},
preRun: [],
postRun: [],
@ -206,33 +216,34 @@
};
};
let fsButton = document.querySelector("#fullscreenBtn");
let cameraBtn = document.querySelector("#captureBtn");
let videoElement = document.querySelector("#video");
let cameraCanvas = document.querySelector("#cameraCanvas");
function capture() {
let ctx = cameraCanvas.getContext('2d', { willReadFrequently: true });
ctx.drawImage(videoElement, 0, 0, cameraCanvas.width, cameraCanvas.height);
var imageData = ctx.getImageData(0, 0, cameraCanvas.width, cameraCanvas.height);
let filename = 'v4d_rgba_canvas.raw';
let stream = FS.open(filename, 'w+');
if(imageData) {
FS.write(stream, imageData.data, 0, imageData.data.length, 0);
FS.close(stream);
}
}
function runCapture() {
capture();
requestAnimationFrame(runCapture);
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
Module.doCapture = true;
}
}
cameraBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
runCapture();
});
fsButton.addEventListener('click', async function () {
@ -248,9 +259,7 @@
}
});
</script>
<script async type="text/javascript" src="/example_v4d_shader-demo/get.php?res=example_v4d_shader-demo.js"></script>
<script async type="text/javascript" src="example_v4d_shader-demo.js"></script>
</body>
<script async type="text/javascript" src="/example_v4d_shader-demo/get.php?res=example_v4d_shader-demo.js"></script>
</body>
</html>

@ -108,6 +108,7 @@
<span><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -121,6 +122,8 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');

@ -108,7 +108,8 @@
<span><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
<div class="emscripten" id="status">Downloading...</div>
@ -121,6 +122,8 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');

@ -103,11 +103,12 @@
}
</style>
</head>
<body>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -121,9 +122,15 @@
<div class="emscripten_border">
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var cameraBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#video");
var cameraCanvas = document.querySelector("#cameraCanvas");
function fixCanvasSize() {
Module.canvas.style.width = (1280 / window.devicePixelRatio)+ "px";
@ -135,6 +142,9 @@
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
Module.videoBuffer = Module._malloc(1280 * 720 * 4);
Module.cameraCtx = null;
Module.ccall('v4dSetVideoFramePointer', 'void', ['number', 'number', 'number'], [Module.videoBuffer, 1280, 720]);
},
preRun: [],
postRun: [],
@ -206,33 +216,34 @@
};
};
let fsButton = document.querySelector("#fullscreenBtn");
let cameraBtn = document.querySelector("#captureBtn");
let videoElement = document.querySelector("#video");
let cameraCanvas = document.querySelector("#cameraCanvas");
function capture() {
let ctx = cameraCanvas.getContext('2d', { willReadFrequently: true });
ctx.drawImage(videoElement, 0, 0, cameraCanvas.width, cameraCanvas.height);
var imageData = ctx.getImageData(0, 0, cameraCanvas.width, cameraCanvas.height);
let filename = 'v4d_rgba_canvas.raw';
let stream = FS.open(filename, 'w+');
if(imageData) {
FS.write(stream, imageData.data, 0, imageData.data.length, 0);
FS.close(stream);
}
}
function runCapture() {
capture();
requestAnimationFrame(runCapture);
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
Module.doCapture = true;
}
}
cameraBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
runCapture();
});
fsButton.addEventListener('click', async function () {
@ -248,9 +259,7 @@
}
});
</script>
<script async type="text/javascript" src="/example_v4d_video-demo/get.php?res=example_v4d_video-demo.js"></script>
<script async type="text/javascript" src="example_v4d_video-demo.js"></script>
</body>
<script async type="text/javascript" src="/example_v4d_video-demo/get.php?res=example_v4d_video-demo.js"></script>
</body>
</html>

@ -4,7 +4,7 @@
<meta charset="utf-8">
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="viewport" content="width=device-width, initial-scale=2.0, user-scalable=no">
<title>Video Editing Example</title>
<title>Video editing example</title>
<style>
body {
font-family: arial;
@ -103,11 +103,12 @@
}
</style>
</head>
<body>
<body>
<span id='controls'>
<span><button id="captureBtn">Start Capture</button><button id="fullscreenBtn">Fullscreen</button>
</span>
</span>
<canvas id="offscreenCanvas" width="1280" height="720" style="display: none;"></canvas>
<canvas id="cameraCanvas" width="1280" height="720" style="display: none;"></canvas>
<video id="video" width="1280" height="720" autoplay style="display: none;"></video>
@ -119,11 +120,17 @@
<div class="emscripten_border">
<canvas class="emscripten" width="1280" height="720" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
<canvas class="emscripten" id="canvas" oncontextmenu="event.preventDefault()" tabindex=-1></canvas>
</div>
<script type="text/javascript" src="/virtual-webgl2.js"></script>
<script type="text/javascript" src="virtual-webgl2.js"></script>
<script type='text/javascript'>
var statusElement = document.getElementById('status');
var progressElement = document.getElementById('progress');
var fsButton = document.querySelector("#fullscreenBtn");
var cameraBtn = document.querySelector("#captureBtn");
var videoElement = document.querySelector("#video");
var cameraCanvas = document.querySelector("#cameraCanvas");
function fixCanvasSize() {
Module.canvas.style.width = (1280 / window.devicePixelRatio)+ "px";
@ -135,11 +142,12 @@
var Module = {
onRuntimeInitialized: function() {
fixCanvasSize();
Module.videoBuffer = Module._malloc(1280 * 720 * 4);
Module.cameraCtx = null;
Module.ccall('v4dSetVideoFramePointer', 'void', ['number', 'number', 'number'], [Module.videoBuffer, 1280, 720]);
},
preRun: [],
postRun: function() {
},
postRun: [],
print: (function() {
var element = document.getElementById('output');
if (element) element.value = ''; // clear browser cache
@ -208,40 +216,40 @@
};
};
let fsButton = document.querySelector("#fullscreenBtn");
let cameraBtn = document.querySelector("#captureBtn");
let videoElement = document.querySelector("#video");
let cameraCanvas = document.querySelector("#cameraCanvas");
function capture() {
let ctx = cameraCanvas.getContext('2d', { willReadFrequently: true });
ctx.drawImage(videoElement, 0, 0, cameraCanvas.width, cameraCanvas.height);
var imageData = ctx.getImageData(0, 0, cameraCanvas.width, cameraCanvas.height);
let filename = 'v4d_rgba_canvas.raw';
let stream = FS.open(filename, 'w+');
if(imageData) {
FS.write(stream, imageData.data, 0, imageData.data.length, 0);
FS.close(stream);
}
}
function runCapture() {
capture();
requestAnimationFrame(runCapture);
var playing = false;
var timeupdate = false;
function checkReady() {
if (playing && timeupdate) {
Module.doCapture = true;
}
}
cameraBtn.addEventListener('click', async function() {
let stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
videoElement.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
videoElement.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
videoElement.srcObject = stream;
runCapture();
});
fsButton.addEventListener('click', async function () {
Module.requestFullscreen(false, false)
});
window.addEventListener('fullscreenchange', function (event) {
if (document.fullscreenElement) {
console.log(`Element: ${document.fullscreenElement.id} entered fullscreen mode.`);
@ -251,9 +259,7 @@
}
});
</script>
<script async type="text/javascript" src="/example_v4d_video_editing/get.php?res=example_v4d_video_editing.js"></script>
<script async type="text/javascript" src="example_v4d_video_editing.js"></script>
</body>
<script async type="text/javascript" src="/example_v4d_video_editing/get.php?res=example_v4d_video_editing.js"></script>
</body>
</html>

@ -1,4 +1,5 @@
#include <opencv2/v4d/v4d.hpp>
#include "../src/detail/framebuffercontext.hpp"
using namespace cv;
using namespace cv::v4d;
@ -7,19 +8,20 @@ static Ptr<V4D> window = V4D::make(Size(1280, 720), "GL Blue Screen");
int main() {
window->gl([](){
//Sets blue as clear color
//Sets the clear color to blue
glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
});
window->run([=]() {
window->gl([]() {
//Clears the screen
glClear(GL_COLOR_BUFFER_BIT);
});
window->updateFps();
//Clears the screen
glClear(GL_COLOR_BUFFER_BIT);
});
//If onscreen rendering is enabled it displays the framebuffer in the native window.
//Returns false if the window was closed.
return window->display();
});
window->updateFps(true);
//If onscreen rendering is enabled it displays the framebuffer in the native window.
//Returns false if the window was closed.
return window->display();
});
}

@ -12,7 +12,6 @@
#include "opencv2/core/opengl.hpp"
#include <exception>
namespace cv {
namespace v4d {
namespace detail {
@ -36,9 +35,10 @@ FrameBufferContext::~FrameBufferContext() {
}
void FrameBufferContext::init() {
#ifndef OPENCV_V4D_USE_ES3
#if defined(__EMSCRIPTEN__) || !defined(OPENCV_V4D_USE_ES3)
if(parent_ != nullptr) {
textureID_ = parent_->textureID_;
renderBufferID_ = parent_->renderBufferID_;
isShared_ = true;
}
#else
@ -91,6 +91,7 @@ void FrameBufferContext::init() {
assert(false);
}
this->makeCurrent();
glfwSwapInterval(0);
#ifndef OPENCV_V4D_USE_ES3
if (!gladLoadGLLoader((GLADloadproc) glfwGetProcAddress))
throw std::runtime_error("Could not initialize GLAD!");
@ -124,7 +125,8 @@ void FrameBufferContext::init() {
glfwSetCursorPosCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, double x, double y) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
v4d->nguiCtx().screen().cursor_pos_callback_event(x * v4d->getXPixelRatio(), y * v4d->getXPixelRatio());
if(v4d->hasNguiCtx())
v4d->nguiCtx().screen().cursor_pos_callback_event(x * v4d->getXPixelRatio(), y * v4d->getXPixelRatio());
#ifndef __EMSCRIPTEN__
auto cursor = v4d->getMousePosition();
auto diff = cursor - cv::Vec2f(x, y);
@ -138,7 +140,8 @@ void FrameBufferContext::init() {
glfwSetMouseButtonCallback(getGLFWWindow(),
[](GLFWwindow* glfwWin, int button, int action, int modifiers) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
v4d->nguiCtx().screen().mouse_button_callback_event(button, action, modifiers);
if(v4d->hasNguiCtx())
v4d->nguiCtx().screen().mouse_button_callback_event(button, action, modifiers);
if (button == GLFW_MOUSE_BUTTON_RIGHT) {
v4d->setMouseDrag(action == GLFW_PRESS);
}
@ -147,32 +150,36 @@ void FrameBufferContext::init() {
glfwSetKeyCallback(getGLFWWindow(),
[](GLFWwindow* glfwWin, int key, int scancode, int action, int mods) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
v4d->nguiCtx().screen().key_callback_event(key, scancode, action, mods);
if(v4d->hasNguiCtx())
v4d->nguiCtx().screen().key_callback_event(key, scancode, action, mods);
}
);
glfwSetCharCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, unsigned int codepoint) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
v4d->nguiCtx().screen().char_callback_event(codepoint);
if(v4d->hasNguiCtx())
v4d->nguiCtx().screen().char_callback_event(codepoint);
}
);
glfwSetDropCallback(getGLFWWindow(),
[](GLFWwindow* glfwWin, int count, const char** filenames) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
v4d->nguiCtx().screen().drop_callback_event(count, filenames);
if(v4d->hasNguiCtx())
v4d->nguiCtx().screen().drop_callback_event(count, filenames);
}
);
glfwSetScrollCallback(getGLFWWindow(),
[](GLFWwindow* glfwWin, double x, double y) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
std::vector<nanogui::Widget*> widgets;
for (auto* w : v4d->nguiCtx().screen().children()) {
auto mousePos = nanogui::Vector2i(v4d->getMousePosition()[0] / v4d->getXPixelRatio(), v4d->getMousePosition()[1] / v4d->getYPixelRatio());
if(contains_absolute(w, mousePos)) {
v4d->nguiCtx().screen().scroll_callback_event(x, y);
return;
if(v4d->hasNguiCtx()) {
for (auto* w : v4d->nguiCtx().screen().children()) {
auto mousePos = nanogui::Vector2i(v4d->getMousePosition()[0] / v4d->getXPixelRatio(), v4d->getMousePosition()[1] / v4d->getYPixelRatio());
if(contains_absolute(w, mousePos)) {
v4d->nguiCtx().screen().scroll_callback_event(x, y);
return;
}
}
}
#ifndef __EMSCRIPTEN__
v4d->zoom(y < 0 ? 1.1 : 0.9);
#endif
@ -182,7 +189,8 @@ void FrameBufferContext::init() {
glfwSetWindowSizeCallback(getGLFWWindow(),
[](GLFWwindow* glfwWin, int width, int height) {
V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
v4d->nguiCtx().screen().resize_callback_event(width, height);
if(v4d->hasNguiCtx())
v4d->nguiCtx().screen().resize_callback_event(width, height);
cv::Rect& vp = v4d->viewport();
cv::Size fbsz = v4d->getFrameBufferSize();
vp.x = 0;
@ -222,7 +230,7 @@ void FrameBufferContext::setup(const cv::Size& sz) {
GL_CHECK(glGenTextures(1, &textureID_));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, textureID_));
cerr << "main: " << frameBufferID_ << ":" << textureID_ << endl;
texture_ = new cv::ogl::Texture2D(sz, cv::ogl::Texture2D::RGBA, textureID_);
GL_CHECK(glPixelStorei(GL_UNPACK_ALIGNMENT, 1));
GL_CHECK(
@ -237,15 +245,23 @@ void FrameBufferContext::setup(const cv::Size& sz) {
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID_, 0));
assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
} else {
assert(parent_ != nullptr);
GL_CHECK(glGenFramebuffers(1, &frameBufferID_));
GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, frameBufferID_));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, textureID_));
cerr << "leaf: " << frameBufferID_ << ":" << textureID_ << endl;
texture_ = new cv::ogl::Texture2D(sz, cv::ogl::Texture2D::RGBA, textureID_);
GL_CHECK(glPixelStorei(GL_UNPACK_ALIGNMENT, 1));
GL_CHECK(
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, sz.width, sz.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0));
GL_CHECK(glBindRenderbuffer(GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID_, 0));
assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
}
}
@ -402,9 +418,12 @@ CLExecContext_t& FrameBufferContext::getCLExecContext() {
void FrameBufferContext::blitFrameBufferToScreen(const cv::Rect& viewport,
const cv::Size& windowSize, bool stretch) {
glFinish();
GL_CHECK(glBindFramebuffer(GL_READ_FRAMEBUFFER, frameBufferID_));
GL_CHECK(glReadBuffer(GL_COLOR_ATTACHMENT0));
GL_CHECK(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0));
glClearColor(0, 0, 0, 0);
glClear(GL_COLOR_BUFFER_BIT);
double hf = double(windowSize.height) / frameBufferSize_.height;
double wf = double(windowSize.width) / frameBufferSize_.width;
double f = std::min(hf, wf);
@ -424,11 +443,9 @@ void FrameBufferContext::blitFrameBufferToScreen(const cv::Rect& viewport,
GLint dstY0 = stretch ? yn : windowSize.height - frameBufferSize_.height;
GLint dstX1 = stretch ? wn : frameBufferSize_.width;
GLint dstY1 = stretch ? hn : frameBufferSize_.height;
GL_CHECK(
glBlitFramebuffer( srcX0, srcY0, srcX1, srcY1,
dstX0, dstY0, dstX1, dstY1,
GL_COLOR_BUFFER_BIT, GL_NEAREST)
);
GL_COLOR_BUFFER_BIT, GL_NEAREST);
}
void FrameBufferContext::begin() {
@ -437,10 +454,10 @@ void FrameBufferContext::begin() {
glGetError();
GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, frameBufferID_));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, textureID_));
GL_CHECK(glBindRenderbuffer(GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, renderBufferID_));
GL_CHECK(glBindTexture(GL_TEXTURE_2D, textureID_));
GL_CHECK(
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID_, 0));
assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
@ -449,10 +466,6 @@ void FrameBufferContext::begin() {
}
void FrameBufferContext::end() {
glBindTexture(GL_TEXTURE_2D, 0);
glGetError();
glBindRenderbuffer(GL_RENDERBUFFER, 0);
glGetError();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glGetError();
glViewport(viewport_[0], viewport_[1], viewport_[2], viewport_[3]);

@ -10,7 +10,8 @@
# include <emscripten/threading.h>
#endif
#include "cl.hpp"
//FIXME
#include "opencv2/v4d/detail/cl.hpp"
#include <opencv2/core/ocl.hpp>
#include "opencv2/v4d/util.hpp"
#include "pbodownloader.hpp"

@ -15,35 +15,35 @@ GLContext::GLContext(V4D& v4d, FrameBufferContext& fbContext) :
void GLContext::render(std::function<void(const cv::Size&)> fn) {
run_sync_on_main([&,this](){
#ifdef __EMSCRIPTEN__
fb_.create(mainFbContext_.size(), CV_8UC4);
preFB_.create(mainFbContext_.size(), CV_8UC4);
postFB_.create(mainFbContext_.size(), CV_8UC4);
{
FrameBufferContext::GLScope mainGlScope(mainFbContext_);
FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
fb_.copyTo(preFB_);
}
{
FrameBufferContext::GLScope glGlScope(glFbContext_);
FrameBufferContext::FrameBufferScope fbScope(glFbContext_, fb_);
preFB_.copyTo(fb_);
}
// fb_.create(mainFbContext_.size(), CV_8UC4);
// preFB_.create(mainFbContext_.size(), CV_8UC4);
// postFB_.create(mainFbContext_.size(), CV_8UC4);
// {
// FrameBufferContext::GLScope mainGlScope(mainFbContext_);
// FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
// fb_.copyTo(preFB_);
// }
// {
// FrameBufferContext::GLScope glGlScope(glFbContext_);
// FrameBufferContext::FrameBufferScope fbScope(glFbContext_, fb_);
// preFB_.copyTo(fb_);
// }
#endif
{
FrameBufferContext::GLScope glScope(fbCtx());
fn(fbCtx().size());
}
#ifdef __EMSCRIPTEN__
{
FrameBufferContext::GLScope glScope(fbCtx());
FrameBufferContext::FrameBufferScope fbScope(fbCtx(), fb_);
fb_.copyTo(postFB_);
}
{
FrameBufferContext::GLScope mainGlScope(mainFbContext_);
FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
postFB_.copyTo(fb_);
}
// {
// FrameBufferContext::GLScope glScope(fbCtx());
// FrameBufferContext::FrameBufferScope fbScope(fbCtx(), fb_);
// fb_.copyTo(postFB_);
// }
// {
// FrameBufferContext::GLScope mainGlScope(mainFbContext_);
// FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
// postFB_.copyTo(fb_);
// }
#endif
});
}

@ -18,41 +18,42 @@ void NanoguiContext::init() {
FrameBufferContext::GLScope glScope(fbCtx());
screen_ = new nanogui::Screen();
screen_->initialize(nguiFbContext_.getGLFWWindow(), false);
fbCtx().setWindowSize(fbCtx().size());
form_ = new cv::v4d::FormHelper(screen_);
}
void NanoguiContext::render() {
run_sync_on_main([&,this](){
#ifdef __EMSCRIPTEN__
fb_.create(mainFbContext_.size(), CV_8UC4);
preFB_.create(mainFbContext_.size(), CV_8UC4);
postFB_.create(mainFbContext_.size(), CV_8UC4);
{
FrameBufferContext::GLScope mainGlScope(mainFbContext_);
FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
fb_.copyTo(preFB_);
}
{
FrameBufferContext::GLScope glGlScope(fbCtx());
FrameBufferContext::FrameBufferScope fbScope(fbCtx(), fb_);
preFB_.copyTo(fb_);
}
// fb_.create(mainFbContext_.size(), CV_8UC4);
// preFB_.create(mainFbContext_.size(), CV_8UC4);
// postFB_.create(mainFbContext_.size(), CV_8UC4);
// {
// FrameBufferContext::GLScope mainGlScope(mainFbContext_);
// FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
// fb_.copyTo(preFB_);
// }
// {
// FrameBufferContext::GLScope glGlScope(fbCtx());
// FrameBufferContext::FrameBufferScope fbScope(fbCtx(), fb_);
// preFB_.copyTo(fb_);
// }
#endif
{
FrameBufferContext::GLScope glScope(fbCtx());
screen().draw_widgets();
}
#ifdef __EMSCRIPTEN__
{
FrameBufferContext::GLScope glScope(fbCtx());
FrameBufferContext::FrameBufferScope fbScope(fbCtx(), fb_);
fb_.copyTo(postFB_);
}
{
FrameBufferContext::GLScope mainGlScope(mainFbContext_);
FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
postFB_.copyTo(fb_);
}
// {
// FrameBufferContext::GLScope glScope(fbCtx());
// FrameBufferContext::FrameBufferScope fbScope(fbCtx(), fb_);
// fb_.copyTo(postFB_);
// }
// {
// FrameBufferContext::GLScope mainGlScope(mainFbContext_);
// FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
// postFB_.copyTo(fb_);
// }
#endif
});
}

@ -30,7 +30,7 @@ void NanoVGContext::init() {
FrameBufferContext::GLScope glScope(fbCtx());
screen_ = new nanogui::Screen();
screen_->initialize(fbCtx().getGLFWWindow(), false);
// fbCtx().resizeWindow(fbCtx().getSize());
fbCtx().setWindowSize(fbCtx().size());
context_ = screen_->nvg_context();
// FrameBufferContext::GLScope glScope(fbCtx());
@ -58,16 +58,16 @@ void NanoVGContext::init() {
void NanoVGContext::render(std::function<void(const cv::Size&)> fn) {
run_sync_on_main([&,this](){
#ifdef __EMSCRIPTEN__
{
FrameBufferContext::GLScope mainGlScope(mainFbContext_);
FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
fb_.copyTo(preFB_);
}
{
FrameBufferContext::GLScope nvgGlScope(nvgFbContext_);
FrameBufferContext::FrameBufferScope fbScope(nvgFbContext_, fb_);
preFB_.copyTo(fb_);
}
// {
// FrameBufferContext::GLScope mainGlScope(mainFbContext_);
// FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
// fb_.copyTo(preFB_);
// }
// {
// FrameBufferContext::GLScope nvgGlScope(nvgFbContext_);
// FrameBufferContext::FrameBufferScope fbScope(nvgFbContext_, fb_);
// preFB_.copyTo(fb_);
// }
#endif
{
FrameBufferContext::GLScope glScope(fbCtx());
@ -77,16 +77,16 @@ void NanoVGContext::render(std::function<void(const cv::Size&)> fn) {
fn(fbCtx().size());
}
#ifdef __EMSCRIPTEN__
{
FrameBufferContext::GLScope nvgGlScope(nvgFbContext_);
FrameBufferContext::FrameBufferScope fbScope(nvgFbContext_, fb_);
fb_.copyTo(postFB_);
}
{
FrameBufferContext::GLScope mainGlScope(mainFbContext_);
FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
postFB_.copyTo(fb_);
}
// {
// FrameBufferContext::GLScope nvgGlScope(nvgFbContext_);
// FrameBufferContext::FrameBufferScope fbScope(nvgFbContext_, fb_);
// fb_.copyTo(postFB_);
// }
// {
// FrameBufferContext::GLScope mainGlScope(mainFbContext_);
// FrameBufferContext::FrameBufferScope fbScope(mainFbContext_, fb_);
// postFB_.copyTo(fb_);
// }
#endif
});
}

@ -11,6 +11,7 @@
#ifdef __EMSCRIPTEN__
# include <emscripten.h>
# include <emscripten/bind.h>
# include <SDL/SDL.h>
# include <SDL/SDL_image.h>
# include <SDL/SDL_stdinc.h>
@ -38,6 +39,19 @@ void run_sync_on_main(std::function<void()> fn) {
#endif
}
size_t cnz(const cv::UMat& m) {
    // Counts the non-zero pixels of an image. Multi-channel images are
    // collapsed to a single grey channel first, because countNonZero only
    // accepts single-channel matrices.
    cv::UMat single;
    switch (m.channels()) {
        case 1:
            single = m;
            break;
        case 3:
            cvtColor(m, single, cv::COLOR_BGR2GRAY);
            break;
        case 4:
            cvtColor(m, single, cv::COLOR_BGRA2GRAY);
            break;
        default:
            // Only 1/3/4 channel images are supported.
            assert(false);
    }
    return cv::countNonZero(single);
}
}
#ifdef __EMSCRIPTEN__
Mat read_embedded_image(const string &path) {
@ -303,29 +317,136 @@ Source makeCaptureSource(const string& inputFilename) {
}
#else
using namespace emscripten;
uint8_t* current_frame = nullptr;
extern "C" {
EMSCRIPTEN_KEEPALIVE
// Called from JavaScript (via Module.ccall) to hand over the pre-allocated
// RGBA frame buffer that the browser-side capture code writes video frames
// into (see the Module._malloc call in the sample HTML pages).
// @param frame  pointer into the WASM heap holding width*height*4 bytes
// @param width  frame width in pixels (currently unused on the C++ side)
// @param height frame height in pixels (currently unused on the C++ side)
void v4dSetVideoFramePointer(uint8_t* frame, int width, int height) {
    // The pointer may only be installed once for the lifetime of the module.
    assert(current_frame == nullptr);
    current_frame = frame;
    // Parameters kept for the JS ABI; cast to void to silence
    // unused-parameter warnings.
    (void) width;
    (void) height;
//    memset(current_frame, 127, width * height * 4);
}
}
//EM_JS(void,JScopyVideoFrameGPU,(int width, int height), {
//
// function initFramebuffer(gl) {
// console.log("init fb: " + width + "/" + height);
// if(typeof globalThis.v4dVideoFrameBuffer === 'undefined' || globalThis.v4dVideoFrameBuffer === null) {
// console.log("CREATE FRAMEBUFFER");
// globalThis.v4dVideoFrameBuffer = gl.createFramebuffer();
// }
// if(typeof globalThis.v4dVideoFrameTexture === 'undefined' || globalThis.v4dVideoFrameTexture === null) {
// console.log("CREATE TEXTURE");
// globalThis.v4dVideoFrameTexture = gl.createTexture();
// }
//
// if(typeof globalThis.v4dVideoElement === 'undefined' || globalThis.v4dVideoElement === null) {
// console.log("CREATE VIDEO ELEMENT");
// globalThis.v4dVideoElement = document.querySelector("#video");
// }
//
// gl.bindFramebuffer(gl.READ_FRAMEBUFFER, globalThis.v4dVideoFrameBuffer);
// gl.bindTexture(gl.TEXTURE_2D, globalThis.v4dVideoFrameTexture);
// const level = 0;
// const internalFormat = gl.RGBA;
// const border = 0;
// const srcFormat = gl.RGBA;
// const srcType = gl.UNSIGNED_BYTE;
// gl.texImage2D(
// gl.TEXTURE_2D,
// level,
// internalFormat,
// srcFormat,
// srcType,
// globalThis.v4dVideoElement
// );
//
// gl.framebufferTexture2D(gl.READ_FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, globalThis.v4dVideoFrameTexture, 0);
// return gl.checkFramebufferStatus(gl.READ_FRAMEBUFFER) == gl.FRAMEBUFFER_COMPLETE;
// }
//
// function updateTexture(gl) {
// if(initFramebuffer(gl)) {
// console.log("video texture copy: " + globalThis.v4dVideoFrameTexture + " -> " + mainTexture);
// gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, mainFrameBuffer);
// gl.bindTexture(gl.TEXTURE_2D, mainTexture);
// gl.framebufferTexture2D(gl.DRAW_FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, mainTexture, 0);
//
// gl.blitFramebuffer( 0, 0, globalThis.v4dVideoElement.width, globalThis.v4dVideoElement.height,
// 0, 0, width, height,
// gl.COLOR_BUFFER_BIT, gl.NEAREST);
//// gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl["RGBA"], 0, 0, width, height, 0);
// } else {
// console.log("frambuffer incomplete");
// }
// }
//
// var ctx;
// if (typeof GL !== 'undefined' && typeof Module.ctx !== 'undefined') {
// gl = Module.ctx;
// mainFrameBuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING);
// mainTexture = gl.getFramebufferAttachmentParameter(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.FRAMEBUFFER_ATTACHMENT_OBJECT_NAME);
// console.log(mainFrameBuffer + ":" + mainTexture);
//
// if(Module.doCapture) {
// gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
// updateTexture(gl, globalThis.v4dVideoFrameTexture, globalThis.v4dVideoElement);
// } else {
// console.log("video not playing");
// }
// console.log("6");
// } else {
// console.log("GL unavailable");
// }
//
// gl.flush();
// gl.finish();
//});
EM_JS(void,copyVideoFrame,(int p), {
if(Module.doCapture) {
if(typeof Module.cameraCtx === 'undefined' || Module.cameraCtx === null)
Module.cameraCtx = document.querySelector("#cameraCanvas").getContext('2d', { willReadFrequently: true });
if(typeof Module.videoElement === 'undefined' || Module.videoElement === null)
Module.videoElement = document.querySelector("#video");
Module.cameraCtx.drawImage(Module.videoElement, 0, 0, 1280, 720);
var cameraArrayBuffer = Module.cameraCtx.getImageData(0, 0, 1280, 720);
if(typeof cameraArrayBuffer !== 'undefined') {
Module.HEAPU8.set(cameraArrayBuffer.data, p);
}
} else {
console.log("Camery not ready");
}
});
long acc = 0;
Source makeCaptureSource(int width, int height) {
using namespace std;
static cv::Mat tmp(height, width, CV_8UC4);
return Source([=](cv::UMat& frame) {
try {
frame.create(cv::Size(width, height), CV_8UC3);
std::ifstream fs("v4d_rgba_canvas.raw", std::fstream::in | std::fstream::binary);
fs.seekg(0, std::ios::end);
auto length = fs.tellg();
fs.seekg(0, std::ios::beg);
if (length == (frame.elemSize() + 1) * frame.total()) {
if (current_frame != nullptr) {
cv::Mat tmp(cv::Size(width, height), CV_8UC4, current_frame);
//we have to read from the array to make sure the data "appears". this seems to be some kind of caching problem.
//i tried volatile statements but that didn't work.
cerr << tmp.data[0] << tmp.data[width * height - 1];
cv::Mat v = frame.getMat(cv::ACCESS_WRITE);
fs.read((char*) (tmp.data), tmp.elemSize() * tmp.total());
cvtColor(tmp, v, cv::COLOR_BGRA2RGB);
v.release();
} else if(length == 0) {
// frame.setTo(cv::Scalar(0, 0, 0, 255));
std::cerr << "Error: empty webcam frame received!" << endl;
run_sync_on_main([=](){
copyVideoFrame(reinterpret_cast<int>(current_frame));
});
} else {
// frame.setTo(cv::Scalar(0, 0, 0, 255));
std::cerr << "Error: webcam frame size mismatch!" << endl;
std::cerr << "Nothing captured" << endl;
}
} catch(std::exception& ex) {
cerr << ex.what() << endl;
@ -333,6 +454,7 @@ Source makeCaptureSource(int width, int height) {
return true;
}, 0);
}
#endif
void resizePreserveAspectRatio(const cv::UMat& src, cv::UMat& output, const cv::Size& dstSize, const cv::Scalar& bgcolor) {

@ -128,6 +128,26 @@ GLContext& V4D::glCtx() {
return *glContext_;
}
bool V4D::hasFbCtx() {
    // True once the main framebuffer context has been created.
    return !(mainFbContext_ == nullptr);
}
bool V4D::hasClvaCtx() {
    // True once the CL/VA (video capture/write) context has been created.
    return !(clvaContext_ == nullptr);
}
bool V4D::hasNvgCtx() {
    // True once the NanoVG context has been created.
    return !(nvgContext_ == nullptr);
}
bool V4D::hasNguiCtx() {
    // True once the nanogui context has been created; the GLFW event
    // callbacks in framebuffercontext.cpp guard on this before forwarding
    // events to the nanogui screen.
    return !(nguiContext_ == nullptr);
}
bool V4D::hasGlCtx() {
    // True once the user-facing OpenGL context has been created.
    return !(glContext_ == nullptr);
}
cv::Size V4D::getVideoFrameSize() {
return clvaCtx().getVideoFrameSize();
}
@ -164,7 +184,6 @@ void V4D::nanogui(std::function<void(cv::v4d::FormHelper& form)> fn) {
#ifdef __EMSCRIPTEN__
static void do_frame(void* void_fn_ptr) {
glfwSwapInterval(0);
auto* fn_ptr = reinterpret_cast<std::function<bool()>*>(void_fn_ptr);
if (fn_ptr) {
auto& fn = *fn_ptr;
@ -523,27 +542,27 @@ void V4D::setDefaultKeyboardEventCallback() {
bool V4D::display() {
bool result = true;
if (!offscreen_) {
// {
// run_sync_on_main([this](){
// FrameBufferContext::GLScope glScope(clvaCtx().fbCtx());
// clvaCtx().fbCtx().blitFrameBufferToScreen(viewport(), getWindowSize(), isStretching());
// clvaCtx().fbCtx().blitFrameBufferToScreen(viewport(), clvaCtx().fbCtx().getWindowSize(), isStretching());
// clvaCtx().fbCtx().makeCurrent();
// glfwSwapBuffers(clvaCtx().fbCtx().getGLFWWindow());
// }
// {
// });
// run_sync_on_main([this](){
// FrameBufferContext::GLScope glScope(glCtx().fbCtx());
// glCtx().fbCtx().blitFrameBufferToScreen(viewport(), getWindowSize(), isStretching());
// glCtx().fbCtx().blitFrameBufferToScreen(viewport(), glCtx().fbCtx().getWindowSize(), isStretching());
// glCtx().fbCtx().makeCurrent();
// glfwSwapBuffers(glCtx().fbCtx().getGLFWWindow());
// }
// {
// });
// run_sync_on_main([this](){
// FrameBufferContext::GLScope glScope(nvgCtx().fbCtx());
// nvgCtx().fbCtx().blitFrameBufferToScreen(viewport(), getWindowSize(), isStretching());
// nvgCtx().fbCtx().blitFrameBufferToScreen(viewport(), nvgCtx().fbCtx().getWindowSize(), isStretching());
// nvgCtx().fbCtx().makeCurrent();
// glfwSwapBuffers(nvgCtx().fbCtx().getGLFWWindow());
// }
// });
// run_sync_on_main([this](){
// FrameBufferContext::GLScope glScope(nguiCtx().fbCtx());
// nguiCtx().fbCtx().blitFrameBufferToScreen(viewport(), getWindowSize(), isStretching());
// nguiCtx().fbCtx().blitFrameBufferToScreen(viewport(), nguiCtx().fbCtx().getWindowSize(), isStretching());
// nguiCtx().fbCtx().makeCurrent();
// glfwSwapBuffers(nguiCtx().fbCtx().getGLFWWindow());
// });
@ -553,10 +572,21 @@ bool V4D::display() {
run_sync_on_main([&, this](){
FrameBufferContext::GLScope glScope(fbCtx());
fbCtx().blitFrameBufferToScreen(viewport(), fbCtx().getWindowSize(), isStretching());
#ifndef __EMSCRIPTEN__
glfwSwapBuffers(fbCtx().getGLFWWindow());
#else
emscripten_webgl_commit_frame();
#endif
glfwPollEvents();
result = !glfwWindowShouldClose(getGLFWWindow());
});
#ifdef __EMSCRIPTEN__
run_sync_on_main([this](){
cv::UMat tmp;
cv::v4d::detail::FrameBufferContext::GLScope glScope(fbCtx());
cv::v4d::detail::FrameBufferContext::FrameBufferScope fbScope(fbCtx(), tmp);
});
#endif
}
if(frameCnt_ == (std::numeric_limits<uint64_t>().max() - 1))
frameCnt_ = 0;

Loading…
Cancel
Save