pull/3471/head
kallaballa 2 years ago
parent f68c1a9695
commit e8503a9918
  1. modules/v4d/include/opencv2/v4d/v4d.hpp (4 changed lines)
  2. modules/v4d/src/detail/framebuffercontext.cpp (65 changed lines)
  3. modules/v4d/src/util.cpp (54 changed lines)
  4. modules/v4d/src/v4d.cpp (8 changed lines)

@@ -125,6 +125,7 @@ class CV_EXPORTS V4D {
     std::future<bool> futureReader_;
     std::future<void> futureWriter_;
     std::function<bool(int key, int scancode, int action, int modifiers)> keyEventCb_;
+    cv::Point mousePos_;
     uint64_t frameCnt_ = 0;
     bool showFPS_ = true;
     bool printFPS_ = true;
@@ -381,6 +382,9 @@ private:
     bool keyboard_event(int key, int scancode, int action, int modifiers);
+    cv::Point getMousePosition();
+    void setMousePosition(const cv::Point& pt);
     FrameBufferContext& fbCtx();
     CLVAContext& clvaCtx();
     NanoVGContext& nvgCtx();

@@ -299,33 +299,24 @@ void FrameBufferContext::init() {
     glfwSetWindowUserPointer(getGLFWWindow(), &getV4D());

     glfwSetCursorPosCallback(getGLFWWindow(), [](GLFWwindow* glfwWin, double x, double y) {
-//        V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
-//#ifdef __EMSCRIPTEN__
-//        x *= v4d->pixelRatioX();
-//        y *= v4d->pixelRatioY();
-//#endif
-//
-//        if(v4d->hasNguiCtx()) {
-//            v4d->nguiCtx().screen().cursor_pos_callback_event(x, y);
-//        }
-//#ifndef __EMSCRIPTEN__
-//        auto cursor = v4d->getMousePosition();
-//        auto diff = cursor - cv::Vec2f(x, y);
-//        if (v4d->isMouseDrag()) {
-//            v4d->pan(diff[0], -diff[1]);
-//        }
-//#endif
-//        v4d->setMousePosition(x, y);
+        V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
+#ifdef __EMSCRIPTEN__
+        x *= v4d->pixelRatioX();
+        y *= v4d->pixelRatioY();
+#endif
+        if(v4d->hasNguiCtx()) {
+            v4d->nguiCtx().screen().cursor_pos_callback_event(x, y);
+        }
+        v4d->setMousePosition(cv::Point{x, y});
        }
    );
    glfwSetMouseButtonCallback(getGLFWWindow(),
        [](GLFWwindow* glfwWin, int button, int action, int modifiers) {
-//        V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
-//        if(v4d->hasNguiCtx())
-//            v4d->nguiCtx().screen().mouse_button_callback_event(button, action, modifiers);
-//        if (button == GLFW_MOUSE_BUTTON_RIGHT) {
-//            v4d->setMouseDrag(action == GLFW_PRESS);
-//        }
+        V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
+        if(v4d->hasNguiCtx())
+            v4d->nguiCtx().screen().mouse_button_callback_event(button, action, modifiers);
        }
    );
    glfwSetKeyCallback(getGLFWWindow(),
@@ -350,21 +341,19 @@ void FrameBufferContext::init() {
    );
    glfwSetScrollCallback(getGLFWWindow(),
        [](GLFWwindow* glfwWin, double x, double y) {
-//        V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
-//        std::vector<nanogui::Widget*> widgets;
-//        if(v4d->hasNguiCtx()) {
-//            for (auto* w : v4d->nguiCtx().screen().children()) {
-//                auto pt = v4d->getMousePosition();
-//                auto mousePos = nanogui::Vector2i(pt[0], pt[1]);
-//                if(cv::v4d::detail::contains_absolute(w, mousePos)) {
-//                    v4d->nguiCtx().screen().scroll_callback_event(x, y);
-//                    return;
-//                }
-//            }
-//        }
-//#ifndef __EMSCRIPTEN__
-//        v4d->zoom(y < 0 ? 1.1 : 0.9);
-//#endif
+        V4D* v4d = reinterpret_cast<V4D*>(glfwGetWindowUserPointer(glfwWin));
+        std::vector<nanogui::Widget*> widgets;
+        if(v4d->hasNguiCtx()) {
+            for (auto* w : v4d->nguiCtx().screen().children()) {
+                auto pt = v4d->getMousePosition();
+                auto mousePos = nanogui::Vector2i(pt.x, pt.y);
+                if(cv::v4d::detail::contains_absolute(w, mousePos)) {
+                    v4d->nguiCtx().screen().scroll_callback_event(x, y);
+                    return;
+                }
+            }
+        }
        }
    );
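For context, the re-enabled callbacks rely on GLFW's window user pointer to get from the stateless C callback back to the owning V4D instance. A minimal, stand-alone sketch of that pattern, independent of this patch (the App struct and window title are placeholders):

#include <GLFW/glfw3.h>
#include <cstdio>

// Hypothetical application state, standing in for V4D in this sketch.
struct App {
    double mouseX = 0, mouseY = 0;
};

int main() {
    if (!glfwInit())
        return 1;
    GLFWwindow* win = glfwCreateWindow(640, 480, "callback sketch", nullptr, nullptr);
    if (!win) {
        glfwTerminate();
        return 1;
    }

    App app;
    // Associate the application object with the window...
    glfwSetWindowUserPointer(win, &app);

    // ...and recover it inside the capture-less callback, the same way
    // FrameBufferContext::init() recovers its V4D instance.
    glfwSetCursorPosCallback(win, [](GLFWwindow* w, double x, double y) {
        App* a = static_cast<App*>(glfwGetWindowUserPointer(w));
        a->mouseX = x;
        a->mouseY = y;
    });

    while (!glfwWindowShouldClose(win))
        glfwPollEvents();

    std::printf("last cursor position: %.0f/%.0f\n", app.mouseX, app.mouseY);
    glfwDestroyWindow(win);
    glfwTerminate();
    return 0;
}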

@@ -309,6 +309,34 @@ Source makeVaSource(const string& inputFilename, const int vaDeviceIndex) {
         return !frame.empty();
     }, fps);
 }
+
+Sink makeAnyHWSink(const string& outputFilename, const int fourcc, const float fps,
+        const cv::Size& frameSize) {
+    cv::Ptr<cv::VideoWriter> writer = new cv::VideoWriter(outputFilename, cv::CAP_FFMPEG,
+            fourcc, fps, frameSize, { cv::VIDEOWRITER_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_ANY });
+
+    if(writer->isOpened()) {
+        return Sink([=](const cv::UMat& frame) {
+            static cv::UMat resized;
+            cv::resize(frame, resized, frameSize);
+            (*writer) << resized;
+            return writer->isOpened();
+        });
+    } else {
+        return Sink();
+    }
+}
+
+Source makeAnyHWSource(const string& inputFilename) {
+    cv::Ptr<cv::VideoCapture> capture = new cv::VideoCapture(inputFilename, cv::CAP_FFMPEG, {
+            cv::CAP_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_ANY });
+    float fps = capture->get(cv::CAP_PROP_FPS);
+
+    return Source([=](cv::UMat& frame) {
+        (*capture) >> frame;
+        return !frame.empty();
+    }, fps);
+}
+
 #endif

 #ifndef __EMSCRIPTEN__
@@ -316,6 +344,11 @@ Sink makeWriterSink(const string& outputFilename, const int fourcc, const float
        const cv::Size& frameSize) {
     if (isIntelVaSupported()) {
         return makeVaSink(outputFilename, fourcc, fps, frameSize, 0);
+    } else {
+        try {
+            return makeAnyHWSink(outputFilename, fourcc, fps, frameSize);
+        } catch(...) {
+        }
     }

     cv::Ptr<cv::VideoWriter> writer = new cv::VideoWriter(outputFilename, cv::CAP_FFMPEG,
@@ -336,6 +369,11 @@ Sink makeWriterSink(const string& outputFilename, const int fourcc, const float
 Source makeCaptureSource(const string& inputFilename) {
     if (isIntelVaSupported()) {
         return makeVaSource(inputFilename, 0);
+    } else {
+        try {
+            return makeAnyHWSource(inputFilename);
+        } catch(...) {
+        }
     }

     cv::Ptr<cv::VideoCapture> capture = new cv::VideoCapture(inputFilename, cv::CAP_FFMPEG);
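The fallback added here uses the open-parameter lists that cv::VideoCapture and cv::VideoWriter accept for requesting hardware acceleration from the FFmpeg backend. A rough, self-contained sketch of that API usage, independent of V4D (file names and the fourcc are placeholders; whether acceleration is actually used depends on the OpenCV build and the drivers):

#include <opencv2/videoio.hpp>
#include <iostream>
#include <string>

int main() {
    // Placeholder file names, for illustration only.
    const std::string in = "input.mkv", out = "output.mkv";

    // Ask the FFmpeg backend for any available hardware decoder.
    cv::VideoCapture cap(in, cv::CAP_FFMPEG,
            { cv::CAP_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_ANY });
    if (!cap.isOpened()) {
        std::cerr << "could not open " << in << std::endl;
        return 1;
    }

    const double fps = cap.get(cv::CAP_PROP_FPS);
    const cv::Size size(int(cap.get(cv::CAP_PROP_FRAME_WIDTH)),
                        int(cap.get(cv::CAP_PROP_FRAME_HEIGHT)));

    // Ask for any available hardware encoder the same way (placeholder fourcc).
    cv::VideoWriter writer(out, cv::CAP_FFMPEG,
            cv::VideoWriter::fourcc('V', 'P', '9', '0'), fps, size,
            { cv::VIDEOWRITER_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_ANY });
    if (!writer.isOpened()) {
        std::cerr << "could not open " << out << " for writing" << std::endl;
        return 1;
    }

    cv::UMat frame;
    while (cap.read(frame))
        writer.write(frame);

    return 0;
}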
@@ -425,24 +463,8 @@ public:
             return true;
         }
-//        cerr << "not captured" << endl;
         return false;
     }
-
-    void captureCPU() {
-        EM_ASM(
-            if(globalThis.doCapture) {
-                if(typeof globalThis.v4dCopyCanvasContext === 'undefined' || globalThis.v4dCopyCanvasContext === null)
-                    globalThis.v4dCopyCanvasContext = globalThis.v4dCopyCanvasElement.getContext('2d', { willReadFrequently: true });
-                if(typeof globalThis.v4dFrameData === 'undefined' || globalThis.v4dFrameData === null)
-                    globalThis.v4dFrameData = Module._malloc(width_ * height_ * 4);
-                globalThis.v4dCopyCanvasElement.drawImage(globalThis.v4dVideoElement, 0, 0, 1280, 720);
-                var cameraArrayBuffer = globalThis.v4dCopyCanvasContext.getImageData(0, 0, 1280, 720);
-                Module.HEAPU8.set(cameraArrayBuffer.data, globalThis.v4dFrameData);
-            }
-        );
-    }
 };

 cv::Ptr<HTML5Capture> capture = nullptr;

@@ -69,6 +69,14 @@ bool V4D::keyboard_event(int key, int scancode, int action, int modifiers) {
     return nguiCtx().screen().keyboard_event(key, scancode, action, modifiers);
 }
+
+cv::Point V4D::getMousePosition() {
+    return mousePos_;
+}
+
+void V4D::setMousePosition(const cv::Point& pt) {
+    mousePos_ = pt;
+}
+
 FrameBufferContext& V4D::fbCtx() {
     assert(mainFbContext_ != nullptr);
     return *mainFbContext_;
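Since the stored position is a plain cv::Point, downstream code can treat it like any other OpenCV coordinate. A small, purely illustrative helper (not part of this patch) that marks the last known cursor position on a frame might look like this:

#include <opencv2/imgproc.hpp>

// Hypothetical helper: draw a crosshair at the last known cursor position,
// e.g. a point obtained from something like v4d->getMousePosition().
static void drawCursorMarker(cv::UMat& frame, const cv::Point& pos) {
    if (pos.x >= 0 && pos.y >= 0 && pos.x < frame.cols && pos.y < frame.rows) {
        cv::drawMarker(frame, pos, cv::Scalar(0, 255, 0, 255),
                cv::MARKER_CROSS, 20, 2, cv::LINE_AA);
    }
}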
