ported to the new api

pull/3471/head
kallaballa 2 years ago
parent e657896a7c
commit f4b8f9ec84
  1. 3
      Makefile
  2. 154
      src/beauty/beauty-demo.cpp
  3. 22
      src/common/subsystems.hpp
  4. 66
      src/font/font-demo.cpp
  5. 84
      src/nanovg/nanovg-demo.cpp
  6. 62
      src/optflow/optflow-demo.cpp
  7. 56
      src/pedestrian/pedestrian-demo.cpp
  8. 39
      src/tetra/tetra-demo.cpp
  9. 4
      src/video/Makefile
  10. 72
      src/video/video-demo.cpp
  11. 46
      src/video2/Makefile
  12. 159
      src/video2/video-demo2.cpp

@ -50,7 +50,6 @@ dirs:
${MAKE} -C src/beauty/ ${MAKEFLAGS} CXX=${CXX} ${MAKECMDGOALS} ${MAKE} -C src/beauty/ ${MAKEFLAGS} CXX=${CXX} ${MAKECMDGOALS}
${MAKE} -C src/font/ ${MAKEFLAGS} CXX=${CXX} ${MAKECMDGOALS} ${MAKE} -C src/font/ ${MAKEFLAGS} CXX=${CXX} ${MAKECMDGOALS}
${MAKE} -C src/pedestrian/ ${MAKEFLAGS} CXX=${CXX} ${MAKECMDGOALS} ${MAKE} -C src/pedestrian/ ${MAKEFLAGS} CXX=${CXX} ${MAKECMDGOALS}
${MAKE} -C src/video2/ ${MAKEFLAGS} CXX=${CXX} ${MAKECMDGOALS}
debian-release: debian-release:
${MAKE} -C src/tetra/ ${MAKEFLAGS} CXX=${CXX} release ${MAKE} -C src/tetra/ ${MAKEFLAGS} CXX=${CXX} release
@ -60,7 +59,6 @@ debian-release:
${MAKE} -C src/beauty/ ${MAKEFLAGS} CXX=${CXX} release ${MAKE} -C src/beauty/ ${MAKEFLAGS} CXX=${CXX} release
${MAKE} -C src/font/ ${MAKEFLAGS} CXX=${CXX} release ${MAKE} -C src/font/ ${MAKEFLAGS} CXX=${CXX} release
${MAKE} -C src/pedestrian/ ${MAKEFLAGS} CXX=${CXX} release ${MAKE} -C src/pedestrian/ ${MAKEFLAGS} CXX=${CXX} release
${MAKE} -C src/video2/ ${MAKEFLAGS} CXX=${CXX} release
debian-clean: debian-clean:
${MAKE} -C src/tetra/ ${MAKEFLAGS} CXX=${CXX} clean ${MAKE} -C src/tetra/ ${MAKEFLAGS} CXX=${CXX} clean
@ -70,7 +68,6 @@ debian-clean:
${MAKE} -C src/beauty/ ${MAKEFLAGS} CXX=${CXX} clean ${MAKE} -C src/beauty/ ${MAKEFLAGS} CXX=${CXX} clean
${MAKE} -C src/font/ ${MAKEFLAGS} CXX=${CXX} clean ${MAKE} -C src/font/ ${MAKEFLAGS} CXX=${CXX} clean
${MAKE} -C src/pedestrian/ ${MAKEFLAGS} CXX=${CXX} clean ${MAKE} -C src/pedestrian/ ${MAKEFLAGS} CXX=${CXX} clean
${MAKE} -C src/video2/ ${MAKEFLAGS} CXX=${CXX} clean
install: ${TARGET} install: ${TARGET}
true true

@ -215,34 +215,37 @@ int main(int argc, char **argv) {
cv::VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL, 1 cv::VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL, 1
}); });
//BGRA
cv::UMat frameBuffer;
//BGR //BGR
cv::UMat videoFrameIn, resized, down, faceBgMask, diff, blurred, reduced, sharpened, masked; cv::UMat rgb, resized, down, faceBgMask, diff, blurred, reduced, sharpened, masked;
cv::UMat videoFrameOut(HEIGHT, WIDTH, CV_8UC3); cv::UMat frameOut(HEIGHT, WIDTH, CV_8UC3);
cv::UMat lhalf(HEIGHT * SCALE, WIDTH * SCALE, CV_8UC3); cv::UMat lhalf(HEIGHT * SCALE, WIDTH * SCALE, CV_8UC3);
cv::UMat rhalf(lhalf.size(), lhalf.type()); cv::UMat rhalf(lhalf.size(), lhalf.type());
//GREY //GREY
cv::UMat downGrey, faceBgMaskGrey, faceBgMaskInvGrey, faceFgMaskGrey, resMaskGrey; cv::UMat downGrey, faceBgMaskGrey, faceBgMaskInvGrey, faceFgMaskGrey, resMaskGrey;
//BGR-Float //BGR-Float
cv::UMat videoFrameOutFloat; cv::UMat frameOutFloat;
cv::Mat faces; cv::Mat faces;
vector<cv::Rect> faceRects; vector<cv::Rect> faceRects;
vector<vector<cv::Point2f>> shapes; vector<vector<cv::Point2f>> shapes;
vector<FaceFeatures> featuresList; vector<FaceFeatures> featuresList;
va::bind();
while (true) { while (true) {
capture >> videoFrameIn; bool success = va::read([&capture](cv::UMat& videoFrame){
if (videoFrameIn.empty()) //videoFrame will be converted to BGRA and stored in the frameBuffer.
break; capture >> videoFrame;
});
cv::resize(videoFrameIn, resized, cv::Size(WIDTH, HEIGHT)); if(!success)
cv::resize(videoFrameIn, down, cv::Size(0, 0), SCALE, SCALE); break;
cvtColor(down, downGrey, cv::COLOR_BGRA2GRAY);
detector->detect(down, faces); cl::compute([&](cv::UMat& frameBuffer){
cvtColor(frameBuffer,rgb,cv::COLOR_BGRA2RGB);
cv::resize(rgb, resized, cv::Size(WIDTH, HEIGHT));
cv::resize(rgb, down, cv::Size(0, 0), SCALE, SCALE);
cvtColor(down, downGrey, cv::COLOR_BGRA2GRAY);
detector->detect(down, faces);
});
faceRects.clear(); faceRects.clear();
for (int i = 0; i < faces.rows; i++) { for (int i = 0; i < faces.rows; i++) {
@ -251,79 +254,80 @@ int main(int argc, char **argv) {
shapes.clear(); shapes.clear();
gl::bind();
if (!faceRects.empty() && facemark->fit(downGrey, faceRects, shapes)) { if (!faceRects.empty() && facemark->fit(downGrey, faceRects, shapes)) {
featuresList.clear(); featuresList.clear();
for (size_t i = 0; i < faceRects.size(); ++i) { for (size_t i = 0; i < faceRects.size(); ++i) {
featuresList.push_back(FaceFeatures(faceRects[i], shapes[i], float(down.size().width) / frameBuffer.size().width)); featuresList.push_back(FaceFeatures(faceRects[i], shapes[i], float(down.size().width) / WIDTH));
} }
nvg::begin(); nvg::render([&](int w, int h) {
nvg::clear(); nvg::clear();
//Draw the face background mask (= face oval) //Draw the face background mask (= face oval)
draw_face_bg_mask(featuresList); draw_face_bg_mask(featuresList);
nvg::end(); });
cl::acquire_from_gl(frameBuffer); cl::compute([&](cv::UMat &frameBuffer) {
//Convert/Copy the mask //Convert/Copy the mask
cvtColor(frameBuffer, faceBgMask, cv::COLOR_BGRA2BGR); cvtColor(frameBuffer, faceBgMask, cv::COLOR_BGRA2BGR);
cvtColor(frameBuffer, faceBgMaskGrey, cv::COLOR_BGRA2GRAY); cvtColor(frameBuffer, faceBgMaskGrey, cv::COLOR_BGRA2GRAY);
cl::release_to_gl(frameBuffer); });
nvg::begin(); nvg::render([&](int w, int h) {
nvg::clear(); nvg::clear();
//Draw the face forground mask (= eyes and outer lips) //Draw the face forground mask (= eyes and outer lips)
draw_face_fg_mask(featuresList); draw_face_fg_mask(featuresList);
nvg::end(); });
cl::acquire_from_gl(frameBuffer); cl::compute([&](cv::UMat &frameBuffer) {
//Convert/Copy the mask //Convert/Copy the mask
cvtColor(frameBuffer, faceFgMaskGrey, cv::COLOR_BGRA2GRAY); cvtColor(frameBuffer, faceFgMaskGrey, cv::COLOR_BGRA2GRAY);
//Dilate the face forground mask to make eyes and mouth areas wider //Dilate the face forground mask to make eyes and mouth areas wider
int morph_size = 1; int morph_size = 1;
cv::Mat element = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(2 * morph_size + 1, 2 * morph_size + 1), cv::Point(morph_size, morph_size)); cv::Mat element = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(2 * morph_size + 1, 2 * morph_size + 1), cv::Point(morph_size, morph_size));
cv::morphologyEx(faceFgMaskGrey, faceFgMaskGrey, cv::MORPH_DILATE, element, cv::Point(element.cols >> 1, element.rows >> 1), DILATE_ITERATIONS, cv::BORDER_CONSTANT, cv::morphologyDefaultBorderValue()); cv::morphologyEx(faceFgMaskGrey, faceFgMaskGrey, cv::MORPH_DILATE, element, cv::Point(element.cols >> 1, element.rows >> 1), DILATE_ITERATIONS, cv::BORDER_CONSTANT, cv::morphologyDefaultBorderValue());
cv::subtract(faceBgMaskGrey, faceFgMaskGrey, faceBgMaskGrey); cv::subtract(faceBgMaskGrey, faceFgMaskGrey, faceBgMaskGrey);
cv::bitwise_not(faceBgMaskGrey, faceBgMaskInvGrey); cv::bitwise_not(faceBgMaskGrey, faceBgMaskInvGrey);
unsharp_mask(resized, sharpened, UNSHARP_STRENGTH); unsharp_mask(resized, sharpened, UNSHARP_STRENGTH);
reduce_shadows(resized, reduced, REDUCE_SHADOW); reduce_shadows(resized, reduced, REDUCE_SHADOW);
blender.prepare(cv::Rect(0,0, WIDTH,HEIGHT)); blender.prepare(cv::Rect(0, 0, WIDTH, HEIGHT));
blender.feed(reduced, faceBgMaskGrey, cv::Point(0,0)); blender.feed(reduced, faceBgMaskGrey, cv::Point(0, 0));
blender.feed(sharpened, faceBgMaskInvGrey, cv::Point(0,0)); blender.feed(sharpened, faceBgMaskInvGrey, cv::Point(0, 0));
blender.blend(videoFrameOutFloat, resMaskGrey); blender.blend(frameOutFloat, resMaskGrey);
videoFrameOutFloat.convertTo(videoFrameOut, CV_8U, 1.0); frameOutFloat.convertTo(frameOut, CV_8U, 1.0);
cv::boxFilter(videoFrameOut, blurred, -1, cv::Size(BLUR_KERNEL_SIZE, BLUR_KERNEL_SIZE), cv::Point(-1, -1), true, cv::BORDER_REPLICATE); cv::boxFilter(frameOut, blurred, -1, cv::Size(BLUR_KERNEL_SIZE, BLUR_KERNEL_SIZE), cv::Point(-1, -1), true, cv::BORDER_REPLICATE);
cv::subtract(blurred, resized, diff); cv::subtract(blurred, resized, diff);
bitwise_and(diff, faceBgMask, masked); bitwise_and(diff, faceBgMask, masked);
cv::add(videoFrameOut, masked, reduced); cv::add(frameOut, masked, reduced);
cv::resize(resized, lhalf, cv::Size(0, 0), 0.5, 0.5); cv::resize(resized, lhalf, cv::Size(0, 0), 0.5, 0.5);
cv::resize(reduced, rhalf, cv::Size(0, 0), 0.5, 0.5); cv::resize(reduced, rhalf, cv::Size(0, 0), 0.5, 0.5);
videoFrameOut = cv::Scalar::all(0); frameOut = cv::Scalar::all(0);
lhalf.copyTo(videoFrameOut(cv::Rect(0, 0, lhalf.size().width, lhalf.size().height))); lhalf.copyTo(frameOut(cv::Rect(0, 0, lhalf.size().width, lhalf.size().height)));
rhalf.copyTo(videoFrameOut(cv::Rect(rhalf.size().width, 0, rhalf.size().width, rhalf.size().height))); rhalf.copyTo(frameOut(cv::Rect(rhalf.size().width, 0, rhalf.size().width, rhalf.size().height)));
cvtColor(videoFrameOut, frameBuffer, cv::COLOR_BGR2RGBA); cvtColor(frameOut, frameBuffer, cv::COLOR_BGR2RGBA);
cl::release_to_gl(frameBuffer); });
} else { } else {
cl::acquire_from_gl(frameBuffer); cl::compute([&](cv::UMat &frameBuffer) {
videoFrameOut = cv::Scalar::all(0); frameOut = cv::Scalar::all(0);
cv::resize(resized, lhalf, cv::Size(0, 0), 0.5, 0.5); cv::resize(resized, lhalf, cv::Size(0, 0), 0.5, 0.5);
lhalf.copyTo(videoFrameOut(cv::Rect(0, 0, lhalf.size().width, lhalf.size().height))); lhalf.copyTo(frameOut(cv::Rect(0, 0, lhalf.size().width, lhalf.size().height)));
lhalf.copyTo(videoFrameOut(cv::Rect(lhalf.size().width, 0, lhalf.size().width, lhalf.size().height))); lhalf.copyTo(frameOut(cv::Rect(lhalf.size().width, 0, lhalf.size().width, lhalf.size().height)));
cvtColor(videoFrameOut, frameBuffer, cv::COLOR_BGR2RGBA); cvtColor(frameOut, frameBuffer, cv::COLOR_BGR2RGBA);
cl::release_to_gl(frameBuffer); });
} }
if (!app::display()) if (!app::display())
break; break;
va::bind(); va::write([&writer](const cv::UMat& videoFrame){
writer << videoFrameOut; //videoFrame is the frameBuffer converted to BGR. Ready to be written.
writer << videoFrame;
});
app::print_fps(); app::print_fps();
} }

@ -458,14 +458,15 @@ void end() {
GL_CHECK(glBindRenderbuffer(GL_RENDERBUFFER, 0)); GL_CHECK(glBindRenderbuffer(GL_RENDERBUFFER, 0));
GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, 0)); GL_CHECK(glBindFramebuffer(GL_FRAMEBUFFER, 0));
//glFlush seems enough
GL_CHECK(glFlush()); GL_CHECK(glFlush());
GL_CHECK(glFinish()); // GL_CHECK(glFinish());
} }
void render(std::function<void()> fn) { void render(std::function<void(int,int)> fn) {
gl::bind(); gl::bind();
gl::begin(); gl::begin();
fn(); fn(app::window_width, app::window_height);
gl::end(); gl::end();
} }
@ -523,7 +524,7 @@ void release_to_gl(cv::UMat& m) {
gl::end(); gl::end();
} }
void work(std::function<void(cv::UMat& m)> fn) { void compute(std::function<void(cv::UMat& m)> fn) {
gl::bind(); gl::bind();
acquire_from_gl(frameBuffer); acquire_from_gl(frameBuffer);
fn(frameBuffer); fn(frameBuffer);
@ -573,8 +574,8 @@ void begin() {
w = ws.first; w = ws.first;
h = ws.second; h = ws.second;
#else #else
w = glfw::framebuffer_width; w = app::window_width;
h = glfw::framebuffer_height; h = app::window_height;
#endif #endif
GL_CHECK(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, kb::gl::frame_buf)); GL_CHECK(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, kb::gl::frame_buf));
nvgSave(vg); nvgSave(vg);
@ -588,10 +589,10 @@ void end() {
gl::end(); gl::end();
} }
void render(std::function<void()> fn) { void render(std::function<void(int,int)> fn) {
gl::bind(); gl::bind();
nvg::begin(); nvg::begin();
fn(); fn(app::window_width, app::window_height);
nvg::end(); nvg::end();
} }
@ -629,14 +630,17 @@ void bind() {
va::context.bind(); va::context.bind();
} }
void read(std::function<void(cv::UMat&)> fn) { bool read(std::function<void(cv::UMat&)> fn) {
va::bind(); va::bind();
fn(va::videoFrame); fn(va::videoFrame);
gl::bind(); gl::bind();
cl::acquire_from_gl(cl::frameBuffer); cl::acquire_from_gl(cl::frameBuffer);
if(va::videoFrame.empty())
return false;
//Color-conversion from RGB to BGRA (OpenCL) //Color-conversion from RGB to BGRA (OpenCL)
cv::cvtColor(va::videoFrame, cl::frameBuffer, cv::COLOR_RGB2BGRA); cv::cvtColor(va::videoFrame, cl::frameBuffer, cv::COLOR_RGB2BGRA);
cl::release_to_gl(cl::frameBuffer); cl::release_to_gl(cl::frameBuffer);
return true;
} }
void write(std::function<void(const cv::UMat&)> fn) { void write(std::function<void(const cv::UMat&)> fn) {

@ -53,9 +53,7 @@ int main(int argc, char **argv) {
va::copy(); va::copy();
//BGRA //BGRA
cv::UMat frameBuffer, stars, warped; cv::UMat stars, warped;
//BGR
cv::UMat videoFrame;
//The text to display //The text to display
string text = cv::getBuildInformation(); string text = cv::getBuildInformation();
@ -72,12 +70,8 @@ int main(int argc, char **argv) {
cv::Mat tm = cv::getPerspectiveTransform(quad1, quad2); cv::Mat tm = cv::getPerspectiveTransform(quad1, quad2);
cv::RNG rng(cv::getTickCount()); cv::RNG rng(cv::getTickCount());
//Activate the OpenCL context for OpenGL. nvg::render([&](int w, int h) {
gl::bind(); nvg::clear();
//Begin a nanovg frame.
nvg::begin();
nvg::clear(0,0,0,1);
{
//draw stars //draw stars
using kb::nvg::vg; using kb::nvg::vg;
int numStars = rng.uniform(MIN_STAR_COUNT, MAX_STAR_COUNT); int numStars = rng.uniform(MIN_STAR_COUNT, MAX_STAR_COUNT);
@ -88,31 +82,21 @@ int main(int argc, char **argv) {
nvgCircle(vg, rng.uniform(0, WIDTH) , rng.uniform(0, HEIGHT), MAX_STAR_SIZE); nvgCircle(vg, rng.uniform(0, WIDTH) , rng.uniform(0, HEIGHT), MAX_STAR_SIZE);
nvgStroke(vg); nvgStroke(vg);
} }
} });
//End a nanovg frame
nvg::end();
//Aquire frame buffer from OpenGL. cl::compute([&](cv::UMat& frameBuffer){
cl::acquire_from_gl(frameBuffer); frameBuffer.copyTo(stars);
//Copy the star rendering. });
frameBuffer.copyTo(stars);
//Release frame buffer to OpenGL.
cl::release_to_gl(frameBuffer);
//Frame count. //Frame count.
size_t cnt = 0; size_t cnt = 0;
//Y-position of the current line in pixels. //Y-position of the current line in pixels.
float y; float y;
while (true) { while (true) {
y = 0; y = 0;
gl::bind();
//Begin a nanovg frame. nvg::render([&](int w, int h) {
nvg::begin(); nvg::clear();
//Clear the screen with black.
nvg::clear();
{
using kb::nvg::vg; using kb::nvg::vg;
nvgBeginPath(vg); nvgBeginPath(vg);
nvgFontSize(vg, FONT_SIZE); nvgFontSize(vg, FONT_SIZE);
@ -139,38 +123,32 @@ int main(int argc, char **argv) {
break; break;
} }
} }
} });
//End a nanovg frame
nvg::end();
if(y == 0) { if(y == 0) {
//Nothing drawn, exit. //Nothing drawn, exit.
break; break;
} }
//Aquire frame buffer from OpenGL. cl::compute([&](cv::UMat& frameBuffer){
cl::acquire_from_gl(frameBuffer); //Pseudo 3D text effect.
//Pseudo 3D text effect. cv::warpPerspective(frameBuffer, warped, tm, frameBuffer.size(), cv::INTER_LINEAR, cv::BORDER_CONSTANT, cv::Scalar());
cv::warpPerspective(frameBuffer, warped, tm, videoFrame.size(), cv::INTER_LINEAR, cv::BORDER_CONSTANT, cv::Scalar()); //Combine layers
//Combine layers cv::add(stars, warped, frameBuffer);
cv::add(stars, warped, frameBuffer); });
//Color-conversion from BGRA to RGB. OpenCV/OpenCL.
cv::cvtColor(frameBuffer, videoFrame, cv::COLOR_BGRA2RGB);
//Transfer buffer ownership back to OpenGL.
cl::release_to_gl(frameBuffer);
//If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed. //If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed.
if(!app::display()) if(!app::display())
break; break;
//Activate the OpenCL context for VAAPI. va::write([&writer](const cv::UMat& videoFrame){
va::bind(); //videoFrame is the frameBuffer converted to BGR. Ready to be written.
//Encode the frame using VAAPI on the GPU. writer << videoFrame;
writer << videoFrame; });
++cnt; ++cnt;
//Wrap the cnt around if it becomes to big. //Wrap the cnt around if it becomes to big.
if(cnt == std::numeric_limits<size_t>().max()) if(cnt > std::numeric_limits<size_t>().max() / 2.0)
cnt = 0; cnt = 0;
app::print_fps(); app::print_fps();

@ -151,23 +151,12 @@ int main(int argc, char **argv) {
cv::VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL, 1 cv::VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL, 1
}); });
cv::UMat frameBuffer; cv::UMat rgb;
cv::UMat videoFrame; cv::UMat bgra;
cv::UMat videoFrameBGRA; cv::UMat hsv;
cv::UMat videoFrameHSV;
cv::UMat hueChannel; cv::UMat hueChannel;
//Bind the OpenCL context for VAAPI
va::bind();
while (true) { while (true) {
//Decode a frame on the GPU using VAAPI
capture >> videoFrame;
if (videoFrame.empty()) {
cerr << "End of stream. Exiting" << endl;
break;
}
//we use time to calculated the current hue //we use time to calculated the current hue
float time = cv::getTickCount() / cv::getTickFrequency(); float time = cv::getTickCount() / cv::getTickFrequency();
//nanovg hue fading between 0.0f and 1.0f //nanovg hue fading between 0.0f and 1.0f
@ -175,48 +164,45 @@ int main(int argc, char **argv) {
//opencv hue fading between 0 and 255 //opencv hue fading between 0 and 255
int cvHue = (42 + uint8_t(std::round(((1.0 - sinf(time*0.12f))+1.0f) * 128.0))) % 255; int cvHue = (42 + uint8_t(std::round(((1.0 - sinf(time*0.12f))+1.0f) * 128.0))) % 255;
//Color-conversion from RGB to HSV. (OpenCL) bool success = va::read([&capture](cv::UMat& videoFrame){
cv::cvtColor(videoFrame, videoFrameHSV, cv::COLOR_RGB2HSV_FULL); //videoFrame will be converted to BGRA and stored in the frameBuffer.
//Extract the hue channel capture >> videoFrame;
cv::extractChannel(videoFrameHSV, hueChannel, 0); });
//Set the current hue
hueChannel.setTo(cvHue);
//Insert the hue channel
cv::insertChannel(hueChannel, videoFrameHSV, 0);
//Color-conversion from HSV to RGB. (OpenCL)
cv::cvtColor(videoFrameHSV, videoFrame, cv::COLOR_HSV2RGB_FULL);
//Color-conversion from RGB to BGRA. (OpenCL)
cv::cvtColor(videoFrame, videoFrameBGRA, cv::COLOR_RGB2BGRA);
//Bind the OpenCL context for OpenGL
gl::bind();
//Aquire the framebuffer so we can write the video frame to it
cl::acquire_from_gl(frameBuffer);
//Resize the frame if necessary. (OpenCL)
cv::resize(videoFrameBGRA, frameBuffer, cv::Size(WIDTH, HEIGHT));
//Release the frame buffer for use by OpenGL
cl::release_to_gl(frameBuffer);
//Render using nanovg if(!success)
nvg::begin(); break;
drawColorwheel(nvg::vg, WIDTH - 300, HEIGHT - 300, 250.0f, 250.0f, nvgHue);
nvg::end(); cl::compute([&](cv::UMat& frameBuffer){
cvtColor(frameBuffer,rgb,cv::COLOR_BGRA2RGB);
//Color-conversion from RGB to HSV. (OpenCL)
cv::cvtColor(rgb, hsv, cv::COLOR_RGB2HSV_FULL);
//Extract the hue channel
cv::extractChannel(hsv, hueChannel, 0);
//Set the current hue
hueChannel.setTo(cvHue);
//Insert the hue channel
cv::insertChannel(hueChannel, hsv, 0);
//Color-conversion from HSV to RGB. (OpenCL)
cv::cvtColor(hsv, rgb, cv::COLOR_HSV2RGB_FULL);
//Color-conversion from RGB to BGRA. (OpenCL)
cv::cvtColor(rgb, bgra, cv::COLOR_RGB2BGRA);
//Resize the frame if necessary. (OpenCL)
cv::resize(bgra, frameBuffer, cv::Size(WIDTH, HEIGHT));
});
//Aquire frame buffer from OpenGL //Render using nanovg
cl::acquire_from_gl(frameBuffer); nvg::render([&](int w, int h) {
//Color-conversion from BGRA to RGB. OpenCV/OpenCL. drawColorwheel(nvg::vg, w - 300, h - 300, 250.0f, 250.0f, nvgHue);
cv::cvtColor(frameBuffer, videoFrame, cv::COLOR_BGRA2RGB); });
//Transfer buffer ownership back to OpenGL
cl::release_to_gl(frameBuffer);
//If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed. //If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed.
if(!app::display()) if(!app::display())
break; break;
//Activate the OpenCL context for VAAPI va::write([&writer](const cv::UMat& videoFrame){
va::bind(); //videoFrame is the frameBuffer converted to BGR. Ready to be written.
//Encode the frame using VAAPI on the GPU. writer << videoFrame;
writer << videoFrame; });
app::print_fps(); app::print_fps();
} }

@ -212,53 +212,61 @@ int main(int argc, char **argv) {
cv::Size frameBufferSize(WIDTH, HEIGHT); cv::Size frameBufferSize(WIDTH, HEIGHT);
cv::Size scaledSize(WIDTH * FG_SCALE, HEIGHT * FG_SCALE); cv::Size scaledSize(WIDTH * FG_SCALE, HEIGHT * FG_SCALE);
//BGRA //BGRA
cv::UMat frameBuffer, background, foreground(frameBufferSize, CV_8UC4, cv::Scalar::all(0)); cv::UMat background, foreground(frameBufferSize, CV_8UC4, cv::Scalar::all(0));
//RGB //RGB
cv::UMat videoFrame, resized, down; cv::UMat rgb, resized, down;
//GREY //GREY
cv::UMat backgroundGrey, downPrevGrey, downNextGrey, downMotionMaskGrey; cv::UMat backgroundGrey, downPrevGrey, downNextGrey, downMotionMaskGrey;
vector<cv::Point2f> detectedPoints; vector<cv::Point2f> detectedPoints;
va::bind();
while (true) { while (true) {
capture >> videoFrame; bool success = va::read([&capture](cv::UMat& videoFrame){
if (videoFrame.empty()) //videoFrame will be converted to BGRA and stored in the frameBuffer.
capture >> videoFrame;
});
if(!success)
break; break;
cv::resize(videoFrame, resized, frameBufferSize); cl::compute([&](cv::UMat& frameBuffer){
cv::resize(videoFrame, down, scaledSize); cvtColor(frameBuffer,rgb,cv::COLOR_BGRA2RGB);
cv::cvtColor(resized, background, cv::COLOR_RGB2BGRA); cv::resize(rgb, resized, frameBufferSize);
cv::cvtColor(down, downNextGrey, cv::COLOR_RGB2GRAY); cv::resize(rgb, down, scaledSize);
//Subtract the background to create a motion mask cv::cvtColor(resized, background, cv::COLOR_RGB2BGRA);
prepare_motion_mask(downNextGrey, downMotionMaskGrey); cv::cvtColor(down, downNextGrey, cv::COLOR_RGB2GRAY);
//Subtract the background to create a motion mask
prepare_motion_mask(downNextGrey, downMotionMaskGrey);
});
//Detect trackable points in the motion mask //Detect trackable points in the motion mask
detect_points(downMotionMaskGrey, detectedPoints); detect_points(downMotionMaskGrey, detectedPoints);
gl::bind(); nvg::render([&](int w, int h) {
nvg::begin(); nvg::clear();
nvg::clear(); if (!downPrevGrey.empty()) {
if (!downPrevGrey.empty()) { //We don't want the algorithm to get out of hand when there is a scene change, so we suppress it when we detect one.
//We don't want the algorithm to get out of hand when there is a scene change, so we suppress it when we detect one. if (!detect_scene_change(downMotionMaskGrey, SCENE_CHANGE_THRESH, SCENE_CHANGE_THRESH_DIFF)) {
if (!detect_scene_change(downMotionMaskGrey, SCENE_CHANGE_THRESH, SCENE_CHANGE_THRESH_DIFF)) { //Visualize the sparse optical flow using nanovg
//Visualize the sparse optical flow using nanovg visualize_sparse_optical_flow(downPrevGrey, downNextGrey, detectedPoints, FG_SCALE, MAX_STROKE, EFFECT_COLOR, MAX_POINTS, POINT_LOSS);
visualize_sparse_optical_flow(downPrevGrey, downNextGrey, detectedPoints, FG_SCALE, MAX_STROKE, EFFECT_COLOR, MAX_POINTS, POINT_LOSS); }
} }
} });
nvg::end();
downPrevGrey = downNextGrey.clone(); downPrevGrey = downNextGrey.clone();
cl::acquire_from_gl(frameBuffer); cl::compute([&](cv::UMat& frameBuffer){
composite_layers(background, foreground, frameBuffer, frameBuffer, GLOW_KERNEL_SIZE, FG_LOSS); //Put it all together (OpenCL)
cv::cvtColor(frameBuffer, videoFrame, cv::COLOR_BGRA2RGB); composite_layers(background, foreground, frameBuffer, frameBuffer, GLOW_KERNEL_SIZE, FG_LOSS);
cl::release_to_gl(frameBuffer); });
//If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed. //If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed.
if(!app::display()) if(!app::display())
break; break;
va::bind(); va::write([&writer](const cv::UMat& videoFrame){
writer << videoFrame; //videoFrame is the frameBuffer converted to BGR. Ready to be written.
writer << videoFrame;
});
app::print_fps(); app::print_fps();
} }

@ -119,7 +119,7 @@ int main(int argc, char **argv) {
//Print system information //Print system information
app::print_system_info(); app::print_system_info();
cv::VideoCapture cap(argv[1], cv::CAP_FFMPEG, { cv::VideoCapture capture(argv[1], cv::CAP_FFMPEG, {
cv::CAP_PROP_HW_DEVICE, VA_HW_DEVICE_INDEX, cv::CAP_PROP_HW_DEVICE, VA_HW_DEVICE_INDEX,
cv::CAP_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_VAAPI, cv::CAP_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_VAAPI,
cv::CAP_PROP_HW_ACCELERATION_USE_OPENCL, 1 cv::CAP_PROP_HW_ACCELERATION_USE_OPENCL, 1
@ -127,12 +127,12 @@ int main(int argc, char **argv) {
va::copy(); va::copy();
if (!cap.isOpened()) { if (!capture.isOpened()) {
cerr << "ERROR! Unable to open video-input" << endl; cerr << "ERROR! Unable to open video-input" << endl;
return -1; return -1;
} }
double fps = cap.get(cv::CAP_PROP_FPS); double fps = capture.get(cv::CAP_PROP_FPS);
cerr << "Detected FPS: " << fps << endl; cerr << "Detected FPS: " << fps << endl;
cv::VideoWriter writer(OUTPUT_FILENAME, cv::CAP_FFMPEG, cv::VideoWriter::fourcc('V', 'P', '9', '0'), fps, cv::Size(WIDTH, HEIGHT), { cv::VideoWriter writer(OUTPUT_FILENAME, cv::CAP_FFMPEG, cv::VideoWriter::fourcc('V', 'P', '9', '0'), fps, cv::Size(WIDTH, HEIGHT), {
cv::VIDEOWRITER_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_VAAPI, cv::VIDEOWRITER_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_VAAPI,
@ -140,9 +140,9 @@ int main(int argc, char **argv) {
}); });
//BGRA //BGRA
cv::UMat frameBuffer, background, foreground(HEIGHT, WIDTH, CV_8UC4, cv::Scalar::all(0)); cv::UMat background, foreground(HEIGHT, WIDTH, CV_8UC4, cv::Scalar::all(0));
//RGB //RGB
cv::UMat videoFrame, videoFrameUp, videoFrameDown; cv::UMat rgb, videoFrameUp, videoFrameDown;
//GREY //GREY
cv::UMat videoFrameDownGrey; cv::UMat videoFrameDownGrey;
@ -154,14 +154,23 @@ int main(int argc, char **argv) {
vector<double> probs; vector<double> probs;
va::bind(); va::bind();
while (true) { while (true) {
cap >> videoFrame; bool success = va::read([&capture](cv::UMat& videoFrame){
if (videoFrame.empty()) //videoFrame will be converted to BGRA and stored in the frameBuffer.
capture >> videoFrame;
});
if(!success)
break; break;
cv::resize(videoFrame, videoFrameUp, cv::Size(WIDTH, HEIGHT)); cl::compute([&](cv::UMat& frameBuffer){
cv::resize(videoFrame, videoFrameDown, cv::Size(DOWNSIZE_WIDTH, DOWNSIZE_HEIGHT)); cvtColor(frameBuffer,rgb,cv::COLOR_BGRA2RGB);
cv::cvtColor(videoFrameDown, videoFrameDownGrey, cv::COLOR_RGB2GRAY); cv::resize(rgb, videoFrameUp, cv::Size(WIDTH, HEIGHT));
hog.detectMultiScale(videoFrameDownGrey, locations, 0, cv::Size(), cv::Size(), 1.025, 2.0, false); cv::resize(rgb, videoFrameDown, cv::Size(DOWNSIZE_WIDTH, DOWNSIZE_HEIGHT));
cv::cvtColor(videoFrameDown, videoFrameDownGrey, cv::COLOR_RGB2GRAY);
cv::cvtColor(videoFrameUp, background, cv::COLOR_RGB2BGRA);
hog.detectMultiScale(videoFrameDownGrey, locations, 0, cv::Size(), cv::Size(), 1.025, 2.0, false);
});
maxLocations.clear(); maxLocations.clear();
if (!locations.empty()) { if (!locations.empty()) {
boxes.clear(); boxes.clear();
@ -179,12 +188,8 @@ int main(int argc, char **argv) {
} }
} }
cv::cvtColor(videoFrameUp, background, cv::COLOR_RGB2BGRA); nvg::render([&](int w, int h) {
nvg::clear();
gl::bind();
nvg::begin();
nvg::clear();
{
using kb::nvg::vg; using kb::nvg::vg;
nvgBeginPath(vg); nvgBeginPath(vg);
nvgStrokeWidth(vg, std::fmax(2.0, WIDTH / 960.0)); nvgStrokeWidth(vg, std::fmax(2.0, WIDTH / 960.0));
@ -193,20 +198,21 @@ int main(int argc, char **argv) {
nvgRect(vg, maxLocations[i].x * WIDTH_FACTOR, maxLocations[i].y * HEIGHT_FACTOR, maxLocations[i].width * WIDTH_FACTOR, maxLocations[i].height * HEIGHT_FACTOR); nvgRect(vg, maxLocations[i].x * WIDTH_FACTOR, maxLocations[i].y * HEIGHT_FACTOR, maxLocations[i].width * WIDTH_FACTOR, maxLocations[i].height * HEIGHT_FACTOR);
} }
nvgStroke(vg); nvgStroke(vg);
} });
nvg::end();
cl::acquire_from_gl(frameBuffer); cl::compute([&](cv::UMat& frameBuffer){
composite_layers(background, foreground, frameBuffer, frameBuffer, BLUR_KERNEL_SIZE, FG_LOSS); //Put it all together
cv::cvtColor(frameBuffer, videoFrame, cv::COLOR_BGRA2RGB); composite_layers(background, foreground, frameBuffer, frameBuffer, BLUR_KERNEL_SIZE, FG_LOSS);
cl::release_to_gl(frameBuffer); });
//If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed. //If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed.
if (!app::display()) if (!app::display())
break; break;
va::bind(); va::write([&writer](const cv::UMat& videoFrame){
writer << videoFrame; //videoFrame is the frameBuffer converted to BGR. Ready to be written.
writer << videoFrame;
});
app::print_fps(); app::print_fps();
} }

@ -5,7 +5,7 @@
constexpr long unsigned int WIDTH = 1920; constexpr long unsigned int WIDTH = 1920;
constexpr long unsigned int HEIGHT = 1080; constexpr long unsigned int HEIGHT = 1080;
constexpr double FPS = 60; constexpr double FPS = 60;
constexpr bool OFFSCREEN = true; constexpr bool OFFSCREEN = false;
constexpr const char* OUTPUT_FILENAME = "tetra-demo.mkv"; constexpr const char* OUTPUT_FILENAME = "tetra-demo.mkv";
constexpr const int VA_HW_DEVICE_INDEX = 0; constexpr const int VA_HW_DEVICE_INDEX = 0;
constexpr unsigned long DIAG = hypot(double(WIDTH), double(HEIGHT)); constexpr unsigned long DIAG = hypot(double(WIDTH), double(HEIGHT));
@ -96,39 +96,32 @@ int main(int argc, char **argv) {
//Copy OpenCL Context for VAAPI. Must be called right after first VideoWriter/VideoCapture initialization. //Copy OpenCL Context for VAAPI. Must be called right after first VideoWriter/VideoCapture initialization.
va::copy(); va::copy();
//Initialize the OpenGL scene
init_scene(WIDTH, HEIGHT);
//BGRA gl::render([](int w, int h) {
cv::UMat frameBuffer; //Initialize the OpenGL scene
//RGB init_scene(WIDTH, HEIGHT);
cv::UMat videoFrame; });
while (true) { while (true) {
//Activate the OpenCL context for OpenGL
gl::bind();
//Render using OpenGL //Render using OpenGL
gl::begin(); gl::render([](int w, int h) {
render_scene(WIDTH, HEIGHT); render_scene(w, h);
gl::end(); });
//Aquire the frame buffer for use by OpenCL //Aquire the frame buffer for use by OpenCL
cl::acquire_from_gl(frameBuffer); cl::compute([](cv::UMat &frameBuffer) {
//Glow effect (OpenCL) //Glow effect (OpenCL)
glow_effect(frameBuffer, frameBuffer, GLOW_KERNEL_SIZE); glow_effect(frameBuffer, frameBuffer, GLOW_KERNEL_SIZE);
//Color-conversion from BGRA to RGB. OpenCV/OpenCL. });
cv::cvtColor(frameBuffer, videoFrame, cv::COLOR_BGRA2RGB);
//Release the frame buffer for use by OpenGL
cl::release_to_gl(frameBuffer);
//If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed. //If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed.
if(!app::display()) if(!app::display())
break; break;
//Activate the OpenCL context for VAAPI va::write([&writer](const cv::UMat& videoFrame){
va::bind(); //videoFrame is the frameBuffer converted to BGR. Ready to be written.
//Encode the frame using VAAPI on the GPU. writer << videoFrame;
writer << videoFrame; });
app::print_fps(); app::print_fps();
} }

@ -1,6 +1,6 @@
TARGET := video-demo TARGET := video-demo2
SRCS := video-demo.cpp SRCS := video-demo2.cpp
#precompiled headers #precompiled headers
HEADERS := HEADERS :=

@ -115,58 +115,42 @@ int main(int argc, char **argv) {
cv::VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL, 1 cv::VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL, 1
}); });
init_scene(WIDTH, HEIGHT); gl::render([](int w, int h) {
init_scene(w, h);
//BGRA });
cv::UMat frameBuffer, tmpVideoFrame;
//RGB
cv::UMat videoFrame;
//Activate the OpenCL context for VAAPI
va::bind();
while (true) { while (true) {
//Decode a frame on the GPU using VAAPI bool success = va::read([&capture](cv::UMat& videoFrame){
capture >> videoFrame; //videoFrame will be converted to BGRA and stored in the frameBuffer.
if (videoFrame.empty()) { capture >> videoFrame;
cerr << "End of stream. Exiting" << endl; });
if(!success)
break; break;
}
cl::compute([](cv::UMat& frameBuffer){
//Color-conversion from RGB to BGRA. (OpenCL) //Resize the frame if necessary. (OpenCL)
cv::cvtColor(videoFrame, tmpVideoFrame, cv::COLOR_RGB2BGRA); cv::resize(frameBuffer, frameBuffer, cv::Size(WIDTH, HEIGHT));
});
//Activate the OpenCL context for OpenGL
gl::bind(); gl::render([](int w, int h) {
//Initially aquire the framebuffer so we can write the video frame to it //Render using OpenGL
cl::acquire_from_gl(frameBuffer); render_scene(w, h);
//Resize the frame if necessary. (OpenCL) });
cv::resize(tmpVideoFrame, frameBuffer, cv::Size(WIDTH, HEIGHT));
//Release the frame buffer for use by OpenGL cl::compute([&GLOW_KERNEL_SIZE](cv::UMat& frameBuffer){
cl::release_to_gl(frameBuffer); //Glow effect (OpenCL)
glow_effect(frameBuffer, frameBuffer, GLOW_KERNEL_SIZE);
//Render using OpenGL });
gl::begin();
render_scene(WIDTH, HEIGHT);
gl::end();
//Aquire the frame buffer for use by OpenCL
cl::acquire_from_gl(frameBuffer);
//Glow effect (OpenCL)
glow_effect(frameBuffer, frameBuffer, GLOW_KERNEL_SIZE);
//Color-conversion from BGRA to RGB. (OpenCL)
cv::cvtColor(frameBuffer, videoFrame, cv::COLOR_BGRA2RGB);
//Release the frame buffer for use by OpenGL
cl::release_to_gl(frameBuffer);
//If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed. //If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed.
if(!app::display()) if(!app::display())
break; break;
//Activate the OpenCL context for VAAPI va::write([&writer](const cv::UMat& videoFrame){
va::bind(); //videoFrame is the frameBuffer converted to BGR. Ready to be written.
//Encode the frame using VAAPI on the GPU. writer << videoFrame;
writer << videoFrame; });
app::print_fps(); app::print_fps();
} }

@ -1,46 +0,0 @@
TARGET := video-demo2
SRCS := video-demo2.cpp
#precompiled headers
HEADERS :=
OBJS := ${SRCS:.cpp=.o}
DEPS := ${SRCS:.cpp=.dep}
CXXFLAGS += -fpic
LDFLAGS +=
LIBS += -lm
.PHONY: all release debug clean distclean
all: release
release: ${TARGET}
debug: ${TARGET}
info: ${TARGET}
profile: ${TARGET}
unsafe: ${TARGET}
asan: ${TARGET}
${TARGET}: ${OBJS}
${CXX} ${LDFLAGS} -o $@ $^ ${LIBS}
${OBJS}: %.o: %.cpp %.dep ${GCH}
${CXX} ${CXXFLAGS} -o $@ -c $<
${DEPS}: %.dep: %.cpp Makefile
${CXX} ${CXXFLAGS} -MM $< > $@
${GCH}: %.gch: ${HEADERS}
${CXX} ${CXXFLAGS} -o $@ -c ${@:.gch=.hpp}
install:
mkdir -p ${DESTDIR}/${PREFIX}
cp ${TARGET} ${DESTDIR}/${PREFIX}
uninstall:
rm ${DESTDIR}/${PREFIX}/${TARGET}
clean:
rm -f *~ ${DEPS} ${OBJS} ${CUO} ${GCH} ${TARGET}
distclean: clean

@ -1,159 +0,0 @@
#define CL_TARGET_OPENCL_VERSION 120
#include "../common/subsystems.hpp"
#include <string>
//Output frame dimensions used for the GL viewport, the OpenCL framebuffer and the encoder
constexpr long unsigned int WIDTH = 1920;
constexpr long unsigned int HEIGHT = 1080;
//Index of the VAAPI device used for HW decode/encode
constexpr const int VA_HW_DEVICE_INDEX = 0;
//When true, render off-screen (no native window is shown)
constexpr bool OFFSCREEN = true;
constexpr const char* OUTPUT_FILENAME = "video-demo.mkv";
//Frame diagonal in pixels.
//NOTE(review): std::hypot is not constexpr in standard C++ — this compiles only via a
//GCC/glibc extension; confirm the target toolchain before relying on it.
constexpr unsigned long DIAG = hypot(double(WIDTH), double(HEIGHT));
//Glow kernel size scales with the diagonal and is forced odd (box filter needs an odd kernel), minimum 1
constexpr int GLOW_KERNEL_SIZE = std::max(int(DIAG / 138 % 2 == 0 ? DIAG / 138 + 1 : DIAG / 138), 1);
using std::cerr;
using std::endl;
using std::string;
//One-time setup of the fixed-function OpenGL pipeline for the tetrahedron scene.
//w, h: viewport size in pixels. Requires a current GL context.
//Call order matters: the matrix-mode state machine is configured sequentially.
void init_scene(unsigned long w, unsigned long h) {
//Initialize the OpenGL scene
glViewport(0, 0, w, h);
glColor3f(1.0, 1.0, 1.0);
//Cull back faces so only front-facing triangles are rasterized
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
//Perspective projection: frustum with near=1, far=40
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glFrustum(-2, 2, -1.5, 1.5, 1, 40);
//Model-view: move the camera back 3 units and tilt the model for a 3/4 view
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(0, 0, -3);
glRotatef(50, 1, 0, 0);
glRotatef(70, 0, 1, 0);
}
//Draws one frame of the spinning tetrahedron.
//w, h: viewport size in pixels. Each call rotates the model-view by 1 degree,
//so repeated calls animate the spin. Uses legacy immediate mode intentionally
//(more concise for a demo); the vertex/color call order defines the geometry
//and must not be reordered.
void render_scene(unsigned long w, unsigned long h) {
//Render a tetrahedron using immediate mode because the code is more concise for a demo
glViewport(0, 0, w, h);
//Accumulate a 1-degree rotation per frame on the current model-view matrix
glRotatef(1, 0, 1, 0);
glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
//Four faces emitted as a single triangle strip; first two vertices repeat at the end to close the strip
glBegin(GL_TRIANGLE_STRIP);
glColor3f(1, 1, 1);
glVertex3f(0, 2, 0);
glColor3f(1, 0, 0);
glVertex3f(-1, 0, 1);
glColor3f(0, 1, 0);
glVertex3f(1, 0, 1);
glColor3f(0, 0, 1);
glVertex3f(0, 0, -1.4);
glColor3f(1, 1, 1);
glVertex3f(0, 2, 0);
glColor3f(1, 0, 0);
glVertex3f(-1, 0, 1);
glEnd();
}
//Applies a glow effect to src and writes the result to dst (src == dst is allowed).
//ksize: box-filter kernel size; must be odd. The effect is a screen-style blend of
//the image with a blurred copy of itself: invert, multiply with the blurred inverse,
//then invert back.
void glow_effect(const cv::UMat &src, cv::UMat &dst, const int ksize) {
//Scratch buffers persist across calls so GPU memory is not reallocated every frame.
static cv::UMat downscaled;
static cv::UMat blurred;
static cv::UMat product16;
//Work on the inverted image so the glow brightens rather than darkens.
cv::bitwise_not(src, dst);
//Blur at half resolution — cheaper, and the upscale below softens it further.
cv::resize(dst, downscaled, cv::Size(), 0.5, 0.5);
cv::boxFilter(downscaled, downscaled, -1, cv::Size(ksize, ksize), cv::Point(-1,-1), true, cv::BORDER_REPLICATE);
//Bring the blur back up to the source resolution.
cv::resize(downscaled, blurred, src.size());
//Multiply image and blur in 16 bits to avoid clipping the product...
cv::multiply(dst, blurred, product16, 1, CV_16U);
//...then renormalize to 8 bits and undo the initial inversion.
cv::divide(product16, cv::Scalar::all(255.0), dst, 1, CV_8U);
cv::bitwise_not(dst, dst);
}
//Entry point: decodes a video with VAAPI, overlays a GL-rendered spinning
//tetrahedron plus an OpenCL glow effect, and re-encodes the result as VP9.
//Usage: video-demo2 <video-file>. Returns 0 on success, -1 if the input
//cannot be opened.
int main(int argc, char **argv) {
    using namespace kb;
    if(argc != 2) {
        //Keep the usage line in sync with the built target name (video-demo2).
        cerr << "Usage: video-demo2 <video-file>" << endl;
        exit(1);
    }
    //Initialize the application
    app::init("Video Demo", WIDTH, HEIGHT, OFFSCREEN);
    //Print system information
    app::print_system_info();
    //Initialize MJPEG HW decoding using VAAPI
    cv::VideoCapture capture(argv[1], cv::CAP_FFMPEG, {
            cv::CAP_PROP_HW_DEVICE, VA_HW_DEVICE_INDEX,
            cv::CAP_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_VAAPI,
            cv::CAP_PROP_HW_ACCELERATION_USE_OPENCL, 1
    });
    //Copy OpenCL Context for VAAPI. Must be called right after first VideoWriter/VideoCapture initialization.
    va::copy();
    if (!capture.isOpened()) {
        cerr << "ERROR! Unable to open video input" << endl;
        return -1;
    }
    double fps = capture.get(cv::CAP_PROP_FPS);
    //Initialize VP9 HW encoding using VAAPI. We don't need to specify the hardware device twice. only generates a warning.
    cv::VideoWriter writer(OUTPUT_FILENAME, cv::CAP_FFMPEG, cv::VideoWriter::fourcc('V', 'P', '9', '0'), fps, cv::Size(WIDTH, HEIGHT), {
            cv::VIDEOWRITER_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_VAAPI,
            cv::VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL, 1
    });
    //WIDTH, HEIGHT and GLOW_KERNEL_SIZE are constexpr globals: they can be read
    //inside the lambdas without being captured. Naming a variable with static
    //storage duration in a capture list is ill-formed, so the captures were removed.
    gl::render([]() {
        init_scene(WIDTH, HEIGHT);
    });
    while (true) {
        va::read([&capture](cv::UMat& videoFrame){
            //videoFrame will be converted to BGRA and stored in the frameBuffer.
            capture >> videoFrame;
            assert(!videoFrame.empty());
        });
        cl::work([](cv::UMat& frameBuffer){
            //Resize the frame if necessary. (OpenCL)
            cv::resize(frameBuffer, frameBuffer, cv::Size(WIDTH, HEIGHT));
        });
        gl::render([]() {
            //Render using OpenGL
            render_scene(WIDTH, HEIGHT);
        });
        cl::work([](cv::UMat& frameBuffer){
            //Glow effect (OpenCL)
            glow_effect(frameBuffer, frameBuffer, GLOW_KERNEL_SIZE);
        });
        //If onscreen rendering is enabled it displays the framebuffer in the native window. Returns false if the window was closed.
        if(!app::display())
            break;
        va::write([&writer](const cv::UMat& videoFrame){
            //videoFrame is the frameBuffer converted to BGR. Ready to be written.
            writer << videoFrame;
        });
        app::print_fps();
    }
    app::terminate();
    return 0;
}
Loading…
Cancel
Save