js: add TrackerMIL

pull/21032/head
Alexander Alekhin 3 years ago
parent fa5c7a9e75
commit 3d5d3ea20c
  1. 26
      modules/js/src/core_bindings.cpp
  2. 1
      modules/js/test/package.json
  3. 25
      modules/js/test/test_video.js
  4. 19
      platforms/js/opencv_js.config.py

@@ -97,6 +97,10 @@ using namespace cv::dnn;
using namespace aruco;
#endif
#ifdef HAVE_OPENCV_VIDEO
// Flat alias for the nested cv::TrackerMIL::Params type so the JS binding
// config can refer to it by a single identifier ('TrackerMIL_Params').
// Modern `using` alias instead of `typedef`; the exposed name is unchanged.
using TrackerMIL_Params = TrackerMIL::Params;
#endif
namespace binding_utils
{
template<typename classT, typename enumT>
@@ -364,6 +368,23 @@ namespace binding_utils
result.call<void>("push", arg2);
return result;
}
// Binding shim: forwards a JS-side init(image, boundingBox) call to
// cv::Tracker::init. Exists so Embind has a free function to register.
void Tracker_init_wrapper(cv::Tracker& arg0, const cv::Mat& arg1, const Rect& arg2)
{
    arg0.init(arg1, arg2);  // init returns void; no value to propagate
}
// Binding shim: cv::Tracker::update reports the tracked box through an
// out-parameter, which JS callers cannot supply. Return a JS array of
// [succeeded, boundingBox] instead.
emscripten::val Tracker_update_wrapper(cv::Tracker& arg0, const cv::Mat& arg1)
{
    Rect box;
    const bool ok = arg0.update(arg1, box);

    emscripten::val pair = emscripten::val::array();
    pair.call<void>("push", ok);
    pair.call<void>("push", box);
    return pair;
}
#endif // HAVE_OPENCV_VIDEO
std::string getExceptionMsg(const cv::Exception& e) {
@@ -676,6 +697,11 @@ EMSCRIPTEN_BINDINGS(binding_utils)
function("CamShift", select_overload<emscripten::val(const cv::Mat&, Rect&, TermCriteria)>(&binding_utils::CamShiftWrapper));
function("meanShift", select_overload<emscripten::val(const cv::Mat&, Rect&, TermCriteria)>(&binding_utils::meanShiftWrapper));
emscripten::class_<cv::Tracker >("Tracker")
.function("init", select_overload<void(cv::Tracker&,const cv::Mat&,const Rect&)>(&binding_utils::Tracker_init_wrapper), pure_virtual())
.function("update", select_overload<emscripten::val(cv::Tracker&,const cv::Mat&)>(&binding_utils::Tracker_update_wrapper), pure_virtual());
#endif
function("getBuildInformation", &binding_utils::getBuildInformation);

@@ -4,6 +4,7 @@
"version": "1.0.1",
"dependencies": {
"ansi-colors": "^4.1.1",
"cli-table": "0.3.6",
"minimist": "^1.2.0",
"node-qunit": "latest"
},

@@ -105,3 +105,28 @@ QUnit.test('Background Segmentation', function(assert) {
mog2.delete();
}
});
QUnit.test('TrackerMIL', function(assert) {
    {
        // Two blank 100x100 single-channel frames are enough to exercise
        // the init/update binding plumbing.
        const frameA = cv.Mat.zeros(100, 100, cv.CV_8UC1);
        const frameB = cv.Mat.zeros(100, 100, cv.CV_8UC1);

        const tracker = new cv.TrackerMIL();
        assert.equal(tracker instanceof cv.TrackerMIL, true);
        assert.equal(tracker instanceof cv.Tracker, true);

        const roi = new cv.Rect(10, 10, 50, 60);
        tracker.init(frameA, roi);

        // update() is bound to return [succeeded, boundingBox].
        const result = tracker.update(frameB);
        const ok = result[0];
        const box = result[1];
        assert.equal(ok, true);
        assert.equal(box.width, 50);
        assert.equal(box.height, 60);

        // Release emscripten-managed native memory.
        tracker.delete();
        frameA.delete();
        frameB.delete();
    }
});

@@ -113,10 +113,21 @@ objdetect = {'': ['groupRectangles'],
'CascadeClassifier': ['load', 'detectMultiScale2', 'CascadeClassifier', 'detectMultiScale3', 'empty', 'detectMultiScale'],
'QRCodeDetector': ['QRCodeDetector', 'decode', 'decodeCurved', 'detect', 'detectAndDecode', 'detectMulti', 'setEpsX', 'setEpsY']}
# Whitelist of video-module symbols exposed through the JS bindings:
# top-level functions under '', methods keyed by class name.
video = {
    '': ['CamShift', 'calcOpticalFlowFarneback', 'calcOpticalFlowPyrLK',
         'createBackgroundSubtractorMOG2', 'findTransformECC', 'meanShift'],
    'BackgroundSubtractorMOG2': ['BackgroundSubtractorMOG2', 'apply'],
    'BackgroundSubtractor': ['apply', 'getBackgroundImage'],
}
# Whitelist of video-module symbols exposed through the JS bindings:
# top-level functions under '', methods keyed by class name.
video = {
    '': ['CamShift', 'calcOpticalFlowFarneback', 'calcOpticalFlowPyrLK',
         'createBackgroundSubtractorMOG2', 'findTransformECC', 'meanShift'],
    'BackgroundSubtractorMOG2': ['BackgroundSubtractorMOG2', 'apply'],
    'BackgroundSubtractor': ['apply', 'getBackgroundImage'],
    # issue #21070: 'Tracker': ['init', 'update'],
    'TrackerMIL': ['create'],
    'TrackerMIL_Params': [],
}
dnn = {'dnn_Net': ['setInput', 'forward'],
'': ['readNetFromCaffe', 'readNetFromTensorflow', 'readNetFromTorch', 'readNetFromDarknet',

Loading…
Cancel
Save