<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>CamShift Example</title>
<link href="js_example_style.css" rel="stylesheet" type="text/css" />
</head>
<body>
<h2>CamShift Example</h2>
<p>
    Click the <b>Start/Stop</b> button to start or stop the video.<br>
    The <b>videoInput</b> is a &lt;video&gt; element used as the CamShift input.
    The <b>canvasOutput</b> is a &lt;canvas&gt; element used as the CamShift output.<br>
    The code in the &lt;textarea&gt; will be executed when the video is started.
    You can modify the code to investigate further.
</p>
<div>
<div class="control"><button id="startAndStop" disabled>Start</button></div>
<textarea class="code" rows="29" cols="100" id="codeEditor" spellcheck="false">
</textarea>
</div>
<p class="err" id="errorMessage"></p>
<div>
    <table cellpadding="0" cellspacing="0" width="0" border="0">
    <tr>
        <td>
            <video id="videoInput" width="320" height="240" muted loop></video>
        </td>
        <td>
            <canvas id="canvasOutput" width="320" height="240"></canvas>
        </td>
        <td></td>
        <td></td>
    </tr>
    <tr>
        <td>
            <div class="caption">videoInput</div>
        </td>
        <td>
            <div class="caption">canvasOutput</div>
        </td>
        <td></td>
        <td></td>
    </tr>
    </table>
</div>
<script src="https://webrtc.github.io/adapter/adapter-5.0.4.js" type="text/javascript"></script>
<script src="utils.js" type="text/javascript"></script>
<script id="codeSnippet" type="text/code-snippet">
let video = document.getElementById('videoInput');
let cap = new cv.VideoCapture(video);

// take first frame of the video
let frame = new cv.Mat(video.height, video.width, cv.CV_8UC4);
cap.read(frame);

// hardcode the initial location of window
let trackWindow = new cv.Rect(150, 60, 63, 125);

// set up the ROI for tracking
let roi = frame.roi(trackWindow);
let hsvRoi = new cv.Mat();
cv.cvtColor(roi, hsvRoi, cv.COLOR_RGBA2RGB);
cv.cvtColor(hsvRoi, hsvRoi, cv.COLOR_RGB2HSV);
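// build a mask so that only pixels inside the HSV range below contribute to the histogram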
let mask = new cv.Mat();
let lowScalar = new cv.Scalar(30, 30, 0);
let highScalar = new cv.Scalar(180, 180, 180);
let low = new cv.Mat(hsvRoi.rows, hsvRoi.cols, hsvRoi.type(), lowScalar);
let high = new cv.Mat(hsvRoi.rows, hsvRoi.cols, hsvRoi.type(), highScalar);
cv.inRange(hsvRoi, low, high, mask);
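// compute the hue histogram of the masked ROI and normalize it to [0, 255]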
let roiHist = new cv.Mat();
let hsvRoiVec = new cv.MatVector();
hsvRoiVec.push_back(hsvRoi);
cv.calcHist(hsvRoiVec, [0], mask, roiHist, [180], [0, 180]);
cv.normalize(roiHist, roiHist, 0, 255, cv.NORM_MINMAX);

// delete useless mats.
roi.delete(); hsvRoi.delete(); mask.delete(); low.delete(); high.delete(); hsvRoiVec.delete();

// Setup the termination criteria, either 10 iterations or move by at least 1 pt
let termCrit = new cv.TermCriteria(cv.TERM_CRITERIA_EPS | cv.TERM_CRITERIA_COUNT, 10, 1);

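// mats and variables reused on every frame of the video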
let hsv = new cv.Mat(video.height, video.width, cv.CV_8UC3);
let hsvVec = new cv.MatVector();
hsvVec.push_back(hsv);
let dst = new cv.Mat();
let trackBox = null;

const FPS = 30;
function processVideo() {
    try {
        if (!streaming) {
            // clean and stop.
            frame.delete(); dst.delete(); hsvVec.delete(); roiHist.delete(); hsv.delete();
            return;
        }
        let begin = Date.now();

        // start processing.
        cap.read(frame);
        cv.cvtColor(frame, hsv, cv.COLOR_RGBA2RGB);
        cv.cvtColor(hsv, hsv, cv.COLOR_RGB2HSV);
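        // back-project the ROI hue histogram onto the current frame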
        cv.calcBackProject(hsvVec, [0], roiHist, dst, [0, 180], 1);

        // apply camshift to get the new location
        [trackBox, trackWindow] = cv.CamShift(dst, trackWindow, termCrit);

        // Draw it on image
        let pts = cv.rotatedRectPoints(trackBox);
        cv.line(frame, pts[0], pts[1], [255, 0, 0, 255], 3);
        cv.line(frame, pts[1], pts[2], [255, 0, 0, 255], 3);
        cv.line(frame, pts[2], pts[3], [255, 0, 0, 255], 3);
        cv.line(frame, pts[3], pts[0], [255, 0, 0, 255], 3);
        cv.imshow('canvasOutput', frame);

        // schedule the next one.
        let delay = 1000/FPS - (Date.now() - begin);
        setTimeout(processVideo, delay);
    } catch (err) {
        utils.printError(err);
    }
};

// schedule the first one.
setTimeout(processVideo, 0);
</script>
<script type="text/javascript">
let utils = new Utils('errorMessage');

utils.loadCode('codeSnippet', 'codeEditor');

let streaming = false;
let videoInput = document.getElementById('videoInput');
let startAndStop = document.getElementById('startAndStop');
let canvasOutput = document.getElementById('canvasOutput');
let canvasContext = canvasOutput.getContext('2d');

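// toggle playback: play the video and run the editor code, or pause and rewind it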
startAndStop.addEventListener('click', () => {
    if (!streaming) {
        utils.clearError();
        videoInput.play().then(() => {
            onVideoStarted();
        });
    } else {
        videoInput.pause();
        videoInput.currentTime = 0;
        onVideoStopped();
    }
});

function onVideoStarted() {
    streaming = true;
    startAndStop.innerText = 'Stop';
    videoInput.height = videoInput.width * (videoInput.videoHeight / videoInput.videoWidth);
    utils.executeCode('codeEditor');
}

function onVideoStopped() {
    streaming = false;
    canvasContext.clearRect(0, 0, canvasOutput.width, canvasOutput.height);
    startAndStop.innerText = 'Start';
}

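// enable the Start button once OpenCV.js has loaded and the video can be played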
utils.loadOpenCv(() => {
    videoInput.addEventListener('canplay', () => {
        startAndStop.removeAttribute('disabled');
    });
    videoInput.src = 'cup.mp4';
});
</script>
</body>
</html>