<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Face Detection Camera Example</title>
<link href="js_example_style.css" rel="stylesheet">
</head>
<body>
<h2>Face Detection Camera Example</h2>
<p>
    Click <b>Start/Stop</b> button to start or stop the camera capture.<br>
    The <b>videoInput</b> is a &lt;video&gt; element used as face detector input.
    The <b>canvasOutput</b> is a &lt;canvas&gt; element used as face detector output.<br>
    The code of &lt;textarea&gt; will be executed when video is started.
    You can modify the code to investigate more.
</p>
<div>
<div class="control"><button id="startAndStop" disabled>Start</button></div>
<textarea class="code" rows="29" cols="80" id="codeEditor" spellcheck="false">
</textarea>
</div>
<p class="err" id="errorMessage"></p>
<div>
    <table>
    <tr>
        <td>
            <video id="videoInput" width="320" height="240"></video>
        </td>
        <td>
            <canvas id="canvasOutput" width="320" height="240"></canvas>
        </td>
        <td></td>
        <td></td>
    </tr>
    <tr>
        <td>
            <div class="caption">videoInput</div>
        </td>
        <td>
            <div class="caption">canvasOutput</div>
        </td>
        <td></td>
        <td></td>
    </tr>
    </table>
</div>
<script src="https://webrtc.github.io/adapter/adapter-5.0.4.js"></script>
<script src="utils.js"></script>
<script id="codeSnippet" type="text/code-snippet">
let video = document.getElementById('videoInput');
let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
let dst = new cv.Mat(video.height, video.width, cv.CV_8UC4);
let gray = new cv.Mat();
let cap = new cv.VideoCapture(video);
let faces = new cv.RectVector();
let classifier = new cv.CascadeClassifier();

// load pre-trained classifiers (file created in Emscripten's FS by the page script)
classifier.load('haarcascade_frontalface_default.xml');

const FPS = 30;
function processVideo() {
    try {
        if (!streaming) {
            // clean and stop: release all wasm-heap allocations before exiting the loop.
            src.delete();
            dst.delete();
            gray.delete();
            faces.delete();
            classifier.delete();
            return;
        }
        let begin = Date.now();
        // start processing: grab a frame and keep a copy to draw on.
        cap.read(src);
        src.copyTo(dst);
        cv.cvtColor(dst, gray, cv.COLOR_RGBA2GRAY, 0);
        // detect faces in the grayscale frame.
        classifier.detectMultiScale(gray, faces, 1.1, 3, 0);
        // draw a rectangle around each detection.
        for (let i = 0; i < faces.size(); ++i) {
            let face = faces.get(i);
            let point1 = new cv.Point(face.x, face.y);
            let point2 = new cv.Point(face.x + face.width, face.y + face.height);
            cv.rectangle(dst, point1, point2, [255, 0, 0, 255]);
        }
        cv.imshow('canvasOutput', dst);
        // schedule the next frame; clamp so a slow frame never yields a negative delay.
        let delay = Math.max(0, 1000 / FPS - (Date.now() - begin));
        setTimeout(processVideo, delay);
    } catch (err) {
        // Report and stop the loop; Mats are reclaimed when the page unloads.
        utils.printError(err);
    }
}

// schedule the first one.
setTimeout(processVideo, 0);
</script>
<script>
// UI wiring for the face-detection demo.
let utils = new Utils('errorMessage');  // camera/OpenCV helper with error reporting

// Populate the editor with the default snippet.
utils.loadCode('codeSnippet', 'codeEditor');

let streaming = false;  // read by the executed snippet to know when to stop
let videoInput = document.getElementById('videoInput');
let startAndStop = document.getElementById('startAndStop');
let canvasOutput = document.getElementById('canvasOutput');
let canvasContext = canvasOutput.getContext('2d');

function onVideoStarted() {
    streaming = true;
    startAndStop.innerText = 'Stop';
    // Match the <video> element size to the actual stream resolution.
    videoInput.width = videoInput.videoWidth;
    videoInput.height = videoInput.videoHeight;
    // Run the (possibly user-edited) snippet from the editor.
    utils.executeCode('codeEditor');
}

function onVideoStopped() {
    streaming = false;
    canvasContext.clearRect(0, 0, canvasOutput.width, canvasOutput.height);
    startAndStop.innerText = 'Start';
}

// Toggle capture on button click.
startAndStop.addEventListener('click', () => {
    if (streaming) {
        utils.stopCamera();
        onVideoStopped();
    } else {
        utils.clearError();
        utils.startCamera('qvga', onVideoStarted, 'videoInput');
    }
});

utils.loadOpenCv(() => {
    let faceCascadeFile = 'haarcascade_frontalface_default.xml';
    // Fetch the cascade into Emscripten's virtual FS, then enable the button.
    utils.createFileFromUrl(faceCascadeFile, faceCascadeFile, () => {
        startAndStop.removeAttribute('disabled');
    });
});
</script>
</body>
</html>