/**
 * tracking - A modern approach for Computer Vision on the web.
 * @author Eduardo Lundgren <edu@rdo.io>
 * @version v1.1.3
 * @link http://trackingjs.com
 * @license BSD
 */
(function(window, undefined) {
  window.tracking = window.tracking || {};

  /**
   * Inherit the prototype methods from one constructor into another.
   *
   * Usage:
   * <pre>
   * function ParentClass(a, b) { }
   * ParentClass.prototype.foo = function(a) { }
   *
   * function ChildClass(a, b, c) {
   *   ChildClass.base(this, 'constructor', a, b);
   * }
   * tracking.inherits(ChildClass, ParentClass);
   *
   * var child = new ChildClass('a', 'b', 'c');
   * child.foo();
   * </pre>
   *
   * @param {Function} childCtor Child class.
   * @param {Function} parentCtor Parent class.
   */
  tracking.inherits = function(childCtor, parentCtor) {
    function TempCtor() {
    }
    TempCtor.prototype = parentCtor.prototype;
    childCtor.superClass_ = parentCtor.prototype;
    childCtor.prototype = new TempCtor();
    childCtor.prototype.constructor = childCtor;

    /**
     * Calls superclass constructor/method.
     *
     * This function is only available if you use tracking.inherits to express
     * inheritance relationships between classes.
     *
     * @param {!object} me Should always be "this".
     * @param {string} methodName The method name to call. Calling superclass
     *     constructor can be done with the special string 'constructor'.
     * @param {...*} var_args The arguments to pass to the superclass
     *     method/constructor.
     * @return {*} The return value of the superclass method/constructor.
     */
    childCtor.base = function(me, methodName) {
      var args = Array.prototype.slice.call(arguments, 2);
      return parentCtor.prototype[methodName].apply(me, args);
    };
  };
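
  /*
   * A runnable sketch of `base` dispatching to a superclass method (the
   * class names here are hypothetical):
   *
   *   function Animal() {}
   *   Animal.prototype.speak = function() { return 'generic'; };
   *
   *   function Dog() {
   *     Dog.base(this, 'constructor');
   *   }
   *   tracking.inherits(Dog, Animal);
   *   Dog.prototype.speak = function() {
   *     return Dog.base(this, 'speak') + ' -> woof';
   *   };
   *
   *   new Dog().speak(); // 'generic -> woof'
   */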

  /**
   * Captures the user camera when tracking a video element and sets its
   * source to the camera stream.
   * @param {HTMLVideoElement} element Video element to capture the camera into.
   * @param {object} opt_options Optional configuration to the tracker.
   */
  tracking.initUserMedia_ = function(element, opt_options) {
    window.navigator.mediaDevices.getUserMedia({
      video: true,
      audio: !!(opt_options && opt_options.audio)
    }).then(function(stream) {
      element.srcObject = stream;
    }).catch(function() {
      throw new Error('Cannot capture user camera.');
    });
  };

  /**
   * Tests whether the object is a dom node.
   * @param {object} o Object to be tested.
   * @return {boolean} True if the object is a dom node.
   */
  tracking.isNode = function(o) {
    return o.nodeType || this.isWindow(o);
  };

  /**
   * Tests whether the object is the `window` object.
   * @param {object} o Object to be tested.
   * @return {boolean} True if the object is the `window` object.
   */
  tracking.isWindow = function(o) {
    return !!(o && o.alert && o.document);
  };

  /**
   * Selects a dom node from a CSS3 selector using `document.querySelector`.
   * @param {string} selector
   * @param {object} opt_element The root element for the query. When not
   *     specified `document` is used as root element.
   * @return {HTMLElement} The first dom element that matches the selector.
   *     If not found, returns `null`.
   */
  tracking.one = function(selector, opt_element) {
    if (this.isNode(selector)) {
      return selector;
    }
    return (opt_element || document).querySelector(selector);
  };
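
  /*
   * Example (assumes a <video id="video"> element exists in the page):
   *
   *   var video = tracking.one('#video'); // CSS selector lookup
   *   var same = tracking.one(video);     // dom nodes pass through unchanged
   */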

  /**
   * Tracks a canvas, image or video element based on the specified `tracker`
   * instance. This method extracts the pixel information of the input element
   * to pass to the `tracker` instance. When tracking a video,
   * `tracker.track(pixels, width, height)` is called repeatedly in a loop in
   * order to track successive video frames.
   *
   * Example:
   * <pre>
   * var tracker = new tracking.ColorTracker();
   *
   * tracking.track('#video', tracker);
   * // or
   * tracking.track('#video', tracker, { camera: true });
   *
   * tracker.on('track', function(event) {
   *   console.log(event.data[0].x, event.data[0].y);
   * });
   * </pre>
   *
   * @param {HTMLElement} element The element to track: canvas, image or
   *     video.
   * @param {tracking.Tracker} tracker The tracker instance used to track the
   *     element.
   * @param {object} opt_options Optional configuration to the tracker.
   */
  tracking.track = function(element, tracker, opt_options) {
    element = tracking.one(element);
    if (!element) {
      throw new Error('Element not found, try a different element or selector.');
    }
    if (!tracker) {
      throw new Error('Tracker not specified, try `tracking.track(element, new tracking.FaceTracker())`.');
    }
    switch (element.nodeName.toLowerCase()) {
      case 'canvas':
        return this.trackCanvas_(element, tracker, opt_options);
      case 'img':
        return this.trackImg_(element, tracker, opt_options);
      case 'video':
        if (opt_options && opt_options.camera) {
          this.initUserMedia_(element, opt_options);
        }
        return this.trackVideo_(element, tracker, opt_options);
      default:
        throw new Error('Element not supported, try in a canvas, img, or video.');
    }
  };

  /**
   * Tracks a canvas element based on the specified `tracker` instance and
   * returns a `TrackerTask` for this track.
   * @param {HTMLCanvasElement} element Canvas element to track.
   * @param {tracking.Tracker} tracker The tracker instance used to track the
   *     element.
   * @param {object} opt_options Optional configuration to the tracker.
   * @return {tracking.TrackerTask}
   * @private
   */
  tracking.trackCanvas_ = function(element, tracker) {
    var self = this;
    var task = new tracking.TrackerTask(tracker);
    task.on('run', function() {
      self.trackCanvasInternal_(element, tracker);
    });
    return task.run();
  };

  /**
   * Tracks a canvas element based on the specified `tracker` instance. This
   * method extracts the pixel information of the input element to pass to
   * the `tracker` instance.
   * @param {HTMLCanvasElement} element Canvas element to track.
   * @param {tracking.Tracker} tracker The tracker instance used to track the
   *     element.
   * @param {object} opt_options Optional configuration to the tracker.
   * @private
   */
  tracking.trackCanvasInternal_ = function(element, tracker) {
    var width = element.width;
    var height = element.height;
    var context = element.getContext('2d');
    var imageData = context.getImageData(0, 0, width, height);
    tracker.track(imageData.data, width, height);
  };

  /**
   * Tracks an image element based on the specified `tracker` instance. This
   * method extracts the pixel information of the input element to pass to
   * the `tracker` instance.
   * @param {HTMLImageElement} element Image element to track.
   * @param {tracking.Tracker} tracker The tracker instance used to track the
   *     element.
   * @param {object} opt_options Optional configuration to the tracker.
   * @private
   */
  tracking.trackImg_ = function(element, tracker) {
    var width = element.naturalWidth;
    var height = element.naturalHeight;
    var canvas = document.createElement('canvas');
    canvas.width = width;
    canvas.height = height;
    var task = new tracking.TrackerTask(tracker);
    task.on('run', function() {
      tracking.Canvas.loadImage(canvas, element.src, 0, 0, width, height, function() {
        tracking.trackCanvasInternal_(canvas, tracker);
      });
    });
    return task.run();
  };

  /**
   * Tracks a video element based on the specified `tracker` instance. This
   * method extracts the pixel information of the input element to pass to
   * the `tracker` instance. `tracker.track(pixels, width, height)` is called
   * in a polling loop in order to track successive video frames.
   * @param {HTMLVideoElement} element Video element to track.
   * @param {tracking.Tracker} tracker The tracker instance used to track the
   *     element.
   * @param {object} opt_options Optional configuration to the tracker.
   * @private
   */
  tracking.trackVideo_ = function(element, tracker) {
    var canvas = document.createElement('canvas');
    var context = canvas.getContext('2d');
    var width;
    var height;

    // FIXME: the analysed size is the video display size (offsetWidth/
    // offsetHeight), not the intrinsic video size.
    var resizeCanvas_ = function() {
      width = element.offsetWidth;
      height = element.offsetHeight;
      canvas.width = width;
      canvas.height = height;
    };
    resizeCanvas_();
    element.addEventListener('resize', resizeCanvas_);

    // FIXME: expose a process function; it is up to the caller to handle the
    // frequency of detection. That already seems to be the job of
    // tracking.TrackerTask, so in short: remove tracking.TrackerTask from
    // here; if users want it, they can create it themselves.
    //
    // var requestId;
    // var requestAnimationFrame_ = function() {
    //   requestId = window.requestAnimationFrame(function() {
    //     if (element.readyState === element.HAVE_ENOUGH_DATA) {
    //       try {
    //         // Firefox v~30.0 gets confused with the video readyState firing an
    //         // erroneous HAVE_ENOUGH_DATA just before HAVE_CURRENT_DATA state,
    //         // hence keep trying to read it until resolved.
    //         context.drawImage(element, 0, 0, width, height);
    //       } catch (err) {}
    //       tracking.trackCanvasInternal_(canvas, tracker);
    //     }
    //     requestAnimationFrame_();
    //   });
    // };

    var stopTask = false;
    var doTask = function() {
      setTimeout(function() {
        if (element.readyState === element.HAVE_ENOUGH_DATA) {
          try {
            // Firefox v~30.0 gets confused with the video readyState firing an
            // erroneous HAVE_ENOUGH_DATA just before HAVE_CURRENT_DATA state,
            // hence keep trying to read it until resolved.
            context.drawImage(element, 0, 0, width, height);
          } catch (err) {}
          try {
            tracking.trackCanvasInternal_(canvas, tracker);
          } catch (e) {
            console.log('Tracker failed on the current frame.', e);
          }
        }
        if (!stopTask) {
          setTimeout(doTask, 500);
        }
      }, 500);
    };

    var task = new tracking.TrackerTask(tracker);
    task.on('stop', function() {
      // window.cancelAnimationFrame(requestId);
      stopTask = true;
    });
    task.on('run', function() {
      // requestAnimationFrame_();
      stopTask = false;
      doTask();
    });
    return task.run();
  };

  // Browser polyfills
  //===================
  if (!window.URL) {
    window.URL = window.webkitURL || window.msURL || window.oURL;
  }
  if (!navigator.getUserMedia) {
    navigator.getUserMedia = navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia || navigator.msGetUserMedia;
  }
}(window));

(function() {
  /**
   * EventEmitter utility.
   * @constructor
   */
  tracking.EventEmitter = function() {};

  /**
   * Holds event listeners scoped by event type.
   * @type {object}
   * @private
   */
  tracking.EventEmitter.prototype.events_ = null;

  /**
   * Adds a listener to the end of the listeners array for the specified event.
   * @param {string} event
   * @param {function} listener
   * @return {object} Returns emitter, so calls can be chained.
   */
  tracking.EventEmitter.prototype.addListener = function(event, listener) {
    if (typeof listener !== 'function') {
      throw new TypeError('Listener must be a function');
    }
    if (!this.events_) {
      this.events_ = {};
    }
    this.emit('newListener', event, listener);
    if (!this.events_[event]) {
      this.events_[event] = [];
    }
    this.events_[event].push(listener);
    return this;
  };

  /**
   * Returns an array of listeners for the specified event.
   * @param {string} event
   * @return {array} Array of listeners.
   */
  tracking.EventEmitter.prototype.listeners = function(event) {
    return this.events_ && this.events_[event];
  };

  /**
   * Execute each of the listeners in order with the supplied arguments.
   * @param {string} event
   * @param {*} opt_args [arg1], [arg2], [...]
   * @return {boolean} Returns true if event had listeners, false otherwise.
   */
  tracking.EventEmitter.prototype.emit = function(event) {
    var listeners = this.listeners(event);
    if (listeners) {
      var args = Array.prototype.slice.call(arguments, 1);
      for (var i = 0; i < listeners.length; i++) {
        if (listeners[i]) {
          listeners[i].apply(this, args);
        }
      }
      return true;
    }
    return false;
  };
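
  /*
   * Minimal usage sketch of the emitter:
   *
   *   var emitter = new tracking.EventEmitter();
   *   emitter.on('data', function(x) { console.log('got', x); });
   *   emitter.emit('data', 42); // returns true, logs "got 42"
   *   emitter.emit('other');    // returns false, no listeners registered
   */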

  /**
   * Adds a listener to the end of the listeners array for the specified event.
   * @param {string} event
   * @param {function} listener
   * @return {object} Returns emitter, so calls can be chained.
   */
  tracking.EventEmitter.prototype.on = tracking.EventEmitter.prototype.addListener;

  /**
   * Adds a one time listener for the event. This listener is invoked only the
   * next time the event is fired, after which it is removed.
   * @param {string} event
   * @param {function} listener
   * @return {object} Returns emitter, so calls can be chained.
   */
  tracking.EventEmitter.prototype.once = function(event, listener) {
    var self = this;
    return self.on(event, function handlerInternal() {
      self.removeListener(event, handlerInternal);
      listener.apply(this, arguments);
    });
  };
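
  /*
   * Example: a `once` listener fires a single time and is then removed.
   *
   *   var emitter = new tracking.EventEmitter();
   *   emitter.once('ready', function() { console.log('fired'); });
   *   emitter.emit('ready'); // logs "fired"
   *   emitter.emit('ready'); // no output, the listener was removed
   */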

  /**
   * Removes all listeners, or those of the specified event. It's not a good
   * idea to remove listeners that were added elsewhere in the code,
   * especially when it's on an emitter that you didn't create.
   * @param {string} opt_event
   * @return {object} Returns emitter, so calls can be chained.
   */
  tracking.EventEmitter.prototype.removeAllListeners = function(opt_event) {
    if (!this.events_) {
      return this;
    }
    if (opt_event) {
      delete this.events_[opt_event];
    } else {
      delete this.events_;
    }
    return this;
  };

  /**
   * Removes a listener from the listener array for the specified event.
   * Caution: changes array indices in the listener array behind the listener.
   * @param {string} event
   * @param {function} listener
   * @return {object} Returns emitter, so calls can be chained.
   */
  tracking.EventEmitter.prototype.removeListener = function(event, listener) {
    if (typeof listener !== 'function') {
      throw new TypeError('Listener must be a function');
    }
    if (!this.events_) {
      return this;
    }
    var listeners = this.listeners(event);
    if (Array.isArray(listeners)) {
      var i = listeners.indexOf(listener);
      if (i < 0) {
        return this;
      }
      listeners.splice(i, 1);
    }
    return this;
  };

  /**
   * By default EventEmitters will print a warning if more than 10 listeners
   * are added for a particular event. This is a useful default which helps
   * finding memory leaks. Obviously not all Emitters should be limited to 10.
   * This function allows that to be increased. Set to zero for unlimited.
   * @param {number} n The maximum number of listeners.
   */
  tracking.EventEmitter.prototype.setMaxListeners = function() {
    throw new Error('Not implemented');
  };
}());

(function() {
  /**
   * Canvas utility.
   * @static
   * @constructor
   */
  tracking.Canvas = {};

  /**
   * Loads an image source into the canvas.
   * @param {HTMLCanvasElement} canvas The canvas dom element.
   * @param {string} src The image source.
   * @param {number} x The canvas horizontal coordinate to load the image.
   * @param {number} y The canvas vertical coordinate to load the image.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {function} opt_callback Callback that fires when the image is
   *     loaded into the canvas.
   * @static
   */
  tracking.Canvas.loadImage = function(canvas, src, x, y, width, height, opt_callback) {
    var instance = this;
    var img = new window.Image();
    img.crossOrigin = '*';
    img.onload = function() {
      var context = canvas.getContext('2d');
      canvas.width = width;
      canvas.height = height;
      context.drawImage(img, x, y, width, height);
      if (opt_callback) {
        opt_callback.call(instance);
      }
      img = null;
    };
    img.src = src;
  };
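
  /*
   * Usage sketch (the image URL is illustrative; cross-origin images must
   * be served with CORS headers, otherwise a later getImageData call on the
   * canvas will fail):
   *
   *   var canvas = document.createElement('canvas');
   *   tracking.Canvas.loadImage(canvas, 'photo.png', 0, 0, 320, 240, function() {
   *     // the canvas now holds the image scaled to 320x240
   *   });
   */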
}());

(function() {
  /**
   * DisjointSet utility with path compression. Some applications involve
   * grouping n distinct objects into a collection of disjoint sets. Two
   * important operations are then finding which set a given object belongs
   * to and uniting two sets. A disjoint set data structure maintains a
   * collection S = {S1, S2, ..., Sk} of disjoint dynamic sets. Each set is
   * identified by a representative, which is usually a member of the set.
   * @static
   * @constructor
   */
  tracking.DisjointSet = function(length) {
    if (length === undefined) {
      throw new Error('DisjointSet length not specified.');
    }
    this.length = length;
    this.parent = new Uint32Array(length);
    for (var i = 0; i < length; i++) {
      this.parent[i] = i;
    }
  };

  /**
   * Holds the length of the internal set.
   * @type {number}
   */
  tracking.DisjointSet.prototype.length = null;

  /**
   * Holds the set containing the representative values.
   * @type {Array.<number>}
   */
  tracking.DisjointSet.prototype.parent = null;

  /**
   * Finds a pointer to the representative of the set containing i.
   * @param {number} i
   * @return {number} The representative of the set containing i.
   */
  tracking.DisjointSet.prototype.find = function(i) {
    if (this.parent[i] === i) {
      return i;
    } else {
      return (this.parent[i] = this.find(this.parent[i]));
    }
  };

  /**
   * Unites the two dynamic sets containing objects i and j, say Si and Sj,
   * into a new set Si ∪ Sj, assuming that Si ∩ Sj = ∅.
   * @param {number} i
   * @param {number} j
   */
  tracking.DisjointSet.prototype.union = function(i, j) {
    var iRepresentative = this.find(i);
    var jRepresentative = this.find(j);
    this.parent[iRepresentative] = jRepresentative;
  };
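
  /*
   * Example: grouping five elements into two sets.
   *
   *   var set = new tracking.DisjointSet(5);
   *   set.union(0, 1);
   *   set.union(1, 2);             // sets: {0,1,2} {3} {4}
   *   set.union(3, 4);             // sets: {0,1,2} {3,4}
   *   set.find(0) === set.find(2); // true, same representative
   *   set.find(0) === set.find(3); // false, different sets
   */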
}());

(function() {
  /**
   * Image utility.
   * @static
   * @constructor
   */
  tracking.Image = {};

  /**
   * Computes gaussian blur. Adapted from
   * https://github.com/kig/canvasfilters.
   * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {number} diameter Gaussian blur diameter, must be greater than 1.
   * @return {array} The blurred pixels in a linear [r,g,b,a,...] array.
   */
  tracking.Image.blur = function(pixels, width, height, diameter) {
    diameter = Math.abs(diameter);
    if (diameter <= 1) {
      throw new Error('Diameter should be greater than 1.');
    }
    var radius = diameter / 2;
    var len = Math.ceil(diameter) + (1 - (Math.ceil(diameter) % 2));
    var weights = new Float32Array(len);
    var rho = (radius + 0.5) / 3;
    var rhoSq = rho * rho;
    var gaussianFactor = 1 / Math.sqrt(2 * Math.PI * rhoSq);
    var rhoFactor = -1 / (2 * rhoSq);
    var wsum = 0;
    var middle = Math.floor(len / 2);
    for (var i = 0; i < len; i++) {
      var x = i - middle;
      var gx = gaussianFactor * Math.exp(x * x * rhoFactor);
      weights[i] = gx;
      wsum += gx;
    }
    // Normalize the weights so they sum to 1.
    for (var j = 0; j < weights.length; j++) {
      weights[j] /= wsum;
    }
    return this.separableConvolve(pixels, width, height, weights, weights, false);
  };
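
  /*
   * Usage sketch on canvas pixel data (the diameter must be greater than 1):
   *
   *   var context = canvas.getContext('2d');
   *   var imageData = context.getImageData(0, 0, canvas.width, canvas.height);
   *   var blurred = tracking.Image.blur(imageData.data, canvas.width, canvas.height, 5);
   *   // `blurred` is a Float32Array in the same [r,g,b,a,...] layout
   */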

  /**
   * Computes the integral image for summed, squared, rotated and sobel pixels.
   * @param {array} pixels The pixels in a linear [r,g,b,a,...] array to loop
   *     through.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {array} opt_integralImage Empty array of size `width * height` to
   *     be filled with the integral image values. If not specified computing
   *     the sum values will be skipped.
   * @param {array} opt_integralImageSquare Empty array of size `width *
   *     height` to be filled with the integral image squared values. If not
   *     specified computing the squared values will be skipped.
   * @param {array} opt_tiltedIntegralImage Empty array of size `width *
   *     height` to be filled with the rotated integral image values. If not
   *     specified computing the tilted sum values will be skipped.
   * @param {array} opt_integralImageSobel Empty array of size `width *
   *     height` to be filled with the integral image of sobel values. If not
   *     specified the sobel filtering will be skipped.
   * @static
   */
  tracking.Image.computeIntegralImage = function(pixels, width, height, opt_integralImage, opt_integralImageSquare, opt_tiltedIntegralImage, opt_integralImageSobel) {
    if (arguments.length < 4) {
      throw new Error('You should specify at least one output array in the order: sum, square, tilted, sobel.');
    }
    var pixelsSobel;
    if (opt_integralImageSobel) {
      pixelsSobel = tracking.Image.sobel(pixels, width, height);
    }
    for (var i = 0; i < height; i++) {
      for (var j = 0; j < width; j++) {
        var w = i * width * 4 + j * 4;
        // Grayscale the pixel using the REC 601 luma weights.
        var pixel = ~~(pixels[w] * 0.299 + pixels[w + 1] * 0.587 + pixels[w + 2] * 0.114);
        if (opt_integralImage) {
          this.computePixelValueSAT_(opt_integralImage, width, i, j, pixel);
        }
        if (opt_integralImageSquare) {
          this.computePixelValueSAT_(opt_integralImageSquare, width, i, j, pixel * pixel);
        }
        if (opt_tiltedIntegralImage) {
          var w1 = w - width * 4;
          var pixelAbove = ~~(pixels[w1] * 0.299 + pixels[w1 + 1] * 0.587 + pixels[w1 + 2] * 0.114);
          this.computePixelValueRSAT_(opt_tiltedIntegralImage, width, i, j, pixel, pixelAbove || 0);
        }
        if (opt_integralImageSobel) {
          this.computePixelValueSAT_(opt_integralImageSobel, width, i, j, pixelsSobel[w]);
        }
      }
    }
  };

  /**
   * Helper method to compute the rotated summed area table (RSAT) by the
   * formula:
   *
   * RSAT(x, y) = RSAT(x-1, y-1) + RSAT(x+1, y-1) - RSAT(x, y-2) + I(x, y) + I(x, y-1)
   *
   * @param {array} RSAT Array of size `width * height` to be filled with
   *     the rotated integral image values.
   * @param {number} width The image width.
   * @param {number} i Vertical position of the pixel to be evaluated.
   * @param {number} j Horizontal position of the pixel to be evaluated.
   * @param {number} pixel Pixel value to be added to the integral image.
   * @param {number} pixelAbove Pixel value of the row above.
   * @static
   * @private
   */
  tracking.Image.computePixelValueRSAT_ = function(RSAT, width, i, j, pixel, pixelAbove) {
    var w = i * width + j;
    RSAT[w] = (RSAT[w - width - 1] || 0) + (RSAT[w - width + 1] || 0) - (RSAT[w - width - width] || 0) + pixel + pixelAbove;
  };

  /**
   * Helper method to compute the summed area table (SAT) by the formula:
   *
   * SAT(x, y) = SAT(x, y-1) + SAT(x-1, y) + I(x, y) - SAT(x-1, y-1)
   *
   * @param {array} SAT Array of size `width * height` to be filled with
   *     the integral image values.
   * @param {number} width The image width.
   * @param {number} i Vertical position of the pixel to be evaluated.
   * @param {number} j Horizontal position of the pixel to be evaluated.
   * @param {number} pixel Pixel value to be added to the integral image.
   * @static
   * @private
   */
  tracking.Image.computePixelValueSAT_ = function(SAT, width, i, j, pixel) {
    var w = i * width + j;
    SAT[w] = (SAT[w - width] || 0) + (SAT[w - 1] || 0) + pixel - (SAT[w - width - 1] || 0);
  };
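
  /*
   * With the SAT filled, the sum of the pixels inside any block can be
   * recovered with four lookups instead of one per pixel. This sketch
   * mirrors how tracking.ViolaJones below reads block sums:
   *
   *   var wbA = i * width + j;             // top-left corner
   *   var wbB = wbA + blockWidth;          // top-right corner
   *   var wbD = wbA + blockHeight * width; // bottom-left corner
   *   var wbC = wbD + blockWidth;          // bottom-right corner
   *   var sum = SAT[wbA] - SAT[wbB] - SAT[wbD] + SAT[wbC];
   */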

  /**
   * Converts a color from a color-space based on an RGB color model to a
   * grayscale representation of its luminance. The coefficients represent
   * the measured intensity perception of typical trichromat humans; in
   * particular, human vision is most sensitive to green and least sensitive
   * to blue.
   * @param {Uint8Array|Uint8ClampedArray|Array} pixels The pixels in a
   *     linear [r,g,b,a,...] array.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {boolean} fillRGBA If the result should fill all RGBA values
   *     with the grayscale values, instead of returning a single value per
   *     pixel.
   * @return {Uint8Array} The grayscale pixels in a linear array
   *     ([p,p,p,a,...] if fillRGBA is true and [p1, p2, p3, ...] if
   *     fillRGBA is false).
   * @static
   */
  tracking.Image.grayscale = function(pixels, width, height, fillRGBA) {
    /*
      Performance result (rough estimate; image size and CPU arch. will affect):
      https://jsperf.com/tracking-new-image-to-grayscale

      Firefox v.60b:      fillRGBA   Gray only
        Old                     11         551   OPs/sec
        New                   3548        6487   OPs/sec
        ------------------------------------------------
        Speedup             322.5x       11.8x

      Chrome v.67b:       fillRGBA   Gray only
        Old                    291         489   OPs/sec
        New                   6975        6635   OPs/sec
        ------------------------------------------------
        Speedup              24.0x       13.6x

      - Ken Nilsen / epistemex
    */
    var len = pixels.length >> 2;
    var gray = fillRGBA ? new Uint32Array(len) : new Uint8Array(len);
    var data32 = new Uint32Array(pixels.buffer || new Uint8Array(pixels).buffer);
    var i = 0;
    var c = 0;
    var luma = 0;

    // Unrolled loops to avoid checking fillRGBA on each iteration.
    if (fillRGBA) {
      while (i < len) {
        // Entire pixel in little-endian order (ABGR).
        c = data32[i];
        // Using the more up-to-date REC/BT.709 approx. weights for luma
        // instead: [0.2126, 0.7152, 0.0722].
        //   luma = ((c>>>16 & 0xff) * 0.2126 + (c>>>8 & 0xff) * 0.7152 + (c & 0xff) * 0.0722 + 0.5)|0;
        // But scaled integers (x 0xffff) are used here for speed. This can
        // be improved further using powers of two close to the factors,
        // allowing for shift-ops (i.e. 4732 -> 4096 => .. (c&0xff) << 12 ..)
        // if "accuracy" is not important (luma is a visual approx. anyway):
        luma = ((c >>> 16 & 0xff) * 13933 + (c >>> 8 & 0xff) * 46871 + (c & 0xff) * 4732) >>> 16;
        gray[i++] = luma * 0x10101 | c & 0xff000000;
      }
    } else {
      while (i < len) {
        c = data32[i];
        luma = ((c >>> 16 & 0xff) * 13933 + (c >>> 8 & 0xff) * 46871 + (c & 0xff) * 4732) >>> 16;
        // Ideally, alpha should affect the value here: value * (alpha/255),
        // or with shift-ops as in the version above.
        gray[i++] = luma;
      }
    }
    // Consolidate array view to byte component format independent of source view.
    return new Uint8Array(gray.buffer);
  };
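
  /*
   * Usage sketch on canvas pixel data:
   *
   *   var imageData = context.getImageData(0, 0, width, height);
   *   var gray = tracking.Image.grayscale(imageData.data, width, height, false);
   *   // gray.length === width * height, one luma byte per pixel
   */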

  /**
   * Fast horizontal separable convolution. A point spread function (PSF) is
   * said to be separable if it can be broken into two one-dimensional
   * signals: a vertical and a horizontal projection. The convolution is
   * performed by sliding the kernel over the image, generally starting at
   * the top left corner, so as to move the kernel through all the positions
   * where the kernel fits entirely within the boundaries of the image.
   * Adapted from https://github.com/kig/canvasfilters.
   * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {array} weightsVector The weighting vector, e.g [-1,0,1].
   * @param {number} opaque
   * @return {array} The convolved pixels in a linear [r,g,b,a,...] array.
   */
  tracking.Image.horizontalConvolve = function(pixels, width, height, weightsVector, opaque) {
    var side = weightsVector.length;
    var halfSide = Math.floor(side / 2);
    var output = new Float32Array(width * height * 4);
    var alphaFac = opaque ? 1 : 0;

    for (var y = 0; y < height; y++) {
      for (var x = 0; x < width; x++) {
        var sy = y;
        var sx = x;
        var offset = (y * width + x) * 4;
        var r = 0;
        var g = 0;
        var b = 0;
        var a = 0;
        for (var cx = 0; cx < side; cx++) {
          var scy = sy;
          // Clamp the sample column to the image boundary.
          var scx = Math.min(width - 1, Math.max(0, sx + cx - halfSide));
          var poffset = (scy * width + scx) * 4;
          var wt = weightsVector[cx];
          r += pixels[poffset] * wt;
          g += pixels[poffset + 1] * wt;
          b += pixels[poffset + 2] * wt;
          a += pixels[poffset + 3] * wt;
        }
        output[offset] = r;
        output[offset + 1] = g;
        output[offset + 2] = b;
        output[offset + 3] = a + alphaFac * (255 - a);
      }
    }
    return output;
  };

  /**
   * Fast vertical separable convolution. A point spread function (PSF) is
   * said to be separable if it can be broken into two one-dimensional
   * signals: a vertical and a horizontal projection. The convolution is
   * performed by sliding the kernel over the image, generally starting at
   * the top left corner, so as to move the kernel through all the positions
   * where the kernel fits entirely within the boundaries of the image.
   * Adapted from https://github.com/kig/canvasfilters.
   * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {array} weightsVector The weighting vector, e.g [-1,0,1].
   * @param {number} opaque
   * @return {array} The convolved pixels in a linear [r,g,b,a,...] array.
   */
  tracking.Image.verticalConvolve = function(pixels, width, height, weightsVector, opaque) {
    var side = weightsVector.length;
    var halfSide = Math.floor(side / 2);
    var output = new Float32Array(width * height * 4);
    var alphaFac = opaque ? 1 : 0;

    for (var y = 0; y < height; y++) {
      for (var x = 0; x < width; x++) {
        var sy = y;
        var sx = x;
        var offset = (y * width + x) * 4;
        var r = 0;
        var g = 0;
        var b = 0;
        var a = 0;
        for (var cy = 0; cy < side; cy++) {
          // Clamp the sample row to the image boundary.
          var scy = Math.min(height - 1, Math.max(0, sy + cy - halfSide));
          var scx = sx;
          var poffset = (scy * width + scx) * 4;
          var wt = weightsVector[cy];
          r += pixels[poffset] * wt;
          g += pixels[poffset + 1] * wt;
          b += pixels[poffset + 2] * wt;
          a += pixels[poffset + 3] * wt;
        }
        output[offset] = r;
        output[offset + 1] = g;
        output[offset + 2] = b;
        output[offset + 3] = a + alphaFac * (255 - a);
      }
    }
    return output;
  };

  /**
   * Fast separable convolution. A point spread function (PSF) is said to be
   * separable if it can be broken into two one-dimensional signals: a
   * vertical and a horizontal projection. The convolution is performed by
   * sliding the kernel over the image, generally starting at the top left
   * corner, so as to move the kernel through all the positions where the
   * kernel fits entirely within the boundaries of the image. Adapted from
   * https://github.com/kig/canvasfilters.
   * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {array} horizWeights The horizontal weighting vector, e.g [-1,0,1].
   * @param {array} vertWeights The vertical weighting vector, e.g [-1,0,1].
   * @param {number} opaque
   * @return {array} The convolved pixels in a linear [r,g,b,a,...] array.
   */
  tracking.Image.separableConvolve = function(pixels, width, height, horizWeights, vertWeights, opaque) {
    var vertical = this.verticalConvolve(pixels, width, height, vertWeights, opaque);
    return this.horizontalConvolve(vertical, width, height, horizWeights, opaque);
  };
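
  /*
   * Example: a 3-tap box blur expressed as two one-dimensional passes. A
   * full 2-D kernel costs side * side multiplies per pixel; the separable
   * form costs only 2 * side:
   *
   *   var box = new Float32Array([1 / 3, 1 / 3, 1 / 3]);
   *   var smoothed = tracking.Image.separableConvolve(pixels, width, height, box, box, true);
   */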

  /**
   * Computes image edges using the Sobel operator. Computes the vertical
   * and horizontal gradients of the image and combines the computed images
   * to find edges in the image. The Sobel filter is implemented here by
   * first grayscaling the image, then taking the horizontal and vertical
   * gradients, and finally combining the gradient images to make up the
   * final image. Adapted from https://github.com/kig/canvasfilters.
   * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @return {array} The edge pixels in a linear [r,g,b,a,...] array.
   */
  tracking.Image.sobel = function(pixels, width, height) {
    pixels = this.grayscale(pixels, width, height, true);
    var output = new Float32Array(width * height * 4);
    var sobelSignVector = new Float32Array([-1, 0, 1]);
    var sobelScaleVector = new Float32Array([1, 2, 1]);
    var vertical = this.separableConvolve(pixels, width, height, sobelSignVector, sobelScaleVector);
    var horizontal = this.separableConvolve(pixels, width, height, sobelScaleVector, sobelSignVector);

    for (var i = 0; i < output.length; i += 4) {
      var v = vertical[i];
      var h = horizontal[i];
      // Gradient magnitude from the two directional derivatives.
      var p = Math.sqrt(h * h + v * v);
      output[i] = p;
      output[i + 1] = p;
      output[i + 2] = p;
      output[i + 3] = 255;
    }
    return output;
  };

  /**
   * Equalizes the histogram of a grayscale image, normalizing the
   * brightness and increasing the contrast of the image.
   * @param {pixels} pixels The grayscale pixels in a linear array.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @return {array} The equalized grayscale pixels in a linear array.
   */
  tracking.Image.equalizeHist = function(pixels, width, height) {
    var equalized = new Uint8ClampedArray(pixels.length);

    // Build the intensity histogram.
    var histogram = new Array(256);
    for (var i = 0; i < 256; i++) {
      histogram[i] = 0;
    }
    for (var i = 0; i < pixels.length; i++) {
      histogram[pixels[i]]++;
    }

    // Turn the histogram into a cumulative distribution function (bin 0
    // already holds its own count, so accumulation starts at bin 1).
    for (var i = 1; i < 256; i++) {
      histogram[i] += histogram[i - 1];
    }

    // Map each pixel through the normalized CDF.
    var norm = 255 / pixels.length;
    for (var i = 0; i < pixels.length; i++) {
      equalized[i] = (histogram[pixels[i]] * norm + 0.5) | 0;
    }
    return equalized;
  };
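
  /*
   * Usage sketch, chained after grayscale:
   *
   *   var gray = tracking.Image.grayscale(imageData.data, width, height, false);
   *   var flat = tracking.Image.equalizeHist(gray, width, height);
   *   // `flat` has the same length as `gray`, with the contrast stretched
   */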
}());

(function() {
  /**
   * ViolaJones utility.
   * @static
   * @constructor
   */
  tracking.ViolaJones = {};

  /**
   * Holds the minimum area of intersection that defines when a rectangle
   * belongs to the same group. Often when a face is matched, multiple
   * rectangles are classified as possible rectangles to represent the face;
   * when they intersect they are grouped as one face.
   * @type {number}
   * @default 0.5
   * @static
   */
  tracking.ViolaJones.REGIONS_OVERLAP = 0.5;

  /**
   * Holds the HAAR cascade classifiers converted from OpenCV training.
   * @type {array}
   * @static
   */
  tracking.ViolaJones.classifiers = {};

  /**
   * Detects rectangle matches through the HAAR cascade data.
   * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {number} initialScale The initial scale to start the block
   *     scaling.
   * @param {number} scaleFactor The scale factor to scale the feature block.
   * @param {number} stepSize The block step size.
   * @param {number} edgesDensity Percentage density of edges inside the
   *     classifier block. Value from [0.0, 1.0], defaults to 0.2. If
   *     specified, edge detection will be applied to the image to prune
   *     dead areas of the image; this can significantly improve performance.
   * @param {array} data The HAAR cascade data.
   * @return {array} Found rectangles.
   * @static
   */
  tracking.ViolaJones.detect = function(pixels, width, height, initialScale, scaleFactor, stepSize, edgesDensity, data) {
    var total = 0;
    var rects = [];
    var integralImage = new Int32Array(width * height);
    var integralImageSquare = new Int32Array(width * height);
    var tiltedIntegralImage = new Int32Array(width * height);
    var integralImageSobel;
    if (edgesDensity > 0) {
      integralImageSobel = new Int32Array(width * height);
    }
    tracking.Image.computeIntegralImage(pixels, width, height, integralImage, integralImageSquare, tiltedIntegralImage, integralImageSobel);

    var minWidth = data[0];
    var minHeight = data[1];
    var scale = initialScale * scaleFactor;
    var blockWidth = (scale * minWidth) | 0;
    var blockHeight = (scale * minHeight) | 0;

    // Slide a window over the image at increasing scales, collecting every
    // block that passes all cascade stages.
    while (blockWidth < width && blockHeight < height) {
      var step = (scale * stepSize + 0.5) | 0;
      for (var i = 0; i < (height - blockHeight); i += step) {
        for (var j = 0; j < (width - blockWidth); j += step) {
          if (edgesDensity > 0) {
            if (this.isTriviallyExcluded(edgesDensity, integralImageSobel, i, j, width, blockWidth, blockHeight)) {
              continue;
            }
          }
          if (this.evalStages_(data, integralImage, integralImageSquare, tiltedIntegralImage, i, j, width, blockWidth, blockHeight, scale)) {
            rects[total++] = {
              width: blockWidth,
              height: blockHeight,
              x: j,
              y: i
            };
          }
        }
      }
      scale *= scaleFactor;
      blockWidth = (scale * minWidth) | 0;
      blockHeight = (scale * minHeight) | 0;
    }
    return this.mergeRectangles_(rects);
  };
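
  /*
   * Usage sketch. The classifier data arrays are registered by the training
   * data files shipped with the library (e.g. an entry under
   * tracking.ViolaJones.classifiers); the parameter values below are
   * illustrative only:
   *
   *   var faces = tracking.ViolaJones.detect(
   *     imageData.data, width, height,
   *     1.0,  // initialScale
   *     1.25, // scaleFactor
   *     1.5,  // stepSize
   *     0.2,  // edgesDensity
   *     tracking.ViolaJones.classifiers.face);
   *   // => [{x: ..., y: ..., width: ..., height: ..., total: ...}, ...]
   */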

  /**
   * Fast check to test whether the edge density inside the block is greater
   * than a threshold; if true the stages are tested. This can significantly
   * improve performance.
   * @param {number} edgesDensity Percentage density of edges inside the
   *     classifier block.
   * @param {array} integralImageSobel The integral image of a sobel image.
   * @param {number} i Vertical position of the pixel to be evaluated.
   * @param {number} j Horizontal position of the pixel to be evaluated.
   * @param {number} width The image width.
   * @param {number} blockWidth The block width.
   * @param {number} blockHeight The block height.
   * @return {boolean} True when the block at position i,j can be skipped,
   *     false otherwise.
   * @static
   * @protected
   */
  tracking.ViolaJones.isTriviallyExcluded = function(edgesDensity, integralImageSobel, i, j, width, blockWidth, blockHeight) {
    var wbA = i * width + j;
    var wbB = wbA + blockWidth;
    var wbD = wbA + blockHeight * width;
    var wbC = wbD + blockWidth;
    // Sum of sobel values inside the block via four SAT lookups, normalized
    // by the maximum possible edge response (255 per pixel).
    var blockEdgesDensity = (integralImageSobel[wbA] - integralImageSobel[wbB] - integralImageSobel[wbD] + integralImageSobel[wbC]) / (blockWidth * blockHeight * 255);
    return blockEdgesDensity < edgesDensity;
  };

  /**
   * Evaluates if the block at position i,j of the given size is a valid
   * HAAR cascade stage.
   * @param {array} data The HAAR cascade data.
   * @param {array} integralImage The integral image.
   * @param {array} integralImageSquare The integral image of squared values.
   * @param {array} tiltedIntegralImage The rotated integral image.
   * @param {number} i Vertical position of the pixel to be evaluated.
   * @param {number} j Horizontal position of the pixel to be evaluated.
   * @param {number} width The image width.
   * @param {number} blockWidth The block width.
   * @param {number} blockHeight The block height.
   * @param {number} scale The scale factor between the block size and its
   *     original size.
   * @return {boolean} Whether the region passes all the stage tests.
   * @private
   * @static
   */
  tracking.ViolaJones.evalStages_ = function(data, integralImage, integralImageSquare, tiltedIntegralImage, i, j, width, blockWidth, blockHeight, scale) {
    var inverseArea = 1.0 / (blockWidth * blockHeight);
    var wbA = i * width + j;
    var wbB = wbA + blockWidth;
    var wbD = wbA + blockHeight * width;
    var wbC = wbD + blockWidth;
    // Normalize the feature responses by the block's standard deviation.
    var mean = (integralImage[wbA] - integralImage[wbB] - integralImage[wbD] + integralImage[wbC]) * inverseArea;
    var variance = (integralImageSquare[wbA] - integralImageSquare[wbB] - integralImageSquare[wbD] + integralImageSquare[wbC]) * inverseArea - mean * mean;
    var standardDeviation = 1;
    if (variance > 0) {
      standardDeviation = Math.sqrt(variance);
    }
    var length = data.length;
    for (var w = 2; w < length; ) {
      var stageSum = 0;
      var stageThreshold = data[w++];
      var nodeLength = data[w++];
      while (nodeLength--) {
        var rectsSum = 0;
        var tilted = data[w++];
        var rectsLength = data[w++];
        for (var r = 0; r < rectsLength; r++) {
          var rectLeft = (j + data[w++] * scale + 0.5) | 0;
          var rectTop = (i + data[w++] * scale + 0.5) | 0;
          var rectWidth = (data[w++] * scale + 0.5) | 0;
          var rectHeight = (data[w++] * scale + 0.5) | 0;
          var rectWeight = data[w++];
          var w1;
          var w2;
          var w3;
          var w4;
          if (tilted) {
            // RectSum(r) = RSAT(x-h+w, y+w+h-1) + RSAT(x, y-1) - RSAT(x-h, y+h-1) - RSAT(x+w, y+w-1)
            w1 = (rectLeft - rectHeight + rectWidth) + (rectTop + rectWidth + rectHeight - 1) * width;
            w2 = rectLeft + (rectTop - 1) * width;
            w3 = (rectLeft - rectHeight) + (rectTop + rectHeight - 1) * width;
            w4 = (rectLeft + rectWidth) + (rectTop + rectWidth - 1) * width;
            rectsSum += (tiltedIntegralImage[w1] + tiltedIntegralImage[w2] - tiltedIntegralImage[w3] - tiltedIntegralImage[w4]) * rectWeight;
          } else {
            // RectSum(r) = SAT(x-1, y-1) + SAT(x+w-1, y+h-1) - SAT(x-1, y+h-1) - SAT(x+w-1, y-1)
            w1 = rectTop * width + rectLeft;
            w2 = w1 + rectWidth;
            w3 = w1 + rectHeight * width;
            w4 = w3 + rectWidth;
            rectsSum += (integralImage[w1] - integralImage[w2] - integralImage[w3] + integralImage[w4]) * rectWeight;
            // TODO: Review the code below to analyze performance when using it instead.
            // w1 = (rectLeft - 1) + (rectTop - 1) * width;
            // w2 = (rectLeft + rectWidth - 1) + (rectTop + rectHeight - 1) * width;
            // w3 = (rectLeft - 1) + (rectTop + rectHeight - 1) * width;
            // w4 = (rectLeft + rectWidth - 1) + (rectTop - 1) * width;
            // rectsSum += (integralImage[w1] + integralImage[w2] - integralImage[w3] - integralImage[w4]) * rectWeight;
          }
        }
        var nodeThreshold = data[w++];
        var nodeLeft = data[w++];
        var nodeRight = data[w++];
        // Each weak classifier votes left or right of its threshold.
        if (rectsSum * inverseArea < nodeThreshold * standardDeviation) {
          stageSum += nodeLeft;
        } else {
          stageSum += nodeRight;
        }
      }
      // Reject the block as soon as one stage fails.
      if (stageSum < stageThreshold) {
        return false;
      }
    }
    return true;
  };
  /**
   * Postprocess the detected sub-windows in order to combine overlapping
   * detections into a single detection.
   * @param {array} rects
   * @return {array}
   * @private
   * @static
   */
  tracking.ViolaJones.mergeRectangles_ = function(rects) {
    var disjointSet = new tracking.DisjointSet(rects.length);
    for (var i = 0; i < rects.length; i++) {
      var r1 = rects[i];
      for (var j = 0; j < rects.length; j++) {
        var r2 = rects[j];
        if (tracking.Math.intersectRect(r1.x, r1.y, r1.x + r1.width, r1.y + r1.height, r2.x, r2.y, r2.x + r2.width, r2.y + r2.height)) {
          var x1 = Math.max(r1.x, r2.x);
          var y1 = Math.max(r1.y, r2.y);
          var x2 = Math.min(r1.x + r1.width, r2.x + r2.width);
          var y2 = Math.min(r1.y + r1.height, r2.y + r2.height);
          var overlap = (x1 - x2) * (y1 - y2);
          var area1 = (r1.width * r1.height);
          var area2 = (r2.width * r2.height);
          if ((overlap / (area1 * (area1 / area2)) >= this.REGIONS_OVERLAP) &&
            (overlap / (area2 * (area1 / area2)) >= this.REGIONS_OVERLAP)) {
            disjointSet.union(i, j);
          }
        }
      }
    }
    var map = {};
    for (var k = 0; k < disjointSet.length; k++) {
      var rep = disjointSet.find(k);
      if (!map[rep]) {
        map[rep] = {
          total: 1,
          width: rects[k].width,
          height: rects[k].height,
          x: rects[k].x,
          y: rects[k].y
        };
        continue;
      }
      map[rep].total++;
      map[rep].width += rects[k].width;
      map[rep].height += rects[k].height;
      map[rep].x += rects[k].x;
      map[rep].y += rects[k].y;
    }
    var result = [];
    Object.keys(map).forEach(function(key) {
      var rect = map[key];
      result.push({
        total: rect.total,
        width: (rect.width / rect.total + 0.5) | 0,
        height: (rect.height / rect.total + 0.5) | 0,
        x: (rect.x / rect.total + 0.5) | 0,
        y: (rect.y / rect.total + 0.5) | 0
      });
    });
    return result;
  };
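  // Hedged sketch of the tracking.DisjointSet API relied on above (the indexes
  // are made up for illustration): overlapping detections are unioned into one
  // group, then each group is averaged into a single rectangle.
  function exampleDisjointSetMerge() {
    var set = new tracking.DisjointSet(3);
    set.union(0, 1);                              // detections 0 and 1 overlap enough
    var sameGroup = set.find(0) === set.find(1);  // true, they share a representative
    var otherGroup = set.find(2);                 // detection 2 stays in its own group
    return [sameGroup, otherGroup];
  }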
}());
(function() {
  /**
   * BRIEF stands for "Binary Robust Independent Elementary Features". This
   * method generates a binary string for each keypoint found by an extractor
   * method.
   * @static
   * @constructor
   */
  tracking.Brief = {};
  /**
   * The set of binary tests is defined by the N (x,y)-location pairs
   * uniquely chosen during the initialization. Typical values are N = 128,
   * 256 or 512; N = 128 already yields a good compromise between speed,
   * storage efficiency and recognition rate.
   * @type {number}
   */
  tracking.Brief.N = 512;
  /**
   * Caches coordinate values of (x,y)-location pairs uniquely chosen during
   * the initialization.
   * @type {Object.<number, Int32Array>}
   * @private
   * @static
   */
  tracking.Brief.randomImageOffsets_ = {};
  /**
   * Caches delta values of (x,y)-location pairs uniquely chosen during
   * the initialization.
   * @type {Int32Array}
   * @private
   * @static
   */
  tracking.Brief.randomWindowOffsets_ = null;
  /**
   * Generates a binary string for each keypoint extracted by an extractor
   * method.
   * @param {array} pixels The grayscale pixels in a linear [p1,p2,...] array.
   * @param {number} width The image width.
   * @param {array} keypoints
   * @return {Int32Array} Returns an array where each sequence of N/32 integer
   *     values represents the binary descriptor string (N bits) that
   *     describes the corner, e.g. [0,0,0,0, 0,0,0,0, ...].
   * @static
   */
  tracking.Brief.getDescriptors = function(pixels, width, keypoints) {
    // Optimizing divide by 32 operation using binary shift
    // (this.N >> 5) === this.N/32.
    var descriptors = new Int32Array((keypoints.length >> 1) * (this.N >> 5));
    var descriptorWord = 0;
    var offsets = this.getRandomOffsets_(width);
    var position = 0;
    for (var i = 0; i < keypoints.length; i += 2) {
      var w = width * keypoints[i + 1] + keypoints[i];
      var offsetsPosition = 0;
      for (var j = 0, n = this.N; j < n; j++) {
        if (pixels[offsets[offsetsPosition++] + w] < pixels[offsets[offsetsPosition++] + w]) {
          // The bit in position `j % 32` of descriptorWord should be set to 1.
          // We do this by OR-ing descriptorWord with a binary number whose only
          // set bit is the one in that position, obtained by shifting 1 left
          // by `j % 32` (which equals `j & 31`) positions.
          descriptorWord |= 1 << (j & 31);
        }
        // If the next j is a multiple of 32, we will need to use a new
        // descriptor word to hold the next results.
        if (!((j + 1) & 31)) {
          descriptors[position++] = descriptorWord;
          descriptorWord = 0;
        }
      }
    }
    return descriptors;
  };
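  // Hedged usage sketch combining the FAST detector defined later in this file
  // with BRIEF; the `grayPixels` input is assumed to be produced elsewhere
  // (e.g. by a grayscale conversion step).
  function exampleBriefDescriptors(grayPixels, width, height) {
    var corners = tracking.Fast.findCorners(grayPixels, width, height);
    // One descriptor of N bits (N/32 ints) per (x,y) keypoint pair.
    return tracking.Brief.getDescriptors(grayPixels, width, corners);
  }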
  /**
   * Matches sets of features {mi} and {m′j} extracted from two images taken
   * from similar, and often successive, viewpoints. A classical procedure
   * runs as follows. For each point {mi} in the first image, search in a
   * region of the second image around location {mi} for point {m′j}. The
   * search is based on the similarity of the local image windows, also known
   * as kernel windows, centered on the points, which strongly characterizes
   * the points when the images are sufficiently close. Once each keypoint is
   * described with its binary string, it needs to be compared with the
   * closest matching point. The distance metric is critical to matching
   * performance; using binary strings reduces the size of the descriptor and
   * provides a data structure that is fast to operate on and whose
   * similarity can be measured by the Hamming distance.
   * @param {array} keypoints1
   * @param {array} descriptors1
   * @param {array} keypoints2
   * @param {array} descriptors2
   * @return {array} Returns an array with one match object per keypoint of
   *     the first image, each holding `index1`, `index2`, `keypoint1`,
   *     `keypoint2` and a `confidence` score in [0, 1] derived from the
   *     Hamming distance.
   * @static
   */
  tracking.Brief.match = function(keypoints1, descriptors1, keypoints2, descriptors2) {
    var len1 = keypoints1.length >> 1;
    var len2 = keypoints2.length >> 1;
    var matches = new Array(len1);
    for (var i = 0; i < len1; i++) {
      var min = Infinity;
      var minj = 0;
      for (var j = 0; j < len2; j++) {
        var dist = 0;
        // Optimizing divide by 32 operation using binary shift
        // (this.N >> 5) === this.N/32.
        for (var k = 0, n = this.N >> 5; k < n; k++) {
          dist += tracking.Math.hammingWeight(descriptors1[i * n + k] ^ descriptors2[j * n + k]);
        }
        if (dist < min) {
          min = dist;
          minj = j;
        }
      }
      matches[i] = {
        index1: i,
        index2: minj,
        keypoint1: [keypoints1[2 * i], keypoints1[2 * i + 1]],
        keypoint2: [keypoints2[2 * minj], keypoints2[2 * minj + 1]],
        confidence: 1 - min / this.N
      };
    }
    return matches;
  };
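  // Hedged sketch: matching descriptors between two frames (inputs assumed to
  // come from tracking.Fast.findCorners / tracking.Brief.getDescriptors).
  function exampleBriefMatch(corners1, descriptors1, corners2, descriptors2) {
    var matches = tracking.Brief.match(corners1, descriptors1, corners2, descriptors2);
    // Keep only confident matches; 0.9 is an arbitrary illustrative cutoff.
    return matches.filter(function(m) {
      return m.confidence > 0.9;
    });
  }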
  /**
   * Removes match outliers by testing the matches in both directions and
   * keeping only the reciprocal ones.
   * @param {array} keypoints1
   * @param {array} descriptors1
   * @param {array} keypoints2
   * @param {array} descriptors2
   * @return {array} Returns an array of match objects in the same format as
   *     `tracking.Brief.match`, restricted to matches that agree in both
   *     directions.
   * @static
   */
  tracking.Brief.reciprocalMatch = function(keypoints1, descriptors1, keypoints2, descriptors2) {
    var matches = [];
    if (keypoints1.length === 0 || keypoints2.length === 0) {
      return matches;
    }
    var matches1 = tracking.Brief.match(keypoints1, descriptors1, keypoints2, descriptors2);
    var matches2 = tracking.Brief.match(keypoints2, descriptors2, keypoints1, descriptors1);
    for (var i = 0; i < matches1.length; i++) {
      if (matches2[matches1[i].index2].index2 === i) {
        matches.push(matches1[i]);
      }
    }
    return matches;
  };
  /**
   * Gets the coordinate values of the (x,y)-location pairs uniquely chosen
   * during the initialization.
   * @return {array} Array with the random offset values.
   * @private
   */
  tracking.Brief.getRandomOffsets_ = function(width) {
    if (!this.randomWindowOffsets_) {
      var windowPosition = 0;
      var windowOffsets = new Int32Array(4 * this.N);
      for (var i = 0; i < this.N; i++) {
        windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
        windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
        windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
        windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
      }
      this.randomWindowOffsets_ = windowOffsets;
    }
    if (!this.randomImageOffsets_[width]) {
      var imagePosition = 0;
      var imageOffsets = new Int32Array(2 * this.N);
      for (var j = 0; j < this.N; j++) {
        imageOffsets[imagePosition++] = this.randomWindowOffsets_[4 * j] * width + this.randomWindowOffsets_[4 * j + 1];
        imageOffsets[imagePosition++] = this.randomWindowOffsets_[4 * j + 2] * width + this.randomWindowOffsets_[4 * j + 3];
      }
      this.randomImageOffsets_[width] = imageOffsets;
    }
    return this.randomImageOffsets_[width];
  };
}());
(function() {
  /**
   * FAST stands for "Features from Accelerated Segment Test". This method
   * performs a point segment test corner detection. The segment test
   * criterion operates by considering a circle of sixteen pixels around the
   * corner candidate p. The detector classifies p as a corner if there exists
   * a set of n contiguous pixels in the circle which are all brighter than
   * the intensity of the candidate pixel Ip plus a threshold t, or all
   * darker than Ip − t.
   *
   *        15 00 01
   *     14          02
   *   13              03
   *   12      []      04
   *   11              05
   *     10          06
   *        09 08 07
   *
   * For more reference:
   * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.60.3991&rep=rep1&type=pdf
   * @static
   * @constructor
   */
  tracking.Fast = {};
  /**
   * Holds the threshold to determine whether the tested pixel is brighter or
   * darker than the corner candidate p.
   * @type {number}
   * @default 40
   * @static
   */
  tracking.Fast.THRESHOLD = 40;
  /**
   * Caches coordinate values of the circle surrounding the pixel candidate p.
   * @type {Object.<number, Int32Array>}
   * @private
   * @static
   */
  tracking.Fast.circles_ = {};
  /**
   * Finds corner coordinates on the grayscale image.
   * @param {array} pixels The grayscale pixels in a linear [p1,p2,...] array.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {number} opt_threshold Optional threshold to determine whether the
   *     tested pixel is brighter or darker than the corner candidate p.
   *     Default value is 40.
   * @return {array} Array containing the coordinates of all found corners,
   *     e.g. [x0,y0,x1,y1,...], where P(x0,y0) represents a corner coordinate.
   * @static
   */
  tracking.Fast.findCorners = function(pixels, width, height, opt_threshold) {
    var circleOffsets = this.getCircleOffsets_(width);
    var circlePixels = new Int32Array(16);
    var corners = [];
    if (opt_threshold === undefined) {
      opt_threshold = this.THRESHOLD;
    }
    // When looping through the image pixels, skips the first three lines from
    // the image boundaries to constrain the surrounding circle inside the
    // image area.
    for (var i = 3; i < height - 3; i++) {
      for (var j = 3; j < width - 3; j++) {
        var w = i * width + j;
        var p = pixels[w];
        // Loops the circle offsets to read the pixel value for the sixteen
        // surrounding pixels.
        for (var k = 0; k < 16; k++) {
          circlePixels[k] = pixels[w + circleOffsets[k]];
        }
        if (this.isCorner(p, circlePixels, opt_threshold)) {
          // The pixel p is classified as a corner; as an optimization,
          // increment j by the circle radius 3 to skip the neighbor pixels
          // inside the surrounding circle. This can be removed without
          // compromising the result.
          corners.push(j, i);
          j += 3;
        }
      }
    }
    return corners;
  };
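  // Hedged usage sketch. FAST expects grayscale pixels in a linear array; the
  // conversion step shown here (tracking.Image.grayscale) is assumed to be
  // available elsewhere in this library.
  function exampleFastCorners(rgbaPixels, width, height) {
    var gray = tracking.Image.grayscale(rgbaPixels, width, height);
    // Returns [x0, y0, x1, y1, ...]; a higher threshold yields fewer corners.
    return tracking.Fast.findCorners(gray, width, height, 60);
  }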
  /**
   * Checks if the circle pixel is brighter than the candidate pixel p by
   * a threshold.
   * @param {number} circlePixel The circle pixel value.
   * @param {number} p The value of the candidate pixel p.
   * @param {number} threshold
   * @return {Boolean}
   * @static
   */
  tracking.Fast.isBrighter = function(circlePixel, p, threshold) {
    return circlePixel - p > threshold;
  };
  /**
   * Checks whether the candidate pixel p is a corner, i.e. whether a
   * contiguous arc of at least nine of the sixteen circle pixels is entirely
   * brighter or entirely darker than p by the threshold.
   * @param {number} p The value of the candidate pixel p.
   * @param {array} circlePixels The values of the sixteen circle pixels.
   * @param {number} threshold
   * @return {Boolean}
   * @static
   */
  tracking.Fast.isCorner = function(p, circlePixels, threshold) {
    if (this.isTriviallyExcluded(circlePixels, p, threshold)) {
      return false;
    }
    for (var x = 0; x < 16; x++) {
      var darker = true;
      var brighter = true;
      for (var y = 0; y < 9; y++) {
        var circlePixel = circlePixels[(x + y) & 15];
        if (!this.isBrighter(p, circlePixel, threshold)) {
          brighter = false;
          if (darker === false) {
            break;
          }
        }
        if (!this.isDarker(p, circlePixel, threshold)) {
          darker = false;
          if (brighter === false) {
            break;
          }
        }
      }
      if (brighter || darker) {
        return true;
      }
    }
    return false;
  };
  /**
   * Checks if the circle pixel is darker than the candidate pixel p by
   * a threshold.
   * @param {number} circlePixel The circle pixel value.
   * @param {number} p The value of the candidate pixel p.
   * @param {number} threshold
   * @return {Boolean}
   * @static
   */
  tracking.Fast.isDarker = function(circlePixel, p, threshold) {
    return p - circlePixel > threshold;
  };
  /**
   * Fast check to test if the candidate pixel can be trivially excluded. In
   * order to be a corner, the candidate pixel value should be darker or
   * brighter than 9-12 surrounding pixels; when fewer than three of the top,
   * bottom, left and right circle pixels are brighter or darker than the
   * candidate, it can be excluded right away, improving performance.
   * @param {array} circlePixels The values of the sixteen circle pixels.
   * @param {number} p The value of the candidate pixel p.
   * @param {number} threshold
   * @return {Boolean}
   * @static
   * @protected
   */
  tracking.Fast.isTriviallyExcluded = function(circlePixels, p, threshold) {
    var count = 0;
    var circleBottom = circlePixels[8];
    var circleLeft = circlePixels[12];
    var circleRight = circlePixels[4];
    var circleTop = circlePixels[0];
    if (this.isBrighter(circleTop, p, threshold)) {
      count++;
    }
    if (this.isBrighter(circleRight, p, threshold)) {
      count++;
    }
    if (this.isBrighter(circleBottom, p, threshold)) {
      count++;
    }
    if (this.isBrighter(circleLeft, p, threshold)) {
      count++;
    }
    if (count < 3) {
      count = 0;
      if (this.isDarker(circleTop, p, threshold)) {
        count++;
      }
      if (this.isDarker(circleRight, p, threshold)) {
        count++;
      }
      if (this.isDarker(circleBottom, p, threshold)) {
        count++;
      }
      if (this.isDarker(circleLeft, p, threshold)) {
        count++;
      }
      if (count < 3) {
        return true;
      }
    }
    return false;
  };
  /**
   * Gets the sixteen offset values of the circle surrounding pixel.
   * @param {number} width The image width.
   * @return {array} Array with the sixteen offset values of the circle
   *     surrounding pixel.
   * @private
   */
  tracking.Fast.getCircleOffsets_ = function(width) {
    if (this.circles_[width]) {
      return this.circles_[width];
    }
    var circle = new Int32Array(16);
    circle[0] = -width - width - width;
    circle[1] = circle[0] + 1;
    circle[2] = circle[1] + width + 1;
    circle[3] = circle[2] + width + 1;
    circle[4] = circle[3] + width;
    circle[5] = circle[4] + width;
    circle[6] = circle[5] + width - 1;
    circle[7] = circle[6] + width - 1;
    circle[8] = circle[7] - 1;
    circle[9] = circle[8] - 1;
    circle[10] = circle[9] - width - 1;
    circle[11] = circle[10] - width - 1;
    circle[12] = circle[11] - width;
    circle[13] = circle[12] - width;
    circle[14] = circle[13] - width + 1;
    circle[15] = circle[14] - width + 1;
    this.circles_[width] = circle;
    return circle;
  };
}());
(function() {
  /**
   * Math utility.
   * @static
   * @constructor
   */
  tracking.Math = {};
  /**
   * Euclidean distance between two points P(x0, y0) and P(x1, y1).
   * @param {number} x0 Horizontal coordinate of P0.
   * @param {number} y0 Vertical coordinate of P0.
   * @param {number} x1 Horizontal coordinate of P1.
   * @param {number} y1 Vertical coordinate of P1.
   * @return {number} The Euclidean distance.
   */
  tracking.Math.distance = function(x0, y0, x1, y1) {
    var dx = x1 - x0;
    var dy = y1 - y0;
    return Math.sqrt(dx * dx + dy * dy);
  };
  /**
   * Calculates the Hamming weight of a string, which is the number of symbols
   * that are different from the zero-symbol of the alphabet used. It is thus
   * equivalent to the Hamming distance from the all-zero string of the same
   * length. For the most typical case, a string of bits, this is the number
   * of 1's in the string.
   *
   * Example:
   *
   * <pre>
   *  Binary string     Hamming weight
   *   11101                 4
   *   11101010              5
   * </pre>
   *
   * @param {number} i Number that holds the binary string to extract the hamming weight.
   * @return {number} The hamming weight.
   */
  tracking.Math.hammingWeight = function(i) {
    i = i - ((i >> 1) & 0x55555555);
    i = (i & 0x33333333) + ((i >> 2) & 0x33333333);
    return ((i + (i >> 4) & 0xF0F0F0F) * 0x1010101) >> 24;
  };
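  // Worked example of the popcount above: 0b11101 (decimal 29) has four bits
  // set, so the result matches the first row of the table in the doc comment.
  function exampleHammingWeight() {
    return tracking.Math.hammingWeight(29); // 4, since 29 === parseInt('11101', 2)
  }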
  /**
   * Generates a random number in the [a, b) interval.
   * @param {number} a
   * @param {number} b
   * @return {number}
   */
  tracking.Math.uniformRandom = function(a, b) {
    return a + Math.random() * (b - a);
  };
  /**
   * Tests if a rectangle intersects with another.
   *
   * <pre>
   *  x0y0 --------       x2y2 --------
   *      |       |           |       |
   *       -------- x1y1       -------- x3y3
   * </pre>
   *
   * @param {number} x0 Horizontal coordinate of P0.
   * @param {number} y0 Vertical coordinate of P0.
   * @param {number} x1 Horizontal coordinate of P1.
   * @param {number} y1 Vertical coordinate of P1.
   * @param {number} x2 Horizontal coordinate of P2.
   * @param {number} y2 Vertical coordinate of P2.
   * @param {number} x3 Horizontal coordinate of P3.
   * @param {number} y3 Vertical coordinate of P3.
   * @return {boolean}
   */
  tracking.Math.intersectRect = function(x0, y0, x1, y1, x2, y2, x3, y3) {
    return !(x2 > x1 || x3 < x0 || y2 > y1 || y3 < y0);
  };
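  // Quick illustrative check of the predicate above: the unit square at the
  // origin overlaps a square shifted by half a unit, but not one shifted by two.
  function exampleIntersectRect() {
    var overlapping = tracking.Math.intersectRect(0, 0, 1, 1, 0.5, 0.5, 1.5, 1.5); // true
    var disjoint = tracking.Math.intersectRect(0, 0, 1, 1, 2, 2, 3, 3);            // false
    return [overlapping, disjoint];
  }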
}());
(function() {
  /**
   * Matrix utility.
   * @static
   * @constructor
   */
  tracking.Matrix = {};
  /**
   * Loops through the array organized in row-major order and executes the
   * `fn` callback for each iteration. The `fn` callback receives the
   * following parameters: `(r,g,b,a,index,i,j)`, where `r,g,b,a` represents
   * the pixel color with alpha channel, `index` represents the position in
   * the row-major order array and `i,j` the respective index positions in
   * two dimensions.
   * @param {array} pixels The pixels in a linear [r,g,b,a,...] array to loop
   *     through.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {function} fn The callback function for each pixel.
   * @param {number} opt_jump Optional jump for the iteration, by default it
   *     is 1, hence loops through all the pixels of the array.
   * @static
   */
  tracking.Matrix.forEach = function(pixels, width, height, fn, opt_jump) {
    opt_jump = opt_jump || 1;
    for (var i = 0; i < height; i += opt_jump) {
      for (var j = 0; j < width; j += opt_jump) {
        var w = i * width * 4 + j * 4;
        fn.call(this, pixels[w], pixels[w + 1], pixels[w + 2], pixels[w + 3], w, i, j);
      }
    }
  };
  /**
   * Calculates the per-element subtraction of two NxM matrices and returns a
   * new NxM matrix as the result.
   * @param {matrix} a The first matrix.
   * @param {matrix} b The second matrix.
   * @static
   */
  tracking.Matrix.sub = function(a, b) {
    var res = tracking.Matrix.clone(a);
    for (var i = 0; i < res.length; i++) {
      for (var j = 0; j < res[i].length; j++) {
        res[i][j] -= b[i][j];
      }
    }
    return res;
  };
  /**
   * Calculates the per-element sum of two NxM matrices and returns a new
   * NxM matrix as the result.
   * @param {matrix} a The first matrix.
   * @param {matrix} b The second matrix.
   * @static
   */
  tracking.Matrix.add = function(a, b) {
    var res = tracking.Matrix.clone(a);
    for (var i = 0; i < res.length; i++) {
      for (var j = 0; j < res[i].length; j++) {
        res[i][j] += b[i][j];
      }
    }
    return res;
  };
  /**
   * Clones a matrix (or part of it) and returns a new matrix as the result.
   * @param {matrix} src The matrix to be cloned.
   * @param {number} width Optional width of the region to clone; defaults to
   *     the source width.
   * @param {number} height Optional height of the region to clone; defaults
   *     to the source height.
   * @static
   */
  tracking.Matrix.clone = function(src, width, height) {
    width = width || src[0].length;
    height = height || src.length;
    var temp = new Array(height);
    var i = height;
    while (i--) {
      temp[i] = new Array(width);
      var j = width;
      while (j--) {
        temp[i][j] = src[i][j];
      }
    }
    return temp;
  };
  /**
   * Multiplies a matrix by a scalar and returns a new matrix as the result.
   * @param {number} scalar The scalar to multiply the matrix by.
   * @param {matrix} src The matrix to be multiplied.
   * @static
   */
  tracking.Matrix.mulScalar = function(scalar, src) {
    var res = tracking.Matrix.clone(src);
    for (var i = 0; i < src.length; i++) {
      for (var j = 0; j < src[i].length; j++) {
        res[i][j] *= scalar;
      }
    }
    return res;
  };
  /**
   * Transposes a matrix and returns a new matrix as the result.
   * @param {matrix} src The matrix to be transposed.
   * @static
   */
  tracking.Matrix.transpose = function(src) {
    var transpose = new Array(src[0].length);
    for (var i = 0; i < src[0].length; i++) {
      transpose[i] = new Array(src.length);
      for (var j = 0; j < src.length; j++) {
        transpose[i][j] = src[j][i];
      }
    }
    return transpose;
  };
  /**
   * Multiplies an MxN matrix with an NxP matrix and returns a new MxP matrix
   * as the result.
   * @param {matrix} a The first matrix.
   * @param {matrix} b The second matrix.
   * @static
   */
  tracking.Matrix.mul = function(a, b) {
    var res = new Array(a.length);
    for (var i = 0; i < a.length; i++) {
      res[i] = new Array(b[0].length);
      for (var j = 0; j < b[0].length; j++) {
        res[i][j] = 0;
        for (var k = 0; k < a[0].length; k++) {
          res[i][j] += a[i][k] * b[k][j];
        }
      }
    }
    return res;
  };
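  // Worked example for the matrix helpers above: a 2x3 matrix times a 3x2
  // matrix yields a 2x2 matrix.
  function exampleMatrixMul() {
    var a = [[1, 2, 3],
             [4, 5, 6]];
    var b = [[7, 8],
             [9, 10],
             [11, 12]];
    return tracking.Matrix.mul(a, b); // [[58, 64], [139, 154]]
  }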
  /**
   * Calculates the L2 (Frobenius) norm of a matrix.
   * @param {matrix} src The matrix whose norm will be calculated.
   * @static
   */
  tracking.Matrix.norm = function(src) {
    var res = 0;
    for (var i = 0; i < src.length; i++) {
      for (var j = 0; j < src[i].length; j++) {
        res += src[i][j] * src[i][j];
      }
    }
    return Math.sqrt(res);
  };
  /**
   * Calculates and returns the covariance matrix of a set of vectors as well
   * as the mean of the matrix.
   * @param {matrix} src The matrix whose covariance matrix will be calculated.
   * @static
   */
  tracking.Matrix.calcCovarMatrix = function(src) {
    var mean = new Array(src.length);
    for (var i = 0; i < src.length; i++) {
      mean[i] = [0.0];
      for (var j = 0; j < src[i].length; j++) {
        mean[i][0] += src[i][j] / src[i].length;
      }
    }
    var deltaFull = tracking.Matrix.clone(mean);
    for (var i = 0; i < deltaFull.length; i++) {
      for (var j = 0; j < src[0].length - 1; j++) {
        deltaFull[i].push(deltaFull[i][0]);
      }
    }
    var a = tracking.Matrix.sub(src, deltaFull);
    var b = tracking.Matrix.transpose(a);
    var covar = tracking.Matrix.mul(b, a);
    return [covar, mean];
  };
}());
(function() {
  /**
   * EPnP utility.
   * @static
   * @constructor
   */
  tracking.EPnP = {};
  tracking.EPnP.solve = function(objectPoints, imagePoints, cameraMatrix) {};
}());
(function() {
  /**
   * Tracker utility.
   * @constructor
   * @extends {tracking.EventEmitter}
   */
  tracking.Tracker = function() {
    tracking.Tracker.base(this, 'constructor');
  };
  tracking.inherits(tracking.Tracker, tracking.EventEmitter);
  /**
   * Tracks the pixels on the array. This method is called for each video
   * frame in order to emit `track` event.
   * @param {Uint8ClampedArray} pixels The pixels data to track.
   * @param {number} width The pixels canvas width.
   * @param {number} height The pixels canvas height.
   */
  tracking.Tracker.prototype.track = function() {};
}());
(function() {
  /**
   * TrackerTask utility.
   * @constructor
   * @extends {tracking.EventEmitter}
   */
  tracking.TrackerTask = function(tracker) {
    tracking.TrackerTask.base(this, 'constructor');
    if (!tracker) {
      throw new Error('Tracker instance not specified.');
    }
    this.setTracker(tracker);
  };
  tracking.inherits(tracking.TrackerTask, tracking.EventEmitter);
  /**
   * Holds the tracker instance managed by this task.
   * @type {tracking.Tracker}
   * @private
   */
  tracking.TrackerTask.prototype.tracker_ = null;
  /**
   * Holds whether the tracker task is running.
   * @type {boolean}
   * @private
   */
  tracking.TrackerTask.prototype.running_ = false;
  /**
   * Gets the tracker instance managed by this task.
   * @return {tracking.Tracker}
   */
  tracking.TrackerTask.prototype.getTracker = function() {
    return this.tracker_;
  };
  /**
   * Returns true if the tracker task is running, false otherwise.
   * @return {boolean}
   * @private
   */
  tracking.TrackerTask.prototype.inRunning = function() {
    return this.running_;
  };
  /**
   * Sets whether the tracker task is running.
   * @param {boolean} running
   * @private
   */
  tracking.TrackerTask.prototype.setRunning = function(running) {
    this.running_ = running;
  };
  /**
   * Sets the tracker instance managed by this task.
   * @param {tracking.Tracker} tracker
   */
  tracking.TrackerTask.prototype.setTracker = function(tracker) {
    this.tracker_ = tracker;
  };
  /**
   * Emits a `run` event on the tracker task for the implementers to run any
   * child action, e.g. `requestAnimationFrame`.
   * @return {object} Returns itself, so calls can be chained.
   */
  tracking.TrackerTask.prototype.run = function() {
    var self = this;
    if (this.inRunning()) {
      return;
    }
    this.setRunning(true);
    this.reemitTrackEvent_ = function(event) {
      self.emit('track', event);
    };
    this.tracker_.on('track', this.reemitTrackEvent_);
    this.emit('run');
    return this;
  };
  /**
   * Emits a `stop` event on the tracker task for the implementers to stop any
   * child action being done, e.g. `requestAnimationFrame`.
   * @return {object} Returns itself, so calls can be chained.
   */
  tracking.TrackerTask.prototype.stop = function() {
    if (!this.inRunning()) {
      return;
    }
    this.setRunning(false);
    this.emit('stop');
    this.tracker_.removeListener('track', this.reemitTrackEvent_);
    return this;
  };
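  // Hedged sketch of the task lifecycle above with a minimal custom tracker;
  // wiring it to a real <video> element would normally go through
  // tracking.track(), which is assumed to return a TrackerTask.
  function exampleTrackerTask() {
    var tracker = new tracking.Tracker();
    var task = new tracking.TrackerTask(tracker);
    task.on('run', function() {
      // Implementers would start a requestAnimationFrame loop here.
    });
    task.run();
    task.stop();
    return task;
  }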
}());
(function() {
  /**
   * ColorTracker utility to track colored blobs in a frame using color
   * difference evaluation.
   * @constructor
   * @param {string|Array.<string>} opt_colors Optional colors to track.
   * @extends {tracking.Tracker}
   */
  tracking.ColorTracker = function(opt_colors) {
    tracking.ColorTracker.base(this, 'constructor');
    if (typeof opt_colors === 'string') {
      opt_colors = [opt_colors];
    }
    if (opt_colors) {
      opt_colors.forEach(function(color) {
        if (!tracking.ColorTracker.getColor(color)) {
          throw new Error('Color not valid, try `new tracking.ColorTracker("magenta")`.');
        }
      });
      this.setColors(opt_colors);
    }
  };
  tracking.inherits(tracking.ColorTracker, tracking.Tracker);
  /**
   * Holds the known colors.
   * @type {Object.<string, function>}
   * @private
   * @static
   */
  tracking.ColorTracker.knownColors_ = {};
  /**
   * Caches coordinate values of the neighbours surrounding a pixel.
   * @type {Object.<number, Int32Array>}
   * @private
   * @static
   */
  tracking.ColorTracker.neighbours_ = {};
  /**
   * Registers a color as known color.
   * @param {string} name The color name.
   * @param {function} fn The color function to test if the passed (r,g,b) is
   *     the desired color.
   * @static
   */
  tracking.ColorTracker.registerColor = function(name, fn) {
    tracking.ColorTracker.knownColors_[name] = fn;
  };
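  // Hedged sketch: how a custom color predicate would be registered. The
  // 'green' name and thresholds are illustrative, not part of the library.
  function exampleRegisterGreen() {
    tracking.ColorTracker.registerColor('green', function(r, g, b) {
      // Accept pixels whose green channel dominates red and blue.
      return g - r >= 50 && g - b >= 50;
    });
  }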
  /**
   * Gets the known color function that is able to test whether an (r,g,b) is
   * the desired color.
   * @param {string} name The color name.
   * @return {function} The known color test function.
   * @static
   */
  tracking.ColorTracker.getColor = function(name) {
    return tracking.ColorTracker.knownColors_[name];
  };
  /**
   * Holds the colors to be tracked by the `ColorTracker` instance.
   * @default ['magenta']
   * @type {Array.<string>}
   */
  tracking.ColorTracker.prototype.colors = ['magenta'];
  /**
   * Holds the minimum dimension to classify a rectangle.
   * @default 20
   * @type {number}
   */
  tracking.ColorTracker.prototype.minDimension = 20;
  /**
   * Holds the maximum dimension to classify a rectangle.
   * @default Infinity
   * @type {number}
   */
  tracking.ColorTracker.prototype.maxDimension = Infinity;
  /**
   * Holds the minimum group size to be classified as a rectangle.
   * @default 30
   * @type {number}
   */
  tracking.ColorTracker.prototype.minGroupSize = 30;
  /**
   * Calculates the bounding dimensions from the cloud points. The cloud
   * points are all points that match the desired color.
   * @param {Array.<number>} cloud Major row order array containing all the
   *     points from the desired color, e.g. [x1, y1, x2, y2, ...].
   * @param {number} total Total number of pixels of the desired color.
   * @return {object} Object containing the x, y, width and height of the
   *     blob extracted from the cloud points.
   * @private
   */
  tracking.ColorTracker.prototype.calculateDimensions_ = function(cloud, total) {
    var maxx = -1;
    var maxy = -1;
    var minx = Infinity;
    var miny = Infinity;
    for (var c = 0; c < total; c += 2) {
      var x = cloud[c];
      var y = cloud[c + 1];
      if (x < minx) {
        minx = x;
      }
      if (x > maxx) {
        maxx = x;
      }
      if (y < miny) {
        miny = y;
      }
      if (y > maxy) {
        maxy = y;
      }
    }
    return {
      width: maxx - minx,
      height: maxy - miny,
      x: minx,
      y: miny
    };
  };
  /**
   * Gets the colors being tracked by the `ColorTracker` instance.
   * @return {Array.<string>}
   */
  tracking.ColorTracker.prototype.getColors = function() {
    return this.colors;
  };
  /**
   * Gets the minimum dimension to classify a rectangle.
   * @return {number}
   */
  tracking.ColorTracker.prototype.getMinDimension = function() {
    return this.minDimension;
  };
  /**
   * Gets the maximum dimension to classify a rectangle.
   * @return {number}
   */
  tracking.ColorTracker.prototype.getMaxDimension = function() {
    return this.maxDimension;
  };
  /**
   * Gets the minimum group size to be classified as a rectangle.
   * @return {number}
   */
  tracking.ColorTracker.prototype.getMinGroupSize = function() {
    return this.minGroupSize;
  };
  /**
   * Gets the eight offset values of the neighbours surrounding a pixel.
   * @param {number} width The image width.
   * @return {array} Array with the eight offset values of the neighbours
   *     surrounding a pixel.
   * @private
   */
  tracking.ColorTracker.prototype.getNeighboursForWidth_ = function(width) {
    if (tracking.ColorTracker.neighbours_[width]) {
      return tracking.ColorTracker.neighbours_[width];
    }
    var neighbours = new Int32Array(8);
    neighbours[0] = -width * 4;
    neighbours[1] = -width * 4 + 4;
    neighbours[2] = 4;
    neighbours[3] = width * 4 + 4;
    neighbours[4] = width * 4;
    neighbours[5] = width * 4 - 4;
    neighbours[6] = -4;
    neighbours[7] = -width * 4 - 4;
    tracking.ColorTracker.neighbours_[width] = neighbours;
    return neighbours;
  };
  /**
   * Unites groups whose bounding boxes intersect with each other.
   * @param {Array.<Object>} rects
   * @private
   */
  tracking.ColorTracker.prototype.mergeRectangles_ = function(rects) {
    var intersects;
    var results = [];
    var minDimension = this.getMinDimension();
    var maxDimension = this.getMaxDimension();
    for (var r = 0; r < rects.length; r++) {
      var r1 = rects[r];
      intersects = true;
      for (var s = r + 1; s < rects.length; s++) {
        var r2 = rects[s];
        if (tracking.Math.intersectRect(r1.x, r1.y, r1.x + r1.width, r1.y + r1.height, r2.x, r2.y, r2.x + r2.width, r2.y + r2.height)) {
          intersects = false;
          var x1 = Math.min(r1.x, r2.x);
          var y1 = Math.min(r1.y, r2.y);
          var x2 = Math.max(r1.x + r1.width, r2.x + r2.width);
          var y2 = Math.max(r1.y + r1.height, r2.y + r2.height);
          // Merge r1 into r2, which will be revisited later by the outer loop.
          r2.height = y2 - y1;
          r2.width = x2 - x1;
          r2.x = x1;
          r2.y = y1;
          break;
        }
      }
      if (intersects) {
        if (r1.width >= minDimension && r1.height >= minDimension) {
          if (r1.width <= maxDimension && r1.height <= maxDimension) {
            results.push(r1);
          }
        }
      }
    }
    return results;
  };
  /**
   * Sets the colors to be tracked by the `ColorTracker` instance.
   * @param {Array.<string>} colors
   */
  tracking.ColorTracker.prototype.setColors = function(colors) {
    this.colors = colors;
  };
  /**
   * Sets the minimum dimension to classify a rectangle.
   * @param {number} minDimension
   */
  tracking.ColorTracker.prototype.setMinDimension = function(minDimension) {
    this.minDimension = minDimension;
  };
  /**
   * Sets the maximum dimension to classify a rectangle.
   * @param {number} maxDimension
   */
  tracking.ColorTracker.prototype.setMaxDimension = function(maxDimension) {
    this.maxDimension = maxDimension;
  };
  /**
   * Sets the minimum group size to be classified as a rectangle.
   * @param {number} minGroupSize
   */
  tracking.ColorTracker.prototype.setMinGroupSize = function(minGroupSize) {
    this.minGroupSize = minGroupSize;
  };
  /**
   * Tracks the `Video` frames. This method is called for each video frame in
   * order to emit `track` event.
   * @param {Uint8ClampedArray} pixels The pixels data to track.
   * @param {number} width The pixels canvas width.
   * @param {number} height The pixels canvas height.
   */
  tracking.ColorTracker.prototype.track = function(pixels, width, height) {
    var self = this;
    var colors = this.getColors();
    if (!colors) {
      throw new Error('Colors not specified, try `new tracking.ColorTracker("magenta")`.');
    }
    var results = [];
    colors.forEach(function(color) {
      results = results.concat(self.trackColor_(pixels, width, height, color));
    });
    this.emit('track', {
      data: results
    });
  };
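  // Hedged usage sketch for the track() flow above; `pixels` is assumed to be
  // RGBA data read from a canvas of the given dimensions.
  function exampleColorTracker(pixels, width, height) {
    var tracker = new tracking.ColorTracker(['magenta', 'cyan']);
    tracker.on('track', function(event) {
      // Each rect carries x, y, width, height and the matched color.
      event.data.forEach(function(rect) {
        console.log(rect.color, rect.x, rect.y, rect.width, rect.height);
      });
    });
    tracker.track(pixels, width, height);
    return tracker;
  }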
  /**
   * Finds the given color in the given matrix of pixels using a flood fill
   * algorithm to determine the area connected to a given node in a
   * multi-dimensional array.
   * @param {Uint8ClampedArray} pixels The pixels data to track.
   * @param {number} width The pixels canvas width.
   * @param {number} height The pixels canvas height.
   * @param {string} color The color to be found.
   * @return {array} The merged rectangles of the blobs matching the color.
   * @private
   */
  tracking.ColorTracker.prototype.trackColor_ = function(pixels, width, height, color) {
    var colorFn = tracking.ColorTracker.knownColors_[color];
    var currGroup = new Int32Array(pixels.length >> 2);
    var currGroupSize;
    var currI;
    var currJ;
    var currW;
    var marked = new Int8Array(pixels.length);
    var minGroupSize = this.getMinGroupSize();
    var neighboursW = this.getNeighboursForWidth_(width);
    var queue = new Int32Array(pixels.length);
    var queuePosition;
    var results = [];
    var w = -4;
    if (!colorFn) {
      return results;
    }
    for (var i = 0; i < height; i++) {
      for (var j = 0; j < width; j++) {
        w += 4;
        if (marked[w]) {
          continue;
        }
        currGroupSize = 0;
        queuePosition = -1;
        queue[++queuePosition] = w;
        queue[++queuePosition] = i;
        queue[++queuePosition] = j;
        marked[w] = 1;
        while (queuePosition >= 0) {
          currJ = queue[queuePosition--];
          currI = queue[queuePosition--];
          currW = queue[queuePosition--];
          if (colorFn(pixels[currW], pixels[currW + 1], pixels[currW + 2], pixels[currW + 3], currW, currI, currJ)) {
            currGroup[currGroupSize++] = currJ;
            currGroup[currGroupSize++] = currI;
            for (var k = 0; k < neighboursW.length; k++) {
              var otherW = currW + neighboursW[k];
              var otherI = currI + neighboursI[k];
              var otherJ = currJ + neighboursJ[k];
              if (!marked[otherW] && otherI >= 0 && otherI < height && otherJ >= 0 && otherJ < width) {
                queue[++queuePosition] = otherW;
                queue[++queuePosition] = otherI;
                queue[++queuePosition] = otherJ;
                marked[otherW] = 1;
              }
            }
          }
        }
        if (currGroupSize >= minGroupSize) {
          var data = this.calculateDimensions_(currGroup, currGroupSize);
          if (data) {
            data.color = color;
            results.push(data);
          }
        }
      }
    }
    return this.mergeRectangles_(results);
  };
  // Default colors
  //===================
  tracking.ColorTracker.registerColor('cyan', function(r, g, b) {
    var thresholdGreen = 50,
      thresholdBlue = 70,
      dx = r - 0,
      dy = g - 255,
      dz = b - 255;
    if ((g - r) >= thresholdGreen && (b - r) >= thresholdBlue) {
      return true;
    }
    return dx * dx + dy * dy + dz * dz < 6400;
  });
  tracking.ColorTracker.registerColor('magenta', function(r, g, b) {
    var threshold = 50,
      dx = r - 255,
      dy = g - 0,
      dz = b - 255;
    if ((r - g) >= threshold && (b - g) >= threshold) {
      return true;
    }
    return dx * dx + dy * dy + dz * dz < 19600;
  });
  tracking.ColorTracker.registerColor('yellow', function(r, g, b) {
    var threshold = 50,
      dx = r - 255,
      dy = g - 255,
      dz = b - 0;
    if ((r - b) >= threshold && (g - b) >= threshold) {
      return true;
    }
    return dx * dx + dy * dy + dz * dz < 10000;
  });
  // Caching neighbour i/j offset values.
  //=====================================
  var neighboursI = new Int32Array([-1, -1, 0, 1, 1, 1, 0, -1]);
  var neighboursJ = new Int32Array([0, 1, 1, 1, 0, -1, -1, -1]);
}());
(function() {
  /**
   * ObjectTracker utility.
   * @constructor
   * @param {string|Array.<string|Array.<number>>} opt_classifiers Optional
   *     object classifiers to track.
   * @extends {tracking.Tracker}
   */
  tracking.ObjectTracker = function(opt_classifiers) {
    tracking.ObjectTracker.base(this, 'constructor');
    if (opt_classifiers) {
      if (!Array.isArray(opt_classifiers)) {
        opt_classifiers = [opt_classifiers];
      }
      if (Array.isArray(opt_classifiers)) {
        opt_classifiers.forEach(function(classifier, i) {
          if (typeof classifier === 'string') {
            opt_classifiers[i] = tracking.ViolaJones.classifiers[classifier];
          }
          if (!opt_classifiers[i]) {
            throw new Error('Object classifier not valid, try `new tracking.ObjectTracker("face")`.');
          }
        });
      }
    }
    this.setClassifiers(opt_classifiers);
  };
  tracking.inherits(tracking.ObjectTracker, tracking.Tracker);
  /**
   * Specifies the edges density of a block in order to decide whether to skip
   * it or not.
   * @default 0.2
   * @type {number}
   */
  tracking.ObjectTracker.prototype.edgesDensity = 0.2;
  /**
   * Specifies the initial scale to start the feature block scaling.
   * @default 1.0
   * @type {number}
   */
  tracking.ObjectTracker.prototype.initialScale = 1.0;
  /**
   * Specifies the scale factor to scale the feature block.
   * @default 1.25
   * @type {number}
   */
  tracking.ObjectTracker.prototype.scaleFactor = 1.25;
  /**
   * Specifies the block step size.
   * @default 1.5
   * @type {number}
   */
  tracking.ObjectTracker.prototype.stepSize = 1.5;
  /**
   * Gets the tracker HAAR classifiers.
   * @return {TypedArray.<number>}
   */
  tracking.ObjectTracker.prototype.getClassifiers = function() {
    return this.classifiers;
  };
  /**
   * Gets the edges density value.
   * @return {number}
   */
  tracking.ObjectTracker.prototype.getEdgesDensity = function() {
    return this.edgesDensity;
  };
  /**
   * Gets the initial scale to start the feature block scaling.
   * @return {number}
   */
  tracking.ObjectTracker.prototype.getInitialScale = function() {
    return this.initialScale;
  };
  /**
   * Gets the scale factor to scale the feature block.
   * @return {number}
   */
  tracking.ObjectTracker.prototype.getScaleFactor = function() {
    return this.scaleFactor;
  };
  /**
   * Gets the block step size.
   * @return {number}
   */
  tracking.ObjectTracker.prototype.getStepSize = function() {
    return this.stepSize;
  };
  /**
   * Tracks the `Video` frames. This method is called for each video frame in
   * order to emit `track` event.
   * @param {Uint8ClampedArray} pixels The pixels data to track.
   * @param {number} width The pixels canvas width.
   * @param {number} height The pixels canvas height.
   */
  tracking.ObjectTracker.prototype.track = function(pixels, width, height) {
    var self = this;
    var classifiers = this.getClassifiers();
    if (!classifiers) {
      throw new Error('Object classifier not specified, try `new tracking.ObjectTracker("face")`.');
    }
    var results = [];
    classifiers.forEach(function(classifier) {
      results = results.concat(tracking.ViolaJones.detect(pixels, width, height, self.getInitialScale(), self.getScaleFactor(), self.getStepSize(), self.getEdgesDensity(), classifier));
    });
    this.emit('track', {
      data: results
    });
  };
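  // Hedged usage sketch for the detection flow above. The 'face' classifier is
  // assumed to have been loaded into tracking.ViolaJones.classifiers (it ships
  // in a separate data file).
  function exampleObjectTracker(pixels, width, height) {
    var tracker = new tracking.ObjectTracker('face');
    tracker.setStepSize(2);
    tracker.on('track', function(event) {
      event.data.forEach(function(rect) {
        console.log('face at', rect.x, rect.y, rect.width, rect.height);
      });
    });
    tracker.track(pixels, width, height);
    return tracker;
  }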
  /**
   * Sets the tracker HAAR classifiers.
   * @param {TypedArray.<number>} classifiers
   */
  tracking.ObjectTracker.prototype.setClassifiers = function(classifiers) {
    this.classifiers = classifiers;
  };
  /**
   * Sets the edges density.
   * @param {number} edgesDensity
   */
  tracking.ObjectTracker.prototype.setEdgesDensity = function(edgesDensity) {
    this.edgesDensity = edgesDensity;
  };
  /**
   * Sets the initial scale to start the block scaling.
   * @param {number} initialScale
   */
  tracking.ObjectTracker.prototype.setInitialScale = function(initialScale) {
    this.initialScale = initialScale;
  };
  /**
   * Sets the scale factor to scale the feature block.
   * @param {number} scaleFactor
   */
  tracking.ObjectTracker.prototype.setScaleFactor = function(scaleFactor) {
    this.scaleFactor = scaleFactor;
  };
  /**
   * Sets the block step size.
   * @param {number} stepSize
   */
  tracking.ObjectTracker.prototype.setStepSize = function(stepSize) {
    this.stepSize = stepSize;
  };
}());
(function() {
  /**
   * LandmarksTracker utility to detect faces and align facial landmarks on
   * them.
   * @constructor
   * @extends {tracking.ObjectTracker}
   */
  tracking.LandmarksTracker = function() {
    tracking.LandmarksTracker.base(this, 'constructor');
  };
  tracking.inherits(tracking.LandmarksTracker, tracking.ObjectTracker);
  /**
   * Tracks the `Video` frames, detecting faces and aligning their landmarks.
   * This method is called for each video frame in order to emit `track` event.
   * @param {Uint8ClampedArray} pixels The pixels data to track.
   * @param {number} width The pixels canvas width.
   * @param {number} height The pixels canvas height.
   */
  tracking.LandmarksTracker.prototype.track = function(pixels, width, height) {
    var classifier = tracking.ViolaJones.classifiers['face'];
    var faces = tracking.ViolaJones.detect(pixels, width, height,
      this.getInitialScale(), this.getScaleFactor(), this.getStepSize(),
      this.getEdgesDensity(), classifier);
    var landmarks = tracking.LBF.align(pixels, width, height, faces);
    this.emit('track', {
      'data': {
        'faces': faces,
        'landmarks': landmarks
      }
    });
  };
}());
(function() {
  tracking.LBF = {};
  /**
   * LBF Regressor utility.
   * @constructor
   */
  tracking.LBF.Regressor = function(maxNumStages) {
    this.maxNumStages = maxNumStages;
    this.rfs = new Array(maxNumStages);
    this.models = new Array(maxNumStages);
    for (var i = 0; i < maxNumStages; i++) {
      this.rfs[i] = new tracking.LBF.RandomForest(i);
      this.models[i] = tracking.LBF.RegressorData[i].models;
    }
    this.meanShape = tracking.LBF.LandmarksData;
  };
  /**
   * Predicts the position of the landmarks based on the bounding box of the
   * face.
   * @param {array} pixels The grayscale pixels in a linear array.
   * @param {number} width Width of the image.
   * @param {number} height Height of the image.
   * @param {object} boundingBox Bounding box of the face to be aligned.
   * @return {matrix} A matrix with each landmark position in a row [x,y].
   */
  tracking.LBF.Regressor.prototype.predict = function(pixels, width, height, boundingBox) {
    var images = [];
    var currentShapes = [];
    var boundingBoxes = [];
    var meanShapeClone = tracking.Matrix.clone(this.meanShape);
    images.push({
      'data': pixels,
      'width': width,
      'height': height
    });
    boundingBoxes.push(boundingBox);
    currentShapes.push(tracking.LBF.projectShapeToBoundingBox_(meanShapeClone, boundingBox));
    for (var stage = 0; stage < this.maxNumStages; stage++) {
      var binaryFeatures = tracking.LBF.Regressor.deriveBinaryFeat(this.rfs[stage], images, currentShapes, boundingBoxes, meanShapeClone);
      this.applyGlobalPrediction(binaryFeatures, this.models[stage], currentShapes, boundingBoxes);
    }
    return currentShapes[0];
  };
  /**
   * Multiplies the binary features of the landmarks with the regression
   * matrix to obtain the displacement for each landmark. Then applies this
   * displacement into the landmarks shape.
   * @param {object} binaryFeatures The binary features for the landmarks.
   * @param {object} models The regressor models.
   * @param {matrix} currentShapes The landmarks shapes.
   * @param {array} boundingBoxes The bounding boxes of the faces.
   */
  tracking.LBF.Regressor.prototype.applyGlobalPrediction = function(binaryFeatures, models, currentShapes, boundingBoxes) {
    var residual = currentShapes[0].length * 2;
    var deltashape = new Array(residual / 2);
    for (var i = 0; i < residual / 2; i++) {
      deltashape[i] = [0.0, 0.0];
    }
    for (var i = 0; i < currentShapes.length; i++) {
      for (var j = 0; j < residual; j++) {
        var tmp = 0;
        for (var lx = 0, idx = 0; (idx = binaryFeatures[i][lx].index) != -1; lx++) {
          if (idx <= models[j].nr_feature) {
            tmp += models[j].data[(idx - 1)] * binaryFeatures[i][lx].value;
          }
        }
        if (j < residual / 2) {
          deltashape[j][0] = tmp;
        } else {
          deltashape[j - residual / 2][1] = tmp;
        }
      }
      var res = tracking.LBF.similarityTransform_(tracking.LBF.unprojectShapeToBoundingBox_(currentShapes[i], boundingBoxes[i]), this.meanShape);
      var rotation = tracking.Matrix.transpose(res[0]);
      var s = tracking.LBF.unprojectShapeToBoundingBox_(currentShapes[i], boundingBoxes[i]);
      s = tracking.Matrix.add(s, deltashape);
      currentShapes[i] = tracking.LBF.projectShapeToBoundingBox_(s, boundingBoxes[i]);
    }
  };
  /**
   * Derives the binary features from the image for each landmark.
   * @param {object} forest The random forest to search for the best binary
   *     feature match.
   * @param {array} images The images with pixels in a grayscale linear array.
   * @param {array} currentShapes The current landmarks shapes.
   * @param {array} boundingBoxes The bounding boxes of the faces.
   * @param {matrix} meanShape The mean shape of the current landmarks set.
   * @return {array} The binary features extracted from the image and matched
   *     with the training data.
   * @static
   */
  tracking.LBF.Regressor.deriveBinaryFeat = function(forest, images, currentShapes, boundingBoxes, meanShape) {
    var binaryFeatures = new Array(images.length);
    for (var i = 0; i < images.length; i++) {
      var t = forest.maxNumTrees * forest.landmarkNum + 1;
      binaryFeatures[i] = new Array(t);
      for (var j = 0; j < t; j++) {
        binaryFeatures[i][j] = {};
      }
    }
    var leafnodesPerTree = 1 << (forest.maxDepth - 1);
    for (var i = 0; i < images.length; i++) {
      var projectedShape = tracking.LBF.unprojectShapeToBoundingBox_(currentShapes[i], boundingBoxes[i]);
      var transform = tracking.LBF.similarityTransform_(projectedShape, meanShape);
      for (var j = 0; j < forest.landmarkNum; j++) {
        for (var k = 0; k < forest.maxNumTrees; k++) {
          var binaryCode = tracking.LBF.Regressor.getCodeFromTree(forest.rfs[j][k], images[i],
            currentShapes[i], boundingBoxes[i], transform[0], transform[1]);
          var index = j * forest.maxNumTrees + k;
          binaryFeatures[i][index].index = leafnodesPerTree * index + binaryCode;
          binaryFeatures[i][index].value = 1;
        }
      }
      binaryFeatures[i][forest.landmarkNum * forest.maxNumTrees].index = -1;
      binaryFeatures[i][forest.landmarkNum * forest.maxNumTrees].value = -1;
    }
    return binaryFeatures;
  };
  /**
   * Gets the binary code for a specific tree in a random forest. For each
   * landmark, the position of two pre-defined points is recovered from the
   * training data, then the intensity of the pixels corresponding to these
   * points is extracted from the image and used to traverse the trees in the
   * random forest. At the end, the ending node is represented by 1 and the
   * remaining nodes by 0.
   *
   * +--------------------------- Random Forest -----------------------------+
   * | Ø = Ending leaf                                                        |
   * |                                                                        |
   * |      O            O            O            O            O            |
   * |     / \          / \          / \          / \          / \           |
   * |    O   O        O   O        O   O        O   O        O   O          |
   * |   / \ / \      / \ / \      / \ / \      / \ / \      / \ / \         |
   * |   Ø O O O      O O Ø O      O Ø O O      O O Ø O      O O O Ø         |
   * |   1 0 0 0      0 0 1 0      0 1 0 0      0 0 1 0      0 0 0 1         |
   * +------------------------------------------------------------------------+
   * Final binary code for this landmark: 10000010010000100001
   *
   * @param {object} tree The tree to be analyzed.
   * @param {array} image The image with pixels in a grayscale linear array.
   * @param {matrix} shape The current landmarks shape.
   * @param {object} boundingBox The bounding box of the face.
   * @param {matrix} rotation The rotation matrix used to transform the
   *     projected landmarks into the mean shape.
   * @param {number} scale The scale factor used to transform the projected
   *     landmarks into the mean shape.
   * @return {number} The binary code extracted from the tree.
   * @static
   */
  tracking.LBF.Regressor.getCodeFromTree = function(tree, image, shape, boundingBox, rotation, scale) {
    var current = 0;
    var bincode = 0;

    while (true) {
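      // Each node stores its two sampled points in polar form relative to the
      // landmark: feats[0]/feats[1] are angles, feats[2]/feats[3] are normalized
      // radii, scaled by the tree's maximum sampling radius and the box size.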
      var x1 = Math.cos(tree.nodes[current].feats[0]) * tree.nodes[current].feats[2] * tree.maxRadioRadius * boundingBox.width;
      var y1 = Math.sin(tree.nodes[current].feats[0]) * tree.nodes[current].feats[2] * tree.maxRadioRadius * boundingBox.height;
      var x2 = Math.cos(tree.nodes[current].feats[1]) * tree.nodes[current].feats[3] * tree.maxRadioRadius * boundingBox.width;
      var y2 = Math.sin(tree.nodes[current].feats[1]) * tree.nodes[current].feats[3] * tree.maxRadioRadius * boundingBox.height;

      var project_x1 = rotation[0][0] * x1 + rotation[0][1] * y1;
      var project_y1 = rotation[1][0] * x1 + rotation[1][1] * y1;

      // Clamp x within [0, width - 1] and y within [0, height - 1], since the
      // pixels are indexed as data[y * width + x].
      var real_x1 = Math.floor(project_x1 + shape[tree.landmarkID][0]);
      var real_y1 = Math.floor(project_y1 + shape[tree.landmarkID][1]);
      real_x1 = Math.max(0.0, Math.min(real_x1, image.width - 1.0));
      real_y1 = Math.max(0.0, Math.min(real_y1, image.height - 1.0));

      var project_x2 = rotation[0][0] * x2 + rotation[0][1] * y2;
      var project_y2 = rotation[1][0] * x2 + rotation[1][1] * y2;

      var real_x2 = Math.floor(project_x2 + shape[tree.landmarkID][0]);
      var real_y2 = Math.floor(project_y2 + shape[tree.landmarkID][1]);
      real_x2 = Math.max(0.0, Math.min(real_x2, image.width - 1.0));
      real_y2 = Math.max(0.0, Math.min(real_y2, image.height - 1.0));
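      // Pixel-difference feature: compare the intensity difference of the two
      // sampled points against this node's learned threshold to pick a branch.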
      var pdf = Math.floor(image.data[real_y1 * image.width + real_x1]) -
          Math.floor(image.data[real_y2 * image.width + real_x2]);

      if (pdf < tree.nodes[current].thresh) {
        current = tree.nodes[current].cnodes[0];
      } else {
        current = tree.nodes[current].cnodes[1];
      }
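      // Once a leaf is reached, return its 1-based position among the tree's
      // leaves; deriveBinaryFeat turns this ordinal into a one-hot index.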
      if (tree.nodes[current].is_leafnode == 1) {
        bincode = 1;
        for (var i = 0; i < tree.leafnodes.length; i++) {
          if (tree.leafnodes[i] == current) {
            return bincode;
          }
          bincode++;
        }
        return bincode;
      }
    }

    return bincode;
  };
}());

(function() {
  /**
   * Face Alignment via Regressing Local Binary Features (LBF).
   * This approach has two components: a set of local binary features and
   * a locality principle for learning those features.
   * The locality principle is used to guide the learning of a set of highly
   * discriminative local binary features for each landmark independently.
   * The obtained local binary features are used to learn a linear regression
   * that is later used to guide the landmarks in the alignment phase.
   *
   * @authors: VoxarLabs Team (http://cin.ufpe.br/~voxarlabs)
   *           Lucas Figueiredo <lsf@cin.ufpe.br>, Thiago Menezes <tmc2@cin.ufpe.br>,
   *           Thiago Domingues <tald@cin.ufpe.br>, Rafael Roberto <rar3@cin.ufpe.br>,
   *           Thulio Araujo <tlsa@cin.ufpe.br>, Joao Victor <jvfl@cin.ufpe.br>,
   *           Tomer Simis <tls@cin.ufpe.br>
   */
  /**
   * Holds the maximum number of stages that will be used in the alignment algorithm.
   * Each stage contains a different set of random forests and retrieves the binary
   * code from a more "specialized" (i.e. smaller) region around the landmarks.
   * @type {number}
   * @static
   */
  tracking.LBF.maxNumStages = 4;
  /**
   * Holds the regressor that will be responsible for extracting the local
   * features from the image and for guiding the landmarks using the training data.
   * @type {object}
   * @protected
   * @static
   */
  tracking.LBF.regressor_ = null;
  /**
   * Generates a set of landmarks for a set of faces.
   * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
   * @param {number} width The image width.
   * @param {number} height The image height.
   * @param {array} faces The list of faces detected in the image.
   * @return {array} The aligned landmarks, each set of landmarks corresponding
   *     to a specific face.
   * @static
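   *
   * @example
   * // Usage sketch (hypothetical wiring; assumes `pixels`, `width` and `height`
   * // describe an RGBA frame, and `faces` comes from a face detector such as
   * // tracking.ObjectTracker('face'), which yields {x, y, width, height} rects):
   * var landmarks = tracking.LBF.align(pixels, width, height, faces);
   * // landmarks[i] is an array of [x, y] points aligned to faces[i].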
   */
  tracking.LBF.align = function(pixels, width, height, faces) {
    if (tracking.LBF.regressor_ == null) {
      tracking.LBF.regressor_ = new tracking.LBF.Regressor(
        tracking.LBF.maxNumStages
      );
    }

    // NOTE: Is this thresholding suitable? If it operates on the image, why is
    // there no skin-color filter, or an adaptive threshold?
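    // Preprocess: convert to grayscale and equalize the histogram to reduce
    // sensitivity to illumination before sampling pixel intensities.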
    pixels = tracking.Image.grayscale(pixels, width, height, false);
    pixels = tracking.Image.equalizeHist(pixels, width, height);

    var shapes = new Array(faces.length);

    for (var i = 0; i < faces.length; i++) {
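      // Force a square bounding box (the regressor appears to expect
      // width == height for the face region).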
      faces[i].height = faces[i].width;

      var boundingBox = {};
      boundingBox.startX = faces[i].x;
      boundingBox.startY = faces[i].y;
      boundingBox.width = faces[i].width;
      boundingBox.height = faces[i].height;

      shapes[i] = tracking.LBF.regressor_.predict(pixels, width, height, boundingBox);
    }
    return shapes;
  };
  /**
   * Unprojects the landmarks shape from the bounding box, normalizing the
   * coordinates relative to the box origin and dimensions.
   * @param {matrix} shape The landmarks shape.
   * @param {matrix} boundingBox The bounding box.
   * @return {matrix} The landmarks shape in normalized (bounding-box-relative)
   *     coordinates.
   * @static
   * @protected
   */
  tracking.LBF.unprojectShapeToBoundingBox_ = function(shape, boundingBox) {
    var temp = new Array(shape.length);
    for (var i = 0; i < shape.length; i++) {
      temp[i] = [
        (shape[i][0] - boundingBox.startX) / boundingBox.width,
        (shape[i][1] - boundingBox.startY) / boundingBox.height
      ];
    }
    return temp;
  };
  /**
   * Projects the landmarks shape into the bounding box. The landmarks shape has
   * normalized coordinates, so it is necessary to map these coordinates into
   * the bounding box coordinates.
   * @param {matrix} shape The landmarks shape.
   * @param {matrix} boundingBox The bounding box.
   * @return {matrix} The landmarks shape in image coordinates.
   * @static
   * @protected
   */
  tracking.LBF.projectShapeToBoundingBox_ = function(shape, boundingBox) {
    var temp = new Array(shape.length);
    for (var i = 0; i < shape.length; i++) {
      temp[i] = [
        shape[i][0] * boundingBox.width + boundingBox.startX,
        shape[i][1] * boundingBox.height + boundingBox.startY
      ];
    }
    return temp;
  };
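  // Note: projectShapeToBoundingBox_ and unprojectShapeToBoundingBox_ are
  // inverses of each other for the same bounding box.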
  /**
   * Calculates the rotation and scale necessary to transform shape1 into shape2.
   * @param {matrix} shape1 The shape to be transformed.
   * @param {matrix} shape2 The target shape.
   * @return {[matrix, number]} The rotation matrix and scale that, applied to
   *     shape1, approximate shape2.
   * @static
   * @protected
   */
  tracking.LBF.similarityTransform_ = function(shape1, shape2) {
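    // Procrustes-style estimation: translate both shapes to their centroids,
    // normalize their scales by the norm of the covariance matrices, then
    // recover the rotation that best maps shape1 onto shape2.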
    var center1 = [0, 0];
    var center2 = [0, 0];
    for (var i = 0; i < shape1.length; i++) {
      center1[0] += shape1[i][0];
      center1[1] += shape1[i][1];
      center2[0] += shape2[i][0];
      center2[1] += shape2[i][1];
    }
    center1[0] /= shape1.length;
    center1[1] /= shape1.length;
    center2[0] /= shape2.length;
    center2[1] /= shape2.length;

    var temp1 = tracking.Matrix.clone(shape1);
    var temp2 = tracking.Matrix.clone(shape2);
    for (var i = 0; i < shape1.length; i++) {
      temp1[i][0] -= center1[0];
      temp1[i][1] -= center1[1];
      temp2[i][0] -= center2[0];
      temp2[i][1] -= center2[1];
    }

    var covariance1, covariance2;
    var mean1, mean2;

    var t = tracking.Matrix.calcCovarMatrix(temp1);
    covariance1 = t[0];
    mean1 = t[1];

    t = tracking.Matrix.calcCovarMatrix(temp2);
    covariance2 = t[0];
    mean2 = t[1];

    var s1 = Math.sqrt(tracking.Matrix.norm(covariance1));
    var s2 = Math.sqrt(tracking.Matrix.norm(covariance2));

    var scale = s1 / s2;
    temp1 = tracking.Matrix.mulScalar(1.0 / s1, temp1);
    temp2 = tracking.Matrix.mulScalar(1.0 / s2, temp2);

    var num = 0, den = 0;
    for (var i = 0; i < shape1.length; i++) {
      num = num + temp1[i][1] * temp2[i][0] - temp1[i][0] * temp2[i][1];
      den = den + temp1[i][0] * temp2[i][0] + temp1[i][1] * temp2[i][1];
    }
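    // num and den are proportional to the sine and cosine of the rotation angle
    // between the two normalized shapes; dividing by their norm yields unit values.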
    var norm = Math.sqrt(num * num + den * den);
    var sin_theta = num / norm;
    var cos_theta = den / norm;
    var rotation = [
      [cos_theta, -sin_theta],
      [sin_theta, cos_theta]
    ];
    return [rotation, scale];
  };
  /**
   * LBF Random Forest data structure.
   * @param {number} forestIndex The index into tracking.LBF.RegressorData for this forest.
   * @static
   * @constructor
   */
  tracking.LBF.RandomForest = function(forestIndex) {
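    // Model parameters are read from tracking.LBF.RegressorData, the pre-trained
    // model that is expected to be loaded alongside this file.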
    this.maxNumTrees = tracking.LBF.RegressorData[forestIndex].max_numtrees;
    this.landmarkNum = tracking.LBF.RegressorData[forestIndex].num_landmark;
    this.maxDepth = tracking.LBF.RegressorData[forestIndex].max_depth;
    this.stages = tracking.LBF.RegressorData[forestIndex].stages;

    this.rfs = new Array(this.landmarkNum);
    for (var i = 0; i < this.landmarkNum; i++) {
      this.rfs[i] = new Array(this.maxNumTrees);
      for (var j = 0; j < this.maxNumTrees; j++) {
        this.rfs[i][j] = new tracking.LBF.Tree(forestIndex, i, j);
      }
    }
  };
  /**
   * LBF Tree data structure.
   * @param {number} forestIndex The index into tracking.LBF.RegressorData for this forest.
   * @param {number} landmarkIndex The index of the landmark this tree belongs to.
   * @param {number} treeIndex The index of the tree within the landmark's forest.
   * @static
   * @constructor
   */
  tracking.LBF.Tree = function(forestIndex, landmarkIndex, treeIndex) {
    var data = tracking.LBF.RegressorData[forestIndex].landmarks[landmarkIndex][treeIndex];
    this.maxDepth = data.max_depth;
    this.maxNumNodes = data.max_numnodes;
    this.nodes = data.nodes;
    this.landmarkID = data.landmark_id;
    this.numLeafnodes = data.num_leafnodes;
    this.numNodes = data.num_nodes;
    this.maxNumFeats = data.max_numfeats;
    this.maxRadioRadius = data.max_radio_radius;
    this.leafnodes = data.id_leafnodes;
  };
}());