- /**
- * tracking - A modern approach for Computer Vision on the web.
- * @author Eduardo Lundgren <edu@rdo.io>
- * @version v1.1.3
- * @link http://trackingjs.com
- * @license BSD
- */
- (function(window, undefined) {
- window.tracking = window.tracking || {};
- /**
- * Inherit the prototype methods from one constructor into another.
- *
- * Usage:
- * <pre>
- * function ParentClass(a, b) { }
- * ParentClass.prototype.foo = function(a) { }
- *
- * function ChildClass(a, b, c) {
- * tracking.base(this, a, b);
- * }
- * tracking.inherits(ChildClass, ParentClass);
- *
- * var child = new ChildClass('a', 'b', 'c');
- * child.foo();
- * </pre>
- *
- * @param {Function} childCtor Child class.
- * @param {Function} parentCtor Parent class.
- */
- tracking.inherits = function(childCtor, parentCtor) {
- function TempCtor() {
- }
- TempCtor.prototype = parentCtor.prototype;
- childCtor.superClass_ = parentCtor.prototype;
- childCtor.prototype = new TempCtor();
- childCtor.prototype.constructor = childCtor;
- /**
- * Calls superclass constructor/method.
- *
- * This function is only available if you use tracking.inherits to express
- * inheritance relationships between classes.
- *
- * @param {!object} me Should always be "this".
- * @param {string} methodName The method name to call. Calling superclass
- * constructor can be done with the special string 'constructor'.
- * @param {...*} var_args The arguments to pass to superclass
- * method/constructor.
- * @return {*} The return value of the superclass method/constructor.
- */
- childCtor.base = function(me, methodName) {
- var args = Array.prototype.slice.call(arguments, 2);
- return parentCtor.prototype[methodName].apply(me, args);
- };
- };
- /**
- * Captures the user camera when tracking a video element and sets its
- * source to the camera stream.
- * @param {HTMLVideoElement} element Video element to track.
- * @param {object} opt_options Optional configuration to the tracker.
- */
- tracking.initUserMedia_ = function(element, opt_options) {
- window.navigator.mediaDevices.getUserMedia({
- video: true,
- audio: (opt_options && opt_options.audio) ? true : false,
- }).then(function(stream) {
- element.srcObject = stream;
- }).catch(function(err) {
- throw Error('Cannot capture user camera.');
- });
- };
- /**
- * Tests whether the object is a dom node.
- * @param {object} o Object to be tested.
- * @return {boolean} True if the object is a dom node.
- */
- tracking.isNode = function(o) {
- return o.nodeType || this.isWindow(o);
- };
- /**
- * Tests whether the object is the `window` object.
- * @param {object} o Object to be tested.
- * @return {boolean} True if the object is the `window` object.
- */
- tracking.isWindow = function(o) {
- return !!(o && o.alert && o.document);
- };
- /**
- * Selects a dom node from a CSS3 selector using `document.querySelector`.
- * @param {string} selector
- * @param {object} opt_element The root element for the query. When not
- * specified `document` is used as root element.
- * @return {HTMLElement} The first dom element that matches to the selector.
- * If not found, returns `null`.
- */
- tracking.one = function(selector, opt_element) {
- if (this.isNode(selector)) {
- return selector;
- }
- return (opt_element || document).querySelector(selector);
- };
- /**
- * Tracks a canvas, image or video element based on the specified `tracker`
- * instance. This method extracts the pixel information of the input element
- * to pass to the `tracker` instance. When tracking a video,
- * `tracker.track(pixels, width, height)` is called in a polling loop (a
- * `setTimeout`-based loop in this build) in order to track all video frames.
- *
- * Example:
- * var tracker = new tracking.ColorTracker();
- *
- * tracking.track('#video', tracker);
- * or
- * tracking.track('#video', tracker, { camera: true });
- *
- * tracker.on('track', function(event) {
- * // console.log(event.data[0].x, event.data[0].y)
- * });
- *
- * @param {HTMLElement} element The element to track, canvas, image or
- * video.
- * @param {tracking.Tracker} tracker The tracker instance used to track the
- * element.
- * @param {object} opt_options Optional configuration to the tracker.
- */
- tracking.track = function(element, tracker, opt_options) {
- element = tracking.one(element);
- if (!element) {
- throw new Error('Element not found, try a different element or selector.');
- }
- if (!tracker) {
- throw new Error('Tracker not specified, try `tracking.track(element, new tracking.FaceTracker())`.');
- }
- switch (element.nodeName.toLowerCase()) {
- case 'canvas':
- return this.trackCanvas_(element, tracker, opt_options);
- case 'img':
- return this.trackImg_(element, tracker, opt_options);
- case 'video':
- if (opt_options) {
- if (opt_options.camera) {
- this.initUserMedia_(element, opt_options);
- }
- }
- return this.trackVideo_(element, tracker, opt_options);
- default:
- throw new Error('Element not supported, try in a canvas, img, or video.');
- }
- };
- /**
- * Tracks a canvas element based on the specified `tracker` instance and
- * returns a `TrackerTask` for this track.
- * @param {HTMLCanvasElement} element Canvas element to track.
- * @param {tracking.Tracker} tracker The tracker instance used to track the
- * element.
- * @param {object} opt_options Optional configuration to the tracker.
- * @return {tracking.TrackerTask}
- * @private
- */
- tracking.trackCanvas_ = function(element, tracker) {
- var self = this;
- var task = new tracking.TrackerTask(tracker);
- task.on('run', function() {
- self.trackCanvasInternal_(element, tracker);
- });
- return task.run();
- };
- /**
- * Tracks a canvas element based on the specified `tracker` instance. This
- * method extracts the pixel information of the input element to pass to the
- * `tracker` instance.
- * @param {HTMLCanvasElement} element Canvas element to track.
- * @param {tracking.Tracker} tracker The tracker instance used to track the
- * element.
- * @param {object} opt_options Optional configuration to the tracker.
- * @private
- */
- tracking.trackCanvasInternal_ = function(element, tracker) {
- var width = element.width;
- var height = element.height;
- var context = element.getContext('2d');
- var imageData = context.getImageData(0, 0, width, height);
- tracker.track(imageData.data, width, height);
- };
- /**
- * Tracks an image element based on the specified `tracker` instance. This
- * method extracts the pixel information of the input element to pass to the
- * `tracker` instance.
- * @param {HTMLImageElement} element Image element to track.
- * @param {tracking.Tracker} tracker The tracker instance used to track the
- * element.
- * @param {object} opt_options Optional configuration to the tracker.
- * @private
- */
- tracking.trackImg_ = function(element, tracker) {
- var width = element.naturalWidth;
- var height = element.naturalHeight;
- var canvas = document.createElement('canvas');
- canvas.width = width;
- canvas.height = height;
- var task = new tracking.TrackerTask(tracker);
- task.on('run', function() {
- tracking.Canvas.loadImage(canvas, element.src, 0, 0, width, height, function() {
- tracking.trackCanvasInternal_(canvas, tracker);
- });
- });
- return task.run();
- };
- /**
- * Tracks a video element based on the specified `tracker` instance. This
- * method extracts the pixel information of the input element to pass to the
- * `tracker` instance. The `tracker.track(pixels, width, height)` call runs
- * in a polling loop (a `setTimeout`-based loop in this build) in order to
- * track all video frames.
- * @param {HTMLVideoElement} element Video element to track.
- * @param {tracking.Tracker} tracker The tracker instance used to track the
- * element.
- * @param {object} opt_options Optional configuration to the tracker.
- * @private
- */
- tracking.trackVideo_ = function(element, tracker) {
- var canvas = document.createElement('canvas');
- var context = canvas.getContext('2d');
- var width;
- var height;
- // FIXME: the canvas is sized to the video's displayed size (offsetWidth/offsetHeight), not its intrinsic size, so frames are analysed at the display size.
- var resizeCanvas_ = function() {
- width = element.offsetWidth;
- height = element.offsetHeight;
- canvas.width = width;
- canvas.height = height;
- };
- resizeCanvas_();
- element.addEventListener('resize', resizeCanvas_);
- // FIXME: expose a plain process function - it is up to the caller to handle the frequency of detection,
- // which already seems to be handled by tracking.TrackerTask;
- // in short, remove the tracking.TrackerTask from here:
- // if users want it, they can create it themselves.
- // var requestId;
- // var requestAnimationFrame_ = function() {
- // requestId = window.requestAnimationFrame(function() {
- // if (element.readyState === element.HAVE_ENOUGH_DATA) {
- // try {
- // // Firefox v~30.0 gets confused with the video readyState firing an
- // // erroneous HAVE_ENOUGH_DATA just before HAVE_CURRENT_DATA state,
- // // hence keep trying to read it until resolved.
- // context.drawImage(element, 0, 0, width, height);
- // } catch (err) {}
- // tracking.trackCanvasInternal_(canvas, tracker);
- // }
- // requestAnimationFrame_();
- // });
- // };
- // ****
- var stopTask = false;
- var doTask = function() {
- setTimeout(function() {
- if (element.readyState === element.HAVE_ENOUGH_DATA) {
- try {
- // Firefox v~30.0 gets confused with the video readyState firing an
- // erroneous HAVE_ENOUGH_DATA just before HAVE_CURRENT_DATA state,
- // hence keep trying to read it until resolved.
- context.drawImage(element, 0, 0, width, height);
- } catch (err) {}
- try {
- tracking.trackCanvasInternal_(canvas, tracker);
- } catch (e) {
- console.log('tracking.trackCanvasInternal_ threw; has tracking stopped?', e);
- }
- }
- if (!stopTask) {
- setTimeout(doTask, 500);
- }
- }, 500);
- };
- // ***
- var task = new tracking.TrackerTask(tracker);
- task.on('stop', function() {
- // window.cancelAnimationFrame(requestId);
- stopTask = true;
- });
- task.on('run', function() {
- // requestAnimationFrame_();
- stopTask = false;
- doTask();
- });
- return task.run();
- };
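- // Illustrative sketch (not executed): how a caller might drive the polling
- // loop above through the returned task. This assumes tracking.TrackerTask
- // (defined elsewhere in this library) exposes run()/stop() and that run()
- // returns the task, as the code above relies on.
- //
- //   var tracker = new tracking.ColorTracker();
- //   var task = tracking.track('#video', tracker, { camera: true });
- //   // Later, pause the ~500ms polling loop:
- //   task.stop();
- //   // ...and resume it:
- //   task.run();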
- // Browser polyfills
- //===================
- if (!window.URL) {
- window.URL = window.URL || window.webkitURL || window.msURL || window.oURL;
- }
- if (!navigator.getUserMedia) {
- navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
- navigator.mozGetUserMedia || navigator.msGetUserMedia;
- }
- }(window));
- (function() {
- /**
- * EventEmitter utility.
- * @constructor
- */
- tracking.EventEmitter = function() {};
- /**
- * Holds event listeners scoped by event type.
- * @type {object}
- * @private
- */
- tracking.EventEmitter.prototype.events_ = null;
- /**
- * Adds a listener to the end of the listeners array for the specified event.
- * @param {string} event
- * @param {function} listener
- * @return {object} Returns emitter, so calls can be chained.
- */
- tracking.EventEmitter.prototype.addListener = function(event, listener) {
- if (typeof listener !== 'function') {
- throw new TypeError('Listener must be a function');
- }
- if (!this.events_) {
- this.events_ = {};
- }
- this.emit('newListener', event, listener);
- if (!this.events_[event]) {
- this.events_[event] = [];
- }
- this.events_[event].push(listener);
- return this;
- };
- /**
- * Returns an array of listeners for the specified event.
- * @param {string} event
- * @return {array} Array of listeners.
- */
- tracking.EventEmitter.prototype.listeners = function(event) {
- return this.events_ && this.events_[event];
- };
- /**
- * Execute each of the listeners in order with the supplied arguments.
- * @param {string} event
- * @param {*} opt_args [arg1], [arg2], [...]
- * @return {boolean} Returns true if event had listeners, false otherwise.
- */
- tracking.EventEmitter.prototype.emit = function(event) {
- var listeners = this.listeners(event);
- if (listeners) {
- var args = Array.prototype.slice.call(arguments, 1);
- for (var i = 0; i < listeners.length; i++) {
- if (listeners[i]) {
- listeners[i].apply(this, args);
- }
- }
- return true;
- }
- return false;
- };
- /**
- * Adds a listener to the end of the listeners array for the specified event.
- * @param {string} event
- * @param {function} listener
- * @return {object} Returns emitter, so calls can be chained.
- */
- tracking.EventEmitter.prototype.on = tracking.EventEmitter.prototype.addListener;
- /**
- * Adds a one time listener for the event. This listener is invoked only the
- * next time the event is fired, after which it is removed.
- * @param {string} event
- * @param {function} listener
- * @return {object} Returns emitter, so calls can be chained.
- */
- tracking.EventEmitter.prototype.once = function(event, listener) {
- var self = this;
- self.on(event, function handlerInternal() {
- self.removeListener(event, handlerInternal);
- listener.apply(this, arguments);
- });
- };
- /**
- * Removes all listeners, or those of the specified event. It's not a good
- * idea to remove listeners that were added elsewhere in the code,
- * especially when it's on an emitter that you didn't create.
- * @param {string} opt_event
- * @return {object} Returns emitter, so calls can be chained.
- */
- tracking.EventEmitter.prototype.removeAllListeners = function(opt_event) {
- if (!this.events_) {
- return this;
- }
- if (opt_event) {
- delete this.events_[opt_event];
- } else {
- delete this.events_;
- }
- return this;
- };
- /**
- * Remove a listener from the listener array for the specified event.
- * Caution: changes array indices in the listener array behind the listener.
- * @param {string} event
- * @param {function} listener
- * @return {object} Returns emitter, so calls can be chained.
- */
- tracking.EventEmitter.prototype.removeListener = function(event, listener) {
- if (typeof listener !== 'function') {
- throw new TypeError('Listener must be a function');
- }
- if (!this.events_) {
- return this;
- }
- var listeners = this.listeners(event);
- if (Array.isArray(listeners)) {
- var i = listeners.indexOf(listener);
- if (i < 0) {
- return this;
- }
- listeners.splice(i, 1);
- }
- return this;
- };
- /**
- * By default EventEmitters will print a warning if more than 10 listeners
- * are added for a particular event. This is a useful default which helps
- * finding memory leaks. Obviously not all Emitters should be limited to 10.
- * This function allows that to be increased. Set to zero for unlimited.
- * @param {number} n The maximum number of listeners.
- */
- tracking.EventEmitter.prototype.setMaxListeners = function() {
- throw new Error('Not implemented');
- };
- }());
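- // Illustrative sketch (not executed): minimal tracking.EventEmitter usage,
- // exercising the on/once/emit/removeListener methods defined above.
- //
- //   var emitter = new tracking.EventEmitter();
- //   var onTrack = function(data) { console.log('track', data); };
- //   emitter.on('track', onTrack);
- //   emitter.once('track', function(data) { console.log('first track only'); });
- //   emitter.emit('track', { x: 10, y: 20 }); // both listeners fire
- //   emitter.emit('track', { x: 11, y: 21 }); // only onTrack fires
- //   emitter.removeListener('track', onTrack);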
- (function() {
- /**
- * Canvas utility.
- * @static
- * @constructor
- */
- tracking.Canvas = {};
- /**
- * Loads an image source into the canvas.
- * @param {HTMLCanvasElement} canvas The canvas dom element.
- * @param {string} src The image source.
- * @param {number} x The canvas horizontal coordinate to load the image.
- * @param {number} y The canvas vertical coordinate to load the image.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {function} opt_callback Callback that fires when the image is loaded
- * into the canvas.
- * @static
- */
- tracking.Canvas.loadImage = function(canvas, src, x, y, width, height, opt_callback) {
- var instance = this;
- var img = new window.Image();
- img.crossOrigin = '*';
- img.onload = function() {
- var context = canvas.getContext('2d');
- canvas.width = width;
- canvas.height = height;
- context.drawImage(img, x, y, width, height);
- if (opt_callback) {
- opt_callback.call(instance);
- }
- img = null;
- };
- img.src = src;
- };
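- // Illustrative sketch (not executed): loading an image into an offscreen
- // canvas and reading its pixels back, as tracking.trackImg_ does above.
- // The image URL and size are placeholders.
- //
- //   var canvas = document.createElement('canvas');
- //   tracking.Canvas.loadImage(canvas, 'photo.jpg', 0, 0, 320, 240, function() {
- //     var context = canvas.getContext('2d');
- //     var imageData = context.getImageData(0, 0, 320, 240);
- //     // imageData.data is the linear [r,g,b,a,...] array used elsewhere.
- //   });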
- }());
- (function() {
- /**
- * DisjointSet utility with path compression. Some applications involve
- * grouping n distinct objects into a collection of disjoint sets. Two
- * important operations are then finding which set a given object belongs to
- * and uniting the two sets. A disjoint set data structure maintains a
- * collection S = {S1, S2, ..., Sk} of disjoint dynamic sets. Each set is
- * identified by a representative, which usually is a member of the set.
- * @param {number} length The number of elements, each starting in its own
- * singleton set.
- * @static
- * @constructor
- */
- tracking.DisjointSet = function(length) {
- if (length === undefined) {
- throw new Error('DisjointSet length not specified.');
- }
- this.length = length;
- this.parent = new Uint32Array(length);
- for (var i = 0; i < length; i++) {
- this.parent[i] = i;
- }
- };
- /**
- * Holds the length of the internal set.
- * @type {number}
- */
- tracking.DisjointSet.prototype.length = null;
- /**
- * Holds the set containing the representative values.
- * @type {Array.<number>}
- */
- tracking.DisjointSet.prototype.parent = null;
- /**
- * Finds a pointer to the representative of the set containing i.
- * @param {number} i
- * @return {number} The representative set of i.
- */
- tracking.DisjointSet.prototype.find = function(i) {
- if (this.parent[i] === i) {
- return i;
- } else {
- return (this.parent[i] = this.find(this.parent[i]));
- }
- };
- /**
- * Unites two dynamic sets containing objects i and j, say Si and Sj, into
- * a new set Si ∪ Sj, assuming that Si ∩ Sj = ∅.
- * @param {number} i
- * @param {number} j
- */
- tracking.DisjointSet.prototype.union = function(i, j) {
- var iRepresentative = this.find(i);
- var jRepresentative = this.find(j);
- this.parent[iRepresentative] = jRepresentative;
- };
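- // Illustrative sketch (not executed): grouping five elements into two sets
- // with the union/find operations defined above.
- //
- //   var set = new tracking.DisjointSet(5);
- //   set.union(0, 1);
- //   set.union(1, 2);              // {0,1,2} share a representative
- //   set.union(3, 4);              // {3,4} share another
- //   set.find(0) === set.find(2);  // true
- //   set.find(0) === set.find(3);  // false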
- }());
- (function() {
- /**
- * Image utility.
- * @static
- * @constructor
- */
- tracking.Image = {};
- /**
- * Computes gaussian blur. Adapted from
- * https://github.com/kig/canvasfilters.
- * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {number} diameter Gaussian blur diameter, must be greater than 1.
- * @return {array} The blurred pixels in a linear [r,g,b,a,...] array.
- */
- tracking.Image.blur = function(pixels, width, height, diameter) {
- diameter = Math.abs(diameter);
- if (diameter <= 1) {
- throw new Error('Diameter should be greater than 1.');
- }
- var radius = diameter / 2;
- var len = Math.ceil(diameter) + (1 - (Math.ceil(diameter) % 2));
- var weights = new Float32Array(len);
- var rho = (radius + 0.5) / 3;
- var rhoSq = rho * rho;
- var gaussianFactor = 1 / Math.sqrt(2 * Math.PI * rhoSq);
- var rhoFactor = -1 / (2 * rho * rho);
- var wsum = 0;
- var middle = Math.floor(len / 2);
- for (var i = 0; i < len; i++) {
- var x = i - middle;
- var gx = gaussianFactor * Math.exp(x * x * rhoFactor);
- weights[i] = gx;
- wsum += gx;
- }
- for (var j = 0; j < weights.length; j++) {
- weights[j] /= wsum;
- }
- return this.separableConvolve(pixels, width, height, weights, weights, false);
- };
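- // Illustrative sketch (not executed): blurring an ImageData buffer taken
- // from a canvas. `context`, `width` and `height` are placeholders for the
- // caller's canvas context and dimensions.
- //
- //   var imageData = context.getImageData(0, 0, width, height);
- //   var blurred = tracking.Image.blur(imageData.data, width, height, 5);
- //   // `blurred` is a Float32Array in the same [r,g,b,a,...] layout.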
- /**
- * Computes the integral image for summed, squared, rotated and sobel pixels.
- * @param {array} pixels The pixels in a linear [r,g,b,a,...] array to loop
- * through.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {array} opt_integralImage Empty array of size `width * height` to
- * be filled with the integral image values. If not specified, computing the
- * sum values will be skipped.
- * @param {array} opt_integralImageSquare Empty array of size `width *
- * height` to be filled with the integral image squared values. If not
- * specified, computing the squared values will be skipped.
- * @param {array} opt_tiltedIntegralImage Empty array of size `width *
- * height` to be filled with the rotated integral image values. If not
- * specified, computing the tilted sum values will be skipped.
- * @param {array} opt_integralImageSobel Empty array of size `width *
- * height` to be filled with the integral image of sobel values. If not
- * specified, sobel filtering will be skipped.
- * @static
- */
- tracking.Image.computeIntegralImage = function(pixels, width, height, opt_integralImage, opt_integralImageSquare, opt_tiltedIntegralImage, opt_integralImageSobel) {
- if (arguments.length < 4) {
- throw new Error('You should specify at least one output array in the order: sum, square, tilted, sobel.');
- }
- var pixelsSobel;
- if (opt_integralImageSobel) {
- pixelsSobel = tracking.Image.sobel(pixels, width, height);
- }
- for (var i = 0; i < height; i++) {
- for (var j = 0; j < width; j++) {
- var w = i * width * 4 + j * 4;
- var pixel = ~~(pixels[w] * 0.299 + pixels[w + 1] * 0.587 + pixels[w + 2] * 0.114);
- if (opt_integralImage) {
- this.computePixelValueSAT_(opt_integralImage, width, i, j, pixel);
- }
- if (opt_integralImageSquare) {
- this.computePixelValueSAT_(opt_integralImageSquare, width, i, j, pixel * pixel);
- }
- if (opt_tiltedIntegralImage) {
- var w1 = w - width * 4;
- var pixelAbove = ~~(pixels[w1] * 0.299 + pixels[w1 + 1] * 0.587 + pixels[w1 + 2] * 0.114);
- this.computePixelValueRSAT_(opt_tiltedIntegralImage, width, i, j, pixel, pixelAbove || 0);
- }
- if (opt_integralImageSobel) {
- this.computePixelValueSAT_(opt_integralImageSobel, width, i, j, pixelsSobel[w]);
- }
- }
- }
- };
- /**
- * Helper method to compute the rotated summed area table (RSAT) by the
- * formula:
- *
- * RSAT(x, y) = RSAT(x-1, y-1) + RSAT(x+1, y-1) - RSAT(x, y-2) + I(x, y) + I(x, y-1)
- *
- * @param {array} RSAT Array of size `width * height` to be filled with the
- * rotated integral image values.
- * @param {number} width The image width.
- * @param {number} i Vertical position of the pixel to be evaluated.
- * @param {number} j Horizontal position of the pixel to be evaluated.
- * @param {number} pixel Pixel value to be added to the integral image.
- * @param {number} pixelAbove Pixel value of the pixel directly above, also
- * added to the rotated integral image.
- * @static
- * @private
- */
- tracking.Image.computePixelValueRSAT_ = function(RSAT, width, i, j, pixel, pixelAbove) {
- var w = i * width + j;
- RSAT[w] = (RSAT[w - width - 1] || 0) + (RSAT[w - width + 1] || 0) - (RSAT[w - width - width] || 0) + pixel + pixelAbove;
- };
- /**
- * Helper method to compute the summed area table (SAT) by the formula:
- *
- * SAT(x, y) = SAT(x, y-1) + SAT(x-1, y) + I(x, y) - SAT(x-1, y-1)
- *
- * @param {array} SAT Array of size `width * height` to be filled with the
- * integral image values.
- * @param {number} width The image width.
- * @param {number} i Vertical position of the pixel to be evaluated.
- * @param {number} j Horizontal position of the pixel to be evaluated.
- * @param {number} pixel Pixel value to be added to the integral image.
- * @static
- * @private
- */
- tracking.Image.computePixelValueSAT_ = function(SAT, width, i, j, pixel) {
- var w = i * width + j;
- SAT[w] = (SAT[w - width] || 0) + (SAT[w - 1] || 0) + pixel - (SAT[w - width - 1] || 0);
- };
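- // Note on usage: once the SAT is filled, the sum of any block with top-left
- // corner (j, i), width bw and height bh can be read with four lookups, as
- // tracking.ViolaJones does below:
- //
- //   var wbA = i * width + j;            // top-left
- //   var wbB = wbA + bw;                 // top-right
- //   var wbD = wbA + bh * width;         // bottom-left
- //   var wbC = wbD + bw;                 // bottom-right
- //   var blockSum = SAT[wbA] - SAT[wbB] - SAT[wbD] + SAT[wbC];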
- /**
- * Converts a color from a color-space based on an RGB color model to a
- * grayscale representation of its luminance. The coefficients represent the
- * measured intensity perception of typical trichromat humans, in
- * particular, human vision is most sensitive to green and least sensitive
- * to blue.
- * @param {Uint8Array|Uint8ClampedArray|Array} pixels The pixels in a linear [r,g,b,a,...] array.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {boolean} fillRGBA If the result should fill all RGBA values with the gray scale
- * values, instead of returning a single value per pixel.
- * @return {Uint8Array} The grayscale pixels in a linear array ([p,p,p,a,...] if fillRGBA
- * is true and [p1, p2, p3, ...] if fillRGBA is false).
- * @static
- */
- tracking.Image.grayscale = function(pixels, width, height, fillRGBA) {
- /*
- Performance result (rough EST. - image size, CPU arch. will affect):
- https://jsperf.com/tracking-new-image-to-grayscale
- Firefox v.60b:
- fillRGBA Gray only
- Old 11 551 OPs/sec
- New 3548 6487 OPs/sec
- ---------------------------------
- 322.5x 11.8x faster
- Chrome v.67b:
- fillRGBA Gray only
- Old 291 489 OPs/sec
- New 6975 6635 OPs/sec
- ---------------------------------
- 24.0x 13.6x faster
- - Ken Nilsen / epistemex
- */
- var len = pixels.length>>2;
- var gray = fillRGBA ? new Uint32Array(len) : new Uint8Array(len);
- var data32 = new Uint32Array(pixels.buffer || new Uint8Array(pixels).buffer);
- var i = 0;
- var c = 0;
- var luma = 0;
- // unrolled loops to not have to check fillRGBA each iteration
- if (fillRGBA) {
- while(i < len) {
- // Entire pixel in little-endian order (ABGR)
- c = data32[i];
- // Using the more up-to-date REC/BT.709 approx. weights for luma instead: [0.2126, 0.7152, 0.0722].
- // luma = ((c>>>16 & 0xff) * 0.2126 + (c>>>8 & 0xff) * 0.7152 + (c & 0xff) * 0.0722 + 0.5)|0;
- // But I'm using scaled integers here for speed (x 0xffff). This can be improved more using 2^n
- // close to the factors allowing for shift-ops (i.e. 4732 -> 4096 => .. (c&0xff) << 12 .. etc.)
- // if "accuracy" is not important (luma is anyway an visual approx.):
- luma = ((c>>>16&0xff) * 13933 + (c>>>8&0xff) * 46871 + (c&0xff) * 4732)>>>16;
- gray[i++] = luma * 0x10101 | c & 0xff000000;
- }
- }
- else {
- while(i < len) {
- c = data32[i];
- luma = ((c>>>16&0xff) * 13933 + (c>>>8&0xff) * 46871 + (c&0xff) * 4732)>>>16;
- // ideally, alpha should affect value here: value * (alpha/255) or with shift-ops for the above version
- gray[i++] = luma;
- }
- }
- // Consolidate array view to byte component format independent of source view
- return new Uint8Array(gray.buffer);
- };
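- // Illustrative sketch (not executed): converting canvas pixels to grayscale.
- // `imageData`, `width` and `height` are placeholders for the caller's data.
- //
- //   // One luma byte per pixel:
- //   var gray = tracking.Image.grayscale(imageData.data, width, height, false);
- //   // Or keep the RGBA layout with the luma replicated into R, G and B:
- //   var grayRGBA = tracking.Image.grayscale(imageData.data, width, height, true);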
- /**
- * Fast horizontal separable convolution. A point spread function (PSF) is
- * said to be separable if it can be broken into two one-dimensional
- * signals: a vertical and a horizontal projection. The convolution is
- * performed by sliding the kernel over the image, generally starting at the
- * top left corner, so as to move the kernel through all the positions where
- * the kernel fits entirely within the boundaries of the image. Adapted from
- * https://github.com/kig/canvasfilters.
- * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {array} weightsVector The weighting vector, e.g [-1,0,1].
- * @param {number} opaque
- * @return {array} The convoluted pixels in a linear [r,g,b,a,...] array.
- */
- tracking.Image.horizontalConvolve = function(pixels, width, height, weightsVector, opaque) {
- var side = weightsVector.length;
- var halfSide = Math.floor(side / 2);
- var output = new Float32Array(width * height * 4);
- var alphaFac = opaque ? 1 : 0;
- for (var y = 0; y < height; y++) {
- for (var x = 0; x < width; x++) {
- var sy = y;
- var sx = x;
- var offset = (y * width + x) * 4;
- var r = 0;
- var g = 0;
- var b = 0;
- var a = 0;
- for (var cx = 0; cx < side; cx++) {
- var scy = sy;
- var scx = Math.min(width - 1, Math.max(0, sx + cx - halfSide));
- var poffset = (scy * width + scx) * 4;
- var wt = weightsVector[cx];
- r += pixels[poffset] * wt;
- g += pixels[poffset + 1] * wt;
- b += pixels[poffset + 2] * wt;
- a += pixels[poffset + 3] * wt;
- }
- output[offset] = r;
- output[offset + 1] = g;
- output[offset + 2] = b;
- output[offset + 3] = a + alphaFac * (255 - a);
- }
- }
- return output;
- };
- /**
- * Fast vertical separable convolution. A point spread function (PSF) is
- * said to be separable if it can be broken into two one-dimensional
- * signals: a vertical and a horizontal projection. The convolution is
- * performed by sliding the kernel over the image, generally starting at the
- * top left corner, so as to move the kernel through all the positions where
- * the kernel fits entirely within the boundaries of the image. Adapted from
- * https://github.com/kig/canvasfilters.
- * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {array} weightsVector The weighting vector, e.g [-1,0,1].
- * @param {number} opaque
- * @return {array} The convoluted pixels in a linear [r,g,b,a,...] array.
- */
- tracking.Image.verticalConvolve = function(pixels, width, height, weightsVector, opaque) {
- var side = weightsVector.length;
- var halfSide = Math.floor(side / 2);
- var output = new Float32Array(width * height * 4);
- var alphaFac = opaque ? 1 : 0;
- for (var y = 0; y < height; y++) {
- for (var x = 0; x < width; x++) {
- var sy = y;
- var sx = x;
- var offset = (y * width + x) * 4;
- var r = 0;
- var g = 0;
- var b = 0;
- var a = 0;
- for (var cy = 0; cy < side; cy++) {
- var scy = Math.min(height - 1, Math.max(0, sy + cy - halfSide));
- var scx = sx;
- var poffset = (scy * width + scx) * 4;
- var wt = weightsVector[cy];
- r += pixels[poffset] * wt;
- g += pixels[poffset + 1] * wt;
- b += pixels[poffset + 2] * wt;
- a += pixels[poffset + 3] * wt;
- }
- output[offset] = r;
- output[offset + 1] = g;
- output[offset + 2] = b;
- output[offset + 3] = a + alphaFac * (255 - a);
- }
- }
- return output;
- };
- /**
- * Fast separable convolution. A point spread function (PSF) is said to be
- * separable if it can be broken into two one-dimensional signals: a
- * vertical and a horizontal projection. The convolution is performed by
- * sliding the kernel over the image, generally starting at the top left
- * corner, so as to move the kernel through all the positions where the
- * kernel fits entirely within the boundaries of the image. Adapted from
- * https://github.com/kig/canvasfilters.
- * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {array} horizWeights The horizontal weighting vector, e.g [-1,0,1].
- * @param {array} vertWeights The vertical weighting vector, e.g. [-1,0,1].
- * @param {number} opaque
- * @return {array} The convoluted pixels in a linear [r,g,b,a,...] array.
- */
- tracking.Image.separableConvolve = function(pixels, width, height, horizWeights, vertWeights, opaque) {
- var vertical = this.verticalConvolve(pixels, width, height, vertWeights, opaque);
- return this.horizontalConvolve(vertical, width, height, horizWeights, opaque);
- };
- /**
- * Compute image edges using Sobel operator. Computes the vertical and
- * horizontal gradients of the image and combines the computed images to
- * find edges in the image. The way we implement the Sobel filter here is by
- * first grayscaling the image, then taking the horizontal and vertical
- * gradients and finally combining the gradient images to make up the final
- * image. Adapted from https://github.com/kig/canvasfilters.
- * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @return {array} The edge pixels in a linear [r,g,b,a,...] array.
- */
- tracking.Image.sobel = function(pixels, width, height) {
- pixels = this.grayscale(pixels, width, height, true);
- var output = new Float32Array(width * height * 4);
- var sobelSignVector = new Float32Array([-1, 0, 1]);
- var sobelScaleVector = new Float32Array([1, 2, 1]);
- var vertical = this.separableConvolve(pixels, width, height, sobelSignVector, sobelScaleVector);
- var horizontal = this.separableConvolve(pixels, width, height, sobelScaleVector, sobelSignVector);
- for (var i = 0; i < output.length; i += 4) {
- var v = vertical[i];
- var h = horizontal[i];
- var p = Math.sqrt(h * h + v * v);
- output[i] = p;
- output[i + 1] = p;
- output[i + 2] = p;
- output[i + 3] = 255;
- }
- return output;
- };
- /**
- * Equalizes the histogram of a grayscale image, normalizing the
- * brightness and increasing the contrast of the image.
- * @param {pixels} pixels The grayscale pixels in a linear array.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @return {array} The equalized grayscale pixels in a linear array.
- */
- tracking.Image.equalizeHist = function(pixels, width, height){
- var equalized = new Uint8ClampedArray(pixels.length);
- var histogram = new Array(256);
- for(var i=0; i < 256; i++) histogram[i] = 0;
- for(var i=0; i < pixels.length; i++){
- equalized[i] = pixels[i];
- histogram[pixels[i]]++;
- }
- var prev = histogram[0];
- for(var i=0; i < 256; i++){
- histogram[i] += prev;
- prev = histogram[i];
- }
- var norm = 255 / pixels.length;
- for(var i=0; i < pixels.length; i++)
- equalized[i] = (histogram[pixels[i]] * norm + 0.5) | 0;
- return equalized;
- }
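- // Illustrative sketch (not executed): equalizing the histogram of the
- // single-channel output of tracking.Image.grayscale above.
- //
- //   var gray = tracking.Image.grayscale(imageData.data, width, height, false);
- //   var equalized = tracking.Image.equalizeHist(gray, width, height);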
- }());
- (function() {
- /**
- * ViolaJones utility.
- * @static
- * @constructor
- */
- tracking.ViolaJones = {};
- /**
- * Holds the minimum area of intersection that defines when two rectangles
- * belong to the same group. Often when a face is matched, multiple
- * rectangles are classified as possible rectangles to represent the face;
- * when they intersect they are grouped as one face.
- * @type {number}
- * @default 0.5
- * @static
- */
- tracking.ViolaJones.REGIONS_OVERLAP = 0.5;
- /**
- * Holds the HAAR cascade classifiers converted from OpenCV training.
- * @type {array}
- * @static
- */
- tracking.ViolaJones.classifiers = {};
- /**
- * Detects through the HAAR cascade data rectangles matches.
- * @param {pixels} pixels The pixels in a linear [r,g,b,a,...] array.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {number} initialScale The initial scale to start the block
- * scaling.
- * @param {number} scaleFactor The scale factor to scale the feature block.
- * @param {number} stepSize The block step size.
- * @param {number} edgesDensity Percentage of edge density inside the
- * classifier block. Value from [0.0, 1.0], defaults to 0.2. If specified,
- * edge detection is applied to the image to prune dead areas, which can
- * significantly improve performance.
- * @param {array} data The HAAR cascade data.
- * @return {array} Found rectangles.
- * @static
- */
- tracking.ViolaJones.detect = function(pixels, width, height, initialScale, scaleFactor, stepSize, edgesDensity, data) {
- var total = 0;
- var rects = [];
- var integralImage = new Int32Array(width * height);
- var integralImageSquare = new Int32Array(width * height);
- var tiltedIntegralImage = new Int32Array(width * height);
- var integralImageSobel;
- if (edgesDensity > 0) {
- integralImageSobel = new Int32Array(width * height);
- }
- tracking.Image.computeIntegralImage(pixels, width, height, integralImage, integralImageSquare, tiltedIntegralImage, integralImageSobel);
- var minWidth = data[0];
- var minHeight = data[1];
- var scale = initialScale * scaleFactor;
- var blockWidth = (scale * minWidth) | 0;
- var blockHeight = (scale * minHeight) | 0;
- while (blockWidth < width && blockHeight < height) {
- var step = (scale * stepSize + 0.5) | 0;
- for (var i = 0; i < (height - blockHeight); i += step) {
- for (var j = 0; j < (width - blockWidth); j += step) {
- if (edgesDensity > 0) {
- if (this.isTriviallyExcluded(edgesDensity, integralImageSobel, i, j, width, blockWidth, blockHeight)) {
- continue;
- }
- }
- if (this.evalStages_(data, integralImage, integralImageSquare, tiltedIntegralImage, i, j, width, blockWidth, blockHeight, scale)) {
- rects[total++] = {
- width: blockWidth,
- height: blockHeight,
- x: j,
- y: i
- };
- }
- }
- }
- scale *= scaleFactor;
- blockWidth = (scale * minWidth) | 0;
- blockHeight = (scale * minHeight) | 0;
- }
- return this.mergeRectangles_(rects);
- };
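- // Illustrative sketch (not executed): running the detector directly on raw
- // pixels. This assumes a HAAR cascade array (e.g. the face classifier that
- // ships with this library) has been registered in
- // tracking.ViolaJones.classifiers elsewhere; the parameter values below are
- // illustrative, not prescribed defaults.
- //
- //   var data = tracking.ViolaJones.classifiers.face;
- //   var rects = tracking.ViolaJones.detect(imageData.data, width, height,
- //       1.0, 1.25, 1.5, 0.2, data);
- //   // rects: [{ total, width, height, x, y }, ...]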
- /**
- * Fast check to test whether the edge density inside the block is greater
- * than a threshold; only then are the stages tested. This can
- * significantly improve performance.
- * @param {number} edgesDensity Percentage density edges inside the
- * classifier block.
- * @param {array} integralImageSobel The integral image of a sobel image.
- * @param {number} i Vertical position of the pixel to be evaluated.
- * @param {number} j Horizontal position of the pixel to be evaluated.
- * @param {number} width The image width.
- * @return {boolean} True if the block at position i,j can be skipped,
- * false otherwise.
- * @static
- * @protected
- */
- tracking.ViolaJones.isTriviallyExcluded = function(edgesDensity, integralImageSobel, i, j, width, blockWidth, blockHeight) {
- var wbA = i * width + j;
- var wbB = wbA + blockWidth;
- var wbD = wbA + blockHeight * width;
- var wbC = wbD + blockWidth;
- var blockEdgesDensity = (integralImageSobel[wbA] - integralImageSobel[wbB] - integralImageSobel[wbD] + integralImageSobel[wbC]) / (blockWidth * blockHeight * 255);
- if (blockEdgesDensity < edgesDensity) {
- return true;
- }
- return false;
- };
- /**
- * Evaluates if the block size on i,j position is a valid HAAR cascade
- * stage.
- * @param {array} data The HAAR cascade data.
- * @param {array} integralImage Summed area table of the image.
- * @param {array} integralImageSquare Summed area table of the squared pixels.
- * @param {array} tiltedIntegralImage Rotated summed area table of the image.
- * @param {number} i Vertical position of the pixel to be evaluated.
- * @param {number} j Horizontal position of the pixel to be evaluated.
- * @param {number} width The image width.
- * @param {number} blockWidth The block width.
- * @param {number} blockHeight The block height.
- * @param {number} scale The scale factor between the block size and the
- * classifier's original size.
- * @return {boolean} Whether the region passes all the stage tests.
- * @private
- * @static
- */
- tracking.ViolaJones.evalStages_ = function(data, integralImage, integralImageSquare, tiltedIntegralImage, i, j, width, blockWidth, blockHeight, scale) {
- var inverseArea = 1.0 / (blockWidth * blockHeight);
- var wbA = i * width + j;
- var wbB = wbA + blockWidth;
- var wbD = wbA + blockHeight * width;
- var wbC = wbD + blockWidth;
- var mean = (integralImage[wbA] - integralImage[wbB] - integralImage[wbD] + integralImage[wbC]) * inverseArea;
- var variance = (integralImageSquare[wbA] - integralImageSquare[wbB] - integralImageSquare[wbD] + integralImageSquare[wbC]) * inverseArea - mean * mean;
- var standardDeviation = 1;
- if (variance > 0) {
- standardDeviation = Math.sqrt(variance);
- }
- var length = data.length;
- for (var w = 2; w < length; ) {
- var stageSum = 0;
- var stageThreshold = data[w++];
- var nodeLength = data[w++];
- while (nodeLength--) {
- var rectsSum = 0;
- var tilted = data[w++];
- var rectsLength = data[w++];
- for (var r = 0; r < rectsLength; r++) {
- var rectLeft = (j + data[w++] * scale + 0.5) | 0;
- var rectTop = (i + data[w++] * scale + 0.5) | 0;
- var rectWidth = (data[w++] * scale + 0.5) | 0;
- var rectHeight = (data[w++] * scale + 0.5) | 0;
- var rectWeight = data[w++];
- var w1;
- var w2;
- var w3;
- var w4;
- if (tilted) {
- // RectSum(r) = RSAT(x-h+w, y+w+h-1) + RSAT(x, y-1) - RSAT(x-h, y+h-1) - RSAT(x+w, y+w-1)
- w1 = (rectLeft - rectHeight + rectWidth) + (rectTop + rectWidth + rectHeight - 1) * width;
- w2 = rectLeft + (rectTop - 1) * width;
- w3 = (rectLeft - rectHeight) + (rectTop + rectHeight - 1) * width;
- w4 = (rectLeft + rectWidth) + (rectTop + rectWidth - 1) * width;
- rectsSum += (tiltedIntegralImage[w1] + tiltedIntegralImage[w2] - tiltedIntegralImage[w3] - tiltedIntegralImage[w4]) * rectWeight;
- } else {
- // RectSum(r) = SAT(x-1, y-1) + SAT(x+w-1, y+h-1) - SAT(x-1, y+h-1) - SAT(x+w-1, y-1)
- w1 = rectTop * width + rectLeft;
- w2 = w1 + rectWidth;
- w3 = w1 + rectHeight * width;
- w4 = w3 + rectWidth;
- rectsSum += (integralImage[w1] - integralImage[w2] - integralImage[w3] + integralImage[w4]) * rectWeight;
- // TODO: Review the code below to analyze performance when using it instead.
- // w1 = (rectLeft - 1) + (rectTop - 1) * width;
- // w2 = (rectLeft + rectWidth - 1) + (rectTop + rectHeight - 1) * width;
- // w3 = (rectLeft - 1) + (rectTop + rectHeight - 1) * width;
- // w4 = (rectLeft + rectWidth - 1) + (rectTop - 1) * width;
- // rectsSum += (integralImage[w1] + integralImage[w2] - integralImage[w3] - integralImage[w4]) * rectWeight;
- }
- }
- var nodeThreshold = data[w++];
- var nodeLeft = data[w++];
- var nodeRight = data[w++];
- if (rectsSum * inverseArea < nodeThreshold * standardDeviation) {
- stageSum += nodeLeft;
- } else {
- stageSum += nodeRight;
- }
- }
- if (stageSum < stageThreshold) {
- return false;
- }
- }
- return true;
- };
- /**
- * Postprocess the detected sub-windows in order to combine overlapping
- * detections into a single detection.
- * @param {array} rects
- * @return {array}
- * @private
- * @static
- */
- tracking.ViolaJones.mergeRectangles_ = function(rects) {
- var disjointSet = new tracking.DisjointSet(rects.length);
- for (var i = 0; i < rects.length; i++) {
- var r1 = rects[i];
- for (var j = 0; j < rects.length; j++) {
- var r2 = rects[j];
- if (tracking.Math.intersectRect(r1.x, r1.y, r1.x + r1.width, r1.y + r1.height, r2.x, r2.y, r2.x + r2.width, r2.y + r2.height)) {
- var x1 = Math.max(r1.x, r2.x);
- var y1 = Math.max(r1.y, r2.y);
- var x2 = Math.min(r1.x + r1.width, r2.x + r2.width);
- var y2 = Math.min(r1.y + r1.height, r2.y + r2.height);
- var overlap = (x1 - x2) * (y1 - y2);
- var area1 = (r1.width * r1.height);
- var area2 = (r2.width * r2.height);
- if ((overlap / (area1 * (area1 / area2)) >= this.REGIONS_OVERLAP) &&
- (overlap / (area2 * (area1 / area2)) >= this.REGIONS_OVERLAP)) {
- disjointSet.union(i, j);
- }
- }
- }
- }
- var map = {};
- for (var k = 0; k < disjointSet.length; k++) {
- var rep = disjointSet.find(k);
- if (!map[rep]) {
- map[rep] = {
- total: 1,
- width: rects[k].width,
- height: rects[k].height,
- x: rects[k].x,
- y: rects[k].y
- };
- continue;
- }
- map[rep].total++;
- map[rep].width += rects[k].width;
- map[rep].height += rects[k].height;
- map[rep].x += rects[k].x;
- map[rep].y += rects[k].y;
- }
- var result = [];
- Object.keys(map).forEach(function(key) {
- var rect = map[key];
- result.push({
- total: rect.total,
- width: (rect.width / rect.total + 0.5) | 0,
- height: (rect.height / rect.total + 0.5) | 0,
- x: (rect.x / rect.total + 0.5) | 0,
- y: (rect.y / rect.total + 0.5) | 0
- });
- });
- return result;
- };
- }());
- (function() {
- /**
- * BRIEF stands for "Binary Robust Independent Elementary Features". This
- * method generates a binary descriptor string for each keypoint found by an
- * extractor method.
- * @static
- * @constructor
- */
- tracking.Brief = {};
- /**
- * The set of binary tests is defined by N (x,y)-location pairs uniquely
- * chosen during the initialization. Typical values are N = 128, 256 or 512;
- * N = 128 already yields a good compromise between speed, storage
- * efficiency, and recognition rate.
- * @type {number}
- */
- tracking.Brief.N = 512;
- /**
- * Caches coordinates values of (x,y)-location pairs uniquely chosen during
- * the initialization.
- * @type {Object.<number, Int32Array>}
- * @private
- * @static
- */
- tracking.Brief.randomImageOffsets_ = {};
- /**
- * Caches delta values of (x,y)-location pairs uniquely chosen during
- * the initialization.
- * @type {Int32Array}
- * @private
- * @static
- */
- tracking.Brief.randomWindowOffsets_ = null;
- /**
- * Generates a binary string for each found keypoints extracted using an
- * extractor method.
- * @param {array} pixels The grayscale pixels in a linear [p1,p2,...] array.
- * @param {number} width The image width.
- * @param {array} keypoints
- * @return {Int32Array} Returns an array where each group of N/32 sequential
- * int values represents the descriptor binary string (N bits) describing
- * one corner, e.g. for N = 128: [0,0,0,0, 0,0,0,0, ...].
- * @static
- */
- tracking.Brief.getDescriptors = function(pixels, width, keypoints) {
- // Optimizing divide by 32 operation using binary shift
- // (this.N >> 5) === this.N/32.
- var descriptors = new Int32Array((keypoints.length >> 1) * (this.N >> 5));
- var descriptorWord = 0;
- var offsets = this.getRandomOffsets_(width);
- var position = 0;
- for (var i = 0; i < keypoints.length; i += 2) {
- var w = width * keypoints[i + 1] + keypoints[i];
- var offsetsPosition = 0;
- for (var j = 0, n = this.N; j < n; j++) {
- if (pixels[offsets[offsetsPosition++] + w] < pixels[offsets[offsetsPosition++] + w]) {
- // The bit in the position `j % 32` of descriptorWord should be set to 1. We do
- // this by making an OR operation with a binary number that only has the bit
- // in that position set to 1. That binary number is obtained by shifting 1 left by
- // `j % 32` (which is the same as `j & 31`) positions.
- descriptorWord |= 1 << (j & 31);
- }
- // If the next j is a multiple of 32, we will need to use a new descriptor word to hold
- // the next results.
- if (!((j + 1) & 31)) {
- descriptors[position++] = descriptorWord;
- descriptorWord = 0;
- }
- }
- }
- return descriptors;
- };
- /**
- * Matches sets of features {mi} and {m′j} extracted from two images taken
- * from similar, and often successive, viewpoints. A classical procedure
- * runs as follows. For each point {mi} in the first image, search in a
- * region of the second image around location {mi} for point {m′j}. The
- * search is based on the similarity of the local image windows, also known
- * as kernel windows, centered on the points, which strongly characterizes
- * the points when the images are sufficiently close. Once each keypoint is
- * described with its binary string, they need to be compared with the
- * closest matching point. The distance metric is critical to matching
- * performance. Using binary strings reduces the size of the descriptor and
- * provides an interesting data structure that is fast to operate on, whose
- * similarity can be measured by the Hamming distance.
- * @param {array} keypoints1
- * @param {array} descriptors1
- * @param {array} keypoints2
- * @param {array} descriptors2
- * @return {array} Returns an array with one match object per keypoint in
- * keypoints1, each holding `index1`, `index2`, `keypoint1`, `keypoint2` and
- * `confidence` properties, where `index2` is the index of the best matching
- * keypoint in keypoints2.
- * @static
- */
- tracking.Brief.match = function(keypoints1, descriptors1, keypoints2, descriptors2) {
- var len1 = keypoints1.length >> 1;
- var len2 = keypoints2.length >> 1;
- var matches = new Array(len1);
- for (var i = 0; i < len1; i++) {
- var min = Infinity;
- var minj = 0;
- for (var j = 0; j < len2; j++) {
- var dist = 0;
- // Optimizing divide by 32 operation using binary shift
- // (this.N >> 5) === this.N/32.
- for (var k = 0, n = this.N >> 5; k < n; k++) {
- dist += tracking.Math.hammingWeight(descriptors1[i * n + k] ^ descriptors2[j * n + k]);
- }
- if (dist < min) {
- min = dist;
- minj = j;
- }
- }
- matches[i] = {
- index1: i,
- index2: minj,
- keypoint1: [keypoints1[2 * i], keypoints1[2 * i + 1]],
- keypoint2: [keypoints2[2 * minj], keypoints2[2 * minj + 1]],
- confidence: 1 - min / this.N
- };
- }
- return matches;
- };
- /**
- * Removes match outliers by keeping only the matches that agree in both
- * directions.
- * @param {array} keypoints1
- * @param {array} descriptors1
- * @param {array} keypoints2
- * @param {array} descriptors2
- * @return {Array.<Object>} Returns an array with the matches of `keypoints1`
- * against `keypoints2` that are also the best matches in the reverse
- * direction, each containing `index1`, `index2`, `keypoint1`, `keypoint2`
- * and `confidence`.
- * @static
- */
- tracking.Brief.reciprocalMatch = function(keypoints1, descriptors1, keypoints2, descriptors2) {
- var matches = [];
- if (keypoints1.length === 0 || keypoints2.length === 0) {
- return matches;
- }
- var matches1 = tracking.Brief.match(keypoints1, descriptors1, keypoints2, descriptors2);
- var matches2 = tracking.Brief.match(keypoints2, descriptors2, keypoints1, descriptors1);
- for (var i = 0; i < matches1.length; i++) {
- if (matches2[matches1[i].index2].index2 === i) {
- matches.push(matches1[i]);
- }
- }
- return matches;
- };
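- /**
- * Usage sketch (illustrative, not part of the library): matching keypoints
- * between two grayscaled frames of the same size. `frame1`, `frame2`,
- * `width` and `height` are assumed to be provided by the caller.
- *
- * <pre>
- * var corners1 = tracking.Fast.findCorners(frame1, width, height);
- * var corners2 = tracking.Fast.findCorners(frame2, width, height);
- * var descriptors1 = tracking.Brief.getDescriptors(frame1, width, corners1);
- * var descriptors2 = tracking.Brief.getDescriptors(frame2, width, corners2);
- * var matches = tracking.Brief.reciprocalMatch(corners1, descriptors1, corners2, descriptors2);
- * matches.forEach(function(match) {
- *   console.log(match.confidence, match.keypoint1, match.keypoint2);
- * });
- * </pre>
- */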
- /**
- * Gets the coordinates values of (x,y)-location pairs uniquely chosen
- * during the initialization.
- * @return {array} Array with the random offset values.
- * @private
- */
- tracking.Brief.getRandomOffsets_ = function(width) {
- if (!this.randomWindowOffsets_) {
- var windowPosition = 0;
- var windowOffsets = new Int32Array(4 * this.N);
- for (var i = 0; i < this.N; i++) {
- windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
- windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
- windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
- windowOffsets[windowPosition++] = Math.round(tracking.Math.uniformRandom(-15, 16));
- }
- this.randomWindowOffsets_ = windowOffsets;
- }
- if (!this.randomImageOffsets_[width]) {
- var imagePosition = 0;
- var imageOffsets = new Int32Array(2 * this.N);
- for (var j = 0; j < this.N; j++) {
- imageOffsets[imagePosition++] = this.randomWindowOffsets_[4 * j] * width + this.randomWindowOffsets_[4 * j + 1];
- imageOffsets[imagePosition++] = this.randomWindowOffsets_[4 * j + 2] * width + this.randomWindowOffsets_[4 * j + 3];
- }
- this.randomImageOffsets_[width] = imageOffsets;
- }
- return this.randomImageOffsets_[width];
- };
- }());
- (function() {
- /**
- * FAST stands for "Features from Accelerated Segment Test". This method
- * performs segment test corner detection. The segment test criterion
- * operates by considering a circle of sixteen pixels around the corner
- * candidate p. The detector classifies p as a corner if there exists a set
- * of n contiguous pixels in the circle which are all brighter than the
- * intensity of the candidate pixel Ip plus a threshold t, or all darker
- * than Ip − t.
- *
- * 15 00 01
- * 14 02
- * 13 03
- * 12 [] 04
- * 11 05
- * 10 06
- * 09 08 07
- *
- * For more reference:
- * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.60.3991&rep=rep1&type=pdf
- * @static
- * @constructor
- */
- tracking.Fast = {};
- /**
- * Holds the threshold to determine whether the tested pixel is brighter or
- * darker than the corner candidate p.
- * @type {number}
- * @default 40
- * @static
- */
- tracking.Fast.THRESHOLD = 40;
- /**
- * Caches the offset values of the circle surrounding the pixel candidate p.
- * @type {Object.<number, Int32Array>}
- * @private
- * @static
- */
- tracking.Fast.circles_ = {};
- /**
- * Finds corner coordinates on the grayscaled image.
- * @param {array} pixels The grayscale pixels in a linear [p1,p2,...] array.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {number} opt_threshold Optional threshold to determine whether the
- * tested pixel is brighter or darker than the corner candidate p. Defaults
- * to `tracking.Fast.THRESHOLD` (40).
- * @return {array} Array containing the coordinates of all found corners,
- * e.g. [x0,y0,x1,y1,...], where P(x0,y0) represents a corner coordinate.
- * @static
- */
- tracking.Fast.findCorners = function(pixels, width, height, opt_threshold) {
- var circleOffsets = this.getCircleOffsets_(width);
- var circlePixels = new Int32Array(16);
- var corners = [];
- if (opt_threshold === undefined) {
- opt_threshold = this.THRESHOLD;
- }
- // When looping through the image pixels, skips the first three lines from
- // the image boundaries to constrain the surrounding circle inside the image
- // area.
- for (var i = 3; i < height - 3; i++) {
- for (var j = 3; j < width - 3; j++) {
- var w = i * width + j;
- var p = pixels[w];
- // Loops the circle offsets to read the pixel value for the sixteen
- // surrounding pixels.
- for (var k = 0; k < 16; k++) {
- circlePixels[k] = pixels[w + circleOffsets[k]];
- }
- if (this.isCorner(p, circlePixels, opt_threshold)) {
- // The pixel p is classified as a corner, as optimization increment j
- // by the circle radius 3 to skip the neighbor pixels inside the
- // surrounding circle. This can be removed without compromising the
- // result.
- corners.push(j, i);
- j += 3;
- }
- }
- }
- return corners;
- };
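- /**
- * Usage sketch (illustrative, not part of the library): `imageData` is
- * assumed to be the RGBA ImageData of a canvas frame, and
- * tracking.Image.grayscale is used to obtain the single-channel input that
- * findCorners expects.
- *
- * <pre>
- * var gray = tracking.Image.grayscale(imageData.data, width, height);
- * var corners = tracking.Fast.findCorners(gray, width, height, 20);
- * // corners === [x0, y0, x1, y1, ...]
- * </pre>
- */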
- /**
- * Checks if the circle pixel is brighter than the candidate pixel p by
- * a threshold.
- * @param {number} circlePixel The circle pixel value.
- * @param {number} p The value of the candidate pixel p.
- * @param {number} threshold
- * @return {Boolean}
- * @static
- */
- tracking.Fast.isBrighter = function(circlePixel, p, threshold) {
- return circlePixel - p > threshold;
- };
- /**
- * Checks whether the candidate pixel p is a corner, i.e. whether there is a
- * set of nine contiguous circle pixels that are all brighter or all darker
- * than p by the threshold.
- * @param {number} p The value of the candidate pixel p.
- * @param {array} circlePixels The values of the sixteen surrounding circle pixels.
- * @param {number} threshold
- * @return {Boolean}
- * @static
- */
- tracking.Fast.isCorner = function(p, circlePixels, threshold) {
- if (this.isTriviallyExcluded(circlePixels, p, threshold)) {
- return false;
- }
- for (var x = 0; x < 16; x++) {
- var darker = true;
- var brighter = true;
- for (var y = 0; y < 9; y++) {
- var circlePixel = circlePixels[(x + y) & 15];
- if (!this.isBrighter(p, circlePixel, threshold)) {
- brighter = false;
- if (darker === false) {
- break;
- }
- }
- if (!this.isDarker(p, circlePixel, threshold)) {
- darker = false;
- if (brighter === false) {
- break;
- }
- }
- }
- if (brighter || darker) {
- return true;
- }
- }
- return false;
- };
- /**
- * Checks if the circle pixel is darker than the candidate pixel p by
- * a threshold.
- * @param {number} circlePixel The circle pixel value.
- * @param {number} p The value of the candidate pixel p.
- * @param {number} threshold
- * @return {Boolean}
- * @static
- */
- tracking.Fast.isDarker = function(circlePixel, p, threshold) {
- return p - circlePixel > threshold;
- };
- /**
- * Fast check to test whether the candidate pixel can be trivially excluded.
- * In order to be a corner, the candidate pixel must be brighter or darker
- * than at least nine contiguous surrounding pixels; if fewer than three of
- * the top, bottom, left and right pixels are brighter (and fewer than three
- * are darker) than the candidate, it can be excluded right away, improving
- * performance.
- * @param {array} circlePixels The values of the sixteen surrounding circle pixels.
- * @param {number} p The value of the candidate pixel p.
- * @param {number} threshold
- * @return {Boolean}
- * @static
- * @protected
- */
- tracking.Fast.isTriviallyExcluded = function(circlePixels, p, threshold) {
- var count = 0;
- var circleBottom = circlePixels[8];
- var circleLeft = circlePixels[12];
- var circleRight = circlePixels[4];
- var circleTop = circlePixels[0];
- if (this.isBrighter(circleTop, p, threshold)) {
- count++;
- }
- if (this.isBrighter(circleRight, p, threshold)) {
- count++;
- }
- if (this.isBrighter(circleBottom, p, threshold)) {
- count++;
- }
- if (this.isBrighter(circleLeft, p, threshold)) {
- count++;
- }
- if (count < 3) {
- count = 0;
- if (this.isDarker(circleTop, p, threshold)) {
- count++;
- }
- if (this.isDarker(circleRight, p, threshold)) {
- count++;
- }
- if (this.isDarker(circleBottom, p, threshold)) {
- count++;
- }
- if (this.isDarker(circleLeft, p, threshold)) {
- count++;
- }
- if (count < 3) {
- return true;
- }
- }
- return false;
- };
- /**
- * Gets the sixteen offset values of the circle surrounding a pixel.
- * @param {number} width The image width.
- * @return {array} Array with the sixteen offset values of the circle
- * surrounding a pixel.
- * @private
- */
- tracking.Fast.getCircleOffsets_ = function(width) {
- if (this.circles_[width]) {
- return this.circles_[width];
- }
- var circle = new Int32Array(16);
- circle[0] = -width - width - width;
- circle[1] = circle[0] + 1;
- circle[2] = circle[1] + width + 1;
- circle[3] = circle[2] + width + 1;
- circle[4] = circle[3] + width;
- circle[5] = circle[4] + width;
- circle[6] = circle[5] + width - 1;
- circle[7] = circle[6] + width - 1;
- circle[8] = circle[7] - 1;
- circle[9] = circle[8] - 1;
- circle[10] = circle[9] - width - 1;
- circle[11] = circle[10] - width - 1;
- circle[12] = circle[11] - width;
- circle[13] = circle[12] - width;
- circle[14] = circle[13] - width + 1;
- circle[15] = circle[14] - width + 1;
- this.circles_[width] = circle;
- return circle;
- };
- }());
- (function() {
- /**
- * Math utility.
- * @static
- * @constructor
- */
- tracking.Math = {};
- /**
- * Euclidean distance between two points P(x0, y0) and P(x1, y1).
- * @param {number} x0 Horizontal coordinate of P0.
- * @param {number} y0 Vertical coordinate of P0.
- * @param {number} x1 Horizontal coordinate of P1.
- * @param {number} y1 Vertical coordinate of P1.
- * @return {number} The euclidean distance.
- */
- tracking.Math.distance = function(x0, y0, x1, y1) {
- var dx = x1 - x0;
- var dy = y1 - y0;
- return Math.sqrt(dx * dx + dy * dy);
- };
- /**
- * Calculates the Hamming weight of a string, which is the number of symbols that are
- * different from the zero-symbol of the alphabet used. It is thus
- * equivalent to the Hamming distance from the all-zero string of the same
- * length. For the most typical case, a string of bits, this is the number
- * of 1's in the string.
- *
- * Example:
- *
- * <pre>
- * Binary string Hamming weight
- * 11101 4
- * 11101010 5
- * </pre>
- *
- * @param {number} i The number whose binary representation the Hamming weight is computed from.
- * @return {number} The Hamming weight.
- */
- tracking.Math.hammingWeight = function(i) {
- i = i - ((i >> 1) & 0x55555555);
- i = (i & 0x33333333) + ((i >> 2) & 0x33333333);
- return ((i + (i >> 4) & 0xF0F0F0F) * 0x1010101) >> 24;
- };
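- /**
- * Worked example (illustrative): these calls reproduce the table above.
- *
- * <pre>
- * tracking.Math.hammingWeight(parseInt('11101', 2));    // 4
- * tracking.Math.hammingWeight(parseInt('11101010', 2)); // 5
- * </pre>
- */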
- /**
- * Generates a uniformly distributed random number in the [a, b) interval.
- * @param {number} a
- * @param {number} b
- * @return {number}
- */
- tracking.Math.uniformRandom = function(a, b) {
- return a + Math.random() * (b - a);
- };
- /**
- * Tests if a rectangle intersects with another.
- *
- * <pre>
- * x0y0 -------- x2y2 --------
- * | | | |
- * -------- x1y1 -------- x3y3
- * </pre>
- *
- * @param {number} x0 Horizontal coordinate of P0.
- * @param {number} y0 Vertical coordinate of P0.
- * @param {number} x1 Horizontal coordinate of P1.
- * @param {number} y1 Vertical coordinate of P1.
- * @param {number} x2 Horizontal coordinate of P2.
- * @param {number} y2 Vertical coordinate of P2.
- * @param {number} x3 Horizontal coordinate of P3.
- * @param {number} y3 Vertical coordinate of P3.
- * @return {boolean}
- */
- tracking.Math.intersectRect = function(x0, y0, x1, y1, x2, y2, x3, y3) {
- return !(x2 > x1 || x3 < x0 || y2 > y1 || y3 < y0);
- };
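- /**
- * Worked example (illustrative): the first pair of rectangles overlaps, the
- * second does not.
- *
- * <pre>
- * tracking.Math.intersectRect(0, 0, 10, 10, 5, 5, 15, 15);   // true
- * tracking.Math.intersectRect(0, 0, 10, 10, 20, 20, 30, 30); // false
- * </pre>
- */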
- }());
- (function() {
- /**
- * Matrix utility.
- * @static
- * @constructor
- */
- tracking.Matrix = {};
- /**
- * Loops over the array, organized in row-major order, and executes the `fn`
- * callback for each iteration. The `fn` callback receives the following
- * parameters: `(r,g,b,a,index,i,j)`, where `r,g,b,a` represents the pixel
- * color with alpha channel, `index` represents the position in the
- * row-major order array and `i,j` the respective index positions in two
- * dimensions.
- * @param {array} pixels The pixels in a linear [r,g,b,a,...] array to loop
- * through.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {function} fn The callback function for each pixel.
- * @param {number} opt_jump Optional jump for the iteration, by default it
- * is 1, hence loops all the pixels of the array.
- * @static
- */
- tracking.Matrix.forEach = function(pixels, width, height, fn, opt_jump) {
- opt_jump = opt_jump || 1;
- for (var i = 0; i < height; i += opt_jump) {
- for (var j = 0; j < width; j += opt_jump) {
- var w = i * width * 4 + j * 4;
- fn.call(this, pixels[w], pixels[w + 1], pixels[w + 2], pixels[w + 3], w, i, j);
- }
- }
- };
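- /**
- * Usage sketch (illustrative): summing the red channel of an RGBA frame.
- * `pixels`, `width` and `height` are assumed to describe a canvas frame.
- *
- * <pre>
- * var redSum = 0;
- * tracking.Matrix.forEach(pixels, width, height, function(r, g, b, a, index, i, j) {
- *   redSum += r;
- * });
- * </pre>
- */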
- /**
- * Calculates the per-element subtraction of two NxM matrices and returns a
- * new NxM matrix as the result.
- * @param {matrix} a The first matrix.
- * @param {matrix} b The second matrix.
- * @static
- */
- tracking.Matrix.sub = function(a, b){
- var res = tracking.Matrix.clone(a);
- for(var i=0; i < res.length; i++){
- for(var j=0; j < res[i].length; j++){
- res[i][j] -= b[i][j];
- }
- }
- return res;
- }
- /**
- * Calculates the per-element sum of two NxM matrices and returns a new
- * NxM matrix as the result.
- * @param {matrix} a The first matrix.
- * @param {matrix} b The second matrix.
- * @static
- */
- tracking.Matrix.add = function(a, b){
- var res = tracking.Matrix.clone(a);
- for(var i=0; i < res.length; i++){
- for(var j=0; j < res[i].length; j++){
- res[i][j] += b[i][j];
- }
- }
- return res;
- }
- /**
- * Clones a matrix (or part of it) and returns a new matrix as the result.
- * @param {matrix} src The matrix to be cloned.
- * @param {number} width Optional width of the region to clone; defaults to the source width.
- * @param {number} height Optional height of the region to clone; defaults to the source height.
- * @static
- */
- tracking.Matrix.clone = function(src, width, height){
- width = width || src[0].length;
- height = height || src.length;
- var temp = new Array(height);
- var i = height;
- while(i--){
- temp[i] = new Array(width);
- var j = width;
- while(j--) temp[i][j] = src[i][j];
- }
- return temp;
- }
- /**
- * Multiply a matrix by a scalar and returns a new matrix as the result.
- * @param {number} scalar The scalar to multiply the matrix by.
- * @param {matrix} src The matrix to be multiplied.
- * @static
- */
- tracking.Matrix.mulScalar = function(scalar, src){
- var res = tracking.Matrix.clone(src);
- for(var i=0; i < src.length; i++){
- for(var j=0; j < src[i].length; j++){
- res[i][j] *= scalar;
- }
- }
- return res;
- }
- /**
- * Transpose a matrix and returns a new matrix as the result.
- * @param {matrix} src The matrix to be transposed.
- * @static
- */
- tracking.Matrix.transpose = function(src){
- var transpose = new Array(src[0].length);
- for(var i=0; i < src[0].length; i++){
- transpose[i] = new Array(src.length);
- for(var j=0; j < src.length; j++){
- transpose[i][j] = src[j][i];
- }
- }
- return transpose;
- }
- /**
- * Multiply an MxN matrix with an NxP matrix and returns a new MxP matrix
- * as the result.
- * @param {matrix} a The first matrix.
- * @param {matrix} b The second matrix.
- * @static
- */
- tracking.Matrix.mul = function(a, b) {
- var res = new Array(a.length);
- for (var i = 0; i < a.length; i++) {
- res[i] = new Array(b[0].length);
- for (var j = 0; j < b[0].length; j++) {
- res[i][j] = 0;
- for (var k = 0; k < a[0].length; k++) {
- res[i][j] += a[i][k] * b[k][j];
- }
- }
- }
- return res;
- }
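- /**
- * Worked example (illustrative) of the matrix utilities above:
- *
- * <pre>
- * tracking.Matrix.mul([[1, 2], [3, 4]], [[5, 6], [7, 8]]);
- * // => [[19, 22], [43, 50]]
- * tracking.Matrix.transpose([[1, 2], [3, 4]]);
- * // => [[1, 3], [2, 4]]
- * </pre>
- */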
- /**
- * Calculates the Frobenius (L2) norm of a matrix.
- * @param {matrix} src The matrix whose norm will be calculated.
- * @static
- */
- tracking.Matrix.norm = function(src){
- var res = 0;
- for(var i=0; i < src.length; i++){
- for(var j=0; j < src[i].length; j++){
- res += src[i][j]*src[i][j];
- }
- }
- return Math.sqrt(res);
- }
- /**
- * Calculates and returns the covariance matrix of a set of vectors as well
- * as the mean of the matrix.
- * @param {matrix} src The matrix whose covariance matrix will be calculated.
- * @static
- */
- tracking.Matrix.calcCovarMatrix = function(src){
- var mean = new Array(src.length);
- for(var i=0; i < src.length; i++){
- mean[i] = [0.0];
- for(var j=0; j < src[i].length; j++){
- mean[i][0] += src[i][j]/src[i].length;
- }
- }
- var deltaFull = tracking.Matrix.clone(mean);
- for(var i=0; i < deltaFull.length; i++){
- for(var j=0; j < src[0].length - 1; j++){
- deltaFull[i].push(deltaFull[i][0]);
- }
- }
- var a = tracking.Matrix.sub(src, deltaFull);
- var b = tracking.Matrix.transpose(a);
- var covar = tracking.Matrix.mul(b,a);
- return [covar, mean];
- }
- }());
- (function() {
- /**
- * EPnp utility.
- * @static
- * @constructor
- */
- tracking.EPnP = {};
- tracking.EPnP.solve = function(objectPoints, imagePoints, cameraMatrix) {};
- }());
- (function() {
- /**
- * Tracker utility.
- * @constructor
- * @extends {tracking.EventEmitter}
- */
- tracking.Tracker = function() {
- tracking.Tracker.base(this, 'constructor');
- };
- tracking.inherits(tracking.Tracker, tracking.EventEmitter);
- /**
- * Tracks the pixels on the array. This method is called for each video
- * frame in order to emit `track` event.
- * @param {Uint8ClampedArray} pixels The pixels data to track.
- * @param {number} width The pixels canvas width.
- * @param {number} height The pixels canvas height.
- */
- tracking.Tracker.prototype.track = function() {};
- }());
- (function() {
- /**
- * TrackerTask utility.
- * @constructor
- * @extends {tracking.EventEmitter}
- */
- tracking.TrackerTask = function(tracker) {
- tracking.TrackerTask.base(this, 'constructor');
- if (!tracker) {
- throw new Error('Tracker instance not specified.');
- }
- this.setTracker(tracker);
- };
- tracking.inherits(tracking.TrackerTask, tracking.EventEmitter);
- /**
- * Holds the tracker instance managed by this task.
- * @type {tracking.Tracker}
- * @private
- */
- tracking.TrackerTask.prototype.tracker_ = null;
- /**
- * Holds whether the tracker task is running.
- * @type {boolean}
- * @private
- */
- tracking.TrackerTask.prototype.running_ = false;
- /**
- * Gets the tracker instance managed by this task.
- * @return {tracking.Tracker}
- */
- tracking.TrackerTask.prototype.getTracker = function() {
- return this.tracker_;
- };
- /**
- * Returns true if the tracker task is running, false otherwise.
- * @return {boolean}
- * @private
- */
- tracking.TrackerTask.prototype.inRunning = function() {
- return this.running_;
- };
- /**
- * Sets whether the tracker task is running.
- * @param {boolean} running
- * @private
- */
- tracking.TrackerTask.prototype.setRunning = function(running) {
- this.running_ = running;
- };
- /**
- * Sets the tracker instance managed by this task.
- * @param {tracking.Tracker} tracker The tracker instance to be managed.
- */
- tracking.TrackerTask.prototype.setTracker = function(tracker) {
- this.tracker_ = tracker;
- };
- /**
- * Emits a `run` event on the tracker task for the implementers to run any
- * child action, e.g. `requestAnimationFrame`.
- * @return {object} Returns itself, so calls can be chained.
- */
- tracking.TrackerTask.prototype.run = function() {
- var self = this;
- if (this.inRunning()) {
- return;
- }
- this.setRunning(true);
- this.reemitTrackEvent_ = function(event) {
- self.emit('track', event);
- };
- this.tracker_.on('track', this.reemitTrackEvent_);
- this.emit('run');
- return this;
- };
- /**
- * Emits a `stop` event on the tracker task for the implementers to stop any
- * child action being done, e.g. `requestAnimationFrame`.
- * @return {object} Returns itself, so calls can be chained.
- */
- tracking.TrackerTask.prototype.stop = function() {
- if (!this.inRunning()) {
- return;
- }
- this.setRunning(false);
- this.emit('stop');
- this.tracker_.removeListener('track', this.reemitTrackEvent_);
- return this;
- };
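- /**
- * Usage sketch (illustrative, not part of the library): wrapping a tracker
- * in a task and wiring the frame loop to the `run`/`stop` events. How the
- * frames are produced is assumed to be up to the caller.
- *
- * <pre>
- * var tracker = new tracking.ColorTracker(['magenta']);
- * var task = new tracking.TrackerTask(tracker);
- * task.on('track', function(event) {
- *   console.log(event.data);
- * });
- * task.on('run', function() {
- *   // Start feeding frames to tracker.track(), e.g. via requestAnimationFrame.
- * });
- * task.run();
- * // Later: task.stop();
- * </pre>
- */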
- }());
- (function() {
- /**
- * ColorTracker utility to track colored blobs in a frame using color
- * difference evaluation.
- * @constructor
- * @param {string|Array.<string>} opt_colors Optional colors to track.
- * @extends {tracking.Tracker}
- */
- tracking.ColorTracker = function(opt_colors) {
- tracking.ColorTracker.base(this, 'constructor');
- if (typeof opt_colors === 'string') {
- opt_colors = [opt_colors];
- }
- if (opt_colors) {
- opt_colors.forEach(function(color) {
- if (!tracking.ColorTracker.getColor(color)) {
- throw new Error('Color not valid, try `new tracking.ColorTracker("magenta")`.');
- }
- });
- this.setColors(opt_colors);
- }
- };
- tracking.inherits(tracking.ColorTracker, tracking.Tracker);
- /**
- * Holds the known colors.
- * @type {Object.<string, function>}
- * @private
- * @static
- */
- tracking.ColorTracker.knownColors_ = {};
- /**
- * Caches coordinate offset values of the neighbours surrounding a pixel.
- * @type {Object.<number, Int32Array>}
- * @private
- * @static
- */
- tracking.ColorTracker.neighbours_ = {};
- /**
- * Registers a color as known color.
- * @param {string} name The color name.
- * @param {function} fn The color function to test if the passed (r,g,b) is
- * the desired color.
- * @static
- */
- tracking.ColorTracker.registerColor = function(name, fn) {
- tracking.ColorTracker.knownColors_[name] = fn;
- };
- /**
- * Gets the known color function that is able to test whether an (r,g,b) is
- * the desired color.
- * @param {string} name The color name.
- * @return {function} The known color test function.
- * @static
- */
- tracking.ColorTracker.getColor = function(name) {
- return tracking.ColorTracker.knownColors_[name];
- };
- /**
- * Holds the colors to be tracked by the `ColorTracker` instance.
- * @default ['magenta']
- * @type {Array.<string>}
- */
- tracking.ColorTracker.prototype.colors = ['magenta'];
- /**
- * Holds the minimum dimension to classify a rectangle.
- * @default 20
- * @type {number}
- */
- tracking.ColorTracker.prototype.minDimension = 20;
- /**
- * Holds the maximum dimension to classify a rectangle.
- * @default Infinity
- * @type {number}
- */
- tracking.ColorTracker.prototype.maxDimension = Infinity;
- /**
- * Holds the minimum group size to be classified as a rectangle.
- * @default 30
- * @type {number}
- */
- tracking.ColorTracker.prototype.minGroupSize = 30;
- /**
- * Calculates the bounding dimensions of the cloud points. The cloud points
- * are all points that match the desired color.
- * @param {Array.<number>} cloud Row-major order array containing all the
- * points of the desired color, e.g. [x1, y1, x2, y2, ...].
- * @param {number} total Total number of pixels of the desired color.
- * @return {object} Object containing the x, y, width and height of the
- * blob extracted from the cloud points.
- * @private
- */
- tracking.ColorTracker.prototype.calculateDimensions_ = function(cloud, total) {
- var maxx = -1;
- var maxy = -1;
- var minx = Infinity;
- var miny = Infinity;
- for (var c = 0; c < total; c += 2) {
- var x = cloud[c];
- var y = cloud[c + 1];
- if (x < minx) {
- minx = x;
- }
- if (x > maxx) {
- maxx = x;
- }
- if (y < miny) {
- miny = y;
- }
- if (y > maxy) {
- maxy = y;
- }
- }
- return {
- width: maxx - minx,
- height: maxy - miny,
- x: minx,
- y: miny
- };
- };
- /**
- * Gets the colors being tracked by the `ColorTracker` instance.
- * @return {Array.<string>}
- */
- tracking.ColorTracker.prototype.getColors = function() {
- return this.colors;
- };
- /**
- * Gets the minimum dimension to classify a rectangle.
- * @return {number}
- */
- tracking.ColorTracker.prototype.getMinDimension = function() {
- return this.minDimension;
- };
- /**
- * Gets the maximum dimension to classify a rectangle.
- * @return {number}
- */
- tracking.ColorTracker.prototype.getMaxDimension = function() {
- return this.maxDimension;
- };
- /**
- * Gets the minimum group size to be classified as a rectangle.
- * @return {number}
- */
- tracking.ColorTracker.prototype.getMinGroupSize = function() {
- return this.minGroupSize;
- };
- /**
- * Gets the eight offset values of the neighbours surrounding a pixel.
- * @param {number} width The image width.
- * @return {array} Array with the eight offset values of the neighbours
- * surrounding a pixel.
- * @private
- */
- tracking.ColorTracker.prototype.getNeighboursForWidth_ = function(width) {
- if (tracking.ColorTracker.neighbours_[width]) {
- return tracking.ColorTracker.neighbours_[width];
- }
- var neighbours = new Int32Array(8);
- neighbours[0] = -width * 4;
- neighbours[1] = -width * 4 + 4;
- neighbours[2] = 4;
- neighbours[3] = width * 4 + 4;
- neighbours[4] = width * 4;
- neighbours[5] = width * 4 - 4;
- neighbours[6] = -4;
- neighbours[7] = -width * 4 - 4;
- tracking.ColorTracker.neighbours_[width] = neighbours;
- return neighbours;
- };
- /**
- * Unites groups whose bounding boxes intersect with each other and filters
- * out rectangles outside the minimum/maximum dimension limits.
- * @param {Array.<Object>} rects
- * @private
- */
- tracking.ColorTracker.prototype.mergeRectangles_ = function(rects) {
- var intersects;
- var results = [];
- var minDimension = this.getMinDimension();
- var maxDimension = this.getMaxDimension();
- for (var r = 0; r < rects.length; r++) {
- var r1 = rects[r];
- intersects = true;
- for (var s = r + 1; s < rects.length; s++) {
- var r2 = rects[s];
- if (tracking.Math.intersectRect(r1.x, r1.y, r1.x + r1.width, r1.y + r1.height, r2.x, r2.y, r2.x + r2.width, r2.y + r2.height)) {
- intersects = false;
- var x1 = Math.min(r1.x, r2.x);
- var y1 = Math.min(r1.y, r2.y);
- var x2 = Math.max(r1.x + r1.width, r2.x + r2.width);
- var y2 = Math.max(r1.y + r1.height, r2.y + r2.height);
- r2.height = y2 - y1;
- r2.width = x2 - x1;
- r2.x = x1;
- r2.y = y1;
- break;
- }
- }
- if (intersects) {
- if (r1.width >= minDimension && r1.height >= minDimension) {
- if (r1.width <= maxDimension && r1.height <= maxDimension) {
- results.push(r1);
- }
- }
- }
- }
- return results;
- };
- /**
- * Sets the colors to be tracked by the `ColorTracker` instance.
- * @param {Array.<string>} colors
- */
- tracking.ColorTracker.prototype.setColors = function(colors) {
- this.colors = colors;
- };
- /**
- * Sets the minimum dimension to classify a rectangle.
- * @param {number} minDimension
- */
- tracking.ColorTracker.prototype.setMinDimension = function(minDimension) {
- this.minDimension = minDimension;
- };
- /**
- * Sets the maximum dimension to classify a rectangle.
- * @param {number} maxDimension
- */
- tracking.ColorTracker.prototype.setMaxDimension = function(maxDimension) {
- this.maxDimension = maxDimension;
- };
- /**
- * Sets the minimum group size to be classified as a rectangle.
- * @param {number} minGroupSize
- */
- tracking.ColorTracker.prototype.setMinGroupSize = function(minGroupSize) {
- this.minGroupSize = minGroupSize;
- };
- /**
- * Tracks the `Video` frames. This method is called for each video frame in
- * order to emit `track` event.
- * @param {Uint8ClampedArray} pixels The pixels data to track.
- * @param {number} width The pixels canvas width.
- * @param {number} height The pixels canvas height.
- */
- tracking.ColorTracker.prototype.track = function(pixels, width, height) {
- var self = this;
- var colors = this.getColors();
- if (!colors) {
- throw new Error('Colors not specified, try `new tracking.ColorTracker("magenta")`.');
- }
- var results = [];
- colors.forEach(function(color) {
- results = results.concat(self.trackColor_(pixels, width, height, color));
- });
- this.emit('track', {
- data: results
- });
- };
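- /**
- * Usage sketch (illustrative, not part of the library): `pixels` is assumed
- * to be the RGBA Uint8ClampedArray of a canvas frame, e.g.
- * context.getImageData(0, 0, width, height).data.
- *
- * <pre>
- * var tracker = new tracking.ColorTracker(['magenta', 'cyan']);
- * tracker.on('track', function(event) {
- *   event.data.forEach(function(rect) {
- *     console.log(rect.color, rect.x, rect.y, rect.width, rect.height);
- *   });
- * });
- * tracker.track(pixels, width, height);
- * </pre>
- */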
- /**
- * Finds the given color in the given matrix of pixels using a flood fill
- * algorithm to determine the area connected to a given node in a
- * multi-dimensional array.
- * @param {Uint8ClampedArray} pixels The pixels data to track.
- * @param {number} width The pixels canvas width.
- * @param {number} height The pixels canvas height.
- * @param {string} color The color to be found.
- * @return {Array.<Object>} The merged rectangles found for the given color.
- * @private
- */
- tracking.ColorTracker.prototype.trackColor_ = function(pixels, width, height, color) {
- var colorFn = tracking.ColorTracker.knownColors_[color];
- var currGroup = new Int32Array(pixels.length >> 2);
- var currGroupSize;
- var currI;
- var currJ;
- var currW;
- var marked = new Int8Array(pixels.length);
- var minGroupSize = this.getMinGroupSize();
- var neighboursW = this.getNeighboursForWidth_(width);
- var queue = new Int32Array(pixels.length);
- var queuePosition;
- var results = [];
- var w = -4;
- if (!colorFn) {
- return results;
- }
- for (var i = 0; i < height; i++) {
- for (var j = 0; j < width; j++) {
- w += 4;
- if (marked[w]) {
- continue;
- }
- currGroupSize = 0;
- queuePosition = -1;
- queue[++queuePosition] = w;
- queue[++queuePosition] = i;
- queue[++queuePosition] = j;
- marked[w] = 1;
- while (queuePosition >= 0) {
- currJ = queue[queuePosition--];
- currI = queue[queuePosition--];
- currW = queue[queuePosition--];
- if (colorFn(pixels[currW], pixels[currW + 1], pixels[currW + 2], pixels[currW + 3], currW, currI, currJ)) {
- currGroup[currGroupSize++] = currJ;
- currGroup[currGroupSize++] = currI;
- for (var k = 0; k < neighboursW.length; k++) {
- var otherW = currW + neighboursW[k];
- var otherI = currI + neighboursI[k];
- var otherJ = currJ + neighboursJ[k];
- if (!marked[otherW] && otherI >= 0 && otherI < height && otherJ >= 0 && otherJ < width) {
- queue[++queuePosition] = otherW;
- queue[++queuePosition] = otherI;
- queue[++queuePosition] = otherJ;
- marked[otherW] = 1;
- }
- }
- }
- }
- if (currGroupSize >= minGroupSize) {
- var data = this.calculateDimensions_(currGroup, currGroupSize);
- if (data) {
- data.color = color;
- results.push(data);
- }
- }
- }
- }
- return this.mergeRectangles_(results);
- };
- // Default colors
- //===================
- tracking.ColorTracker.registerColor('cyan', function(r, g, b) {
- var thresholdGreen = 50,
- thresholdBlue = 70,
- dx = r - 0,
- dy = g - 255,
- dz = b - 255;
- if ((g - r) >= thresholdGreen && (b - r) >= thresholdBlue) {
- return true;
- }
- return dx * dx + dy * dy + dz * dz < 6400;
- });
- tracking.ColorTracker.registerColor('magenta', function(r, g, b) {
- var threshold = 50,
- dx = r - 255,
- dy = g - 0,
- dz = b - 255;
- if ((r - g) >= threshold && (b - g) >= threshold) {
- return true;
- }
- return dx * dx + dy * dy + dz * dz < 19600;
- });
- tracking.ColorTracker.registerColor('yellow', function(r, g, b) {
- var threshold = 50,
- dx = r - 255,
- dy = g - 255,
- dz = b - 0;
- if ((r - b) >= threshold && (g - b) >= threshold) {
- return true;
- }
- return dx * dx + dy * dy + dz * dz < 10000;
- });
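- /**
- * Example (illustrative): registering a custom color. The test function
- * receives the (r,g,b) components of each pixel and returns true when the
- * pixel should be counted as the tracked color; the thresholds below are
- * arbitrary.
- *
- * <pre>
- * tracking.ColorTracker.registerColor('green', function(r, g, b) {
- *   return g - r >= 50 && g - b >= 50;
- * });
- * </pre>
- */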
- // Caching neighbour i/j offset values.
- //=====================================
- var neighboursI = new Int32Array([-1, -1, 0, 1, 1, 1, 0, -1]);
- var neighboursJ = new Int32Array([0, 1, 1, 1, 0, -1, -1, -1]);
- }());
- (function() {
- /**
- * ObjectTracker utility.
- * @constructor
- * @param {string|Array.<string|Array.<number>>} opt_classifiers Optional
- * object classifiers to track.
- * @extends {tracking.Tracker}
- */
- tracking.ObjectTracker = function(opt_classifiers) {
- tracking.ObjectTracker.base(this, 'constructor');
- if (opt_classifiers) {
- if (!Array.isArray(opt_classifiers)) {
- opt_classifiers = [opt_classifiers];
- }
- if (Array.isArray(opt_classifiers)) {
- opt_classifiers.forEach(function(classifier, i) {
- if (typeof classifier === 'string') {
- opt_classifiers[i] = tracking.ViolaJones.classifiers[classifier];
- }
- if (!opt_classifiers[i]) {
- throw new Error('Object classifier not valid, try `new tracking.ObjectTracker("face")`.');
- }
- });
- }
- }
- this.setClassifiers(opt_classifiers);
- };
- tracking.inherits(tracking.ObjectTracker, tracking.Tracker);
- /**
- * Specifies the edges density of a block in order to decide whether to skip
- * it or not.
- * @default 0.2
- * @type {number}
- */
- tracking.ObjectTracker.prototype.edgesDensity = 0.2;
- /**
- * Specifies the initial scale to start the feature block scaling.
- * @default 1.0
- * @type {number}
- */
- tracking.ObjectTracker.prototype.initialScale = 1.0;
- /**
- * Specifies the scale factor to scale the feature block.
- * @default 1.25
- * @type {number}
- */
- tracking.ObjectTracker.prototype.scaleFactor = 1.25;
- /**
- * Specifies the block step size.
- * @default 1.5
- * @type {number}
- */
- tracking.ObjectTracker.prototype.stepSize = 1.5;
- /**
- * Gets the tracker HAAR classifiers.
- * @return {TypedArray.<number>}
- */
- tracking.ObjectTracker.prototype.getClassifiers = function() {
- return this.classifiers;
- };
- /**
- * Gets the edges density value.
- * @return {number}
- */
- tracking.ObjectTracker.prototype.getEdgesDensity = function() {
- return this.edgesDensity;
- };
- /**
- * Gets the initial scale to start the feature block scaling.
- * @return {number}
- */
- tracking.ObjectTracker.prototype.getInitialScale = function() {
- return this.initialScale;
- };
- /**
- * Gets the scale factor to scale the feature block.
- * @return {number}
- */
- tracking.ObjectTracker.prototype.getScaleFactor = function() {
- return this.scaleFactor;
- };
- /**
- * Gets the block step size.
- * @return {number}
- */
- tracking.ObjectTracker.prototype.getStepSize = function() {
- return this.stepSize;
- };
- /**
- * Tracks the `Video` frames. This method is called for each video frame in
- * order to emit `track` event.
- * @param {Uint8ClampedArray} pixels The pixels data to track.
- * @param {number} width The pixels canvas width.
- * @param {number} height The pixels canvas height.
- */
- tracking.ObjectTracker.prototype.track = function(pixels, width, height) {
- var self = this;
- var classifiers = this.getClassifiers();
- if (!classifiers) {
- throw new Error('Object classifier not specified, try `new tracking.ObjectTracker("face")`.');
- }
- var results = [];
- classifiers.forEach(function(classifier) {
- results = results.concat(tracking.ViolaJones.detect(pixels, width, height, self.getInitialScale(), self.getScaleFactor(), self.getStepSize(), self.getEdgesDensity(), classifier));
- });
- this.emit('track', {
- data: results
- });
- };
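- /**
- * Usage sketch (illustrative, not part of the library): assumes the `face`
- * classifier data is bundled and that `pixels`, `width` and `height`
- * describe an RGBA canvas frame.
- *
- * <pre>
- * var tracker = new tracking.ObjectTracker('face');
- * tracker.setStepSize(2);
- * tracker.on('track', function(event) {
- *   event.data.forEach(function(rect) {
- *     console.log(rect.x, rect.y, rect.width, rect.height);
- *   });
- * });
- * tracker.track(pixels, width, height);
- * </pre>
- */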
- /**
- * Sets the tracker HAAR classifiers.
- * @param {TypedArray.<number>} classifiers
- */
- tracking.ObjectTracker.prototype.setClassifiers = function(classifiers) {
- this.classifiers = classifiers;
- };
- /**
- * Sets the edges density.
- * @param {number} edgesDensity
- */
- tracking.ObjectTracker.prototype.setEdgesDensity = function(edgesDensity) {
- this.edgesDensity = edgesDensity;
- };
- /**
- * Sets the initial scale to start the block scaling.
- * @param {number} initialScale
- */
- tracking.ObjectTracker.prototype.setInitialScale = function(initialScale) {
- this.initialScale = initialScale;
- };
- /**
- * Sets the scale factor to scale the feature block.
- * @param {number} scaleFactor
- */
- tracking.ObjectTracker.prototype.setScaleFactor = function(scaleFactor) {
- this.scaleFactor = scaleFactor;
- };
- /**
- * Sets the block step size.
- * @param {number} stepSize
- */
- tracking.ObjectTracker.prototype.setStepSize = function(stepSize) {
- this.stepSize = stepSize;
- };
- }());
- (function() {
- tracking.LandmarksTracker = function() {
- tracking.LandmarksTracker.base(this, 'constructor');
- }
- tracking.inherits(tracking.LandmarksTracker, tracking.ObjectTracker);
- tracking.LandmarksTracker.prototype.track = function(pixels, width, height) {
-
- var image = {
- 'data': pixels,
- 'width': width,
- 'height': height
- };
- var classifier = tracking.ViolaJones.classifiers['face'];
- var faces = tracking.ViolaJones.detect(pixels, width, height,
- this.getInitialScale(), this.getScaleFactor(), this.getStepSize(),
- this.getEdgesDensity(), classifier);
- var landmarks = tracking.LBF.align(pixels, width, height, faces);
- this.emit('track', {
- 'data': {
- 'faces' : faces,
- 'landmarks' : landmarks
- }
- });
- }
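- /**
- * Usage sketch (illustrative, not part of the library): assumes the LBF
- * training data (tracking.LBF.RegressorData and tracking.LBF.LandmarksData)
- * and the `face` classifier are loaded, and that `pixels`, `width` and
- * `height` describe an RGBA canvas frame.
- *
- * <pre>
- * var tracker = new tracking.LandmarksTracker();
- * tracker.setInitialScale(4);
- * tracker.setStepSize(2);
- * tracker.setEdgesDensity(0.1);
- * tracker.on('track', function(event) {
- *   console.log(event.data.faces, event.data.landmarks);
- * });
- * tracker.track(pixels, width, height);
- * </pre>
- */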
- }());
- (function() {
- tracking.LBF = {};
- /**
- * LBF Regressor utility.
- * @constructor
- */
- tracking.LBF.Regressor = function(maxNumStages){
- this.maxNumStages = maxNumStages;
- this.rfs = new Array(maxNumStages);
- this.models = new Array(maxNumStages);
- for(var i=0; i < maxNumStages; i++){
- this.rfs[i] = new tracking.LBF.RandomForest(i);
- this.models[i] = tracking.LBF.RegressorData[i].models;
- }
- this.meanShape = tracking.LBF.LandmarksData;
- }
- /**
- * Predicts the position of the landmarks based on the bounding box of the face.
- * @param {array} pixels The grayscale pixels in a linear array.
- * @param {number} width Width of the image.
- * @param {number} height Height of the image.
- * @param {object} boundingBox Bounding box of the face to be aligned.
- * @return {matrix} A matrix with each landmark position in a row [x,y].
- */
- tracking.LBF.Regressor.prototype.predict = function(pixels, width, height, boundingBox) {
- var images = [];
- var currentShapes = [];
- var boundingBoxes = [];
- var meanShapeClone = tracking.Matrix.clone(this.meanShape);
- images.push({
- 'data': pixels,
- 'width': width,
- 'height': height
- });
- boundingBoxes.push(boundingBox);
- currentShapes.push(tracking.LBF.projectShapeToBoundingBox_(meanShapeClone, boundingBox));
- for(var stage = 0; stage < this.maxNumStages; stage++){
- var binaryFeatures = tracking.LBF.Regressor.deriveBinaryFeat(this.rfs[stage], images, currentShapes, boundingBoxes, meanShapeClone);
- this.applyGlobalPrediction(binaryFeatures, this.models[stage], currentShapes, boundingBoxes);
- }
- return currentShapes[0];
- };
- /**
- * Multiplies the binary features of the landmarks with the regression matrix
- * to obtain the displacement for each landmark. Then applies this
- * displacement to the landmarks shape.
- * @param {object} binaryFeatures The binary features for the landmarks.
- * @param {object} models The regressor models.
- * @param {matrix} currentShapes The landmarks shapes.
- * @param {array} boundingBoxes The bounding boxes of the faces.
- */
- tracking.LBF.Regressor.prototype.applyGlobalPrediction = function(binaryFeatures, models, currentShapes,
- boundingBoxes){
- var residual = currentShapes[0].length * 2;
- var rotation = [];
- var deltashape = new Array(residual/2);
- for(var i=0; i < residual/2; i++){
- deltashape[i] = [0.0, 0.0];
- }
- for(var i=0; i < currentShapes.length; i++){
- for(var j=0; j < residual; j++){
- var tmp = 0;
- for(var lx=0, idx=0; (idx = binaryFeatures[i][lx].index) != -1; lx++){
- if(idx <= models[j].nr_feature){
- tmp += models[j].data[(idx - 1)] * binaryFeatures[i][lx].value;
- }
- }
- if(j < residual/2){
- deltashape[j][0] = tmp;
- }else{
- deltashape[j - residual/2][1] = tmp;
- }
- }
- var res = tracking.LBF.similarityTransform_(tracking.LBF.unprojectShapeToBoundingBox_(currentShapes[i], boundingBoxes[i]), this.meanShape);
- var rotation = tracking.Matrix.transpose(res[0]);
- var s = tracking.LBF.unprojectShapeToBoundingBox_(currentShapes[i], boundingBoxes[i]);
- s = tracking.Matrix.add(s, deltashape);
- currentShapes[i] = tracking.LBF.projectShapeToBoundingBox_(s, boundingBoxes[i]);
- }
- };
- /**
- * Derives the binary features from the image for each landmark.
- * @param {object} forest The random forest to search for the best binary feature match.
- * @param {array} images The images with pixels in a grayscale linear array.
- * @param {array} currentShapes The current landmarks shape.
- * @param {array} boundingBoxes The bounding boxes of the faces.
- * @param {matrix} meanShape The mean shape of the current landmarks set.
- * @return {array} The binary features extracted from the image and matched with the
- * training data.
- * @static
- */
- tracking.LBF.Regressor.deriveBinaryFeat = function(forest, images, currentShapes, boundingBoxes, meanShape){
- var binaryFeatures = new Array(images.length);
- for(var i=0; i < images.length; i++){
- var t = forest.maxNumTrees * forest.landmarkNum + 1;
- binaryFeatures[i] = new Array(t);
- for(var j=0; j < t; j++){
- binaryFeatures[i][j] = {};
- }
- }
- var leafnodesPerTree = 1 << (forest.maxDepth - 1);
- for(var i=0; i < images.length; i++){
- var projectedShape = tracking.LBF.unprojectShapeToBoundingBox_(currentShapes[i], boundingBoxes[i]);
- var transform = tracking.LBF.similarityTransform_(projectedShape, meanShape);
-
- for(var j=0; j < forest.landmarkNum; j++){
- for(var k=0; k < forest.maxNumTrees; k++){
- var binaryCode = tracking.LBF.Regressor.getCodeFromTree(forest.rfs[j][k], images[i],
- currentShapes[i], boundingBoxes[i], transform[0], transform[1]);
- var index = j*forest.maxNumTrees + k;
- binaryFeatures[i][index].index = leafnodesPerTree * index + binaryCode;
- binaryFeatures[i][index].value = 1;
- }
- }
- binaryFeatures[i][forest.landmarkNum * forest.maxNumTrees].index = -1;
- binaryFeatures[i][forest.landmarkNum * forest.maxNumTrees].value = -1;
- }
- return binaryFeatures;
- }
- /**
- * Gets the binary code for a specific tree in a random forest. For each landmark,
- * the position from two pre-defined points are recovered from the training data
- * and then the intensity of the pixels corresponding to these points is extracted
- * from the image and used to traverse the trees in the random forest. At the end,
- * the ending nodes will be represented by 1, and the remaining nodes by 0.
- *
- * +--------------------------- Random Forest -----------------------------+
- * | Ø = Ending leaf |
- * | |
- * | O O O O O |
- * | / \ / \ / \ / \ / \ |
- * | O O O O O O O O O O |
- * | / \ / \ / \ / \ / \ / \ / \ / \ / \ / \ |
- * | Ø O O O O O Ø O O Ø O O O O Ø O O O O Ø |
- * | 1 0 0 0 0 0 1 0 0 1 0 0 0 0 1 0 0 0 0 1 |
- * +-----------------------------------------------------------------------+
- * Final binary code for this landmark: 10000010010000100001
- *
- * @param {object} tree The tree to be analyzed.
- * @param {array} image The image with pixels in a grayscale linear array.
- * @param {matrix} shape The current landmarks shape.
- * @param {object} boundingBox The bounding box of the face.
- * @param {matrix} rotation The rotation matrix used to transform the projected landmarks
- * into the mean shape.
- * @param {number} scale The scale factor used to transform the projected landmarks
- * into the mean shape.
- * @return {number} The binary code extracted from the tree.
- * @static
- */
- tracking.LBF.Regressor.getCodeFromTree = function(tree, image, shape, boundingBox, rotation, scale){
- var current = 0;
- var bincode = 0;
- while(true){
-
- var x1 = Math.cos(tree.nodes[current].feats[0]) * tree.nodes[current].feats[2] * tree.maxRadioRadius * boundingBox.width;
- var y1 = Math.sin(tree.nodes[current].feats[0]) * tree.nodes[current].feats[2] * tree.maxRadioRadius * boundingBox.height;
- var x2 = Math.cos(tree.nodes[current].feats[1]) * tree.nodes[current].feats[3] * tree.maxRadioRadius * boundingBox.width;
- var y2 = Math.sin(tree.nodes[current].feats[1]) * tree.nodes[current].feats[3] * tree.maxRadioRadius * boundingBox.height;
- var project_x1 = rotation[0][0] * x1 + rotation[0][1] * y1;
- var project_y1 = rotation[1][0] * x1 + rotation[1][1] * y1;
- var real_x1 = Math.floor(project_x1 + shape[tree.landmarkID][0]);
- var real_y1 = Math.floor(project_y1 + shape[tree.landmarkID][1]);
- // Clamp the sampled coordinates to the image bounds (x against width, y against height).
- real_x1 = Math.max(0.0, Math.min(real_x1, image.width - 1.0));
- real_y1 = Math.max(0.0, Math.min(real_y1, image.height - 1.0));
- var project_x2 = rotation[0][0] * x2 + rotation[0][1] * y2;
- var project_y2 = rotation[1][0] * x2 + rotation[1][1] * y2;
- var real_x2 = Math.floor(project_x2 + shape[tree.landmarkID][0]);
- var real_y2 = Math.floor(project_y2 + shape[tree.landmarkID][1]);
- real_x2 = Math.max(0.0, Math.min(real_x2, image.width - 1.0));
- real_y2 = Math.max(0.0, Math.min(real_y2, image.height - 1.0));
- var pdf = Math.floor(image.data[real_y1*image.width + real_x1]) -
- Math.floor(image.data[real_y2 * image.width +real_x2]);
- if(pdf < tree.nodes[current].thresh){
- current = tree.nodes[current].cnodes[0];
- }else{
- current = tree.nodes[current].cnodes[1];
- }
- if (tree.nodes[current].is_leafnode == 1) {
- bincode = 1;
- for (var i=0; i < tree.leafnodes.length; i++) {
- if (tree.leafnodes[i] == current) {
- return bincode;
- }
- bincode++;
- }
- return bincode;
- }
- }
- return bincode;
- }
- }());
- (function() {
- /**
- * Face Alignment via Regressing Local Binary Features (LBF)
- * This approach has two components: a set of local binary features and
- * a locality principle for learning those features.
- * The locality principle is used to guide the learning of a set of highly
- * discriminative local binary features for each landmark independently.
- * The obtained local binary features are used to learn a linear regression
- * that later will be used to guide the landmarks in the alignment phase.
- *
- * @authors: VoxarLabs Team (http://cin.ufpe.br/~voxarlabs)
- * Lucas Figueiredo <lsf@cin.ufpe.br>, Thiago Menezes <tmc2@cin.ufpe.br>,
- * Thiago Domingues <tald@cin.ufpe.br>, Rafael Roberto <rar3@cin.ufpe.br>,
- * Thulio Araujo <tlsa@cin.ufpe.br>, Joao Victor <jvfl@cin.ufpe.br>,
- * Tomer Simis <tls@cin.ufpe.br>)
- */
-
- /**
- * Holds the maximum number of stages that will be used in the alignment algorithm.
- * Each stage contains a different set of random forests and retrieves the binary
- * code from a more "specialized" (i.e. smaller) region around the landmarks.
- * @type {number}
- * @static
- */
- tracking.LBF.maxNumStages = 4;
- /**
- * Holds the regressor that will be responsible for extracting the local features from
- * the image and guide the landmarks using the training data.
- * @type {object}
- * @protected
- * @static
- */
- tracking.LBF.regressor_ = null;
-
- /**
- * Generates a set of landmarks for a set of faces
- * @param {array} pixels The pixels in a linear [r,g,b,a,...] array.
- * @param {number} width The image width.
- * @param {number} height The image height.
- * @param {array} faces The list of faces detected in the image
- * @return {array} The aligned landmarks, each set of landmarks corresponding
- * to a specific face.
- * @static
- */
- tracking.LBF.align = function(pixels, width, height, faces){
- if(tracking.LBF.regressor_ == null){
- tracking.LBF.regressor_ = new tracking.LBF.Regressor(
- tracking.LBF.maxNumStages
- );
- }
- // NOTE: Is this thresholding suitable? If it operates on the image, why is
- // there no skin-color filter or an adaptive threshold?
- pixels = tracking.Image.grayscale(pixels, width, height, false);
- pixels = tracking.Image.equalizeHist(pixels, width, height);
- var shapes = new Array(faces.length);
- for(var i in faces){
- faces[i].height = faces[i].width;
- var boundingBox = {};
- boundingBox.startX = faces[i].x;
- boundingBox.startY = faces[i].y;
- boundingBox.width = faces[i].width;
- boundingBox.height = faces[i].height;
- shapes[i] = tracking.LBF.regressor_.predict(pixels, width, height, boundingBox);
- }
- return shapes;
- }
- /**
- * Unprojects the landmarks shape from the bounding box.
- * @param {matrix} shape The landmarks shape.
- * @param {matrix} boundingBox The bounding box.
- * @return {matrix} The landmarks shape normalized relative to the bounding box.
- * @static
- * @protected
- */
- tracking.LBF.unprojectShapeToBoundingBox_ = function(shape, boundingBox){
- var temp = new Array(shape.length);
- for(var i=0; i < shape.length; i++){
- temp[i] = [
- (shape[i][0] - boundingBox.startX) / boundingBox.width,
- (shape[i][1] - boundingBox.startY) / boundingBox.height
- ];
- }
- return temp;
- }
- /**
- * Projects the landmarks shape into the bounding box. The landmarks shape has
- * normalized coordinates, so it is necessary to map these coordinates into
- * the bounding box coordinates.
- * @param {matrix} shape The landmarks shape.
- * @param {matrix} boundingBox The bounding box.
- * @return {matrix} The landmarks shape.
- * @static
- * @protected
- */
- tracking.LBF.projectShapeToBoundingBox_ = function(shape, boundingBox){
- var temp = new Array(shape.length);
- for(var i=0; i < shape.length; i++){
- temp[i] = [
- shape[i][0] * boundingBox.width + boundingBox.startX,
- shape[i][1] * boundingBox.height + boundingBox.startY
- ];
- }
- return temp;
- }
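- /**
- * Worked example (illustrative; the bounding box values are made up): the
- * project/unproject pair maps between normalized landmark coordinates and
- * image coordinates.
- *
- * <pre>
- * var box = {startX: 100, startY: 50, width: 200, height: 200};
- * tracking.LBF.projectShapeToBoundingBox_([[0.5, 0.5]], box);
- * // => [[200, 150]]
- * tracking.LBF.unprojectShapeToBoundingBox_([[200, 150]], box);
- * // => [[0.5, 0.5]]
- * </pre>
- */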
- /**
- * Calculates the rotation and scale necessary to transform shape1 into shape2.
- * @param {matrix} shape1 The shape to be transformed.
- * @param {matrix} shape2 The target shape.
- * @return {[matrix, scalar]} The rotation matrix and scale that, applied to
- * shape1, approximate shape2.
- * @static
- * @protected
- */
- tracking.LBF.similarityTransform_ = function(shape1, shape2){
- var center1 = [0,0];
- var center2 = [0,0];
- for (var i = 0; i < shape1.length; i++) {
- center1[0] += shape1[i][0];
- center1[1] += shape1[i][1];
- center2[0] += shape2[i][0];
- center2[1] += shape2[i][1];
- }
- center1[0] /= shape1.length;
- center1[1] /= shape1.length;
- center2[0] /= shape2.length;
- center2[1] /= shape2.length;
- var temp1 = tracking.Matrix.clone(shape1);
- var temp2 = tracking.Matrix.clone(shape2);
- for(var i=0; i < shape1.length; i++){
- temp1[i][0] -= center1[0];
- temp1[i][1] -= center1[1];
- temp2[i][0] -= center2[0];
- temp2[i][1] -= center2[1];
- }
- var covariance1, covariance2;
- var mean1, mean2;
- var t = tracking.Matrix.calcCovarMatrix(temp1);
- covariance1 = t[0];
- mean1 = t[1];
- t = tracking.Matrix.calcCovarMatrix(temp2);
- covariance2 = t[0];
- mean2 = t[1];
- var s1 = Math.sqrt(tracking.Matrix.norm(covariance1));
- var s2 = Math.sqrt(tracking.Matrix.norm(covariance2));
- var scale = s1/s2;
- temp1 = tracking.Matrix.mulScalar(1.0/s1, temp1);
- temp2 = tracking.Matrix.mulScalar(1.0/s2, temp2);
- var num = 0, den = 0;
- for (var i = 0; i < shape1.length; i++) {
- num = num + temp1[i][1] * temp2[i][0] - temp1[i][0] * temp2[i][1];
- den = den + temp1[i][0] * temp2[i][0] + temp1[i][1] * temp2[i][1];
- }
- var norm = Math.sqrt(num*num + den*den);
- var sin_theta = num/norm;
- var cos_theta = den/norm;
- var rotation = [
- [cos_theta, -sin_theta],
- [sin_theta, cos_theta]
- ];
- return [rotation, scale];
- }
- /**
- * LBF Random Forest data structure.
- * @static
- * @constructor
- */
- tracking.LBF.RandomForest = function(forestIndex){
- this.maxNumTrees = tracking.LBF.RegressorData[forestIndex].max_numtrees;
- this.landmarkNum = tracking.LBF.RegressorData[forestIndex].num_landmark;
- this.maxDepth = tracking.LBF.RegressorData[forestIndex].max_depth;
- this.stages = tracking.LBF.RegressorData[forestIndex].stages;
- this.rfs = new Array(this.landmarkNum);
- for(var i=0; i < this.landmarkNum; i++){
- this.rfs[i] = new Array(this.maxNumTrees);
- for(var j=0; j < this.maxNumTrees; j++){
- this.rfs[i][j] = new tracking.LBF.Tree(forestIndex, i, j);
- }
- }
- }
- /**
- * LBF Tree data structure.
- * @static
- * @constructor
- */
- tracking.LBF.Tree = function(forestIndex, landmarkIndex, treeIndex){
- var data = tracking.LBF.RegressorData[forestIndex].landmarks[landmarkIndex][treeIndex];
- this.maxDepth = data.max_depth;
- this.maxNumNodes = data.max_numnodes;
- this.nodes = data.nodes;
- this.landmarkID = data.landmark_id;
- this.numLeafnodes = data.num_leafnodes;
- this.numNodes = data.num_nodes;
- this.maxNumFeats = data.max_numfeats;
- this.maxRadioRadius = data.max_radio_radius;
- this.leafnodes = data.id_leafnodes;
- }
- }());
|