<!-- FaceTracking.vue -->
  1. <template>
  2. <div></div>
  3. </template>
  4. <script>
import * as faceapi from "face-api.js";
import { FACE_API_MODEL_PATH } from "@/constants/constants";
import { mapGetters as globalMapGetters } from "vuex";
import { createNamespacedHelpers } from "vuex";
// State helpers scoped to the exam-in-progress Vuex module.
const { mapState } = createNamespacedHelpers("examingHomeModule");
import { isThisMachineOwnByStudent } from "@/utils/util";
// Exposed globally so the library can be poked from the devtools console.
window.faceapi = faceapi;
// const os = (function() {
// const ua = navigator.userAgent.toLowerCase();
// return {
// isWin2K: /windows nt 5.0/.test(ua),
// isXP: /windows nt 5.1/.test(ua),
// isVista: /windows nt 6.0/.test(ua),
// isWin7: /windows nt 6.1/.test(ua),
// isWin8: /windows nt 6.2/.test(ua),
// isWin81: /windows nt 6.3/.test(ua),
// isWin10: /windows nt 10.0/.test(ua),
// };
// })();
  24. let __cache4WebglAvailable = null;
  25. function webgl_available() {
  26. if (__cache4WebglAvailable) return __cache4WebglAvailable;
  27. var canvas = document.createElement("canvas");
  28. var gl = canvas.getContext("webgl");
  29. __cache4WebglAvailable = gl && gl instanceof WebGLRenderingContext;
  30. return __cache4WebglAvailable;
  31. }
  32. let __cache4TensorFlowWebPackStatus = null;
  33. function tensorFlowWebPackStatus() {
  34. if (__cache4TensorFlowWebPackStatus) return __cache4TensorFlowWebPackStatus;
  35. __cache4TensorFlowWebPackStatus = faceapi.tf.ENV.get("WEBGL_PACK");
  36. return __cache4TensorFlowWebPackStatus;
  37. }
  38. // function getCPUModel() {
  39. // if (typeof nodeRequire != "undefined") {
  40. // var os = window.nodeRequire("os");
  41. // const cpus = os.cpus();
  42. // if (cpus.length > 0) {
  43. // return cpus[0].model;
  44. // }
  45. // }
  46. // return "null";
  47. // }
  48. // if (os.isWin7) alert("是win7");
  49. // if (os.isWin10) alert("是win10");
// TinyFaceDetector input size used by getFaceDetectorOptions().
// Starts at the cheapest size; re-tuned at runtime by detectTest().
let __inputSize = 128;

// Benchmarks face detection against the live <video> element to pick the
// best TinyFaceDetector inputSize for this machine. Each candidate size is
// tried `detectTimes` times (paced 3s apart, with a 10s timeout treated as
// failure); the first size with the highest success count wins, and a size
// that succeeds on every attempt is selected immediately without trying
// larger ones. The winner is stored in __inputSize (and window.__inputSize)
// and reported to Baidu analytics (window._hmt).
async function detectTest() {
  const inputSizeList = [128, 160, 224, 320, 416, 512, 608];
  const succRate = [0, 0, 0, 0, 0, 0, 0];
  const detectTimes = 6;
  for (let idx = 0; idx < inputSizeList.length; idx++) {
    for (let n = 0; n < detectTimes; n++) {
      // Pace attempts so benchmarking does not hog CPU/GPU during the exam.
      await new Promise(resolve => setTimeout(resolve, 3 * 1000));
      const inputSize = inputSizeList[idx];
      const videoEl = document.getElementById("video");
      try {
        const options = new faceapi.TinyFaceDetectorOptions({
          inputSize: inputSize,
          scoreThreshold: 0.5,
        });
        // const result = await faceapi.detectAllFaces(videoEl, options);
        // console.log(result);
        // If detection takes longer than 10s the timeout promise wins the
        // race and resolves to undefined, which counts as a failure below.
        const result = await Promise.race([
          faceapi.detectAllFaces(videoEl, options),
          new Promise(resolve => setTimeout(resolve, 10 * 1000)),
        ]);
        if (result && result.length >= 1) {
          console.log(`inputSize: ${inputSize} ${result.length}`);
          succRate[idx]++;
        } else {
          console.log(`inputSize: ${inputSize} 检测失败`);
        }
      } catch (error) {
        console.log(error);
        console.log(`inputSize: ${inputSize} 检测失败-异常`);
      }
    }
    // Perfect score for this size: stop early and use it.
    if (succRate[idx] === detectTimes) {
      console.log(`inputSize: ${inputSizeList[idx]} 提前选中`);
      break;
    }
  }
  console.log({ succRate });
  // indexOf(max) picks the FIRST (smallest, cheapest) best-scoring size.
  const max = Math.max(...succRate);
  const idx = succRate.indexOf(max);
  __inputSize = inputSizeList[idx];
  window.__inputSize = __inputSize;
  console.log("最好的 inputSize 为: " + __inputSize);
  window._hmt.push([
    "_trackEvent",
    "正在考试页面",
    "最好的 inputSize 为: " + __inputSize,
  ]);
  return __inputSize;
}
  100. function getFaceDetectorOptions() {
  101. return new faceapi.TinyFaceDetectorOptions({
  102. inputSize: __inputSize || 128,
  103. scoreThreshold: 0.5,
  104. });
  105. // return new faceapi.SsdMobilenetv1Options({ minConfidence: 0.8 });
  106. // return new faceapi.MtcnnOptions({ minFaceSize: 200, scaleFactor: 0.8 });
  107. }
// Rolling sample of per-pass detection durations; once 5 samples exist the
// average is reported to analytics exactly once (see detectFaces()).
const detectTimeArray = [];

// Invisible component that runs periodic face detection on the exam
// webcam <video> element and warns the student when 0 or >1 faces appear.
export default {
  name: "FaceTracking",
  computed: {
    ...globalMapGetters(["isEpcc"]),
    ...mapState(["isDoingFaceLiveness"]),
  },
  async created() {
    // Load detector + landmark models before tracking can start.
    await faceapi.nets.tinyFaceDetector.load(FACE_API_MODEL_PATH);
    // faceapi.nets.faceRecognitionNet.load(modelsPath);
    await faceapi.loadFaceLandmarkModel(FACE_API_MODEL_PATH);
    faceapi.tf.ENV.set("WEBGL_PACK", false);
  },
  async mounted() {
    let trackStarted = false;
    const that = this;
    // Kicks off tracking once the <video> element is ready (readyState 4)
    // and the detector weights are loaded; otherwise returns so the
    // interval below retries a second later.
    async function trackHead() {
      const video = document.getElementById("video");
      if (
        video &&
        video.readyState === 4 &&
        faceapi.nets.tinyFaceDetector.params
      ) {
        trackStarted = true;
      } else {
        return;
      }
      console.log("start tracking ... ");
      await detectTest();
      await that.detectFaces();
    }
    if (!this.isEpcc) {
      // EPCC deployments have real-time face detection disabled.
      this.trackHeadInterval = setInterval(() => {
        if (trackStarted) {
          clearInterval(this.trackHeadInterval);
        } else {
          trackHead();
        }
      }, 1000);
    }
  },
  beforeDestroy() {
    // Clear the retry interval and any pending timers to avoid callbacks
    // firing after the component is gone.
    clearInterval(this.trackHeadInterval);
    clearTimeout(this.warningTimeout);
    clearTimeout(this.detectFacesTimeout);
  },
  methods: {
    // Runs one detection pass on the <video> element, records timing
    // stats, shows throttled warnings, and reschedules itself every 10s.
    // Permanently stops (no reschedule) when detection is too slow.
    async detectFaces() {
      this.singleTimeUsage = this.singleTimeUsage || 0;
      this.multipleTimeUsage = this.multipleTimeUsage || 0;
      // Bail out for good if a single pass took >10s or the reported
      // rolling average exceeded 500ms — the machine is too slow.
      if (
        this.singleTimeUsage > 10 * 1000 ||
        this.multipleTimeUsage > 0.5 * 1000
      ) {
        console.log("关闭实时人脸检测,因为耗时过长", this.multipleTimeUsage);
        window._hmt.push([
          "_trackEvent",
          "正在考试页面",
          "关闭实时人脸检测,因为耗时过长",
        ]);
        return;
      }
      // Liveness check in progress: skip this pass and retry in 10s.
      if (this.isDoingFaceLiveness) {
        console.log("正在活检,暂停实时人脸");
        this.detectFacesTimeout = setTimeout(
          () => this.detectFaces(),
          10 * 1000
        );
        return;
      }
      const videoEl = document.getElementById("video");
      // var canvas = document.createElement("canvas");
      // canvas.width = 133;
      // canvas.height = 100;
      // var context = canvas.getContext("2d");
      // context.drawImage(videoEl, 0, 0, 133, 100);
      const detectStartTime = performance.now();
      // this.___vWidth =
      // this.___vWidth ||
      // document.getElementById("video-container").clientWidth;
      const options = getFaceDetectorOptions();
      let result;
      try {
        result = await faceapi
          // .detectSingleFace(videoEl, options)
          .detectAllFaces(videoEl, options);
      } catch (e) {
        window._hmt.push(["_trackEvent", "正在考试页面", "实时人脸检测失败"]);
        throw e;
      }
      // console.log(result);
      const detectEndTime = performance.now();
      // console.log("WebGL: ", faceapi.tf.ENV.get("WEBGL_PACK"));
      console.log(
        "WebGL: ",
        webgl_available(),
        " WEBGL_PACK: ",
        tensorFlowWebPackStatus(),
        " single detect time: ",
        detectEndTime - detectStartTime,
        " result: ",
        result.length
      );
      this.singleTimeUsage = detectEndTime - detectStartTime;
      if (detectTimeArray.length < 6) {
        // Only sample a limited number of detection passes.
        detectTimeArray.push(detectEndTime - detectStartTime);
      }
      if (detectTimeArray.length === 6) {
        // Drop the first (warm-up) sample, then report the average once.
        detectTimeArray.shift();
        const avg =
          detectTimeArray.reduce((a, b) => a + b, 0) / detectTimeArray.length;
        const roundAvg = Math.round(avg / 100) * 100;
        window._hmt.push([
          "_trackEvent",
          "正在考试页面",
          "实时人脸检测平均时长" +
            (isThisMachineOwnByStudent() ? "学生电脑" : "学习中心电脑"),
          roundAvg + "ms",
        ]);
        console.log(detectTimeArray);
        detectTimeArray.push(0, 0); // keep length past 6 so neither branch above runs again
        // FIXME: stopped collecting this stat shortly after launch — too fragmented.
        // const roundAvg100 = Math.round(avg / 100) * 100;
        // const osType = os.isWin7 ? "win7" : os.isWin10 ? "win10" : "other";
        // const stats = `webgl: ${webgl_available()}; tf_backend: ${faceapi.tf.getBackend()}; os: ${osType}; cpu: ${getCPUModel()}`;
        // window._hmt.push([
        // "_trackEvent",
        // "正在考试页面",
        // "实时人脸检测统计" + roundAvg100 + "ms",
        // stats,
        // ]);
        this.multipleTimeUsage = roundAvg;
      }
      // init this.showWaringTime — throttles warnings to one per 20s
      // (NOTE(review): "Waring" is a long-standing typo; renaming would
      // touch persisted instance state, so it is kept as-is).
      this.showWaringTime = this.showWaringTime || Date.now();
      // More than one face: remind the student to work alone.
      if (result.length >= 2 && Date.now() - this.showWaringTime > 20 * 1000) {
        this.showWaringTime = Date.now();
        this.$Message.warning({
          content: "请独立完成考试",
          duration: 5,
          closable: true,
        });
      }
      // No face at all: ask the student to adjust their posture.
      if (result.length === 0 && Date.now() - this.showWaringTime > 20 * 1000) {
        this.showWaringTime = Date.now();
        this.$Message.warning({
          content: "请调整坐姿,诚信考试",
          duration: 5,
          closable: true,
        });
      }
      // Flash the warning border (CSS class below) whenever the face
      // count is not exactly one.
      if (
        (!result || result.length !== 1) &&
        !videoEl.classList.contains("video-warning")
      ) {
        videoEl.classList.add("video-warning");
        this.warningTimeout = setTimeout(function() {
          videoEl.classList.remove("video-warning");
        }, 3000);
      }
      this.detectFacesTimeout = setTimeout(() => this.detectFaces(), 10 * 1000);
    },
  },
};
  274. </script>
<style>
/* Pulsing glow applied to the webcam video when the face count is wrong. */
@keyframes warning-people {
  0% {
    /* border: solid 5px white; */
    box-shadow: 0 0 20px white;
  }
  100% {
    /* border: solid 5px red; */
    box-shadow: 0 0 20px gold;
  }
}
/* Added to the <video> element by detectFaces(); removed after 3s. */
.video-warning {
  animation: warning-people 3s infinite;
}
</style>