FaceRecognition.vue

<script setup lang="ts">
import MD5 from "js-md5";
import { onMounted, watchEffect } from "vue";
import { getMediaStream } from "@/utils/camera";
import { httpApp } from "@/plugins/axiosApp";
import { showLogout } from "@/utils/utils";
import { getCapturePhotoYunSign, saveCapturePhoto } from "@/api/login";
import { execLocal, fileExists } from "@/utils/nativeMethods";
/**
 * The parent controls whether comparison is synchronous via showRecognizeButton.
 *
 * Synchronous comparison: the result is delivered through on-recognize-result.
 *
 * Asynchronous comparison: the parent decides whether, when, and how often to
 * compare via snapId, and handles errors itself.
 * Each asynchronous comparison also carries a snapId (a timestamp) so the
 * parent can identify and count the tasks.
 * Multiple asynchronous comparison tasks may run at the same time.
 */
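/*
 * A minimal parent-side usage sketch (hypothetical parent component; only the
 * props and events declared below are assumed, handler names are illustrative):
 *
 *   <FaceRecognition
 *     width="400"
 *     height="300"
 *     :show-recognize-button="true"
 *     :snap-id="snapId"
 *     :exam-record-data-id="examRecordDataId"
 *     @on-recognize-result="onSyncResult"
 *     @on-async-recognize-result="onAsyncResult"
 *   />
 *
 *   // Synchronous mode: read the comparison result from the event payload.
 *   function onSyncResult(v: { isPassed: boolean; isStranger: boolean }) { ... }
 *   // Asynchronous mode: assign a new truthy snapId (e.g. Date.now()) to trigger
 *   // a capture, then handle the result or error here.
 *   function onAsyncResult(v: { hasError: boolean; fileName: string }) { ... }
 */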
const {
  width = "400",
  height = "300",
  snapId = 0,
  examRecordDataId = -1,
  showRecognizeButton = false,
} = defineProps<{
  width: string;
  height: string;
  showRecognizeButton: boolean;
  snapId?: number;
  examRecordDataId?: number;
}>();
const emit = defineEmits<{
  (
    e: "on-recognize-result",
    v: { isPassed: boolean; isStranger: boolean }
  ): void;
  (
    e: "on-async-recognize-result",
    v: { hasError: boolean; fileName: string }
  ): void;
}>();
let snapBtnDisabled = $ref(true);
let btnText = $ref("开始识别");
watchEffect(() => {
  if (snapId) {
    void snapAsync();
  }
});
onMounted(async () => {
  await openCamera();
});
const video = $ref<HTMLVideoElement>();
async function openCamera() {
  const _openStartTime = Date.now();
  const stream = await getMediaStream();
  video.srcObject = stream;
  try {
    await video.play();
  } catch (error) {
    if (error instanceof Error) {
      if (error.name === "AbortError") {
        logger({
          cnl: ["server"],
          act: "video.play",
          dtl: "AbortError and retry",
        });
        await video.play();
        logger({
          cnl: ["server"],
          act: "摄像头没有正常启用: AbortError 重试成功",
        });
      } else if (error.name === "NotSupportedError") {
        logger({
          cnl: ["server"],
          act: "摄像头没有正常启用",
          ejn: JSON.stringify(error),
          ext: {
            errorName: error.name,
            errorMessage: error.message,
            errorStack: error.stack,
          },
        });
        $message.error("摄像头没有正常启用: " + error);
      } else {
        throw error;
      }
    } else {
      logger({
        cnl: ["server"],
        act: "video.play",
        dtl: "not an Error",
        stk: error + "",
      });
    }
    throw error;
  }
  snapBtnDisabled = false;
  const _openEndTime = Date.now();
  logger({
    cnl: ["server"],
    act: "摄像头打开耗时",
    ext: { cost: _openEndTime - _openStartTime },
  });
}
async function videoStartPlay() {
  if (video && video.paused) {
    await video.play().catch((e) => {
      if (!(e instanceof Error)) {
        logger({
          cnl: ["server"],
          act: "restart video play error",
          dtl: "not an Error",
          stk: e + "",
        });
      } else {
        logger({
          cnl: ["server"],
          act: "restart video play error",
          stk: e.stack,
          ejn: JSON.stringify(e),
        });
      }
      throw e;
    });
  }
}
//#region Synchronous face comparison
async function snapSync() {
  logger({
    cnl: ["server"],
    act: "同步人脸比对",
    dtl: "点击开始识别按钮",
  });
  $message.destroyAll();
  try {
    snapBtnDisabled = true;
    btnText = "拍照中...";
    logger({ cnl: ["server"], lvl: "debug", act: btnText });
    const captureBlob = await getSnapShot(true);
    if (!(captureBlob instanceof Blob)) return;
    logger({
      cnl: ["server"],
      lvl: "debug",
      act: "getSnapShot",
      ext: { blobSize: captureBlob.size },
    });
    if (captureBlob.size < 48 * 48 || captureBlob.size >= 2 * 1024 * 1024) {
      $message.error("抓拍照片太小!");
      logger({
        cnl: ["server"],
        act: "摄像头异常",
        dtl: "抓拍照片大小异常",
        ext: { blobSize: captureBlob.size },
      });
      throw new Error("抓拍照片大小异常");
    }
    btnText = "上传照片中...";
    logger({ cnl: ["server"], lvl: "debug", act: btnText });
    const [captureFilePath, signIdentifier] = await uploadToServer(captureBlob);
    btnText = "人脸比对中...";
    await faceCompareSync(captureFilePath, signIdentifier);
    logger({
      cnl: ["server"],
      act: "同步比对照片详细日志",
      ext: {
        fileUrl: captureFilePath,
        signIdentifier,
      },
    });
  } catch (error) {
    console.log("同步照片比对流程失败");
    throw error;
  } finally {
    btnText = "开始识别";
    // Debounce: avoid rapid repeated clicks on the recognize button.
    await new Promise((resolve) => setTimeout(resolve, 3000));
    snapBtnDisabled = false;
  }
}
async function getSnapShot(compareSync: boolean): Promise<Blob | unknown> {
  return new Promise((resolve, reject) => {
    if (video.readyState !== 4 || !(video.srcObject as MediaStream).active) {
      $message.error("摄像头没有正常启用");
      logger({
        cnl: ["server"],
        pgu: "AUTO",
        act: "getSnapShot",
        dtl: "摄像头没有正常启用",
      });
      reject("摄像头没有正常启用");
      if (!compareSync) {
        showLogout("摄像头没有正常启用");
      }
      return;
    }
    // video.pause();
    const canvas = document.createElement("canvas");
    canvas.width = 220;
    canvas.height = 165;
    const context = canvas.getContext("2d");
    context?.drawImage(video, 0, 0, 220, 165);
    canvas.toBlob((blob) => resolve(blob!), "image/png", 0.95);
  })
  // .finally(() => void videoStartPlay()); // finally runs before the promise settles for the caller, which is what we want
}
// Used to check whether two consecutive snapshots have the same MD5 (a stuck camera).
let __previousPhotoMD5 = "";
async function uploadToServer(captureBlob: Blob): Promise<[string, string]> {
  async function blobToArray(blob: Blob): Promise<ArrayBuffer> {
    return new Promise((resolve) => {
      const reader = new FileReader();
      reader.addEventListener("loadend", function () {
        // reader.result contains the contents of blob as a typed array
        resolve(reader.result as ArrayBuffer);
      });
      reader.readAsArrayBuffer(blob);
    });
  }
  // Save the snapshot to the server.
  let resultUrl, signIdentifier;
  try {
    const buffer = await blobToArray(captureBlob);
    const fileMd5Base64 = window.btoa(
      String.fromCharCode(...MD5.digest(buffer))
    );
    if (fileMd5Base64 === __previousPhotoMD5) {
      logger({
        cnl: ["server"],
        pgu: "AUTO",
        act: "uploadToServer",
        key: "抓拍照片异常",
        stk: "两次fileMd5Base64一样,疑似摄像头卡住",
      });
    }
    __previousPhotoMD5 = fileMd5Base64;
    const res = await getCapturePhotoYunSign({ fileSuffix: "png" });
    try {
      const saveRes = await saveCapturePhoto(
        res.data.formUrl,
        res.data.formParams,
        { file: captureBlob }
      );
      if (saveRes.headers["content-md5"] !== fileMd5Base64) {
        logger({
          cnl: ["server"],
          dtl: "抓拍照片保存失败--alioss content-md5 mismatch",
          ext: {
            fileMd5Base64,
            "content-md5": saveRes.headers["content-md5"],
          },
        });
        throw new Error("图片校验失败");
      }
    } catch (error) {
      logger({
        cnl: ["server"],
        dtl: "抓拍照片保存失败",
        possibleError: error,
      });
      throw error;
    }
    // console.log(response);
    resultUrl = res.data.accessUrl;
    signIdentifier = res.data.signIdentifier;
    logger({
      cnl: ["server"],
      dtl: "抓拍照片保存成功",
      ext: { resultUrl, fileMd5Base64 },
    });
  } catch (e) {
    console.log(e);
    logger({
      cnl: ["server"],
      pgu: "AUTO",
      dtl: "保存抓拍照片到服务器失败!",
      possibleError: e,
    });
    $message.error("抓拍照片保存失败!");
    throw new Error("抓拍照片保存失败!");
  }
  return [resultUrl, signIdentifier];
}
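/*
 * Note on the check above: `fileMd5Base64` follows the HTTP Content-MD5
 * convention (base64 of the 16 raw MD5 digest bytes), which is why it can be
 * compared directly against the OSS `content-md5` response header. A rough
 * Node-side equivalent, purely for illustration (not used by this component):
 *
 *   import { createHash } from "crypto";
 *   const contentMd5 = createHash("md5")
 *     .update(Buffer.from(buffer))
 *     .digest("base64");
 */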
async function faceCompareSync(
  captureFilePath: string,
  signIdentifier: string
) {
  try {
    logger({ cnl: ["server"], act: "同步比对开始" });
    const res = await httpApp.post(
      "/api/ecs_oe_student_face/examCaptureQueue/compareFaceSync?signIdentifier=" +
        signIdentifier +
        "&fileUrl=" +
        encodeURIComponent(captureFilePath)
    );
    logger({
      cnl: ["server"],
      act: "同步比对api成功",
      ext: {
        isPass: res.data.isPass,
        isStranger: res.data.isStranger,
        errorMsg: res.data.errorMsg,
      },
    });
    emit("on-recognize-result", {
      isPassed: res.data.isPass,
      isStranger: res.data.isStranger,
    });
  } catch (e) {
    logger({
      cnl: ["server"],
      act: "同步比对失败",
      possibleError: e,
    });
    throw new Error("同步照片比较失败!");
  }
}
//#endregion Synchronous face comparison
//#region Asynchronous face comparison
async function snapAsync() {
  try {
    logger({ cnl: ["server"], act: "定时抓拍开始" });
    const captureBlob = await getSnapShot(false);
    if (!(captureBlob instanceof Blob)) return;
    logger({ cnl: ["server"], act: "抓拍照片的大小:" + captureBlob.size });
    if (captureBlob.size < 48 * 48 || captureBlob.size >= 2 * 1024 * 1024) {
      // Historical records show incomplete images were always 8192 bytes, so images
      // under 10 KB were once treated as failed captures. Baidu Analytics records later
      // showed that images below 8192 bytes can still be valid, so the lower bound was
      // relaxed to the Face++ minimum (48x48).
      logger({
        cnl: ["server"],
        act: "摄像头异常",
        dtl: "定时抓拍照片大小异常",
        ext: { blobSize: captureBlob.size },
      });
      throw new Error("定时抓拍照片大小异常");
    }
    const startTime = Date.now();
    const [captureFilePath, signIdentifier] = await uploadToServer(captureBlob);
    const endTime = Date.now();
    logger({
      cnl: ["server"],
      act: "定时抓拍上传",
      ext: { cost: endTime - startTime },
    });
    await faceCompare(captureFilePath, signIdentifier, examRecordDataId);
    logger({
      cnl: ["server"],
      act: "定时抓拍比对",
      dtl: "定时抓拍流程成功",
      ext: { cost: Date.now() - endTime, signIdentifier },
    });
  } catch (error) {
    if (!(error instanceof Error)) {
      logger({
        cnl: ["server"],
        act: "snapAsync",
        dtl: "not an Error",
        stk: error + "",
      });
      return;
    }
    logger({
      cnl: ["server"],
      act: "定时抓拍流程失败",
      ejn: JSON.stringify(error),
      stk: error.stack,
      possibleError: error,
    });
    emit("on-async-recognize-result", {
      hasError: true,
      fileName: "",
    });
  }
}
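/*
 * Scheduling sketch for the asynchronous flow (hypothetical parent code; the
 * interval value is an assumption, not taken from this file):
 *
 *   const timer = setInterval(() => {
 *     snapId.value = Date.now(); // a new truthy snapId triggers snapAsync() via watchEffect
 *   }, 60 * 1000);
 *   onBeforeUnmount(() => clearInterval(timer));
 */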
type CameraInfo = {
  detail: string;
  pid: string;
  vid: string;
  name: string;
};
async function faceCompare(
  captureFilePath: string,
  signIdentifier: string,
  examRecordDataId: number
) {
  try {
    let cameraInfos;
    let hasVirtualCamera = false;
    if (typeof window.nodeRequire !== "undefined") {
      const fs: typeof import("fs") = window.nodeRequire("fs");
      if (fileExists("multiCamera.exe")) {
        try {
          await execLocal("multiCamera.exe");
          cameraInfos = fs.readFileSync("CameraInfo.txt", "utf-8");
          // cameraInfos =
          //   '[{"detail":"@device:pnp:?display#int3470#4&300121c4&0&uid13424#{65e8773d-8f56-11d0-a3b9-00a0c9223196}{9c5f415a-02cd-4e28-aeb7-811cb317dd64}","name":"HP Truevision 5MP Front","pid":"13424","vid":"3470"},{"detail":"@device:pnp:?display#int3470#4&300121c4&0&uid13424#{65e8773d-8f56-11d0-a3b9-00a0c9223196}{a6c1c503-01f1-4767-a229-00a0b223162f}","name":"HP Truevision 8MP Rear","pid":"13424","vid":"3470"},{"detail":"@device:pnp:?usb#vid_8086&pid_0a80&mi_04#6&28913c47&0&0004#{65e8773d-8f56-11d0-a3b9-00a0c9223196}global","name":"Intel(R) RealSense(TM) 3D Camera (R200) RGB","pid":"0a80","vid":"8086"},{"detail":"@device:pnp:?usb#vid_8086&pid_0a80&mi_00#6&28913c47&0&0000#{65e8773d-8f56-11d0-a3b9-00a0c9223196}global","name":"Intel(R) RealSense(TM) 3D Camera (R200) Left-Right","pid":"0a80","vid":"8086"},{"detail":"@device:pnp:?usb#vid_8086&pid_0a80&mi_02#6&28913c47&0&0002#{65e8773d-8f56-11d0-a3b9-00a0c9223196}global","name":"Intel(R) RealSense(TM) 3D Camera (R200) Depth","pid":"0a80","vid":"8086"}]';
          if (cameraInfos && cameraInfos.trim()) {
            cameraInfos = cameraInfos.trim();
            cameraInfos = cameraInfos.replace(/\r\n/g, "");
            cameraInfos = cameraInfos.replace(/\n/g, "");
            logger({
              cnl: ["server"],
              act: "multiCamera.exe",
              ext: { cameraInfos },
            });
          }
          if (cameraInfos.includes('""')) {
            hasVirtualCamera = true;
          }
          // multiCamera.exe 1.0.1
          if (cameraInfos.includes("cameraInfo")) {
            cameraInfos = JSON.stringify(JSON.parse(cameraInfos).cameraInfo);
          }
          if (cameraInfos.length >= 800) {
            logger({
              cnl: ["server"],
              act: "multiCamera.exe",
              stk: "虚拟摄像头-cameraInfos超长",
              ext: { cameraInfos },
            });
            let ary: CameraInfo[] = JSON.parse(cameraInfos);
            // Keep only one entry per identical pid & vid pair.
            const pidAndVidCollector: string[] = [];
            ary = ary.filter((c) => {
              const pv = c.pid + "|" + c.vid;
              const res = pidAndVidCollector.includes(pv);
              pidAndVidCollector.push(pv);
              return !res;
            });
            cameraInfos = JSON.stringify(ary);
            logger({
              cnl: ["server"],
              act: "multiCamera.exe",
              stk: "除重复pid&vid",
            });
            if (cameraInfos.length >= 800) {
              cameraInfos = JSON.stringify(
                (JSON.parse(cameraInfos) as CameraInfo[]).map((v) => {
                  return {
                    pid: v.pid,
                    vid: v.vid,
                    detail: "omitted",
                    name: v.name,
                  };
                })
              );
              console.log("摄像头检测超长:", "去除detail");
              console.log(cameraInfos);
            }
            if (cameraInfos.length >= 800) {
              logger({
                cnl: ["server"],
                act: "multiCamera.exe",
                stk: "精简后还是超长",
                ext: { cameraInfos },
              });
            }
          }
        } catch (error) {
          logger({
            cnl: ["server"],
            act: "multiCamera.exe",
            stk: "虚拟摄像头-读取摄像头列表失败",
            possibleError: error,
          });
          // throw new Error("读取摄像头列表失败");
        }
      }
    }
    const body: any = {
      fileUrl: captureFilePath,
      signIdentifier,
      examRecordDataId,
    };
    if (cameraInfos) {
      body.cameraInfos = cameraInfos;
      body.hasVirtualCamera = hasVirtualCamera;
    }
    logger({
      cnl: ["server"],
      act: "抓拍照片详细日志",
      ext: {
        fileUrl: captureFilePath,
        signIdentifier,
        examRecordDataId,
        cameraInfos,
        hasVirtualCamera,
      },
    });
    const res = await httpApp.post(
      "/api/ecs_oe_student_face/examCaptureQueue/uploadExamCapture",
      body
    );
    emit("on-async-recognize-result", {
      hasError: false,
      fileName: res.data,
    });
  } catch (e) {
    logger({
      cnl: ["server"],
      act: "定时抓拍",
      dtl: "抓拍失败",
      possibleError: e,
    });
    emit("on-async-recognize-result", {
      hasError: true,
      fileName: "",
    });
    throw new Error("异步比较抓拍照片失败");
  }
}
//#endregion Asynchronous face comparison
</script>
<template>
  <div>
    <video id="video" ref="video" :width="width" :height="height" autoplay />
    <div v-if="showRecognizeButton" class="btn-container">
      <button
        class="verify-button"
        :class="[snapBtnDisabled && 'disable-verify-button']"
        :disabled="snapBtnDisabled"
        @click="snapSync"
      >
        {{ btnText }}
      </button>
    </div>
  </div>
</template>
<style scoped>
.btn-container {
  position: absolute;
  width: 400px;
  text-align: center;
  margin-top: -50px;
  color: #232323;
}
.verify-button {
  font-size: 16px;
  background-color: #ffcc00;
  display: inline-block;
  padding: 6px 16px;
  border-radius: 6px;
}
.verify-button:hover {
  color: #444444;
  cursor: pointer;
}
.disable-verify-button {
  background-color: #f7f7f7;
  color: #c5c8ce;
}
.disable-verify-button:hover {
  cursor: not-allowed;
  color: #c5c8ce;
}
</style>