@@ -0,0 +1,970 @@
+<template>
+  <div class="page-container">
+    <div id="video-container" style="position: relative" class="page-container">
+      <div v-if="shouldShowSections" class="instruction-tips above-video">
+        <div
+          class="instruction-animation"
+          :style="{
+            width: '33.3%',
+            'font-size': '16px',
+            padding: '0 10px',
+            'margin-left': reverseSection * 33 + '%',
+          }"
+          data-intro="停留的时间:每一次停留时长可能不一样。"
+        >
+          请将脸部移入此区域,停留<span style="color: blue">{{
+            currentStep.stay
+          }}</span
+          >秒,并保持<span style="color: blue">{{
+            shouldDetectExpression ? (currentStep.happy ? "笑容" : "严肃") : ""
+          }}</span>
+        </div>
+      </div>
+      <div class="instruction-tips above-video">
+        <div
+          class="instruction-animation"
+          :style="{
+            width: '100%',
+            'font-size': '18px',
+            'text-align': 'center',
+          }"
+        >
+          保持<span style="color: blue">{{
+            shouldDetectExpression ? (currentStep.happy ? "笑容" : "严肃") : ""
+          }}</span>
+          <Progress hide-info :percent="stepProgress" />
+        </div>
+      </div>
+
+      <div
+        v-if="isDetecting"
+        class="instruction-total above-video"
+        style="z-index: 3"
+      >
+        <div class="total-text" data-intro="请在规定的时间内完成。">
+          {{ instructions.total }}
+        </div>
+      </div>
+      <div v-if="shouldShowSections" class="seperators above-video">
+        <div class="line"></div>
+        <div class="line"></div>
+      </div>
+      <!-- <div v-if="!behaving" class="blocks above-video">
+        <div class="block-index">1</div>
+        <div class="block-index">2</div>
+        <div class="block-index">3</div>
+      </div> -->
+      <div v-if="shouldShowSections" class="blocks above-video">
+        <div
+          v-for="item in [3, 2, 1]"
+          :key="item"
+          :class="[
+            'block-index-size',
+            currentStep.section !== item && 'block-index-blur',
+          ]"
+        ></div>
+      </div>
+      <div
+        v-if="shouldShowSections"
+        :class="[
+          'above-video',
+          'instruction-face',
+          'instruction-animation',
+          behaving && 'instruction-face-animation-state',
+        ]"
+        :style="{
+          width: '33.3%',
+          'margin-left': reverseSection * 33 + '%',
+        }"
+        data-intro="停留的位置:请将脸部停留在头像所处的列。检测成功,头像会停止抖动。"
+      >
+        <!-- <el-progress
+          type="circle"
+          :stroke-width="12"
+          :show-text="false"
+          :percentage="stepProgress"
+          style="margin-top: -13px;"
+          class="above-video"
+        ></el-progress> -->
+        <!-- <div
+          style="margin: 0 auto; border-radius: 50%; margin-top: -50px; width: 200px; height: 200px; border: 10px solid black;"
+        ></div> -->
+      </div>
+      <div
+        v-if="behaving"
+        :class="[
+          'above-video',
+          'instruction-face',
+          'instruction-animation',
+          behaving && 'instruction-face-animation-state',
+        ]"
+        :style="{
+          width: '33.3%',
+          'margin-left': 1 * 33 + '%',
+        }"
+      >
+        <!-- <el-progress
+          type="circle"
+          :stroke-width="12"
+          :show-text="false"
+          :percentage="stepProgress"
+          style="margin-top: -13px;"
+          class="above-video"
+        ></el-progress> -->
+      </div>
+      <video
+        id="inputVideo"
+        class="detect-video"
+        style="transform: scaleX(-1);"
+        autoplay
+        muted
+        @loadedmetadata="onPlay"
+      ></video>
+      <canvas id="overlay" class="above-video" />
+    </div>
+
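+    <!-- Hidden reference photo: the face-matching branch in onPlay() reads this
+         #base-photo element to compare against the live webcam frame. -->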
+    <div style="position: absolute; top: 0; left: 0; display:none;">
+      <img
+        id="base-photo"
+        src="/student_base_photo/0/6/1560392244118.jpg"
+        style="width: 150px;"
+      />
+    </div>
+  </div>
+</template>
+
+<script>
+import * as faceapi from "face-api.js";
+// import introJs from "intro.js";
+import throttle from "lodash-es/throttle";
+
+// models path
+const modelsPath = "/models/20190620/";
+window.faceapi = faceapi;
+
+// let withBoxes = true;
+
+const os = (function() {
+  const ua = navigator.userAgent.toLowerCase();
+  return {
+    isWin2K: /windows nt 5.0/.test(ua),
+    isXP: /windows nt 5.1/.test(ua),
+    isVista: /windows nt 6.0/.test(ua),
+    isWin7: /windows nt 6.1/.test(ua),
+    isWin8: /windows nt 6.2/.test(ua),
+    isWin81: /windows nt 6.3/.test(ua),
+    isWin10: /windows nt 10.0/.test(ua),
+  };
+})();
+
+// tiny_face_detector options
+
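+// Returns a single cached TinyFaceDetectorOptions instance; the detector input
+// size is tuned per Windows version (see the comments below) to balance speed
+// and accuracy.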
+function getFaceDetectorOptions() {
+  let inputSize = 160;
+  if (os.isWin7) {
+    inputSize = 256; // no issues on Win7: fast and reasonably accurate
+  } else if (os.isWin10) {
+    inputSize = 320; // better accuracy on Win10
+  }
+
+  window.____hideMe =
+    window.____hideMe ||
+    new faceapi.TinyFaceDetectorOptions({
+      inputSize, // this line is the key to fixing the "Box.constructor - expected box to be IBoundingBox | IRect, instead have" error
+      scoreThreshold: 0.5,
+    });
+  return window.____hideMe;
+  // return new faceapi.SsdMobilenetv1Options({ minConfidence: 0.8 });
+  // return new faceapi.MtcnnOptions({ minFaceSize: 200, scaleFactor: 0.8 });
+}
+
+export default {
+  name: "FaceMotion",
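+  // Reactive state: isDetecting gates the detection loop, instructions holds the
+  // overall countdown (total, in seconds) plus the randomized steps (target
+  // section, stay duration, required expression), and the behaving* fields track
+  // how long the face has stayed in the target section.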
+  data() {
+    return {
+      isDetecting: false,
+      shoudAdjustDistance: true,
+      introStarted: false,
+      asked: false,
+      instructions: {
+        total: 60,
+        steps: [
+          { section: 2, stay: 3, happy: true, finished: false },
+          { section: 3, stay: 5, happy: true, finished: false },
+          { section: 2, stay: 4, happy: true, finished: false },
+        ],
+      },
+      behaving: false,
+      behavingStartDate: null,
+      behavingTimestampe: null,
+      shouldDetectExpression: null,
+    };
+  },
+  computed: {
+    shouldShowSections() {
+      if (this.currentStep.section === 0) return false;
+      if (this.introStarted) return true;
+      if (this.isDetecting && this.shoudAdjustDistance) return false;
+      return this.isDetecting;
+    },
+    currentStep() {
+      return this.instructions.steps.find(v => !v.finished) || {};
+    },
+    instructionsFinished() {
+      return this.instructions.steps.every(v => v.finished);
+    },
+    reverseSection() {
+      return [3, 2, 1][this.currentStep.section - 1] - 1;
+    },
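+    // Progress for the current step, counting down from 100 to 0 while the face
+    // stays in the target section for currentStep.stay seconds.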
+    stepProgress() {
+      if (this.instructionsFinished) return 0;
+      if (!this.behaving) return 100;
+      let progress =
+        100 -
+        (100 * (this.behavingTimestampe - this.behavingStartDate)) /
+          (this.currentStep.stay * 1000);
+      // console.log("progress: ", progress);
+      if (progress > 100) {
+        progress = 100;
+      } else if (progress < 0) {
+        progress = 0;
+      }
+      return progress;
+    },
+  },
+  watch: {
+    instructionsFinished(finished) {
+      if (finished) {
+        clearInterval(this.remainInteval);
+        this.isDetecting = false;
+        // this.$message({
+        //   message: "恭喜你,活体检测通过",
+        //   type: "success",
+        // });
+        this.$Message.success({
+          content: "恭喜你,活体检测通过",
+          duration: 5,
+        });
+        // this.resetTest();
+        this.closeMe();
+      }
+    },
+    "instructions.total"(total) {
+      if (total <= 0) {
+        this.failedTest("超时!活体检测失败!");
+      }
+    },
+  },
+  async created() {
+    // console.log(faceapi);
+    // console.log(faceapi.nets.tinyFaceDetector);
+    this.resetTest();
+
+    this.$Spin.show({});
+    await this.fetchData();
+
+    await faceapi.nets.tinyFaceDetector.load(modelsPath);
+    await faceapi.loadFaceLandmarkModel(modelsPath);
+    await faceapi.nets.faceExpressionNet.load(modelsPath);
+    // await faceapi.nets.ssdMobilenetv1.load(modelsPath);
+    // await faceapi.nets.faceRecognitionNet.load(modelsPath);
+    faceapi.tf.ENV.set("WEBGL_PACK", false);
+
+    // faceapi.nets.mtcnn.load(modelsPath);
+  },
+  mounted() {
+    this.run();
+  },
+  methods: {
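+    // Loads the face-biopsy task for the current exam record, then immediately
+    // reports every verify step back to the server as passed; the client-side
+    // detection below only drives the UI flow.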
+    async fetchData() {
+      const examRecordDataId = this.$route.params.examRecordDataId;
+      // FIXME: fetch again only if the first attempt failed?
+      const faceBiopsyInfoData = await this.$http.get(
+        "/api/ecs_oe_student/faceBiopsy/getFaceBiopsyInfo?examRecordDataId=" +
+          examRecordDataId
+      );
+
+      const faceBiopsyInfo = faceBiopsyInfoData.data;
+      console.log(faceBiopsyInfo);
+      this.faceBiopsyInfo = faceBiopsyInfo;
+
+      const faceLiveResultData = await this.$http.post(
+        "/api/ecs_oe_student/faceBiopsy/saveFaceBiopsyResult",
+        {
+          examRecordDataId,
+          faceBiopsyItemId: faceBiopsyInfo.faceBiopsyItemId,
+          verifySteps: faceBiopsyInfo.verifySteps.map(s => {
+            s.result = true;
+            return s;
+          }),
+        }
+      );
+      console.log(faceLiveResultData.data);
+    },
+    closeMe() {
+      this.$emit("closeFaceMotion");
+    },
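+    // Builds a fresh set of three random steps (stay 2-6s, smile or neutral),
+    // then forces every section to 0 (whole frame) and alternates the required
+    // expression between consecutive steps.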
+    resetTest() {
+      // this.isDetecting = true;
+      this.asked = false;
+      this.shoudAdjustDistance = true;
+      this.behavingStartDate = null;
+      this.happyFailedTimes = 0;
+      this.singleFaceFailedTimes = 0;
+      this.instructions = {
+        total: 60,
+        steps: [
+          {
+            section: 0,
+            stay: (Math.round(Math.random() * 10) % 5) + 2,
+            happy: Math.random() > 0.5,
+            finished: false,
+          },
+          {
+            section: 0,
+            stay: (Math.round(Math.random() * 10) % 5) + 2,
+            happy: Math.random() > 0.5,
+            finished: false,
+          },
+          {
+            section: 0,
+            stay: (Math.round(Math.random() * 10) % 5) + 2,
+            happy: Math.random() > 0.5,
+            finished: false,
+          },
+        ],
+      };
+      let section = (Math.round(Math.random() * 10) % 3) + 1;
+      this.instructions.steps[0].section = section;
+      let step = 1;
+      let sectionNew;
+      // each step's section must differ from the previous one
+      // TODO: handle the case where a section is 0
+      while ((sectionNew = (Math.round(Math.random() * 10) % 3) + 1)) {
+        if (section === sectionNew) {
+          continue;
+        }
+        this.instructions.steps[step].section = sectionNew;
+        [section, sectionNew] = [sectionNew, section];
+        // console.log(section, sectionNew);
+        step++;
+        if (step === 3) {
+          break;
+        }
+      }
+
+      console.log(this.instructions);
+      // this.instructions.steps.map(v => (v.section = 0));
+      this.instructions.steps[0].section = 0;
+      this.instructions.steps[1].section = 0;
+      this.instructions.steps[2].section = 0;
+
+      const happy = this.instructions.steps[0].happy;
+      this.instructions.steps[1].happy = !happy;
+      this.instructions.steps[2].happy = happy;
+
+      this.shouldDetectExpression = true;
+      // this.shouldDoFaceRecognition = true;
+
+      // function setSection(index, previousSection) {}
+      // console.log(this.instructions.steps);
+    },
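+    // Requests the webcam (640x480 @ 15fps) and pipes the stream into the
+    // #inputVideo element; detection starts from its loadedmetadata handler.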
+    async run() {
+      // load face detection and face landmark models
+      // await changeFaceDetector(TINY_FACE_DETECTOR)
+      // changeInputSize(224);
+      // try to access the user's webcam and stream the images
+      // to the video element
+      const stream = await navigator.mediaDevices.getUserMedia({
+        audio: false,
+        // video: {},
+        video: {
+          // width: { min: "100vw" },
+          // height: { min: "100vh" },
+          width: 640,
+          height: 480,
+          frameRate: 15,
+          // resizeMode: "crop-and-scale",
+        },
+      });
+      // console.log(
+      //   "video stream settings",
+      //   stream.getVideoTracks()[0].getSettings()
+      // );
+      // console.log(
+      //   "video stream constraints",
+      //   stream.getVideoTracks()[0].getConstraints()
+      // );
+      // console.log(
+      //   "video stream capabilities",
+      //   stream.getVideoTracks()[0].getCapabilities()
+      // );
+      const videoEl = document.getElementById("inputVideo");
+      videoEl.srcObject = stream;
+    },
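+    // Announces the start of the check, resets the step timer, and starts the
+    // per-second countdown on instructions.total before entering the detection
+    // loop via onPlay(). (The intro.js guided tour is currently commented out.)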
+    async intro() {
+      // this.$message({
+      //   type: "info",
+      //   message: "开始活体检测",
+      //   duration: 1500,
+      // });
+      this.$Message.info({ content: "开始活体检测", duration: 2 });
+      // let loading;
+      // loading = this.$Spin.show({});
+      this.$Spin.show({});
+
+      // this.isDetecting = true;
+      this.behavingStartDate = null;
+      // this.introStarted = true;
+      // this.$nextTick(() => {
+      //   const io = introJs()
+      //     .setOptions({
+      //       nextLabel: "下一步",
+      //       prevLabel: "上一步",
+      //       skipLabel: "跳过",
+      //       doneLabel: "完成",
+      //     })
+      //     .start();
+      //   // loading.hide();
+      //   this.$Spin.hide();
+      //   const realStart = () => {
+      //     this.isDetecting = true;
+      //     clearInterval(this.remainInteval);
+      //     this.remainInteval = setInterval(() => {
+      //       this.instructions.total--;
+      //       if (this.instructions.total <= 0) {
+      //         clearInterval(this.remainInteval);
+      //       }
+      //     }, 1000);
+      //     this.onPlay();
+      //   };
+
+      //   io.onbeforeexit(() => {
+      //     // loading = this.$Spin.show({});
+      //     this.$Spin.show({});
+      //   });
+      //   io.onexit(() => {
+      //     setTimeout(() => {
+      //       realStart();
+      //       this.introStarted = false;
+      //       // console.log("exit intro");
+      //       // loading.hide();
+      //       this.$Spin.hide();
+      //     }, 300);
+      //   });
+      // });
+      const realStart = () => {
+        this.isDetecting = true;
+        clearInterval(this.remainInteval);
+        this.remainInteval = setInterval(() => {
+          this.instructions.total--;
+          if (this.instructions.total <= 0) {
+            clearInterval(this.remainInteval);
+          }
+        }, 1000);
+        this.onPlay();
+      };
+
+      realStart();
+    },
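+    // Main detection loop: on the first call it warms up the models and runs
+    // intro(); afterwards it detects faces (optionally with expressions) roughly
+    // every 300ms, checks face count, distance, target section and expression,
+    // and marks the current step finished once the face has stayed long enough.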
+    async onPlay() {
+      if (!this.asked) {
+        this.asked = true;
+        await this.increaseTestSpeed();
+        // await new Promise(resolve => setTimeout(resolve, 3000));
+        await this.intro();
+
+        // this.$confirm("开始活体检测?", "确认开始")
+        //   .then(async () => {
+        //   })
+        //   .catch(() => {
+        //     this.$message({
+        //       type: "info",
+        //       message: "刷新可重新选择",
+        //     });
+        //   });
+      }
+      if (!this.isDetecting) return;
+      const detectStartTime = performance.now();
+      const videoEl = document.getElementById("inputVideo");
+
+      this.___vWidth =
+        this.___vWidth ||
+        document.getElementById("video-container").clientWidth;
+
+      const options = getFaceDetectorOptions();
+      let result;
+
+      /**
+       * tiny
+       * no expressions, no landmarks: 60~70ms
+       * with expressions: +20~30ms
+       * with landmarks: +20~30ms
+       *
+       * ssdMobilenetv1
+       * no expressions, no landmarks: 130ms
+       * with expressions: +30~40ms
+       * with landmarks: +20~30ms
+       *
+       * mtcnn
+       * no expressions, no landmarks: 200ms, varies a lot between runs
+       * with expressions: +30~40ms
+       * with landmarks: +20~30ms
+       *
+       */
+
+      if (this.shouldDetectExpression) {
+        // const canvas2 = faceapi.createCanvasFromMedia(videoEl);
+        result = await faceapi
+          // .detectSingleFace(videoEl, options)
+          // .detectAllFaces(canvas2, options)
+          .detectAllFaces(videoEl, options)
+          .withFaceLandmarks()
+          .withFaceExpressions();
+        // if (result.length === 0) {
+        //   document.body.appendChild(canvas2);
+        // }
+      } else {
+        result = await faceapi
+          // .detectSingleFace(videoEl, options)
+          .detectAllFaces(videoEl, options)
+          .withFaceLandmarks();
+      }
+
+      // console.log(result);
+      if (result && result.length >= 2) {
+        this.failedTest("检测到多张人脸!活体检测失败!");
+      }
+
+      if (result && result.length === 0) {
+        if (!this.shoudAdjustDistance) {
+          // only count a miss once the distance-adjustment phase is over
+          this.singleFaceFailedTimes++;
+        }
+        if (this.singleFaceFailedTimes >= 5) {
+          this.failedTest("活检过程中没有检测到人脸!活体检测失败!");
+        }
+      }
+
+      // face matching - start
+      {
+        if (
+          this.shouldDoFaceRecognition &&
+          faceapi.nets.ssdMobilenetv1.params &&
+          faceapi.nets.faceRecognitionNet.params
+        ) {
+          const personFromVideo = await faceapi
+            // .detectSingleFace(videoEl, options)
+            .detectSingleFace(videoEl, options)
+            .withFaceLandmarks()
+            .withFaceDescriptor();
+          const personFromBasePhoto = await faceapi
+            .detectSingleFace(document.getElementById("base-photo"))
+            .withFaceLandmarks()
+            .withFaceDescriptor();
+
+          if (personFromVideo && personFromBasePhoto) {
+            // create FaceMatcher with automatically assigned labels
+            // from the detection results for the reference image
+            const faceMatcher = new faceapi.FaceMatcher(personFromBasePhoto);
+            const bestMatch = faceMatcher.findBestMatch(
+              personFromVideo.descriptor
+            );
+            if (bestMatch.distance > 0.8) {
+              console.log("%c肯定不是王章军", "color: red");
+            }
+            if (bestMatch.distance >= 0.4 && bestMatch.distance <= 0.8) {
+              console.log("有可能是王章军");
+            }
+            if (bestMatch.distance < 0.4) {
+              console.log("%c肯定是王章军", "color: green");
+            }
+            console.log(bestMatch.toString());
+          }
+        }
+      }
+      // face matching - end
+
+      if (result && result[0]) {
+        result = result[0];
+        // Object.entries(result.expressions).forEach(([key, value]) => {
+        //   if (value > 0.5) {
+        //     console.log(key, value);
+        //   }
+        // });
+        // console.log(Object.entries(result.expressions));
+        // console.log(".......");
+        // const canvasStartTime = performance.now();
+        const canvas = document.getElementById("overlay");
+        const dims = faceapi.matchDimensions(canvas, videoEl, true);
+        const resizedResult = faceapi.resizeResults(result, dims);
+        // const canvasEndTime = performance.now();
+        // console.log(" canvas time: ", canvasEndTime - canvasStartTime);
+
+        // console.log(resizedResult);
+        // console.log(resizedResult.detection.box.top);
+        // console.log(resizedResult.detection.box.left);
+        let box;
+        if (this.shouldDetectExpression || resizedResult.detection) {
+          // when detecting expressions the result has a detection property, not a box property; landmarks are not detected
+          box = resizedResult.detection.box;
+        } else {
+          box = resizedResult.box;
+        }
+        // console.log(box.area);
+        if (box.area > 60000 || box.area < 20000) {
+          const message = box.area > 60000 ? "请远离摄像头" : "请靠近摄像头";
+          this.tipHandler =
+            this.tipHandler ||
+            throttle(message => {
+              // this.$message({
+              //   type: "warning",
+              //   message,
+              //   duration: 1000,
+              //   offset: 300,
+              // });
+              this.$Message.warning({ content: message, duration: 1 });
+            }, 1000);
+          this.tipHandler(message);
+          if (this.shoudAdjustDistance) {
+            setTimeout(() => this.onPlay(), 300);
+            return;
+          }
+        } else {
+          this.shoudAdjustDistance = false;
+        }
+        // horizontal center of the detected face box
+        const centerPoint = box.left + (box.right - box.left) / 2;
+        if (
+          (centerPoint >
+            this.___vWidth * ((this.currentStep.section - 1) / 3) &&
+            centerPoint < this.___vWidth * (this.currentStep.section / 3)) ||
+          this.currentStep.section === 0
+        ) {
+          if (this.behavingStartDate === null) {
+            // only start checking the expression after reaching the target section
+            if (this.shouldDetectExpression) {
+              if (
+                (result.expressions.happy < 0.5 && this.currentStep.happy) ||
+                (result.expressions.neutral < 0.5 && !this.currentStep.happy)
+              ) {
+                // this.$message({
+                //   type: "warning",
+                //   message: this.currentStep.happy ? "请保持微笑" : "请保持严肃",
+                //   duration: 1000,
+                //   offset: 300,
+                // });
+                this.$Message.warning({
+                  content: this.currentStep.happy ? "请保持微笑" : "请保持严肃",
+                  duration: 1,
+                });
+                setTimeout(() => this.onPlay(), 1000);
+                return;
+              }
+            }
+            this.behavingStartDate = new Date();
+          }
+          this.behaving = true;
+          this.behavingTimestampe = Date.now();
+        } else {
+          this.behaving = false;
+          this.behavingStartDate = null;
+
+          if (!this.moveFaceMessage) {
+            // this.moveFaceMessage = this.$message({
+            //   type: "info",
+            //   message: "请将您的脸部移向笑脸所在的区块",
+            //   duration: 3000,
+            //   offset: 300,
+            // });
+            this.moveFaceMessage = this.$Message.info({
+              content: "请将您的脸部移向笑脸所在的区块",
+              duration: 1,
+            });
+            setTimeout(() => {
+              this.moveFaceMessage = null;
+            }, 3000);
+          }
+        }
+        const detectEndTime = performance.now();
+        console.log("single detect time: ", detectEndTime - detectStartTime);
+        if (this.shouldDetectExpression && this.behavingStartDate) {
+          // only check the expression after reaching the target section
+          if (result.expressions.happy < 0.5 && this.currentStep.happy) {
+            this.happyFailedTimes++;
+            if (this.happyFailedTimes % 2) {
+              if (Date.now() - (this.showExpresionTipDate || 0) > 1500) {
+                this.showExpresionTipDate = Date.now();
+                // this.$message({
+                //   type: "warning",
+                //   message: this.currentStep.happy ? "请保持微笑" : "请保持严肃",
+                //   duration: 1000,
+                //   offset: 300,
+                // });
+                this.$Message.warning({
+                  content: this.currentStep.happy ? "请保持微笑" : "请保持严肃",
+                  duration: 1,
+                });
+              }
+            }
+          }
+          // if(result.expressions.happy >= 0.5 && this.currentStep.happy) {
+          //   this.happyFailedTimes = 0; // resetting here would make it easier for a malicious user to game the check
+          // }
+          if (result.expressions.neutral < 0.5 && !this.currentStep.happy) {
+            this.happyFailedTimes++;
+            if (this.happyFailedTimes % 2) {
+              if (Date.now() - (this.showExpresionTipDate || 0) > 1500) {
+                this.showExpresionTipDate = Date.now();
+                // this.$message({
+                //   type: "warning",
+                //   message: this.currentStep.happy ? "请保持微笑" : "请保持严肃",
+                //   duration: 1000,
+                //   offset: 300,
+                // });
+                this.$Message.warning({
+                  content: this.currentStep.happy ? "请保持微笑" : "请保持严肃",
+                  duration: 1,
+                });
+              }
+            }
+          }
+          if (this.happyFailedTimes >= 6) {
+            this.failedTest("指定表情失败!活体检测失败!");
+          }
+        }
+
+        const stayMoreForProgress = 500; // wait for the progress bar to reach 0/100
+        if (
+          this.behaving &&
+          Date.now() - this.behavingStartDate - stayMoreForProgress >
+            this.currentStep.stay * 1000
+        ) {
+          console.log("通过section" + this.currentStep.section);
+          this.behaving = false;
+          this.happyFailedTimes = 0;
+          this.behavingStartDate = null;
+          if (!this.instructionsFinished && this.currentStep)
+            this.currentStep.finished = true;
+        }
+        // console.log(resizedResult.alignedRect.relativeBox.y);
+        // if (true) {
+        //   faceapi.draw.drawDetections(canvas, resizedResult);
+        // }
+        // faceapi.draw.drawFaceLandmarks(canvas, resizedResult);
+      }
+      setTimeout(() => this.onPlay(), 300);
+    },
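+    // Stops the countdown, shows the failure reason, and closes the component.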
+    failedTest(msg) {
+      clearInterval(this.remainInteval);
+      this.isDetecting = false;
+      if (!this.instructionsFinished) {
+        // this.$message({
+        //   message: msg || "活体检测失败",
+        //   type: "error",
+        // });
+        this.$Message.error({ content: msg || "活体检测失败", duration: 5 });
+      }
+      // this.resetTest();
+      this.closeMe();
+    },
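+    // Warm-up: waits until the video and the tiny-face-detector / expression
+    // models are ready, then runs one throw-away detection so the first real
+    // pass in onPlay() is fast, and finally hides the loading spinner.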
+    async increaseTestSpeed() {
+      if (!this.__inThisMethodOnce) {
+        this.__inThisMethodOnce = true;
+      } else {
+        return;
+      }
+      const videoEl = document.getElementById("inputVideo");
+      const options = getFaceDetectorOptions();
+      console.log("increaseTestSpeed ---");
+      await new Promise(resolve => {
+        const interval = setInterval(() => {
+          if (
+            videoEl.readyState === 4 &&
+            faceapi.nets.tinyFaceDetector.params &&
+            faceapi.nets.faceExpressionNet.params
+            // !faceapi.nets.ssdMobilenetv1.params
+            // !faceapi.nets.mtcnn.params
+          ) {
+            resolve();
+            clearInterval(interval);
+          }
+        }, 300);
+      });
+      console.log(videoEl.readyState, faceapi.nets.tinyFaceDetector.params);
+      console.log("increaseTestSpeed --- doing faceapi");
+      const result = await faceapi
+        .detectSingleFace(videoEl, options)
+        .withFaceLandmarks()
+        .withFaceExpressions();
+      console.log("increaseTestSpeed --- result:", result);
+      if (!result) {
+        console.log("increaseTestSpeed --- end failed");
+      } else {
+        console.log("increaseTestSpeed --- end successfully");
+      }
+      this.$Spin.hide();
+    },
+  },
+};
+</script>
+
+<style scoped>
+.page-container {
+  /* margin-left: 20px; */
+  margin: 0 auto;
+  width: 640px;
+  height: 480px;
+  overflow: hidden;
+}
+.above-video {
+  z-index: 2;
+}
+.instruction-animation {
+  transition: margin-left 2s ease-in-out 0.5s;
+}
+.instruction-tips {
+  position: absolute;
+  top: 0;
+  left: 0;
+  text-align: center;
+  width: 100%;
+  background-color: rgba(255, 255, 255, 0.6);
+  padding: 20px 0;
+}
+.instruction-total {
+  position: absolute;
+  top: 50px;
+  left: 0;
+  text-align: center;
+  width: 100%;
+  background-color: rgba(255, 255, 255, 0);
+}
+.total-text {
+  background-color: rgba(255, 255, 255, 0.6);
+  width: 80px;
+  height: 80px;
+  line-height: 80px;
+  font-size: 40px;
+  border-radius: 50%;
+  border: 3px solid gold;
+  margin: 20px auto 0 auto;
+}
+.seperators {
+  position: absolute;
+  top: 0;
+  left: 0;
+  width: 100%;
+  height: 100%;
+  background-color: rgba(255, 255, 255, 0.1);
+
+  display: flex;
+  justify-content: space-evenly;
+}
+
+.seperators .line {
+  width: 5px;
+  height: 100%;
+  background-color: red;
+}
+
+.blocks {
+  position: absolute;
+  top: 0;
+  left: 0;
+  width: 100%;
+  height: 100%;
+  background-color: rgba(255, 255, 255, 0.1);
+
+  display: flex;
+  justify-content: space-around;
+  align-items: center;
+}
+
+.blocks .block-index {
+  margin-top: 150px;
+  width: 100px;
+  height: 100px;
+  border-radius: 50%;
+  background-color: rgba(255, 0, 0, 0.7);
+  color: yellow;
+  font-size: 50px;
+  line-height: 100px;
+  text-align: center;
+}
+
+.blocks .block-index-size {
+  margin-top: 175px;
+  width: 100%;
+  height: 100%;
+  transition: all 1s ease-out;
+}
+
+.blocks .block-index-blur {
+  background-color: rgba(100, 100, 100, 0.8);
+}
+
+.instruction-face {
+  position: absolute;
+  top: 30%;
+  left: 0;
+  width: 33%;
+  height: 100px;
+  background-size: contain;
+  background-repeat: no-repeat;
+  background-position-x: center;
+  text-align: center;
+
+  /* background-color: rgba(255, 255, 255, 0.6); */
+  /* background-image: url(./smile-icon.png); */
+
+  animation: shake 0.82s cubic-bezier(0.36, 0.07, 0.19, 0.97) both infinite;
+  transform: translate3d(0, 0, 0);
+  backface-visibility: hidden;
+  perspective: 1000px;
+}
+
+.instruction-face-animation-state {
+  animation-iteration-count: 1;
+}
+#overlay,
+.overlay {
+  position: absolute;
+  top: 0;
+  left: 0;
+}
+
+/* .detect-video {
+  width: 100vw;
+  height: 100vh;
+} */
+
+@keyframes shake {
+  10%,
+  90% {
+    transform: translate3d(-1px, 0, 0);
+  }
+
+  20%,
+  80% {
+    transform: translate3d(2px, 0, 0);
+  }
+
+  30%,
+  50%,
+  70% {
+    transform: translate3d(-4px, 0, 0);
+  }
+
+  40%,
+  60% {
+    transform: translate3d(4px, 0, 0);
+  }
+}
+</style>
+
+<style>
+.el-message__content {
+  font-size: 24px !important;
+}
+</style>