@@ -0,0 +1,204 @@
+const getAdapterUserMedia = () => {
+  if (navigator.mediaDevices === undefined) navigator.mediaDevices = {};
+
+  if (navigator.mediaDevices.getUserMedia === undefined) {
+    navigator.mediaDevices.getUserMedia = (constraints) => {
+      const getUserMedia =
+        navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
+
+      if (!getUserMedia)
+        return Promise.reject(
+          new Error("getUserMedia is not implemented in this browser")
+        );
+
+      return new Promise((resolve, reject) => {
+        getUserMedia.call(navigator, constraints, resolve, reject);
+      });
+    };
+  }
+
+  // Bind to mediaDevices so the native method keeps its `this`
+  // when called through the returned reference.
+  return navigator.mediaDevices.getUserMedia.bind(navigator.mediaDevices);
+};
+
+class AudioRecode {
+  constructor(opt = {}) {
+    const defaultOptions = { sampleBits: 8, sampleRate: 44100 / 6 };
+    this.options = Object.assign({}, defaultOptions, opt);
+    this.size = 0;
+    this.audioBuffer = [];
+    this.inputSampleRate = 0;
+    this.inputSampleBits = 16; // input PCM is written out at up to 16 bits per sample
+    this.outputSampleRate = this.options.sampleRate;
+    this.outputSampleBits = this.options.sampleBits;
+    this.audioContext = null;
+    this.media = null;
+    this.recorder = null;
+  }
+
+  // Cache one chunk of PCM samples delivered by the audio processing callback.
+  input(data) {
+    this.audioBuffer.push(new Float32Array(data));
+    this.size += data.length;
+  }
+
+  compress() {
+    // Merge the buffered chunks into one contiguous Float32Array.
+    let data = new Float32Array(this.size);
+    let offset = 0;
+    this.audioBuffer.forEach((buffer) => {
+      data.set(buffer, offset);
+      offset += buffer.length;
+    });
+    // Downsample by keeping every n-th sample.
+    const compressRate = Math.floor(this.inputSampleRate / this.outputSampleRate);
+    const len = Math.floor(data.length / compressRate);
+    let compressData = new Float32Array(len);
+    for (let i = 0, j = 0; i < len; i++) {
+      compressData[i] = data[j];
+      j += compressRate;
+    }
+    return compressData;
+  }
+
+  encode() {
+    const sampleRate = Math.min(this.inputSampleRate, this.outputSampleRate);
+    const sampleBits = Math.min(this.inputSampleBits, this.outputSampleBits);
+    const compressData = this.compress();
+    const audioCompressDataLength = compressData.length * (sampleBits / 8);
+    const audioArrayBuffer = new ArrayBuffer(44 + audioCompressDataLength);
+    let audioDataView = new DataView(audioArrayBuffer);
+    const channelCount = 1;
+    let offset = 0;
+    const appendStr = (content) => {
+      for (let i = 0; i < content.length; i++) {
+        audioDataView.setUint8(offset + i, content.charCodeAt(i));
+      }
+    };
+    // RIFF chunk identifier
+    appendStr("RIFF");
+    offset += 4;
+    // Total bytes from the next field to the end of the file (file size - 8)
+    audioDataView.setUint32(offset, 36 + audioCompressDataLength, true);
+    offset += 4;
+    // WAVE format marker
+    appendStr("WAVE");
+    offset += 4;
+    // "fmt " sub-chunk identifier
+    appendStr("fmt ");
+    offset += 4;
+    // fmt sub-chunk size, 16 for PCM
+    audioDataView.setUint32(offset, 16, true);
+    offset += 4;
+    // Audio format, 1 = PCM
+    audioDataView.setUint16(offset, 1, true);
+    offset += 2;
+    // Number of channels
+    audioDataView.setUint16(offset, channelCount, true);
+    offset += 2;
+    // Sample rate (samples per second per channel)
+    audioDataView.setUint32(offset, sampleRate, true);
+    offset += 4;
+    // Byte rate: channels x sample rate x bits per sample / 8
+    audioDataView.setUint32(
+      offset,
+      channelCount * sampleRate * (sampleBits / 8),
+      true
+    );
+    offset += 4;
+    // Block align: channels x bits per sample / 8 (bytes per sample frame)
+    audioDataView.setUint16(offset, channelCount * (sampleBits / 8), true);
+    offset += 2;
+    // Bits per sample
+    audioDataView.setUint16(offset, sampleBits, true);
+    offset += 2;
+    // "data" sub-chunk identifier
+    appendStr("data");
+    offset += 4;
+    // Size of the sample data in bytes (total size - 44)
+    audioDataView.setUint32(offset, audioCompressDataLength, true);
+    offset += 4;
+
+    // Write the sample data
+    if (sampleBits === 8) {
+      for (let i = 0; i < compressData.length; i++, offset++) {
+        const s = Math.max(-1, Math.min(1, compressData[i]));
+        let val = s < 0 ? s * 0x8000 : s * 0x7fff;
+        // 8-bit WAV PCM is unsigned: rescale -32768..32767 to 0..255.
+        val = Math.floor(255 / (65535 / (val + 32768)));
+        audioDataView.setUint8(offset, val);
+      }
+    } else {
+      for (let i = 0; i < compressData.length; i++, offset += 2) {
+        const s = Math.max(-1, Math.min(1, compressData[i]));
+        audioDataView.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
+      }
+    }
+    // The buffer holds PCM in a RIFF/WAVE container.
+    return new Blob([audioDataView], { type: "audio/wav" });
+  }
+
+  mediaError(error) {
+    let msg = "";
+    const errorContent = error.code || error.name;
+    switch (errorContent) {
+      case "PERMISSION_DENIED":
+      case "PermissionDeniedError":
+        msg = "The user denied permission to use the device";
+        break;
+      case "NOT_SUPPORTED_ERROR":
+      case "NotSupportedError":
+        msg = "The browser does not support the hardware device";
+        break;
+      case "MANDATORY_UNSATISFIED_ERROR":
+      case "MandatoryUnsatisfiedError":
+        msg = "The specified hardware device could not be found";
+        break;
+      default:
+        msg = "Unable to open the microphone. Error: " + errorContent;
+        break;
+    }
+    console.log(msg);
+    return { msg, code: errorContent };
+  }
+
+  async init() {
+    const getUserMedia = getAdapterUserMedia();
+    let error = null;
+    const stream = await getUserMedia({ audio: true }).catch((err) => {
+      error = err;
+    });
+
+    if (error) {
+      return Promise.reject(this.mediaError(error));
+    }
+
+    const AudioContextClass = window.AudioContext || window.webkitAudioContext;
+    this.audioContext = new AudioContextClass();
+    this.inputSampleRate = this.audioContext.sampleRate;
+    this.media = this.audioContext.createMediaStreamSource(stream);
+    this.recorder = this.audioContext.createScriptProcessor(4096, 1, 1);
+    // Audio capture: buffer each block of samples as it is produced.
+    this.recorder.onaudioprocess = (e) => {
+      this.input(e.inputBuffer.getChannelData(0));
+    };
+  }
+
+  start() {
+    // Connect source -> processor -> destination so onaudioprocess fires.
+    this.media.connect(this.recorder);
+    this.recorder.connect(this.audioContext.destination);
+  }
+
+  stop() {
+    this.recorder.disconnect();
+  }
+
+  getAudioBlob() {
+    this.stop();
+    return this.encode();
+  }
+
+  restart() {
+    // Clear the buffered samples and start recording again,
+    // reusing the audio graph created by init().
+    this.size = 0;
+    this.audioBuffer = [];
+    this.start();
+  }
+}
+
+export default AudioRecode;
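+
+// Usage sketch (illustrative only, not part of the recorder): the button ids
+// and the playback via an Audio element are assumptions made for the example.
+//
+//   const recorder = new AudioRecode({ sampleBits: 16, sampleRate: 16000 });
+//
+//   document.querySelector("#start").addEventListener("click", async () => {
+//     await recorder.init();   // request microphone access, build audio graph
+//     recorder.start();        // begin buffering PCM chunks
+//   });
+//
+//   document.querySelector("#stop").addEventListener("click", () => {
+//     const wavBlob = recorder.getAudioBlob();          // stop + encode to WAV
+//     new Audio(URL.createObjectURL(wavBlob)).play();   // quick local playback
+//   });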