Approach
Get access to the recording device (microphone)
Get the recording's spectrum (waveform) data
Draw the spectrum visualization (a condensed sketch of this pipeline follows the list)
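A condensed, hedged sketch of that flow, assuming a browser with AudioContext and MediaRecorder support (the drawing step is only indicated by a comment; each piece is built out properly in the sections below):
js
// Overview: microphone stream → MediaRecorder (for the file) + AnalyserNode (for the visualization).
navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
  // 1. Record the stream
  const recorder = new MediaRecorder(stream);
  // 2. Analyse the stream
  const audioCtx = new AudioContext();
  const analyser = audioCtx.createAnalyser();
  audioCtx.createMediaStreamSource(stream).connect(analyser);
  const buffer = new Uint8Array(analyser.frequencyBinCount);
  // 3. Read one frame of waveform data per animation tick and draw it
  (function frame() {
    requestAnimationFrame(frame);
    analyser.getByteTimeDomainData(buffer);
    // ...render `buffer` onto a <canvas> here
  })();
});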
Implementation
Wrapping loadDevices.js
js
/**
 * Whether recording is supported: returns an object exposing getUserMedia,
 * or false if the browser offers no recording API at all.
 */
const recordingSupport = () => {
  let scope = navigator.mediaDevices || {};
  if (!scope.getUserMedia) {
    // Fall back to the legacy, vendor-prefixed getUserMedia on navigator
    scope = navigator;
    scope.getUserMedia || (scope.getUserMedia = scope.webkitGetUserMedia || scope.mozGetUserMedia || scope.msGetUserMedia);
  }
  if (!scope.getUserMedia) {
    return false;
  }
  return scope;
};
// Request microphone permission
export const getUserMediaPermission = () => {
  return new Promise((resolve, reject) => {
    const mediaDevices = recordingSupport();
    if (mediaDevices.getUserMedia) {
      const constraints = { audio: true };
      mediaDevices.getUserMedia(constraints).then(resolve, reject);
    } else {
      reject(false); // the browser does not support recording
    }
  });
};
function checkMime() {
  const types = [
    "audio/mpeg",
    "audio/webm",
    "audio/mp4",
    "audio/wav",
    "audio/ogg",
    "audio/flac",
    "audio/m4a",
    "audio/mp3",
    "audio/mpga",
    "audio/oga",
  ];
  let first;
  for (const type of types) {
    // Check which of these MIME types the current browser supports
    const supported = MediaRecorder.isTypeSupported(type);
    if (supported && !first) {
      console.log("Is " + type + " supported? " + (supported ? "Yes!" : "Nope :("));
      first = type;
    }
  }
  return first;
}
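checkMime simply returns the first supported entry in that list, and which one that is depends on the browser (Chromium-based browsers typically report audio/webm, while Safari tends to prefer audio/mp4 — treat those as assumptions to verify). It can therefore be worth reusing the returned value instead of hard-coding a container type later:
js
const mimeType = checkMime();
console.log("MediaRecorder will use:", mimeType);
// e.g. reuse it when assembling the final file: new Blob(audioChunks, { type: mimeType })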
let streams = [];
let stopDraw = false;
/**
 * Release resources: stop drawing and stop every active media track.
 */
export const devicesDispose = () => {
  console.log("devicesDispose - releasing resources");
  stopDraw = true;
  streams.forEach((stream) => {
    stream.getTracks().forEach((track) => track.stop());
  });
  streams = []; // drop the stopped streams so they are not stopped again later
};
export const getAudioContext = () =>
  window.AudioContext ||
  window.webkitAudioContext ||
  window.mozAudioContext ||
  window.msAudioContext;
export default function loadDevices(options = {}) {
  const { readover = () => { }, change = () => { }, stop = () => { } } = options;
  let analyser;
  let mediaRecorder;
  let dataArray;
  let audioChunks = [];
  try {
    stopDraw = false; // allow drawing again if loadDevices is called after devicesDispose
    const draw = () => {
      if (stopDraw) return;
      requestAnimationFrame(draw);
      analyser.getByteTimeDomainData(dataArray);
      change(dataArray);
    };
    const mimeType = checkMime();
    getUserMediaPermission().then((stream) => {
      streams.push(stream);
      // Create the recorder
      mediaRecorder = new MediaRecorder(stream, { mimeType });
      // Collect audio chunks as data becomes available; they are later merged into a file
      mediaRecorder.addEventListener("dataavailable", (event) => {
        console.log("mediaRecorder-dataavailable:", event);
        audioChunks.push(event.data);
      });
      // // Listen for the start of recording
      // mediaRecorder.addEventListener("start", () => {
      //   console.log("mediaRecorder-start:");
      //   audioChunks = [];
      // });
      // Callback when recording stops
      mediaRecorder.addEventListener("stop", () => {
        console.log("mediaRecorder-end:", audioChunks);
        const audioBlob = new Blob(audioChunks, { type: "audio/mp4" }); // wav / webm / mp4
        stop(audioBlob);
        // Clear the chunks for the next recording
        audioChunks = [];
      });
      // Get the audio data
      const AudioCtx = getAudioContext();
      const audioContext = new AudioCtx();
      const source = audioContext.createMediaStreamSource(stream);
      // Use AnalyserNode.getByteTimeDomainData to read the waveform of the audio data:
      // it provides the time-domain / frequency data for drawing
      analyser = audioContext.createAnalyser();
      // Define the FFT size
      analyser.fftSize = 2048; // adjust this value to change the level of detail
      const bufferLength = analyser.frequencyBinCount;
      dataArray = new Uint8Array(bufferLength);
      // Connect the stream source to the analyser
      source.connect(analyser);
      draw();
      readover(mediaRecorder);
    }).catch((err) => {
      console.log("stream-error", err);
    });
  } catch (err) {
    console.log("mediaDevices-error", err);
  }
}
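With the listeners above, dataavailable normally fires only when the recorder stops (or when requestData() is called). If you need chunks while recording is still in progress, MediaRecorder.start() accepts an optional timeslice in milliseconds; a minimal standalone variant:
js
// Hypothetical variant: request a chunk roughly every second while recording,
// instead of one big chunk when the recorder stops.
navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
  const recorder = new MediaRecorder(stream);
  recorder.addEventListener("dataavailable", (e) => console.log("chunk:", e.data.size, "bytes"));
  recorder.start(1000); // timeslice in ms → periodic "dataavailable" events
});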
Example
vue
import { onMounted, onUnmounted, ref } from "vue";
import loadDevices, {
  devicesDispose,
  getAudioContext,
} from "../compositions/VerbalChat/loadDevices";

let mediaRecorder;
const speak = ref(false);
// Called after recording stops
const uploadAudio = (blob) => {
  // Post-process / upload the recorded data here, e.g.
  // const formData = new FormData();
  // formData.append("file", blob);
  // then POST the FormData to your API
};
// Drawing callback
const draw = ({ data }) => {
  // Call the child component's draw method and pass it the data
  // verCanvas.value && verCanvas.value.draw({ data });
};
const btnClick = () => {
  if (!speak.value) {
    console.log("Start recording");
    speak.value = true;
    mediaRecorder && mediaRecorder.start();
  } else {
    console.log("Stop recording");
    speak.value = false;
    mediaRecorder && mediaRecorder.stop();
  }
};
onMounted(() => {
  loadDevices({
    readover: (r) => (mediaRecorder = r),
    change: (dataArray) => {
      if (speak.value) {
        // Currently recording
        draw({ data: dataArray });
      }
    },
    stop: (blob) => uploadAudio(blob),
  });
});
onUnmounted(() => devicesDispose());
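A possible body for uploadAudio, following the FormData hint in the comments above (the /api/upload endpoint and the "file" field name are placeholders for your own API):
js
const uploadAudio = async (blob) => {
  const formData = new FormData();
  formData.append("file", blob, "recording.mp4");
  const res = await fetch("/api/upload", { method: "POST", body: formData });
  console.log("Upload finished with status:", res.status);
};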
Drawing the spectrum
vue
<template>
  <canvas class="VerbalCanvas" ref="canvasRef"></canvas>
</template>
<script setup>
import { onMounted, ref, watch } from "vue";
let ctx, canvas;
const canvasRef = ref();
// `type` selects the vertical anchor used by drawWave (top / center / bottom)
const draw = ({ data, type }) => {
  if (!canvasRef.value) return;
  canvas = canvasRef.value;
  canvas.height = parseFloat(getComputedStyle(canvas)["height"]);
  canvas.width = parseFloat(getComputedStyle(canvas)["width"]);
  ctx = canvas.getContext("2d");
  // drawWave(ctx, canvas, type, data);
  // drawLoop(ctx, canvas, type, data);
  drawCircle(ctx, canvas, type, data);
};
const clear = () => {
  try {
    ctx.clearRect(0, 0, canvas.width, canvas.height);
  } catch (er) {
    console.log("er", er);
  }
};
defineExpose({ draw, clear });
</script>
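How a parent component might use the canvas component exposed above, matching the verCanvas ref referenced in the earlier example (the file name VerbalCanvas.vue is an assumption):
vue
<template>
  <VerbalCanvas ref="verCanvas" />
</template>
<script setup>
import { ref } from "vue";
import VerbalCanvas from "./VerbalCanvas.vue";
const verCanvas = ref();
// Inside the `change` callback of loadDevices:
// verCanvas.value && verCanvas.value.draw({ data: dataArray, type: "center" });
</script>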
Drawing the waveform curve
js
const drawWave = (ctx, canvas, type, data) => {
  const waveH = 150; // height of the wave area
  const obj = {
    top: 0,
    center: canvas.height / 2,
    bottom: canvas.height - waveH,
  };
  const initY = obj[type]; // vertical offset chosen by `type`
  const dataArray = data || []; // mock data: generate an array of random values if no data is passed
  ctx.fillStyle = "rgba(200, 200, 200, 0)";
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  ctx.lineWidth = 1;
  ctx.strokeStyle = "#0077FF"; // "rgb(0, 0, 0)";
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  ctx.beginPath();
  const sliceWidth = (canvas.width * 1.0) / dataArray.length;
  let x = 0;
  for (let i = 0; i < dataArray.length; i++) {
    const v = dataArray[i] / 128.0;
    // const y = (v * canvas.height) / 2;
    const y = (v * waveH) / 2 + initY;
    if (i === 0) {
      ctx.moveTo(x, y);
    } else {
      ctx.lineTo(x, y);
    }
    x += sliceWidth;
  }
  // ctx.lineTo(canvas.width, canvas.height / 2);
  ctx.lineTo(canvas.width, waveH / 2 + initY);
  ctx.stroke();
};
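The mock-data comment above can be turned into a quick standalone test that exercises drawWave without a microphone (ctx and canvas are assumed to come from the component's draw method):
js
// Fake one frame of byte time-domain data: values hover around the 128 midpoint.
const mock = Uint8Array.from({ length: 1024 }, () => 128 + Math.round((Math.random() - 0.5) * 60));
drawWave(ctx, canvas, "center", mock);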
Drawing the audio ring
js
const drawLoop = (ctx, canvas, type, data) => {
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  const cX = canvas.width / 2;
  const cY = canvas.height / 2;
  const r = 100;
  const basel = Math.floor(data.length / 360);
  for (let i = 0; i < 360; i++) {
    const value = (data[i * basel] / 60) * 8;
    // mock data: value = Math.random() * 100
    ctx.beginPath();
    ctx.lineWidth = 2;
    ctx.strokeStyle = "#08a3ef";
    ctx.moveTo(cX, cY);
    // x = R * cos(PI / 180 * angle), y = -R * sin(PI / 180 * angle)
    ctx.lineTo(
      Math.cos((i / 180) * Math.PI) * (r + value) + cX,
      -Math.sin((i / 180) * Math.PI) * (r + value) + cY
    );
    ctx.stroke();
  }
  // Draw a small filled circle on top to cover the inner ends of the lines
  ctx.beginPath();
  ctx.lineWidth = 1;
  ctx.arc(cX, cY, r, 0, 2 * Math.PI, false);
  ctx.fillStyle = "#000";
  ctx.stroke();
  ctx.fill();
};
Drawing circles
js
/** Draw concentric circles whose radii follow the audio data */
const drawCircle = (ctx, canvas, type, data) => {
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  const cX = canvas.width / 2;
  const cY = canvas.height / 2;
  for (let i = 0; i < data.length; i += 4) {
    // Average every 4 samples and scale the result into a radius
    const v = (data[i] + data[i + 1] + data[i + 2] + data[i + 3]) / 4;
    const r = v * 0.5;
    // mock data: for (let i = 0; i < 254; i += 4) { const r = Math.random() * 100; ... }
    ctx.beginPath();
    ctx.lineWidth = 1;
    ctx.arc(cX, cY, r, 0, 2 * Math.PI, false);
    ctx.strokeStyle = "#c46868";
    ctx.stroke();
  }
};