audioContext
// assuming `res` is a fetch Response, read its body as an ArrayBuffer
const arraybuffer: ArrayBuffer = await res.arrayBuffer();
const audioBuffer = await audioContext.decodeAudioData(arraybuffer);
const source = audioContext.createBufferSource();
source.connect(audioContext.destination); // connect the source to the output
source.buffer = audioBuffer;
source.start();
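Putting the pieces together, a minimal end-to-end sketch looks like the following; it assumes the audio file is served over HTTP, and the URL '/audio/demo.mp3' is only a placeholder:

// Sketch: fetch an audio file and play it through an AudioContext.
// Any format the browser can decode (mp3, wav, ogg, ...) works with decodeAudioData.
const audioContext = new AudioContext();

async function playFromUrl(url: string): Promise<void> {
  const res = await fetch(url);
  const arraybuffer = await res.arrayBuffer();
  const audioBuffer = await audioContext.decodeAudioData(arraybuffer);
  const source = audioContext.createBufferSource();
  source.buffer = audioBuffer;
  source.connect(audioContext.destination);
  source.start();
}

playFromUrl('/audio/demo.mp3').catch(console.error);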
Chrome autoplay restriction
// play ▶
bufferSource.start();
// the context is suspended: blocked by the browser's autoplay policy
if (audioCtx.state === 'suspended') {
  window.console.log('warning: blocked by the Chrome autoplay policy');
  const button = document.createElement('button');
  button.click(); // note: a programmatic click does not count as a user gesture
  window.console.log('trying to resume playback...');
  audioCtx
    .resume()
    .then(function () {
      window.console.log('playback resumed');
    })
    .catch((err) => {
      window.console.log(err);
      window.console.log('failed to resume playback');
    });
}
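Because resume() only succeeds inside a real user activation, a more reliable pattern is to bind the resume call to an actual click handler. The sketch below is not part of the original snippet; the button text and placement are placeholders:

// Sketch: resume a suspended AudioContext from a genuine user gesture.
function resumeOnUserGesture(audioCtx: AudioContext): void {
  if (audioCtx.state !== 'suspended') {
    return;
  }
  const button = document.createElement('button');
  button.textContent = 'Click to start playback';
  document.body.appendChild(button);
  button.addEventListener('click', async () => {
    try {
      // resume() succeeds here because it runs inside a real user activation
      await audioCtx.resume();
      console.log('playback resumed');
    } catch (err) {
      console.log('failed to resume playback', err);
    } finally {
      button.remove();
    }
  });
}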
Audio()
3. Converting iFlytek base64 data into playable audio
import { Modal } from 'ant-design-vue';
import { createVNode } from 'vue';
import { webSocketInit } from './webscoket';

// whether audio is currently playing
let onPlaying = false;
// accumulated audio samples
let audioData: number[] = [];
// offset of the samples that have already been played
let audioDataOffset = 0;
let bufferSource: AudioBufferSourceNode;

/**
 * @description play the samples that have not been played yet
 */
const play = () => {
  const audioCtx = new AudioContext();
  // take only the unplayed part of the accumulated samples
  const chunk = audioData.slice(audioDataOffset);
  audioDataOffset += chunk.length;
  const audioBuffer = audioCtx.createBuffer(1, chunk.length, 22050);
  const nowBuffering = audioBuffer.getChannelData(0);
  if (audioBuffer.copyToChannel) {
    audioBuffer.copyToChannel(new Float32Array(chunk), 0, 0);
  } else {
    for (let i = 0; i < chunk.length; i++) {
      nowBuffering[i] = chunk[i];
    }
  }
  bufferSource = audioCtx.createBufferSource();
  bufferSource.buffer = audioBuffer;
  bufferSource.connect(audioCtx.destination);
  console.log(audioBuffer);
  // play ▶
  bufferSource.start();
  // the context is suspended: blocked by the browser's autoplay policy
  if (audioCtx.state === 'suspended') {
    window.console.log('warning: blocked by the Chrome autoplay policy');
    // an earlier attempt created a button and clicked it programmatically (see the
    // snippet above); a programmatic click is not a user gesture, so a confirm
    // dialog is used instead to obtain a real click before resuming
    Modal.confirm({
      title: () => 'Warning: blocked by the Chrome autoplay policy',
      content: () => createVNode('div', { style: 'color:red;' }, 'Click OK to start playback'),
      onOk() {
        window.console.log('trying to resume playback...');
        audioCtx
          .resume()
          .then(function () {
            window.console.log('playback resumed');
          })
          .catch((err) => {
            window.console.log(err);
            stop(true);
            window.console.log('failed to resume playback');
          });
      },
    });
  }
  bufferSource.onended = (_e) => {
    if (audioDataOffset < audioData.length) {
      // more samples arrived while this buffer was playing: keep going
      console.log('audio loop');
      play();
    } else {
      stop(true);
    }
  };
};

const stop = (isMoveAudio: boolean) => {
  console.log('audio stop');
  if (isMoveAudio) {
    audioData = [];
  }
  audioDataOffset = 0;
  onPlaying = false;
  if (bufferSource) {
    try {
      bufferSource.stop();
    } catch (e) {
      console.log('err:', e);
    }
  }
};

/**
 * @description convert a chunk of raw speech data into playable samples
 * @param audioDataStr raw audio data as a binary string
 * @param fromRate source sample rate
 * @param toRate target sample rate
 */
const transToAudioData = (audioDataStr: string, fromRate = 16000, toRate = 22050) => {
  const outputS16 = rawToS16(audioDataStr);
  let output: any = transS16ToF32(outputS16);
  output = transSamplingRate(output, fromRate, toRate);
  output = Array.from(output);
  for (let i = 0; i < output.length; i++) {
    audioData.push(output[i]);
  }
  if (!audioDataOffset && !onPlaying) {
    onPlaying = true;
    play();
  }
};

/**
 * @description interpret a binary string as 16-bit PCM samples
 * @param rawAudioData raw audio data as a binary string
 * @returns {Int16Array} 16-bit sample array
 */
const rawToS16 = (rawAudioData: string) => {
  const outputArray = new Uint8Array(rawAudioData.length);
  for (let i = 0; i < rawAudioData.length; ++i) {
    outputArray[i] = rawAudioData.charCodeAt(i);
  }
  return new Int16Array(new DataView(outputArray.buffer).buffer);
};

/**
 * convert 16-bit integer samples to 32-bit floats in [-1, 1]
 * @param input
 * @returns {Float32Array}
 */
const transS16ToF32 = (input: Int16Array) => {
  const tmpData: Array<number> = [];
  for (let i = 0; i < input.length; i++) {
    const d = input[i] < 0 ? input[i] / 0x8000 : input[i] / 0x7fff;
    tmpData.push(d);
  }
  return new Float32Array(tmpData);
};

/**
 * @description resample the audio data by linear interpolation
 * @param data audio samples
 * @param fromRate source sample rate
 * @param toRate target sample rate
 * @returns {Float32Array}
 */
const transSamplingRate = (data: Float32Array, fromRate = 44100, toRate = 16000) => {
  const fitCount = Math.round(data.length * (toRate / fromRate));
  const newData = new Float32Array(fitCount);
  const springFactor = (data.length - 1) / (fitCount - 1);
  newData[0] = data[0];
  for (let i = 1; i < fitCount - 1; i++) {
    const tmp = i * springFactor;
    const before = Math.floor(tmp);
    const after = Math.ceil(tmp);
    const atPoint = tmp - before;
    newData[i] = data[before] + (data[after] - data[before]) * atPoint;
  }
  newData[fitCount - 1] = data[data.length - 1];
  return newData;
};

/**
 * @description call the backend text-to-speech service over WebSocket
 */
export const AIVoiceRead = () => {
  // webSocketInit('wss://api.fullpan.cn/ws/push?uid=xxxx&token=xxxxx', transToAudioData);
  webSocketInit('wss://api.domain/ws/push?uid=xxxx&token=xxxxx', transToAudioData);
};
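The helpers above expect a binary string (rawToS16 reads it with charCodeAt), so if the WebSocket delivers the iFlytek audio as base64 text it has to be decoded with atob first. The following is only a sketch of such a wrapper: the { audio } message shape and the 16 kHz raw PCM assumption are not confirmed by the code shown here, since webscoket.ts is not included:

// Sketch: decode a base64 audio field from a pushed message and feed it to
// transToAudioData. Adapt the message format to what the backend actually sends.
const onAudioMessage = (message: string) => {
  // assumed message shape: { "audio": "<base64 pcm>" }
  const payload = JSON.parse(message) as { audio?: string };
  if (!payload.audio) {
    return;
  }
  // atob turns the base64 text into the binary string that rawToS16 expects
  const rawAudio = window.atob(payload.audio);
  // assuming the service returns 16 kHz raw PCM, matching the fromRate default above
  transToAudioData(rawAudio, 16000, 22050);
};

// usage: pass the wrapper instead of transToAudioData
// webSocketInit('wss://api.domain/ws/push?uid=xxxx&token=xxxxx', onAudioMessage);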
