audioContext

  // `res` is assumed to be a fetch Response; read its body as an ArrayBuffer
  const arraybuffer: ArrayBuffer = await res.arrayBuffer();
  const audioBuffer = await audioContext.decodeAudioData(arraybuffer);
  const source = audioContext.createBufferSource();
  source.connect(audioContext.destination); // connect the source to the output (speakers)
  source.buffer = audioBuffer;
  source.start();
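
For context, a minimal end-to-end sketch of the same flow, assuming the audio file is fetched over HTTP (the URL and helper name are placeholders):

  // Hypothetical helper: fetch an audio file and play it through an AudioContext
  const playUrl = async (url: string): Promise<void> => {
    const audioContext = new AudioContext();
    const res = await fetch(url); // e.g. '/audio/demo.mp3'
    const arraybuffer = await res.arrayBuffer(); // raw bytes of the file
    const audioBuffer = await audioContext.decodeAudioData(arraybuffer);
    const source = audioContext.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(audioContext.destination);
    source.start();
  };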

Google Chrome autoplay restriction

  // Start playback
  bufferSource.start();
  // The context is suspended: blocked by the browser's autoplay policy
  if (audioCtx.state === 'suspended') {
    window.console.log('Warning: Chrome autoplay policy triggered');
    const button = document.createElement('button');
    button.click();
    window.console.log('Trying to resume playback...');
    audioCtx
      .resume()
      .then(function () {
        window.console.log('Playback resumed');
      })
      .catch((err) => {
        window.console.log(err);
        window.console.log('Failed to resume playback');
      });
  }
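
A button created and clicked from script is not a real user gesture, so Chrome keeps the context suspended. A minimal sketch of the pattern that does satisfy the policy, assuming an existing button the user actually clicks (the element id is a placeholder):

  // Resume the AudioContext from a genuine user gesture
  const resumeButton = document.querySelector<HTMLButtonElement>('#resume-audio');
  resumeButton?.addEventListener('click', () => {
    if (audioCtx.state === 'suspended') {
      audioCtx
        .resume()
        .then(() => window.console.log('Playback resumed'))
        .catch((err) => window.console.log('Failed to resume playback', err));
    }
  });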

Audio()

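A minimal sketch of playback with the Audio() constructor, assuming a placeholder URL; play() returns a promise that rejects when the browser's autoplay policy blocks it:

  // Hypothetical URL; Audio() creates an HTMLAudioElement without adding it to the DOM
  const audio = new Audio('/audio/demo.mp3');
  audio.play().catch((err) => {
    // Rejected when playback is blocked without a prior user gesture
    window.console.log('Failed to play', err);
  });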

Converting iFlytek base64 data to playable audio

  import { Modal } from 'ant-design-vue';
  import { createVNode } from 'vue';
  import { webSocketInit } from './webscoket';
  // Whether audio is currently playing
  let onPlaying = false;
  // Accumulated audio samples
  let audioData: number[] = [];
  // Offset of the samples that have already been played
  let audioDataOffset = 0;
  let bufferSource: AudioBufferSourceNode;
  /**
   * @description Play the samples buffered so far
   */
  const play = () => {
    const audioCtx = new AudioContext();
    // Take only the samples that have not been played yet
    const chunk = audioData.slice(audioDataOffset);
    audioDataOffset += chunk.length;
    const audioBuffer = audioCtx.createBuffer(1, chunk.length, 22050);
    const nowBuffering = audioBuffer.getChannelData(0);
    if (audioBuffer.copyToChannel) {
      audioBuffer.copyToChannel(new Float32Array(chunk), 0, 0);
    } else {
      for (let i = 0; i < chunk.length; i++) {
        nowBuffering[i] = chunk[i];
      }
    }
    bufferSource = audioCtx.createBufferSource();
    bufferSource.buffer = audioBuffer;
    bufferSource.connect(audioCtx.destination);
    console.log(audioBuffer);
    // Start playback
    bufferSource.start();
    // The context is suspended: blocked by the browser's autoplay policy
    if (audioCtx.state === 'suspended') {
      window.console.log('Warning: Chrome autoplay policy triggered');
      // A programmatically created and clicked button is not a real user gesture, so this does not work:
      // const button = document.createElement('button');
      // button.style.position = 'absolute';
      // document.body.appendChild(button);
      // button.click();
      // window.console.log('Trying to resume playback...');
      // audioCtx
      //   .resume()
      //   .then(function () {
      //     window.console.log('Playback resumed');
      //   })
      //   .catch((err) => {
      //     window.console.log(err);
      //     stop(true);
      //     window.console.log('Failed to resume playback');
      //   });
      Modal.confirm({
        title: () => 'Warning: Chrome autoplay policy triggered',
        content: () => createVNode('div', { style: 'color:red;' }, 'Click OK to start playback'),
        onOk() {
          window.console.log('Trying to resume playback...');
          audioCtx
            .resume()
            .then(function () {
              window.console.log('Playback resumed');
            })
            .catch((err) => {
              window.console.log(err);
              stop(true);
              window.console.log('Failed to resume playback');
            });
        },
      });
    }
    bufferSource.onended = (_e) => {
      // More samples arrived while this buffer was playing: keep going
      if (audioDataOffset < audioData.length) {
        console.log('audio loop');
        play();
      } else {
        stop(true);
      }
    };
  };
  const stop = (isMoveAudio: boolean) => {
    console.log('audio stop');
    // Optionally discard the buffered samples as well
    if (isMoveAudio) {
      audioData = [];
    }
    audioDataOffset = 0;
    onPlaying = false;
    if (bufferSource) {
      try {
        bufferSource.stop();
      } catch (e) {
        console.log('err:', e);
      }
    }
  };
  /**
   * @description Convert a raw audio string into float samples and queue them for playback
   * @param audioDataStr raw audio as a binary string
   * @param fromRate source sample rate in Hz
   * @param toRate target sample rate in Hz
   */
  const transToAudioData = (audioDataStr: string, fromRate = 16000, toRate = 22050) => {
    // 22050 matches the sample rate passed to createBuffer() in play()
    const outputS16 = rawToS16(audioDataStr);
    let output: any = transS16ToF32(outputS16);
    output = transSamplingRate(output, fromRate, toRate);
    output = Array.from(output);
    for (let i = 0; i < output.length; i++) {
      audioData.push(output[i]);
    }
    if (!audioDataOffset && !onPlaying) {
      onPlaying = true;
      play();
    }
    // else {
    //   console.log('audio suspended: ', audioDataOffset, onPlaying);
    // }
  };
  /**
   * @description Convert a binary string into signed 16-bit samples
   * @param rawAudioData raw audio as a binary string
   * @returns {Int16Array} 16-bit sample array
   */
  const rawToS16 = (rawAudioData: string) => {
    const outputArray = new Uint8Array(rawAudioData.length);
    for (let i = 0; i < rawAudioData.length; ++i) {
      outputArray[i] = rawAudioData.charCodeAt(i);
    }
    // Reinterpret the byte buffer as 16-bit integers
    return new Int16Array(outputArray.buffer);
  };
  /**
   * Convert 16-bit integer samples to 32-bit floats in [-1, 1]
   * @param input
   * @returns {Float32Array}
   */
  const transS16ToF32 = (input: Int16Array) => {
    const tmpData: Array<number> = [];
    for (let i = 0; i < input.length; i++) {
      const d = input[i] < 0 ? input[i] / 0x8000 : input[i] / 0x7fff;
      tmpData.push(d);
    }
    return new Float32Array(tmpData);
  };
  /**
   * @description Resample the data by linear interpolation
   * @param data audio samples
   * @param fromRate source sample rate in Hz
   * @param toRate target sample rate in Hz
   * @returns {Float32Array}
   */
  const transSamplingRate = (data: Float32Array, fromRate = 44100, toRate = 16000) => {
    const fitCount = Math.round(data.length * (toRate / fromRate));
    const newData = new Float32Array(fitCount);
    const springFactor = (data.length - 1) / (fitCount - 1);
    newData[0] = data[0];
    for (let i = 1; i < fitCount - 1; i++) {
      const tmp = i * springFactor;
      const before = Math.floor(tmp);
      const after = Math.ceil(tmp);
      const atPoint = tmp - before;
      // Linear interpolation between the two nearest source samples
      newData[i] = data[before] + (data[after] - data[before]) * atPoint;
    }
    newData[fitCount - 1] = data[data.length - 1];
    return newData;
  };
  /**
   * @description Call the backend speech-synthesis service over WebSocket
   */
  export const AIVoiceRead = () => {
    // webSocketInit('wss://api.fullpan.cn/ws/push?uid=xxxx&token=xxxxx', transToAudioData);
    webSocketInit('wss://api.domain/ws/push?uid=xxxx&token=xxxxx', transToAudioData);
  };
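
rawToS16 works on a binary string, so a base64 chunk coming back from the service has to be decoded first. A minimal sketch of what the WebSocket callback might receive and pass in, assuming the message carries the audio in a base64 field (the handler and field names are assumptions):

  // Hypothetical message handler: decode the base64 payload and feed the pipeline above
  const onTTSMessage = (message: { audio: string }) => {
    // atob() yields a binary string whose charCodeAt() values are the raw bytes,
    // which is exactly what rawToS16 expects
    const rawStr = atob(message.audio);
    transToAudioData(rawStr);
  };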

References

【1】https://developer.mozilla.org/zh-CN/docs/Web/API/HTMLAudioElement/Audio
【2】https://developer.mozilla.org/zh-CN/docs/Web/API/AudioContext
【3】AudioContext入门 (Getting started with AudioContext), Juejin
【4】How to implement audio recording and playback with HTML5