微信小程序之基于WebAudioContext实现带波形图的自定义音频播放器
一.实现思路
- 定义自定义属性
- 获取音频的二进制流
- 将音频的二进制流解码为AudioBuffer
- 播放控制,计算播放时长以及播放进度
- 确定播放器的宽高,通过AudioBuffer获取音频时长,根据音频时长duration以及播放器宽度确定波形的宽度
- 根据波形宽度以及AudioBuffer生成波形图数据,并计算峰值
- 确定canvas宽高
- 绘制波形图和播放进度
- 音频播放过程中,根据播放进度重绘波形
二.定义自定义属性
| 属性 | 使用 | 默认值 |
| --- | --- | --- |
| width | 组件宽度,String类型,取值:100%/100rpx/100px | 100% |
| height | 组件高度,String类型,取值:100%/100rpx/100px | 200rpx |
| normalize | 波形幅度归一化,Boolean类型 | true |
| audio-url | 音频url,String类型 | |
| min-px-per-sec | 每秒代表多少个像素,Integer类型,单位为px | 20 |
| bar-width | 每一个波形的宽度,Integer类型,单位rpx | 4 |
| wave-color | 波形图颜色,String类型,取值:#e0e0e0/rgba(240, 240, 240, 1) | #e0e0e0 |
| progress-color | 进度条颜色,String类型,取值:#ff0000/rgba(255, 0, 0, 1) | #ff0000 |
| start-play | 播放控制,Boolean类型,取值true/false,为true开始播放,为false暂停播放 | false |
| bind:loaded | 音频加载完成回调 | event{duration: duration} |
| bind:playing | 播放中回调 | event{currentTime: currentTime} |
| bind:drawed | 波形图绘制完成回调 | event{} |
| bind:start | 开始播放回调 | event{} |
| bind:paused | 暂停播放回调 | event{} |
| bind:finish | 播放完成回调 | event{} |
三.获取音频的二进制流
// Download the audio file as raw binary (ArrayBuffer) so that it can be
// decoded by the WebAudioContext in the next step.
wx.request({
  url: this.data.audioUrl,
  responseType: 'arraybuffer', // ask for binary instead of text/JSON
  success: (res) => {
    // res.data holds the ArrayBuffer with the encoded audio bytes.
    const audioData = res.data;
  },
  fail: (err) => {
    console.error('request fail', err);
  },
});
四.将音频的二进制流解码为AudioBuffer
// Create a WebAudio context, fetch the audio binary, and decode it into an
// AudioBuffer. On success, listeners are notified of the duration via the
// "loaded" event, and both the buffer and the context are kept in data.
const audioContext = wx.createWebAudioContext();
wx.request({
  url: this.data.audioUrl,
  responseType: 'arraybuffer',
  success: (res) => {
    audioContext.decodeAudioData(res.data, (buffer) => {
      // Duration is 0 if decoding produced no buffer.
      const duration = buffer ? buffer.duration : 0;
      this.triggerEvent("loaded", { duration: duration });
      this.setData({
        audioBuffer: buffer,
        audioContext: audioContext,
      });
    }, (err) => {
      console.error('decodeAudioData fail', err);
    });
  },
  fail: (err) => {
    console.error('request fail', err);
  },
});
五.播放控制,计算播放时长以及播放进度
// Module-level handle for the progress-polling timer, so detached() can
// cancel it when the component is destroyed.
var timer
Component({
properties: {
// URL of the audio to play; setting a non-empty value (re)loads the audio.
audioUrl: {
type: String,
value: "",
observer: function(newVal, oldVal) {
if(newVal) {
this.setData({
audioUrl: newVal
})
this.loadAudio()
}
}
},
// Playback switch driven by the parent page: true -> play(), false -> pause().
startPlay: {
type: Boolean,
value: false,
observer: function(newVal, oldVal) {
if(newVal) {
this.play()
} else {
this.pause()
}
}
}
},
data: {
// Playback state machine: "paused" | "playing" | "finish".
state: "paused",
audioContext: null, // WebAudioContext from wx.createWebAudioContext()
audioBuffer: null, // decoded AudioBuffer of the whole file
audioSource: null, // current AudioBufferSourceNode (one created per play())
startPosition: 0, // offset (seconds) into the buffer where the last play started
lastPlay: 0 // audioContext.currentTime captured when the last play started
},
lifetimes: {
attached: function() {
// Kick off the self-rescheduling 50 ms progress poll immediately.
this.pollingState()
},
detached: function() {
if(timer) {
clearTimeout(timer)
}
}
},
pageLifetimes: {
// Pause playback whenever the hosting page is hidden.
hide: function() {
this.pause()
},
},
methods: {
// Download the audio as an ArrayBuffer and decode it into an AudioBuffer.
// Emits "loaded" with the duration once decoding succeeds, and resets the
// playback position bookkeeping.
loadAudio: function() {
let audioContext = wx.createWebAudioContext()
wx.request({
url: this.data.audioUrl,
responseType: 'arraybuffer',
success: (res) => {
let audioData = res.data
audioContext.decodeAudioData(audioData, (buffer) => {
let duration = buffer ? buffer.duration:0
this.triggerEvent("loaded", {duration: duration})
this.setData({
audioBuffer: buffer,
audioContext: audioContext,
lastPlay: audioContext.currentTime,
startPosition: 0
})
}, (err) => {
console.error('decodeAudioData fail', err)
})
},
fail: (err) => {
console.error('request fail', err)
}
})
},
// Current playback offset in seconds: the buffer position where the last
// play started plus the context time elapsed since then.
getCurrentTime: function() {
let audioContext = this.data.audioContext
let startPosition = this.data.startPosition
let lastPlay = this.data.lastPlay
return startPosition + (audioContext.currentTime - lastPlay)
},
// Total track duration in seconds (0 until the buffer has been decoded).
getDuration: function() {
let audioBuffer = this.data.audioBuffer
return audioBuffer ? audioBuffer.duration:0
},
// Start (or resume) playback from data.startPosition.
play: function() {
let state = this.data.state
if(state == "playing") {
return
}
let audioBuffer = this.data.audioBuffer
let audioContext = this.data.audioContext
if(!audioContext) {
return
}
console.log("play", state)
if(audioContext.state == "suspended") {
audioContext.resume()
}
// AudioBufferSourceNode is single-use, so a fresh source is created per play.
let audioSource = audioContext.createBufferSource()
audioSource.buffer = audioBuffer
audioSource.connect(audioContext.destination)
// onended fires both at the natural end of the buffer and after pause()'s
// stop(); the recomputed position is used to tell the two cases apart.
audioSource.onended = () => {
let startPosition = this.data.startPosition + (audioContext.currentTime - this.data.lastPlay)
if(startPosition > audioBuffer.duration) {
// Ran past the end: playback finished; rewind to the start.
startPosition = 0
this.setData({
startPosition: startPosition,
lastPlay: audioContext.currentTime,
state: "finish",
startPlay: false
})
this.triggerEvent("finish", {})
} else {
// Stopped mid-buffer: remember where to resume from.
this.setData({
state: "paused",
startPosition: startPosition,
startPlay: false
})
this.triggerEvent("paused", {})
}
}
audioSource.start(0, this.data.startPosition)
let lastPlay = audioContext.currentTime
this.setData({
audioSource: audioSource,
lastPlay: lastPlay,
state: "playing"
})
this.triggerEvent("start", {})
},
// Stop the current source; state bookkeeping happens in onended above.
pause: function() {
let audioSource = this.data.audioSource
if(!audioSource) {
return
}
audioSource.disconnect()
audioSource.stop(0)
},
// Self-rescheduling 50 ms loop; while playing, emits "playing" with the
// current time and the percent of the track played.
pollingState: function() {
timer = setTimeout(() => {
// NOTE(review): this clearTimeout is a no-op — the timeout already fired.
clearTimeout(timer)
let state = this.data.state
if(state == "playing") {
let currentTime = this.getCurrentTime()
let duration = this.getDuration()
let percent = Math.min(currentTime/duration, 1)
this.triggerEvent("playing", {
currentTime: currentTime,
percent: percent
})
}
this.pollingState()
}, 50)
}
}
})
<view class="container">
<!-- Demo usage: the page's `play` data field drives the component's start-play property. -->
<wave-audio-player audio-url="http://ep.sycdn.kuwo.cn/7a9d4ef8b0bbee905e0797464ffbdde3/61ee1cfc/resource/n1/13/34/1474260193.mp3" start-play="{{play}}"></wave-audio-player>
<!-- Blue button taps the page's toPlay handler (presumably sets play to true — handler not shown here). -->
<view style="height: 88rpx; background: #00f; line-height: 88rpx; text-align: center; color:white;margin-top: 20rpx;" bindtap="toPlay">play</view>
<!-- Red button taps toPause (presumably sets play to false — handler not shown here). -->
<view style="height: 88rpx; background: #f00; line-height: 88rpx; text-align: center; color:white; margin-top: 20rpx;" bindtap="toPause">pause</view>
</view>
六.确定播放器的宽高,通过AudioBuffer获取音频时长,根据音频时长duration以及播放器宽度确定波形的宽度
// Polling timer handle (module scope so component lifetimes can clear it).
var timer
Component({
properties: {
// Component width, e.g. "100%", "100rpx" or "100px".
width: {
type: String,
value: "100%"
},
// Component height, same accepted formats as width.
height: {
type: String,
value: "200rpx"
},
// Audio URL; a non-empty value triggers download + decode.
audioUrl: {
type: String,
value: "",
observer: function(newVal, oldVal) {
if(newVal) {
this.setData({
audioUrl: newVal
})
this.loadAudio()
}
}
},
// Horizontal resolution of the waveform: pixels drawn per second of audio.
minPxPerSec: {
type: Number,
value: 20
},
// Playback switch: true -> play(), false -> pause().
startPlay: {
type: Boolean,
value: false,
observer: function(newVal, oldVal) {
if(newVal) {
this.play()
} else {
this.pause()
}
}
}
},
methods: {
// Download + decode the audio, then measure the player to size the wave.
loadAudio: function() {
let audioContext = wx.createWebAudioContext()
wx.request({
url: this.data.audioUrl,
responseType: 'arraybuffer',
success: (res) => {
let audioData = res.data
audioContext.decodeAudioData(audioData, (buffer) => {
let duration = buffer ? buffer.duration:0
this.triggerEvent("loaded", {duration: duration})
this.setData({
audioBuffer: buffer,
audioContext: audioContext,
lastPlay: audioContext.currentTime,
startPosition: 0
})
// The wave width depends on the decoded duration, so compute it now.
this.calcWaveWidth();
}, (err) => {
console.error('decodeAudioData fail', err)
})
},
fail: (err) => {
console.error('request fail', err)
}
})
},
// Measure the #wave element and derive the total waveform width:
// duration * minPxPerSec pixels, but never narrower than the player itself.
calcWaveWidth: function() {
const query = this.createSelectorQuery()
query.select('#wave').boundingClientRect()
query.exec((res) => {
let playerWidth = res && res[0] ? res[0].width:0
let playerHeight = res && res[0] ? res[0].height:0
if(!playerWidth) {
return
}
this.setData({
playerWidth: playerWidth,
playerHeight: playerHeight
})
let minPxPerSec = this.data.minPxPerSec
// getDuration() is defined in the playback section of this component (see above).
let duration = this.getDuration()
let nominalWidth = Math.round(duration * minPxPerSec);
let width = nominalWidth;
// NOTE(review): start/end are computed but never used here — apparently a
// leftover from the wavesurfer.js code this is based on.
let start = 0;
let end = Math.max(start + playerWidth, width);
if(nominalWidth < playerWidth) {
width = playerWidth;
start = 0;
end = width;
}
this.setData({
waveWidth: width
})
})
}
},
})
<view id="waveform" >
<!-- #wave is measured via createSelectorQuery() in calcWaveWidth; its rect provides playerWidth/playerHeight. -->
<view id="wave" style="display: block; position: relative; user-select: none; width: 100%; height: {{height}}; overflow: hidden hidden;">
</view>
</view>
七.根据波形宽度以及AudioBuffer生成波形图数据,并计算峰值
// Measure the #wave element, derive the total waveform width
// (duration * minPxPerSec, clamped to at least the player width), and then
// compute the peaks plus their absolute maximum for normalization.
calcWaveWidth: function() {
const query = this.createSelectorQuery()
query.select('#wave').boundingClientRect()
query.exec((res) => {
let playerWidth = res && res[0] ? res[0].width:0
let playerHeight = res && res[0] ? res[0].height:0
if(!playerWidth) {
return
}
this.setData({
playerWidth: playerWidth,
playerHeight: playerHeight
})
let minPxPerSec = this.data.minPxPerSec
let duration = this.getDuration()
let nominalWidth = Math.round(duration * minPxPerSec);
let width = nominalWidth;
// NOTE(review): start/end are computed but never used (wavesurfer.js leftover).
let start = 0;
let end = Math.max(start + playerWidth, width);
if(nominalWidth < playerWidth) {
width = playerWidth;
start = 0;
end = width;
}
this.setData({
waveWidth: width
})
// getPeaks() reads data.waveWidth, which setData has just updated.
let peaks = this.getPeaks()
let absMaxOfPeaks = this.absMax(peaks)
this.setData({
peaks: peaks,
absMaxOfPeaks: absMaxOfPeaks
})
})
}
// Extract waveform peaks from the decoded AudioBuffer: one [max, min] pair
// per horizontal pixel of the wave (interleaved: index 2i = max, 2i+1 = min).
// With splitChannels=false (default) the channels are merged by taking the
// channel-wise max/min; otherwise one peaks array per channel is returned.
getPeaks: function(splitChannels = false) {
let audioBuffer = this.data.audioBuffer
let waveWidth = this.data.waveWidth
let length = waveWidth
let first = 0
let last = length - 1
let splitPeaks = []
let channels = audioBuffer ? audioBuffer.numberOfChannels : 1
let c
for (c = 0; c < channels; c++) {
splitPeaks[c] = []
// Pre-size each array by writing its last pair, so missing buckets read as 0.
splitPeaks[c][2 * (length - 1)] = 0
splitPeaks[c][2 * (length - 1) + 1] = 0
}
let mergedPeaks = []
mergedPeaks[2 * (length - 1)] = 0;
mergedPeaks[2 * (length - 1) + 1] = 0;
// No decoded audio yet: return the zero-initialized arrays.
if (!audioBuffer || !audioBuffer.length) {
return splitChannels ? splitPeaks:mergedPeaks;
}
// Number of audio samples that map onto one pixel column.
let sampleSize = audioBuffer.length / length;
// Subsample within each column (~every tenth sample) to keep this fast.
let sampleStep = ~~(sampleSize / 10) || 1;
for (c = 0; c < channels; c++) {
let peaks = splitPeaks[c];
let chan = audioBuffer.getChannelData(c);
let i = void 0;
for (i = first; i <= last; i++) {
// Sample window [start, end) for this pixel column (~~ truncates to int).
let start = ~~(i * sampleSize);
let end = ~~(start + sampleSize);
let min = chan[start];
let max = min;
let j = void 0;
for (j = start; j < end; j += sampleStep) {
let value = chan[j];
if (value > max) {
max = value;
}
if (value < min) {
min = value;
}
}
peaks[2 * i] = max;
peaks[2 * i + 1] = min;
// Merged view keeps the extreme values across all channels.
if (c == 0 || max > mergedPeaks[2 * i]) {
mergedPeaks[2 * i] = max;
}
if (c == 0 || min < mergedPeaks[2 * i + 1]) {
mergedPeaks[2 * i + 1] = min;
}
}
}
let result = splitChannels ? splitPeaks:mergedPeaks;
return result;
}
absMax: function(array) {
let newArray = array.filter(function(item) {
return item != undefined;
})
let max = Math.max(...newArray)
let min = Math.min(...newArray)
return -min > max ? -min : max;
}
八.确定canvas宽高
// Measure the #wave element, size the waveform (duration * minPxPerSec,
// clamped to at least the player width), compute peaks + their absolute
// maximum, and finally set up the canvases for drawing.
calcWaveWidth: function() {
const query = this.createSelectorQuery()
query.select('#wave').boundingClientRect()
query.exec((res) => {
let playerWidth = res && res[0] ? res[0].width:0
let playerHeight = res && res[0] ? res[0].height:0
if(!playerWidth) {
return
}
this.setData({
playerWidth: playerWidth,
playerHeight: playerHeight
})
let minPxPerSec = this.data.minPxPerSec
let duration = this.getDuration()
let nominalWidth = Math.round(duration * minPxPerSec);
let width = nominalWidth;
// NOTE(review): start/end are computed but never used (wavesurfer.js leftover).
let start = 0;
let end = Math.max(start + playerWidth, width);
if(nominalWidth < playerWidth) {
width = playerWidth;
start = 0;
end = width;
}
this.setData({
waveWidth: width
})
// getPeaks() reads data.waveWidth, which setData has just updated.
let peaks = this.getPeaks()
let absMaxOfPeaks = this.absMax(peaks)
this.setData({
peaks: peaks,
absMaxOfPeaks: absMaxOfPeaks
})
// Canvases can only be sized once the player dimensions are known.
this.initCanvas()
})
}
// Look up both canvas nodes, scale their backing stores by the device pixel
// ratio for crisp rendering, and keep the 2D contexts in data for the draw
// helpers (fillRect/fillProgressRect/clearCanvas).
initCanvas: function() {
const query = this.createSelectorQuery()
query.select('#wave-canvas').fields({ node: true, size: true })
query.select('#progress-canvas').fields({ node: true, size: true })
query.exec((res) => {
// NOTE(review): wx.getSystemInfoSync is deprecated in newer base libraries;
// wx.getWindowInfo().pixelRatio is the modern replacement — confirm target version.
const dpr = wx.getSystemInfoSync().pixelRatio
const waveCanvas = res[0].node
const waveContext = waveCanvas.getContext('2d')
// Backing store in physical pixels; the scale() below maps logical px back.
waveCanvas.width = res[0].width * dpr
waveCanvas.height = res[0].height * dpr
waveContext.scale(dpr, dpr)
const progressCanvas = res[1].node
const progressContext = progressCanvas.getContext('2d')
progressCanvas.width = res[1].width * dpr
progressCanvas.height = res[1].height * dpr
progressContext.scale(dpr, dpr)
this.setData({
waveContext: waveContext,
progressContext: progressContext
})
})
}
<view id="waveform" >
<!-- #wave is measured to size the waveform; both canvases fill it completely. -->
<view id="wave" style="display: block; position: relative; user-select: none; width: 100%; height: {{height}}; overflow: hidden hidden;">
<!-- Base layer: the full waveform in wave-color. -->
<canvas id="wave-canvas" type="2d" style="position: absolute; left: 0px; top: 0px; bottom: 0px; height: 100%; width: 100%;"></canvas>
<!-- Overlay (z-index:99): already-played bars in progress-color, drawn on top. -->
<canvas id="progress-canvas" type="2d" style="position: absolute; left: 0px; top: 0px; bottom: 0px; height: 100%; width: 100%; z-index:99;"></canvas>
</view>
</view>
九.绘制波形图和播放进度
// Same canvas setup as before, plus an initial waveform render (at 0%
// progress) once the contexts are available.
initCanvas: function() {
const query = this.createSelectorQuery()
query.select('#wave-canvas').fields({ node: true, size: true })
query.select('#progress-canvas').fields({ node: true, size: true })
query.exec((res) => {
// NOTE(review): wx.getSystemInfoSync is deprecated in newer base libraries;
// wx.getWindowInfo().pixelRatio is the modern replacement — confirm target version.
const dpr = wx.getSystemInfoSync().pixelRatio
const waveCanvas = res[0].node
const waveContext = waveCanvas.getContext('2d')
waveCanvas.width = res[0].width * dpr
waveCanvas.height = res[0].height * dpr
waveContext.scale(dpr, dpr)
const progressCanvas = res[1].node
const progressContext = progressCanvas.getContext('2d')
progressCanvas.width = res[1].width * dpr
progressCanvas.height = res[1].height * dpr
progressContext.scale(dpr, dpr)
this.setData({
waveContext: waveContext,
progressContext: progressContext
})
// Draw the idle waveform immediately (percent defaults to 0).
this.drawWave()
})
}
// Render the waveform bars and the played-progress overlay.
// percent: playback progress in [0, 1]; bars left of waveWidth*percent are
// additionally drawn on the progress canvas.
drawWave: function(percent = 0) {
let peaks = this.data.peaks
let absMaxOfPeaks = this.data.absMaxOfPeaks
let waveWidth = this.data.waveWidth
// Per-channel peaks (array of arrays): only the first channel is drawn.
if (peaks[0] instanceof Array) {
let channels = peaks;
peaks = channels[0];
}
let normalize = this.data.normalize
// Normalization divisor: 1 (raw amplitudes) unless normalize is enabled.
let absmax = 1;
if(normalize) {
absmax = absMaxOfPeaks;
}
// Negative values present => peaks are interleaved [max, min] pairs.
let hasMinVals = [].some.call(peaks, function (val) {
return val < 0;
});
let height = this.data.playerHeight;
let halfH = height / 2;
let offsetY = 0;
// 2 when pairs are interleaved, 1 when only maxima are stored.
let peakIndexScale = hasMinVals ? 2 : 1;
let length = peaks.length / peakIndexScale;
// Bar geometry in px: bar width plus a gap of at least 1px (half the bar).
let barWidth = this.data.barWidth
let bar = barWidth;
let gap = Math.max(1, ~~(bar / 2))
let step = bar + gap;
let scale = length / waveWidth;
let first = 0;
let last = waveWidth;
let i = first;
let halfPixel = 0.5
let playerWidth = this.data.playerWidth
// target = horizontal scroll offset; keeps the playhead centered when the
// waveform is wider than the visible player.
let target = 0;
let pos = ~~(waveWidth * percent);
let halfW = ~~(playerWidth / 2);
let maxScroll = waveWidth - playerWidth;
if(maxScroll != 0) {
target = Math.max(0, Math.min(maxScroll, pos - halfW));
}
this.clearCanvas()
for (i; i < last; i += step) {
// Peak for this bar; "|| 0" covers holes in the sparse peaks array.
var peak = peaks[Math.floor(i * scale * peakIndexScale)] || 0;
var h = Math.round(peak / absmax * halfH);
// Bars are mirrored around the vertical center (y from halfH-h, height 2h).
this.fillRect(i + halfPixel - target, halfH - h + offsetY, bar + halfPixel, h * 2)
// Bars left of the playhead are repeated on the progress overlay.
if(i + halfPixel < waveWidth*percent) {
this.fillProgressRect(i + halfPixel - target, halfH - h + offsetY, bar + halfPixel, h * 2)
}
}
this.triggerEvent("drawed", {})
}
fillRect: function(x, y, width, height) {
let playerWidth = this.data.playerWidth
let waveColor = this.data.waveColor
let intersection = {
x1: x,
y1: y,
x2: Math.min(x + width, playerWidth),
y2: y + height
};
let waveContext = this.data.waveContext
waveContext.fillStyle = waveColor
waveContext.fillRect(intersection.x1, intersection.y1, intersection.x2 - intersection.x1, (intersection.y2 - intersection.y1) == 0 ? 1:intersection.y2 - intersection.y1)
}
fillProgressRect: function(x, y, width, height) {
let playerWidth = this.data.playerWidth
let progressColor = this.data.progressColor
let intersection = {
x1: x,
y1: y,
x2: Math.min(x + width, playerWidth),
y2: y + height
};
let progressContext = this.data.progressContext
progressContext.fillStyle = progressColor
progressContext.fillRect(intersection.x1, intersection.y1, intersection.x2 - intersection.x1, (intersection.y2 - intersection.y1) == 0 ? 1:intersection.y2 - intersection.y1)
}
clearCanvas: function() {
let playerWidth = this.data.playerWidth
let playerHeight = this.data.playerHeight
let waveContext = this.data.waveContext
let progressContext = this.data.progressContext
waveContext.clearRect(0, 0, playerWidth, playerHeight)
progressContext.clearRect(0, 0, playerWidth, playerHeight)
}
十.音频播放过程中,根据播放进度重绘波形
// Self-rescheduling 50 ms loop: while playing, redraw the waveform at the
// current progress and emit the "playing" event with time and percent.
pollingState: function() {
timer = setTimeout(() => {
// NOTE(review): this clearTimeout is a no-op — the timeout already fired.
clearTimeout(timer)
let state = this.data.state
if(state == "playing") {
let currentTime = this.getCurrentTime()
let duration = this.getDuration()
let percent = Math.min(currentTime/duration, 1)
// Repaint so the progress overlay tracks playback.
this.drawWave(percent)
this.triggerEvent("playing", {
currentTime: currentTime,
percent: percent
})
}
this.pollingState()
}, 50)
},
十一.最终效果
(此处原文为最终效果演示动图,文本版无法展示。)