@@ -2169,72 +2169,83 @@ function handleDecodedFrame(frame) { // frame.codedWidth/Height are physical pix
2169
2169
try {
2170
2170
// Source text for an AudioWorklet processor. It is compiled in a separate
// AudioWorkletGlobalScope (via audioWorklet.addModule), where
// AudioWorkletProcessor, registerProcessor and the global sampleRate are
// provided by the browser. The processor receives Float32 PCM packets
// (presumably interleaved stereo — see note below) over its MessagePort,
// buffers them, and de-interleaves them into the output channels on each
// 128-sample render quantum.
const audioWorkletProcessorCode = `
class AudioFrameProcessor extends AudioWorkletProcessor {
  constructor(options) {
    super();
    // FIFO of Float32Array packets not yet played.
    this.audioBufferQueue = [];
    // Packet currently being consumed, and the read offset into it.
    this.currentAudioData = null;
    this.currentDataOffset = 0;

    // Buffering policy, in packets: once the queue grows past
    // MAX_BUFFER_PACKETS the oldest packet is dropped so that playback
    // latency stays bounded instead of growing without limit.
    this.TARGET_BUFFER_PACKETS = 3;
    this.MAX_BUFFER_PACKETS = 8;

    this.port.onmessage = (event) => {
      if (event.data.audioData) {
        const pcmData = new Float32Array(event.data.audioData);
        if (this.audioBufferQueue.length >= this.MAX_BUFFER_PACKETS) {
          // Drop the oldest packet to cap latency.
          this.audioBufferQueue.shift();
        }
        this.audioBufferQueue.push(pcmData);
      } else if (event.data.type === 'getBufferSize') {
        // Buffered duration in ms. Assumes interleaved stereo (2 samples
        // per frame) at the worklet's global sampleRate — TODO confirm the
        // sender always posts stereo-interleaved data.
        const bufferMillis = this.audioBufferQueue.reduce((total, buf) => total + (buf.length / 2 / sampleRate) * 1000, 0);
        this.port.postMessage({
          type: 'audioBufferSize',
          size: this.audioBufferQueue.length,
          durationMs: bufferMillis
        });
      }
    };
  }

  // Called once per render quantum. Returns true to keep the node alive.
  process(inputs, outputs, parameters) {
    const output = outputs[0];
    const leftChannel = output ? output[0] : undefined;

    if (!leftChannel) {
      return true;
    }

    // BUGFIX: the previous expression 'output ? output[1] : leftChannel'
    // could never take its fallback branch — 'output' is always truthy once
    // the guard above has passed — so a mono output left rightChannel
    // undefined and the fill/assignment below threw a TypeError. Fall back
    // to the left channel buffer when no second channel exists.
    const rightChannel = output[1] || leftChannel;
    const samplesPerBuffer = leftChannel.length;

    if (this.audioBufferQueue.length === 0 && this.currentAudioData === null) {
      // Nothing buffered at all: emit silence.
      leftChannel.fill(0);
      rightChannel.fill(0);
      return true;
    }

    let data = this.currentAudioData;
    let offset = this.currentDataOffset;

    for (let sampleIndex = 0; sampleIndex < samplesPerBuffer; sampleIndex++) {
      if (!data || offset >= data.length) {
        if (this.audioBufferQueue.length > 0) {
          data = this.currentAudioData = this.audioBufferQueue.shift();
          offset = this.currentDataOffset = 0;
        } else {
          // Underrun mid-quantum: pad the remainder with silence.
          this.currentAudioData = null;
          this.currentDataOffset = 0;
          leftChannel.fill(0, sampleIndex);
          rightChannel.fill(0, sampleIndex);
          return true;
        }
      }

      // De-interleave: even offset -> left, odd offset -> right. If a
      // packet ends on an odd sample count, duplicate left into right.
      leftChannel[sampleIndex] = data[offset++];
      if (offset < data.length) {
        rightChannel[sampleIndex] = data[offset++];
      } else {
        rightChannel[sampleIndex] = leftChannel[sampleIndex];
      }
    }

    // Persist the read position; release the packet once fully consumed.
    this.currentDataOffset = offset;
    if (data && offset >= data.length) {
      this.currentAudioData = null;
      this.currentDataOffset = 0;
    }

    return true;
  }
}

registerProcessor('audio-frame-processor', AudioFrameProcessor);
`;
// Expose the worklet's port and mirror its buffer-health reports onto
// window so the rest of the page can poll them.
audioWorkletProcessorPort = audioWorkletNode.port;
audioWorkletProcessorPort.onmessage = (event) => {
  // The processor answers 'getBufferSize' requests with a packet count
  // and the buffered duration in milliseconds.
  if (event.data.type === 'audioBufferSize') {
    const { size, durationMs } = event.data;
    window.currentAudioBufferSize = size;
    window.currentAudioBufferDuration = durationMs;
  }
};
audioWorkletNode.connect(audioContext.destination);
0 commit comments