var CHUNK_SIZE = 4096;
// `file` and `filePos` track the File currently being streamed and the read
// offset into it; they are set in loadFile() and by the chunk-loading code.
var file;
var filePos = 0;
var audioContext;
var audioSource;
var audiobuffer = [];
var audioEl;
var mediaSource;
var sourceBuffer;
var isMSEStart = false;
var audioDecoder = 0; // 0 - Web Audio (AudioContext); 1 - Media Source Extensions
var scriptBuffer;
var scriptPos = 0;
const DECODER_WEBAUDIO = 0;
const DECODER_MSE = 1;
function setAudioDecoder(decoder) {
    audioDecoder = decoder;
}
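// Usage: call setAudioDecoder(DECODER_WEBAUDIO) or setAudioDecoder(DECODER_MSE)
// before playAudio() to pick the playback path.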
// function handleAudioFiles(files) {
// var file_list = files;
// var file_idx = 0;
// loadFile(file_list, file_idx);
// }
function initWebAudio() {
    console.log('Init audio context');
    audioContext = new (window.AudioContext || window.webkitAudioContext)();
}
function initMSE() {
    // Stream compressed audio through a MediaSource attached to a hidden <audio>
    // element, then route that element through the Web Audio graph.
    mediaSource = new MediaSource();
    mediaSource.addEventListener('sourceopen', sourceOpenCallback, false);
    mediaSource.addEventListener('webkitsourceopen', sourceOpenCallback, false);
    mediaSource.addEventListener('sourceclose', sourceCloseCallback, false);
    mediaSource.addEventListener('webkitsourceclose', sourceCloseCallback, false);
    mediaSource.addEventListener('sourceended', sourceEndedCallback, false);
    mediaSource.addEventListener('webkitsourceended', sourceEndedCallback, false);
    audioEl = document.createElement('audio');
    audioEl.src = window.URL.createObjectURL(mediaSource);
    initWebAudio();
    var source = audioContext.createMediaElementSource(audioEl);
    source.connect(audioContext.destination);
}
function sourceOpenCallback() {
    console.log('Media Source Ready');
    // The MIME type must match the container/codec of the streamed data
    // ('audio/aac' was the alternative; some browsers also need a codecs= parameter).
    sourceBuffer = mediaSource.addSourceBuffer('audio/ogg');
    sourceBuffer.addEventListener('updateend', updateEndCallback, false);
    loadNextChunk();
}
function sourceCloseCallback() {
    console.log('Media Source closed');
}
function sourceEndedCallback() {
    console.log('Media Source ended');
}
function updateEndCallback() {
    console.log('load next buffer in update end');
    loadNextBuffer();
}
function loadFile(file_list, file_idx) {
    if (file_idx >= file_list.length)
        return;
    file = file_list[file_idx];
    // Widen CHUNK_SIZE to the full file size, so the file is read in one chunk.
    CHUNK_SIZE = file.size;
}
function showCurrentTime() {
    console.log(new Date(), audioContext.currentTime);
}
function playAudio() {
    setInterval(showCurrentTime, 1000);
    if (audioDecoder === DECODER_WEBAUDIO) {
        loadNextChunk();
    } else if (audioDecoder === DECODER_MSE) {
        initMSE();
    }
}
function fastForward() {
    console.log('fast forward');
    // Jump ahead 50000 sample frames in the decoded script-processor buffer.
    scriptPos += 50000;
}
// FileReader onload handler for the Web Audio path: accumulate chunks,
// re-decode the merged buffer, and restart playback from the current position.
function webAudioCallback() {
    audiobuffer.push(this.result);
    var audioBuffer = mergeBuffer(audiobuffer, filePos);
    console.log('File buffer size = ', audioBuffer.byteLength);
    if (!audioContext) {
        initWebAudio();
    }
    audioContext.decodeAudioData(audioBuffer, buffer => {
        console.log(buffer);
        playAudioBuffer(buffer);
    });
    setTimeout(function() {
        loadNextChunk();
    }, 100);
}
// Handler for the ScriptProcessor path: decode the merged chunks once and let
// a ScriptProcessorNode copy samples out of the decoded buffer on each
// audio callback, reading from scriptPos.
function scriptNodeCallback(data, len) {
    audiobuffer.push(data);
    // Merge every chunk received so far into one contiguous ArrayBuffer.
    var audioBuffer = mergeBuffer(audiobuffer, len);
    console.log('File buffer size = ', audioBuffer.byteLength);
    if (!audioContext) {
        initWebAudio();
        // Note: ScriptProcessorNode is deprecated in favour of AudioWorklet,
        // but it is kept here to match the original approach.
        let bufferSize = 16384;
        let node = audioContext.createScriptProcessor(bufferSize, 2, 2);
        // audioSource = audioContext.createBufferSource();
        node.onaudioprocess = audioProcessingEvent => {
            // let left = event.outputBuffer.getChannelData(0);
            // let right = event.outputBuffer.getChannelData(1);
            // if(scriptBuffer) {
            //     event.outputBuffer.copyToChannel(scriptBuffer.getChannelData(0), 0, 0)
            //     event.outputBuffer.copyToChannel(scriptBuffer.getChannelData(1), 1, 0)
            // }
            if (scriptBuffer) {
                var inputBuffer = scriptBuffer;
                var outputBuffer = audioProcessingEvent.outputBuffer;
                var lastLength = scriptBuffer.length - scriptPos;
                var currentPos = scriptPos;
                // Copy at most one processing block, then advance the read cursor.
                let copyLength = Math.min(lastLength, outputBuffer.length);
                scriptPos += copyLength;
                for (var channel = 0; channel < outputBuffer.numberOfChannels; channel++) {
                    var inputData = inputBuffer.getChannelData(channel);
                    var outputData = outputBuffer.getChannelData(channel);
                    for (var sample = 0; sample < copyLength; sample++) {
                        outputData[sample] = inputData[sample + currentPos];
                    }
                }
            }
        };
        // audioSource.connect(node)
        node.connect(audioContext.destination);
        // audioSource.start()
    }
    audioContext.decodeAudioData(audioBuffer, buffer => {
        console.log(buffer);
        scriptBuffer = buffer;
        // playAudioBuffer(buffer);
    });
    setTimeout(function() {
        loadNextChunk();
    }, 100);
}
// Handler for the MSE path: queue the chunk, feed the SourceBuffer when it is
// idle, and start <audio> playback on the first chunk.
function mseCallback(data) {
    audiobuffer.push(data);
    if (!sourceBuffer.updating) {
        console.log('load next buffer in mse call back');
        loadNextBuffer();
    }
    if (!isMSEStart) {
        isMSEStart = true;
        startMSEPlay();
    }
    setTimeout(function() {
        loadNextChunk();
    }, 100);
}
function loadNextBuffer() {
    if (audiobuffer.length) {
        sourceBuffer.appendBuffer(audiobuffer.shift());
    }
    // Close the stream once the whole file has been read and appended.
    if (filePos === file.size && !sourceBuffer.updating) {
        console.log('End Media Source');
        mediaSource.endOfStream();
    }
}
function startMSEPlay() {
    if (audioEl.paused) {
        audioEl.play();
    }
}
// Chunk loading is currently stubbed out; the original FileReader-based body
// is kept below for reference.
function loadNextChunk() {
    // var reader = new FileReader();
    // reader.onload = audioDecoder === DECODER_WEBAUDIO ? scriptNodeCallback : mseCallback
    // var i_stream_size = read_file_slice(reader, file, filePos, CHUNK_SIZE);
    // filePos += i_stream_size;
    // console.log('Load file size', i_stream_size)
}
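// A minimal sketch of the read_file_slice helper referenced in the stub above.
// Its behaviour is an assumption reconstructed from the call site
// (reader, file, read position, chunk size); the real helper may differ.
function read_file_slice(reader, file, pos, chunkSize) {
    var end = Math.min(pos + chunkSize, file.size);
    // reader.onload is expected to be set by the caller; the handler then
    // receives the chunk as this.result (an ArrayBuffer).
    reader.readAsArrayBuffer(file.slice(pos, end));
    return end - pos; // number of bytes scheduled for reading
}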
// Concatenate two ArrayBuffers; returns buffer2 unchanged when buffer1 is
// empty or undefined. (Not called anywhere in this file.)
function appendBuffer(buffer1, buffer2) {
    var result = buffer2;
    if (buffer1) {
        var tmp = new Uint8Array(buffer1.byteLength + buffer2.byteLength);
        var buff1 = new Uint8Array(buffer1);
        var buff2 = new Uint8Array(buffer2);
        tmp.set(buff1, 0);
        tmp.set(buff2, buffer1.byteLength);
        result = tmp.buffer;
    }
    return result;
}
// Merge an array of ArrayBuffer chunks into a single ArrayBuffer of `size` bytes.
function mergeBuffer(arr, size) {
    var res = new Uint8Array(size);
    var pos = 0;
    for (let i = 0; i < arr.length; i++) {
        var tmp = new Uint8Array(arr[i]);
        res.set(tmp, pos);
        pos += tmp.byteLength;
    }
    return res.buffer;
}
function playAudioBuffer(audioBuffer) {
    // Adding a bit of scheduling so that we won't have single-digit millisecond overlaps.
    // Thanks to Chris Wilson for his suggestion.
    var scheduledTime = 0.015;
    if (audioSource) {
        // Stop the previous source slightly in the future so the restart overlaps cleanly.
        audioSource.stop(scheduledTime);
    }
    audioSource = audioContext.createBufferSource();
    audioSource.buffer = audioBuffer;
    audioSource.connect(audioContext.destination);
    var currentTime = audioContext.currentTime + 0.010 || 0;
    var startTime = scheduledTime - 0.005;
    // Resume from the current context time so the re-decoded buffer continues
    // roughly where the previous one left off.
    var offset = currentTime;
    var duration = audioBuffer.duration - currentTime;
    console.log(startTime, offset, duration);
    audioSource.start(startTime, offset);
    // audioSource.playbackRate.value = 1;
}
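// Example wiring, as a sketch: the element ids below are assumptions and not
// part of this file. Pick a file, choose a decoder, then start playback.
var fileInput = document.getElementById('audioFile'); // hypothetical <input type="file">
var playBtn = document.getElementById('playBtn');     // hypothetical <button>
if (fileInput && playBtn) {
    fileInput.addEventListener('change', function(e) {
        loadFile(e.target.files, 0);
    });
    playBtn.addEventListener('click', function() {
        setAudioDecoder(DECODER_WEBAUDIO); // or DECODER_MSE
        playAudio();
    });
}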