"use strict";
|
|
|
|
(() => {
|
|
const TAU = 2 * Math.PI;

// Build buffers b1 and b2 such that
// the original buffer can be played
// in a loop where it overlaps itself
// by the duration specified in overlap.
// If the overlap is two units, the
// resulting buffers look like this:
//
// buffer: OOBBBBBOO
// b1:     OOBBBBBOO-----
// b2:     -----OOBBBBBOO
//
// B: Buffer data (plays normally)
// O: Affected by overlap
// -: Inserted silence
//
// By starting b2 overlap seconds after
// b1, overlapping looping playback is
// achieved:
//
// b1: OOBBBBBOO-----OOBBBBBOO-----
// b2:   -----OOBBBBBOO-----OOBBBBBOO
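//
// Worked example (numbers chosen for illustration): with
// a 10 s track and overlap = 2 s, nonOverlapDuration is
// 6 s, so b1 and b2 are each 16 s long and loop every
// 16 s. b2 starts 2 s after b1 and opens with 6 s of
// silence, so a fresh copy of the track begins every 8 s
// (duration - overlap), its fade-in coinciding with the
// previous copy's fade-out.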
function buildOverlappingBuffers (audioCtx, buffer, overlap) {
    const nonOverlapDuration = buffer.duration - 2 * overlap;
    const nonOverlapSamples = Math.floor(nonOverlapDuration * buffer.sampleRate);

    const totalLength = buffer.length + nonOverlapSamples;

    const b1 = audioCtx.createBuffer(buffer.numberOfChannels, totalLength, buffer.sampleRate);
    const b2 = audioCtx.createBuffer(buffer.numberOfChannels, totalLength, buffer.sampleRate);

    // TODO: May be faster using the copy methods on AudioBuffer
    for (let channel = 0; channel < buffer.numberOfChannels; channel++) {
        const channelBuffer = buffer.getChannelData(channel);
        const b1ChannelBuffer = b1.getChannelData(channel);
        const b2ChannelBuffer = b2.getChannelData(channel);

        for (let i = 0; i < channelBuffer.length; i++) {
            b1ChannelBuffer[i] = channelBuffer[i];
            b2ChannelBuffer[nonOverlapSamples + i] = channelBuffer[i];
        }
    }

    return { b1, b2 };
}
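
// Directional gain in [0, 1]: full volume while rotH is
// within TAU / 8 of the peak angle, falling off linearly
// to zero over the next TAU / 16.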
function baseGain (peak, rotH) {
    const diff = Math.abs(util.diffAngles(peak, rotH));

    if (diff < TAU / 8) {
        return 1;
    } else if (diff < TAU / 8 + TAU / 16) {
        return 1 - (diff - TAU / 8) / (TAU / 16);
    } else {
        return 0;
    }
}
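
// Directional gain attenuated linearly by the vertical
// rotation; silent once rotV reaches PI / 2.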
function gain (peak, rotH, rotV) {
    return baseGain(peak, rotH) * (1 - rotV / (Math.PI / 2));
}
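
// Draw a ring of radius r as 48 arc segments whose line
// widths are proportional to f at each segment's centre
// angle; fully silent segments are skipped.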
function drawRing (ctx, r, f) {
    const n = 48;
    for (let i = 0; i < n; i++) {
        const a = Math.PI * 2 * i / n;
        const a_ = Math.PI * 2 * (i + 1) / n;
        const g = f((a + a_) / 2);
        if (g === 0) continue;
        ctx.lineWidth = 0.1 * g;
        ctx.beginPath();
        ctx.arc(0, 0, r, a, a_);
        ctx.stroke();
    }
}
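
// Fetch and decode one track, build its two overlapping
// buffers, and start both sources looping into a shared
// gain node, which is returned for volume control.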
function loadTrack (audioCtx, trackHref) {
    // Overlap duration in seconds.
    const overlap = 2;

    return (
        fetch(trackHref)
        .then(response => response.arrayBuffer())
        .then(arrayBuffer => audioCtx.decodeAudioData(arrayBuffer))
        .then(track => {
            const { b1, b2 } = buildOverlappingBuffers(audioCtx, track, overlap);
            const gainNode = audioCtx.createGain();

            const s1 = audioCtx.createBufferSource();
            s1.buffer = b1;
            s1.loop = true;
            s1.connect(gainNode);
            s1.start();

            const s2 = audioCtx.createBufferSource();
            s2.buffer = b2;
            s2.loop = true;
            s2.connect(gainNode);
            // b2 must start overlap seconds after b1 so
            // the fade regions line up (see the diagram
            // above buildOverlappingBuffers).
            s2.start(audioCtx.currentTime + overlap);

            return gainNode;
        })
    );
}
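
// Load all tracks into a fresh AudioContext, logging
// progress, and resolve with the context and one gain
// node per track.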
function setupAudioStuff (trackHrefs) {
    const audioCtx = new AudioContext();
    return (
        Promise.all(trackHrefs.map((trackHref, i) =>
            loadTrack(audioCtx, trackHref)
            .then((track) => {
                log.innerHTML += `* Track ${i}\n`;
                return track;
            })
        ))
        .then(gains => {
            log.innerHTML += `All tracks received\n`;
            return {
                ctx: audioCtx,
                gains,
            };
        })
    );
}
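
// Resolve with the first eventKey event fired on target,
// rejecting if none arrives within waitTime milliseconds.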
function waitForEvent (target, eventKey, waitTime) {
    return new Promise((resolve, reject) => {
        const waitTimeout = setTimeout(() => reject(new Error(`${eventKey} didn't fire after ${waitTime}ms`)), waitTime);

        target.addEventListener(eventKey, (e) => {
            clearTimeout(waitTimeout);
            resolve(e);
        }, { once: true });
    });
}
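
// Request gyroscope permission (via the external util
// module) and wait for a first deviceorientation event
// as a sanity check.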
function setupGyroscope () {
    return (
        util.getGyroPermission()
        .then(response => {
            if (response !== "granted") {
                throw new Error("gyroscope permission denied");
            }

            return waitForEvent(window, "deviceorientation", 10000);
        })
    );
}
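
// Set the canvas transform so drawing coordinates span
// [-1, 1] on the shorter axis, centred, with y pointing up.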
function setupCanvas () {
    const ctx = canvas.getContext("2d");
    const scale = Math.min(canvas.width, canvas.height) / 2;
    ctx.setTransform(
        scale, 0,
        0, -scale,
        canvas.width / 2, canvas.height / 2
    );

    return ctx;
}
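
// Set all track volumes from the current orientation and
// redraw the overview: one ring per directional track, a
// heading line, and a level bar per gain node. Tracks 0
// and 5 crossfade over the top quarter of the scaled
// vertical rotation.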
function render (ctx, audio, rotH, rotV) {
    for (const gainNode of audio.gains) {
        gainNode.gain.value = 0;
    }

    const rotVScaled = 1 - rotV / (Math.PI / 2);
    audio.gains[0].gain.value = rotVScaled < 0.75 ? 1 : 1 - (rotVScaled - 0.75) / 0.25;
    audio.gains[1].gain.value = gain(0, rotH, rotV);
    audio.gains[2].gain.value = gain(Math.PI / 2, rotH, rotV);
    audio.gains[3].gain.value = gain(Math.PI, rotH, rotV);
    audio.gains[4].gain.value = gain(Math.PI * (3 / 2), rotH, rotV);
    audio.gains[5].gain.value = rotVScaled < 0.75 ? 0 : (rotVScaled - 0.75) / 0.25;

    ctx.clearRect(-1, -1, 2, 2);

    ctx.strokeStyle = "red";
    drawRing(ctx, 0.8, (a) => gain(0, a, rotV));

    ctx.strokeStyle = "blue";
    drawRing(ctx, 0.7, (a) => gain(Math.PI / 2, a, rotV));

    ctx.strokeStyle = "green";
    drawRing(ctx, 0.8, (a) => gain(Math.PI, a, rotV));

    ctx.strokeStyle = "orange";
    drawRing(ctx, 0.7, (a) => gain(Math.PI * (3 / 2), a, rotV));

    ctx.strokeStyle = "black";
    ctx.beginPath();
    ctx.moveTo(0, 0);
    ctx.lineTo(Math.cos(rotH), Math.sin(rotH));
    ctx.stroke();

    drawBar(ctx, -0.65, 0.8, 0.1, 0.2, "red", audio.gains[1].gain.value);
    drawBar(ctx, -0.25, 0.8, 0.1, 0.2, "blue", audio.gains[2].gain.value);
    drawBar(ctx, 0.15, 0.8, 0.1, 0.2, "green", audio.gains[3].gain.value);
    drawBar(ctx, 0.55, 0.8, 0.1, 0.2, "orange", audio.gains[4].gain.value);

    drawBar(ctx, -0.25, -1, 0.1, 0.2, "lime", audio.gains[0].gain.value);
    drawBar(ctx, 0.15, -1, 0.1, 0.2, "grey", audio.gains[5].gain.value);
}
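
// Draw a vertical level bar at (x, y): the bottom value
// fraction of its height in color, the rest in black.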
function drawBar (ctx, x, y, w, h, color, value) {
    const hScaled = h * value;
    ctx.fillStyle = color;
    ctx.fillRect(x, y, w, hScaled);
    ctx.fillStyle = "black";
    ctx.fillRect(x, y + hScaled, w, h - hScaled);
}
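
// Entry point. perm, log, canvas and error are DOM elements
// referenced through their ids. A click on perm loads the
// tracks, requests gyroscope access, connects the per-track
// gains to the output and re-renders on every
// deviceorientation event.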
perm.addEventListener("click", e => {
    setupAudioStuff([
        "Track1Fade.mp3",
        "Track2Fade.mp3",
        "Track3Fade.mp3",
        "Track4Fade.mp3",
        "Track5Fade.mp3",
        "Track6Fade.mp3",
    ])
    .then(audio => {
        log.innerHTML += "waiting for gyroscope permissions\n";
        return (
            setupGyroscope()
            .then(() => {
                return audio;
            })
        );
    })
    .then(audio => {
        document.body.dataset.state = "main";

        for (const gainNode of audio.gains) {
            gainNode.connect(audio.ctx.destination);
        }

        const ctx = setupCanvas();
        window.addEventListener("deviceorientation", e => {
            const alpha = util.deg2rad(e.alpha);
            const beta = util.deg2rad(e.beta);
            const gamma = util.deg2rad(e.gamma);

            const [screenNormal, phi, theta] = util.toPolarCoordinates(alpha, beta, gamma);
            // "horizontal rotation": phi offset by 90 degrees.
            // rotH ∈ [-π, π]
            const rotH = phi - Math.PI / 2;
            // "vertical rotation": theta mirrored at xy plane.
            // rotV ∈ [0, π / 2]
            const rotV = Math.abs(theta - Math.PI / 2);

            render(ctx, audio, rotH, rotV);
        });
    })
    .catch(err => {
        document.body.dataset.state = "error";
        error.innerHTML = err;
    });
}, { once: true });
})();