Audio
Play Audio
const audio = new Audio("audio_file.mp3"); // or a URL
audio.play();
audio.pause();
audio.currentTime; // current playback position, in seconds
audio.duration; // total length of the track, in seconds
audio.addEventListener("timeupdate", () => {}); // fires repeatedly as the playback position changes
audio.addEventListener("ended", () => {}); // fires once playback finishes
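A minimal sketch tying these together, assuming a hypothetical progress element with id "progress" on the page:

const audio = new Audio("audio_file.mp3");
const progress = document.getElementById("progress"); // assumed <progress> element
audio.addEventListener("timeupdate", () => {
  // duration can be NaN until the metadata has loaded
  if (audio.duration) {
    progress.value = audio.currentTime / audio.duration;
  }
});
audio.addEventListener("ended", () => console.log("Playback finished"));
audio.play(); // browsers may reject this without a prior user gesture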
Record Audio
JS
const stopButton = document.getElementById("stop");
const downloadLink = document.getElementById("download"); // link used to save the recording (assumed id)
const player = document.getElementById("player"); // <audio> element used for playback (assumed id)

const handleSuccess = function (stream) {
  console.log("Success");
  const options = { mimeType: "audio/webm" };
  const mediaRecorder = new MediaRecorder(stream, options);

  stopButton.onclick = () => {
    mediaRecorder.stop();
  };

  mediaRecorder.addEventListener("dataavailable", function (e) {
    if (e.data.size > 0) {
      console.log("Stopped", e);
      // Expose the recorded blob as a downloadable file
      const audioURL = URL.createObjectURL(e.data);
      downloadLink.href = audioURL;
      downloadLink.download = "help.webm";
      // Route the live stream to the on-page player
      if ("srcObject" in player) {
        player.srcObject = stream;
      } else {
        player.src = URL.createObjectURL(stream); // fallback for older browsers
      }
    }
  });

  mediaRecorder.start();
};

navigator.mediaDevices
  .getUserMedia({ audio: true, video: false })
  .then(handleSuccess);
Html
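No markup was included here; a minimal sketch of the elements the script above expects (the ids stop, download, and player are assumptions chosen to match the JS):

<button id="stop">Stop</button>
<a id="download">Download recording</a>
<audio id="player" controls></audio>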
Audio Viz
Audio Context
The Web Audio API lets you create, manipulate, analyze, and play audio.
If you don't want to play the audio (for example, you only want to process or render it), use OfflineAudioContext instead.
1. Create the audio context.
2. Inside the context, create sources, such as an <audio> element, an oscillator, or a stream.
3. Create effects nodes, such as reverb, biquad filter, panner, or compressor.
4. Choose the final destination for the audio, such as the user's computer speakers.
5. Connect the sources through zero or more effects, eventually ending at the chosen destination (see the sketch below).
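A minimal sketch of that workflow, using an oscillator source and a biquad filter; the node choices and parameter values are illustrative, not from these notes:

const audioContext = new AudioContext(); // 1. create the context

const oscillator = audioContext.createOscillator(); // 2. create a source
oscillator.type = "sine";
oscillator.frequency.value = 440; // A4, illustrative value

const filter = audioContext.createBiquadFilter(); // 3. create an effects node
filter.type = "lowpass";
filter.frequency.value = 1000; // illustrative cutoff

oscillator.connect(filter); // 5. source -> effect
filter.connect(audioContext.destination); // 5. effect -> destination (4. the speakers)

oscillator.start();
oscillator.stop(audioContext.currentTime + 1); // play for one second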
Edge Cases
Chrome requires a user interaction before an audio context will start: an AudioContext created without one begins in the "suspended" state and must be resumed after a user gesture.
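A common pattern is to resume the context inside a click handler; the button id here is an assumption:

const startButton = document.getElementById("start"); // assumed element
const audioContext = new AudioContext(); // may start out "suspended" in Chrome

startButton.addEventListener("click", async () => {
  if (audioContext.state === "suspended") {
    await audioContext.resume(); // allowed because this runs inside a user gesture
  }
  console.log(audioContext.state); // "running"
});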