I want to write a simple page in HTML and JS/jQuery that records the user's microphone together with a background audio track (an instrumental song). How can I do this?
Here is what I have tried:
When I click "Start Recording", the background (system) audio becomes muted, and after I click "Stop Recording" the recording contains nothing.
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Sing Along App</title>
</head>
<body>
<h1>Sing Along with Background Music</h1>
<!-- Background instrumental track; routed through the Web Audio graph by recording.js. -->
<audio id="background-audio" controls>
<!-- FIX: the registered MIME type for MP3 is "audio/mpeg", not "audio/mp3". -->
<source src="instrumental.mp3" type="audio/mpeg">
Your browser does not support the audio element.
</audio>
<div>
<button id="start-recording">Start Recording</button>
<button id="stop-recording" disabled>Stop Recording</button>
</div>
<h2>Recorded Audio</h2>
<!-- Playback element for the mixed (mic + instrumental) recording. -->
<audio id="recorded-audio" controls></audio>
<script src="recording.js"></script>
</body>
</html>
document.addEventListener('DOMContentLoaded', () => {
  // --- DOM elements -------------------------------------------------------
  const backgroundAudio = document.getElementById('background-audio');
  const startRecordingButton = document.getElementById('start-recording');
  const stopRecordingButton = document.getElementById('stop-recording');
  const recordedAudio = document.getElementById('recorded-audio');

  // Web Audio context. Under browser autoplay policies it is created in the
  // "suspended" state; every node routed through a suspended context is
  // SILENT. That is why the background track went mute: once
  // createMediaElementSource() reroutes the <audio> element through this
  // context, the element no longer plays directly to the speakers.
  const audioContext = new (window.AudioContext || window.webkitAudioContext)();

  // BUGFIX: createMediaElementSource() may be called at most ONCE per media
  // element — a second call throws InvalidStateError, which is why the
  // second recording attempt failed. Create the node lazily and reuse it.
  let backgroundAudioSource = null;

  let mediaRecorder = null;
  let recordedChunks = [];

  startRecordingButton.addEventListener('click', async () => {
    try {
      // Request the microphone first so a permission denial aborts cleanly
      // before we touch the audio graph.
      const micStream = await navigator.mediaDevices.getUserMedia({ audio: true });

      // BUGFIX: resume the context from within this user-gesture handler.
      // Without this, the suspended context silences the background track.
      await audioContext.resume();

      // BUGFIX: create the element source only on the first recording;
      // reuse it on subsequent recordings.
      backgroundAudioSource ??= audioContext.createMediaElementSource(backgroundAudio);

      const micSource = audioContext.createMediaStreamSource(micStream);
      // Destination node whose .stream captures the mixed audio.
      const destination = audioContext.createMediaStreamDestination();

      // Routing: the speakers hear only the instrumental; the recording
      // receives both the instrumental and the microphone.
      // BUGFIX: do NOT connect micSource to audioContext.destination —
      // playing the live mic through the speakers causes an echo/feedback loop.
      backgroundAudioSource.connect(audioContext.destination);
      backgroundAudioSource.connect(destination);
      micSource.connect(destination);

      // Start a fresh take.
      recordedChunks = [];
      mediaRecorder = new MediaRecorder(destination.stream);

      mediaRecorder.ondataavailable = (event) => {
        if (event.data.size > 0) {
          recordedChunks.push(event.data);
        }
      };

      mediaRecorder.onstop = () => {
        const recordedBlob = new Blob(recordedChunks, { type: 'audio/webm' });
        recordedChunks = [];
        // Release the previous take's object URL before replacing it,
        // otherwise each recording leaks a blob in memory.
        if (recordedAudio.src) {
          URL.revokeObjectURL(recordedAudio.src);
        }
        recordedAudio.src = URL.createObjectURL(recordedBlob);
        recordedAudio.controls = true;
        // Stop the mic tracks so the browser's recording indicator clears.
        micStream.getTracks().forEach((track) => track.stop());
      };

      mediaRecorder.start();
      // play() returns a promise; await it so autoplay rejections surface
      // in the catch block instead of being silently dropped.
      await backgroundAudio.play();

      startRecordingButton.disabled = true;
      stopRecordingButton.disabled = false;
    } catch (error) {
      console.error('Error starting recording:', error);
      alert('Could not access the microphone. Please check your permissions.');
    }
  });

  stopRecordingButton.addEventListener('click', () => {
    if (mediaRecorder && mediaRecorder.state === 'recording') {
      // Stopping the recorder fires onstop, which builds the playback blob.
      mediaRecorder.stop();
      backgroundAudio.pause();
      startRecordingButton.disabled = false;
      stopRecordingButton.disabled = true;
    } else {
      console.warn('MediaRecorder is not recording.');
    }
  });
});
New contributor
shruti prajapati is a new contributor to this site. Take care in asking for clarification, commenting, and answering.
Check out our Code of Conduct.