For a friend's master's thesis I have started creating a small video recorder app, so her participants can record themselves (also because I wanted to use this API 🙂 ).
I’m in the early stages of this side quest. I’ve scaffolded the Next.js app and so far have been able to record the MediaStream from the webcam and play it back to the user while the recording is running. Once I’m done with this, I want to continue with storing the video, etc.
Now my problem is: After the user finished recording, I want to give them the chance to look at the recording. However, if the user hits the “play back” button, the video player stays black and does not show the video as I would expect.
Here is how the video player looks after I’ve clicked the “Play Back” button:
I’ve already asserted the video blob exists after recording
I suspect that the video player is not able to process the blob URL. Do I have to append an extension? If so, how do I accomplish this here?
Some help would be much appreciated!
Kind regards,
Max
'use client'
import React, { useState, useRef } from 'react'
import RecorderControls from './RecorderControls/RecorderControls'
const Recorder = () => {
const videoRef = useRef(null);
const mediaRecorderRef = useRef(null);
const constraints = { audio: true, video: true }
const [videoBlob, setVideoBlob] = useState(null);
const [isRecording, setIsRecording] = useState (false);
const startRecord = async () => {
// Check if the browser supports the getUserMedia API
const userMediaDevices = navigator.mediaDevices;
if (!userMediaDevices) {
console.log("getUserMedia is not supported!");
return
}
// Get the video stream from the webcam, display it in the video element, and start playing it back to the user
const stream = await navigator.mediaDevices.getUserMedia(constraints);
videoRef.current.srcObject = stream;
videoRef.current.play();
mediaRecorderRef.current = new MediaRecorder(stream, {
mimeType: 'video/mp4',
});
// When the recording starts, create an array to store the video chunks while recording
const chunks = [];
mediaRecorderRef.current.ondataavailable = (e) => {
if (e.data.size > 0) {
chunks.push(e.data);
}
};
// When the recording stops, create a video blob and set it in the state
mediaRecorderRef.current.onstop = () => {
const blob = new Blob(chunks, { type: 'video/mp4' });
setVideoBlob(blob);
}
// Start recording
mediaRecorderRef.current.start();
setIsRecording(true);
console.log("Recording started");
};
const stopRecord = () => {
mediaRecorderRef.current.stop();
videoRef.current.srcObject.getTracks().forEach((track) => track.stop());
setIsRecording(false);
console.log("Recording stopped")
};
const playVideo = () => {
console.log(videoBlob);
if (videoBlob) {
const videoURL = URL.createObjectURL(videoBlob);
videoRef.current.src = videoURL;
videoRef.current.load();
videoRef.current.play();
}
};
return (
<>
<div className='mb-6 font-semibold text-xl'>Recorder Apparat</div>
<div className="bg-gray-700 w-96 h-96 rounded-lg">
<video ref={videoRef} width="640" height="480" />
</div>
<div className="mt-5"></div>
<div className='flex gap-3'>
<button onClick={startRecord} className='btn btn-primary btn-outline'>
record
</button>
<button onClick={stopRecord} className='btn btn-primary btn-outline'>
stop
</button>
<button onClick={playVideo} className='btn btn-primary btn-outline'>
playback
</button>
</div>
<RecorderControls />
</>
)
}
export default Recorder