Below is the RoomClient code:
async start() {
let room_id = this.room_id;
let name = this.name;
await this.socket.request('start', { room_id, name }).catch((err: any) => {
console.log('Start debate error:', err);
});
}
async createRoom(room_id: string, name: string): Promise<void> {
try {
await this.socket.request('createRoom', {
room_id,
name
});
} catch (err) {
console.log('Create room error:', err);
}
}
async join(name: string, room_id: string) {
try {
const e: any = await this.socket.request('join', { name, room_id });
if (e.hasOwnProperty('error')) {
return Promise.reject(new Error('The room is full!'));
}
console.log('Joined to room', e);
const data = await this.socket.request('getRouterRtpCapabilities');
const device = await this.loadDevice(data);
this.device = device;
await this.initTransports(device);
this.socket.emit('getProducers');
} catch (error) {
console.error('Failed to load device:', error);
alert('Failed to join the room');
}
}
async loadDevice(routerRtpCapabilities: mediasoupClient.types.RtpCapabilities): Promise<mediasoupClient.Device> {
let device: mediasoupClient.Device;
console.log("check")
try {
device = new mediasoupClient.Device();
} catch (error: any) {
if (error.name === 'UnsupportedError') {
console.error('Browser not supported');
alert('Browser not supported');
throw error;
} else {
console.error(error);
throw error;
}
}
await device.load({ routerRtpCapabilities });
return device;
}
async initTransports(device: mediasoupClient.Device) {
// init producerTransport
{
const data = await this.socket.request('createWebRtcTransport', {
forceTcp: false,
rtpCapabilities: device.rtpCapabilities
});
if (data.error) {
console.error(data.error);
return;
}
this.producerTransport = device.createSendTransport(data);
this.producerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
this.socket
.request('connectTransport', {
dtlsParameters,
transport_id: data.id
})
.then(callback)
.catch(errback);
});
this.producerTransport.on('produce', async ({ kind, rtpParameters }, callback, errback) => {
try {
const { producer_id } = await this.socket.request('produce', {
producerTransportId: this.producerTransport!.id,
kind,
rtpParameters
});
callback({ id: producer_id });
} catch (err:any) {
errback(err);
}
});
this.producerTransport.on('connectionstatechange', (state) => {
switch (state) {
case 'connecting':
break;
case 'connected':
break;
case 'failed':
this.producerTransport!.close();
break;
default:
break;
}
});
}
// init consumerTransport
{
const data = await this.socket.request('createWebRtcTransport', {
forceTcp: false
});
if (data.error) {
console.error(data.error);
return;
}
this.consumerTransport = device.createRecvTransport(data);
this.consumerTransport.on('connect', ({ dtlsParameters }, callback, errback) => {
this.socket
.request('connectTransport', {
transport_id: this.consumerTransport!.id,
dtlsParameters
})
.then(callback)
.catch(errback);
});
this.consumerTransport.on('connectionstatechange', async (state) => {
switch (state) {
case 'connecting':
break;
case 'connected':
break;
case 'failed':
this.consumerTransport!.close();
break;
default:
break;
}
});
}
}
initSockets() {
this.socket.on('rule', (data: any) => {
console.log(new Date().toISOString(), data);
});
this.socket.on('consumerClosed', ({ consumer_id }: { consumer_id: string }) => {
console.log('Closing consumer:', consumer_id);
this.removeConsumer(consumer_id);
});
this.socket.on('addUser', (data: any) => {
console.log('New user:', data);
});
this.socket.on('swapUser', (data: any) => {
console.log('Swap user:', data);
});
this.socket.on('removeUser', (data: any) => {
console.log('Remove user:', data);
});
this.socket.on('newProducers', async (data: any) => {
console.log('New producers', data);
for (let { producer_id } of data) {
await this.consume(producer_id);
}
});
this.socket.on('disconnect', () => {
this.exit(true);
});
}
//////// MAIN FUNCTIONS (called when entering the debate room) /////////////
async produce(type: string, deviceId: string | null = null) {
console.log("produceproduce");
let mediaConstraints: any = {};
let audio = false;
let screen = false;
switch (type) {
case mediaType.audio:
mediaConstraints = {
audio: {
deviceId: deviceId
},
video: false
};
audio = true;
break;
case mediaType.video:
mediaConstraints = {
audio: false,
video: {
width: {
min: 640,
ideal: 640
},
height: {
min: 480,
ideal: 480
},
deviceId: deviceId
}
};
break;
case mediaType.screen:
mediaConstraints = false;
screen = true;
break;
default:
return;
}
if (!this.device!.canProduce('video') && !audio) {
console.error('Cannot produce video');
return;
}
if (this.producerLabel.has(type)) {
console.log('Producer already exists for this type ' + type);
return;
}
console.log('Media constraints:', mediaConstraints);
let stream;
try {
stream = screen ? await (navigator.mediaDevices as any).getDisplayMedia() : await navigator.mediaDevices.getUserMedia(mediaConstraints);
console.log(navigator.mediaDevices.getSupportedConstraints());
const track = audio ? stream.getAudioTracks()[0] : stream.getVideoTracks()[0];
const params = {
track
};
const producer = await this.producerTransport!.produce(params);
console.log('Producer', producer);
this.producers.set(producer.id, producer);
let elem: HTMLVideoElement;
if (!audio) {
elem = document.createElement('video');
elem.srcObject = stream;
elem.id = producer.id;
elem.playsInline = false;
elem.autoplay = true;
elem.className = 'vid';
this.localMediaEl.appendChild(elem);
console.log(elem+ "일단은 눈에 띄어야한다")
}
producer.on('trackended', () => {
this.closeProducer(type);
});
producer.on('transportclose', () => {
console.log('Producer transport closed');
if (!audio) {
elem!.parentNode!.removeChild(elem!);
}
this.producers.delete(producer.id);
});
producer.on('@close', () => {
console.log('Closing producer');
if (!audio) {
elem!.parentNode!.removeChild(elem!);
}
this.producers.delete(producer.id);
});
this.producerLabel.set(type, producer.id);
switch (type) {
case mediaType.audio:
this.event(_EVENTS.startAudio);
break;
case mediaType.video:
this.event(_EVENTS.startVideo);
break;
case mediaType.screen:
this.event(_EVENTS.startScreen);
break;
default:
return;
}
} catch (err) {
console.log(err);
}
}
async consume(producer_id: string) {
try {
const { consumer, stream, kind } = await this.getConsumeStream(producer_id);
this.consumers.set(consumer.id, consumer);
let elem: HTMLVideoElement | HTMLAudioElement;
if (kind === 'video') {
elem = document.createElement('video');
elem.srcObject = stream;
elem.id = consumer.id;
if ("playsInline" in elem) {
elem.playsInline = false;
} // For video
elem.autoplay = true;
elem.className = 'vid';
this.remoteVideoEl.appendChild(elem);
console.log(elem+ "일단은 눈에 띄어야한다 비디오2")
} else {
elem = document.createElement('audio');
elem.srcObject = stream;
elem.id = consumer.id;
elem.autoplay = true;
// Handle playsInline property for audio
if ("playsInline" in elem) {
// Some browsers might support it, so set it to false if available
elem.playsInline = false;
}
this.remoteAudioEl.appendChild(elem);
console.log(elem+ "일단은 눈에 띄어야한다33333333")
}
consumer.on('trackended', () => {
this.removeConsumer(consumer.id);
});
consumer.on('transportclose', () => {
this.removeConsumer(consumer.id);
});
} catch (error) {
console.error('Error consuming stream:', error);
}
}
async getConsumeStream(producerId: string): Promise<ConsumeResult> {
const { rtpCapabilities } = this.device!;
const data = await this.socket.request('consume', {
rtpCapabilities,
consumerTransportId: this.consumerTransport!.id,
producerId
});
const { id, kind, rtpParameters } = data;
const consumer = await this.consumerTransport!.consume({
id,
producerId,
kind,
rtpParameters,
});
const stream = new MediaStream();
stream.addTrack(consumer.track);
return {
consumer,
stream,
kind
};
}
In plain HTML I can display the remoteVideoEl and localMediaEl containers that the produce and consume functions of the RoomClient code append to directly on the screen, but I don't know how to display them from a React component.
In the code below I'm showing my own video through videoRef; instead of this, please tell me how to render the remote videos obtained from the RoomClient code in the A1 to B3 video elements. (I sketched one idea I'm considering after the component code, but I'm not sure it's the right approach.)
import React, {useEffect, useRef, useState} from 'react';
import {useLocation, useNavigate} from "react-router-dom";
import "./DebateRoom.css"
import RoomClient from "../../socket/RoomClient";
import {rc} from "../../socket/socket";
interface DebateRoomProps {
onLeave: () => void;
}
const DebateRoom: React.FC<DebateRoomProps> = ({onLeave}) => {
const navigate = useNavigate();
const location = useLocation();
const { selectedMicDevice, selectedAudioDevice, selectedVideoDevice } = location.state;
const videoRef = useRef<HTMLVideoElement>(null);
useEffect(() => {
const initVideo = async () => {
try {
console.log("device start")
const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
if (videoRef.current) {
console.log(selectedVideoDevice + " video");
console.log(selectedAudioDevice + " audio");
await rc.produce(RoomClient.mediaType.video, selectedVideoDevice);
await rc.produce(RoomClient.mediaType.audio, selectedAudioDevice);
videoRef.current.srcObject = stream;
}
} catch (error) {
console.error('Error accessing local media:', error);
}
};
initVideo();
}, []);
return (
<div>
<div className="AteamArea">
<div className="A1">
<video className="Cam" ref={videoRef} autoPlay width="526px" height="332px" ></video>
</div>
<div className="A2">
<video className="Cam" ref={videoRef} autoPlay width="526px" height="332px" ></video>
</div>
<div className="A3">
<video className="Cam" ref={videoRef} autoPlay width="526px" height="332px" ></video>
</div>
</div>
<div className="BteamArea">
<div className="B1">
<video className="Cam" ref={videoRef} autoPlay width="526px" height="332px" ></video>
</div>
<div className="B2">
<video className="Cam" ref={videoRef} autoPlay width="526px" height="332px" ></video>
</div>
<div className="B3">
<video className="Cam" ref={videoRef} autoPlay width="526px" height="332px" ></video>
</div>
</div>
</div>
);
};
export default DebateRoom;
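One idea I'm considering (I'm not sure it's the right direction) is to stop letting RoomClient append video elements itself and instead have consume() hand each MediaStream back through a callback, keep the streams in React state, and attach them to the A1 to B3 slots with ref callbacks. The onRemoteStream callback below is something I would have to add to RoomClient myself; it does not exist in the code above. A rough sketch:
import React, { useEffect, useState } from 'react';
import { rc } from '../../socket/socket';

const RemoteVideos: React.FC = () => {
  // Remote streams received from RoomClient.consume(), in arrival order.
  const [remoteStreams, setRemoteStreams] = useState<MediaStream[]>([]);

  useEffect(() => {
    // Hypothetical hook: consume() would call rc.onRemoteStream(stream)
    // instead of appending a <video> element to remoteVideoEl.
    if (rc) {
      rc.onRemoteStream = (stream: MediaStream) => {
        setRemoteStreams((prev) => [...prev, stream]);
      };
    }
  }, []);

  return (
    <div className="AteamArea">
      {['A1', 'A2', 'A3'].map((slot, i) => (
        <div className={slot} key={slot}>
          <video
            className="Cam"
            autoPlay
            playsInline
            width="526px"
            height="332px"
            ref={(el) => {
              // Attach the i-th remote stream to this slot once it arrives.
              if (el && remoteStreams[i]) el.srcObject = remoteStreams[i];
            }}
          />
        </div>
      ))}
    </div>
  );
};

export default RemoteVideos;
Would something along these lines work, or is there a better way to map each consumer to a fixed slot?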
This is the code I use to create the RoomClient and navigate to the DebateRoom:
const onClickCreate = async () => {
navigate('/debateRoom', {
state: {
name,
selectedMicDevice: selectedMicDeviceRef.current?.value,
selectedAudioDevice: selectedAudioDeviceRef.current?.value,
selectedVideoDevice: selectedVideoDeviceRef.current?.value,
}
});
console.log(selectedAudioDeviceRef.current);
console.log(selectedVideoDeviceRef.current);
if (selectedAudioDeviceRef.current&&selectedVideoDeviceRef.current) {
console.log(remoteVideoEl, 'checking remoteVideoEl');
joinRoom(name, "frontend", selectedAudioDeviceRef.current, selectedVideoDeviceRef.current, localMediaEl, remoteVideoEl, remoteAudioEl, mediasoupClient, "http://localhost:3001", successCallback);
}
console.log(name);
};
And this is the socket code where rc is set up:
import { Socket } from 'socket.io-client';
import RoomClient from "./RoomClient";
interface SocketData {
error?: string;
[key: string]: any;
}
export interface CustomSocket extends Socket {
request: <T = any>(type: string, data?: object) => Promise<T>;
}
export const setupSocket = (socket: Socket): CustomSocket => {
const customSocket = socket as CustomSocket;
customSocket.request = function request<T = any>(type: string, data = {}): Promise<T> {
return new Promise((resolve, reject) => {
this.emit(type, data, (response: SocketData) => {
if (response.error) {
reject(response.error);
} else {
resolve(response as T);
}
});
});
};
return customSocket;
};
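// For reference, this is how the wrapper is called elsewhere (e.g. in RoomClient):
// it emits the event with an acknowledgement callback and resolves with the
// server response, or rejects if the response contains an "error" field.
//   const caps = await socket.request('getRouterRtpCapabilities');
//   await socket.request('createRoom', { room_id, name });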
export let producer: any = null;
export let rc: any = null;
export let isEnumerateDevices = false;
export function start() {
rc.start();
}
export function joinRoom(name: string, room_id: string, audioSelect: HTMLSelectElement, videoSelect: HTMLSelectElement, localMediaEl: HTMLVideoElement,
remoteVideoEl: HTMLVideoElement,
remoteAudioEl: HTMLAudioElement,
mediasoupClientInstance: any,
socketUrl: string,
successCallback: () => void) {
if (rc && rc.isOpen()) {
console.log('Already connected to a room');
} else {
initEnumerateDevices(audioSelect,videoSelect);
rc = new RoomClient(localMediaEl, remoteVideoEl, remoteAudioEl, mediasoupClientInstance, socketUrl, room_id, name, successCallback);
}
}
export function initEnumerateDevices(audioSelect: HTMLSelectElement, videoSelect: HTMLSelectElement) {
console.log("initinitinit0")
// Many browsers cannot enumerate devices until getUserMedia consent has been granted.
if (isEnumerateDevices) return;
const constraints = {
audio: true,
video: true
};
navigator.mediaDevices
.getUserMedia(constraints)
.then((stream: MediaStream) => {
enumerateDevices(audioSelect, videoSelect);
stream.getTracks().forEach(function (track: MediaStreamTrack) {
track.stop();
});
})
.catch((err: any) => {
console.error('Access denied for audio/video: ', err);
});
}
export function enumerateDevices(audioSelect: HTMLSelectElement, videoSelect: HTMLSelectElement) {
// Load mediaDevice options
navigator.mediaDevices.enumerateDevices().then((devices: MediaDeviceInfo[]) =>
devices.forEach((device) => {
let el = null;
if ('audioinput' === device.kind) {
el = audioSelect;
} else if ('videoinput' === device.kind) {
el = videoSelect;
}
if (!el) return;
let option = document.createElement('option');
option.value = device.deviceId || '';
option.innerText = device.label || '';
el.appendChild(option);
isEnumerateDevices = true;
})
);
}