// popa-launcher/src/renderer/realtime/voice/useVoiceRoom.ts

import { useCallback, useRef } from 'react';
import { rtcConfig } from './rtcConfig';
import type { WSMessage } from './types';
import { setVoiceState, getVoiceState } from './voiceStore';

type PeerMap = Map<string, RTCPeerConnection>;
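/**
 * Voice-room hook: captures the local microphone, joins a signaling room over
 * WebSocket, and maintains a full mesh of RTCPeerConnections (one per remote
 * participant). Remote audio is attached to <audio data-user="..."> elements
 * appended to document.body; connection, participant, and mute state live in
 * the shared voiceStore rather than in component state.
 */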
export function useVoiceRoom(username: string) {
  const wsRef = useRef<WebSocket | null>(null);
  const peersRef = useRef<PeerMap>(new Map());
  const streamRef = useRef<MediaStream | null>(null);
  const currentRoomIdRef = useRef<string | null>(null);
  const reconnectTimeout = useRef<number | null>(null);
  const pendingIceRef = useRef<Map<string, RTCIceCandidateInit[]>>(new Map());

  // --- connect ---
  const connect = useCallback(
    async (roomId: string) => {
      if (wsRef.current) return;
      currentRoomIdRef.current = roomId;

      // 1. microphone
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: true,
          noiseSuppression: true,
          autoGainControl: true,
        },
      });
      streamRef.current = stream;

      // 2. websocket
      const ws = new WebSocket(
        `wss://minecraft.api.popa-popa.ru/ws/voice?room_id=${roomId}&username=${username}`,
      );
      wsRef.current = ws;

      ws.onopen = () => {
        setVoiceState({
          connected: true,
          shouldBeConnected: true,
          participants: [username],
        });
      };
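      // Reconnect policy: every close runs cleanup() and, as long as
      // shouldBeConnected is still set in the store, retries the same room
      // after 1.5 s. disconnect() clears the flag first, so leaving on
      // purpose does not trigger a reconnect.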
      ws.onclose = () => {
        cleanup();
        setVoiceState({ connected: false });
        if (getVoiceState().shouldBeConnected) {
          reconnectTimeout.current = window.setTimeout(() => {
            const lastRoomId = currentRoomIdRef.current;
            if (lastRoomId) {
              connect(lastRoomId);
            }
          }, 1500);
        }
      };
      ws.onmessage = async (ev) => {
        const msg: WSMessage = JSON.parse(ev.data);

        if (msg.type === 'join' && msg.user !== username) {
          await createPeer(msg.user, false);
          const { participants } = getVoiceState();
          if (!participants.includes(msg.user)) {
            setVoiceState({
              participants: [...participants, msg.user],
            });
          }
        }

        if (msg.type === 'leave') {
          removePeer(msg.user);
          setVoiceState({
            participants: getVoiceState().participants.filter(
              (u) => u !== msg.user,
            ),
          });
        }

        if (msg.type === 'signal') {
          await handleSignal(msg.from, msg.data);
        }

        if (msg.type === 'users') {
          const current = getVoiceState().participants;
          const next = msg.users;
          // 1. drop peers for users who have left
          for (const user of current) {
            if (!next.includes(user)) {
              removePeer(user);
            }
          }
          // 2. create peers for users we are not connected to yet
          for (const user of next) {
            if (user !== username && !peersRef.current.has(user)) {
              await createPeer(user, true);
            }
          }
          // 3. update the store
          setVoiceState({
            participants: next,
          });
        }
      };
    },
    [username],
  );

  // --- create peer ---
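  // The `polite` flag decides who sends the initial offer: the existing
  // participant reacting to a 'join' message calls createPeer(user, false)
  // and makes the offer, while the newcomer processing the 'users' snapshot
  // calls createPeer(user, true) and waits for that offer instead.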
  const createPeer = async (user: string, polite: boolean) => {
    if (peersRef.current.has(user)) return;
    const pc = new RTCPeerConnection(rtcConfig);
    peersRef.current.set(user, pc);

    streamRef.current
      ?.getTracks()
      .forEach((t) => pc.addTrack(t, streamRef.current!));

    pc.onicecandidate = (e) => {
      if (e.candidate) {
        wsRef.current?.send(
          JSON.stringify({
            type: 'signal',
            to: user,
            data: { type: 'ice', candidate: e.candidate },
          }),
        );
      }
    };

    pc.ontrack = (e) => {
      const audio = document.createElement('audio');
      audio.srcObject = e.streams[0];
      audio.autoplay = true;
      audio.setAttribute('data-user', user);
      document.body.appendChild(audio);
    };

    if (!polite) {
      const offer = await pc.createOffer();
      await pc.setLocalDescription(offer);
      wsRef.current?.send(
        JSON.stringify({
          type: 'signal',
          to: user,
          data: { type: 'offer', sdp: offer },
        }),
      );
    }
  };
  const removePeer = (user: string) => {
    const pc = peersRef.current.get(user);
    if (!pc) return;
    pc.close();
    peersRef.current.delete(user);
    pendingIceRef.current.delete(user);
    // remove the <audio> element for this user
    const audio = document.querySelector(
      `audio[data-user="${user}"]`,
    ) as HTMLAudioElement | null;
    audio?.remove();
  };

  // --- signaling ---
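  // Signals travel through the WebSocket as { type, to/from, data } envelopes.
  // Offers are only accepted in the 'stable' state and answers only in
  // 'have-local-offer'; ICE candidates that arrive before the remote
  // description are buffered in pendingIceRef and flushed once it is set.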
  const handleSignal = async (from: string, data: any) => {
    let pc = peersRef.current.get(from);
    if (!pc) {
      await createPeer(from, true);
      pc = peersRef.current.get(from)!;
    }

    if (data.type === 'offer') {
      if (pc.signalingState !== 'stable') {
        console.warn('Skip offer, state:', pc.signalingState);
        return;
      }
      await pc.setRemoteDescription(data.sdp);
      // 🔥 apply the ICE candidates queued while the offer was in flight
      const queued = pendingIceRef.current.get(from);
      if (queued) {
        for (const c of queued) {
          await pc.addIceCandidate(c);
        }
        pendingIceRef.current.delete(from);
      }
      const answer = await pc.createAnswer();
      await pc.setLocalDescription(answer);
      wsRef.current?.send(
        JSON.stringify({
          type: 'signal',
          to: from,
          data: { type: 'answer', sdp: answer },
        }),
      );
    }

    if (data.type === 'answer') {
      if (pc.signalingState === 'have-local-offer') {
        await pc.setRemoteDescription(data.sdp);
        const queued = pendingIceRef.current.get(from);
        if (queued) {
          for (const c of queued) {
            await pc.addIceCandidate(c);
          }
          pendingIceRef.current.delete(from);
        }
      }
    }

    if (data.type === 'ice') {
      if (pc.remoteDescription) {
        await pc.addIceCandidate(data.candidate);
      } else {
        // ⏳ no remote description yet, keep the candidate for later
        const queue = pendingIceRef.current.get(from) ?? [];
        queue.push(data.candidate);
        pendingIceRef.current.set(from, queue);
      }
    }
  };

  // --- mute ---
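  // Muting toggles `enabled` on the local audio tracks instead of stopping
  // them, so microphone capture and the peer connections stay alive and
  // unmuting is instant.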
  const toggleMute = () => {
    if (!streamRef.current) return;
    const nextMuted = !getVoiceState().muted;
    streamRef.current.getAudioTracks().forEach((t) => (t.enabled = !nextMuted));
    setVoiceState({ muted: nextMuted });
  };

  // --- cleanup ---
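  // cleanup() is shared by the close handler and disconnect(): it tears down
  // every RTCPeerConnection, stops the microphone tracks, closes the socket,
  // cancels any pending reconnect timer, and removes the injected <audio>
  // elements.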
  const cleanup = () => {
    peersRef.current.forEach((pc) => pc.close());
    peersRef.current.clear();
    streamRef.current?.getTracks().forEach((t) => t.stop());
    streamRef.current = null;
    wsRef.current?.close();
    wsRef.current = null;
    if (reconnectTimeout.current) {
      clearTimeout(reconnectTimeout.current);
      reconnectTimeout.current = null;
    }
    document.querySelectorAll('audio[data-user]').forEach((a) => a.remove());
  };
  const disconnect = () => {
    setVoiceState({
      connected: false,
      shouldBeConnected: false,
      participants: [],
      muted: false,
    });
    cleanup();
  };
  return {
    connect,
    disconnect,
    toggleMute,
  };
}
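
// Usage sketch (illustrative only): a hypothetical component wiring the hook
// into a UI. `VoiceControls`, `useVoiceState`, the `roomId` prop, and the
// 'steve' username are assumptions, not part of this file.
//
// function VoiceControls({ roomId }: { roomId: string }) {
//   const { connect, disconnect, toggleMute } = useVoiceRoom('steve');
//   const { connected, muted } = useVoiceState(); // assumed selector over voiceStore
//   return (
//     <div>
//       <button onClick={() => (connected ? disconnect() : connect(roomId))}>
//         {connected ? 'Leave voice' : 'Join voice'}
//       </button>
//       <button onClick={toggleMute} disabled={!connected}>
//         {muted ? 'Unmute' : 'Mute'}
//       </button>
//     </div>
//   );
// }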