Skip to content

Commit df120e3

Browse files
committed
Merge branch 'master' into gcp-sed
2 parents 9eee3fa + 04ee633 commit df120e3

File tree

14 files changed

+821
-301
lines changed

14 files changed

+821
-301
lines changed

PeerPrep/package-lock.json

Lines changed: 0 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

PeerPrep/src/frontend/components/Collab/CollabSession.tsx

Lines changed: 29 additions & 230 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@ import { useCollab } from '../../context/CollabProviderContext';
33
import { ProblemPanel } from './ProblemPanel';
44
import { CodeEditor } from './CodeEditor';
55
import { TopBar } from './TopBar';
6-
import type { WebrtcProvider } from 'y-webrtc';
76

87
interface CollabSessionProps {
98
userId: string;
@@ -33,249 +32,49 @@ export function CollabSession({
3332
seconds,
3433
}: CollabSessionProps) {
3534
// Add audio context for voice chat
36-
const { provider } = useCollab();
35+
const { voiceManager } = useCollab();
3736
const [isMuted, setIsMuted] = useState(true);
3837
const [isSpeaking, setIsSpeaking] = useState(false);
39-
const localStreamRef = useRef<MediaStream | null>(null);
4038
const analyserRef = useRef<AnalyserNode | null>(null);
4139
const audioCtxRef = useRef<AudioContext | null>(null);
42-
// const addedAudioEls = useRef<HTMLAudioElement[]>([]);
4340

4441
useEffect(() => {
45-
if (!provider) return;
46-
const room = (
47-
provider as WebrtcProvider & {
48-
room?: { webrtcConns?: Map<string, { peer: RTCPeerConnection }> };
49-
}
50-
).room;
51-
if (!room) {
52-
console.warn('⚠️ provider.room not available yet');
42+
if (!voiceManager) return;
43+
44+
const stream = voiceManager.getLocalStream();
45+
if (!stream) {
46+
console.warn('[UI] No local stream yet; speaking meter idle');
5347
return;
5448
}
5549

56-
const cleanup: (() => void)[] = [];
57-
58-
console.log('🎙️ Initializing voice chat for room:');
59-
60-
navigator.mediaDevices
61-
.getUserMedia({ audio: true })
62-
.then((stream) => {
63-
console.log('🎧 Got local microphone stream', stream);
64-
localStreamRef.current = stream;
65-
stream.getTracks().forEach((t) => (t.enabled = false)); // start muted
66-
67-
const attachToPeers = () => {
68-
const conns =
69-
room.webrtcConns ?? new Map<string, { peer: RTCPeerConnection }>();
70-
console.log('📡 Current peer connections:', [...conns.keys()]);
71-
72-
conns.forEach((conn, peerId) => {
73-
const pc = conn.peer;
74-
if (!pc) {
75-
console.warn(`⚠️ Peer ${peerId} has no RTCPeerConnection yet`);
76-
return;
77-
}
78-
79-
console.log(`🧩 Attaching audio track to peer ${peerId}`);
80-
81-
// Try to ensure transceiver exists
82-
try {
83-
const trans = pc.addTransceiver('audio', {
84-
direction: 'sendrecv',
85-
});
86-
console.log(
87-
`🎛️ Added transceiver for ${peerId}`,
88-
trans?.direction
89-
);
90-
} catch (err) {
91-
console.warn(`⚠️ addTransceiver failed for ${peerId}:`, err);
92-
}
93-
94-
// Temporarily enable tracks for negotiation
95-
stream.getTracks().forEach((t) => (t.enabled = true));
96-
97-
// Add local audio tracks
98-
stream.getTracks().forEach((track) => {
99-
try {
100-
const sender = pc.addTrack(track, stream);
101-
console.log(
102-
`🎧 Added track "${track.kind}" → peer ${peerId}`,
103-
sender
104-
);
105-
} catch (e) {
106-
console.warn(`❌ addTrack failed for ${peerId}:`, e);
107-
}
108-
});
109-
110-
// Mute again after short delay (so SDP includes audio)
111-
setTimeout(() => {
112-
stream.getTracks().forEach((t) => (t.enabled = false));
113-
console.log(
114-
`🔇 Disabled local audio track after negotiation for ${peerId}`
115-
);
116-
}, 2000);
117-
118-
pc.ontrack = (event: RTCTrackEvent) => {
119-
console.log(
120-
`🎵 Received remote ${event.track.kind} from ${peerId}`
121-
);
122-
if (event.track.kind === 'audio') {
123-
const audio = document.createElement('audio');
124-
audio.autoplay = true;
125-
audio.srcObject = event.streams[0];
126-
document.body.appendChild(audio);
127-
cleanup.push(() => audio.remove());
128-
}
129-
};
130-
131-
pc.onconnectionstatechange = () =>
132-
console.log(`🔗 ${peerId} connectionState:`, pc.connectionState);
133-
pc.oniceconnectionstatechange = () =>
134-
console.log(`❄️ ${peerId} ICE:`, pc.iceConnectionState);
135-
// pc.onicecandidate = (e) => {
136-
// if (!e.candidate) {
137-
// console.log(
138-
// `📜 [${peerId}] Final local SDP:`,
139-
// pc.localDescription?.sdp
140-
// );
141-
// }
142-
// };
143-
});
144-
};
145-
146-
// Wait for peers
147-
const tryAttach = () => {
148-
const conns = room.webrtcConns;
149-
if (!conns || conns.size === 0) {
150-
console.log('⏳ No peers yet — retrying in 1s...');
151-
setTimeout(tryAttach, 1000);
152-
} else {
153-
console.log('✅ Found peers:', [...conns.keys()]);
154-
attachToPeers();
155-
}
156-
};
157-
tryAttach();
158-
159-
// React to new peers joining
160-
provider.on(
161-
'peers',
162-
({ added, removed }: { added: string[]; removed: string[] }) => {
163-
if (added?.length) console.log('🆕 Peers added:', added);
164-
if (removed?.length) console.log('❌ Peers removed:', removed);
165-
setTimeout(attachToPeers, 500);
166-
}
167-
);
168-
169-
// Voice activity detection
170-
const audioCtx = new AudioContext();
171-
const analyser = audioCtx.createAnalyser();
172-
const src = audioCtx.createMediaStreamSource(stream);
173-
src.connect(analyser);
174-
analyser.fftSize = 512;
175-
const dataArray = new Uint8Array(analyser.frequencyBinCount);
176-
audioCtxRef.current = audioCtx;
177-
analyserRef.current = analyser;
178-
179-
const detectSpeech = () => {
180-
analyser.getByteFrequencyData(dataArray);
181-
const avg = dataArray.reduce((a, b) => a + b, 0) / dataArray.length;
182-
setIsSpeaking(avg > 20 && !isMuted);
183-
requestAnimationFrame(detectSpeech);
184-
};
185-
detectSpeech();
186-
})
187-
.catch((err) => {
188-
console.error('🚨 Error accessing microphone:', err);
189-
});
50+
stream.getTracks().forEach((t) => (t.enabled = false)); // start muted
51+
const audioCtx = new AudioContext();
52+
const analyser = audioCtx.createAnalyser();
53+
const src = audioCtx.createMediaStreamSource(stream);
54+
src.connect(analyser);
55+
analyser.fftSize = 512;
56+
const dataArray = new Uint8Array(analyser.frequencyBinCount);
57+
58+
const detectSpeech = () => {
59+
analyser.getByteFrequencyData(dataArray);
60+
const avg = dataArray.reduce((a, b) => a + b, 0) / dataArray.length;
61+
setIsSpeaking(avg > 20 && !isMuted);
62+
requestAnimationFrame(detectSpeech);
63+
};
64+
detectSpeech();
19065

66+
analyserRef.current = analyser;
67+
audioCtxRef.current = audioCtx;
19168
return () => {
192-
cleanup.forEach((fn) => fn());
193-
localStreamRef.current?.getTracks().forEach((t) => t.stop());
194-
audioCtxRef.current?.close();
69+
audioCtx.close();
19570
};
196-
}, [provider]);
197-
198-
// // Voice setup: attach audio streams to peers from y-webrtc
199-
// useEffect(() => {
200-
// if (!provider) return;
201-
// console.log('Setting up voice streams with provider:', provider);
202-
203-
// // Get internal map of peer connections
204-
// const internalProvider = provider as any;
205-
206-
// // Capture local microphone
207-
// navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
208-
// console.log('Got local audio stream:', stream);
209-
// localStreamRef.current = stream;
210-
// stream.getTracks().forEach((t) => (t.enabled = false));
211-
212-
// // Attach tracks to existing peers
213-
// Object.values(internalProvider.webrtcConns || {}).forEach((conn: any) => {
214-
// if (conn?.peer && conn.peer.signalingState !== 'closed') {
215-
// stream
216-
// .getTracks()
217-
// .forEach((track) => conn.peer.addTrack(track, stream));
218-
// }
219-
// });
220-
221-
// // When new peers are added, attach audio to them
222-
// provider.on('peers', ({ added }) => {
223-
// added.forEach((peerId: string) => {
224-
// const conn = internalProvider.webrtcConns?.[peerId];
225-
// if (conn?.peer) {
226-
// stream
227-
// .getTracks()
228-
// .forEach((track) => conn.peer.addTrack(track, stream));
229-
// console.log('Added local audio track to new peer:', peerId);
230-
231-
// // Handle incoming audio tracks from that peer
232-
// conn.peer.ontrack = (event: RTCTrackEvent) => {
233-
// if (event.track.kind === 'audio') {
234-
// const audioEl = document.createElement('audio');
235-
// audioEl.autoplay = true;
236-
// audioEl.srcObject = event.streams[0];
237-
// document.body.appendChild(audioEl);
238-
// addedAudioEls.current.push(audioEl);
239-
// console.log('audio track event:', event.streams[0]);
240-
// }
241-
// };
242-
// }
243-
// });
244-
// console.log('Current peers:', internalProvider.webrtcConns);
245-
// });
246-
247-
// // Voice activity detection (analyser)
248-
// const audioCtx = new AudioContext();
249-
// const analyser = audioCtx.createAnalyser();
250-
// const source = audioCtx.createMediaStreamSource(stream);
251-
// source.connect(analyser);
252-
// analyser.fftSize = 512;
253-
// const dataArray = new Uint8Array(analyser.frequencyBinCount);
254-
255-
// audioCtxRef.current = audioCtx;
256-
// analyserRef.current = analyser;
257-
258-
// const detectSpeech = () => {
259-
// analyser.getByteFrequencyData(dataArray);
260-
// const avg = dataArray.reduce((a, b) => a + b, 0) / dataArray.length;
261-
// setIsSpeaking(avg > 20 && !isMuted);
262-
// requestAnimationFrame(detectSpeech);
263-
// };
264-
// detectSpeech();
265-
// });
266-
267-
// return () => {
268-
// addedAudioEls.current.forEach((el) => el.remove());
269-
// localStreamRef.current?.getTracks().forEach((t) => t.stop());
270-
// audioCtxRef.current?.close();
271-
// };
272-
// }, [provider]);
71+
}, [voiceManager]);
27372

27473
const handleToggleMute = () => {
275-
if (!localStreamRef.current) return;
276-
const mute = !isMuted;
277-
localStreamRef.current.getTracks().forEach((t) => (t.enabled = !mute));
278-
setIsMuted(mute);
74+
if (!voiceManager) return;
75+
const newMuted = !isMuted;
76+
voiceManager.toggleMute(newMuted);
77+
setIsMuted(newMuted);
27978
};
28079

28180
return (

PeerPrep/src/frontend/context/CollabProviderContext.tsx

Lines changed: 39 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -7,17 +7,20 @@ import React, {
77
} from 'react';
88
import * as Y from 'yjs';
99
import { WebrtcProvider } from 'y-webrtc';
10+
import { VoiceConnectionManager } from './VoiceConnectionManager';
1011

1112
type CollabContextType = {
1213
ydoc: Y.Doc | null;
1314
provider: WebrtcProvider | null;
1415
roomId: string;
16+
voiceManager: VoiceConnectionManager | null;
1517
};
1618

1719
const CollabContext = createContext<CollabContextType>({
1820
ydoc: null,
1921
provider: null,
2022
roomId: '',
23+
voiceManager: null,
2124
});
2225

2326
export const useCollab = () => useContext(CollabContext);
@@ -31,28 +34,48 @@ export function CollabProvider({ roomId, children }: CollabProviderProps) {
3134
const [providerReady, setProviderReady] = useState(false);
3235
const ydocRef = useRef<Y.Doc | null>(null);
3336
const providerRef = useRef<WebrtcProvider | null>(null);
37+
const voiceManagerRef = useRef<VoiceConnectionManager | null>(null);
3438

3539
useEffect(() => {
36-
const ydoc = new Y.Doc();
37-
console.log('Y.Doc created for room:', roomId);
38-
const provider = new WebrtcProvider(`room-${roomId}`, ydoc, {
39-
signaling: [import.meta.env.VITE_SIGNALING_SERVER_URL],
40-
peerOpts: {
41-
config: {
42-
iceServers: [{ urls: [import.meta.env.VITE_ICE_SERVERS] }],
43-
},
44-
},
45-
});
46-
console.log('WebRTC Provider:', provider);
47-
console.log('WebRTC ydoc:', ydoc);
40+
let manager: VoiceConnectionManager;
41+
let ydoc: Y.Doc;
42+
let provider: WebrtcProvider;
4843

49-
ydocRef.current = ydoc;
50-
providerRef.current = provider;
51-
setProviderReady(true);
44+
async function init() {
45+
try {
46+
// --- Collaborative Yjs provider ---
47+
ydoc = new Y.Doc();
48+
provider = new WebrtcProvider(`room-${roomId}`, ydoc, {
49+
signaling: [import.meta.env.VITE_SIGNALING_SERVER_URL],
50+
peerOpts: {
51+
config: {
52+
iceServers: [{ urls: [import.meta.env.VITE_ICE_SERVERS] }],
53+
},
54+
},
55+
});
56+
ydocRef.current = ydoc;
57+
providerRef.current = provider;
58+
setProviderReady(true);
59+
60+
// --- Voice layer ---
61+
manager = new VoiceConnectionManager(
62+
import.meta.env.VITE_SIGNALING_SERVER_URL,
63+
roomId
64+
);
65+
await manager.initLocalMic(); // get mic tracks
66+
await manager.startCall(); // send offer
67+
voiceManagerRef.current = manager;
68+
} catch (err) {
69+
console.error('Error initializing CollabProvider:', err);
70+
}
71+
}
72+
73+
init();
5274

5375
return () => {
5476
provider.destroy();
5577
ydoc.destroy();
78+
manager.cleanup();
5679
};
5780
}, [roomId]);
5881

@@ -64,6 +87,7 @@ export function CollabProvider({ roomId, children }: CollabProviderProps) {
6487
ydoc: ydocRef.current,
6588
provider: providerRef.current,
6689
roomId,
90+
voiceManager: voiceManagerRef.current,
6791
}}
6892
>
6993
{children}

0 commit comments

Comments (0)