// App/lib/pods/chat/webrtc_manager.dart

import 'dart:async';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:island/models/account.dart';
import 'package:island/pods/chat/webrtc_signaling.dart';
import 'package:island/pods/userinfo.dart';
import 'package:island/talker.dart';
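
/// A single participant in a WebRTC room, either local or remote.
///
/// Holds the peer connection, the remote media stream, ICE candidates that
/// arrive before the connection is ready, and UI-facing state such as
/// audio/video enablement and the latest measured audio level.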
class WebRTCParticipant {
final String id;
final String name;
final SnAccount userinfo;
RTCPeerConnection? peerConnection;
MediaStream? remoteStream;
List<RTCIceCandidate> remoteCandidates = [];
bool isAudioEnabled = true;
bool isVideoEnabled = false;
bool isConnected = false;
bool isLocal = false;
double audioLevel = 0.0;
WebRTCParticipant({
required this.id,
required this.name,
required this.userinfo,
this.isAudioEnabled = true,
this.isVideoEnabled = false,
this.isLocal = false,
});
}
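
/// Manages a mesh of WebRTC peer connections for a single chat room.
///
/// The manager owns the local media stream, reacts to signaling messages
/// (offers, answers, ICE candidates, join notifications), and exposes
/// broadcast streams the UI can listen to for participant updates.
///
/// A minimal usage sketch (the surrounding provider wiring and the `ref`
/// value are assumptions, not part of this file):
///
/// ```dart
/// final manager = WebRTCManager(roomId: roomId, serverUrl: serverUrl);
/// await manager.initialize(ref);
/// manager.onParticipantJoined.listen((participant) {
///   // Refresh the call UI for this participant.
/// });
/// // ...later, when leaving the call:
/// manager.dispose();
/// ```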
class WebRTCManager {
final String roomId;
final String serverUrl;
late WebRTCSignaling _signaling;
final Map<String, WebRTCParticipant> _participants = {};
final Map<String, RTCPeerConnection> _peerConnections = {};
MediaStream? _localStream;
Timer? _audioLevelTimer;
MediaStream? get localStream => _localStream;
final StreamController<WebRTCParticipant> _participantController =
StreamController<WebRTCParticipant>.broadcast();
final StreamController<String> _participantLeftController =
StreamController<String>.broadcast();
Stream<WebRTCParticipant> get onParticipantJoined =>
_participantController.stream;
Stream<String> get onParticipantLeft => _participantLeftController.stream;
WebRTCManager({required this.roomId, required this.serverUrl}) {
_signaling = WebRTCSignaling(roomId: roomId);
}
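  /// Connects to the room: resolves the current user from [userInfoProvider],
  /// opens the local camera/microphone stream, wires up the signaling
  /// listeners, connects the signaling channel, and starts audio level
  /// monitoring.
  ///
  /// Expects [userInfoProvider] to already hold a loaded user; its value is
  /// force-unwrapped below.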
Future<void> initialize(Ref ref) async {
final user = ref.watch(userInfoProvider).value!;
_signaling.userId = user.id;
_signaling.userName = user.name;
_signaling.user = user;
await _initializeLocalStream();
_setupSignalingListeners();
await _signaling.connect(ref);
_startAudioLevelMonitoring();
}
Future<void> _initializeLocalStream() async {
try {
_localStream = await navigator.mediaDevices.getUserMedia({
'audio': true,
'video': true,
});
talker.info('[WebRTC] Local stream initialized');
} catch (e) {
talker.error('[WebRTC] Failed to initialize local stream: $e');
rethrow;
}
}
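  /// Wires the signaling streams to their handlers: targeted messages
  /// (offer, answer, ICE candidate, user-joined) and the welcome message,
  /// which lists the participants already in the room so the newcomer can
  /// initiate a connection to each of them.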
void _setupSignalingListeners() {
_signaling.messages.listen((message) async {
switch (message.type) {
case 'offer':
await _handleOffer(message.accountId, message.account, message.data);
break;
case 'answer':
await _handleAnswer(message.accountId, message.data);
break;
case 'ice-candidate':
await _handleIceCandidate(message.accountId, message.data);
break;
        // Listen for new users joining the room.
case 'user-joined':
await _handleUserJoined(message.accountId, message.account);
break;
        default:
          talker.warning(
            '[WebRTC Manager] Received an unknown signaling message type: ${message.type} with ${message.data}',
          );
}
});
    // The welcome message drives connection initiation.
_signaling.welcomeMessages.listen((welcome) {
talker.info('[WebRTC Manager] Connected to room: ${welcome.roomId}');
final existingParticipants =
welcome.participants; // Assuming the server sends this.
talker.info(
'[WebRTC Manager] Existing participants: $existingParticipants',
);
// The newcomer is responsible for initiating the connection to everyone else.
for (final participant in existingParticipants) {
if (participant.identity != _signaling.userId) {
if (!_participants.containsKey(participant.identity)) {
final webrtcParticipant = WebRTCParticipant(
id: participant.identity,
name: participant.name,
userinfo: participant.account!,
);
_participants[participant.identity] = webrtcParticipant;
_participantController.add(webrtcParticipant);
}
_createPeerConnection(participant.identity, isInitiator: true);
}
}
});
}
  // Handler invoked when an existing user is notified of a new peer.
Future<void> _handleUserJoined(
String participantId,
SnAccount account,
) async {
talker.info(
'[WebRTC Manager] User joined: $participantId. Waiting for their offer.',
);
// We don't need to be the initiator here. The newcomer will send us an offer.
// We just create the peer connection to be ready for it.
if (!_peerConnections.containsKey(participantId)) {
// Create a participant object to represent the new user
if (!_participants.containsKey(participantId)) {
        final participant = WebRTCParticipant(
          id: participantId,
          name: participantId, // Placeholder name
          userinfo: account,
        );
_participants[participantId] = participant;
_participantController.add(participant);
}
await _createPeerConnection(participantId, isInitiator: false);
}
}
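  /// Creates (and caches) the peer connection for [participantId], attaches
  /// the local tracks, and registers the track/ICE/connection-state
  /// callbacks. When [isInitiator] is true, also creates and sends an SDP
  /// offer.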
Future<void> _createPeerConnection(
String participantId, {
bool isInitiator = false,
}) async {
talker.info(
'[WebRTC] Creating peer connection to $participantId (initiator: $isInitiator)',
);
final configuration = {
'iceServers': [
{'urls': 'stun:stun.l.google.com:19302'},
],
};
final peerConnection = await createPeerConnection(configuration);
_peerConnections[participantId] = peerConnection;
_participants[participantId]!.peerConnection = peerConnection;
if (_localStream != null) {
for (final track in _localStream!.getTracks()) {
await peerConnection.addTrack(track, _localStream!);
}
}
peerConnection.onTrack = (event) {
if (event.streams.isNotEmpty) {
final participant = _participants[participantId];
if (participant != null) {
participant.remoteStream = event.streams[0];
participant.isConnected = true;
// Detect video tracks and update video enabled state
final videoTracks = event.streams[0].getVideoTracks();
if (videoTracks.isNotEmpty) {
participant.isVideoEnabled = true;
}
_participantController.add(participant);
}
}
};
peerConnection.onIceCandidate = (candidate) {
      // Send the candidate to this specific participant.
_signaling.sendIceCandidate(participantId, candidate);
};
peerConnection.onConnectionState = (state) {
talker.info('[WebRTC] Connection state for $participantId: $state');
final participant = _participants[participantId];
if (participant != null) {
participant.isConnected =
state == RTCPeerConnectionState.RTCPeerConnectionStateConnected;
_participantController.add(participant);
}
};
if (isInitiator) {
final offer = await peerConnection.createOffer();
await peerConnection.setLocalDescription(offer);
      // Send the offer to this specific participant.
_signaling.sendOffer(participantId, offer);
}
}
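  /// Handles an incoming SDP offer from [from]: creates the participant and
  /// peer connection if needed, applies the remote description, replies with
  /// an answer, and flushes any ICE candidates queued for this peer.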
Future<void> _handleOffer(
String from,
SnAccount account,
Map<String, dynamic> data,
) async {
final participantId = from;
talker.info('[WebRTC Manager] Handling offer from $participantId');
final offer = RTCSessionDescription(data['sdp'], data['type']);
if (!_peerConnections.containsKey(participantId)) {
if (!_participants.containsKey(participantId)) {
final participant = WebRTCParticipant(
id: participantId,
name: participantId,
userinfo: account,
);
_participants[participantId] = participant;
_participantController.add(participant);
}
await _createPeerConnection(participantId, isInitiator: false);
}
final peerConnection = _peerConnections[participantId]!;
await peerConnection.setRemoteDescription(offer);
final answer = await peerConnection.createAnswer();
await peerConnection.setLocalDescription(answer);
    // Send the answer to this specific participant.
_signaling.sendAnswer(participantId, answer);
// Process any queued ICE candidates
final participant = _participants[participantId];
if (participant != null) {
for (final candidate in participant.remoteCandidates) {
await peerConnection.addCandidate(candidate);
}
participant.remoteCandidates.clear();
}
}
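  /// Handles an incoming SDP answer from [from]: applies the remote
  /// description and flushes any ICE candidates queued for this peer.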
Future<void> _handleAnswer(String from, Map<String, dynamic> data) async {
final participantId = from;
talker.info('[WebRTC Manager] Handling answer from $participantId');
final answer = RTCSessionDescription(data['sdp'], data['type']);
final peerConnection = _peerConnections[participantId];
if (peerConnection != null) {
await peerConnection.setRemoteDescription(answer);
// Process any queued ICE candidates
final participant = _participants[participantId];
if (participant != null) {
for (final candidate in participant.remoteCandidates) {
await peerConnection.addCandidate(candidate);
}
participant.remoteCandidates.clear();
}
}
}
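  /// Handles an incoming ICE candidate from [from]. Candidates that arrive
  /// before the peer connection exists are queued on the participant and
  /// flushed once the remote description has been set.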
Future<void> _handleIceCandidate(
String from,
Map<String, dynamic> data,
) async {
final participantId = from;
final candidate = RTCIceCandidate(
data['candidate'],
data['sdpMid'],
data['sdpMLineIndex'],
);
final participant = _participants[participantId];
if (participant != null) {
final pc = participant.peerConnection;
if (pc != null) {
await pc.addCandidate(candidate);
} else {
participant.remoteCandidates.add(candidate);
}
}
}
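  /// Acquires a new local stream with [constraints], swaps the new tracks
  /// into every peer connection via `RTCRtpSender.replaceTrack`, then stops
  /// the old tracks and replaces the local stream. Used when switching the
  /// camera or microphone device.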
Future<void> replaceMediaStream(Map<String, dynamic> constraints) async {
try {
final newStream = await navigator.mediaDevices.getUserMedia(constraints);
final newVideoTrack = newStream.getVideoTracks().firstOrNull;
final newAudioTrack = newStream.getAudioTracks().firstOrNull;
if (_localStream != null) {
final oldVideoTrack = _localStream!.getVideoTracks().firstOrNull;
final oldAudioTrack = _localStream!.getAudioTracks().firstOrNull;
// Replace tracks in all existing peer connections
for (final pc in _peerConnections.values) {
final senders = await pc.getSenders();
for (final sender in senders) {
if (newVideoTrack != null && sender.track == oldVideoTrack) {
await sender.replaceTrack(newVideoTrack);
} else if (newAudioTrack != null && sender.track == oldAudioTrack) {
await sender.replaceTrack(newAudioTrack);
}
}
}
// Stop old tracks and update local stream
for (final track in _localStream!.getTracks()) {
track.stop();
}
}
_localStream = newStream;
talker.info('[WebRTC] Media stream replaced with new constraints');
} catch (e) {
talker.error('[WebRTC] Failed to replace media stream: $e');
rethrow;
}
}
Future<void> toggleMicrophone(bool enabled) async {
if (_localStream != null) {
final audioTracks = _localStream!.getAudioTracks();
for (final track in audioTracks) {
track.enabled = enabled;
}
}
}
  Future<void> toggleCamera(bool enabled) async {
    if (_localStream != null) {
      for (final track in _localStream!.getVideoTracks()) {
        track.enabled = enabled;
      }
    }
  }
Future<void> switchCamera(String deviceId) async {
await replaceMediaStream({
'audio': _localStream?.getAudioTracks().isNotEmpty ?? true,
'video': {'deviceId': deviceId},
});
talker.info('[WebRTC] Switched to camera device: $deviceId');
}
Future<void> switchMicrophone(String deviceId) async {
await replaceMediaStream({
'audio': {'deviceId': deviceId},
'video': _localStream?.getVideoTracks().isNotEmpty ?? true,
});
talker.info('[WebRTC] Switched to microphone device: $deviceId');
}
Future<List<MediaDeviceInfo>> getVideoDevices() async {
try {
final devices = await navigator.mediaDevices.enumerateDevices();
return devices.where((device) => device.kind == 'videoinput').toList();
} catch (e) {
talker.error('[WebRTC] Failed to enumerate video devices: $e');
return [];
}
}
Future<List<MediaDeviceInfo>> getAudioDevices() async {
try {
final devices = await navigator.mediaDevices.enumerateDevices();
return devices.where((device) => device.kind == 'audioinput').toList();
} catch (e) {
talker.error('[WebRTC] Failed to enumerate audio devices: $e');
return [];
}
}
void _startAudioLevelMonitoring() {
_audioLevelTimer?.cancel();
_audioLevelTimer = Timer.periodic(const Duration(milliseconds: 100), (_) {
_updateAudioLevels();
});
}
void _stopAudioLevelMonitoring() {
_audioLevelTimer?.cancel();
_audioLevelTimer = null;
}
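  /// Polls each remote participant's audio level (invoked every 100 ms by
  /// the monitoring timer). Prefers the `audioLevel` value from `inbound-rtp`
  /// WebRTC stats and falls back to a simple heuristic when stats are
  /// unavailable.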
  Future<void> _updateAudioLevels() async {
    bool hasUpdates = false;
    for (final participant in _participants.values) {
      final previousLevel = participant.audioLevel;
      if (participant.remoteStream != null && participant.isAudioEnabled) {
        final audioTracks = participant.remoteStream!.getAudioTracks();
        if (audioTracks.isNotEmpty) {
          try {
            // Prefer WebRTC stats for an accurate audio level reading.
            final pc = participant.peerConnection;
            if (pc != null) {
              final stats = await pc.getStats();
              double maxAudioLevel = 0.0;
              // Look for audio receiver ('inbound-rtp') reports.
              for (final report in stats) {
                if (report.type == 'inbound-rtp' &&
                    report.values['mediaType'] == 'audio') {
                  final audioLevel =
                      (report.values['audioLevel'] as num?)?.toDouble();
                  if (audioLevel != null && audioLevel > maxAudioLevel) {
                    maxAudioLevel = audioLevel;
                  }
                }
              }
              // Use the stats value when available; otherwise fall back to a
              // simple heuristic based on the track's enabled state.
              if (maxAudioLevel > 0) {
                participant.audioLevel =
                    maxAudioLevel.clamp(0.0, 1.0).toDouble();
              } else {
                participant.audioLevel = audioTracks[0].enabled ? 0.5 : 0.0;
              }
            } else {
              // Fallback for the local participant or when no peer connection
              // is available yet.
              participant.audioLevel = participant.isLocal ? 0.0 : 0.3;
            }
          } catch (e) {
            talker.warning(
              '[WebRTC] Failed to update audio level for ${participant.id}: $e',
            );
            participant.audioLevel = 0.0;
          }
        } else {
          participant.audioLevel = 0.0;
        }
      } else {
        participant.audioLevel = 0.0;
      }
      if (participant.audioLevel != previousLevel) {
        hasUpdates = true;
      }
    }
    // Re-emit participants so speaking indicators can update; polling every
    // 100 ms keeps the update rate bounded.
    if (hasUpdates) {
      for (final participant in _participants.values) {
        _participantController.add(participant);
      }
    }
  }
List<WebRTCParticipant> get participants => _participants.values.toList();
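  /// Tears down the call: stops audio level monitoring, disconnects the
  /// signaling channel, closes every peer connection, releases the local
  /// stream, and closes the broadcast stream controllers.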
void dispose() {
_stopAudioLevelMonitoring();
_signaling.disconnect();
for (final pc in _peerConnections.values) {
pc.close();
}
_peerConnections.clear();
for (var p in _participants.values) {
p.remoteCandidates.clear();
}
_participants.clear();
_localStream?.dispose();
_participantController.close();
_participantLeftController.close();
}
}