♻️ Trying out the new built-in webrtc

2025-10-19 17:30:06 +08:00
parent 001549b190
commit 3f83bbc1d8
27 changed files with 1420 additions and 580 deletions

View File

@@ -16,7 +16,7 @@ Future<SnRealtimeCall?> ongoingCall(Ref ref, String roomId) async {
if (roomId.isEmpty) return null;
try {
final apiClient = ref.watch(apiClientProvider);
final resp = await apiClient.get('/sphere/chat/realtime/$roomId');
final resp = await apiClient.get('/sphere/chat/realtime/$roomId/status');
return SnRealtimeCall.fromJson(resp.data);
} catch (e) {
if (e is DioException && e.response?.statusCode == 404) {
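The provider now polls the /status sub-route and, judging by the catch block, treats a 404 as "no call in progress". A hedged sketch of how a widget might consume the generated provider (the ongoingCallProvider(roomId) name follows riverpod_generator conventions and is an assumption, not part of this diff):

import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';

// Hypothetical consumer: null from the provider means /status returned 404,
// i.e. no realtime call is currently running in this room.
class OngoingCallIndicator extends ConsumerWidget {
  const OngoingCallIndicator({super.key, required this.roomId});

  final String roomId;

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final call = ref.watch(ongoingCallProvider(roomId));
    return call.when(
      data: (value) =>
          value == null ? const SizedBox.shrink() : const Icon(Icons.call),
      loading: () => const SizedBox.shrink(),
      error: (error, stack) => const SizedBox.shrink(),
    );
  }
}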

View File

@@ -6,7 +6,7 @@ part of 'call_button.dart';
// RiverpodGenerator
// **************************************************************************
String _$ongoingCallHash() => r'48031badb79efa07aefb3a4fc51635be457bd3f9';
String _$ongoingCallHash() => r'0f14b36393276720a06190cab3dc8d5e4c88cd57';
/// Copied from Dart SDK
class _SystemHash {

View File

@@ -10,7 +10,7 @@ import 'package:island/widgets/chat/call_participant_tile.dart';
import 'package:island/widgets/content/sheet.dart';
import 'package:material_symbols_icons/symbols.dart';
import 'package:styled_widget/styled_widget.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
class CallControlsBar extends HookConsumerWidget {
const CallControlsBar({super.key});
@@ -194,9 +194,16 @@ class CallControlsBar extends HookConsumerWidget {
String deviceType,
) async {
try {
final devices = await Hardware.instance.enumerateDevices(
type: deviceType,
);
final devices = await navigator.mediaDevices.enumerateDevices();
final filteredDevices =
devices.where((device) {
if (deviceType == 'videoinput') {
return device.kind == 'videoinput';
} else if (deviceType == 'audioinput') {
return device.kind == 'audioinput';
}
return false;
}).toList();
if (!context.mounted) return;
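Since flutter_webrtc's enumerateDevices() already reports the standard kind strings ('audioinput', 'videoinput', 'audiooutput'), the branching filter above can collapse to a single comparison. A small equivalent sketch (my simplification, not what the commit ships):

// Equivalent filter: deviceType is already one of the MediaDeviceInfo.kind
// values, so the if/else above reduces to a direct comparison.
final devices = await navigator.mediaDevices.enumerateDevices();
final filteredDevices =
    devices.where((device) => device.kind == deviceType).toList();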
@@ -209,9 +216,9 @@ class CallControlsBar extends HookConsumerWidget {
? 'selectCamera'.tr()
: 'selectMicrophone'.tr(),
child: ListView.builder(
itemCount: devices.length,
itemCount: filteredDevices.length,
itemBuilder: (context, index) {
final device = devices[index];
final device = filteredDevices[index];
return ListTile(
title: Text(
device.label.isNotEmpty
@@ -236,35 +243,12 @@ class CallControlsBar extends HookConsumerWidget {
Future<void> _switchDevice(
BuildContext context,
WidgetRef ref,
MediaDevice device,
MediaDeviceInfo device,
String deviceType,
) async {
try {
final callNotifier = ref.read(callNotifierProvider.notifier);
if (deviceType == 'videoinput') {
// Switch camera device
final localParticipant = callNotifier.room?.localParticipant;
final videoTrack =
localParticipant?.videoTrackPublications.firstOrNull?.track;
if (videoTrack is LocalVideoTrack) {
await videoTrack.switchCamera(device.deviceId);
}
} else if (deviceType == 'audioinput') {
// Switch microphone device
final localParticipant = callNotifier.room?.localParticipant;
final audioTrack =
localParticipant?.audioTrackPublications.firstOrNull?.track;
if (audioTrack is LocalAudioTrack) {
// For audio devices, we need to restart the track with new device
await audioTrack.restartTrack(
AudioCaptureOptions(deviceId: device.deviceId),
);
}
}
// TODO: Implement device switching for WebRTC
// This would require restarting the media stream with the new device
if (context.mounted) {
showSnackBar(
'switchedTo'.tr(
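One way the TODO could eventually be filled in with flutter_webrtc is to reacquire the input with a deviceId constraint and swap the outgoing track on the peer connection's sender. A hedged sketch, assuming the call notifier can expose its local MediaStream and RTCPeerConnection (both parameters here are assumptions, and the exact deviceId constraint syntax varies per platform):

import 'package:flutter_webrtc/flutter_webrtc.dart';

// Hypothetical device switch: grab a fresh track bound to the chosen device
// and hand it to the matching RTCRtpSender via replaceTrack().
Future<void> switchInputDevice(
  MediaStream localStream,
  RTCPeerConnection peerConnection,
  MediaDeviceInfo device,
  String deviceType,
) async {
  final isVideo = deviceType == 'videoinput';
  final constraints =
      isVideo
          ? {'video': {'deviceId': device.deviceId}, 'audio': false}
          : {'audio': {'deviceId': device.deviceId}, 'video': false};

  final newStream = await navigator.mediaDevices.getUserMedia(constraints);
  final newTrack =
      isVideo
          ? newStream.getVideoTracks().first
          : newStream.getAudioTracks().first;

  // Replace the outgoing track in place, without renegotiating.
  final senders = await peerConnection.getSenders();
  for (final sender in senders) {
    if (sender.track?.kind == newTrack.kind) {
      await sender.replaceTrack(newTrack);
    }
  }

  // Stop the old local tracks and swap in the new one so previews follow.
  final oldTracks =
      isVideo ? localStream.getVideoTracks() : localStream.getAudioTracks();
  for (final old in oldTracks) {
    await old.stop();
    await localStream.removeTrack(old);
  }
  await localStream.addTrack(newTrack);
}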
@@ -289,31 +273,9 @@ class CallOverlayBar extends HookConsumerWidget {
if (!callState.isConnected) return const SizedBox.shrink();
final lastSpeaker =
callNotifier.participants
.where(
(element) => element.remoteParticipant.lastSpokeAt != null,
)
.isEmpty
callNotifier.participants.isNotEmpty
? callNotifier.participants.first
: callNotifier.participants
.where(
(element) => element.remoteParticipant.lastSpokeAt != null,
)
.fold(
callNotifier.participants.first,
(value, element) =>
element.remoteParticipant.lastSpokeAt != null &&
(value.remoteParticipant.lastSpokeAt == null ||
element.remoteParticipant.lastSpokeAt!
.compareTo(
value
.remoteParticipant
.lastSpokeAt!,
) >
0)
? element
: value,
);
: null;
final actionButtonStyle = ButtonStyle(
minimumSize: const MaterialStatePropertyAll(Size(24, 24)),
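The simplified expression above always pins the overlay to the first participant. If the new participant model still records when someone last spoke (the removed code read remoteParticipant.lastSpokeAt), the "latest speaker" behaviour could be restored with a fold; a sketch under that assumption (the field name is carried over from the deleted code, so treat it as hypothetical):

// Hypothetical: pick the participant who spoke most recently, falling back
// to the first one while nobody has a lastSpokeAt timestamp yet.
final participants = callNotifier.participants;
final lastSpeaker =
    participants.isEmpty
        ? null
        : participants.fold(participants.first, (best, p) {
            final bestAt = best.remoteParticipant.lastSpokeAt;
            final at = p.remoteParticipant.lastSpokeAt;
            if (at == null) return best;
            if (bestAt == null || at.compareTo(bestAt) > 0) return p;
            return best;
          });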
@@ -330,17 +292,16 @@ class CallOverlayBar extends HookConsumerWidget {
children: [
Builder(
builder: (context) {
if (callNotifier.localParticipant == null) {
return CircularProgressIndicator().center();
if (lastSpeaker == null) {
return const CircularProgressIndicator();
}
return SizedBox(
width: 40,
height: 40,
child:
SpeakingRippleAvatar(
live: lastSpeaker,
size: 36,
).center(),
child: SpeakingRippleAvatar(
live: lastSpeaker,
size: 36,
),
);
},
),
@@ -348,7 +309,9 @@ class CallOverlayBar extends HookConsumerWidget {
Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('@${lastSpeaker.participant.identity}').bold(),
Text(
'@${lastSpeaker?.participant.identity ?? 'Unknown'}',
),
Text(
formatDuration(callState.duration),
style: Theme.of(context).textTheme.bodySmall,

View File

@@ -7,7 +7,6 @@ import 'package:gap/gap.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:island/pods/chat/call.dart';
import 'package:island/widgets/account/account_nameplate.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:material_symbols_icons/material_symbols_icons.dart';
import 'package:styled_widget/styled_widget.dart';
@@ -66,19 +65,17 @@ class CallParticipantCard extends HookConsumerWidget {
children: [
const Icon(Symbols.wifi, size: 16),
const Gap(8),
Text(switch (live.remoteParticipant.connectionQuality) {
ConnectionQuality.excellent => 'Excellent',
ConnectionQuality.good => 'Good',
ConnectionQuality.poor => 'Bad',
ConnectionQuality.lost => 'Lost',
_ => 'Connecting',
}),
Text(
live.remoteParticipant.isConnected
? 'Connected'
: 'Connecting',
),
],
),
],
).padding(horizontal: 20, top: 16),
AccountNameplate(
name: live.participant.identity,
name: live.remoteParticipant.userinfo.name,
isOutlined: false,
),
],
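The livekit ConnectionQuality readout is gone after the migration; if the participant wrapper exposes its underlying RTCPeerConnection, a comparable label can be derived from the connection state. A sketch assuming such a (hypothetical) peerConnection getter:

import 'package:flutter_webrtc/flutter_webrtc.dart';

// Hypothetical status label derived from RTCPeerConnectionState instead of
// livekit's ConnectionQuality enum.
String connectionLabel(RTCPeerConnection peerConnection) {
  switch (peerConnection.connectionState) {
    case RTCPeerConnectionState.RTCPeerConnectionStateConnected:
      return 'Connected';
    case RTCPeerConnectionState.RTCPeerConnectionStateDisconnected:
      return 'Disconnected';
    case RTCPeerConnectionState.RTCPeerConnectionStateFailed:
    case RTCPeerConnectionState.RTCPeerConnectionStateClosed:
      return 'Lost';
    default:
      return 'Connecting';
  }
}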

View File

@@ -1,10 +1,9 @@
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:island/pods/chat/call.dart';
import 'package:island/screens/account/profile.dart';
import 'package:island/widgets/chat/call_participant_card.dart';
import 'package:island/widgets/content/cloud_files.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:material_symbols_icons/symbols.dart';
import 'package:styled_widget/styled_widget.dart';
@@ -16,10 +15,9 @@ class SpeakingRippleAvatar extends HookConsumerWidget {
@override
Widget build(BuildContext context, WidgetRef ref) {
final account = ref.watch(accountProvider(live.participant.identity));
final avatarRadius = size / 2;
final clampedLevel = live.remoteParticipant.audioLevel.clamp(0.0, 1.0);
// TODO: Implement audio level detection for WebRTC
final clampedLevel = 0.0;
final rippleRadius = avatarRadius + clampedLevel * (size * 0.333);
return SizedBox(
width: size + 8,
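The commit stubs the level to 0.0 for now; standard WebRTC stats do expose it, so one option is to poll getStats() and read audioLevel off the inbound audio entry. A hedged sketch, assuming access to the participant's RTCPeerConnection:

import 'package:flutter_webrtc/flutter_webrtc.dart';

// Hypothetical audio level probe: WebRTC reports audioLevel (0.0 - 1.0) on
// 'inbound-rtp' / 'media-source' stats entries; poll it to drive the ripple.
Future<double> readAudioLevel(RTCPeerConnection peerConnection) async {
  final reports = await peerConnection.getStats();
  for (final report in reports) {
    final level = report.values['audioLevel'];
    if (level is num &&
        (report.type == 'inbound-rtp' || report.type == 'media-source')) {
      return level.clamp(0.0, 1.0).toDouble();
    }
  }
  return 0.0;
}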
@@ -27,7 +25,7 @@ class SpeakingRippleAvatar extends HookConsumerWidget {
child: TweenAnimationBuilder<double>(
tween: Tween<double>(
begin: avatarRadius,
end: live.remoteParticipant.isSpeaking ? rippleRadius : avatarRadius,
end: live.isSpeaking ? rippleRadius : avatarRadius,
),
duration: const Duration(milliseconds: 250),
curve: Curves.easeOut,
@@ -35,7 +33,7 @@ class SpeakingRippleAvatar extends HookConsumerWidget {
return Stack(
alignment: Alignment.center,
children: [
if (live.remoteParticipant.isSpeaking)
if (live.isSpeaking)
Container(
width: animatedRadius * 2,
height: animatedRadius * 2,
@@ -49,28 +47,15 @@ class SpeakingRippleAvatar extends HookConsumerWidget {
height: size,
alignment: Alignment.center,
decoration: BoxDecoration(shape: BoxShape.circle),
child: account.when(
data:
(value) => CallParticipantGestureDetector(
participant: live,
child: ProfilePictureWidget(
file: value.profile.picture,
radius: size / 2,
),
),
error:
(_, _) => CircleAvatar(
radius: size / 2,
child: const Icon(Symbols.person_remove),
),
loading:
() => CircleAvatar(
radius: size / 2,
child: CircularProgressIndicator(),
),
child: CallParticipantGestureDetector(
participant: live,
child: ProfilePictureWidget(
file: live.remoteParticipant.userinfo.profile.picture,
radius: size / 2,
),
),
),
if (live.remoteParticipant.isMuted)
if (live.isMuted)
Positioned(
bottom: 4,
right: 4,
@@ -103,25 +88,15 @@ class CallParticipantTile extends HookConsumerWidget {
@override
Widget build(BuildContext context, WidgetRef ref) {
final hasVideo =
live.hasVideo &&
live.remoteParticipant.trackPublications.values
.where((pub) => pub.track != null && pub.kind == TrackType.VIDEO)
.isNotEmpty;
if (hasVideo) {
if (live.hasVideo && live.remoteParticipant.remoteStream != null) {
return Stack(
fit: StackFit.loose,
children: [
AspectRatio(
aspectRatio: 16 / 9,
child: VideoTrackRenderer(
live.remoteParticipant.trackPublications.values
.where((track) => track.kind == TrackType.VIDEO)
.first
.track
as VideoTrack,
renderMode: VideoRenderMode.platformView,
child: RTCVideoView(
RTCVideoRenderer()
..srcObject = live.remoteParticipant.remoteStream,
),
),
Positioned(
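One thing to watch with the inline renderer above: RTCVideoRenderer normally has to be initialize()d before srcObject is assigned, and disposed afterwards, so it is usually owned by a State (or hook) rather than constructed inside build. A minimal sketch of that pattern (hypothetical widget, not part of this commit):

import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';

// Hypothetical stateful wrapper: owns the renderer lifecycle and attaches the
// remote stream only once the underlying texture is ready.
class RemoteVideoTile extends StatefulWidget {
  const RemoteVideoTile({super.key, required this.stream});

  final MediaStream stream;

  @override
  State<RemoteVideoTile> createState() => _RemoteVideoTileState();
}

class _RemoteVideoTileState extends State<RemoteVideoTile> {
  final _renderer = RTCVideoRenderer();

  @override
  void initState() {
    super.initState();
    _renderer.initialize().then((_) {
      if (!mounted) return;
      setState(() => _renderer.srcObject = widget.stream);
    });
  }

  @override
  void dispose() {
    _renderer.srcObject = null;
    _renderer.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return AspectRatio(
      aspectRatio: 16 / 9,
      child: RTCVideoView(_renderer),
    );
  }
}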

View File

@@ -44,10 +44,12 @@ void showInfoAlert(String message, String title) async {
Future<bool> showConfirmAlert(String message, String title) async {
final result = await js.context.callMethod('swal', [
title,
message,
'question',
{'buttons': true},
js.JsObject.jsify({
'title': title,
'text': message,
'icon': 'info',
'buttons': {'cancel': true, 'confirm': true},
}),
]);
return result == true;
}
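SweetAlert's swal() hands back a JS Promise, so the awaited JsObject may not compare cleanly to true everywhere; converting the promise explicitly is one safer variant. A sketch assuming dart:js_util is acceptable in this web-only helper:

import 'dart:js_util' as js_util;

// Hypothetical variant: call window.swal through dart:js_util so the returned
// Promise becomes a real Dart value instead of a raw JsObject.
Future<bool> showConfirmAlertViaPromise(String message, String title) async {
  final promise = js_util.callMethod(js_util.globalThis, 'swal', [
    js_util.jsify({
      'title': title,
      'text': message,
      'icon': 'info',
      'buttons': {'cancel': true, 'confirm': true},
    }),
  ]);
  final result = await js_util.promiseToFuture<dynamic>(promise);
  return result == true;
}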

View File

@@ -193,10 +193,10 @@ class _PaymentContentState extends ConsumerState<_PaymentContent> {
// Perform biometric authentication
final bool didAuthenticate = await _localAuth.authenticate(
localizedReason: 'biometricPrompt'.tr(),
options: const AuthenticationOptions(
biometricOnly: true,
stickyAuth: true,
),
// options: const AuthenticationOptions(
// biometricOnly: true,
// stickyAuth: true,
// ),
);
if (didAuthenticate) {
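With the AuthenticationOptions block commented out, the call falls back to local_auth's defaults (biometricOnly and stickyAuth both false). If that is intentional, it is still worth gating the prompt on device support; a small sketch using local_auth's standard checks:

// Hypothetical pre-check before prompting: skip biometrics entirely when the
// device has no supported or enrolled authenticators.
final supported = await _localAuth.isDeviceSupported();
final canCheck = await _localAuth.canCheckBiometrics;
if (!supported || !canCheck) {
  // Fall back to another confirmation flow (e.g. a PIN sheet).
  return;
}
final bool didAuthenticate = await _localAuth.authenticate(
  localizedReason: 'biometricPrompt'.tr(),
);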