♻️ Replace LiveKit with Jitsi in calling

2025-04-06 01:48:36 +08:00
parent 48f40099f4
commit 5c9569ef36
24 changed files with 74 additions and 2468 deletions
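The heart of the change is the new _onCallJoin in the chat room screen: instead of requesting a LiveKit room token and routing to a dedicated call screen, the client now builds JitsiMeetConferenceOptions and hands the call off to the Jitsi Meet SDK. Below is a minimal standalone sketch of that flow using only the jitsi_meet_flutter_sdk calls that appear in the diff; the helper name and its parameters are illustrative assumptions, while the room naming scheme and server URL mirror the added code.

import 'package:jitsi_meet_flutter_sdk/jitsi_meet_flutter_sdk.dart';

/// Hypothetical standalone helper mirroring the new _onCallJoin flow.
Future<void> joinChannelCall({
  required int channelId,
  required String channelName,
  required String displayName,
  String? avatarUrl,
}) async {
  final meet = JitsiMeet();
  final options = JitsiMeetConferenceOptions(
    // One Jitsi room per channel, keyed by the channel id (as in the diff).
    room: 'sn-chat-$channelId',
    // Hard-coded in this commit; the diff leaves a TODO to fetch it from remote config.
    serverURL: 'https://meet.element.io',
    configOverrides: {
      'subject': channelName, // shown as the conference subject
    },
    userInfo: JitsiMeetUserInfo(
      displayName: displayName,
      avatar: avatarUrl, // attachment URL of the user's avatar, if any
    ),
  );
  await meet.join(options); // opens the Jitsi Meet conference UI
}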

View File

@@ -26,7 +26,6 @@ import 'package:styled_widget/styled_widget.dart';
import 'package:surface/firebase_options.dart';
import 'package:surface/logger.dart';
import 'package:surface/providers/channel.dart';
import 'package:surface/providers/chat_call.dart';
import 'package:surface/providers/config.dart';
import 'package:surface/providers/database.dart';
import 'package:surface/providers/keypair.dart';
@@ -198,7 +197,6 @@ class SolianApp extends StatelessWidget {
Provider(create: (ctx) => KeyPairProvider(ctx)),
ChangeNotifierProvider(create: (ctx) => NotificationProvider(ctx)),
ChangeNotifierProvider(create: (ctx) => ChatChannelProvider(ctx)),
ChangeNotifierProvider(create: (ctx) => ChatCallProvider(ctx)),
Provider(create: (ctx) => SnTranslator()),
// Additional helper layer

View File

@@ -1,474 +0,0 @@
import 'dart:async';
import 'package:flutter/material.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:livekit_noise_filter/livekit_noise_filter.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:provider/provider.dart';
import 'package:surface/providers/sn_network.dart';
import 'package:surface/types/chat.dart';
import 'package:wakelock_plus/wakelock_plus.dart';
class ChatCallProvider extends ChangeNotifier {
late final SnNetworkProvider _sn;
ChatCallProvider(BuildContext context) {
_sn = context.read<SnNetworkProvider>();
}
SnChatCall? _current;
SnChannel? _channel;
bool _isReady = false;
bool _isMounted = false;
bool _isInitialized = false;
bool _isBusy = false;
String _lastDuration = '00:00:00';
Timer? _lastDurationUpdateTimer;
String? token;
String? endpoint;
StreamSubscription? hwSubscription;
List<MediaDevice> _audioInputs = [];
List<MediaDevice> _videoInputs = [];
bool _enableAudio = true;
bool _enableVideo = false;
LocalAudioTrack? _audioTrack;
LocalVideoTrack? _videoTrack;
MediaDevice? _videoDevice;
MediaDevice? _audioDevice;
late Room _room;
late EventsListener<RoomEvent> _listener;
List<ParticipantTrack> _participantTracks = [];
ParticipantTrack? _focusTrack;
// Getters for private fields
SnChatCall? get current => _current;
SnChannel? get channel => _channel;
bool get isReady => _isReady;
bool get isMounted => _isMounted;
bool get isInitialized => _isInitialized;
bool get isBusy => _isBusy;
String get lastDuration => _lastDuration;
List<MediaDevice> get audioInputs => _audioInputs;
List<MediaDevice> get videoInputs => _videoInputs;
bool get enableAudio => _enableAudio;
bool get enableVideo => _enableVideo;
LocalAudioTrack? get audioTrack => _audioTrack;
LocalVideoTrack? get videoTrack => _videoTrack;
MediaDevice? get videoDevice => _videoDevice;
MediaDevice? get audioDevice => _audioDevice;
List<ParticipantTrack> get participantTracks => _participantTracks;
ParticipantTrack? get focusTrack => _focusTrack;
Room get room => _room;
void _updateDuration() {
if (_current == null) {
_lastDuration = '00:00:00';
} else {
Duration duration = DateTime.now().difference(_current!.createdAt);
String twoDigits(int n) => n.toString().padLeft(2, '0');
_lastDuration = '${twoDigits(duration.inHours)}:'
'${twoDigits(duration.inMinutes.remainder(60))}:'
'${twoDigits(duration.inSeconds.remainder(60))}';
}
notifyListeners();
}
void enableDurationUpdater() {
_updateDuration();
_lastDurationUpdateTimer = Timer.periodic(
const Duration(seconds: 1),
(_) => _updateDuration(),
);
}
void disableDurationUpdater() {
_lastDurationUpdateTimer?.cancel();
_lastDurationUpdateTimer = null;
}
Future<void> checkPermissions() async {
if (lkPlatformIs(PlatformType.macOS) || lkPlatformIs(PlatformType.linux)) {
return;
}
await Permission.camera.request();
await Permission.microphone.request();
await Permission.bluetooth.request();
await Permission.bluetoothConnect.request();
}
void setCall(SnChatCall call, SnChannel related) {
_current = call;
_channel = related;
notifyListeners();
}
Future<(String, String)> getRoomToken() async {
final resp = await _sn.client.post(
'/cgi/im/channels/${_channel!.keyPath}/calls/ongoing/token',
);
token = resp.data['token'];
endpoint = 'wss://${resp.data['endpoint']}';
return (token!, endpoint!);
}
void initHardware() {
if (_isReady) return;
_isReady = true;
hwSubscription = Hardware.instance.onDeviceChange.stream.listen(
_revertDevices,
);
Hardware.instance.enumerateDevices().then(_revertDevices);
notifyListeners();
}
void initRoom() {
initHardware();
final timeout = const Duration(seconds: 60);
_room = Room(
roomOptions: RoomOptions(
dynacast: true,
adaptiveStream: true,
defaultAudioCaptureOptions: AudioCaptureOptions(
processor: LiveKitNoiseFilter(),
),
defaultAudioPublishOptions: AudioPublishOptions(
name: 'call_voice',
stream: 'call_stream',
),
defaultVideoPublishOptions: VideoPublishOptions(
name: 'call_video',
stream: 'call_stream',
simulcast: true,
backupVideoCodec: BackupVideoCodec(enabled: true),
),
defaultScreenShareCaptureOptions: ScreenShareCaptureOptions(
useiOSBroadcastExtension: true,
params: VideoParametersPresets.screenShareH1080FPS30,
),
defaultCameraCaptureOptions: CameraCaptureOptions(
maxFrameRate: 30,
params: VideoParametersPresets.h1080_169,
),
),
connectOptions: ConnectOptions(
autoSubscribe: true,
timeouts: Timeouts(
connection: timeout,
debounce: timeout,
publish: timeout,
peerConnection: timeout,
iceRestart: timeout,
),
),
);
_listener = _room.createListener();
WakelockPlus.enable();
}
Future<void> joinRoom(String url, String token) async {
if (_isMounted) return;
try {
await _room.connect(
url,
token,
fastConnectOptions: FastConnectOptions(
microphone: TrackOption(track: _audioTrack),
camera: TrackOption(track: _videoTrack),
),
);
} finally {
_isMounted = true;
notifyListeners();
}
}
void setupRoom() {
if (isInitialized) return;
sortParticipants();
_room.addListener(_onRoomDidUpdate);
WidgetsBindingCompatible.instance?.addPostFrameCallback(
(_) => autoPublish(),
);
if (lkPlatformIsMobile()) {
Hardware.instance.setSpeakerphoneOn(true);
}
_isBusy = false;
_isInitialized = true;
notifyListeners();
}
void autoPublish() async {
try {
if (enableVideo) {
await _room.localParticipant?.setCameraEnabled(true);
}
if (enableAudio) {
await _room.localParticipant?.setMicrophoneEnabled(true);
}
} catch (error) {
rethrow;
}
}
Future<void> setEnableAudio(bool value) async {
_enableAudio = value;
if (!_enableAudio) {
await _audioTrack?.stop();
_audioTrack = null;
} else {
await _changeLocalAudioTrack();
}
notifyListeners();
}
Future<void> setEnableVideo(bool value) async {
_enableVideo = value;
if (!_enableVideo) {
await _videoTrack?.stop();
_videoTrack = null;
} else {
await _changeLocalVideoTrack();
}
notifyListeners();
}
void setupRoomListeners({
required Function(DisconnectReason?) onDisconnected,
}) {
_listener
..on<RoomDisconnectedEvent>((event) async {
onDisconnected(event.reason);
})
..on<ParticipantEvent>((event) => sortParticipants())
..on<LocalTrackPublishedEvent>((_) => sortParticipants())
..on<LocalTrackUnpublishedEvent>((_) => sortParticipants())
..on<TrackSubscribedEvent>((_) => sortParticipants())
..on<TrackUnsubscribedEvent>((_) => sortParticipants())
..on<ParticipantNameUpdatedEvent>((event) {
sortParticipants();
});
}
void sortParticipants() {
Map<String, ParticipantTrack> mediaTracks = {};
for (var participant in _room.remoteParticipants.values) {
mediaTracks[participant.sid] = ParticipantTrack(
participant: participant,
videoTrack: null,
isScreenShare: false,
);
for (var t in participant.videoTrackPublications) {
mediaTracks[participant.sid]?.videoTrack = t.track;
mediaTracks[participant.sid]?.isScreenShare = t.isScreenShare;
}
}
final newTracks = List<ParticipantTrack>.empty(growable: true);
final mediaTrackList = mediaTracks.values.toList();
mediaTrackList.sort((a, b) {
// Loudest people first
if (a.participant.isSpeaking && b.participant.isSpeaking) {
if (a.participant.audioLevel > b.participant.audioLevel) {
return -1;
} else {
return 1;
}
}
// Last spoke first
final aSpokeAt = a.participant.lastSpokeAt?.millisecondsSinceEpoch ?? 0;
final bSpokeAt = b.participant.lastSpokeAt?.millisecondsSinceEpoch ?? 0;
if (aSpokeAt != bSpokeAt) {
return aSpokeAt > bSpokeAt ? -1 : 1;
}
// Has video first
if (a.participant.hasVideo != b.participant.hasVideo) {
return a.participant.hasVideo ? -1 : 1;
}
// First joined people first
return a.participant.joinedAt.millisecondsSinceEpoch -
b.participant.joinedAt.millisecondsSinceEpoch;
});
newTracks.addAll(mediaTrackList);
if (_room.localParticipant != null) {
ParticipantTrack localTrack = ParticipantTrack(
participant: _room.localParticipant!,
videoTrack: null,
isScreenShare: false,
);
final localParticipantTracks =
_room.localParticipant?.videoTrackPublications;
if (localParticipantTracks != null) {
for (var t in localParticipantTracks) {
localTrack.videoTrack = t.track;
localTrack.isScreenShare = t.isScreenShare;
}
}
newTracks.add(localTrack);
}
_participantTracks = newTracks;
if (focusTrack != null) {
final idx = participantTracks
.indexWhere((x) => x.participant.sid == _focusTrack!.participant.sid);
if (idx == -1) {
_focusTrack = null;
}
}
if (focusTrack == null) {
_focusTrack = participantTracks.firstOrNull;
} else {
final idx = participantTracks.indexWhere(
(x) => _focusTrack!.participant.sid == x.participant.sid,
);
if (idx > -1) {
_focusTrack = participantTracks[idx];
}
}
notifyListeners();
}
Future<void> _changeLocalAudioTrack() async {
if (_audioTrack != null) {
await _audioTrack!.stop();
_audioTrack = null;
}
if (_audioDevice != null) {
_audioTrack = await LocalAudioTrack.create(
AudioCaptureOptions(deviceId: _audioDevice!.deviceId),
);
await _audioTrack!.start();
}
notifyListeners();
}
Future<void> _changeLocalVideoTrack() async {
if (_videoTrack != null) {
await _videoTrack!.stop();
_videoTrack = null;
}
if (_videoDevice != null) {
_videoTrack = await LocalVideoTrack.createCameraTrack(
CameraCaptureOptions(
deviceId: _videoDevice!.deviceId,
params: VideoParametersPresets.h1080_169,
),
);
await _videoTrack!.start();
}
notifyListeners();
}
void _revertDevices(List<MediaDevice> devices) {
_audioInputs = devices.where((d) => d.kind == 'audioinput').toList();
_videoInputs = devices.where((d) => d.kind == 'videoinput').toList();
notifyListeners();
}
void _onRoomDidUpdate() => sortParticipants();
Future<void> changeLocalAudioTrack() async {
if (audioTrack != null) {
await audioTrack!.stop();
_audioTrack = null;
}
if (audioDevice != null) {
_audioTrack = await LocalAudioTrack.create(
AudioCaptureOptions(
deviceId: audioDevice!.deviceId,
),
);
await audioTrack!.start();
}
}
Future<void> changeLocalVideoTrack() async {
if (videoTrack != null) {
await _videoTrack!.stop();
_videoTrack = null;
}
if (videoDevice != null) {
_videoTrack = await LocalVideoTrack.createCameraTrack(
CameraCaptureOptions(
deviceId: videoDevice!.deviceId,
params: VideoParametersPresets.h1080_169,
),
);
await videoTrack!.start();
}
}
void deactivateHardware() {
hwSubscription?.cancel();
}
void disposeRoom() {
_isBusy = false;
_isMounted = false;
_isInitialized = false;
_current = null;
_channel = null;
_room.removeListener(_onRoomDidUpdate);
_room.disconnect();
_room.dispose();
_listener.dispose();
WakelockPlus.disable();
}
void disposeHardware() {
_isReady = false;
_audioTrack?.stop();
_audioTrack = null;
_videoTrack?.stop();
_videoTrack = null;
}
void setVideoDevice(MediaDevice? value) {
_videoDevice = value;
notifyListeners();
}
void setAudioDevice(MediaDevice? value) {
_audioDevice = value;
notifyListeners();
}
void setFocusTrack(ParticipantTrack? value) {
_focusTrack = value;
notifyListeners();
}
void setIsBusy(bool value) {
_isBusy = value;
notifyListeners();
}
}

View File

@@ -22,7 +22,6 @@ import 'package:surface/screens/album.dart';
import 'package:surface/screens/auth/login.dart';
import 'package:surface/screens/auth/register.dart';
import 'package:surface/screens/chat.dart';
import 'package:surface/screens/chat/call_room.dart';
import 'package:surface/screens/chat/channel_detail.dart';
import 'package:surface/screens/chat/manage.dart';
import 'package:surface/screens/chat/room.dart';
@@ -264,14 +263,6 @@ final _appRoutes = [
extra: state.extra as ChatRoomScreenExtra?,
),
),
GoRoute(
path: '/:scope/:alias/call',
name: 'chatCallRoom',
builder: (context, state) => CallRoomScreen(
scope: state.pathParameters['scope']!,
alias: state.pathParameters['alias']!,
),
),
GoRoute(
path: '/:scope/:alias/detail',
name: 'channelDetail',

View File

@@ -1,289 +0,0 @@
import 'dart:math' as math;
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:gap/gap.dart';
import 'package:livekit_client/livekit_client.dart' as livekit;
import 'package:provider/provider.dart';
import 'package:styled_widget/styled_widget.dart';
import 'package:surface/providers/chat_call.dart';
import 'package:surface/widgets/chat/call/call_controls.dart';
import 'package:surface/widgets/chat/call/call_participant.dart';
import 'package:surface/widgets/navigation/app_scaffold.dart';
class CallRoomScreen extends StatefulWidget {
final String scope;
final String alias;
const CallRoomScreen({super.key, required this.scope, required this.alias});
@override
State<CallRoomScreen> createState() => _CallRoomScreenState();
}
class _CallRoomScreenState extends State<CallRoomScreen> {
int _layoutMode = 0;
void _switchLayout() {
if (_layoutMode < 1) {
setState(() => _layoutMode++);
} else {
setState(() => _layoutMode = 0);
}
}
Widget _buildMeetLayout() {
final call = context.read<ChatCallProvider>();
return Stack(
children: [
Container(
color:
Theme.of(context).colorScheme.surfaceContainer.withOpacity(0.75),
child: call.focusTrack != null
? InteractiveParticipantWidget(
participant: call.focusTrack!,
)
: const SizedBox.shrink(),
),
Positioned(
left: 0,
right: 0,
top: 0,
child: SizedBox(
height: 128,
child: ListView.builder(
scrollDirection: Axis.horizontal,
itemCount: math.max(0, call.participantTracks.length),
itemBuilder: (BuildContext context, int index) {
final track = call.participantTracks[index];
if (track.participant.sid == call.focusTrack?.participant.sid) {
return Container();
}
return SizedBox(
height: 128,
width: 128,
child: InteractiveParticipantWidget(
participant: track,
avatarSize: 32,
onTap: () {
if (track.participant.sid !=
call.focusTrack?.participant.sid) {
call.setFocusTrack(track);
}
},
),
);
},
),
),
),
],
);
}
Widget _buildListLayout() {
final call = context.read<ChatCallProvider>();
return LayoutBuilder(
builder: (context, constraints) {
return ListView.builder(
padding: EdgeInsets.zero,
itemCount: math.max(0, call.participantTracks.length),
itemBuilder: (BuildContext context, int index) {
final track = call.participantTracks[index];
return InteractiveParticipantWidget(
padding: EdgeInsets.symmetric(horizontal: 16, vertical: 8),
isList: true,
avatarSize: 24,
participant: track,
);
},
);
},
);
}
@override
void initState() {
super.initState();
final call = context.read<ChatCallProvider>();
Future.delayed(Duration.zero, () {
call
..setupRoom()
..enableDurationUpdater();
});
}
@override
Widget build(BuildContext context) {
final call = context.read<ChatCallProvider>();
return ListenableBuilder(
listenable: call,
builder: (context, _) {
return AppScaffold(
noBackground: ResponsiveScaffold.getIsExpand(context),
appBar: AppBar(
title: RichText(
textAlign: TextAlign.center,
text: TextSpan(children: [
TextSpan(
text: 'call'.tr(),
style: Theme.of(context).textTheme.titleLarge!.copyWith(
color: Theme.of(context).appBarTheme.foregroundColor,
),
),
const TextSpan(text: '\n'),
TextSpan(
text: call.lastDuration.toString(),
style: Theme.of(context).textTheme.bodySmall!.copyWith(
color: Theme.of(context).appBarTheme.foregroundColor,
),
),
]),
),
),
body: Column(
children: [
SizedBox(
width: MediaQuery.of(context).size.width,
height: 64,
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
crossAxisAlignment: CrossAxisAlignment.center,
children: [
Builder(builder: (context) {
final call = context.read<ChatCallProvider>();
final connectionQuality =
call.room.localParticipant?.connectionQuality ??
livekit.ConnectionQuality.unknown;
return Expanded(
child: Column(
mainAxisSize: MainAxisSize.min,
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
children: [
Text(
call.channel?.name ?? 'unknown'.tr(),
style: const TextStyle(
fontWeight: FontWeight.bold,
),
),
const Gap(6),
Text(call.lastDuration.toString())
],
),
Row(
children: [
Text(
{
livekit.ConnectionState.disconnected:
'callStatusDisconnected'.tr(),
livekit.ConnectionState.connected:
'callStatusConnected'.tr(),
livekit.ConnectionState.connecting:
'callStatusConnecting'.tr(),
livekit.ConnectionState.reconnecting:
'callStatusReconnecting'.tr(),
}[call.room.connectionState]!,
),
const Gap(6),
if (connectionQuality !=
livekit.ConnectionQuality.unknown)
Icon(
{
livekit.ConnectionQuality.excellent:
Icons.signal_cellular_alt,
livekit.ConnectionQuality.good:
Icons.signal_cellular_alt_2_bar,
livekit.ConnectionQuality.poor:
Icons.signal_cellular_alt_1_bar,
}[connectionQuality],
color: {
livekit.ConnectionQuality.excellent:
Colors.green,
livekit.ConnectionQuality.good:
Colors.orange,
livekit.ConnectionQuality.poor:
Colors.red,
}[connectionQuality],
size: 16,
)
else
const SizedBox(
width: 12,
height: 12,
child: CircularProgressIndicator(
color: Colors.white,
strokeWidth: 2,
padding: EdgeInsets.zero,
),
).padding(all: 3),
],
),
],
),
);
}),
Row(
children: [
IconButton(
icon: _layoutMode == 0
? const Icon(Icons.view_list)
: const Icon(Icons.grid_view),
onPressed: () {
_switchLayout();
},
),
],
),
],
).padding(left: 20, right: 16),
),
Expanded(
child: Material(
color: Theme.of(context).colorScheme.surfaceContainerLow,
child: Builder(
builder: (context) {
switch (_layoutMode) {
case 1:
return _buildListLayout();
default:
return _buildMeetLayout();
}
},
),
),
),
if (call.room.localParticipant != null)
SizedBox(
width: MediaQuery.of(context).size.width,
child: ControlsWidget(
call.room,
call.room.localParticipant!,
),
),
Gap(MediaQuery.of(context).padding.bottom),
],
),
);
});
}
@override
void deactivate() {
final call = context.read<ChatCallProvider>();
call.disableDurationUpdater();
super.deactivate();
}
@override
void activate() {
final call = context.read<ChatCallProvider>();
call.enableDurationUpdater();
super.activate();
}
}

View File

@@ -2,18 +2,17 @@ import 'dart:async';
import 'dart:convert';
import 'dart:developer';
import 'package:dio/dio.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:gap/gap.dart';
import 'package:go_router/go_router.dart';
import 'package:jitsi_meet_flutter_sdk/jitsi_meet_flutter_sdk.dart';
import 'package:material_symbols_icons/symbols.dart';
import 'package:provider/provider.dart';
import 'package:styled_widget/styled_widget.dart';
import 'package:surface/controllers/chat_message_controller.dart';
import 'package:surface/controllers/post_write_controller.dart';
import 'package:surface/providers/channel.dart';
import 'package:surface/providers/chat_call.dart';
import 'package:surface/providers/notification.dart';
import 'package:surface/providers/sn_network.dart';
import 'package:surface/providers/user_directory.dart';
@@ -21,7 +20,6 @@ import 'package:surface/providers/userinfo.dart';
import 'package:surface/providers/websocket.dart';
import 'package:surface/types/chat.dart';
import 'package:surface/types/websocket.dart';
import 'package:surface/widgets/chat/call/call_prejoin.dart';
import 'package:surface/widgets/chat/chat_message.dart';
import 'package:surface/widgets/chat/chat_message_input.dart';
import 'package:surface/widgets/chat/chat_typing_indicator.dart';
@@ -51,13 +49,11 @@ class ChatRoomScreen extends StatefulWidget {
class _ChatRoomScreenState extends State<ChatRoomScreen> {
bool _isBusy = false;
bool _isCalling = false;
bool _isJoining = false;
SnChannel? _channel;
SnChannelMember? _currentMember;
SnChannelMember? _otherMember;
SnChatCall? _ongoingCall;
final GlobalKey<ChatMessageInputState> _inputGlobalKey = GlobalKey();
late final ChatMessageController _messageController;
@@ -139,88 +135,25 @@ class _ChatRoomScreenState extends State<ChatRoomScreen> {
}
}
Future<void> _fetchOngoingCall() async {
setState(() => _isCalling = true);
try {
final sn = context.read<SnNetworkProvider>();
final resp = await sn.client.get(
'/cgi/im/channels/${_messageController.channel!.keyPath}/calls/ongoing',
options: Options(
validateStatus: (status) => status != null && status < 500,
receiveTimeout: const Duration(seconds: 60),
sendTimeout: const Duration(seconds: 60),
),
);
if (resp.statusCode == 200) {
_ongoingCall = SnChatCall.fromJson(resp.data);
}
} catch (err) {
if (!mounted) return;
context.showErrorDialog(err);
} finally {
setState(() => _isCalling = false);
}
}
Future<void> _makeCall() async {
setState(() => _isCalling = true);
try {
final sn = context.read<SnNetworkProvider>();
await sn.client.post(
'/cgi/im/channels/${_messageController.channel!.keyPath}/calls',
options: Options(
sendTimeout: const Duration(seconds: 30),
receiveTimeout: const Duration(seconds: 30),
),
);
} catch (err) {
if (!mounted) return;
if (_ongoingCall == null) {
// ignore the error because the call is already ongoing
context.showErrorDialog(err);
}
} finally {
setState(() => _isCalling = false);
}
}
Future<void> _endCall() async {
setState(() => _isCalling = true);
try {
final sn = context.read<SnNetworkProvider>();
await sn.client.delete(
'/cgi/im/channels/${_messageController.channel!.keyPath}/calls/ongoing',
);
} catch (err) {
if (!mounted) return;
context.showErrorDialog(err);
} finally {
setState(() => _isCalling = false);
}
}
Future<void> _onCallJoin() async {
await showModalBottomSheet(
context: context,
builder: (context) => ChatCallPrejoinPopup(
ongoingCall: _ongoingCall!,
channel: _channel!,
onJoin: _onCallResume,
final sn = context.read<SnNetworkProvider>();
final ua = context.read<UserProvider>();
final meet = JitsiMeet();
final confOpts = JitsiMeetConferenceOptions(
room: 'sn-chat-${_channel!.id}',
serverURL:
'https://meet.element.io', // TODO fetch this as config from remote
configOverrides: {
"subject": _channel!.name,
},
userInfo: JitsiMeetUserInfo(
avatar: ua.user!.avatar.isNotEmpty
? sn.getAttachmentUrl(ua.user!.avatar)
: null,
displayName: _currentMember!.nick ?? ua.user!.nick,
),
);
}
void _onCallResume() {
GoRouter.of(context).pushNamed(
'chatCallRoom',
pathParameters: {
'scope': _channel!.realm?.alias ?? 'global',
'alias': _channel!.alias,
},
);
meet.join(confOpts);
}
bool _checkMessageMergeable(SnChatMessage? a, SnChatMessage? b) {
@@ -248,10 +181,7 @@ class _ChatRoomScreenState extends State<ChatRoomScreen> {
});
}
await Future.wait([
_messageController.checkUpdate(),
_fetchOngoingCall(),
]);
await _messageController.checkUpdate();
});
}
@@ -260,23 +190,6 @@ class _ChatRoomScreenState extends State<ChatRoomScreen> {
super.initState();
_messageController = ChatMessageController(context);
_initializeChat();
_wsSubscription = _ws.pk.stream.listen((event) {
switch (event.method) {
case 'calls.new':
final payload = SnChatCall.fromJson(event.payload!);
if (payload.channelId == _channel?.id) {
setState(() => _ongoingCall = payload);
}
break;
case 'calls.end':
final payload = SnChatCall.fromJson(event.payload!);
if (payload.channelId == _channel?.id) {
setState(() => _ongoingCall = null);
}
break;
}
});
}
@override
@@ -300,7 +213,6 @@ class _ChatRoomScreenState extends State<ChatRoomScreen> {
@override
Widget build(BuildContext context) {
final call = context.watch<ChatCallProvider>();
final ud = context.read<UserDirectoryProvider>();
return AppScaffold(
@@ -324,14 +236,8 @@ class _ChatRoomScreenState extends State<ChatRoomScreen> {
),
if (_currentMember != null)
IconButton(
icon: _ongoingCall == null
? const Icon(Symbols.call)
: const Icon(Symbols.call_end),
onPressed: _isCalling
? null
: _ongoingCall == null
? _makeCall
: _endCall,
icon: const Icon(Symbols.video_call),
onPressed: _onCallJoin,
),
IconButton(
icon: const Icon(Symbols.more_vert),
@@ -359,28 +265,6 @@ class _ChatRoomScreenState extends State<ChatRoomScreen> {
LoadingIndicator(
isActive: _isBusy || _messageController.isAggressiveLoading,
),
SingleChildScrollView(
physics: const NeverScrollableScrollPhysics(),
child: MaterialBanner(
dividerColor: Colors.transparent,
leading: const Icon(Symbols.call_received),
content: Text('callOngoingNotice').tr().padding(top: 2),
actions: [
if (call.current == null)
TextButton(
onPressed: _onCallJoin,
child: Text('callJoin').tr(),
)
else if (call.current?.channelId == _channel?.id)
TextButton(
onPressed: _onCallResume,
child: Text('callResume').tr(),
)
],
),
).height(_ongoingCall != null ? 54 : 0, animate: true).animate(
const Duration(milliseconds: 300),
Curves.fastLinearToSlowEaseIn),
if (_currentMember == null && !_isBusy)
Expanded(
child: Center(

View File

@@ -1,5 +1,4 @@
import 'package:freezed_annotation/freezed_annotation.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:surface/types/account.dart';
import 'package:surface/types/attachment.dart';
import 'package:surface/types/realm.dart';
@@ -116,24 +115,3 @@ abstract class SnChatCall with _$SnChatCall {
factory SnChatCall.fromJson(Map<String, dynamic> json) =>
_$SnChatCallFromJson(json);
}
// Call stuff
enum ParticipantStatsType {
unknown,
localAudioSender,
localVideoSender,
remoteAudioReceiver,
remoteVideoReceiver,
}
class ParticipantTrack {
ParticipantTrack(
{required this.participant,
required this.videoTrack,
required this.isScreenShare});
VideoTrack? videoTrack;
Participant participant;
bool isScreenShare;
}

View File

@@ -1,369 +0,0 @@
import 'dart:async';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:material_symbols_icons/symbols.dart';
import 'package:provider/provider.dart';
import 'package:surface/providers/chat_call.dart';
import 'package:surface/widgets/dialog.dart';
class ControlsWidget extends StatefulWidget {
final Room room;
final LocalParticipant participant;
const ControlsWidget(
this.room,
this.participant, {
super.key,
});
@override
State<StatefulWidget> createState() => _ControlsWidgetState();
}
class _ControlsWidgetState extends State<ControlsWidget> {
CameraPosition _position = CameraPosition.front;
List<MediaDevice>? _audioInputs;
List<MediaDevice>? _audioOutputs;
List<MediaDevice>? _videoInputs;
StreamSubscription? _subscription;
bool _speakerphoneOn = false;
@override
void initState() {
super.initState();
_participant.addListener(onChange);
_subscription = Hardware.instance.onDeviceChange.stream
.listen((List<MediaDevice> devices) {
_revertDevices(devices);
});
Hardware.instance.enumerateDevices().then(_revertDevices);
_speakerphoneOn = Hardware.instance.speakerOn ?? false;
}
@override
void dispose() {
_subscription?.cancel();
_participant.removeListener(onChange);
super.dispose();
}
LocalParticipant get _participant => widget.participant;
void _revertDevices(List<MediaDevice> devices) async {
_audioInputs = devices.where((d) => d.kind == 'audioinput').toList();
_audioOutputs = devices.where((d) => d.kind == 'audiooutput').toList();
_videoInputs = devices.where((d) => d.kind == 'videoinput').toList();
setState(() {});
}
void onChange() => setState(() {});
bool get isMuted => _participant.isMuted;
Future<bool?> showDisconnectDialog() {
return showDialog<bool>(
context: context,
builder: (ctx) => AlertDialog(
title: Text('callDisconnect').tr(),
content: Text('callDisconnectDescription').tr(),
actions: [
TextButton(
onPressed: () => Navigator.pop(ctx, false),
child: Text('cancel').tr(),
),
TextButton(
onPressed: () => Navigator.pop(ctx, true),
child: Text('dialogConfirm').tr(),
),
],
),
);
}
void _disconnect() async {
if (await showDisconnectDialog() != true) return;
if (!mounted) return;
final call = context.read<ChatCallProvider>();
if (call.current != null) {
call.disposeRoom();
if (Navigator.canPop(context)) {
Navigator.pop(context);
}
}
}
void _disableAudio() async {
await _participant.setMicrophoneEnabled(false);
}
void _enableAudio() async {
await _participant.setMicrophoneEnabled(true);
}
void _disableVideo() async {
await _participant.setCameraEnabled(false);
}
void _enableVideo() async {
await _participant.setCameraEnabled(true);
}
void _selectAudioOutput(MediaDevice device) async {
await widget.room.setAudioOutputDevice(device);
setState(() {});
}
void _selectAudioInput(MediaDevice device) async {
await widget.room.setAudioInputDevice(device);
setState(() {});
}
void _selectVideoInput(MediaDevice device) async {
await widget.room.setVideoInputDevice(device);
setState(() {});
}
void _toggleSpeakerphoneOn() {
_speakerphoneOn = !_speakerphoneOn;
Hardware.instance.setSpeakerphoneOn(_speakerphoneOn);
setState(() {});
}
void _toggleCamera() async {
final track = _participant.videoTrackPublications.firstOrNull?.track;
if (track == null) return;
try {
final newPosition = _position.switched();
await track.setCameraPosition(newPosition);
setState(() {
_position = newPosition;
});
} catch (error) {
return;
}
}
void _enableScreenShare() async {
if (lkPlatformIsDesktop()) {
try {
final source = await showDialog<DesktopCapturerSource>(
context: context,
builder: (context) => ScreenSelectDialog(),
);
if (source == null) {
return;
}
var track = await LocalVideoTrack.createScreenShareTrack(
ScreenShareCaptureOptions(
captureScreenAudio: true,
sourceId: source.id,
maxFrameRate: 30.0,
),
);
await _participant.publishVideoTrack(track);
} catch (err) {
if (!mounted) return;
context.showErrorDialog(err);
}
return;
}
if (lkPlatformIs(PlatformType.iOS)) {
var track = await LocalVideoTrack.createScreenShareTrack(
const ScreenShareCaptureOptions(
useiOSBroadcastExtension: true,
captureScreenAudio: true,
maxFrameRate: 30.0,
),
);
await _participant.publishVideoTrack(track);
return;
}
if (lkPlatformIsWebMobile()) {
ScaffoldMessenger.of(context).showSnackBar(const SnackBar(
content: Text('Screen share is not supported mobile platform.'),
));
return;
}
await _participant.setScreenShareEnabled(true, captureScreenAudio: true);
}
void _disableScreenShare() async {
await _participant.setScreenShareEnabled(false);
}
@override
Widget build(BuildContext context) {
return Padding(
padding: const EdgeInsets.symmetric(
vertical: 10,
),
child: Wrap(
alignment: WrapAlignment.center,
spacing: 5,
runSpacing: 5,
children: [
IconButton(
icon: const Icon(Symbols.exit_to_app),
color: Theme.of(context).colorScheme.onSurface,
onPressed: _disconnect,
),
if (_participant.isMicrophoneEnabled())
if (lkPlatformIs(PlatformType.android))
IconButton(
onPressed: _disableAudio,
icon: const Icon(Symbols.mic),
color: Theme.of(context).colorScheme.onSurface,
tooltip: 'callMicrophoneOff'.tr(),
)
else
PopupMenuButton<MediaDevice>(
icon: const Icon(Symbols.settings_voice),
itemBuilder: (BuildContext context) {
return [
PopupMenuItem<MediaDevice>(
value: null,
onTap: isMuted ? _enableAudio : _disableAudio,
child: ListTile(
leading: const Icon(Symbols.mic_off),
title: Text(isMuted
? 'callMicrophoneOn'.tr()
: 'callMicrophoneOff'.tr()),
),
),
if (_audioInputs != null)
..._audioInputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
widget.room.selectedAudioInputDeviceId)
? const Icon(Symbols.check_box)
: const Icon(Symbols.check_box_outline_blank),
title: Text(device.label),
),
onTap: () => _selectAudioInput(device),
);
})
];
},
)
else
IconButton(
onPressed: _enableAudio,
icon: const Icon(Symbols.mic_off),
color: Theme.of(context).colorScheme.onSurface,
tooltip: 'callMicrophoneOn'.tr(),
),
if (_participant.isCameraEnabled())
PopupMenuButton<MediaDevice>(
icon: const Icon(Symbols.videocam_sharp),
itemBuilder: (BuildContext context) {
return [
PopupMenuItem<MediaDevice>(
value: null,
onTap: _disableVideo,
child: ListTile(
leading: const Icon(Symbols.videocam_off),
title: Text('callCameraOff'.tr()),
),
),
if (_videoInputs != null)
..._videoInputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
widget.room.selectedVideoInputDeviceId)
? const Icon(Symbols.check_box)
: const Icon(Symbols.check_box_outline_blank),
title: Text(device.label),
),
onTap: () => _selectVideoInput(device),
);
})
];
},
)
else
IconButton(
onPressed: _enableVideo,
icon: const Icon(Symbols.videocam_off),
color: Theme.of(context).colorScheme.onSurface,
tooltip: 'callCameraOn'.tr(),
),
IconButton(
icon: Icon(_position == CameraPosition.back
? Symbols.video_camera_back
: Symbols.video_camera_front),
color: Theme.of(context).colorScheme.onSurface,
onPressed: () => _toggleCamera(),
tooltip: 'callVideoFlip'.tr(),
),
if (!lkPlatformIs(PlatformType.iOS))
PopupMenuButton<MediaDevice>(
icon: const Icon(Symbols.volume_up),
itemBuilder: (BuildContext context) {
return [
PopupMenuItem<MediaDevice>(
value: null,
child: ListTile(
leading: const Icon(Symbols.speaker),
title: Text('callSpeakerSelect').tr(),
),
),
if (_audioOutputs != null)
..._audioOutputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
widget.room.selectedAudioOutputDeviceId)
? const Icon(Symbols.check_box)
: const Icon(Symbols.check_box_outline_blank),
title: Text(device.label),
),
onTap: () => _selectAudioOutput(device),
);
})
];
},
),
if (!kIsWeb && Hardware.instance.canSwitchSpeakerphone)
IconButton(
onPressed: _toggleSpeakerphoneOn,
color: Theme.of(context).colorScheme.onSurface,
icon: _speakerphoneOn
? Icon(Symbols.volume_up)
: Icon(Symbols.volume_down),
tooltip: 'callSpeakerphoneToggle'.tr(),
),
if (_participant.isScreenShareEnabled())
IconButton(
icon: const Icon(Symbols.stop_screen_share),
color: Theme.of(context).colorScheme.onSurface,
onPressed: () => _disableScreenShare(),
tooltip: 'callScreenOff'.tr(),
)
else
IconButton(
icon: const Icon(Symbols.screen_share),
color: Theme.of(context).colorScheme.onSurface,
onPressed: () => _enableScreenShare(),
tooltip: 'callScreenOn'.tr(),
),
],
),
);
}
}

View File

@@ -1,86 +0,0 @@
import 'dart:math' as math;
import 'package:flutter/material.dart';
import 'package:flutter_animate/flutter_animate.dart';
import 'package:surface/types/account.dart';
import 'package:surface/widgets/account/account_image.dart';
class NoContentWidget extends StatefulWidget {
final SnAccount? userinfo;
final bool isSpeaking;
final double? avatarSize;
const NoContentWidget({
super.key,
this.userinfo,
this.avatarSize,
required this.isSpeaking,
});
@override
State<NoContentWidget> createState() => _NoContentWidgetState();
}
class _NoContentWidgetState extends State<NoContentWidget>
with SingleTickerProviderStateMixin {
late final AnimationController _animationController;
@override
void initState() {
super.initState();
_animationController = AnimationController(vsync: this);
}
@override
void didUpdateWidget(NoContentWidget old) {
super.didUpdateWidget(old);
if (widget.isSpeaking) {
_animationController.repeat(reverse: true);
} else {
_animationController
.animateTo(0, duration: 300.ms)
.then((_) => _animationController.reset());
}
}
@override
Widget build(BuildContext context) {
final double radius = widget.avatarSize ??
math.min(
MediaQuery.of(context).size.width * 0.1,
MediaQuery.of(context).size.height * 0.1,
);
return Animate(
autoPlay: false,
controller: _animationController,
effects: [
CustomEffect(
begin: widget.isSpeaking ? 2 : 0,
end: 8,
curve: Curves.easeInOut,
duration: 1250.ms,
builder: (context, value, child) => Container(
decoration: BoxDecoration(
borderRadius: BorderRadius.all(Radius.circular(radius + 8)),
border: value > 0
? Border.all(color: Colors.green, width: value)
: null,
),
child: child,
),
)
],
child: AccountImage(
content: widget.userinfo?.avatar,
radius: radius,
),
);
}
@override
void dispose() {
_animationController.dispose();
super.dispose();
}
}

View File

@@ -1,337 +0,0 @@
import 'dart:convert';
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:gap/gap.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:styled_widget/styled_widget.dart';
import 'package:surface/types/account.dart';
import 'package:surface/types/chat.dart';
import 'package:surface/widgets/chat/call/call_no_content.dart';
import 'package:surface/widgets/chat/call/call_participant_info.dart';
import 'package:surface/widgets/chat/call/call_participant_menu.dart';
import 'package:surface/widgets/chat/call/call_participant_stats.dart';
abstract class ParticipantWidget extends StatefulWidget {
static ParticipantWidget widgetFor(
ParticipantTrack participantTrack, {
double? avatarSize,
EdgeInsets? padding,
bool showStatsLayer = false,
bool isList = false,
}) {
if (participantTrack.participant is LocalParticipant) {
return LocalParticipantWidget(
participantTrack.participant as LocalParticipant,
participantTrack.videoTrack,
avatarSize,
participantTrack.isScreenShare,
showStatsLayer,
isList,
padding,
);
} else if (participantTrack.participant is RemoteParticipant) {
return RemoteParticipantWidget(
participantTrack.participant as RemoteParticipant,
participantTrack.videoTrack,
avatarSize,
participantTrack.isScreenShare,
showStatsLayer,
isList,
padding,
);
}
throw UnimplementedError('Unknown participant type');
}
abstract final Participant participant;
abstract final VideoTrack? videoTrack;
abstract final bool isScreenShare;
abstract final double? avatarSize;
abstract final bool showStatsLayer;
abstract final bool isList;
abstract final EdgeInsets? padding;
final VideoQuality quality;
const ParticipantWidget({
super.key,
this.quality = VideoQuality.MEDIUM,
});
}
class LocalParticipantWidget extends ParticipantWidget {
@override
final LocalParticipant participant;
@override
final VideoTrack? videoTrack;
@override
final double? avatarSize;
@override
final bool isScreenShare;
@override
final bool showStatsLayer;
@override
final bool isList;
@override
final EdgeInsets? padding;
const LocalParticipantWidget(
this.participant,
this.videoTrack,
this.avatarSize,
this.isScreenShare,
this.showStatsLayer,
this.isList,
this.padding, {
super.key,
});
@override
State<StatefulWidget> createState() => _LocalParticipantWidgetState();
}
class RemoteParticipantWidget extends ParticipantWidget {
@override
final RemoteParticipant participant;
@override
final VideoTrack? videoTrack;
@override
final double? avatarSize;
@override
final bool isScreenShare;
@override
final bool showStatsLayer;
@override
final bool isList;
@override
final EdgeInsets? padding;
const RemoteParticipantWidget(
this.participant,
this.videoTrack,
this.avatarSize,
this.isScreenShare,
this.showStatsLayer,
this.isList,
this.padding, {
super.key,
});
@override
State<StatefulWidget> createState() => _RemoteParticipantWidgetState();
}
abstract class _ParticipantWidgetState<T extends ParticipantWidget>
extends State<T> {
VideoTrack? get _activeVideoTrack;
TrackPublication? get _firstAudioPublication;
SnAccount? _userinfoMetadata;
@override
void initState() {
super.initState();
widget.participant.addListener(onParticipantChanged);
onParticipantChanged();
}
@override
void dispose() {
widget.participant.removeListener(onParticipantChanged);
super.dispose();
}
@override
void didUpdateWidget(covariant T oldWidget) {
oldWidget.participant.removeListener(onParticipantChanged);
widget.participant.addListener(onParticipantChanged);
onParticipantChanged();
super.didUpdateWidget(oldWidget);
}
void onParticipantChanged() {
setState(() {
if (widget.participant.metadata != null) {
_userinfoMetadata = SnAccount.fromJson(
jsonDecode(widget.participant.metadata!),
);
}
});
}
@override
Widget build(BuildContext context) {
if (widget.isList) {
return Padding(
padding: widget.padding ?? EdgeInsets.zero,
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
children: [
SizedBox(
width: (widget.avatarSize ?? 32) * 2,
height: (widget.avatarSize ?? 32) * 2,
child: Center(
child: NoContentWidget(
userinfo: _userinfoMetadata,
avatarSize: widget.avatarSize,
isSpeaking: widget.participant.isSpeaking,
),
),
),
const Gap(8),
Expanded(
child: SizedBox(
height: (widget.avatarSize ?? 32) * 2,
child: ParticipantInfoWidget(
isList: true,
title: widget.participant.name.isNotEmpty
? widget.participant.name
: widget.participant.identity,
audioAvailable: _firstAudioPublication?.muted == false &&
_firstAudioPublication?.subscribed == true,
connectionQuality: widget.participant.connectionQuality,
isScreenShare: widget.isScreenShare,
),
),
),
],
),
if (_activeVideoTrack != null && !_activeVideoTrack!.muted)
ClipRRect(
borderRadius: const BorderRadius.all(Radius.circular(8)),
child: AspectRatio(
aspectRatio: 16 / 9,
child: Material(
borderRadius: const BorderRadius.all(Radius.circular(8)),
color: Theme.of(context)
.colorScheme
.surfaceContainer
.withOpacity(0.75),
child: VideoTrackRenderer(
_activeVideoTrack!,
fit: RTCVideoViewObjectFit.RTCVideoViewObjectFitContain,
),
),
).padding(top: 8),
),
],
),
);
}
return Stack(
children: [
if (_activeVideoTrack != null && !_activeVideoTrack!.muted)
VideoTrackRenderer(
_activeVideoTrack!,
fit: RTCVideoViewObjectFit.RTCVideoViewObjectFitContain,
)
else
Center(
child: NoContentWidget(
userinfo: _userinfoMetadata,
avatarSize: widget.avatarSize,
isSpeaking: widget.participant.isSpeaking,
),
),
if (widget.showStatsLayer)
Positioned(
top: 30,
right: 30,
child: ParticipantStatsWidget(participant: widget.participant),
),
Align(
alignment: Alignment.bottomCenter,
child: Column(
crossAxisAlignment: CrossAxisAlignment.stretch,
mainAxisSize: MainAxisSize.min,
children: [
ParticipantInfoWidget(
title: widget.participant.name.isNotEmpty
? widget.participant.name
: widget.participant.identity,
audioAvailable: _firstAudioPublication?.muted == false &&
_firstAudioPublication?.subscribed == true,
connectionQuality: widget.participant.connectionQuality,
isScreenShare: widget.isScreenShare,
),
],
),
),
],
);
}
}
class _LocalParticipantWidgetState
extends _ParticipantWidgetState<LocalParticipantWidget> {
@override
LocalTrackPublication<LocalAudioTrack>? get _firstAudioPublication =>
widget.participant.audioTrackPublications.firstOrNull;
@override
VideoTrack? get _activeVideoTrack => widget.videoTrack;
}
class _RemoteParticipantWidgetState
extends _ParticipantWidgetState<RemoteParticipantWidget> {
@override
RemoteTrackPublication<RemoteAudioTrack>? get _firstAudioPublication =>
widget.participant.audioTrackPublications.firstOrNull;
@override
VideoTrack? get _activeVideoTrack => widget.videoTrack;
}
class InteractiveParticipantWidget extends StatelessWidget {
final double? avatarSize;
final bool isList;
final ParticipantTrack participant;
final Function? onTap;
final EdgeInsets? padding;
const InteractiveParticipantWidget({
super.key,
this.avatarSize,
this.isList = false,
this.padding,
required this.participant,
this.onTap,
});
@override
Widget build(BuildContext context) {
return Material(
color: Colors.transparent,
child: InkWell(
onTap: onTap != null
? () {
onTap?.call();
}
: null,
onLongPress: () {
if (participant.participant is LocalParticipant) return;
showModalBottomSheet(
context: context,
builder: (context) => ParticipantMenu(
participant: participant.participant as RemoteParticipant,
videoTrack: participant.videoTrack,
isScreenShare: participant.isScreenShare,
),
);
},
child: Container(
child: ParticipantWidget.widgetFor(
participant,
avatarSize: avatarSize,
isList: isList,
padding: padding,
),
),
),
);
}
}

View File

@@ -1,140 +0,0 @@
import 'package:flutter/material.dart';
import 'package:gap/gap.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:material_symbols_icons/symbols.dart';
import 'package:styled_widget/styled_widget.dart';
class ParticipantInfoWidget extends StatelessWidget {
final String? title;
final bool audioAvailable;
final ConnectionQuality connectionQuality;
final bool isScreenShare;
final bool isList;
const ParticipantInfoWidget({
super.key,
this.title,
this.audioAvailable = true,
this.connectionQuality = ConnectionQuality.unknown,
this.isScreenShare = false,
this.isList = false,
});
@override
Widget build(BuildContext context) {
if (isList) {
return Column(
mainAxisAlignment: MainAxisAlignment.center,
crossAxisAlignment: CrossAxisAlignment.start,
children: [
if (title != null)
Text(
title!,
overflow: TextOverflow.ellipsis,
style: const TextStyle(
color: Colors.white,
fontWeight: FontWeight.bold,
),
).padding(left: 2),
Row(
children: [
isScreenShare
? const Icon(
Symbols.monitor,
color: Colors.white,
size: 16,
)
: Icon(
audioAvailable ? Symbols.mic : Symbols.mic_off,
color: audioAvailable ? Colors.white : Colors.red,
size: 16,
),
const Gap(3),
if (connectionQuality != ConnectionQuality.unknown)
Icon(
{
ConnectionQuality.excellent: Symbols.signal_cellular_alt,
ConnectionQuality.good: Symbols.signal_cellular_alt_2_bar,
ConnectionQuality.poor: Symbols.signal_cellular_alt_1_bar,
}[connectionQuality],
color: {
ConnectionQuality.excellent: Colors.green,
ConnectionQuality.good: Colors.orange,
ConnectionQuality.poor: Colors.red,
}[connectionQuality],
size: 16,
)
else
const SizedBox(
width: 12,
height: 12,
child: CircularProgressIndicator(
color: Colors.white,
strokeWidth: 2,
),
).padding(all: 3),
],
)
],
);
}
return Container(
color: Theme.of(context).colorScheme.onSurface.withOpacity(0.75),
padding: const EdgeInsets.symmetric(
vertical: 7,
horizontal: 10,
),
child: Row(
mainAxisAlignment: MainAxisAlignment.end,
crossAxisAlignment: CrossAxisAlignment.center,
children: [
if (title != null)
Flexible(
child: Text(
title!,
overflow: TextOverflow.ellipsis,
style: const TextStyle(color: Colors.white),
),
),
const Gap(5),
isScreenShare
? const Icon(
Symbols.monitor,
color: Colors.white,
size: 16,
)
: Icon(
audioAvailable ? Symbols.mic : Symbols.mic_off,
color: audioAvailable ? Colors.white : Colors.red,
size: 16,
),
const Gap(3),
if (connectionQuality != ConnectionQuality.unknown)
Icon(
{
ConnectionQuality.excellent: Symbols.signal_cellular_alt,
ConnectionQuality.good: Symbols.signal_cellular_alt_2_bar,
ConnectionQuality.poor: Symbols.signal_cellular_alt_1_bar,
}[connectionQuality],
color: {
ConnectionQuality.excellent: Colors.green,
ConnectionQuality.good: Colors.orange,
ConnectionQuality.poor: Colors.red,
}[connectionQuality],
size: 16,
)
else
const SizedBox(
width: 12,
height: 12,
child: CircularProgressIndicator(
color: Colors.white,
strokeWidth: 2,
),
).padding(all: 3),
],
),
);
}
}

View File

@@ -1,161 +0,0 @@
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:material_symbols_icons/symbols.dart';
class ParticipantMenu extends StatefulWidget {
final RemoteParticipant participant;
final VideoTrack? videoTrack;
final bool isScreenShare;
final bool showStatsLayer;
const ParticipantMenu({
super.key,
required this.participant,
this.videoTrack,
this.isScreenShare = false,
this.showStatsLayer = false,
});
@override
State<ParticipantMenu> createState() => _ParticipantMenuState();
}
class _ParticipantMenuState extends State<ParticipantMenu> {
RemoteTrackPublication<RemoteVideoTrack>? get _videoPublication =>
widget.participant.videoTrackPublications
.where((element) => element.sid == widget.videoTrack?.sid)
.firstOrNull;
RemoteTrackPublication<RemoteAudioTrack>? get _firstAudioPublication =>
widget.participant.audioTrackPublications.firstOrNull;
void tookAction() {
if (Navigator.canPop(context)) {
Navigator.pop(context);
}
}
@override
Widget build(BuildContext context) {
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Container(
padding:
const EdgeInsets.only(left: 8, right: 8, top: 20, bottom: 12),
child: Padding(
padding: const EdgeInsets.symmetric(
horizontal: 8,
vertical: 12,
),
child: Text(
'callParticipantAction',
style: Theme.of(context).textTheme.headlineSmall,
).tr(),
),
),
Expanded(
child: ListView(
children: [
if (_firstAudioPublication != null && !widget.isScreenShare)
ListTile(
leading: Icon(
Symbols.volume_up,
color: {
TrackSubscriptionState.notAllowed:
Theme.of(context).colorScheme.error,
TrackSubscriptionState.unsubscribed: Theme.of(context)
.colorScheme
.onSurface
.withOpacity(0.6),
TrackSubscriptionState.subscribed:
Theme.of(context).colorScheme.primary,
}[_firstAudioPublication!.subscriptionState],
),
title: Text(
_firstAudioPublication!.subscribed
? 'callParticipantMicrophoneOff'.tr()
: 'callParticipantMicrophoneOn'.tr(),
),
onTap: () {
if (_firstAudioPublication!.subscribed) {
_firstAudioPublication!.unsubscribe();
} else {
_firstAudioPublication!.subscribe();
}
tookAction();
},
),
if (_videoPublication != null)
ListTile(
leading: Icon(
widget.isScreenShare ? Symbols.monitor : Symbols.videocam,
color: {
TrackSubscriptionState.notAllowed:
Theme.of(context).colorScheme.error,
TrackSubscriptionState.unsubscribed: Theme.of(context)
.colorScheme
.onSurface
.withOpacity(0.6),
TrackSubscriptionState.subscribed:
Theme.of(context).colorScheme.primary,
}[_videoPublication!.subscriptionState],
),
title: Text(
_videoPublication!.subscribed
? 'callParticipantVideoOff'.tr()
: 'callParticipantVideoOn'.tr(),
),
onTap: () {
if (_videoPublication!.subscribed) {
_videoPublication!.unsubscribe();
} else {
_videoPublication!.subscribe();
}
tookAction();
},
),
if (_videoPublication != null) const Divider(thickness: 0.3),
if (_videoPublication != null)
...[30, 15, 8].map(
(x) => ListTile(
leading: Icon(
_videoPublication?.fps == x
? Symbols.check_box
: Symbols.check_box_outline_blank,
),
title: Text('Set preferred frame-per-second to $x'),
onTap: () {
_videoPublication!.setVideoFPS(x);
tookAction();
},
),
),
if (_videoPublication != null) const Divider(thickness: 0.3),
if (_videoPublication != null)
...[
('High', VideoQuality.HIGH),
('Medium', VideoQuality.MEDIUM),
('Low', VideoQuality.LOW),
].map(
(x) => ListTile(
leading: Icon(
_videoPublication?.videoQuality == x.$2
? Symbols.check_box
: Symbols.check_box_outline_blank,
),
title: Text('Set preferred quality to ${x.$1}'),
onTap: () {
_videoPublication!.setVideoQuality(x.$2);
tookAction();
},
),
),
],
),
),
],
);
}
}

View File

@@ -1,133 +0,0 @@
import 'package:flutter/material.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:surface/types/chat.dart';
class ParticipantStatsWidget extends StatefulWidget {
const ParticipantStatsWidget({super.key, required this.participant});
final Participant participant;
@override
State<StatefulWidget> createState() => _ParticipantStatsWidgetState();
}
class _ParticipantStatsWidgetState extends State<ParticipantStatsWidget> {
List<EventsListener<TrackEvent>> listeners = [];
ParticipantStatsType statsType = ParticipantStatsType.unknown;
Map<String, String> stats = {};
void _setUpListener(Track track) {
var listener = track.createListener();
listeners.add(listener);
if (track is LocalVideoTrack) {
statsType = ParticipantStatsType.localVideoSender;
listener.on<VideoSenderStatsEvent>((event) {
setState(() {
stats['video tx'] = 'total sent ${event.currentBitrate.toInt()} kpbs';
event.stats.forEach((key, value) {
stats['layer-$key'] =
'${value.frameWidth ?? 0}x${value.frameHeight ?? 0} ${value.framesPerSecond?.toDouble() ?? 0} fps, ${event.bitrateForLayers[key] ?? 0} kbps';
});
var firstStats =
event.stats['f'] ?? event.stats['h'] ?? event.stats['q'];
if (firstStats != null) {
stats['encoder'] = firstStats.encoderImplementation ?? '';
stats['video codec'] =
'${firstStats.mimeType}, ${firstStats.clockRate}hz, pt: ${firstStats.payloadType}';
stats['qualityLimitationReason'] =
firstStats.qualityLimitationReason ?? '';
}
});
});
} else if (track is RemoteVideoTrack) {
statsType = ParticipantStatsType.remoteVideoReceiver;
listener.on<VideoReceiverStatsEvent>((event) {
setState(() {
stats['video rx'] = '${event.currentBitrate.toInt()} kpbs';
stats['video codec'] =
'${event.stats.mimeType}, ${event.stats.clockRate}hz, pt: ${event.stats.payloadType}';
stats['video size'] =
'${event.stats.frameWidth}x${event.stats.frameHeight} ${event.stats.framesPerSecond?.toDouble()}fps';
stats['video jitter'] = '${event.stats.jitter} s';
stats['video decoder'] = '${event.stats.decoderImplementation}';
stats['video packets lost'] = '${event.stats.packetsLost}';
stats['video packets received'] = '${event.stats.packetsReceived}';
stats['video frames received'] = '${event.stats.framesReceived}';
stats['video frames decoded'] = '${event.stats.framesDecoded}';
stats['video frames dropped'] = '${event.stats.framesDropped}';
});
});
} else if (track is LocalAudioTrack) {
statsType = ParticipantStatsType.localAudioSender;
listener.on<AudioSenderStatsEvent>((event) {
setState(() {
stats['audio tx'] = '${event.currentBitrate.toInt()} kpbs';
stats['audio codec'] =
'${event.stats.mimeType}, ${event.stats.clockRate}hz, ${event.stats.channels}ch, pt: ${event.stats.payloadType}';
});
});
} else if (track is RemoteAudioTrack) {
statsType = ParticipantStatsType.remoteAudioReceiver;
listener.on<AudioReceiverStatsEvent>((event) {
setState(() {
stats['audio rx'] = '${event.currentBitrate.toInt()} kpbs';
stats['audio codec'] =
'${event.stats.mimeType}, ${event.stats.clockRate}hz, ${event.stats.channels}ch, pt: ${event.stats.payloadType}';
stats['audio jitter'] = '${event.stats.jitter} s';
stats['audio concealed samples'] =
'${event.stats.concealedSamples} / ${event.stats.concealmentEvents}';
stats['audio packets lost'] = '${event.stats.packetsLost}';
stats['audio packets received'] = '${event.stats.packetsReceived}';
});
});
}
}
onParticipantChanged() {
for (var element in listeners) {
element.dispose();
}
listeners.clear();
for (var track in [
...widget.participant.videoTrackPublications,
...widget.participant.audioTrackPublications
]) {
if (track.track != null) {
_setUpListener(track.track!);
}
}
}
@override
void initState() {
super.initState();
widget.participant.addListener(onParticipantChanged);
onParticipantChanged();
}
@override
void deactivate() {
for (var element in listeners) {
element.dispose();
}
widget.participant.removeListener(onParticipantChanged);
super.deactivate();
}
num sendBitrate = 0;
@override
Widget build(BuildContext context) {
return Container(
color: Theme.of(context).colorScheme.onSurface.withOpacity(0.75),
padding: const EdgeInsets.symmetric(
vertical: 8,
horizontal: 8,
),
child: Column(
children:
stats.entries.map((e) => Text('${e.key}: ${e.value}')).toList(),
),
);
}
}

View File

@@ -1,191 +0,0 @@
import 'package:dropdown_button2/dropdown_button2.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:provider/provider.dart';
import 'package:styled_widget/styled_widget.dart';
import 'package:surface/providers/chat_call.dart';
import 'package:surface/types/chat.dart';
import 'package:surface/widgets/dialog.dart';
class ChatCallPrejoinPopup extends StatefulWidget {
final SnChatCall ongoingCall;
final SnChannel channel;
final void Function() onJoin;
const ChatCallPrejoinPopup({
super.key,
required this.ongoingCall,
required this.channel,
required this.onJoin,
});
@override
State<ChatCallPrejoinPopup> createState() => _ChatCallPrejoinPopupState();
}
class _ChatCallPrejoinPopupState extends State<ChatCallPrejoinPopup> {
bool _isBusy = false;
late final ChatCallProvider _call = context.read<ChatCallProvider>();
void _performJoin() async {
setState(() => _isBusy = true);
_call.setCall(widget.ongoingCall, widget.channel);
_call.setIsBusy(true);
try {
final resp = await _call.getRoomToken();
final token = resp.$1;
final endpoint = resp.$2;
_call.initRoom();
_call.setupRoomListeners(
onDisconnected: (reason) {
context.showSnackbar(
'callDisconnected'.tr(args: [reason.toString()]),
);
},
);
await _call.joinRoom(endpoint, token);
widget.onJoin();
if (!mounted) return;
Navigator.pop(context);
} catch (e) {
if (!mounted) return;
context.showErrorDialog(e);
} finally {
setState(() => _isBusy = false);
}
}
@override
void initState() {
final call = context.read<ChatCallProvider>();
call.checkPermissions().then((_) {
call.initHardware();
});
super.initState();
}
@override
Widget build(BuildContext context) {
final call = context.read<ChatCallProvider>();
return ListenableBuilder(
listenable: call,
builder: (context, _) {
return Center(
child: Container(
constraints: const BoxConstraints(maxWidth: 320),
child: Column(
mainAxisAlignment: MainAxisAlignment.center,
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Text('callMicrophone').tr(),
Switch(
value: call.enableAudio,
onChanged: null,
),
],
).padding(bottom: 5),
DropdownButtonHideUnderline(
child: DropdownButton2<MediaDevice>(
isExpanded: true,
disabledHint: Text('callMicrophoneDisabled').tr(),
hint: Text('callMicrophoneSelect').tr(),
items: call.enableAudio
? call.audioInputs
.map(
(item) => DropdownMenuItem<MediaDevice>(
value: item,
child: Text(item.label),
),
)
.toList()
.cast<DropdownMenuItem<MediaDevice>>()
: [],
value: call.audioDevice,
onChanged: (MediaDevice? value) async {
if (value != null) {
call.setAudioDevice(value);
await call.changeLocalAudioTrack();
}
},
buttonStyleData: const ButtonStyleData(
height: 40,
width: 320,
),
),
).padding(bottom: 25),
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Text('callCamera').tr(),
Switch(
value: call.enableVideo,
onChanged: call.setEnableVideo,
),
],
).padding(bottom: 5),
DropdownButtonHideUnderline(
child: DropdownButton2<MediaDevice>(
isExpanded: true,
disabledHint: Text('callCameraDisabled').tr(),
hint: Text('callCameraSelect').tr(),
items: call.enableVideo
? call.videoInputs
.map(
(item) => DropdownMenuItem<MediaDevice>(
value: item,
child: Text(item.label),
),
)
.toList()
.cast<DropdownMenuItem<MediaDevice>>()
: [],
value: call.videoDevice,
onChanged: (MediaDevice? value) async {
if (value != null) {
call.setVideoDevice(value);
await call.changeLocalVideoTrack();
}
},
buttonStyleData: const ButtonStyleData(
height: 40,
width: 320,
),
),
).padding(bottom: 25),
if (_isBusy)
const Center(child: CircularProgressIndicator())
else
ElevatedButton(
style: ElevatedButton.styleFrom(
minimumSize: const Size(320, 56),
),
onPressed: _isBusy ? null : _performJoin,
child: Text('callJoin').tr(),
),
],
),
),
);
},
);
}
@override
void dispose() {
_call
..deactivateHardware()
..disposeHardware();
super.dispose();
}
}