2024-11-24 20:23:06 +08:00
parent 66f41179ba
commit 7221af75eb
31 changed files with 2769 additions and 46 deletions

View File

@@ -290,7 +290,7 @@ class _AttachmentItemContentVideoState
),
),
const Icon(
- Icons.play_arrow,
+ Symbols.play_arrow,
shadows: labelShadows,
color: Colors.white,
).padding(bottom: 4, right: 8),
@@ -450,7 +450,7 @@ class _AttachmentItemContentAudioState
),
),
const Icon(
- Icons.play_arrow,
+ Symbols.play_arrow,
shadows: labelShadows,
color: Colors.white,
).padding(bottom: 4, right: 8),

View File

@@ -0,0 +1,369 @@
import 'dart:async';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:material_symbols_icons/symbols.dart';
import 'package:provider/provider.dart';
import 'package:surface/providers/chat_call.dart';
import 'package:surface/widgets/dialog.dart';
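/// Bottom control bar for an ongoing call: microphone, camera, speakerphone,
/// screen sharing, camera flip and disconnect, backed by the given LiveKit
/// [Room] and its [LocalParticipant].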
class ControlsWidget extends StatefulWidget {
final Room room;
final LocalParticipant participant;
const ControlsWidget(
this.room,
this.participant, {
super.key,
});
@override
State<StatefulWidget> createState() => _ControlsWidgetState();
}
class _ControlsWidgetState extends State<ControlsWidget> {
CameraPosition _position = CameraPosition.front;
List<MediaDevice>? _audioInputs;
List<MediaDevice>? _audioOutputs;
List<MediaDevice>? _videoInputs;
StreamSubscription? _subscription;
bool _speakerphoneOn = false;
@override
void initState() {
super.initState();
_participant.addListener(onChange);
_subscription = Hardware.instance.onDeviceChange.stream
.listen((List<MediaDevice> devices) {
_revertDevices(devices);
});
Hardware.instance.enumerateDevices().then(_revertDevices);
_speakerphoneOn = Hardware.instance.speakerOn ?? false;
}
@override
void dispose() {
_subscription?.cancel();
_participant.removeListener(onChange);
super.dispose();
}
LocalParticipant get _participant => widget.participant;
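// Rebuilds the cached audio/video device lists whenever the available hardware changes.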
void _revertDevices(List<MediaDevice> devices) async {
_audioInputs = devices.where((d) => d.kind == 'audioinput').toList();
_audioOutputs = devices.where((d) => d.kind == 'audiooutput').toList();
_videoInputs = devices.where((d) => d.kind == 'videoinput').toList();
if (!mounted) return;
setState(() {});
}
void onChange() => setState(() {});
bool get isMuted => _participant.isMuted;
Future<bool?> showDisconnectDialog() {
return showDialog<bool>(
context: context,
builder: (ctx) => AlertDialog(
title: Text('callDisconnect').tr(),
content: Text('callDisconnectDescription').tr(),
actions: [
TextButton(
onPressed: () => Navigator.pop(ctx, false),
child: Text('cancel').tr(),
),
TextButton(
onPressed: () => Navigator.pop(ctx, true),
child: Text('dialogConfirm').tr(),
),
],
),
);
}
void _disconnect() async {
if (await showDisconnectDialog() != true) return;
if (!mounted) return;
final call = context.read<ChatCallProvider>();
if (call.current != null) {
call.disposeRoom();
if (Navigator.canPop(context)) {
Navigator.pop(context);
}
}
}
void _disableAudio() async {
await _participant.setMicrophoneEnabled(false);
}
void _enableAudio() async {
await _participant.setMicrophoneEnabled(true);
}
void _disableVideo() async {
await _participant.setCameraEnabled(false);
}
void _enableVideo() async {
await _participant.setCameraEnabled(true);
}
void _selectAudioOutput(MediaDevice device) async {
await widget.room.setAudioOutputDevice(device);
setState(() {});
}
void _selectAudioInput(MediaDevice device) async {
await widget.room.setAudioInputDevice(device);
setState(() {});
}
void _selectVideoInput(MediaDevice device) async {
await widget.room.setVideoInputDevice(device);
setState(() {});
}
void _toggleSpeakerphoneOn() {
_speakerphoneOn = !_speakerphoneOn;
Hardware.instance.setSpeakerphoneOn(_speakerphoneOn);
setState(() {});
}
void _toggleCamera() async {
final track = _participant.videoTrackPublications.firstOrNull?.track;
if (track == null) return;
try {
final newPosition = _position.switched();
await track.setCameraPosition(newPosition);
setState(() {
_position = newPosition;
});
} catch (error) {
return;
}
}
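// Screen sharing is platform specific: desktop shows a capture-source picker,
// iOS goes through the broadcast extension, mobile web is unsupported, and
// everything else falls back to setScreenShareEnabled().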
void _enableScreenShare() async {
if (lkPlatformIsDesktop()) {
try {
final source = await showDialog<DesktopCapturerSource>(
context: context,
builder: (context) => ScreenSelectDialog(),
);
if (source == null) {
return;
}
var track = await LocalVideoTrack.createScreenShareTrack(
ScreenShareCaptureOptions(
captureScreenAudio: true,
sourceId: source.id,
maxFrameRate: 30.0,
),
);
await _participant.publishVideoTrack(track);
} catch (err) {
if (!mounted) return;
context.showErrorDialog(err);
}
return;
}
if (lkPlatformIs(PlatformType.iOS)) {
var track = await LocalVideoTrack.createScreenShareTrack(
const ScreenShareCaptureOptions(
useiOSBroadcastExtension: true,
captureScreenAudio: true,
maxFrameRate: 30.0,
),
);
await _participant.publishVideoTrack(track);
return;
}
if (lkPlatformIsWebMobile()) {
ScaffoldMessenger.of(context).showSnackBar(const SnackBar(
content: Text('Screen share is not supported in mobile browsers.'),
));
return;
}
await _participant.setScreenShareEnabled(true, captureScreenAudio: true);
}
void _disableScreenShare() async {
await _participant.setScreenShareEnabled(false);
}
@override
Widget build(BuildContext context) {
return Padding(
padding: const EdgeInsets.symmetric(
vertical: 10,
),
child: Wrap(
alignment: WrapAlignment.center,
spacing: 5,
runSpacing: 5,
children: [
IconButton(
icon: const Icon(Symbols.exit_to_app),
color: Theme.of(context).colorScheme.onSurface,
onPressed: _disconnect,
),
if (_participant.isMicrophoneEnabled())
if (lkPlatformIs(PlatformType.android))
IconButton(
onPressed: _disableAudio,
icon: const Icon(Symbols.mic),
color: Theme.of(context).colorScheme.onSurface,
tooltip: 'callMicrophoneOff'.tr(),
)
else
PopupMenuButton<MediaDevice>(
icon: const Icon(Symbols.settings_voice),
itemBuilder: (BuildContext context) {
return [
PopupMenuItem<MediaDevice>(
value: null,
onTap: isMuted ? _enableAudio : _disableAudio,
child: ListTile(
leading: const Icon(Symbols.mic_off),
title: Text(isMuted
? 'callMicrophoneOn'.tr()
: 'callMicrophoneOff'.tr()),
),
),
if (_audioInputs != null)
..._audioInputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
widget.room.selectedAudioInputDeviceId)
? const Icon(Symbols.check_box)
: const Icon(Symbols.check_box_outline_blank),
title: Text(device.label),
),
onTap: () => _selectAudioInput(device),
);
})
];
},
)
else
IconButton(
onPressed: _enableAudio,
icon: const Icon(Symbols.mic_off),
color: Theme.of(context).colorScheme.onSurface,
tooltip: 'callMicrophoneOn'.tr(),
),
if (_participant.isCameraEnabled())
PopupMenuButton<MediaDevice>(
icon: const Icon(Symbols.videocam_sharp),
itemBuilder: (BuildContext context) {
return [
PopupMenuItem<MediaDevice>(
value: null,
onTap: _disableVideo,
child: ListTile(
leading: const Icon(Symbols.videocam_off),
title: Text('callCameraOff'.tr()),
),
),
if (_videoInputs != null)
..._videoInputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
widget.room.selectedVideoInputDeviceId)
? const Icon(Symbols.check_box)
: const Icon(Symbols.check_box_outline_blank),
title: Text(device.label),
),
onTap: () => _selectVideoInput(device),
);
})
];
},
)
else
IconButton(
onPressed: _enableVideo,
icon: const Icon(Symbols.videocam_off),
color: Theme.of(context).colorScheme.onSurface,
tooltip: 'callCameraOn'.tr(),
),
IconButton(
icon: Icon(_position == CameraPosition.back
? Symbols.video_camera_back
: Symbols.video_camera_front),
color: Theme.of(context).colorScheme.onSurface,
onPressed: () => _toggleCamera(),
tooltip: 'callVideoFlip'.tr(),
),
if (!lkPlatformIs(PlatformType.iOS))
PopupMenuButton<MediaDevice>(
icon: const Icon(Symbols.volume_up),
itemBuilder: (BuildContext context) {
return [
PopupMenuItem<MediaDevice>(
value: null,
child: ListTile(
leading: const Icon(Symbols.speaker),
title: Text('callSpeakerSelect').tr(),
),
),
if (_audioOutputs != null)
..._audioOutputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
widget.room.selectedAudioOutputDeviceId)
? const Icon(Symbols.check_box)
: const Icon(Symbols.check_box_outline_blank),
title: Text(device.label),
),
onTap: () => _selectAudioOutput(device),
);
})
];
},
),
if (!kIsWeb && Hardware.instance.canSwitchSpeakerphone)
IconButton(
onPressed: _toggleSpeakerphoneOn,
color: Theme.of(context).colorScheme.onSurface,
icon: _speakerphoneOn
? Icon(Symbols.volume_up)
: Icon(Symbols.volume_down),
tooltip: 'callSpeakerphoneToggle'.tr(),
),
if (_participant.isScreenShareEnabled())
IconButton(
icon: const Icon(Symbols.stop_screen_share),
color: Theme.of(context).colorScheme.onSurface,
onPressed: () => _disableScreenShare(),
tooltip: 'callScreenOff'.tr(),
)
else
IconButton(
icon: const Icon(Symbols.screen_share),
color: Theme.of(context).colorScheme.onSurface,
onPressed: () => _enableScreenShare(),
tooltip: 'callScreenOn'.tr(),
),
],
),
);
}
}

View File

@@ -0,0 +1,92 @@
import 'dart:math' as math;
import 'package:flutter/material.dart';
import 'package:flutter_animate/flutter_animate.dart';
import 'package:surface/types/account.dart';
import 'package:surface/widgets/account/account_image.dart';
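/// Placeholder shown when a participant has no active video track: renders the
/// account avatar and pulses a green ring while the participant is speaking.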
class NoContentWidget extends StatefulWidget {
final SnAccount? userinfo;
final bool isSpeaking;
final bool isFixed;
const NoContentWidget({
super.key,
this.userinfo,
this.isFixed = false,
required this.isSpeaking,
});
@override
State<NoContentWidget> createState() => _NoContentWidgetState();
}
class _NoContentWidgetState extends State<NoContentWidget>
with SingleTickerProviderStateMixin {
late final AnimationController _animationController;
@override
void initState() {
super.initState();
_animationController = AnimationController(vsync: this);
}
@override
void didUpdateWidget(NoContentWidget old) {
super.didUpdateWidget(old);
if (widget.isSpeaking) {
_animationController.repeat(reverse: true);
} else {
_animationController
.animateTo(0, duration: 300.ms)
.then((_) => _animationController.reset());
}
}
@override
Widget build(BuildContext context) {
final double radius = widget.isFixed
? 32
: math.min(
MediaQuery.of(context).size.width * 0.1,
MediaQuery.of(context).size.height * 0.1,
);
return Container(
alignment: Alignment.center,
child: Center(
child: Animate(
autoPlay: false,
controller: _animationController,
effects: [
CustomEffect(
begin: widget.isSpeaking ? 2 : 0,
end: 8,
curve: Curves.easeInOut,
duration: 1250.ms,
builder: (context, value, child) => Container(
decoration: BoxDecoration(
borderRadius: BorderRadius.all(Radius.circular(radius + 8)),
border: value > 0
? Border.all(color: Colors.green, width: value)
: null,
),
child: child,
),
)
],
child: AccountImage(
content: widget.userinfo?.avatar,
radius: radius,
),
),
),
);
}
@override
void dispose() {
_animationController.dispose();
super.dispose();
}
}

View File

@@ -0,0 +1,242 @@
import 'dart:convert';
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:surface/types/account.dart';
import 'package:surface/types/chat.dart';
import 'package:surface/widgets/chat/call/call_no_content.dart';
import 'package:surface/widgets/chat/call/call_participant_info.dart';
import 'package:surface/widgets/chat/call/call_participant_menu.dart';
import 'package:surface/widgets/chat/call/call_participant_stats.dart';
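/// Base class for a participant tile; [widgetFor] picks the local or remote
/// variant for a [ParticipantTrack] and overlays info and optional stats.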
abstract class ParticipantWidget extends StatefulWidget {
static ParticipantWidget widgetFor(ParticipantTrack participantTrack,
{bool isFixed = false, bool showStatsLayer = false}) {
if (participantTrack.participant is LocalParticipant) {
return LocalParticipantWidget(
participantTrack.participant as LocalParticipant,
participantTrack.videoTrack,
isFixed,
participantTrack.isScreenShare,
showStatsLayer,
);
} else if (participantTrack.participant is RemoteParticipant) {
return RemoteParticipantWidget(
participantTrack.participant as RemoteParticipant,
participantTrack.videoTrack,
isFixed,
participantTrack.isScreenShare,
showStatsLayer,
);
}
throw UnimplementedError('Unknown participant type');
}
abstract final Participant participant;
abstract final VideoTrack? videoTrack;
abstract final bool isScreenShare;
abstract final bool isFixed;
abstract final bool showStatsLayer;
final VideoQuality quality;
const ParticipantWidget({
super.key,
this.quality = VideoQuality.MEDIUM,
});
}
class LocalParticipantWidget extends ParticipantWidget {
@override
final LocalParticipant participant;
@override
final VideoTrack? videoTrack;
@override
final bool isFixed;
@override
final bool isScreenShare;
@override
final bool showStatsLayer;
const LocalParticipantWidget(
this.participant,
this.videoTrack,
this.isFixed,
this.isScreenShare,
this.showStatsLayer, {
super.key,
});
@override
State<StatefulWidget> createState() => _LocalParticipantWidgetState();
}
class RemoteParticipantWidget extends ParticipantWidget {
@override
final RemoteParticipant participant;
@override
final VideoTrack? videoTrack;
@override
final bool isFixed;
@override
final bool isScreenShare;
@override
final bool showStatsLayer;
const RemoteParticipantWidget(
this.participant,
this.videoTrack,
this.isFixed,
this.isScreenShare,
this.showStatsLayer, {
super.key,
});
@override
State<StatefulWidget> createState() => _RemoteParticipantWidgetState();
}
abstract class _ParticipantWidgetState<T extends ParticipantWidget>
extends State<T> {
VideoTrack? get _activeVideoTrack;
TrackPublication? get _firstAudioPublication;
SnAccount? _userinfoMetadata;
@override
void initState() {
super.initState();
widget.participant.addListener(onParticipantChanged);
onParticipantChanged();
}
@override
void dispose() {
widget.participant.removeListener(onParticipantChanged);
super.dispose();
}
@override
void didUpdateWidget(covariant T oldWidget) {
oldWidget.participant.removeListener(onParticipantChanged);
widget.participant.addListener(onParticipantChanged);
onParticipantChanged();
super.didUpdateWidget(oldWidget);
}
void onParticipantChanged() {
setState(() {
if (widget.participant.metadata != null) {
_userinfoMetadata = SnAccount.fromJson(
jsonDecode(widget.participant.metadata!),
);
}
});
}
@override
Widget build(BuildContext ctx) {
return Stack(
children: [
_activeVideoTrack != null && !_activeVideoTrack!.muted
? VideoTrackRenderer(
_activeVideoTrack!,
fit: RTCVideoViewObjectFit.RTCVideoViewObjectFitContain,
)
: NoContentWidget(
userinfo: _userinfoMetadata,
isFixed: widget.isFixed,
isSpeaking: widget.participant.isSpeaking,
),
if (widget.showStatsLayer)
Positioned(
top: 30,
right: 30,
child: ParticipantStatsWidget(participant: widget.participant),
),
Align(
alignment: Alignment.bottomCenter,
child: Column(
crossAxisAlignment: CrossAxisAlignment.stretch,
mainAxisSize: MainAxisSize.min,
children: [
ParticipantInfoWidget(
title: widget.participant.name.isNotEmpty
? widget.participant.name
: widget.participant.identity,
audioAvailable: _firstAudioPublication?.muted == false &&
_firstAudioPublication?.subscribed == true,
connectionQuality: widget.participant.connectionQuality,
isScreenShare: widget.isScreenShare,
),
],
),
),
],
);
}
}
class _LocalParticipantWidgetState
extends _ParticipantWidgetState<LocalParticipantWidget> {
@override
LocalTrackPublication<LocalAudioTrack>? get _firstAudioPublication =>
widget.participant.audioTrackPublications.firstOrNull;
@override
VideoTrack? get _activeVideoTrack => widget.videoTrack;
}
class _RemoteParticipantWidgetState
extends _ParticipantWidgetState<RemoteParticipantWidget> {
@override
RemoteTrackPublication<RemoteAudioTrack>? get _firstAudioPublication =>
widget.participant.audioTrackPublications.firstOrNull;
@override
VideoTrack? get _activeVideoTrack => widget.videoTrack;
}
class InteractiveParticipantWidget extends StatelessWidget {
final double? width;
final double? height;
final Color? color;
final bool isFixedAvatar;
final ParticipantTrack participant;
final Function() onTap;
const InteractiveParticipantWidget({
super.key,
this.width,
this.height,
this.color,
this.isFixedAvatar = false,
required this.participant,
required this.onTap,
});
@override
Widget build(BuildContext context) {
return GestureDetector(
child: Container(
width: width,
height: height,
color: color,
child: ParticipantWidget.widgetFor(participant, isFixed: isFixedAvatar),
),
onTap: () => onTap(),
onLongPress: () {
if (participant.participant is LocalParticipant) return;
showModalBottomSheet(
context: context,
builder: (context) => ParticipantMenu(
participant: participant.participant as RemoteParticipant,
videoTrack: participant.videoTrack,
isScreenShare: participant.isScreenShare,
),
);
},
);
}
}

View File

@@ -0,0 +1,79 @@
import 'package:flutter/material.dart';
import 'package:gap/gap.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:material_symbols_icons/symbols.dart';
import 'package:styled_widget/styled_widget.dart';
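/// Bottom overlay strip for a participant tile: name, microphone or
/// screen-share indicator, and connection quality.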
class ParticipantInfoWidget extends StatelessWidget {
final String? title;
final bool audioAvailable;
final ConnectionQuality connectionQuality;
final bool isScreenShare;
const ParticipantInfoWidget({
super.key,
this.title,
this.audioAvailable = true,
this.connectionQuality = ConnectionQuality.unknown,
this.isScreenShare = false,
});
@override
Widget build(BuildContext context) => Container(
color: Theme.of(context).colorScheme.onSurface.withOpacity(0.75),
padding: const EdgeInsets.symmetric(
vertical: 7,
horizontal: 10,
),
child: Row(
mainAxisAlignment: MainAxisAlignment.end,
crossAxisAlignment: CrossAxisAlignment.center,
children: [
if (title != null)
Flexible(
child: Text(
title!,
overflow: TextOverflow.ellipsis,
style: const TextStyle(color: Colors.white),
),
),
const Gap(5),
isScreenShare
? const Icon(
Symbols.monitor,
color: Colors.white,
size: 16,
)
: Icon(
audioAvailable ? Symbols.mic : Symbols.mic_off,
color: audioAvailable ? Colors.white : Colors.red,
size: 16,
),
const Gap(3),
if (connectionQuality != ConnectionQuality.unknown)
Icon(
{
ConnectionQuality.excellent: Symbols.signal_cellular_alt,
ConnectionQuality.good: Symbols.signal_cellular_alt_2_bar,
ConnectionQuality.poor: Symbols.signal_cellular_alt_1_bar,
}[connectionQuality],
color: {
ConnectionQuality.excellent: Colors.green,
ConnectionQuality.good: Colors.orange,
ConnectionQuality.poor: Colors.red,
}[connectionQuality],
size: 16,
)
else
const SizedBox(
width: 12,
height: 12,
child: CircularProgressIndicator(
color: Colors.white,
strokeWidth: 2,
),
).padding(all: 3),
],
),
);
}

View File

@@ -0,0 +1,161 @@
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:material_symbols_icons/symbols.dart';
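/// Bottom-sheet menu with per-participant actions: subscribe or unsubscribe
/// their audio and video, and set the preferred FPS and video quality.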
class ParticipantMenu extends StatefulWidget {
final RemoteParticipant participant;
final VideoTrack? videoTrack;
final bool isScreenShare;
final bool showStatsLayer;
const ParticipantMenu({
super.key,
required this.participant,
this.videoTrack,
this.isScreenShare = false,
this.showStatsLayer = false,
});
@override
State<ParticipantMenu> createState() => _ParticipantMenuState();
}
class _ParticipantMenuState extends State<ParticipantMenu> {
RemoteTrackPublication<RemoteVideoTrack>? get _videoPublication =>
widget.participant.videoTrackPublications
.where((element) => element.sid == widget.videoTrack?.sid)
.firstOrNull;
RemoteTrackPublication<RemoteAudioTrack>? get _firstAudioPublication =>
widget.participant.audioTrackPublications.firstOrNull;
void tookAction() {
if (Navigator.canPop(context)) {
Navigator.pop(context);
}
}
@override
Widget build(BuildContext context) {
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Container(
padding:
const EdgeInsets.only(left: 8, right: 8, top: 20, bottom: 12),
child: Padding(
padding: const EdgeInsets.symmetric(
horizontal: 8,
vertical: 12,
),
child: Text(
'callParticipantAction',
style: Theme.of(context).textTheme.headlineSmall,
).tr(),
),
),
Expanded(
child: ListView(
children: [
if (_firstAudioPublication != null && !widget.isScreenShare)
ListTile(
leading: Icon(
Symbols.volume_up,
color: {
TrackSubscriptionState.notAllowed:
Theme.of(context).colorScheme.error,
TrackSubscriptionState.unsubscribed: Theme.of(context)
.colorScheme
.onSurface
.withOpacity(0.6),
TrackSubscriptionState.subscribed:
Theme.of(context).colorScheme.primary,
}[_firstAudioPublication!.subscriptionState],
),
title: Text(
_firstAudioPublication!.subscribed
? 'callParticipantMicrophoneOff'.tr()
: 'callParticipantMicrophoneOn'.tr(),
),
onTap: () {
if (_firstAudioPublication!.subscribed) {
_firstAudioPublication!.unsubscribe();
} else {
_firstAudioPublication!.subscribe();
}
tookAction();
},
),
if (_videoPublication != null)
ListTile(
leading: Icon(
widget.isScreenShare ? Symbols.monitor : Symbols.videocam,
color: {
TrackSubscriptionState.notAllowed:
Theme.of(context).colorScheme.error,
TrackSubscriptionState.unsubscribed: Theme.of(context)
.colorScheme
.onSurface
.withOpacity(0.6),
TrackSubscriptionState.subscribed:
Theme.of(context).colorScheme.primary,
}[_videoPublication!.subscriptionState],
),
title: Text(
_videoPublication!.subscribed
? 'callParticipantVideoOff'.tr()
: 'callParticipantVideoOn'.tr(),
),
onTap: () {
if (_videoPublication!.subscribed) {
_videoPublication!.unsubscribe();
} else {
_videoPublication!.subscribe();
}
tookAction();
},
),
if (_videoPublication != null) const Divider(thickness: 0.3),
if (_videoPublication != null)
...[30, 15, 8].map(
(x) => ListTile(
leading: Icon(
_videoPublication?.fps == x
? Symbols.check_box
: Symbols.check_box_outline_blank,
),
title: Text('Set preferred frame-per-second to $x'),
onTap: () {
_videoPublication!.setVideoFPS(x);
tookAction();
},
),
),
if (_videoPublication != null) const Divider(thickness: 0.3),
if (_videoPublication != null)
...[
('High', VideoQuality.HIGH),
('Medium', VideoQuality.MEDIUM),
('Low', VideoQuality.LOW),
].map(
(x) => ListTile(
leading: Icon(
_videoPublication?.videoQuality == x.$2
? Symbols.check_box
: Symbols.check_box_outline_blank,
),
title: Text('Set preferred quality to ${x.$1}'),
onTap: () {
_videoPublication!.setVideoQuality(x.$2);
tookAction();
},
),
),
],
),
),
],
);
}
}

View File

@@ -0,0 +1,133 @@
import 'package:flutter/material.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:surface/types/chat.dart';
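/// Debug overlay that listens to a participant's track stats events and
/// renders bitrate, codec and packet-loss details.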
class ParticipantStatsWidget extends StatefulWidget {
const ParticipantStatsWidget({super.key, required this.participant});
final Participant participant;
@override
State<StatefulWidget> createState() => _ParticipantStatsWidgetState();
}
class _ParticipantStatsWidgetState extends State<ParticipantStatsWidget> {
List<EventsListener<TrackEvent>> listeners = [];
ParticipantStatsType statsType = ParticipantStatsType.unknown;
Map<String, String> stats = {};
void _setUpListener(Track track) {
var listener = track.createListener();
listeners.add(listener);
if (track is LocalVideoTrack) {
statsType = ParticipantStatsType.localVideoSender;
listener.on<VideoSenderStatsEvent>((event) {
setState(() {
stats['video tx'] = 'total sent ${event.currentBitrate.toInt()} kbps';
event.stats.forEach((key, value) {
stats['layer-$key'] =
'${value.frameWidth ?? 0}x${value.frameHeight ?? 0} ${value.framesPerSecond?.toDouble() ?? 0} fps, ${event.bitrateForLayers[key] ?? 0} kbps';
});
var firstStats =
event.stats['f'] ?? event.stats['h'] ?? event.stats['q'];
if (firstStats != null) {
stats['encoder'] = firstStats.encoderImplementation ?? '';
stats['video codec'] =
'${firstStats.mimeType}, ${firstStats.clockRate}hz, pt: ${firstStats.payloadType}';
stats['qualityLimitationReason'] =
firstStats.qualityLimitationReason ?? '';
}
});
});
} else if (track is RemoteVideoTrack) {
statsType = ParticipantStatsType.remoteVideoReceiver;
listener.on<VideoReceiverStatsEvent>((event) {
setState(() {
stats['video rx'] = '${event.currentBitrate.toInt()} kbps';
stats['video codec'] =
'${event.stats.mimeType}, ${event.stats.clockRate}hz, pt: ${event.stats.payloadType}';
stats['video size'] =
'${event.stats.frameWidth}x${event.stats.frameHeight} ${event.stats.framesPerSecond?.toDouble()}fps';
stats['video jitter'] = '${event.stats.jitter} s';
stats['video decoder'] = '${event.stats.decoderImplementation}';
stats['video packets lost'] = '${event.stats.packetsLost}';
stats['video packets received'] = '${event.stats.packetsReceived}';
stats['video frames received'] = '${event.stats.framesReceived}';
stats['video frames decoded'] = '${event.stats.framesDecoded}';
stats['video frames dropped'] = '${event.stats.framesDropped}';
});
});
} else if (track is LocalAudioTrack) {
statsType = ParticipantStatsType.localAudioSender;
listener.on<AudioSenderStatsEvent>((event) {
setState(() {
stats['audio tx'] = '${event.currentBitrate.toInt()} kbps';
stats['audio codec'] =
'${event.stats.mimeType}, ${event.stats.clockRate}hz, ${event.stats.channels}ch, pt: ${event.stats.payloadType}';
});
});
} else if (track is RemoteAudioTrack) {
statsType = ParticipantStatsType.remoteAudioReceiver;
listener.on<AudioReceiverStatsEvent>((event) {
setState(() {
stats['audio rx'] = '${event.currentBitrate.toInt()} kbps';
stats['audio codec'] =
'${event.stats.mimeType}, ${event.stats.clockRate}hz, ${event.stats.channels}ch, pt: ${event.stats.payloadType}';
stats['audio jitter'] = '${event.stats.jitter} s';
stats['audio concealed samples'] =
'${event.stats.concealedSamples} / ${event.stats.concealmentEvents}';
stats['audio packets lost'] = '${event.stats.packetsLost}';
stats['audio packets received'] = '${event.stats.packetsReceived}';
});
});
}
}
void onParticipantChanged() {
for (var element in listeners) {
element.dispose();
}
listeners.clear();
for (var track in [
...widget.participant.videoTrackPublications,
...widget.participant.audioTrackPublications
]) {
if (track.track != null) {
_setUpListener(track.track!);
}
}
}
@override
void initState() {
super.initState();
widget.participant.addListener(onParticipantChanged);
onParticipantChanged();
}
@override
void deactivate() {
for (var element in listeners) {
element.dispose();
}
widget.participant.removeListener(onParticipantChanged);
super.deactivate();
}
num sendBitrate = 0;
@override
Widget build(BuildContext context) {
return Container(
color: Theme.of(context).colorScheme.onSurface.withOpacity(0.75),
padding: const EdgeInsets.symmetric(
vertical: 8,
horizontal: 8,
),
child: Column(
children:
stats.entries.map((e) => Text('${e.key}: ${e.value}')).toList(),
),
);
}
}

View File

@@ -0,0 +1,191 @@
import 'package:dropdown_button2/dropdown_button2.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:provider/provider.dart';
import 'package:styled_widget/styled_widget.dart';
import 'package:surface/providers/chat_call.dart';
import 'package:surface/types/chat.dart';
import 'package:surface/widgets/dialog.dart';
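/// Pre-join sheet that lets the user pick microphone and camera devices via
/// [ChatCallProvider] before connecting to the call room.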
class ChatCallPrejoinPopup extends StatefulWidget {
final SnChatCall ongoingCall;
final SnChannel channel;
final void Function() onJoin;
const ChatCallPrejoinPopup({
super.key,
required this.ongoingCall,
required this.channel,
required this.onJoin,
});
@override
State<ChatCallPrejoinPopup> createState() => _ChatCallPrejoinPopupState();
}
class _ChatCallPrejoinPopupState extends State<ChatCallPrejoinPopup> {
bool _isBusy = false;
late final ChatCallProvider _call = context.read<ChatCallProvider>();
void _performJoin() async {
setState(() => _isBusy = true);
_call.setCall(widget.ongoingCall, widget.channel);
_call.setIsBusy(true);
try {
final resp = await _call.getRoomToken();
final token = resp.$1;
final endpoint = resp.$2;
_call.initRoom();
_call.setupRoomListeners(
onDisconnected: (reason) {
context.showSnackbar(
'callDisconnected'.tr(args: [reason.toString()]),
);
},
);
await _call.joinRoom(endpoint, token);
widget.onJoin();
if (!mounted) return;
Navigator.pop(context);
} catch (e) {
if (!mounted) return;
context.showErrorDialog(e);
} finally {
setState(() => _isBusy = false);
}
}
@override
void initState() {
final call = context.read<ChatCallProvider>();
call.checkPermissions().then((_) {
call.initHardware();
});
super.initState();
}
@override
Widget build(BuildContext context) {
final call = context.read<ChatCallProvider>();
return ListenableBuilder(
listenable: call,
builder: (context, _) {
return Center(
child: Container(
constraints: const BoxConstraints(maxWidth: 320),
child: Column(
mainAxisAlignment: MainAxisAlignment.center,
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Text('callMicrophone').tr(),
Switch(
value: call.enableAudio,
onChanged: (value) => call.setEnableAudio(value),
),
],
).padding(bottom: 5),
DropdownButtonHideUnderline(
child: DropdownButton2<MediaDevice>(
isExpanded: true,
disabledHint: Text('callMicrophoneDisabled').tr(),
hint: Text('callMicrophoneSelect').tr(),
items: call.enableAudio
? call.audioInputs
.map(
(item) => DropdownMenuItem<MediaDevice>(
value: item,
child: Text(item.label),
),
)
.toList()
.cast<DropdownMenuItem<MediaDevice>>()
: [],
value: call.audioDevice,
onChanged: (MediaDevice? value) async {
if (value != null) {
call.setAudioDevice(value);
await call.changeLocalAudioTrack();
}
},
buttonStyleData: const ButtonStyleData(
height: 40,
width: 320,
),
),
).padding(bottom: 25),
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Text('callCamera').tr(),
Switch(
value: call.enableVideo,
// Assumes ChatCallProvider exposes setEnableVideo alongside setEnableAudio.
onChanged: (value) => call.setEnableVideo(value),
),
],
).padding(bottom: 5),
DropdownButtonHideUnderline(
child: DropdownButton2<MediaDevice>(
isExpanded: true,
disabledHint: Text('callCameraDisabled').tr(),
hint: Text('callCameraSelect').tr(),
items: call.enableVideo
? call.videoInputs
.map(
(item) => DropdownMenuItem<MediaDevice>(
value: item,
child: Text(item.label),
),
)
.toList()
.cast<DropdownMenuItem<MediaDevice>>()
: [],
value: call.videoDevice,
onChanged: (MediaDevice? value) async {
if (value != null) {
call.setVideoDevice(value);
await call.changeLocalVideoTrack();
}
},
buttonStyleData: const ButtonStyleData(
height: 40,
width: 320,
),
),
).padding(bottom: 25),
if (_isBusy)
const Center(child: CircularProgressIndicator())
else
ElevatedButton(
style: ElevatedButton.styleFrom(
minimumSize: const Size(320, 56),
),
onPressed: _isBusy ? null : _performJoin,
child: Text('callJoin').tr(),
),
],
),
),
);
},
);
}
@override
void dispose() {
_call
..deactivateHardware()
..disposeHardware();
super.dispose();
}
}