Fully functional call

LittleSheep 2024-06-01 20:18:25 +08:00
parent 508cba8ed3
commit 5c625fc15a
19 changed files with 1934 additions and 19 deletions
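This commit adds the LiveKit-based call stack: a ChatCallProvider, the CallScreen, a prejoin popup, and the participant/controls widgets shown in the diffs below. For orientation only (not part of the diff), here is a minimal sketch of the join flow the new provider expects, condensed from ChatCallPrejoinPopup later in this commit; joinOngoingCall is a hypothetical helper name and error handling is elided.

import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:solian/models/call.dart';
import 'package:solian/models/channel.dart';
import 'package:solian/providers/content/call.dart';

// Sketch only: drives the ChatCallProvider added in this commit.
Future<void> joinOngoingCall(
    BuildContext context, Call ongoingCall, Channel channel) async {
  final ChatCallProvider provider = Get.find();
  await provider.checkPermissions();   // camera/mic/bluetooth permissions
  provider.initHardware();             // enumerate input devices
  provider.setCall(ongoingCall, channel);
  final (token, endpoint) = await provider.getRoomToken();
  provider.initRoom();                 // creates the Room and its event listener
  provider.setupRoomListeners(
    onDisconnected: (reason) => debugPrint('Call disconnected: $reason'),
  );
  provider.joinRoom(endpoint, token);
  await provider.gotoScreen(context);  // pushes the new CallScreen
}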

View File

@ -1,4 +1,8 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.FOREGROUND_SERVICE"/>
<uses-permission android:name="android.permission.REQUEST_IGNORE_BATTERY_OPTIMIZATIONS" />
<application
android:label="solian"
android:name="${applicationName}"

View File

@ -14,11 +14,13 @@ extension SolianExtenions on BuildContext {
if (message.trim().isEmpty) return '';
return message
.split(' ')
.map((element) => '${element[0].toUpperCase()}${element.substring(1).toLowerCase()}')
.map((element) =>
'${element[0].toUpperCase()}${element.substring(1).toLowerCase()}')
.join(' ');
}
return showDialog<void>(
useRootNavigator: true,
context: this,
builder: (ctx) => AlertDialog(
title: Text('errorHappened'.tr),

View File

@ -4,6 +4,7 @@ import 'package:solian/providers/account.dart';
import 'package:solian/providers/auth.dart';
import 'package:solian/providers/chat.dart';
import 'package:solian/providers/content/attachment.dart';
import 'package:solian/providers/content/call.dart';
import 'package:solian/providers/content/channel.dart';
import 'package:solian/providers/content/post.dart';
import 'package:solian/providers/content/realm.dart';
@ -41,6 +42,7 @@ class SolianApp extends StatelessWidget {
Get.lazyPut(() => AccountProvider());
Get.lazyPut(() => ChannelProvider());
Get.lazyPut(() => RealmProvider());
Get.lazyPut(() => ChatCallProvider());
final AuthProvider auth = Get.find();
auth.isAuthorized.then((value) async {

View File

@ -1,3 +1,4 @@
import 'package:livekit_client/livekit_client.dart';
import 'package:solian/models/channel.dart';
class Call {
@ -48,3 +49,21 @@ class Call {
'channel': channel.toJson(),
};
}
enum ParticipantStatsType {
unknown,
localAudioSender,
localVideoSender,
remoteAudioReceiver,
remoteVideoReceiver,
}
class ParticipantTrack {
ParticipantTrack(
{required this.participant,
required this.videoTrack,
required this.isScreenShare});
VideoTrack? videoTrack;
Participant participant;
bool isScreenShare;
}

View File

@ -0,0 +1,383 @@
import 'dart:async';
import 'package:flutter/material.dart';
import 'package:flutter_background/flutter_background.dart';
import 'package:get/get.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:solian/models/call.dart';
import 'package:solian/models/channel.dart';
import 'package:solian/providers/auth.dart';
import 'package:solian/screens/channel/call/call.dart';
import 'package:solian/services.dart';
import 'package:wakelock_plus/wakelock_plus.dart';
class ChatCallProvider extends GetxController {
Rx<Call?> current = Rx(null);
Rx<Channel?> channel = Rx(null);
RxBool isReady = false.obs;
RxBool isMounted = false.obs;
String? token;
String? endpoint;
StreamSubscription? hwSubscription;
RxList audioInputs = [].obs;
RxList videoInputs = [].obs;
RxBool enableAudio = true.obs;
RxBool enableVideo = false.obs;
Rx<LocalAudioTrack?> audioTrack = Rx(null);
Rx<LocalVideoTrack?> videoTrack = Rx(null);
Rx<MediaDevice?> videoDevice = Rx(null);
Rx<MediaDevice?> audioDevice = Rx(null);
final VideoParameters videoParameters = VideoParametersPresets.h720_169;
late Room room;
late EventsListener<RoomEvent> listener;
RxList participantTracks = [].obs;
Rx<ParticipantTrack?> focusTrack = Rx(null);
Future<void> checkPermissions() async {
if (lkPlatformIs(PlatformType.macOS) || lkPlatformIs(PlatformType.linux)) {
return;
}
if (lkPlatformIs(PlatformType.android)) {
FlutterBackground.enableBackgroundExecution();
}
await Permission.camera.request();
await Permission.microphone.request();
await Permission.bluetooth.request();
await Permission.bluetoothConnect.request();
}
void setCall(Call call, Channel related) {
current.value = call;
channel.value = related;
}
Future<(String, String)> getRoomToken() async {
final AuthProvider auth = Get.find();
if (!await auth.isAuthorized) throw Exception('unauthorized');
final client = GetConnect(maxAuthRetries: 3);
client.httpClient.baseUrl = ServiceFinder.services['messaging'];
client.httpClient.addAuthenticator(auth.requestAuthenticator);
final resp = await client.post(
'/api/channels/global/${channel.value!.alias}/calls/ongoing/token',
{},
);
if (resp.statusCode == 200) {
token = resp.body['token'];
endpoint = 'wss://${resp.body['endpoint']}';
return (token!, endpoint!);
} else {
throw Exception(resp.bodyString);
}
}
void initHardware() {
if (isReady.value) {
return;
} else {
isReady.value = true;
}
hwSubscription = Hardware.instance.onDeviceChange.stream.listen(
revertDevices,
);
Hardware.instance.enumerateDevices().then(revertDevices);
}
void initRoom() {
initHardware();
room = Room();
listener = room.createListener();
WakelockPlus.enable();
}
void joinRoom(String url, String token) async {
if (isMounted.value) {
return;
} else {
isMounted.value = true;
}
try {
await room.connect(
url,
token,
roomOptions: RoomOptions(
dynacast: true,
adaptiveStream: true,
defaultAudioPublishOptions: const AudioPublishOptions(
name: 'call_voice',
stream: 'call_stream',
),
defaultVideoPublishOptions: const VideoPublishOptions(
name: 'call_video',
stream: 'call_stream',
simulcast: true,
backupVideoCodec: BackupVideoCodec(enabled: true),
),
defaultScreenShareCaptureOptions: const ScreenShareCaptureOptions(
useiOSBroadcastExtension: true,
params: VideoParameters(
dimensions: VideoDimensionsPresets.h1080_169,
encoding:
VideoEncoding(maxBitrate: 3 * 1000 * 1000, maxFramerate: 30),
),
),
defaultCameraCaptureOptions:
CameraCaptureOptions(maxFrameRate: 30, params: videoParameters),
),
fastConnectOptions: FastConnectOptions(
microphone: TrackOption(track: audioTrack.value),
camera: TrackOption(track: videoTrack.value),
),
);
} catch (e) {
rethrow;
}
}
void autoPublish() async {
try {
if (enableVideo.value) {
await room.localParticipant?.setCameraEnabled(true);
}
if (enableAudio.value) {
await room.localParticipant?.setMicrophoneEnabled(true);
}
} catch (error) {
rethrow;
}
}
void onRoomDidUpdate() => sortParticipants();
void setupRoom() {
sortParticipants();
room.addListener(onRoomDidUpdate);
WidgetsBindingCompatible.instance?.addPostFrameCallback(
(_) => autoPublish(),
);
if (lkPlatformIsMobile()) {
Hardware.instance.setSpeakerphoneOn(true);
}
}
void setupRoomListeners({
required Function(DisconnectReason?) onDisconnected,
}) {
listener
..on<RoomDisconnectedEvent>((event) async {
onDisconnected(event.reason);
})
..on<ParticipantEvent>((event) => sortParticipants())
..on<LocalTrackPublishedEvent>((_) => sortParticipants())
..on<LocalTrackUnpublishedEvent>((_) => sortParticipants())
..on<TrackSubscribedEvent>((_) => sortParticipants())
..on<TrackUnsubscribedEvent>((_) => sortParticipants())
..on<ParticipantNameUpdatedEvent>((event) {
sortParticipants();
});
}
void sortParticipants() {
Map<String, ParticipantTrack> mediaTracks = {};
for (var participant in room.remoteParticipants.values) {
mediaTracks[participant.sid] = ParticipantTrack(
participant: participant,
videoTrack: null,
isScreenShare: false,
);
for (var t in participant.videoTrackPublications) {
mediaTracks[participant.sid]?.videoTrack = t.track;
mediaTracks[participant.sid]?.isScreenShare = t.isScreenShare;
}
}
final newTracks = List.empty(growable: true);
final mediaTrackList = mediaTracks.values.toList();
mediaTrackList.sort((a, b) {
// Loudest people first
if (a.participant.isSpeaking && b.participant.isSpeaking) {
if (a.participant.audioLevel > b.participant.audioLevel) {
return -1;
} else {
return 1;
}
}
// Last spoke first
final aSpokeAt = a.participant.lastSpokeAt?.millisecondsSinceEpoch ?? 0;
final bSpokeAt = b.participant.lastSpokeAt?.millisecondsSinceEpoch ?? 0;
if (aSpokeAt != bSpokeAt) {
return aSpokeAt > bSpokeAt ? -1 : 1;
}
// Has video first
if (a.participant.hasVideo != b.participant.hasVideo) {
return a.participant.hasVideo ? -1 : 1;
}
// First joined people first
return a.participant.joinedAt.millisecondsSinceEpoch -
b.participant.joinedAt.millisecondsSinceEpoch;
});
newTracks.addAll(mediaTrackList);
if (room.localParticipant != null) {
ParticipantTrack localTrack = ParticipantTrack(
participant: room.localParticipant!,
videoTrack: null,
isScreenShare: false,
);
final localParticipantTracks =
room.localParticipant?.videoTrackPublications;
if (localParticipantTracks != null) {
for (var t in localParticipantTracks) {
localTrack.videoTrack = t.track;
localTrack.isScreenShare = t.isScreenShare;
}
}
newTracks.add(localTrack);
}
participantTracks.value = newTracks;
if (focusTrack.value == null) {
focusTrack.value = participantTracks.firstOrNull;
} else {
final idx = participantTracks.indexWhere(
(x) => focusTrack.value!.participant.sid == x.participant.sid,
);
if (idx > -1) {
focusTrack.value = participantTracks[idx];
}
}
}
void revertDevices(List<MediaDevice> devices) async {
audioInputs.clear();
audioInputs.addAll(devices.where((d) => d.kind == 'audioinput'));
videoInputs.clear();
videoInputs.addAll(devices.where((d) => d.kind == 'videoinput'));
if (audioInputs.isNotEmpty) {
if (audioDevice.value == null && enableAudio.value) {
audioDevice.value = audioInputs.first;
Future.delayed(const Duration(milliseconds: 100), () async {
await changeLocalAudioTrack();
});
}
}
if (videoInputs.isNotEmpty) {
if (videoDevice.value == null && enableVideo.value) {
videoDevice.value = videoInputs.first;
Future.delayed(const Duration(milliseconds: 100), () async {
await changeLocalVideoTrack();
});
}
}
}
Future<void> setEnableVideo(value) async {
enableVideo.value = value;
if (!enableVideo.value) {
await videoTrack.value?.stop();
videoTrack.value = null;
} else {
await changeLocalVideoTrack();
}
}
Future<void> setEnableAudio(value) async {
enableAudio.value = value;
if (!enableAudio.value) {
await audioTrack.value?.stop();
audioTrack.value = null;
} else {
await changeLocalAudioTrack();
}
}
Future<void> changeLocalAudioTrack() async {
if (audioTrack.value != null) {
await audioTrack.value!.stop();
audioTrack.value = null;
}
if (audioDevice.value != null) {
audioTrack.value = await LocalAudioTrack.create(
AudioCaptureOptions(
deviceId: audioDevice.value!.deviceId,
),
);
await audioTrack.value!.start();
}
}
Future<void> changeLocalVideoTrack() async {
if (videoTrack.value != null) {
await videoTrack.value!.stop();
videoTrack.value = null;
}
if (videoDevice.value != null) {
videoTrack.value = await LocalVideoTrack.createCameraTrack(
CameraCaptureOptions(
deviceId: videoDevice.value!.deviceId,
params: videoParameters,
),
);
await videoTrack.value!.start();
}
}
void changeFocusTrack(ParticipantTrack track) {
focusTrack.value = track;
}
Future gotoScreen(BuildContext context) {
return Navigator.of(context, rootNavigator: true).push(
MaterialPageRoute(builder: (context) => const CallScreen()),
);
}
void deactivateHardware() {
hwSubscription?.cancel();
}
void disposeRoom() {
isMounted.value = false;
current.value = null;
channel.value = null;
room.removeListener(onRoomDidUpdate);
room.disconnect();
room.dispose();
listener.dispose();
WakelockPlus.disable();
}
void disposeHardware() {
isReady.value = false;
audioTrack.value?.stop();
audioTrack.value = null;
videoTrack.value?.stop();
videoTrack.value = null;
}
}
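For symmetry, a sketch (again not part of the diff) of the teardown path; it mirrors the disconnect handler in ControlsWidget and the dispose of ChatCallPrejoinPopup further down, and leaveCall is a hypothetical helper name.

import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:solian/providers/content/call.dart';

// Sketch only: leaving a call with the provider above.
void leaveCall(BuildContext context) {
  final ChatCallProvider provider = Get.find();
  if (provider.current.value != null) {
    provider.disposeRoom();     // disconnects and disposes the LiveKit room
  }
  provider
    ..deactivateHardware()      // cancels the device-change subscription
    ..disposeHardware();        // stops and releases local audio/video tracks
  Navigator.pop(context);
}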

View File

@ -0,0 +1,101 @@
import 'dart:math' as math;
import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:solian/providers/content/call.dart';
import 'package:solian/widgets/chat/call/call_controls.dart';
import 'package:solian/widgets/chat/call/call_participant.dart';
class CallScreen extends StatefulWidget {
const CallScreen({super.key});
@override
State<CallScreen> createState() => _CallScreenState();
}
class _CallScreenState extends State<CallScreen> {
@override
void initState() {
Get.find<ChatCallProvider>().setupRoom();
super.initState();
}
@override
Widget build(BuildContext context) {
final ChatCallProvider provider = Get.find();
return Material(
color: Theme.of(context).colorScheme.surface,
child: SafeArea(
top: false,
child: Obx(
() => Stack(
children: [
Column(
children: [
Expanded(
child: Container(
color: Theme.of(context).colorScheme.surfaceContainer,
child: provider.focusTrack.value != null
? InteractiveParticipantWidget(
isFixed: false,
participant: provider.focusTrack.value!,
onTap: () {},
)
: const SizedBox(),
),
),
if (provider.room.localParticipant != null)
ControlsWidget(
provider.room,
provider.room.localParticipant!,
),
],
),
Positioned(
left: 0,
right: 0,
top: 0,
child: SizedBox(
height: 128,
child: ListView.builder(
scrollDirection: Axis.horizontal,
itemCount: math.max(0, provider.participantTracks.length),
itemBuilder: (BuildContext context, int index) {
final track = provider.participantTracks[index];
if (track.participant.sid ==
provider.focusTrack.value?.participant.sid) {
return Container();
}
return Padding(
padding: const EdgeInsets.only(top: 8, left: 8),
child: ClipRRect(
borderRadius:
const BorderRadius.all(Radius.circular(8)),
child: InteractiveParticipantWidget(
isFixed: true,
width: 120,
height: 120,
color: Theme.of(context).cardColor,
participant: track,
onTap: () {
if (track.participant.sid !=
provider.focusTrack.value?.participant.sid) {
provider.changeFocusTrack(track);
}
},
),
),
);
},
),
),
),
],
),
),
),
);
}
}

View File

@ -11,10 +11,12 @@ import 'package:solian/models/packet.dart';
import 'package:solian/models/pagination.dart';
import 'package:solian/providers/auth.dart';
import 'package:solian/providers/chat.dart';
import 'package:solian/providers/content/call.dart';
import 'package:solian/providers/content/channel.dart';
import 'package:solian/router.dart';
import 'package:solian/services.dart';
import 'package:solian/theme.dart';
import 'package:solian/widgets/chat/call/call_prejoin.dart';
import 'package:solian/widgets/chat/call/chat_call_action.dart';
import 'package:solian/widgets/chat/chat_message.dart';
import 'package:solian/widgets/chat/chat_message_action.dart';
@ -177,6 +179,17 @@ class _ChannelChatScreenState extends State<ChannelChatScreen> {
return a.createdAt.difference(b.createdAt).inMinutes <= 3;
}
void showCallPrejoin() {
showModalBottomSheet(
useRootNavigator: true,
context: context,
builder: (context) => ChatCallPrejoinPopup(
ongoingCall: _ongoingCall!,
channel: _channel!,
),
);
}
Message? _messageToReplying;
Message? _messageToEditing;
@ -238,7 +251,7 @@ class _ChannelChatScreenState extends State<ChannelChatScreen> {
@override
Widget build(BuildContext context) {
if (_isBusy) {
if (_isBusy || _channel == null) {
return const Center(
child: CircularProgressIndicator(),
);
@ -257,6 +270,8 @@ class _ChannelChatScreenState extends State<ChannelChatScreen> {
);
}
final ChatCallProvider call = Get.find();
return Scaffold(
appBar: AppBar(
title: Text(title),
@ -335,18 +350,37 @@ class _ChannelChatScreenState extends State<ChannelChatScreen> {
),
),
if (_ongoingCall != null)
MaterialBanner(
padding: const EdgeInsets.only(left: 10, right: 20),
leading: const Icon(Icons.call_received),
backgroundColor: Theme.of(context).colorScheme.surfaceContainer,
dividerColor: const Color.fromARGB(1, 0, 0, 0),
content: Text('callOngoing'.tr),
actions: [
TextButton(
child: Text('callJoin'.tr),
onPressed: () {},
),
],
Positioned(
top: 0,
left: 0,
right: 0,
child: MaterialBanner(
padding: const EdgeInsets.only(left: 16, top: 4, bottom: 4),
leading: const Icon(Icons.call_received),
backgroundColor: Theme.of(context).colorScheme.surfaceContainer,
dividerColor: Colors.transparent,
content: Text('callOngoing'.tr),
actions: [
Obx(() {
if (call.current.value == null) {
return TextButton(
onPressed: showCallPrejoin,
child: Text('callJoin'.tr),
);
} else if (call.channel.value?.id == _channel?.id) {
return TextButton(
onPressed: () => call.gotoScreen(context),
child: Text('callResume'.tr),
);
} else {
return TextButton(
onPressed: null,
child: Text('callJoin'.tr),
);
}
})
],
),
),
],
),

View File

@ -4,12 +4,14 @@ import 'package:font_awesome_flutter/font_awesome_flutter.dart';
import 'package:get/get.dart';
import 'package:solian/models/channel.dart';
import 'package:solian/providers/auth.dart';
import 'package:solian/providers/content/call.dart';
import 'package:solian/providers/content/channel.dart';
import 'package:solian/router.dart';
import 'package:solian/screens/account/notification.dart';
import 'package:solian/theme.dart';
import 'package:solian/widgets/account/signin_required_overlay.dart';
import 'package:solian/widgets/channel/channel_list.dart';
import 'package:solian/widgets/chat/call/chat_call_indicator.dart';
class ContactScreen extends StatefulWidget {
const ContactScreen({super.key});
@ -57,6 +59,7 @@ class _ContactScreenState extends State<ContactScreen> {
@override
Widget build(BuildContext context) {
final AuthProvider auth = Get.find();
final ChatCallProvider call = Get.find();
return Material(
color: Theme.of(context).colorScheme.surface,
@ -133,6 +136,15 @@ class _ContactScreenState extends State<ContactScreen> {
),
],
),
Obx(() {
if (call.current.value != null) {
return const SliverToBoxAdapter(
child: ChatCallCurrentIndicator(),
);
} else {
return const SizedBox();
}
}),
if (_isBusy)
SliverToBoxAdapter(
child: const LinearProgressIndicator().animate().scaleX(),

View File

@ -155,6 +155,13 @@ class SolianMessages extends Translations {
'Are your sure to delete message @id? This action cannot be undone!',
'callOngoing': 'A call is ongoing...',
'callJoin': 'Join',
'callMicrophone': 'Microphone',
'callMicrophoneDisabled': 'Microphone Disabled',
'callMicrophoneSelect': 'Select Microphone',
'callCamera': 'Camera',
'callCameraDisabled': 'Camera Disabled',
'callCameraSelect': 'Select Camera',
'callDisconnected': 'Call has been disconnected... @reason',
},
'zh_CN': {
'hide': '隐藏',
@ -298,6 +305,31 @@ class SolianMessages extends Translations {
'messageDeletionConfirmCaption': '你确定要删除消息 @id 吗?该操作不可撤销。',
'callOngoing': '一则通话正在进行中…',
'callJoin': '加入',
'callResume': '恢复',
'callMicrophone': '麦克风',
'callMicrophoneDisabled': '麦克风禁用',
'callMicrophoneSelect': '选择麦克风',
'callCamera': '摄像头',
'callCameraDisabled': '摄像头禁用',
'callCameraSelect': '选择摄像头',
'callSpeakerSelect': '选择扬声器',
'callDisconnected': '通话已断开… @reason',
'callMicrophoneOn': '开启麦克风',
'callMicrophoneOff': '关闭麦克风',
'callCameraOn': '开启摄像头',
'callCameraOff': '关闭摄像头',
'callVideoFlip': '翻转视频输入',
'callSpeakerphoneToggle': '切换扬声器模式',
'callScreenOn': '启动屏幕分享',
'callScreenOff': '关闭屏幕分享',
'callDisconnect': '断开连接',
'callDisconnectCaption': '你确定要断开与该则通话的连接吗?你也可以直接返回页面,通话将在后台继续。',
'callParticipantAction': '通话参与者的操作',
'callParticipantMicrophoneOff': '静音参与者',
'callParticipantMicrophoneOn': '解除静音参与者',
'callParticipantVideoOff': '关闭参与者视频',
'callParticipantVideoOn': '开启参与者视频',
'callAlreadyOngoing': '当前正在进行一则通话',
}
};
}

View File

@ -0,0 +1,399 @@
import 'dart:async';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_background/flutter_background.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:get/get.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:solian/exts.dart';
import 'package:solian/providers/content/call.dart';
class ControlsWidget extends StatefulWidget {
final Room room;
final LocalParticipant participant;
const ControlsWidget(
this.room,
this.participant, {
super.key,
});
@override
State<StatefulWidget> createState() => _ControlsWidgetState();
}
class _ControlsWidgetState extends State<ControlsWidget> {
CameraPosition position = CameraPosition.front;
List<MediaDevice>? _audioInputs;
List<MediaDevice>? _audioOutputs;
List<MediaDevice>? _videoInputs;
StreamSubscription? _subscription;
bool _speakerphoneOn = false;
@override
void initState() {
super.initState();
participant.addListener(onChange);
_subscription = Hardware.instance.onDeviceChange.stream
.listen((List<MediaDevice> devices) {
revertDevices(devices);
});
Hardware.instance.enumerateDevices().then(revertDevices);
_speakerphoneOn = Hardware.instance.speakerOn ?? false;
}
@override
void dispose() {
_subscription?.cancel();
participant.removeListener(onChange);
super.dispose();
}
LocalParticipant get participant => widget.participant;
void revertDevices(List<MediaDevice> devices) async {
_audioInputs = devices.where((d) => d.kind == 'audioinput').toList();
_audioOutputs = devices.where((d) => d.kind == 'audiooutput').toList();
_videoInputs = devices.where((d) => d.kind == 'videoinput').toList();
setState(() {});
}
void onChange() => setState(() {});
bool get isMuted => participant.isMuted;
Future<bool?> showDisconnectDialog() {
return showDialog<bool>(
context: context,
builder: (ctx) => AlertDialog(
title: Text('callDisconnect'.tr),
content: Text('callDisconnectCaption'.tr),
actions: [
TextButton(
onPressed: () => Navigator.pop(ctx, false),
child: Text('cancel'.tr),
),
TextButton(
onPressed: () => Navigator.pop(ctx, true),
child: Text('confirm'.tr),
),
],
),
);
}
void disconnect() async {
if (await showDisconnectDialog() != true) return;
final ChatCallProvider provider = Get.find();
if (provider.current.value != null) {
provider.disposeRoom();
Navigator.pop(context);
}
}
void disableAudio() async {
await participant.setMicrophoneEnabled(false);
}
void enableAudio() async {
await participant.setMicrophoneEnabled(true);
}
void disableVideo() async {
await participant.setCameraEnabled(false);
}
void enableVideo() async {
await participant.setCameraEnabled(true);
}
void selectAudioOutput(MediaDevice device) async {
await widget.room.setAudioOutputDevice(device);
setState(() {});
}
void selectAudioInput(MediaDevice device) async {
await widget.room.setAudioInputDevice(device);
setState(() {});
}
void selectVideoInput(MediaDevice device) async {
await widget.room.setVideoInputDevice(device);
setState(() {});
}
void setSpeakerphoneOn() {
_speakerphoneOn = !_speakerphoneOn;
Hardware.instance.setSpeakerphoneOn(_speakerphoneOn);
setState(() {});
}
void toggleCamera() async {
final track = participant.videoTrackPublications.firstOrNull?.track;
if (track == null) return;
try {
final newPosition = position.switched();
await track.setCameraPosition(newPosition);
setState(() {
position = newPosition;
});
} catch (error) {
return;
}
}
void enableScreenShare() async {
if (lkPlatformIsDesktop()) {
try {
final source = await showDialog<DesktopCapturerSource>(
context: context,
builder: (context) => ScreenSelectDialog(),
);
if (source == null) {
return;
}
var track = await LocalVideoTrack.createScreenShareTrack(
ScreenShareCaptureOptions(
sourceId: source.id,
maxFrameRate: 15.0,
),
);
await participant.publishVideoTrack(track);
} catch (e) {
final message = e.toString();
context.showErrorDialog(message);
}
return;
}
if (lkPlatformIs(PlatformType.android)) {
requestBackgroundPermission([bool isRetry = false]) async {
try {
bool hasPermissions = await FlutterBackground.hasPermissions;
if (!isRetry) {
const androidConfig = FlutterBackgroundAndroidConfig(
notificationTitle: 'Screen Sharing',
notificationText: 'Solar Messenger is sharing your screen',
notificationImportance: AndroidNotificationImportance.Default,
notificationIcon:
AndroidResource(name: 'launcher_icon', defType: 'mipmap'),
);
hasPermissions = await FlutterBackground.initialize(
androidConfig: androidConfig);
}
if (hasPermissions &&
!FlutterBackground.isBackgroundExecutionEnabled) {
await FlutterBackground.enableBackgroundExecution();
}
} catch (e) {
if (!isRetry) {
return await Future<void>.delayed(const Duration(seconds: 1),
() => requestBackgroundPermission(true));
}
}
}
await requestBackgroundPermission();
}
if (lkPlatformIs(PlatformType.iOS)) {
var track = await LocalVideoTrack.createScreenShareTrack(
const ScreenShareCaptureOptions(
useiOSBroadcastExtension: true,
maxFrameRate: 30.0,
),
);
await participant.publishVideoTrack(track);
return;
}
if (lkPlatformIsWebMobile()) {
ScaffoldMessenger.of(context).showSnackBar(const SnackBar(
content: Text('Screen share is not supported on mobile platforms.'),
));
return;
}
await participant.setScreenShareEnabled(true, captureScreenAudio: true);
}
void disableScreenShare() async {
await participant.setScreenShareEnabled(false);
if (lkPlatformIs(PlatformType.android)) {
// Android specific
try {
await FlutterBackground.disableBackgroundExecution();
} catch (_) {}
}
}
@override
Widget build(BuildContext context) {
return Padding(
padding: const EdgeInsets.symmetric(
vertical: 10,
),
child: Wrap(
alignment: WrapAlignment.center,
spacing: 5,
runSpacing: 5,
children: [
IconButton(
icon: Transform.flip(
flipX: true, child: const Icon(Icons.exit_to_app)),
color: Theme.of(context).colorScheme.onSurface,
onPressed: disconnect,
),
if (participant.isMicrophoneEnabled())
if (lkPlatformIs(PlatformType.android))
IconButton(
onPressed: disableAudio,
icon: const Icon(Icons.mic),
color: Theme.of(context).colorScheme.onSurface,
tooltip: 'callMicrophoneOff'.tr,
)
else
PopupMenuButton<MediaDevice>(
icon: const Icon(Icons.settings_voice),
itemBuilder: (BuildContext context) {
return [
PopupMenuItem<MediaDevice>(
value: null,
onTap: isMuted ? enableAudio : disableAudio,
child: ListTile(
leading: const Icon(Icons.mic_off),
title: Text('callMicrophoneOn'.tr),
),
),
if (_audioInputs != null)
..._audioInputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
widget.room.selectedAudioInputDeviceId)
? const Icon(Icons.check_box_outlined)
: const Icon(Icons.check_box_outline_blank),
title: Text(device.label),
),
onTap: () => selectAudioInput(device),
);
})
];
},
)
else
IconButton(
onPressed: enableAudio,
icon: const Icon(Icons.mic_off),
color: Theme.of(context).colorScheme.onSurface,
tooltip: 'callMicrophoneOn'.tr,
),
if (participant.isCameraEnabled())
PopupMenuButton<MediaDevice>(
icon: const Icon(Icons.videocam_sharp),
itemBuilder: (BuildContext context) {
return [
PopupMenuItem<MediaDevice>(
value: null,
onTap: disableVideo,
child: ListTile(
leading: const Icon(Icons.videocam_off),
title: Text('callCameraOff'.tr),
),
),
if (_videoInputs != null)
..._videoInputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
widget.room.selectedVideoInputDeviceId)
? const Icon(Icons.check_box_outlined)
: const Icon(Icons.check_box_outline_blank),
title: Text(device.label),
),
onTap: () => selectVideoInput(device),
);
})
];
},
)
else
IconButton(
onPressed: enableVideo,
icon: const Icon(Icons.videocam_off),
color: Theme.of(context).colorScheme.onSurface,
tooltip: 'callCameraOn'.tr,
),
IconButton(
icon: Icon(position == CameraPosition.back
? Icons.video_camera_back
: Icons.video_camera_front),
color: Theme.of(context).colorScheme.onSurface,
onPressed: () => toggleCamera(),
tooltip: 'callVideoFlip'.tr,
),
if (!lkPlatformIs(PlatformType.iOS))
PopupMenuButton<MediaDevice>(
icon: const Icon(Icons.volume_up),
itemBuilder: (BuildContext context) {
return [
PopupMenuItem<MediaDevice>(
value: null,
child: ListTile(
leading: const Icon(Icons.speaker),
title: Text('callSpeakerSelect'.tr),
),
),
if (_audioOutputs != null)
..._audioOutputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
widget.room.selectedAudioOutputDeviceId)
? const Icon(Icons.check_box_outlined)
: const Icon(Icons.check_box_outline_blank),
title: Text(device.label),
),
onTap: () => selectAudioOutput(device),
);
})
];
},
),
if (!kIsWeb && lkPlatformIs(PlatformType.iOS))
IconButton(
onPressed: Hardware.instance.canSwitchSpeakerphone
? setSpeakerphoneOn
: null,
color: Theme.of(context).colorScheme.onSurface,
icon: Icon(
_speakerphoneOn ? Icons.volume_up : Icons.volume_down,
),
tooltip: 'callSpeakerphoneToggle'.tr,
),
if (participant.isScreenShareEnabled())
IconButton(
icon: const Icon(Icons.monitor_outlined),
color: Theme.of(context).colorScheme.onSurface,
onPressed: () => disableScreenShare(),
tooltip: 'callScreenOff'.tr,
)
else
IconButton(
icon: const Icon(Icons.monitor),
color: Theme.of(context).colorScheme.onSurface,
onPressed: () => enableScreenShare(),
tooltip: 'callScreenOn'.tr,
),
],
),
);
}
}

View File

@ -0,0 +1,92 @@
import 'package:flutter/material.dart';
import 'package:flutter_animate/flutter_animate.dart';
import 'package:solian/models/account.dart';
import 'package:solian/widgets/account/account_avatar.dart';
import 'dart:math' as math;
class NoContentWidget extends StatefulWidget {
final Account? userinfo;
final bool isSpeaking;
final bool isFixed;
const NoContentWidget({
super.key,
this.userinfo,
this.isFixed = false,
required this.isSpeaking,
});
@override
State<NoContentWidget> createState() => _NoContentWidgetState();
}
class _NoContentWidgetState extends State<NoContentWidget>
with SingleTickerProviderStateMixin {
late final AnimationController _animationController;
@override
void initState() {
super.initState();
_animationController = AnimationController(vsync: this);
}
@override
void didUpdateWidget(NoContentWidget old) {
super.didUpdateWidget(old);
if (widget.isSpeaking) {
_animationController.repeat(reverse: true);
} else {
_animationController
.animateTo(0, duration: 300.ms)
.then((_) => _animationController.reset());
}
}
@override
Widget build(BuildContext context) {
final double radius = widget.isFixed
? 32
: math.min(
MediaQuery.of(context).size.width * 0.1,
MediaQuery.of(context).size.height * 0.1,
);
return Container(
alignment: Alignment.center,
child: Center(
child: Animate(
autoPlay: false,
controller: _animationController,
effects: [
CustomEffect(
begin: widget.isSpeaking ? 2 : 0,
end: 8,
curve: Curves.easeInOut,
duration: 1250.ms,
builder: (context, value, child) => Container(
decoration: BoxDecoration(
borderRadius: BorderRadius.all(Radius.circular(radius + 8)),
border: value > 0
? Border.all(color: Colors.green, width: value)
: null,
),
child: child,
),
)
],
child: AccountAvatar(
content: widget.userinfo!.avatar,
bgColor: Colors.transparent,
radius: radius,
),
),
),
);
}
@override
void dispose() {
_animationController.dispose();
super.dispose();
}
}

View File

@ -0,0 +1,244 @@
import 'dart:convert';
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:solian/models/account.dart';
import 'package:solian/models/call.dart';
import 'package:solian/widgets/chat/call/call_no_content.dart';
import 'package:solian/widgets/chat/call/call_participant_info.dart';
import 'package:solian/widgets/chat/call/call_participant_menu.dart';
import 'package:solian/widgets/chat/call/call_participant_stats.dart';
abstract class ParticipantWidget extends StatefulWidget {
static ParticipantWidget widgetFor(ParticipantTrack participantTrack,
{bool isFixed = false, bool showStatsLayer = false}) {
if (participantTrack.participant is LocalParticipant) {
return LocalParticipantWidget(
participantTrack.participant as LocalParticipant,
participantTrack.videoTrack,
isFixed,
participantTrack.isScreenShare,
showStatsLayer,
);
} else if (participantTrack.participant is RemoteParticipant) {
return RemoteParticipantWidget(
participantTrack.participant as RemoteParticipant,
participantTrack.videoTrack,
isFixed,
participantTrack.isScreenShare,
showStatsLayer,
);
}
throw UnimplementedError('Unknown participant type');
}
abstract final Participant participant;
abstract final VideoTrack? videoTrack;
abstract final bool isScreenShare;
abstract final bool isFixed;
abstract final bool showStatsLayer;
final VideoQuality quality;
const ParticipantWidget({
super.key,
this.quality = VideoQuality.MEDIUM,
});
}
class LocalParticipantWidget extends ParticipantWidget {
@override
final LocalParticipant participant;
@override
final VideoTrack? videoTrack;
@override
final bool isFixed;
@override
final bool isScreenShare;
@override
final bool showStatsLayer;
const LocalParticipantWidget(
this.participant,
this.videoTrack,
this.isFixed,
this.isScreenShare,
this.showStatsLayer, {
super.key,
});
@override
State<StatefulWidget> createState() => _LocalParticipantWidgetState();
}
class RemoteParticipantWidget extends ParticipantWidget {
@override
final RemoteParticipant participant;
@override
final VideoTrack? videoTrack;
@override
final bool isFixed;
@override
final bool isScreenShare;
@override
final bool showStatsLayer;
const RemoteParticipantWidget(
this.participant,
this.videoTrack,
this.isFixed,
this.isScreenShare,
this.showStatsLayer, {
super.key,
});
@override
State<StatefulWidget> createState() => _RemoteParticipantWidgetState();
}
abstract class _ParticipantWidgetState<T extends ParticipantWidget>
extends State<T> {
VideoTrack? get _activeVideoTrack;
TrackPublication? get _firstAudioPublication;
Account? _userinfoMetadata;
@override
void initState() {
super.initState();
widget.participant.addListener(onParticipantChanged);
onParticipantChanged();
}
@override
void dispose() {
widget.participant.removeListener(onParticipantChanged);
super.dispose();
}
@override
void didUpdateWidget(covariant T oldWidget) {
oldWidget.participant.removeListener(onParticipantChanged);
widget.participant.addListener(onParticipantChanged);
onParticipantChanged();
super.didUpdateWidget(oldWidget);
}
void onParticipantChanged() {
setState(() {
if (widget.participant.metadata != null) {
_userinfoMetadata =
Account.fromJson(jsonDecode(widget.participant.metadata!));
}
});
}
@override
Widget build(BuildContext ctx) {
return Stack(
children: [
_activeVideoTrack != null && !_activeVideoTrack!.muted
? VideoTrackRenderer(
_activeVideoTrack!,
fit: RTCVideoViewObjectFit.RTCVideoViewObjectFitContain,
)
: NoContentWidget(
userinfo: _userinfoMetadata,
isFixed: widget.isFixed,
isSpeaking: widget.participant.isSpeaking,
),
if (widget.showStatsLayer)
Positioned(
top: 30,
right: 30,
child: ParticipantStatsWidget(participant: widget.participant),
),
Align(
alignment: Alignment.bottomCenter,
child: Column(
crossAxisAlignment: CrossAxisAlignment.stretch,
mainAxisSize: MainAxisSize.min,
children: [
ParticipantInfoWidget(
title: widget.participant.name.isNotEmpty
? widget.participant.name
: widget.participant.identity,
audioAvailable: _firstAudioPublication?.muted == false &&
_firstAudioPublication?.subscribed == true,
connectionQuality: widget.participant.connectionQuality,
isScreenShare: widget.isScreenShare,
),
],
),
),
],
);
}
}
class _LocalParticipantWidgetState
extends _ParticipantWidgetState<LocalParticipantWidget> {
@override
LocalTrackPublication<LocalAudioTrack>? get _firstAudioPublication =>
widget.participant.audioTrackPublications.firstOrNull;
@override
VideoTrack? get _activeVideoTrack => widget.videoTrack;
}
class _RemoteParticipantWidgetState
extends _ParticipantWidgetState<RemoteParticipantWidget> {
@override
RemoteTrackPublication<RemoteAudioTrack>? get _firstAudioPublication =>
widget.participant.audioTrackPublications.firstOrNull;
@override
VideoTrack? get _activeVideoTrack => widget.videoTrack;
}
class InteractiveParticipantWidget extends StatelessWidget {
final double? width;
final double? height;
final Color? color;
final bool isFixed;
final ParticipantTrack participant;
final Function() onTap;
const InteractiveParticipantWidget({
super.key,
this.width,
this.height,
this.color,
this.isFixed = false,
required this.participant,
required this.onTap,
});
@override
Widget build(BuildContext context) {
return Material(
color: Colors.transparent,
child: InkWell(
child: Container(
width: width,
height: height,
color: color,
child: ParticipantWidget.widgetFor(participant, isFixed: isFixed),
),
onTap: () => onTap(),
onLongPress: () {
if (participant.participant is LocalParticipant) return;
showModalBottomSheet(
context: context,
builder: (context) => ParticipantMenu(
participant: participant.participant as RemoteParticipant,
videoTrack: participant.videoTrack,
isScreenShare: participant.isScreenShare,
),
);
},
),
);
}
}

View File

@ -0,0 +1,72 @@
import 'package:flutter/material.dart';
import 'package:livekit_client/livekit_client.dart';
class ParticipantInfoWidget extends StatelessWidget {
final String? title;
final bool audioAvailable;
final ConnectionQuality connectionQuality;
final bool isScreenShare;
const ParticipantInfoWidget({
super.key,
this.title,
this.audioAvailable = true,
this.connectionQuality = ConnectionQuality.unknown,
this.isScreenShare = false,
});
@override
Widget build(BuildContext context) => Container(
color: Theme.of(context).colorScheme.onSurface.withOpacity(0.75),
padding: const EdgeInsets.symmetric(
vertical: 7,
horizontal: 10,
),
child: Row(
mainAxisAlignment: MainAxisAlignment.end,
crossAxisAlignment: CrossAxisAlignment.center,
children: [
if (title != null)
Flexible(
child: Text(
title!,
overflow: TextOverflow.ellipsis,
style: const TextStyle(color: Colors.white),
),
),
isScreenShare
? const Padding(
padding: EdgeInsets.only(left: 5),
child: Icon(
Icons.monitor,
color: Colors.white,
size: 16,
),
)
: Padding(
padding: const EdgeInsets.only(left: 5),
child: Icon(
audioAvailable ? Icons.mic : Icons.mic_off,
color: audioAvailable ? Colors.white : Colors.red,
size: 16,
),
),
if (connectionQuality != ConnectionQuality.unknown)
Padding(
padding: const EdgeInsets.only(left: 5),
child: Icon(
connectionQuality == ConnectionQuality.poor
? Icons.wifi_off_outlined
: Icons.wifi,
color: {
ConnectionQuality.excellent: Colors.green,
ConnectionQuality.good: Colors.orange,
ConnectionQuality.poor: Colors.red,
}[connectionQuality],
size: 16,
),
),
],
),
);
}

View File

@ -0,0 +1,160 @@
import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:livekit_client/livekit_client.dart';
class ParticipantMenu extends StatefulWidget {
final RemoteParticipant participant;
final VideoTrack? videoTrack;
final bool isScreenShare;
final bool showStatsLayer;
const ParticipantMenu({
super.key,
required this.participant,
this.videoTrack,
this.isScreenShare = false,
this.showStatsLayer = false,
});
@override
State<ParticipantMenu> createState() => _ParticipantMenuState();
}
class _ParticipantMenuState extends State<ParticipantMenu> {
RemoteTrackPublication<RemoteVideoTrack>? get _videoPublication =>
widget.participant.videoTrackPublications
.where((element) => element.sid == widget.videoTrack?.sid)
.firstOrNull;
RemoteTrackPublication<RemoteAudioTrack>? get _firstAudioPublication =>
widget.participant.audioTrackPublications.firstOrNull;
void tookAction() {
if (Navigator.canPop(context)) {
Navigator.pop(context);
}
}
@override
Widget build(BuildContext context) {
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Container(
padding:
const EdgeInsets.only(left: 8, right: 8, top: 20, bottom: 12),
child: Padding(
padding: const EdgeInsets.symmetric(
horizontal: 8,
vertical: 12,
),
child: Text(
'callParticipantAction'.tr,
style: Theme.of(context).textTheme.headlineSmall,
),
),
),
Expanded(
child: ListView(
children: [
if (_firstAudioPublication != null && !widget.isScreenShare)
ListTile(
leading: Icon(
Icons.volume_up,
color: {
TrackSubscriptionState.notAllowed:
Theme.of(context).colorScheme.error,
TrackSubscriptionState.unsubscribed: Theme.of(context)
.colorScheme
.onSurface
.withOpacity(0.6),
TrackSubscriptionState.subscribed:
Theme.of(context).colorScheme.primary,
}[_firstAudioPublication!.subscriptionState],
),
title: Text(
_firstAudioPublication!.subscribed
? 'callParticipantMicrophoneOff'.tr
: 'callParticipantMicrophoneOn'.tr,
),
onTap: () {
if (_firstAudioPublication!.subscribed) {
_firstAudioPublication!.unsubscribe();
} else {
_firstAudioPublication!.subscribe();
}
tookAction();
},
),
if (_videoPublication != null)
ListTile(
leading: Icon(
widget.isScreenShare ? Icons.monitor : Icons.videocam,
color: {
TrackSubscriptionState.notAllowed:
Theme.of(context).colorScheme.error,
TrackSubscriptionState.unsubscribed: Theme.of(context)
.colorScheme
.onSurface
.withOpacity(0.6),
TrackSubscriptionState.subscribed:
Theme.of(context).colorScheme.primary,
}[_videoPublication!.subscriptionState],
),
title: Text(
_videoPublication!.subscribed
? 'callParticipantVideoOff'.tr
: 'callParticipantVideoOn'.tr,
),
onTap: () {
if (_videoPublication!.subscribed) {
_videoPublication!.unsubscribe();
} else {
_videoPublication!.subscribe();
}
tookAction();
},
),
if (_videoPublication != null) const Divider(thickness: 0.3),
if (_videoPublication != null)
...[30, 15, 8].map(
(x) => ListTile(
leading: Icon(
_videoPublication?.fps == x
? Icons.check_box_outlined
: Icons.check_box_outline_blank,
),
title: Text('Set preferred frames per second to $x'),
onTap: () {
_videoPublication!.setVideoFPS(x);
tookAction();
},
),
),
if (_videoPublication != null) const Divider(thickness: 0.3),
if (_videoPublication != null)
...[
('High', VideoQuality.HIGH),
('Medium', VideoQuality.MEDIUM),
('Low', VideoQuality.LOW),
].map(
(x) => ListTile(
leading: Icon(
_videoPublication?.videoQuality == x.$2
? Icons.check_box_outlined
: Icons.check_box_outline_blank,
),
title: Text('Set preferred quality to ${x.$1}'),
onTap: () {
_videoPublication!.setVideoQuality(x.$2);
tookAction();
},
),
),
],
),
),
],
);
}
}

View File

@ -0,0 +1,133 @@
import 'package:flutter/material.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:solian/models/call.dart';
class ParticipantStatsWidget extends StatefulWidget {
const ParticipantStatsWidget({super.key, required this.participant});
final Participant participant;
@override
State<StatefulWidget> createState() => _ParticipantStatsWidgetState();
}
class _ParticipantStatsWidgetState extends State<ParticipantStatsWidget> {
List<EventsListener<TrackEvent>> listeners = [];
ParticipantStatsType statsType = ParticipantStatsType.unknown;
Map<String, String> stats = {};
void _setUpListener(Track track) {
var listener = track.createListener();
listeners.add(listener);
if (track is LocalVideoTrack) {
statsType = ParticipantStatsType.localVideoSender;
listener.on<VideoSenderStatsEvent>((event) {
setState(() {
stats['video tx'] = 'total sent ${event.currentBitrate.toInt()} kbps';
event.stats.forEach((key, value) {
stats['layer-$key'] =
'${value.frameWidth ?? 0}x${value.frameHeight ?? 0} ${value.framesPerSecond?.toDouble() ?? 0} fps, ${event.bitrateForLayers[key] ?? 0} kbps';
});
var firstStats =
event.stats['f'] ?? event.stats['h'] ?? event.stats['q'];
if (firstStats != null) {
stats['encoder'] = firstStats.encoderImplementation ?? '';
stats['video codec'] =
'${firstStats.mimeType}, ${firstStats.clockRate}hz, pt: ${firstStats.payloadType}';
stats['qualityLimitationReason'] =
firstStats.qualityLimitationReason ?? '';
}
});
});
} else if (track is RemoteVideoTrack) {
statsType = ParticipantStatsType.remoteVideoReceiver;
listener.on<VideoReceiverStatsEvent>((event) {
setState(() {
stats['video rx'] = '${event.currentBitrate.toInt()} kbps';
stats['video codec'] =
'${event.stats.mimeType}, ${event.stats.clockRate}hz, pt: ${event.stats.payloadType}';
stats['video size'] =
'${event.stats.frameWidth}x${event.stats.frameHeight} ${event.stats.framesPerSecond?.toDouble()}fps';
stats['video jitter'] = '${event.stats.jitter} s';
stats['video decoder'] = '${event.stats.decoderImplementation}';
stats['video packets lost'] = '${event.stats.packetsLost}';
stats['video packets received'] = '${event.stats.packetsReceived}';
stats['video frames received'] = '${event.stats.framesReceived}';
stats['video frames decoded'] = '${event.stats.framesDecoded}';
stats['video frames dropped'] = '${event.stats.framesDropped}';
});
});
} else if (track is LocalAudioTrack) {
statsType = ParticipantStatsType.localAudioSender;
listener.on<AudioSenderStatsEvent>((event) {
setState(() {
stats['audio tx'] = '${event.currentBitrate.toInt()} kbps';
stats['audio codec'] =
'${event.stats.mimeType}, ${event.stats.clockRate}hz, ${event.stats.channels}ch, pt: ${event.stats.payloadType}';
});
});
} else if (track is RemoteAudioTrack) {
statsType = ParticipantStatsType.remoteAudioReceiver;
listener.on<AudioReceiverStatsEvent>((event) {
setState(() {
stats['audio rx'] = '${event.currentBitrate.toInt()} kbps';
stats['audio codec'] =
'${event.stats.mimeType}, ${event.stats.clockRate}hz, ${event.stats.channels}ch, pt: ${event.stats.payloadType}';
stats['audio jitter'] = '${event.stats.jitter} s';
stats['audio concealed samples'] =
'${event.stats.concealedSamples} / ${event.stats.concealmentEvents}';
stats['audio packets lost'] = '${event.stats.packetsLost}';
stats['audio packets received'] = '${event.stats.packetsReceived}';
});
});
}
}
onParticipantChanged() {
for (var element in listeners) {
element.dispose();
}
listeners.clear();
for (var track in [
...widget.participant.videoTrackPublications,
...widget.participant.audioTrackPublications
]) {
if (track.track != null) {
_setUpListener(track.track!);
}
}
}
@override
void initState() {
super.initState();
widget.participant.addListener(onParticipantChanged);
onParticipantChanged();
}
@override
void deactivate() {
for (var element in listeners) {
element.dispose();
}
widget.participant.removeListener(onParticipantChanged);
super.deactivate();
}
num sendBitrate = 0;
@override
Widget build(BuildContext context) {
return Container(
color: Theme.of(context).colorScheme.onSurface.withOpacity(0.75),
padding: const EdgeInsets.symmetric(
vertical: 8,
horizontal: 8,
),
child: Column(
children:
stats.entries.map((e) => Text('${e.key}: ${e.value}')).toList(),
),
);
}
}

View File

@ -0,0 +1,189 @@
import 'package:dropdown_button2/dropdown_button2.dart';
import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:livekit_client/livekit_client.dart';
import 'package:solian/exts.dart';
import 'package:solian/models/call.dart';
import 'package:solian/models/channel.dart';
import 'package:solian/providers/auth.dart';
import 'package:solian/providers/content/call.dart';
class ChatCallPrejoinPopup extends StatefulWidget {
final Call ongoingCall;
final Channel channel;
const ChatCallPrejoinPopup({
super.key,
required this.ongoingCall,
required this.channel,
});
@override
State<ChatCallPrejoinPopup> createState() => _ChatCallPrejoinPopupState();
}
class _ChatCallPrejoinPopupState extends State<ChatCallPrejoinPopup> {
bool _isBusy = false;
void performJoin() async {
final AuthProvider auth = Get.find();
final ChatCallProvider provider = Get.find();
if (!await auth.isAuthorized) return;
setState(() => _isBusy = true);
provider.setCall(widget.ongoingCall, widget.channel);
try {
final resp = await provider.getRoomToken();
final token = resp.$1;
final endpoint = resp.$2;
provider.initRoom();
provider.setupRoomListeners(
onDisconnected: (reason) {
context.showSnackbar(
'callDisconnected'.trParams({'reason': reason.toString()}),
);
},
);
provider.joinRoom(endpoint, token);
provider.gotoScreen(context).then((_) {
Navigator.pop(context);
});
} catch (e) {
context.showErrorDialog(e);
}
setState(() => _isBusy = false);
}
@override
void initState() {
final ChatCallProvider provider = Get.find();
provider.checkPermissions().then((_) {
provider.initHardware();
});
super.initState();
}
@override
Widget build(BuildContext context) {
final ChatCallProvider provider = Get.find();
return Obx(
() => Center(
child: Container(
constraints: const BoxConstraints(maxWidth: 320),
child: Column(
mainAxisAlignment: MainAxisAlignment.center,
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Text('callMicrophone'.tr),
Switch(
value: provider.enableAudio.value,
onChanged: null,
),
],
).paddingOnly(bottom: 5),
DropdownButtonHideUnderline(
child: DropdownButton2<MediaDevice>(
isExpanded: true,
disabledHint: Text('callMicrophoneDisabled'.tr),
hint: Text('callMicrophoneSelect'.tr),
items: provider.enableAudio.value
? provider.audioInputs
.map(
(item) => DropdownMenuItem<MediaDevice>(
value: item,
child: Text(item.label),
),
)
.toList()
.cast<DropdownMenuItem<MediaDevice>>()
: [],
value: provider.audioDevice.value,
onChanged: (MediaDevice? value) async {
if (value != null) {
provider.audioDevice.value = value;
await provider.changeLocalAudioTrack();
}
},
buttonStyleData: const ButtonStyleData(
height: 40,
width: 320,
),
),
).paddingOnly(bottom: 25),
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Text('callCamera'.tr),
Switch(
value: provider.enableVideo.value,
onChanged: (value) => provider.enableVideo.value = value,
),
],
).paddingOnly(bottom: 5),
DropdownButtonHideUnderline(
child: DropdownButton2<MediaDevice>(
isExpanded: true,
disabledHint: Text('callCameraDisabled'.tr),
hint: Text('callCameraSelect'.tr),
items: provider.enableVideo.value
? provider.videoInputs
.map(
(item) => DropdownMenuItem<MediaDevice>(
value: item,
child: Text(item.label),
),
)
.toList()
.cast<DropdownMenuItem<MediaDevice>>()
: [],
value: provider.videoDevice.value,
onChanged: (MediaDevice? value) async {
if (value != null) {
provider.videoDevice.value = value;
await provider.changeLocalVideoTrack();
}
},
buttonStyleData: const ButtonStyleData(
height: 40,
width: 320,
),
),
).paddingOnly(bottom: 25),
if (_isBusy)
const Center(child: CircularProgressIndicator())
else
ElevatedButton(
style: ElevatedButton.styleFrom(
minimumSize: const Size(320, 56),
backgroundColor:
Theme.of(context).colorScheme.primaryContainer,
),
onPressed: _isBusy ? null : performJoin,
child: Text('callJoin'.tr),
),
],
),
),
),
);
}
@override
void dispose() {
Get.find<ChatCallProvider>()
..deactivateHardware()
..disposeHardware();
super.dispose();
}
}

View File

@ -0,0 +1,27 @@
import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:solian/providers/content/call.dart';
class ChatCallCurrentIndicator extends StatelessWidget {
const ChatCallCurrentIndicator({super.key});
@override
Widget build(BuildContext context) {
final ChatCallProvider provider = Get.find();
if (provider.current.value == null || provider.channel.value == null) {
return const SizedBox();
}
return ListTile(
tileColor: Theme.of(context).colorScheme.surfaceContainerHigh,
contentPadding: const EdgeInsets.symmetric(horizontal: 32),
leading: const Icon(Icons.call),
title: Text(provider.channel.value!.name),
subtitle: Text('callAlreadyOngoing'.tr),
onTap: () {
provider.gotoScreen(context);
},
);
}
}

View File

@ -5,10 +5,10 @@ packages:
dependency: transitive
description:
name: archive
sha256: "6bd38d335f0954f5fad9c79e614604fbf03a0e5b975923dd001b6ea965ef5b4b"
sha256: cb6a278ef2dbb298455e1a713bda08524a175630ec643a242c399c932a0a1f7d
url: "https://pub.dev"
source: hosted
version: "3.6.0"
version: "3.6.1"
args:
dependency: transitive
description:
@ -254,6 +254,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "4.5.0"
flutter_background:
dependency: "direct main"
description:
name: flutter_background
sha256: "035c31a738509d67ee70bbf174e5aa7db462c371e838ec8259700c5c4e7ca17f"
url: "https://pub.dev"
source: hosted
version: "1.2.0"
flutter_lints:
dependency: "direct dev"
description:
@ -468,10 +476,10 @@ packages:
dependency: transitive
description:
name: image_picker_ios
sha256: "4824d8c7f6f89121ef0122ff79bb00b009607faecc8545b86bca9ab5ce1e95bf"
sha256: "6703696ad49f5c3c8356d576d7ace84d1faf459afb07accbb0fae780753ff447"
url: "https://pub.dev"
source: hosted
version: "0.8.11+2"
version: "0.8.12"
image_picker_linux:
dependency: transitive
description:
@ -1062,7 +1070,7 @@ packages:
source: hosted
version: "14.2.1"
wakelock_plus:
dependency: transitive
dependency: "direct main"
description:
name: wakelock_plus
sha256: "14758533319a462ffb5aa3b7ddb198e59b29ac3b02da14173a1715d65d4e6e68"

View File

@ -62,6 +62,8 @@ dependencies:
chewie: ^1.8.1
livekit_client: ^2.1.5
flutter_webrtc: ^0.10.7
wakelock_plus: ^1.2.5
flutter_background: ^1.2.0
dev_dependencies:
flutter_test: