onufitness_mobile/lib/services/agora/call_services.dart
2026-01-13 11:36:24 +05:30

1235 lines
37 KiB
Dart

import 'dart:convert';
import 'dart:async';
import 'dart:developer';
import 'dart:io';
import 'package:agora_rtc_engine/agora_rtc_engine.dart';
import 'package:agora_rtm/agora_rtm.dart';
import 'package:audioplayers/audioplayers.dart';
import 'package:awesome_snackbar_content/awesome_snackbar_content.dart';
import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:onufitness/constants/api_enum_constant.dart';
import 'package:onufitness/constants/data_constant.dart';
import 'package:onufitness/routes/route_constant.dart';
import 'package:onufitness/screens/chat/controllers/chat_controller.dart';
import 'package:onufitness/services/local_storage_services/shared_services.dart';
import 'package:onufitness/utils/custom_sneakbar.dart';
import 'package:onufitness/services/logger_service.dart';
import 'package:onufitness/widgets/others/new_custom_sneakbar.dart';
import 'package:permission_handler/permission_handler.dart';
import '../../controller/get_agora_token_controller.dart';
/// Media type of a call: audio-only or audio + video.
enum CallType { voice, video }
/// Lifecycle state of a call session:
/// idle -> calling (outgoing) / ringing (incoming) -> connected -> idle.
enum CallState { idle, calling, ringing, connected }
/// Global GetX service owning the Agora RTM (signaling) and RTC (media)
/// engines plus all reactive call-session state: call setup/teardown,
/// ringtones, timers and call-screen navigation.
class AgoraCallService extends GetxService {
// Singleton pattern for global access
static AgoraCallService? _instance;
// Lazily resolves the shared instance from GetX; assumes the service was
// registered (Get.put / Get.putAsync) before first access.
static AgoraCallService get instance {
_instance ??= Get.find<AgoraCallService>();
return _instance!;
}
final logger = LoggerService();
// Bootstraps RTM + RTC and logs in to RTM; returns `this` so it can be
// used directly with Get.putAsync(() => AgoraCallService().init()).
Future<AgoraCallService> init() async {
await initializeAgora();
return this;
}
/// Assigns all call-session fields in one pass so observers see a single
/// consistent update rather than several partial ones.
void setCallDataBatch({
  required String channel,
  required String userId,
  required String name,
  required String profilePic,
  required CallType callType,
  required bool isCaller,
  required CallState state,
  String? token,
}) {
  // Routing / identity data for the session.
  channelName.value = channel;
  remoteUserId.value = userId;
  // Peer display data shown on the call screens.
  callerName.value = name;
  callerProfilePic.value = profilePic;
  // Session mode, local role and lifecycle state.
  currentCallType.value = callType;
  amICaller.value = isCaller;
  callState.value = state;
  // Only overwrite the RTC token when a fresh one was supplied.
  if (token case final freshToken?) {
    rtcToken.value = freshToken;
  }
}
//....................................................................................................
// Looping player for incoming/outgoing ringtones.
final AudioPlayer _ringtonePlayer = AudioPlayer();
// Guards against overlapping ringtone loops.
bool _isRingtonePlaying = false;
// Agora signaling (RTM) and media (RTC) handles; null until initialized.
RtmClient? rtmClient;
RtcEngine? rtcEngine;
// Ticks once per second while a call is connected (see startCallTimer).
Timer? callTimer;
//....
// Local user's Agora id: login user id with dashes stripped.
String agoraUserId = SharedServices.getLoginDetails()!.data!.userId
.toString()
.replaceAll('-', '');
//.....
// Cached RTM token from local storage; refreshed on login / expiry.
String rtmToken =
SharedServices.getAgoraUserAndRtmTokens()?.data?.agoraRtmToken ?? "";
static const appId = agoraAppId;
//.....
// Per-call RTC channel token fetched from the backend.
RxString rtcToken = "".obs;
// Reactive variables
var callState = CallState.idle.obs;
var currentCallType = CallType.voice.obs;
var isLocalVideoEnabled = true.obs;
var isLocalAudioEnabled = true.obs;
var isRemoteVideoEnabled = true.obs;
var remoteUserId = ''.obs;
var localUserId = ''.obs;
var channelName = ''.obs;
// Elapsed call time in seconds, driven by callTimer.
var callDuration = 0.obs;
var isVideoPreviewStarted = false.obs;
// Whether this device initiated the current call.
RxBool amICaller = false.obs;
// New variables for caller information
var callerName = ''.obs;
var callerProfilePic = ''.obs;
// Fires after one minute of unanswered calling/ringing.
Timer? callTimeoutTimer;
// Remote user's integer uid reported by RTC onUserJoined.
RxInt integerRemoteUserUid = 0.obs;
// Debounces duplicate end/reject navigation.
bool _isNavigating = false;
/// One-shot bootstrap: refreshes the cached RTM token, initializes the
/// RTM (signaling) and RTC (media) engines, then logs in to RTM.
/// Failures are logged and swallowed so app startup is never blocked.
Future<void> initializeAgora() async {
try {
rtmToken =
SharedServices.getAgoraUserAndRtmTokens()?.data?.agoraRtmToken ?? "";
// Initialize RTM
await initializeRTM();
// Initialize RTC
await initializeRTC();
// Login to RTM
await agoraRTMlogin();
} catch (e, stackTrace) {
logger.error(
"Failed to initialize Agora services",
error: e,
stackTrace: stackTrace,
);
}
}
/// Logs this device in to Agora RTM with the cached token.
///
/// Refreshes [agoraUserId]/[rtmToken] from local storage first. If login
/// fails with error code -10009 (token expired), fetches fresh tokens via
/// [AgoraTokenController] and retries once. On success [localUserId] is
/// published for the UI.
Future<void> agoraRTMlogin() async {
  agoraUserId = SharedServices.getLoginDetails()!.data!.userId
      .toString()
      .replaceAll('-', '');
  rtmToken =
      SharedServices.getAgoraUserAndRtmTokens()?.data?.agoraRtmToken ?? "";
  try {
    var (status, response) = await rtmClient!.login(rtmToken);
    if (status.error == true) {
      if (status.errorCode == "-10009") {
        // Token expired: fetch fresh credentials and retry the login once.
        await AgoraTokenController().getAgoraUserAndRrmToken();
        agoraUserId = SharedServices.getLoginDetails()!.data!.userId
            .toString()
            .replaceAll('-', '');
        rtmToken =
            SharedServices.getAgoraUserAndRtmTokens()?.data?.agoraRtmToken ??
                "";
        // FIX: the retry result was previously ignored, so localUserId was
        // never set after a successful token refresh.
        var (retryStatus, _) = await rtmClient!.login(rtmToken);
        if (retryStatus.error != true) {
          localUserId.value = agoraUserId;
        } else {
          logger.error(
            "RTM login retry failed: ${retryStatus.errorCode} - ${retryStatus.reason}",
          );
        }
      } else {
        // FIX: non-expiry login failures were silently swallowed before.
        logger.error(
          "RTM login failed: ${status.errorCode} - ${status.reason}",
        );
      }
    } else {
      localUserId.value = agoraUserId;
    }
  } catch (e, stackTrace) {
    logger.error(
      "Failed to login to Agora RTM",
      error: e,
      stackTrace: stackTrace,
    );
  }
}
/// Creates the RTM client for [agoraUserId] and wires up its listeners.
///
/// On failure [rtmClient] stays null; [checkRtmClient] retries lazily.
Future<void> initializeRTM() async {
  try {
    final (status, client) = await RTM(appId, agoraUserId);
    if (status.error == true) {
      // FIX: this branch used to be empty, silently discarding the
      // failure reason.
      logger.error(
        "Failed to create Agora RTM client: ${status.errorCode} - ${status.reason}",
      );
    } else {
      rtmClient = client;
      _setupRTMListeners();
    }
  } catch (e, stackTrace) {
    logger.error(
      "Failed to initialize Agora RTM",
      error: e,
      stackTrace: stackTrace,
    );
  }
}
/// Creates and initializes the RTC engine in communication profile
/// (1:1 call semantics) and registers the media event handlers.
Future<void> initializeRTC() async {
try {
rtcEngine = createAgoraRtcEngine();
await rtcEngine?.initialize(
const RtcEngineContext(
appId: appId,
channelProfile: ChannelProfileType.channelProfileCommunication,
),
);
_setupRTCListeners();
} catch (e, stackTrace) {
logger.error(
"Failed to initialize Agora RTC",
error: e,
stackTrace: stackTrace,
);
}
}
/// Subscribes to inbound RTM messages; payloads are UTF-8 encoded JSON
/// maps that are decoded and dispatched to [_handleIncomingMessage].
void _setupRTMListeners() {
rtmClient?.addListener(
message: (event) {
try {
final messageData = jsonDecode(utf8.decode(event.message!));
_handleIncomingMessage(messageData);
} catch (e, stackTrace) {
logger.error(
"Failed to parse RTM message",
error: e,
stackTrace: stackTrace,
);
}
},
);
}
/// Registers RTC media-session callbacks: join success (applies audio
/// routing), remote join/leave, and remote video state tracking.
void _setupRTCListeners() {
rtcEngine?.registerEventHandler(
RtcEngineEventHandler(
onJoinChannelSuccess: (RtcConnection connection, int elapsed) async {
log(
"Joined channel: ${connection.channelId} with UID: ${connection.localUid}",
);
// Apply the speaker/earpiece route once we are in the channel.
await _configureAudioSession();
},
onUserJoined: (RtcConnection connection, int remoteUid, int elapsed) {
// Remembered so the remote video view can bind to this uid.
integerRemoteUserUid.value = remoteUid;
},
onUserOffline: (
RtcConnection connection,
int remoteUid,
UserOfflineReasonType reason,
) {
// Peer left or dropped: tear the call down on our side too.
endCall();
},
onLocalVideoStateChanged:
(
VideoSourceType source,
LocalVideoStreamState state,
LocalVideoStreamReason reason,
) {},
onRemoteVideoStateChanged: (
RtcConnection connection,
int remoteUid,
RemoteVideoState state,
RemoteVideoStateReason reason,
int elapsed,
) {
// Remote video counts as "on" while starting or actively decoding.
isRemoteVideoEnabled.value =
state == RemoteVideoState.remoteVideoStateStarting ||
state == RemoteVideoState.remoteVideoStateDecoding;
},
// NOTE(review): RTC errors are silently ignored here — confirm intended.
onError: (ErrorCodeType err, String msg) {},
),
);
}
/// Returns a non-null RTM client, lazily (re)initializing and logging in
/// when needed.
///
/// Throws [StateError] with a clear message when initialization still
/// leaves the client null, instead of an opaque null-check crash.
Future<RtmClient> checkRtmClient() async {
  if (rtmClient == null) {
    await initializeRTM();
    await agoraRTMlogin();
  }
  final client = rtmClient;
  if (client == null) {
    throw StateError('Agora RTM client could not be initialized');
  }
  return client;
}
/// Returns a non-null RTC engine, lazily initializing it when needed.
///
/// Throws [StateError] with a clear message when initialization failed,
/// instead of an opaque null-check crash.
Future<RtcEngine> checkRtcEngine() async {
  if (rtcEngine == null) {
    await initializeRTC();
  }
  final engine = rtcEngine;
  if (engine == null) {
    throw StateError('Agora RTC engine could not be initialized');
  }
  return engine;
}
/// Arms a one-minute timeout for an unanswered call. If it fires while
/// still calling/ringing, the call is torn down, the callee's push
/// notification is cancelled and the user is informed.
// FIX: dropped the stray `async` modifier — the method body contains no
// awaits (they all live inside the timer callback).
void startCallTimeout() {
  stopCallTimeout();
  callTimeoutTimer = Timer(Duration(minutes: 1), () async {
    if (callState.value == CallState.calling ||
        callState.value == CallState.ringing) {
      await stopRingtone();
      endCall();
      //........Auto Dismiss Push Notification..............................
      if (!Get.isRegistered<AgoraTokenController>()) {
        Get.put(AgoraTokenController());
      }
      final agoraTokenController = Get.find<AgoraTokenController>();
      await agoraTokenController.cancelNotification(
        channelName: channelName.value,
        receiverId: remoteUserId.value,
        callType: currentCallType.value == CallType.video ? "VIDEO" : "VOICE",
        notificationId: agoraTokenController.incomingCallNotificationID.value,
      );
      //...............................................................
      customSnackbar(
        title: "Call Timeout",
        message: "No response. Call ended automatically.",
        duration: 2,
      );
    }
  });
}
/// Disarms the unanswered-call timeout, if armed.
void stopCallTimeout() {
callTimeoutTimer?.cancel();
callTimeoutTimer = null;
}
//Ringtone relates code..............................................................................
/// Configures the ringtone player's platform audio session.
/// Android: notification-ringtone usage with audio focus.
/// iOS: playback category that mixes with and ducks other audio.
Future<void> initializeAudioPlayer() async {
try {
if (Platform.isAndroid) {
await _ringtonePlayer.setAudioContext(
AudioContext(
android: AudioContextAndroid(
isSpeakerphoneOn: false,
stayAwake: false,
contentType: AndroidContentType.music,
usageType: AndroidUsageType.notificationRingtone,
audioFocus: AndroidAudioFocus.gain,
),
),
);
} else if (Platform.isIOS) {
await _ringtonePlayer.setAudioContext(
AudioContext(
iOS: AudioContextIOS(
category: AVAudioSessionCategory.playback,
options: {
AVAudioSessionOptions.mixWithOthers,
AVAudioSessionOptions.duckOthers,
},
),
),
);
}
} catch (e, stackTrace) {
logger.error(
"Failed to initialize audio player",
error: e,
stackTrace: stackTrace,
);
}
}
/// Starts the looping incoming-call ringtone at full volume.
/// Any ringtone already playing is stopped first so only one loop runs.
Future<void> playIncomingRingtone() async {
  try {
    if (_isRingtonePlaying) {
      await stopRingtone();
    }
    await initializeAudioPlayer();
    await _ringtonePlayer.setVolume(1.0);
    await _ringtonePlayer.setReleaseMode(ReleaseMode.loop);
    // Bundled asset played on the callee side.
    const ringtoneAsset = 'sounds/ringtone.mp3';
    await _ringtonePlayer.play(AssetSource(ringtoneAsset));
    _isRingtonePlaying = true;
  } catch (e, stackTrace) {
    logger.error(
      "Failed to play incoming ringtone",
      error: e,
      stackTrace: stackTrace,
    );
  }
}
/// Starts the looping outgoing-call tune at full volume on the caller
/// side, stopping any ringtone already playing first.
Future<void> playOutgoingRingtone() async {
  try {
    if (_isRingtonePlaying) {
      await stopRingtone();
    }
    await initializeAudioPlayer();
    await _ringtonePlayer.setVolume(1.0);
    await _ringtonePlayer.setReleaseMode(ReleaseMode.loop);
    // Bundled asset played while waiting for the callee to answer.
    const callerTuneAsset = 'sounds/callertune.mp3';
    await _ringtonePlayer.play(AssetSource(callerTuneAsset));
    _isRingtonePlaying = true;
  } catch (e, stackTrace) {
    logger.error(
      "Failed to play outgoing ringtone",
      error: e,
      stackTrace: stackTrace,
    );
  }
}
/// Stops the looping ringtone when one is active.
/// The playing flag is cleared even on failure so playback can restart.
Future<void> stopRingtone() async {
  if (!_isRingtonePlaying) return;
  try {
    await _ringtonePlayer.stop();
  } catch (e, stackTrace) {
    logger.error("Failed to stop ringtone", error: e, stackTrace: stackTrace);
  } finally {
    _isRingtonePlaying = false;
  }
}
// Toggle speakerphone method......................................................................
// Current audio route: true = speakerphone, false = earpiece.
RxBool isSpeakerEnabled = false.obs;
/// Applies the current [isSpeakerEnabled] routing to the RTC engine;
/// called once the local user has joined the channel.
Future<void> _configureAudioSession() async {
  try {
    await rtcEngine?.setEnableSpeakerphone(isSpeakerEnabled.value);
    // FIX: this is an informational trace, not an error; it was previously
    // routed through logger.error.
    log("Audio session configured - Speaker: ${isSpeakerEnabled.value}");
  } catch (e, stackTrace) {
    logger.error(
      "Failed to configure audio session",
      error: e,
      stackTrace: stackTrace,
    );
  }
}
/// Flips the speakerphone route and pushes the new setting to the engine.
Future<void> toggleSpeaker() async {
  try {
    final wantSpeaker = !isSpeakerEnabled.value;
    isSpeakerEnabled.value = wantSpeaker;
    await rtcEngine?.setEnableSpeakerphone(wantSpeaker);
    log("Speaker toggled - isSpeakerEnabled: ${isSpeakerEnabled.value}");
  } catch (e, stackTrace) {
    logger.error(
      "Failed to toggle speaker",
      error: e,
      stackTrace: stackTrace,
    );
  }
}
//.............................................................................................
/// Routes an inbound RTM signaling payload to the matching handler based
/// on its 'type' field.
Future<void> _handleIncomingMessage(Map<String, dynamic> messageData) async {
  final senderUid = messageData['uid'];
  switch (messageData['type']) {
    case 'CALL':
      _handleIncomingCall(messageData);
    case 'ACCEPT_CALL':
      _handleCallAccepted(messageData);
    case 'END_CALL':
      // Only honour END_CALL from the peer of the current session; guards
      // against a third party's stale message while the app was terminated.
      if (senderUid == remoteUserId.value) {
        await _handleCallEnded();
      }
    case 'BUSY_LINE':
      _handleBusyLine(messageData);
  }
}
// NEW: Handle busy line response
/// Handles a BUSY_LINE reply from the callee: tears down the outgoing
/// call attempt, informs the user and dismisses the push notification.
Future<void> _handleBusyLine(Map<String, dynamic> messageData) async {
  // FIX: the session fields were previously reset *before* they were read
  // for cancelNotification, so the cancel was sent with empty channel and
  // receiver values. Capture them first, then reset.
  final busyChannel = channelName.value;
  final busyReceiverId = remoteUserId.value;
  final busyCallType =
      currentCallType.value == CallType.video ? "VIDEO" : "VOICE";
  _resetCallState();
  await stopRingtone();
  callState.value = CallState.idle;
  // Pop the outgoing-call screen shortly after state settles.
  Future.delayed(Duration(milliseconds: 100), () {
    if (Get.currentRoute == RouteConstant.outgoingCallScreen) {
      Get.back();
    }
  });
  await _stopVideoPreview();
  stopCallTimeout();
  Future.delayed(Duration(milliseconds: 300), () {
    customSnackbar(
      title: "Line Busy",
      message: "User currently on another call. Please try again later.",
      duration: 3,
    );
  });
  //........Auto Dismiss Push Notification..............................
  if (!Get.isRegistered<AgoraTokenController>()) {
    Get.put(AgoraTokenController());
  }
  final agoraTokenController = Get.find<AgoraTokenController>();
  await agoraTokenController.cancelNotification(
    channelName: busyChannel,
    receiverId: busyReceiverId,
    callType: busyCallType,
    notificationId: agoraTokenController.incomingCallNotificationID.value,
  );
}
/// Handles an inbound CALL request: replies BUSY_LINE when already in a
/// call; otherwise stores caller/session data, fetches a subscriber RTC
/// token, starts the ringtone (and camera preview for video) and shows
/// the incoming-call screen.
Future<void> _handleIncomingCall(Map<String, dynamic> messageData) async {
  // Already busy: tell the caller and ignore this request.
  if (callState.value != CallState.idle) {
    await _sendBusyResponse(messageData);
    return;
  }
  amICaller.value = false;
  channelName.value = messageData['channel'];
  remoteUserId.value = messageData['uid'];
  currentCallType.value =
      messageData['rtc'] == 'VIDEO' ? CallType.video : CallType.voice;
  callerName.value = messageData['fullName'] ?? messageData['name'] ?? '';
  callerProfilePic.value =
      messageData['image'] ?? messageData['profilePic'] ?? '';
  if (channelName.value.isEmpty) {
    return;
  }
  if (!Get.isRegistered<AgoraTokenController>()) {
    Get.put(AgoraTokenController());
  }
  final agoraTokenController = Get.find<AgoraTokenController>();
  // FIX: replaced the mixed await/.then() chain with a plain await.
  final gotToken = await agoraTokenController.getRTCtoken(
    channelName: channelName.value,
    role: ApiEnum.SUBSCRIBER,
  );
  if (gotToken) {
    rtcToken.value = agoraTokenController.rTCtoken.value;
  }
  callState.value = CallState.ringing;
  await initializeAudioPlayer();
  // Small delay so the audio session is fully ready before playback.
  await Future.delayed(Duration(milliseconds: 300));
  await playIncomingRingtone();
  // Start the camera preview immediately for incoming video calls.
  if (currentCallType.value == CallType.video) {
    await startVideoPreview();
  }
  // Show incoming call UI.
  Get.toNamed(RouteConstant.incomingCallScreen);
}
/// Replies BUSY_LINE to the caller in [incomingCallData] via a
/// point-to-point RTM message when we are already in a call.
Future<void> _sendBusyResponse(Map<String, dynamic> incomingCallData) async {
  try {
    String callerUserId = incomingCallData['uid'] ?? '';
    if (callerUserId.isEmpty) {
      return;
    }
    var busyMessageData = {"type": "BUSY_LINE"};
    final client = await checkRtmClient();
    var (status, response) = await client.publish(
      callerUserId,
      jsonEncode(busyMessageData),
      channelType: RtmChannelType.user,
      customType: 'PlainText',
    );
    if (status.error == true) {
      logger.error(
        "Failed to send busy response: ${status.errorCode} - ${status.reason} ",
      );
    } else {
      // FIX: success is informational; it was previously routed through
      // logger.error.
      log("Busy response sent successfully to: $callerUserId ");
    }
  } catch (e, stackTrace) {
    logger.error(
      "Failed to send busy response to caller ",
      error: e,
      stackTrace: stackTrace,
    );
  }
}
/// Peer accepted our outgoing call: stop ringing, mark connected, start
/// the duration ticker and join the RTC channel.
Future<void> _handleCallAccepted(Map<String, dynamic> messageData) async {
  stopCallTimer();
  callState.value = CallState.connected;
  await stopRingtone();
  startCallTimer();
  // FIX: joinChannel is async; await it so join failures surface here
  // instead of the future being silently dropped.
  await joinChannel();
}
/// Peer ended or rejected the call: silence the ringtone, stop the
/// duration timer and run the remote-ended teardown (navigation + reset).
Future<void> _handleCallEnded() async {
await stopRingtone();
stopCallTimer();
await otherPersonEndCall();
}
/// Starts an outgoing call to [targetUserId].
///
/// Flow: guard against a concurrent call -> show a blocking progress
/// dialog -> request permissions / start camera preview -> fetch a
/// publisher RTC token -> send a CALL message over RTM -> open the
/// outgoing-call screen, play the caller tune and trigger the callee's
/// push notification. If the first publish fails, RTM credentials are
/// refreshed and the publish retried once; error codes -11033/-11026
/// (peer offline / timeout) still proceed so the callee can be reached
/// through the push notification.
Future<void> makeCall({
  required BuildContext context,
  required String targetUserId,
  required CallType callType,
  required String targatedUserName,
  String? targatedUserProfilePic,
}) async {
  try {
    if (callState.value != CallState.idle) {
      AwesomeCustomSnackbar.show(
        context: context,
        title: 'Call In Progress',
        message:
            'You are already in a call. Please end the current call first.',
        contentType: ContentType.warning,
      );
      return;
    }
    amICaller.value = true;
    // Video calls use the speakerphone; voice calls use the earpiece.
    isSpeakerEnabled.value = callType == CallType.video;
    // Blocking progress dialog while the token/message round-trips run.
    Get.dialog(
      Material(
        color: Colors.black,
        child: Center(
          child: Container(
            padding: EdgeInsets.all(20),
            decoration: BoxDecoration(
              color: Colors.black,
              borderRadius: BorderRadius.circular(10),
            ),
            child: Column(
              mainAxisSize: MainAxisSize.min,
              children: [
                CircularProgressIndicator(color: Colors.white),
                SizedBox(height: 16),
                Text(
                  'Initiating call...',
                  style: TextStyle(color: Colors.white),
                ),
              ],
            ),
          ),
        ),
      ),
      barrierDismissible: false,
    );
    // Request permissions first (Android runtime permissions only).
    if (Platform.isAndroid) {
      await _requestPermissions(callType);
    }
    // Start video preview immediately when making a video call.
    if (callType == CallType.video) {
      await startVideoPreview();
    }
    callerName.value = targatedUserName;
    callerProfilePic.value = targatedUserProfilePic ?? '';
    currentCallType.value = callType;
    remoteUserId.value = targetUserId;
    // Unique channel per attempt: caller id + microsecond timestamp.
    channelName.value =
        agoraUserId + DateTime.now().microsecondsSinceEpoch.toString();
    callState.value = CallState.calling;
    if (!Get.isRegistered<AgoraTokenController>()) {
      Get.put(AgoraTokenController());
    }
    final agoraTokenController = Get.find<AgoraTokenController>();
    // Get RTC token with timeout.
    bool tokenSuccess = await agoraTokenController
        .getRTCtoken(channelName: channelName.value, role: ApiEnum.PUBLISHER)
        .timeout(
          Duration(seconds: 20),
          onTimeout: () {
            throw TimeoutException('Token request timeout');
          },
        );
    if (tokenSuccess) {
      rtcToken.value = agoraTokenController.rTCtoken.value;
    } else {
      // FIX: this branch previously showed the snackbar but fell through,
      // publishing the CALL message without a valid token and leaving the
      // progress dialog open. Abort cleanly instead.
      Get.back(); // close the progress dialog
      callState.value = CallState.idle;
      await _stopVideoPreview();
      if (context.mounted) {
        AwesomeCustomSnackbar.show(
          context: context,
          title: 'Connection Error',
          message: 'Not connected to messaging service. Please try again.',
          contentType: ContentType.warning,
        );
      }
      return;
    }
    var messageData = {
      "type": "CALL",
      "rtc": callType == CallType.video ? "VIDEO" : "VOICE",
      "channel": channelName.value,
      "fullName": SharedServices.getUserDetails()?.data?.fullName ?? "",
      "image": SharedServices.getUserDetails()?.data?.userProfilePic,
      "uid": agoraUserId,
    };
    final client = await checkRtmClient();
    var (status, response) = await client
        .publish(
          targetUserId,
          jsonEncode(messageData),
          channelType: RtmChannelType.user,
          customType: 'PlainText',
        )
        .timeout(
          Duration(seconds: 30),
          onTimeout: () {
            throw TimeoutException('Message send timeout');
          },
        );
    // Close loading dialog
    Get.back();
    if (status.error == true) {
      // Refresh credentials and log in again before retrying the publish.
      agoraUserId = SharedServices.getLoginDetails()!.data!.userId
          .toString()
          .replaceAll('-', '');
      rtmToken =
          SharedServices.getAgoraUserAndRtmTokens()?.data?.agoraRtmToken ??
              "";
      // FIX: the re-login future was previously dropped, letting the retry
      // below race an unfinished login.
      await agoraRTMlogin();
      await client
          .publish(
            targetUserId,
            jsonEncode(messageData),
            channelType: RtmChannelType.user,
            customType: 'PlainText',
          )
          .timeout(
            Duration(seconds: 20),
            onTimeout: () {
              throw TimeoutException('Message send timeout');
            },
          );
      callState.value = CallState.idle;
      await _stopVideoPreview();
      // These codes mean the peer is offline / timed out: proceed with the
      // outgoing-call UI so the push notification can still reach them.
      if (status.errorCode == "-11033" || status.errorCode == "-11026") {
        callState.value = CallState.calling;
        startCallTimeout();
        // FIX: preview was previously restarted unconditionally here,
        // enabling the camera even for voice calls.
        if (callType == CallType.video) {
          await startVideoPreview();
        }
        Get.toNamed(RouteConstant.outgoingCallScreen);
        await playOutgoingRingtone();
        await agoraTokenController.incomingCallNotification(
          channelName: channelName.value,
          receiverId: remoteUserId.value,
          callType:
              currentCallType.value == CallType.video ? "VIDEO" : "VOICE",
        );
      }
    } else {
      callState.value = CallState.calling;
      startCallTimeout();
      // Show outgoing call UI
      Get.toNamed(RouteConstant.outgoingCallScreen);
      await playOutgoingRingtone();
      await agoraTokenController.incomingCallNotification(
        channelName: channelName.value,
        receiverId: remoteUserId.value,
        callType: currentCallType.value == CallType.video ? "VIDEO" : "VOICE",
      );
    }
  } on TimeoutException catch (e) {
    // Token fetch or RTM publish timed out: roll everything back.
    Get.back();
    callState.value = CallState.idle;
    await _stopVideoPreview();
    if (context.mounted) {
      AwesomeCustomSnackbar.show(
        context: context,
        title: "Connection Timeout",
        message:
            'Call request timed out. Please check your connection and try again.',
        contentType: ContentType.failure,
      );
    }
    logger.error("TimeoutException catch makecall() : ", error: e);
  } on Exception catch (e, stackTrace) {
    logger.error(
      "Exception during makeCall",
      error: e,
      stackTrace: stackTrace,
    );
    Get.back();
    callState.value = CallState.idle;
    await _stopVideoPreview();
    if (e.toString().contains('Permission')) {
      if (context.mounted) {
        AwesomeCustomSnackbar.show(
          context: context,
          title: 'Permission Required',
          message:
              'Camera and microphone permissions are required for calls.',
          contentType: ContentType.failure,
        );
      }
    } else {
      if (context.mounted) {
        AwesomeCustomSnackbar.show(
          context: context,
          title: 'Connection Error',
          message: 'Unable to reach $targatedUserName. Please try again.',
          contentType: ContentType.failure,
        );
      }
    }
  } catch (e) {
    // Non-Exception errors (e.g. Error subclasses) end up here.
    Get.back();
    callState.value = CallState.idle;
    await _stopVideoPreview();
    if (context.mounted) {
      AwesomeCustomSnackbar.show(
        context: context,
        title: 'Connection Error',
        message: 'Unable to reach $targatedUserName. Please try again.',
        contentType: ContentType.failure,
      );
    }
    logger.error(" catch makecall() : ", error: e);
  }
}
/// Callee accepts the incoming call: requests permissions, picks the
/// audio route, notifies the caller with ACCEPT_CALL, joins the RTC
/// channel and swaps the UI to the active-call screen.
Future<void> acceptCall({required CallType callType}) async {
stopCallTimeout();
try {
if (Platform.isAndroid) {
await _requestPermissions(currentCallType.value);
}
// Speaker route depends on call type, set before joining the channel.
if (currentCallType.value == CallType.video) {
isSpeakerEnabled.value = true; // Video calls use speaker
} else {
isSpeakerEnabled.value = false; // Voice calls use earpiece
}
if (currentCallType.value == CallType.video) {
await startVideoPreview();
}
var messageData = {
"type": "ACCEPT_CALL",
"rtc": callType == CallType.video ? "VIDEO" : "VOICE",
"channel": channelName.value,
};
final client = await checkRtmClient();
await client.publish(
remoteUserId.value,
jsonEncode(messageData),
channelType: RtmChannelType.user,
customType: 'PlainText',
);
await stopRingtone();
// Mark connected and start the duration ticker before joining media.
callState.value = CallState.connected;
startCallTimer();
await joinChannel();
Get.offNamed(RouteConstant.activeCallScreen);
// NOTE(review): the preview is retried after 1s, presumably in case the
// first start raced engine setup — confirm this delay is still needed.
await Future.delayed(Duration(seconds: 1), () async {
if (currentCallType.value == CallType.video) {
await startVideoPreview();
}
});
} catch (e, stackTrace) {
logger.error("Failed to accept call", error: e, stackTrace: stackTrace);
}
}
/// Callee declines the incoming call: notifies the caller with END_CALL,
/// resets local state and navigates off the incoming-call screen.
/// [_isNavigating] debounces double taps on the reject button.
Future<void> rejectCall() async {
if (_isNavigating) return;
_isNavigating = true;
try {
var messageData = {"type": "END_CALL", "uid": agoraUserId};
final client = await checkRtmClient();
await client.publish(
remoteUserId.value,
jsonEncode(messageData),
channelType: RtmChannelType.user,
customType: 'PlainText',
);
callState.value = CallState.idle;
await stopRingtone();
await _stopVideoPreview();
_resetCallState();
if (Get.currentRoute == RouteConstant.splashScreen ||
Get.currentRoute == RouteConstant.incomingCallScreen) {
Future.microtask(() {
if (Get.previousRoute.isEmpty ||
Get.previousRoute == RouteConstant.splashScreen ||
Get.previousRoute == RouteConstant.dashboardScreen ||
Get.currentRoute == RouteConstant.splashScreen) {
// Call arrived via push notification: rebuild from the dashboard.
Get.offAllNamed(RouteConstant.dashboardScreen);
} else {
// Regular in-app call: just pop the incoming-call screen.
Get.back();
}
});
}
} catch (e, stackTrace) {
logger.error("Failed to reject call", error: e, stackTrace: stackTrace);
} finally {
// Release the navigation debounce shortly after teardown settles.
Future.delayed(Duration(milliseconds: 500), () {
_isNavigating = false;
});
}
}
/// Joins the RTC channel with the stored token/channel using a string
/// user account; silently no-ops when either is missing.
Future<void> joinChannel() async {
try {
if (channelName.value.isEmpty) {
return;
}
if (rtcToken.value.isEmpty) {
return;
}
await checkRtcEngine();
await rtcEngine?.enableAudio();
if (currentCallType.value == CallType.video) {
await rtcEngine?.enableVideo();
if (!isVideoPreviewStarted.value) {
await startVideoPreview();
}
}
// Brief pause so enableAudio/enableVideo settle before joining.
await Future.delayed(Duration(milliseconds: 100));
await rtcEngine?.joinChannelWithUserAccount(
token: rtcToken.value,
channelId: channelName.value,
userAccount: agoraUserId,
options: ChannelMediaOptions(
autoSubscribeAudio: true,
autoSubscribeVideo: currentCallType.value == CallType.video,
publishMicrophoneTrack: true,
publishCameraTrack: currentCallType.value == CallType.video,
// NOTE(review): the callee joins as audience; in the communication
// profile the role is typically ignored, but confirm the callee can
// still publish its mic/camera tracks.
clientRoleType:
amICaller.value
? ClientRoleType.clientRoleBroadcaster
: ClientRoleType.clientRoleAudience,
),
);
} catch (e, stackTrace) {
logger.error("Failed to join channel", error: e, stackTrace: stackTrace);
}
}
//........................................................................
/// Teardown when the REMOTE side ended the call: navigate off any call
/// screen, leave the RTC channel, stop preview/timers and reset state.
Future<void> otherPersonEndCall() async {
if (Get.currentRoute == RouteConstant.activeCallScreen ||
Get.currentRoute == RouteConstant.outgoingCallScreen ||
Get.currentRoute == RouteConstant.incomingCallScreen ||
Get.currentRoute == RouteConstant.splashScreen) {
Future.microtask(() {
if (Get.previousRoute.isEmpty ||
Get.previousRoute == RouteConstant.splashScreen ||
Get.previousRoute == RouteConstant.dashboardScreen ||
Get.currentRoute == RouteConstant.splashScreen) {
// Call arrived via push notification: rebuild from the dashboard.
Get.offAllNamed(RouteConstant.dashboardScreen);
} else {
// Fallback if nothing to pop....
Get.back();
}
});
}
await rtcEngine?.leaveChannel();
await _stopVideoPreview();
stopCallTimer();
_resetCallState();
}
//........................................................................
/// Ends the current call from OUR side: cancels the callee's push
/// notification (when never connected), navigates off call screens,
/// sends END_CALL over RTM, leaves the RTC channel and resets state.
/// [_isNavigating] debounces double taps on the end button.
Future<void> endCall() async {
if (_isNavigating) return;
_isNavigating = true;
stopCallTimeout();
// Snapshot the state before flipping to idle; it decides whether the
// callee's push notification still needs cancelling.
final previousState = callState.value;
callState.value = CallState.idle;
try {
var messageData = {"type": "END_CALL", "uid": agoraUserId};
//........Auto Dismiss Push Notification..............................
if (!Get.isRegistered<AgoraTokenController>()) {
Get.put(AgoraTokenController());
}
final agoraTokenController = Get.find<AgoraTokenController>();
if (previousState != CallState.connected) {
await agoraTokenController.cancelNotification(
channelName: channelName.value,
receiverId: remoteUserId.value,
callType: currentCallType.value == CallType.video ? "VIDEO" : "VOICE",
notificationId: agoraTokenController.incomingCallNotificationID.value,
);
}
//....................................................................
await stopRingtone();
if (Get.currentRoute == RouteConstant.activeCallScreen ||
Get.currentRoute == RouteConstant.outgoingCallScreen ||
Get.currentRoute == RouteConstant.incomingCallScreen ||
Get.currentRoute == RouteConstant.splashScreen) {
await Future.delayed(Duration(milliseconds: 100));
if (Get.previousRoute.isEmpty ||
Get.previousRoute == RouteConstant.splashScreen ||
Get.previousRoute == RouteConstant.dashboardScreen ||
Get.currentRoute == RouteConstant.splashScreen) {
// Call arrived via push notification: rebuild from the dashboard.
Get.offAllNamed(RouteConstant.dashboardScreen);
} else {
// Regular in-app call
Get.back();
}
}
final client = await checkRtmClient();
await client.publish(
remoteUserId.value,
jsonEncode(messageData),
channelType: RtmChannelType.user,
customType: 'PlainText',
);
await rtcEngine?.leaveChannel();
try {
// Best-effort: clear unread badges for this chat after the call.
if (Get.isRegistered<ChatController>()) {
final chatCtrl = Get.find<ChatController>();
await chatCtrl.markAllMessagesAsRead(remoteUserId.value);
}
} catch (e, stackTrace) {
logger.error(
"ChatController not available when ending call",
error: e,
stackTrace: stackTrace,
);
}
await _stopVideoPreview();
stopCallTimer();
if (Get.isDialogOpen == true) {
Get.back(); // Close if any open dialog
}
_resetCallState();
} catch (e, stackTrace) {
logger.error(
"Failed to end call properly",
error: e,
stackTrace: stackTrace,
);
_resetCallState();
} finally {
// Release the navigation debounce shortly after teardown settles.
Future.delayed(Duration(milliseconds: 500), () {
_isNavigating = false;
});
}
}
/// Enables local video and starts the camera preview; repeated calls are
/// no-ops while a preview is already running.
Future<void> startVideoPreview() async {
  if (isVideoPreviewStarted.value) return;
  try {
    await rtcEngine?.enableVideo();
    await rtcEngine?.enableLocalVideo(true);
    await rtcEngine?.startPreview();
    isVideoPreviewStarted.value = true;
  } catch (e, stackTrace) {
    logger.error(
      "Failed to start video preview",
      error: e,
      stackTrace: stackTrace,
    );
  }
}
/// Stops the camera preview when one is running; otherwise does nothing.
Future<void> _stopVideoPreview() async {
  if (!isVideoPreviewStarted.value) return;
  try {
    await rtcEngine?.stopPreview();
    isVideoPreviewStarted.value = false;
  } catch (e, stackTrace) {
    logger.error(
      "Failed to stop video preview",
      error: e,
      stackTrace: stackTrace,
    );
  }
}
/// Toggles the local camera on/off; when it turns back on during a video
/// call, the preview is restarted as well.
Future<void> toggleLocalVideo() async {
  final enabled = !isLocalVideoEnabled.value;
  isLocalVideoEnabled.value = enabled;
  await rtcEngine?.enableLocalVideo(enabled);
  if (enabled && currentCallType.value == CallType.video) {
    await startVideoPreview();
  }
}
/// Toggles the local microphone (mute/unmute).
Future<void> toggleLocalAudio() async {
  final enabled = !isLocalAudioEnabled.value;
  isLocalAudioEnabled.value = enabled;
  await rtcEngine?.enableLocalAudio(enabled);
}
/// Switches between the front and rear cameras.
Future<void> switchCamera() async {
await rtcEngine?.switchCamera();
}
/// Requests the microphone (and camera, for video calls) runtime
/// permissions on Android; a no-op elsewhere.
Future<void> _requestPermissions(CallType callType) async {
  if (!Platform.isAndroid) return;
  final permissions = <Permission>[
    Permission.microphone,
    if (callType == CallType.video) Permission.camera,
  ];
  final statuses = await permissions.request();
  // FIX: the old loop re-requested EVERY permission once per denied entry
  // (and ignored the result); re-request only the still-denied ones, once.
  final denied = permissions
      .where((p) => statuses[p] != PermissionStatus.granted)
      .toList();
  if (denied.isNotEmpty) {
    await denied.request();
  }
}
/// Resets and starts the 1-second call-duration ticker.
void startCallTimer() {
  // FIX: cancel any previous ticker first so repeated starts cannot leak
  // a second timer that double-increments the duration.
  callTimer?.cancel();
  callDuration.value = 0;
  callTimer = Timer.periodic(Duration(seconds: 1), (timer) {
    callDuration.value++;
  });
}
/// Cancels the duration ticker and zeroes the displayed duration.
void stopCallTimer() {
callTimer?.cancel();
callTimer = null;
callDuration.value = 0;
}
/// Restores all per-call session fields to their defaults and disarms
/// the unanswered-call timeout.
/// NOTE(review): currentCallType and amICaller are left untouched —
/// confirm nothing reads them as "current call" data after a call ends.
void _resetCallState() {
callState.value = CallState.idle;
remoteUserId.value = '';
channelName.value = '';
rtcToken.value = '';
isLocalVideoEnabled.value = true;
isLocalAudioEnabled.value = true;
callDuration.value = 0;
callerName.value = '';
callerProfilePic.value = '';
isVideoPreviewStarted.value = false;
integerRemoteUserUid.value = 0;
isSpeakerEnabled.value = false;
stopCallTimeout();
}
/// Service teardown: stops timers, releases the RTC engine and disposes
/// the ringtone player.
/// NOTE(review): _stopVideoPreview() is async but not awaited here, and
/// the RTM client is never logged out/released — confirm intentional.
@override
void onClose() {
stopCallTimer();
stopCallTimeout();
_stopVideoPreview();
rtcEngine?.release();
_ringtonePlayer.dispose();
super.onClose();
}
}