I am trying to implement video and voice call app using flutter_webrtc and this is my function which I have an error when I trying to call its successful only first time and the second time I got an exception: peerConnectionSetRemoteDescription(): WEBRTC_SET_REMOTE_DESCRIPTION_ERROR: Failed to set remote answer sdp: Called in wrong state: stable , closed
Future setupPeerConnection() async {
_rtcPeerConnection = await createPeerConnection({
'sdpSemantics': 'plan-b',
'iceServers': [
{
'urls': [
'stun:stun1.l.google.com:19302',
'stun:stun2.l.google.com:19302'
]
}
]
});
_localStream = await navigator.mediaDevices.getUserMedia({
'audio': isAudioOn,
'video': isVideoOn
? {'facingMode': isFrontCameraSelected ? 'user' : 'environment'}
: false,
});
_localStream!.getTracks().forEach((track) {
_rtcPeerConnection!.addTrack(track, _localStream!);
});
_localRTCVideoRenderer.srcObject = _localStream;
_rtcPeerConnection!.onTrack = (event) {
_remoteRTCVideoRenderer.srcObject = event.streams[0];
};
update();
// for Incoming call
if (offer != null) {
socket!.on("IceCandidate", (data) {
String candidate = data["iceCandidate"]["candidate"];
String sdpMid = data["iceCandidate"]["id"];
int sdpMLineIndex = data["iceCandidate"]["label"];
_rtcPeerConnection!.addCandidate(RTCIceCandidate(
candidate,
sdpMid,
sdpMLineIndex,
));
});
final remoteDescription =
RTCSessionDescription(offer["sdp"], offer["type"]);
await _rtcPeerConnection!.setRemoteDescription(remoteDescription);
RTCSessionDescription answer = await _rtcPeerConnection!.createAnswer();
await _rtcPeerConnection!.setLocalDescription(answer);
log(_rtcPeerConnection!.signalingState.toString());
socket!.emit("answerCall", {
"sender": sender,
"sdpAnswer": answer.toMap(),
});
}
// for Outgoing call
else {
_rtcPeerConnection!.onIceCandidate =
(RTCIceCandidate candidate) => rtcIceCandidates.add(candidate);
RTCSessionDescription offer = await _rtcPeerConnection!.createOffer();
await _rtcPeerConnection!.setLocalDescription(offer);
socket!.emit('makeCall', {
"receiver": receiver,
"sdpOffer": offer.toMap(),
"isVoice": isVoiceCall
});
socket!.on("callAnswered", (data) {
_rtcPeerConnection!.setRemoteDescription(
RTCSessionDescription(
data["sdpAnswer"]["sdp"],
data["sdpAnswer"]["type"],
),
);
for (RTCIceCandidate candidate in rtcIceCandidates) {
socket!.emit("IceCandidate", {
"receiver": receiver,
"iceCandidate": {
"id": candidate.sdpMid,
"label": candidate.sdpMLineIndex,
"candidate": candidate.candidate
}
});
}
});
}
}
and this how I dispose the controller:
Future disposeConnection() async {
_localStream?.getVideoTracks().forEach((track) async {
await track.stop();
});
_localStream?.getAudioTracks().forEach((track) async {
await track.stop();
});
await _localStream?.dispose();
await _rtcPeerConnection!.close();
// await _localRTCVideoRenderer.dispose();
// await _remoteRTCVideoRenderer.dispose();
}
I have been stacked here for more than week can anyone help me!!
edit!! here is my full controller:
import 'dart:developer';
import 'package:flutter/material.dart';
import 'package:get/get.dart' hide navigator;
import 'package:sizer/sizer.dart';
import '../socket/socket_controller.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
class VideoCallController extends GetxController {
final socket = Get.find<SocketController>().socket;
final String sender, receiver;
dynamic offer;
bool isVoiceCall;
bool isSender;
final _localRTCVideoRenderer = RTCVideoRenderer();
final _remoteRTCVideoRenderer = RTCVideoRenderer();
get localRTCVideoRenderer => _localRTCVideoRenderer;
get remoteRTCVideoRenderer => _remoteRTCVideoRenderer;
MediaStream? _localStream;
RTCPeerConnection? _rtcPeerConnection;
List<RTCIceCandidate> rtcIceCandidates = [];
bool isAudioOn = true,
isVideoOn = true,
isFrontCameraSelected = true,
isTopSpeakerSelected = true;
Offset myOffset = Offset(MediaQuery.sizeOf(Get.context!).width * 0.55, 50);
final double myContainerWidth = 40.w;
final double myContainerHeight = 25.h;
Offset thereOffset = Offset(MediaQuery.sizeOf(Get.context!).width * 0.55, 50);
final double thereContainerWidth = 40.w;
final double thereContainerHeight = 25.h;
bool myViewIsMain = false;
VideoCallController(
{this.offer,
required this.isVoiceCall,
required this.isSender,
required this.sender,
required this.receiver});
@override
void onInit() async {
await _localRTCVideoRenderer.initialize();
await _remoteRTCVideoRenderer.initialize();
if (isVoiceCall) {
isVideoOn = false;
isTopSpeakerSelected = false;
update();
}
setupPeerConnection();
handleRemoteExitCall();
handleRemoteCancelCall();
super.onInit();
}
@override
void onClose() async {
await disposeConnection();
super.onClose();
}
Future disposeConnection() async {
_localStream?.getVideoTracks().forEach((track) async {
await track.stop();
});
_localStream?.getAudioTracks().forEach((track) async {
await track.stop();
});
await _localStream?.dispose();
await _rtcPeerConnection!.close();
// await _localRTCVideoRenderer.dispose();
// await _remoteRTCVideoRenderer.dispose();
}
switchView() {
myViewIsMain = !myViewIsMain;
myOffset = Offset(MediaQuery.sizeOf(Get.context!).width * 0.55, 50);
thereOffset = Offset(MediaQuery.sizeOf(Get.context!).width * 0.55, 50);
update();
}
void myOverlayPosition(DragUpdateDetails d) {
double newOffsetX = myOffset.dx + d.delta.dx;
double newOffsetY = myOffset.dy + d.delta.dy;
Size screenSize = MediaQuery.of(Get.context!).size;
double maxOffsetX = screenSize.width - myContainerWidth;
double maxOffsetY = screenSize.height - myContainerHeight;
double topRange = 4.h;
double bottomRange = 13.h;
maxOffsetY -= topRange + bottomRange;
newOffsetX = newOffsetX.clamp(0, maxOffsetX);
newOffsetY = newOffsetY.clamp(topRange, maxOffsetY + topRange);
myOffset = Offset(newOffsetX, newOffsetY);
update();
}
void thereOverlayPosition(DragUpdateDetails d) {
double newOffsetX = thereOffset.dx + d.delta.dx;
double newOffsetY = thereOffset.dy + d.delta.dy;
Size screenSize = MediaQuery.of(Get.context!).size;
double maxOffsetX = screenSize.width - thereContainerWidth;
double maxOffsetY = screenSize.height - thereContainerHeight;
double topRange = 4.h;
double bottomRange = 13.h;
maxOffsetY -= topRange + bottomRange;
newOffsetX = newOffsetX.clamp(0, maxOffsetX);
newOffsetY = newOffsetY.clamp(topRange, maxOffsetY + topRange);
thereOffset = Offset(newOffsetX, newOffsetY);
update();
}
setupPeerConnection() async {
_rtcPeerConnection = await createPeerConnection({
'iceServers': [
{
'urls': [
'stun:stun1.l.google.com:19302',
'stun:stun2.l.google.com:19302'
]
}
]
});
_localStream = await navigator.mediaDevices.getUserMedia({
'audio': isAudioOn,
'video': isVideoOn
? {'facingMode': isFrontCameraSelected ? 'user' : 'environment'}
: false,
});
_localStream!.getTracks().forEach((track) {
_rtcPeerConnection!.addTrack(track, _localStream!);
});
_localRTCVideoRenderer.srcObject = _localStream;
_rtcPeerConnection!.onTrack = (event) {
_remoteRTCVideoRenderer.srcObject = event.streams[0];
};
log(_rtcPeerConnection!.signalingState.toString());
update();
// for Incoming call
if (offer != null) {
final remoteDescription =
RTCSessionDescription(offer["sdp"], offer["type"]);
await _rtcPeerConnection!.setRemoteDescription(remoteDescription);
RTCSessionDescription answer = await _rtcPeerConnection!.createAnswer();
await _rtcPeerConnection!
.setLocalDescription(answer)
.then((value) => socket!.on("IceCandidate", (data) {
String candidate = data["iceCandidate"]["candidate"];
String sdpMid = data["iceCandidate"]["id"];
int sdpMLineIndex = data["iceCandidate"]["label"];
_rtcPeerConnection!.addCandidate(RTCIceCandidate(
candidate,
sdpMid,
sdpMLineIndex,
));
}));
socket!.emit("answerCall", {
"sender": sender,
"sdpAnswer": answer.toMap(),
});
}
// for Outgoing call
else {
_rtcPeerConnection!.onIceCandidate =
(RTCIceCandidate candidate) => rtcIceCandidates.add(candidate);
RTCSessionDescription offer = await _rtcPeerConnection!.createOffer();
await _rtcPeerConnection!.setLocalDescription(offer);
socket!.emit('makeCall', {
"receiver": receiver,
"sdpOffer": offer.toMap(),
"isVoice": isVoiceCall
});
socket!.on("callAnswered", (data) {
_rtcPeerConnection!.setRemoteDescription(
RTCSessionDescription(
data["sdpAnswer"]["sdp"],
data["sdpAnswer"]["type"],
),
);
for (RTCIceCandidate candidate in rtcIceCandidates) {
socket!.emit("IceCandidate", {
"receiver": receiver,
"iceCandidate": {
"id": candidate.sdpMid,
"label": candidate.sdpMLineIndex,
"candidate": candidate.candidate
}
});
}
});
}
}
handleExitCall() {
socket!.emit("userExitCall", {
'exitUser': receiver,
});
Get.back();
}
handleRemoteExitCall() {
socket!.on("isExitCall", (data) {
Get.back();
});
}
handleRemoteCancelCall() {
socket!.on("isCancelCall", (data) async {
Get.back();
});
}
toggleMic() {
isAudioOn = !isAudioOn;
_localStream?.getAudioTracks().forEach((track) {
track.enabled = isAudioOn;
});
update();
}
toggleCamera() {
isVideoOn = !isVideoOn;
_localStream?.getVideoTracks().forEach((track) {
track.enabled = isVideoOn;
});
update();
}
switchCamera() {
isFrontCameraSelected = !isFrontCameraSelected;
_localStream?.getVideoTracks().forEach((track) {
Helper.switchCamera(track);
});
update();
}
switchSpeaker() {
isTopSpeakerSelected = !isTopSpeakerSelected;
_localStream?.getAudioTracks().forEach((track) {
track.enableSpeakerphone(isTopSpeakerSelected);
});
update();
}
}