Remote audio stream not working with WebRTC

694 views

I am trying to connect Flutter with Angular through WebRTC, using audio only. The peer connection is established without problems; the problem appears when I receive the MediaStream from Flutter. It does not play the audio, and the stream arrives from Flutter as if it were muted, as seen in the following:

The media stream received from Flutter appears to have `enabled: true` and `muted: true`.

This is how I send it from Flutter to Angular in the `_createPeerConnection` method:

// Builds the local RTCPeerConnection for an audio-only call:
// captures the microphone, adds its tracks to the connection, and wires
// up the ICE/signaling/track callbacks. Mutates the instance fields
// _localStream, _peerConnection and _peerConnected as side effects.
_createPeerConnection() async {

    // STUN-only ICE configuration; no TURN fallback is configured, so
    // connectivity behind symmetric NATs may fail.
    Map<String, dynamic> configuration = {
      "iceServers": [
        {"url": "stun:stun.l.google.com:19302"},
      ]
    };

    // Legacy-style SDP constraints: receive audio, no video.
    final Map<String, dynamic> offerSdpConstraints = {
      "mandatory": {
        "OfferToReceiveAudio": true,
        "OfferToReceiveVideo": false,
      },
      "optional": [],
    };

    // Capture the local microphone (audio only — see _getUserMedia).
    _localStream = await _getUserMedia();

    // Explicitly enable every captured audio track before sending.
    _localStream?.getAudioTracks().forEach((element) {
      element.enabled = true;
    });

    // NOTE(review): `(track) => { print("track added") }` is an expression
    // lambda returning a set literal containing the result of print(), not a
    // block body; it still prints, but `(track) { ... }` was probably intended.
    _localStream?.onAddTrack = (track) => {
      print("track added")

    };
    _peerConnection =
    await createPeerConnection(configuration, offerSdpConstraints);
    // Attach each local track to the peer connection (unified-plan style),
    // associating it with the originating stream.
    _localStream?.getTracks().forEach((element) {
      _peerConnection?.addTrack(element,_localStream!);
    });

    //_peerConnection?.addStream(_localStream!);
    //pc.addStream(_localStream!);

    // Candidates are only printed as JSON here — presumably they are relayed
    // to the remote peer by the signaling layer elsewhere; verify, otherwise
    // the remote side never receives them.
    _peerConnection?.onIceCandidate = (e) {
      if (e.candidate != null) {
        print(json.encode({
          'candidate': e.candidate.toString(),
          'sdpMid': e.sdpMid.toString(),
          'sdpMlineIndex': e.sdpMLineIndex,
        }));
      }
    };



    // Log ICE connection state transitions for debugging.
    _peerConnection?.onIceConnectionState = (e) {
      print(e);
    };

    // When a remote offer lands, an answer should be created here —
    // currently this only logs, so negotiation may stall at this point.
    _peerConnection?.onSignalingState = (state) {
      if (state == RTCSignalingState.RTCSignalingStateHaveRemoteOffer) {
        // answer here
        print("remote offer");
      }
    };

    // Remote track arrival: only logs the stream id; nothing attaches the
    // remote audio to a renderer/output on the Flutter side.
    _peerConnection?.onAddTrack = ((stream, track) {

      print(stream.id);
    });


/*    pc.onAddStream = (stream) {
      print('addStream: ' + stream.id);
      //_remoteVideoRenderer.srcObject = stream;
    };*/
    //_peerConnection?.addTrack(_localStream!);

    // Mark the connection as ready for the rest of the app.
    _peerConnected = true;
    //return pc;
  }
// Requests an audio-only capture stream from the device microphone
// and returns the resulting MediaStream.
_getUserMedia() async {
  // Audio only: no camera is requested for this call.
  final Map<String, dynamic> mediaConstraints = {
    'audio': true,
    'video': false
  };

  final captured =
      await navigator.mediaDevices.getUserMedia(mediaConstraints);

  // Local preview renderer intentionally left unattached (audio-only):
  //_localVideoRenderer.srcObject = stream;
  return captured;
}

How I handle the MediaStream in Angular:

/**
 * Handles the peer connection's `track` event for the incoming remote
 * audio: force-enables the first remote track and attaches its stream to
 * the <audio> element so the browser can render it.
 *
 * Fix: removed the dead `options` local (it was built and never used).
 *
 * NOTE(review): the stream arrives with `muted: true`, meaning no media
 * frames are flowing yet — that is set by the sender side, not fixable
 * here; `track.enabled` only controls local rendering.
 */
private handleTrackEvent = (event: RTCTrackEvent) => {
  console.log("event detected");
  console.log(event);
  console.log(event.streams[0].getTracks());

  // Make sure the remote track is not disabled before playback.
  event.streams[0].getTracks()[0].enabled = true;

  // Hand the remote stream to the <audio #remote_audio> element.
  this.remoteAudio.srcObject = event.streams[0];
};

This is the audio element I'm trying to listen to; it appears to be playing, but I hear nothing:

<div *ngIf="inCall">
  <!-- Remote audio sink. NOTE(review): `autoplay` with audible audio can be
       blocked by browser autoplay policy until a user gesture — confirm
       play() succeeds or trigger it from a click handler. -->
  <audio id="remote_audio" #remote_audio autoplay controls></audio>
</div>
0

There are 0 answers