Is it possible to send a WebRTC VideoStreamTrack from Unity to an RTCPeerConnection in the browser?


I am trying to send the camera stream from Unity to an RTCPeerConnection in the browser. The signaling between the two peers completes correctly, but when I send the MediaStreamTrack, no video arrives in the browser. Does anybody have an idea? This is similar to Unity.RenderStreaming, but I am trying to build my own. Here's the code I use for accessing the camera and sending the track.

    // Create the local peer with a public STUN server for ICE.
    RTCConfiguration config = default;
    config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } };
    localConnection = new RTCPeerConnection(ref config);

    localConnection.OnNegotiationNeeded = () => { Debug.Log("negotiation needed"); StartCoroutine(handleNegotiationNeeded()); };
    localConnection.OnIceCandidate = handleIceCandidate;
    localConnection.OnIceConnectionChange = handleIceConnectionChange;

    RenderTexture rt;
    if (arCamera.targetTexture != null)
    {
        // Reuse the camera's existing render texture, switching it to a
        // color format that the WebRTC encoder supports on this device.
        rt = arCamera.targetTexture;
        RenderTextureFormat supportFormat = WebRTC.WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
        GraphicsFormat graphicsFormat = GraphicsFormatUtility.GetGraphicsFormat(supportFormat, RenderTextureReadWrite.Default);
        GraphicsFormat compatibleFormat = SystemInfo.GetCompatibleFormat(graphicsFormat, FormatUsage.Render);
        GraphicsFormat format = graphicsFormat == compatibleFormat ? graphicsFormat : compatibleFormat;

        if (rt.graphicsFormat != format)
        {
            Debug.LogWarning(
                $"Color format {rt.graphicsFormat} is not supported by unity.webrtc. Switching to supported format {format}.");
            rt.Release();
            rt.graphicsFormat = format;
            rt.Create();
        }

        arCamera.targetTexture = rt;
    }
    else
    {
        // No target texture yet: create a 1280x720 one in a supported format.
        RenderTextureFormat format = WebRTC.WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
        rt = new RenderTexture(1280, 720, 0, format)
        {
            antiAliasing = 1
        };
        rt.Create();
        arCamera.targetTexture = rt;
    }

    // Wrap the render texture in a video track and send it to the peer.
    track = new VideoStreamTrack(rt);
    Debug.Log(rt.format);
    Debug.Log(track.Texture.graphicsFormat);
    Debug.Log(track.IsEncoderInitialized);
    localConnection.AddTrack(track);
    Debug.Log("track added");

The full code can be found here

https://github.com/MohammedShetaya/AR_Video_Straming/blob/main/Assets/Scripts/WSClient.cs

1 Answer

ekrenzin:

Yes, it is possible, but it is quite difficult and beyond the scope of an SO answer. Check out the Unity WebRTC package for some help: https://docs.unity3d.com/Packages/com.unity.webrtc@latest/manual/index.html
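
For what it's worth, a common cause of a track that negotiates fine but never shows video is that com.unity.webrtc only copies frames out of the RenderTexture while its WebRTC.Update() coroutine is running. Below is a minimal sketch assuming the 2.x API of that package (WebRTC.Initialize() and WebRTC.Dispose() are the 2.x entry points; the class name WebRTCBootstrap is just for illustration):

    using Unity.WebRTC;
    using UnityEngine;

    // Keeps the WebRTC plugin alive and pumps captured video frames to the
    // encoder. Without the WebRTC.Update() coroutine, a VideoStreamTrack
    // created from a RenderTexture sends no frames to the remote peer.
    public class WebRTCBootstrap : MonoBehaviour
    {
        private void Awake()
        {
            // Initialize the native plugin before creating any RTCPeerConnection.
            WebRTC.Initialize();
        }

        private void Start()
        {
            // Copy the RenderTexture into the video encoder once per rendered frame.
            StartCoroutine(WebRTC.Update());
        }

        private void OnDestroy()
        {
            WebRTC.Dispose();
        }
    }

With a component like this in the scene, the track created from the RenderTexture is actually encoded every frame and should show up in the browser's ontrack handler once the offer/answer exchange completes.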