Error when pushing two MediaStreams to different URLs over WebRTC

WebRTC version: 3.0.0-pre.7 - October 31, 2023
Pushing a single MediaStream to the media server works fine, but when I push two streams to different URLs, the two streams get blended together. A sketch of the setup I expect (two fully independent streamers) follows the code listing below.

//
// Copyright (c) 2022 Winlin
//
// SPDX-License-Identifier: MIT
//
using System.Collections;
using UnityEngine;
using Unity.WebRTC;
using System;
using UnityEngine.Networking;
using System.Linq;

public class SrsStreamer : MonoBehaviour
{
    // The WHIP stream URL, used to push the WebRTC stream to SRS or another media server.
    public string url;
    // Sent as the bearer token in the Authorization header of the WHIP request.
    public string webrtc_url;
    // The width and height of the captured video stream.
    public int width;
    public int height;
    // The game camera used as the source of the video stream. Generally you should
    // create a dedicated camera, not the main camera.
    //
    // @remark All game audio is captured if this script is attached to the scene's
    // only AudioListener, for example the `Main Camera`, so that audio data can be
    // captured in `OnAudioFilterRead`.
    public Camera sourceCamera;

    //private WebCamTexture webCamTexture;
    private MediaStream videoStream;
    private AudioStreamTrack audioStreamTrack;
    private RTCPeerConnection pc;

    // Not required; only to make the workflow easier to understand. You could
    // feed the audio stream track directly from the `OnAudioFilterRead` function.
    private delegate void DelegateOnAudioFilterRead(float[] data, int channels);
    private DelegateOnAudioFilterRead handleOnAudioFilterRead;

    private void Awake()
    {
#if WEBRTC_3_0_0_PRE_5_OR_BEFORE
        WebRTC.Initialize();
#endif
       
        Debug.Log("WebRTC: Initialize ok");
    }

    private void OnDestroy()
    {
        videoStream?.Dispose();
        videoStream = null;

        handleOnAudioFilterRead = null;

        audioStreamTrack?.Dispose();
        audioStreamTrack = null;

        pc?.Close();
        pc?.Dispose();
        pc = null;

        //webCamTexture?.Stop();
        //webCamTexture = null;

#if WEBRTC_3_0_0_PRE_5_OR_BEFORE
        WebRTC.Dispose();
#endif
        Debug.Log("WebRTC: Dispose ok");
    }

    void Start()
    {
        Debug.Log($"WebRTC: Start to stream {url}");

        // Start WebRTC update.
        StartCoroutine(WebRTC.Update());

        // Create object only after WebRTC initialized.
        pc = new RTCPeerConnection();
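        // Note: every track added below is sent over this single PeerConnection,
        // so they all end up in the one WHIP session posted to `url`.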

        

        // Set up the peer connection event handlers.
        pc.OnIceCandidate = candidate =>
        {
            Debug.Log($"WebRTC: OnIceCandidate {candidate.ToString()}");
        };
        pc.OnIceConnectionChange = state =>
        {
            Debug.Log($"WebRTC: OnIceConnectionChange {state.ToString()}");
        };
        pc.OnTrack = e =>
        {
            Debug.Log($"WebRTC: OnTrack {e.Track.Kind} id={e.Track.Id}");
        };

        // Setup PeerConnection to send stream only.
        StartCoroutine(SetupPeerConnection());
        IEnumerator SetupPeerConnection()
        {
            RTCRtpTransceiverInit init = new RTCRtpTransceiverInit();
            init.direction = RTCRtpTransceiverDirection.SendOnly;
            pc.AddTransceiver(TrackKind.Audio, init);
            pc.AddTransceiver(TrackKind.Video, init);

            yield return StartCoroutine(GrabCamera());
        }

        // Grab the game camera.
        IEnumerator GrabCamera()
        {
            videoStream = sourceCamera.CaptureStream(width, height);
            Debug.Log($"WebRTC: Grab camera stream={videoStream.Id}, size={sourceCamera.targetTexture.width}x{sourceCamera.targetTexture.height}");

            foreach (var track in videoStream.GetTracks())
            {
                RTCRtpSender sender = pc.AddTrack(track);
                Debug.Log($"WebRTC: Add {track.Kind} track, id={track.Id}");

                // Limit the bitrate (in bits per second) and framerate of all encoders.
                var parameters = sender.GetParameters();
                foreach (var encoding in parameters.encodings)
                {
                    encoding.maxBitrate = 2048 * 1024;
                    encoding.maxFramerate = 30;
                }

                // Apply the updated parameters.
                sender.SetParameters(parameters);
            }

            yield return StartCoroutine(GrabAudio());
        }

        // Grab the game audio from the scene's only AudioListener, generally the
        // main camera that this script is attached to. Audio data arrives via
        // the function this.OnAudioFilterRead.
        IEnumerator GrabAudio()
        {
            // Use the empty constructor to capture from the listener, see https://docs.unity3d.com/Packages/com.unity.webrtc@2.4/manual/audiostreaming.html
            audioStreamTrack = new AudioStreamTrack();

            pc.AddTrack(audioStreamTrack);
            Debug.Log($"WebRTC: Add audio track, id={audioStreamTrack.Id}");

            // When audio data arrives from the listener (the gameObject this script is
            // attached to, generally the MainCamera), feed it to the audio stream track.
            handleOnAudioFilterRead = (float[] data, int channels) =>
            {
                if (audioStreamTrack != null)
                {
                    const int sampleRate = 48000;
                    audioStreamTrack.SetData(data, channels, sampleRate);
                }
            };
           
            yield return StartCoroutine(PeerNegotiationNeeded());
        }

        // Generate offer.
        IEnumerator PeerNegotiationNeeded()
        {
            var op = pc.CreateOffer();
            yield return op;

            Debug.Log($"WebRTC: CreateOffer done={op.IsDone}, hasError={op.IsError}, {op.Desc}");
            if (op.IsError) yield break;

            yield return StartCoroutine(OnCreateOfferSuccess(op.Desc));
        }

        // When the offer is ready, set it as the local description.
        IEnumerator OnCreateOfferSuccess(RTCSessionDescription offer)
        {
            var op = pc.SetLocalDescription(ref offer);
            Debug.Log($"WebRTC: SetLocalDescription {offer.type} {offer.sdp}");
            yield return op;

            Debug.Log($"WebRTC: Offer done={op.IsDone}, hasError={op.IsError}");
            if (op.IsError) yield break;

            yield return StartCoroutine(ExchangeSDP(url, offer.sdp));
        }

        // Exchange the SDP offer with the server and receive the answer.
        IEnumerator ExchangeSDP(string url, string offer)
        {
            // POST the offer to the WHIP endpoint with UnityWebRequest.
            UnityWebRequest request = new UnityWebRequest(url, "POST");
            request.SetRequestHeader("Authorization", "bearer " + webrtc_url);
            request.SetRequestHeader("Content-Type", "application/sdp");
           
            byte[] bodyRaw = System.Text.Encoding.UTF8.GetBytes(offer);
            request.uploadHandler = new UploadHandlerRaw(bodyRaw);
            request.downloadHandler = new DownloadHandlerBuffer();

            Debug.Log("offer=" + offer );
            yield return request.SendWebRequest();

        // Covert async to coroutine yield, wait for task to be completed.
            if (request.result == UnityWebRequest.Result.Success)
            {
                Debug.Log("Request succeeded: " + request.downloadHandler.text);
            }
            else
            {
                Debug.LogError("Request failed: " + request.error);
                yield break;
            }

            yield return StartCoroutine(OnGotAnswerSuccess(request.downloadHandler.text));
        }

        // When the answer is received, set it as the remote description.
        IEnumerator OnGotAnswerSuccess(string answer)
        {
            RTCSessionDescription desc = new RTCSessionDescription();
            desc.type = RTCSdpType.Answer;
            desc.sdp = answer;
            var op = pc.SetRemoteDescription(ref desc);
            yield return op;

            Debug.Log($"WebRTC: Answer done={op.IsDone}, hasError={op.IsError}");
            yield break;
        }
    }

    private void OnAudioFilterRead(float[] data, int channels)
    {
        if (handleOnAudioFilterRead != null)
        {
            handleOnAudioFilterRead(data, channels);
        }
    }

    void Update()
    {
       
    }
}
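
For reference, here is a minimal sketch (not part of the original script) of the setup I would expect for two separate pushes: two SrsStreamer instances, each bound to its own camera and its own WHIP URL, so each push gets its own RTCPeerConnection and its own WHIP session. The DualStreamerSetup class name, the cameraA/cameraB fields, and the example URLs and tokens are placeholders for illustration only.

// A minimal sketch, assuming the goal is two fully independent WHIP sessions.
// All names, URLs, and tokens below are placeholders, not part of the project above.
using UnityEngine;

public class DualStreamerSetup : MonoBehaviour
{
    // Two dedicated cameras, one per outgoing stream (assign in the Inspector).
    public Camera cameraA;
    public Camera cameraB;

    void Start()
    {
        // First streamer: its own component instance, camera, and WHIP URL.
        var streamerA = gameObject.AddComponent<SrsStreamer>();
        streamerA.sourceCamera = cameraA;
        streamerA.width = 1280;
        streamerA.height = 720;
        streamerA.url = "http://example.com/rtc/v1/whip/?app=live&stream=a"; // placeholder
        streamerA.webrtc_url = "token-a"; // placeholder bearer token

        // Second streamer: a separate instance, camera, and WHIP URL, so its tracks
        // travel over a separate RTCPeerConnection and a separate WHIP session.
        var streamerB = gameObject.AddComponent<SrsStreamer>();
        streamerB.sourceCamera = cameraB;
        streamerB.width = 1280;
        streamerB.height = 720;
        streamerB.url = "http://example.com/rtc/v1/whip/?app=live&stream=b"; // placeholder
        streamerB.webrtc_url = "token-b"; // placeholder bearer token

        // Note: each SrsStreamer instance also starts the WebRTC.Update() coroutine;
        // depending on the package version you may prefer to start it only once.
    }
}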