I'm using mediasoup and WebRTC to create media streams.
async consume(transport) {
const { rtpCapabilities } = this.device;
const data = await this.socket.request('consume', { rtpCapabilities });
const {
producerId,
id,
kind,
rtpParameters,
} = data;
//Get playerID
let playerID = await this.socket.request('getPlayerID', id);
playerID = parseInt(playerID, 10);
let producerIDS = await this.socket.request('getProducerIDS', id);
let consumerIDS = await this.socket.request('getConsumerIDS', id);
console.log("producerIDS", producerIDS);
console.log("consumerIDS", consumerIDS);
let codecOptions = {};
const consumer = await transport.consume({
id: id,
producerId: producerId,
kind: kind,
rtpParameters: rtpParameters,
codecOptions: codecOptions,
});
console.log("First producer ", producerIDS[0]);
console.log("Current producer ", producerId);
console.log("Consumer ID", id)
console.log("Player ID:", playerID);
console.log("Consumer1", consumer, consumer.track);
//stream.addTrack(consumer.track);
let audioTracks = [];
audioTracks.push(consumer.track);
if (playerID === 2 && producerIDS.length === 2) {
const data1 = await this.socket.request('consume2', { rtpCapabilities });
console.log("Data: ", data);
console.log("producerId: ", data.producerId);
const consumer2 = await transport.consume({
id: data1.id,
producerId: data1.producerId,
kind: data1.kind,
rtpParameters: data1.rtpParameters,
codecOptions: codecOptions,
});
console.log("Second producer", data1.producerId);
console.log("Consumer2", consumer2, consumer2.track);
audioTracks.push(consumer2.track);
//stream.addTrack(consumer2.track);
}
console.log("Audio Tracks: ", audioTracks);
//const sources = audioTracks.map(t => ac.createMediaStreamSource(new MediaStream([t])));
//var dest = ac.createMediaStreamDestination();
//var aud1 = ac.createMediaStreamSource(s1);
//aud1.connect(dest);
//sources.forEach(s => s.connect(dest));
//console.log(dest.stream.getAudioTracks());
//stream.addTrack(dest.stream.getAudioTracks()[0]);
//stream = dest.stream;
/*
let stream = new MediaStream();
if (audioTracks.length <= 1) {
stream.addTrack(audioTracks[0]);
}
else {
stream.addTrack(audioTracks[1]);
}
*/
let stream = new MediaStream();
if (audioTracks.length <= 1) {
stream.addTrack(audioTracks[0]);
}
else {
const ac = new AudioContext();
const dest = ac.createMediaStreamDestination();
let aud1stream = new MediaStream();
aud1stream.addTrack(audioTracks[0]);
let aud2stream = new MediaStream();
aud2stream.addTrack(audioTracks[1]);
const aud1 = ac.createMediaStreamSource(aud1stream);
const aud2 = ac.createMediaStreamSource(aud2stream);
var gain = ac.createGain();
gain.gain.value = 10;
gain.connect(dest);
aud1.connect(dest);
aud2.connect(dest);
stream = dest.stream;
}
console.log("Stream tracks: ", stream.getAudioTracks());
return stream;
}
This is the part where I'm attempting to mix the two audio streams together:
else {
const ac = new AudioContext();
const dest = ac.createMediaStreamDestination();
let aud1stream = new MediaStream();
aud1stream.addTrack(audioTracks[0]);
let aud2stream = new MediaStream();
aud2stream.addTrack(audioTracks[1]);
const aud1 = ac.createMediaStreamSource(aud1stream);
const aud2 = ac.createMediaStreamSource(aud2stream);
var gain = ac.createGain();
gain.gain.value = 10;
gain.connect(dest);
aud1.connect(dest);
aud2.connect(dest);
stream = dest.stream;
}
However, the stream indicates that it is streaming, but no audio can be heard. I'd appreciate any help — all I want is to mix the two streams. If AudioContext doesn't work, does anyone have advice on using GStreamer (or something else) to mix the audio? Thanks.