LINXD-2270: Allow server and web client to have full duplex #20

Merged
sergiu merged 2 commits from LINXD-2270-full-duplex into develop 2022-12-27 11:12:39 +00:00
5 changed files with 605 additions and 273 deletions

436
app.js
View File

@ -13,50 +13,47 @@ try {
}
const mediasoup = require('mediasoup');
let worker
let worker;
/**
* videoCalls
* |-> Router
* |-> Producer
* |-> Consumer
* |-> Producer Transport
* |-> Consumer Transport
*
* videoCalls - Dictionary of Object(s)
* '<callId>': {
* router: Router,
* producer: Producer,
* producerTransport: Producer Transport,
* consumer: Consumer,
* consumerTransport: Consumer Transport
* initiatorAudioProducer: Producer,
* initiatorVideoProducer: Producer,
* receiverVideoProducer: Producer,
* receiverAudioProducer: Producer,
* initiatorProducerTransport: Producer Transport,
* receiverProducerTransport: Producer Transport,
* initiatorConsumerVideo: Consumer,
* initiatorConsumerAudio: Consumer,
* initiatorConsumerTransport: Consumer Transport,
* receiverConsumerVideo: Consumer,
* receiverConsumerAudio: Consumer,
* receiverConsumerTransport: Consumer Transport,
* initiatorSockerId: socket.id of the initiator,
* receiverSocketId: socket.id of the receiver
* }
*
**/
let videoCalls = {}
let socketDetails = {}
let videoCalls = {};
let socketDetails = {};
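For illustration only (editor's sketch, not part of this change; the call id 42 and the socket ids are made up): with one active full-duplex call, the two dictionaries relate like this:
// socketDetails: maps each connected socket to its call
//   { 'sockA': 42, 'sockB': 42 }
// videoCalls[42]: everything mediasoup-side for that call, per the comment above
//   { router, receiverSocketId: 'sockA', initiatorSockerId: 'sockB',
//     receiverProducerTransport, receiverVideoProducer, receiverAudioProducer,
//     initiatorProducerTransport, initiatorVideoProducer, initiatorAudioProducer,
//     initiatorConsumerTransport, initiatorConsumerVideo, initiatorConsumerAudio,
//     receiverConsumerTransport, receiverConsumerVideo, receiverConsumerAudio }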
app.get('/', (_req, res) => {
res.send('Hello from mediasoup app!')
})
});
app.use('/sfu', express.static(path.join(__dirname, 'public')))
app.use('/sfu', express.static(path.join(__dirname, 'public')));
// SSL cert for HTTPS access
const options = {
key: fs.readFileSync(process.env.SERVER_KEY, 'utf-8'),
cert: fs.readFileSync(process.env.SERVER_CERT, 'utf-8'),
}
};
const httpsServer = https.createServer(options, app);
const io = new Server(httpsServer, {
allowEIO3: true,
origins: ["*:*"],
// allowRequest: (req, next) => {
// console.log('req', req);
// next(null, true)
// }
origins: ["*:*"]
});
// const io = new Server(server, { origins: '*:*', allowEIO3: true });
httpsServer.listen(process.env.PORT, () => {
console.log('Video server listening on port:', process.env.PORT);
@ -141,32 +138,18 @@ const mediaCodecs = [
'x-google-start-bitrate' : 1000
}
}
// {
// kind: 'audio',
// mimeType: 'audio/opus',
// clockRate: 48000,
// channels: 2,
// },
// {
// kind: 'video',
// mimeType: 'video/VP8',
// clockRate: 90000,
// parameters: {
// 'x-google-start-bitrate': 1000,
// },
// },
];
const closeCall = (callId) => {
try {
if (callId && videoCalls[callId]) {
videoCalls[callId].producerVideo?.close();
videoCalls[callId].producerAudio?.close();
videoCalls[callId].consumerVideo?.close();
videoCalls[callId].consumerAudio?.close();
videoCalls[callId].receiverVideoProducer?.close();
videoCalls[callId].receiverAudioProducer?.close();
videoCalls[callId].initiatorConsumerVideo?.close();
videoCalls[callId].initiatorConsumerAudio?.close();
videoCalls[callId]?.consumerTransport?.close();
videoCalls[callId]?.producerTransport?.close();
videoCalls[callId]?.initiatorConsumerTransport?.close();
videoCalls[callId]?.receiverProducerTransport?.close();
videoCalls[callId]?.router?.close();
delete videoCalls[callId];
} else {
@ -212,9 +195,11 @@ peers.on('connection', async socket => {
console.log('[createRoom] callId', callId);
videoCalls[callId] = { router: await worker.createRouter({ mediaCodecs }) }
console.log(`[createRoom] Router ID: ${videoCalls[callId].router.id}`);
videoCalls[callId].receiverSocketId = socket.id
} else {
videoCalls[callId].initiatorSockerId = socket.id
}
socketDetails[socket.id] = callId;
// rtpCapabilities is set for callback
console.log('[getRtpCapabilities] callId', callId);
callbackResponse = {
@ -232,7 +217,7 @@ peers.on('connection', async socket => {
/*
- Client emits a request to create server side Transport
- Depending on the sender, producerTransport or consumerTransport is created on that router
- Depending on the sender flag and on whether the socket is the call initiator, a producer or consumer transport is created on that router
- It will return transport parameters, which the client needs in order to create its SendTransport/RecvTransport
- If the client is a producer (sender: true), it will use the parameters for device.createSendTransport(params)
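For quick reference, the four branches in the handler below map out like this (editor's sketch; the field names are the ones used in the handler):
//   sender: true,  !isInitiator  -> videoCalls[callId].receiverProducerTransport
//   sender: true,   isInitiator  -> videoCalls[callId].initiatorProducerTransport
//   sender: false, !isInitiator  -> videoCalls[callId].receiverConsumerTransport
//   sender: false,  isInitiator  -> videoCalls[callId].initiatorConsumerTransport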
@ -241,20 +226,21 @@ peers.on('connection', async socket => {
socket.on('createWebRtcTransport', async ({ sender }, callback) => {
try {
const callId = socketDetails[socket.id];
console.log(`[createWebRtcTransport] sender ${sender} | callId ${callId}`);
console.log(`[createWebRtcTransport] socket ${socket.id} | sender ${sender} | callId ${callId}`);
if (sender) {
if (!videoCalls[callId].producerTransport) {
videoCalls[callId].producerTransport = await createWebRtcTransportLayer(callId, callback);
if(!videoCalls[callId].receiverProducerTransport && !isInitiator(callId, socket.id)) {
videoCalls[callId].receiverProducerTransport = await createWebRtcTransportLayer(callId, callback);
} else if(!videoCalls[callId].initiatorProducerTransport && isInitiator(callId, socket.id)) {
videoCalls[callId].initiatorProducerTransport = await createWebRtcTransportLayer(callId, callback);
} else {
console.log(`producerTransport has already been defined | callId ${callId}`);
callback(null);
}
} else if (!sender) {
if (!videoCalls[callId].consumerTransport) {
videoCalls[callId].consumerTransport = await createWebRtcTransportLayer(callId, callback);
} else {
console.log(`consumerTransport has already been defined | callId ${callId}`);
callback(null);
if(!videoCalls[callId].receiverConsumerTransport && !isInitiator(callId, socket.id)) {
videoCalls[callId].receiverConsumerTransport = await createWebRtcTransportLayer(callId, callback);
} else if(!videoCalls[callId].initiatorConsumerTransport && isInitiator(callId, socket.id)) {
videoCalls[callId].initiatorConsumerTransport = await createWebRtcTransportLayer(callId, callback);
}
}
} catch (error) {
@ -272,64 +258,103 @@ peers.on('connection', async socket => {
const callId = socketDetails[socket.id];
if (typeof dtlsParameters === 'string') dtlsParameters = JSON.parse(dtlsParameters);
console.log(`[transport-connect] socket.id ${socket.id} | callId ${callId}`);
await videoCalls[callId].producerTransport.connect({ dtlsParameters });
console.log(`[transport-connect] socket ${socket.id} | callId ${callId}`);
if (!isInitiator(callId, socket.id)) {
await videoCalls[callId].receiverProducerTransport.connect({ dtlsParameters });
} else {
await videoCalls[callId].initiatorProducerTransport.connect({ dtlsParameters });
}
} catch (error) {
console.log(`ERROR | transport-connect | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
/*
- The event sent by the client (PRODUCER) after successfully connecting to producerTransport
- For the router with the id callId, we make produce on producerTransport
- The event sent by the client (PRODUCER) after successfully connecting to receiverProducerTransport/initiatorProducerTransport
- For the router with the id callId, we call produce() on receiverProducerTransport/initiatorProducerTransport
- Create the handler on producer at the 'transportclose' event
*/
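For context, a minimal sketch of the client-side wiring that typically drives this event; it is not part of the hunks shown in this diff and simply follows the mediasoup-client transport API referenced further down:
// Sketch only, assuming the standard mediasoup-client 'produce' event.
producerTransport.on('produce', ({ kind, rtpParameters, appData }, callback, errback) => {
  socket.emit('transport-produce', { kind, rtpParameters, appData }, ({ id }) => {
    callback({ id }) // hand the server-side Producer id back to mediasoup-client
  })
})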
socket.on('transport-produce', async ({ kind, rtpParameters, appData }, callback) => {
try {
const callId = socketDetails[socket.id];
if (typeof rtpParameters === 'string') rtpParameters = JSON.parse(rtpParameters);
console.log(`[transport-produce] kind: ${kind} | socket.id: ${socket.id} | callId: ${callId}`);
console.log('kind', kind);
console.log('rtpParameters', rtpParameters);
if (kind === 'video') {
videoCalls[callId].producerVideo = await videoCalls[callId].producerTransport.produce({
kind,
rtpParameters,
});
socket.on('transport-produce', async ({ kind, rtpParameters, appData }, callback) => {
try {
const callId = socketDetails[socket.id];
if (typeof rtpParameters === 'string') rtpParameters = JSON.parse(rtpParameters);
console.log(`[transport-produce] kind: ${kind} | socket: ${socket.id} | callId: ${callId}`);
if (kind === 'video') {
if (!isInitiator(callId, socket.id)) {
videoCalls[callId].receiverVideoProducer = await videoCalls[callId].receiverProducerTransport.produce({
kind,
rtpParameters,
});
console.log(`[transport-produce] Producer ID: ${videoCalls[callId].producerVideo.id} | kind: ${videoCalls[callId].producerVideo.kind}`);
videoCalls[callId].producerVideo.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
console.log(`[transport-produce] receiverVideoProducer Producer ID: ${videoCalls[callId].receiverVideoProducer.id} | kind: ${videoCalls[callId].receiverVideoProducer.kind}`);
// Send back to the client the Producer's id
callback && callback({
id: videoCalls[callId].producerVideo.id
});
videoCalls[callId].receiverVideoProducer.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
// Send back to the client the Producer's id
callback && callback({
id: videoCalls[callId].receiverVideoProducer.id
});
} else {
videoCalls[callId].initiatorVideoProducer = await videoCalls[callId].initiatorProducerTransport.produce({
kind,
rtpParameters,
});
console.log(`[transport-produce] initiatorVideoProducer Producer ID: ${videoCalls[callId].initiatorVideoProducer.id} | kind: ${videoCalls[callId].initiatorVideoProducer.kind}`);
videoCalls[callId].initiatorVideoProducer.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
callback && callback({
id: videoCalls[callId].initiatorVideoProducer.id
});
}
} else if (kind === 'audio') {
videoCalls[callId].producerAudio = await videoCalls[callId].producerTransport.produce({
kind,
rtpParameters,
});
if (!isInitiator(callId, socket.id)) {
videoCalls[callId].receiverAudioProducer = await videoCalls[callId].receiverProducerTransport.produce({
kind,
rtpParameters,
});
console.log(`[transport-produce] Producer ID: ${videoCalls[callId].producerAudio.id} | kind: ${videoCalls[callId].producerAudio.kind}`);
videoCalls[callId].producerAudio.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
console.log(`[transport-produce] receiverAudioProducer Producer ID: ${videoCalls[callId].receiverAudioProducer.id} | kind: ${videoCalls[callId].receiverAudioProducer.kind}`);
videoCalls[callId].receiverAudioProducer.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
// Send back to the client the Producer's id
callback && callback({
id: videoCalls[callId].producerAudio.id
});
// Send back to the client the Producer's id
callback && callback({
id: videoCalls[callId].receiverAudioProducer.id
});
} else {
videoCalls[callId].initiatorAudioProducer = await videoCalls[callId].initiatorProducerTransport.produce({
kind,
rtpParameters,
});
console.log(`[transport-produce] initiatorAudioProducer Producer ID: ${videoCalls[callId].initiatorAudioProducer.id} | kind: ${videoCalls[callId].initiatorAudioProducer.kind}`);
videoCalls[callId].initiatorAudioProducer.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
// Send back to the client the Producer's id
callback && callback({
id: videoCalls[callId].initiatorAudioProducer.id
});
}
}
} catch (error) {
console.log(`ERROR | transport-produce | callId ${socketDetails[socket.id]} | ${error.message}`);
@ -343,8 +368,13 @@ peers.on('connection', async socket => {
socket.on('transport-recv-connect', async ({ dtlsParameters }) => {
try {
const callId = socketDetails[socket.id];
console.log(`[transport-recv-connect] socket.id ${socket.id} | callId ${callId}`);
await videoCalls[callId].consumerTransport.connect({ dtlsParameters });
console.log(`[transport-recv-connect] socket ${socket.id} | callId ${callId}`);
// await videoCalls[callId].consumerTransport.connect({ dtlsParameters });
if(!isInitiator(callId, socket.id)) {
await videoCalls[callId].receiverConsumerTransport.connect({ dtlsParameters });
} else if(isInitiator(callId, socket.id)) {
await videoCalls[callId].initiatorConsumerTransport.connect({ dtlsParameters });
}
} catch (error) {
console.log(`ERROR | transport-recv-connect | callId ${socketDetails[socket.id]} | ${error.message}`);
}
@ -359,34 +389,46 @@ peers.on('connection', async socket => {
*/
socket.on('consume', async ({ rtpCapabilities }, callback) => {
try {
console.log(`[consume] rtpCapabilities: ${JSON.stringify(rtpCapabilities)}`);
const callId = socketDetails[socket.id];
console.log(`[consume] socket ${socket.id} | callId ${callId} | rtpCapabilities: ${JSON.stringify(rtpCapabilities)}`);
console.log('[consume] callId', callId);
let canConsumeVideo, canConsumeAudio;
if (isInitiator(callId, socket.id)) {
canConsumeVideo = !!videoCalls[callId].receiverVideoProducer && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].receiverVideoProducer.id,
rtpCapabilities
});
canConsumeAudio = !!videoCalls[callId].receiverAudioProducer && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].receiverAudioProducer.id,
rtpCapabilities
});
const canConsumeVideo = !!videoCalls[callId].producerVideo && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].producerVideo.id,
rtpCapabilities
})
const canConsumeAudio = !!videoCalls[callId].producerAudio && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].producerAudio.id,
rtpCapabilities
})
} else {
canConsumeVideo = !!videoCalls[callId].initiatorVideoProducer && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].initiatorVideoProducer.id,
rtpCapabilities
});
canConsumeAudio = !!videoCalls[callId].initiatorAudioProducer && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].initiatorAudioProducer.id,
rtpCapabilities
});
}
console.log('[consume] canConsumeVideo', canConsumeVideo);
console.log('[consume] canConsumeAudio', canConsumeAudio);
if (canConsumeVideo && !canConsumeAudio) {
console.log('1');
const videoParams = await consumeVideo(callId, rtpCapabilities)
console.log('videoParams', videoParams);
const videoParams = await consumeVideo(callId, socket.id, rtpCapabilities)
callback({ videoParams, audioParams: null });
} else if (canConsumeVideo && canConsumeAudio) {
console.log('2');
const videoParams = await consumeVideo(callId, rtpCapabilities)
const audioParams = await consumeAudio(callId, rtpCapabilities)
const videoParams = await consumeVideo(callId, socket.id, rtpCapabilities)
const audioParams = await consumeAudio(callId, socket.id, rtpCapabilities)
callback({ videoParams, audioParams });
} else if (!canConsumeVideo && canConsumeAudio) {
const audioParams = await consumeAudio(callId, socket.id, rtpCapabilities)
const data = { videoParams: null, audioParams };
callback(data);
} else {
console.log(`[consume] Can't consume | callId ${callId}`);
callback(null);
@ -405,71 +447,139 @@ peers.on('connection', async socket => {
try {
const callId = socketDetails[socket.id];
console.log(`[consumer-resume] callId ${callId}`)
await videoCalls[callId].consumerVideo.resume();
await videoCalls[callId].consumerAudio.resume();
if (isInitiator(callId, socket.id)) {
console.log(`[consumer-resume] isInitiator true`);
await videoCalls[callId].initiatorConsumerVideo.resume();
await videoCalls[callId].initiatorConsumerAudio.resume();
} else {
console.log(`[consumer-resume] isInitiator false`);
(videoCalls[callId].receiverConsumerVideo) && await videoCalls[callId].receiverConsumerVideo.resume();
(videoCalls[callId].receiverConsumerAudio) && await videoCalls[callId].receiverConsumerAudio.resume();
}
} catch (error) {
console.log(`ERROR | consumer-resume | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
});
const consumeVideo = async (callId, rtpCapabilities) => {
videoCalls[callId].consumerVideo = await videoCalls[callId].consumerTransport.consume({
producerId: videoCalls[callId].producerVideo.id,
rtpCapabilities,
paused: true,
});
const consumeVideo = async (callId, socketId, rtpCapabilities) => {
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
videoCalls[callId].consumerVideo.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport close from consumer', callId);
closeCall(callId);
});
if(isInitiator(callId, socketId)) {
videoCalls[callId].initiatorConsumerVideo = await videoCalls[callId].initiatorConsumerTransport.consume({
producerId: videoCalls[callId].receiverVideoProducer.id,
rtpCapabilities,
paused: true,
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-producerclose
videoCalls[callId].consumerVideo.on('producerclose', () => {
const callId = socketDetails[socket.id];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
videoCalls[callId].initiatorConsumerVideo.on('transportclose', () => {
const callId = socketDetails[socketId];
console.log('transport close from consumer', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].consumerVideo.id,
producerId: videoCalls[callId].producerVideo.id,
kind: 'video',
rtpParameters: videoCalls[callId].consumerVideo.rtpParameters,
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-producerclose
videoCalls[callId].initiatorConsumerVideo.on('producerclose', () => {
const callId = socketDetails[socketId];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].initiatorConsumerVideo.id,
producerId: videoCalls[callId].receiverVideoProducer.id,
kind: 'video',
rtpParameters: videoCalls[callId].initiatorConsumerVideo.rtpParameters,
}
} else {
videoCalls[callId].receiverConsumerVideo = await videoCalls[callId].receiverConsumerTransport.consume({
producerId: videoCalls[callId].initiatorVideoProducer.id,
rtpCapabilities,
paused: true,
});
videoCalls[callId].receiverConsumerVideo.on('transportclose', () => {
const callId = socketDetails[socketId];
console.log('transport close from consumer', callId);
closeCall(callId);
});
videoCalls[callId].receiverConsumerVideo.on('producerclose', () => {
const callId = socketDetails[socketId];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].receiverConsumerVideo.id,
producerId: videoCalls[callId].initiatorVideoProducer.id,
kind: 'video',
rtpParameters: videoCalls[callId].receiverConsumerVideo.rtpParameters,
}
}
}
const consumeAudio = async (callId, rtpCapabilities) => {
videoCalls[callId].consumerAudio = await videoCalls[callId].consumerTransport.consume({
producerId: videoCalls[callId].producerAudio.id,
rtpCapabilities,
paused: true,
});
const consumeAudio = async (callId, socketId, rtpCapabilities) => {
if(isInitiator(callId, socketId)) {
videoCalls[callId].initiatorConsumerAudio = await videoCalls[callId].initiatorConsumerTransport.consume({
producerId: videoCalls[callId].receiverAudioProducer.id,
rtpCapabilities,
paused: true,
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
videoCalls[callId].consumerAudio.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport close from consumer', callId);
closeCall(callId);
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
videoCalls[callId].initiatorConsumerAudio.on('transportclose', () => {
const callId = socketDetails[socketId];
console.log('transport close from consumer', callId);
closeCall(callId);
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-producerclose
videoCalls[callId].consumerAudio.on('producerclose', () => {
const callId = socketDetails[socket.id];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].consumerAudio.id,
producerId: videoCalls[callId].producerAudio.id,
kind: 'audio',
rtpParameters: videoCalls[callId].consumerAudio.rtpParameters,
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-producerclose
videoCalls[callId].initiatorConsumerAudio.on('producerclose', () => {
const callId = socketDetails[socketId];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].initiatorConsumerAudio.id,
producerId: videoCalls[callId].receiverAudioProducer.id,
kind: 'audio',
rtpParameters: videoCalls[callId].initiatorConsumerAudio.rtpParameters,
}
} else {
videoCalls[callId].receiverConsumerAudio = await videoCalls[callId].receiverConsumerTransport.consume({
producerId: videoCalls[callId].initiatorAudioProducer.id,
rtpCapabilities,
paused: true,
});
videoCalls[callId].receiverConsumerAudio.on('transportclose', () => {
const callId = socketDetails[socketId];
console.log('transport close from consumer', callId);
closeCall(callId);
});
videoCalls[callId].receiverConsumerAudio.on('producerclose', () => {
const callId = socketDetails[socketId];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].receiverConsumerAudio.id,
producerId: videoCalls[callId].initiatorAudioProducer.id,
kind: 'audio',
rtpParameters: videoCalls[callId].receiverConsumerAudio.rtpParameters,
}
}
}
const isInitiator = (callId, socketId) => {
return (videoCalls[callId].initiatorSockerId === socketId);
}
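Since the role naming is easy to trip over, a short recap of how the flag is derived (editor's note, based on the createRoom branch near the top of this file):
// The first socket to join a callId creates the router and is stored as
// receiverSocketId; the second one is stored as initiatorSockerId, so:
//   isInitiator(callId, firstSocketId)  === false  -> served via the receiver* fields
//   isInitiator(callId, secondSocketId) === true   -> served via the initiator* fields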
/*
- Called from the 'createWebRtcTransport' event and assigned to the consumer or producer transport
- It will return transport parameters, which the client needs to create its RecvTransport

View File

@ -20353,7 +20353,7 @@ module.exports = yeast;
},{}],94:[function(require,module,exports){
module.exports = {
hubAddress: 'https://hub.dev.linx.safemobile.com/',
mediasoupAddress: 'https://video.safemobile.org',
mediasoupAddress: 'https://testing.video.safemobile.org',
}
},{}],95:[function(require,module,exports){
const io = require('socket.io-client')
@ -20368,10 +20368,24 @@ const ASSET_NAME = urlParams.get('assetName') || null;
const ASSET_TYPE = urlParams.get('assetType') || null;
let callId = parseInt(urlParams.get('callId')) || null;
const IS_PRODUCER = urlParams.get('producer') === 'true' ? true : false
let remoteVideo = document.getElementById('remoteVideo')
remoteVideo.defaultMuted = true
let produceAudio = false
console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId', callId, ' | IS_PRODUCER', IS_PRODUCER)
console.log('🟩 config', config)
const produceAudioSelector = document.getElementById('produceAudio');
produceAudioSelector.addEventListener('change', e => {
if(e.target.checked) {
produceAudio = true
console.log('produce audio');
} else {
produceAudio = false
}
});
let socket, hub
let device
let rtpCapabilities
@ -20381,6 +20395,21 @@ let producerVideo
let producerAudio
let consumer
let originAssetId
let consumerVideo // local consumer video(consumer not transport)
let consumerAudio // local consumer audio(consumer not transport)
const remoteSoundControl = document.getElementById('remoteSoundControl');
remoteSoundControl.addEventListener('click', function handleClick() {
console.log('remoteSoundControl.textContent', remoteSoundControl.textContent);
if (remoteSoundControl.textContent === 'Unmute') {
remoteVideo.muted = false
remoteSoundControl.textContent = 'Mute';
} else {
remoteVideo.muted = true
remoteSoundControl.textContent = 'Unmute';
}
});
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
@ -20502,7 +20531,7 @@ const streamSuccess = (stream) => {
const getLocalStream = () => {
console.log('[getLocalStream]');
navigator.mediaDevices.getUserMedia({
audio: true,
audio: produceAudio ? true : false,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
@ -20580,7 +20609,7 @@ const createSendTransport = () => {
console.log('[createSendTransport]');
// see server's socket.on('createWebRtcTransport', sender?, ...)
// this is a call from Producer, so sender = true
socket.emit('createWebRtcTransport', { sender: true, callId }, (value) => {
socket.emit('createWebRtcTransport', { sender: true }, (value) => {
console.log(`[createWebRtcTransport] value: ${JSON.stringify(value)}`);
@ -20651,34 +20680,36 @@ const connectSendTransport = async () => {
// this action will trigger the 'connect' and 'produce' events above
// Produce video
producerVideo = await producerTransport.produce(videoParams)
let producerVideoHandler = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideoHandler', producerVideoHandler);
producerVideo.on('trackended', () => {
producerVideoHandler.on('trackended', () => {
console.log('track ended')
// close video track
})
producerVideo.on('transportclose', () => {
producerVideoHandler.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// Produce audio
producerAudio = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
if (produceAudio) {
let producerAudioHandler = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudioHandler', producerAudioHandler);
producerAudioHandler.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudioHandler.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
}
const answer = {
origin_asset_id: ASSET_ID,
@ -20762,34 +20793,95 @@ const connectRecvTransport = async () => {
await socket.emit('consume', {
rtpCapabilities: device.rtpCapabilities,
callId
}, async ({ params }) => {
if (params.error) {
console.log('Cannot Consume')
return
}
// Then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
producerId: params.producerId,
kind: params.kind,
rtpParameters: params.rtpParameters
})
// destructure and retrieve the video track from the producer
const { track } = consumer
}, async ({videoParams, audioParams}) => {
console.log(`[consume] 🟩 videoParams`, videoParams)
console.log(`[consume] 🟩 audioParams`, audioParams)
console.log('[consume] 🟩 consumerTransport', consumerTransport)
let stream = new MediaStream()
stream.addTrack(track)
// stream.removeTrack(track)
remoteVideo.srcObject = stream
socket.emit('consumer-resume')
console.log('consumer', consumer);
// Maybe the unit does not produce video or audio, so we must only consume what is produced
if (videoParams) {
console.log('❗ Have VIDEO stream to consume');
stream.addTrack(await getVideoTrask(videoParams))
} else {
console.log('❗ Don\'t have VIDEO stream to consume');
}
if (audioParams) {
console.log('❗ Have AUDIO stream to consume');
let audioTrack = await getAudioTrask(audioParams)
stream.addTrack(audioTrack)
} else {
console.log('❗ Don\'t have AUDIO stream to consume');
}
socket.emit('consumer-resume')
remoteVideo.srcObject = stream
remoteVideo.setAttribute('autoplay', true)
remoteVideo.play()
.then(() => {
console.log('remoteVideo PLAY')
})
.catch((error) => {
displayError(`remoteVideo PLAY ERROR | ${error.message}`)
})
})
}
const getVideoTrask = async (videoParams) => {
consumerVideo = await consumerTransport.consume({
id: videoParams.id,
producerId: videoParams.producerId,
kind: videoParams.kind,
rtpParameters: videoParams.rtpParameters
})
consumerVideo.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
return consumerVideo.track
}
const getAudioTrask = async (audioParams) => {
consumerAudio = await consumerTransport.consume({
id: audioParams.id,
producerId: audioParams.producerId,
kind: audioParams.kind,
rtpParameters: audioParams.rtpParameters
})
consumerAudio.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
const audioTrack = consumerAudio.track
// applyConstraints() on a MediaStreamTrack takes the constraints object directly
// (no `audio:` wrapper); highpassFilter is non-standard and may be ignored.
audioTrack.applyConstraints({
advanced: [
{ echoCancellation: { exact: true } },
{ autoGainControl: { exact: true } },
{ noiseSuppression: { exact: true } },
{ highpassFilter: { exact: true } }
]
}).catch((error) => console.log('applyConstraints error', error.message))
return audioTrack
}
const closeCall = () => {
console.log('closeCall');
@ -20811,8 +20903,31 @@ const closeCall = () => {
resetCallSettings()
}
const consume = async () => {
console.log('[consume]')
console.log('createRecvTransport Consumer')
await socket.emit('createWebRtcTransport', { sender: false, callId, dispatcher: true }, ({ params }) => {
if (params.error) {
console.log('createRecvTransport | createWebRtcTransport | Error', params.error)
return
}
consumerTransport = device.createRecvTransport(params)
consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
try {
await socket.emit('transport-recv-connect', {
dtlsParameters,
})
callback()
} catch (error) {
errback(error)
}
})
connectRecvTransport()
})
}
btnLocalVideo.addEventListener('click', getLocalStream)
btnRecvSendTransport.addEventListener('click', goConnect)
btnRecvSendTransport.addEventListener('click', consume)
btnCloseCall.addEventListener('click', closeCall)
},{"./config":94,"mediasoup-client":66,"socket.io-client":82}]},{},[95]);

View File

@ -1,4 +1,4 @@
module.exports = {
hubAddress: 'https://hub.dev.linx.safemobile.com/',
mediasoupAddress: 'https://video.safemobile.org',
mediasoupAddress: 'https://testing.video.safemobile.org',
}

View File

@ -34,6 +34,9 @@
<body>
<body>
<div id="video">
<legend>Client options:</legend>
<input type="checkbox" id="produceAudio" name="produceAudio">
<label for="produceAudio">Produce audio</label><br>
<table>
<thead>
<th>Local Video</th>
@ -43,12 +46,24 @@
<tr>
<td>
<div id="sharedBtns">
<video id="localVideo" autoplay class="video" muted></video>
<video
id="localVideo"
class="video"
autoplay
muted
playsinline
></video>
</div>
</td>
<td>
<div id="sharedBtns">
<video id="remoteVideo" autoplay class="video" ></video>
<video
id="remoteVideo"
class="video"
autoplay
muted
playsinline
></video>
</div>
</td>
</tr>
@ -61,33 +76,10 @@
<td>
<div id="sharedBtns">
<button id="btnRecvSendTransport">Consume</button>
<button id="remoteSoundControl">Unmute</button>
</div>
</td>
</tr>
<!-- <tr>
<td colspan="2">
<div id="sharedBtns">
<button id="btnRtpCapabilities">2. Get Rtp Capabilities</button>
<br />
<button id="btnDevice">3. Create Device</button>
</div>
</td>
</tr>
<tr>
<td>
<div id="sharedBtns">
<button id="btnCreateSendTransport">4. Create Send Transport</button>
<br />
<button id="btnConnectSendTransport">5. Connect Send Transport & Produce</button></td>
</div>
<td>
<div id="sharedBtns">
<button id="btnRecvSendTransport">6. Create Recv Transport</button>
<br />
<button id="btnConnectRecvTransport">7. Connect Recv Transport & Consume</button>
</div>
</td>
</tr> -->
</tbody>
</table>
<div id="closeCallBtn">

View File

@ -10,10 +10,24 @@ const ASSET_NAME = urlParams.get('assetName') || null;
const ASSET_TYPE = urlParams.get('assetType') || null;
let callId = parseInt(urlParams.get('callId')) || null;
const IS_PRODUCER = urlParams.get('producer') === 'true' ? true : false
let remoteVideo = document.getElementById('remoteVideo')
remoteVideo.defaultMuted = true
let produceAudio = false
console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId', callId, ' | IS_PRODUCER', IS_PRODUCER)
console.log('🟩 config', config)
const produceAudioSelector = document.getElementById('produceAudio');
produceAudioSelector.addEventListener('change', e => {
if(e.target.checked) {
produceAudio = true
console.log('produce audio');
} else {
produceAudio = false
}
});
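One behavioural note on the new checkbox (editor's sketch): produceAudio is read inside getLocalStream() and connectSendTransport(), so the box has to be ticked before 'Local Video' is clicked for an audio track to be captured at all:
// getLocalStream() below boils down to:
//   navigator.mediaDevices.getUserMedia({ audio: produceAudio, video: { ...resolution constraints... } })
// If the box is ticked only after the local stream has started, there is no audio track to produce.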
let socket, hub
let device
let rtpCapabilities
@ -23,6 +37,21 @@ let producerVideo
let producerAudio
let consumer
let originAssetId
let consumerVideo // local consumer video(consumer not transport)
let consumerAudio // local consumer audio(consumer not transport)
const remoteSoundControl = document.getElementById('remoteSoundControl');
remoteSoundControl.addEventListener('click', function handleClick() {
console.log('remoteSoundControl.textContent', remoteSoundControl.textContent);
if (remoteSoundControl.textContent === 'Unmute') {
remoteVideo.muted = false
remoteSoundControl.textContent = 'Mute';
} else {
remoteVideo.muted = true
remoteSoundControl.textContent = 'Unmute';
}
});
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
@ -144,7 +173,7 @@ const streamSuccess = (stream) => {
const getLocalStream = () => {
console.log('[getLocalStream]');
navigator.mediaDevices.getUserMedia({
audio: true,
audio: produceAudio ? true : false,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
@ -222,7 +251,7 @@ const createSendTransport = () => {
console.log('[createSendTransport]');
// see server's socket.on('createWebRtcTransport', sender?, ...)
// this is a call from Producer, so sender = true
socket.emit('createWebRtcTransport', { sender: true, callId }, (value) => {
socket.emit('createWebRtcTransport', { sender: true }, (value) => {
console.log(`[createWebRtcTransport] value: ${JSON.stringify(value)}`);
@ -293,34 +322,36 @@ const connectSendTransport = async () => {
// this action will trigger the 'connect' and 'produce' events above
// Produce video
producerVideo = await producerTransport.produce(videoParams)
let producerVideoHandler = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideoHandler', producerVideoHandler);
producerVideo.on('trackended', () => {
producerVideoHandler.on('trackended', () => {
console.log('track ended')
// close video track
})
producerVideo.on('transportclose', () => {
producerVideoHandler.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// Produce audio
producerAudio = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
if (produceAudio) {
let producerAudioHandler = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudioHandler', producerAudioHandler);
producerAudioHandler.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudioHandler.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
}
const answer = {
origin_asset_id: ASSET_ID,
@ -404,34 +435,95 @@ const connectRecvTransport = async () => {
await socket.emit('consume', {
rtpCapabilities: device.rtpCapabilities,
callId
}, async ({ params }) => {
if (params.error) {
console.log('Cannot Consume')
return
}
// Then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
producerId: params.producerId,
kind: params.kind,
rtpParameters: params.rtpParameters
})
// destructure and retrieve the video track from the producer
const { track } = consumer
}, async ({videoParams, audioParams}) => {
console.log(`[consume] 🟩 videoParams`, videoParams)
console.log(`[consume] 🟩 audioParams`, audioParams)
console.log('[consume] 🟩 consumerTransport', consumerTransport)
let stream = new MediaStream()
stream.addTrack(track)
// stream.removeTrack(track)
remoteVideo.srcObject = stream
socket.emit('consumer-resume')
console.log('consumer', consumer);
// Maybe the unit does not produce video or audio, so we must only consume what is produced
if (videoParams) {
console.log('❗ Have VIDEO stream to consume');
stream.addTrack(await getVideoTrask(videoParams))
} else {
console.log('❗ Don\'t have VIDEO stream to consume');
}
if (audioParams) {
console.log('❗ Have AUDIO stream to consume');
let audioTrack = await getAudioTrask(audioParams)
stream.addTrack(audioTrack)
} else {
console.log('❗ Don\'t have AUDIO stream to consume');
}
socket.emit('consumer-resume')
remoteVideo.srcObject = stream
remoteVideo.setAttribute('autoplay', true)
remoteVideo.play()
.then(() => {
console.log('remoteVideo PLAY')
})
.catch((error) => {
displayError(`remoteVideo PLAY ERROR | ${error.message}`)
})
})
}
const getVideoTrask = async (videoParams) => {
consumerVideo = await consumerTransport.consume({
id: videoParams.id,
producerId: videoParams.producerId,
kind: videoParams.kind,
rtpParameters: videoParams.rtpParameters
})
consumerVideo.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
return consumerVideo.track
}
const getAudioTrask = async (audioParams) => {
consumerAudio = await consumerTransport.consume({
id: audioParams.id,
producerId: audioParams.producerId,
kind: audioParams.kind,
rtpParameters: audioParams.rtpParameters
})
consumerAudio.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
const audioTrack = consumerAudio.track
// applyConstraints() on a MediaStreamTrack takes the constraints object directly
// (no `audio:` wrapper); highpassFilter is non-standard and may be ignored.
audioTrack.applyConstraints({
advanced: [
{ echoCancellation: { exact: true } },
{ autoGainControl: { exact: true } },
{ noiseSuppression: { exact: true } },
{ highpassFilter: { exact: true } }
]
}).catch((error) => console.log('applyConstraints error', error.message))
return audioTrack
}
const closeCall = () => {
console.log('closeCall');
@ -453,7 +545,30 @@ const closeCall = () => {
resetCallSettings()
}
const consume = async () => {
console.log('[consume]')
console.log('createRecvTransport Consumer')
await socket.emit('createWebRtcTransport', { sender: false, callId, dispatcher: true }, ({ params }) => {
if (params.error) {
console.log('createRecvTransport | createWebRtcTransport | Error', params.error)
return
}
consumerTransport = device.createRecvTransport(params)
consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
try {
await socket.emit('transport-recv-connect', {
dtlsParameters,
})
callback()
} catch (error) {
errback(error)
}
})
connectRecvTransport()
})
}
btnLocalVideo.addEventListener('click', getLocalStream)
btnRecvSendTransport.addEventListener('click', goConnect)
btnRecvSendTransport.addEventListener('click', consume)
btnCloseCall.addEventListener('click', closeCall)