Compare commits

3 Commits

4 changed files with 299 additions and 127 deletions

app.js
View File

@@ -13,40 +13,40 @@ try {
 }
 const mediasoup = require('mediasoup');
-let worker
+let worker;
 /**
  *
  * videoCalls - Dictionary of Object(s)
  * '<callId>': {
- *   router: Router, router
+ *   router: Router,
  *   initiatorAudioProducer: Producer,
  *   initiatorVideoProducer: Producer,
- *   receiverVideoProducer: Producer, producerVideo
- *   receiverAudioProducer: Producer, producerAudio
+ *   receiverVideoProducer: Producer,
+ *   receiverAudioProducer: Producer,
  *   initiatorProducerTransport: Producer Transport,
- *   receiverProducerTransport: Producer Transport, producerTransport
- *   initiatorConsumerVideo: Consumer, consumerVideo
- *   initiatorConsumerAudio: Consumer, consumerAudio
- *   initiatorConsumerTransport: Consumer Transport consumerTransport
+ *   receiverProducerTransport: Producer Transport,
+ *   initiatorConsumerVideo: Consumer,
+ *   initiatorConsumerAudio: Consumer,
+ *   initiatorConsumerTransport: Consumer Transport
  *   initiatorSockerId
  *   receiverSocketId
  * }
  *
 **/
-let videoCalls = {}
-let socketDetails = {}
+let videoCalls = {};
+let socketDetails = {};
 app.get('/', (_req, res) => {
   res.send('Hello from mediasoup app!')
-})
-app.use('/sfu', express.static(path.join(__dirname, 'public')))
+});
+app.use('/sfu', express.static(path.join(__dirname, 'public')));
 // SSL cert for HTTPS access
 const options = {
   key: fs.readFileSync(process.env.SERVER_KEY, 'utf-8'),
   cert: fs.readFileSync(process.env.SERVER_CERT, 'utf-8'),
-}
+};
 const httpsServer = https.createServer(options, app);
@@ -225,7 +225,6 @@ peers.on('connection', async socket => {
    */
   socket.on('createWebRtcTransport', async ({ sender }, callback) => {
     try {
-      console.log('🟥', socket.id, JSON.stringify(sender));
       const callId = socketDetails[socket.id];
       console.log(`[createWebRtcTransport] socket ${socket.id} | sender ${sender} | callId ${callId}`);
       if (sender) {
@@ -428,7 +427,8 @@ peers.on('connection', async socket => {
         callback({ videoParams, audioParams });
       } else if (!canConsumeVideo && canConsumeAudio) {
         const audioParams = await consumeAudio(callId, socket.id, rtpCapabilities)
-        callback({ videoParams: null, audioParams });
+        const data = { videoParams: null, audioParams };
+        callback(data);
       } else {
         console.log(`[consume] Can't consume | callId ${callId}`);
         callback(null);
@@ -449,14 +449,14 @@ peers.on('connection', async socket => {
       console.log(`[consumer-resume] callId ${callId}`)
       if (isInitiator(callId, socket.id)) {
+        console.log(`[consumer-resume] isInitiator true`);
         await videoCalls[callId].initiatorConsumerVideo.resume();
         await videoCalls[callId].initiatorConsumerAudio.resume();
       } else {
-        await videoCalls[callId].receiverConsumerVideo.resume();
-        await videoCalls[callId].receiverConsumerAudio.resume();
+        console.log(`[consumer-resume] isInitiator false`);
+        (videoCalls[callId].receiverConsumerVideo) && await videoCalls[callId].receiverConsumerVideo.resume();
+        (videoCalls[callId].receiverConsumerVideo) && await videoCalls[callId].receiverConsumerAudio.resume();
       }
-      // await videoCalls[callId].consumerVideo.resume();
-      // await videoCalls[callId].consumerAudio.resume();
     } catch (error) {
       console.log(`ERROR | consumer-resume | callId ${socketDetails[socket.id]} | ${error.message}`);
     }
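
Note: the consume handler above calls consumeAudio()/consumeVideo(), which are defined outside the hunks in this comparison. Below is a minimal sketch of what such a helper could look like with the mediasoup v3 server API, assuming consumers are created paused and resumed later by the 'consumer-resume' handler; the receiverConsumerTransport field name is an assumption, the other fields come from the videoCalls dictionary documented at the top of app.js.

  // Sketch only (not part of this diff): one possible shape for consumeAudio().
  // Assumes consumers are created paused; 'receiverConsumerTransport' is an assumed field name.
  const consumeAudio = async (callId, socketId, rtpCapabilities) => {
    const call = videoCalls[callId];
    const initiator = isInitiator(callId, socketId);
    // Consume the other party's audio producer on this party's consumer transport.
    const producer = initiator ? call.receiverAudioProducer : call.initiatorAudioProducer;
    const transport = initiator ? call.initiatorConsumerTransport : call.receiverConsumerTransport;
    const consumer = await transport.consume({
      producerId: producer.id,
      rtpCapabilities,
      paused: true, // resumed via the 'consumer-resume' event
    });
    if (initiator) {
      call.initiatorConsumerAudio = consumer;
    } else {
      call.receiverConsumerAudio = consumer;
    }
    // The client passes these params to its local consumerTransport.consume() call.
    return {
      id: consumer.id,
      producerId: producer.id,
      kind: consumer.kind,
      rtpParameters: consumer.rtpParameters,
    };
  };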

View File

@@ -20368,10 +20368,24 @@ const ASSET_NAME = urlParams.get('assetName') || null;
 const ASSET_TYPE = urlParams.get('assetType') || null;
 let callId = parseInt(urlParams.get('callId')) || null;
 const IS_PRODUCER = urlParams.get('producer') === 'true' ? true : false
+let remoteVideo = document.getElementById('remoteVideo')
+remoteVideo.defaultMuted = true
+let produceAudio = false
 console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId', callId, ' | IS_PRODUCER', IS_PRODUCER)
 console.log('🟩 config', config)
+produceAudioSelector = document.getElementById('produceAudio');
+produceAudioSelector.addEventListener('change', e => {
+  if (e.target.checked) {
+    produceAudio = true
+    console.log('produce audio');
+  } else {
+    produceAudio = false
+  }
+});
 let socket, hub
 let device
 let rtpCapabilities
@@ -20381,6 +20395,21 @@ let producerVideo
 let producerAudio
 let consumer
 let originAssetId
+let consumerVideo // local consumer video(consumer not transport)
+let consumerAudio // local consumer audio(consumer not transport)
+const remoteSoundControl = document.getElementById('remoteSoundControl');
+remoteSoundControl.addEventListener('click', function handleClick() {
+  console.log('remoteSoundControl.textContent', remoteSoundControl.textContent);
+  if (remoteSoundControl.textContent === 'Unmute') {
+    remoteVideo.muted = false
+    remoteSoundControl.textContent = 'Mute';
+  } else {
+    remoteVideo.muted = true
+    remoteSoundControl.textContent = 'Unmute';
+  }
+});
 // https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
 // https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
@@ -20502,7 +20531,7 @@ const streamSuccess = (stream) => {
 const getLocalStream = () => {
   console.log('[getLocalStream]');
   navigator.mediaDevices.getUserMedia({
-    audio: true,
+    audio: produceAudio ? true : false,
     video: {
       qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
       vga : { width: { ideal: 640 }, height: { ideal: 480 } },
@@ -20651,34 +20680,36 @@ const connectSendTransport = async () => {
   // this action will trigger the 'connect' and 'produce' events above
   // Produce video
-  producerVideo = await producerTransport.produce(videoParams)
+  let producerVideoHandler = await producerTransport.produce(videoParams)
   console.log('videoParams', videoParams);
   console.log('producerVideo', producerVideo);
-  producerVideo.on('trackended', () => {
+  producerVideoHandler.on('trackended', () => {
     console.log('track ended')
     // close video track
   })
-  producerVideo.on('transportclose', () => {
+  producerVideoHandler.on('transportclose', () => {
     console.log('transport ended')
     // close video track
   })
   // Produce audio
-  producerAudio = await producerTransport.produce(audioParams)
-  console.log('audioParams', audioParams);
-  console.log('producerAudio', producerAudio);
-  producerAudio.on('trackended', () => {
+  if (produceAudio) {
+    let producerAudioHandler = await producerTransport.produce(audioParams)
+    console.log('audioParams', audioParams);
+    console.log('producerAudio', producerAudio);
+    producerAudioHandler.on('trackended', () => {
     console.log('track ended')
     // close audio track
   })
-  producerAudio.on('transportclose', () => {
+    producerAudioHandler.on('transportclose', () => {
     console.log('transport ended')
     // close audio track
   })
+  }
   const answer = {
     origin_asset_id: ASSET_ID,
@@ -20762,36 +20793,95 @@ const connectRecvTransport = async () => {
   await socket.emit('consume', {
     rtpCapabilities: device.rtpCapabilities,
     callId
-  }, async ({ params }) => {
-    // if (params.error) {
-    //   console.log('Cannot Consume')
-    //   return
-    // }
-    console.log(`[connectRecvTransport] consume params ${params}`);
-    // Then consume with the local consumer transport
-    // which creates a consumer
-    consumer = await consumerTransport.consume({
-      id: params.id,
-      producerId: params.producerId,
-      kind: params.kind,
-      rtpParameters: params.rtpParameters
-    })
-    // destructure and retrieve the video track from the producer
-    const { track } = consumer
+  }, async ({ videoParams, audioParams }) => {
+    console.log(`[consume] 🟩 videoParams`, videoParams)
+    console.log(`[consume] 🟩 audioParams`, audioParams)
+    console.log('[consume] 🟩 consumerTransport', consumerTransport)
     let stream = new MediaStream()
-    stream.addTrack(track)
-    // stream.removeTrack(track)
-    remoteVideo.srcObject = stream
-    socket.emit('consumer-resume')
-    console.log('consumer', consumer);
+    // Maybe the unit does not produce video or audio, so we must only consume what is produced
+    if (videoParams) {
+      console.log('❗ Have VIDEO stream to consume');
+      stream.addTrack(await getVideoTrask(videoParams))
+    } else {
+      console.log('❗ Don\'t have VIDEO stream to consume');
+    }
+    if (audioParams) {
+      console.log('❗ Have AUDIO stream to consume');
+      let audioTrack = await getAudioTrask(audioParams)
+      stream.addTrack(audioTrack)
+    } else {
+      console.log('❗ Don\'t have AUDIO stream to consume');
+    }
+    socket.emit('consumer-resume')
+    remoteVideo.srcObject = stream
+    remoteVideo.setAttribute('autoplay', true)
+    remoteVideo.play()
+      .then(() => {
+        console.log('remoteVideo PLAY')
+      })
+      .catch((error) => {
+        displayError(`remoteVideo PLAY ERROR | ${error.message}`)
+      })
   })
 }
+const getVideoTrask = async (videoParams) => {
+  consumerVideo = await consumerTransport.consume({
+    id: videoParams.id,
+    producerId: videoParams.producerId,
+    kind: videoParams.kind,
+    rtpParameters: videoParams.rtpParameters
+  })
+  consumerVideo.on('transportclose', () => {
+    console.log('transport closed so consumer closed')
+  })
+  return consumerVideo.track
+}
+const getAudioTrask = async (audioParams) => {
+  consumerAudio = await consumerTransport.consume({
+    id: audioParams.id,
+    producerId: audioParams.producerId,
+    kind: audioParams.kind,
+    rtpParameters: audioParams.rtpParameters
+  })
+  consumerAudio.on('transportclose', () => {
+    console.log('transport closed so consumer closed')
+  })
+  const audioTrack = consumerAudio.track
+  audioTrack.applyConstraints({
+    audio: {
+      advanced: [
+        {
+          echoCancellation: {exact: true}
+        },
+        {
+          autoGainControl: {exact: true}
+        },
+        {
+          noiseSuppression: {exact: true}
+        },
+        {
+          highpassFilter: {exact: true}
+        }
+      ]
+    }
+  })
+  return audioTrack
+}
 const closeCall = () => {
   console.log('closeCall');
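
Note: getAudioTrask() above passes a getUserMedia-style object ({ audio: { advanced: [...] } }) to MediaStreamTrack.applyConstraints(), which expects the track-level constraint set directly; browsers may also ignore processing constraints on a received (remote) track, and highpassFilter is not a standard constraint. A sketch of the more conventional form, assuming the intent is to request audio processing on the consumed track:

  // Sketch only (not part of this diff); applyConstraints() returns a Promise.
  const applyAudioProcessing = async (audioTrack) => {
    try {
      await audioTrack.applyConstraints({
        echoCancellation: true,
        autoGainControl: true,
        noiseSuppression: true
      })
    } catch (error) {
      console.log(`applyConstraints failed | ${error.message}`)
    }
  }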

View File

@@ -34,6 +34,9 @@
 <body>
 <body>
   <div id="video">
+    <legend>Client options:</legend>
+    <input type="checkbox" id="produceAudio" name="produceAudio">
+    <label for="produceAudio">Produce audio</label><br>
     <table>
       <thead>
         <th>Local Video</th>
@@ -43,12 +46,24 @@
         <tr>
           <td>
             <div id="sharedBtns">
-              <video id="localVideo" autoplay class="video" muted></video>
+              <video
+                id="localVideo"
+                class="video"
+                autoplay
+                muted
+                playsinline
+              ></video>
             </div>
           </td>
           <td>
             <div id="sharedBtns">
-              <video id="remoteVideo" autoplay class="video" ></video>
+              <video
+                id="remoteVideo"
+                class="video"
+                autoplay
+                muted
+                playsinline
+              ></video>
            </div>
          </td>
        </tr>
@@ -61,33 +76,10 @@
           <td>
             <div id="sharedBtns">
               <button id="btnRecvSendTransport">Consume</button>
+              <button id="remoteSoundControl">Unmute</button>
             </div>
           </td>
         </tr>
-        <!-- <tr>
-          <td colspan="2">
-            <div id="sharedBtns">
-              <button id="btnRtpCapabilities">2. Get Rtp Capabilities</button>
-              <br />
-              <button id="btnDevice">3. Create Device</button>
-            </div>
-          </td>
-        </tr>
-        <tr>
-          <td>
-            <div id="sharedBtns">
-              <button id="btnCreateSendTransport">4. Create Send Transport</button>
-              <br />
-              <button id="btnConnectSendTransport">5. Connect Send Transport & Produce</button></td>
-            </div>
-          <td>
-            <div id="sharedBtns">
-              <button id="btnRecvSendTransport">6. Create Recv Transport</button>
-              <br />
-              <button id="btnConnectRecvTransport">7. Connect Recv Transport & Consume</button>
-            </div>
-          </td>
-        </tr> -->
       </tbody>
     </table>
     <div id="closeCallBtn">

View File

@@ -10,10 +10,24 @@ const ASSET_NAME = urlParams.get('assetName') || null;
 const ASSET_TYPE = urlParams.get('assetType') || null;
 let callId = parseInt(urlParams.get('callId')) || null;
 const IS_PRODUCER = urlParams.get('producer') === 'true' ? true : false
+let remoteVideo = document.getElementById('remoteVideo')
+remoteVideo.defaultMuted = true
+let produceAudio = false
 console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId', callId, ' | IS_PRODUCER', IS_PRODUCER)
 console.log('🟩 config', config)
+produceAudioSelector = document.getElementById('produceAudio');
+produceAudioSelector.addEventListener('change', e => {
+  if (e.target.checked) {
+    produceAudio = true
+    console.log('produce audio');
+  } else {
+    produceAudio = false
+  }
+});
 let socket, hub
 let device
 let rtpCapabilities
@@ -23,6 +37,21 @@ let producerVideo
 let producerAudio
 let consumer
 let originAssetId
+let consumerVideo // local consumer video(consumer not transport)
+let consumerAudio // local consumer audio(consumer not transport)
+const remoteSoundControl = document.getElementById('remoteSoundControl');
+remoteSoundControl.addEventListener('click', function handleClick() {
+  console.log('remoteSoundControl.textContent', remoteSoundControl.textContent);
+  if (remoteSoundControl.textContent === 'Unmute') {
+    remoteVideo.muted = false
+    remoteSoundControl.textContent = 'Mute';
+  } else {
+    remoteVideo.muted = true
+    remoteSoundControl.textContent = 'Unmute';
+  }
+});
 // https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
 // https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
@@ -144,7 +173,7 @@ const streamSuccess = (stream) => {
 const getLocalStream = () => {
   console.log('[getLocalStream]');
   navigator.mediaDevices.getUserMedia({
-    audio: true,
+    audio: produceAudio ? true : false,
     video: {
       qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
       vga : { width: { ideal: 640 }, height: { ideal: 480 } },
@@ -293,34 +322,36 @@ const connectSendTransport = async () => {
   // this action will trigger the 'connect' and 'produce' events above
   // Produce video
-  producerVideo = await producerTransport.produce(videoParams)
+  let producerVideoHandler = await producerTransport.produce(videoParams)
   console.log('videoParams', videoParams);
   console.log('producerVideo', producerVideo);
-  producerVideo.on('trackended', () => {
+  producerVideoHandler.on('trackended', () => {
     console.log('track ended')
     // close video track
   })
-  producerVideo.on('transportclose', () => {
+  producerVideoHandler.on('transportclose', () => {
     console.log('transport ended')
     // close video track
   })
   // Produce audio
-  producerAudio = await producerTransport.produce(audioParams)
-  console.log('audioParams', audioParams);
-  console.log('producerAudio', producerAudio);
-  producerAudio.on('trackended', () => {
+  if (produceAudio) {
+    let producerAudioHandler = await producerTransport.produce(audioParams)
+    console.log('audioParams', audioParams);
+    console.log('producerAudio', producerAudio);
+    producerAudioHandler.on('trackended', () => {
     console.log('track ended')
     // close audio track
   })
-  producerAudio.on('transportclose', () => {
+    producerAudioHandler.on('transportclose', () => {
     console.log('transport ended')
     // close audio track
   })
+  }
   const answer = {
     origin_asset_id: ASSET_ID,
@@ -404,36 +435,95 @@ const connectRecvTransport = async () => {
   await socket.emit('consume', {
     rtpCapabilities: device.rtpCapabilities,
     callId
-  }, async ({ params }) => {
-    // if (params.error) {
-    //   console.log('Cannot Consume')
-    //   return
-    // }
-    console.log(`[connectRecvTransport] consume params ${params}`);
-    // Then consume with the local consumer transport
-    // which creates a consumer
-    consumer = await consumerTransport.consume({
-      id: params.id,
-      producerId: params.producerId,
-      kind: params.kind,
-      rtpParameters: params.rtpParameters
-    })
-    // destructure and retrieve the video track from the producer
-    const { track } = consumer
+  }, async ({ videoParams, audioParams }) => {
+    console.log(`[consume] 🟩 videoParams`, videoParams)
+    console.log(`[consume] 🟩 audioParams`, audioParams)
+    console.log('[consume] 🟩 consumerTransport', consumerTransport)
     let stream = new MediaStream()
-    stream.addTrack(track)
-    // stream.removeTrack(track)
-    remoteVideo.srcObject = stream
-    socket.emit('consumer-resume')
-    console.log('consumer', consumer);
+    // Maybe the unit does not produce video or audio, so we must only consume what is produced
+    if (videoParams) {
+      console.log('❗ Have VIDEO stream to consume');
+      stream.addTrack(await getVideoTrask(videoParams))
+    } else {
+      console.log('❗ Don\'t have VIDEO stream to consume');
+    }
+    if (audioParams) {
+      console.log('❗ Have AUDIO stream to consume');
+      let audioTrack = await getAudioTrask(audioParams)
+      stream.addTrack(audioTrack)
+    } else {
+      console.log('❗ Don\'t have AUDIO stream to consume');
+    }
+    socket.emit('consumer-resume')
+    remoteVideo.srcObject = stream
+    remoteVideo.setAttribute('autoplay', true)
+    remoteVideo.play()
+      .then(() => {
+        console.log('remoteVideo PLAY')
+      })
+      .catch((error) => {
+        displayError(`remoteVideo PLAY ERROR | ${error.message}`)
+      })
   })
 }
+const getVideoTrask = async (videoParams) => {
+  consumerVideo = await consumerTransport.consume({
+    id: videoParams.id,
+    producerId: videoParams.producerId,
+    kind: videoParams.kind,
+    rtpParameters: videoParams.rtpParameters
+  })
+  consumerVideo.on('transportclose', () => {
+    console.log('transport closed so consumer closed')
+  })
+  return consumerVideo.track
+}
+const getAudioTrask = async (audioParams) => {
+  consumerAudio = await consumerTransport.consume({
+    id: audioParams.id,
+    producerId: audioParams.producerId,
+    kind: audioParams.kind,
+    rtpParameters: audioParams.rtpParameters
+  })
+  consumerAudio.on('transportclose', () => {
+    console.log('transport closed so consumer closed')
+  })
+  const audioTrack = consumerAudio.track
+  audioTrack.applyConstraints({
+    audio: {
+      advanced: [
+        {
+          echoCancellation: {exact: true}
+        },
+        {
+          autoGainControl: {exact: true}
+        },
+        {
+          noiseSuppression: {exact: true}
+        },
+        {
+          highpassFilter: {exact: true}
+        }
+      ]
+    }
+  })
+  return audioTrack
+}
 const closeCall = () => {
   console.log('closeCall');
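
Note: displayError(), used in the remoteVideo.play().catch() handler in both client files, is not defined within this comparison. A minimal sketch of such a helper, assuming it only logs the message and shows it on the page (the errorBox element id is hypothetical):

  // Sketch only (not part of this diff); 'errorBox' is an assumed element id.
  const displayError = (message) => {
    console.error(message)
    const errorBox = document.getElementById('errorBox')
    if (errorBox) {
      errorBox.textContent = message
    }
  }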