Compare commits


62 Commits

SHA1 Message Date
c823f7578c LH-265: Update client 2022-11-29 13:27:33 +02:00
6cc14cdf30 LH-265: Update documentation and README.md 2022-11-29 13:27:12 +02:00
4bb23def42 Update server 2022-11-29 11:32:03 +02:00
fc745a5879 Update server 2022-11-29 11:09:10 +02:00
742d67f2e3 Update server 2022-11-29 11:08:12 +02:00
e22093d97e Update server 2022-11-29 10:27:55 +02:00
7634a18465 Update server 2022-11-29 10:22:02 +02:00
d17b035526 Update server 2022-11-29 04:03:06 +02:00
a21451e46d Update server 2022-11-29 03:59:42 +02:00
df0cb81a8e Update server 2022-11-29 03:55:34 +02:00
ac8c651a9d Update server 2022-11-29 03:52:15 +02:00
9111c4e245 Update server 2022-11-29 03:50:45 +02:00
7a2d02dcda Update server 2022-11-29 03:44:02 +02:00
39efdd12b7 Update server 2022-11-29 03:41:00 +02:00
0bdc6fac3a Update server 2022-11-29 03:17:28 +02:00
ae7a8ed9ce Update server 2022-11-29 03:16:15 +02:00
9feaebf8a7 Update server 2022-11-29 02:46:12 +02:00
85110b7f5c Update server 2022-11-29 02:44:40 +02:00
d047cdf7d1 Update server 2022-11-29 02:44:01 +02:00
753b476462 Update server 2022-11-29 02:42:13 +02:00
359c7c784e Update server 2022-11-29 02:40:25 +02:00
5169d0d49f Update server 2022-11-29 02:39:11 +02:00
a3b083fe24 Update server 2022-11-29 02:37:55 +02:00
46d3499e3d Update server 2022-11-29 02:34:48 +02:00
38b95d5246 Update server 2022-11-29 02:21:29 +02:00
984b2b892e Update server 2022-11-29 02:20:05 +02:00
e085d22e89 Update server 2022-11-29 02:16:44 +02:00
3bc15fdef1 Update server 2022-11-29 02:09:37 +02:00
67042185c4 Update server 2022-11-29 01:59:53 +02:00
c92dff9bfe Update server 2022-11-28 23:40:56 +02:00
3605ca0468 Update server 2022-11-28 23:40:08 +02:00
1edbcb2179 Merge branch 'LH-265-enable-audio-in-mediasoup' of https://git.safemobile.org/Safemobile/mediasoup into LH-265-enable-audio-in-mediasoup 2022-11-25 10:00:28 +02:00
adbcf6c2bc Update server 2022-11-25 10:00:17 +02:00
c862224ead LH-265: Update client and server 2022-11-25 09:58:53 +02:00
c02a7c7380 Update server 2022-11-24 23:56:42 +02:00
3387a362a6 Update server 2022-11-24 23:30:27 +02:00
21dffefa8c Update server 2022-11-24 23:29:03 +02:00
1369491529 Update server 2022-11-24 23:22:55 +02:00
56bdbca537 Update server 2022-11-24 23:09:18 +02:00
8444809910 Update server 2022-11-24 23:06:02 +02:00
cd84c534ce Update server 2022-11-24 22:27:33 +02:00
038bdb99bc Update server 2022-11-24 22:18:37 +02:00
d94ea12a40 Update server 2022-11-24 17:08:54 +02:00
1148532a9b Update server 2022-11-24 17:01:26 +02:00
3561bb13a6 Update server 2022-11-24 16:59:04 +02:00
22ead926b0 Update server 2022-11-24 16:57:26 +02:00
c6edb2947d Update server 2022-11-24 16:46:00 +02:00
e59f134a68 Update server 2022-11-24 16:37:44 +02:00
aad96b72f2 Update server 2022-11-24 16:36:36 +02:00
3e4c0a32bc Update server 2022-11-24 16:36:00 +02:00
2a7bd42247 Update server 2022-11-24 16:34:54 +02:00
f2c0794bf4 Update server 2022-11-24 16:22:56 +02:00
950298c4f6 Update server 2022-11-24 16:21:22 +02:00
6e74083733 Update server 2022-11-24 13:49:04 +02:00
8ef6c2abb0 Update server 2022-11-24 13:43:33 +02:00
2a86042c80 Update server 2022-11-24 13:41:24 +02:00
56b8e2ea74 Update server 2022-11-24 13:38:28 +02:00
6c42814229 Update server 2022-11-24 13:37:42 +02:00
e65b7e0d7c Update server 2022-11-24 13:36:21 +02:00
aa7c2aea90 Update server 2022-11-24 13:35:32 +02:00
458342c0d2 Update server 2022-11-24 13:32:45 +02:00
fa5a1a5ae7 Update server 2022-11-23 17:56:18 +02:00
5 changed files with 377 additions and 358 deletions

View File

@@ -22,17 +22,20 @@
2. Run the `npm run start:prod` command to start the server in production mode.
(To view the server logs, use `pm2 logs video-server`.)
---
### Web client
- The server starts on port 3000 by default, and the SSL certificates have to be configured
- The web client can be accessed at the /sfu path
ex: http://localhost:3000/sfu/?assetId=1&accountId=1&producer=true&assetName=Adi&assetType=linx
ex: https://HOST/sfu/?assetId=1&accountId=1&producer=true&dest_asset_id=75&assetName=Adi
assetId = the asset id of the unit you are testing with
accountId = the account id of the unit you are testing with
producer = always true, because you are the producer
(it can be set to false, but then another client must join with producer=true)
assetName = the asset name of the unit you are testing with
assetType = the asset type of the unit you are testing with
dest_asset_id = the asset id of the party you are calling (see the parsing sketch after this list)
- To make a call with this client, you need a microphone and permission to use it
- For any client-side change, run `npm run watch` to regenerate the bundle.js used by the web client
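A minimal sketch of how these query-string values could be read in the browser (an illustration only, not code from this repository; the bundled client may parse them differently):

const query = new URLSearchParams(window.location.search)
const assetId     = query.get('assetId')
const accountId   = query.get('accountId')
const isProducer  = query.get('producer') === 'true'
const destAssetId = query.get('dest_asset_id')   // only used when placing a call
const assetName   = query.get('assetName')
const assetType   = query.get('assetType')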
### Demo project
The demo project that was used initially and then adapted to our needs: `https://github.com/jamalag/mediasoup2`

308
app.js
View File

@@ -91,76 +91,80 @@ worker = createWorker();
// list of media codecs supported by mediasoup ...
// https://github.com/versatica/mediasoup/blob/v3/src/supportedRtpCapabilities.ts
const mediaCodecs = [
// {
// kind : 'audio',
// mimeType : 'audio/opus',
// clockRate : 48000,
// channels : 2
// },
// {
// kind : 'video',
// mimeType : 'video/VP8',
// clockRate : 90000,
// parameters :
// {
// 'x-google-start-bitrate' : 1000
// }
// },
// {
// kind : 'video',
// mimeType : 'video/VP9',
// clockRate : 90000,
// parameters :
// {
// 'profile-id' : 2,
// 'x-google-start-bitrate' : 1000
// }
// },
// {
// kind : 'video',
// mimeType : 'video/h264',
// clockRate : 90000,
// parameters :
// {
// 'packetization-mode' : 1,
// 'profile-level-id' : '4d0032',
// 'level-asymmetry-allowed' : 1,
// 'x-google-start-bitrate' : 1000
// }
// },
// {
// kind : 'video',
// mimeType : 'video/h264',
// clockRate : 90000,
// parameters :
// {
// 'packetization-mode' : 1,
// 'profile-level-id' : '42e01f',
// 'level-asymmetry-allowed' : 1,
// 'x-google-start-bitrate' : 1000
// }
// }
{
kind: 'audio',
mimeType: 'audio/opus',
clockRate: 48000,
channels: 2,
kind : 'audio',
mimeType : 'audio/opus',
clockRate : 48000,
channels : 2
},
{
kind: 'video',
mimeType: 'video/VP8',
clockRate: 90000,
parameters: {
'x-google-start-bitrate': 1000,
kind : 'video',
mimeType : 'video/VP8',
clockRate : 90000,
parameters :
{
'x-google-start-bitrate' : 1000
},
channels : 2
},
{
kind : 'video',
mimeType : 'video/VP9',
clockRate : 90000,
parameters :
{
'profile-id' : 2,
'x-google-start-bitrate' : 1000
}
},
{
kind : 'video',
mimeType : 'video/h264',
clockRate : 90000,
parameters :
{
'packetization-mode' : 1,
'profile-level-id' : '4d0032',
'level-asymmetry-allowed' : 1,
'x-google-start-bitrate' : 1000
}
},
{
kind : 'video',
mimeType : 'video/h264',
clockRate : 90000,
parameters :
{
'packetization-mode' : 1,
'profile-level-id' : '42e01f',
'level-asymmetry-allowed' : 1,
'x-google-start-bitrate' : 1000
}
}
// {
// kind: 'audio',
// mimeType: 'audio/opus',
// clockRate: 48000,
// channels: 2,
// },
// {
// kind: 'video',
// mimeType: 'video/VP8',
// clockRate: 90000,
// parameters: {
// 'x-google-start-bitrate': 1000,
// },
// },
];
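For context, the codec list above is what the server hands to mediasoup when it creates a router for a call; a minimal sketch of that call, following the standard mediasoup v3 API (the router creation itself is outside this hunk, and videoCalls/callId are the structures used elsewhere in app.js):

// https://mediasoup.org/documentation/v3/mediasoup/api/#worker-createRouter
const router = await worker.createRouter({ mediaCodecs })
videoCalls[callId] = { ...videoCalls[callId], router }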
const closeCall = (callId) => {
try {
if (callId && videoCalls[callId]) {
videoCalls[callId].producer?.close();
videoCalls[callId].consumer?.close();
videoCalls[callId].producerVideo?.close();
videoCalls[callId].producerAudio?.close();
videoCalls[callId].consumerVideo?.close();
videoCalls[callId].consumerAudio?.close();
videoCalls[callId]?.consumerTransport?.close();
videoCalls[callId]?.producerTransport?.close();
videoCalls[callId]?.router?.close();
@@ -279,29 +283,54 @@ peers.on('connection', async socket => {
- The event sent by the client (PRODUCER) after successfully connecting to producerTransport
- For the router with id callId, produce() is called on producerTransport
- Registers the 'transportclose' handler on the producer
*/
socket.on('transport-produce', async ({ kind, rtpParameters, appData }, callback) => {
try {
const callId = socketDetails[socket.id];
if (typeof rtpParameters === 'string') rtpParameters = JSON.parse(rtpParameters);
*/
socket.on('transport-produce', async ({ kind, rtpParameters, appData }, callback) => {
try {
const callId = socketDetails[socket.id];
if (typeof rtpParameters === 'string') rtpParameters = JSON.parse(rtpParameters);
console.log(`[transport-produce] kind: ${kind} | socket.id: ${socket.id} | callId: ${callId}`);
console.log('kind', kind);
console.log('rtpParameters', rtpParameters);
console.log('[transport-produce] | socket.id', socket.id, '| callId', callId);
videoCalls[callId].producer = await videoCalls[callId].producerTransport.produce({
kind,
rtpParameters,
});
console.log(`[transport-produce] Producer ID: ${videoCalls[callId].producer.id} | kind: ${videoCalls[callId].producer.kind}`);
videoCalls[callId].producer.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
if (kind === 'video') {
videoCalls[callId].producerVideo = await videoCalls[callId].producerTransport.produce({
kind,
rtpParameters,
});
// Send back to the client the Producer's id
callback && callback({
id: videoCalls[callId].producer.id
});
console.log(`[transport-produce] Producer ID: ${videoCalls[callId].producerVideo.id} | kind: ${videoCalls[callId].producerVideo.kind}`);
videoCalls[callId].producerVideo.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
// Send back to the client the Producer's id
callback && callback({
id: videoCalls[callId].producerVideo.id
});
} else if (kind === 'audio') {
videoCalls[callId].producerAudio = await videoCalls[callId].producerTransport.produce({
kind,
rtpParameters,
});
console.log(`[transport-produce] Producer ID: ${videoCalls[callId].producerAudio.id} | kind: ${videoCalls[callId].producerAudio.kind}`);
videoCalls[callId].producerAudio.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
// Send back to the client the Producer's id
callback && callback({
id: videoCalls[callId].producerAudio.id
});
}
} catch (error) {
console.log(`ERROR | transport-produce | callId ${socketDetails[socket.id]} | ${error.message}`);
}
@@ -330,48 +359,36 @@ peers.on('connection', async socket => {
*/
socket.on('consume', async ({ rtpCapabilities }, callback) => {
try {
console.log(`[consume] rtpCapabilities: ${JSON.stringify(rtpCapabilities)}`);
const callId = socketDetails[socket.id];
console.log('[consume] callId', callId);
// Check if the router can consume the specified producer
if (videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].producer.id,
const canConsumeVideo = !!videoCalls[callId].producerVideo && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].producerVideo.id,
rtpCapabilities
})) {
console.log('[consume] Can consume', callId);
// Transport can now consume and return a consumer
videoCalls[callId].consumer = await videoCalls[callId].consumerTransport.consume({
producerId: videoCalls[callId].producer.id,
rtpCapabilities,
paused: true,
});
})
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
videoCalls[callId].consumer.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport close from consumer', callId);
closeCall(callId);
});
const canConsumeAudio = !!videoCalls[callId].producerAudio && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].producerAudio.id,
rtpCapabilities
})
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-producerclose
videoCalls[callId].consumer.on('producerclose', () => {
const callId = socketDetails[socket.id];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
console.log('[consume] canConsumeVideo', canConsumeVideo);
console.log('[consume] canConsumeAudio', canConsumeAudio);
// From the consumer extract the following params to send back to the Client
const params = {
id: videoCalls[callId].consumer.id,
producerId: videoCalls[callId].producer.id,
kind: videoCalls[callId].consumer.kind,
rtpParameters: videoCalls[callId].consumer.rtpParameters,
};
// Send the parameters to the client
callback({ params });
if (canConsumeVideo && !canConsumeAudio) {
console.log('1');
const videoParams = await consumeVideo(callId, rtpCapabilities)
console.log('videoParams', videoParams);
callback({ videoParams, audioParams: null });
} else if (canConsumeVideo && canConsumeAudio) {
console.log('2');
const videoParams = await consumeVideo(callId, rtpCapabilities)
const audioParams = await consumeAudio(callId, rtpCapabilities)
callback({ videoParams, audioParams });
} else {
console.log(`[canConsume] Can't consume | callId ${callId}`);
console.log(`[consume] Can't consume | callId ${callId}`);
callback(null);
}
} catch (error) {
@@ -388,13 +405,71 @@ peers.on('connection', async socket => {
try {
const callId = socketDetails[socket.id];
console.log(`[consumer-resume] callId ${callId}`)
await videoCalls[callId].consumer.resume();
await videoCalls[callId].consumerVideo.resume();
await videoCalls[callId].consumerAudio.resume();
} catch (error) {
console.log(`ERROR | consumer-resume | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
});
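Note that when only video can be consumed, consumerAudio is never created, so the second resume() above lands in the catch block; a defensive variant (a sketch, not part of this commit) would guard both calls:

socket.on('consumer-resume', async () => {
  const callId = socketDetails[socket.id];
  await videoCalls[callId]?.consumerVideo?.resume();
  await videoCalls[callId]?.consumerAudio?.resume();
});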
const consumeVideo = async (callId, rtpCapabilities) => {
videoCalls[callId].consumerVideo = await videoCalls[callId].consumerTransport.consume({
producerId: videoCalls[callId].producerVideo.id,
rtpCapabilities,
paused: true,
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
videoCalls[callId].consumerVideo.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport close from consumer', callId);
closeCall(callId);
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-producerclose
videoCalls[callId].consumerVideo.on('producerclose', () => {
const callId = socketDetails[socket.id];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].consumerVideo.id,
producerId: videoCalls[callId].producerVideo.id,
kind: 'video',
rtpParameters: videoCalls[callId].consumerVideo.rtpParameters,
}
}
const consumeAudio = async (callId, rtpCapabilities) => {
videoCalls[callId].consumerAudio = await videoCalls[callId].consumerTransport.consume({
producerId: videoCalls[callId].producerAudio.id,
rtpCapabilities,
paused: true,
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
videoCalls[callId].consumerAudio.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport close from consumer', callId);
closeCall(callId);
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-producerclose
videoCalls[callId].consumerAudio.on('producerclose', () => {
const callId = socketDetails[socket.id];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].consumerAudio.id,
producerId: videoCalls[callId].producerAudio.id,
kind: 'audio',
rtpParameters: videoCalls[callId].consumerAudio.rtpParameters,
}
}
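For reference, a receiving client would typically call consume() on its RecvTransport once per param set returned by the 'consume' callback above; a minimal sketch under that assumption (remoteVideo and remoteAudio are hypothetical element names, not identifiers from this diff):

socket.emit('consume', { rtpCapabilities: device.rtpCapabilities }, async (response) => {
  if (!response) return            // the server answers null when nothing can be consumed
  const { videoParams, audioParams } = response
  if (videoParams) {
    const videoConsumer = await consumerTransport.consume(videoParams)
    remoteVideo.srcObject = new MediaStream([videoConsumer.track])
  }
  if (audioParams) {
    const audioConsumer = await consumerTransport.consume(audioParams)
    remoteAudio.srcObject = new MediaStream([audioConsumer.track])
  }
  socket.emit('consumer-resume')   // consumers are created paused on the server
})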
/*
- Called from the 'createWebRtcTransport' event and assigned to the consumer or producer transport
- It returns the parameters the client needs to create its RecvTransport
@@ -442,6 +517,7 @@ const createWebRtcTransportLayer = async (callId, callback) => {
dtlsParameters: transport.dtlsParameters,
};
console.log('[createWebRtcTransportLayer] callback params', params);
// Send back to the client the params
callback({ params });

View File

@@ -20373,6 +20373,36 @@ console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId',
console.log('🟩 config', config)
let socket, hub
let device
let rtpCapabilities
let producerTransport
let consumerTransport
let producerVideo
let producerAudio
let consumer
let originAssetId
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
let videoParams = {
encodings: [
{ scaleResolutionDownBy: 4, maxBitrate: 500000 },
{ scaleResolutionDownBy: 2, maxBitrate: 1000000 },
{ scaleResolutionDownBy: 1, maxBitrate: 5000000 },
{ scalabilityMode: 'S3T3_KEY' }
],
codecOptions: {
videoGoogleStartBitrate: 1000
}
}
let audioParams = {
codecOptions :
{
opusStereo : true,
opusDtx : true
}
}
setTimeout(() => {
hub = io(config.hubAddress)
@@ -20448,117 +20478,49 @@ setTimeout(() => {
}, 1600);
let device
let rtpCapabilities
let producerTransport
let consumerTransport
let producerVideo
let producerAudio
let consumer
let originAssetId
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
let videoParams = {
// encodings: [
// { scaleResolutionDownBy: 4, maxBitrate: 500000 },
// { scaleResolutionDownBy: 2, maxBitrate: 1000000 },
// { scaleResolutionDownBy: 1, maxBitrate: 5000000 },
// { scalabilityMode: 'S3T3_KEY' }
// ],
// codecOptions: {
// videoGoogleStartBitrate: 1000
// }
encodings: [
{
rid: 'r0',
maxBitrate: 100000,
scalabilityMode: 'S1T3',
},
{
rid: 'r1',
maxBitrate: 300000,
scalabilityMode: 'S1T3',
},
{
rid: 'r2',
maxBitrate: 900000,
scalabilityMode: 'S1T3',
},
],
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerCodecOptions
codecOptions: {
videoGoogleStartBitrate: 1000
}
}
let audioParams = {
codecOptions :
{
opusStereo : true,
opusDtx : true
}
}
const streamSuccess = (stream) => {
// console.log('[streamSuccess] device', device);
// localVideo.srcObject = stream
// console.log('stream', stream);
// const videoTrack = stream.getVideoTracks()[0]
// const audioTrack = stream.getAudioTracks()[0]
// videoParams = {
// track: videoTrack,
// // codec : device.rtpCapabilities.codecs.find((codec) => codec.mimeType.toLowerCase() === 'video/vp9'),
// // codec : 'video/vp9',
// ...videoParams
// }
// audioParams = {
// track: audioTrack,
// ...audioParams
// }
// console.log('[streamSuccess] videoParams', videoParams, ' | audioParams', audioParams);
// goConnect()
console.log('[streamSuccess]');
console.log('[streamSuccess] device', device);
localVideo.srcObject = stream
const track = stream.getVideoTracks()[0]
console.log('stream', stream);
const videoTrack = stream.getVideoTracks()[0]
const audioTrack = stream.getAudioTracks()[0]
videoParams = {
track,
track: videoTrack,
...videoParams
}
audioParams = {
track: audioTrack,
...audioParams
}
console.log('[streamSuccess] videoParams', videoParams, ' | audioParams', audioParams);
goConnect()
}
const getLocalStream = () => {
console.log('[getLocalStream]');
// navigator.mediaDevices.getUserMedia({
// audio: false,
// video: {
// qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
// vga : { width: { ideal: 640 }, height: { ideal: 480 } },
// hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
// }
// })
navigator.mediaDevices.getUserMedia({
audio: false,
audio: true,
video: {
width: {
min: 640,
max: 1920,
},
height: {
min: 400,
max: 1080,
}
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
}
})
.then(streamSuccess)
.catch(error => {
console.log(error.message)
})
navigator.permissions.query(
{ name: 'microphone' }
).then((permissionStatus) =>{
console.log('🟨 [PERMISSION] permissionStatus', permissionStatus); // granted, denied, prompt
// If microphone permission is missing, execution is blocked and "Permission denied" is displayed
})
}
const goConnect = () => {
@@ -20575,7 +20537,6 @@ const goCreateTransport = () => {
// server side to send/receive media
const createDevice = async () => {
try {
console.log('[createDevice] 1 device', device);
device = new mediasoupClient.Device()
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-load
@@ -20586,7 +20547,7 @@ const createDevice = async () => {
})
console.log('Device RTP Capabilities', device.rtpCapabilities)
console.log('[createDevice] 2 device', device);
console.log('[createDevice] device', device);
// once the device loads, create transport
goCreateTransport()
@@ -20619,16 +20580,17 @@ const createSendTransport = () => {
console.log('[createSendTransport]');
// see server's socket.on('createWebRtcTransport', sender?, ...)
// this is a call from Producer, so sender = true
socket.emit('createWebRtcTransport', { sender: true, callId }, ({ params }) => {
socket.emit('createWebRtcTransport', { sender: true, callId }, (value) => {
console.log(`[createWebRtcTransport] value: ${JSON.stringify(value)}`);
const params = value.params;
// The server sends back params needed
// to create Send Transport on the client side
if (params.error) {
console.log(params.error)
return
}
console.log('[createWebRtcTransport] params', params)
// creates a new WebRTC Transport to send media
// based on the server's producer transport params
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#TransportOptions
@@ -20657,7 +20619,7 @@ const createSendTransport = () => {
console.log('[produce] parameters', parameters)
try {
// tell the server to create a Producer
// Tell the server to create a Producer
// with the following parameters and produce
// and expect back a server side producer id
// see server's socket.on('transport-produce', ...)
@@ -20683,33 +20645,40 @@ const connectSendTransport = async () => {
console.log('[connectSendTransport] producerTransport');
// we now call produce() to instruct the producer transport
// We now call produce() to instruct the producer transport
// to send media to the Router
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
// this action will trigger the 'connect' and 'produce' events above
// Produce video
producerVideo = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideo', producerVideo);
producerVideo.on('trackended', () => {
console.log('track ended')
// close video track
})
})
producerVideo.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// Produce audio
producerAudio = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close audio track
})
// producerAudio = await producerTransport.produce(audioParams)
// console.log('producerAudio', producerAudio);
// producerAudio.on('trackended', () => {
// console.log('track ended')
// // close video track
// })
// producerAudio.on('transportclose', () => {
// console.log('transport ended')
// // close video track
// })
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
const answer = {
origin_asset_id: ASSET_ID,
@@ -20719,7 +20688,7 @@ const connectSendTransport = async () => {
origin_asset_type_name: ASSET_TYPE,
origin_asset_name: ASSET_NAME,
video_call_id: callId,
answer: 'accepted', // answer: 'rejected'
answer: 'accepted', // answer: accepted/rejected
};
console.log('SEND answer', answer);
@@ -20735,7 +20704,7 @@ const connectSendTransport = async () => {
const createRecvTransport = async () => {
console.log('createRecvTransport');
// see server's socket.on('consume', sender?, ...)
// See server's socket.on('consume', sender?, ...)
// this is a call from Consumer, so sender = false
await socket.emit('createWebRtcTransport', { sender: false, callId }, ({ params }) => {
// The server sends back params needed
@@ -20747,13 +20716,13 @@ const createRecvTransport = async () => {
console.log('[createRecvTransport] params', params)
// creates a new WebRTC Transport to receive media
// Creates a new WebRTC Transport to receive media
// based on server's consumer transport params
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-createRecvTransport
consumerTransport = device.createRecvTransport(params)
// https://mediasoup.org/documentation/v3/communication-between-client-and-server/#producing-media
// this event is raised when a first call to transport.produce() is made
// This event is raised when a first call to transport.produce() is made
// see connectRecvTransport() below
consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
try {
@@ -20787,7 +20756,7 @@ const resetCallSettings = () => {
const connectRecvTransport = async () => {
console.log('connectRecvTransport');
// for consumer, we need to tell the server first
// For consumer, we need to tell the server first
// to create a consumer based on the rtpCapabilities and consume
// if the router can consume, it will send back a set of params as below
await socket.emit('consume', {
@@ -20799,7 +20768,7 @@ const connectRecvTransport = async () => {
return
}
// then consume with the local consumer transport
// Then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
@@ -20842,6 +20811,7 @@ const closeCall = () => {
resetCallSettings()
}
btnLocalVideo.addEventListener('click', getLocalStream)
btnRecvSendTransport.addEventListener('click', goConnect)
btnCloseCall.addEventListener('click', closeCall)

View File

@@ -43,7 +43,7 @@
<tr>
<td>
<div id="sharedBtns">
<video id="localVideo" autoplay class="video" ></video>
<video id="localVideo" autoplay class="video" muted></video>
</div>
</td>
<td>

View File

@@ -15,6 +15,36 @@ console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId',
console.log('🟩 config', config)
let socket, hub
let device
let rtpCapabilities
let producerTransport
let consumerTransport
let producerVideo
let producerAudio
let consumer
let originAssetId
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
let videoParams = {
encodings: [
{ scaleResolutionDownBy: 4, maxBitrate: 500000 },
{ scaleResolutionDownBy: 2, maxBitrate: 1000000 },
{ scaleResolutionDownBy: 1, maxBitrate: 5000000 },
{ scalabilityMode: 'S3T3_KEY' }
],
codecOptions: {
videoGoogleStartBitrate: 1000
}
}
let audioParams = {
codecOptions :
{
opusStereo : true,
opusDtx : true
}
}
setTimeout(() => {
hub = io(config.hubAddress)
@@ -90,117 +120,49 @@ setTimeout(() => {
}, 1600);
let device
let rtpCapabilities
let producerTransport
let consumerTransport
let producerVideo
let producerAudio
let consumer
let originAssetId
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
let videoParams = {
// encodings: [
// { scaleResolutionDownBy: 4, maxBitrate: 500000 },
// { scaleResolutionDownBy: 2, maxBitrate: 1000000 },
// { scaleResolutionDownBy: 1, maxBitrate: 5000000 },
// { scalabilityMode: 'S3T3_KEY' }
// ],
// codecOptions: {
// videoGoogleStartBitrate: 1000
// }
encodings: [
{
rid: 'r0',
maxBitrate: 100000,
scalabilityMode: 'S1T3',
},
{
rid: 'r1',
maxBitrate: 300000,
scalabilityMode: 'S1T3',
},
{
rid: 'r2',
maxBitrate: 900000,
scalabilityMode: 'S1T3',
},
],
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerCodecOptions
codecOptions: {
videoGoogleStartBitrate: 1000
}
}
let audioParams = {
codecOptions :
{
opusStereo : true,
opusDtx : true
}
}
const streamSuccess = (stream) => {
// console.log('[streamSuccess] device', device);
// localVideo.srcObject = stream
// console.log('stream', stream);
// const videoTrack = stream.getVideoTracks()[0]
// const audioTrack = stream.getAudioTracks()[0]
// videoParams = {
// track: videoTrack,
// // codec : device.rtpCapabilities.codecs.find((codec) => codec.mimeType.toLowerCase() === 'video/vp9'),
// // codec : 'video/vp9',
// ...videoParams
// }
// audioParams = {
// track: audioTrack,
// ...audioParams
// }
// console.log('[streamSuccess] videoParams', videoParams, ' | audioParams', audioParams);
// goConnect()
console.log('[streamSuccess]');
console.log('[streamSuccess] device', device);
localVideo.srcObject = stream
const track = stream.getVideoTracks()[0]
console.log('stream', stream);
const videoTrack = stream.getVideoTracks()[0]
const audioTrack = stream.getAudioTracks()[0]
videoParams = {
track,
track: videoTrack,
...videoParams
}
audioParams = {
track: audioTrack,
...audioParams
}
console.log('[streamSuccess] videoParams', videoParams, ' | audioParams', audioParams);
goConnect()
}
const getLocalStream = () => {
console.log('[getLocalStream]');
// navigator.mediaDevices.getUserMedia({
// audio: false,
// video: {
// qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
// vga : { width: { ideal: 640 }, height: { ideal: 480 } },
// hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
// }
// })
navigator.mediaDevices.getUserMedia({
audio: false,
audio: true,
video: {
width: {
min: 640,
max: 1920,
},
height: {
min: 400,
max: 1080,
}
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
}
})
.then(streamSuccess)
.catch(error => {
console.log(error.message)
})
navigator.permissions.query(
{ name: 'microphone' }
).then((permissionStatus) =>{
console.log('🟨 [PERMISSION] permissionStatus', permissionStatus); // granted, denied, prompt
// If microphone permission is missing, execution is blocked and "Permission denied" is displayed
})
}
const goConnect = () => {
@@ -217,7 +179,6 @@ const goCreateTransport = () => {
// server side to send/receive media
const createDevice = async () => {
try {
console.log('[createDevice] 1 device', device);
device = new mediasoupClient.Device()
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-load
@@ -228,7 +189,7 @@ const createDevice = async () => {
})
console.log('Device RTP Capabilities', device.rtpCapabilities)
console.log('[createDevice] 2 device', device);
console.log('[createDevice] device', device);
// once the device loads, create transport
goCreateTransport()
@@ -261,16 +222,17 @@ const createSendTransport = () => {
console.log('[createSendTransport]');
// see server's socket.on('createWebRtcTransport', sender?, ...)
// this is a call from Producer, so sender = true
socket.emit('createWebRtcTransport', { sender: true, callId }, ({ params }) => {
socket.emit('createWebRtcTransport', { sender: true, callId }, (value) => {
console.log(`[createWebRtcTransport] value: ${JSON.stringify(value)}`);
const params = value.params;
// The server sends back params needed
// to create Send Transport on the client side
if (params.error) {
console.log(params.error)
return
}
console.log('[createWebRtcTransport] params', params)
// creates a new WebRTC Transport to send media
// based on the server's producer transport params
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#TransportOptions
@@ -299,7 +261,7 @@ const createSendTransport = () => {
console.log('[produce] parameters', parameters)
try {
// tell the server to create a Producer
// Tell the server to create a Producer
// with the following parameters and produce
// and expect back a server side producer id
// see server's socket.on('transport-produce', ...)
@@ -325,33 +287,40 @@ const connectSendTransport = async () => {
console.log('[connectSendTransport] producerTransport');
// we now call produce() to instruct the producer transport
// We now call produce() to instruct the producer transport
// to send media to the Router
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
// this action will trigger the 'connect' and 'produce' events above
// Produce video
producerVideo = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideo', producerVideo);
producerVideo.on('trackended', () => {
console.log('track ended')
// close video track
})
})
producerVideo.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// Produce audio
producerAudio = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close audio track
})
// producerAudio = await producerTransport.produce(audioParams)
// console.log('producerAudio', producerAudio);
// producerAudio.on('trackended', () => {
// console.log('track ended')
// // close video track
// })
// producerAudio.on('transportclose', () => {
// console.log('transport ended')
// // close video track
// })
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
const answer = {
origin_asset_id: ASSET_ID,
@@ -361,7 +330,7 @@ const connectSendTransport = async () => {
origin_asset_type_name: ASSET_TYPE,
origin_asset_name: ASSET_NAME,
video_call_id: callId,
answer: 'accepted', // answer: 'rejected'
answer: 'accepted', // answer: accepted/rejected
};
console.log('SEND answer', answer);
@@ -377,7 +346,7 @@ const connectSendTransport = async () => {
const createRecvTransport = async () => {
console.log('createRecvTransport');
// see server's socket.on('consume', sender?, ...)
// See server's socket.on('consume', sender?, ...)
// this is a call from Consumer, so sender = false
await socket.emit('createWebRtcTransport', { sender: false, callId }, ({ params }) => {
// The server sends back params needed
@@ -389,13 +358,13 @@ const createRecvTransport = async () => {
console.log('[createRecvTransport] params', params)
// creates a new WebRTC Transport to receive media
// Creates a new WebRTC Transport to receive media
// based on server's consumer transport params
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-createRecvTransport
consumerTransport = device.createRecvTransport(params)
// https://mediasoup.org/documentation/v3/communication-between-client-and-server/#producing-media
// this event is raised when a first call to transport.produce() is made
// This event is raised when a first call to transport.produce() is made
// see connectRecvTransport() below
consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
try {
@@ -429,7 +398,7 @@ const resetCallSettings = () => {
const connectRecvTransport = async () => {
console.log('connectRecvTransport');
// for consumer, we need to tell the server first
// For consumer, we need to tell the server first
// to create a consumer based on the rtpCapabilities and consume
// if the router can consume, it will send back a set of params as below
await socket.emit('consume', {
@@ -441,7 +410,7 @@ const connectRecvTransport = async () => {
return
}
// then consume with the local consumer transport
// Then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
@@ -484,6 +453,7 @@ const closeCall = () => {
resetCallSettings()
}
btnLocalVideo.addEventListener('click', getLocalStream)
btnRecvSendTransport.addEventListener('click', goConnect)
btnCloseCall.addEventListener('click', closeCall)