Compare commits


1 Commit

SHA1 | Message | Date
df3482ac15 | LH-265: Enable audio on video server | 2022-11-21 23:02:49 +02:00
16 changed files with 1162 additions and 1471 deletions

View File

@ -22,20 +22,18 @@
2. Run the `npm start:prod` command to start the server in production mode.
(To connect to the terminal, use `pm2 log video-server`)
### Web client
---
- The server starts by default on port 3000, and the SSL certificates must be configured
- The web client can be accessed using the /sfu path
ex: https://HOST/sfu/?assetId=1&&accountId=1&producer=true&dest_asset_id=75&assetName=Adi
ex: http://localhost:3000/sfu/?assetId=1&&accountId=1&producer=true&assetName=Adi&assetType=linx
assetId = asset id of the unit on which you are doing the test
accountId = account id of the unit on which you are doing the test
producer = it will always be true because you are the producer
(it can be set to false, but then another client with producer=true is required)
assetName = asset name of the unit on which you are doing the test
dest_asset_id= the addressee with whom the call is made
- To make a call using this client, you need a microphone and permission to use it
- For any changes to the client, run `npm run watch` to regenerate the bundle.js used by the web client
assetType = asset type of the unit on which you are doing the test
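As a quick orientation, the sketch below shows how a client page could read these query parameters. It mirrors the `urlParams.get(...)` calls that appear later in `public/index.js` (where `urlParams` is presumably created with `URLSearchParams`); the variable names here are illustrative only.

```js
// Minimal sketch, assuming the page was opened as in the examples above,
// e.g. /sfu/?assetId=1&accountId=1&producer=true&assetName=Adi&assetType=linx
const urlParams = new URLSearchParams(window.location.search);

const assetId    = parseInt(urlParams.get('assetId'))   || null; // unit under test
const accountId  = parseInt(urlParams.get('accountId')) || null; // account of that unit
const isProducer = urlParams.get('producer') === 'true';         // normally true
const assetName  = urlParams.get('assetName') || null;           // display name of the unit
const assetType  = urlParams.get('assetType') || null;           // e.g. 'linx'

console.log({ assetId, accountId, isProducer, assetName, assetType });
```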
### Demo project
The demo project initially used and then adapted for our needs: `https://github.com/jamalag/mediasoup2`

app.js
View File

@ -1,4 +1,4 @@
require('dotenv').config();
require('dotenv').config()
const express = require('express');
const app = express();
@ -13,47 +13,50 @@ try {
}
const mediasoup = require('mediasoup');
let worker;
let worker
/**
* videoCalls
* |-> Router
* |-> Producer
* |-> Consumer
* |-> Producer Transport
* |-> Consumer Transport
*
* videoCalls - Dictionary of Object(s)
* '<callId>': {
* router: Router,
* initiatorAudioProducer: Producer,
* initiatorVideoProducer: Producer,
* receiverVideoProducer: Producer,
* receiverAudioProducer: Producer,
* initiatorProducerTransport: Producer Transport,
* receiverProducerTransport: Producer Transport,
* initiatorConsumerVideo: Consumer,
* initiatorConsumerAudio: Consumer,
* initiatorConsumerTransport: Consumer Transport
* initiatorSocket
* receiverSocket
* producer: Producer,
* producerTransport: Producer Transport,
* consumer: Consumer,
* consumerTransport: Consumer Transport
* }
*
**/
let videoCalls = {};
let socketDetails = {};
let videoCalls = {}
let socketDetails = {}
app.get('/', (_req, res) => {
res.send('Hello from mediasoup app!');
});
res.send('Hello from mediasoup app!')
})
app.use('/sfu', express.static(path.join(__dirname, 'public')));
app.use('/sfu', express.static(path.join(__dirname, 'public')))
// SSL cert for HTTPS access
const options = {
key: fs.readFileSync(process.env.SERVER_KEY, 'utf-8'),
cert: fs.readFileSync(process.env.SERVER_CERT, 'utf-8'),
};
}
const httpsServer = https.createServer(options, app);
const io = new Server(httpsServer, {
allowEIO3: true,
origins: ['*:*'],
origins: ["*:*"],
// allowRequest: (req, next) => {
// console.log('req', req);
// next(null, true)
// }
});
// const io = new Server(server, { origins: '*:*', allowEIO3: true });
httpsServer.listen(process.env.PORT, () => {
console.log('Video server listening on port:', process.env.PORT);
@ -66,19 +69,19 @@ const createWorker = async () => {
worker = await mediasoup.createWorker({
rtcMinPort: parseInt(process.env.RTC_MIN_PORT),
rtcMaxPort: parseInt(process.env.RTC_MAX_PORT),
});
})
console.log(`[createWorker] worker pid ${worker.pid}`);
worker.on('died', (error) => {
worker.on('died', error => {
// This implies something serious happened, so kill the application
console.error('mediasoup worker has died', error);
setTimeout(() => process.exit(1), 2000); // exit in 2 seconds
});
})
return worker;
} catch (error) {
console.error(`[createWorker] | ERROR | error: ${error.message}`);
console.log(`ERROR | createWorker | ${error.message}`);
}
}
};
// We create a Worker as soon as our application starts
worker = createWorker();
@ -101,70 +104,36 @@ const mediaCodecs = [
parameters: {
'x-google-start-bitrate': 1000,
},
channels: 2,
},
{
kind: 'video',
mimeType: 'video/VP9',
clockRate: 90000,
parameters: {
'profile-id': 2,
'x-google-start-bitrate': 1000,
},
},
{
kind: 'video',
mimeType: 'video/h264',
clockRate: 90000,
parameters: {
'packetization-mode': 1,
'profile-level-id': '4d0032',
'level-asymmetry-allowed': 1,
'x-google-start-bitrate': 1000,
},
},
{
kind: 'video',
mimeType: 'video/h264',
clockRate: 90000,
parameters: {
'packetization-mode': 1,
'profile-level-id': '42e01f',
'level-asymmetry-allowed': 1,
'x-google-start-bitrate': 1000,
},
},
];
const closeCall = (callId) => {
try {
if (callId && videoCalls[callId]) {
videoCalls[callId].receiverVideoProducer?.close();
videoCalls[callId].receiverAudioProducer?.close();
videoCalls[callId].initiatorConsumerVideo?.close();
videoCalls[callId].initiatorConsumerAudio?.close();
videoCalls[callId]?.initiatorConsumerTransport?.close();
videoCalls[callId]?.receiverProducerTransport?.close();
videoCalls[callId].producer?.close();
videoCalls[callId].consumer?.close();
videoCalls[callId]?.consumerTransport?.close();
videoCalls[callId]?.producerTransport?.close();
videoCalls[callId]?.router?.close();
delete videoCalls[callId];
console.log(`[closeCall] | callId: ${callId}`);
} else {
console.log(`The call with id ${callId} has already been deleted`);
}
} catch (error) {
console.error(`[closeCall] | ERROR | callId: ${callId} | error: ${error.message}`);
console.log(`ERROR | closeCall | callid ${callId} | ${error.message}`);
}
}
};
/*
- Handlers for WS events
- These are created only when we have a connection with a peer
*/
peers.on('connection', async (socket) => {
peers.on('connection', async socket => {
console.log('[connection] socketId:', socket.id);
// After making the connection successfully, we send the client a 'connection-success' event
socket.emit('connection-success', {
socketId: socket.id,
socketId: socket.id
});
// It is triggered when the peer is disconnected
@ -187,22 +156,22 @@ peers.on('connection', async (socket) => {
if (callId) {
console.log(`[createRoom] socket.id ${socket.id} callId ${callId}`);
if (!videoCalls[callId]) {
videoCalls[callId] = { router: await worker.createRouter({ mediaCodecs }) };
console.log(`[createRoom] Generate Router ID: ${videoCalls[callId].router.id}`);
videoCalls[callId].receiverSocket = socket;
} else {
videoCalls[callId].initiatorSocket = socket;
console.log('[createRoom] callId', callId);
videoCalls[callId] = { router: await worker.createRouter({ mediaCodecs }) }
console.log(`[createRoom] Router ID: ${videoCalls[callId].router.id}`);
}
socketDetails[socket.id] = callId;
// rtpCapabilities is set for callback
console.log('[getRtpCapabilities] callId', callId);
callbackResponse = {
rtpCapabilities: videoCalls[callId].router.rtpCapabilities,
rtpCapabilities :videoCalls[callId].router.rtpCapabilities
};
} else {
console.log(`[createRoom] missing callId: ${callId}`);
console.log(`[createRoom] missing callId ${callId}`);
}
} catch (error) {
console.error(`[createRoom] | ERROR | callId: ${callId} | error: ${error.message}`);
console.log(`ERROR | createRoom | callId ${callId} | ${error.message}`);
} finally {
callback(callbackResponse);
}
@ -210,7 +179,7 @@ peers.on('connection', async (socket) => {
/*
- Client emits a request to create server side Transport
- Depending on the sender, a producer or consumer is created is created on that router
- Depending on the sender, producerTransport or consumerTransport is created on that router
- It will return parameters, these are required for the client to create the RecvTransport
from the client.
- If the client is producer(sender: true) then it will use parameters for device.createSendTransport(params)
@ -219,25 +188,24 @@ peers.on('connection', async (socket) => {
socket.on('createWebRtcTransport', async ({ sender }, callback) => {
try {
const callId = socketDetails[socket.id];
console.log(`[createWebRtcTransport] socket ${socket.id} | sender ${sender} | callId ${callId}`);
console.log(`[createWebRtcTransport] sender ${sender} | callId ${callId}`);
if (sender) {
if (!videoCalls[callId].receiverProducerTransport && !isInitiator(callId, socket.id)) {
videoCalls[callId].receiverProducerTransport = await createWebRtcTransportLayer(callId, callback);
} else if (!videoCalls[callId].initiatorProducerTransport && isInitiator(callId, socket.id)) {
videoCalls[callId].initiatorProducerTransport = await createWebRtcTransportLayer(callId, callback);
if (!videoCalls[callId].producerTransport) {
videoCalls[callId].producerTransport = await createWebRtcTransportLayer(callId, callback);
} else {
console.log(`producerTransport has already been defined | callId ${callId}`);
callback(null);
}
} else if (!sender) {
if (!videoCalls[callId].receiverConsumerTransport && !isInitiator(callId, socket.id)) {
videoCalls[callId].receiverConsumerTransport = await createWebRtcTransportLayer(callId, callback);
} else if (!videoCalls[callId].initiatorConsumerTransport && isInitiator(callId, socket.id)) {
videoCalls[callId].initiatorConsumerTransport = await createWebRtcTransportLayer(callId, callback);
if (!videoCalls[callId].consumerTransport) {
videoCalls[callId].consumerTransport = await createWebRtcTransportLayer(callId, callback);
} else {
console.log(`consumerTransport has already been defined | callId ${callId}`);
callback(null);
}
}
} catch (error) {
console.error(`[createWebRtcTransport] | ERROR | callId: ${socketDetails[socket.id]} | sender: ${sender} | error: ${error.message}`);
console.log(`ERROR | createWebRtcTransport | callId ${socketDetails[socket.id]} | sender ${sender} | ${error.message}`);
callback(error);
}
});
@ -251,19 +219,16 @@ peers.on('connection', async (socket) => {
const callId = socketDetails[socket.id];
if (typeof dtlsParameters === 'string') dtlsParameters = JSON.parse(dtlsParameters);
console.log(`[transport-connect] socket ${socket.id} | callId ${callId}`);
isInitiator(callId, socket.id)
? await videoCalls[callId].initiatorProducerTransport.connect({ dtlsParameters })
: await videoCalls[callId].receiverProducerTransport.connect({ dtlsParameters });
console.log(`[transport-connect] socket.id ${socket.id} | callId ${callId}`);
await videoCalls[callId].producerTransport.connect({ dtlsParameters });
} catch (error) {
console.error(`[transport-connect] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`);
console.log(`ERROR | transport-connect | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
/*
- The event sent by the client (PRODUCER) after successfully connecting to receiverProducerTransport/initiatorProducerTransport
- For the router with the id callId, we make produce on receiverProducerTransport/initiatorProducerTransport
- The event sent by the client (PRODUCER) after successfully connecting to producerTransport
- For the router with the id callId, we make produce on producerTransport
- Create the handler on producer at the 'transportclose' event
*/
socket.on('transport-produce', async ({ kind, rtpParameters, appData }, callback) => {
@ -271,86 +236,25 @@ peers.on('connection', async (socket) => {
const callId = socketDetails[socket.id];
if (typeof rtpParameters === 'string') rtpParameters = JSON.parse(rtpParameters);
console.log(`[transport-produce] callId: ${callId} | kind: ${kind} | socket: ${socket.id}`);
if (kind === 'video') {
if (!isInitiator(callId, socket.id)) {
videoCalls[callId].receiverVideoProducer = await videoCalls[callId].receiverProducerTransport.produce({
console.log('[transport-produce] | socket.id', socket.id, '| callId', callId);
videoCalls[callId].producer = await videoCalls[callId].producerTransport.produce({
kind,
rtpParameters,
});
console.log(`[transport-produce] Producer ID: ${videoCalls[callId].producer.id} | kind: ${videoCalls[callId].producer.kind}`);
videoCalls[callId].receiverVideoProducer.on('transportclose', () => {
console.log('transport for this producer closed', callId);
videoCalls[callId].producer.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
// Send back to the client the Producer's id
callback &&
callback({
id: videoCalls[callId].receiverVideoProducer.id,
callback && callback({
id: videoCalls[callId].producer.id
});
} else {
videoCalls[callId].initiatorVideoProducer = await videoCalls[callId].initiatorProducerTransport.produce({
kind,
rtpParameters,
});
videoCalls[callId].initiatorVideoProducer.on('transportclose', () => {
console.log('transport for this producer closed', callId);
closeCall(callId);
});
callback &&
callback({
id: videoCalls[callId].initiatorVideoProducer.id,
});
}
} else if (kind === 'audio') {
if (!isInitiator(callId, socket.id)) {
videoCalls[callId].receiverAudioProducer = await videoCalls[callId].receiverProducerTransport.produce({
kind,
rtpParameters,
});
videoCalls[callId].receiverAudioProducer.on('transportclose', () => {
console.log('transport for this producer closed', callId);
closeCall(callId);
});
// Send back to the client the Producer's id
callback &&
callback({
id: videoCalls[callId].receiverAudioProducer.id,
});
} else {
videoCalls[callId].initiatorAudioProducer = await videoCalls[callId].initiatorProducerTransport.produce({
kind,
rtpParameters,
});
videoCalls[callId].initiatorAudioProducer.on('transportclose', () => {
console.log('transport for this producer closed', callId);
closeCall(callId);
});
// Send back to the client the Producer's id
callback &&
callback({
id: videoCalls[callId].initiatorAudioProducer.id,
});
}
}
const socketToEmit = isInitiator(callId, socket.id)
? videoCalls[callId].receiverSocket
: videoCalls[callId].initiatorSocket;
// callId - Id of the call
// kind - producer type: audio/video
socketToEmit?.emit('new-producer', { callId, kind });
} catch (error) {
console.error(`[transport-produce] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`);
console.log(`ERROR | transport-produce | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
@ -361,18 +265,12 @@ peers.on('connection', async (socket) => {
socket.on('transport-recv-connect', async ({ dtlsParameters }) => {
try {
const callId = socketDetails[socket.id];
console.log(`[transport-recv-connect] socket ${socket.id} | callId ${callId}`);
if (typeof dtlsParameters === 'string') dtlsParameters = JSON.parse(dtlsParameters);
// await videoCalls[callId].consumerTransport.connect({ dtlsParameters });
if (!isInitiator(callId, socket.id)) {
await videoCalls[callId].receiverConsumerTransport.connect({ dtlsParameters });
} else if (isInitiator(callId, socket.id)) {
await videoCalls[callId].initiatorConsumerTransport.connect({ dtlsParameters });
}
console.log(`[transport-recv-connect] socket.id ${socket.id} | callId ${callId}`);
await videoCalls[callId].consumerTransport.connect({ dtlsParameters });
} catch (error) {
console.error(`[transport-recv-connect] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`);
console.log(`ERROR | transport-recv-connect | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
})
/*
- The customer consumes after successfully connecting to consumerTransport
@ -384,62 +282,51 @@ peers.on('connection', async (socket) => {
socket.on('consume', async ({ rtpCapabilities }, callback) => {
try {
const callId = socketDetails[socket.id];
console.log('[consume] callId', callId);
if (typeof rtpCapabilities === 'string') rtpCapabilities = JSON.parse(rtpCapabilities);
// Check if the router can consume the specified producer
if (videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].producer.id,
rtpCapabilities
})) {
console.log('[consume] Can consume', callId);
// Transport can now consume and return a consumer
videoCalls[callId].consumer = await videoCalls[callId].consumerTransport.consume({
producerId: videoCalls[callId].producer.id,
rtpCapabilities,
paused: true,
});
let canConsumeVideo, canConsumeAudio;
try {
if (isInitiator(callId, socket.id)) {
canConsumeVideo =
!!videoCalls[callId].receiverVideoProducer &&
!!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].receiverVideoProducer.id,
rtpCapabilities,
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
videoCalls[callId].consumer.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport close from consumer', callId);
closeCall(callId);
});
canConsumeAudio =
!!videoCalls[callId].receiverAudioProducer &&
!!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].receiverAudioProducer.id,
rtpCapabilities,
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-producerclose
videoCalls[callId].consumer.on('producerclose', () => {
const callId = socketDetails[socket.id];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
// From the consumer extract the following params to send back to the Client
const params = {
id: videoCalls[callId].consumer.id,
producerId: videoCalls[callId].producer.id,
kind: videoCalls[callId].consumer.kind,
rtpParameters: videoCalls[callId].consumer.rtpParameters,
};
// Send the parameters to the client
callback({ params });
} else {
canConsumeVideo =
!!videoCalls[callId].initiatorVideoProducer &&
!!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].initiatorVideoProducer.id,
rtpCapabilities,
});
canConsumeAudio =
!!videoCalls[callId].initiatorAudioProducer &&
!!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].initiatorAudioProducer.id,
rtpCapabilities,
});
}
} catch (error) {
console.error(`[consume] | ERROR | callId: ${callId} | error: ${error.message}`);
}
console.log(`[consume] socket ${socket.id} | callId: ${callId} | canConsumeVideo: ${canConsumeVideo} | canConsumeAudio: ${canConsumeAudio}`);
if (canConsumeVideo && !canConsumeAudio) {
const videoParams = await consumeVideo(callId, socket.id, rtpCapabilities);
callback({ videoParams, audioParams: null });
} else if (canConsumeVideo && canConsumeAudio) {
const videoParams = await consumeVideo(callId, socket.id, rtpCapabilities);
const audioParams = await consumeAudio(callId, socket.id, rtpCapabilities);
callback({ videoParams, audioParams });
} else if (!canConsumeVideo && canConsumeAudio) {
const audioParams = await consumeAudio(callId, socket.id, rtpCapabilities);
const data = { videoParams: null, audioParams };
callback(data);
} else {
console.log(`[consume] Can't consume | callId ${callId}`);
console.log(`[canConsume] Can't consume | callId ${callId}`);
callback(null);
}
} catch (error) {
console.error(`[consume] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`);
console.log(`ERROR | consume | callId ${socketDetails[socket.id]} | ${error.message}`)
callback({ params: { error } });
}
});
@ -447,106 +334,18 @@ peers.on('connection', async (socket) => {
/*
- Event sent by the consumer after consuming to resume the pause
- When consuming on consumerTransport, it is initially done with paused: true, here we will resume
- For the initiator we resume the initiatorConsumerAUDIO/VIDEO and for receiver the receiverConsumerAUDIO/VIDEO
*/
socket.on('consumer-resume', () => {
socket.on('consumer-resume', async () => {
try {
const callId = socketDetails[socket.id];
const isInitiatorValue = isInitiator(callId, socket.id);
console.log(`[consumer-resume] callId: ${callId} | isInitiator: ${isInitiatorValue}`);
if (isInitiatorValue) {
videoCalls[callId].initiatorConsumerVideo && videoCalls[callId].initiatorConsumerVideo.resume();
videoCalls[callId].initiatorConsumerAudio && videoCalls[callId].initiatorConsumerAudio.resume();
} else {
videoCalls[callId].receiverConsumerVideo && videoCalls[callId].receiverConsumerVideo.resume();
videoCalls[callId].receiverConsumerAudio && videoCalls[callId].receiverConsumerAudio.resume();
}
console.log(`[consumer-resume] callId ${callId}`)
await videoCalls[callId].consumer.resume();
} catch (error) {
console.error(`[consumer-resume] | ERROR | callId: ${socketDetails[socket.id]} | isInitiator: ${isInitiator} | error: ${error.message}`);
console.log(`ERROR | consumer-resume | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
socket.on('close-producer', ({ callId, kind}) => {
try {
if (isInitiator(callId, socket.id)) {
console.log(`[close-producer] initiator --EMIT--> receiver | callId: ${callId} | kind: ${kind}`);
videoCalls[callId].receiverSocket.emit('close-producer', { callId, kind });
} else {
console.log(`[close-producer] receiver --EMIT--> initiator | callId: ${callId} | kind: ${kind}`);
videoCalls[callId].initiatorSocket.emit('close-producer', { callId, kind });
}
} catch (error) {
console.error(`[close-producer] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`);
}
});
});
const consumeVideo = async (callId, socketId, rtpCapabilities) => {
// Handlers for transports https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
if (isInitiator(callId, socketId)) {
videoCalls[callId].initiatorConsumerVideo = await videoCalls[callId].initiatorConsumerTransport.consume({
producerId: videoCalls[callId].receiverVideoProducer.id,
rtpCapabilities,
paused: true,
});
return {
id: videoCalls[callId].initiatorConsumerVideo.id,
producerId: videoCalls[callId].receiverVideoProducer.id,
kind: 'video',
rtpParameters: videoCalls[callId].initiatorConsumerVideo.rtpParameters,
};
} else {
videoCalls[callId].receiverConsumerVideo = await videoCalls[callId].receiverConsumerTransport.consume({
producerId: videoCalls[callId].initiatorVideoProducer.id,
rtpCapabilities,
paused: true,
});
return {
id: videoCalls[callId].receiverConsumerVideo.id,
producerId: videoCalls[callId].initiatorVideoProducer.id,
kind: 'video',
rtpParameters: videoCalls[callId].receiverConsumerVideo.rtpParameters,
};
}
};
const consumeAudio = async (callId, socketId, rtpCapabilities) => {
if (isInitiator(callId, socketId)) {
videoCalls[callId].initiatorConsumerAudio = await videoCalls[callId].initiatorConsumerTransport.consume({
producerId: videoCalls[callId].receiverAudioProducer.id,
rtpCapabilities,
paused: true,
});
return {
id: videoCalls[callId].initiatorConsumerAudio.id,
producerId: videoCalls[callId].receiverAudioProducer.id,
kind: 'audio',
rtpParameters: videoCalls[callId].initiatorConsumerAudio.rtpParameters,
};
} else {
videoCalls[callId].receiverConsumerAudio = await videoCalls[callId].receiverConsumerTransport.consume({
producerId: videoCalls[callId].initiatorAudioProducer.id,
rtpCapabilities,
paused: true,
});
return {
id: videoCalls[callId].receiverConsumerAudio.id,
producerId: videoCalls[callId].initiatorAudioProducer.id,
kind: 'audio',
rtpParameters: videoCalls[callId].receiverConsumerAudio.rtpParameters,
};
}
};
const isInitiator = (callId, socketId) => {
return videoCalls[callId]?.initiatorSocket?.id === socketId;
};
/*
- Called from at event 'createWebRtcTransport' and assigned to the consumer or producer transport
- It will return parameters, these are required for the client to create the RecvTransport
@ -556,14 +355,14 @@ const isInitiator = (callId, socketId) => {
*/
const createWebRtcTransportLayer = async (callId, callback) => {
try {
console.log(`[createWebRtcTransportLayer] callId: ${callId}`);
console.log('[createWebRtcTransportLayer] callId', callId);
// https://mediasoup.org/documentation/v3/mediasoup/api/#WebRtcTransportOptions
const webRtcTransport_options = {
listenIps: [
{
ip: process.env.IP, // Listening IPv4 or IPv6.
announcedIp: process.env.ANNOUNCED_IP, // Announced IPv4 or IPv6 (useful when running mediasoup behind NAT with private IP).
},
}
],
enableUdp: true,
enableTcp: true,
@ -571,10 +370,11 @@ const createWebRtcTransportLayer = async (callId, callback) => {
};
// https://mediasoup.org/documentation/v3/mediasoup/api/#router-createWebRtcTransport
let transport = await videoCalls[callId].router.createWebRtcTransport(webRtcTransport_options);
let transport = await videoCalls[callId].router.createWebRtcTransport(webRtcTransport_options)
console.log(`callId: ${callId} | transport id: ${transport.id}`)
// Handler for when DTLS(Datagram Transport Layer Security) changes
transport.on('dtlsstatechange', (dtlsState) => {
transport.on('dtlsstatechange', dtlsState => {
console.log(`transport | dtlsstatechange | calldId ${callId} | dtlsState ${dtlsState}`);
if (dtlsState === 'closed') {
transport.close();
@ -598,8 +398,9 @@ const createWebRtcTransportLayer = async (callId, callback) => {
// Set transport to producerTransport or consumerTransport
return transport;
} catch (error) {
console.error(`[createWebRtcTransportLayer] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`);
console.log(`ERROR | createWebRtcTransportLayer | callId ${socketDetails[socket.id]} | ${error.message}`);
callback({ params: { error } });
}
};
}
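To summarize the server-side change above: the per-call state now tracks separate initiator/receiver audio and video producers, consumers and transports, and the 'consume' handler answers with `{ videoParams, audioParams }` instead of a single `{ params }` object. A minimal client-side sketch of handling that new callback shape follows; it condenses what the updated `public/index.js` further down does, and `socket`, `device`, `consumerTransport`, `callId` and `remoteVideo` are assumed to already exist.

```js
// Hedged sketch, not part of the commit: consume whatever the server reports as available.
socket.emit('consume', { rtpCapabilities: device.rtpCapabilities, callId },
  async ({ videoParams, audioParams }) => {
    const stream = new MediaStream();
    if (videoParams) {
      // videoParams carries { id, producerId, kind, rtpParameters } from the server
      const videoConsumer = await consumerTransport.consume(videoParams);
      stream.addTrack(videoConsumer.track);
    }
    if (audioParams) {
      const audioConsumer = await consumerTransport.consume(audioParams);
      stream.addTrack(audioConsumer.track);
    }
    socket.emit('consumer-resume'); // server-side consumers start paused
    remoteVideo.srcObject = stream;
  });
```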

View File

@ -9,21 +9,14 @@ else
## CLEANUP
rm -fr dist/*
fi
if [ -d "node_modules" ]; then
rm -fr node_modules
fi
# Install dependencies
#npm install
## PROJECT NEEDS
echo "Building app... from $(git rev-parse --abbrev-ref HEAD)"
#npm run-script build
cp -r {.env,app.js,package.json,server,public,doc,Dockerfile} dist/
#cp -r ./* dist/
dateString=$(date +"%Y%m%d-%H%M%S")
git log --pretty=format:"%ad%x09%an%x09%s" --no-merges -20 > "dist/git--$dateString.log"
cp -r {.env,app.js,package.json,server,public} dist/
#Add version control for pm2
cd dist
#Add version control for pm2
@ -51,3 +44,5 @@ fi
## POST BUILD
cd -

Binary files not shown: 9 images removed (sizes 614 KiB, 994 KiB, 354 KiB, 794 KiB, 462 KiB, 252 KiB, 346 KiB, 407 KiB, 439 KiB).

File diff suppressed because it is too large.

View File

@ -1,4 +1,5 @@
module.exports = {
hubAddress: 'https://hub.dev.linx.safemobile.com/',
mediasoupAddress: 'https://testing.video.safemobile.org/',
mediasoupAddress: 'https://video.safemobile.org/mediasoup',
// mediasoupAddress: 'http://localhost:3000/mediasoup',
}
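A hypothetical usage sketch (not part of the diff): the bundled web client is assumed to read this module and open one socket.io connection to the hub and one to the media server, as `public/index.js` below does for `config.hubAddress`.

```js
// Assumed wiring; the bundling step (npm run watch -> bundle.js) is outside this diff,
// and io() comes from the socket.io client script loaded by the page.
const config = require('./config');

const hub = io(config.hubAddress);           // signalling hub
const socket = io(config.mediasoupAddress);  // mediasoup SFU server
```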

View File

@ -34,9 +34,6 @@
<body>
<body>
<div id="video">
<legend>Client options:</legend>
<input type="checkbox" id="produceAudio" name="produceAudio">
<label for="produceAudio">Produce audio</label><br>
<table>
<thead>
<th>Local Video</th>
@ -46,24 +43,12 @@
<tr>
<td>
<div id="sharedBtns">
<video
id="localVideo"
class="video"
autoplay
muted
playsinline
></video>
<video id="localVideo" autoplay class="video" ></video>
</div>
</td>
<td>
<div id="sharedBtns">
<video
id="remoteVideo"
class="video"
autoplay
muted
playsinline
></video>
<video id="remoteVideo" autoplay class="video" ></video>
</div>
</td>
</tr>
@ -75,11 +60,34 @@
</td>
<td>
<div id="sharedBtns">
<!-- <button id="btnRecvSendTransport">Consume</button> -->
<button id="remoteSoundControl">Unmute</button>
<button id="btnRecvSendTransport">Consume</button>
</div>
</td>
</tr>
<!-- <tr>
<td colspan="2">
<div id="sharedBtns">
<button id="btnRtpCapabilities">2. Get Rtp Capabilities</button>
<br />
<button id="btnDevice">3. Create Device</button>
</div>
</td>
</tr>
<tr>
<td>
<div id="sharedBtns">
<button id="btnCreateSendTransport">4. Create Send Transport</button>
<br />
<button id="btnConnectSendTransport">5. Connect Send Transport & Produce</button></td>
</div>
<td>
<div id="sharedBtns">
<button id="btnRecvSendTransport">6. Create Recv Transport</button>
<br />
<button id="btnConnectRecvTransport">7. Connect Recv Transport & Consume</button>
</div>
</td>
</tr> -->
</tbody>
</table>
<div id="closeCallBtn">

View File

@ -10,72 +10,9 @@ const ASSET_NAME = urlParams.get('assetName') || null;
const ASSET_TYPE = urlParams.get('assetType') || null;
let callId = parseInt(urlParams.get('callId')) || null;
const IS_PRODUCER = urlParams.get('producer') === 'true' ? true : false
let remoteVideo = document.getElementById('remoteVideo')
remoteVideo.defaultMuted = true
let produceAudio = false
console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId', callId, ' | IS_PRODUCER', IS_PRODUCER)
console.log('🟩 config', config)
produceAudioSelector = document.getElementById('produceAudio');
produceAudioSelector.addEventListener('change', e => {
if(e.target.checked) {
produceAudio = true
console.log('produce audio');
} else {
produceAudio = false
}
});
let socket, hub
let device
let rtpCapabilities
let producerTransport
let consumerTransport
let producerVideo
let producerAudio
let consumer
let originAssetId
let consumerVideo // local consumer video(consumer not transport)
let consumerAudio // local consumer audio(consumer not transport)
const remoteSoundControl = document.getElementById('remoteSoundControl');
remoteSoundControl.addEventListener('click', function handleClick() {
console.log('remoteSoundControl.textContent', remoteSoundControl.textContent);
if (remoteSoundControl.textContent === 'Unmute') {
remoteVideo.muted = false
remoteSoundControl.textContent = 'Mute';
} else {
remoteVideo.muted = true
remoteSoundControl.textContent = 'Unmute';
}
});
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
let videoParams = {
encodings: [
{ scaleResolutionDownBy: 4, maxBitrate: 500000 },
{ scaleResolutionDownBy: 2, maxBitrate: 1000000 },
{ scaleResolutionDownBy: 1, maxBitrate: 5000000 },
{ scalabilityMode: 'S3T3_KEY' }
],
codecOptions: {
videoGoogleStartBitrate: 1000
}
}
let audioParams = {
codecOptions :
{
opusStereo : true,
opusDtx : true
}
}
setTimeout(() => {
let socket
hub = io(config.hubAddress)
const connectToMediasoup = () => {
@ -91,28 +28,15 @@ setTimeout(() => {
console.log(`[MEDIA] ${config.mediasoupAddress} | connected: ${socket.connected} | existsProducer: ${existsProducer}`)
if (!IS_PRODUCER && existsProducer && consumer === undefined) {
goConnect()
// document.getElementById('btnRecvSendTransport').click();
}
if (IS_PRODUCER && urlParams.get('testing') === 'true') { getLocalStream() }
})
socket.on('new-producer', ({ callId, kind }) => {
console.log(`🟢 new-producer | callId: ${callId} | kind: ${kind} | Ready to consume`);
connectRecvTransport();
})
socket.on('close-producer', ({ callId, kind }) => {
console.log(`🔴 close-producer | callId: ${callId} | kind: ${kind}`);
if (kind === 'video') {
consumerVideo.close()
remoteVideo.srcObject = null
}
else if (kind === 'audio') consumerAudio.close()
})
}
if (IS_PRODUCER === true) {
hub.on('connect', async () => {
console.log(`[HUB]! ${config.hubAddress} | connected: ${hub.connected}`)
console.log(`[HUB] ${config.hubAddress} | connected: ${hub.connected}`)
connectToMediasoup()
hub.emit(
@ -160,51 +84,73 @@ setTimeout(() => {
connectToMediasoup()
}
}, 1600);
let device
let rtpCapabilities
let producerTransport
let consumerTransport
let producer
let consumer
let originAssetId
// let originAssetName = 'Adi'
// let originAssetTypeName = 'linx'
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
let params = {
// mediasoup params
encodings: [
{
rid: 'r0',
maxBitrate: 100000,
scalabilityMode: 'S1T3',
},
{
rid: 'r1',
maxBitrate: 300000,
scalabilityMode: 'S1T3',
},
{
rid: 'r2',
maxBitrate: 900000,
scalabilityMode: 'S1T3',
},
],
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerCodecOptions
codecOptions: {
videoGoogleStartBitrate: 1000
}
}
const streamSuccess = (stream) => {
console.log('[streamSuccess] device', device);
console.log('[streamSuccess]');
localVideo.srcObject = stream
console.log('stream', stream);
const videoTrack = stream.getVideoTracks()[0]
const audioTrack = stream.getAudioTracks()[0]
videoParams = {
track: videoTrack,
...videoParams
const track = stream.getVideoTracks()[0]
params = {
track,
...params
}
audioParams = {
track: audioTrack,
...audioParams
}
console.log('[streamSuccess] videoParams', videoParams, ' | audioParams', audioParams);
goConnect()
}
const getLocalStream = () => {
console.log('[getLocalStream]');
navigator.mediaDevices.getUserMedia({
audio: produceAudio ? true : false,
audio: true,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
width: {
min: 640,
max: 1920,
},
height: {
min: 400,
max: 1080,
}
}
})
.then(streamSuccess)
.catch(error => {
console.log(error.message)
})
navigator.permissions.query(
{ name: 'microphone' }
).then((permissionStatus) =>{
console.log('🟨 [PERMISSION] permissionStatus', permissionStatus); // granted, denied, prompt
// It will block the code from execution and display "Permission denied" if we don't have microphone permissions
})
}
const goConnect = () => {
@ -221,6 +167,7 @@ const goCreateTransport = () => {
// server side to send/recive media
const createDevice = async () => {
try {
console.log('[createDevice]');
device = new mediasoupClient.Device()
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-load
@ -231,7 +178,6 @@ const createDevice = async () => {
})
console.log('Device RTP Capabilities', device.rtpCapabilities)
console.log('[createDevice] device', device);
// once the device loads, create transport
goCreateTransport()
@ -261,20 +207,18 @@ const getRtpCapabilities = () => {
}
const createSendTransport = () => {
console.log('[createSendTransport');
// see server's socket.on('createWebRtcTransport', sender?, ...)
// this is a call from Producer, so sender = true
socket.emit('createWebRtcTransport', { sender: true }, (value) => {
console.log(`[createWebRtcTransport] value: ${JSON.stringify(value)}`);
const params = value.params;
socket.emit('createWebRtcTransport', { sender: true, callId }, ({ params }) => {
// The server sends back params needed
// to create Send Transport on the client side
if (params.error) {
console.log(params.error)
return
}
console.log(params)
// creates a new WebRTC Transport to send media
// based on the server's producer transport params
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#TransportOptions
@ -300,10 +244,10 @@ const createSendTransport = () => {
})
producerTransport.on('produce', async (parameters, callback, errback) => {
console.log('[produce] parameters', parameters)
console.log(parameters)
try {
// Tell the server to create a Producer
// tell the server to create a Producer
// with the following parameters and produce
// and expect back a server side producer id
// see server's socket.on('transport-produce', ...)
@ -326,46 +270,22 @@ const createSendTransport = () => {
}
const connectSendTransport = async () => {
console.log('[connectSendTransport] producerTransport');
// We now call produce() to instruct the producer transport
// we now call produce() to instruct the producer transport
// to send media to the Router
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
// this action will trigger the 'connect' and 'produce' events above
producer = await producerTransport.produce(params)
// Produce video
let producerVideoHandler = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideo', producerVideo);
producerVideoHandler.on('trackended', () => {
producer.on('trackended', () => {
console.log('track ended')
// close video track
})
producerVideoHandler.on('transportclose', () => {
producer.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// Produce audio
if (produceAudio) {
let producerAudioHandler = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudioHandler.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudioHandler.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
}
const answer = {
origin_asset_id: ASSET_ID,
dest_asset_id: originAssetId || parseInt(urlParams.get('dest_asset_id')),
@ -374,7 +294,7 @@ const connectSendTransport = async () => {
origin_asset_type_name: ASSET_TYPE,
origin_asset_name: ASSET_NAME,
video_call_id: callId,
answer: 'accepted', // answer: accepted/rejected
answer: 'accepted', // answer: 'rejected'
};
console.log('SEND answer', answer);
@ -386,13 +306,11 @@ const connectSendTransport = async () => {
// Enable Close call button
const closeCallBtn = document.getElementById('btnCloseCall');
closeCallBtn.removeAttribute('disabled');
createRecvTransport();
}
const createRecvTransport = async () => {
console.log('createRecvTransport');
// See server's socket.on('consume', sender?, ...)
// see server's socket.on('consume', sender?, ...)
// this is a call from Consumer, so sender = false
await socket.emit('createWebRtcTransport', { sender: false, callId }, ({ params }) => {
// The server sends back params needed
@ -402,15 +320,15 @@ const createRecvTransport = async () => {
return
}
console.log('[createRecvTransport] params', params)
console.log(params)
// Creates a new WebRTC Transport to receive media
// creates a new WebRTC Transport to receive media
// based on server's consumer transport params
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-createRecvTransport
consumerTransport = device.createRecvTransport(params)
// https://mediasoup.org/documentation/v3/communication-between-client-and-server/#producing-media
// This event is raised when a first call to transport.produce() is made
// this event is raised when a first call to transport.produce() is made
// see connectRecvTransport() below
consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
try {
@ -427,8 +345,7 @@ const createRecvTransport = async () => {
errback(error)
}
})
// We call it in new-rpoducer, we don't need it here anymore
// connectRecvTransport()
connectRecvTransport()
})
}
@ -436,8 +353,7 @@ const resetCallSettings = () => {
localVideo.srcObject = null
remoteVideo.srcObject = null
consumer = null
producerVideo = null
producerAudio = null
producer = null
producerTransport = null
consumerTransport = null
device = undefined
@ -445,137 +361,38 @@ const resetCallSettings = () => {
const connectRecvTransport = async () => {
console.log('connectRecvTransport');
// For consumer, we need to tell the server first
// for consumer, we need to tell the server first
// to create a consumer based on the rtpCapabilities and consume
// if the router can consume, it will send back a set of params as below
await socket.emit('consume', {
rtpCapabilities: device.rtpCapabilities,
callId
}, async ({videoParams, audioParams}) => {
console.log(`[consume] 🟩 videoParams`, videoParams)
console.log(`[consume] 🟩 audioParams`, audioParams)
console.log('[consume] 🟩 consumerTransport', consumerTransport)
}, async ({ params }) => {
if (params.error) {
console.log('Cannot Consume')
return
}
// then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
producerId: params.producerId,
kind: params.kind,
rtpParameters: params.rtpParameters
})
// destructure and retrieve the video track from the producer
const { track } = consumer
let stream = new MediaStream()
// Maybe the unit does not produce video or audio, so we must only consume what is produced
if (videoParams) {
console.log('❗ Have VIDEO stream to consume');
stream.addTrack(await getVideoTrask(videoParams))
} else {
console.log('❗ Don\'t have VIDEO stream to consume');
}
if (audioParams) {
console.log('❗ Have AUDIO stream to consume');
let audioTrack = await getAudioTrask(audioParams)
stream.addTrack(audioTrack)
} else {
console.log('❗ Don\'t have AUDIO stream to consume');
}
socket.emit('consumer-resume')
stream.addTrack(track)
// stream.removeTrack(track)
remoteVideo.srcObject = stream
remoteVideo.setAttribute('autoplay', true)
socket.emit('consumer-resume')
console.log('consumer', consumer);
remoteVideo.play()
.then(() => {
console.log('remoteVideo PLAY')
})
.catch((error) => {
console.error(`remoteVideo PLAY ERROR | ${error.message}`)
})
})
}
const getVideoTrask = async (videoParams) => {
consumerVideo = await consumerTransport.consume({
id: videoParams.id,
producerId: videoParams.producerId,
kind: videoParams.kind,
rtpParameters: videoParams.rtpParameters
})
consumerVideo.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
consumerVideo.on('producerclose', () => {
console.log('===================1 consumerVideo producerclose');
});
consumerVideo.on("producerclose", () => {
console.log('====================2 consumerVideo producerclose');
})
consumerVideo.on("close", () => {
console.log('====================3 consumerVideo producerclose');
})
consumerVideo.on("close-producer", () => {
console.log('====================4 consumerVideo producerclose');
})
consumerVideo.observer.on('producerclose', () => {
console.log('===================11 consumerVideo producerclose');
});
consumerVideo.observer.on("producerclose", () => {
console.log('====================22 consumerVideo producerclose');
})
consumerVideo.observer.on("close", () => {
console.log('====================33 consumerVideo producerclose');
})
consumerVideo.observer.on("close-producer", () => {
console.log('====================44 consumerVideo producerclose');
})
return consumerVideo.track
}
const getAudioTrask = async (audioParams) => {
consumerAudio = await consumerTransport.consume({
id: audioParams.id,
producerId: audioParams.producerId,
kind: audioParams.kind,
rtpParameters: audioParams.rtpParameters
})
consumerAudio.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
consumerAudio.on('producerclose', () => {
console.log('===================1 consumerAudio producerclose');
});
consumerAudio.on("producerclose", () => {
console.log('====================2 consumerAudio producerclose');
})
const audioTrack = consumerAudio.track
audioTrack.applyConstraints({
audio: {
advanced: [
{
echoCancellation: {exact: true}
},
{
autoGainControl: {exact: true}
},
{
noiseSuppression: {exact: true}
},
{
highpassFilter: {exact: true}
}
]
}
})
return audioTrack
}
const closeCall = () => {
@ -599,30 +416,6 @@ const closeCall = () => {
resetCallSettings()
}
// const consume = async (kind) => {
// console.log(`[consume] kind: ${kind}`)
// console.log('createRecvTransport Consumer')
// await socket.emit('createWebRtcTransport', { sender: false, callId, dispatcher: true }, ({ params }) => {
// if (params.error) {
// console.log('createRecvTransport | createWebRtcTransport | Error', params.error)
// return
// }
// consumerTransport = device.createRecvTransport(params)
// consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
// try {
// await socket.emit('transport-recv-connect', {
// dtlsParameters,
// })
// callback()
// } catch (error) {
// errback(error)
// }
// })
// connectRecvTransport()
// })
// }
btnLocalVideo.addEventListener('click', getLocalStream)
// btnRecvSendTransport.addEventListener('click', consume)
btnRecvSendTransport.addEventListener('click', goConnect)
btnCloseCall.addEventListener('click', closeCall)
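Finally, a short sketch (not part of the commit) of the 'close-producer' relay that the new server handler enables: one peer reports it stopped producing a given kind, app.js forwards the event to the opposite socket, and that peer tears down the matching consumer, as the handler in `public/index.js` above does. The sender-side emit is an assumption inferred from the server handler.

```js
// Sender side (assumed): stop the local video producer and tell the server.
producerVideoHandler.close();
socket.emit('close-producer', { callId, kind: 'video' });

// Receiver side (as in public/index.js): close the matching consumer.
socket.on('close-producer', ({ callId, kind }) => {
  if (kind === 'video') {
    consumerVideo.close();
    remoteVideo.srcObject = null;
  } else if (kind === 'audio') {
    consumerAudio.close();
  }
});
```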