Compare commits

..

27 Commits

Author SHA1 Message Date
a31e646e2b Update 2022-12-16 14:10:26 +02:00
fe792f93b6 Update 2022-12-16 14:10:07 +02:00
dafbc486ad Update 2022-12-16 14:00:16 +02:00
b606a72030 Update 2022-12-16 13:43:55 +02:00
c174e92e3c Update 2022-12-16 13:33:13 +02:00
449724537e Update 2022-12-16 12:01:20 +02:00
9634aac153 Update 2022-12-16 11:59:10 +02:00
e0bc4642cb Update 2022-12-16 11:31:26 +02:00
f950142188 Update 2022-12-16 11:28:57 +02:00
5ba1f76585 Update 2022-12-16 11:23:10 +02:00
dc9c91fccc Update 2022-12-16 11:10:30 +02:00
5abcddc115 Update 2022-12-16 11:02:08 +02:00
bf65221664 Update 2022-12-16 10:54:25 +02:00
5687569bc1 Update 2022-12-16 10:47:13 +02:00
44c8d9b8ee Update 2022-12-16 02:15:26 +02:00
0a6985f9b9 Update 2022-12-16 02:12:22 +02:00
d29def364c Update 2022-12-16 02:10:10 +02:00
acd6025f59 Update 2022-12-16 02:07:01 +02:00
4b0c06e0b0 Added socket id to createWebRtcTransport 2022-12-16 02:02:38 +02:00
c1fe524ec7 LINXD-2270: Remove commented code; Update comments/logs 2022-12-14 11:57:19 +02:00
f8fcfb3165 Fix isInitiator in transport-produce 2022-12-14 11:33:16 +02:00
d324528d52 Added logs on transport-produce 2022-12-14 11:05:40 +02:00
d1eb7afc3a Added logs on createRoom on videoCalls 2022-12-14 10:15:34 +02:00
695964d342 Refactor code to use initiator/receiver 2022-12-14 09:55:45 +02:00
3ca555ef9e Set initiatorSocketId to be dispatcher 2022-12-13 13:23:15 +02:00
92fbecc36a Set initiatorSocketId to be dispatcher 2022-12-13 13:04:43 +02:00
d633eec92f Add socket info 2022-12-13 10:28:45 +02:00
17 changed files with 456 additions and 631 deletions


@ -1,2 +0,0 @@
node_modules
doc


@ -1,25 +1,11 @@
FROM ubuntu:22.04
WORKDIR /app
FROM ubuntu
RUN apt-get update && \
apt-get install -y build-essential pip net-tools iputils-ping iproute2 curl
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash -
RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash -
RUN apt-get install -y nodejs
RUN npm install -g watchify
COPY . /app/
RUN npm install
EXPOSE 3000/tcp
EXPOSE 2000-2200/udp
CMD node app.js
#docker build -t linx-video .
# docker run -it -d --restart always -p 3000:3000/tcp -p 2000-2200:2000-2200/udp linx-video
#Run under host network
# docker run -it -d --network host --restart always -p 3000:3000/tcp -p 2000-2200:2000-2200/udp linx-video
#https://docs.docker.com/config/containers/resource_constraints/
#docker run -it -d --network host --cpus="0.25" --memory="512m" --restart always -p 3000:3000/tcp -p 2000-2200:2000-2200/udp linx-video
EXPOSE 3000
EXPOSE 2000-2020
EXPOSE 10000-10100

app.js

@ -1,4 +1,4 @@
require('dotenv').config();
require('dotenv').config()
const express = require('express');
const app = express();
@ -13,46 +13,46 @@ try {
}
const mediasoup = require('mediasoup');
let worker;
let worker
/**
*
*
* videoCalls - Dictionary of Object(s)
* '<callId>': {
* router: Router,
* router: Router, router
* initiatorAudioProducer: Producer,
* initiatorVideoProducer: Producer,
* receiverVideoProducer: Producer,
* receiverAudioProducer: Producer,
* receiverVideoProducer: Producer, producerVideo
* receiverAudioProducer: Producer, producerAudio
* initiatorProducerTransport: Producer Transport,
* receiverProducerTransport: Producer Transport,
* initiatorConsumerVideo: Consumer,
* initiatorConsumerAudio: Consumer,
* initiatorConsumerTransport: Consumer Transport
* initiatorSocket
* receiverSocket
* receiverProducerTransport: Producer Transport, producerTransport
* initiatorConsumerVideo: Consumer, consumerVideo
* initiatorConsumerAudio: Consumer, consumerAudio
* initiatorConsumerTransport: Consumer Transport consumerTransport
* initiatorSockerId
* receiverSocketId
* }
*
*
**/
let videoCalls = {};
let socketDetails = {};
let videoCalls = {}
let socketDetails = {}
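// Illustrative shape of one populated entry (a sketch based on the field list in
// the comment above; the callId key and having every field present at once are
// assumptions — fields are filled in as each peer connects, produces and consumes):
// videoCalls['123'] = {
//   router,                                      // mediasoup Router for this call
//   initiatorProducerTransport, receiverProducerTransport,
//   initiatorVideoProducer, initiatorAudioProducer,
//   receiverVideoProducer, receiverAudioProducer,
//   initiatorConsumerTransport, initiatorConsumerVideo, initiatorConsumerAudio,
//   initiatorSocket, receiverSocket,             // socket.io sockets of the two peers
// };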
app.get('/', (_req, res) => {
res.send('Hello from mediasoup app!');
});
res.send('Hello from mediasoup app!')
})
app.use('/sfu', express.static(path.join(__dirname, 'public')));
app.use('/sfu', express.static(path.join(__dirname, 'public')))
// SSL cert for HTTPS access
const options = {
key: fs.readFileSync(process.env.SERVER_KEY, 'utf-8'),
cert: fs.readFileSync(process.env.SERVER_CERT, 'utf-8'),
};
}
const httpsServer = https.createServer(options, app);
const io = new Server(httpsServer, {
allowEIO3: true,
origins: ['*:*'],
origins: ["*:*"]
});
httpsServer.listen(process.env.PORT, () => {
@ -66,19 +66,19 @@ const createWorker = async () => {
worker = await mediasoup.createWorker({
rtcMinPort: parseInt(process.env.RTC_MIN_PORT),
rtcMaxPort: parseInt(process.env.RTC_MAX_PORT),
});
})
console.log(`[createWorker] worker pid ${worker.pid}`);
worker.on('died', (error) => {
worker.on('died', error => {
// This implies something serious happened, so kill the application
console.error('mediasoup worker has died', error);
setTimeout(() => process.exit(1), 2000); // exit in 2 seconds
});
})
return worker;
} catch (error) {
console.error(`[createWorker] | ERROR | error: ${error.message}`);
console.log(`ERROR | createWorker | ${error.message}`);
}
};
}
// We create a Worker as soon as our application starts
worker = createWorker();
@ -87,53 +87,57 @@ worker = createWorker();
// https://mediasoup.org/documentation/v3/mediasoup/rtp-parameters-and-capabilities/#RtpCodecCapability
// list of media codecs supported by mediasoup ...
// https://github.com/versatica/mediasoup/blob/v3/src/supportedRtpCapabilities.ts
const mediaCodecs = [
const mediaCodecs = [
{
kind: 'audio',
mimeType: 'audio/opus',
clockRate: 48000,
channels: 2,
kind : 'audio',
mimeType : 'audio/opus',
clockRate : 48000,
channels : 2
},
{
kind: 'video',
mimeType: 'video/VP8',
clockRate: 90000,
parameters: {
'x-google-start-bitrate': 1000,
kind : 'video',
mimeType : 'video/VP8',
clockRate : 90000,
parameters :
{
'x-google-start-bitrate' : 1000
},
channels: 2,
channels : 2
},
{
kind: 'video',
mimeType: 'video/VP9',
clockRate: 90000,
parameters: {
'profile-id': 2,
'x-google-start-bitrate': 1000,
},
kind : 'video',
mimeType : 'video/VP9',
clockRate : 90000,
parameters :
{
'profile-id' : 2,
'x-google-start-bitrate' : 1000
}
},
{
kind: 'video',
mimeType: 'video/h264',
clockRate: 90000,
parameters: {
'packetization-mode': 1,
'profile-level-id': '4d0032',
'level-asymmetry-allowed': 1,
'x-google-start-bitrate': 1000,
},
kind : 'video',
mimeType : 'video/h264',
clockRate : 90000,
parameters :
{
'packetization-mode' : 1,
'profile-level-id' : '4d0032',
'level-asymmetry-allowed' : 1,
'x-google-start-bitrate' : 1000
}
},
{
kind: 'video',
mimeType: 'video/h264',
clockRate: 90000,
parameters: {
'packetization-mode': 1,
'profile-level-id': '42e01f',
'level-asymmetry-allowed': 1,
'x-google-start-bitrate': 1000,
},
},
kind : 'video',
mimeType : 'video/h264',
clockRate : 90000,
parameters :
{
'packetization-mode' : 1,
'profile-level-id' : '42e01f',
'level-asymmetry-allowed' : 1,
'x-google-start-bitrate' : 1000
}
}
];
const closeCall = (callId) => {
@ -148,23 +152,24 @@ const closeCall = (callId) => {
videoCalls[callId]?.receiverProducerTransport?.close();
videoCalls[callId]?.router?.close();
delete videoCalls[callId];
console.log(`[closeCall] | callId: ${callId}`);
} else {
console.log(`The call with id ${callId} has already been deleted`);
}
} catch (error) {
console.error(`[closeCall] | ERROR | callId: ${callId} | error: ${error.message}`);
console.log(`ERROR | closeCall | callid ${callId} | ${error.message}`);
}
};
}
/*
- Handlers for WS events
- These are created only when we have a connection with a peer
*/
peers.on('connection', async (socket) => {
peers.on('connection', async socket => {
console.log('[connection] socketId:', socket.id);
// After making the connection successfully, we send the client a 'connection-success' event
socket.emit('connection-success', {
socketId: socket.id,
socketId: socket.id
});
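// A minimal client-side counterpart, assuming socket.io-client and the events
// handled in this file (the URL and the callId value are illustrative
// assumptions, not taken from this diff):
// const socket = io(mediasoupAddress);
// socket.on('connection-success', ({ socketId }) => {
//   socket.emit('createRoom', { callId: 123 }, ({ rtpCapabilities }) => {
//     // load a mediasoup-client Device with these rtpCapabilities,
//     // then emit 'createWebRtcTransport' for the send/recv transports
//   });
// });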
// It is triggered when the peer is disconnected
@ -187,22 +192,24 @@ peers.on('connection', async (socket) => {
if (callId) {
console.log(`[createRoom] socket.id ${socket.id} callId ${callId}`);
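// The first peer that emits 'createRoom' for a callId creates the router and is
// registered as the receiver; the second peer for the same callId becomes the initiator.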
if (!videoCalls[callId]) {
videoCalls[callId] = { router: await worker.createRouter({ mediaCodecs }) };
console.log(`[createRoom] Generate Router ID: ${videoCalls[callId].router.id}`);
videoCalls[callId].receiverSocket = socket;
console.log('[createRoom] callId', callId);
videoCalls[callId] = { router: await worker.createRouter({ mediaCodecs }) }
console.log(`[createRoom] Router ID: ${videoCalls[callId].router.id}`);
videoCalls[callId].receiverSocketId = socket.id
} else {
videoCalls[callId].initiatorSocket = socket;
videoCalls[callId].initiatorSockerId = socket.id
}
socketDetails[socket.id] = callId;
// rtpCapabilities is set for callback
callbackResponse = {
rtpCapabilities: videoCalls[callId].router.rtpCapabilities,
console.log('[getRtpCapabilities] callId', callId);
callbackResponse = {
rtpCapabilities :videoCalls[callId].router.rtpCapabilities
};
} else {
console.log(`[createRoom] missing callId: ${callId}`);
console.log(`[createRoom] missing callId ${callId}`);
}
} catch (error) {
console.error(`[createRoom] | ERROR | callId: ${callId} | error: ${error.message}`);
console.log(`ERROR | createRoom | callId ${callId} | ${error.message}`);
} finally {
callback(callbackResponse);
}
@ -218,30 +225,27 @@ peers.on('connection', async (socket) => {
*/
socket.on('createWebRtcTransport', async ({ sender }, callback) => {
try {
console.log('🟥', socket.id, JSON.stringify(sender));
const callId = socketDetails[socket.id];
console.log(`[createWebRtcTransport] socket ${socket.id} | sender ${sender} | callId ${callId}`);
if (sender) {
if (!videoCalls[callId].receiverProducerTransport && !isInitiator(callId, socket.id)) {
if(!videoCalls[callId].receiverProducerTransport && !isInitiator(callId, socket.id)) {
videoCalls[callId].receiverProducerTransport = await createWebRtcTransportLayer(callId, callback);
} else if (!videoCalls[callId].initiatorProducerTransport && isInitiator(callId, socket.id)) {
} else if(!videoCalls[callId].initiatorProducerTransport && isInitiator(callId, socket.id)) {
videoCalls[callId].initiatorProducerTransport = await createWebRtcTransportLayer(callId, callback);
} else {
console.log(`producerTransport has already been defined | callId ${callId}`);
callback(null);
}
} else if (!sender) {
if (!videoCalls[callId].receiverConsumerTransport && !isInitiator(callId, socket.id)) {
if(!videoCalls[callId].receiverConsumerTransport && !isInitiator(callId, socket.id)) {
videoCalls[callId].receiverConsumerTransport = await createWebRtcTransportLayer(callId, callback);
} else if (!videoCalls[callId].initiatorConsumerTransport && isInitiator(callId, socket.id)) {
} else if(!videoCalls[callId].initiatorConsumerTransport && isInitiator(callId, socket.id)) {
videoCalls[callId].initiatorConsumerTransport = await createWebRtcTransportLayer(callId, callback);
}
}
} catch (error) {
console.error(
`[createWebRtcTransport] | ERROR | callId: ${socketDetails[socket.id]} | sender: ${sender} | error: ${
error.message
}`
);
console.log(`ERROR | createWebRtcTransport | callId ${socketDetails[socket.id]} | sender ${sender} | ${error.message}`);
callback(error);
}
});
@ -256,12 +260,13 @@ peers.on('connection', async (socket) => {
if (typeof dtlsParameters === 'string') dtlsParameters = JSON.parse(dtlsParameters);
console.log(`[transport-connect] socket ${socket.id} | callId ${callId}`);
isInitiator(callId, socket.id)
? await videoCalls[callId].initiatorProducerTransport.connect({ dtlsParameters })
: await videoCalls[callId].receiverProducerTransport.connect({ dtlsParameters });
if (!isInitiator(callId, socket.id)) {
await videoCalls[callId].receiverProducerTransport.connect({ dtlsParameters });
} else {
await videoCalls[callId].initiatorProducerTransport.connect({ dtlsParameters });
}
} catch (error) {
console.error(`[transport-connect] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`);
console.log(`ERROR | transport-connect | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
@ -274,9 +279,8 @@ peers.on('connection', async (socket) => {
try {
const callId = socketDetails[socket.id];
if (typeof rtpParameters === 'string') rtpParameters = JSON.parse(rtpParameters);
console.log(`[transport-produce] callId: ${callId} | kind: ${kind} | socket: ${socket.id}`);
console.log(`[transport-produce] kind: ${kind} | socket: ${socket.id} | callId: ${callId}`);
if (kind === 'video') {
if (!isInitiator(callId, socket.id)) {
videoCalls[callId].receiverVideoProducer = await videoCalls[callId].receiverProducerTransport.produce({
@ -284,31 +288,35 @@ peers.on('connection', async (socket) => {
rtpParameters,
});
console.log(`[transport-produce] receiverVideoProducer Producer ID: ${videoCalls[callId].receiverVideoProducer.id} | kind: ${videoCalls[callId].receiverVideoProducer.kind}`);
videoCalls[callId].receiverVideoProducer.on('transportclose', () => {
console.log('transport for this producer closed', callId);
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
// Send back to the client the Producer's id
callback &&
callback({
id: videoCalls[callId].receiverVideoProducer.id,
});
callback && callback({
id: videoCalls[callId].receiverVideoProducer.id
});
} else {
videoCalls[callId].initiatorVideoProducer = await videoCalls[callId].initiatorProducerTransport.produce({
kind,
rtpParameters,
});
console.log(`[transport-produce] initiatorVideoProducer Producer ID: ${videoCalls[callId].initiatorVideoProducer.id} | kind: ${videoCalls[callId].initiatorVideoProducer.kind}`);
videoCalls[callId].initiatorVideoProducer.on('transportclose', () => {
console.log('transport for this producer closed', callId);
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
callback &&
callback({
id: videoCalls[callId].initiatorVideoProducer.id,
});
callback && callback({
id: videoCalls[callId].initiatorVideoProducer.id
});
}
} else if (kind === 'audio') {
if (!isInitiator(callId, socket.id)) {
@ -317,44 +325,40 @@ peers.on('connection', async (socket) => {
rtpParameters,
});
console.log(`[transport-produce] receiverAudioProducer Producer ID: ${videoCalls[callId].receiverAudioProducer.id} | kind: ${videoCalls[callId].receiverAudioProducer.kind}`);
videoCalls[callId].receiverAudioProducer.on('transportclose', () => {
console.log('transport for this producer closed', callId);
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
// Send back to the client the Producer's id
callback &&
callback({
id: videoCalls[callId].receiverAudioProducer.id,
});
callback && callback({
id: videoCalls[callId].receiverAudioProducer.id
});
} else {
videoCalls[callId].initiatorAudioProducer = await videoCalls[callId].initiatorProducerTransport.produce({
kind,
rtpParameters,
});
console.log(`[transport-produce] initiatorAudioProducer Producer ID: ${videoCalls[callId].initiatorAudioProducer.id} | kind: ${videoCalls[callId].initiatorAudioProducer.kind}`);
videoCalls[callId].initiatorAudioProducer.on('transportclose', () => {
console.log('transport for this producer closed', callId);
const callId = socketDetails[socket.id];
console.log('transport for this producer closed', callId)
closeCall(callId);
});
// Send back to the client the Producer's id
callback &&
callback({
id: videoCalls[callId].initiatorAudioProducer.id,
});
callback && callback({
id: videoCalls[callId].initiatorAudioProducer.id
});
}
}
const socketToEmit = isInitiator(callId, socket.id)
? videoCalls[callId].receiverSocket
: videoCalls[callId].initiatorSocket;
// callId - Id of the call
// kind - producer type: audio/video
socketToEmit?.emit('new-producer', { callId, kind });
} catch (error) {
console.error(`[transport-produce] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`);
console.log(`ERROR | transport-produce | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
@ -366,17 +370,16 @@ peers.on('connection', async (socket) => {
try {
const callId = socketDetails[socket.id];
console.log(`[transport-recv-connect] socket ${socket.id} | callId ${callId}`);
if (typeof dtlsParameters === 'string') dtlsParameters = JSON.parse(dtlsParameters);
// await videoCalls[callId].consumerTransport.connect({ dtlsParameters });
if (!isInitiator(callId, socket.id)) {
if(!isInitiator(callId, socket.id)) {
await videoCalls[callId].receiverConsumerTransport.connect({ dtlsParameters });
} else if (isInitiator(callId, socket.id)) {
} else if(isInitiator(callId, socket.id)) {
await videoCalls[callId].initiatorConsumerTransport.connect({ dtlsParameters });
}
} catch (error) {
console.error(`[transport-recv-connect] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`);
console.log(`ERROR | transport-recv-connect | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
})
/*
- The client consumes after successfully connecting to the consumerTransport
@ -386,156 +389,196 @@ peers.on('connection', async (socket) => {
- The consumer does consumerTransport.consume(params)
*/
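// Shape of the reply passed to callback() below — a sketch based on the return
// values of consumeVideo()/consumeAudio() further down; either field can be null
// when the corresponding producer does not exist or cannot be consumed:
// {
//   videoParams: { id, producerId, kind: 'video', rtpParameters },
//   audioParams: { id, producerId, kind: 'audio', rtpParameters },
// }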
socket.on('consume', async ({ rtpCapabilities }, callback) => {
const callId = socketDetails[socket.id];
const socketId = socket.id;
try {
const callId = socketDetails[socket.id];
console.log(`[consume] socket ${socket.id} | callId ${callId} | rtpCapabilities: ${JSON.stringify(rtpCapabilities)}`);
console.log(`[consume] socket ${socketId} | callId: ${callId}`);
console.log('[consume] callId', callId);
let canConsumeVideo, canConsumeAudio;
if (isInitiator(callId, socket.id)) {
canConsumeVideo = !!videoCalls[callId].receiverVideoProducer && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].receiverVideoProducer.id,
rtpCapabilities
});
canConsumeAudio = !!videoCalls[callId].receiverAudioProducer && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].receiverAudioProducer.id,
rtpCapabilities
});
if (typeof rtpCapabilities === 'string') rtpCapabilities = JSON.parse(rtpCapabilities);
} else {
canConsumeVideo = !!videoCalls[callId].initiatorVideoProducer && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].initiatorVideoProducer.id,
rtpCapabilities
});
callback({
videoParams: await consumeVideo({ callId, socketId, rtpCapabilities }),
audioParams: await consumeAudio({ callId, socketId, rtpCapabilities }),
});
canConsumeAudio = !!videoCalls[callId].initiatorAudioProducer && !!videoCalls[callId].router.canConsume({
producerId: videoCalls[callId].initiatorAudioProducer.id,
rtpCapabilities
});
}
console.log('[consume] canConsumeVideo', canConsumeVideo);
console.log('[consume] canConsumeAudio', canConsumeAudio);
if (canConsumeVideo && !canConsumeAudio) {
const videoParams = await consumeVideo(callId, socket.id, rtpCapabilities)
callback({ videoParams, audioParams: null });
} else if (canConsumeVideo && canConsumeAudio) {
const videoParams = await consumeVideo(callId, socket.id, rtpCapabilities)
const audioParams = await consumeAudio(callId, socket.id, rtpCapabilities)
callback({ videoParams, audioParams });
} else if (!canConsumeVideo && canConsumeAudio) {
const audioParams = await consumeAudio(callId, socket.id, rtpCapabilities)
callback({ videoParams: null, audioParams });
} else {
console.log(`[consume] Can't consume | callId ${callId}`);
callback(null);
}
} catch (error) {
console.log(`ERROR | consume | callId ${socketDetails[socket.id]} | ${error.message}`)
callback({ params: { error } });
}
});
/*
- Event sent by the consumer after consuming to resume the pause
- When consuming on consumerTransport, it is initially done with paused: true, here we will resume
- For the initiator we resume the initiatorConsumerAUDIO/VIDEO and for receiver the receiverConsumerAUDIO/VIDEO
- When consuming on consumerTransport, it is initially done with paused: true, here we will resume
*/
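// Consumers are created with paused: true in consumeVideo()/consumeAudio() below;
// resuming only after the client has set up its local consumer avoids sending RTP
// before the receiving side is ready to handle it.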
socket.on('consumer-resume', () => {
socket.on('consumer-resume', async () => {
try {
const callId = socketDetails[socket.id];
const isInitiatorValue = isInitiator(callId, socket.id);
console.log(`[consumer-resume] callId: ${callId} | isInitiator: ${isInitiatorValue}`);
console.log(`[consumer-resume] callId ${callId}`)
const consumerVideo = isInitiatorValue
? videoCalls[callId].initiatorConsumerVideo
: videoCalls[callId].receiverConsumerVideo;
const consumerAudio = isInitiatorValue
? videoCalls[callId].initiatorConsumerAudio
: videoCalls[callId].receiverConsumerAudio;
consumerVideo?.resume();
consumerAudio?.resume();
} catch (error) {
console.error(
`[consumer-resume] | ERROR | callId: ${socketDetails[socket.id]} | isInitiator: ${isInitiator} | error: ${
error.message
}`
);
}
});
socket.on('close-producer', ({ callId, kind }) => {
try {
if (isInitiator(callId, socket.id)) {
console.log(`[close-producer] initiator --EMIT--> receiver | callId: ${callId} | kind: ${kind}`);
videoCalls[callId].receiverSocket.emit('close-producer', { callId, kind });
await videoCalls[callId].initiatorConsumerVideo.resume();
await videoCalls[callId].initiatorConsumerAudio.resume();
} else {
console.log(`[close-producer] receiver --EMIT--> initiator | callId: ${callId} | kind: ${kind}`);
videoCalls[callId].initiatorSocket.emit('close-producer', { callId, kind });
await videoCalls[callId].receiverConsumerVideo.resume();
await videoCalls[callId].receiverConsumerAudio.resume();
}
// await videoCalls[callId].consumerVideo.resume();
// await videoCalls[callId].consumerAudio.resume();
} catch (error) {
console.error(`[close-producer] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`);
console.log(`ERROR | consumer-resume | callId ${socketDetails[socket.id]} | ${error.message}`);
}
});
});
const canConsume = ({ callId, producerId, rtpCapabilities }) => {
return !!videoCalls[callId].router.canConsume({
producerId,
rtpCapabilities,
});
};
const consumeVideo = async ({ callId, socketId, rtpCapabilities }) => {
// Handlers for consumer transport https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
if (isInitiator(callId, socketId) && videoCalls[callId].receiverVideoProducer) {
const producerId = videoCalls[callId].receiverVideoProducer.id;
if (!canConsume({ callId, producerId, rtpCapabilities })) return null;
const consumeVideo = async (callId, socketId, rtpCapabilities) => {
if(isInitiator(callId, socketId)) {
videoCalls[callId].initiatorConsumerVideo = await videoCalls[callId].initiatorConsumerTransport.consume({
producerId,
producerId: videoCalls[callId].receiverVideoProducer.id,
rtpCapabilities,
paused: true,
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
videoCalls[callId].initiatorConsumerVideo.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport close from consumer', callId);
closeCall(callId);
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-producerclose
videoCalls[callId].initiatorConsumerVideo.on('producerclose', () => {
const callId = socketDetails[socket.id];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].initiatorConsumerVideo.id,
producerId,
producerId: videoCalls[callId].receiverVideoProducer.id,
kind: 'video',
rtpParameters: videoCalls[callId].initiatorConsumerVideo.rtpParameters,
};
} else if (videoCalls[callId].initiatorVideoProducer) {
const producerId = videoCalls[callId].initiatorVideoProducer.id;
if (!canConsume({ callId, producerId, rtpCapabilities })) return null;
}
} else {
videoCalls[callId].receiverConsumerVideo = await videoCalls[callId].receiverConsumerTransport.consume({
producerId,
producerId: videoCalls[callId].initiatorVideoProducer.id,
rtpCapabilities,
paused: true,
});
videoCalls[callId].receiverConsumerVideo.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport close from consumer', callId);
closeCall(callId);
});
videoCalls[callId].receiverConsumerVideo.on('producerclose', () => {
const callId = socketDetails[socket.id];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].receiverConsumerVideo.id,
producerId,
producerId: videoCalls[callId].initiatorVideoProducer.id,
kind: 'video',
rtpParameters: videoCalls[callId].receiverConsumerVideo.rtpParameters,
};
} else {
return null;
}
};
const consumeAudio = async ({ callId, socketId, rtpCapabilities }) => {
try {
// Handlers for consumer transport https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
if (isInitiator(callId, socketId) && videoCalls[callId].receiverAudioProducer) {
const producerId = videoCalls[callId].receiverAudioProducer.id;
if (!canConsume({ callId, producerId, rtpCapabilities })) return null;
videoCalls[callId].initiatorConsumerAudio = await videoCalls[callId].initiatorConsumerTransport.consume({
producerId,
rtpCapabilities,
paused: true,
});
return {
id: videoCalls[callId].initiatorConsumerAudio.id,
producerId,
kind: 'audio',
rtpParameters: videoCalls[callId].initiatorConsumerAudio.rtpParameters,
};
} else if (videoCalls[callId].initiatorAudioProducer) {
const producerId = videoCalls[callId].initiatorAudioProducer.id;
if (!canConsume({ callId, producerId, rtpCapabilities })) return null;
videoCalls[callId].receiverConsumerAudio = await videoCalls[callId].receiverConsumerTransport.consume({
producerId,
rtpCapabilities,
paused: true,
});
return {
id: videoCalls[callId].receiverConsumerAudio.id,
producerId,
kind: 'audio',
rtpParameters: videoCalls[callId].receiverConsumerAudio.rtpParameters,
};
} else {
return null;
}
} catch (error) {
console.error(`[consumeAudio] | ERROR | error: ${error}`);
}
};
}
const consumeAudio = async (callId, socketId, rtpCapabilities) => {
if(isInitiator(callId, socketId)) {
videoCalls[callId].initiatorConsumerAudio = await videoCalls[callId].initiatorConsumerTransport.consume({
producerId: videoCalls[callId].receiverAudioProducer.id,
rtpCapabilities,
paused: true,
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-transportclose
videoCalls[callId].initiatorConsumerAudio.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport close from consumer', callId);
closeCall(callId);
});
// https://mediasoup.org/documentation/v3/mediasoup/api/#consumer-on-producerclose
videoCalls[callId].initiatorConsumerAudio.on('producerclose', () => {
const callId = socketDetails[socket.id];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].initiatorConsumerAudio.id,
producerId: videoCalls[callId].receiverAudioProducer.id,
kind: 'audio',
rtpParameters: videoCalls[callId].initiatorConsumerAudio.rtpParameters,
}
} else {
videoCalls[callId].receiverConsumerAudio = await videoCalls[callId].receiverConsumerTransport.consume({
producerId: videoCalls[callId].initiatorAudioProducer.id,
rtpCapabilities,
paused: true,
});
videoCalls[callId].receiverConsumerAudio.on('transportclose', () => {
const callId = socketDetails[socket.id];
console.log('transport close from consumer', callId);
closeCall(callId);
});
videoCalls[callId].receiverConsumerAudio.on('producerclose', () => {
const callId = socketDetails[socket.id];
console.log('producer of consumer closed', callId);
closeCall(callId);
});
return {
id: videoCalls[callId].receiverConsumerAudio.id,
producerId: videoCalls[callId].initiatorAudioProducer.id,
kind: 'audio',
rtpParameters: videoCalls[callId].receiverConsumerAudio.rtpParameters,
}
}
}
const isInitiator = (callId, socketId) => {
return videoCalls[callId]?.initiatorSocket?.id === socketId;
};
return (videoCalls[callId].initiatorSockerId === socketId);
}
/*
- Called from at event 'createWebRtcTransport' and assigned to the consumer or producer transport
@ -546,25 +589,26 @@ const isInitiator = (callId, socketId) => {
*/
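// Environment variables referenced in this file, as an illustrative .env for
// local testing (the names come from the process.env usages above and below;
// the values are assumptions, not taken from the repository):
//   PORT=3000
//   IP=0.0.0.0                  # listenIps.ip
//   ANNOUNCED_IP=203.0.113.10   # public address when running behind NAT
//   RTC_MIN_PORT=2000
//   RTC_MAX_PORT=2020
//   SERVER_KEY=./ssl/key.pem
//   SERVER_CERT=./ssl/cert.pem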
const createWebRtcTransportLayer = async (callId, callback) => {
try {
console.log(`[createWebRtcTransportLayer] callId: ${callId}`);
console.log('[createWebRtcTransportLayer] callId', callId);
// https://mediasoup.org/documentation/v3/mediasoup/api/#WebRtcTransportOptions
const webRtcTransport_options = {
listenIps: [
{
ip: process.env.IP, // Listening IPv4 or IPv6.
announcedIp: process.env.ANNOUNCED_IP, // Announced IPv4 or IPv6 (useful when running mediasoup behind NAT with private IP).
},
}
],
enableUdp: true,
enableTcp: true,
preferUdp: true,
};
// https://mediasoup.org/documentation/v3/mediasoup/api/#router-createWebRtcTransport
let transport = await videoCalls[callId].router.createWebRtcTransport(webRtcTransport_options);
let transport = await videoCalls[callId].router.createWebRtcTransport(webRtcTransport_options)
console.log(`callId: ${callId} | transport id: ${transport.id}`)
// Handler for when DTLS(Datagram Transport Layer Security) changes
transport.on('dtlsstatechange', (dtlsState) => {
transport.on('dtlsstatechange', dtlsState => {
console.log(`transport | dtlsstatechange | callId ${callId} | dtlsState ${dtlsState}`);
if (dtlsState === 'closed') {
transport.close();
@ -583,15 +627,15 @@ const createWebRtcTransportLayer = async (callId, callback) => {
dtlsParameters: transport.dtlsParameters,
};
console.log('[createWebRtcTransportLayer] callback params', params);
// Send back to the client the params
callback({ params });
// Set transport to producerTransport or consumerTransport
return transport;
} catch (error) {
console.error(
`[createWebRtcTransportLayer] | ERROR | callId: ${socketDetails[socket.id]} | error: ${error.message}`
);
console.log(`ERROR | createWebRtcTransportLayer | callId ${socketDetails[socket.id]} | ${error.message}`);
callback({ params: { error } });
}
};
}


@ -9,21 +9,14 @@ else
## CLEANUP
rm -fr dist/*
fi
if [ -d "node_modules" ]; then
rm -fr node_modules
fi
# Install dependencies
#npm install
## PROJECT NEEDS
echo "Building app... from $(git rev-parse --abbrev-ref HEAD)"
#npm run-script build
cp -r {.env,app.js,package.json,server,public,doc,Dockerfile} dist/
#cp -r ./* dist/
dateString=$(date +"%Y%m%d-%H%M%S")
git log --pretty=format:"%ad%x09%an%x09%s" --no-merges -20 > "dist/git--$dateString.log"
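# e.g. dist/git--20221216-141026.log, tab-separated: date, author, subject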
cp -r {.env,app.js,package.json,server,public} dist/
#Add version control for pm2
cd dist
#Add version control for pm2
@ -50,4 +43,4 @@ fi
## POST BUILD
cd -
cd -

9 binary image files not shown (Before sizes: 614 KiB, 994 KiB, 354 KiB, 794 KiB, 462 KiB, 252 KiB, 346 KiB, 407 KiB, 439 KiB).


@ -20353,7 +20353,7 @@ module.exports = yeast;
},{}],94:[function(require,module,exports){
module.exports = {
hubAddress: 'https://hub.dev.linx.safemobile.com/',
mediasoupAddress: 'https://testing.video.safemobile.org/',
mediasoupAddress: 'https://testing.video.safemobile.org',
}
},{}],95:[function(require,module,exports){
const io = require('socket.io-client')
@ -20368,24 +20368,10 @@ const ASSET_NAME = urlParams.get('assetName') || null;
const ASSET_TYPE = urlParams.get('assetType') || null;
let callId = parseInt(urlParams.get('callId')) || null;
const IS_PRODUCER = urlParams.get('producer') === 'true' ? true : false
let remoteVideo = document.getElementById('remoteVideo')
remoteVideo.defaultMuted = true
let produceAudio = false
console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId', callId, ' | IS_PRODUCER', IS_PRODUCER)
console.log('🟩 config', config)
produceAudioSelector = document.getElementById('produceAudio');
produceAudioSelector.addEventListener('change', e => {
if(e.target.checked) {
produceAudio = true
console.log('produce audio');
} else {
produceAudio = false
}
});
let socket, hub
let device
let rtpCapabilities
@ -20395,21 +20381,6 @@ let producerVideo
let producerAudio
let consumer
let originAssetId
let consumerVideo // local consumer video(consumer not transport)
let consumerAudio // local consumer audio(consumer not transport)
const remoteSoundControl = document.getElementById('remoteSoundControl');
remoteSoundControl.addEventListener('click', function handleClick() {
console.log('remoteSoundControl.textContent', remoteSoundControl.textContent);
if (remoteSoundControl.textContent === 'Unmute') {
remoteVideo.muted = false
remoteSoundControl.textContent = 'Mute';
} else {
remoteVideo.muted = true
remoteSoundControl.textContent = 'Unmute';
}
});
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
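// The videoParams/audioParams objects used below are defined outside this hunk; a
// minimal illustrative shape per the linked ProducerOptions docs (the encodings,
// codecOptions and how the captured track is attached are assumptions):
// let videoParams = {
//   encodings: [
//     { rid: 'r0', maxBitrate: 100000, scalabilityMode: 'S1T3' },
//     { rid: 'r1', maxBitrate: 300000, scalabilityMode: 'S1T3' },
//   ],
//   codecOptions: { videoGoogleStartBitrate: 1000 },
// };
// let audioParams = {};
// // the MediaStreamTrack from getUserMedia is added as `track` before produce()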
@ -20449,23 +20420,10 @@ setTimeout(() => {
console.log(`[MEDIA] ${config.mediasoupAddress} | connected: ${socket.connected} | existsProducer: ${existsProducer}`)
if (!IS_PRODUCER && existsProducer && consumer === undefined) {
goConnect()
// document.getElementById('btnRecvSendTransport').click();
}
if (IS_PRODUCER && urlParams.get('testing') === 'true') { getLocalStream() }
})
socket.on('new-producer', ({ callId, kind }) => {
console.log(`🟢 new-producer | callId: ${callId} | kind: ${kind} | Ready to consume`);
connectRecvTransport();
})
socket.on('close-producer', ({ callId, kind }) => {
console.log(`🔴 close-producer | callId: ${callId} | kind: ${kind}`);
if (kind === 'video') {
consumerVideo.close()
remoteVideo.srcObject = null
}
else if (kind === 'audio') consumerAudio.close()
})
}
if (IS_PRODUCER === true) {
@ -20544,7 +20502,7 @@ const streamSuccess = (stream) => {
const getLocalStream = () => {
console.log('[getLocalStream]');
navigator.mediaDevices.getUserMedia({
audio: produceAudio ? true : false,
audio: true,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
@ -20693,36 +20651,34 @@ const connectSendTransport = async () => {
// this action will trigger the 'connect' and 'produce' events above
// Produce video
let producerVideoHandler = await producerTransport.produce(videoParams)
producerVideo = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideo', producerVideo);
producerVideoHandler.on('trackended', () => {
producerVideo.on('trackended', () => {
console.log('track ended')
// close video track
})
producerVideoHandler.on('transportclose', () => {
producerVideo.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// Produce audio
if (produceAudio) {
let producerAudioHandler = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudioHandler.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudioHandler.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
}
producerAudio = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
const answer = {
origin_asset_id: ASSET_ID,
@ -20744,8 +20700,6 @@ const connectSendTransport = async () => {
// Enable Close call button
const closeCallBtn = document.getElementById('btnCloseCall');
closeCallBtn.removeAttribute('disabled');
createRecvTransport();
}
const createRecvTransport = async () => {
@ -20785,8 +20739,7 @@ const createRecvTransport = async () => {
errback(error)
}
})
// We call it in 'new-producer', so we don't need it here anymore
// connectRecvTransport()
connectRecvTransport()
})
}
@ -20809,91 +20762,36 @@ const connectRecvTransport = async () => {
await socket.emit('consume', {
rtpCapabilities: device.rtpCapabilities,
callId
}, async ({videoParams, audioParams}) => {
console.log(`[consume] 🟩 videoParams`, videoParams)
console.log(`[consume] 🟩 audioParams`, audioParams)
console.log('[consume] 🟩 consumerTransport', consumerTransport)
}, async ({ params }) => {
// if (params.error) {
// console.log('Cannot Consume')
// return
// }
console.log(`[connectRecvTransport] consume params ${params}`);
// Then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
producerId: params.producerId,
kind: params.kind,
rtpParameters: params.rtpParameters
})
// destructure and retrieve the video track from the producer
const { track } = consumer
let stream = new MediaStream()
// The unit may not produce video or audio, so we only consume what is actually produced
if (videoParams) {
console.log('❗ Have VIDEO stream to consume');
stream.addTrack(await getVideoTrask(videoParams))
} else {
console.log('❗ Don\'t have VIDEO stream to consume');
}
if (audioParams) {
console.log('❗ Have AUDIO stream to consume');
let audioTrack = await getAudioTrask(audioParams)
stream.addTrack(audioTrack)
} else {
console.log('❗ Don\'t have AUDIO stream to consume');
}
socket.emit('consumer-resume')
stream.addTrack(track)
// stream.removeTrack(track)
remoteVideo.srcObject = stream
remoteVideo.setAttribute('autoplay', true)
socket.emit('consumer-resume')
console.log('consumer', consumer);
remoteVideo.play()
.then(() => {
console.log('remoteVideo PLAY')
})
.catch((error) => {
console.error(`remoteVideo PLAY ERROR | ${error.message}`)
})
})
}
const getVideoTrask = async (videoParams) => {
consumerVideo = await consumerTransport.consume({
id: videoParams.id,
producerId: videoParams.producerId,
kind: videoParams.kind,
rtpParameters: videoParams.rtpParameters
})
return consumerVideo.track
}
const getAudioTrask = async (audioParams) => {
consumerAudio = await consumerTransport.consume({
id: audioParams.id,
producerId: audioParams.producerId,
kind: audioParams.kind,
rtpParameters: audioParams.rtpParameters
})
consumerAudio.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
const audioTrack = consumerAudio.track
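// Note: MediaStreamTrack.applyConstraints() expects a MediaTrackConstraints
// object directly; wrapping the options in { audio: { advanced: [...] } } means
// the browser will most likely ignore these constraints.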
audioTrack.applyConstraints({
audio: {
advanced: [
{
echoCancellation: {exact: true}
},
{
autoGainControl: {exact: true}
},
{
noiseSuppression: {exact: true}
},
{
highpassFilter: {exact: true}
}
]
}
})
return audioTrack
}
const closeCall = () => {
console.log('closeCall');
@ -20915,31 +20813,31 @@ const closeCall = () => {
resetCallSettings()
}
// const consume = async (kind) => {
// console.log(`[consume] kind: ${kind}`)
// console.log('createRecvTransport Consumer')
// await socket.emit('createWebRtcTransport', { sender: false, callId, dispatcher: true }, ({ params }) => {
// if (params.error) {
// console.log('createRecvTransport | createWebRtcTransport | Error', params.error)
// return
// }
// consumerTransport = device.createRecvTransport(params)
// consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
// try {
// await socket.emit('transport-recv-connect', {
// dtlsParameters,
// })
// callback()
// } catch (error) {
// errback(error)
// }
// })
const consume = async () => {
console.log('[consume]')
console.log('createRecvTransport Consumer')
await socket.emit('createWebRtcTransport', { sender: false, callId, dispatcher: true }, ({ params }) => {
if (params.error) {
console.log('createRecvTransport | createWebRtcTransport | Error', params.error)
return
}
consumerTransport = device.createRecvTransport(params)
consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
try {
await socket.emit('transport-recv-connect', {
dtlsParameters,
})
callback()
} catch (error) {
errback(error)
}
})
// connectRecvTransport()
// })
// }
connectRecvTransport()
})
}
btnLocalVideo.addEventListener('click', getLocalStream)
// btnRecvSendTransport.addEventListener('click', consume)
btnRecvSendTransport.addEventListener('click', consume)
btnCloseCall.addEventListener('click', closeCall)
},{"./config":94,"mediasoup-client":66,"socket.io-client":82}]},{},[95]);


@ -1,4 +1,4 @@
module.exports = {
hubAddress: 'https://hub.dev.linx.safemobile.com/',
mediasoupAddress: 'https://testing.video.safemobile.org/',
mediasoupAddress: 'https://testing.video.safemobile.org',
}


@ -34,9 +34,6 @@
<body>
<body>
<div id="video">
<legend>Client options:</legend>
<input type="checkbox" id="produceAudio" name="produceAudio">
<label for="produceAudio">Produce audio</label><br>
<table>
<thead>
<th>Local Video</th>
@ -46,24 +43,12 @@
<tr>
<td>
<div id="sharedBtns">
<video
id="localVideo"
class="video"
autoplay
muted
playsinline
></video>
<video id="localVideo" autoplay class="video" muted></video>
</div>
</td>
<td>
<div id="sharedBtns">
<video
id="remoteVideo"
class="video"
autoplay
muted
playsinline
></video>
<video id="remoteVideo" autoplay class="video" ></video>
</div>
</td>
</tr>
@ -75,11 +60,34 @@
</td>
<td>
<div id="sharedBtns">
<!-- <button id="btnRecvSendTransport">Consume</button> -->
<button id="remoteSoundControl">Unmute</button>
<button id="btnRecvSendTransport">Consume</button>
</div>
</td>
</tr>
<!-- <tr>
<td colspan="2">
<div id="sharedBtns">
<button id="btnRtpCapabilities">2. Get Rtp Capabilities</button>
<br />
<button id="btnDevice">3. Create Device</button>
</div>
</td>
</tr>
<tr>
<td>
<div id="sharedBtns">
<button id="btnCreateSendTransport">4. Create Send Transport</button>
<br />
<button id="btnConnectSendTransport">5. Connect Send Transport & Produce</button></td>
</div>
<td>
<div id="sharedBtns">
<button id="btnRecvSendTransport">6. Create Recv Transport</button>
<br />
<button id="btnConnectRecvTransport">7. Connect Recv Transport & Consume</button>
</div>
</td>
</tr> -->
</tbody>
</table>
<div id="closeCallBtn">


@ -10,24 +10,10 @@ const ASSET_NAME = urlParams.get('assetName') || null;
const ASSET_TYPE = urlParams.get('assetType') || null;
let callId = parseInt(urlParams.get('callId')) || null;
const IS_PRODUCER = urlParams.get('producer') === 'true' ? true : false
let remoteVideo = document.getElementById('remoteVideo')
remoteVideo.defaultMuted = true
let produceAudio = false
console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId', callId, ' | IS_PRODUCER', IS_PRODUCER)
console.log('🟩 config', config)
produceAudioSelector = document.getElementById('produceAudio');
produceAudioSelector.addEventListener('change', e => {
if(e.target.checked) {
produceAudio = true
console.log('produce audio');
} else {
produceAudio = false
}
});
let socket, hub
let device
let rtpCapabilities
@ -37,21 +23,6 @@ let producerVideo
let producerAudio
let consumer
let originAssetId
let consumerVideo // local consumer video(consumer not transport)
let consumerAudio // local consumer audio(consumer not transport)
const remoteSoundControl = document.getElementById('remoteSoundControl');
remoteSoundControl.addEventListener('click', function handleClick() {
console.log('remoteSoundControl.textContent', remoteSoundControl.textContent);
if (remoteSoundControl.textContent === 'Unmute') {
remoteVideo.muted = false
remoteSoundControl.textContent = 'Mute';
} else {
remoteVideo.muted = true
remoteSoundControl.textContent = 'Unmute';
}
});
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
@ -91,23 +62,10 @@ setTimeout(() => {
console.log(`[MEDIA] ${config.mediasoupAddress} | connected: ${socket.connected} | existsProducer: ${existsProducer}`)
if (!IS_PRODUCER && existsProducer && consumer === undefined) {
goConnect()
// document.getElementById('btnRecvSendTransport').click();
}
if (IS_PRODUCER && urlParams.get('testing') === 'true') { getLocalStream() }
})
socket.on('new-producer', ({ callId, kind }) => {
console.log(`🟢 new-producer | callId: ${callId} | kind: ${kind} | Ready to consume`);
connectRecvTransport();
})
socket.on('close-producer', ({ callId, kind }) => {
console.log(`🔴 close-producer | callId: ${callId} | kind: ${kind}`);
if (kind === 'video') {
consumerVideo.close()
remoteVideo.srcObject = null
}
else if (kind === 'audio') consumerAudio.close()
})
}
if (IS_PRODUCER === true) {
@ -186,7 +144,7 @@ const streamSuccess = (stream) => {
const getLocalStream = () => {
console.log('[getLocalStream]');
navigator.mediaDevices.getUserMedia({
audio: produceAudio ? true : false,
audio: true,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
@ -335,36 +293,34 @@ const connectSendTransport = async () => {
// this action will trigger the 'connect' and 'produce' events above
// Produce video
let producerVideoHandler = await producerTransport.produce(videoParams)
producerVideo = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideo', producerVideo);
producerVideoHandler.on('trackended', () => {
producerVideo.on('trackended', () => {
console.log('track ended')
// close video track
})
producerVideoHandler.on('transportclose', () => {
producerVideo.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// Produce audio
if (produceAudio) {
let producerAudioHandler = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudioHandler.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudioHandler.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
}
producerAudio = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
const answer = {
origin_asset_id: ASSET_ID,
@ -386,8 +342,6 @@ const connectSendTransport = async () => {
// Enable Close call button
const closeCallBtn = document.getElementById('btnCloseCall');
closeCallBtn.removeAttribute('disabled');
createRecvTransport();
}
const createRecvTransport = async () => {
@ -427,8 +381,7 @@ const createRecvTransport = async () => {
errback(error)
}
})
// We call it in 'new-producer', so we don't need it here anymore
// connectRecvTransport()
connectRecvTransport()
})
}
@ -451,91 +404,36 @@ const connectRecvTransport = async () => {
await socket.emit('consume', {
rtpCapabilities: device.rtpCapabilities,
callId
}, async ({videoParams, audioParams}) => {
console.log(`[consume] 🟩 videoParams`, videoParams)
console.log(`[consume] 🟩 audioParams`, audioParams)
console.log('[consume] 🟩 consumerTransport', consumerTransport)
}, async ({ params }) => {
// if (params.error) {
// console.log('Cannot Consume')
// return
// }
console.log(`[connectRecvTransport] consume params ${params}`);
// Then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
producerId: params.producerId,
kind: params.kind,
rtpParameters: params.rtpParameters
})
// destructure and retrieve the video track from the producer
const { track } = consumer
let stream = new MediaStream()
// The unit may not produce video or audio, so we only consume what is actually produced
if (videoParams) {
console.log('❗ Have VIDEO stream to consume');
stream.addTrack(await getVideoTrask(videoParams))
} else {
console.log('❗ Don\'t have VIDEO stream to consume');
}
if (audioParams) {
console.log('❗ Have AUDIO stream to consume');
let audioTrack = await getAudioTrask(audioParams)
stream.addTrack(audioTrack)
} else {
console.log('❗ Don\'t have AUDIO stream to consume');
}
socket.emit('consumer-resume')
stream.addTrack(track)
// stream.removeTrack(track)
remoteVideo.srcObject = stream
remoteVideo.setAttribute('autoplay', true)
socket.emit('consumer-resume')
console.log('consumer', consumer);
remoteVideo.play()
.then(() => {
console.log('remoteVideo PLAY')
})
.catch((error) => {
console.error(`remoteVideo PLAY ERROR | ${error.message}`)
})
})
}
const getVideoTrask = async (videoParams) => {
consumerVideo = await consumerTransport.consume({
id: videoParams.id,
producerId: videoParams.producerId,
kind: videoParams.kind,
rtpParameters: videoParams.rtpParameters
})
return consumerVideo.track
}
const getAudioTrask = async (audioParams) => {
consumerAudio = await consumerTransport.consume({
id: audioParams.id,
producerId: audioParams.producerId,
kind: audioParams.kind,
rtpParameters: audioParams.rtpParameters
})
consumerAudio.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
const audioTrack = consumerAudio.track
audioTrack.applyConstraints({
audio: {
advanced: [
{
echoCancellation: {exact: true}
},
{
autoGainControl: {exact: true}
},
{
noiseSuppression: {exact: true}
},
{
highpassFilter: {exact: true}
}
]
}
})
return audioTrack
}
const closeCall = () => {
console.log('closeCall');
@ -557,30 +455,30 @@ const closeCall = () => {
resetCallSettings()
}
// const consume = async (kind) => {
// console.log(`[consume] kind: ${kind}`)
// console.log('createRecvTransport Consumer')
// await socket.emit('createWebRtcTransport', { sender: false, callId, dispatcher: true }, ({ params }) => {
// if (params.error) {
// console.log('createRecvTransport | createWebRtcTransport | Error', params.error)
// return
// }
// consumerTransport = device.createRecvTransport(params)
// consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
// try {
// await socket.emit('transport-recv-connect', {
// dtlsParameters,
// })
// callback()
// } catch (error) {
// errback(error)
// }
// })
const consume = async () => {
console.log('[consume]')
console.log('createRecvTransport Consumer')
await socket.emit('createWebRtcTransport', { sender: false, callId, dispatcher: true }, ({ params }) => {
if (params.error) {
console.log('createRecvTransport | createWebRtcTransport | Error', params.error)
return
}
consumerTransport = device.createRecvTransport(params)
consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
try {
await socket.emit('transport-recv-connect', {
dtlsParameters,
})
callback()
} catch (error) {
errback(error)
}
})
// connectRecvTransport()
// })
// }
connectRecvTransport()
})
}
btnLocalVideo.addEventListener('click', getLocalStream)
// btnRecvSendTransport.addEventListener('click', consume)
btnRecvSendTransport.addEventListener('click', consume)
btnCloseCall.addEventListener('click', closeCall)