Compare commits

..

27 Commits

Author SHA1 Message Date
a31e646e2b Update 2022-12-16 14:10:26 +02:00
fe792f93b6 Update 2022-12-16 14:10:07 +02:00
dafbc486ad Update 2022-12-16 14:00:16 +02:00
b606a72030 Update 2022-12-16 13:43:55 +02:00
c174e92e3c Update 2022-12-16 13:33:13 +02:00
449724537e Update 2022-12-16 12:01:20 +02:00
9634aac153 Update 2022-12-16 11:59:10 +02:00
e0bc4642cb Update 2022-12-16 11:31:26 +02:00
f950142188 Update 2022-12-16 11:28:57 +02:00
5ba1f76585 Update 2022-12-16 11:23:10 +02:00
dc9c91fccc Update 2022-12-16 11:10:30 +02:00
5abcddc115 Update 2022-12-16 11:02:08 +02:00
bf65221664 Update 2022-12-16 10:54:25 +02:00
5687569bc1 Update 2022-12-16 10:47:13 +02:00
44c8d9b8ee Update 2022-12-16 02:15:26 +02:00
0a6985f9b9 Update 2022-12-16 02:12:22 +02:00
d29def364c Update 2022-12-16 02:10:10 +02:00
acd6025f59 Update 2022-12-16 02:07:01 +02:00
4b0c06e0b0 Added socket id to createWebRtcTransport 2022-12-16 02:02:38 +02:00
c1fe524ec7 LINXD-2270: Remove commented code; Update comments/logs 2022-12-14 11:57:19 +02:00
f8fcfb3165 Fix isInitiator in transport-produce 2022-12-14 11:33:16 +02:00
d324528d52 Added logs on transport-produce 2022-12-14 11:05:40 +02:00
d1eb7afc3a Added logs on createRoom on videoCalls 2022-12-14 10:15:34 +02:00
695964d342 Refactor code to use initiator/receiver 2022-12-14 09:55:45 +02:00
3ca555ef9e Set initiatorSocketId to be dispatcher 2022-12-13 13:23:15 +02:00
92fbecc36a Set initiatorSocketId to be dispatcher 2022-12-13 13:04:43 +02:00
d633eec92f Add socket info 2022-12-13 10:28:45 +02:00
12 changed files with 142 additions and 336 deletions

app.js (72 lines changed)

@@ -13,40 +13,40 @@ try {
}
const mediasoup = require('mediasoup');
let worker;
let worker
/**
*
* videoCalls - Dictionary of Object(s)
* '<callId>': {
* router: Router,
* router: Router, router
* initiatorAudioProducer: Producer,
* initiatorVideoProducer: Producer,
* receiverVideoProducer: Producer,
* receiverAudioProducer: Producer,
* receiverVideoProducer: Producer, producerVideo
* receiverAudioProducer: Producer, producerAudio
* initiatorProducerTransport: Producer Transport,
* receiverProducerTransport: Producer Transport,
* initiatorConsumerVideo: Consumer,
* initiatorConsumerAudio: Consumer,
* initiatorConsumerTransport: Consumer Transport
* initiatorSocket
* receiverSocket
* receiverProducerTransport: Producer Transport, producerTransport
* initiatorConsumerVideo: Consumer, consumerVideo
* initiatorConsumerAudio: Consumer, consumerAudio
* initiatorConsumerTransport: Consumer Transport consumerTransport
* initiatorSocketId
* receiverSocketId
* }
*
**/
let videoCalls = {};
let socketDetails = {};
let videoCalls = {}
let socketDetails = {}
app.get('/', (_req, res) => {
res.send('Hello from mediasoup app!')
});
})
app.use('/sfu', express.static(path.join(__dirname, 'public')));
app.use('/sfu', express.static(path.join(__dirname, 'public')))
// SSL cert for HTTPS access
const options = {
key: fs.readFileSync(process.env.SERVER_KEY, 'utf-8'),
cert: fs.readFileSync(process.env.SERVER_CERT, 'utf-8'),
};
}
const httpsServer = https.createServer(options, app);
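
For orientation, a minimal sketch of how these two dictionaries are meant to be used together: socketDetails maps a socket id to its callId, and videoCalls keeps the per-call mediasoup objects described in the comment above. The getCallState helper is illustrative, not part of the repository.

// Illustrative helper: resolve the per-call state for a connected socket.
const getCallState = (socketId) => {
  const callId = socketDetails[socketId];          // callId registered on 'createRoom'
  return callId ? videoCalls[callId] : undefined;  // { router, producers, transports, ... }
};
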
@@ -192,11 +192,12 @@ peers.on('connection', async socket => {
if (callId) {
console.log(`[createRoom] socket.id ${socket.id} callId ${callId}`);
if (!videoCalls[callId]) {
videoCalls[callId] = { router: await worker.createRouter({ mediaCodecs }) };
console.log('[createRoom] callId', callId);
videoCalls[callId] = { router: await worker.createRouter({ mediaCodecs }) }
console.log(`[createRoom] Router ID: ${videoCalls[callId].router.id}`);
videoCalls[callId].receiverSocket = socket;
videoCalls[callId].receiverSocketId = socket.id
} else {
videoCalls[callId].initiatorSocket = socket;
videoCalls[callId].initiatorSocketId = socket.id
}
socketDetails[socket.id] = callId;
// rtpCapabilities is set for callback
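
The createRoom handler above creates one mediasoup Router per callId and reuses it for the second participant. A minimal sketch of that lazy-creation step, assuming worker and mediaCodecs are initialized as elsewhere in app.js:

// Create the call's Router only once; a second socket joining the same
// callId reuses it and is treated as the other side of the call.
const getOrCreateRouter = async (callId) => {
  if (!videoCalls[callId]) {
    videoCalls[callId] = { router: await worker.createRouter({ mediaCodecs }) };
  }
  return videoCalls[callId].router;
};
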
@@ -224,6 +225,7 @@ peers.on('connection', async socket => {
*/
socket.on('createWebRtcTransport', async ({ sender }, callback) => {
try {
console.log('🟥', socket.id, JSON.stringify(sender));
const callId = socketDetails[socket.id];
console.log(`[createWebRtcTransport] socket ${socket.id} | sender ${sender} | callId ${callId}`);
if (sender) {
@@ -258,19 +260,11 @@ peers.on('connection', async socket => {
if (typeof dtlsParameters === 'string') dtlsParameters = JSON.parse(dtlsParameters);
console.log(`[transport-connect] socket ${socket.id} | callId ${callId}`);
if (!isInitiator(callId, socket.id)) {
await videoCalls[callId].receiverProducerTransport.connect({ dtlsParameters });
videoCalls[callId].initiatorSocket.emit('new-producer', {
callId
});
} else {
await videoCalls[callId].initiatorProducerTransport.connect({ dtlsParameters });
videoCalls[callId].receiverSocket.emit('new-producer', {
callId
});
}
} catch (error) {
console.log(`ERROR | transport-connect | callId ${socketDetails[socket.id]} | ${error.message}`);
}
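
In the hunk above, 'transport-connect' completes the DTLS handshake for whichever side sent it and, in the variant that emits 'new-producer', tells the opposite peer it can start consuming. A reduced sketch of that branching, using the fields documented in the videoCalls comment:

// Connect the sender's WebRtcTransport with the DTLS parameters from the
// client; the opposite side is then notified that a producer will appear.
socket.on('transport-connect', async ({ dtlsParameters }) => {
  const callId = socketDetails[socket.id];
  const call = videoCalls[callId];
  if (isInitiator(callId, socket.id)) {
    await call.initiatorProducerTransport.connect({ dtlsParameters });
    // call.receiverSocket.emit('new-producer', { callId });  // pre-refactor variant
  } else {
    await call.receiverProducerTransport.connect({ dtlsParameters });
    // call.initiatorSocket.emit('new-producer', { callId }); // pre-refactor variant
  }
});
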
@@ -287,7 +281,6 @@ peers.on('connection', async socket => {
if (typeof rtpParameters === 'string') rtpParameters = JSON.parse(rtpParameters);
console.log(`[transport-produce] kind: ${kind} | socket: ${socket.id} | callId: ${callId}`);
if (kind === 'video') {
if (!isInitiator(callId, socket.id)) {
videoCalls[callId].receiverVideoProducer = await videoCalls[callId].receiverProducerTransport.produce({
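
The 'transport-produce' handler above creates a server-side Producer from the client's RTP parameters and stores it on the call. A hedged sketch of that step; field names follow the videoCalls comment, and the callback shape is an assumption, not taken from the diff:

// Produce on the caller's send transport and store the Producer on the call,
// keyed by role (initiator/receiver) and kind (audio/video).
socket.on('transport-produce', async ({ kind, rtpParameters }, callback) => {
  const callId = socketDetails[socket.id];
  const call = videoCalls[callId];
  const initiator = isInitiator(callId, socket.id);
  const transport = initiator ? call.initiatorProducerTransport : call.receiverProducerTransport;
  const producer = await transport.produce({ kind, rtpParameters });
  if (kind === 'video') {
    call[initiator ? 'initiatorVideoProducer' : 'receiverVideoProducer'] = producer;
  } else {
    call[initiator ? 'initiatorAudioProducer' : 'receiverAudioProducer'] = producer;
  }
  callback({ id: producer.id }); // assumed callback shape
});
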
@@ -377,7 +370,6 @@ peers.on('connection', async socket => {
try {
const callId = socketDetails[socket.id];
console.log(`[transport-recv-connect] socket ${socket.id} | callId ${callId}`);
if (typeof dtlsParameters === 'string') dtlsParameters = JSON.parse(dtlsParameters);
// await videoCalls[callId].consumerTransport.connect({ dtlsParameters });
if(!isInitiator(callId, socket.id)) {
await videoCalls[callId].receiverConsumerTransport.connect({ dtlsParameters });
@@ -401,8 +393,6 @@ peers.on('connection', async socket => {
const callId = socketDetails[socket.id];
console.log(`[consume] socket ${socket.id} | callId ${callId} | rtpCapabilities: ${JSON.stringify(rtpCapabilities)}`);
if (typeof rtpCapabilities === 'string') rtpCapabilities = JSON.parse(rtpCapabilities);
console.log('[consume] callId', callId);
let canConsumeVideo, canConsumeAudio;
if (isInitiator(callId, socket.id)) {
@@ -438,8 +428,7 @@ peers.on('connection', async socket => {
callback({ videoParams, audioParams });
} else if (!canConsumeVideo && canConsumeAudio) {
const audioParams = await consumeAudio(callId, socket.id, rtpCapabilities)
const data = { videoParams: null, audioParams };
callback(data);
callback({ videoParams: null, audioParams });
} else {
console.log(`[consume] Can't consume | callId ${callId}`);
callback(null);
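
The consume branch above decides per kind whether the caller can receive media and returns videoParams/audioParams (or null) to the client. A minimal sketch of what such a video-consume helper typically does in mediasoup: verify canConsume, create a paused Consumer on the caller's consumer transport, and return the parameters the client needs. Field names follow the videoCalls comment; this is a sketch, not the diff's exact implementation:

// Create a paused video Consumer for the caller if its RTP capabilities allow it.
const consumeVideoSketch = async (callId, socketId, rtpCapabilities) => {
  const call = videoCalls[callId];
  const initiator = isInitiator(callId, socketId);
  const producer = initiator ? call.receiverVideoProducer : call.initiatorVideoProducer;
  if (!producer || !call.router.canConsume({ producerId: producer.id, rtpCapabilities })) return null;
  const transport = initiator ? call.initiatorConsumerTransport : call.receiverConsumerTransport;
  const consumer = await transport.consume({ producerId: producer.id, rtpCapabilities, paused: true });
  if (initiator) call.initiatorConsumerVideo = consumer; else call.receiverConsumerVideo = consumer;
  return { id: consumer.id, producerId: producer.id, kind: consumer.kind, rtpParameters: consumer.rtpParameters };
};
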
@@ -452,20 +441,22 @@ peers.on('connection', async socket => {
/*
- Event sent by the consumer after consuming to resume the pause
- When consuming on consumerTransport, it is initially done with paused: true, here we will resume
- For the initiator we resume the initiatorConsumerAUDIO/VIDEO and for receiver the receiverConsumerAUDIO/VIDEO
- When consuming on consumerTransport, it is initially done with paused: true, here we will resume
*/
socket.on('consumer-resume',() => {
socket.on('consumer-resume', async () => {
try {
const callId = socketDetails[socket.id];
console.log(`[consumer-resume] callId ${callId}`)
if (isInitiator(callId, socket.id)) {
videoCalls[callId]?.initiatorConsumerVideo?.resume();
videoCalls[callId]?.initiatorConsumerAudio?.resume();
await videoCalls[callId].initiatorConsumerVideo.resume();
await videoCalls[callId].initiatorConsumerAudio.resume();
} else {
videoCalls[callId]?.receiverConsumerVideo?.resume();
videoCalls[callId]?.receiverConsumerAudio?.resume();
await videoCalls[callId].receiverConsumerVideo.resume();
await videoCalls[callId].receiverConsumerAudio.resume();
}
// await videoCalls[callId].consumerVideo.resume();
// await videoCalls[callId].consumerAudio.resume();
} catch (error) {
console.log(`ERROR | consumer-resume | callId ${socketDetails[socket.id]} | ${error.message}`);
}
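
Because consumers are created with paused: true, 'consumer-resume' simply resumes whichever consumers belong to this side of the call. A compact sketch combining the two variants shown above (awaiting the resume promises while guarding with optional chaining):

// Resume the paused audio/video consumers for the caller's side of the call.
socket.on('consumer-resume', async () => {
  const callId = socketDetails[socket.id];
  const call = videoCalls[callId];
  if (isInitiator(callId, socket.id)) {
    await call.initiatorConsumerVideo?.resume();
    await call.initiatorConsumerAudio?.resume();
  } else {
    await call.receiverConsumerVideo?.resume();
    await call.receiverConsumerAudio?.resume();
  }
});
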
@@ -586,7 +577,7 @@ const consumeAudio = async (callId, socketId, rtpCapabilities) => {
}
const isInitiator = (callId, socketId) => {
return (videoCalls[callId]?.initiatorSocket?.id === socketId);
return (videoCalls[callId].initiatorSocketId === socketId);
}
/*
@@ -614,6 +605,7 @@ const createWebRtcTransportLayer = async (callId, callback) => {
// https://mediasoup.org/documentation/v3/mediasoup/api/#router-createWebRtcTransport
let transport = await videoCalls[callId].router.createWebRtcTransport(webRtcTransport_options)
console.log(`callId: ${callId} | transport id: ${transport.id}`)
// Handler for when DTLS(Datagram Transport Layer Security) changes
transport.on('dtlsstatechange', dtlsState => {
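
createWebRtcTransportLayer above builds the transport from webRtcTransport_options and then watches its DTLS state. A hedged sketch of what such options and the state handler commonly look like in mediasoup v3; the listenIps values are placeholders, not values from this repository:

// Placeholder transport options; real deployments announce a public IP.
const webRtcTransport_options = {
  listenIps: [{ ip: '0.0.0.0', announcedIp: 'PUBLIC_IP_PLACEHOLDER' }],
  enableUdp: true,
  enableTcp: true,
  preferUdp: true,
};
const transport = await videoCalls[callId].router.createWebRtcTransport(webRtcTransport_options);
// Close the transport once DTLS reaches the 'closed' state.
transport.on('dtlsstatechange', (dtlsState) => {
  if (dtlsState === 'closed') transport.close();
});
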


@@ -9,19 +9,13 @@ else
## CLEANUP
rm -fr dist/*
fi
if [ -d "node_modules" ]; then
rm -fr node_modules
fi
# Install dependencies
#npm install
## PROJECT NEEDS
echo "Building app... from $(git rev-parse --abbrev-ref HEAD)"
#npm run-script build
#cp -r {.env,app.js,package.json,server,public} dist/
cp -r ./* dist/
cp -r {.env,app.js,package.json,server,public} dist/
#Add version control for pm2
cd dist
@@ -49,4 +43,4 @@ fi
## POST BUILD
cd -
cd -

Seven binary image files are not shown in this compare (Before sizes: 614 KiB, 354 KiB, 462 KiB, 252 KiB, 346 KiB, 407 KiB, 439 KiB).


@@ -20368,24 +20368,10 @@ const ASSET_NAME = urlParams.get('assetName') || null;
const ASSET_TYPE = urlParams.get('assetType') || null;
let callId = parseInt(urlParams.get('callId')) || null;
const IS_PRODUCER = urlParams.get('producer') === 'true' ? true : false
let remoteVideo = document.getElementById('remoteVideo')
remoteVideo.defaultMuted = true
let produceAudio = false
console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId', callId, ' | IS_PRODUCER', IS_PRODUCER)
console.log('🟩 config', config)
produceAudioSelector = document.getElementById('produceAudio');
produceAudioSelector.addEventListener('change', e => {
if(e.target.checked) {
produceAudio = true
console.log('produce audio');
} else {
produceAudio = false
}
});
let socket, hub
let device
let rtpCapabilities
@@ -20395,21 +20381,6 @@ let producerVideo
let producerAudio
let consumer
let originAssetId
let consumerVideo // local consumer video(consumer not transport)
let consumerAudio // local consumer audio(consumer not transport)
const remoteSoundControl = document.getElementById('remoteSoundControl');
remoteSoundControl.addEventListener('click', function handleClick() {
console.log('remoteSoundControl.textContent', remoteSoundControl.textContent);
if (remoteSoundControl.textContent === 'Unmute') {
remoteVideo.muted = false
remoteSoundControl.textContent = 'Mute';
} else {
remoteVideo.muted = true
remoteSoundControl.textContent = 'Unmute';
}
});
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
@@ -20449,14 +20420,10 @@ setTimeout(() => {
console.log(`[MEDIA] ${config.mediasoupAddress} | connected: ${socket.connected} | existsProducer: ${existsProducer}`)
if (!IS_PRODUCER && existsProducer && consumer === undefined) {
goConnect()
// document.getElementById('btnRecvSendTransport').click();
}
if (IS_PRODUCER && urlParams.get('testing') === 'true') { getLocalStream() }
})
socket.on('new-producer', ({ callId }) => {
console.log(`🟢 new-producer | callId: ${callId} | Ready to consume`);
consume()
})
}
if (IS_PRODUCER === true) {
@@ -20535,7 +20502,7 @@ const streamSuccess = (stream) => {
const getLocalStream = () => {
console.log('[getLocalStream]');
navigator.mediaDevices.getUserMedia({
audio: produceAudio ? true : false,
audio: true,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
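
The capture call above toggles microphone capture with the produceAudio flag wired to the checkbox and lists several resolution presets. A simplified sketch using standard constraint keys (the qvga/vga labels in the diff are the project's own grouping, not standard MediaTrackConstraints):

// Request camera video (and microphone audio only when produceAudio is set).
navigator.mediaDevices.getUserMedia({
  audio: produceAudio,
  video: { width: { ideal: 640 }, height: { ideal: 480 } },
})
  .then(streamSuccess) // streamSuccess is defined in this file
  .catch((error) => console.log('getUserMedia error', error.message));
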
@@ -20684,36 +20651,34 @@ const connectSendTransport = async () => {
// this action will trigger the 'connect' and 'produce' events above
// Produce video
let producerVideoHandler = await producerTransport.produce(videoParams)
producerVideo = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideo', producerVideo);
producerVideoHandler.on('trackended', () => {
producerVideo.on('trackended', () => {
console.log('track ended')
// close video track
})
producerVideoHandler.on('transportclose', () => {
producerVideo.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// Produce audio
if (produceAudio) {
let producerAudioHandler = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudioHandler.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudioHandler.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
}
producerAudio = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
const answer = {
origin_asset_id: ASSET_ID,
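
connectSendTransport produces the local video track (and, in the produceAudio variant, the audio track) on the client-side send transport; produce() resolves to a mediasoup-client Producer whose 'trackended' and 'transportclose' events are then handled. A condensed sketch of that pattern; produceTrack is an illustrative helper, not a function from the diff:

// Produce one track on the send transport and watch its lifecycle events.
const produceTrack = async (params) => {
  const producer = await producerTransport.produce(params); // fires 'connect'/'produce' on the transport
  producer.on('trackended', () => console.log('track ended'));
  producer.on('transportclose', () => console.log('transport closed'));
  return producer;
};
// producerVideo = await produceTrack(videoParams);
// if (produceAudio) producerAudio = await produceTrack(audioParams);
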
@@ -20797,95 +20762,36 @@ const connectRecvTransport = async () => {
await socket.emit('consume', {
rtpCapabilities: device.rtpCapabilities,
callId
}, async ({videoParams, audioParams}) => {
console.log(`[consume] 🟩 videoParams`, videoParams)
console.log(`[consume] 🟩 audioParams`, audioParams)
console.log('[consume] 🟩 consumerTransport', consumerTransport)
}, async ({ params }) => {
// if (params.error) {
// console.log('Cannot Consume')
// return
// }
console.log(`[connectRecvTransport] consume params ${params}`);
// Then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
producerId: params.producerId,
kind: params.kind,
rtpParameters: params.rtpParameters
})
// destructure and retrieve the video track from the producer
const { track } = consumer
let stream = new MediaStream()
// Maybe the unit does not produce video or audio, so we must only consume what is produced
if (videoParams) {
console.log('❗ Have VIDEO stream to consume');
stream.addTrack(await getVideoTrack(videoParams))
} else {
console.log('❗ Don\'t have VIDEO stream to consume');
}
if (audioParams) {
console.log('❗ Have AUDIO stream to consume');
let audioTrack = await getAudioTrack(audioParams)
stream.addTrack(audioTrack)
} else {
console.log('❗ Don\'t have AUDIO stream to consume');
}
socket.emit('consumer-resume')
stream.addTrack(track)
// stream.removeTrack(track)
remoteVideo.srcObject = stream
remoteVideo.setAttribute('autoplay', true)
socket.emit('consumer-resume')
console.log('consumer', consumer);
remoteVideo.play()
.then(() => {
console.log('remoteVideo PLAY')
})
.catch((error) => {
displayError(`remoteVideo PLAY ERROR | ${error.message}`)
})
})
}
const getVideoTrack = async (videoParams) => {
consumerVideo = await consumerTransport.consume({
id: videoParams.id,
producerId: videoParams.producerId,
kind: videoParams.kind,
rtpParameters: videoParams.rtpParameters
})
consumerVideo.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
return consumerVideo.track
}
const getAudioTrack = async (audioParams) => {
consumerAudio = await consumerTransport.consume({
id: audioParams.id,
producerId: audioParams.producerId,
kind: audioParams.kind,
rtpParameters: audioParams.rtpParameters
})
consumerAudio.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
const audioTrack = consumerAudio.track
audioTrack.applyConstraints({
audio: {
advanced: [
{
echoCancellation: {exact: true}
},
{
autoGainControl: {exact: true}
},
{
noiseSuppression: {exact: true}
},
{
highpassFilter: {exact: true}
}
]
}
})
return audioTrack
}
const closeCall = () => {
console.log('closeCall');
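
In connectRecvTransport above, the client consumes whichever producer parameters the server returns, collects the tracks into a MediaStream, attaches it to the remote video element, and emits 'consumer-resume'. A reduced mediasoup-client sketch of one consume step; attachRemoteTrack is an illustrative helper, not a function from the diff:

// Consume one producer's params on the client consumer transport and add
// the resulting track to the shared remote MediaStream.
const attachRemoteTrack = async (params, stream) => {
  if (!params) return; // the server answers with null when that kind is not produced
  const consumer = await consumerTransport.consume({
    id: params.id,
    producerId: params.producerId,
    kind: params.kind,
    rtpParameters: params.rtpParameters,
  });
  consumer.on('transportclose', () => console.log('transport closed, consumer closed'));
  stream.addTrack(consumer.track);
};
// const stream = new MediaStream();
// await attachRemoteTrack(videoParams, stream);
// await attachRemoteTrack(audioParams, stream);
// remoteVideo.srcObject = stream;
// socket.emit('consumer-resume');
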


@@ -34,9 +34,6 @@
<body>
<body>
<div id="video">
<legend>Client options:</legend>
<input type="checkbox" id="produceAudio" name="produceAudio">
<label for="produceAudio">Produce audio</label><br>
<table>
<thead>
<th>Local Video</th>
@@ -46,24 +43,12 @@
<tr>
<td>
<div id="sharedBtns">
<video
id="localVideo"
class="video"
autoplay
muted
playsinline
></video>
<video id="localVideo" autoplay class="video" muted></video>
</div>
</td>
<td>
<div id="sharedBtns">
<video
id="remoteVideo"
class="video"
autoplay
muted
playsinline
></video>
<video id="remoteVideo" autoplay class="video" ></video>
</div>
</td>
</tr>
@@ -76,10 +61,33 @@
<td>
<div id="sharedBtns">
<button id="btnRecvSendTransport">Consume</button>
<button id="remoteSoundControl">Unmute</button>
</div>
</td>
</tr>
<!-- <tr>
<td colspan="2">
<div id="sharedBtns">
<button id="btnRtpCapabilities">2. Get Rtp Capabilities</button>
<br />
<button id="btnDevice">3. Create Device</button>
</div>
</td>
</tr>
<tr>
<td>
<div id="sharedBtns">
<button id="btnCreateSendTransport">4. Create Send Transport</button>
<br />
<button id="btnConnectSendTransport">5. Connect Send Transport & Produce</button></td>
</div>
<td>
<div id="sharedBtns">
<button id="btnRecvSendTransport">6. Create Recv Transport</button>
<br />
<button id="btnConnectRecvTransport">7. Connect Recv Transport & Consume</button>
</div>
</td>
</tr> -->
</tbody>
</table>
<div id="closeCallBtn">


@@ -10,24 +10,10 @@ const ASSET_NAME = urlParams.get('assetName') || null;
const ASSET_TYPE = urlParams.get('assetType') || null;
let callId = parseInt(urlParams.get('callId')) || null;
const IS_PRODUCER = urlParams.get('producer') === 'true' ? true : false
let remoteVideo = document.getElementById('remoteVideo')
remoteVideo.defaultMuted = true
let produceAudio = false
console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId', callId, ' | IS_PRODUCER', IS_PRODUCER)
console.log('🟩 config', config)
produceAudioSelector = document.getElementById('produceAudio');
produceAudioSelector.addEventListener('change', e => {
if(e.target.checked) {
produceAudio = true
console.log('produce audio');
} else {
produceAudio = false
}
});
let socket, hub
let device
let rtpCapabilities
@@ -37,21 +23,6 @@ let producerVideo
let producerAudio
let consumer
let originAssetId
let consumerVideo // local consumer video(consumer not transport)
let consumerAudio // local consumer audio(consumer not transport)
const remoteSoundControl = document.getElementById('remoteSoundControl');
remoteSoundControl.addEventListener('click', function handleClick() {
console.log('remoteSoundControl.textContent', remoteSoundControl.textContent);
if (remoteSoundControl.textContent === 'Unmute') {
remoteVideo.muted = false
remoteSoundControl.textContent = 'Mute';
} else {
remoteVideo.muted = true
remoteSoundControl.textContent = 'Unmute';
}
});
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#ProducerOptions
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
@@ -91,14 +62,10 @@ setTimeout(() => {
console.log(`[MEDIA] ${config.mediasoupAddress} | connected: ${socket.connected} | existsProducer: ${existsProducer}`)
if (!IS_PRODUCER && existsProducer && consumer === undefined) {
goConnect()
// document.getElementById('btnRecvSendTransport').click();
}
if (IS_PRODUCER && urlParams.get('testing') === 'true') { getLocalStream() }
})
socket.on('new-producer', ({ callId }) => {
console.log(`🟢 new-producer | callId: ${callId} | Ready to consume`);
consume()
})
}
if (IS_PRODUCER === true) {
@@ -177,7 +144,7 @@ const streamSuccess = (stream) => {
const getLocalStream = () => {
console.log('[getLocalStream]');
navigator.mediaDevices.getUserMedia({
audio: produceAudio ? true : false,
audio: true,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
@@ -326,36 +293,34 @@ const connectSendTransport = async () => {
// this action will trigger the 'connect' and 'produce' events above
// Produce video
let producerVideoHandler = await producerTransport.produce(videoParams)
producerVideo = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideo', producerVideo);
producerVideoHandler.on('trackended', () => {
producerVideo.on('trackended', () => {
console.log('track ended')
// close video track
})
producerVideoHandler.on('transportclose', () => {
producerVideo.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// Produce audio
if (produceAudio) {
let producerAudioHandler = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudioHandler.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudioHandler.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
}
producerAudio = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
const answer = {
origin_asset_id: ASSET_ID,
@@ -439,95 +404,36 @@ const connectRecvTransport = async () => {
await socket.emit('consume', {
rtpCapabilities: device.rtpCapabilities,
callId
}, async ({videoParams, audioParams}) => {
console.log(`[consume] 🟩 videoParams`, videoParams)
console.log(`[consume] 🟩 audioParams`, audioParams)
console.log('[consume] 🟩 consumerTransport', consumerTransport)
}, async ({ params }) => {
// if (params.error) {
// console.log('Cannot Consume')
// return
// }
console.log(`[connectRecvTransport] consume params ${params}`);
// Then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
producerId: params.producerId,
kind: params.kind,
rtpParameters: params.rtpParameters
})
// destructure and retrieve the video track from the producer
const { track } = consumer
let stream = new MediaStream()
// Maybe the unit does not produce video or audio, so we must only consume what is produced
if (videoParams) {
console.log('❗ Have VIDEO stream to consume');
stream.addTrack(await getVideoTrack(videoParams))
} else {
console.log('❗ Don\'t have VIDEO stream to consume');
}
if (audioParams) {
console.log('❗ Have AUDIO stream to consume');
let audioTrack = await getAudioTrack(audioParams)
stream.addTrack(audioTrack)
} else {
console.log('❗ Don\'t have AUDIO stream to consume');
}
socket.emit('consumer-resume')
stream.addTrack(track)
// stream.removeTrack(track)
remoteVideo.srcObject = stream
remoteVideo.setAttribute('autoplay', true)
socket.emit('consumer-resume')
console.log('consumer', consumer);
remoteVideo.play()
.then(() => {
console.log('remoteVideo PLAY')
})
.catch((error) => {
displayError(`remoteVideo PLAY ERROR | ${error.message}`)
})
})
}
const getVideoTrack = async (videoParams) => {
consumerVideo = await consumerTransport.consume({
id: videoParams.id,
producerId: videoParams.producerId,
kind: videoParams.kind,
rtpParameters: videoParams.rtpParameters
})
consumerVideo.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
return consumerVideo.track
}
const getAudioTrack = async (audioParams) => {
consumerAudio = await consumerTransport.consume({
id: audioParams.id,
producerId: audioParams.producerId,
kind: audioParams.kind,
rtpParameters: audioParams.rtpParameters
})
consumerAudio.on('transportclose', () => {
console.log('transport closed so consumer closed')
})
const audioTrack = consumerAudio.track
audioTrack.applyConstraints({
audio: {
advanced: [
{
echoCancellation: {exact: true}
},
{
autoGainControl: {exact: true}
},
{
noiseSuppression: {exact: true}
},
{
highpassFilter: {exact: true}
}
]
}
})
return audioTrack
}
const closeCall = () => {
console.log('closeCall');