Compare commits

..

3 Commits

Author SHA1 Message Date
8a9c370f02 Merge pull request 'LH-265-audio' (#17) from LH-265-audio into develop (Reviewed-on: #17, Reviewed-by: Cristi Ene <cristi.ene@safemobile.com>) 2022-12-06 12:45:08 +00:00
652019b07d LH-265: Update doc; Update bundle 2022-11-29 15:35:28 +02:00
09c4a4b90e LH-265: Added audio on client and server 2022-11-29 14:19:02 +02:00
4 changed files with 89 additions and 229 deletions

View File

@@ -22,18 +22,20 @@
2. Run the `npm run start:prod` command to start the server in production mode.
(To view the server's log output, use `pm2 log video-server`)
---
### Web client
- The server starts on port 3000 by default, and the SSL certificates have to be configured
- The web client can be accessed using the `/sfu` path
ex: http://localhost:3000/sfu/?assetId=1&accountId=1&producer=true&assetName=Adi&assetType=linx
ex: https://HOST/sfu/?assetId=1&accountId=1&producer=true&dest_asset_id=75&assetName=Adi
assetId = the asset id of the unit you are testing with
accountId = the account id of the unit you are testing with
producer = always true for the caller, because you are the producer
(it can be set to false, but then another client has to connect with producer=true)
assetName = the asset name of the unit you are testing with
assetType = the asset type of the unit you are testing with
dest_asset_id = the asset id of the callee (the unit the call is made with)
(a sketch showing how such a URL can be assembled from these parameters follows at the end of this file's diff)
- To make a call using this client, you need a microphone and permission to use it
- For any changes related to the client, run `npm run watch` to regenerate the bundle.js used by the web client
### Demo project
The demo project that was used initially and then modified for our needs: `https://github.com/jamalag/mediasoup2`
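
For reference, a minimal sketch of how the test URL described above can be assembled from the documented query parameters. The `buildSfuUrl` helper is hypothetical (it is not part of this repository); only the parameter names come from the doc above.

```js
// Hypothetical helper (not part of this repo): builds the /sfu test URL
// from the query parameters documented above.
const buildSfuUrl = (host, { assetId, accountId, producer, assetName, assetType, destAssetId }) => {
  const params = new URLSearchParams({
    assetId: String(assetId),
    accountId: String(accountId),
    producer: String(producer),
    assetName
  })
  if (assetType) params.set('assetType', assetType)
  if (destAssetId) params.set('dest_asset_id', String(destAssetId))
  return `${host}/sfu/?${params.toString()}`
}

// ex: https://HOST/sfu/?assetId=1&accountId=1&producer=true&assetName=Adi&dest_asset_id=75
console.log(buildSfuUrl('https://HOST', { assetId: 1, accountId: 1, producer: true, assetName: 'Adi', destAssetId: 75 }))
```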

View File

@@ -43,6 +43,4 @@ fi
## POST BUILD
cd -
cd -

View File

@@ -20373,7 +20373,6 @@ console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId',
console.log('🟩 config', config)
let socket, hub
let doIHaveAudio = false
let device
let rtpCapabilities
let producerTransport
@@ -20488,8 +20487,6 @@ const streamSuccess = (stream) => {
videoParams = {
track: videoTrack,
// codec : device.rtpCapabilities.codecs.find((codec) => codec.mimeType.toLowerCase() === 'video/vp9'),
// codec : 'video/vp9',
...videoParams
}
@@ -20500,96 +20497,30 @@ const streamSuccess = (stream) => {
console.log('[streamSuccess] videoParams', videoParams, ' | audioParams', audioParams);
goConnect()
// console.log('[streamSuccess]');
// localVideo.srcObject = stream
// const track = stream.getVideoTracks()[0]
// videoParams = {
// track,
// ...videoParams
// }
// goConnect()
}
const getLocalStream = () => {
console.log('[getLocalStream]');
navigator.mediaDevices.getUserMedia({
audio: true,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
}
})
.then(streamSuccess)
.catch(error => {
console.log(error.message)
})
navigator.permissions.query(
{ name: 'microphone' }
).then(function(permissionStatus) {
console.log('🟨 [PERMISSION] onchange', permissionStatus.state); // granted, denied, prompt
// If he has entered before, the saved access is already saved
if (permissionStatus === 'grated') {
doIHaveAudio = true;
}
// 🟨 [PERMISSION] onchange denied
// If it is the first time client enter and give permission
permissionStatus.onchange = function() {
console.log('🟨 [PERMISSION] onchange', this.state);
if (this.state === 'granted') {
doIHaveAudio = true;
navigator.mediaDevices.getUserMedia({
audio: true,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
}
})
.then(streamSuccess)
.catch(error => {
console.log(error.message)
})
} else /*if (this.state === 'denied') */ {
doIHaveAudio = false;
console.log('Getting user permission');
navigator.mediaDevices.getUserMedia({
audio: false,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
}
})
.then(streamSuccess)
.catch(error => {
console.log(error.message)
})
}
// navigator.mediaDevices.getUserMedia({
// audio: true,
// video: {
// qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
// vga : { width: { ideal: 640 }, height: { ideal: 480 } },
// hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
// }
// })
// .then(streamSuccess)
// .catch(error => {
// console.log(error.message)
// })
}
).then((permissionStatus) =>{
console.log('🟨 [PERMISSION] permissionStatus', permissionStatus); // granted, denied, prompt
// It will block the code from execution and display "Permission denied" if we don't have microphone permissions
})
// navigator.mediaDevices.getUserMedia({
// audio: true,
// video: {
// qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
// vga : { width: { ideal: 640 }, height: { ideal: 480 } },
// hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
// }
// })
// .then((streamSuccess) => {
// return streamSuccess
// })
// .catch(error => {
// console.log(error.message)
// })
}
const goConnect = () => {
@@ -20606,7 +20537,6 @@ const goCreateTransport = () => {
// server side to send/receive media
const createDevice = async () => {
try {
console.log('[createDevice] 1 device', device);
device = new mediasoupClient.Device()
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-load
@@ -20617,7 +20547,7 @@ const createDevice = async () => {
})
console.log('Device RTP Capabilities', device.rtpCapabilities)
console.log('[createDevice] 2 device', device);
console.log('[createDevice] device', device);
// once the device loads, create transport
goCreateTransport()
@@ -20689,7 +20619,7 @@ const createSendTransport = () => {
console.log('[produce] parameters', parameters)
try {
// tell the server to create a Producer
// Tell the server to create a Producer
// with the following parameters and produce
// and expect back a server side producer id
// see server's socket.on('transport-produce', ...)
@@ -20715,14 +20645,16 @@ const connectSendTransport = async () => {
console.log('[connectSendTransport] producerTransport');
// we now call produce() to instruct the producer transport
// We now call produce() to instruct the producer transport
// to send media to the Router
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
// this action will trigger the 'connect' and 'produce' events above
console.log('videoParams', videoParams);
// Produce video
producerVideo = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideo', producerVideo);
producerVideo.on('trackended', () => {
console.log('track ended')
// close video track
@@ -20733,22 +20665,20 @@ const connectSendTransport = async () => {
// close video track
})
console.log('doIHaveAudio', doIHaveAudio);
// Video is mandatory, but audio may not be included
// if (doIHaveAudio) {
console.log('audioParams', audioParams);
producerAudio = await producerTransport.produce(audioParams)
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close video track
})
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// }
// Produce audio
producerAudio = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
const answer = {
origin_asset_id: ASSET_ID,
@@ -20758,7 +20688,7 @@ const connectSendTransport = async () => {
origin_asset_type_name: ASSET_TYPE,
origin_asset_name: ASSET_NAME,
video_call_id: callId,
answer: 'accepted', // answer: 'rejected'
answer: 'accepted', // answer: accepted/rejected
};
console.log('SEND answer', answer);
@@ -20774,7 +20704,7 @@ const connectSendTransport = async () => {
const createRecvTransport = async () => {
console.log('createRecvTransport');
// see server's socket.on('consume', sender?, ...)
// See server's socket.on('consume', sender?, ...)
// this is a call from Consumer, so sender = false
await socket.emit('createWebRtcTransport', { sender: false, callId }, ({ params }) => {
// The server sends back params needed
@@ -20786,13 +20716,13 @@ const createRecvTransport = async () => {
console.log('[createRecvTransport] params', params)
// creates a new WebRTC Transport to receive media
// Creates a new WebRTC Transport to receive media
// based on server's consumer transport params
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-createRecvTransport
consumerTransport = device.createRecvTransport(params)
// https://mediasoup.org/documentation/v3/communication-between-client-and-server/#producing-media
// this event is raised when a first call to transport.produce() is made
// This event is raised when a first call to transport.produce() is made
// see connectRecvTransport() below
consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
try {
@@ -20826,7 +20756,7 @@ const resetCallSettings = () => {
const connectRecvTransport = async () => {
console.log('connectRecvTransport');
// for consumer, we need to tell the server first
// For consumer, we need to tell the server first
// to create a consumer based on the rtpCapabilities and consume
// if the router can consume, it will send back a set of params as below
await socket.emit('consume', {
@@ -20838,7 +20768,7 @@ const connectRecvTransport = async () => {
return
}
// then consume with the local consumer transport
// Then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
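
Earlier in this file's diff, the `getLocalStream` hunk (`@@ -20500,96 +20497,30 @@`) contains a microphone-permission check that compares the `PermissionStatus` object itself against the misspelled string `'grated'`, so that branch can never run. As a reference only, a corrected sketch of that kind of check might look like the following; `checkMicrophoneAndCapture` is a hypothetical helper, not the code this commit actually ships.

```js
// Sketch only (assumes Permissions API support for 'microphone' and a
// doIHaveAudio flag like the one used by the client above).
const checkMicrophoneAndCapture = async () => {
  let doIHaveAudio = false
  let micDenied = false
  try {
    const status = await navigator.permissions.query({ name: 'microphone' })
    doIHaveAudio = status.state === 'granted' // compare status.state, not the object itself
    micDenied = status.state === 'denied'
    status.onchange = () => { doIHaveAudio = status.state === 'granted' }
  } catch (err) {
    // Some browsers reject 'microphone' here; in that case just let getUserMedia prompt
    console.log('[permission] query failed:', err.message)
  }
  // Video is mandatory; audio is requested unless the microphone was explicitly denied
  return navigator.mediaDevices.getUserMedia({
    audio: !micDenied,
    video: { width: { ideal: 640 }, height: { ideal: 480 } }
  })
}
```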

View File

@@ -15,7 +15,6 @@ console.log('[URL] ASSET_ID', ASSET_ID, '| ACCOUNT_ID', ACCOUNT_ID, '| callId',
console.log('🟩 config', config)
let socket, hub
let doIHaveAudio = false
let device
let rtpCapabilities
let producerTransport
@@ -130,8 +129,6 @@ const streamSuccess = (stream) => {
videoParams = {
track: videoTrack,
// codec : device.rtpCapabilities.codecs.find((codec) => codec.mimeType.toLowerCase() === 'video/vp9'),
// codec : 'video/vp9',
...videoParams
}
@@ -142,96 +139,30 @@ const streamSuccess = (stream) => {
console.log('[streamSuccess] videoParams', videoParams, ' | audioParams', audioParams);
goConnect()
// console.log('[streamSuccess]');
// localVideo.srcObject = stream
// const track = stream.getVideoTracks()[0]
// videoParams = {
// track,
// ...videoParams
// }
// goConnect()
}
const getLocalStream = () => {
console.log('[getLocalStream]');
navigator.mediaDevices.getUserMedia({
audio: true,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
}
})
.then(streamSuccess)
.catch(error => {
console.log(error.message)
})
navigator.permissions.query(
{ name: 'microphone' }
).then(function(permissionStatus) {
console.log('🟨 [PERMISSION] onchange', permissionStatus.state); // granted, denied, prompt
// If he has entered before, the saved access is already saved
if (permissionStatus === 'grated') {
doIHaveAudio = true;
}
// 🟨 [PERMISSION] onchange denied
// If it is the first time client enter and give permission
permissionStatus.onchange = function() {
console.log('🟨 [PERMISSION] onchange', this.state);
if (this.state === 'granted') {
doIHaveAudio = true;
navigator.mediaDevices.getUserMedia({
audio: true,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
}
})
.then(streamSuccess)
.catch(error => {
console.log(error.message)
})
} else /*if (this.state === 'denied') */ {
doIHaveAudio = false;
console.log('Getting user permission');
navigator.mediaDevices.getUserMedia({
audio: false,
video: {
qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
vga : { width: { ideal: 640 }, height: { ideal: 480 } },
hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
}
})
.then(streamSuccess)
.catch(error => {
console.log(error.message)
})
}
// navigator.mediaDevices.getUserMedia({
// audio: true,
// video: {
// qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
// vga : { width: { ideal: 640 }, height: { ideal: 480 } },
// hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
// }
// })
// .then(streamSuccess)
// .catch(error => {
// console.log(error.message)
// })
}
).then((permissionStatus) =>{
console.log('🟨 [PERMISSION] permissionStatus', permissionStatus); // granted, denied, prompt
// It will block the code from execution and display "Permission denied" if we don't have microphone permissions
})
// navigator.mediaDevices.getUserMedia({
// audio: true,
// video: {
// qvga : { width: { ideal: 320 }, height: { ideal: 240 } },
// vga : { width: { ideal: 640 }, height: { ideal: 480 } },
// hd : { width: { ideal: 1280 }, height: { ideal: 720 } }
// }
// })
// .then((streamSuccess) => {
// return streamSuccess
// })
// .catch(error => {
// console.log(error.message)
// })
}
const goConnect = () => {
@@ -248,7 +179,6 @@ const goCreateTransport = () => {
// server side to send/receive media
const createDevice = async () => {
try {
console.log('[createDevice] 1 device', device);
device = new mediasoupClient.Device()
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-load
@@ -259,7 +189,7 @@ const createDevice = async () => {
})
console.log('Device RTP Capabilities', device.rtpCapabilities)
console.log('[createDevice] 2 device', device);
console.log('[createDevice] device', device);
// once the device loads, create transport
goCreateTransport()
@@ -331,7 +261,7 @@ const createSendTransport = () => {
console.log('[produce] parameters', parameters)
try {
// tell the server to create a Producer
// Tell the server to create a Producer
// with the following parameters and produce
// and expect back a server side producer id
// see server's socket.on('transport-produce', ...)
@@ -357,14 +287,16 @@ const connectSendTransport = async () => {
console.log('[connectSendTransport] producerTransport');
// we now call produce() to instruct the producer transport
// We now call produce() to instruct the producer transport
// to send media to the Router
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#transport-produce
// this action will trigger the 'connect' and 'produce' events above
console.log('videoParams', videoParams);
// Produce video
producerVideo = await producerTransport.produce(videoParams)
console.log('videoParams', videoParams);
console.log('producerVideo', producerVideo);
producerVideo.on('trackended', () => {
console.log('track ended')
// close video track
@@ -375,22 +307,20 @@ const connectSendTransport = async () => {
// close video track
})
console.log('doIHaveAudio', doIHaveAudio);
// Video is mandatory, but audio may not be included
// if (doIHaveAudio) {
console.log('audioParams', audioParams);
producerAudio = await producerTransport.produce(audioParams)
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close video track
})
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close video track
})
// }
// Produce audio
producerAudio = await producerTransport.produce(audioParams)
console.log('audioParams', audioParams);
console.log('producerAudio', producerAudio);
producerAudio.on('trackended', () => {
console.log('track ended')
// close audio track
})
producerAudio.on('transportclose', () => {
console.log('transport ended')
// close audio track
})
const answer = {
origin_asset_id: ASSET_ID,
@@ -400,7 +330,7 @@ const connectSendTransport = async () => {
origin_asset_type_name: ASSET_TYPE,
origin_asset_name: ASSET_NAME,
video_call_id: callId,
answer: 'accepted', // answer: 'rejected'
answer: 'accepted', // answer: accepted/rejected
};
console.log('SEND answer', answer);
@@ -416,7 +346,7 @@ const connectSendTransport = async () => {
const createRecvTransport = async () => {
console.log('createRecvTransport');
// see server's socket.on('consume', sender?, ...)
// See server's socket.on('consume', sender?, ...)
// this is a call from Consumer, so sender = false
await socket.emit('createWebRtcTransport', { sender: false, callId }, ({ params }) => {
// The server sends back params needed
@@ -428,13 +358,13 @@ const createRecvTransport = async () => {
console.log('[createRecvTransport] params', params)
// creates a new WebRTC Transport to receive media
// Creates a new WebRTC Transport to receive media
// based on server's consumer transport params
// https://mediasoup.org/documentation/v3/mediasoup-client/api/#device-createRecvTransport
consumerTransport = device.createRecvTransport(params)
// https://mediasoup.org/documentation/v3/communication-between-client-and-server/#producing-media
// this event is raised when a first call to transport.produce() is made
// This event is raised when a first call to transport.produce() is made
// see connectRecvTransport() below
consumerTransport.on('connect', async ({ dtlsParameters }, callback, errback) => {
try {
@@ -468,7 +398,7 @@ const resetCallSettings = () => {
const connectRecvTransport = async () => {
console.log('connectRecvTransport');
// for consumer, we need to tell the server first
// For consumer, we need to tell the server first
// to create a consumer based on the rtpCapabilities and consume
// if the router can consume, it will send back a set of params as below
await socket.emit('consume', {
@@ -480,7 +410,7 @@ const connectRecvTransport = async () => {
return
}
// then consume with the local consumer transport
// Then consume with the local consumer transport
// which creates a consumer
consumer = await consumerTransport.consume({
id: params.id,
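
The `connectSendTransport` hunks in both files above show the core of the LH-265 change: in addition to the video track, an audio track is now produced over the same mediasoup send transport. A condensed sketch of that flow follows; `produceAudioAndVideo` is a hypothetical wrapper, not the actual client function, and it assumes `videoParams` and `audioParams` have been prepared by `streamSuccess` as in the client code above.

```js
// Condensed sketch of the audio + video produce flow (assumes videoParams and
// audioParams were filled in by streamSuccess, as in the client code above).
const produceAudioAndVideo = async (producerTransport, videoParams, audioParams) => {
  // Produce video (the first produce() call triggers the transport's 'connect' and 'produce' handlers)
  const producerVideo = await producerTransport.produce(videoParams)
  producerVideo.on('trackended', () => console.log('video track ended'))
  producerVideo.on('transportclose', () => console.log('video transport closed'))

  // Produce audio; with LH-265 the audio track is produced alongside the video
  const producerAudio = await producerTransport.produce(audioParams)
  producerAudio.on('trackended', () => console.log('audio track ended'))
  producerAudio.on('transportclose', () => console.log('audio transport closed'))

  return { producerVideo, producerAudio }
}
```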