2.12.0
Removed:
- Removed the unused samples (users, roster). All use cases are now covered by the chat, data and webrtc samples;
- Removed the window.navigator.onLine check from the SDK (the check was moved to the WebRTC sample, where it makes sure that a client has an internet connection before making a call);
Fixed:
- During a call (WebRTC session) the WebRTC sample can now switch to another camera, if one is available, and stops the call if all media devices are unplugged;
- Fixed bugs that occurred when the user does not grant permanent permissions for getUserMedia;
Added:
- Ability to restore the chat connection at a given time interval after a disconnect, if it wasn't voluntary (added for node-xmpp-client and nativescript-xmpp-client, updated for the Strophe.js client). Add a property chatReconnectionTimeInterval to the config; by default it is 5 seconds (a config sketch follows this list). QB.chat.disconnect() stops the reconnection attempts;
- The call of QB.chat.connect(params, callback) while the chat is already in the connecting state (connection attempts run one at a time) is now blocked; the callback function returns an error ('Status.REJECT - The connection is still in the Status.CONNECTING state') - see the connect/disconnect sketch below;
- QB.webrtc.onDevicesChangeListener() was added - a listener that is called when a media device has been plugged or unplugged (see the listener sketch below);
- An ability to change audio and video tracks (switch cameras) was added; use the method webRTCSession.switchMediaTracks(deviceIds, cb). Supported and tested on Firefox from v.60.
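A minimal sketch of the new config property, assuming the usual QB.init(appId, authKey, authSecret, config) call; APPLICATION_ID, AUTH_KEY, AUTH_SECRET and the 10-second interval are placeholders:
var CONFIG = {
    debug: false,
    chatReconnectionTimeInterval: 10 // try to restore the chat connection every 10 seconds (the default is 5)
};

QB.init(APPLICATION_ID, AUTH_KEY, AUTH_SECRET, CONFIG);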
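A sketch of handling the new rejection and of stopping reconnection; the {userId, password} credentials are placeholders and the callback is assumed to be Node-style (error first):
QB.chat.connect({userId: 42, password: 'userPassword'}, function(error, roster) {
    if (error) {
        // e.g. 'Status.REJECT - The connection is still in the Status.CONNECTING state'
        // when connect() is called while a connection attempt is already in progress
        console.error(error);
        return;
    }
    console.log('Chat connected', roster);
});

// Later, e.g. on logout: also stops any scheduled reconnection attempts
QB.chat.disconnect();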
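A sketch of wiring up the new listener; re-querying getMediaDevices('videoinput') inside it is just one possible reaction, and the listener is assumed to take no arguments:
QB.webrtc.onDevicesChangeListener = function() {
    // a media device has been plugged or unplugged - refresh the list of cameras
    QB.webrtc.getMediaDevices('videoinput').then(function(devices) {
        console.log('video inputs now available:', devices);
    }).catch(function(error) {
        console.error(error);
    });
};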
Here is a code snippet showing how to replace audio/video tracks:
// 'params' and 'someDocumentElement' come from your application code
var switchMediaTracksBtn = document.getElementById('confirmSwitchMediaTracks');
var webRTCSession = QB.webrtc.createNewSession(params);

// Fill a <select> with the available cameras
QB.webrtc.getMediaDevices('videoinput').then(function(devices) {
    var selectVideoInput = document.createElement('select');
    selectVideoInput.id = 'videoInput';
    someDocumentElement.appendChild(selectVideoInput);

    if (devices.length > 1) {
        for (var i = 0; i !== devices.length; ++i) {
            var device = devices[i],
                option = document.createElement('option');

            if (device.kind === 'videoinput') {
                option.value = device.deviceId;
                option.text = device.label;
                selectVideoInput.appendChild(option);
            }
        }
    }
}).catch(function(error) {
    console.error(error);
});

// Fill a <select> with the available microphones
QB.webrtc.getMediaDevices('audioinput').then(function(devices) {
    var selectAudioInput = document.createElement('select');
    selectAudioInput.id = 'audioInput';
    someDocumentElement.appendChild(selectAudioInput);

    if (devices.length > 1) {
        for (var i = 0; i !== devices.length; ++i) {
            var device = devices[i],
                option = document.createElement('option');

            if (device.kind === 'audioinput') {
                option.value = device.deviceId;
                option.text = device.label;
                selectAudioInput.appendChild(option);
            }
        }
    }
}).catch(function(error) {
    console.error(error);
});

switchMediaTracksBtn.onclick = function(event) {
    var audioDeviceId = document.getElementById('audioInput').value || undefined,
        videoDeviceId = document.getElementById('videoInput').value || undefined,
        deviceIds = {
            audio: audioDeviceId,
            video: videoDeviceId
        };

    var callback = function(error, stream) {
        if (error) {
            console.error(error);
        } else {
            console.log(stream);
        }
    };

    // Switch media tracks in the audio/video HTML elements (the local stream) and
    // replace media tracks in peers (changes media tracks for each user in the WebRTC session)
    webRTCSession.switchMediaTracks(deviceIds, callback);
};