File: source/stream-media.js

/**
 * <blockquote class="info">
 * For a better user experience, the functionality is throttled when invoked many times in less
 * than the milliseconds interval configured in the <a href="#method_init"><code>init()</code> method</a>.
 * </blockquote>
 * Function that retrieves camera Stream.
 * @method getUserMedia
 * @param {JSON} [options] The camera Stream configuration options.
 * - When not provided, the value is set to <code>{ audio: true, video: true }</code>.
 * <small>To fall back to retrieving the audio track only when retrieving both audio and video tracks fails,
 * enable the <code>audioFallback</code> flag in the <a href="#method_init"><code>init()</code> method</a>.</small>
 * @param {Boolean} [options.useExactConstraints=false] <blockquote class="info">
 * Note that by enabling this flag, exact values will be requested when retrieving the camera Stream,
 * but it does not prevent constraints-related errors. By default when not enabled,
 * expected mandatory maximum values (or optional values for the source ID) will be requested to prevent
 * constraints-related errors, with an exception for the <code>options.video.frameRate</code> option in Safari and IE
 * (any plugin-enabled) browsers, where the expected maximum value will not be requested due to the lack of support.</blockquote>
 * The flag if <code>getUserMedia()</code> should request the camera Stream to match the exact requested values of
 * <code>options.audio.deviceId</code> and <code>options.video.deviceId</code>, <code>options.video.resolution</code>
 * and <code>options.video.frameRate</code> when provided.
 * @param {Boolean|JSON} [options.audio=false] <blockquote class="info">
 * Note that the current Edge browser implementation does not support the <code>options.audio.optional</code>,
 * <code>options.audio.deviceId</code> and <code>options.audio.echoCancellation</code> options.</blockquote>
 * The audio configuration options.
 * @param {Boolean} [options.audio.stereo=false] <blockquote class="info"><b>Deprecation Warning!</b>
 * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.stereo</code> and
 * the <code>options.codecParams.audio.opus["sprop-stereo"]</code>
 * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
 * <code>options.codecParams.audio.opus.stereo</code> or <code>options.codecParams.audio.opus["sprop-stereo"]</code>
 * is configured, this overrides the <code>options.audio.stereo</code> setting.</blockquote>
 * The flag if OPUS audio codec stereo band should be configured for sending encoded audio data.
 * <small>When not provided, the default browser configuration is used.</small>
 * @param {Boolean} [options.audio.usedtx] <blockquote class="info"><b>Deprecation Warning!</b>
 * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.usedtx</code>
 * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
 * <code>options.codecParams.audio.opus.usedtx</code> is configured, this overrides the
 * <code>options.audio.usedtx</code> setting. Note that this feature might
 * not work depending on the browser support and implementation.</blockquote>
 * The flag if OPUS audio codec should enable DTX (Discontinuous Transmission) for sending encoded audio data.
 * <small>This might help to reduce bandwidth as it reduces the bitrate during silence or background noise, and
 * goes hand-in-hand with the <code>options.voiceActivityDetection</code> flag in the <a href="#method_joinRoom">
 * <code>joinRoom()</code> method</a>.</small>
 * <small>When not provided, the default browser configuration is used.</small>
 * @param {Boolean} [options.audio.useinbandfec] <blockquote class="info"><b>Deprecation Warning!</b>
 * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.useinbandfec</code>
 * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
 * <code>options.codecParams.audio.opus.useinbandfec</code> is configured, this overrides the
 * <code>options.audio.useinbandfec</code> setting. Note that this parameter should only be used
 * for debugging purposes.</blockquote>
 * The flag if OPUS audio codec has the capability to take advantage of the in-band FEC
 * (Forward Error Correction) when sending encoded audio data.
 * <small>This helps to reduce the harm of packet loss by encoding information about the previous packet loss.</small>
 * <small>When not provided, the default browser configuration is used.</small>
 * @param {Number} [options.audio.maxplaybackrate] <blockquote class="info"><b>Deprecation Warning!</b>
 * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.maxplaybackrate</code>
 * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
 * <code>options.codecParams.audio.opus.maxplaybackrate</code> is configured, this overrides the
 * <code>options.audio.maxplaybackrate</code> setting. Note that this feature might
 * not work depending on the browser support and implementation, and
 * that this parameter should only be used for debugging purposes.</blockquote>
 * The maximum output sampling rate in Hz (hertz) that the OPUS audio codec is capable of receiving
 * decoded audio data at, to adjust to the hardware limitations and ensure that any audio data sent
 * is not encoded at a sampling rate higher than this.
 * <small>This value must be between <code>8000</code> and <code>48000</code>.</small>
 * <small>When not provided, the default browser configuration is used.</small>
 * @param {Boolean} [options.audio.mute=false] The flag if audio tracks should be muted upon receiving them.
 * <small>Providing the value as <code>false</code> does nothing to <code>peerInfo.mediaStatus.audioMuted</code>,
 * but when provided as <code>true</code>, this sets the <code>peerInfo.mediaStatus.audioMuted</code> value to
 * <code>true</code> and mutes any existing <a href="#method_shareScreen">
 * <code>shareScreen()</code> Stream</a> audio tracks as well.</small>
 * @param {Array} [options.audio.optional] <blockquote class="info">
 * This property has been deprecated. "optional" constraints have been removed from the specification.<br>
 * Note that this may result in constraints-related errors when the <code>options.useExactConstraints</code> value is
 * <code>true</code>. If you are looking to set the requested source ID of the audio track,
 * use <code>options.audio.deviceId</code> instead.</blockquote>
 * The <code>navigator.getUserMedia()</code> API <code>audio: { optional [..] }</code> property.
 * @param {String} [options.audio.deviceId] <blockquote class="info">
 * Note this is currently not supported in Firefox browsers.
 * </blockquote> The audio track source ID of the device to use.
 * <small>The list of available audio source IDs can be retrieved by the
 * <a href="https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices">
 * <code>navigator.mediaDevices.enumerateDevices</code> API</a>.</small>
 * @param {Boolean} [options.audio.echoCancellation=true] <blockquote class="info">
 * For Chrome/Opera/IE/Safari/Bowser, the echo cancellation functionality may not work and may produce terrible
 * feedback. It is recommended to use headphones or other microphone devices rather than the device's
 * built-in microphone.</blockquote> The flag to enable echo cancellation for the audio track.
 * @param {Boolean|JSON} [options.video=false] <blockquote class="info">
 * Note that the current Edge browser implementation does not support the <code>options.video.optional</code>,
 * <code>options.video.deviceId</code>, <code>options.video.resolution</code>,
 * <code>options.video.frameRate</code> and <code>options.video.facingMode</code> options.</blockquote>
 * The video configuration options.
 * @param {Boolean} [options.video.mute=false] The flag if video tracks should be muted upon receiving them.
 * <small>Providing the value as <code>false</code> does nothing to <code>peerInfo.mediaStatus.videoMuted</code>,
 * but when provided as <code>true</code>, this sets the <code>peerInfo.mediaStatus.videoMuted</code> value to
 * <code>true</code> and mutes any existing <a href="#method_shareScreen">
 * <code>shareScreen()</code> Stream</a> video tracks as well.</small>
 * @param {JSON} [options.video.resolution] The video resolution.
 * <small>By default, the <a href="#attr_VIDEO_RESOLUTION"><code>VGA</code></a> resolution option
 * is selected when not provided.</small>
 * [Rel: Skylink.VIDEO_RESOLUTION]
 * @param {Number|JSON} [options.video.resolution.width] The video resolution width.
 * - When provided as a number, it is the video resolution width.
 * - When provided as a JSON, it is the <code>navigator.mediaDevices.getUserMedia()</code> <code>.width</code> settings.
 * Parameters are <code>"ideal"</code> for the ideal video resolution width, <code>"exact"</code> for the exact video
 * resolution width, <code>"min"</code> for the minimum video resolution width and <code>"max"</code> for the maximum
 * video resolution width.
 * Note that this may result in constraints-related errors depending on browser/hardware support.
 * @param {Number|JSON} [options.video.resolution.height] The video resolution height.
 * - When provided as a number, it is the video resolution height.
 * - When provided as a JSON, it is the <code>navigator.mediaDevices.getUserMedia()</code> <code>.height</code> settings.
 * Parameters are <code>"ideal"</code> for the ideal video resolution height, <code>"exact"</code> for the exact video
 * resolution height, <code>"min"</code> for the minimum video resolution height and <code>"max"</code> for the maximum
 * video resolution height.
 * Note that this may result in constraints-related errors depending on browser/hardware support.
 * @param {Number|JSON} [options.video.frameRate] The video <a href="https://en.wikipedia.org/wiki/Frame_rate">
 * frameRate</a> per second (fps).
 * - When provided as a number, it is the video frame rate.
 * - When provided as a JSON, it is the <code>navigator.mediaDevices.getUserMedia()</code> <code>.frameRate</code> settings.
 * Parameters are <code>"ideal"</code> for the ideal video frame rate, <code>"exact"</code> for the exact video frame rate,
 * <code>"min"</code> for the minimum video frame rate and <code>"max"</code> for the maximum video frame rate.
 * Note that this may result in constraints-related errors depending on browser/hardware support.
 * @param {Array} [options.video.optional] <blockquote class="info">
 * This property has been deprecated. "optional" constraints have been removed from the specification.<br>
 * Note that this may result in constraints-related errors when the <code>options.useExactConstraints</code> value is
 * <code>true</code>. If you are looking to set the requested source ID of the video track,
 * use <code>options.video.deviceId</code> instead.</blockquote>
 * The <code>navigator.getUserMedia()</code> API <code>video: { optional [..] }</code> property.
 * @param {String} [options.video.deviceId] <blockquote class="info">
 * Note this is currently not supported in Firefox browsers.
 * </blockquote> The video track source ID of the device to use.
 * <small>The list of available video source IDs can be retrieved by the
 * <a href="https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices">
 * <code>navigator.mediaDevices.enumerateDevices</code> API</a>.</small>
 * @param {String|JSON} [options.video.facingMode] The video camera facing mode.
 * <small>The list of available camera facing modes is described in the
 * <a href="https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints/facingMode">
 * MediaTrackConstraints <code>facingMode</code> API</a>.</small>
 * @param {Function} [callback] The callback function fired when the request has completed.
 * <small>Function parameters signature is <code>function (error, success)</code>.</small>
 * <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
 * <code>mediaAccessSuccess</code> event</a> triggering the <code>isScreensharing</code> parameter
 * payload value as <code>false</code> for request success.</small>
 * @param {Error|String} callback.error The error result in request.
 * <small>Defined as <code>null</code> when there are no errors in the request.</small>
 * <small>Object signature is the <code>getUserMedia()</code> error when retrieving the camera Stream.</small>
 * @param {MediaStream} callback.success The success result in request.
 * <small>Defined as <code>null</code> when there are errors in the request.</small>
 * <small>Object signature is the camera Stream object.</small>
 * @example
 *   // Example 1: Get both audio and video.
 *   skylinkDemo.getUserMedia(function (error, success) {
 *     if (error) return;
 *     attachMediaStream(document.getElementById("my-video"), success);
 *   });
 *
 *   // Example 2: Get only audio.
 *   skylinkDemo.getUserMedia({
 *     audio: true
 *   }, function (error, success) {
 *     if (error) return;
 *     attachMediaStream(document.getElementById("my-audio"), success);
 *   });
 *
 *   // Example 3: Configure resolution for video
 *   skylinkDemo.getUserMedia({
 *     audio: true,
 *     video: {
 *       resolution: skylinkDemo.VIDEO_RESOLUTION.HD
 *     }
 *   }, function (error, success) {
 *     if (error) return;
 *     attachMediaStream(document.getElementById("my-video"), success);
 *   });
 *
 *   // Example 4: Configure stereo flag for OPUS codec audio (OPUS is always used by default)
 *   skylinkDemo.init({
 *     appKey: "xxxxxx",
 *     audioCodec: skylinkDemo.AUDIO_CODEC.OPUS
 *   }, function (initErr, initSuccess) {
 *     skylinkDemo.getUserMedia({
 *       audio: {
 *         stereo: true
 *       },
 *       video: true
 *     }, function (error, success) {
 *       if (error) return;
 *       attachMediaStream(document.getElementById("my-video"), success);
 *     });
 *   });
 *
 *   // Example 5: Configure frameRate for video
 *   skylinkDemo.getUserMedia({
 *     audio: true,
 *     video: {
 *       frameRate: 50
 *     }
 *   }, function (error, success) {
 *     if (error) return;
 *     attachMediaStream(document.getElementById("my-video"), success);
 *   });
 *
 *   // Example 6: Configure video and audio based on selected sources. Does not work for Firefox currently.
 *   var sources = { audio: [], video: [] };
 *
 *   function selectStream (audioSourceId, videoSourceId) {
 *     if (AdapterJS.webrtcDetectedBrowser === 'firefox') {
 *       console.warn("Currently this feature is not supported by Firefox browsers!");
 *       return;
 *     }
 *     skylinkDemo.getUserMedia({
 *       audio: {
 *         optional: [{ sourceId: audioSourceId }]
 *       },
 *       video: {
 *         optional: [{ sourceId: videoSourceId }]
 *       }
 *     }, function (error, success) {
 *       if (error) return;
 *       attachMediaStream(document.getElementById("my-video"), success);
 *     });
 *   }
 *
 *   navigator.mediaDevices.enumerateDevices().then(function (devices) {
 *     var selectedAudioSourceId = "";
 *     var selectedVideoSourceId = "";
 *     devices.forEach(function (device) {
 *       console.log(device.kind + ": " + device.label + " source ID = " + device.deviceId);
 *       // MediaDeviceInfo.kind values are "audioinput", "videoinput" and "audiooutput".
 *       if (device.kind === "audioinput") {
 *         selectedAudioSourceId = device.deviceId;
 *       } else if (device.kind === "videoinput") {
 *         selectedVideoSourceId = device.deviceId;
 *       }
 *     });
 *     selectStream(selectedAudioSourceId, selectedVideoSourceId);
 *   }).catch(function (error) {
 *     console.error("Failed", error);
 *   });
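 *
 *   // Example 7 (illustrative sketch): Enable the audio fallback so that when retrieving
 *   // both audio and video tracks fails, audio-only retrieval is attempted instead,
 *   // assuming the "audioFallback" init() option behaves as documented above.
 *   skylinkDemo.init({
 *     appKey: "xxxxxx",
 *     audioFallback: true
 *   }, function (initErr, initSuccess) {
 *     if (initErr) return;
 *     skylinkDemo.getUserMedia({ audio: true, video: true }, function (error, success) {
 *       if (error) return;
 *       // "success" may be an audio-only Stream if the camera could not be retrieved.
 *       attachMediaStream(document.getElementById("my-video"), success);
 *     });
 *   });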
 * @trigger <ol class="desc-seq">
 * <li>If <code>options.audio</code> value is <code>false</code> and <code>options.video</code>
 * value is <code>false</code>: <ol><li><b>ABORT</b> and return error.</li></ol></li>
 * <li>Retrieve camera Stream. <ol><li>If retrieval was successful: <ol>
 * <li>If there is any previous <code>getUserMedia()</code> Stream: <ol>
 * <li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a>.</li></ol></li>
 * <li>If there are missing audio or video tracks requested: <ol>
 * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers parameter payload
 * <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code> value as <code>false</code> and
 * <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li>
 * <li>Mutes / unmutes audio and video tracks based on current muted settings in <code>peerInfo.mediaStatus</code>.
 * <small>This can be retrieved with the <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
 * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers parameter payload
 * <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallback</code>
 * value as <code>false</code>.</li></ol></li><li>Else: <ol>
 * <li>If <code>options.audioFallback</code> is enabled in the <a href="#method_init"><code>init()</code> method</a>,
 * <code>options.audio</code> value is <code>true</code> and <code>options.video</code> value is <code>true</code>: <ol>
 * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers
 * parameter payload <code>state</code> as <code>FALLBACKING</code>, <code>isScreensharing</code>
 * value as <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
 * <li>Retrieve camera Stream with audio tracks only. <ol><li>If retrieval was successful: <ol>
 * <li>If there is any previous <code>getUserMedia()</code> Stream: <ol>
 * <li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a>.</li></ol></li>
 * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers
 * parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
 * value as <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
 * <li>Mutes / unmutes audio and video tracks based on current muted settings in <code>peerInfo.mediaStatus</code>.
 * <small>This can be retrieved with the <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
 * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
 * parameter payload <code>isScreensharing</code> value as <code>false</code> and
 * <code>isAudioFallback</code> value as <code>true</code>.</li></ol></li><li>Else: <ol>
 * <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers
 * parameter payload <code>isScreensharing</code> value as <code>false</code> and
 * <code>isAudioFallbackError</code> value as <code>true</code>.</li>
 * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers
 * parameter payload <code>state</code> as <code>ERROR</code>, <code>isScreensharing</code> value as
 * <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
 * <li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li><li>Else: <ol>
 * <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers parameter payload
 * <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallbackError</code> value as
 * <code>false</code>.</li><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li></ol></li></ol>
 * @for Skylink
 * @since 0.5.6
 */
Skylink.prototype.getUserMedia = function (options, callback) {
  var self = this;

  // Default to retrieving both audio and video when only a callback is provided.
  if (typeof options === 'function') {
    callback = options;
    options = {
      audio: true,
      video: true
    };

  } else if (typeof options !== 'object' || options === null) {
    if (typeof options === 'undefined') {
      options = {
        audio: true,
        video: true
      };

    } else {
      var invalidOptionsError = 'Please provide a valid options';
      log.error(invalidOptionsError, options);
      if (typeof callback === 'function') {
        callback(new Error(invalidOptionsError), null);
      }
      return;
    }

  } else if (!options.audio && !options.video) {
    var noConstraintOptionsSelectedError = 'Please select audio or video';
    log.error(noConstraintOptionsSelectedError, options);
    if (typeof callback === 'function') {
      callback(new Error(noConstraintOptionsSelectedError), null);
    }
    return;
  }

  /*if (window.location.protocol !== 'https:' && AdapterJS.webrtcDetectedBrowser === 'chrome' &&
    AdapterJS.webrtcDetectedVersion > 46) {
    errorMsg = 'getUserMedia() has to be called in https:// application';
    log.error(errorMsg, options);
    if (typeof callback === 'function') {
      callback(new Error(errorMsg), null);
    }
    return;
  }*/

  self._throttle(function (runFn) {
    if (!runFn) {
      if (self._initOptions.throttlingShouldThrowError) {
        var throttleLimitError = 'Unable to run as throttle interval has not reached (' +
          self._initOptions.throttleIntervals.getUserMedia + 'ms).';
        log.error(throttleLimitError);

        if (typeof callback === 'function') {
          callback(new Error(throttleLimitError), null);
        }
      }
      return;
    }

    // Resolve the callback through the "mediaAccessSuccess" / "mediaAccessError" events,
    // ignoring screensharing Streams.
    if (typeof callback === 'function') {
      var mediaAccessSuccessFn = function (stream) {
        self.off('mediaAccessError', mediaAccessErrorFn);
        callback(null, stream);
      };
      var mediaAccessErrorFn = function (error) {
        self.off('mediaAccessSuccess', mediaAccessSuccessFn);
        callback(error, null);
      };

      self.once('mediaAccessSuccess', mediaAccessSuccessFn, function (stream, isScreensharing) {
        return !isScreensharing;
      });

      self.once('mediaAccessError', mediaAccessErrorFn, function (error, isScreensharing) {
        return !isScreensharing;
      });
    }

    // Parse stream settings
    var settings = self._parseStreamSettings(options);

    var onSuccessCbFn = function (stream) {
      if (settings.mutedSettings.shouldAudioMuted) {
        self._streamsMutedSettings.audioMuted = true;
      }

      if (settings.mutedSettings.shouldVideoMuted) {
        self._streamsMutedSettings.videoMuted = true;
      }

      self._onStreamAccessSuccess(stream, settings, false, false);
    };

    var onErrorCbFn = function (error) {
      self._onStreamAccessError(error, settings, false, false);
    };

    try {
      if (typeof (AdapterJS || {}).webRTCReady !== 'function') {
        return onErrorCbFn(new Error('Failed to call getUserMedia() as AdapterJS is not yet loaded!'));
      }

      AdapterJS.webRTCReady(function () {
        navigator.getUserMedia(settings.getUserMediaSettings, onSuccessCbFn, onErrorCbFn);
      });
    } catch (error) {
      onErrorCbFn(error);
    }

  }, 'getUserMedia', self._initOptions.throttleIntervals.getUserMedia);
};

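// A minimal promise-based wrapper over getUserMedia() (illustrative sketch only, not part of
// the SDK API): "skylink" is assumed to be an initialized Skylink instance. It relies only on
// the documented (options, callback) signature above, and is defined but never invoked here.
function getUserMediaAsPromise(skylink, options) {
  return new Promise(function (resolve, reject) {
    skylink.getUserMedia(options, function (error, stream) {
      // Resolve with the camera Stream, or reject with the getUserMedia() error.
      if (error) {
        reject(error);
      } else {
        resolve(stream);
      }
    });
  });
}
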
/**
 * <blockquote class="info">
 * Note that if the <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> is available despite having
 * the <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> available, the
 * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> is sent instead of the
 * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> to Peers.
 * </blockquote>
 * Function that sends a new <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>
 * to all connected Peers in the Room.
 * @method sendStream
 * @param {JSON|MediaStream} options The <a href="#method_getUserMedia"><code>getUserMedia()</code>
 * method</a> <code>options</code> parameter settings.
 * - When provided as a <code>MediaStream</code> object, this configures <code>options.audio</code> and
 * <code>options.video</code> based on the tracks available in the <code>MediaStream</code> object,
 * and configures <code>options.audio.mute</code> and <code>options.video.mute</code> based on the
 * <code>.enabled</code> flags of the tracks provided in the <code>MediaStream</code> object, without
 * invoking the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>.
 * <small>Object signature matches the <code>options</code> parameter in the
 * <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>.</small>
 * @param {Function} [callback] The callback function fired when the request has completed.
 * <small>Function parameters signature is <code>function (error, success)</code>.</small>
 * <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
 * <code>mediaAccessSuccess</code> event</a> triggering the <code>isScreensharing</code> parameter payload value
 * as <code>false</code> for request success when User is in Room without Peers,
 * or by the <a href="#event_peerRestart"><code>peerRestart</code> event</a> triggering the
 * <code>isSelfInitiateRestart</code> parameter payload value as <code>true</code> for all connected Peers
 * for request success when User is in Room with Peers.</small>
 * @param {Error|String} callback.error The error result in request.
 * <small>Defined as <code>null</code> when there are no errors in the request.</small>
 * <small>Object signature is the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> error or
 * the error when an invalid <code>options</code> is provided.</small>
 * @param {MediaStream} callback.success The success result in request.
 * <small>Defined as <code>null</code> when there are errors in the request.</small>
 * <small>Object signature is the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>
 * Stream object.</small>
 * @example
 *   // Example 1: Send MediaStream object before being connected to Room
 *   function retrieveStreamBySourceForFirefox (sourceId) {
 *     navigator.mediaDevices.getUserMedia({
 *       audio: true,
 *       video: {
 *         sourceId: { exact: sourceId }
 *       }
 *     }).then(function (stream) {
 *       skylinkDemo.sendStream(stream, function (error, success) {
 *         if (error) return;
 *         if (stream === success) {
 *           console.info("Same MediaStream has been sent");
 *         }
 *         console.log("Stream is now being sent to Peers");
 *         attachMediaStream(document.getElementById("my-video"), success);
 *       });
 *     });
 *   }
 *
 *   // Example 2: Send video after being connected to Room
 *   function sendVideo () {
 *     skylinkDemo.joinRoom(function (jRError, jRSuccess) {
 *       if (jRError) return;
 *       skylinkDemo.sendStream({
 *         audio: true,
 *         video: true
 *       }, function (error, success) {
 *         if (error) return;
 *         console.log("getUserMedia() Stream with video is now being sent to Peers");
 *         attachMediaStream(document.getElementById("my-video"), success);
 *       });
 *     });
 *   }
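 *
 *   // Example 3 (illustrative sketch): Switch to a different camera while connected, assuming
 *   // "newCameraDeviceId" was obtained via navigator.mediaDevices.enumerateDevices().
 *   function switchCamera (newCameraDeviceId) {
 *     skylinkDemo.sendStream({
 *       audio: true,
 *       video: {
 *         deviceId: newCameraDeviceId
 *       }
 *     }, function (error, success) {
 *       if (error) return;
 *       console.log("Now sending the Stream from the selected camera");
 *       attachMediaStream(document.getElementById("my-video"), success);
 *     });
 *   }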
 * @trigger <ol class="desc-seq">
 * <li>Checks <code>options</code> provided. <ol><li>If provided parameter <code>options</code> is not valid: <ol>
 * <li><b>ABORT</b> and return error.</li></ol></li>
 * <li>Else if provided parameter <code>options</code> is a Stream object: <ol>
 * <li>Checks if there are any audio or video tracks. <ol><li>If there are no tracks: <ol>
 * <li><b>ABORT</b> and return error.</li></ol></li><li>Else: <ol>
 * <li>Set <code>options.audio</code> value as <code>true</code> if Stream has audio tracks.</li>
 * <li>Set <code>options.video</code> value as <code>true</code> if Stream has video tracks.</li>
 * <li>Mutes / unmutes audio and video tracks based on current muted settings in
 * <code>peerInfo.mediaStatus</code>. <small>This can be retrieved with the
 * <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
 * <li>If there is any previous <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>:
 * <ol><li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a> to stop previous Stream.</li></ol></li>
 * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
 * parameter payload <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallback</code>
 * value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
 * <li>Invoke <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> with
 * <code>options</code> provided in <code>sendStream()</code>. <ol><li>If request has errors: <ol>
 * <li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li></ol></li>
 * <li>If there is currently no <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> and User is in Room: <ol>
 * <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
 * <code>isSelf</code> value as <code>true</code> and <code>stream</code> as
 * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.</li>
 * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
 * <code>isSelf</code> value as <code>true</code>.</li>
 * <li>Checks if MCU is enabled for App Key provided in <a href="#method_init"><code>init()</code> method</a>. <ol>
 * <li>If MCU is enabled: <ol><li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code>
 * method</a>. <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li>
 * <li>Else: <ol><li>If there are connected Peers in the Room: <ol>
 * <li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>. <ol>
 * <li>If request has errors: <ol><li><b>ABORT</b> and return error.
 * </li></ol></li></ol></li></ol></li></ol></li></ol></li></ol></li></ol>
 * @for Skylink
 * @since 0.5.6
 */

Skylink.prototype.sendStream = function (options, callback) {
  var self = this;

  var renegotiate = function (newStream, cb) {
    if (Object.keys(self._peerConnections).length > 0 || self._hasMCU) {
      self._refreshPeerConnection(Object.keys(self._peerConnections), false, {}, function (err, success) {
        if (err) {
          log.error('Failed refreshing connections for sendStream() ->', err);
          if (typeof cb === 'function') {
            cb(new Error('Failed refreshing connections.'), null);
          }
          return;
        }
        if (typeof cb === 'function') {
          cb(null, newStream);
        }
      });
    } else if (typeof cb === 'function') {
      cb(null, newStream);
    }
  };

  // Replaces the matching tracks in-place when possible, and falls back to a full
  // renegotiation when the new Stream has track kinds that the original Stream lacks.
  var performReplaceTracks = function (originalStream, newStream, cb) {
    if (!originalStream) {
      renegotiate(newStream, cb);
      return;
    }
    var newStreamHasVideoTrack = Array.isArray(newStream.getVideoTracks()) && newStream.getVideoTracks().length;
    var newStreamHasAudioTrack = Array.isArray(newStream.getAudioTracks()) && newStream.getAudioTracks().length;
    var originalStreamHasVideoTrack = Array.isArray(originalStream.getVideoTracks()) && originalStream.getVideoTracks().length;
    var originalStreamHasAudioTrack = Array.isArray(originalStream.getAudioTracks()) && originalStream.getAudioTracks().length;

    if ((newStreamHasVideoTrack && !originalStreamHasVideoTrack) || (newStreamHasAudioTrack && !originalStreamHasAudioTrack)) {
      renegotiate(newStream, cb);
      return;
    }

    if (newStreamHasVideoTrack && originalStreamHasVideoTrack) {
      self._replaceTrack(originalStream.getVideoTracks()[0].id, newStream.getVideoTracks()[0]);
    }

    if (newStreamHasAudioTrack && originalStreamHasAudioTrack) {
      self._replaceTrack(originalStream.getAudioTracks()[0].id, newStream.getAudioTracks()[0]);
    }
  };

  var restartFn = function (originalStream, stream) {
    if (self._inRoom) {

      if (!self._streams.screenshare) {
        self._trigger('incomingStream', self._user.sid, stream, true, self.getPeerInfo(), false, stream.id || stream.label);
        self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
      } else {
        performReplaceTracks(originalStream, stream, callback);
      }

      if (self._streams.userMedia) {
        performReplaceTracks(originalStream, stream, callback);
      }

    } else if (typeof callback === 'function') {
      callback(null, stream);
    }
  };

  // Note: Sometimes it may be "function" or "object" but then "function" might be mistaken
  // for the callback function, so for now fixing it that way.
  if ((typeof options !== 'object' || options === null) && !(AdapterJS && AdapterJS.WebRTCPlugin &&
    AdapterJS.WebRTCPlugin.plugin && ['function', 'object'].indexOf(typeof options) > -1)) {
    var invalidOptionsError = 'Provided stream settings is invalid';
    log.error(invalidOptionsError, options);
    if (typeof callback === 'function') {
      callback(new Error(invalidOptionsError), null);
    }
    return;
  }

  if (!self._inRoom) {
    log.warn('There are no peers to send stream to as not in room!');
  }

  if (AdapterJS.webrtcDetectedBrowser === 'edge') {
    var edgeNotSupportError = 'Edge browser currently does not support renegotiation.';
    log.error(edgeNotSupportError, options);
    if (typeof callback === 'function') {
      callback(new Error(edgeNotSupportError), null);
    }
    return;
  }

  var origStream = null;

  if (self._streams.userMedia) {
    origStream = self._streams.userMedia.stream;
  }

  if (self._streams.screenshare) {
    origStream = self._streams.screenshare.stream;
  }

  if (typeof options.getAudioTracks === 'function' || typeof options.getVideoTracks === 'function') {
    // Treat a track as active unless it has ended.
    var checkActiveTracksFn = function (tracks) {
      for (var t = 0; t < tracks.length; t++) {
        if (!(tracks[t].ended || (typeof tracks[t].readyState === 'string' ?
          tracks[t].readyState !== 'live' : false))) {
          return true;
        }
      }
      return false;
    };

    if (!checkActiveTracksFn(options.getAudioTracks()) && !checkActiveTracksFn(options.getVideoTracks())) {
      var invalidStreamError = 'Provided stream object does not have audio or video tracks.';
      log.error(invalidStreamError, options);
      if (typeof callback === 'function') {
        callback(new Error(invalidStreamError), null);
      }
      return;
    }

    self._onStreamAccessSuccess(options, {
      settings: {
        audio: true,
        video: true
      },
      getUserMediaSettings: {
        audio: true,
        video: true
      }
    }, false, false);

    restartFn(origStream, options);

  } else {
    self.getUserMedia(options, function (err, stream) {
      if (err) {
        if (typeof callback === 'function') {
          callback(err, null);
        }
        return;
      }
      restartFn(origStream, stream);
    });
  }
};

/**
 * <blockquote class="info">
 * Note that broadcasted events from the <a href="#method_muteStream"><code>muteStream()</code> method</a>,
 * <a href="#method_stopStream"><code>stopStream()</code> method</a>,
 * <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
 * <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
 * <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
 * <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
 * sent within less than an interval.
 * </blockquote>
 * Function that stops the <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.
 * @method stopStream
 * @example
 *   // Retrieve the camera Stream first, then stop it later on.
 *   function stopStream () {
 *     skylinkDemo.stopStream();
 *   }
 *
 *   skylinkDemo.getUserMedia();
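 *
 *   // Illustrative sketch: stop the camera Stream and observe the "mediaAccessStopped"
 *   // event documented in the sequence below (payload parameter order is assumed here).
 *   skylinkDemo.once("mediaAccessStopped", function (isScreensharing) {
 *     if (!isScreensharing) {
 *       console.log("Camera Stream has been stopped");
 *     }
 *   });
 *   skylinkDemo.stopStream();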
 * @trigger <ol class="desc-seq">
 * <li>Checks if there is a <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>. <ol>
 * <li>If there is a <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>: <ol>
 * <li>Stop the <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>. <ol>
 * <li><a href="#event_mediaAccessStopped"><code>mediaAccessStopped</code> event</a> triggers
 * parameter payload <code>isScreensharing</code> value as <code>false</code>.</li><li>If User is in Room: <ol>
 * <li><a href="#event_streamEnded"><code>streamEnded</code> event</a> triggers parameter
 * payload <code>isSelf</code> value as <code>true</code> and <code>isScreensharing</code> value as <code>false</code>.
 * </li><li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
 * <code>isSelf</code> value as <code>true</code>.</li></ol></li></ol></li></ol></li></ol></li></ol>
 * @for Skylink
 * @since 0.5.6
 */
Skylink.prototype.stopStream = function () {
  if (this._streams.userMedia) {
    this._stopStreams({
      userMedia: true
    });
  }
};

/**
 * <blockquote class="info">
 * Note that broadcasted events from the <a href="#method_muteStream"><code>muteStream()</code> method</a>,
 * <a href="#method_stopStream"><code>stopStream()</code> method</a>,
 * <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
 * <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
 * <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
 * <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
 * sent within less than an interval.
 * </blockquote>
 * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
 * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio or video tracks.
 * @method muteStream
 * @param {JSON} options The Streams muting options.
 * @param {Boolean} [options.audioMuted=true] The flag if all Streams audio
 * tracks should be muted or not.
 * @param {Boolean} [options.videoMuted=true] The flag if all Streams video
 * tracks should be muted or not.
 * @example
 *   // Example 1: Mute both audio and video tracks in all Streams
 *   skylinkDemo.muteStream({
 *     audioMuted: true,
 *     videoMuted: true
 *   });
 *
 *   // Example 2: Mute only audio tracks in all Streams
 *   skylinkDemo.muteStream({
 *     audioMuted: true,
 *     videoMuted: false
 *   });
 *
 *   // Example 3: Mute only video tracks in all Streams
 *   skylinkDemo.muteStream({
 *     audioMuted: false,
 *     videoMuted: true
 *   });
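 *
 *   // Example 4 (illustrative sketch): Toggle the audio muted state based on the current
 *   // "peerInfo.mediaStatus" values retrieved with getPeerInfo().
 *   var mediaStatus = skylinkDemo.getPeerInfo().mediaStatus;
 *   skylinkDemo.muteStream({
 *     audioMuted: !mediaStatus.audioMuted,
 *     videoMuted: mediaStatus.videoMuted
 *   });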
 * @trigger <ol class="desc-seq">
 * <li>If provided parameter <code>options</code> is invalid: <ol><li><b>ABORT</b> and return error.</li></ol></li>
 * <li>Checks if there are any available Streams: <ol><li>If there are no available Streams: <ol>
 * <li><b>ABORT</b> and return error.</li></ol></li><li>If User is in Room: <ol>
 * <li>Checks if there are audio tracks to mute / unmute: <ol><li>If there are audio tracks to mute / unmute: <ol>
 * <li>If <code>options.audioMuted</code> value is not the same as the current
 * <code>peerInfo.mediaStatus.audioMuted</code>: <small>This can be retrieved with the
 * <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small> <ol>
 * <li><em>For Peer only</em> <a href="#event_peerUpdated"><code>peerUpdated</code> event</a>
 * triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li>
 * <li><em>For Peer only</em> <a href="#event_streamMuted"><code>streamMuted</code> event</a>
 * triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li>
 * <li>Checks if there are video tracks to mute / unmute: <ol><li>If there are video tracks to mute / unmute: <ol>
 * <li>If <code>options.videoMuted</code> value is not the same as the current
 * <code>peerInfo.mediaStatus.videoMuted</code>: <small>This can be retrieved with the
 * <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small> <ol>
 * <li><em>For Peer only</em> <a href="#event_peerUpdated"><code>peerUpdated</code> event</a>
 * triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li>
 * <li><em>For Peer only</em> <a href="#event_streamMuted"><code>streamMuted</code> event</a> triggers with
 * parameter payload <code>isSelf</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li></ol></li>
 * <li>If <code>options.audioMuted</code> value is not the same as the current
 * <code>peerInfo.mediaStatus.audioMuted</code> or <code>options.videoMuted</code> value is not
 * the same as the current <code>peerInfo.mediaStatus.videoMuted</code>: <ol>
 * <li><a href="#event_localMediaMuted"><code>localMediaMuted</code> event</a> triggers.</li>
 * <li>If User is in Room: <ol><li><a href="#event_streamMuted"><code>streamMuted</code> event</a>
 * triggers with parameter payload <code>isSelf</code> value as <code>true</code>.</li>
 * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers with
 * parameter payload <code>isSelf</code> value as <code>true</code>.</li></ol></li></ol></li></ol></li></ol>
 * @for Skylink
 * @since 0.5.7
 */
Skylink.prototype.muteStream = function (options) {
  var self = this;

  if (typeof options !== 'object') {
    log.error('Provided settings is not an object');
    return;
  }

  if (!(self._streams.userMedia && self._streams.userMedia.stream) &&
    !(self._streams.screenshare && self._streams.screenshare.stream)) {
    log.warn('No streams are available to mute / unmute!');
    return;
  }

  var audioMuted = typeof options.audioMuted === 'boolean' ? options.audioMuted : true;
  var videoMuted = typeof options.videoMuted === 'boolean' ? options.videoMuted : true;
  var hasToggledAudio = false;
  var hasToggledVideo = false;

  if (self._streamsMutedSettings.audioMuted !== audioMuted) {
    self._streamsMutedSettings.audioMuted = audioMuted;
    hasToggledAudio = true;
  }

  if (self._streamsMutedSettings.videoMuted !== videoMuted) {
    self._streamsMutedSettings.videoMuted = videoMuted;
    hasToggledVideo = true;
  }

  if (hasToggledVideo || hasToggledAudio) {
    var streamTracksAvailability = self._muteStreams();

    if (hasToggledVideo && self._inRoom) {
      self._sendChannelMessage({
        type: self._SIG_MESSAGE_TYPE.MUTE_VIDEO,
        mid: self._user.sid,
        rid: self._room.id,
        muted: self._streamsMutedSettings.videoMuted,
        stamp: (new Date()).getTime()
      });
    }

    if (hasToggledAudio && self._inRoom) {
      // Stagger the audio mute message after the video mute message, since signaling
      // messages sent within less than an interval may be queued (see the note above).
      setTimeout(function () {
        self._sendChannelMessage({
          type: self._SIG_MESSAGE_TYPE.MUTE_AUDIO,
          mid: self._user.sid,
          rid: self._room.id,
          muted: self._streamsMutedSettings.audioMuted,
          stamp: (new Date()).getTime()
        });
      }, hasToggledVideo ? 1050 : 0);
    }

    if ((streamTracksAvailability.hasVideo && hasToggledVideo) ||
      (streamTracksAvailability.hasAudio && hasToggledAudio)) {

      self._trigger('localMediaMuted', {
        audioMuted: streamTracksAvailability.hasAudio ? self._streamsMutedSettings.audioMuted : true,
        videoMuted: streamTracksAvailability.hasVideo ? self._streamsMutedSettings.videoMuted : true
      });

      if (self._inRoom) {
        self._trigger('streamMuted', self._user.sid, self.getPeerInfo(), true,
          self._streams.screenshare && self._streams.screenshare.stream);
        self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
      }
    }
  }
};
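
// A minimal push-to-talk sketch built on muteStream() (illustrative only, not part of the
// SDK API): "skylink" is assumed to be a connected Skylink instance. Defined but not invoked.
function pushToTalk(skylink, isTalking) {
  // Unmute audio only while talking; leave the current video muted state untouched by
  // reading it back from the documented peerInfo.mediaStatus payload.
  skylink.muteStream({
    audioMuted: !isTalking,
    videoMuted: skylink.getPeerInfo().mediaStatus.videoMuted
  });
}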

/**
 * <blockquote class="info"><b>Deprecation Warning!</b>
 * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
 * </blockquote>
 * Function that unmutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
 * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio tracks.
 * @method enableAudio
 * @deprecated true
 * @example
 *   function unmuteAudio () {
 *     skylinkDemo.enableAudio();
 *   }
 * @trigger <ol class="desc-seq">
 * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
 * <code>options.audioMuted</code> value as <code>false</code> and
 * <code>options.videoMuted</code> value with current <code>peerInfo.mediaStatus.videoMuted</code> value.
 * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
 * @for Skylink
 * @since 0.5.5
 */
Skylink.prototype.enableAudio = function () {
  this.muteStream({
    audioMuted: false,
    videoMuted: this._streamsMutedSettings.videoMuted
  });
};

/**
 * <blockquote class="info"><b>Deprecation Warning!</b>
 * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
 * </blockquote>
 * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
 * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio tracks.
 * @method disableAudio
 * @deprecated true
 * @example
 *   function muteAudio () {
 *     skylinkDemo.disableAudio();
 *   }
 * @trigger <ol class="desc-seq">
 * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
 * <code>options.audioMuted</code> value as <code>true</code> and
 * <code>options.videoMuted</code> value with current <code>peerInfo.mediaStatus.videoMuted</code> value.
 * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
 * @for Skylink
 * @since 0.5.5
 */
Skylink.prototype.disableAudio = function () {
  this.muteStream({
    audioMuted: true,
    videoMuted: this._streamsMutedSettings.videoMuted
  });
};

/**
 * <blockquote class="info"><b>Deprecation Warning!</b>
 * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
 * </blockquote>
 * Function that unmutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
 * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> video tracks.
 * @method enableVideo
 * @deprecated true
 * @example
 *   function unmuteVideo () {
 *     skylinkDemo.enableVideo();
 *   }
 * @trigger <ol class="desc-seq">
 * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
 * <code>options.videoMuted</code> value as <code>false</code> and
 * <code>options.audioMuted</code> value with current <code>peerInfo.mediaStatus.audioMuted</code> value.
 * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
 * @for Skylink
 * @since 0.5.5
 */
Skylink.prototype.enableVideo = function () {
  this.muteStream({
    videoMuted: false,
    audioMuted: this._streamsMutedSettings.audioMuted
  });
};

/**
 * <blockquote class="info"><b>Deprecation Warning!</b>
 * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
 * </blockquote>
 * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
 * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> video tracks.
 * @method disableVideo
 * @deprecated true
 * @example
 *   function muteVideo () {
 *     skylinkDemo.disableVideo();
 *   }
 * @trigger <ol class="desc-seq">
 * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
 * <code>options.videoMuted</code> value as <code>true</code> and
 * <code>options.audioMuted</code> value with current <code>peerInfo.mediaStatus.audioMuted</code> value.
 * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
 * @for Skylink
 * @since 0.5.5
 */
Skylink.prototype.disableVideo = function () {
  this.muteStream({
    videoMuted: true,
    audioMuted: this._streamsMutedSettings.audioMuted
  });
};

/**
 * <blockquote class="info">
 * For a better user experience, the functionality is throttled when invoked many times in less
 * than the milliseconds interval configured in the <a href="#method_init"><code>init()</code> method</a>.
 * Note that the Opera and Edge browsers do not support screensharing, and as for the IE / Safari browsers using
 * the Temasys Plugin screensharing support, check out the <a href="https://temasys.com.sg/plugin/#commercial-licensing">
 * commercial licensing</a> for more options.
 * </blockquote>
 * Function that retrieves screensharing Stream.
 * @method shareScreen
 * @param {JSON|Boolean} [enableAudio=false] The flag if audio tracks should be retrieved.
 * @param {Boolean} [enableAudio.stereo=false] <blockquote class="info"><b>Deprecation Warning!</b>
 * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.stereo</code> and
 * the <code>options.codecParams.audio.opus["sprop-stereo"]</code>
 * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
 * <code>options.codecParams.audio.opus.stereo</code> or <code>options.codecParams.audio.opus["sprop-stereo"]</code>
 * is configured, this overrides the <code>options.audio.stereo</code> setting.</blockquote>
 * The flag if OPUS audio codec stereo band should be configured for sending encoded audio data.
 * <small>When not provided, the default browser configuration is used.</small>
 * @param {Boolean} [enableAudio.usedtx] <blockquote class="info"><b>Deprecation Warning!</b>
 * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.usedtx</code>
 * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
 * <code>options.codecParams.audio.opus.usedtx</code> is configured, this overrides the
 * <code>options.audio.usedtx</code> setting. Note that this feature might
 * not work depending on the browser support and implementation.</blockquote>
 * The flag if OPUS audio codec should enable DTX (Discontinuous Transmission) for sending encoded audio data.
 * <small>This might help to reduce bandwidth as it reduces the bitrate during silence or background noise, and
 * goes hand-in-hand with the <code>options.voiceActivityDetection</code> flag in the <a href="#method_joinRoom">
 * <code>joinRoom()</code> method</a>.</small>
 * <small>When not provided, the default browser configuration is used.</small>
 * @param {Boolean} [enableAudio.useinbandfec] <blockquote class="info"><b>Deprecation Warning!</b>
 * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.useinbandfec</code>
 * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
 * <code>options.codecParams.audio.opus.useinbandfec</code> is configured, this overrides the
 * <code>options.audio.useinbandfec</code> setting. Note that this parameter should only be used
 * for debugging purposes.</blockquote>
 * The flag if OPUS audio codec has the capability to take advantage of the in-band FEC
 * (Forward Error Correction) when sending encoded audio data.
 * <small>This helps to reduce the harm of packet loss by encoding information about the previous packet loss.</small>
 * <small>When not provided, the default browser configuration is used.</small>
 * @param {Number} [enableAudio.maxplaybackrate] <blockquote class="info"><b>Deprecation Warning!</b>
 * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.maxplaybackrate</code>
 * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
 * <code>options.codecParams.audio.opus.maxplaybackrate</code> is configured, this overrides the
 * <code>options.audio.maxplaybackrate</code> setting. Note that this feature might
 * not work depending on the browser support and implementation, and
 * that this parameter should only be used for debugging purposes.</blockquote>
 * The maximum output sampling rate in Hz (hertz) that the OPUS audio codec is capable of receiving
 * decoded audio data at, to adjust to the hardware limitations and ensure that any audio data sent
 * is not encoded at a sampling rate higher than this.
 * <small>This value must be between <code>8000</code> and <code>48000</code>.</small>
 * <small>When not provided, the default browser configuration is used.</small>
 * @param {Boolean} [enableAudio.echoCancellation=true] <blockquote class="info">
 * For Chrome/Opera/IE/Safari/Bowser, the echo cancellation functionality may not work and may produce terrible
 * feedback. It is recommended to use headphones or other microphone devices rather than the device's
 * built-in microphone.</blockquote> The flag to enable echo cancellation for the audio track.
 * <small>Note that this will not be toggled for the Chrome/Opera case when the <code>mediaSource</code> value is
 * <code>["tab", "audio"]</code>.</small>
 * @param {String|Array|JSON} [mediaSource=screen] The screensharing media source to select.
 * <small>Note that multiple sources are not supported by Firefox as of the time of this release.
 * Firefox will use the first item specified in the Array in the event that multiple sources are defined.</small>
 * <small>E.g. <code>["screen", "window"]</code>, <code>["tab", "audio"]</code>, <code>"screen"</code>, <code>"tab"</code>
 * or <code>{ sourceId: "xxxxx", mediaSource: "screen" }</code>.</small>
 * [Rel: Skylink.MEDIA_SOURCE]
 * @param {Function} [callback] The callback function fired when the request has completed.
 * <small>Function parameters signature is <code>function (error, success)</code>.</small>
 * <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
 * <code>mediaAccessSuccess</code> event</a> triggering the <code>isScreensharing</code> parameter payload value
 * as <code>true</code> for request success when User is not in the Room or is in Room without Peers,
 * or by the <a href="#event_peerRestart"><code>peerRestart</code> event</a> triggering the
 * <code>isSelfInitiateRestart</code> parameter payload value as <code>true</code> for all connected Peers
 * for request success when User is in Room with Peers.</small>
 * @param {Error|String} callback.error The error result in request.
 * <small>Defined as <code>null</code> when there are no errors in the request.</small>
 * <small>Object signature is the <code>shareScreen()</code> error when retrieving the screensharing Stream.</small>
 * @param {MediaStream} callback.success The success result in request.
 * <small>Defined as <code>null</code> when there are errors in the request.</small>
 * <small>Object signature is the screensharing Stream object.</small>
  1002. * @example
* // Example 1: Share screen with audio
* skylinkDemo.shareScreen(true, function (error, success) {
*   if (error) return;
*   attachMediaStream(document.getElementById("my-screen"), success);
* });
*
* // Example 2: Share screen without audio
* skylinkDemo.shareScreen(false, function (error, success) {
*   if (error) return;
*   attachMediaStream(document.getElementById("my-screen"), success);
* });
*
* // Example 3: Share "window" media source
* skylinkDemo.shareScreen("window", function (error, success) {
*   if (error) return;
*   attachMediaStream(document.getElementById("my-screen"), success);
* });
*
* // Example 4: Share tab and its audio media source
* skylinkDemo.shareScreen(true, ["tab", "audio"], function (error, success) {
*   if (error) return;
*   attachMediaStream(document.getElementById("my-screen"), success);
* });
*
* // Example 5: Share "window" and "screen" media source
* skylinkDemo.shareScreen(["window", "screen"], function (error, success) {
*   if (error) return;
*   attachMediaStream(document.getElementById("my-screen"), success);
* });
*
* // Example 6: Share "window" with specific media source for specific plugin build users.
* skylinkDemo.shareScreen({ mediaSource: "window", sourceId: "xxxxx" }, function (error, success) {
*   if (error) return;
*   attachMediaStream(document.getElementById("my-screen"), success);
* });
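*
* // Example 7: A sketch of the recommended replacement for the deprecated
* // enableAudio.stereo / enableAudio.usedtx / enableAudio.useinbandfec flags:
* // configure the OPUS parameters through the init() codecParams option named in the
* // deprecation warnings above (the appKey value here is a placeholder).
* skylinkDemo.init({
*   appKey: "your-app-key",
*   codecParams: {
*     audio: {
*       opus: { stereo: true, usedtx: true, useinbandfec: true }
*     }
*   }
* }, function (initError, initSuccess) {
*   if (initError) return;
*   skylinkDemo.shareScreen(true, function (error, success) {
*     if (error) return;
*     attachMediaStream(document.getElementById("my-screen"), success);
*   });
* });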
  1038. * @trigger <ol class="desc-seq">
  1039. * <li>Retrieves screensharing Stream. <ol><li>If retrieval was successful: <ol><li>If browser is Firefox: <ol>
  1040. * <li>If there are missing audio or video tracks requested: <ol>
  1041. * <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
  1042. * <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
  1043. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a>
  1044. * triggers parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
  1045. * value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li>
  1046. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  1047. * parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
  1048. * value as <code>false</code>.</li></ol></li><li>Else: <ol>
* <li>If audio is requested: <small>Chrome, Safari and IE currently don't support retrieval of
  1050. * audio track together with screensharing video track.</small> <ol><li>Retrieves audio Stream: <ol>
  1051. * <li>If retrieval was successful: <ol><li>Attempts to attach screensharing Stream video track to audio Stream. <ol>
  1052. * <li>If attachment was successful: <ol><li><a href="#event_mediaAccessSuccess">
  1053. * <code>mediaAccessSuccess</code> event</a> triggers parameter payload <code>isScreensharing</code>
  1054. * value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li><li>Else: <ol>
  1055. * <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
  1056. * <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
  1057. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers parameter payload
  1058. * <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code> value as <code>true</code> and
  1059. * <code>isAudioFallback</code> value as <code>false</code>.</li>
  1060. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  1061. * parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
  1062. * value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1063. * <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
  1064. * <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
  1065. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a>
  1066. * triggers parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
  1067. * value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li>
  1068. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  1069. * parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
  1070. * value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1071. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a>
  1072. * triggers parameter payload <code>isScreensharing</code> value as <code>true</code>
  1073. * and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1074. * <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers parameter payload
  1075. * <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code> value as
  1076. * <code>false</code>.</li><li><b>ABORT</b> and return error.</li></ol></li></ol></li><li>If User is in Room: <ol>
  1077. * <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
  1078. * <code>isSelf</code> value as <code>true</code> and <code>stream</code> as <code>shareScreen()</code> Stream.</li>
  1079. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  1080. * <code>isSelf</code> value as <code>true</code>.</li>
  1081. * <li>Checks if MCU is enabled for App Key provided in <a href="#method_init"><code>init()</code> method</a>. <ol>
  1082. * <li>If MCU is enabled: <ol><li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>.
  1083. * <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1084. * <li>If there are connected Peers in the Room: <ol><li>Invoke <a href="#method_refreshConnection">
  1085. * <code>refreshConnection()</code> method</a>. <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li>
  1086. * </ol></li></ol></li></ol></li></ol></li></ol></li></ol></li></ol>
  1087. * @for Skylink
  1088. * @since 0.6.0
  1089. */
  1090. Skylink.prototype.shareScreen = function (enableAudio, mediaSource, callback) {
  1091. var self = this;
  1092. var enableAudioSettings = false;
  1093. var useMediaSource = [self.MEDIA_SOURCE.SCREEN];
  1094. var useMediaSourceId = null;
  1095. var checkIfSourceExistsFn = function (val) {
  1096. for (var prop in self.MEDIA_SOURCE) {
  1097. if (self.MEDIA_SOURCE.hasOwnProperty(prop) && self.MEDIA_SOURCE[prop] === val) {
  1098. return true;
  1099. }
  1100. }
  1101. return false;
  1102. };
  1103.  
  1104. // shareScreen("screen") or shareScreen({ sourceId: "xxxx", mediaSource: "xxxxx" })
  1105. if (enableAudio && typeof enableAudio === 'string' ||
  1106. (enableAudio && typeof enableAudio === 'object' && enableAudio.sourceId && enableAudio.mediaSource)) {
  1107. if (checkIfSourceExistsFn(typeof enableAudio === 'object' ? enableAudio.mediaSource : enableAudio)) {
  1108. useMediaSource = [typeof enableAudio === 'object' ? enableAudio.mediaSource : enableAudio];
  1109. }
  1110. useMediaSourceId = typeof enableAudio === 'object' ? enableAudio.sourceId : null;
  1111. // shareScreen(["screen", "window"])
  1112. } else if (Array.isArray(enableAudio)) {
  1113. var enableAudioArr = [];
  1114.  
  1115. for (var i = 0; i < enableAudio.length; i++) {
  1116. if (checkIfSourceExistsFn(enableAudio[i])) {
  1117. enableAudioArr.push(enableAudio[i]);
  1118. }
  1119. }
  1120.  
  1121. if (enableAudioArr.length > 0) {
  1122. useMediaSource = enableAudioArr;
  1123. }
  1124. // shareScreen({ stereo: true })
} else if (enableAudio && typeof enableAudio === 'object') {
if (!(enableAudio.sourceId && enableAudio.mediaSource)) {
enableAudioSettings = {
usedtx: typeof enableAudio.usedtx === 'boolean' ? enableAudio.usedtx : null,
useinbandfec: typeof enableAudio.useinbandfec === 'boolean' ? enableAudio.useinbandfec : null,
stereo: enableAudio.stereo === true,
echoCancellation: enableAudio.echoCancellation !== false,
deviceId: enableAudio.deviceId
};
}
  1137. // shareScreen(true)
  1138. } else if (enableAudio === true) {
enableAudioSettings = {
usedtx: null,
useinbandfec: null,
stereo: false,
echoCancellation: true,
deviceId: null
};
  1146. // shareScreen(function () {})
  1147. } else if (typeof enableAudio === 'function') {
  1148. callback = enableAudio;
  1149. enableAudio = false;
  1150. }
  1151.  
  1152. // shareScreen(.., "screen") or shareScreen({ sourceId: "xxxx", mediaSource: "xxxxx" })
  1153. if (mediaSource && typeof mediaSource === 'string' ||
  1154. (mediaSource && typeof mediaSource === 'object' && mediaSource.sourceId && mediaSource.mediaSource)) {
  1155. if (checkIfSourceExistsFn(typeof mediaSource === 'object' ? mediaSource.mediaSource : mediaSource)) {
  1156. useMediaSource = [typeof mediaSource === 'object' ? mediaSource.mediaSource : mediaSource];
  1157. }
  1158. useMediaSourceId = typeof mediaSource === 'object' ? mediaSource.sourceId : null;
  1159. // shareScreen(.., ["screen", "window"])
  1160. } else if (Array.isArray(mediaSource)) {
  1161. var mediaSourceArr = [];
  1162. for (var i = 0; i < mediaSource.length; i++) {
  1163. if (checkIfSourceExistsFn(mediaSource[i])) {
  1164. mediaSourceArr.push(mediaSource[i]);
  1165. }
  1166. }
  1167. if (mediaSourceArr.length > 0) {
  1168. useMediaSource = mediaSourceArr;
  1169. }
  1170. // shareScreen(.., function () {})
  1171. } else if (typeof mediaSource === 'function') {
  1172. callback = mediaSource;
  1173. }
  1174.  
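// "audio" is only valid when capturing together with the "tab" media source;
// drop it otherwise, falling back to "screen" if nothing else was requested.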
  1175. if (useMediaSource.indexOf('audio') > -1 && useMediaSource.indexOf('tab') === -1) {
  1176. useMediaSource.splice(useMediaSource.indexOf('audio'), 1);
  1177. if (useMediaSource.length === 0) {
  1178. useMediaSource = [self.MEDIA_SOURCE.SCREEN];
  1179. }
  1180. }
  1181.  
  1182. self._throttle(function (runFn) {
  1183. if (!runFn) {
  1184. if (self._initOptions.throttlingShouldThrowError) {
  1185. var throttleLimitError = 'Unable to run as throttle interval has not reached (' + self._initOptions.throttleIntervals.shareScreen + 'ms).';
  1186. log.error(throttleLimitError);
  1187.  
  1188. if (typeof callback === 'function') {
  1189. callback(new Error(throttleLimitError), null);
  1190. }
  1191. }
  1192. return;
  1193. }
  1194.  
  1195. var settings = {
  1196. settings: {
  1197. audio: enableAudioSettings,
  1198. video: {
  1199. screenshare: true,
  1200. exactConstraints: false
  1201. }
  1202. },
  1203. getUserMediaSettings: {
  1204. audio: false,
  1205. video: {
  1206. mediaSource: useMediaSource
  1207. }
  1208. }
  1209. };
  1210.  
  1211. if (AdapterJS.webrtcDetectedType === 'plugin' && useMediaSourceId) {
  1212. settings.getUserMediaSettings.video.optional = [{
  1213. screenId: useMediaSourceId
  1214. }];
  1215. }
  1216.  
  1217. var mediaAccessSuccessFn = function (stream) {
  1218. self.off('mediaAccessError', mediaAccessErrorFn);
  1219.  
  1220. if (self._inRoom) {
  1221. self._trigger('incomingStream', self._user.sid, stream, true, self.getPeerInfo(), true, stream.id || stream.label);
  1222. self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
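// Renegotiate only when there is no existing getUserMedia() video track that the
// screen track can replace via _replaceTrack(); Edge always takes the renegotiation path.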
  1223. var shouldRenegotiate = true;
  1224.  
  1225. if (self._streams.userMedia && self._streams.userMedia.stream && Array.isArray(self._streams.userMedia.stream.getVideoTracks()) && self._streams.userMedia.stream.getVideoTracks().length) {
  1226. shouldRenegotiate = false;
  1227. }
  1228.  
  1229. if (AdapterJS.webrtcDetectedBrowser === 'edge') {
  1230. shouldRenegotiate = true;
  1231. }
  1232.  
  1233. if (shouldRenegotiate) {
  1234. if (Object.keys(self._peerConnections).length > 0 || self._hasMCU) {
  1235. stream.wasNegotiated = true;
  1236. self._refreshPeerConnection(Object.keys(self._peerConnections), false, {}, function (err, success) {
  1237. if (err) {
  1238. log.error('Failed refreshing connections for shareScreen() ->', err);
  1239. if (typeof callback === 'function') {
  1240. callback(new Error('Failed refreshing connections.'), null);
  1241. }
  1242. return;
  1243. }
  1244. if (typeof callback === 'function') {
  1245. callback(null, stream);
  1246. }
  1247. });
  1248. } else if (typeof callback === 'function') {
  1249. callback(null, stream);
  1250. }
  1251. } else {
  1252. var gDMVideoTrack = stream.getVideoTracks()[0];
  1253. var gUMVideoTrack = self._streams.userMedia.stream.getVideoTracks()[0];
  1254. self._replaceTrack(gUMVideoTrack.id, gDMVideoTrack);
  1255. }
  1256. } else if (typeof callback === 'function') {
  1257. callback(null, stream);
  1258. }
  1259. };
  1260.  
  1261. var mediaAccessErrorFn = function (error) {
  1262. self.off('mediaAccessSuccess', mediaAccessSuccessFn);
  1263.  
  1264. if (typeof callback === 'function') {
  1265. callback(error, null);
  1266. }
  1267. };
  1268.  
  1269. self.once('mediaAccessSuccess', mediaAccessSuccessFn, function (stream, isScreensharing) {
  1270. return isScreensharing;
  1271. });
  1272.  
  1273. self.once('mediaAccessError', mediaAccessErrorFn, function (error, isScreensharing) {
  1274. return isScreensharing;
  1275. });
  1276.  
  1277. var getUserMediaAudioSettings = enableAudioSettings ? {
  1278. echoCancellation: enableAudioSettings.echoCancellation
  1279. } : false;
  1280.  
  1281. try {
  1282. var hasDefaultAudioTrack = false;
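// Firefox can capture audio in the same request as the screen, and Chrome/Opera can
// when capturing a "tab" with "audio". In any other case, a separate audio stream is
// retrieved below and the screen video track is attached to it.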
  1283. if (enableAudioSettings) {
  1284. if (AdapterJS.webrtcDetectedBrowser === 'firefox') {
  1285. hasDefaultAudioTrack = true;
  1286. settings.getUserMediaSettings.audio = getUserMediaAudioSettings;
  1287. } else if (useMediaSource.indexOf('audio') > -1 && useMediaSource.indexOf('tab') > -1) {
  1288. hasDefaultAudioTrack = true;
  1289. settings.getUserMediaSettings.audio = {};
  1290. }
  1291. }
  1292.  
  1293. var onSuccessCbFn = function (stream) {
  1294. if (hasDefaultAudioTrack || !enableAudioSettings) {
  1295. self._onStreamAccessSuccess(stream, settings, true, false);
  1296. return;
  1297. }
  1298.  
  1299. settings.getUserMediaSettings.audio = getUserMediaAudioSettings;
  1300.  
  1301. var onAudioSuccessCbFn = function (audioStream) {
  1302. try {
  1303. audioStream.addTrack(stream.getVideoTracks()[0]);
  1304.  
  1305. self.once('mediaAccessSuccess', function () {
  1306. self._streams.screenshare.streamClone = stream;
  1307. }, function (stream, isScreensharing) {
  1308. return isScreensharing;
  1309. });
  1310.  
  1311. self._onStreamAccessSuccess(audioStream, settings, true, false);
  1312.  
  1313. } catch (error) {
  1314. log.error('Failed retrieving audio stream for screensharing stream', error);
  1315. self._onStreamAccessSuccess(stream, settings, true, false);
  1316. }
  1317. };
  1318.  
  1319. var onAudioErrorCbFn = function (error) {
  1320. log.error('Failed retrieving audio stream for screensharing stream', error);
  1321. self._onStreamAccessSuccess(stream, settings, true, false);
  1322. };
  1323.  
  1324. navigator.getUserMedia({ audio: getUserMediaAudioSettings }, onAudioSuccessCbFn, onAudioErrorCbFn);
  1325. };
  1326.  
  1327. var onErrorCbFn = function (error) {
  1328. self._onStreamAccessError(error, settings, true, false);
  1329. if (typeof callback === 'function') {
  1330. callback(error, null);
  1331. }
  1332. };
  1333.  
  1334. if (typeof (AdapterJS || {}).webRTCReady !== 'function') {
  1335. return onErrorCbFn(new Error('Failed to call getUserMedia() as AdapterJS is not yet loaded!'));
  1336. }
  1337.  
  1338. AdapterJS.webRTCReady(function () {
  1339. if (AdapterJS.webrtcDetectedBrowser === 'edge' && typeof navigator.getDisplayMedia === 'function') {
  1340. navigator.getDisplayMedia(settings.getUserMediaSettings).then(function(stream) {
  1341. onSuccessCbFn(stream);
  1342. }).catch(function(err) {
  1343. onErrorCbFn(err);
  1344. });
} else if (navigator.mediaDevices && typeof navigator.mediaDevices.getDisplayMedia === 'function') {
  1346. navigator.mediaDevices.getDisplayMedia(settings.getUserMediaSettings).then(function(stream) {
  1347. onSuccessCbFn(stream);
  1348. }).catch(function(err) {
  1349. onErrorCbFn(err);
  1350. });
  1351. } else {
  1352. navigator.getUserMedia(settings.getUserMediaSettings, onSuccessCbFn, onErrorCbFn);
  1353. }
  1354. });
  1355. } catch (error) {
  1356. self._onStreamAccessError(error, settings, true, false);
  1357. }
  1358. }, 'shareScreen', self._initOptions.throttleIntervals.shareScreen);
  1359. };
  1360.  
  1361. /**
  1362. * <blockquote class="info">
  1363. * Note that broadcasted events from <a href="#method_muteStream"><code>muteStream()</code> method</a>,
  1364. * <a href="#method_stopStream"><code>stopStream()</code> method</a>,
  1365. * <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
  1366. * <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
  1367. * <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
* <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
* sent less than an interval apart.
  1370. * </blockquote>
  1371. * Function that stops <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>.
  1372. * @method stopScreen
  1373. * @example
* function stopScreen () {
*   skylinkDemo.stopScreen();
* }
  1377. *
  1378. * skylinkDemo.shareScreen();
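*
* // A sketch of detecting when the screensharing Stream has stopped, using the
* // streamEnded event payload values described in the sequence below:
* skylinkDemo.on("streamEnded", function (peerId, peerInfo, isSelf, isScreensharing) {
*   if (isSelf && isScreensharing) {
*     console.log("Screensharing has stopped");
*   }
* });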
  1379. * @trigger <ol class="desc-seq">
  1380. * <li>Checks if there is <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>. <ol>
  1381. * <li>If there is <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>: <ol>
* <li>Stop the <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>. <ol>
  1383. * <li><a href="#event_mediaAccessStopped"><code>mediaAccessStopped</code> event</a>
  1384. * triggers parameter payload <code>isScreensharing</code> value as <code>true</code> and
  1385. * <code>isAudioFallback</code> value as <code>false</code>.</li><li>If User is in Room: <ol>
  1386. * <li><a href="#event_streamEnded"><code>streamEnded</code> event</a> triggers parameter payload
  1387. * <code>isSelf</code> value as <code>true</code> and <code>isScreensharing</code> value as <code>true</code>.</li>
  1388. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  1389. * <code>isSelf</code> value as <code>true</code>.</li>
  1390. * </ol></li></ol></li><li>If User is in Room: <small><b>SKIP</b> this step if <code>stopScreen()</code>
  1391. * was invoked from <a href="#method_shareScreen"><code>shareScreen()</code> method</a>.</small> <ol>
* <li>If there is <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>: <ol>
  1393. * <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
  1394. * <code>isSelf</code> value as <code>true</code> and <code>stream</code> as
  1395. * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.</li>
  1396. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  1397. * <code>isSelf</code> value as <code>true</code>.</li></ol></li>
  1398. * <li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>.</li>
  1399. * </ol></li></ol></li></ol></li></ol>
  1400. * @for Skylink
  1401. * @since 0.6.0
  1402. */
  1403. Skylink.prototype.stopScreen = function () {
  1404. var self = this;
  1405. if (self._streams.screenshare) {
  1406. if (self._inRoom) {
  1407. if (self._streams.userMedia && self._streams.userMedia.stream) {
  1408. self._trigger('incomingStream', self._user.sid, self._streams.userMedia.stream, true, self.getPeerInfo(),
  1409. false, self._streams.userMedia.stream.id || self._streams.userMedia.stream.label);
  1410. self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
  1411. }
  1412.  
  1413. if (self._streams.screenshare.stream.wasNegotiated === true) {
this._refreshPeerConnection(Object.keys(this._peerConnections), false, {});
  1415. } else {
  1416. var gDMVideoTrack = self._streams.screenshare.stream.getVideoTracks()[0];
  1417. var gUMVideoTrack = self._streams.userMedia.stream.getVideoTracks()[0];
  1418.  
  1419. self._replaceTrack(gDMVideoTrack.id, gUMVideoTrack);
  1420. }
  1421. }
  1422. self._stopStreams({
  1423. screenshare: true
  1424. });
  1425. }
  1426. };
  1427.  
  1428. /**
  1429. * Function that returns the camera and microphone sources.
  1430. * @method getStreamSources
  1431. * @param {Function} callback The callback function fired when request has completed.
  1432. * <small>Function parameters signature is <code>function (success)</code></small>
  1433. * @param {JSON} callback.success The success result in request.
  1434. * <small>Object signature is the list of sources.</small>
  1435. * @param {JSON} callback.success.audio The list of audio input (microphone) and output (speakers) sources.
  1436. * @param {Array} callback.success.audio.input The list of audio input (microphone) sources.
  1437. * @param {JSON} callback.success.audio.input.#index The audio input source item.
  1438. * @param {String} callback.success.audio.input.#index.deviceId The audio input source item device ID.
  1439. * @param {String} callback.success.audio.input.#index.label The audio input source item device label name.
* @param {String} [callback.success.audio.input.#index.groupId] The audio input source item physical device ID.
* <small>Note that two sources can have different <code>deviceId</code> values but share the same
* <code>groupId</code> because they belong to the same physical device.</small>
  1443. * @param {Array} callback.success.audio.output The list of audio output (speakers) sources.
  1444. * @param {JSON} callback.success.audio.output.#index The audio output source item.
  1445. * <small>Object signature matches <code>callback.success.audio.input.#index</code> format.</small>
  1446. * @param {JSON} callback.success.video The list of video input (camera) sources.
  1447. * @param {Array} callback.success.video.input The list of video input (camera) sources.
  1448. * @param {JSON} callback.success.video.input.#index The video input source item.
  1449. * <small>Object signature matches <code>callback.success.audio.input.#index</code> format.</small>
  1450. * @example
  1451. * // Example 1: Retrieve the getUserMedia() stream with selected source ID.
  1452. * skylinkDemo.getStreamSources(function (sources) {
  1453. * skylinkDemo.getUserMedia({
  1454. * audio: sources.audio.input[0].deviceId,
  1455. * video: sources.video.input[0].deviceId
  1456. * });
  1457. * });
  1458. *
  1459. * // Example 2: Set the output audio speaker (Chrome 49+ supported only)
  1460. * skylinkDemo.getStreamSources(function (sources) {
  1461. * var videoElement = document.getElementById('video');
  1462. * if (videoElement && typeof videoElement.setSinkId === 'function') {
  1463. * videoElement.setSinkId(sources.audio.output[0].deviceId)
  1464. * }
  1465. * });
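*
* // Example 3: A sketch that de-duplicates microphones belonging to the same physical
* // device, using the groupId field documented above.
* skylinkDemo.getStreamSources(function (sources) {
*   var seenGroupIds = {};
*   var uniqueMicrophones = sources.audio.input.filter(function (item) {
*     if (item.groupId && seenGroupIds[item.groupId]) return false;
*     if (item.groupId) seenGroupIds[item.groupId] = true;
*     return true;
*   });
*   console.log("Unique microphones ->", uniqueMicrophones);
* });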
  1466. * @for Skylink
  1467. * @since 0.6.27
  1468. */
  1469. Skylink.prototype.getStreamSources = function(callback) {
  1470. var outputSources = {
  1471. audio: {
  1472. input: [],
  1473. output: []
  1474. },
  1475. video: {
  1476. input: []
  1477. }
  1478. };
  1479.  
  1480. if (typeof callback !== 'function') {
  1481. return log.error('Please provide the callback.');
  1482. }
  1483.  
  1484. var sourcesListFn = function (sources) {
  1485. sources.forEach(function (sourceItem) {
  1486. var item = {
  1487. deviceId: sourceItem.deviceId || sourceItem.sourceId || 'default',
  1488. label: sourceItem.label,
  1489. groupId: sourceItem.groupId || null
  1490. };
  1491.  
  1492. item.label = item.label || 'Source for ' + item.deviceId;
  1493.  
  1494. if (['audio', 'audioinput'].indexOf(sourceItem.kind) > -1) {
  1495. outputSources.audio.input.push(item);
  1496. } else if (['video', 'videoinput'].indexOf(sourceItem.kind) > -1) {
  1497. outputSources.video.input.push(item);
  1498. } else if (sourceItem.kind === 'audiooutput') {
  1499. outputSources.audio.output.push(item);
  1500. }
  1501. });
  1502.  
  1503. callback(outputSources);
  1504. };
  1505.  
  1506. if (navigator.mediaDevices && typeof navigator.mediaDevices.enumerateDevices === 'function') {
  1507. navigator.mediaDevices.enumerateDevices().then(sourcesListFn);
  1508. } else if (window.MediaStreamTrack && typeof MediaStreamTrack.getSources === 'function') {
  1509. MediaStreamTrack.getSources(sourcesListFn);
  1510. } else if (typeof navigator.getUserMedia === 'function') {
  1511. sourcesListFn([
  1512. { deviceId: 'default', kind: 'audioinput', label: 'Default Audio Track' },
  1513. { deviceId: 'default', kind: 'videoinput', label: 'Default Video Track' }
  1514. ]);
  1515. } else {
  1516. sourcesListFn([]);
  1517. }
  1518. };
  1519.  
  1520. /**
  1521. * Function that returns the screensharing sources.
  1522. * @method getScreenSources
  1523. * @param {Function} callback The callback function fired when request has completed.
  1524. * <small>Function parameters signature is <code>function (success)</code></small>
* @param {JSON} callback.success The success result in request.
* <small>Object signature is the list of screensharing media sources and screen sources.</small>
  1528. * @param {Array} callback.success.mediaSource The array of screensharing media sources.
  1529. * @param {String} callback.success.mediaSource.#index The screensharing media source item.
  1530. * [Rel: Skylink.MEDIA_SOURCE]
  1531. * @param {Array} callback.success.mediaSourceInput The list of specific media source screen inputs.
  1532. * @param {JSON} callback.success.mediaSourceInput.#index The media source screen input item.
  1533. * @param {String} callback.success.mediaSourceInput.#index.sourceId The screen input item ID.
  1534. * @param {String} callback.success.mediaSourceInput.#index.label The screen input item label name.
  1535. * @param {String} callback.success.mediaSourceInput.#index.mediaSource The screen input item media source it belongs to.
  1536. * [Rel: Skylink.MEDIA_SOURCE]
  1537. * @example
* // Example 1: Retrieve the list of available shareScreen() sources.
* skylinkDemo.getScreenSources(function (sources) {
*   skylinkDemo.shareScreen(sources.mediaSource[0] || null);
* });
*
* // Example 2: Retrieve the list of available shareScreen() sources with a specific item.
* skylinkDemo.getScreenSources(function (sources) {
*   if (sources.mediaSourceInput[0]) {
*     skylinkDemo.shareScreen({
*       mediaSource: sources.mediaSourceInput[0].mediaSource,
*       sourceId: sources.mediaSourceInput[0].sourceId
*     });
*   } else {
*     skylinkDemo.shareScreen();
*   }
* });
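*
* // Example 3: A sketch that shares a tab together with its audio only when the
* // browser reports support for both the "tab" and "audio" media sources.
* skylinkDemo.getScreenSources(function (sources) {
*   if (sources.mediaSource.indexOf("tab") > -1 && sources.mediaSource.indexOf("audio") > -1) {
*     skylinkDemo.shareScreen(true, ["tab", "audio"]);
*   } else {
*     skylinkDemo.shareScreen();
*   }
* });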
  1554. * @for Skylink
  1555. * @since 0.6.27
  1556. */
  1557. Skylink.prototype.getScreenSources = function(callback) {
  1558. var outputSources = {
  1559. mediaSource: [],
  1560. mediaSourceInput: []
  1561. };
  1562.  
  1563. if (typeof callback !== 'function') {
  1564. return log.error('Please provide the callback.');
  1565. }
  1566.  
// Chrome for Android 59+ supports screensharing behind chrome://flags (needs to be enabled by the user)
  1568. // Reference: https://bugs.chromium.org/p/chromium/issues/detail?id=487935
  1569. if (navigator.userAgent.toLowerCase().indexOf('android') > -1) {
  1570. if (AdapterJS.webrtcDetectedBrowser === 'chrome' && AdapterJS.webrtcDetectedVersion >= 59) {
  1571. outputSources.mediaSource = ['screen'];
  1572. }
  1573. callback(outputSources);
  1574. return;
  1575. }
  1576.  
// IE / Safari (plugin) needs commercial screensharing enabled
  1578. if (AdapterJS.webrtcDetectedType === 'plugin') {
  1579. AdapterJS.webRTCReady(function () {
// IE / Safari (plugin) is not available or does not support screensharing
  1581. if (AdapterJS.WebRTCPlugin.plugin && AdapterJS.WebRTCPlugin.plugin.isScreensharingAvailable &&
  1582. AdapterJS.WebRTCPlugin.plugin.HasScreensharingFeature) {
  1583. outputSources.mediaSource = ['window', 'screen'];
  1584.  
  1585. // Do not provide the error callback as well or it will throw NPError.
  1586. if (typeof AdapterJS.WebRTCPlugin.plugin.getScreensharingSources === 'function') {
  1587. AdapterJS.WebRTCPlugin.plugin.getScreensharingSources(function (sources) {
sources.forEach(function (sourceItem) {
  1589. var item = {
  1590. sourceId: sourceItem.id || sourceItem.sourceId || 'default',
  1591. label: sourceItem.label,
  1592. mediaSource: sourceItem.kind || 'screen'
  1593. };
  1594.  
  1595. item.label = item.label || 'Source for ' + item.sourceId;
  1596. outputSources.mediaSourceInput.push(item);
  1597. });
  1598.  
  1599. callback(outputSources);
  1600. });
  1601. return;
  1602. }
  1603. }
  1604.  
  1605. callback(outputSources);
  1606. });
  1607. return;
  1608.  
// Chrome 34+ and Opera 21(?)+ support screensharing
  1610. // Firefox 38(?)+ supports screensharing
  1611. } else if ((AdapterJS.webrtcDetectedBrowser === 'chrome' && AdapterJS.webrtcDetectedVersion >= 34) ||
  1612. (AdapterJS.webrtcDetectedBrowser === 'firefox' && AdapterJS.webrtcDetectedVersion >= 38) ||
  1613. (AdapterJS.webrtcDetectedBrowser === 'opera' && AdapterJS.webrtcDetectedVersion >= 21)) {
// Warn users who did not configure the Opera screensharing extension settings, as it will not work otherwise!
  1615. if (AdapterJS.webrtcDetectedBrowser === 'opera' && !(AdapterJS.extensionInfo &&
  1616. AdapterJS.extensionInfo.opera && AdapterJS.extensionInfo.opera.extensionId)) {
  1617. log.warn('Please ensure that your application allows Opera screensharing!');
  1618. }
  1619.  
  1620. outputSources.mediaSource = ['window', 'screen'];
  1621.  
// Chrome 52+ and Opera 39+ support tab and audio
  1623. // Reference: https://developer.chrome.com/extensions/desktopCapture
  1624. if ((AdapterJS.webrtcDetectedBrowser === 'chrome' && AdapterJS.webrtcDetectedVersion >= 52) ||
  1625. (AdapterJS.webrtcDetectedBrowser === 'opera' && AdapterJS.webrtcDetectedVersion >= 39)) {
  1626. outputSources.mediaSource.push('tab', 'audio');
  1627.  
// Firefox supports some other sources
// Reference: http://fluffy.github.io/w3c-screen-share/#screen-based-video-constraints
// https://bugzilla.mozilla.org/show_bug.cgi?id=1313758
// https://bugzilla.mozilla.org/show_bug.cgi?id=1037405
  1633. } else if (AdapterJS.webrtcDetectedBrowser === 'firefox') {
  1634. outputSources.mediaSource.push('browser', 'camera', 'application');
  1635. }
  1636. }
  1637.  
  1638. callback(outputSources);
  1639. };
  1640.  
  1641. /**
  1642. * Function that handles the muting of Stream audio and video tracks.
  1643. * @method _muteStreams
  1644. * @private
  1645. * @for Skylink
  1646. * @since 0.6.15
  1647. */
  1648. Skylink.prototype._muteStreams = function () {
  1649. var self = this;
  1650. var hasVideo = false;
  1651. var hasAudio = false;
  1652.  
  1653. var muteFn = function (stream) {
  1654. var audioTracks = stream.getAudioTracks();
  1655. var videoTracks = stream.getVideoTracks();
  1656.  
  1657. for (var a = 0; a < audioTracks.length; a++) {
  1658. audioTracks[a].enabled = !self._streamsMutedSettings.audioMuted;
  1659. hasAudio = true;
  1660. }
  1661.  
  1662. for (var v = 0; v < videoTracks.length; v++) {
  1663. videoTracks[v].enabled = !self._streamsMutedSettings.videoMuted;
  1664. hasVideo = true;
  1665. }
  1666. };
  1667.  
  1668. if (self._streams.userMedia && self._streams.userMedia.stream) {
  1669. muteFn(self._streams.userMedia.stream);
  1670. }
  1671.  
  1672. if (self._streams.screenshare && self._streams.screenshare.stream) {
  1673. muteFn(self._streams.screenshare.stream);
  1674. }
  1675.  
  1676. if (self._streams.screenshare && self._streams.screenshare.streamClone) {
  1677. muteFn(self._streams.screenshare.streamClone);
  1678. }
  1679.  
  1680. if (AdapterJS.webrtcDetectedBrowser === 'edge') {
  1681. for (var peerId in self._peerConnections) {
  1682. if (self._peerConnections.hasOwnProperty(peerId) && self._peerConnections[peerId]) {
  1683. var localStreams = self._peerConnections[peerId].getLocalStreams();
  1684. for (var s = 0; s < localStreams.length; s++) {
  1685. muteFn(localStreams[s]);
  1686. }
  1687. }
  1688. }
  1689. }
  1690.  
  1691. log.debug('Updated Streams muted status ->', self._streamsMutedSettings);
  1692.  
  1693. return {
  1694. hasVideo: hasVideo,
  1695. hasAudio: hasAudio
  1696. };
  1697. };
  1698.  
  1699. /**
  1700. * Function that handles stopping the Stream streaming.
  1701. * @method _stopStreams
  1702. * @private
  1703. * @for Skylink
  1704. * @since 0.6.15
  1705. */
  1706. Skylink.prototype._stopStreams = function (options) {
  1707. var self = this;
  1708. var stopFn = function (stream) {
  1709. var streamId = stream.id || stream.label;
  1710. log.debug([null, 'MediaStream', streamId, 'Stopping Stream ->'], stream);
  1711.  
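// Stop each track individually; older implementations without MediaStreamTrack.stop()
// fall back to the deprecated MediaStream.stop() in the catch block below.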
  1712. try {
  1713. var audioTracks = stream.getAudioTracks();
  1714. var videoTracks = stream.getVideoTracks();
  1715.  
  1716. for (var a = 0; a < audioTracks.length; a++) {
  1717. audioTracks[a].stop();
  1718. }
  1719.  
  1720. for (var v = 0; v < videoTracks.length; v++) {
  1721. videoTracks[v].stop();
  1722. }
  1723.  
  1724. } catch (error) {
  1725. stream.stop();
  1726. }
  1727.  
  1728. if (self._streamsStoppedCbs[streamId]) {
  1729. self._streamsStoppedCbs[streamId]();
  1730. delete self._streamsStoppedCbs[streamId];
  1731. }
  1732. };
  1733.  
  1734. var stopUserMedia = false;
  1735. var stopScreenshare = false;
  1736. var hasStoppedMedia = false;
  1737.  
if (options && typeof options === 'object') {
  1739. stopUserMedia = options.userMedia === true;
  1740. stopScreenshare = options.screenshare === true;
  1741. }
  1742.  
  1743. if (stopUserMedia && self._streams.userMedia) {
  1744. if (self._streams.userMedia.stream) {
  1745. stopFn(self._streams.userMedia.stream);
  1746. }
  1747.  
  1748. self._streams.userMedia = null;
  1749. hasStoppedMedia = true;
  1750. }
  1751.  
  1752. if (stopScreenshare && self._streams.screenshare) {
  1753. if (self._streams.screenshare.streamClone) {
  1754. stopFn(self._streams.screenshare.streamClone);
  1755. }
  1756.  
  1757. if (self._streams.screenshare.stream) {
  1758. stopFn(self._streams.screenshare.stream);
  1759. }
  1760.  
  1761. self._streams.screenshare = null;
  1762. hasStoppedMedia = true;
  1763. }
  1764.  
  1765. if (self._inRoom && hasStoppedMedia) {
  1766. self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
  1767. }
  1768.  
  1769. log.log('Stopping Streams with settings ->', options);
  1770. };
  1771.  
  1772. /**
  1773. * Function that parses the <code>getUserMedia()</code> settings provided.
  1774. * @method _parseStreamSettings
  1775. * @private
  1776. * @for Skylink
  1777. * @since 0.6.15
  1778. */
  1779. Skylink.prototype._parseStreamSettings = function(options) {
  1780. var settings = {
  1781. settings: { audio: false, video: false },
  1782. mutedSettings: { shouldAudioMuted: false, shouldVideoMuted: false },
  1783. getUserMediaSettings: { audio: false, video: false }
  1784. };
  1785.  
  1786. if (options.audio) {
  1787. // For Edge to work since they do not support the advanced constraints yet
  1788. settings.settings.audio = {
  1789. stereo: false,
  1790. exactConstraints: !!options.useExactConstraints,
  1791. echoCancellation: true
  1792. };
  1793. settings.getUserMediaSettings.audio = {
  1794. echoCancellation: true
  1795. };
  1796.  
  1797. if (typeof options.audio === 'object') {
  1798. if (typeof options.audio.stereo === 'boolean') {
  1799. settings.settings.audio.stereo = options.audio.stereo;
  1800. }
  1801.  
  1802. if (typeof options.audio.useinbandfec === 'boolean') {
  1803. settings.settings.audio.useinbandfec = options.audio.useinbandfec;
  1804. }
  1805.  
  1806. if (typeof options.audio.usedtx === 'boolean') {
  1807. settings.settings.audio.usedtx = options.audio.usedtx;
  1808. }
  1809.  
  1810. if (typeof options.audio.maxplaybackrate === 'number' &&
  1811. options.audio.maxplaybackrate >= 8000 && options.audio.maxplaybackrate <= 48000) {
  1812. settings.settings.audio.maxplaybackrate = options.audio.maxplaybackrate;
  1813. }
  1814.  
  1815. if (typeof options.audio.mute === 'boolean') {
  1816. settings.mutedSettings.shouldAudioMuted = options.audio.mute;
  1817. }
  1818.  
  1819. // Not supported in Edge browser features
  1820. if (AdapterJS.webrtcDetectedBrowser !== 'edge') {
  1821. if (typeof options.audio.echoCancellation === 'boolean') {
  1822. settings.settings.audio.echoCancellation = options.audio.echoCancellation;
  1823. settings.getUserMediaSettings.audio.echoCancellation = options.audio.echoCancellation;
  1824. }
  1825.  
  1826. if (Array.isArray(options.audio.optional)) {
  1827. settings.settings.audio.optional = clone(options.audio.optional);
  1828. settings.getUserMediaSettings.audio.optional = clone(options.audio.optional);
  1829. }
  1830.  
  1831. if (options.audio.deviceId && typeof options.audio.deviceId === 'string' &&
  1832. AdapterJS.webrtcDetectedBrowser !== 'firefox') {
  1833. settings.settings.audio.deviceId = options.audio.deviceId;
  1834. settings.getUserMediaSettings.audio.deviceId = options.useExactConstraints ?
  1835. { exact: options.audio.deviceId } : { ideal: options.audio.deviceId };
  1836. }
  1837. }
  1838. }
  1839.  
  1840. if (AdapterJS.webrtcDetectedBrowser === 'edge') {
  1841. settings.getUserMediaSettings.audio = true;
  1842. }
  1843. }
  1844.  
  1845. if (options.video) {
  1846. // For Edge to work since they do not support the advanced constraints yet
  1847. settings.settings.video = {
  1848. resolution: clone(this.VIDEO_RESOLUTION.VGA),
  1849. screenshare: false,
  1850. exactConstraints: !!options.useExactConstraints
  1851. };
  1852. settings.getUserMediaSettings.video = {};
  1853.  
  1854. if (typeof options.video === 'object') {
  1855. if (typeof options.video.mute === 'boolean') {
  1856. settings.mutedSettings.shouldVideoMuted = options.video.mute;
  1857. }
  1858.  
  1859. if (Array.isArray(options.video.optional)) {
  1860. settings.settings.video.optional = clone(options.video.optional);
  1861. settings.getUserMediaSettings.video.optional = clone(options.video.optional);
  1862. }
  1863.  
  1864. if (options.video.deviceId && typeof options.video.deviceId === 'string') {
  1865. settings.settings.video.deviceId = options.video.deviceId;
  1866. settings.getUserMediaSettings.video.deviceId = options.useExactConstraints ?
  1867. { exact: options.video.deviceId } : { ideal: options.video.deviceId };
  1868. }
  1869.  
  1870. if (options.video.resolution && typeof options.video.resolution === 'object') {
  1871. if ((options.video.resolution.width && typeof options.video.resolution.width === 'object') ||
  1872. typeof options.video.resolution.width === 'number') {
  1873. settings.settings.video.resolution.width = options.video.resolution.width;
  1874. }
  1875. if ((options.video.resolution.height && typeof options.video.resolution.height === 'object') ||
  1876. typeof options.video.resolution.height === 'number') {
  1877. settings.settings.video.resolution.height = options.video.resolution.height;
  1878. }
  1879. }
  1880.  
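// Map the parsed resolution into getUserMedia() constraints: object values are passed
// through as-is, while numbers become { exact } or { max } depending on options.useExactConstraints.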
  1881. settings.getUserMediaSettings.video.width = typeof settings.settings.video.resolution.width === 'object' ?
  1882. settings.settings.video.resolution.width : (options.useExactConstraints ?
  1883. { exact: settings.settings.video.resolution.width } : { max: settings.settings.video.resolution.width });
  1884.  
  1885. settings.getUserMediaSettings.video.height = typeof settings.settings.video.resolution.height === 'object' ?
  1886. settings.settings.video.resolution.height : (options.useExactConstraints ?
  1887. { exact: settings.settings.video.resolution.height } : { max: settings.settings.video.resolution.height });
  1888.  
if ((options.video.frameRate && typeof options.video.frameRate === 'object') ||
(typeof options.video.frameRate === 'number' && AdapterJS.webrtcDetectedType !== 'plugin')) {
  1891. settings.settings.video.frameRate = options.video.frameRate;
  1892. settings.getUserMediaSettings.video.frameRate = typeof settings.settings.video.frameRate === 'object' ?
  1893. settings.settings.video.frameRate : (options.useExactConstraints ?
  1894. { exact: settings.settings.video.frameRate } : { max: settings.settings.video.frameRate });
  1895. }
  1896.  
  1897. if (options.video.facingMode && ['string', 'object'].indexOf(typeof options.video.facingMode) > -1 && AdapterJS.webrtcDetectedType === 'plugin') {
  1898. settings.settings.video.facingMode = options.video.facingMode;
  1899. settings.getUserMediaSettings.video.facingMode = typeof settings.settings.video.facingMode === 'object' ?
  1900. settings.settings.video.facingMode : (options.useExactConstraints ?
{ exact: settings.settings.video.facingMode } : { ideal: settings.settings.video.facingMode });
  1902. }
  1903. } else {
  1904. settings.getUserMediaSettings.video = {
  1905. width: options.useExactConstraints ? { exact: settings.settings.video.resolution.width } :
  1906. { max: settings.settings.video.resolution.width },
  1907. height: options.useExactConstraints ? { exact: settings.settings.video.resolution.height } :
  1908. { max: settings.settings.video.resolution.height }
  1909. };
  1910. }
  1911.  
  1912. if (AdapterJS.webrtcDetectedBrowser === 'edge') {
  1913. settings.settings.video = {
  1914. screenshare: false,
  1915. exactConstraints: !!options.useExactConstraints
  1916. };
  1917. settings.getUserMediaSettings.video = true;
  1918. }
  1919. }
  1920.  
  1921. return settings;
  1922. };
  1923.  
  1924. /**
  1925. * Function that parses the mediastream tracks for details.
  1926. * @method _parseStreamTracksInfo
  1927. * @private
  1928. * @for Skylink
  1929. * @since 0.6.31
  1930. */
  1931. Skylink.prototype._parseStreamTracksInfo = function (streamKey, callback) {
  1932. var self = this;
  1933. var stream = self._streams[streamKey].stream;
  1934.  
  1935. if (!stream) {
  1936. log.warn('Unable to parse stream tracks information as the stream is not defined');
  1937. return callback();
  1938. }
  1939.  
  1940. self._streams[streamKey].tracks = {
  1941. audio: null,
  1942. video: null
  1943. };
  1944.  
// Currently, we are sending only 1 audio and 1 video track.
  1946. var audioTracks = stream.getAudioTracks();
  1947. var videoTracks = stream.getVideoTracks();
  1948.  
  1949. if (audioTracks.length > 0) {
  1950. self._streams[streamKey].tracks.audio = {
  1951. id: audioTracks[0].id || '',
  1952. label: audioTracks[0].label || 'audio_track_0'
  1953. };
  1954. }
  1955.  
  1956. if (videoTracks.length === 0) {
  1957. return callback();
  1958. }
  1959.  
  1960. self._streams[streamKey].tracks.video = {
  1961. id: videoTracks[0].id || '',
  1962. label: videoTracks[0].label || 'video_track_0',
  1963. width: null,
  1964. height: null
  1965. };
  1966.  
  1967. // Append the stream to a dummy <video> element to retrieve the resolution width and height.
  1968. var videoElement = document.createElement('video');
  1969. videoElement.autoplay = true;
  1970. // Mute the audio of the <video> element to prevent feedback.
  1971. videoElement.muted = true;
  1972. videoElement.volume = 0;
  1973.  
  1974. var onVideoLoaded = function () {
  1975. if (!self._streams[streamKey]) {
  1976. return;
  1977. }
  1978. self._streams[streamKey].tracks.video.width = videoElement.videoWidth;
  1979. self._streams[streamKey].tracks.video.height = videoElement.videoHeight;
  1980. videoElement.srcObject = null;
  1981. callback();
  1982. };
  1983.  
  1984. // Because the plugin does not support the "loadeddata" event.
  1985. if (AdapterJS.webrtcDetectedType === 'plugin') {
  1986. setTimeout(onVideoLoaded, 1500);
  1987.  
  1988. } else {
  1989. videoElement.addEventListener('loadeddata', onVideoLoaded);
  1990. }
  1991.  
  1992. AdapterJS.attachMediaStream(videoElement, stream);
};
  1994.  
  1995. /**
  1996. * Function that handles the native <code>navigator.getUserMedia()</code> API success callback result.
  1997. * @method _onStreamAccessSuccess
  1998. * @private
  1999. * @for Skylink
  2000. * @since 0.3.0
  2001. */
  2002. Skylink.prototype._onStreamAccessSuccess = function(stream, settings, isScreenSharing, isAudioFallback) {
  2003. var self = this;
  2004. var streamId = stream.id || stream.label;
  2005. var streamHasEnded = false;
  2006.  
  2007. log.log([null, 'MediaStream', streamId, 'Has access to stream ->'], stream);
  2008.  
  2009. // Stop previous stream
  2010. if (!isScreenSharing && self._streams.userMedia) {
  2011. self._stopStreams({
  2012. userMedia: true,
  2013. screenshare: false
  2014. });
  2015.  
  2016. } else if (isScreenSharing && self._streams.screenshare) {
  2017. self._stopStreams({
  2018. userMedia: false,
  2019. screenshare: true
  2020. });
  2021. }
  2022.  
  2023. self._streamsStoppedCbs[streamId] = function () {
  2024. log.log([null, 'MediaStream', streamId, 'Stream has ended']);
  2025. streamHasEnded = true;
  2026. self._trigger('mediaAccessStopped', !!isScreenSharing, !!isAudioFallback, streamId);
  2027.  
  2028. if (self._inRoom) {
  2029. log.debug([null, 'MediaStream', streamId, 'Sending Stream ended status to Peers']);
  2030.  
  2031. self._sendChannelMessage({
  2032. type: self._SIG_MESSAGE_TYPE.STREAM,
  2033. mid: self._user.sid,
  2034. rid: self._room.id,
  2035. cid: self._key,
  2036. streamId: streamId,
  2037. settings: settings.settings,
  2038. status: 'ended'
  2039. });
  2040.  
  2041. self._trigger('streamEnded', self._user.sid, self.getPeerInfo(), true, !!isScreenSharing, streamId);
  2042.  
  2043. if (isScreenSharing && self._streams.screenshare && self._streams.screenshare.stream &&
  2044. (self._streams.screenshare.stream.id || self._streams.screenshare.stream.label) === streamId) {
  2045. self._streams.screenshare = null;
  2046.  
  2047. } else if (!isScreenSharing && self._streams.userMedia && self._streams.userMedia.stream &&
  2048. (self._streams.userMedia.stream.id || self._streams.userMedia.stream.label) === streamId) {
  2049. self._streams.userMedia = null;
  2050. }
  2051. }
  2052. };
  2053.  
  2054. // Handle event for Chrome / Opera
  2055. if (['chrome', 'opera'].indexOf(AdapterJS.webrtcDetectedBrowser) > -1) {
  2056. stream.oninactive = function () {
  2057. if (self._streamsStoppedCbs[streamId]) {
  2058. self._streamsStoppedCbs[streamId]();
  2059. delete self._streamsStoppedCbs[streamId];
  2060. }
  2061. };
  2062.  
  2063. if (isScreenSharing && stream.getVideoTracks().length > 0) {
  2064. stream.getVideoTracks()[0].onended = function () {
  2065. setTimeout(function () {
  2066. if (!streamHasEnded && self._inRoom) {
  2067. self.stopScreen();
  2068. }
  2069. }, 350);
  2070. };
  2071. }
  2072.  
  2073. // Handle event for Firefox (use an interval)
  2074. } else if (AdapterJS.webrtcDetectedBrowser === 'firefox') {
  2075. stream.endedInterval = setInterval(function () {
  2076. if (typeof stream.recordedTime === 'undefined') {
  2077. stream.recordedTime = 0;
  2078. }
  2079. if (stream.recordedTime === stream.currentTime) {
  2080. clearInterval(stream.endedInterval);
  2081.  
  2082. if (self._streamsStoppedCbs[streamId]) {
  2083. self._streamsStoppedCbs[streamId]();
  2084. delete self._streamsStoppedCbs[streamId];
  2085. }
  2086.  
  2087. } else {
  2088. stream.recordedTime = stream.currentTime;
  2089. }
  2090. }, 1000);
  2091.  
  2092. } else {
  2093. stream.onended = function () {
  2094. if (self._streamsStoppedCbs[streamId]) {
  2095. self._streamsStoppedCbs[streamId]();
  2096. delete self._streamsStoppedCbs[streamId];
  2097. }
  2098. };
  2099. }
  2100.  
  2101. if ((settings.settings.audio && stream.getAudioTracks().length === 0) ||
  2102. (settings.settings.video && stream.getVideoTracks().length === 0)) {
  2103.  
  2104. var tracksNotSameError = 'Expected audio tracks length with ' +
  2105. (settings.settings.audio ? '1' : '0') + ' and video tracks length with ' +
  2106. (settings.settings.video ? '1' : '0') + ' but received audio tracks length ' +
  2107. 'with ' + stream.getAudioTracks().length + ' and video ' +
  2108. 'tracks length with ' + stream.getVideoTracks().length;
  2109.  
  2110. log.warn([null, 'MediaStream', streamId, tracksNotSameError]);
  2111.  
  2112. var requireAudio = !!settings.settings.audio;
  2113. var requireVideo = !!settings.settings.video;
  2114.  
  2115. if (settings.settings.audio && stream.getAudioTracks().length === 0) {
  2116. settings.settings.audio = false;
  2117. }
  2118.  
  2119. if (settings.settings.video && stream.getVideoTracks().length === 0) {
  2120. settings.settings.video = false;
  2121. }
  2122.  
  2123. self._trigger('mediaAccessFallback', {
  2124. error: new Error(tracksNotSameError),
  2125. diff: {
  2126. video: { expected: requireVideo ? 1 : 0, received: stream.getVideoTracks().length },
  2127. audio: { expected: requireAudio ? 1 : 0, received: stream.getAudioTracks().length }
  2128. }
  2129. }, self.MEDIA_ACCESS_FALLBACK_STATE.FALLBACKED, !!isScreenSharing, !!isAudioFallback, streamId);
  2130. }
  2131.  
  2132. self._streams[ isScreenSharing ? 'screenshare' : 'userMedia' ] = {
  2133. id: streamId,
  2134. stream: stream,
  2135. settings: settings.settings,
  2136. constraints: settings.getUserMediaSettings
  2137. };
  2138.  
  2139. self._muteStreams();
  2140.  
  2141. self._parseStreamTracksInfo(isScreenSharing ? 'screenshare' : 'userMedia', function () {
  2142. self._trigger('mediaAccessSuccess', stream, !!isScreenSharing, !!isAudioFallback, streamId);
  2143. });
  2144. };
  2145.  
  2146. /**
  2147. * Function that handles the native <code>navigator.getUserMedia()</code> API failure callback result.
  2148. * @method _onStreamAccessError
  2149. * @private
  2150. * @for Skylink
  2151. * @since 0.6.15
  2152. */
  2153. Skylink.prototype._onStreamAccessError = function(error, settings, isScreenSharing) {
  2154. var self = this;
  2155.  
  2156. if (!isScreenSharing && settings.settings.audio && settings.settings.video && self._initOptions.audioFallback) {
  2157. log.debug('Fallbacking to retrieve audio only Stream');
  2158.  
  2159. self._trigger('mediaAccessFallback', {
  2160. error: error,
  2161. diff: null
  2162. }, self.MEDIA_ACCESS_FALLBACK_STATE.FALLBACKING, false, true);
  2163.  
  2164. var onAudioSuccessCbFn = function (stream) {
  2165. self._onStreamAccessSuccess(stream, settings, false, true);
  2166. };
  2167.  
  2168. var onAudioErrorCbFn = function (error) {
  2169. log.error('Failed fallbacking to retrieve audio only Stream ->', error);
  2170.  
  2171. self._trigger('mediaAccessError', error, false, true);
  2172. self._trigger('mediaAccessFallback', {
  2173. error: error,
  2174. diff: null
  2175. }, self.MEDIA_ACCESS_FALLBACK_STATE.ERROR, false, true);
  2176. };
  2177.  
  2178. navigator.getUserMedia({ audio: true }, onAudioSuccessCbFn, onAudioErrorCbFn);
  2179. return;
  2180. }
  2181. if (isScreenSharing) {
  2182. log.error('Failed retrieving screensharing Stream ->', error);
  2183. } else {
  2184. log.error('Failed retrieving camera Stream ->', error);
  2185. }
  2188. self._trigger('mediaAccessError', error, !!isScreenSharing, false);
  2189. };
  2190.  
  2191. /**
  2192. * Function that handles the <code>RTCPeerConnection.onaddstream</code> remote MediaStream received.
  2193. * @method _onRemoteStreamAdded
  2194. * @private
  2195. * @for Skylink
  2196. * @since 0.5.2
  2197. */
  2198. Skylink.prototype._onRemoteStreamAdded = function(targetMid, stream, isScreenSharing) {
  2199. var self = this;
  2200. var streamId = (self._peerConnections[targetMid] && self._peerConnections[targetMid].remoteStreamId) || stream.id || stream.label;
  2201.  
  2202. // if (!self._peerInformations[targetMid]) {
  2203. // log.warn([targetMid, 'MediaStream', streamId, 'Received remote stream when peer is not connected. Ignoring stream ->'], stream);
  2204. // return;
  2205. // }
  2206.  
  2207. /*if (!self._peerInformations[targetMid].settings.audio &&
  2208. !self._peerInformations[targetMid].settings.video && !isScreenSharing) {
  2209. log.log([targetMid, 'MediaStream', stream.id,
  2210. 'Receive remote stream but ignoring stream as it is empty ->'
  2211. ], stream);
  2212. return;
  2213. }*/
  2214. log.log([targetMid, 'MediaStream', streamId, 'Received remote stream ->'], stream);
  2215.  
  2216. if (isScreenSharing) {
  2217. log.log([targetMid, 'MediaStream', streamId, 'Peer is having a screensharing session with user']);
  2218. }
  2219.  
  2220. self._trigger('incomingStream', targetMid, stream, false, self.getPeerInfo(targetMid), isScreenSharing, streamId);
  2221. self._trigger('peerUpdated', targetMid, self.getPeerInfo(targetMid), false);
  2222. };
  2223.  
  2224.  
  2225. /**
  2226. * Function that sets User's Stream to send to Peer connection.
  2227. * Priority for <code>shareScreen()</code> Stream over <code>getUserMedia()</code> Stream.
  2228. * @method _addLocalMediaStreams
  2229. * @private
  2230. * @for Skylink
  2231. * @since 0.5.2
  2232. */
  2233. Skylink.prototype._addLocalMediaStreams = function(peerId) {
  2234. var self = this;
  2235.  
// NOTE ALEX: here we could do something smarter.
// A mediastream is mainly a container; most of the info
// is attached to the tracks. We should iterate over the tracks and print.
  2239. try {
  2240. log.log([peerId, null, null, 'Adding local stream']);
  2241.  
  2242. var pc = self._peerConnections[peerId];
  2243.  
  2244. if (pc) {
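// Offer to receive a media kind only when the app's SDP connection settings allow it
// (or when connecting to the MCU) and the remote description reports common support for it.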
var offerToReceiveAudio = !(!self._sdpSettings.connection.audio && peerId !== 'MCU') &&
self._getSDPCommonSupports(peerId, pc.remoteDescription).audio;
var offerToReceiveVideo = !(!self._sdpSettings.connection.video && peerId !== 'MCU') &&
self._getSDPCommonSupports(peerId, pc.remoteDescription).video;

      if (pc.signalingState !== self.PEER_CONNECTION_STATE.CLOSED) {
        // Updates the stream sent to the peer accordingly
        var updateStreamFn = function (updatedStream) {
          // Only update when no stream has been sent yet, the stream is being
          // removed, or a different stream is replacing the current one
          if (updatedStream ? (pc.localStreamId ? updatedStream.id !== pc.localStreamId : true) : true) {

            // Remove all currently sent tracks before attaching the new ones
            pc.getSenders().forEach(function (sender) {
              pc.removeTrack(sender);
            });

            if (!offerToReceiveAudio && !offerToReceiveVideo) {
              return;
            }

            if (updatedStream) {
              updatedStream.getTracks().forEach(function (track) {
                if ((track.kind === 'audio' && !offerToReceiveAudio) || (track.kind === 'video' && !offerToReceiveVideo)) {
                  return;
                }
                pc.addTrack(track, updatedStream);
              });

              pc.localStreamId = updatedStream.id || updatedStream.label;
              pc.localStream = updatedStream;
            }
          }
        };

        if (self._streams.screenshare && self._streams.screenshare.stream) {
          log.debug([peerId, 'MediaStream', null, 'Sending screen'], self._streams.screenshare.stream);

          updateStreamFn(self._streams.screenshare.stream);

        } else if (self._streams.userMedia && self._streams.userMedia.stream) {
          log.debug([peerId, 'MediaStream', null, 'Sending stream'], self._streams.userMedia.stream);

          updateStreamFn(self._streams.userMedia.stream);

        } else {
          log.warn([peerId, 'MediaStream', null, 'No media to send. Will be only receiving']);

          updateStreamFn(null);
        }

      } else {
        log.warn([peerId, 'MediaStream', null,
          'Not adding any stream as signalingState is closed']);
      }
    } else {
      log.warn([peerId, 'MediaStream', self._mediaStream,
        'Not adding stream as peerconnection object does not exist']);
    }
  } catch (error) {
    if ((error.message || '').indexOf('already added') > -1) {
      log.warn([peerId, null, null, 'Not re-adding stream as LocalMediaStream is already added'], error);
    } else {
      // Fix errors thrown like NS_ERROR_UNEXPECTED
      log.error([peerId, null, null, 'Failed adding local stream'], error);
    }
  }
};

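/*
 * Illustrative sketch (not part of the library source): the remove-then-add
 * track replacement pattern used by _addLocalMediaStreams() above, distilled
 * to plain RTCPeerConnection APIs. The peerConnection and newStream variables
 * are placeholders for illustration.
 *
 *   // Drop every currently sent track ...
 *   peerConnection.getSenders().forEach(function (sender) {
 *     peerConnection.removeTrack(sender);
 *   });
 *   // ... then attach the replacement stream's tracks, which triggers
 *   // renegotiation with the new media.
 *   newStream.getTracks().forEach(function (track) {
 *     peerConnection.addTrack(track, newStream);
 *   });
 */
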
/**
 * Function that handles ended streams.
 * @method _handleEndedStreams
 * @private
 * @for Skylink
 * @since 0.6.16
 */
Skylink.prototype._handleEndedStreams = function (peerId, checkStreamId) {
  var self = this;
  self._streamsSession[peerId] = self._streamsSession[peerId] || {};

  var renderEndedFn = function (streamId) {
    if (self._streamsSession[peerId][streamId]) {
      // Snapshot the peer information with the ended stream's settings
      var peerInfo = clone(self.getPeerInfo(peerId));
      peerInfo.settings.audio = clone(self._streamsSession[peerId][streamId].audio);
      peerInfo.settings.video = clone(self._streamsSession[peerId][streamId].video);
      var hasScreenshare = peerInfo.settings.video && typeof peerInfo.settings.video === 'object' &&
        !!peerInfo.settings.video.screenshare;
      // Mark the session as ended before notifying listeners
      self._streamsSession[peerId][streamId] = false;
      self._trigger('streamEnded', peerId, peerInfo, false, hasScreenshare, streamId);
    }
  };

  if (checkStreamId) {
    renderEndedFn(checkStreamId);
  } else if (self._peerConnections[peerId]) {
    for (var streamId in self._streamsSession[peerId]) {
      if (self._streamsSession[peerId].hasOwnProperty(streamId) && self._streamsSession[peerId][streamId]) {
        renderEndedFn(streamId);
      }
    }
  }
};

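/**
 * Usage sketch (not part of the library source): cleaning up the UI when
 * <code>_handleEndedStreams()</code> fires the <code>streamEnded</code> event
 * above. The handler parameters mirror the <code>_trigger('streamEnded', ...)</code>
 * call; it assumes video elements were given IDs of the form
 * <code>'peer-' + peerId</code> as in the earlier sketch.
 *
 *   skylinkDemo.on('streamEnded', function (peerId, peerInfo, isSelf, isScreensharing, streamId) {
 *     var videoElement = document.getElementById('peer-' + peerId);
 *     if (videoElement && videoElement.parentNode) {
 *       videoElement.parentNode.removeChild(videoElement);
 *     }
 *   });
 */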