File: source/stream-media.js

  1. /**
  2. * <blockquote class="info">
  3. * For a better user experience, the functionality is throttled when invoked many times in less
  4. * than the milliseconds interval configured in the <a href="#method_init"><code>init()</code> method</a>.
  5. * </blockquote>
  6. * Function that retrieves camera Stream.
  7. * @method getUserMedia
  8. * @param {JSON} [options] The camera Stream configuration options.
  9. * - When not provided, the value is set to <code>{ audio: true, video: true }</code>.
  10. * <small>To fallback to retrieve audio track only when retrieving of audio and video tracks failed,
  11. * enable the <code>audioFallback</code> flag in the <a href="#method_init"><code>init()</code> method</a>.</small>
  12. * @param {Boolean} [options.useExactConstraints=false] <blockquote class="info">
  13. * Note that by enabling this flag, exact values will be requested when retrieving camera Stream,
  14. * but it does not prevent constraints related errors. By default when not enabled,
  15. * expected mandatory maximum values (or optional values for source ID) will be requested to prevent constraints related
  16. * errors, with an exception for <code>options.video.frameRate</code> option in Safari and IE (any plugin-enabled) browsers,
  17. * where the expected maximum value will not be requested due to the lack of support.</blockquote>
  18. * The flag if <code>getUserMedia()</code> should request for camera Stream to match exact requested values of
  19. * <code>options.audio.deviceId</code> and <code>options.video.deviceId</code>, <code>options.video.resolution</code>
  20. * and <code>options.video.frameRate</code> when provided.
  21. * @param {Boolean|JSON} [options.audio=false] <blockquote class="info">
  22. * Note that the current Edge browser implementation does not support the <code>options.audio.optional</code>,
  23. * <code>options.audio.deviceId</code>, <code>options.audio.echoCancellation</code>.</blockquote>
  24. * The audio configuration options.
  25. * @param {Boolean} [options.audio.stereo=false] <blockquote class="info"><b>Deprecation Warning!</b>
  26. * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.stereo</code> and
  27. * the <code>options.codecParams.audio.opus["sprop-stereo"]</code>
  28. * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
  29. * <code>options.codecParams.audio.opus.stereo</code> or <code>options.codecParams.audio.opus["sprop-stereo"]</code>
  30. * is configured, this overrides the <code>options.audio.stereo</code> setting.</blockquote>
  31. * The flag if OPUS audio codec stereo band should be configured for sending encoded audio data.
  32. * <small>When not provided, the default browser configuration is used.</small>
  33. * @param {Boolean} [options.audio.usedtx] <blockquote class="info"><b>Deprecation Warning!</b>
  34. * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.usedtx</code>
  35. * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
  36. * <code>options.codecParams.audio.opus.usedtx</code> is configured, this overrides the
  37. * <code>options.audio.usedtx</code> setting. Note that this feature might
  38. * not work depending on the browser support and implementation.</blockquote>
  39. * The flag if OPUS audio codec should enable DTX (Discontinuous Transmission) for sending encoded audio data.
  40. * <small>This might help to reduce bandwidth as it reduces the bitrate during silence or background noise, and
  41. * goes hand-in-hand with the <code>options.voiceActivityDetection</code> flag in <a href="#method_joinRoom">
  42. * <code>joinRoom()</code> method</a>.</small>
  43. * <small>When not provided, the default browser configuration is used.</small>
  44. * @param {Boolean} [options.audio.useinbandfec] <blockquote class="info"><b>Deprecation Warning!</b>
  45. * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.useinbandfec</code>
  46. * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
  47. * <code>options.codecParams.audio.opus.useinbandfec</code> is configured, this overrides the
  48. * <code>options.audio.useinbandfec</code> setting. Note that this parameter should only be used
  49. * for debugging purposes.</blockquote>
  50. * The flag if OPUS audio codec has the capability to take advantage of the in-band FEC
  51. * (Forward Error Correction) when sending encoded audio data.
  52. * <small>This helps to reduce the harm of packet loss by encoding information about the previous packet loss.</small>
  53. * <small>When not provided, the default browser configuration is used.</small>
  54. * @param {Number} [options.audio.maxplaybackrate] <blockquote class="info"><b>Deprecation Warning!</b>
  55. * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.maxplaybackrate</code>
  56. * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
  57. * <code>options.codecParams.audio.opus.maxplaybackrate</code> is configured, this overrides the
  58. * <code>options.audio.maxplaybackrate</code> setting. Note that this feature might
  59. * not work depending on the browser support and implementation.
  60. * Note that this parameter should only be used for debugging purposes.</blockquote>
  61. * The OPUS audio codec maximum output sampling rate in Hz (hertz) that it is capable of receiving
  62. * decoded audio data, to adjust to the hardware limitations and ensure that any sending audio data
  63. * would not encode at a higher sampling rate specified by this.
  64. * <small>This value must be between <code>8000</code> to <code>48000</code>.</small>
  65. * <small>When not provided, the default browser configuration is used.</small>
  66. * @param {Boolean} [options.audio.mute=false] The flag if audio tracks should be muted upon receiving them.
  67. * <small>Providing the value as <code>false</code> does nothing to <code>peerInfo.mediaStatus.audioMuted</code>,
  68. * but when provided as <code>true</code>, this sets the <code>peerInfo.mediaStatus.audioMuted</code> value to
  69. * <code>true</code> and mutes any existing <a href="#method_shareScreen">
  70. * <code>shareScreen()</code> Stream</a> audio tracks as well.</small>
  71. * @param {Array} [options.audio.optional] <blockquote class="info">
  72. * This property has been deprecated. "optional" constraints have been removed from specs.<br>
  73. * Note that this may result in constraints related error when <code>options.useExactConstraints</code> value is
  74. * <code>true</code>. If you are looking to set the requested source ID of the audio track,
  75. * use <code>options.audio.deviceId</code> instead.</blockquote>
  76. * The <code>navigator.getUserMedia()</code> API <code>audio: { optional [..] }</code> property.
  77. * @param {String} [options.audio.deviceId] <blockquote class="info">
  78. * Note this is currently not supported in Firefox browsers.
  79. * </blockquote> The audio track source ID of the device to use.
  80. * <small>The list of available audio source ID can be retrieved by the <a href="https://developer.
  81. * mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices"><code>navigator.mediaDevices.enumerateDevices</code>
  82. * API</a>.</small>
  83. * @param {Boolean} [options.audio.echoCancellation=true] <blockquote class="info">
  84. * For Chrome/Opera/IE/Safari/Bowser, the echo cancellation functionality may not work and may produce a terrible
  85. * feedback. It is recommended to use headphones or other microphone devices rather than the device
  86. * in-built microphones.</blockquote> The flag to enable echo cancellation for audio track.
  87. * @param {Boolean|JSON} [options.video=false] <blockquote class="info">
  88. * Note that the current Edge browser implementation does not support the <code>options.video.optional</code>,
  89. * <code>options.video.deviceId</code>, <code>options.video.resolution</code> and
  90. * <code>options.video.frameRate</code>, <code>options.video.facingMode</code>.</blockquote>
  91. * The video configuration options.
  92. * @param {Boolean} [options.video.mute=false] The flag if video tracks should be muted upon receiving them.
  93. * <small>Providing the value as <code>false</code> does nothing to <code>peerInfo.mediaStatus.videoMuted</code>,
  94. * but when provided as <code>true</code>, this sets the <code>peerInfo.mediaStatus.videoMuted</code> value to
  95. * <code>true</code> and mutes any existing <a href="#method_shareScreen">
  96. * <code>shareScreen()</code> Stream</a> video tracks as well.</small>
  97. * @param {JSON} [options.video.resolution] The video resolution.
  98. * <small>By default, <a href="#attr_VIDEO_RESOLUTION"><code>VGA</code></a> resolution option
  99. * is selected when not provided.</small>
  100. * [Rel: Skylink.VIDEO_RESOLUTION]
  101. * @param {Number|JSON} [options.video.resolution.width] The video resolution width.
  102. * - When provided as a number, it is the video resolution width.
  103. * - When provided as a JSON, it is the <code>navigator.mediaDevices.getUserMedia()</code> <code>.width</code> settings.
  104. * Parameters are <code>"ideal"</code> for ideal resolution width, <code>"exact"</code> for exact video resolution width,
  105. * <code>"min"</code> for min video resolution width and <code>"max"</code> for max video resolution width.
  106. * Note that this may result in constraints related errors depending on the browser/hardware supports.
  107. * @param {Number|JSON} [options.video.resolution.height] The video resolution height.
  108. * - When provided as a number, it is the video resolution height.
  109. * - When provided as a JSON, it is the <code>navigator.mediaDevices.getUserMedia()</code> <code>.height</code> settings.
  110. * Parameters are <code>"ideal"</code> for ideal video resolution height, <code>"exact"</code> for exact video resolution height,
  111. * <code>"min"</code> for min video resolution height and <code>"max"</code> for max video resolution height.
  112. * Note that this may result in constraints related errors depending on the browser/hardware supports.
  113. * @param {Number|JSON} [options.video.frameRate] The video <a href="https://en.wikipedia.org/wiki/Frame_rate">
  114. * frameRate</a> per second (fps).
  115. * - When provided as a number, it is the video framerate.
  116. * - When provided as a JSON, it is the <code>navigator.mediaDevices.getUserMedia()</code> <code>.frameRate</code> settings.
  117. * Parameters are <code>"ideal"</code> for ideal video framerate, <code>"exact"</code> for exact video framerate,
  118. * <code>"min"</code> for min video framerate and <code>"max"</code> for max video framerate.
  119. * Note that this may result in constraints related errors depending on the browser/hardware supports.
  120. * @param {Array} [options.video.optional] <blockquote class="info">
  121. * This property has been deprecated. "optional" constraints have been removed from specs.<br>
  122. * Note that this may result in constraints related error when <code>options.useExactConstraints</code> value is
  123. * <code>true</code>. If you are looking to set the requested source ID of the video track,
  124. * use <code>options.video.deviceId</code> instead.</blockquote>
  125. * The <code>navigator.getUserMedia()</code> API <code>video: { optional [..] }</code> property.
  126. * @param {String} [options.video.deviceId] <blockquote class="info">
  127. * Note this is currently not supported in Firefox browsers.
  128. * </blockquote> The video track source ID of the device to use.
  129. * <small>The list of available video source ID can be retrieved by the <a href="https://developer.
  130. * mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices"><code>navigator.mediaDevices.enumerateDevices</code>
  131. * API</a>.</small>
  132. * @param {String|JSON} [options.video.facingMode] The video camera facing mode.
  133. * <small>The list of available video source ID can be retrieved by the <a href="https://developer.mozilla.org
  134. * /en-US/docs/Web/API/MediaTrackConstraints/facingMode">MediaTrackConstraints <code>facingMode</code> API</a>.</small>
  135. * @param {Function} [callback] The callback function fired when request has completed.
  136. * <small>Function parameters signature is <code>function (error, success)</code></small>
  137. * <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
  138. * <code>mediaAccessSuccess</code> event</a> triggering <code>isScreensharing</code> parameter
  139. * payload value as <code>false</code> for request success.</small>
  140. * @param {Error|String} callback.error The error result in request.
  141. * <small>Defined as <code>null</code> when there are no errors in request</small>
  142. * <small>Object signature is the <code>getUserMedia()</code> error when retrieving camera Stream.</small>
  143. * @param {MediaStream} callback.success The success result in request.
  144. * <small>Defined as <code>null</code> when there are errors in request</small>
  145. * <small>Object signature is the camera Stream object.</small>
  146. * @example
  147. * // Example 1: Get both audio and video.
  148. * skylinkDemo.getUserMedia(function (error, success) {
  149. * if (error) return;
  150. * attachMediaStream(document.getElementById("my-video"), success);
  151. * });
  152. *
  153. * // Example 2: Get only audio.
  154. * skylinkDemo.getUserMedia({
  155. * audio: true
  156. * }, function (error, success) {
  157. * if (error) return;
  158. * attachMediaStream(document.getElementById("my-audio"), success);
  159. * });
  160. *
  161. * // Example 3: Configure resolution for video
  162. * skylinkDemo.getUserMedia({
  163. * audio: true,
  164. * video: {
  165. * resolution: skylinkDemo.VIDEO_RESOLUTION.HD
  166. * }
  167. * }, function (error, success) {
  168. * if (error) return;
  169. * attachMediaStream(document.getElementById("my-video"), success);
  170. * });
  171. *
  172. * // Example 4: Configure stereo flag for OPUS codec audio (OPUS is always used by default)
  173. * skylinkDemo.init({
  174. * appKey: "xxxxxx",
  175. * audioCodec: skylinkDemo.AUDIO_CODEC.OPUS
  176. * }, function (initErr, initSuccess) {
  177. * skylinkDemo.getUserMedia({
  178. * audio: {
  179. * stereo: true
  180. * },
  181. * video: true
  182. * }, function (error, success) {
  183. * if (error) return;
  184. * attachMediaStream(document.getElementById("my-video"), success);
  185. * });
  186. * });
  187. *
  188. * // Example 5: Configure frameRate for video
  189. * skylinkDemo.getUserMedia({
  190. * audio: true,
  191. * video: {
  192. * frameRate: 50
  193. * }
  194. * }, function (error, success) {
  195. * if (error) return;
  196. * attachMediaStream(document.getElementById("my-video"), success);
  197. * });
  198. *
  199. * // Example 6: Configure video and audio based on selected sources. Does not work for Firefox currently.
  200. * var sources = { audio: [], video: [] };
  201. *
  202. * function selectStream (audioSourceId, videoSourceId) {
  203. * if (AdapterJS.webrtcDetectedBrowser === 'firefox') {
  204. * console.warn("Currently this feature is not supported by Firefox browsers!");
  205. * return;
  206. * }
  207. * skylinkDemo.getUserMedia({
  208. * audio: {
  209. * optional: [{ sourceId: audioSourceId }]
  210. * },
  211. * video: {
  212. * optional: [{ sourceId: videoSourceId }]
  213. * }
  214. * }, function (error, success) {
  215. * if (error) return;
  216. * attachMediaStream(document.getElementById("my-video"), success);
  217. * });
  218. * }
  219. *
  220. * navigator.mediaDevices.enumerateDevices().then(function(devices) {
  221. * var selectedAudioSourceId = "";
  222. * var selectedVideoSourceId = "";
  223. * devices.forEach(function(device) {
  224. * console.log(device.kind + ": " + device.label + " source ID = " + device.deviceId);
  225. * if (device.kind === "audio") {
  226. * selectedAudioSourceId = device.deviceId;
  227. * } else {
  228. * selectedVideoSourceId = device.deviceId;
  229. * }
  230. * });
  231. * selectStream(selectedAudioSourceId, selectedVideoSourceId);
  232. * }).catch(function (error) {
  233. * console.error("Failed", error);
  234. * });
  235. * @trigger <ol class="desc-seq">
  236. * <li>If <code>options.audio</code> value is <code>false</code> and <code>options.video</code>
  237. * value is <code>false</code>: <ol><li><b>ABORT</b> and return error.</li></ol></li>
  238. * <li>Retrieve camera Stream. <ol><li>If retrieval was successful: <ol>
  239. * <li>If there is any previous <code>getUserMedia()</code> Stream: <ol>
  240. * <li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a>.</li></ol></li>
  241. * <li>If there are missing audio or video tracks requested: <ol>
  242. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers parameter payload
  243. * <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code> value as <code>false</code> and
  244. * <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li>
  245. * <li>Mutes / Unmutes audio and video tracks based on current muted settings in <code>peerInfo.mediaStatus</code>.
  246. * <small>This can be retrieved with <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
  247. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers parameter payload
  248. * <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallback</code>
  249. * value as <code>false</code>.</li></ol></li><li>Else: <ol>
  250. * <li>If <code>options.audioFallback</code> is enabled in the <a href="#method_init"><code>init()</code> method</a>,
  251. * <code>options.audio</code> value is <code>true</code> and <code>options.video</code> value is <code>true</code>: <ol>
  252. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers
  253. * parameter payload <code>state</code> as <code>FALLBACKING</code>, <code>isScreensharing</code>
  254. * value as <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
  255. * <li>Retrieve camera Stream with audio tracks only. <ol><li>If retrieval was successful: <ol>
  256. * <li>If there is any previous <code>getUserMedia()</code> Stream: <ol>
  257. * <li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a>.</li></ol></li>
  258. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers
  259. * parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
  260. * value as <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
  261. * <li>Mutes / Unmutes audio and video tracks based on current muted settings in <code>peerInfo.mediaStatus</code>.
  262. * <small>This can be retrieved with <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
  263. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  264. * parameter payload <code>isScreensharing</code> value as <code>false</code> and
  265. * <code>isAudioFallback</code> value as <code>true</code>.</li></ol></li><li>Else: <ol>
  266. * <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers
  267. * parameter payload <code>isScreensharing</code> value as <code>false</code> and
  268. * <code>isAudioFallbackError</code> value as <code>true</code>.</li>
  269. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers
  270. * parameter payload <code>state</code> as <code>ERROR</code>, <code>isScreensharing</code> value as
  271. * <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
  272. * <li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  273. * <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers parameter payload
  274. * <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallbackError</code> value as
  275. * <code>false</code>.</li><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li></ol></li></ol>
  276. * @for Skylink
  277. * @since 0.5.6
  278. */
  279. Skylink.prototype.getUserMedia = function(options,callback) {
  280. var self = this;
  281.  
  282. if (typeof options === 'function'){
  283. callback = options;
  284. options = {
  285. audio: true,
  286. video: true
  287. };
  288.  
  289. } else if (typeof options !== 'object' || options === null) {
  290. if (typeof options === 'undefined') {
  291. options = {
  292. audio: true,
  293. video: true
  294. };
  295.  
  296. } else {
  297. var invalidOptionsError = 'Please provide a valid options';
  298. log.error(invalidOptionsError, options);
  299. if (typeof callback === 'function') {
  300. callback(new Error(invalidOptionsError), null);
  301. }
  302. return;
  303. }
  304.  
  305. } else if (!options.audio && !options.video) {
  306. var noConstraintOptionsSelectedError = 'Please select audio or video';
  307. log.error(noConstraintOptionsSelectedError, options);
  308. if (typeof callback === 'function') {
  309. callback(new Error(noConstraintOptionsSelectedError), null);
  310. }
  311. return;
  312. }
  313.  
  314. /*if (window.location.protocol !== 'https:' && AdapterJS.webrtcDetectedBrowser === 'chrome' &&
  315. AdapterJS.webrtcDetectedVersion > 46) {
  316. errorMsg = 'getUserMedia() has to be called in https:// application';
  317. log.error(errorMsg, options);
  318. if (typeof callback === 'function') {
  319. callback(new Error(errorMsg), null);
  320. }
  321. return;
  322. }*/
  323.  
  324. self._throttle(function (runFn) {
  325. if (!runFn) {
  326. if (self._initOptions.throttlingShouldThrowError) {
  327. var throttleLimitError = 'Unable to run as throttle interval has not reached (' + self._initOptions.throttleIntervals.getUserMedia + 'ms).';
  328. log.error(throttleLimitError);
  329.  
  330. if (typeof callback === 'function') {
  331. callback(new Error(throttleLimitError), null);
  332. }
  333. }
  334. return;
  335. }
  336.  
  337. if (typeof callback === 'function') {
  338. var mediaAccessSuccessFn = function (stream) {
  339. self.off('mediaAccessError', mediaAccessErrorFn);
  340. callback(null, stream);
  341. };
  342. var mediaAccessErrorFn = function (error) {
  343. self.off('mediaAccessSuccess', mediaAccessSuccessFn);
  344. callback(error, null);
  345. };
  346.  
  347. self.once('mediaAccessSuccess', mediaAccessSuccessFn, function (stream, isScreensharing) {
  348. return !isScreensharing;
  349. });
  350.  
  351. self.once('mediaAccessError', mediaAccessErrorFn, function (error, isScreensharing) {
  352. return !isScreensharing;
  353. });
  354. }
  355.  
  356. // Parse stream settings
  357. var settings = self._parseStreamSettings(options);
  358.  
  359. var onSuccessCbFn = function (stream) {
  360. if (settings.mutedSettings.shouldAudioMuted) {
  361. self._streamsMutedSettings.audioMuted = true;
  362. }
  363.  
  364. if (settings.mutedSettings.shouldVideoMuted) {
  365. self._streamsMutedSettings.videoMuted = true;
  366. }
  367.  
  368. self._onStreamAccessSuccess(stream, settings, false, false);
  369. };
  370.  
  371. var onErrorCbFn = function (error) {
  372. self._onStreamAccessError(error, settings, false, false);
  373. };
  374.  
  375. try {
  376. if (typeof (AdapterJS || {}).webRTCReady !== 'function') {
  377. return onErrorCbFn(new Error('Failed to call getUserMedia() as AdapterJS is not yet loaded!'));
  378. }
  379.  
  380. AdapterJS.webRTCReady(function () {
  381. navigator.getUserMedia(settings.getUserMediaSettings, onSuccessCbFn, onErrorCbFn);
  382. });
  383. } catch (error) {
  384. onErrorCbFn(error);
  385. }
  386.  
  387. }, 'getUserMedia', self._initOptions.throttleIntervals.getUserMedia);
  388. };
  389.  
  390. /**
  391. * <blockquote class="info">
  392. * Note that if <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> is available despite having
  393. * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> available, the
  394. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> is sent instead of the
  395. * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> to Peers.
  396. * </blockquote>
  397. * Function that sends a new <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>
  398. * to all connected Peers in the Room.
  399. * @method sendStream
  400. * @param {JSON|MediaStream} options The <a href="#method_getUserMedia"><code>getUserMedia()</code>
  401. * method</a> <code>options</code> parameter settings.
  402. * - When provided as a <code>MediaStream</code> object, this configures the <code>options.audio</code> and
  403. * <code>options.video</code> based on the tracks available in the <code>MediaStream</code> object,
  404. * and configures the <code>options.audio.mute</code> and <code>options.video.mute</code> based on the tracks
  405. * <code>.enabled</code> flags in the tracks provided in the <code>MediaStream</code> object without
  406. * invoking <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>.
  407. * <small>Object signature matches the <code>options</code> parameter in the
  408. * <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>.</small>
  409. * @param {Function} [callback] The callback function fired when request has completed.
  410. * <small>Function parameters signature is <code>function (error, success)</code></small>
  411. * <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
  412. * <code>mediaAccessSuccess</code> event</a> triggering <code>isScreensharing</code> parameter payload value
  413. * as <code>false</code> for request success when User is in Room without Peers,
  414. * or by the <a href="#event_peerRestart"><code>peerRestart</code> event</a> triggering
  415. * <code>isSelfInitiateRestart</code> parameter payload value as <code>true</code> for all connected Peers
  416. * for request success when User is in Room with Peers.</small>
  417. * @param {Error|String} callback.error The error result in request.
  418. * <small>Defined as <code>null</code> when there are no errors in request</small>
  419. * <small>Object signature is the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> error or
  420. * when invalid <code>options</code> is provided.</small>
  421. * @param {MediaStream} callback.success The success result in request.
  422. * <small>Defined as <code>null</code> when there are errors in request</small>
  423. * <small>Object signature is the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>
  424. * Stream object.</small>
  425. * @example
  426. * // Example 1: Send MediaStream object before being connected to Room
  427. * function retrieveStreamBySourceForFirefox (sourceId) {
  428. * navigator.mediaDevices.getUserMedia({
  429. * audio: true,
  430. * video: {
  431. * sourceId: { exact: sourceId }
  432. * }
  433. * }).then(function (stream) {
  434. * skylinkDemo.sendStream(stream, function (error, success) {
  435. * if (error) return;
  436. * if (stream === success) {
  437. * console.info("Same MediaStream has been sent");
  438. * }
  439. * console.log("Stream is now being sent to Peers");
  440. * attachMediaStream(document.getElementById("my-video"), success);
  441. * });
  442. * });
  443. * }
  444. *
  445. * // Example 2: Send video after being connected to Room
  446. * function sendVideo () {
  447. * skylinkDemo.joinRoom(function (jRError, jRSuccess) {
  448. * if (jRError) return;
  449. * skylinkDemo.sendStream({
  450. * audio: true,
  451. * video: true
  452. * }, function (error, success) {
  453. * if (error) return;
  454. * console.log("getUserMedia() Stream with video is now being sent to Peers");
  455. * attachMediaStream(document.getElementById("my-video"), success);
  456. * });
  457. * });
  458. * }
  459. * @trigger <ol class="desc-seq">
  460. * <li>Checks <code>options</code> provided. <ol><li>If provided parameter <code>options</code> is not valid: <ol>
  461. * <li><b>ABORT</b> and return error.</li></ol></li>
  462. * <li>Else if provided parameter <code>options</code> is a Stream object: <ol>
  463. * <li>Checks if there is any audio or video tracks. <ol><li>If there is no tracks: <ol>
  464. * <li><b>ABORT</b> and return error.</li></ol></li><li>Else: <ol>
  465. * <li>Set <code>options.audio</code> value as <code>true</code> if Stream has audio tracks.</li>
  466. * <li>Set <code>options.video</code> value as <code>false</code> if Stream has video tracks.</li>
  467. * <li>Mutes / Unmutes audio and video tracks based on current muted settings in
  468. * <code>peerInfo.mediaStatus</code>. <small>This can be retrieved with
  469. * <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
  470. * <li>If there is any previous <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>:
  471. * <ol><li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a> to stop previous Stream.</li></ol></li>
  472. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  473. * parameter payload <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallback</code>
  474. * value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  475. * <li>Invoke <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> with
  476. * <code>options</code> provided in <code>sendStream()</code>. <ol><li>If request has errors: <ol>
  477. * <li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li></ol></li>
  478. * <li>If there is currently no <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> and User is in Room: <ol>
  479. * <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
  480. * <code>isSelf</code> value as <code>true</code> and <code>stream</code> as
  481. * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.</li>
  482. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  483. * <code>isSelf</code> value as <code>true</code>.</li>
  484. * <li>Checks if MCU is enabled for App Key provided in <a href="#method_init"><code>init()</code> method</a>. <ol>
  485. * <li>If MCU is enabled: <ol><li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code>
  486. * method</a>. <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li>
  487. * <li>Else: <ol><li>If there are connected Peers in the Room: <ol>
  488. * <li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>. <ol>
  489. * <li>If request has errors: <ol><li><b>ABORT</b> and return error.
  490. * </li></ol></li></ol></li></ol></li></ol></li></ol></li></ol></li></ol>
  491. * @for Skylink
  492. * @since 0.5.6
  493. */
  494.  
  495. Skylink.prototype.sendStream = function(options, callback) {
  496. var self = this;
  497.  
  498. var restartFn = function (stream) {
  499. if (self._inRoom) {
  500. if (!self._streams.screenshare) {
  501. self._trigger('incomingStream', self._user.sid, stream, true, self.getPeerInfo(), false, stream.id || stream.label);
  502. self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
  503. }
  504.  
  505. if (Object.keys(self._peerConnections).length > 0 || self._hasMCU) {
  506. self._refreshPeerConnection(Object.keys(self._peerConnections), false, {}, function (err, success) {
  507. if (err) {
  508. log.error('Failed refreshing connections for sendStream() ->', err);
  509. if (typeof callback === 'function') {
  510. callback(new Error('Failed refreshing connections.'), null);
  511. }
  512. return;
  513. }
  514. if (typeof callback === 'function') {
  515. callback(null, stream);
  516. }
  517. });
  518. } else if (typeof callback === 'function') {
  519. callback(null, stream);
  520. }
  521. } else if (typeof callback === 'function') {
  522. callback(null, stream);
  523. }
  524. };
  525.  
  526. // Note: Sometimes it may be "function" or "object" but then "function" might be mistaken for callback function, so for now fixing it that way
  527. if ((typeof options !== 'object' || options === null) && !(AdapterJS && AdapterJS.WebRTCPlugin &&
  528. AdapterJS.WebRTCPlugin.plugin && ['function', 'object'].indexOf(typeof options) > -1)) {
  529. var invalidOptionsError = 'Provided stream settings is invalid';
  530. log.error(invalidOptionsError, options);
  531. if (typeof callback === 'function'){
  532. callback(new Error(invalidOptionsError),null);
  533. }
  534. return;
  535. }
  536.  
  537. if (!self._inRoom) {
  538. log.warn('There are no peers to send stream to as not in room!');
  539. }
  540.  
  541. if (AdapterJS.webrtcDetectedBrowser === 'edge') {
  542. var edgeNotSupportError = 'Edge browser currently does not support renegotiation.';
  543. log.error(edgeNotSupportError, options);
  544. if (typeof callback === 'function'){
  545. callback(new Error(edgeNotSupportError),null);
  546. }
  547. return;
  548. }
  549.  
  550. if (typeof options.getAudioTracks === 'function' || typeof options.getVideoTracks === 'function') {
  551. var checkActiveTracksFn = function (tracks) {
  552. for (var t = 0; t < tracks.length; t++) {
  553. if (!(tracks[t].ended || (typeof tracks[t].readyState === 'string' ?
  554. tracks[t].readyState !== 'live' : false))) {
  555. return true;
  556. }
  557. }
  558. return false;
  559. };
  560.  
  561. if (!checkActiveTracksFn( options.getAudioTracks() ) && !checkActiveTracksFn( options.getVideoTracks() )) {
  562. var invalidStreamError = 'Provided stream object does not have audio or video tracks.';
  563. log.error(invalidStreamError, options);
  564. if (typeof callback === 'function'){
  565. callback(new Error(invalidStreamError),null);
  566. }
  567. return;
  568. }
  569.  
  570. self._onStreamAccessSuccess(options, {
  571. settings: {
  572. audio: true,
  573. video: true
  574. },
  575. getUserMediaSettings: {
  576. audio: true,
  577. video: true
  578. }
  579. }, false, false);
  580.  
  581. restartFn(options);
  582.  
  583. } else {
  584. self.getUserMedia(options, function (err, stream) {
  585. if (err) {
  586. if (typeof callback === 'function') {
  587. callback(err, null);
  588. }
  589. return;
  590. }
  591. restartFn(stream);
  592. });
  593. }
  594. };
  595.  
  596. /**
  597. * <blockquote class="info">
  598. * Note that broadcasted events from <a href="#method_muteStream"><code>muteStream()</code> method</a>,
  599. * <a href="#method_stopStream"><code>stopStream()</code> method</a>,
  600. * <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
  601. * <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
  602. * <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
  603. * <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
  604. * sent within less than an interval.
  605. * </blockquote>
  606. * Function that stops <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.
  607. * @method stopStream
  608. * @example
  609. * function stopStream () {
  610. * skylinkDemo.stopStream();
  611. * }
  612. *
  613. * skylinkDemo.getUserMedia();
  614. * @trigger <ol class="desc-seq">
  615. * <li>Checks if there is <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>. <ol>
  616. * <li>If there is <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>: <ol>
  617. * <li>Stop <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> Stream. <ol>
  618. * <li><a href="#event_mediaAccessStopped"><code>mediaAccessStopped</code> event</a> triggers
  619. * parameter payload <code>isScreensharing</code> value as <code>false</code>.</li><li>If User is in Room: <ol>
  620. * <li><a href="#event_streamEnded"><code>streamEnded</code> event</a> triggers parameter
 * payload <code>isSelf</code> value as <code>true</code> and <code>isScreensharing</code> value as <code>false</code>
  622. * .</li><li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  623. * <code>isSelf</code> value as <code>true</code>.</li></ol></li></ol></li></ol></li></ol></li></ol>
  624. * @for Skylink
  625. * @since 0.5.6
  626. */
  627. Skylink.prototype.stopStream = function () {
  628. if (this._streams.userMedia) {
  629. this._stopStreams({
  630. userMedia: true
  631. });
  632. }
  633. };
  634.  
  635. /**
  636. * <blockquote class="info">
  637. * Note that broadcasted events from <a href="#method_muteStream"><code>muteStream()</code> method</a>,
  638. * <a href="#method_stopStream"><code>stopStream()</code> method</a>,
  639. * <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
  640. * <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
  641. * <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
  642. * <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
  643. * sent within less than an interval.
  644. * </blockquote>
  645. * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
  646. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio or video tracks.
  647. * @method muteStream
  648. * @param {JSON} options The Streams muting options.
  649. * @param {Boolean} [options.audioMuted=true] The flag if all Streams audio
  650. * tracks should be muted or not.
 * @param {Boolean} [options.videoMuted=true] The flag if all Streams video
  652. * tracks should be muted or not.
  653. * @example
  654. * // Example 1: Mute both audio and video tracks in all Streams
  655. * skylinkDemo.muteStream({
  656. * audioMuted: true,
  657. * videoMuted: true
  658. * });
  659. *
  660. * // Example 2: Mute only audio tracks in all Streams
  661. * skylinkDemo.muteStream({
  662. * audioMuted: true,
  663. * videoMuted: false
  664. * });
  665. *
  666. * // Example 3: Mute only video tracks in all Streams
  667. * skylinkDemo.muteStream({
  668. * audioMuted: false,
  669. * videoMuted: true
  670. * });
  671. * @trigger <ol class="desc-seq">
  672. * <li>If provided parameter <code>options</code> is invalid: <ol><li><b>ABORT</b> and return error.</li></ol></li>
  673. * <li>Checks if there is any available Streams: <ol><li>If there is no available Streams: <ol>
  674. * <li><b>ABORT</b> and return error.</li></ol></li><li>If User is in Room: <ol>
  675. * <li>Checks if there is audio tracks to mute / unmute: <ol><li>If there is audio tracks to mute / unmute: <ol>
  676. * <li>If <code>options.audioMuted</code> value is not the same as the current
  677. * <code>peerInfo.mediaStatus.audioMuted</code>: <small>This can be retrieved with
  678. * <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small> <ol>
  679. * <li><em>For Peer only</em> <a href="#event_peerUpdated"><code>peerUpdated</code> event</a>
  680. * triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li>
  681. * <li><em>For Peer only</em> <a href="#event_streamMuted"><code>streamMuted</code> event</a>
  682. * triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li>
  683. * <li>Checks if there is video tracks to mute / unmute: <ol><li>If there is video tracks to mute / unmute: <ol>
  684. * <li>If <code>options.videoMuted</code> value is not the same as the current
  685. * <code>peerInfo.mediaStatus.videoMuted</code>: <small>This can be retrieved with
  686. * <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small> <ol>
  687. * <li><em>For Peer only</em> <a href="#event_peerUpdated"><code>peerUpdated</code> event</a>
  688. * triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li>
  689. * <li><em>For Peer only</em> <a href="#event_streamMuted"><code>streamMuted</code> event</a> triggers with
  690. * parameter payload <code>isSelf</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li></ol></li>
  691. * <li>If <code>options.audioMuted</code> value is not the same as the current
  692. * <code>peerInfo.mediaStatus.audioMuted</code> or <code>options.videoMuted</code> value is not
  693. * the same as the current <code>peerInfo.mediaStatus.videoMuted</code>: <ol>
  694. * <li><a href="#event_localMediaMuted"><code>localMediaMuted</code> event</a> triggers.</li>
  695. * <li>If User is in Room: <ol><li><a href="#event_streamMuted"><code>streamMuted</code> event</a>
  696. * triggers with parameter payload <code>isSelf</code> value as <code>true</code>.</li>
  697. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers with
  698. * parameter payload <code>isSelf</code> value as <code>true</code>.</li></ol></li></ol></li></ol></li></ol>
  699. * @for Skylink
  700. * @since 0.5.7
  701. */
  702. Skylink.prototype.muteStream = function(options) {
  703. var self = this;
  704.  
  705. if (typeof options !== 'object') {
  706. log.error('Provided settings is not an object');
  707. return;
  708. }
  709.  
  710. if (!(self._streams.userMedia && self._streams.userMedia.stream) &&
  711. !(self._streams.screenshare && self._streams.screenshare.stream)) {
  712. log.warn('No streams are available to mute / unmute!');
  713. return;
  714. }
  715.  
  716. var audioMuted = typeof options.audioMuted === 'boolean' ? options.audioMuted : true;
  717. var videoMuted = typeof options.videoMuted === 'boolean' ? options.videoMuted : true;
  718. var hasToggledAudio = false;
  719. var hasToggledVideo = false;
  720.  
  721. if (self._streamsMutedSettings.audioMuted !== audioMuted) {
  722. self._streamsMutedSettings.audioMuted = audioMuted;
  723. hasToggledAudio = true;
  724. }
  725.  
  726. if (self._streamsMutedSettings.videoMuted !== videoMuted) {
  727. self._streamsMutedSettings.videoMuted = videoMuted;
  728. hasToggledVideo = true;
  729. }
  730.  
  731. if (hasToggledVideo || hasToggledAudio) {
  732. var streamTracksAvailability = self._muteStreams();
  733.  
  734. if (hasToggledVideo && self._inRoom) {
  735. self._sendChannelMessage({
  736. type: self._SIG_MESSAGE_TYPE.MUTE_VIDEO,
  737. mid: self._user.sid,
  738. rid: self._room.id,
  739. muted: self._streamsMutedSettings.videoMuted,
  740. stamp: (new Date()).getTime()
  741. });
  742. }
  743.  
  744. if (hasToggledAudio && self._inRoom) {
  745. setTimeout(function () {
  746. self._sendChannelMessage({
  747. type: self._SIG_MESSAGE_TYPE.MUTE_AUDIO,
  748. mid: self._user.sid,
  749. rid: self._room.id,
  750. muted: self._streamsMutedSettings.audioMuted,
  751. stamp: (new Date()).getTime()
  752. });
  753. }, hasToggledVideo ? 1050 : 0);
  754. }
  755.  
  756. if ((streamTracksAvailability.hasVideo && hasToggledVideo) ||
  757. (streamTracksAvailability.hasAudio && hasToggledAudio)) {
  758.  
  759. self._trigger('localMediaMuted', {
  760. audioMuted: streamTracksAvailability.hasAudio ? self._streamsMutedSettings.audioMuted : true,
  761. videoMuted: streamTracksAvailability.hasVideo ? self._streamsMutedSettings.videoMuted : true
  762. });
  763.  
  764. if (self._inRoom) {
  765. self._trigger('streamMuted', self._user.sid, self.getPeerInfo(), true,
  766. self._streams.screenshare && self._streams.screenshare.stream);
  767. self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
  768. }
  769. }
  770. }
  771. };
  772.  
  773. /**
  774. * <blockquote class="info"><b>Deprecation Warning!</b>
  775. * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
  776. * </blockquote>
  777. * Function that unmutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
  778. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio tracks.
  779. * @method enableAudio
  780. * @deprecated true
  781. * @example
  782. * function unmuteAudio () {
  783. * skylinkDemo.enableAudio();
  784. * }
  785. * @trigger <ol class="desc-seq">
  786. * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
  787. * <code>options.audioMuted</code> value as <code>false</code> and
  788. * <code>options.videoMuted</code> value with current <code>peerInfo.mediaStatus.videoMuted</code> value.
  789. * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
  790. * @for Skylink
  791. * @since 0.5.5
  792. */
  793. Skylink.prototype.enableAudio = function() {
  794. this.muteStream({
  795. audioMuted: false,
  796. videoMuted: this._streamsMutedSettings.videoMuted
  797. });
  798. };
  799.  
  800. /**
  801. * <blockquote class="info"><b>Deprecation Warning!</b>
  802. * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
  803. * </blockquote>
  804. * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
  805. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio tracks.
  806. * @method disableAudio
  807. * @deprecated true
  808. * @example
  809. * function muteAudio () {
  810. * skylinkDemo.disableAudio();
  811. * }
  812. * @trigger <ol class="desc-seq">
  813. * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
  814. * <code>options.audioMuted</code> value as <code>true</code> and
  815. * <code>options.videoMuted</code> value with current <code>peerInfo.mediaStatus.videoMuted</code> value.
  816. * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
  817. * @for Skylink
  818. * @since 0.5.5
  819. */
  820. Skylink.prototype.disableAudio = function() {
  821. this.muteStream({
  822. audioMuted: true,
  823. videoMuted: this._streamsMutedSettings.videoMuted
  824. });
  825. };
  826.  
  827. /**
  828. * <blockquote class="info"><b>Deprecation Warning!</b>
  829. * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
  830. * </blockquote>
  831. * Function that unmutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
  832. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> video tracks.
  833. * @method enableVideo
  834. * @deprecated true
  835. * @example
  836. * function unmuteVideo () {
  837. * skylinkDemo.enableVideo();
  838. * }
  839. * @trigger <ol class="desc-seq">
  840. * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
  841. * <code>options.videoMuted</code> value as <code>false</code> and
  842. * <code>options.audioMuted</code> value with current <code>peerInfo.mediaStatus.audioMuted</code> value.
  843. * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
  844. * @for Skylink
  845. * @since 0.5.5
  846. */
  847. Skylink.prototype.enableVideo = function() {
  848. this.muteStream({
  849. videoMuted: false,
  850. audioMuted: this._streamsMutedSettings.audioMuted
  851. });
  852. };
  853.  
  854. /**
  855. * <blockquote class="info"><b>Deprecation Warning!</b>
  856. * This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
  857. * </blockquote>
  858. * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
  859. * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> video tracks.
  860. * @method disableVideo
  861. * @deprecated true
  862. * @example
  863. * function muteVideo () {
  864. * skylinkDemo.disableVideo();
  865. * }
  866. * @trigger <ol class="desc-seq">
  867. * <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
  868. * <code>options.videoMuted</code> value as <code>true</code> and
  869. * <code>options.audioMuted</code> value with current <code>peerInfo.mediaStatus.audioMuted</code> value.
  870. * <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
  871. * @for Skylink
  872. * @since 0.5.5
  873. */
  874. Skylink.prototype.disableVideo = function() {
  875. this.muteStream({
  876. videoMuted: true,
  877. audioMuted: this._streamsMutedSettings.audioMuted
  878. });
  879. };
  880.  
  881. /**
  882. * <blockquote class="info">
  883. * For a better user experience, the functionality is throttled when invoked many times in less
  884. * than the milliseconds interval configured in the <a href="#method_init"><code>init()</code> method</a>.
  885. * Note that the Opera and Edge browser does not support screensharing, and as for IE / Safari browsers using
  886. * the Temasys Plugin screensharing support, check out the <a href="https://temasys.com.sg/plugin/#commercial-licensing">
  887. * commercial licensing</a> for more options.
  888. * </blockquote>
  889. * Function that retrieves screensharing Stream.
  890. * @method shareScreen
  891. * @param {JSON|Boolean} [enableAudio=false] The flag if audio tracks should be retrieved.
  892. * @param {Boolean} [enableAudio.stereo=false] <blockquote class="info"><b>Deprecation Warning!</b>
  893. * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.stereo</code> and
  894. * the <code>options.codecParams.audio.opus["sprop-stereo"]</code>
  895. * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
  896. * <code>options.codecParams.audio.opus.stereo</code> or <code>options.codecParams.audio.opus["sprop-stereo"]</code>
  897. * is configured, this overrides the <code>options.audio.stereo</code> setting.</blockquote>
  898. * The flag if OPUS audio codec stereo band should be configured for sending encoded audio data.
  899. * <small>When not provided, the default browser configuration is used.</small>
  900. * @param {Boolean} [enableAudio.usedtx] <blockquote class="info"><b>Deprecation Warning!</b>
  901. * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.stereo</code>
  902. * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
  903. * <code>options.codecParams.audio.opus.stereo</code> is configured, this overrides the
  904. * <code>options.audio.stereo</code> setting. Note that this feature might
  905. * not work depending on the browser support and implementation.</blockquote>
  906. * The flag if OPUS audio codec should enable DTX (Discontinuous Transmission) for sending encoded audio data.
  907. * <small>This might help to reduce bandwidth as it reduces the bitrate during silence or background noise, and
  908. * goes hand-in-hand with the <code>options.voiceActivityDetection</code> flag in <a href="#method_joinRoom">
  909. * <code>joinRoom()</code> method</a>.</small>
  910. * <small>When not provided, the default browser configuration is used.</small>
  911. * @param {Boolean} [enableAudio.useinbandfec] <blockquote class="info"><b>Deprecation Warning!</b>
  912. * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.useinbandfec</code>
  913. * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
  914. * <code>options.codecParams.audio.opus.useinbandfec</code> is configured, this overrides the
  915. * <code>options.audio.useinbandfec</code> setting. Note that this parameter should only be used
 * for debugging purposes.</blockquote>
  917. * The flag if OPUS audio codec has the capability to take advantage of the in-band FEC
  918. * (Forward Error Correction) when sending encoded audio data.
  919. * <small>This helps to reduce the harm of packet loss by encoding information about the previous packet loss.</small>
  920. * <small>When not provided, the default browser configuration is used.</small>
  921. * @param {Number} [enableAudio.maxplaybackrate] <blockquote class="info"><b>Deprecation Warning!</b>
  922. * This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.maxplaybackrate</code>
  923. * parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
  924. * <code>options.codecParams.audio.opus.maxplaybackrate</code> is configured, this overrides the
  925. * <code>options.audio.maxplaybackrate</code> setting. Note that this feature might
  926. * not work depending on the browser support and implementation.
 * Note that this parameter should only be used for debugging purposes.</blockquote>
 * The OPUS audio codec maximum output sampling rate in Hz (hertz) that it is capable of receiving
  929. * decoded audio data, to adjust to the hardware limitations and ensure that any sending audio data
  930. * would not encode at a higher sampling rate specified by this.
  931. * <small>This value must be between <code>8000</code> to <code>48000</code>.</small>
  932. * <small>When not provided, the default browser configuration is used.</small>
  933. * @param {Boolean} [enableAudio.echoCancellation=true] <blockquote class="info">
  934. * For Chrome/Opera/IE/Safari/Bowser, the echo cancellation functionality may not work and may produce a terrible
  935. * feedback. It is recommended to use headphones or other microphone devices rather than the device
  936. * in-built microphones.</blockquote> The flag to enable echo cancellation for audio track.
  937. * <small>Note that this will not be toggled for Chrome/Opera case when `mediaSource` value is `["tab","audio"]`.</small>
  938. * @param {String|Array|JSON} [mediaSource=screen] The screensharing media source to select.
  939. * <small>Note that multiple sources are not supported by Firefox as of the time of this release.
  940. * Firefox will use the first item specified in the Array in the event that multiple sources are defined.</small>
  941. * <small>E.g. <code>["screen", "window"]</code>, <code>["tab", "audio"]</code>, <code>"screen"</code> or <code>"tab"</code>
  942. * or <code>{ sourceId: "xxxxx", mediaSource: "screen" }</code>.</small>
  943. * [Rel: Skylink.MEDIA_SOURCE]
  944. * @param {Function} [callback] The callback function fired when request has completed.
  945. * <small>Function parameters signature is <code>function (error, success)</code></small>
  946. * <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
  947. * <code>mediaAccessSuccess</code> event</a> triggering <code>isScreensharing</code> parameter payload value
  948. * as <code>true</code> for request success when User is not in the Room or is in Room without Peers,
  949. * or by the <a href="#event_peerRestart"><code>peerRestart</code> event</a> triggering
  950. * <code>isSelfInitiateRestart</code> parameter payload value as <code>true</code> for all connected Peers
  951. * for request success when User is in Room with Peers.</small>
  952. * @param {Error|String} callback.error The error result in request.
  953. * <small>Defined as <code>null</code> when there are no errors in request</small>
  954. * <small>Object signature is the <code>shareScreen()</code> error when retrieving screensharing Stream.</small>
  955. * @param {MediaStream} callback.success The success result in request.
  956. * <small>Defined as <code>null</code> when there are errors in request</small>
  957. * <small>Object signature is the screensharing Stream object.</small>
  958. * @example
  959. * // Example 1: Share screen with audio
  960. * skylinkDemo.shareScreen(function (error, success) {
  961. * if (error) return;
  962. * attachMediaStream(document.getElementById("my-screen"), success);
  963. * });
  964. *
  965. * // Example 2: Share screen without audio
  966. * skylinkDemo.shareScreen(false, function (error, success) {
  967. * if (error) return;
  968. * attachMediaStream(document.getElementById("my-screen"), success);
  969. * });
  970. *
  971. * // Example 3: Share "window" media source
  972. * skylinkDemo.shareScreen("window", function (error, success) {
  973. * if (error) return;
  974. * attachMediaStream(document.getElementById("my-screen"), success);
  975. * });
  976. *
  977. * // Example 4: Share tab and its audio media source
  978. * skylinkDemo.shareScreen(true, ["tab", "audio"], function (error, success) {
  979. * if (error) return;
  980. * attachMediaStream(document.getElementById("my-screen"), success);
  981. * });
  982. *
  983. * // Example 5: Share "window" and "screen" media source
  984. * skylinkDemo.shareScreen(["window", "screen"], function (error, success) {
  985. * if (error) return;
  986. * attachMediaStream(document.getElementById("my-screen"), success);
  987. * });
  988. *
  989. * // Example 6: Share "window" with specific media source for specific plugin build users.
  990. * skylinkDemo.shareScreen({ mediaSource: "window", sourceId: "xxxxx" }, function (error, success) {
  991. * if (error) return;
  992. * attachMediaStream(document.getElementById("my-screen"), success);
  993. * });
  994. * @trigger <ol class="desc-seq">
  995. * <li>Retrieves screensharing Stream. <ol><li>If retrieval was successful: <ol><li>If browser is Firefox: <ol>
  996. * <li>If there are missing audio or video tracks requested: <ol>
  997. * <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
  998. * <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
  999. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a>
  1000. * triggers parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
  1001. * value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li>
  1002. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  1003. * parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
  1004. * value as <code>false</code>.</li></ol></li><li>Else: <ol>
  1005. * <li>If audio is requested: <small>Chrome, Safari and IE currently doesn't support retrieval of
  1006. * audio track together with screensharing video track.</small> <ol><li>Retrieves audio Stream: <ol>
  1007. * <li>If retrieval was successful: <ol><li>Attempts to attach screensharing Stream video track to audio Stream. <ol>
  1008. * <li>If attachment was successful: <ol><li><a href="#event_mediaAccessSuccess">
  1009. * <code>mediaAccessSuccess</code> event</a> triggers parameter payload <code>isScreensharing</code>
  1010. * value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li><li>Else: <ol>
  1011. * <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
  1012. * <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
  1013. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers parameter payload
  1014. * <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code> value as <code>true</code> and
  1015. * <code>isAudioFallback</code> value as <code>false</code>.</li>
  1016. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  1017. * parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
  1018. * value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1019. * <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
  1020. * <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
  1021. * <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a>
  1022. * triggers parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
  1023. * value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li>
  1024. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
  1025. * parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
  1026. * value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1027. * <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a>
  1028. * triggers parameter payload <code>isScreensharing</code> value as <code>true</code>
  1029. * and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1030. * <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers parameter payload
  1031. * <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code> value as
  1032. * <code>false</code>.</li><li><b>ABORT</b> and return error.</li></ol></li></ol></li><li>If User is in Room: <ol>
  1033. * <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
  1034. * <code>isSelf</code> value as <code>true</code> and <code>stream</code> as <code>shareScreen()</code> Stream.</li>
  1035. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  1036. * <code>isSelf</code> value as <code>true</code>.</li>
  1037. * <li>Checks if MCU is enabled for App Key provided in <a href="#method_init"><code>init()</code> method</a>. <ol>
  1038. * <li>If MCU is enabled: <ol><li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>.
  1039. * <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li><li>Else: <ol>
  1040. * <li>If there are connected Peers in the Room: <ol><li>Invoke <a href="#method_refreshConnection">
  1041. * <code>refreshConnection()</code> method</a>. <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li>
  1042. * </ol></li></ol></li></ol></li></ol></li></ol></li></ol></li></ol>
  1043. * @for Skylink
  1044. * @since 0.6.0
  1045. */
Skylink.prototype.shareScreen = function (enableAudio, mediaSource, callback) {
  var self = this;
  // Parsed audio settings for the screensharing Stream (false = no audio track requested).
  var enableAudioSettings = false;
  // Media sources to request from getUserMedia(), defaulting to the "screen" source.
  var useMediaSource = [self.MEDIA_SOURCE.SCREEN];
  // Specific screen/window source ID to capture (used for plugin-enabled browsers only).
  var useMediaSourceId = null;
  // Returns true if the given value is one of the MEDIA_SOURCE enum values.
  var checkIfSourceExistsFn = function (val) {
    for (var prop in self.MEDIA_SOURCE) {
      if (self.MEDIA_SOURCE.hasOwnProperty(prop) && self.MEDIA_SOURCE[prop] === val) {
        return true;
      }
    }
    return false;
  };

  // --- Overloaded first-argument parsing ---
  // shareScreen("screen") or shareScreen({ sourceId: "xxxx", mediaSource: "xxxxx" })
  if (enableAudio && typeof enableAudio === 'string' ||
    (enableAudio && typeof enableAudio === 'object' && enableAudio.sourceId && enableAudio.mediaSource)) {
    if (checkIfSourceExistsFn(typeof enableAudio === 'object' ? enableAudio.mediaSource : enableAudio)) {
      useMediaSource = [typeof enableAudio === 'object' ? enableAudio.mediaSource : enableAudio];
    }
    useMediaSourceId = typeof enableAudio === 'object' ? enableAudio.sourceId : null;
  // shareScreen(["screen", "window"])
  } else if (Array.isArray(enableAudio)) {
    var enableAudioArr = [];

    for (var i = 0; i < enableAudio.length; i++) {
      if (checkIfSourceExistsFn(enableAudio[i])) {
        enableAudioArr.push(enableAudio[i]);
      }
    }

    // Only override the default when at least one valid source was provided.
    if (enableAudioArr.length > 0) {
      useMediaSource = enableAudioArr;
    }
  // shareScreen({ stereo: true })
  } else if (enableAudio && typeof enableAudio === 'object') {
    if (enableAudio.sourceId && enableAudio.mediaSource) {
      // Already handled by the first branch above; nothing to do here.
    } else {
      enableAudioSettings = {
        usedtx: typeof enableAudio.usedtx === 'boolean' ? enableAudio.usedtx : null,
        useinbandfec: typeof enableAudio.useinbandfec === 'boolean' ? enableAudio.useinbandfec : null,
        stereo: enableAudio.stereo === true,
        echoCancellation: enableAudio.echoCancellation !== false,
        deviceId: enableAudio.deviceId
      };
    }
  // shareScreen(true)
  } else if (enableAudio === true) {
    enableAudioSettings = enableAudio === true ? {
      usedtx: null,
      useinbandfec: null,
      stereo: false,
      echoCancellation: true,
      deviceId: null
    } : false;
  // shareScreen(function () {})
  } else if (typeof enableAudio === 'function') {
    callback = enableAudio;
    enableAudio = false;
  }

  // --- Overloaded second-argument parsing ---
  // shareScreen(.., "screen") or shareScreen({ sourceId: "xxxx", mediaSource: "xxxxx" })
  if (mediaSource && typeof mediaSource === 'string' ||
    (mediaSource && typeof mediaSource === 'object' && mediaSource.sourceId && mediaSource.mediaSource)) {
    if (checkIfSourceExistsFn(typeof mediaSource === 'object' ? mediaSource.mediaSource : mediaSource)) {
      useMediaSource = [typeof mediaSource === 'object' ? mediaSource.mediaSource : mediaSource];
    }
    useMediaSourceId = typeof mediaSource === 'object' ? mediaSource.sourceId : null;
  // shareScreen(.., ["screen", "window"])
  } else if (Array.isArray(mediaSource)) {
    var mediaSourceArr = [];
    for (var i = 0; i < mediaSource.length; i++) {
      if (checkIfSourceExistsFn(mediaSource[i])) {
        mediaSourceArr.push(mediaSource[i]);
      }
    }
    if (mediaSourceArr.length > 0) {
      useMediaSource = mediaSourceArr;
    }
  // shareScreen(.., function () {})
  } else if (typeof mediaSource === 'function') {
    callback = mediaSource;
  }

  // "audio" capture is only valid together with "tab" capture; drop it otherwise.
  if (useMediaSource.indexOf('audio') > -1 && useMediaSource.indexOf('tab') === -1) {
    useMediaSource.splice(useMediaSource.indexOf('audio'), 1);
    if (useMediaSource.length === 0) {
      useMediaSource = [self.MEDIA_SOURCE.SCREEN];
    }
  }

  self._throttle(function (runFn) {
    // runFn is falsy when the throttle interval has not elapsed yet.
    if (!runFn) {
      if (self._initOptions.throttlingShouldThrowError) {
        var throttleLimitError = 'Unable to run as throttle interval has not reached (' + self._initOptions.throttleIntervals.shareScreen + 'ms).';
        log.error(throttleLimitError);

        if (typeof callback === 'function') {
          callback(new Error(throttleLimitError), null);
        }
      }
      return;
    }

    // Internal settings payload: "settings" documents the requested Stream,
    // "getUserMediaSettings" is what is passed to navigator.getUserMedia().
    var settings = {
      settings: {
        audio: enableAudioSettings,
        video: {
          screenshare: true,
          exactConstraints: false
        }
      },
      getUserMediaSettings: {
        audio: false,
        video: {
          mediaSource: useMediaSource
        }
      }
    };

    // Plugin-enabled browsers (IE / Safari) select the screen via an optional constraint.
    if (AdapterJS.webrtcDetectedType === 'plugin' && useMediaSourceId) {
      settings.getUserMediaSettings.video.optional = [{
        screenId: useMediaSourceId
      }];
    }

    var mediaAccessSuccessFn = function (stream) {
      // Unsubscribe the paired error handler so only one of the two ever fires.
      self.off('mediaAccessError', mediaAccessErrorFn);

      if (self._inRoom) {
        self._trigger('incomingStream', self._user.sid, stream, true, self.getPeerInfo(), true, stream.id || stream.label);
        self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);

        // Renegotiate with connected Peers (or the MCU) so they receive the new Stream.
        if (Object.keys(self._peerConnections).length > 0 || self._hasMCU) {
          self._refreshPeerConnection(Object.keys(self._peerConnections), false, {}, function (err, success) {
            if (err) {
              log.error('Failed refreshing connections for shareScreen() ->', err);
              if (typeof callback === 'function') {
                callback(new Error('Failed refreshing connections.'), null);
              }
              return;
            }
            if (typeof callback === 'function') {
              callback(null, stream);
            }
          });
        } else if (typeof callback === 'function') {
          callback(null, stream);
        }
      } else if (typeof callback === 'function') {
        callback(null, stream);
      }
    };

    var mediaAccessErrorFn = function (error) {
      // Unsubscribe the paired success handler so only one of the two ever fires.
      self.off('mediaAccessSuccess', mediaAccessSuccessFn);

      if (typeof callback === 'function') {
        callback(error, null);
      }
    };

    // Only react to the screensharing result (the condition filters on isScreensharing).
    self.once('mediaAccessSuccess', mediaAccessSuccessFn, function (stream, isScreensharing) {
      return isScreensharing;
    });

    self.once('mediaAccessError', mediaAccessErrorFn, function (error, isScreensharing) {
      return isScreensharing;
    });

    var getUserMediaAudioSettings = enableAudioSettings ? {
      echoCancellation: enableAudioSettings.echoCancellation
    } : false;

    try {
      // Tracks whether the captured screen Stream already carries an audio track,
      // so no separate audio getUserMedia() request is needed.
      var hasDefaultAudioTrack = false;
      if (enableAudioSettings) {
        if (AdapterJS.webrtcDetectedBrowser === 'firefox') {
          // Firefox can retrieve screen video and audio in one getUserMedia() call.
          hasDefaultAudioTrack = true;
          settings.getUserMediaSettings.audio = getUserMediaAudioSettings;
        } else if (useMediaSource.indexOf('audio') > -1 && useMediaSource.indexOf('tab') > -1) {
          // Tab capture supplies its own audio track.
          hasDefaultAudioTrack = true;
          settings.getUserMediaSettings.audio = {};
        }
      }

      var onSuccessCbFn = function (stream) {
        if (hasDefaultAudioTrack || !enableAudioSettings) {
          self._onStreamAccessSuccess(stream, settings, true, false);
          return;
        }

        settings.getUserMediaSettings.audio = getUserMediaAudioSettings;

        // Audio was requested but is not part of the screen Stream: retrieve a
        // separate audio Stream and merge the screen's video track into it.
        var onAudioSuccessCbFn = function (audioStream) {
          try {
            audioStream.addTrack(stream.getVideoTracks()[0]);

            // Keep a reference to the original screen Stream so it can be stopped later.
            self.once('mediaAccessSuccess', function () {
              self._streams.screenshare.streamClone = stream;
            }, function (stream, isScreensharing) {
              return isScreensharing;
            });

            self._onStreamAccessSuccess(audioStream, settings, true, false);

          } catch (error) {
            // Fall back to the video-only screen Stream when merging fails.
            log.error('Failed retrieving audio stream for screensharing stream', error);
            self._onStreamAccessSuccess(stream, settings, true, false);
          }
        };

        var onAudioErrorCbFn = function (error) {
          // Fall back to the video-only screen Stream when audio retrieval fails.
          log.error('Failed retrieving audio stream for screensharing stream', error);
          self._onStreamAccessSuccess(stream, settings, true, false);
        };

        navigator.getUserMedia({ audio: getUserMediaAudioSettings }, onAudioSuccessCbFn, onAudioErrorCbFn);
      };

      var onErrorCbFn = function (error) {
        self._onStreamAccessError(error, settings, true, false);
      };

      // Guard against AdapterJS not being loaded yet.
      if (typeof (AdapterJS || {}).webRTCReady !== 'function') {
        return onErrorCbFn(new Error('Failed to call getUserMedia() as AdapterJS is not yet loaded!'));
      }

      AdapterJS.webRTCReady(function () {
        navigator.getUserMedia(settings.getUserMediaSettings, onSuccessCbFn, onErrorCbFn);
      });
    } catch (error) {
      self._onStreamAccessError(error, settings, true, false);
    }
  }, 'shareScreen', self._initOptions.throttleIntervals.shareScreen);
};
  1283.  
  1284. /**
  1285. * <blockquote class="info">
  1286. * Note that broadcasted events from <a href="#method_muteStream"><code>muteStream()</code> method</a>,
  1287. * <a href="#method_stopStream"><code>stopStream()</code> method</a>,
  1288. * <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
  1289. * <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
  1290. * <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
  1291. * <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
  1292. * sent within less than an interval.
  1293. * </blockquote>
  1294. * Function that stops <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>.
  1295. * @method stopScreen
  1296. * @example
  1297. * function stopScreen () {
  1298. * skylinkDemo.stopScreen();
  1299. * }
  1300. *
  1301. * skylinkDemo.shareScreen();
  1302. * @trigger <ol class="desc-seq">
  1303. * <li>Checks if there is <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>. <ol>
  1304. * <li>If there is <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>: <ol>
* <li>Stop the <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>. <ol>
  1306. * <li><a href="#event_mediaAccessStopped"><code>mediaAccessStopped</code> event</a>
  1307. * triggers parameter payload <code>isScreensharing</code> value as <code>true</code> and
  1308. * <code>isAudioFallback</code> value as <code>false</code>.</li><li>If User is in Room: <ol>
  1309. * <li><a href="#event_streamEnded"><code>streamEnded</code> event</a> triggers parameter payload
  1310. * <code>isSelf</code> value as <code>true</code> and <code>isScreensharing</code> value as <code>true</code>.</li>
  1311. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  1312. * <code>isSelf</code> value as <code>true</code>.</li>
  1313. * </ol></li></ol></li><li>If User is in Room: <small><b>SKIP</b> this step if <code>stopScreen()</code>
  1314. * was invoked from <a href="#method_shareScreen"><code>shareScreen()</code> method</a>.</small> <ol>
* <li>If there is a <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>: <ol>
  1316. * <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
  1317. * <code>isSelf</code> value as <code>true</code> and <code>stream</code> as
  1318. * <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.</li>
  1319. * <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
  1320. * <code>isSelf</code> value as <code>true</code>.</li></ol></li>
  1321. * <li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>.</li>
  1322. * </ol></li></ol></li></ol></li></ol>
  1323. * @for Skylink
  1324. * @since 0.6.0
  1325. */
  1326. Skylink.prototype.stopScreen = function () {
  1327. if (this._streams.screenshare) {
  1328. this._stopStreams({
  1329. screenshare: true
  1330. });
  1331.  
  1332. if (this._inRoom) {
  1333. if (this._streams.userMedia && this._streams.userMedia.stream) {
  1334. this._trigger('incomingStream', this._user.sid, this._streams.userMedia.stream, true, this.getPeerInfo(),
  1335. false, this._streams.userMedia.stream.id || this._streams.userMedia.stream.label);
  1336. this._trigger('peerUpdated', this._user.sid, this.getPeerInfo(), true);
  1337. }
  1338. this._refreshPeerConnection(Object.keys(this._peerConnections), {}, false);
  1339. }
  1340. }
  1341. };
  1342.  
  1343. /**
  1344. * Function that returns the camera and microphone sources.
  1345. * @method getStreamSources
  1346. * @param {Function} callback The callback function fired when request has completed.
  1347. * <small>Function parameters signature is <code>function (success)</code></small>
  1348. * @param {JSON} callback.success The success result in request.
  1349. * <small>Object signature is the list of sources.</small>
  1350. * @param {JSON} callback.success.audio The list of audio input (microphone) and output (speakers) sources.
  1351. * @param {Array} callback.success.audio.input The list of audio input (microphone) sources.
  1352. * @param {JSON} callback.success.audio.input.#index The audio input source item.
  1353. * @param {String} callback.success.audio.input.#index.deviceId The audio input source item device ID.
  1354. * @param {String} callback.success.audio.input.#index.label The audio input source item device label name.
  1355. * @param {String} [callback.success.audio.input.#index.groupId] The audio input source item device physical device ID.
  1356. * <small>Note that there can be different <code>deviceId</code> due to differing sources but can share a
  1357. * <code>groupId</code> because it's the same device.</small>
  1358. * @param {Array} callback.success.audio.output The list of audio output (speakers) sources.
  1359. * @param {JSON} callback.success.audio.output.#index The audio output source item.
  1360. * <small>Object signature matches <code>callback.success.audio.input.#index</code> format.</small>
  1361. * @param {JSON} callback.success.video The list of video input (camera) sources.
  1362. * @param {Array} callback.success.video.input The list of video input (camera) sources.
  1363. * @param {JSON} callback.success.video.input.#index The video input source item.
  1364. * <small>Object signature matches <code>callback.success.audio.input.#index</code> format.</small>
  1365. * @example
  1366. * // Example 1: Retrieve the getUserMedia() stream with selected source ID.
  1367. * skylinkDemo.getStreamSources(function (sources) {
  1368. * skylinkDemo.getUserMedia({
  1369. * audio: sources.audio.input[0].deviceId,
  1370. * video: sources.video.input[0].deviceId
  1371. * });
  1372. * });
  1373. *
  1374. * // Example 2: Set the output audio speaker (Chrome 49+ supported only)
  1375. * skylinkDemo.getStreamSources(function (sources) {
  1376. * var videoElement = document.getElementById('video');
  1377. * if (videoElement && typeof videoElement.setSinkId === 'function') {
  1378. * videoElement.setSinkId(sources.audio.output[0].deviceId)
  1379. * }
  1380. * });
  1381. * @for Skylink
  1382. * @since 0.6.27
  1383. */
  1384. Skylink.prototype.getStreamSources = function(callback) {
  1385. var outputSources = {
  1386. audio: {
  1387. input: [],
  1388. output: []
  1389. },
  1390. video: {
  1391. input: []
  1392. }
  1393. };
  1394.  
  1395. if (typeof callback !== 'function') {
  1396. return log.error('Please provide the callback.');
  1397. }
  1398.  
  1399. var sourcesListFn = function (sources) {
  1400. sources.forEach(function (sourceItem) {
  1401. var item = {
  1402. deviceId: sourceItem.deviceId || sourceItem.sourceId || 'default',
  1403. label: sourceItem.label,
  1404. groupId: sourceItem.groupId || null
  1405. };
  1406.  
  1407. item.label = item.label || 'Source for ' + item.deviceId;
  1408.  
  1409. if (['audio', 'audioinput'].indexOf(sourceItem.kind) > -1) {
  1410. outputSources.audio.input.push(item);
  1411. } else if (['video', 'videoinput'].indexOf(sourceItem.kind) > -1) {
  1412. outputSources.video.input.push(item);
  1413. } else if (sourceItem.kind === 'audiooutput') {
  1414. outputSources.audio.output.push(item);
  1415. }
  1416. });
  1417.  
  1418. callback(outputSources);
  1419. };
  1420.  
  1421. if (navigator.mediaDevices && typeof navigator.mediaDevices.enumerateDevices === 'function') {
  1422. navigator.mediaDevices.enumerateDevices().then(sourcesListFn);
  1423. } else if (window.MediaStreamTrack && typeof MediaStreamTrack.getSources === 'function') {
  1424. MediaStreamTrack.getSources(sourcesListFn);
  1425. } else if (typeof navigator.getUserMedia === 'function') {
  1426. sourcesListFn([
  1427. { deviceId: 'default', kind: 'audioinput', label: 'Default Audio Track' },
  1428. { deviceId: 'default', kind: 'videoinput', label: 'Default Video Track' }
  1429. ]);
  1430. } else {
  1431. sourcesListFn([]);
  1432. }
  1433. };
  1434.  
  1435. /**
  1436. * Function that returns the screensharing sources.
  1437. * @method getScreenSources
  1438. * @param {Function} callback The callback function fired when request has completed.
  1439. * <small>Function parameters signature is <code>function (success)</code></small>
* @param {JSON} callback.success The success result in request.
* <small>Object signature is the list of screensharing media sources and screen sources.</small>
  1443. * @param {Array} callback.success.mediaSource The array of screensharing media sources.
  1444. * @param {String} callback.success.mediaSource.#index The screensharing media source item.
  1445. * [Rel: Skylink.MEDIA_SOURCE]
  1446. * @param {Array} callback.success.mediaSourceInput The list of specific media source screen inputs.
  1447. * @param {JSON} callback.success.mediaSourceInput.#index The media source screen input item.
  1448. * @param {String} callback.success.mediaSourceInput.#index.sourceId The screen input item ID.
  1449. * @param {String} callback.success.mediaSourceInput.#index.label The screen input item label name.
  1450. * @param {String} callback.success.mediaSourceInput.#index.mediaSource The screen input item media source it belongs to.
  1451. * [Rel: Skylink.MEDIA_SOURCE]
  1452. * @example
  1453. * // Example 1: Retrieve the list of available shareScreen() sources.
  1454. * skylinkDemo.getScreenSources(function (sources) {
  1455. * skylinkDemo.shareScreen(sources.mediaSource[0] || null);
  1456. * });
  1457. *
  1458. * // Example 2: Retrieve the list of available shareScreen() sources with a specific item.
  1459. * skylinkDemo.getScreenSources(function (sources) {
  1460. * if (sources.mediaSourceInput[0]) {
  1461. * skylinkDemo.shareScreen({
* mediaSource: sources.mediaSourceInput[0].mediaSource,
* sourceId: sources.mediaSourceInput[0].sourceId
  1464. * });
  1465. * } else {
  1466. * skylinkDemo.shareScreen();
  1467. * }
  1468. * });
  1469. * @for Skylink
  1470. * @since 0.6.27
  1471. */
  1472. Skylink.prototype.getScreenSources = function(callback) {
  1473. var outputSources = {
  1474. mediaSource: [],
  1475. mediaSourceInput: []
  1476. };
  1477.  
  1478. if (typeof callback !== 'function') {
  1479. return log.error('Please provide the callback.');
  1480. }
  1481.  
  1482. // For chrome android 59+ has screensharing support behind chrome://flags (needs to be enabled by user)
  1483. // Reference: https://bugs.chromium.org/p/chromium/issues/detail?id=487935
  1484. if (navigator.userAgent.toLowerCase().indexOf('android') > -1) {
  1485. if (AdapterJS.webrtcDetectedBrowser === 'chrome' && AdapterJS.webrtcDetectedVersion >= 59) {
  1486. outputSources.mediaSource = ['screen'];
  1487. }
  1488. callback(outputSources);
  1489. return;
  1490. }
  1491.  
  1492. // IE / Safari (plugin) needs commerical screensharing enabled
  1493. if (AdapterJS.webrtcDetectedType === 'plugin') {
  1494. AdapterJS.webRTCReady(function () {
  1495. // IE / Safari (plugin) is not available or do not support screensharing
  1496. if (AdapterJS.WebRTCPlugin.plugin && AdapterJS.WebRTCPlugin.plugin.isScreensharingAvailable &&
  1497. AdapterJS.WebRTCPlugin.plugin.HasScreensharingFeature) {
  1498. outputSources.mediaSource = ['window', 'screen'];
  1499.  
  1500. // Do not provide the error callback as well or it will throw NPError.
  1501. if (typeof AdapterJS.WebRTCPlugin.plugin.getScreensharingSources === 'function') {
  1502. AdapterJS.WebRTCPlugin.plugin.getScreensharingSources(function (sources) {
  1503. sources.forEach(sources, function (sourceItem) {
  1504. var item = {
  1505. sourceId: sourceItem.id || sourceItem.sourceId || 'default',
  1506. label: sourceItem.label,
  1507. mediaSource: sourceItem.kind || 'screen'
  1508. };
  1509.  
  1510. item.label = item.label || 'Source for ' + item.sourceId;
  1511. outputSources.mediaSourceInput.push(item);
  1512. });
  1513.  
  1514. callback(outputSources);
  1515. });
  1516. return;
  1517. }
  1518. }
  1519. callback(outputSources);
  1520. });
  1521. return;
  1522.  
  1523. // Chrome 34+ and Opera 21(?)+ supports screensharing
  1524. // Firefox 38(?)+ supports screensharing
  1525. } else if ((AdapterJS.webrtcDetectedBrowser === 'chrome' && AdapterJS.webrtcDetectedVersion >= 34) ||
  1526. (AdapterJS.webrtcDetectedBrowser === 'firefox' && AdapterJS.webrtcDetectedVersion >= 38) ||
  1527. (AdapterJS.webrtcDetectedBrowser === 'opera' && AdapterJS.webrtcDetectedVersion >= 21)) {
  1528. // Just warn users for those who did not configure the Opera screensharing extension settings, it will not work!
  1529. if (AdapterJS.webrtcDetectedBrowser === 'opera' && !(AdapterJS.extensionInfo &&
  1530. AdapterJS.extensionInfo.opera && AdapterJS.extensionInfo.opera.extensionId)) {
  1531. log.warn('Please ensure that your application allows Opera screensharing!');
  1532. }
  1533.  
  1534. outputSources.mediaSource = ['window', 'screen'];
  1535.  
  1536. // Chrome 52+ and Opera 39+ supports tab and audio
  1537. // Reference: https://developer.chrome.com/extensions/desktopCapture
  1538. if ((AdapterJS.webrtcDetectedBrowser === 'chrome' && AdapterJS.webrtcDetectedVersion >= 52) ||
  1539. (AdapterJS.webrtcDetectedBrowser === 'opera' && AdapterJS.webrtcDetectedVersion >= 39)) {
  1540. outputSources.mediaSource.push('tab', 'audio');
  1541.  
  1542. // Firefox supports some other sources
  1543. // Reference: http://fluffy.github.io/w3c-screen-share/#screen-based-video-constraints
  1544. // https://bugzilla.mozilla.org/show_bug.cgi?id=1313758
  1545. // https://bugzilla.mozilla.org/show_bug.cgi?id=1037405
  1546. // https://bugzilla.mozilla.org/show_bug.cgi?id=1313758
  1547. } else if (AdapterJS.webrtcDetectedBrowser === 'firefox') {
  1548. outputSources.mediaSource.push('browser', 'camera', 'application');
  1549. }
  1550. }
  1551.  
  1552. callback(outputSources);
  1553. };
  1554.  
  1555. /**
  1556. * Function that handles the muting of Stream audio and video tracks.
  1557. * @method _muteStreams
  1558. * @private
  1559. * @for Skylink
  1560. * @since 0.6.15
  1561. */
  1562. Skylink.prototype._muteStreams = function () {
  1563. var self = this;
  1564. var hasVideo = false;
  1565. var hasAudio = false;
  1566.  
  1567. var muteFn = function (stream) {
  1568. var audioTracks = stream.getAudioTracks();
  1569. var videoTracks = stream.getVideoTracks();
  1570.  
  1571. for (var a = 0; a < audioTracks.length; a++) {
  1572. audioTracks[a].enabled = !self._streamsMutedSettings.audioMuted;
  1573. hasAudio = true;
  1574. }
  1575.  
  1576. for (var v = 0; v < videoTracks.length; v++) {
  1577. videoTracks[v].enabled = !self._streamsMutedSettings.videoMuted;
  1578. hasVideo = true;
  1579. }
  1580. };
  1581.  
  1582. if (self._streams.userMedia && self._streams.userMedia.stream) {
  1583. muteFn(self._streams.userMedia.stream);
  1584. }
  1585.  
  1586. if (self._streams.screenshare && self._streams.screenshare.stream) {
  1587. muteFn(self._streams.screenshare.stream);
  1588. }
  1589.  
  1590. if (self._streams.screenshare && self._streams.screenshare.streamClone) {
  1591. muteFn(self._streams.screenshare.streamClone);
  1592. }
  1593.  
  1594. if (AdapterJS.webrtcDetectedBrowser === 'edge') {
  1595. for (var peerId in self._peerConnections) {
  1596. if (self._peerConnections.hasOwnProperty(peerId) && self._peerConnections[peerId]) {
  1597. var localStreams = self._peerConnections[peerId].getLocalStreams();
  1598. for (var s = 0; s < localStreams.length; s++) {
  1599. muteFn(localStreams[s]);
  1600. }
  1601. }
  1602. }
  1603. }
  1604.  
  1605. log.debug('Updated Streams muted status ->', self._streamsMutedSettings);
  1606.  
  1607. return {
  1608. hasVideo: hasVideo,
  1609. hasAudio: hasAudio
  1610. };
  1611. };
  1612.  
  1613. /**
  1614. * Function that handles stopping the Stream streaming.
  1615. * @method _stopStreams
  1616. * @private
  1617. * @for Skylink
  1618. * @since 0.6.15
  1619. */
  1620. Skylink.prototype._stopStreams = function (options) {
  1621. var self = this;
  1622. var stopFn = function (stream) {
  1623. var streamId = stream.id || stream.label;
  1624. log.debug([null, 'MediaStream', streamId, 'Stopping Stream ->'], stream);
  1625.  
  1626. try {
  1627. var audioTracks = stream.getAudioTracks();
  1628. var videoTracks = stream.getVideoTracks();
  1629.  
  1630. for (var a = 0; a < audioTracks.length; a++) {
  1631. audioTracks[a].stop();
  1632. }
  1633.  
  1634. for (var v = 0; v < videoTracks.length; v++) {
  1635. videoTracks[v].stop();
  1636. }
  1637.  
  1638. } catch (error) {
  1639. stream.stop();
  1640. }
  1641.  
  1642. if (self._streamsStoppedCbs[streamId]) {
  1643. self._streamsStoppedCbs[streamId]();
  1644. delete self._streamsStoppedCbs[streamId];
  1645. }
  1646. };
  1647.  
  1648. var stopUserMedia = false;
  1649. var stopScreenshare = false;
  1650. var hasStoppedMedia = false;
  1651.  
  1652. if (typeof options === 'object') {
  1653. stopUserMedia = options.userMedia === true;
  1654. stopScreenshare = options.screenshare === true;
  1655. }
  1656.  
  1657. if (stopUserMedia && self._streams.userMedia) {
  1658. if (self._streams.userMedia.stream) {
  1659. stopFn(self._streams.userMedia.stream);
  1660. }
  1661.  
  1662. self._streams.userMedia = null;
  1663. hasStoppedMedia = true;
  1664. }
  1665.  
  1666. if (stopScreenshare && self._streams.screenshare) {
  1667. if (self._streams.screenshare.streamClone) {
  1668. stopFn(self._streams.screenshare.streamClone);
  1669. }
  1670.  
  1671. if (self._streams.screenshare.stream) {
  1672. stopFn(self._streams.screenshare.stream);
  1673. }
  1674.  
  1675. self._streams.screenshare = null;
  1676. hasStoppedMedia = true;
  1677. }
  1678.  
  1679. if (self._inRoom && hasStoppedMedia) {
  1680. self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
  1681. }
  1682.  
  1683. log.log('Stopping Streams with settings ->', options);
  1684. };
  1685.  
  1686. /**
  1687. * Function that parses the <code>getUserMedia()</code> settings provided.
  1688. * @method _parseStreamSettings
  1689. * @private
  1690. * @for Skylink
  1691. * @since 0.6.15
  1692. */
  1693. Skylink.prototype._parseStreamSettings = function(options) {
  1694. var settings = {
  1695. settings: { audio: false, video: false },
  1696. mutedSettings: { shouldAudioMuted: false, shouldVideoMuted: false },
  1697. getUserMediaSettings: { audio: false, video: false }
  1698. };
  1699.  
  1700. if (options.audio) {
  1701. // For Edge to work since they do not support the advanced constraints yet
  1702. settings.settings.audio = {
  1703. stereo: false,
  1704. exactConstraints: !!options.useExactConstraints,
  1705. echoCancellation: true
  1706. };
  1707. settings.getUserMediaSettings.audio = {
  1708. echoCancellation: true
  1709. };
  1710.  
  1711. if (typeof options.audio === 'object') {
  1712. if (typeof options.audio.stereo === 'boolean') {
  1713. settings.settings.audio.stereo = options.audio.stereo;
  1714. }
  1715.  
  1716. if (typeof options.audio.useinbandfec === 'boolean') {
  1717. settings.settings.audio.useinbandfec = options.audio.useinbandfec;
  1718. }
  1719.  
  1720. if (typeof options.audio.usedtx === 'boolean') {
  1721. settings.settings.audio.usedtx = options.audio.usedtx;
  1722. }
  1723.  
  1724. if (typeof options.audio.maxplaybackrate === 'number' &&
  1725. options.audio.maxplaybackrate >= 8000 && options.audio.maxplaybackrate <= 48000) {
  1726. settings.settings.audio.maxplaybackrate = options.audio.maxplaybackrate;
  1727. }
  1728.  
  1729. if (typeof options.audio.mute === 'boolean') {
  1730. settings.mutedSettings.shouldAudioMuted = options.audio.mute;
  1731. }
  1732.  
  1733. // Not supported in Edge browser features
  1734. if (AdapterJS.webrtcDetectedBrowser !== 'edge') {
  1735. if (typeof options.audio.echoCancellation === 'boolean') {
  1736. settings.settings.audio.echoCancellation = options.audio.echoCancellation;
  1737. settings.getUserMediaSettings.audio.echoCancellation = options.audio.echoCancellation;
  1738. }
  1739.  
  1740. if (Array.isArray(options.audio.optional)) {
  1741. settings.settings.audio.optional = clone(options.audio.optional);
  1742. settings.getUserMediaSettings.audio.optional = clone(options.audio.optional);
  1743. }
  1744.  
  1745. if (options.audio.deviceId && typeof options.audio.deviceId === 'string' &&
  1746. AdapterJS.webrtcDetectedBrowser !== 'firefox') {
  1747. settings.settings.audio.deviceId = options.audio.deviceId;
  1748. settings.getUserMediaSettings.audio.deviceId = options.useExactConstraints ?
  1749. { exact: options.audio.deviceId } : { ideal: options.audio.deviceId };
  1750. }
  1751. }
  1752. }
  1753.  
  1754. if (AdapterJS.webrtcDetectedBrowser === 'edge') {
  1755. settings.getUserMediaSettings.audio = true;
  1756. }
  1757. }
  1758.  
  1759. if (options.video) {
  1760. // For Edge to work since they do not support the advanced constraints yet
  1761. settings.settings.video = {
  1762. resolution: clone(this.VIDEO_RESOLUTION.VGA),
  1763. screenshare: false,
  1764. exactConstraints: !!options.useExactConstraints
  1765. };
  1766. settings.getUserMediaSettings.video = {};
  1767.  
  1768. if (typeof options.video === 'object') {
  1769. if (typeof options.video.mute === 'boolean') {
  1770. settings.mutedSettings.shouldVideoMuted = options.video.mute;
  1771. }
  1772.  
  1773. if (Array.isArray(options.video.optional)) {
  1774. settings.settings.video.optional = clone(options.video.optional);
  1775. settings.getUserMediaSettings.video.optional = clone(options.video.optional);
  1776. }
  1777.  
  1778. if (options.video.deviceId && typeof options.video.deviceId === 'string' &&
  1779. AdapterJS.webrtcDetectedBrowser !== 'firefox') {
  1780. settings.settings.video.deviceId = options.video.deviceId;
  1781. settings.getUserMediaSettings.video.deviceId = options.useExactConstraints ?
  1782. { exact: options.video.deviceId } : { ideal: options.video.deviceId };
  1783. }
  1784.  
  1785. if (options.video.resolution && typeof options.video.resolution === 'object') {
  1786. if ((options.video.resolution.width && typeof options.video.resolution.width === 'object') ||
  1787. typeof options.video.resolution.width === 'number') {
  1788. settings.settings.video.resolution.width = options.video.resolution.width;
  1789. }
  1790. if ((options.video.resolution.height && typeof options.video.resolution.height === 'object') ||
  1791. typeof options.video.resolution.height === 'number') {
  1792. settings.settings.video.resolution.height = options.video.resolution.height;
  1793. }
  1794. }
  1795.  
  1796. settings.getUserMediaSettings.video.width = typeof settings.settings.video.resolution.width === 'object' ?
  1797. settings.settings.video.resolution.width : (options.useExactConstraints ?
  1798. { exact: settings.settings.video.resolution.width } : { max: settings.settings.video.resolution.width });
  1799.  
  1800. settings.getUserMediaSettings.video.height = typeof settings.settings.video.resolution.height === 'object' ?
  1801. settings.settings.video.resolution.height : (options.useExactConstraints ?
  1802. { exact: settings.settings.video.resolution.height } : { max: settings.settings.video.resolution.height });
  1803.  
  1804. if ((options.video.frameRate && typeof options.video.frameRate === 'object') ||
  1805. typeof options.video.frameRate === 'number' && AdapterJS.webrtcDetectedType !== 'plugin') {
  1806. settings.settings.video.frameRate = options.video.frameRate;
  1807. settings.getUserMediaSettings.video.frameRate = typeof settings.settings.video.frameRate === 'object' ?
  1808. settings.settings.video.frameRate : (options.useExactConstraints ?
  1809. { exact: settings.settings.video.frameRate } : { max: settings.settings.video.frameRate });
  1810. }
  1811.  
  1812. if (options.video.facingMode && ['string', 'object'].indexOf(typeof options.video.facingMode) > -1 && AdapterJS.webrtcDetectedType === 'plugin') {
  1813. settings.settings.video.facingMode = options.video.facingMode;
  1814. settings.getUserMediaSettings.video.facingMode = typeof settings.settings.video.facingMode === 'object' ?
  1815. settings.settings.video.facingMode : (options.useExactConstraints ?
  1816. { exact: settings.settings.video.facingMode } : { max: settings.settings.video.facingMode });
  1817. }
  1818. } else {
  1819. settings.getUserMediaSettings.video = {
  1820. width: options.useExactConstraints ? { exact: settings.settings.video.resolution.width } :
  1821. { max: settings.settings.video.resolution.width },
  1822. height: options.useExactConstraints ? { exact: settings.settings.video.resolution.height } :
  1823. { max: settings.settings.video.resolution.height }
  1824. };
  1825. }
  1826.  
  1827. if (AdapterJS.webrtcDetectedBrowser === 'edge') {
  1828. settings.settings.video = {
  1829. screenshare: false,
  1830. exactConstraints: !!options.useExactConstraints
  1831. };
  1832. settings.getUserMediaSettings.video = true;
  1833. }
  1834. }
  1835.  
  1836. return settings;
  1837. };
  1838.  
  1839. /**
  1840. * Function that handles the native <code>navigator.getUserMedia()</code> API success callback result.
  1841. * @method _onStreamAccessSuccess
  1842. * @private
  1843. * @for Skylink
  1844. * @since 0.3.0
  1845. */
  1846. Skylink.prototype._onStreamAccessSuccess = function(stream, settings, isScreenSharing, isAudioFallback) {
  1847. var self = this;
  1848. var streamId = stream.id || stream.label;
  1849. var streamHasEnded = false;
  1850.  
  1851. log.log([null, 'MediaStream', streamId, 'Has access to stream ->'], stream);
  1852.  
  1853. // Stop previous stream
  1854. if (!isScreenSharing && self._streams.userMedia) {
  1855. self._stopStreams({
  1856. userMedia: true,
  1857. screenshare: false
  1858. });
  1859.  
  1860. } else if (isScreenSharing && self._streams.screenshare) {
  1861. self._stopStreams({
  1862. userMedia: false,
  1863. screenshare: true
  1864. });
  1865. }
  1866.  
  1867. self._streamsStoppedCbs[streamId] = function () {
  1868. log.log([null, 'MediaStream', streamId, 'Stream has ended']);
  1869. streamHasEnded = true;
  1870. self._trigger('mediaAccessStopped', !!isScreenSharing, !!isAudioFallback, streamId);
  1871.  
  1872. if (self._inRoom) {
  1873. log.debug([null, 'MediaStream', streamId, 'Sending Stream ended status to Peers']);
  1874.  
  1875. self._sendChannelMessage({
  1876. type: self._SIG_MESSAGE_TYPE.STREAM,
  1877. mid: self._user.sid,
  1878. rid: self._room.id,
  1879. cid: self._key,
  1880. streamId: streamId,
  1881. settings: settings.settings,
  1882. status: 'ended'
  1883. });
  1884.  
  1885. self._trigger('streamEnded', self._user.sid, self.getPeerInfo(), true, !!isScreenSharing, streamId);
  1886.  
  1887. if (isScreenSharing && self._streams.screenshare && self._streams.screenshare.stream &&
  1888. (self._streams.screenshare.stream.id || self._streams.screenshare.stream.label) === streamId) {
  1889. self._streams.screenshare = null;
  1890.  
  1891. } else if (!isScreenSharing && self._streams.userMedia && self._streams.userMedia.stream &&
  1892. (self._streams.userMedia.stream.id || self._streams.userMedia.stream.label) === streamId) {
  1893. self._streams.userMedia = null;
  1894. }
  1895. }
  1896. };
  1897.  
  1898. // Handle event for Chrome / Opera
  1899. if (['chrome', 'opera'].indexOf(AdapterJS.webrtcDetectedBrowser) > -1) {
  1900. stream.oninactive = function () {
  1901. if (self._streamsStoppedCbs[streamId]) {
  1902. self._streamsStoppedCbs[streamId]();
  1903. delete self._streamsStoppedCbs[streamId];
  1904. }
  1905. };
  1906.  
  1907. if (isScreenSharing && stream.getVideoTracks().length > 0) {
  1908. stream.getVideoTracks()[0].onended = function () {
  1909. setTimeout(function () {
  1910. if (!streamHasEnded && self._inRoom) {
  1911. self.stopScreen();
  1912. }
  1913. }, 350);
  1914. };
  1915. }
  1916.  
  1917. // Handle event for Firefox (use an interval)
  1918. } else if (AdapterJS.webrtcDetectedBrowser === 'firefox') {
  1919. stream.endedInterval = setInterval(function () {
  1920. if (typeof stream.recordedTime === 'undefined') {
  1921. stream.recordedTime = 0;
  1922. }
  1923. if (stream.recordedTime === stream.currentTime) {
  1924. clearInterval(stream.endedInterval);
  1925.  
  1926. if (self._streamsStoppedCbs[streamId]) {
  1927. self._streamsStoppedCbs[streamId]();
  1928. delete self._streamsStoppedCbs[streamId];
  1929. }
  1930.  
  1931. } else {
  1932. stream.recordedTime = stream.currentTime;
  1933. }
  1934. }, 1000);
  1935.  
  1936. } else {
  1937. stream.onended = function () {
  1938. if (self._streamsStoppedCbs[streamId]) {
  1939. self._streamsStoppedCbs[streamId]();
  1940. delete self._streamsStoppedCbs[streamId];
  1941. }
  1942. };
  1943. }
  1944.  
  1945. if ((settings.settings.audio && stream.getAudioTracks().length === 0) ||
  1946. (settings.settings.video && stream.getVideoTracks().length === 0)) {
  1947.  
  1948. var tracksNotSameError = 'Expected audio tracks length with ' +
  1949. (settings.settings.audio ? '1' : '0') + ' and video tracks length with ' +
  1950. (settings.settings.video ? '1' : '0') + ' but received audio tracks length ' +
  1951. 'with ' + stream.getAudioTracks().length + ' and video ' +
  1952. 'tracks length with ' + stream.getVideoTracks().length;
  1953.  
  1954. log.warn([null, 'MediaStream', streamId, tracksNotSameError]);
  1955.  
  1956. var requireAudio = !!settings.settings.audio;
  1957. var requireVideo = !!settings.settings.video;
  1958.  
  1959. if (settings.settings.audio && stream.getAudioTracks().length === 0) {
  1960. settings.settings.audio = false;
  1961. }
  1962.  
  1963. if (settings.settings.video && stream.getVideoTracks().length === 0) {
  1964. settings.settings.video = false;
  1965. }
  1966.  
  1967. self._trigger('mediaAccessFallback', {
  1968. error: new Error(tracksNotSameError),
  1969. diff: {
  1970. video: { expected: requireVideo ? 1 : 0, received: stream.getVideoTracks().length },
  1971. audio: { expected: requireAudio ? 1 : 0, received: stream.getAudioTracks().length }
  1972. }
  1973. }, self.MEDIA_ACCESS_FALLBACK_STATE.FALLBACKED, !!isScreenSharing, !!isAudioFallback, streamId);
  1974. }
  1975.  
  1976. self._streams[ isScreenSharing ? 'screenshare' : 'userMedia' ] = {
  1977. stream: stream,
  1978. settings: settings.settings,
  1979. constraints: settings.getUserMediaSettings
  1980. };
  1981. self._muteStreams();
  1982. self._trigger('mediaAccessSuccess', stream, !!isScreenSharing, !!isAudioFallback, streamId);
  1983. };
  1984.  
  1985. /**
  1986. * Function that handles the native <code>navigator.getUserMedia()</code> API failure callback result.
  1987. * @method _onStreamAccessError
  1988. * @private
  1989. * @for Skylink
  1990. * @since 0.6.15
  1991. */
  1992. Skylink.prototype._onStreamAccessError = function(error, settings, isScreenSharing) {
  1993. var self = this;
  1994.  
  1995. if (!isScreenSharing && settings.settings.audio && settings.settings.video && self._initOptions.audioFallback) {
  1996. log.debug('Fallbacking to retrieve audio only Stream');
  1997.  
  1998. self._trigger('mediaAccessFallback', {
  1999. error: error,
  2000. diff: null
  2001. }, self.MEDIA_ACCESS_FALLBACK_STATE.FALLBACKING, false, true);
  2002.  
  2003. var onAudioSuccessCbFn = function (stream) {
  2004. self._onStreamAccessSuccess(stream, settings, false, true);
  2005. };
  2006.  
  2007. var onAudioErrorCbFn = function (error) {
  2008. log.error('Failed fallbacking to retrieve audio only Stream ->', error);
  2009.  
  2010. self._trigger('mediaAccessError', error, false, true);
  2011. self._trigger('mediaAccessFallback', {
  2012. error: error,
  2013. diff: null
  2014. }, self.MEDIA_ACCESS_FALLBACK_STATE.ERROR, false, true);
  2015. };
  2016.  
  2017. navigator.getUserMedia({ audio: true }, onAudioSuccessCbFn, onAudioErrorCbFn);
  2018. return;
  2019. }
  2020.  
  2021. log.error('Failed retrieving ' + (isScreenSharing ? 'screensharing' : 'camera') + ' Stream ->', error);
  2022.  
  2023. self._trigger('mediaAccessError', error, !!isScreenSharing, false);
  2024. };
  2025.  
  2026. /**
  2027. * Function that handles the <code>RTCPeerConnection.onaddstream</code> remote MediaStream received.
  2028. * @method _onRemoteStreamAdded
  2029. * @private
  2030. * @for Skylink
  2031. * @since 0.5.2
  2032. */
  2033. Skylink.prototype._onRemoteStreamAdded = function(targetMid, stream, isScreenSharing) {
  2034. var self = this;
  2035. var streamId = (self._peerConnections[targetMid] && self._peerConnections[targetMid].remoteStreamId) || stream.id || stream.label;
  2036.  
  2037. if (!self._peerInformations[targetMid]) {
  2038. log.warn([targetMid, 'MediaStream', streamId, 'Received remote stream when peer is not connected. Ignoring stream ->'], stream);
  2039. return;
  2040. }
  2041.  
  2042. /*if (!self._peerInformations[targetMid].settings.audio &&
  2043. !self._peerInformations[targetMid].settings.video && !isScreenSharing) {
  2044. log.log([targetMid, 'MediaStream', stream.id,
  2045. 'Receive remote stream but ignoring stream as it is empty ->'
  2046. ], stream);
  2047. return;
  2048. }*/
  2049. log.log([targetMid, 'MediaStream', streamId, 'Received remote stream ->'], stream);
  2050.  
  2051. if (isScreenSharing) {
  2052. log.log([targetMid, 'MediaStream', streamId, 'Peer is having a screensharing session with user']);
  2053. }
  2054.  
  2055. self._trigger('incomingStream', targetMid, stream, false, self.getPeerInfo(targetMid), isScreenSharing, streamId);
  2056. self._trigger('peerUpdated', targetMid, self.getPeerInfo(targetMid), false);
  2057. };
  2058.  
  2059.  
  2060. /**
  2061. * Function that sets User's Stream to send to Peer connection.
  2062. * Priority for <code>shareScreen()</code> Stream over <code>getUserMedia()</code> Stream.
  2063. * @method _addLocalMediaStreams
  2064. * @private
  2065. * @for Skylink
  2066. * @since 0.5.2
  2067. */
  2068. Skylink.prototype._addLocalMediaStreams = function(peerId) {
  2069. var self = this;
  2070.  
  2071. // NOTE ALEX: here we could do something smarter
  2072. // a mediastream is mainly a container, most of the info
  2073. // are attached to the tracks. We should iterates over track and print
  2074. try {
  2075. log.log([peerId, null, null, 'Adding local stream']);
  2076.  
  2077. var pc = self._peerConnections[peerId];
  2078.  
  2079. if (pc) {
  2080. var offerToReceiveAudio = !(!self._sdpSettings.connection.audio && peerId !== 'MCU') &&
  2081. self._getSDPCommonSupports(peerId, pc.remoteDescription).video;
  2082. var offerToReceiveVideo = !(!self._sdpSettings.connection.video && peerId !== 'MCU') &&
  2083. self._getSDPCommonSupports(peerId, pc.remoteDescription).audio;
  2084.  
  2085. if (pc.signalingState !== self.PEER_CONNECTION_STATE.CLOSED) {
  2086. // Updates the streams accordingly
  2087. var updateStreamFn = function (updatedStream) {
  2088. if (updatedStream ? (pc.localStreamId ? updatedStream.id !== pc.localStreamId : true) : true) {
  2089. if (AdapterJS.webrtcDetectedBrowser === 'edge' && !(self._initOptions.useEdgeWebRTC && window.msRTCPeerConnection)) {
  2090. pc.getSenders().forEach(function (sender) {
  2091. pc.removeTrack(sender);
  2092. });
  2093. } else {
  2094. pc.getLocalStreams().forEach(function (stream) {
  2095. pc.removeStream(stream);
  2096. });
  2097. }
  2098.  
  2099. if (!offerToReceiveAudio && !offerToReceiveVideo) {
  2100. return;
  2101. }
  2102.  
  2103. if (updatedStream) {
  2104. if (AdapterJS.webrtcDetectedBrowser === 'edge' && !(self._initOptions.useEdgeWebRTC && window.msRTCPeerConnection)) {
  2105. updatedStream.getTracks().forEach(function (track) {
  2106. if ((track.kind === 'audio' && !offerToReceiveAudio) || (track.kind === 'video' && !offerToReceiveVideo)) {
  2107. return;
  2108. }
  2109. pc.addTrack(track, updatedStream);
  2110. });
  2111. } else {
  2112. pc.addStream(updatedStream);
  2113. }
  2114.  
  2115. pc.localStreamId = updatedStream.id || updatedStream.label;
  2116. pc.localStream = updatedStream;
  2117. }
  2118. }
  2119. };
  2120.  
  2121. if (self._streams.screenshare && self._streams.screenshare.stream) {
  2122. log.debug([peerId, 'MediaStream', null, 'Sending screen'], self._streams.screenshare.stream);
  2123.  
  2124. updateStreamFn(self._streams.screenshare.stream);
  2125.  
  2126. } else if (self._streams.userMedia && self._streams.userMedia.stream) {
  2127. log.debug([peerId, 'MediaStream', null, 'Sending stream'], self._streams.userMedia.stream);
  2128.  
  2129. updateStreamFn(self._streams.userMedia.stream);
  2130.  
  2131. } else {
  2132. log.warn([peerId, 'MediaStream', null, 'No media to send. Will be only receiving']);
  2133.  
  2134. updateStreamFn(null);
  2135. }
  2136.  
  2137. } else {
  2138. log.warn([peerId, 'MediaStream', null,
  2139. 'Not adding any stream as signalingState is closed']);
  2140. }
  2141. } else {
  2142. log.warn([peerId, 'MediaStream', self._mediaStream,
  2143. 'Not adding stream as peerconnection object does not exists']);
  2144. }
  2145. } catch (error) {
  2146. if ((error.message || '').indexOf('already added') > -1) {
  2147. log.warn([peerId, null, null, 'Not re-adding stream as LocalMediaStream is already added'], error);
  2148. } else {
  2149. // Fix errors thrown like NS_ERROR_UNEXPECTED
  2150. log.error([peerId, null, null, 'Failed adding local stream'], error);
  2151. }
  2152. }
  2153. };
  2154.  
  2155. /**
  2156. * Function that handles ended streams.
  2157. * @method _handleEndedStreams
  2158. * @private
  2159. * @for Skylink
  2160. * @since 0.6.16
  2161. */
  2162. Skylink.prototype._handleEndedStreams = function (peerId, checkStreamId) {
  2163. var self = this;
  2164. self._streamsSession[peerId] = self._streamsSession[peerId] || {};
  2165.  
  2166. var renderEndedFn = function (streamId) {
  2167. if (self._streamsSession[peerId][streamId]) {
  2168. var peerInfo = clone(self.getPeerInfo(peerId));
  2169. peerInfo.settings.audio = clone(self._streamsSession[peerId][streamId].audio);
  2170. peerInfo.settings.video = clone(self._streamsSession[peerId][streamId].video);
  2171. var hasScreenshare = peerInfo.settings.video && typeof peerInfo.settings.video === 'object' &&
  2172. !!peerInfo.settings.video.screenshare;
  2173. self._streamsSession[peerId][streamId] = false;
  2174. self._trigger('streamEnded', peerId, peerInfo, false, hasScreenshare, streamId);
  2175. }
  2176. };
  2177.  
  2178. if (checkStreamId) {
  2179. renderEndedFn(checkStreamId);
  2180. } else if (self._peerConnections[peerId]) {
  2181. for (var streamId in self._streamsSession[peerId]) {
  2182. if (self._streamsSession[peerId].hasOwnProperty(streamId) && self._streamsSession[peerId][streamId]) {
  2183. renderEndedFn(streamId);
  2184. }
  2185. }
  2186. }
  2187. };