File: source/stream-media.js

                      /**
                       * <blockquote class="info">
                       *   Note that if the video codec is not supported, the SDK will not configure the local <code>"offer"</code> or
                       *   <code>"answer"</code> session description to prefer the codec.
                       * </blockquote>
 * The list of available video codecs that can be set as the preferred video codec for encoding
 * video data sent over Peer connections, when the codec is available, as configured in the
 * <a href="#method_init"><code>init()</code> method</a>.
                       * @attribute VIDEO_CODEC
                       * @param {String} AUTO <small>Value <code>"auto"</code></small>
                       *   The value of the option to not prefer any video codec but rather use the created
                       *   local <code>"offer"</code> / <code>"answer"</code> session description video codec preference.
                       * @param {String} VP8  <small>Value <code>"VP8"</code></small>
                       *   The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/VP8">VP8</a> video codec.
                       * @param {String} VP9  <small>Value <code>"VP9"</code></small>
                       *   The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/VP9">VP9</a> video codec.
                       * @param {String} H264 <small>Value <code>"H264"</code></small>
                       *   The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/H.264/MPEG-4_AVC">H264</a> video codec.
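 * @example
 *   // A minimal sketch of configuring the preferred video codec, assuming the
 *   // corresponding init() option is named "videoCodec" (by analogy with the
 *   // "audioCodec" option used in the getUserMedia() example further below).
 *   // The App Key value is a placeholder.
 *   skylinkDemo.init({
 *     appKey: "xxxxxx",
 *     videoCodec: skylinkDemo.VIDEO_CODEC.VP9
 *   }, function (initErr, initSuccess) {
 *     if (initErr) return;
 *     // The local "offer" / "answer" will now prefer VP9 when supported.
 *   });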
                       * @type JSON
                       * @readOnly
                       * @for Skylink
                       * @since 0.5.10
                       */
                      Skylink.prototype.VIDEO_CODEC = {
                        AUTO: 'auto',
                        VP8: 'VP8',
                        H264: 'H264',
                        VP9: 'VP9'
                        //H264UC: 'H264UC'
                      };
                      
                      /**
                       * <blockquote class="info">
                       *   Note that if the audio codec is not supported, the SDK will not configure the local <code>"offer"</code> or
                       *   <code>"answer"</code> session description to prefer the codec.
                       * </blockquote>
 * The list of available audio codecs that can be set as the preferred audio codec for encoding
 * audio data sent over Peer connections, when the codec is available, as configured in the
 * <a href="#method_init"><code>init()</code> method</a>.
                       * @attribute AUDIO_CODEC
                       * @param {String} AUTO <small>Value <code>"auto"</code></small>
                       *   The value of the option to not prefer any audio codec but rather use the created
                       *   local <code>"offer"</code> / <code>"answer"</code> session description audio codec preference.
                       * @param {String} OPUS <small>Value <code>"opus"</code></small>
                       *   The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/Opus_(audio_format)">OPUS</a> audio codec.
                       * @param {String} ISAC <small>Value <code>"ISAC"</code></small>
                       *   The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/Internet_Speech_Audio_Codec">ISAC</a> audio codec.
                       * @param {String} ILBC <small>Value <code>"ILBC"</code></small>
                       *   The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/Internet_Low_Bitrate_Codec">iLBC</a> audio codec.
                       * @param {String} G722 <small>Value <code>"G722"</code></small>
                       *   The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/G.722">G722</a> audio codec.
 * @param {String} PCMA <small>Value <code>"PCMA"</code></small>
 *   The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/G.711">G711a (A-law)</a> audio codec.
 * @param {String} PCMU <small>Value <code>"PCMU"</code></small>
 *   The value of the option to prefer the <a href="https://en.wikipedia.org/wiki/G.711">G711u (µ-law)</a> audio codec.
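 * @example
 *   // A minimal sketch of configuring the preferred audio codec via the init()
 *   // "audioCodec" option (the same option used in the getUserMedia() example
 *   // further below). The App Key value is a placeholder.
 *   skylinkDemo.init({
 *     appKey: "xxxxxx",
 *     audioCodec: skylinkDemo.AUDIO_CODEC.OPUS
 *   }, function (initErr, initSuccess) {
 *     if (initErr) return;
 *     // The local "offer" / "answer" will now prefer OPUS when supported.
 *   });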
                       * @type JSON
                       * @readOnly
                       * @for Skylink
                       * @since 0.5.10
                       */
                      Skylink.prototype.AUDIO_CODEC = {
                        AUTO: 'auto',
                        ISAC: 'ISAC',
                        OPUS: 'opus',
                        ILBC: 'ILBC',
                        G722: 'G722',
                        PCMU: 'PCMU',
  PCMA: 'PCMA'
                        //SILK: 'SILK'
                      };
                      
                      /**
                       * <blockquote class="info">
 *   Note that currently the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> only configures
 *   the maximum resolution of the Stream due to browser interoperability and support.
                       * </blockquote>
 * The list of <a href="https://en.wikipedia.org/wiki/Graphics_display_resolution#Video_Graphics_Array">
 * video resolution</a> presets that can be configured in the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>.
                       * @attribute VIDEO_RESOLUTION
                       * @param {JSON} QQVGA <small>Value <code>{ width: 160, height: 120 }</code></small>
                       *   The value of the option to configure QQVGA resolution.
                       *   <small>Aspect ratio: <code>4:3</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} HQVGA <small>Value <code>{ width: 240, height: 160 }</code></small>
                       *   The value of the option to configure HQVGA resolution.
                       *   <small>Aspect ratio: <code>3:2</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} QVGA <small>Value <code>{ width: 320, height: 240 }</code></small>
                       *   The value of the option to configure QVGA resolution.
                       *   <small>Aspect ratio: <code>4:3</code></small>
                       * @param {JSON} WQVGA <small>Value <code>{ width: 384, height: 240 }</code></small>
                       *   The value of the option to configure WQVGA resolution.
                       *   <small>Aspect ratio: <code>16:10</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} HVGA <small>Value <code>{ width: 480, height: 320 }</code></small>
                       *   The value of the option to configure HVGA resolution.
                       *   <small>Aspect ratio: <code>3:2</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} VGA <small>Value <code>{ width: 640, height: 480 }</code></small>
                       *   The value of the option to configure VGA resolution.
                       *   <small>Aspect ratio: <code>4:3</code></small>
                       * @param {JSON} WVGA <small>Value <code>{ width: 768, height: 480 }</code></small>
                       *   The value of the option to configure WVGA resolution.
                       *   <small>Aspect ratio: <code>16:10</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} FWVGA <small>Value <code>{ width: 854, height: 480 }</code></small>
                       *   The value of the option to configure FWVGA resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} SVGA <small>Value <code>{ width: 800, height: 600 }</code></small>
                       *   The value of the option to configure SVGA resolution.
                       *   <small>Aspect ratio: <code>4:3</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} DVGA <small>Value <code>{ width: 960, height: 640 }</code></small>
                       *   The value of the option to configure DVGA resolution.
                       *   <small>Aspect ratio: <code>3:2</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} WSVGA <small>Value <code>{ width: 1024, height: 576 }</code></small>
                       *   The value of the option to configure WSVGA resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
                       * @param {JSON} HD <small>Value <code>{ width: 1280, height: 720 }</code></small>
                       *   The value of the option to configure HD resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on device support.</small>
                       * @param {JSON} HDPLUS <small>Value <code>{ width: 1600, height: 900 }</code></small>
                       *   The value of the option to configure HDPLUS resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} FHD <small>Value <code>{ width: 1920, height: 1080 }</code></small>
                       *   The value of the option to configure FHD resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on device support.</small>
                       * @param {JSON} QHD <small>Value <code>{ width: 2560, height: 1440 }</code></small>
                       *   The value of the option to configure QHD resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} WQXGAPLUS <small>Value <code>{ width: 3200, height: 1800 }</code></small>
                       *   The value of the option to configure WQXGAPLUS resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} UHD <small>Value <code>{ width: 3840, height: 2160 }</code></small>
                       *   The value of the option to configure UHD resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} UHDPLUS <small>Value <code>{ width: 5120, height: 2880 }</code></small>
                       *   The value of the option to configure UHDPLUS resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} FUHD <small>Value <code>{ width: 7680, height: 4320 }</code></small>
                       *   The value of the option to configure FUHD resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
                       * @param {JSON} QUHD <small>Value <code>{ width: 15360, height: 8640 }</code></small>
                       *   The value of the option to configure QUHD resolution.
                       *   <small>Aspect ratio: <code>16:9</code></small>
 *   <small>Note that configuring this resolution may not be supported depending on browser and device support.</small>
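 * @example
 *   // A minimal sketch of requesting a resolution preset, mirroring Example 3
 *   // of the getUserMedia() method below. Note that only the maximum
 *   // resolution is configured, per the note above.
 *   skylinkDemo.getUserMedia({
 *     audio: true,
 *     video: {
 *       resolution: skylinkDemo.VIDEO_RESOLUTION.HD
 *     }
 *   }, function (error, success) {
 *     if (error) return;
 *     attachMediaStream(document.getElementById("my-video"), success);
 *   });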
                       * @type JSON
                       * @readOnly
                       * @for Skylink
                       * @since 0.5.6
                       */
                      Skylink.prototype.VIDEO_RESOLUTION = {
                        QQVGA: { width: 160, height: 120 /*, aspectRatio: '4:3'*/ },
                        HQVGA: { width: 240, height: 160 /*, aspectRatio: '3:2'*/ },
                        QVGA: { width: 320, height: 240 /*, aspectRatio: '4:3'*/ },
                        WQVGA: { width: 384, height: 240 /*, aspectRatio: '16:10'*/ },
                        HVGA: { width: 480, height: 320 /*, aspectRatio: '3:2'*/ },
                        VGA: { width: 640, height: 480 /*, aspectRatio: '4:3'*/ },
                        WVGA: { width: 768, height: 480 /*, aspectRatio: '16:10'*/ },
                        FWVGA: { width: 854, height: 480 /*, aspectRatio: '16:9'*/ },
                        SVGA: { width: 800, height: 600 /*, aspectRatio: '4:3'*/ },
                        DVGA: { width: 960, height: 640 /*, aspectRatio: '3:2'*/ },
                        WSVGA: { width: 1024, height: 576 /*, aspectRatio: '16:9'*/ },
                        HD: { width: 1280, height: 720 /*, aspectRatio: '16:9'*/ },
                        HDPLUS: { width: 1600, height: 900 /*, aspectRatio: '16:9'*/ },
                        FHD: { width: 1920, height: 1080 /*, aspectRatio: '16:9'*/ },
                        QHD: { width: 2560, height: 1440 /*, aspectRatio: '16:9'*/ },
                        WQXGAPLUS: { width: 3200, height: 1800 /*, aspectRatio: '16:9'*/ },
                        UHD: { width: 3840, height: 2160 /*, aspectRatio: '16:9'*/ },
                        UHDPLUS: { width: 5120, height: 2880 /*, aspectRatio: '16:9'*/ },
                        FUHD: { width: 7680, height: 4320 /*, aspectRatio: '16:9'*/ },
                        QUHD: { width: 15360, height: 8640 /*, aspectRatio: '16:9'*/ }
                      };
                      
                      /**
                       * The list of <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> or
                       * <a href="#method_shareScreen"><code>shareScreen()</code> method</a> Stream fallback states.
                       * @attribute MEDIA_ACCESS_FALLBACK_STATE
 * @param {Number} FALLBACKING <small>Value <code>0</code></small>
 *   The value of the state when <code>getUserMedia()</code> will retrieve the audio track only
 *   after retrieving both audio and video tracks failed.
 *   <small>This can be configured by the <a href="#method_init"><code>init()</code> method</a>
 *   <code>audioFallback</code> option.</small>
 * @param {Number} FALLBACKED  <small>Value <code>1</code></small>
 *   The value of the state when <code>getUserMedia()</code> or <code>shareScreen()</code>
 *   retrieves the camera / screensharing Stream successfully but with the originally required audio or video tracks missing.
 * @param {Number} ERROR       <small>Value <code>-1</code></small>
 *   The value of the state when <code>getUserMedia()</code> failed to retrieve the audio track only
 *   after retrieving both audio and video tracks failed.
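 * @example
 *   // A minimal sketch of tracking audio-fallback states, assuming the
 *   // "mediaAccessFallback" event delivers (error, state, isScreensharing,
 *   // isAudioFallback) as suggested by the payloads documented in the
 *   // getUserMedia() method below; the exact parameter order is an assumption.
 *   skylinkDemo.on("mediaAccessFallback", function (error, state, isScreensharing, isAudioFallback) {
 *     if (state === skylinkDemo.MEDIA_ACCESS_FALLBACK_STATE.FALLBACKING) {
 *       console.log("Retrying with audio track only...");
 *     } else if (state === skylinkDemo.MEDIA_ACCESS_FALLBACK_STATE.ERROR) {
 *       console.error("Audio-only fallback failed", error);
 *     }
 *   });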
 * @type JSON
 * @readOnly
                       * @for Skylink
                       * @since 0.6.14
                       */
                      Skylink.prototype.MEDIA_ACCESS_FALLBACK_STATE = {
                        FALLBACKING: 0,
                        FALLBACKED: 1,
                        ERROR: -1
                      };
                      
                      /**
                       * The list of recording states.
                       * @attribute RECORDING_STATE
                       * @param {Number} START <small>Value <code>0</code></small>
                       *   The value of the state when recording session has started.
                       * @param {Number} STOP <small>Value <code>1</code></small>
 *   The value of the state when recording session has stopped.<br>
 *   <small>At this stage, the recorded videos will go through the mixing server to compile the videos.</small>
 * @param {Number} LINK <small>Value <code>2</code></small>
 *   The value of the state when recording session mixing request has been completed.
 * @param {Number} ERROR <small>Value <code>-1</code></small>
 *   The value of the state when recording session has errors.
 *   <small>This can happen during recording session or during mixing of recording videos,
 *   and at this stage, any current recording session or mixing is aborted.</small>
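 * @example
 *   // A minimal sketch of observing recording states, assuming a
 *   // "recordingState" event that delivers the state as its first parameter;
 *   // the event name and signature are assumptions.
 *   skylinkDemo.on("recordingState", function (state) {
 *     if (state === skylinkDemo.RECORDING_STATE.LINK) {
 *       console.log("Mixing completed and the recording link is ready.");
 *     } else if (state === skylinkDemo.RECORDING_STATE.ERROR) {
 *       console.error("Recording session or mixing was aborted.");
 *     }
 *   });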
                       * @type JSON
                       * @beta
                       * @for Skylink
                       * @since 0.6.16
                       */
                      Skylink.prototype.RECORDING_STATE = {
                        START: 0,
                        STOP: 1,
                        LINK: 2,
                        ERROR: -1
                      };
                      
                      /**
                       * <blockquote class="info">
 *   For a better user experience, the functionality is throttled when invoked many times within
 *   the milliseconds interval configured in the <a href="#method_init"><code>init()</code> method</a>.
                       * </blockquote>
                       * Function that retrieves camera Stream.
                       * @method getUserMedia
                       * @param {JSON} [options] The camera Stream configuration options.
                       * - When not provided, the value is set to <code>{ audio: true, video: true }</code>.
                       *   <small>To fallback to retrieve audio track only when retrieving of audio and video tracks failed,
                       *   enable the <code>audioFallback</code> flag in the <a href="#method_init"><code>init()</code> method</a>.</small>
 * @param {Boolean} [options.useExactConstraints=false] <blockquote class="info">
 *   Note that by enabling this flag, exact values will be requested when retrieving camera Stream,
 *   but it does not prevent constraints related errors. By default when not enabled,
 *   expected mandatory maximum values (or optional values for source ID) will be requested to prevent constraints related
 *   errors, with an exception for the <code>options.video.frameRate</code> option in Safari and IE (any plugin-enabled) browsers,
 *   where the expected maximum value will not be requested due to the lack of support.</blockquote>
                       *   The flag if <code>getUserMedia()</code> should request for camera Stream to match exact requested values of
                       *   <code>options.audio.deviceId</code> and <code>options.video.deviceId</code>, <code>options.video.resolution</code>
                       *   and <code>options.video.frameRate</code> when provided.
 * @param {Boolean|JSON} [options.audio=false] <blockquote class="info">
 *    Note that the current Edge browser implementation does not support the <code>options.audio.optional</code>,
 *    <code>options.audio.deviceId</code> and <code>options.audio.echoCancellation</code> options.</blockquote>
 *    The audio configuration options.
                       * @param {Boolean} [options.audio.stereo=false] <blockquote class="info"><b>Deprecation Warning!</b>
                       *   This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.stereo</code>
                       *   parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
                       *   <code>options.codecParams.audio.opus.stereo</code> is configured, this overrides the
                       *   <code>options.audio.stereo</code> setting.</blockquote>
                       *   The flag if OPUS audio codec stereo band should be configured for sending encoded audio data.
                       *   <small>When not provided, the default browser configuration is used.</small>
 * @param {Boolean} [options.audio.usedtx] <blockquote class="info"><b>Deprecation Warning!</b>
 *   This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.usedtx</code>
 *   parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
 *   <code>options.codecParams.audio.opus.usedtx</code> is configured, this overrides the
 *   <code>options.audio.usedtx</code> setting. Note that this feature might
 *   not work depending on the browser support and implementation.</blockquote>
                       *   The flag if OPUS audio codec should enable DTX (Discontinuous Transmission) for sending encoded audio data.
                       *   <small>This might help to reduce bandwidth as it reduces the bitrate during silence or background noise, and
                       *   goes hand-in-hand with the <code>options.voiceActivityDetection</code> flag in <a href="#method_joinRoom">
                       *   <code>joinRoom()</code> method</a>.</small>
                       *   <small>When not provided, the default browser configuration is used.</small>
                       * @param {Boolean} [options.audio.useinbandfec] <blockquote class="info"><b>Deprecation Warning!</b>
                       *   This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.useinbandfec</code>
                       *   parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
                       *   <code>options.codecParams.audio.opus.useinbandfec</code> is configured, this overrides the
 *   <code>options.audio.useinbandfec</code> setting. Note that this parameter should be used
 *   for debugging purposes only.</blockquote>
                       *   The flag if OPUS audio codec has the capability to take advantage of the in-band FEC
                       *   (Forward Error Correction) when sending encoded audio data.
                       *   <small>This helps to reduce the harm of packet loss by encoding information about the previous packet loss.</small>
                       *   <small>When not provided, the default browser configuration is used.</small>
                       * @param {Number} [options.audio.maxplaybackrate] <blockquote class="info"><b>Deprecation Warning!</b>
                       *   This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.maxplaybackrate</code>
                       *   parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
                       *   <code>options.codecParams.audio.opus.maxplaybackrate</code> is configured, this overrides the
 *   <code>options.audio.maxplaybackrate</code> setting. Note that this feature might
 *   not work depending on the browser support and implementation.
 *   Note that this parameter should be used for debugging purposes only.</blockquote>
 *   The OPUS audio codec maximum output sampling rate in Hz (hertz) that it is capable of receiving
 *   decoded audio data at, to adjust to hardware limitations and ensure that any sending audio data
 *   would not be encoded at a sampling rate higher than this.
 *   <small>This value must be between <code>8000</code> and <code>48000</code>.</small>
                       *   <small>When not provided, the default browser configuration is used.</small>
                       * @param {Boolean} [options.audio.mute=false] The flag if audio tracks should be muted upon receiving them.
                       *   <small>Providing the value as <code>false</code> does nothing to <code>peerInfo.mediaStatus.audioMuted</code>,
                       *   but when provided as <code>true</code>, this sets the <code>peerInfo.mediaStatus.audioMuted</code> value to
                       *   <code>true</code> and mutes any existing <a href="#method_shareScreen">
                       *   <code>shareScreen()</code> Stream</a> audio tracks as well.</small>
                       * @param {Array} [options.audio.optional] <blockquote class="info">
                       *   Note that this may result in constraints related error when <code>options.useExactConstraints</code> value is
                       *   <code>true</code>. If you are looking to set the requested source ID of the audio track,
                       *   use <code>options.audio.deviceId</code> instead.</blockquote>
                       *   The <code>navigator.getUserMedia()</code> API <code>audio: { optional [..] }</code> property.
                       * @param {String} [options.audio.deviceId] <blockquote class="info">
                       *   Note this is currently not supported in Firefox browsers.
                       *   </blockquote> The audio track source ID of the device to use.
 *   <small>The list of available audio source IDs can be retrieved by the <a href="https://developer.
                       * mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices"><code>navigator.mediaDevices.enumerateDevices</code>
                       *   API</a>.</small>
 * @param {Boolean} [options.audio.echoCancellation=true] <blockquote class="info">
 *   For Chrome/Opera/IE/Safari/Bowser, the echo cancellation functionality may not work and may produce terrible
 *   feedback. It is recommended to use headphones or other microphone devices rather than the device's
 *   in-built microphone.</blockquote> The flag to enable echo cancellation for the audio track.
 * @param {Boolean|JSON} [options.video=false] <blockquote class="info">
 *    Note that the current Edge browser implementation does not support the <code>options.video.optional</code>,
 *    <code>options.video.deviceId</code>, <code>options.video.resolution</code>,
 *    <code>options.video.frameRate</code> and <code>options.video.facingMode</code> options.</blockquote>
 *   The video configuration options.
                       * @param {Boolean} [options.video.mute=false] The flag if video tracks should be muted upon receiving them.
                       *   <small>Providing the value as <code>false</code> does nothing to <code>peerInfo.mediaStatus.videoMuted</code>,
                       *   but when provided as <code>true</code>, this sets the <code>peerInfo.mediaStatus.videoMuted</code> value to
                       *   <code>true</code> and mutes any existing <a href="#method_shareScreen">
                       *   <code>shareScreen()</code> Stream</a> video tracks as well.</small>
                       * @param {JSON} [options.video.resolution] The video resolution.
                       *   <small>By default, <a href="#attr_VIDEO_RESOLUTION"><code>VGA</code></a> resolution option
                       *   is selected when not provided.</small>
                       *   [Rel: Skylink.VIDEO_RESOLUTION]
                       * @param {Number|JSON} [options.video.resolution.width] The video resolution width.
                       * - When provided as a number, it is the video resolution width.
                       * - When provided as a JSON, it is the <code>navigator.mediaDevices.getUserMedia()</code> <code>.width</code> settings.
                       *   Parameters are <code>"ideal"</code> for ideal resolution width, <code>"exact"</code> for exact video resolution width,
                       *   <code>"min"</code> for min video resolution width and <code>"max"</code> for max video resolution width.
 *   Note that this may result in constraints related errors depending on browser/hardware support.
                       * @param {Number|JSON} [options.video.resolution.height] The video resolution height.
                       * - When provided as a number, it is the video resolution height.
                       * - When provided as a JSON, it is the <code>navigator.mediaDevices.getUserMedia()</code> <code>.height</code> settings.
                       *   Parameters are <code>"ideal"</code> for ideal video resolution height, <code>"exact"</code> for exact video resolution height,
                       *   <code>"min"</code> for min video resolution height and <code>"max"</code> for max video resolution height.
 *   Note that this may result in constraints related errors depending on browser/hardware support.
                       * @param {Number|JSON} [options.video.frameRate] The video <a href="https://en.wikipedia.org/wiki/Frame_rate">
                       *   frameRate</a> per second (fps).
                       * - When provided as a number, it is the video framerate.
                       * - When provided as a JSON, it is the <code>navigator.mediaDevices.getUserMedia()</code> <code>.frameRate</code> settings.
                       *   Parameters are <code>"ideal"</code> for ideal video framerate, <code>"exact"</code> for exact video framerate,
                       *   <code>"min"</code> for min video framerate and <code>"max"</code> for max video framerate.
 *   Note that this may result in constraints related errors depending on browser/hardware support.
                       * @param {Array} [options.video.optional] <blockquote class="info">
                       *   Note that this may result in constraints related error when <code>options.useExactConstraints</code> value is
                       *   <code>true</code>. If you are looking to set the requested source ID of the video track,
                       *   use <code>options.video.deviceId</code> instead.</blockquote>
                       *   The <code>navigator.getUserMedia()</code> API <code>video: { optional [..] }</code> property.
                       * @param {String} [options.video.deviceId] <blockquote class="info">
                       *   Note this is currently not supported in Firefox browsers.
                       *   </blockquote> The video track source ID of the device to use.
 *   <small>The list of available video source IDs can be retrieved by the <a href="https://developer.
                       * mozilla.org/en-US/docs/Web/API/MediaDevices/enumerateDevices"><code>navigator.mediaDevices.enumerateDevices</code>
                       *   API</a>.</small>
 * @param {String|JSON} [options.video.facingMode] The video camera facing mode.
 *   <small>The list of available video camera facing modes is described in the <a href="https://developer.mozilla.org
 *   /en-US/docs/Web/API/MediaTrackConstraints/facingMode">MediaTrackConstraints <code>facingMode</code> API</a>.</small>
                       * @param {Function} [callback] The callback function fired when request has completed.
                       *   <small>Function parameters signature is <code>function (error, success)</code></small>
                       *   <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
                       *   <code>mediaAccessSuccess</code> event</a> triggering <code>isScreensharing</code> parameter
                       *   payload value as <code>false</code> for request success.</small>
                       * @param {Error|String} callback.error The error result in request.
                       *   <small>Defined as <code>null</code> when there are no errors in request</small>
                       *   <small>Object signature is the <code>getUserMedia()</code> error when retrieving camera Stream.</small>
                       * @param {MediaStream} callback.success The success result in request.
                       *   <small>Defined as <code>null</code> when there are errors in request</small>
                       *   <small>Object signature is the camera Stream object.</small>
                       * @example
                       *   // Example 1: Get both audio and video.
                       *   skylinkDemo.getUserMedia(function (error, success) {
                       *     if (error) return;
                       *     attachMediaStream(document.getElementById("my-video"), success);
                       *   });
                       *
                       *   // Example 2: Get only audio.
                       *   skylinkDemo.getUserMedia({
                       *     audio: true
                       *   }, function (error, success) {
                       *     if (error) return;
                       *     attachMediaStream(document.getElementById("my-audio"), success);
                       *   });
                       *
                       *   // Example 3: Configure resolution for video
                       *   skylinkDemo.getUserMedia({
                       *     audio: true,
                       *     video: {
                       *       resolution: skylinkDemo.VIDEO_RESOLUTION.HD
                       *     }
                       *   }, function (error, success) {
                       *     if (error) return;
                       *     attachMediaStream(document.getElementById("my-video"), success);
                       *   });
                       *
                       *   // Example 4: Configure stereo flag for OPUS codec audio (OPUS is always used by default)
                       *   skylinkDemo.init({
                       *     appKey: "xxxxxx",
                       *     audioCodec: skylinkDemo.AUDIO_CODEC.OPUS
                       *   }, function (initErr, initSuccess) {
                       *     skylinkDemo.getUserMedia({
                       *       audio: {
                       *         stereo: true
                       *       },
                       *       video: true
                       *     }, function (error, success) {
                       *       if (error) return;
                       *       attachMediaStream(document.getElementById("my-video"), success);
                       *     });
                       *   });
                       *
                       *   // Example 5: Configure frameRate for video
                       *   skylinkDemo.getUserMedia({
                       *     audio: true,
                       *     video: {
                       *       frameRate: 50
                       *     }
                       *   }, function (error, success) {
                       *     if (error) return;
                       *     attachMediaStream(document.getElementById("my-video"), success);
                       *   });
                       *
                       *   // Example 6: Configure video and audio based on selected sources. Does not work for Firefox currently.
                       *   var sources = { audio: [], video: [] };
                       *
                       *   function selectStream (audioSourceId, videoSourceId) {
                       *     if (window.webrtcDetectedBrowser === 'firefox') {
                       *       console.warn("Currently this feature is not supported by Firefox browsers!");
                       *       return;
                       *     }
                       *     skylinkDemo.getUserMedia({
                       *       audio: {
                       *         optional: [{ sourceId: audioSourceId }]
                       *       },
                       *       video: {
                       *         optional: [{ sourceId: videoSourceId }]
                       *       }
                       *     }, function (error, success) {
                       *       if (error) return;
                       *       attachMediaStream(document.getElementById("my-video"), success);
                       *     });
                       *   }
                       *
                       *   navigator.mediaDevices.enumerateDevices().then(function(devices) {
                       *     var selectedAudioSourceId = "";
                       *     var selectedVideoSourceId = "";
                       *     devices.forEach(function(device) {
                       *       console.log(device.kind + ": " + device.label + " source ID = " + device.deviceId);
 *       if (device.kind === "audioinput") {
 *         selectedAudioSourceId = device.deviceId;
 *       } else if (device.kind === "videoinput") {
 *         selectedVideoSourceId = device.deviceId;
 *       }
                       *     });
                       *     selectStream(selectedAudioSourceId, selectedVideoSourceId);
                       *   }).catch(function (error) {
                       *      console.error("Failed", error);
                       *   });
                       * @trigger <ol class="desc-seq">
                       *   <li>If <code>options.audio</code> value is <code>false</code> and <code>options.video</code>
                       *   value is <code>false</code>: <ol><li><b>ABORT</b> and return error.</li></ol></li>
 *   <li>Retrieve camera Stream. <ol><li>If retrieval was successful: <ol>
                       *   <li>If there is any previous <code>getUserMedia()</code> Stream: <ol>
                       *   <li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a>.</li></ol></li>
                       *   <li>If there are missing audio or video tracks requested: <ol>
                       *   <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers parameter payload
                       *   <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code> value as <code>false</code> and
                       *   <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li>
                       *   <li>Mutes / Unmutes audio and video tracks based on current muted settings in <code>peerInfo.mediaStatus</code>.
                       *   <small>This can be retrieved with <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
                       *   <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers parameter payload
                       *   <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallback</code>
                       *   value as <code>false</code>.</li></ol></li><li>Else: <ol>
                       *   <li>If <code>options.audioFallback</code> is enabled in the <a href="#method_init"><code>init()</code> method</a>,
                       *   <code>options.audio</code> value is <code>true</code> and <code>options.video</code> value is <code>true</code>: <ol>
                       *   <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> event triggers
                       *   parameter payload <code>state</code> as <code>FALLBACKING</code>, <code>isScreensharing</code>
                       *   value as <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
                       *   <li>Retrieve camera Stream with audio tracks only. <ol><li>If retrieval was successful: <ol>
                       *   <li>If there is any previous <code>getUserMedia()</code> Stream: <ol>
                       *   <li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a>.</li></ol></li>
                       *   <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> event triggers
                       *   parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
                       *   value as <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
                       *   <li>Mutes / Unmutes audio and video tracks based on current muted settings in <code>peerInfo.mediaStatus</code>.
                       *   <small>This can be retrieved with <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
                       *   <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
                       *   parameter payload <code>isScreensharing</code> value as <code>false</code> and
                       *   <code>isAudioFallback</code> value as <code>true</code>.</li></ol></li><li>Else: <ol>
                       *   <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers
                       *   parameter payload <code>isScreensharing</code> value as <code>false</code> and
                       *   <code>isAudioFallbackError</code> value as <code>true</code>.</li>
                       *   <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> event triggers
                       *   parameter payload <code>state</code> as <code>ERROR</code>, <code>isScreensharing</code> value as
                       *   <code>false</code> and <code>isAudioFallback</code> value as <code>true</code>.</li>
                       *   <li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li><li>Else: <ol>
                       *   <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers parameter payload
                       *   <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallbackError</code> value as
                       *   <code>false</code>.</li><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li></ol></li></ol>
                       * @for Skylink
                       * @since 0.5.6
                       */
Skylink.prototype.getUserMedia = function(options, callback) {
                        var self = this;
                      
                        if (typeof options === 'function'){
                          callback = options;
                          options = {
                            audio: true,
                            video: true
                          };
                      
                        } else if (typeof options !== 'object' || options === null) {
                          if (typeof options === 'undefined') {
                            options = {
                              audio: true,
                              video: true
                            };
                      
                          } else {
                            var invalidOptionsError = 'Please provide a valid options';
                            log.error(invalidOptionsError, options);
                            if (typeof callback === 'function') {
                              callback(new Error(invalidOptionsError), null);
                            }
                            return;
                          }
                      
                        } else if (!options.audio && !options.video) {
                          var noConstraintOptionsSelectedError = 'Please select audio or video';
                          log.error(noConstraintOptionsSelectedError, options);
                          if (typeof callback === 'function') {
                            callback(new Error(noConstraintOptionsSelectedError), null);
                          }
                          return;
                        }
                      
                        /*if (window.location.protocol !== 'https:' && window.webrtcDetectedBrowser === 'chrome' &&
                          window.webrtcDetectedVersion > 46) {
                          errorMsg = 'getUserMedia() has to be called in https:// application';
                          log.error(errorMsg, options);
                          if (typeof callback === 'function') {
                            callback(new Error(errorMsg), null);
                          }
                          return;
                        }*/
                      
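  // Throttle repeated invocations using the interval configured in the init()
  // method (tracked under the "getUserMedia" key); runFn is false when the
  // interval has not yet elapsed.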
                        self._throttle(function (runFn) {
                          if (!runFn) {
                            if (self._throttlingShouldThrowError) {
                              var throttleLimitError = 'Unable to run as throttle interval has not reached (' + self._throttlingTimeouts.getUserMedia + 'ms).';
                              log.error(throttleLimitError);
                      
                              if (typeof callback === 'function') {
                                callback(new Error(throttleLimitError), null);
                              }
                            }
                            return;
                          }
                      
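    // Wire one-shot listeners so the provided callback fires exactly once for
    // the camera (non-screensharing) request, removing the counterpart
    // listener whichever outcome occurs first.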
                          if (typeof callback === 'function') {
                            var mediaAccessSuccessFn = function (stream) {
                              self.off('mediaAccessError', mediaAccessErrorFn);
                              callback(null, stream);
                            };
                            var mediaAccessErrorFn = function (error) {
                              self.off('mediaAccessSuccess', mediaAccessSuccessFn);
                              callback(error, null);
                            };
                      
                            self.once('mediaAccessSuccess', mediaAccessSuccessFn, function (stream, isScreensharing) {
                              return !isScreensharing;
                            });
                      
                            self.once('mediaAccessError', mediaAccessErrorFn, function (error, isScreensharing) {
                              return !isScreensharing;
                            });
                          }
                      
                          // Parse stream settings
                          var settings = self._parseStreamSettings(options);
                      
                          navigator.getUserMedia(settings.getUserMediaSettings, function (stream) {
                            if (settings.mutedSettings.shouldAudioMuted) {
                              self._streamsMutedSettings.audioMuted = true;
                            }
                      
                            if (settings.mutedSettings.shouldVideoMuted) {
                              self._streamsMutedSettings.videoMuted = true;
                            }
                      
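      // Hand the Stream to the internal success handler; the two boolean flags
      // presumably map to isScreensharing and isAudioFallback (both false here).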
                            self._onStreamAccessSuccess(stream, settings, false, false);
                      
                          }, function (error) {
                            self._onStreamAccessError(error, settings, false, false);
                          });
                        }, 'getUserMedia', self._throttlingTimeouts.getUserMedia);
                      };
                      
                      /**
                       * <blockquote class="info">
                       *   Note that if <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> is available despite having
                       *   <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> available, the
                       *   <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> is sent instead of the
                       *   <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> to Peers.
                       * </blockquote>
                       * Function that sends a new <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>
                       * to all connected Peers in the Room.
                       * @method sendStream
                       * @param {JSON|MediaStream} options The <a href="#method_getUserMedia"><code>getUserMedia()</code>
                       *   method</a> <code>options</code> parameter settings.
                       * - When provided as a <code>MediaStream</code> object, this configures the <code>options.audio</code> and
                       *   <code>options.video</code> based on the tracks available in the <code>MediaStream</code> object,
 *   and configures the <code>options.audio.mute</code> and <code>options.video.mute</code> based on the
 *   <code>.enabled</code> flags of the tracks provided in the <code>MediaStream</code> object, without
 *   invoking the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>.
                       *   <small>Object signature matches the <code>options</code> parameter in the
                       *   <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>.</small>
                       * @param {Function} [callback] The callback function fired when request has completed.
                       *   <small>Function parameters signature is <code>function (error, success)</code></small>
                       *   <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
                       *   <code>mediaAccessSuccess</code> event</a> triggering <code>isScreensharing</code> parameter payload value
                       *   as <code>false</code> for request success when User is in Room without Peers,
                       *   or by the <a href="#event_peerRestart"><code>peerRestart</code> event</a> triggering
                       *   <code>isSelfInitiateRestart</code> parameter payload value as <code>true</code> for all connected Peers
                       *   for request success when User is in Room with Peers.</small>
                       * @param {Error|String} callback.error The error result in request.
                       *   <small>Defined as <code>null</code> when there are no errors in request</small>
                       *   <small>Object signature is the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> error or
                       *   when invalid <code>options</code> is provided.</small>
                       * @param {MediaStream} callback.success The success result in request.
                       *   <small>Defined as <code>null</code> when there are errors in request</small>
                       *   <small>Object signature is the <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a>
                       *   Stream object.</small>
                       * @example
                       *   // Example 1: Send MediaStream object
                       *   function retrieveStreamBySourceForFirefox (sourceId) {
                       *     navigator.mediaDevices.getUserMedia({
                       *       audio: true,
                       *       video: {
 *         deviceId: { exact: sourceId }
                       *       }
                       *     }).then(function (stream) {
                       *       skylinkDemo.sendStream(stream, function (error, success) {
 *         if (error) return;
                       *         if (stream === success) {
                       *           console.info("Same MediaStream has been sent");
                       *         }
                       *         console.log("Stream is now being sent to Peers");
                       *         attachMediaStream(document.getElementById("my-video"), success);
                       *       });
                       *     });
                       *   }
                       *
                       *   // Example 2: Send video later
                       *   var inRoom = false;
                       *
                       *   function sendVideo () {
                       *     if (!inRoom) return;
                       *     skylinkDemo.sendStream({
                       *       audio: true,
                       *       video: true
                       *     }, function (error, success) {
                       *       if (error) return;
                       *       console.log("getUserMedia() Stream with video is now being sent to Peers");
                       *       attachMediaStream(document.getElementById("my-video"), success);
                       *     });
                       *   }
                       *
                       *   skylinkDemo.joinRoom({
                       *     audio: true
                       *   }, function (jRError, jRSuccess) {
                       *     if (jRError) return;
                       *     inRoom = true;
                       *   });
                       * @trigger <ol class="desc-seq">
                       *   <li>If User is not in Room: <ol><li><b>ABORT</b> and return error.</li></ol></li>
                       *   <li>Checks <code>options</code> provided. <ol><li>If provided parameter <code>options</code> is not valid: <ol>
                       *   <li><b>ABORT</b> and return error.</li></ol></li>
                       *   <li>Else if provided parameter <code>options</code> is a Stream object: <ol>
                       *   <li>Checks if there is any audio or video tracks. <ol><li>If there is no tracks: <ol>
                       *   <li><b>ABORT</b> and return error.</li></ol></li><li>Else: <ol>
                       *   <li>Set <code>options.audio</code> value as <code>true</code> if Stream has audio tracks.</li>
 *   <li>Set <code>options.video</code> value as <code>true</code> if Stream has video tracks.</li>
                       *   <li>Mutes / Unmutes audio and video tracks based on current muted settings in
                       *   <code>peerInfo.mediaStatus</code>. <small>This can be retrieved with
                       *   <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small></li>
                       *   <li>If there is any previous <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>:
                       *   <ol><li>Invokes <a href="#method_stopStream"><code>stopStream()</code> method</a> to stop previous Stream.</li></ol></li>
                       *   <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
                       *   parameter payload <code>isScreensharing</code> value as <code>false</code> and <code>isAudioFallback</code>
                       *   value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
                       *   <li>Invoke <a href="#method_getUserMedia"><code>getUserMedia()</code> method</a> with
                       *   <code>options</code> provided in <code>sendStream()</code>. <ol><li>If request has errors: <ol>
                       *   <li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li></ol></li>
                       *   <li>If there is currently no <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>: <ol>
                       *   <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
                       *   <code>isSelf</code> value as <code>true</code> and <code>stream</code> as
                       *   <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.</li>
                       *   <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
                       *   <code>isSelf</code> value as <code>true</code>.</li>
                       *   <li>Checks if MCU is enabled for App Key provided in <a href="#method_init"><code>init()</code> method</a>. <ol>
                       *   <li>If MCU is enabled: <ol><li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code>
                       *   method</a>. <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li>
                       *   <li>Else: <ol><li>If there are connected Peers in the Room: <ol>
                       *   <li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>. <ol>
                       *   <li>If request has errors: <ol><li><b>ABORT</b> and return error.
                       *   </li></ol></li></ol></li></ol></li></ol></li></ol></li></ol></li></ol>
                       * @for Skylink
                       * @since 0.5.6
                       */
                      
                      Skylink.prototype.sendStream = function(options, callback) {
                        var self = this;
                      
                        var restartFn = function (stream) {
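    // After the new Stream is in place: trigger the self "incomingStream" /
    // "peerUpdated" events and renegotiate existing Peer connections (or the
    // MCU connection) so that connected Peers receive the new Stream.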
                          if (self._inRoom) {
                            if (!self._streams.screenshare) {
                              self._trigger('incomingStream', self._user.sid, stream, true, self.getPeerInfo(), false, stream.id || stream.label);
                              self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
                            }
                      
                            if (Object.keys(self._peerConnections).length > 0 || self._hasMCU) {
                              self._refreshPeerConnection(Object.keys(self._peerConnections), false, {}, function (err, success) {
                                if (err) {
                                  log.error('Failed refreshing connections for sendStream() ->', err);
                                  if (typeof callback === 'function') {
                                    callback(new Error('Failed refreshing connections.'), null);
                                  }
                                  return;
                                }
                                if (typeof callback === 'function') {
                                  callback(null, stream);
                                }
                              });
                            } else if (typeof callback === 'function') {
                              callback(null, stream);
                            }
                          } else {
                            var notInRoomAgainError = 'Unable to send stream as user is not in the Room.';
                            log.error(notInRoomAgainError, stream);
                            if (typeof callback === 'function') {
                              callback(new Error(notInRoomAgainError), null);
                            }
                          }
                        };
                      
  if (typeof options !== 'object' || options === null) {
    var invalidOptionsError = 'Provided stream settings are invalid';
    log.error(invalidOptionsError, options);
    if (typeof callback === 'function') {
      callback(new Error(invalidOptionsError), null);
    }
    return;
  }
                      
                        if (!self._inRoom) {
                          var notInRoomError = 'Unable to send stream as user is not in the Room.';
                          log.error(notInRoomError, options);
    if (typeof callback === 'function') {
      callback(new Error(notInRoomError), null);
                          }
                          return;
                        }
                      
                        if (window.webrtcDetectedBrowser === 'edge') {
                          var edgeNotSupportError = 'Edge browser currently does not support renegotiation.';
                          log.error(edgeNotSupportError, options);
    if (typeof callback === 'function') {
      callback(new Error(edgeNotSupportError), null);
                          }
                          return;
                        }
                      
  if (typeof options.getAudioTracks === 'function' && typeof options.getVideoTracks === 'function') {
                          var checkActiveTracksFn = function (tracks) {
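      // Returns true if at least one track has not ended (readyState is
      // "live" where the readyState API is supported).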
                            for (var t = 0; t < tracks.length; t++) {
                              if (!(tracks[t].ended || (typeof tracks[t].readyState === 'string' ?
                                tracks[t].readyState !== 'live' : false))) {
                                return true;
                              }
                            }
                            return false;
                          };
                      
    if (!checkActiveTracksFn(options.getAudioTracks()) && !checkActiveTracksFn(options.getVideoTracks())) {
                            var invalidStreamError = 'Provided stream object does not have audio or video tracks.';
                            log.error(invalidStreamError, options);
      if (typeof callback === 'function') {
        callback(new Error(invalidStreamError), null);
                            }
                            return;
                          }
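
    // Register the provided Stream as the active getUserMedia() Stream. The
    // settings payload below assumes both audio and video are in use; the
    // tracks are then muted / unmuted from the current mediaStatus settings.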
                      
                          self._onStreamAccessSuccess(options, {
                            settings: {
                              audio: true,
                              video: true
                            },
                            getUserMediaSettings: {
                              audio: true,
                              video: true
                            }
                          }, false, false);
                      
                          restartFn(options);
                      
                        } else {
                          self.getUserMedia(options, function (err, stream) {
                            if (err) {
                              if (typeof callback === 'function') {
                                callback(err, null);
                              }
                              return;
                            }
                            restartFn(stream);
                          });
                        }
                      };
                      
                      /**
                       * <blockquote class="info">
                       *   Note that broadcasted events from <a href="#method_muteStream"><code>muteStream()</code> method</a>,
                       *   <a href="#method_stopStream"><code>stopStream()</code> method</a>,
                       *   <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
                       *   <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
                       *   <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
                       *   <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
                       *   sent within less than an interval.
                       * </blockquote>
                       * Function that stops <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.
                       * @method stopStream
                       * @example
 *   // Retrieve a getUserMedia() Stream first, then stop it later.
 *   skylinkDemo.getUserMedia();
 *
 *   function stopStream () {
 *     skylinkDemo.stopStream();
 *   }
                       * @trigger <ol class="desc-seq">
                       *   <li>Checks if there is <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>. <ol>
                       *   <li>If there is <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>: <ol>
                       *   <li>Stop <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> Stream. <ol>
                       *   <li><a href="#event_mediaAccessStopped"><code>mediaAccessStopped</code> event</a> triggers
                       *   parameter payload <code>isScreensharing</code> value as <code>false</code>.</li><li>If User is in Room: <ol>
                       *   <li><a href="#event_streamEnded"><code>streamEnded</code> event</a> triggers parameter
                       *   payload <code>isSelf</code> value as <code>true</code> and <code>isScreensharing</code> value as<code>false</code>
                       *   .</li><li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
                       *   <code>isSelf</code> value as <code>true</code>.</li></ol></li></ol></li></ol></li></ol></li></ol>
                       * @for Skylink
                       * @since 0.5.6
                       */
                      Skylink.prototype.stopStream = function () {
                        if (this._streams.userMedia) {
                          this._stopStreams({
                            userMedia: true
                          });
                        }
                      };
                      
                      /**
                       * <blockquote class="info">
                       *   Note that broadcasted events from <a href="#method_muteStream"><code>muteStream()</code> method</a>,
                       *   <a href="#method_stopStream"><code>stopStream()</code> method</a>,
                       *   <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
                       *   <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
                       *   <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
                       *   <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
                       *   sent within less than an interval.
                       * </blockquote>
                       * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
                       * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio or video tracks.
                       * @method muteStream
                       * @param {JSON} options The Streams muting options.
                       * @param {Boolean} [options.audioMuted=true] The flag if all Streams audio
                       *   tracks should be muted or not.
                       * @param {Boolean} [options.videoMuted=true] The flag if all Streams video
                       *   tracks should be muted or not.
                       * @example
                       *   // Example 1: Mute both audio and video tracks in all Streams
                       *   skylinkDemo.muteStream({
                       *     audioMuted: true,
                       *     videoMuted: true
                       *   });
                       *
                       *   // Example 2: Mute only audio tracks in all Streams
                       *   skylinkDemo.muteStream({
                       *     audioMuted: true,
                       *     videoMuted: false
                       *   });
                       *
                       *   // Example 3: Mute only video tracks in all Streams
                       *   skylinkDemo.muteStream({
                       *     audioMuted: false,
                       *     videoMuted: true
                       *   });
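 *
 *   // Example 4 (illustrative sketch): Toggle only the audio muted state,
 *   // based on the current peerInfo.mediaStatus from getPeerInfo()
 *   var mediaStatus = skylinkDemo.getPeerInfo().mediaStatus;
 *   skylinkDemo.muteStream({
 *     audioMuted: !mediaStatus.audioMuted,
 *     videoMuted: mediaStatus.videoMuted
 *   });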
                       * @trigger <ol class="desc-seq">
                       *   <li>If provided parameter <code>options</code> is invalid: <ol><li><b>ABORT</b> and return error.</li></ol></li>
 *   <li>Checks if there are any available Streams: <ol><li>If there are no available Streams: <ol>
                       *   <li><b>ABORT</b> and return error.</li></ol></li><li>If User is in Room: <ol>
 *   <li>Checks if there are audio tracks to mute / unmute: <ol><li>If there are audio tracks to mute / unmute: <ol>
                       *   <li>If <code>options.audioMuted</code> value is not the same as the current
                       *   <code>peerInfo.mediaStatus.audioMuted</code>: <small>This can be retrieved with
                       *   <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small> <ol>
                       *   <li><em>For Peer only</em> <a href="#event_peerUpdated"><code>peerUpdated</code> event</a>
                       *   triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li>
                       *   <li><em>For Peer only</em> <a href="#event_streamMuted"><code>streamMuted</code> event</a>
                       *   triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li>
 *   <li>Checks if there are video tracks to mute / unmute: <ol><li>If there are video tracks to mute / unmute: <ol>
                       *   <li>If <code>options.videoMuted</code> value is not the same as the current
                       *   <code>peerInfo.mediaStatus.videoMuted</code>: <small>This can be retrieved with
                       *   <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a>.</small> <ol>
                       *   <li><em>For Peer only</em> <a href="#event_peerUpdated"><code>peerUpdated</code> event</a>
                       *   triggers with parameter payload <code>isSelf</code> value as <code>false</code>.</li>
                       *   <li><em>For Peer only</em> <a href="#event_streamMuted"><code>streamMuted</code> event</a> triggers with
                       *   parameter payload <code>isSelf</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li></ol></li>
                       *   <li>If <code>options.audioMuted</code> value is not the same as the current
                       *   <code>peerInfo.mediaStatus.audioMuted</code> or <code>options.videoMuted</code> value is not
                       *   the same as the current <code>peerInfo.mediaStatus.videoMuted</code>: <ol>
                       *   <li><a href="#event_localMediaMuted"><code>localMediaMuted</code> event</a> triggers.</li>
                       *   <li>If User is in Room: <ol><li><a href="#event_streamMuted"><code>streamMuted</code> event</a>
                       *   triggers with parameter payload <code>isSelf</code> value as <code>true</code>.</li>
                       *   <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers with
                       *   parameter payload <code>isSelf</code> value as <code>true</code>.</li></ol></li></ol></li></ol></li></ol>
                       * @for Skylink
                       * @since 0.5.7
                       */
                      Skylink.prototype.muteStream = function(options) {
                        var self = this;
                      
  if (typeof options !== 'object' || options === null) {
    log.error('Provided settings are not an object');
                          return;
                        }
                      
                        if (!(self._streams.userMedia && self._streams.userMedia.stream) &&
                          !(self._streams.screenshare && self._streams.screenshare.stream)) {
                          log.warn('No streams are available to mute / unmute!');
                          return;
                        }
                      
                        var audioMuted = typeof options.audioMuted === 'boolean' ? options.audioMuted : true;
                        var videoMuted = typeof options.videoMuted === 'boolean' ? options.videoMuted : true;
                        var hasToggledAudio = false;
                        var hasToggledVideo = false;
                      
                        if (self._streamsMutedSettings.audioMuted !== audioMuted) {
                          self._streamsMutedSettings.audioMuted = audioMuted;
                          hasToggledAudio = true;
                        }
                      
                        if (self._streamsMutedSettings.videoMuted !== videoMuted) {
                          self._streamsMutedSettings.videoMuted = videoMuted;
                          hasToggledVideo = true;
                        }
                      
                        if (hasToggledVideo || hasToggledAudio) {
                          var streamTracksAvailability = self._muteStreams();
                      
                          if (hasToggledVideo && self._inRoom) {
                            self._sendChannelMessage({
                              type: self._SIG_MESSAGE_TYPE.MUTE_VIDEO,
                              mid: self._user.sid,
                              rid: self._room.id,
                              muted: self._streamsMutedSettings.videoMuted,
                              stamp: (new Date()).getTime()
                            });
                          }
                      
                          if (hasToggledAudio && self._inRoom) {
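      // Stagger the MUTE_AUDIO message after MUTE_VIDEO so that both signaling
      // messages are not sent within the same messaging interval (see the
      // queueing note in the documentation above).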
                            setTimeout(function () {
                              self._sendChannelMessage({
                                type: self._SIG_MESSAGE_TYPE.MUTE_AUDIO,
                                mid: self._user.sid,
                                rid: self._room.id,
                                muted: self._streamsMutedSettings.audioMuted,
                                stamp: (new Date()).getTime()
                              });
                            }, hasToggledVideo ? 1050 : 0);
                          }
                      
                          if ((streamTracksAvailability.hasVideo && hasToggledVideo) ||
                            (streamTracksAvailability.hasAudio && hasToggledAudio)) {
                      
                            self._trigger('localMediaMuted', {
                              audioMuted: streamTracksAvailability.hasAudio ? self._streamsMutedSettings.audioMuted : true,
                              videoMuted: streamTracksAvailability.hasVideo ? self._streamsMutedSettings.videoMuted : true
                            });
                      
                            if (self._inRoom) {
                              self._trigger('streamMuted', self._user.sid, self.getPeerInfo(), true,
                                self._streams.screenshare && self._streams.screenshare.stream);
                              self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
                            }
                          }
                        }
                      };
                      
                      /**
                       * <blockquote class="info"><b>Deprecation Warning!</b>
                       *   This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
                       * </blockquote>
                       * Function that unmutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
                       * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio tracks.
                       * @method enableAudio
                       * @deprecated true
                       * @example
                       *   function unmuteAudio () {
                       *     skylinkDemo.enableAudio();
                       *   }
                       * @trigger <ol class="desc-seq">
                       *   <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
                       *   <code>options.audioMuted</code> value as <code>false</code> and
                       *   <code>options.videoMuted</code> value with current <code>peerInfo.mediaStatus.videoMuted</code> value.
                       *   <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
                       * @for Skylink
                       * @since 0.5.5
                       */
                      Skylink.prototype.enableAudio = function() {
                        this.muteStream({
                          audioMuted: false,
                          videoMuted: this._streamsMutedSettings.videoMuted
                        });
                      };
                      
                      /**
                       * <blockquote class="info"><b>Deprecation Warning!</b>
                       *   This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
                       * </blockquote>
                       * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
                       * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> audio tracks.
                       * @method disableAudio
                       * @deprecated true
                       * @example
                       *   function muteAudio () {
                       *     skylinkDemo.disableAudio();
                       *   }
                       * @trigger <ol class="desc-seq">
                       *   <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
                       *   <code>options.audioMuted</code> value as <code>true</code> and
                       *   <code>options.videoMuted</code> value with current <code>peerInfo.mediaStatus.videoMuted</code> value.
                       *   <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
                       * @for Skylink
                       * @since 0.5.5
                       */
                      Skylink.prototype.disableAudio = function() {
                        this.muteStream({
                          audioMuted: true,
                          videoMuted: this._streamsMutedSettings.videoMuted
                        });
                      };
                      
                      /**
                       * <blockquote class="info"><b>Deprecation Warning!</b>
                       *   This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
                       * </blockquote>
                       * Function that unmutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
                       * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> video tracks.
                       * @method enableVideo
                       * @deprecated true
                       * @example
                       *   function unmuteVideo () {
                       *     skylinkDemo.enableVideo();
                       *   }
                       * @trigger <ol class="desc-seq">
                       *   <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
                       *   <code>options.videoMuted</code> value as <code>false</code> and
                       *   <code>options.audioMuted</code> value with current <code>peerInfo.mediaStatus.audioMuted</code> value.
                       *   <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
                       * @for Skylink
                       * @since 0.5.5
                       */
                      Skylink.prototype.enableVideo = function() {
                        this.muteStream({
                          videoMuted: false,
                          audioMuted: this._streamsMutedSettings.audioMuted
                        });
                      };
                      
                      /**
                       * <blockquote class="info"><b>Deprecation Warning!</b>
                       *   This method has been deprecated. Use <a href="#method_muteStream"><code>muteStream()</code> method</a> instead.
                       * </blockquote>
                       * Function that mutes both <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a> and
                       * <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> video tracks.
                       * @method disableVideo
                       * @deprecated true
                       * @example
                       *   function muteVideo () {
                       *     skylinkDemo.disableVideo();
                       *   }
                       * @trigger <ol class="desc-seq">
                       *   <li>Invokes <a href="#method_muteStream"><code>muteStream()</code> method</a> with
                       *   <code>options.videoMuted</code> value as <code>true</code> and
                       *   <code>options.audioMuted</code> value with current <code>peerInfo.mediaStatus.audioMuted</code> value.
                       *   <small>See <a href="#method_getPeerInfo"><code>getPeerInfo()</code> method</a> for more information.</small></li></ol>
                       * @for Skylink
                       * @since 0.5.5
                       */
                      Skylink.prototype.disableVideo = function() {
                        this.muteStream({
                          videoMuted: true,
                          audioMuted: this._streamsMutedSettings.audioMuted
                        });
                      };
                      
                      /**
                       * <blockquote class="info">
 *   For a better user experience, the functionality is throttled when invoked multiple times within
 *   the milliseconds interval configured in the <a href="#method_init"><code>init()</code> method</a>.
 *   Note that the Opera and Edge browsers do not support screensharing, and that IE / Safari browsers
 *   require the Temasys Plugin for screensharing support; check out the
 *   <a href="https://temasys.com.sg/plugin/#commercial-licensing">commercial licensing</a> for more options.
                       * </blockquote>
                       * Function that retrieves screensharing Stream.
                       * @method shareScreen
                       * @param {JSON|Boolean} [enableAudio=false] The flag if audio tracks should be retrieved.
                       * @param {Boolean} [enableAudio.stereo=false] <blockquote class="info"><b>Deprecation Warning!</b>
                       *   This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.stereo</code>
                       *   parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
                       *   <code>options.codecParams.audio.opus.stereo</code> is configured, this overrides the
                       *   <code>options.audio.stereo</code> setting.</blockquote>
                       *   The flag if OPUS audio codec stereo band should be configured for sending encoded audio data.
                       *   <small>When not provided, the default browser configuration is used.</small>
 * @param {Boolean} [enableAudio.usedtx] <blockquote class="info"><b>Deprecation Warning!</b>
 *   This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.usedtx</code>
 *   parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
 *   <code>options.codecParams.audio.opus.usedtx</code> is configured, this overrides the
 *   <code>options.audio.usedtx</code> setting. Note that this feature might
 *   not work depending on the browser support and implementation.</blockquote>
                       *   The flag if OPUS audio codec should enable DTX (Discontinuous Transmission) for sending encoded audio data.
                       *   <small>This might help to reduce bandwidth as it reduces the bitrate during silence or background noise, and
                       *   goes hand-in-hand with the <code>options.voiceActivityDetection</code> flag in <a href="#method_joinRoom">
                       *   <code>joinRoom()</code> method</a>.</small>
                       *   <small>When not provided, the default browser configuration is used.</small>
                       * @param {Boolean} [enableAudio.useinbandfec] <blockquote class="info"><b>Deprecation Warning!</b>
                       *   This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.useinbandfec</code>
                       *   parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
                       *   <code>options.codecParams.audio.opus.useinbandfec</code> is configured, this overrides the
 *   <code>options.audio.useinbandfec</code> setting. Note that this parameter should be used
 *   for debugging purposes only.</blockquote>
                       *   The flag if OPUS audio codec has the capability to take advantage of the in-band FEC
                       *   (Forward Error Correction) when sending encoded audio data.
                       *   <small>This helps to reduce the harm of packet loss by encoding information about the previous packet loss.</small>
                       *   <small>When not provided, the default browser configuration is used.</small>
                       * @param {Number} [enableAudio.maxplaybackrate] <blockquote class="info"><b>Deprecation Warning!</b>
                       *   This property has been deprecated. Configure this with the <code>options.codecParams.audio.opus.maxplaybackrate</code>
                       *   parameter in the <a href="#method_init"><code>init()</code> method</a> instead. If the
                       *   <code>options.codecParams.audio.opus.maxplaybackrate</code> is configured, this overrides the
 *   <code>options.audio.maxplaybackrate</code> setting. Note that this feature might
 *   not work depending on the browser support and implementation, and that this
 *   parameter should be used for debugging purposes only.</blockquote>
 *   The OPUS audio codec maximum output sampling rate in Hz (hertz) that it is capable of receiving
 *   decoded audio data at, to adjust to hardware limitations and ensure that any sending audio data
 *   is not encoded at a sampling rate higher than this.
                       *   <small>This value must be between <code>8000</code> to <code>48000</code>.</small>
                       *   <small>When not provided, the default browser configuration is used.</small>
                       * @param {Boolean} [enableAudio.echoCancellation=true] <blockquote class="info">
 *   For Chrome/Opera/IE/Safari/Bowser, the echo cancellation functionality may not work and may produce
 *   terrible feedback. It is recommended to use headphones or other external microphone devices rather than
 *   the device's built-in microphone.</blockquote> The flag to enable echo cancellation for the audio track.
                       * @param {Function} [callback] The callback function fired when request has completed.
                       *   <small>Function parameters signature is <code>function (error, success)</code></small>
                       *   <small>Function request completion is determined by the <a href="#event_mediaAccessSuccess">
                       *   <code>mediaAccessSuccess</code> event</a> triggering <code>isScreensharing</code> parameter payload value
                       *   as <code>true</code> for request success when User is not in the Room or is in Room without Peers,
                       *   or by the <a href="#event_peerRestart"><code>peerRestart</code> event</a> triggering
                       *   <code>isSelfInitiateRestart</code> parameter payload value as <code>true</code> for all connected Peers
                       *   for request success when User is in Room with Peers.</small>
                       * @param {Error|String} callback.error The error result in request.
                       *   <small>Defined as <code>null</code> when there are no errors in request</small>
                       *   <small>Object signature is the <code>shareScreen()</code> error when retrieving screensharing Stream.</small>
                       * @param {MediaStream} callback.success The success result in request.
                       *   <small>Defined as <code>null</code> when there are errors in request</small>
                       *   <small>Object signature is the screensharing Stream object.</small>
                       * @example
                       *   // Example 1: Share screen with audio
 *   skylinkDemo.shareScreen(true, function (error, success) {
                       *     if (error) return;
                       *     attachMediaStream(document.getElementById("my-screen"), success);
                       *   });
                       *
                       *   // Example 2: Share screen without audio
                       *   skylinkDemo.shareScreen(false, function (error, success) {
                       *     if (error) return;
                       *     attachMediaStream(document.getElementById("my-screen"), success);
                       *   });
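 *
 *   // Example 3 (illustrative): Share screen with audio but disable echo cancellation
 *   skylinkDemo.shareScreen({ echoCancellation: false }, function (error, success) {
 *     if (error) return;
 *     attachMediaStream(document.getElementById("my-screen"), success);
 *   });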
                       * @trigger <ol class="desc-seq">
                       *   <li>Retrieves screensharing Stream. <ol><li>If retrieval was successful: <ol><li>If browser is Firefox: <ol>
                       *   <li>If there are missing audio or video tracks requested: <ol>
                       *   <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
                       *   <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
                       *   <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a>
                       *   triggers parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
                       *   value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li>
                       *   <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
                       *   parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
                       *   value as <code>false</code>.</li></ol></li><li>Else: <ol>
 *   <li>If audio is requested: <small>Chrome, Safari and IE currently do not support retrieval of
 *   the audio track together with the screensharing video track.</small> <ol><li>Retrieves audio Stream: <ol>
                       *   <li>If retrieval was successful: <ol><li>Attempts to attach screensharing Stream video track to audio Stream. <ol>
                       *   <li>If attachment was successful: <ol><li><a href="#event_mediaAccessSuccess">
                       *   <code>mediaAccessSuccess</code> event</a> triggers parameter payload <code>isScreensharing</code>
                       *   value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li><li>Else: <ol>
                       *   <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
                       *   <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
                       *   <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a> triggers parameter payload
                       *   <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code> value as <code>true</code> and
                       *   <code>isAudioFallback</code> value as <code>false</code>.</li>
                       *   <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
                       *   parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
                       *   value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
                       *   <li>If there is any previous <code>shareScreen()</code> Stream: <ol>
                       *   <li>Invokes <a href="#method_stopScreen"><code>stopScreen()</code> method</a>.</li></ol></li>
                       *   <li><a href="#event_mediaAccessFallback"><code>mediaAccessFallback</code> event</a>
                       *   triggers parameter payload <code>state</code> as <code>FALLBACKED</code>, <code>isScreensharing</code>
                       *   value as <code>true</code> and <code>isAudioFallback</code> value as <code>false</code>.</li>
                       *   <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a> triggers
                       *   parameter payload <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code>
                       *   value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
                       *   <li><a href="#event_mediaAccessSuccess"><code>mediaAccessSuccess</code> event</a>
                       *   triggers parameter payload <code>isScreensharing</code> value as <code>true</code>
                       *   and <code>isAudioFallback</code> value as <code>false</code>.</li></ol></li></ol></li></ol></li><li>Else: <ol>
                       *   <li><a href="#event_mediaAccessError"><code>mediaAccessError</code> event</a> triggers parameter payload
                       *   <code>isScreensharing</code> value as <code>true</code> and <code>isAudioFallback</code> value as
                       *   <code>false</code>.</li><li><b>ABORT</b> and return error.</li></ol></li></ol></li><li>If User is in Room: <ol>
                       *   <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
                       *   <code>isSelf</code> value as <code>true</code> and <code>stream</code> as <code>shareScreen()</code> Stream.</li>
                       *   <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
                       *   <code>isSelf</code> value as <code>true</code>.</li>
                       *   <li>Checks if MCU is enabled for App Key provided in <a href="#method_init"><code>init()</code> method</a>. <ol>
                       *   <li>If MCU is enabled: <ol><li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>.
                       *   <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li></ol></li></ol></li></ol></li><li>Else: <ol>
                       *   <li>If there are connected Peers in the Room: <ol><li>Invoke <a href="#method_refreshConnection">
                       *   <code>refreshConnection()</code> method</a>. <ol><li>If request has errors: <ol><li><b>ABORT</b> and return error.</li>
                       *   </ol></li></ol></li></ol></li></ol></li></ol></li></ol></li></ol>
                       * @for Skylink
                       * @since 0.6.0
                       */
                      Skylink.prototype.shareScreen = function (enableAudio, callback) {
                        var self = this;
                        var enableAudioSettings = {
                          stereo: false,
                          echoCancellation: true
                        };
                      
                        if (typeof enableAudio === 'function') {
                          callback = enableAudio;
                          enableAudio = false;
                      
                        } else if (enableAudio && typeof enableAudio === 'object') {
                          enableAudioSettings.usedtx = typeof enableAudio.usedtx === 'boolean' ? enableAudio.usedtx : null;
                          enableAudioSettings.useinbandfec = typeof enableAudio.useinbandfec === 'boolean' ? enableAudio.useinbandfec : null;
                          enableAudioSettings.stereo = enableAudio.stereo === true;
                          enableAudioSettings.echoCancellation = enableAudio.echoCancellation !== false;
                        }
                      
                        self._throttle(function (runFn) {
                          if (!runFn) {
                            if (self._throttlingShouldThrowError) {
                              var throttleLimitError = 'Unable to run as throttle interval has not reached (' + self._throttlingTimeouts.shareScreen + 'ms).';
                              log.error(throttleLimitError);
                      
                              if (typeof callback === 'function') {
                                callback(new Error(throttleLimitError), null);
                              }
                            }
                            return;
                          }
                      
                          var settings = {
                            settings: {
                              audio: enableAudio ? enableAudioSettings : false,
                              video: {
                                screenshare: true,
                                exactConstraints: false
                              }
                            },
                            getUserMediaSettings: {
                              video: {
                                mediaSource: 'window'
                              }
                            }
                          };
                      
                          var mediaAccessSuccessFn = function (stream) {
                            self.off('mediaAccessError', mediaAccessErrorFn);
                      
                            if (self._inRoom) {
                              self._trigger('incomingStream', self._user.sid, stream, true, self.getPeerInfo(), true, stream.id || stream.label);
                              self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
                      
                              if (Object.keys(self._peerConnections).length > 0 || self._hasMCU) {
                                self._refreshPeerConnection(Object.keys(self._peerConnections), false, {}, function (err, success) {
                                  if (err) {
                                    log.error('Failed refreshing connections for shareScreen() ->', err);
                                    if (typeof callback === 'function') {
                                      callback(new Error('Failed refreshing connections.'), null);
                                    }
                                    return;
                                  }
                                  if (typeof callback === 'function') {
                                    callback(null, stream);
                                  }
                                });
                              } else if (typeof callback === 'function') {
                                callback(null, stream);
                              }
                            } else if (typeof callback === 'function') {
                              callback(null, stream);
                            }
                          };
                      
                          var mediaAccessErrorFn = function (error) {
                            self.off('mediaAccessSuccess', mediaAccessSuccessFn);
                      
                            if (typeof callback === 'function') {
                              callback(error, null);
                            }
                          };
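
    // The condition function passed as the third argument to once() filters
    // the events so these handlers only fire for screensharing media access.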
                      
                          self.once('mediaAccessSuccess', mediaAccessSuccessFn, function (stream, isScreensharing) {
                            return isScreensharing;
                          });
                      
                          self.once('mediaAccessError', mediaAccessErrorFn, function (error, isScreensharing) {
                            return isScreensharing;
                          });
                      
                          try {
                            if (enableAudio && window.webrtcDetectedBrowser === 'firefox') {
                              settings.getUserMediaSettings.audio = { echoCancellation: enableAudioSettings.echoCancellation };
                            }
                      
                            navigator.getUserMedia(settings.getUserMediaSettings, function (stream) {
                              if (window.webrtcDetectedBrowser === 'firefox' || !enableAudio) {
                                self._onStreamAccessSuccess(stream, settings, true, false);
                                return;
                              }
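
      // Chrome / Safari / IE cannot capture an audio track together with the
      // screensharing video track, so retrieve a separate audio Stream and
      // attach the screen video track to it.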
                      
                              navigator.getUserMedia({
                                audio: { echoCancellation: enableAudioSettings.echoCancellation }
                              }, function (audioStream) {
                                try {
                                  audioStream.addTrack(stream.getVideoTracks()[0]);
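
          // Keep a reference to the original screen Stream as a clone so that
          // _muteStreams() and _stopStreams() can still reach its tracks later.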
                      
                                  self.once('mediaAccessSuccess', function () {
                                    self._streams.screenshare.streamClone = stream;
                                  }, function (stream, isScreensharing) {
                                    return isScreensharing;
                                  });
                      
                                  self._onStreamAccessSuccess(audioStream, settings, true, false);
                      
                                } catch (error) {
                                  log.error('Failed retrieving audio stream for screensharing stream', error);
                                  self._onStreamAccessSuccess(stream, settings, true, false);
                                }
                              }, function (error) {
                                log.error('Failed retrieving audio stream for screensharing stream', error);
                                self._onStreamAccessSuccess(stream, settings, true, false);
                              });
                            }, function (error) {
                              self._onStreamAccessError(error, settings, true, false);
                            });
                      
                          } catch (error) {
                            self._onStreamAccessError(error, settings, true, false);
                          }
                        }, 'shareScreen', self._throttlingTimeouts.shareScreen);
                      };
                      
                      /**
                       * <blockquote class="info">
                       *   Note that broadcasted events from <a href="#method_muteStream"><code>muteStream()</code> method</a>,
                       *   <a href="#method_stopStream"><code>stopStream()</code> method</a>,
                       *   <a href="#method_stopScreen"><code>stopScreen()</code> method</a>,
                       *   <a href="#method_sendMessage"><code>sendMessage()</code> method</a>,
                       *   <a href="#method_unlockRoom"><code>unlockRoom()</code> method</a> and
                       *   <a href="#method_lockRoom"><code>lockRoom()</code> method</a> may be queued when
                       *   sent within less than an interval.
                       * </blockquote>
                       * Function that stops <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>.
                       * @method stopScreen
                       * @example
 *   // Retrieve a shareScreen() Stream first, then stop it later.
 *   skylinkDemo.shareScreen();
 *
 *   function stopScreen () {
 *     skylinkDemo.stopScreen();
 *   }
                       * @trigger <ol class="desc-seq">
                       *   <li>Checks if there is <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>. <ol>
                       *   <li>If there is <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a>: <ol>
                       *   <li>Stop <a href="#method_shareScreen"><code>shareScreen()</code> Stream</a> Stream. <ol>
                       *   <li><a href="#event_mediaAccessStopped"><code>mediaAccessStopped</code> event</a>
                       *   triggers parameter payload <code>isScreensharing</code> value as <code>true</code> and
                       *   <code>isAudioFallback</code> value as <code>false</code>.</li><li>If User is in Room: <ol>
                       *   <li><a href="#event_streamEnded"><code>streamEnded</code> event</a> triggers parameter payload
                       *   <code>isSelf</code> value as <code>true</code> and <code>isScreensharing</code> value as <code>true</code>.</li>
                       *   <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
                       *   <code>isSelf</code> value as <code>true</code>.</li>
                       *   </ol></li></ol></li><li>If User is in Room: <small><b>SKIP</b> this step if <code>stopScreen()</code>
                       *   was invoked from <a href="#method_shareScreen"><code>shareScreen()</code> method</a>.</small> <ol>
 *   <li>If there is a <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>: <ol>
                       *   <li><a href="#event_incomingStream"><code>incomingStream</code> event</a> triggers parameter payload
                       *   <code>isSelf</code> value as <code>true</code> and <code>stream</code> as
                       *   <a href="#method_getUserMedia"><code>getUserMedia()</code> Stream</a>.</li>
                       *   <li><a href="#event_peerUpdated"><code>peerUpdated</code> event</a> triggers parameter payload
                       *   <code>isSelf</code> value as <code>true</code>.</li></ol></li>
                       *   <li>Invoke <a href="#method_refreshConnection"><code>refreshConnection()</code> method</a>.</li>
                       *   </ol></li></ol></li></ol></li></ol>
                       * @for Skylink
                       * @since 0.6.0
                       */
                      Skylink.prototype.stopScreen = function () {
                        if (this._streams.screenshare) {
                          this._stopStreams({
                            screenshare: true
                          });
                      
                          if (this._inRoom) {
                            if (this._streams.userMedia && this._streams.userMedia.stream) {
                              this._trigger('incomingStream', this._user.sid, this._streams.userMedia.stream, true, this.getPeerInfo(),
                                false, this._streams.userMedia.stream.id || this._streams.userMedia.stream.label);
                              this._trigger('peerUpdated', this._user.sid, this.getPeerInfo(), true);
                            }
      this._refreshPeerConnection(Object.keys(this._peerConnections), false, {}, function () {});
                          }
                        }
                      };
                      
                      /**
                       * Function that handles the muting of Stream audio and video tracks.
                       * @method _muteStreams
                       * @private
                       * @for Skylink
                       * @since 0.6.15
                       */
                      Skylink.prototype._muteStreams = function () {
                        var self = this;
                        var hasVideo = false;
                        var hasAudio = false;
                      
                        var muteFn = function (stream) {
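    // Toggle each track's "enabled" flag from the current muted settings and
    // record whether any audio / video tracks were found.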
                          var audioTracks = stream.getAudioTracks();
                          var videoTracks = stream.getVideoTracks();
                      
                          for (var a = 0; a < audioTracks.length; a++) {
                            audioTracks[a].enabled = !self._streamsMutedSettings.audioMuted;
                            hasAudio = true;
                          }
                      
                          for (var v = 0; v < videoTracks.length; v++) {
                            videoTracks[v].enabled = !self._streamsMutedSettings.videoMuted;
                            hasVideo = true;
                          }
                        };
                      
                        if (self._streams.userMedia && self._streams.userMedia.stream) {
                          muteFn(self._streams.userMedia.stream);
                        }
                      
                        if (self._streams.screenshare && self._streams.screenshare.stream) {
                          muteFn(self._streams.screenshare.stream);
                        }
                      
                        if (self._streams.screenshare && self._streams.screenshare.streamClone) {
                          muteFn(self._streams.screenshare.streamClone);
                        }
                      
                        if (window.webrtcDetectedBrowser === 'edge') {
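    // For Edge, also apply the muting to the local streams attached to each
    // Peer connection.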
                          for (var peerId in self._peerConnections) {
                            if (self._peerConnections.hasOwnProperty(peerId) && self._peerConnections[peerId]) {
                              var localStreams = self._peerConnections[peerId].getLocalStreams();
                              for (var s = 0; s < localStreams.length; s++) {
                                muteFn(localStreams[s]);
                              }
                            }
                          }
                        }
                      
                        log.debug('Updated Streams muted status ->', self._streamsMutedSettings);
                      
                        return {
                          hasVideo: hasVideo,
                          hasAudio: hasAudio
                        };
                      };
                      
                      /**
                       * Function that handles stopping the Stream streaming.
                       * @method _stopStreams
                       * @private
                       * @for Skylink
                       * @since 0.6.15
                       */
                      Skylink.prototype._stopStreams = function (options) {
                        var self = this;
                        var stopFn = function (stream) {
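    // Stop each track individually, falling back to the deprecated
    // MediaStream.stop() for older browser implementations.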
                          var streamId = stream.id || stream.label;
                          log.debug([null, 'MediaStream', streamId, 'Stopping Stream ->'], stream);
                      
                          try {
                            var audioTracks = stream.getAudioTracks();
                            var videoTracks = stream.getVideoTracks();
                      
                            for (var a = 0; a < audioTracks.length; a++) {
                              audioTracks[a].stop();
                            }
                      
                            for (var v = 0; v < videoTracks.length; v++) {
                              videoTracks[v].stop();
                            }
                      
                          } catch (error) {
                            stream.stop();
                          }
                      
                          if (self._streamsStoppedCbs[streamId]) {
                            self._streamsStoppedCbs[streamId]();
                            delete self._streamsStoppedCbs[streamId];
                          }
                        };
                      
                        var stopUserMedia = false;
                        var stopScreenshare = false;
                        var hasStoppedMedia = false;
                      
                        if (options && typeof options === 'object') {
                          stopUserMedia = options.userMedia === true;
                          stopScreenshare = options.screenshare === true;
                        }
                      
                        if (stopUserMedia && self._streams.userMedia) {
                          if (self._streams.userMedia.stream) {
                            stopFn(self._streams.userMedia.stream);
                          }
                      
                          self._streams.userMedia = null;
                          hasStoppedMedia = true;
                        }
                      
                        if (stopScreenshare && self._streams.screenshare) {
                          if (self._streams.screenshare.streamClone) {
                            stopFn(self._streams.screenshare.streamClone);
                          }
                      
                          if (self._streams.screenshare.stream) {
                            stopFn(self._streams.screenshare.stream);
                          }
                      
                          self._streams.screenshare = null;
                          hasStoppedMedia = true;
                        }
                      
                        if (self._inRoom && hasStoppedMedia) {
                          self._trigger('peerUpdated', self._user.sid, self.getPeerInfo(), true);
                        }
                      
                        log.log('Stopped Streams with settings ->', options);
                      };
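
                      // Illustrative usage sketch (not part of the SDK): stopping only the camera
                      // Stream while keeping an active screenshare, on an assumed `skylink` instance.
                      //
                      //   skylink._stopStreams({ userMedia: true, screenshare: false });
                      //   // The tracks of _streams.userMedia.stream are stopped, its registered
                      //   // stopped callback in _streamsStoppedCbs fires, and _streams.userMedia
                      //   // is reset to null.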
                      
                      /**
                       * Function that parses the <code>getUserMedia()</code> settings provided.
                       * @method _parseStreamSettings
                       * @private
                       * @for Skylink
                       * @since 0.6.15
                       */
                      Skylink.prototype._parseStreamSettings = function(options) {
                        var self = this;
                        var settings = {
                          settings: { audio: false, video: false },
                          mutedSettings: { shouldAudioMuted: false, shouldVideoMuted: false },
                          getUserMediaSettings: { audio: false, video: false }
                        };
                      
                        if (options.audio) {
                          // Keep the parsed settings simple as Edge does not support advanced constraints yet
                          settings.settings.audio = {
                            stereo: false,
                            exactConstraints: !!options.useExactConstraints,
                            echoCancellation: true
                          };
                          settings.getUserMediaSettings.audio = {
                            echoCancellation: true
                          };
                      
                          if (typeof options.audio === 'object') {
                            if (typeof options.audio.stereo === 'boolean') {
                              settings.settings.audio.stereo = options.audio.stereo;
                            }
                      
                            if (typeof options.audio.useinbandfec === 'boolean') {
                              settings.settings.audio.useinbandfec = options.audio.useinbandfec;
                            }
                      
                            if (typeof options.audio.usedtx === 'boolean') {
                              settings.settings.audio.usedtx = options.audio.usedtx;
                            }
                      
                            if (typeof options.audio.maxplaybackrate === 'number' &&
                              options.audio.maxplaybackrate >= 8000 && options.audio.maxplaybackrate <= 48000) {
                              settings.settings.audio.maxplaybackrate = options.audio.maxplaybackrate;
                            }
                      
                            if (typeof options.audio.mute === 'boolean') {
                              settings.mutedSettings.shouldAudioMuted = options.audio.mute;
                            }
                      
                            // These options are not supported in the Edge browser
                            if (window.webrtcDetectedBrowser !== 'edge') {
                              if (typeof options.audio.echoCancellation === 'boolean') {
                                settings.settings.audio.echoCancellation = options.audio.echoCancellation;
                                settings.getUserMediaSettings.audio.echoCancellation = options.audio.echoCancellation;
                              }
                      
                              if (Array.isArray(options.audio.optional)) {
                                settings.settings.audio.optional = clone(options.audio.optional);
                                settings.getUserMediaSettings.audio.optional = clone(options.audio.optional);
                              }
                      
                              if (options.audio.deviceId && typeof options.audio.deviceId === 'string' &&
                                window.webrtcDetectedBrowser !== 'firefox') {
                                settings.settings.audio.deviceId = options.audio.deviceId;
                      
                                if (options.useExactConstraints) {
                                  settings.getUserMediaSettings.audio.deviceId = { exact: options.audio.deviceId };
                      
                                } else {
                                  if (!Array.isArray(settings.getUserMediaSettings.audio.optional)) {
                                    settings.getUserMediaSettings.audio.optional = [];
                                  }
                      
                                  settings.getUserMediaSettings.audio.optional.push({
                                    sourceId: options.audio.deviceId
                                  });
                                }
                              }
                            }
                          }
                      
                          if (window.webrtcDetectedBrowser === 'edge') {
                            settings.getUserMediaSettings.audio = true;
                          }
                        }
                      
                        if (options.video) {
                          // Keep the parsed settings simple as Edge does not support advanced constraints yet
                          settings.settings.video = {
                            resolution: clone(this.VIDEO_RESOLUTION.VGA),
                            screenshare: false,
                            exactConstraints: !!options.useExactConstraints
                          };
                          settings.getUserMediaSettings.video = {};
                      
                          if (typeof options.video === 'object') {
                            if (typeof options.video.mute === 'boolean') {
                              settings.mutedSettings.shouldVideoMuted = options.video.mute;
                            }
                      
                            if (Array.isArray(options.video.optional)) {
                              settings.settings.video.optional = clone(options.video.optional);
                              settings.getUserMediaSettings.video.optional = clone(options.video.optional);
                            }
                      
                            if (options.video.deviceId && typeof options.video.deviceId === 'string' &&
                              window.webrtcDetectedBrowser !== 'firefox') {
                              settings.settings.video.deviceId = options.video.deviceId;
                      
                              if (options.useExactConstraints) {
                                settings.getUserMediaSettings.video.deviceId = { exact: options.video.deviceId };
                      
                              } else {
                                if (!Array.isArray(settings.getUserMediaSettings.video.optional)) {
                                  settings.getUserMediaSettings.video.optional = [];
                                }
                      
                                settings.getUserMediaSettings.video.optional.push({
                                  sourceId: options.video.deviceId
                                });
                              }
                            }
                      
                            if (options.video.resolution && typeof options.video.resolution === 'object') {
                              if ((options.video.resolution.width && typeof options.video.resolution.width === 'object') ||
                                typeof options.video.resolution.width === 'number') {
                                settings.settings.video.resolution.width = options.video.resolution.width;
                              }
                              if ((options.video.resolution.height && typeof options.video.resolution.height === 'object') ||
                                typeof options.video.resolution.height === 'number') {
                                settings.settings.video.resolution.height = options.video.resolution.height;
                              }
                            }
                      
                            settings.getUserMediaSettings.video.width = typeof settings.settings.video.resolution.width === 'object' ?
                              settings.settings.video.resolution.width : (options.useExactConstraints ?
                              { exact: settings.settings.video.resolution.width } : { max: settings.settings.video.resolution.width });
                      
                            settings.getUserMediaSettings.video.height = typeof settings.settings.video.resolution.height === 'object' ?
                              settings.settings.video.resolution.height : (options.useExactConstraints ?
                              { exact: settings.settings.video.resolution.height } : { max: settings.settings.video.resolution.height });
                      
                            if ((options.video.frameRate && typeof options.video.frameRate === 'object') ||
                              (typeof options.video.frameRate === 'number' && !self._isUsingPlugin)) {
                              settings.settings.video.frameRate = options.video.frameRate;
                              settings.getUserMediaSettings.video.frameRate = typeof settings.settings.video.frameRate === 'object' ?
                                settings.settings.video.frameRate : (options.useExactConstraints ?
                                { exact: settings.settings.video.frameRate } : { max: settings.settings.video.frameRate });
                            }
                      
                            if (options.video.facingMode && ['string', 'object'].indexOf(typeof options.video.facingMode) > -1 && self._isUsingPlugin) {
                              settings.settings.video.facingMode = options.video.facingMode;
                              settings.getUserMediaSettings.video.facingMode = typeof settings.settings.video.facingMode === 'object' ?
                                settings.settings.video.facingMode : (options.useExactConstraints ?
                                { exact: settings.settings.video.facingMode } : { max: settings.settings.video.facingMode });
                            }
                          } else if (options.useExactConstraints) {
                            settings.getUserMediaSettings.video = {
                              width: { exact: settings.settings.video.resolution.width },
                              height: { exact: settings.settings.video.resolution.height }
                            };
                      
                          } else {
                            settings.getUserMediaSettings.video.mandatory = {
                              maxWidth: settings.settings.video.resolution.width,
                              maxHeight: settings.settings.video.resolution.height
                            };
                          }
                      
                          if (window.webrtcDetectedBrowser === 'edge') {
                            settings.settings.video = {
                              screenshare: false,
                              exactConstraints: !!options.useExactConstraints
                            };
                            settings.getUserMediaSettings.video = true;
                          }
                        }
                      
                        return settings;
                      };
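
                      // Illustrative usage sketch (not part of the SDK): a sample options object and
                      // the shape of the parsed result, assuming a non-Edge browser without the
                      // Temasys plugin (`_isUsingPlugin` is false).
                      //
                      //   var parsed = skylink._parseStreamSettings({
                      //     audio: { stereo: true, mute: false },
                      //     video: { resolution: { width: 1280, height: 720 }, frameRate: 30 },
                      //     useExactConstraints: false
                      //   });
                      //   // parsed.getUserMediaSettings.video ->
                      //   //   { width: { max: 1280 }, height: { max: 720 }, frameRate: { max: 30 } }
                      //   // parsed.mutedSettings ->
                      //   //   { shouldAudioMuted: false, shouldVideoMuted: false }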
                      
                      /**
                       * Function that handles the native <code>navigator.getUserMedia()</code> API success callback result.
                       * @method _onStreamAccessSuccess
                       * @private
                       * @for Skylink
                       * @since 0.3.0
                       */
                      Skylink.prototype._onStreamAccessSuccess = function(stream, settings, isScreenSharing, isAudioFallback) {
                        var self = this;
                        var streamId = stream.id || stream.label;
                      
                        log.log([null, 'MediaStream', streamId, 'Has access to stream ->'], stream);
                      
                        // Stop previous stream
                        if (!isScreenSharing && self._streams.userMedia) {
                          self._stopStreams({
                            userMedia: true,
                            screenshare: false
                          });
                      
                        } else if (isScreenSharing && self._streams.screenshare) {
                          self._stopStreams({
                            userMedia: false,
                            screenshare: true
                          });
                        }
                      
                        self._streamsStoppedCbs[streamId] = function () {
                          log.log([null, 'MediaStream', streamId, 'Stream has ended']);
                      
                          self._trigger('mediaAccessStopped', !!isScreenSharing, !!isAudioFallback, streamId);
                      
                          if (self._inRoom) {
                            log.debug([null, 'MediaStream', streamId, 'Sending Stream ended status to Peers']);
                      
                            self._sendChannelMessage({
                              type: self._SIG_MESSAGE_TYPE.STREAM,
                              mid: self._user.sid,
                              rid: self._room.id,
                              cid: self._key,
                              streamId: streamId,
                              settings: settings.settings,
                              status: 'ended'
                            });
                      
                            self._trigger('streamEnded', self._user.sid, self.getPeerInfo(), true, !!isScreenSharing, streamId);
                      
                            if (isScreenSharing && self._streams.screenshare && self._streams.screenshare.stream &&
                              (self._streams.screenshare.stream.id || self._streams.screenshare.stream.label) === streamId) {
                              self._streams.screenshare = null;
                      
                            } else if (!isScreenSharing && self._streams.userMedia && self._streams.userMedia.stream &&
                              (self._streams.userMedia.stream.id || self._streams.userMedia.stream.label) === streamId) {
                              self._streams.userMedia = null;
                            }
                          }
                        };
                      
                        // Handle event for Chrome / Opera
                        if (['chrome', 'opera'].indexOf(window.webrtcDetectedBrowser) > -1) {
                          stream.oninactive = function () {
                            if (self._streamsStoppedCbs[streamId]) {
                              self._streamsStoppedCbs[streamId]();
                              delete self._streamsStoppedCbs[streamId];
                            }
                          };
                      
                        // Handle event for Firefox (use an interval)
                        } else if (window.webrtcDetectedBrowser === 'firefox') {
                          stream.endedInterval = setInterval(function () {
                            if (typeof stream.recordedTime === 'undefined') {
                              stream.recordedTime = 0;
                            }
                            if (stream.recordedTime === stream.currentTime) {
                              clearInterval(stream.endedInterval);
                      
                              if (self._streamsStoppedCbs[streamId]) {
                                self._streamsStoppedCbs[streamId]();
                                delete self._streamsStoppedCbs[streamId];
                              }
                      
                            } else {
                              stream.recordedTime = stream.currentTime;
                            }
                          }, 1000);
                      
                        } else {
                          stream.onended = function () {
                            if (self._streamsStoppedCbs[streamId]) {
                              self._streamsStoppedCbs[streamId]();
                              delete self._streamsStoppedCbs[streamId];
                            }
                          };
                        }
                      
                        if ((settings.settings.audio && stream.getAudioTracks().length === 0) ||
                          (settings.settings.video && stream.getVideoTracks().length === 0)) {
                      
                          var tracksNotSameError = 'Expected ' + (settings.settings.audio ? '1' : '0') +
                            ' audio track(s) and ' + (settings.settings.video ? '1' : '0') +
                            ' video track(s), but received ' + stream.getAudioTracks().length +
                            ' audio track(s) and ' + stream.getVideoTracks().length + ' video track(s)';
                      
                          log.warn([null, 'MediaStream', streamId, tracksNotSameError]);
                      
                          var requireAudio = !!settings.settings.audio;
                          var requireVideo = !!settings.settings.video;
                      
                          if (settings.settings.audio && stream.getAudioTracks().length === 0) {
                            settings.settings.audio = false;
                          }
                      
                          if (settings.settings.video && stream.getVideoTracks().length === 0) {
                            settings.settings.video = false;
                          }
                      
                          self._trigger('mediaAccessFallback', {
                            error: new Error(tracksNotSameError),
                            diff: {
                              video: { expected: requireVideo ? 1 : 0, received: stream.getVideoTracks().length },
                              audio: { expected: requireAudio ? 1 : 0, received: stream.getAudioTracks().length }
                            }
                          }, self.MEDIA_ACCESS_FALLBACK_STATE.FALLBACKED, !!isScreenSharing, !!isAudioFallback, streamId);
                        }
                      
                        self._streams[isScreenSharing ? 'screenshare' : 'userMedia'] = {
                          stream: stream,
                          settings: settings.settings,
                          constraints: settings.getUserMediaSettings
                        };
                        self._muteStreams();
                        self._trigger('mediaAccessSuccess', stream, !!isScreenSharing, !!isAudioFallback, streamId);
                      };
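
                      // Illustrative usage sketch (not part of the SDK): roughly how the SDK's own
                      // getUserMedia() path is expected to wire the two handlers above, using the
                      // legacy navigator.getUserMedia() API (as this file does) and a hypothetical
                      // `parsed` result from _parseStreamSettings().
                      //
                      //   navigator.getUserMedia(parsed.getUserMediaSettings, function (stream) {
                      //     skylink._onStreamAccessSuccess(stream, parsed, false, false);
                      //   }, function (error) {
                      //     skylink._onStreamAccessError(error, parsed, false);
                      //   });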
                      
                      /**
                       * Function that handles the native <code>navigator.getUserMedia()</code> API failure callback result.
                       * @method _onStreamAccessError
                       * @private
                       * @for Skylink
                       * @since 0.6.15
                       */
                      Skylink.prototype._onStreamAccessError = function(error, settings, isScreenSharing) {
                        var self = this;
                      
                        if (!isScreenSharing && settings.settings.audio && settings.settings.video && self._audioFallback) {
                          log.debug('Falling back to retrieve audio-only Stream');
                      
                          self._trigger('mediaAccessFallback', {
                            error: error,
                            diff: null
                          }, self.MEDIA_ACCESS_FALLBACK_STATE.FALLBACKING, false, true);
                      
                          navigator.getUserMedia({
                            audio: true
                          }, function (stream) {
                            self._onStreamAccessSuccess(stream, settings, false, true);
                      
                          }, function (error) {
                            log.error('Failed falling back to retrieve audio-only Stream ->', error);
                      
                            self._trigger('mediaAccessError', error, false, true);
                            self._trigger('mediaAccessFallback', {
                              error: error,
                              diff: null
                            }, self.MEDIA_ACCESS_FALLBACK_STATE.ERROR, false, true);
                          });
                          return;
                        }
                      
                        log.error('Failed retrieving ' + (isScreenSharing ? 'screensharing' : 'camera') + ' Stream ->', error);
                      
                        self._trigger('mediaAccessError', error, !!isScreenSharing, false);
                      };
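
                      // Illustrative usage sketch (not part of the SDK): with `_audioFallback`
                      // enabled, a failed audio+video request retries with audio only, and the app
                      // can observe the retry via the `mediaAccessFallback` event. The handler
                      // signature shown is an assumption based on the trigger calls above.
                      //
                      //   skylink.on('mediaAccessFallback', function (error, state) {
                      //     // error.error -> the original getUserMedia() error (error.diff is null here)
                      //     // state       -> FALLBACKING, then FALLBACKED or ERROR
                      //   });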
                      
                      /**
                       * Function that handles the <code>RTCPeerConnection.onaddstream</code> remote MediaStream received.
                       * @method _onRemoteStreamAdded
                       * @private
                       * @for Skylink
                       * @since 0.5.2
                       */
                      Skylink.prototype._onRemoteStreamAdded = function(targetMid, stream, isScreenSharing) {
                        var self = this;
                      
                        if (!self._peerInformations[targetMid]) {
                          log.warn([targetMid, 'MediaStream', stream.id,
                            'Received remote stream when peer is not connected. ' +
                            'Ignoring stream ->'], stream);
                          return;
                        }
                      
                        /*if (!self._peerInformations[targetMid].settings.audio &&
                          !self._peerInformations[targetMid].settings.video && !isScreenSharing) {
                          log.log([targetMid, 'MediaStream', stream.id,
                            'Receive remote stream but ignoring stream as it is empty ->'
                            ], stream);
                          return;
                        }*/
                        log.log([targetMid, 'MediaStream', stream.id, 'Received remote stream ->'], stream);
                      
                        if (isScreenSharing) {
                          log.log([targetMid, 'MediaStream', stream.id, 'Peer is in a screensharing session with the user']);
                        }
                      
                        self._trigger('incomingStream', targetMid, stream, false, self.getPeerInfo(targetMid), isScreenSharing, stream.id || stream.label);
                        self._trigger('peerUpdated', targetMid, self.getPeerInfo(targetMid), false);
                      };
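
                      // Illustrative usage sketch (not part of the SDK): a typical consumer of the
                      // `incomingStream` event triggered above. `attachMediaStream` is the AdapterJS
                      // helper this SDK already depends on; the element id scheme is hypothetical.
                      //
                      //   skylink.on('incomingStream', function (peerId, stream, isSelf) {
                      //     if (!isSelf) {
                      //       attachMediaStream(document.getElementById('remote-' + peerId), stream);
                      //     }
                      //   });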
                      
                      /**
                       * Function that sets User's Stream to send to Peer connection.
                       * Priority for <code>shareScreen()</code> Stream over <code>getUserMedia()</code> Stream.
                       * @method _addLocalMediaStreams
                       * @private
                       * @for Skylink
                       * @since 0.5.2
                       */
                      Skylink.prototype._addLocalMediaStreams = function(peerId) {
                        var self = this;
                      
                        // NOTE ALEX: here we could do something smarter.
                        // A MediaStream is mainly a container; most of the info
                        // is attached to the tracks. We should iterate over the tracks and print.
                        try {
                          log.log([peerId, null, null, 'Adding local stream']);
                      
                          var pc = self._peerConnections[peerId];
                          var peerAgent = ((self._peerInformations[peerId] || {}).agent || {}).name || '';
                          var offerToReceiveAudio = !(!self._sdpSettings.connection.audio && peerId !== 'MCU');
                          var offerToReceiveVideo = !(!self._sdpSettings.connection.video && peerId !== 'MCU') &&
                            ((window.webrtcDetectedBrowser === 'edge' && peerAgent !== 'edge') ||
                            (['IE', 'safari'].indexOf(window.webrtcDetectedBrowser) > -1 && peerAgent === 'edge') ?
                            !!self._currentCodecSupport.video.h264 : true);
                      
                          if (pc) {
                            if (pc.signalingState !== self.PEER_CONNECTION_STATE.CLOSED) {
                              // Updates the streams accordingly
                              var updateStreamFn = function (updatedStream) {
                                var hasStream = false;
                      
                                // remove streams
                                var streams = pc.getLocalStreams();
                                for (var i = 0; i < streams.length; i++) {
                                  if (updatedStream !== null && streams[i].id === updatedStream.id) {
                                    hasStream = true;
                                    continue;
                                  }
                                  // try removeStream
                                  pc.removeStream(streams[i]);
                                }
                      
                                if (updatedStream !== null && !hasStream) {
                                  if (window.webrtcDetectedBrowser === 'edge' && (!offerToReceiveVideo || !offerToReceiveAudio)) {
                                    try {
                                      var cloneStream = updatedStream.clone();
                                      var tracks = cloneStream.getTracks();
                                      for (var t = 0; t < tracks.length; t++) {
                                        if (tracks[t].kind === 'video' ? !offerToReceiveVideo : !offerToReceiveAudio) {
                                          cloneStream.removeTrack(tracks[t]);
                                        } else {
                                          tracks[t].enabled = tracks[t].kind === 'audio' ? !self._streamsMutedSettings.audioMuted :
                                            !self._streamsMutedSettings.videoMuted;
                                        }
                                      }
                                      pc.addStream(cloneStream);
                                    } catch (e) {
                                      pc.addStream(updatedStream);
                                    }
                                  } else {
                                    pc.addStream(updatedStream);
                                  }
                                }
                              };
                      
                              if (self._streams.screenshare && self._streams.screenshare.stream) {
                                log.debug([peerId, 'MediaStream', null, 'Sending screen'], self._streams.screenshare.stream);
                      
                                updateStreamFn(self._streams.screenshare.stream);
                      
                              } else if (self._streams.userMedia && self._streams.userMedia.stream) {
                                log.debug([peerId, 'MediaStream', null, 'Sending stream'], self._streams.userMedia.stream);
                      
                                updateStreamFn(self._streams.userMedia.stream);
                      
                              } else {
                                log.warn([peerId, 'MediaStream', null, 'No media to send. Will only be receiving']);
                      
                                updateStreamFn(null);
                              }
                      
                            } else {
                              log.warn([peerId, 'MediaStream', null,
                                'Not adding any stream as signalingState is closed']);
                            }
                          } else {
                            log.warn([peerId, 'MediaStream', self._mediaStream,
                              'Not adding stream as the peer connection object does not exist']);
                          }
                        } catch (error) {
                          if ((error.message || '').indexOf('already added') > -1) {
                            log.warn([peerId, null, null, 'Not re-adding stream as LocalMediaStream is already added'], error);
                          } else {
                            // Fix errors thrown like NS_ERROR_UNEXPECTED
                            log.error([peerId, null, null, 'Failed adding local stream'], error);
                          }
                        }
                      };
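
                      // Illustrative usage sketch (not part of the SDK): _addLocalMediaStreams() is
                      // invoked per peer before (re)negotiation, e.g.
                      //
                      //   Object.keys(skylink._peerConnections).forEach(function (peerId) {
                      //     skylink._addLocalMediaStreams(peerId);
                      //   });
                      //   // Each connection then carries the screenshare Stream if present,
                      //   // otherwise the getUserMedia() Stream, otherwise it is receive-only.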
                      
                      /**
                       * Function that handles ended streams.
                       * @method _handleEndedStreams
                       * @private
                       * @for Skylink
                       * @since 0.6.16
                       */
                      Skylink.prototype._handleEndedStreams = function (peerId, checkStreamId) {
                        var self = this;
                        self._streamsSession[peerId] = self._streamsSession[peerId] || {};
                      
                        var renderEndedFn = function (streamId) {
                          var shouldTrigger = !!self._streamsSession[peerId][streamId];
                      
                          if (!checkStreamId && self._peerConnections[peerId] &&
                            self._peerConnections[peerId].signalingState !== self.PEER_CONNECTION_STATE.CLOSED) {
                            var streams = self._peerConnections[peerId].getRemoteStreams();
                      
                            for (var i = 0; i < streams.length; i++) {
                              if (streamId === (streams[i].id || streams[i].label)) {
                                shouldTrigger = false;
                                break;
                              }
                            }
                          }
                      
                          if (shouldTrigger) {
                            var peerInfo = clone(self.getPeerInfo(peerId));
                            peerInfo.settings.audio = clone(self._streamsSession[peerId][streamId].audio);
                            peerInfo.settings.video = clone(self._streamsSession[peerId][streamId].video);
                            var hasScreenshare = peerInfo.settings.video && typeof peerInfo.settings.video === 'object' &&
                              !!peerInfo.settings.video.screenshare;
                            self._streamsSession[peerId][streamId] = false;
                            self._trigger('streamEnded', peerId, peerInfo, false, hasScreenshare, streamId);
                          }
                        };
                      
                        if (checkStreamId) {
                          renderEndedFn(checkStreamId);
                        } else {
                          for (var prop in self._streamsSession[peerId]) {
                            if (self._streamsSession[peerId].hasOwnProperty(prop) && self._streamsSession[peerId][prop]) {
                              renderEndedFn(prop);
                            }
                          }
                        }
                      };
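
                      // Illustrative usage sketch (not part of the SDK): checking a single remote
                      // Stream versus sweeping every tracked session for a peer.
                      //
                      //   skylink._handleEndedStreams(peerId, streamId); // check one stream only
                      //   skylink._handleEndedStreams(peerId);           // sweep all tracked sessions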