- /* global APP */
- var StreamEventTypes = require("../../service/RTC/StreamEventTypes.js");
- var RTCEvents = require("../../service/RTC/RTCEvents");
- var RTCBrowserType = require("./RTCBrowserType");
-
- /**
- * Implements the 'onended' callback normally fired by WebRTC after the stream
- * is stopped. There is no such behaviour yet in FF, so we have to add it
- * ourselves.
- * @param localStream the LocalStream instance whose original WebRTC stream
- * will have 'onended' handling added.
- */
- function implementOnEndedHandling(localStream) {
- var stream = localStream.getOriginalStream();
- var originalStop = stream.stop;
- stream.stop = function () {
- originalStop.apply(stream);
- if (localStream.isActive()) {
- stream.ended = true;
- stream.onended();
- }
- };
- }
-
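- /**
- * Represents a local media stream captured by the application.
- * @param stream the underlying WebRTC MediaStream object.
- * @param type the media type, "audio" or "video".
- * @param eventEmitter the EventEmitter used to fire stream related events.
- * @param videoType the type of the video stream, e.g. "screen" for desktop
- * sharing.
- * @param isGUMStream whether the stream was obtained through getUserMedia;
- * defaults to true.
- */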
- function LocalStream(stream, type, eventEmitter, videoType, isGUMStream) {
- this.stream = stream;
- this.eventEmitter = eventEmitter;
- this.type = type;
- this.videoType = videoType;
- this.isGUMStream = isGUMStream !== false;
- var self = this;
- if (type === "audio") {
- this.getTracks = function () {
- return self.stream.getAudioTracks();
- };
- } else {
- this.getTracks = function () {
- return self.stream.getVideoTracks();
- };
- }
-
- APP.RTC.addMediaStreamInactiveHandler(
- this.stream,
- function () {
- self.streamEnded();
- });
-
- if (RTCBrowserType.isFirefox()) {
- implementOnEndedHandling(this);
- }
- }
-
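- /**
- * Notifies interested parties that this local stream has ended by emitting
- * StreamEventTypes.EVENT_TYPE_LOCAL_ENDED.
- */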
- LocalStream.prototype.streamEnded = function () {
- this.eventEmitter.emit(StreamEventTypes.EVENT_TYPE_LOCAL_ENDED, this);
- };
-
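- /**
- * Returns the underlying WebRTC MediaStream.
- */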
- LocalStream.prototype.getOriginalStream = function () {
- return this.stream;
- };
-
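- /**
- * Checks whether this is an audio stream.
- * @returns {boolean} true if the stream type is "audio".
- */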
- LocalStream.prototype.isAudioStream = function () {
- return this.type === "audio";
- };
-
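- /**
- * Mutes or unmutes this stream. When possible the tracks are simply
- * disabled/enabled; otherwise, on mute the stream is removed from the
- * session and stopped, and on unmute new media is re-acquired.
- * @param mute true to mute, false to unmute.
- */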
- LocalStream.prototype.setMute = function (mute) {
- var isAudio = this.isAudioStream();
- var eventType = isAudio ? RTCEvents.AUDIO_MUTE : RTCEvents.VIDEO_MUTE;
-
- if ((window.location.protocol !== "https:" && this.isGUMStream) ||
- (isAudio && this.isGUMStream) || this.videoType === "screen" ||
- // FIXME FF does not support 'removeStream' method used to mute
- RTCBrowserType.isFirefox()) {
-
- var tracks = this.getTracks();
- for (var idx = 0; idx < tracks.length; idx++) {
- tracks[idx].enabled = !mute;
- }
- this.eventEmitter.emit(eventType, mute);
- } else {
- if (mute) {
- APP.xmpp.removeStream(this.stream);
- APP.RTC.stopMediaStream(this.stream);
- this.eventEmitter.emit(eventType, true);
- } else {
- var self = this;
- APP.RTC.rtcUtils.obtainAudioAndVideoPermissions(
- (this.isAudioStream() ? ["audio"] : ["video"]),
- function (stream) {
- if (isAudio) {
- APP.RTC.changeLocalAudio(stream,
- function () {
- self.eventEmitter.emit(eventType, false);
- });
- } else {
- APP.RTC.changeLocalVideo(stream, false,
- function () {
- self.eventEmitter.emit(eventType, false);
- });
- }
- });
- }
- }
- };
-
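- /**
- * Checks whether this stream is currently muted.
- * @returns {boolean} true if the stream is inactive (video only) or if none
- * of its tracks are enabled.
- */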
- LocalStream.prototype.isMuted = function () {
- var tracks = [];
- if (this.isAudioStream()) {
- tracks = this.stream.getAudioTracks();
- } else {
- if (!this.isActive())
- return true;
- tracks = this.stream.getVideoTracks();
- }
- for (var idx = 0; idx < tracks.length; idx++) {
- if (tracks[idx].enabled)
- return false;
- }
- return true;
- };
-
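- /**
- * Returns the id of the first track of the underlying MediaStream.
- */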
- LocalStream.prototype.getId = function () {
- return this.stream.getTracks()[0].id;
- };
-
- /**
- * Checks whether the MediaStream is active (not ended).
- * @returns {boolean} whether MediaStream is active.
- */
- LocalStream.prototype.isActive = function () {
- return !this.stream.ended;
- };
-
- module.exports = LocalStream;