1(function(e){if("function"==typeof bootstrap)bootstrap("localmedia",e);else if("object"==typeof exports)module.exports=e();else if("function"==typeof define&&define.amd)define(e);else if("undefined"!=typeof ses){if(!ses.ok())return;ses.makeLocalMedia=e}else"undefined"!=typeof window?window.LocalMedia=e():global.LocalMedia=e()})(function(){var define,ses,bootstrap,module,exports;
2return (function(e,t,n){function i(n,s){if(!t[n]){if(!e[n]){var o=typeof require=="function"&&require;if(!s&&o)return o(n,!0);if(r)return r(n,!0);throw new Error("Cannot find module '"+n+"'")}var u=t[n]={exports:{}};e[n][0].call(u.exports,function(t){var r=e[n][1][t];return i(r?r:t)},u,u.exports)}return t[n].exports}var r=typeof require=="function"&&require;for(var s=0;s<n.length;s++)i(n[s]);return i})({1:[function(require,module,exports){
3var util = require('util');
4var hark = require('hark');
5var webrtcSupport = require('webrtcsupport');
6var getUserMedia = require('getusermedia');
7var getScreenMedia = require('getscreenmedia');
8var WildEmitter = require('wildemitter');
9var GainController = require('mediastream-gain');
10var mockconsole = require('mockconsole');
11
12
13function LocalMedia(opts) {
14 WildEmitter.call(this);
15
16 var config = this.config = {
17 autoAdjustMic: false,
18 detectSpeakingEvents: false,
19 audioFallback: false,
20 media: {
21 audio: true,
22 video: true
23 },
24 logger: mockconsole
25 };
26
27 var item;
28 for (item in opts) {
29 this.config[item] = opts[item];
30 }
31
32 this.logger = config.logger;
33 this._log = this.logger.log.bind(this.logger, 'LocalMedia:');
34 this._logerror = this.logger.error.bind(this.logger, 'LocalMedia:');
35
36 this.screenSharingSupport = webrtcSupport.supportScreenSharing;
37
38 this.localStreams = [];
39 this.localScreens = [];
40
41 if (!webrtcSupport.supportGetUserMedia) {
42 this._logerror('Your browser does not support local media capture.');
43 }
44}
45
46util.inherits(LocalMedia, WildEmitter);
47
48
49LocalMedia.prototype.start = function (mediaConstraints, cb) {
50 var self = this;
51 var constraints = mediaConstraints || this.config.media;
52
53 getUserMedia(constraints, function (err, stream) {
54
55 if (!err) {
56 if (constraints.audio && self.config.detectSpeakingEvents) {
57 self.setupAudioMonitor(stream, self.config.harkOptions);
58 }
59 self.localStreams.push(stream);
60
61 if (self.config.autoAdjustMic) {
62 self.gainController = new GainController(stream);
63 // start out somewhat muted if we can track audio
64 self.setMicIfEnabled(0.5);
65 }
66
67 // TODO: might need to migrate to the video tracks onended
68 // FIXME: firefox does not seem to trigger this...
69 stream.onended = function () {
70 /*
71 var idx = self.localStreams.indexOf(stream);
72 if (idx > -1) {
73 self.localScreens.splice(idx, 1);
74 }
75 self.emit('localStreamStopped', stream);
76 */
77 };
78
79 self.emit('localStream', stream);
80 } else {
81 // Fallback for users without a camera
82 if (self.config.audioFallback && err.name === 'DevicesNotFoundError' && constraints.video !== false) {
83 constraints.video = false;
84 self.start(constraints, cb);
85 return;
86 }
87 }
88 if (cb) {
89 return cb(err, stream);
90 }
91 });
92};
93
94LocalMedia.prototype.stop = function (stream) {
95 var self = this;
96 // FIXME: duplicates cleanup code until fixed in FF
97 if (stream) {
98 stream.getTracks().forEach(function (track) { track.stop(); });
99 var idx = self.localStreams.indexOf(stream);
100 if (idx > -1) {
101 self.emit('localStreamStopped', stream);
102 self.localStreams.splice(idx, 1);
103 } else {
104 idx = self.localScreens.indexOf(stream);
105 if (idx > -1) {
106 self.emit('localScreenStopped', stream);
107 self.localScreens.splice(idx, 1);
108 }
109 }
110 } else {
111 this.stopStreams();
112 this.stopScreenShare();
113 }
114};
115
116LocalMedia.prototype.stopStreams = function () {
117 var self = this;
118 if (this.audioMonitor) {
119 this.audioMonitor.stop();
120 delete this.audioMonitor;
121 }
122 this.localStreams.forEach(function (stream) {
123 stream.getTracks().forEach(function (track) { track.stop(); });
124 self.emit('localStreamStopped', stream);
125 });
126 this.localStreams = [];
127};
128
129LocalMedia.prototype.startScreenShare = function (cb) {
130 var self = this;
131 getScreenMedia(function (err, stream) {
132 if (!err) {
133 self.localScreens.push(stream);
134
135 // TODO: might need to migrate to the video tracks onended
136 // Firefox does not support .onended but it does not support
137 // screensharing either
138 stream.onended = function () {
139 var idx = self.localScreens.indexOf(stream);
140 if (idx > -1) {
141 self.localScreens.splice(idx, 1);
142 }
143 self.emit('localScreenStopped', stream);
144 };
145 self.emit('localScreen', stream);
146 }
147
148 // invoke the callback, if one was provided
149 if (cb) {
150 return cb(err, stream);
151 }
152 });
153};
154
155LocalMedia.prototype.stopScreenShare = function (stream) {
156 var self = this;
157 if (stream) {
158 stream.getTracks().forEach(function (track) { track.stop(); });
159 this.emit('localScreenStopped', stream);
160 } else {
161 this.localScreens.forEach(function (stream) {
162 stream.getTracks().forEach(function (track) { track.stop(); });
163 self.emit('localScreenStopped', stream);
164 });
165 this.localScreens = [];
166 }
167};
168
169// Audio controls
170LocalMedia.prototype.mute = function () {
171 this._audioEnabled(false);
172 this.hardMuted = true;
173 this.emit('audioOff');
174};
175
176LocalMedia.prototype.unmute = function () {
177 this._audioEnabled(true);
178 this.hardMuted = false;
179 this.emit('audioOn');
180};
181
182LocalMedia.prototype.setupAudioMonitor = function (stream, harkOptions) {
183 this._log('Setup audio');
184 var audio = this.audioMonitor = hark(stream, harkOptions);
185 var self = this;
186 var timeout;
187
188 audio.on('speaking', function () {
189 self.emit('speaking');
190 if (self.hardMuted) {
191 return;
192 }
193 self.setMicIfEnabled(1);
194 });
195
196 audio.on('stopped_speaking', function () {
197 if (timeout) {
198 clearTimeout(timeout);
199 }
200
201 timeout = setTimeout(function () {
202 self.emit('stoppedSpeaking');
203 if (self.hardMuted) {
204 return;
205 }
206 self.setMicIfEnabled(0.5);
207 }, 1000);
208 });
209 audio.on('volume_change', function (volume, threshold) {
210 self.emit('volumeChange', volume, threshold);
211 });
212};
213
214 // We do this as a separate method in order to
215// still leave the "setMicVolume" as a working
216// method.
217LocalMedia.prototype.setMicIfEnabled = function (volume) {
218 if (!this.config.autoAdjustMic) {
219 return;
220 }
221 this.gainController.setGain(volume);
222};
223
224// Video controls
225LocalMedia.prototype.pauseVideo = function () {
226 this._videoEnabled(false);
227 this.emit('videoOff');
228};
229LocalMedia.prototype.resumeVideo = function () {
230 this._videoEnabled(true);
231 this.emit('videoOn');
232};
233
234// Combined controls
235LocalMedia.prototype.pause = function () {
236 this.mute();
237 this.pauseVideo();
238};
239LocalMedia.prototype.resume = function () {
240 this.unmute();
241 this.resumeVideo();
242};
243
244// Internal methods for enabling/disabling audio/video
245LocalMedia.prototype._audioEnabled = function (bool) {
246 // workaround for a chrome 27 bug where disabling tracks
247 // doesn't seem to work (works in canary, remove once fixed)
248 this.setMicIfEnabled(bool ? 1 : 0);
249 this.localStreams.forEach(function (stream) {
250 stream.getAudioTracks().forEach(function (track) {
251 track.enabled = !!bool;
252 });
253 });
254};
255LocalMedia.prototype._videoEnabled = function (bool) {
256 this.localStreams.forEach(function (stream) {
257 stream.getVideoTracks().forEach(function (track) {
258 track.enabled = !!bool;
259 });
260 });
261};
262
263// check if all audio streams are enabled
264LocalMedia.prototype.isAudioEnabled = function () {
265 var enabled = true;
266 this.localStreams.forEach(function (stream) {
267 stream.getAudioTracks().forEach(function (track) {
268 enabled = enabled && track.enabled;
269 });
270 });
271 return enabled;
272};
273
274// check if all video streams are enabled
275LocalMedia.prototype.isVideoEnabled = function () {
276 var enabled = true;
277 this.localStreams.forEach(function (stream) {
278 stream.getVideoTracks().forEach(function (track) {
279 enabled = enabled && track.enabled;
280 });
281 });
282 return enabled;
283};
284
285// Backwards Compat
286LocalMedia.prototype.startLocalMedia = LocalMedia.prototype.start;
287LocalMedia.prototype.stopLocalMedia = LocalMedia.prototype.stop;
288
289// fallback for old .localStream behaviour
290Object.defineProperty(LocalMedia.prototype, 'localStream', {
291 get: function () {
292 return this.localStreams.length > 0 ? this.localStreams[0] : null;
293 }
294});
295// fallback for old .localScreen behaviour
296Object.defineProperty(LocalMedia.prototype, 'localScreen', {
297 get: function () {
298 return this.localScreens.length > 0 ? this.localScreens[0] : null;
299 }
300});
301
302module.exports = LocalMedia;
303
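// Usage sketch for the LocalMedia API above -- an illustrative, untested example
// (assumes the bundle is loaded so the constructor is reachable, e.g. as window.LocalMedia):
//
//   var media = new LocalMedia({ detectSpeakingEvents: true, audioFallback: true });
//   media.on('localStream', function (stream) {
//       // attach the MediaStream to a <video> element or a peer connection
//   });
//   media.on('speaking', function () { console.log('mic activity'); });
//   media.start();      // no constraints given, so config.media ({audio: true, video: true}) is used
//   // ... later:
//   media.stop();       // stops all local streams and screen shares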
304},{"getscreenmedia":6,"getusermedia":5,"hark":3,"mediastream-gain":8,"mockconsole":9,"util":2,"webrtcsupport":4,"wildemitter":7}],2:[function(require,module,exports){
305var events = require('events');
306
307exports.isArray = isArray;
308exports.isDate = function(obj){return Object.prototype.toString.call(obj) === '[object Date]'};
309exports.isRegExp = function(obj){return Object.prototype.toString.call(obj) === '[object RegExp]'};
310
311
312exports.print = function () {};
313exports.puts = function () {};
314exports.debug = function() {};
315
316exports.inspect = function(obj, showHidden, depth, colors) {
317 var seen = [];
318
319 var stylize = function(str, styleType) {
320 // http://en.wikipedia.org/wiki/ANSI_escape_code#graphics
321 var styles =
322 { 'bold' : [1, 22],
323 'italic' : [3, 23],
324 'underline' : [4, 24],
325 'inverse' : [7, 27],
326 'white' : [37, 39],
327 'grey' : [90, 39],
328 'black' : [30, 39],
329 'blue' : [34, 39],
330 'cyan' : [36, 39],
331 'green' : [32, 39],
332 'magenta' : [35, 39],
333 'red' : [31, 39],
334 'yellow' : [33, 39] };
335
336 var style =
337 { 'special': 'cyan',
338 'number': 'blue',
339 'boolean': 'yellow',
340 'undefined': 'grey',
341 'null': 'bold',
342 'string': 'green',
343 'date': 'magenta',
344 // "name": intentionally not styling
345 'regexp': 'red' }[styleType];
346
347 if (style) {
348 return '\u001b[' + styles[style][0] + 'm' + str +
349 '\u001b[' + styles[style][1] + 'm';
350 } else {
351 return str;
352 }
353 };
354 if (! colors) {
355 stylize = function(str, styleType) { return str; };
356 }
357
358 function format(value, recurseTimes) {
359 // Provide a hook for user-specified inspect functions.
360 // Check that value is an object with an inspect function on it
361 if (value && typeof value.inspect === 'function' &&
362 // Filter out the util module, it's inspect function is special
363 value !== exports &&
364 // Also filter out any prototype objects using the circular check.
365 !(value.constructor && value.constructor.prototype === value)) {
366 return value.inspect(recurseTimes);
367 }
368
369 // Primitive types cannot have properties
370 switch (typeof value) {
371 case 'undefined':
372 return stylize('undefined', 'undefined');
373
374 case 'string':
375 var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '')
376 .replace(/'/g, "\\'")
377 .replace(/\\"/g, '"') + '\'';
378 return stylize(simple, 'string');
379
380 case 'number':
381 return stylize('' + value, 'number');
382
383 case 'boolean':
384 return stylize('' + value, 'boolean');
385 }
386 // For some reason typeof null is "object", so special case here.
387 if (value === null) {
388 return stylize('null', 'null');
389 }
390
391 // Look up the keys of the object.
392 var visible_keys = Object_keys(value);
393 var keys = showHidden ? Object_getOwnPropertyNames(value) : visible_keys;
394
395 // Functions without properties can be shortcutted.
396 if (typeof value === 'function' && keys.length === 0) {
397 if (isRegExp(value)) {
398 return stylize('' + value, 'regexp');
399 } else {
400 var name = value.name ? ': ' + value.name : '';
401 return stylize('[Function' + name + ']', 'special');
402 }
403 }
404
405 // Dates without properties can be shortcutted
406 if (isDate(value) && keys.length === 0) {
407 return stylize(value.toUTCString(), 'date');
408 }
409
410 var base, type, braces;
411 // Determine the object type
412 if (isArray(value)) {
413 type = 'Array';
414 braces = ['[', ']'];
415 } else {
416 type = 'Object';
417 braces = ['{', '}'];
418 }
419
420 // Make functions say that they are functions
421 if (typeof value === 'function') {
422 var n = value.name ? ': ' + value.name : '';
423 base = (isRegExp(value)) ? ' ' + value : ' [Function' + n + ']';
424 } else {
425 base = '';
426 }
427
428 // Make dates with properties first say the date
429 if (isDate(value)) {
430 base = ' ' + value.toUTCString();
431 }
432
433 if (keys.length === 0) {
434 return braces[0] + base + braces[1];
435 }
436
437 if (recurseTimes < 0) {
438 if (isRegExp(value)) {
439 return stylize('' + value, 'regexp');
440 } else {
441 return stylize('[Object]', 'special');
442 }
443 }
444
445 seen.push(value);
446
447 var output = keys.map(function(key) {
448 var name, str;
449 if (value.__lookupGetter__) {
450 if (value.__lookupGetter__(key)) {
451 if (value.__lookupSetter__(key)) {
452 str = stylize('[Getter/Setter]', 'special');
453 } else {
454 str = stylize('[Getter]', 'special');
455 }
456 } else {
457 if (value.__lookupSetter__(key)) {
458 str = stylize('[Setter]', 'special');
459 }
460 }
461 }
462 if (visible_keys.indexOf(key) < 0) {
463 name = '[' + key + ']';
464 }
465 if (!str) {
466 if (seen.indexOf(value[key]) < 0) {
467 if (recurseTimes === null) {
468 str = format(value[key]);
469 } else {
470 str = format(value[key], recurseTimes - 1);
471 }
472 if (str.indexOf('\n') > -1) {
473 if (isArray(value)) {
474 str = str.split('\n').map(function(line) {
475 return ' ' + line;
476 }).join('\n').substr(2);
477 } else {
478 str = '\n' + str.split('\n').map(function(line) {
479 return ' ' + line;
480 }).join('\n');
481 }
482 }
483 } else {
484 str = stylize('[Circular]', 'special');
485 }
486 }
487 if (typeof name === 'undefined') {
488 if (type === 'Array' && key.match(/^\d+$/)) {
489 return str;
490 }
491 name = JSON.stringify('' + key);
492 if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) {
493 name = name.substr(1, name.length - 2);
494 name = stylize(name, 'name');
495 } else {
496 name = name.replace(/'/g, "\\'")
497 .replace(/\\"/g, '"')
498 .replace(/(^"|"$)/g, "'");
499 name = stylize(name, 'string');
500 }
501 }
502
503 return name + ': ' + str;
504 });
505
506 seen.pop();
507
508 var numLinesEst = 0;
509 var length = output.reduce(function(prev, cur) {
510 numLinesEst++;
511 if (cur.indexOf('\n') >= 0) numLinesEst++;
512 return prev + cur.length + 1;
513 }, 0);
514
515 if (length > 50) {
516 output = braces[0] +
517 (base === '' ? '' : base + '\n ') +
518 ' ' +
519 output.join(',\n ') +
520 ' ' +
521 braces[1];
522
523 } else {
524 output = braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1];
525 }
526
527 return output;
528 }
529 return format(obj, (typeof depth === 'undefined' ? 2 : depth));
530};
531
532
533function isArray(ar) {
534 return Array.isArray(ar) ||
535 (typeof ar === 'object' && Object.prototype.toString.call(ar) === '[object Array]');
536}
537
538
539function isRegExp(re) {
540 return typeof re === 'object' && Object.prototype.toString.call(re) === '[object RegExp]';
541}
542
543
544function isDate(d) {
545 return typeof d === 'object' && Object.prototype.toString.call(d) === '[object Date]';
546}
547
548function pad(n) {
549 return n < 10 ? '0' + n.toString(10) : n.toString(10);
550}
551
552var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
553 'Oct', 'Nov', 'Dec'];
554
555// 26 Feb 16:19:34
556function timestamp() {
557 var d = new Date();
558 var time = [pad(d.getHours()),
559 pad(d.getMinutes()),
560 pad(d.getSeconds())].join(':');
561 return [d.getDate(), months[d.getMonth()], time].join(' ');
562}
563
564exports.log = function (msg) {};
565
566exports.pump = null;
567
568var Object_keys = Object.keys || function (obj) {
569 var res = [];
570 for (var key in obj) res.push(key);
571 return res;
572};
573
574var Object_getOwnPropertyNames = Object.getOwnPropertyNames || function (obj) {
575 var res = [];
576 for (var key in obj) {
577 if (Object.hasOwnProperty.call(obj, key)) res.push(key);
578 }
579 return res;
580};
581
582var Object_create = Object.create || function (prototype, properties) {
583 // from es5-shim
584 var object;
585 if (prototype === null) {
586 object = { '__proto__' : null };
587 }
588 else {
589 if (typeof prototype !== 'object') {
590 throw new TypeError(
591 'typeof prototype[' + (typeof prototype) + '] != \'object\''
592 );
593 }
594 var Type = function () {};
595 Type.prototype = prototype;
596 object = new Type();
597 object.__proto__ = prototype;
598 }
599 if (typeof properties !== 'undefined' && Object.defineProperties) {
600 Object.defineProperties(object, properties);
601 }
602 return object;
603};
604
605exports.inherits = function(ctor, superCtor) {
606 ctor.super_ = superCtor;
607 ctor.prototype = Object_create(superCtor.prototype, {
608 constructor: {
609 value: ctor,
610 enumerable: false,
611 writable: true,
612 configurable: true
613 }
614 });
615};
616
617var formatRegExp = /%[sdj%]/g;
618exports.format = function(f) {
619 if (typeof f !== 'string') {
620 var objects = [];
621 for (var i = 0; i < arguments.length; i++) {
622 objects.push(exports.inspect(arguments[i]));
623 }
624 return objects.join(' ');
625 }
626
627 var i = 1;
628 var args = arguments;
629 var len = args.length;
630 var str = String(f).replace(formatRegExp, function(x) {
631 if (x === '%%') return '%';
632 if (i >= len) return x;
633 switch (x) {
634 case '%s': return String(args[i++]);
635 case '%d': return Number(args[i++]);
636 case '%j': return JSON.stringify(args[i++]);
637 default:
638 return x;
639 }
640 });
641 for(var x = args[i]; i < len; x = args[++i]){
642 if (x === null || typeof x !== 'object') {
643 str += ' ' + x;
644 } else {
645 str += ' ' + exports.inspect(x);
646 }
647 }
648 return str;
649};
650
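// Illustrative example of the format() shim above:
//
//   util.format('%s spoke at %d dB', 'alice', -42);   // -> 'alice spoke at -42 dB'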
651},{"events":10}],4:[function(require,module,exports){
652// created by @HenrikJoreteg
653var prefix;
654var version;
655
656if (window.mozRTCPeerConnection || navigator.mozGetUserMedia) {
657 prefix = 'moz';
658 version = parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
659} else if (window.webkitRTCPeerConnection || navigator.webkitGetUserMedia) {
660 prefix = 'webkit';
661 version = navigator.userAgent.match(/Chrom(e|ium)/) && parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10);
662}
663
664var PC = window.RTCPeerConnection || window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
665var IceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate;
666var SessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription;
667var MediaStream = window.webkitMediaStream || window.MediaStream;
668var screenSharing = window.location.protocol === 'https:' &&
669 ((prefix === 'webkit' && version >= 26) ||
670 (prefix === 'moz' && version >= 33))
671var AudioContext = window.AudioContext || window.webkitAudioContext;
672var videoEl = document.createElement('video');
673var supportVp8 = videoEl && videoEl.canPlayType && videoEl.canPlayType('video/webm; codecs="vp8", vorbis') === "probably";
674var getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.msGetUserMedia || navigator.mozGetUserMedia;
675
676// export support flags and constructors
677module.exports = {
678 prefix: prefix,
679 browserVersion: version,
680 support: !!PC && !!getUserMedia,
681 // new support style
682 supportRTCPeerConnection: !!PC,
683 supportVp8: supportVp8,
684 supportGetUserMedia: !!getUserMedia,
685 supportDataChannel: !!(PC && PC.prototype && PC.prototype.createDataChannel),
686 supportWebAudio: !!(AudioContext && AudioContext.prototype.createMediaStreamSource),
687 supportMediaStream: !!(MediaStream && MediaStream.prototype.removeTrack),
688 supportScreenSharing: !!screenSharing,
689 // constructors
690 AudioContext: AudioContext,
691 PeerConnection: PC,
692 SessionDescription: SessionDescription,
693 IceCandidate: IceCandidate,
694 MediaStream: MediaStream,
695 getUserMedia: getUserMedia
696};
697
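// Illustrative check against the support flags exported above (not part of the module itself):
//
//   var webrtcsupport = require('webrtcsupport');
//   if (!webrtcsupport.supportGetUserMedia) {
//       console.log('local media capture is not available in this browser');
//   }
//   if (webrtcsupport.supportScreenSharing) {
//       // safe to offer a "share screen" option
//   }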
698},{}],7:[function(require,module,exports){
699/*
700WildEmitter.js is a slim little event emitter by @henrikjoreteg largely based
701on @visionmedia's Emitter from UI Kit.
702
703Why? I wanted it standalone.
704
705I also wanted support for wildcard emitters like this:
706
707emitter.on('*', function (eventName, other, event, payloads) {
708
709});
710
711emitter.on('somenamespace*', function (eventName, payloads) {
712
713});
714
715Please note that callbacks triggered by wildcard registered events also get
716the event name as the first argument.
717*/
718
719module.exports = WildEmitter;
720
721function WildEmitter() { }
722
723WildEmitter.mixin = function (constructor) {
724 var prototype = constructor.prototype || constructor;
725
726 prototype.isWildEmitter = true;
727
728 // Listen on the given `event` with `fn`. Store a group name if present.
729 prototype.on = function (event, groupName, fn) {
730 this.callbacks = this.callbacks || {};
731 var hasGroup = (arguments.length === 3),
732 group = hasGroup ? arguments[1] : undefined,
733 func = hasGroup ? arguments[2] : arguments[1];
734 func._groupName = group;
735 (this.callbacks[event] = this.callbacks[event] || []).push(func);
736 return this;
737 };
738
739 // Adds an `event` listener that will be invoked a single
740 // time then automatically removed.
741 prototype.once = function (event, groupName, fn) {
742 var self = this,
743 hasGroup = (arguments.length === 3),
744 group = hasGroup ? arguments[1] : undefined,
745 func = hasGroup ? arguments[2] : arguments[1];
746 function on() {
747 self.off(event, on);
748 func.apply(this, arguments);
749 }
750 this.on(event, group, on);
751 return this;
752 };
753
754 // Unbinds an entire group
755 prototype.releaseGroup = function (groupName) {
756 this.callbacks = this.callbacks || {};
757 var item, i, len, handlers;
758 for (item in this.callbacks) {
759 handlers = this.callbacks[item];
760 for (i = 0, len = handlers.length; i < len; i++) {
761 if (handlers[i]._groupName === groupName) {
762 //console.log('removing');
763 // remove it and shorten the array we're looping through
764 handlers.splice(i, 1);
765 i--;
766 len--;
767 }
768 }
769 }
770 return this;
771 };
772
773 // Remove the given callback for `event` or all
774 // registered callbacks.
775 prototype.off = function (event, fn) {
776 this.callbacks = this.callbacks || {};
777 var callbacks = this.callbacks[event],
778 i;
779
780 if (!callbacks) return this;
781
782 // remove all handlers
783 if (arguments.length === 1) {
784 delete this.callbacks[event];
785 return this;
786 }
787
788 // remove specific handler
789 i = callbacks.indexOf(fn);
790 callbacks.splice(i, 1);
791 if (callbacks.length === 0) {
792 delete this.callbacks[event];
793 }
794 return this;
795 };
796
797 // Emit `event` with the given args.
798 // also calls any `*` handlers
799 prototype.emit = function (event) {
800 this.callbacks = this.callbacks || {};
801 var args = [].slice.call(arguments, 1),
802 callbacks = this.callbacks[event],
803 specialCallbacks = this.getWildcardCallbacks(event),
804 i,
805 len,
806 item,
807 listeners;
808
809 if (callbacks) {
810 listeners = callbacks.slice();
811 for (i = 0, len = listeners.length; i < len; ++i) {
812 if (!listeners[i]) {
813 break;
814 }
815 listeners[i].apply(this, args);
816 }
817 }
818
819 if (specialCallbacks) {
820 len = specialCallbacks.length;
821 listeners = specialCallbacks.slice();
822 for (i = 0, len = listeners.length; i < len; ++i) {
823 if (!listeners[i]) {
824 break;
825 }
826 listeners[i].apply(this, [event].concat(args));
827 }
828 }
829
830 return this;
831 };
832
833 // Helper for finding special wildcard event handlers that match the event
834 prototype.getWildcardCallbacks = function (eventName) {
835 this.callbacks = this.callbacks || {};
836 var item,
837 split,
838 result = [];
839
840 for (item in this.callbacks) {
841 split = item.split('*');
842 if (item === '*' || (split.length === 2 && eventName.slice(0, split[0].length) === split[0])) {
843 result = result.concat(this.callbacks[item]);
844 }
845 }
846 return result;
847 };
848
849};
850
851WildEmitter.mixin(WildEmitter);
852
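// Illustrative sketch of mixing the emitter above into a custom constructor (hypothetical `Thing`):
//
//   function Thing() { WildEmitter.call(this); }
//   WildEmitter.mixin(Thing);
//
//   var t = new Thing();
//   t.on('change:*', function (eventName, payload) {
//       // wildcard handlers receive the event name as their first argument
//   });
//   t.emit('change:name', 'new value');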
853},{}],9:[function(require,module,exports){
854var methods = "assert,count,debug,dir,dirxml,error,exception,group,groupCollapsed,groupEnd,info,log,markTimeline,profile,profileEnd,time,timeEnd,trace,warn".split(",");
855var l = methods.length;
856var fn = function () {};
857var mockconsole = {};
858
859while (l--) {
860 mockconsole[methods[l]] = fn;
861}
862
863module.exports = mockconsole;
864
865},{}],11:[function(require,module,exports){
866// shim for using process in browser
867
868var process = module.exports = {};
869
870process.nextTick = (function () {
871 var canSetImmediate = typeof window !== 'undefined'
872 && window.setImmediate;
873 var canPost = typeof window !== 'undefined'
874 && window.postMessage && window.addEventListener
875 ;
876
877 if (canSetImmediate) {
878 return function (f) { return window.setImmediate(f) };
879 }
880
881 if (canPost) {
882 var queue = [];
883 window.addEventListener('message', function (ev) {
884 var source = ev.source;
885 if ((source === window || source === null) && ev.data === 'process-tick') {
886 ev.stopPropagation();
887 if (queue.length > 0) {
888 var fn = queue.shift();
889 fn();
890 }
891 }
892 }, true);
893
894 return function nextTick(fn) {
895 queue.push(fn);
896 window.postMessage('process-tick', '*');
897 };
898 }
899
900 return function nextTick(fn) {
901 setTimeout(fn, 0);
902 };
903})();
904
905process.title = 'browser';
906process.browser = true;
907process.env = {};
908process.argv = [];
909
910process.binding = function (name) {
911 throw new Error('process.binding is not supported');
912}
913
914// TODO(shtylman)
915process.cwd = function () { return '/' };
916process.chdir = function (dir) {
917 throw new Error('process.chdir is not supported');
918};
919
920},{}],10:[function(require,module,exports){
921var process=require("__browserify_process");if (!process.EventEmitter) process.EventEmitter = function () {};
922
923var EventEmitter = exports.EventEmitter = process.EventEmitter;
924var isArray = typeof Array.isArray === 'function'
925 ? Array.isArray
926 : function (xs) {
927 return Object.prototype.toString.call(xs) === '[object Array]'
928 }
929;
930function indexOf (xs, x) {
931 if (xs.indexOf) return xs.indexOf(x);
932 for (var i = 0; i < xs.length; i++) {
933 if (x === xs[i]) return i;
934 }
935 return -1;
936}
937
938// By default EventEmitters will print a warning if more than
939// 10 listeners are added to it. This is a useful default which
940// helps finding memory leaks.
941//
942// Obviously not all Emitters should be limited to 10. This function allows
943// that to be increased. Set to zero for unlimited.
944var defaultMaxListeners = 10;
945EventEmitter.prototype.setMaxListeners = function(n) {
946 if (!this._events) this._events = {};
947 this._events.maxListeners = n;
948};
949
950
951EventEmitter.prototype.emit = function(type) {
952 // If there is no 'error' event listener then throw.
953 if (type === 'error') {
954 if (!this._events || !this._events.error ||
955 (isArray(this._events.error) && !this._events.error.length))
956 {
957 if (arguments[1] instanceof Error) {
958 throw arguments[1]; // Unhandled 'error' event
959 } else {
960 throw new Error("Uncaught, unspecified 'error' event.");
961 }
962 return false;
963 }
964 }
965
966 if (!this._events) return false;
967 var handler = this._events[type];
968 if (!handler) return false;
969
970 if (typeof handler == 'function') {
971 switch (arguments.length) {
972 // fast cases
973 case 1:
974 handler.call(this);
975 break;
976 case 2:
977 handler.call(this, arguments[1]);
978 break;
979 case 3:
980 handler.call(this, arguments[1], arguments[2]);
981 break;
982 // slower
983 default:
984 var args = Array.prototype.slice.call(arguments, 1);
985 handler.apply(this, args);
986 }
987 return true;
988
989 } else if (isArray(handler)) {
990 var args = Array.prototype.slice.call(arguments, 1);
991
992 var listeners = handler.slice();
993 for (var i = 0, l = listeners.length; i < l; i++) {
994 listeners[i].apply(this, args);
995 }
996 return true;
997
998 } else {
999 return false;
1000 }
1001};
1002
1003// EventEmitter is defined in src/node_events.cc
1004// EventEmitter.prototype.emit() is also defined there.
1005EventEmitter.prototype.addListener = function(type, listener) {
1006 if ('function' !== typeof listener) {
1007 throw new Error('addListener only takes instances of Function');
1008 }
1009
1010 if (!this._events) this._events = {};
1011
1012 // To avoid recursion in the case that type == "newListener"! Before
1013 // adding it to the listeners, first emit "newListener".
1014 this.emit('newListener', type, listener);
1015
1016 if (!this._events[type]) {
1017 // Optimize the case of one listener. Don't need the extra array object.
1018 this._events[type] = listener;
1019 } else if (isArray(this._events[type])) {
1020
1021 // Check for listener leak
1022 if (!this._events[type].warned) {
1023 var m;
1024 if (this._events.maxListeners !== undefined) {
1025 m = this._events.maxListeners;
1026 } else {
1027 m = defaultMaxListeners;
1028 }
1029
1030 if (m && m > 0 && this._events[type].length > m) {
1031 this._events[type].warned = true;
1032 console.error('(node) warning: possible EventEmitter memory ' +
1033 'leak detected. %d listeners added. ' +
1034 'Use emitter.setMaxListeners() to increase limit.',
1035 this._events[type].length);
1036 console.trace();
1037 }
1038 }
1039
1040 // If we've already got an array, just append.
1041 this._events[type].push(listener);
1042 } else {
1043 // Adding the second element, need to change to array.
1044 this._events[type] = [this._events[type], listener];
1045 }
1046
1047 return this;
1048};
1049
1050EventEmitter.prototype.on = EventEmitter.prototype.addListener;
1051
1052EventEmitter.prototype.once = function(type, listener) {
1053 var self = this;
1054 self.on(type, function g() {
1055 self.removeListener(type, g);
1056 listener.apply(this, arguments);
1057 });
1058
1059 return this;
1060};
1061
1062EventEmitter.prototype.removeListener = function(type, listener) {
1063 if ('function' !== typeof listener) {
1064 throw new Error('removeListener only takes instances of Function');
1065 }
1066
1067 // does not use listeners(), so no side effect of creating _events[type]
1068 if (!this._events || !this._events[type]) return this;
1069
1070 var list = this._events[type];
1071
1072 if (isArray(list)) {
1073 var i = indexOf(list, listener);
1074 if (i < 0) return this;
1075 list.splice(i, 1);
1076 if (list.length == 0)
1077 delete this._events[type];
1078 } else if (this._events[type] === listener) {
1079 delete this._events[type];
1080 }
1081
1082 return this;
1083};
1084
1085EventEmitter.prototype.removeAllListeners = function(type) {
1086 if (arguments.length === 0) {
1087 this._events = {};
1088 return this;
1089 }
1090
1091 // does not use listeners(), so no side effect of creating _events[type]
1092 if (type && this._events && this._events[type]) this._events[type] = null;
1093 return this;
1094};
1095
1096EventEmitter.prototype.listeners = function(type) {
1097 if (!this._events) this._events = {};
1098 if (!this._events[type]) this._events[type] = [];
1099 if (!isArray(this._events[type])) {
1100 this._events[type] = [this._events[type]];
1101 }
1102 return this._events[type];
1103};
1104
1105EventEmitter.listenerCount = function(emitter, type) {
1106 var ret;
1107 if (!emitter._events || !emitter._events[type])
1108 ret = 0;
1109 else if (typeof emitter._events[type] === 'function')
1110 ret = 1;
1111 else
1112 ret = emitter._events[type].length;
1113 return ret;
1114};
1115
1116},{"__browserify_process":11}],5:[function(require,module,exports){
1117// getUserMedia helper by @HenrikJoreteg used for navigator.getUserMedia shim
1118var adapter = require('webrtc-adapter');
1119
1120module.exports = function (constraints, cb) {
1121 var error;
1122 var haveOpts = arguments.length === 2;
1123 var defaultOpts = {video: true, audio: true};
1124
1125 var denied = 'PermissionDeniedError';
1126 var altDenied = 'PERMISSION_DENIED';
1127 var notSatisfied = 'ConstraintNotSatisfiedError';
1128
1129 // make constraints optional
1130 if (!haveOpts) {
1131 cb = constraints;
1132 constraints = defaultOpts;
1133 }
1134
1135 // treat lack of browser support like an error
1136 if (typeof navigator === 'undefined' || !navigator.getUserMedia) {
1137 // throw proper error per spec
1138 error = new Error('MediaStreamError');
1139 error.name = 'NotSupportedError';
1140
1141 // keep all callbacks async
1142 return setTimeout(function () {
1143 cb(error);
1144 }, 0);
1145 }
1146
1147 // normalize error handling when no media types are requested
1148 if (!constraints.audio && !constraints.video) {
1149 error = new Error('MediaStreamError');
1150 error.name = 'NoMediaRequestedError';
1151
1152 // keep all callbacks async
1153 return setTimeout(function () {
1154 cb(error);
1155 }, 0);
1156 }
1157
1158 // testing support -- note: using the about:config pref is better
1159 // for Firefox 39+, this might get removed in the future
1160 if (localStorage && localStorage.useFirefoxFakeDevice === 'true') {
1161 constraints.fake = true;
1162 }
1163
1164 navigator.mediaDevices.getUserMedia(constraints)
1165 .then(function (stream) {
1166 cb(null, stream);
1167 }).catch(function (err) {
1168 var error;
1169 // coerce into an error object since FF gives us a string
1170 // there are only two valid names according to the spec
1171 // we coerce all non-denied to "constraint not satisfied".
1172 if (typeof err === 'string') {
1173 error = new Error('MediaStreamError');
1174 if (err === denied || err === altDenied) {
1175 error.name = denied;
1176 } else {
1177 error.name = notSatisfied;
1178 }
1179 } else {
1180 // if we get an error object make sure '.name' property is set
1181 // according to spec: http://dev.w3.org/2011/webrtc/editor/getusermedia.html#navigatorusermediaerror-and-navigatorusermediaerrorcallback
1182 error = err;
1183 if (!error.name) {
1184 // this is likely chrome which
1185 // sets a property called "ERROR_DENIED" on the error object
1186 // if so we make sure to set a name
1187 if (error[denied]) {
1188 err.name = denied;
1189 } else {
1190 err.name = notSatisfied;
1191 }
1192 }
1193 }
1194
1195 cb(error);
1196 });
1197};
1198
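// Illustrative use of the getUserMedia helper above (error-first callback):
//
//   var getUserMedia = require('getusermedia');
//   getUserMedia({ audio: true, video: false }, function (err, stream) {
//       if (err) {
//           // err.name is normalized, e.g. 'NotSupportedError' or 'PermissionDeniedError'
//           return console.error(err.name);
//       }
//       // use the MediaStream
//   });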
1199},{"webrtc-adapter":12}],8:[function(require,module,exports){
1200var support = require('webrtcsupport');
1201
1202
1203function GainController(stream) {
1204 this.support = support.webAudio && support.mediaStream;
1205
1206 // set our starting value
1207 this.gain = 1;
1208
1209 if (this.support) {
1210 var context = this.context = new support.AudioContext();
1211 this.microphone = context.createMediaStreamSource(stream);
1212 this.gainFilter = context.createGain();
1213 this.destination = context.createMediaStreamDestination();
1214 this.outputStream = this.destination.stream;
1215 this.microphone.connect(this.gainFilter);
1216 this.gainFilter.connect(this.destination);
1217 stream.addTrack(this.outputStream.getAudioTracks()[0]);
1218 stream.removeTrack(stream.getAudioTracks()[0]);
1219 }
1220 this.stream = stream;
1221}
1222
1223// setting
1224GainController.prototype.setGain = function (val) {
1225 // check for support
1226 if (!this.support) return;
1227 this.gainFilter.gain.value = val;
1228 this.gain = val;
1229};
1230
1231GainController.prototype.getGain = function () {
1232 return this.gain;
1233};
1234
1235GainController.prototype.off = function () {
1236 return this.setGain(0);
1237};
1238
1239GainController.prototype.on = function () {
1240 this.setGain(1);
1241};
1242
1243
1244module.exports = GainController;
1245
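// Illustrative use of GainController (assumes `stream` is a MediaStream with an audio track):
//
//   var GainController = require('mediastream-gain');
//   var gain = new GainController(stream);
//   gain.setGain(0.5);   // attenuate the microphone
//   gain.off();          // same as setGain(0)
//   gain.on();           // back to full gain (1)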
1246},{"webrtcsupport":13}],13:[function(require,module,exports){
1247// created by @HenrikJoreteg
1248var prefix;
1249var version;
1250
1251if (window.mozRTCPeerConnection || navigator.mozGetUserMedia) {
1252 prefix = 'moz';
1253 version = parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
1254} else if (window.webkitRTCPeerConnection || navigator.webkitGetUserMedia) {
1255 prefix = 'webkit';
1256 version = navigator.userAgent.match(/Chrom(e|ium)/) && parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10);
1257}
1258
1259var PC = window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
1260var IceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate;
1261var SessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription;
1262var MediaStream = window.webkitMediaStream || window.MediaStream;
1263var screenSharing = window.location.protocol === 'https:' &&
1264 ((prefix === 'webkit' && version >= 26) ||
1265 (prefix === 'moz' && version >= 33))
1266var AudioContext = window.AudioContext || window.webkitAudioContext;
1267var videoEl = document.createElement('video');
1268var supportVp8 = videoEl && videoEl.canPlayType && videoEl.canPlayType('video/webm; codecs="vp8", vorbis') === "probably";
1269var getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.msGetUserMedia || navigator.mozGetUserMedia;
1270
1271// export support flags and constructors
1272module.exports = {
1273 prefix: prefix,
1274 browserVersion: version,
1275 support: !!PC && supportVp8 && !!getUserMedia,
1276 // new support style
1277 supportRTCPeerConnection: !!PC,
1278 supportVp8: supportVp8,
1279 supportGetUserMedia: !!getUserMedia,
1280 supportDataChannel: !!(PC && PC.prototype && PC.prototype.createDataChannel),
1281 supportWebAudio: !!(AudioContext && AudioContext.prototype.createMediaStreamSource),
1282 supportMediaStream: !!(MediaStream && MediaStream.prototype.removeTrack),
1283 supportScreenSharing: !!screenSharing,
1284 // old deprecated style. Don't use this anymore
1285 dataChannel: !!(PC && PC.prototype && PC.prototype.createDataChannel),
1286 webAudio: !!(AudioContext && AudioContext.prototype.createMediaStreamSource),
1287 mediaStream: !!(MediaStream && MediaStream.prototype.removeTrack),
1288 screenSharing: !!screenSharing,
1289 // constructors
1290 AudioContext: AudioContext,
1291 PeerConnection: PC,
1292 SessionDescription: SessionDescription,
1293 IceCandidate: IceCandidate,
1294 MediaStream: MediaStream,
1295 getUserMedia: getUserMedia
1296};
1297
1298},{}],3:[function(require,module,exports){
1299var WildEmitter = require('wildemitter');
1300
1301function getMaxVolume (analyser, fftBins) {
1302 var maxVolume = -Infinity;
1303 analyser.getFloatFrequencyData(fftBins);
1304
1305 for(var i=4, ii=fftBins.length; i < ii; i++) {
1306 if (fftBins[i] > maxVolume && fftBins[i] < 0) {
1307 maxVolume = fftBins[i];
1308 }
1309 };
1310
1311 return maxVolume;
1312}
1313
1314
1315var audioContextType = window.AudioContext || window.webkitAudioContext;
1316// use a single audio context due to hardware limits
1317var audioContext = null;
1318module.exports = function(stream, options) {
1319 var harker = new WildEmitter();
1320
1321
1322 // make it not break in non-supported browsers
1323 if (!audioContextType) return harker;
1324
1325 //Config
1326 var options = options || {},
1327 smoothing = (options.smoothing || 0.1),
1328 interval = (options.interval || 50),
1329 threshold = options.threshold,
1330 play = options.play,
1331 history = options.history || 10,
1332 running = true;
1333
1334 //Setup Audio Context
1335 if (!audioContext) {
1336 audioContext = new audioContextType();
1337 }
1338 var sourceNode, fftBins, analyser;
1339
1340 analyser = audioContext.createAnalyser();
1341 analyser.fftSize = 512;
1342 analyser.smoothingTimeConstant = smoothing;
1343 fftBins = new Float32Array(analyser.fftSize);
1344
1345 if (stream.jquery) stream = stream[0];
1346 if (stream instanceof HTMLAudioElement || stream instanceof HTMLVideoElement) {
1347 //Audio Tag
1348 sourceNode = audioContext.createMediaElementSource(stream);
1349 if (typeof play === 'undefined') play = true;
1350 threshold = threshold || -50;
1351 } else {
1352 //WebRTC Stream
1353 sourceNode = audioContext.createMediaStreamSource(stream);
1354 threshold = threshold || -50;
1355 }
1356
1357 sourceNode.connect(analyser);
1358 if (play) analyser.connect(audioContext.destination);
1359
1360 harker.speaking = false;
1361
1362 harker.setThreshold = function(t) {
1363 threshold = t;
1364 };
1365
1366 harker.setInterval = function(i) {
1367 interval = i;
1368 };
1369
1370 harker.stop = function() {
1371 running = false;
1372 harker.emit('volume_change', -100, threshold);
1373 if (harker.speaking) {
1374 harker.speaking = false;
1375 harker.emit('stopped_speaking');
1376 }
1377 };
1378 harker.speakingHistory = [];
1379 for (var i = 0; i < history; i++) {
1380 harker.speakingHistory.push(0);
1381 }
1382
1383 // Poll the analyser node to determine if speaking
1384 // and emit events if changed
1385 var looper = function() {
1386 setTimeout(function() {
1387
1388 //check if stop has been called
1389 if(!running) {
1390 return;
1391 }
1392
1393 var currentVolume = getMaxVolume(analyser, fftBins);
1394
1395 harker.emit('volume_change', currentVolume, threshold);
1396
1397 var history = 0;
1398 if (currentVolume > threshold && !harker.speaking) {
1399 // trigger quickly, short history
1400 for (var i = harker.speakingHistory.length - 3; i < harker.speakingHistory.length; i++) {
1401 history += harker.speakingHistory[i];
1402 }
1403 if (history >= 2) {
1404 harker.speaking = true;
1405 harker.emit('speaking');
1406 }
1407 } else if (currentVolume < threshold && harker.speaking) {
1408 for (var i = 0; i < harker.speakingHistory.length; i++) {
1409 history += harker.speakingHistory[i];
1410 }
1411 if (history == 0) {
1412 harker.speaking = false;
1413 harker.emit('stopped_speaking');
1414 }
1415 }
1416 harker.speakingHistory.shift();
1417 harker.speakingHistory.push(0 + (currentVolume > threshold));
1418
1419 looper();
1420 }, interval);
1421 };
1422 looper();
1423
1424
1425 return harker;
1426}
1427
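// Illustrative use of the hark speech detector above (assumes `stream` is a local MediaStream):
//
//   var hark = require('hark');
//   var speech = hark(stream, { interval: 100, threshold: -65 });
//   speech.on('speaking', function () { console.log('speaking'); });
//   speech.on('stopped_speaking', function () { console.log('quiet'); });
//   speech.on('volume_change', function (volume, threshold) { /* drive a level meter */ });
//   // speech.stop() shuts the detector down when finished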
1428},{"wildemitter":7}],6:[function(require,module,exports){
1429// getScreenMedia helper by @HenrikJoreteg
1430var getUserMedia = require('getusermedia');
1431
1432// cache for constraints and callback
1433var cache = {};
1434
1435module.exports = function (constraints, cb) {
1436 var hasConstraints = arguments.length === 2;
1437 var callback = hasConstraints ? cb : constraints;
1438 var error;
1439
1440 if (typeof window === 'undefined' || window.location.protocol === 'http:') {
1441 error = new Error('NavigatorUserMediaError');
1442 error.name = 'HTTPS_REQUIRED';
1443 return callback(error);
1444 }
1445
1446 if (window.navigator.userAgent.match('Chrome')) {
1447 var chromever = parseInt(window.navigator.userAgent.match(/Chrome\/(.*) /)[1], 10);
1448 var maxver = 33;
1449 var isCef = !window.chrome.webstore;
1450 // "known" crash in chrome 34 and 35 on linux
1451 if (window.navigator.userAgent.match('Linux')) maxver = 35;
1452
1453 // check that the extension is installed by looking for a
1454 // sessionStorage variable that contains the extension id
1455 // this has to be set after installation unless the content
1456 // script does that
1457 if (sessionStorage.getScreenMediaJSExtensionId) {
1458 chrome.runtime.sendMessage(sessionStorage.getScreenMediaJSExtensionId,
1459 {type:'getScreen', id: 1}, null,
1460 function (data) {
1461 if (!data || data.sourceId === '') { // user canceled
1462 var error = new Error('NavigatorUserMediaError');
1463 error.name = 'PERMISSION_DENIED';
1464 callback(error);
1465 } else {
1466 constraints = (hasConstraints && constraints) || {audio: false, video: {
1467 mandatory: {
1468 chromeMediaSource: 'desktop',
1469 maxWidth: window.screen.width,
1470 maxHeight: window.screen.height,
1471 maxFrameRate: 3
1472 }
1473 }};
1474 constraints.video.mandatory.chromeMediaSourceId = data.sourceId;
1475 getUserMedia(constraints, callback);
1476 }
1477 }
1478 );
1479 } else if (window.cefGetScreenMedia) {
1480 //window.cefGetScreenMedia is experimental - may be removed without notice
1481 window.cefGetScreenMedia(function(sourceId) {
1482 if (!sourceId) {
1483 var error = new Error('cefGetScreenMediaError');
1484 error.name = 'CEF_GETSCREENMEDIA_CANCELED';
1485 callback(error);
1486 } else {
1487 constraints = (hasConstraints && constraints) || {audio: false, video: {
1488 mandatory: {
1489 chromeMediaSource: 'desktop',
1490 maxWidth: window.screen.width,
1491 maxHeight: window.screen.height,
1492 maxFrameRate: 3
1493 },
1494 optional: [
1495 {googLeakyBucket: true},
1496 {googTemporalLayeredScreencast: true}
1497 ]
1498 }};
1499 constraints.video.mandatory.chromeMediaSourceId = sourceId;
1500 getUserMedia(constraints, callback);
1501 }
1502 });
1503 } else if (isCef || (chromever >= 26 && chromever <= maxver)) {
1504 // chrome 26 - chrome 33 way to do it -- requires enabling chrome://flags settings
1505 // note: this is basically in maintenance mode and will go away soon
1506 constraints = (hasConstraints && constraints) || {
1507 video: {
1508 mandatory: {
1509 googLeakyBucket: true,
1510 maxWidth: window.screen.width,
1511 maxHeight: window.screen.height,
1512 maxFrameRate: 3,
1513 chromeMediaSource: 'screen'
1514 }
1515 }
1516 };
1517 getUserMedia(constraints, callback);
1518 } else {
1519 // chrome 34+ way requiring an extension
1520 var pending = window.setTimeout(function () {
1521 error = new Error('NavigatorUserMediaError');
1522 error.name = 'EXTENSION_UNAVAILABLE';
1523 return callback(error);
1524 }, 1000);
1525 cache[pending] = [callback, hasConstraints ? constraints : null];
1526 window.postMessage({ type: 'getScreen', id: pending }, '*');
1527 }
1528 } else if (window.navigator.userAgent.match('Firefox')) {
1529 var ffver = parseInt(window.navigator.userAgent.match(/Firefox\/(.*)/)[1], 10);
1530 if (ffver >= 33) {
1531 constraints = (hasConstraints && constraints) || {
1532 video: {
1533 mozMediaSource: 'window',
1534 mediaSource: 'window'
1535 }
1536 }
1537 getUserMedia(constraints, function (err, stream) {
1538 callback(err, stream);
1539 // workaround for https://bugzilla.mozilla.org/show_bug.cgi?id=1045810
1540 if (!err) {
1541 var lastTime = stream.currentTime;
1542 var polly = window.setInterval(function () {
1543 if (!stream) window.clearInterval(polly);
1544 if (stream.currentTime == lastTime) {
1545 window.clearInterval(polly);
1546 if (stream.onended) {
1547 stream.onended();
1548 }
1549 }
1550 lastTime = stream.currentTime;
1551 }, 500);
1552 }
1553 });
1554 } else {
1555 error = new Error('NavigatorUserMediaError');
1556 error.name = 'EXTENSION_UNAVAILABLE'; // does not make much sense but...
 return callback(error);
1557 }
1558 }
1559};
1560
1561window.addEventListener('message', function (event) {
1562 if (event.origin != window.location.origin) {
1563 return;
1564 }
1565 if (event.data.type == 'gotScreen' && cache[event.data.id]) {
1566 var data = cache[event.data.id];
1567 var constraints = data[1];
1568 var callback = data[0];
1569 delete cache[event.data.id];
1570
1571 if (event.data.sourceId === '') { // user canceled
1572 var error = new Error('NavigatorUserMediaError');
1573 error.name = 'PERMISSION_DENIED';
1574 callback(error);
1575 } else {
1576 constraints = constraints || {audio: false, video: {
1577 mandatory: {
1578 chromeMediaSource: 'desktop',
1579 maxWidth: window.screen.width,
1580 maxHeight: window.screen.height,
1581 maxFrameRate: 3
1582 },
1583 optional: [
1584 {googLeakyBucket: true},
1585 {googTemporalLayeredScreencast: true}
1586 ]
1587 }};
1588 constraints.video.mandatory.chromeMediaSourceId = event.data.sourceId;
1589 getUserMedia(constraints, callback);
1590 }
1591 } else if (event.data.type == 'getScreenPending') {
1592 window.clearTimeout(event.data.id);
1593 }
1594});
1595
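// Illustrative use of the getScreenMedia helper above:
//
//   var getScreenMedia = require('getscreenmedia');
//   getScreenMedia(function (err, stream) {
//       if (err) {
//           // e.g. 'HTTPS_REQUIRED', 'PERMISSION_DENIED' or 'EXTENSION_UNAVAILABLE'
//           return console.error(err.name);
//       }
//       // use the screen-capture MediaStream
//   });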
1596},{"getusermedia":5}],12:[function(require,module,exports){
1597/*
1598 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
1599 *
1600 * Use of this source code is governed by a BSD-style license
1601 * that can be found in the LICENSE file in the root of the source
1602 * tree.
1603 */
1604 /* eslint-env node */
1605
1606'use strict';
1607
1608// Shimming starts here.
1609(function() {
1610 // Utils.
1611 var logging = require('./utils').log;
1612 var browserDetails = require('./utils').browserDetails;
1613 // Export to the adapter global object visible in the browser.
1614 module.exports.browserDetails = browserDetails;
1615 module.exports.extractVersion = require('./utils').extractVersion;
1616 module.exports.disableLog = require('./utils').disableLog;
1617
1618 // Comment out the line below if you want logging to occur, including logging
1619 // for the switch statement below. Can also be turned on in the browser via
1620 // adapter.disableLog(false), but then logging from the switch statement below
1621 // will not appear.
1622 require('./utils').disableLog(true);
1623
1624 // Browser shims.
1625 var chromeShim = require('./chrome/chrome_shim') || null;
1626 var edgeShim = require('./edge/edge_shim') || null;
1627 var firefoxShim = require('./firefox/firefox_shim') || null;
1628 var safariShim = require('./safari/safari_shim') || null;
1629
1630 // Shim browser if found.
1631 switch (browserDetails.browser) {
1632 case 'opera': // fallthrough as it uses chrome shims
1633 case 'chrome':
1634 if (!chromeShim || !chromeShim.shimPeerConnection) {
1635 logging('Chrome shim is not included in this adapter release.');
1636 return;
1637 }
1638 logging('adapter.js shimming chrome.');
1639 // Export to the adapter global object visible in the browser.
1640 module.exports.browserShim = chromeShim;
1641
1642 chromeShim.shimGetUserMedia();
1643 chromeShim.shimSourceObject();
1644 chromeShim.shimPeerConnection();
1645 chromeShim.shimOnTrack();
1646 break;
1647 case 'firefox':
1648 if (!firefoxShim || !firefoxShim.shimPeerConnection) {
1649 logging('Firefox shim is not included in this adapter release.');
1650 return;
1651 }
1652 logging('adapter.js shimming firefox.');
1653 // Export to the adapter global object visible in the browser.
1654 module.exports.browserShim = firefoxShim;
1655
1656 firefoxShim.shimGetUserMedia();
1657 firefoxShim.shimSourceObject();
1658 firefoxShim.shimPeerConnection();
1659 firefoxShim.shimOnTrack();
1660 break;
1661 case 'edge':
1662 if (!edgeShim || !edgeShim.shimPeerConnection) {
1663 logging('MS edge shim is not included in this adapter release.');
1664 return;
1665 }
1666 logging('adapter.js shimming edge.');
1667 // Export to the adapter global object visible in the browser.
1668 module.exports.browserShim = edgeShim;
1669
1670 edgeShim.shimPeerConnection();
1671 break;
1672 case 'safari':
1673 if (!safariShim) {
1674 logging('Safari shim is not included in this adapter release.');
1675 return;
1676 }
1677 logging('adapter.js shimming safari.');
1678 // Export to the adapter global object visible in the browser.
1679 module.exports.browserShim = safariShim;
1680
1681 safariShim.shimGetUserMedia();
1682 break;
1683 default:
1684 logging('Unsupported browser!');
1685 }
1686})();
1687
1688},{"./chrome/chrome_shim":15,"./edge/edge_shim":16,"./firefox/firefox_shim":17,"./safari/safari_shim":18,"./utils":14}],14:[function(require,module,exports){
1689/*
1690 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
1691 *
1692 * Use of this source code is governed by a BSD-style license
1693 * that can be found in the LICENSE file in the root of the source
1694 * tree.
1695 */
1696 /* eslint-env node */
1697'use strict';
1698
1699var logDisabled_ = false;
1700
1701// Utility methods.
1702var utils = {
1703 disableLog: function(bool) {
1704 if (typeof bool !== 'boolean') {
1705 return new Error('Argument type: ' + typeof bool +
1706 '. Please use a boolean.');
1707 }
1708 logDisabled_ = bool;
1709 return (bool) ? 'adapter.js logging disabled' :
1710 'adapter.js logging enabled';
1711 },
1712
1713 log: function() {
1714 if (typeof window === 'object') {
1715 if (logDisabled_) {
1716 return;
1717 }
1718 if (typeof console !== 'undefined' && typeof console.log === 'function') {
1719 console.log.apply(console, arguments);
1720 }
1721 }
1722 },
1723
1724 /**
1725 * Extract browser version out of the provided user agent string.
1726 *
1727 * @param {!string} uastring userAgent string.
1728 * @param {!string} expr Regular expression used as match criteria.
1729 * @param {!number} pos position in the version string to be returned.
1730 * @return {!number} browser version.
1731 */
1732 extractVersion: function(uastring, expr, pos) {
1733 var match = uastring.match(expr);
1734 return match && match.length >= pos && parseInt(match[pos], 10);
1735 },
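  // Illustrative example: for a Firefox user agent string,
  //   extractVersion('Mozilla/5.0 ... Firefox/44.0', /Firefox\/([0-9]+)\./, 1)   // -> 44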
1736
1737 /**
1738 * Browser detector.
1739 *
1740 * @return {object} result containing browser, version and minVersion
1741 * properties.
1742 */
1743 detectBrowser: function() {
1744 // Returned result object.
1745 var result = {};
1746 result.browser = null;
1747 result.version = null;
1748 result.minVersion = null;
1749
1750 // Fail early if it's not a browser
1751 if (typeof window === 'undefined' || !window.navigator) {
1752 result.browser = 'Not a browser.';
1753 return result;
1754 }
1755
1756 // Firefox.
1757 if (navigator.mozGetUserMedia) {
1758 result.browser = 'firefox';
1759 result.version = this.extractVersion(navigator.userAgent,
1760 /Firefox\/([0-9]+)\./, 1);
1761 result.minVersion = 31;
1762
1763 // all webkit-based browsers
1764 } else if (navigator.webkitGetUserMedia) {
1765 // Chrome, Chromium, Webview, Opera, all use the chrome shim for now
1766 if (window.webkitRTCPeerConnection) {
1767 result.browser = 'chrome';
1768 result.version = this.extractVersion(navigator.userAgent,
1769 /Chrom(e|ium)\/([0-9]+)\./, 2);
1770 result.minVersion = 38;
1771
1772 // Safari or unknown webkit-based
1773 // for the time being Safari has support for MediaStreams but not webRTC
1774 } else {
1775 // Safari UA substrings of interest for reference:
1776 // - webkit version: AppleWebKit/602.1.25 (also used in Op,Cr)
1777 // - safari UI version: Version/9.0.3 (unique to Safari)
1778 // - safari UI webkit version: Safari/601.4.4 (also used in Op,Cr)
1779 //
1780 // if the webkit version and safari UI webkit versions are equal,
1781 // ... this is a stable version.
1782 //
1783 // only the internal webkit version is important today to know if
1784 // media streams are supported
1785 //
1786 if (navigator.userAgent.match(/Version\/(\d+).(\d+)/)) {
1787 result.browser = 'safari';
1788 result.version = this.extractVersion(navigator.userAgent,
1789 /AppleWebKit\/([0-9]+)\./, 1);
1790 result.minVersion = 602;
1791
1792 // unknown webkit-based browser
1793 } else {
1794 result.browser = 'Unsupported webkit-based browser ' +
1795 'with GUM support but no WebRTC support.';
1796 return result;
1797 }
1798 }
1799
1800 // Edge.
1801 } else if (navigator.mediaDevices &&
1802 navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)) {
1803 result.browser = 'edge';
1804 result.version = this.extractVersion(navigator.userAgent,
1805 /Edge\/(\d+).(\d+)$/, 2);
1806 result.minVersion = 10547;
1807
1808 // Default fallthrough: not supported.
1809 } else {
1810 result.browser = 'Not a supported browser.';
1811 return result;
1812 }
1813
1814 // Warn if version is less than minVersion.
1815 if (result.version < result.minVersion) {
1816 utils.log('Browser: ' + result.browser + ' Version: ' + result.version +
1817 ' < minimum supported version: ' + result.minVersion +
1818 '\n some things might not work!');
1819 }
1820
1821 return result;
1822 }
1823};
1824
1825// Export.
1826module.exports = {
1827 log: utils.log,
1828 disableLog: utils.disableLog,
1829 browserDetails: utils.detectBrowser(),
1830 extractVersion: utils.extractVersion
1831};
1832
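// Illustrative read of the detection result exported above:
//
//   var adapter = require('webrtc-adapter');
//   adapter.browserDetails.browser;   // e.g. 'chrome', 'firefox', 'edge' or 'safari'
//   adapter.browserDetails.version;   // numeric version extracted from the user agent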
1833},{}],18:[function(require,module,exports){
1834/*
1835 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
1836 *
1837 * Use of this source code is governed by a BSD-style license
1838 * that can be found in the LICENSE file in the root of the source
1839 * tree.
1840 */
1841'use strict';
1842var safariShim = {
1843 // TODO: DrAlex, should be here, double check against LayoutTests
1844 // shimOnTrack: function() { },
1845
1846 // TODO: DrAlex
1847 // attachMediaStream: function(element, stream) { },
1848 // reattachMediaStream: function(to, from) { },
1849
1850 // TODO: once the back-end for the mac port is done, add.
1851 // TODO: check for webkitGTK+
1852 // shimPeerConnection: function() { },
1853
1854 shimGetUserMedia: function() {
1855 navigator.getUserMedia = navigator.webkitGetUserMedia;
1856 }
1857};
1858
1859// Expose public methods.
1860module.exports = {
1861 shimGetUserMedia: safariShim.shimGetUserMedia
1862 // TODO
1863 // shimOnTrack: safariShim.shimOnTrack,
1864 // shimPeerConnection: safariShim.shimPeerConnection,
1865 // attachMediaStream: safariShim.attachMediaStream,
1866 // reattachMediaStream: safariShim.reattachMediaStream
1867};
1868
1869},{}],15:[function(require,module,exports){
1870/*
1871 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
1872 *
1873 * Use of this source code is governed by a BSD-style license
1874 * that can be found in the LICENSE file in the root of the source
1875 * tree.
1876 */
1877 /* eslint-env node */
1878'use strict';
1879var logging = require('../utils.js').log;
1880var browserDetails = require('../utils.js').browserDetails;
1881
1882var chromeShim = {
1883 shimOnTrack: function() {
1884 if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
1885 window.RTCPeerConnection.prototype)) {
1886 Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
1887 get: function() {
1888 return this._ontrack;
1889 },
1890 set: function(f) {
1891 var self = this;
1892 if (this._ontrack) {
1893 this.removeEventListener('track', this._ontrack);
1894 this.removeEventListener('addstream', this._ontrackpoly);
1895 }
1896 this.addEventListener('track', this._ontrack = f);
1897 this.addEventListener('addstream', this._ontrackpoly = function(e) {
1898 // onaddstream does not fire when a track is added to an existing
1899 // stream. But stream.onaddtrack is implemented so we use that.
1900 e.stream.addEventListener('addtrack', function(te) {
1901 var event = new Event('track');
1902 event.track = te.track;
1903 event.receiver = {track: te.track};
1904 event.streams = [e.stream];
1905 self.dispatchEvent(event);
1906 });
1907 e.stream.getTracks().forEach(function(track) {
1908 var event = new Event('track');
1909 event.track = track;
1910 event.receiver = {track: track};
1911 event.streams = [e.stream];
1912 this.dispatchEvent(event);
1913 }.bind(this));
1914 }.bind(this));
1915 }
1916 });
1917 }
1918 },
1919
1920 shimSourceObject: function() {
1921 if (typeof window === 'object') {
1922 if (window.HTMLMediaElement &&
1923 !('srcObject' in window.HTMLMediaElement.prototype)) {
1924 // Shim the srcObject property, once, when HTMLMediaElement is found.
1925 Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
1926 get: function() {
1927 return this._srcObject;
1928 },
1929 set: function(stream) {
1930 var self = this;
1931 // Use _srcObject as a private property for this shim
1932 this._srcObject = stream;
1933 if (this.src) {
1934 URL.revokeObjectURL(this.src);
1935 }
1936
1937 if (!stream) {
1938 this.src = '';
1939 return;
1940 }
1941 this.src = URL.createObjectURL(stream);
1942 // We need to recreate the blob url when a track is added or
1943 // removed. Doing it manually since we want to avoid a recursion.
1944 stream.addEventListener('addtrack', function() {
1945 if (self.src) {
1946 URL.revokeObjectURL(self.src);
1947 }
1948 self.src = URL.createObjectURL(stream);
1949 });
1950 stream.addEventListener('removetrack', function() {
1951 if (self.src) {
1952 URL.revokeObjectURL(self.src);
1953 }
1954 self.src = URL.createObjectURL(stream);
1955 });
1956 }
1957 });
1958 }
1959 }
1960 },
1961
1962 shimPeerConnection: function() {
1963 // The RTCPeerConnection object.
1964 window.RTCPeerConnection = function(pcConfig, pcConstraints) {
1965 // Translate iceTransportPolicy to iceTransports,
1966 // see https://code.google.com/p/webrtc/issues/detail?id=4869
1967 logging('PeerConnection');
1968 if (pcConfig && pcConfig.iceTransportPolicy) {
1969 pcConfig.iceTransports = pcConfig.iceTransportPolicy;
1970 }
1971
1972 var pc = new webkitRTCPeerConnection(pcConfig, pcConstraints);
1973 var origGetStats = pc.getStats.bind(pc);
1974 pc.getStats = function(selector, successCallback, errorCallback) {
1975 var self = this;
1976 var args = arguments;
1977
1978 // If selector is a function then we are in the old style stats so just
1979 // pass back the original getStats format to avoid breaking old users.
1980 if (arguments.length > 0 && typeof selector === 'function') {
1981 return origGetStats(selector, successCallback);
1982 }
1983
1984 var fixChromeStats_ = function(response) {
1985 var standardReport = {};
1986 var reports = response.result();
1987 reports.forEach(function(report) {
1988 var standardStats = {
1989 id: report.id,
1990 timestamp: report.timestamp,
1991 type: report.type
1992 };
1993 report.names().forEach(function(name) {
1994 standardStats[name] = report.stat(name);
1995 });
1996 standardReport[standardStats.id] = standardStats;
1997 });
1998
1999 return standardReport;
2000 };
2001
2002 if (arguments.length >= 2) {
2003 var successCallbackWrapper_ = function(response) {
2004 args[1](fixChromeStats_(response));
2005 };
2006
2007 return origGetStats.apply(this, [successCallbackWrapper_,
2008 arguments[0]]);
2009 }
2010
2011 // promise-support
2012 return new Promise(function(resolve, reject) {
2013 if (args.length === 1 && typeof selector === 'object') {
2014 origGetStats.apply(self,
2015 [function(response) {
2016 resolve.apply(null, [fixChromeStats_(response)]);
2017 }, reject]);
2018 } else {
2019 origGetStats.apply(self, [resolve, reject]);
2020 }
2021 });
2022 };
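      // Illustrative (hypothetical) calls against the wrapped getStats:
      //   pc.getStats(null, function(report) { ... });     // legacy callback
      //   pc.getStats(null).then(function(report) { ... }); // promise form
      // both receive the id-keyed report produced by fixChromeStats_ above.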
2023
2024 return pc;
2025 };
2026 window.RTCPeerConnection.prototype = webkitRTCPeerConnection.prototype;
2027
2028 // wrap static methods. Currently just generateCertificate.
2029 if (webkitRTCPeerConnection.generateCertificate) {
2030 Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
2031 get: function() {
2032 return webkitRTCPeerConnection.generateCertificate;
2033 }
2034 });
2035 }
2036
2037 // add promise support
2038 ['createOffer', 'createAnswer'].forEach(function(method) {
2039 var nativeMethod = webkitRTCPeerConnection.prototype[method];
2040 webkitRTCPeerConnection.prototype[method] = function() {
2041 var self = this;
2042 if (arguments.length < 1 || (arguments.length === 1 &&
2043 typeof(arguments[0]) === 'object')) {
2044 var opts = arguments.length === 1 ? arguments[0] : undefined;
2045 return new Promise(function(resolve, reject) {
2046 nativeMethod.apply(self, [resolve, reject, opts]);
2047 });
2048 }
2049 return nativeMethod.apply(this, arguments);
2050 };
2051 });
2052
2053 ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
2054 .forEach(function(method) {
2055 var nativeMethod = webkitRTCPeerConnection.prototype[method];
2056 webkitRTCPeerConnection.prototype[method] = function() {
2057 var args = arguments;
2058 var self = this;
2059 args[0] = new ((method === 'addIceCandidate')?
2060 RTCIceCandidate : RTCSessionDescription)(args[0]);
2061 return new Promise(function(resolve, reject) {
2062 nativeMethod.apply(self, [args[0],
2063 function() {
2064 resolve();
2065 if (args.length >= 2) {
2066 args[1].apply(null, []);
2067 }
2068 },
2069 function(err) {
2070 reject(err);
2071 if (args.length >= 3) {
2072 args[2].apply(null, [err]);
2073 }
2074 }]
2075 );
2076 });
2077 };
2078 });
2079 },
2080
2081 // Attach a media stream to an element.
2082 attachMediaStream: function(element, stream) {
2083 logging('DEPRECATED, attachMediaStream will soon be removed.');
2084 if (browserDetails.version >= 43) {
2085 element.srcObject = stream;
2086 } else if (typeof element.src !== 'undefined') {
2087 element.src = URL.createObjectURL(stream);
2088 } else {
2089 logging('Error attaching stream to element.');
2090 }
2091 },
2092
2093 reattachMediaStream: function(to, from) {
2094 logging('DEPRECATED, reattachMediaStream will soon be removed.');
2095 if (browserDetails.version >= 43) {
2096 to.srcObject = from.srcObject;
2097 } else {
2098 to.src = from.src;
2099 }
2100 }
2101};
2102
2103
2104// Expose public methods.
2105module.exports = {
2106 shimOnTrack: chromeShim.shimOnTrack,
2107 shimSourceObject: chromeShim.shimSourceObject,
2108 shimPeerConnection: chromeShim.shimPeerConnection,
2109 shimGetUserMedia: require('./getusermedia'),
2110 attachMediaStream: chromeShim.attachMediaStream,
2111 reattachMediaStream: chromeShim.reattachMediaStream
2112};
2113
2114},{"../utils.js":14,"./getusermedia":19}],17:[function(require,module,exports){
2115/*
2116 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
2117 *
2118 * Use of this source code is governed by a BSD-style license
2119 * that can be found in the LICENSE file in the root of the source
2120 * tree.
2121 */
2122 /* eslint-env node */
2123'use strict';
2124
2125var logging = require('../utils').log;
2126var browserDetails = require('../utils').browserDetails;
2127
2128var firefoxShim = {
2129 shimOnTrack: function() {
2130 if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
2131 window.RTCPeerConnection.prototype)) {
2132 Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
2133 get: function() {
2134 return this._ontrack;
2135 },
2136 set: function(f) {
2137 if (this._ontrack) {
2138 this.removeEventListener('track', this._ontrack);
2139 this.removeEventListener('addstream', this._ontrackpoly);
2140 }
2141 this.addEventListener('track', this._ontrack = f);
2142 this.addEventListener('addstream', this._ontrackpoly = function(e) {
2143 e.stream.getTracks().forEach(function(track) {
2144 var event = new Event('track');
2145 event.track = track;
2146 event.receiver = {track: track};
2147 event.streams = [e.stream];
2148 this.dispatchEvent(event);
2149 }.bind(this));
2150 }.bind(this));
2151 }
2152 });
2153 }
2154 },
2155
2156 shimSourceObject: function() {
2157 // Firefox has supported mozSrcObject since FF22, unprefixed in 42.
2158 if (typeof window === 'object') {
2159 if (window.HTMLMediaElement &&
2160 !('srcObject' in window.HTMLMediaElement.prototype)) {
2161 // Shim the srcObject property, once, when HTMLMediaElement is found.
2162 Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
2163 get: function() {
2164 return this.mozSrcObject;
2165 },
2166 set: function(stream) {
2167 this.mozSrcObject = stream;
2168 }
2169 });
2170 }
2171 }
2172 },
2173
2174 shimPeerConnection: function() {
2175 // The RTCPeerConnection object.
2176 if (!window.RTCPeerConnection) {
2177 window.RTCPeerConnection = function(pcConfig, pcConstraints) {
2178 if (browserDetails.version < 38) {
2179 // .urls is not supported in FF < 38.
2180 // create RTCIceServers with a single url.
2181 if (pcConfig && pcConfig.iceServers) {
2182 var newIceServers = [];
2183 for (var i = 0; i < pcConfig.iceServers.length; i++) {
2184 var server = pcConfig.iceServers[i];
2185 if (server.hasOwnProperty('urls')) {
2186 for (var j = 0; j < server.urls.length; j++) {
2187 var newServer = {
2188 url: server.urls[j]
2189 };
2190 if (server.urls[j].indexOf('turn') === 0) {
2191 newServer.username = server.username;
2192 newServer.credential = server.credential;
2193 }
2194 newIceServers.push(newServer);
2195 }
2196 } else {
2197 newIceServers.push(pcConfig.iceServers[i]);
2198 }
2199 }
2200 pcConfig.iceServers = newIceServers;
2201 }
2202 }
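        // For instance (hypothetical config), on FF < 38 an entry such as
        //   {urls: ['turn:example.org'], username: 'u', credential: 'c'}
        // becomes {url: 'turn:example.org', username: 'u', credential: 'c'}.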
2203 return new mozRTCPeerConnection(pcConfig, pcConstraints);
2204 };
2205 window.RTCPeerConnection.prototype = mozRTCPeerConnection.prototype;
2206
2207 // wrap static methods. Currently just generateCertificate.
2208 if (mozRTCPeerConnection.generateCertificate) {
2209 Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
2210 get: function() {
2211 return mozRTCPeerConnection.generateCertificate;
2212 }
2213 });
2214 }
2215
2216 window.RTCSessionDescription = mozRTCSessionDescription;
2217 window.RTCIceCandidate = mozRTCIceCandidate;
2218 }
2219
2220 // shim away need for obsolete RTCIceCandidate/RTCSessionDescription.
2221 ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
2222 .forEach(function(method) {
2223 var nativeMethod = RTCPeerConnection.prototype[method];
2224 RTCPeerConnection.prototype[method] = function() {
2225 arguments[0] = new ((method === 'addIceCandidate')?
2226 RTCIceCandidate : RTCSessionDescription)(arguments[0]);
2227 return nativeMethod.apply(this, arguments);
2228 };
2229 });
2230 },
2231
2232 shimGetUserMedia: function() {
2233 // getUserMedia constraints shim.
2234 var getUserMedia_ = function(constraints, onSuccess, onError) {
2235 var constraintsToFF37_ = function(c) {
2236 if (typeof c !== 'object' || c.require) {
2237 return c;
2238 }
2239 var require = [];
2240 Object.keys(c).forEach(function(key) {
2241 if (key === 'require' || key === 'advanced' ||
2242 key === 'mediaSource') {
2243 return;
2244 }
2245 var r = c[key] = (typeof c[key] === 'object') ?
2246 c[key] : {ideal: c[key]};
2247 if (r.min !== undefined ||
2248 r.max !== undefined || r.exact !== undefined) {
2249 require.push(key);
2250 }
2251 if (r.exact !== undefined) {
2252 if (typeof r.exact === 'number') {
2253              r.min = r.max = r.exact;
2254 } else {
2255 c[key] = r.exact;
2256 }
2257 delete r.exact;
2258 }
2259 if (r.ideal !== undefined) {
2260 c.advanced = c.advanced || [];
2261 var oc = {};
2262 if (typeof r.ideal === 'number') {
2263 oc[key] = {min: r.ideal, max: r.ideal};
2264 } else {
2265 oc[key] = r.ideal;
2266 }
2267 c.advanced.push(oc);
2268 delete r.ideal;
2269 if (!Object.keys(r).length) {
2270 delete c[key];
2271 }
2272 }
2273 });
2274 if (require.length) {
2275 c.require = require;
2276 }
2277 return c;
2278 };
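      // For instance (hypothetical constraint), constraintsToFF37_ turns
      //   {width: {min: 640}} into {width: {min: 640}, require: ['width']},
      // listing keys that carry min/max/exact under the legacy 'require' array.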
2279 constraints = JSON.parse(JSON.stringify(constraints));
2280 if (browserDetails.version < 38) {
2281 logging('spec: ' + JSON.stringify(constraints));
2282 if (constraints.audio) {
2283 constraints.audio = constraintsToFF37_(constraints.audio);
2284 }
2285 if (constraints.video) {
2286 constraints.video = constraintsToFF37_(constraints.video);
2287 }
2288 logging('ff37: ' + JSON.stringify(constraints));
2289 }
2290 return navigator.mozGetUserMedia(constraints, onSuccess, onError);
2291 };
2292
2293 navigator.getUserMedia = getUserMedia_;
2294
2295 // Returns the result of getUserMedia as a Promise.
2296 var getUserMediaPromise_ = function(constraints) {
2297 return new Promise(function(resolve, reject) {
2298 navigator.getUserMedia(constraints, resolve, reject);
2299 });
2300 };
2301
2302 // Shim for mediaDevices on older versions.
2303 if (!navigator.mediaDevices) {
2304 navigator.mediaDevices = {getUserMedia: getUserMediaPromise_,
2305 addEventListener: function() { },
2306 removeEventListener: function() { }
2307 };
2308 }
2309 navigator.mediaDevices.enumerateDevices =
2310 navigator.mediaDevices.enumerateDevices || function() {
2311 return new Promise(function(resolve) {
2312 var infos = [
2313 {kind: 'audioinput', deviceId: 'default', label: '', groupId: ''},
2314 {kind: 'videoinput', deviceId: 'default', label: '', groupId: ''}
2315 ];
2316 resolve(infos);
2317 });
2318 };
2319
2320 if (browserDetails.version < 41) {
2321 // Work around http://bugzil.la/1169665
2322 var orgEnumerateDevices =
2323 navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices);
2324 navigator.mediaDevices.enumerateDevices = function() {
2325 return orgEnumerateDevices().then(undefined, function(e) {
2326 if (e.name === 'NotFoundError') {
2327 return [];
2328 }
2329 throw e;
2330 });
2331 };
2332 }
2333 },
2334
2335 // Attach a media stream to an element.
2336 attachMediaStream: function(element, stream) {
2337 logging('DEPRECATED, attachMediaStream will soon be removed.');
2338 element.srcObject = stream;
2339 },
2340
2341 reattachMediaStream: function(to, from) {
2342 logging('DEPRECATED, reattachMediaStream will soon be removed.');
2343 to.srcObject = from.srcObject;
2344 }
2345};
2346
2347// Expose public methods.
2348module.exports = {
2349 shimOnTrack: firefoxShim.shimOnTrack,
2350 shimSourceObject: firefoxShim.shimSourceObject,
2351 shimPeerConnection: firefoxShim.shimPeerConnection,
2352 shimGetUserMedia: require('./getusermedia'),
2353 attachMediaStream: firefoxShim.attachMediaStream,
2354 reattachMediaStream: firefoxShim.reattachMediaStream
2355};
2356
2357},{"../utils":14,"./getusermedia":20}],16:[function(require,module,exports){
2358/*
2359 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
2360 *
2361 * Use of this source code is governed by a BSD-style license
2362 * that can be found in the LICENSE file in the root of the source
2363 * tree.
2364 */
2365 /* eslint-env node */
2366'use strict';
2367
2368var SDPUtils = require('./edge_sdp');
2369var logging = require('../utils').log;
2370
2371var edgeShim = {
2372 shimPeerConnection: function() {
2373 if (window.RTCIceGatherer) {
2374 // ORTC defines an RTCIceCandidate object but no constructor.
2375 // Not implemented in Edge.
2376 if (!window.RTCIceCandidate) {
2377 window.RTCIceCandidate = function(args) {
2378 return args;
2379 };
2380 }
2381 // ORTC does not have a session description object but
2382      // other browsers (e.g. Chrome) that will support both PC and ORTC
2383 // in the future might have this defined already.
2384 if (!window.RTCSessionDescription) {
2385 window.RTCSessionDescription = function(args) {
2386 return args;
2387 };
2388 }
2389 }
2390
2391 window.RTCPeerConnection = function(config) {
2392 var self = this;
2393
2394 var _eventTarget = document.createDocumentFragment();
2395 ['addEventListener', 'removeEventListener', 'dispatchEvent']
2396 .forEach(function(method) {
2397 self[method] = _eventTarget[method].bind(_eventTarget);
2398 });
2399
2400 this.onicecandidate = null;
2401 this.onaddstream = null;
2402 this.ontrack = null;
2403 this.onremovestream = null;
2404 this.onsignalingstatechange = null;
2405 this.oniceconnectionstatechange = null;
2406 this.onnegotiationneeded = null;
2407 this.ondatachannel = null;
2408
2409 this.localStreams = [];
2410 this.remoteStreams = [];
2411 this.getLocalStreams = function() {
2412 return self.localStreams;
2413 };
2414 this.getRemoteStreams = function() {
2415 return self.remoteStreams;
2416 };
2417
2418 this.localDescription = new RTCSessionDescription({
2419 type: '',
2420 sdp: ''
2421 });
2422 this.remoteDescription = new RTCSessionDescription({
2423 type: '',
2424 sdp: ''
2425 });
2426 this.signalingState = 'stable';
2427 this.iceConnectionState = 'new';
2428 this.iceGatheringState = 'new';
2429
2430 this.iceOptions = {
2431 gatherPolicy: 'all',
2432 iceServers: []
2433 };
2434 if (config && config.iceTransportPolicy) {
2435 switch (config.iceTransportPolicy) {
2436 case 'all':
2437 case 'relay':
2438 this.iceOptions.gatherPolicy = config.iceTransportPolicy;
2439 break;
2440 case 'none':
2441 // FIXME: remove once implementation and spec have added this.
2442 throw new TypeError('iceTransportPolicy "none" not supported');
2443 default:
2444 // don't set iceTransportPolicy.
2445 break;
2446 }
2447 }
2448 if (config && config.iceServers) {
2449        // Edge does not like:
2450        // 1) stun: URLs
2451        // 2) turn: URLs that lack the full form turn:host:port?transport=udp
2452 this.iceOptions.iceServers = config.iceServers.filter(function(server) {
2453 if (server && server.urls) {
2454 server.urls = server.urls.filter(function(url) {
2455 return url.indexOf('turn:') === 0 &&
2456 url.indexOf('transport=udp') !== -1;
2457 })[0];
2458 return !!server.urls;
2459 }
2460 return false;
2461 });
2462 }
2463
2464 // per-track iceGathers, iceTransports, dtlsTransports, rtpSenders, ...
2465 // everything that is needed to describe a SDP m-line.
2466 this.transceivers = [];
2467
2468      // Since the iceGatherer is currently created in createOffer, but we
2469      // must not emit candidates until after setLocalDescription, we buffer
2470      // them in this array.
2471 this._localIceCandidatesBuffer = [];
2472 };
2473
2474 window.RTCPeerConnection.prototype._emitBufferedCandidates = function() {
2475 var self = this;
2476 var sections = SDPUtils.splitSections(self.localDescription.sdp);
2477 // FIXME: need to apply ice candidates in a way which is async but
2478 // in-order
2479 this._localIceCandidatesBuffer.forEach(function(event) {
2480 var end = !event.candidate || Object.keys(event.candidate).length === 0;
2481 if (end) {
2482 for (var j = 1; j < sections.length; j++) {
2483 if (sections[j].indexOf('\r\na=end-of-candidates\r\n') === -1) {
2484 sections[j] += 'a=end-of-candidates\r\n';
2485 }
2486 }
2487 } else if (event.candidate.candidate.indexOf('typ endOfCandidates')
2488 === -1) {
2489 sections[event.candidate.sdpMLineIndex + 1] +=
2490 'a=' + event.candidate.candidate + '\r\n';
2491 }
2492 self.localDescription.sdp = sections.join('');
2493 self.dispatchEvent(event);
2494 if (self.onicecandidate !== null) {
2495 self.onicecandidate(event);
2496 }
2497 if (!event.candidate && self.iceGatheringState !== 'complete') {
2498 var complete = self.transceivers.every(function(transceiver) {
2499 return transceiver.iceGatherer &&
2500 transceiver.iceGatherer.state === 'completed';
2501 });
2502 if (complete) {
2503 self.iceGatheringState = 'complete';
2504 }
2505 }
2506 });
2507 this._localIceCandidatesBuffer = [];
2508 };
2509
2510 window.RTCPeerConnection.prototype.addStream = function(stream) {
2511      // Cloning is necessary mostly for local demos; attaching the same
2512      // stream directly to two different senders does not work (build 10547).
2513 this.localStreams.push(stream.clone());
2514 this._maybeFireNegotiationNeeded();
2515 };
2516
2517 window.RTCPeerConnection.prototype.removeStream = function(stream) {
2518 var idx = this.localStreams.indexOf(stream);
2519 if (idx > -1) {
2520 this.localStreams.splice(idx, 1);
2521 this._maybeFireNegotiationNeeded();
2522 }
2523 };
2524
2525 // Determines the intersection of local and remote capabilities.
2526 window.RTCPeerConnection.prototype._getCommonCapabilities =
2527 function(localCapabilities, remoteCapabilities) {
2528 var commonCapabilities = {
2529 codecs: [],
2530 headerExtensions: [],
2531 fecMechanisms: []
2532 };
2533 localCapabilities.codecs.forEach(function(lCodec) {
2534 for (var i = 0; i < remoteCapabilities.codecs.length; i++) {
2535 var rCodec = remoteCapabilities.codecs[i];
2536 if (lCodec.name.toLowerCase() === rCodec.name.toLowerCase() &&
2537 lCodec.clockRate === rCodec.clockRate &&
2538 lCodec.numChannels === rCodec.numChannels) {
2539 // push rCodec so we reply with offerer payload type
2540 commonCapabilities.codecs.push(rCodec);
2541
2542 // FIXME: also need to determine intersection between
2543 // .rtcpFeedback and .parameters
2544 break;
2545 }
2546 }
2547 });
2548
2549 localCapabilities.headerExtensions
2550 .forEach(function(lHeaderExtension) {
2551 for (var i = 0; i < remoteCapabilities.headerExtensions.length;
2552 i++) {
2553 var rHeaderExtension = remoteCapabilities.headerExtensions[i];
2554 if (lHeaderExtension.uri === rHeaderExtension.uri) {
2555 commonCapabilities.headerExtensions.push(rHeaderExtension);
2556 break;
2557 }
2558 }
2559 });
2560
2561 // FIXME: fecMechanisms
2562 return commonCapabilities;
2563 };
2564
2565 // Create ICE gatherer, ICE transport and DTLS transport.
2566 window.RTCPeerConnection.prototype._createIceAndDtlsTransports =
2567 function(mid, sdpMLineIndex) {
2568 var self = this;
2569 var iceGatherer = new RTCIceGatherer(self.iceOptions);
2570 var iceTransport = new RTCIceTransport(iceGatherer);
2571 iceGatherer.onlocalcandidate = function(evt) {
2572 var event = new Event('icecandidate');
2573 event.candidate = {sdpMid: mid, sdpMLineIndex: sdpMLineIndex};
2574
2575 var cand = evt.candidate;
2576 var end = !cand || Object.keys(cand).length === 0;
2577        // Edge emits an empty object for RTCIceCandidateComplete.
2578 if (end) {
2579 // polyfill since RTCIceGatherer.state is not implemented in
2580 // Edge 10547 yet.
2581 if (iceGatherer.state === undefined) {
2582 iceGatherer.state = 'completed';
2583 }
2584
2585 // Emit a candidate with type endOfCandidates to make the samples
2586 // work. Edge requires addIceCandidate with this empty candidate
2587 // to start checking. The real solution is to signal
2588 // end-of-candidates to the other side when getting the null
2589 // candidate but some apps (like the samples) don't do that.
2590 event.candidate.candidate =
2591 'candidate:1 1 udp 1 0.0.0.0 9 typ endOfCandidates';
2592 } else {
2593 // RTCIceCandidate doesn't have a component, needs to be added
2594 cand.component = iceTransport.component === 'RTCP' ? 2 : 1;
2595 event.candidate.candidate = SDPUtils.writeCandidate(cand);
2596 }
2597
2598 var complete = self.transceivers.every(function(transceiver) {
2599 return transceiver.iceGatherer &&
2600 transceiver.iceGatherer.state === 'completed';
2601 });
2602
2603 // Emit candidate if localDescription is set.
2604 // Also emits null candidate when all gatherers are complete.
2605 switch (self.iceGatheringState) {
2606 case 'new':
2607 self._localIceCandidatesBuffer.push(event);
2608 if (end && complete) {
2609 self._localIceCandidatesBuffer.push(
2610 new Event('icecandidate'));
2611 }
2612 break;
2613 case 'gathering':
2614 self._emitBufferedCandidates();
2615 self.dispatchEvent(event);
2616 if (self.onicecandidate !== null) {
2617 self.onicecandidate(event);
2618 }
2619 if (complete) {
2620 self.dispatchEvent(new Event('icecandidate'));
2621 if (self.onicecandidate !== null) {
2622 self.onicecandidate(new Event('icecandidate'));
2623 }
2624 self.iceGatheringState = 'complete';
2625 }
2626 break;
2627 case 'complete':
2628 // should not happen... currently!
2629 break;
2630 default: // no-op.
2631 break;
2632 }
2633 };
2634 iceTransport.onicestatechange = function() {
2635 self._updateConnectionState();
2636 };
2637
2638 var dtlsTransport = new RTCDtlsTransport(iceTransport);
2639 dtlsTransport.ondtlsstatechange = function() {
2640 self._updateConnectionState();
2641 };
2642 dtlsTransport.onerror = function() {
2643 // onerror does not set state to failed by itself.
2644 dtlsTransport.state = 'failed';
2645 self._updateConnectionState();
2646 };
2647
2648 return {
2649 iceGatherer: iceGatherer,
2650 iceTransport: iceTransport,
2651 dtlsTransport: dtlsTransport
2652 };
2653 };
2654
2655 // Start the RTP Sender and Receiver for a transceiver.
2656 window.RTCPeerConnection.prototype._transceive = function(transceiver,
2657 send, recv) {
2658 var params = this._getCommonCapabilities(transceiver.localCapabilities,
2659 transceiver.remoteCapabilities);
2660 if (send && transceiver.rtpSender) {
2661 params.encodings = transceiver.sendEncodingParameters;
2662 params.rtcp = {
2663 cname: SDPUtils.localCName
2664 };
2665 if (transceiver.recvEncodingParameters.length) {
2666 params.rtcp.ssrc = transceiver.recvEncodingParameters[0].ssrc;
2667 }
2668 transceiver.rtpSender.send(params);
2669 }
2670 if (recv && transceiver.rtpReceiver) {
2671 params.encodings = transceiver.recvEncodingParameters;
2672 params.rtcp = {
2673 cname: transceiver.cname
2674 };
2675 if (transceiver.sendEncodingParameters.length) {
2676 params.rtcp.ssrc = transceiver.sendEncodingParameters[0].ssrc;
2677 }
2678 transceiver.rtpReceiver.receive(params);
2679 }
2680 };
2681
2682 window.RTCPeerConnection.prototype.setLocalDescription =
2683 function(description) {
2684 var self = this;
2685 var sections;
2686 var sessionpart;
2687 if (description.type === 'offer') {
2688 // FIXME: What was the purpose of this empty if statement?
2689 // if (!this._pendingOffer) {
2690 // } else {
2691 if (this._pendingOffer) {
2692 // VERY limited support for SDP munging. Limited to:
2693 // * changing the order of codecs
2694 sections = SDPUtils.splitSections(description.sdp);
2695 sessionpart = sections.shift();
2696 sections.forEach(function(mediaSection, sdpMLineIndex) {
2697 var caps = SDPUtils.parseRtpParameters(mediaSection);
2698 self._pendingOffer[sdpMLineIndex].localCapabilities = caps;
2699 });
2700 this.transceivers = this._pendingOffer;
2701 delete this._pendingOffer;
2702 }
2703 } else if (description.type === 'answer') {
2704 sections = SDPUtils.splitSections(self.remoteDescription.sdp);
2705 sessionpart = sections.shift();
2706 sections.forEach(function(mediaSection, sdpMLineIndex) {
2707 var transceiver = self.transceivers[sdpMLineIndex];
2708 var iceGatherer = transceiver.iceGatherer;
2709 var iceTransport = transceiver.iceTransport;
2710 var dtlsTransport = transceiver.dtlsTransport;
2711 var localCapabilities = transceiver.localCapabilities;
2712 var remoteCapabilities = transceiver.remoteCapabilities;
2713 var rejected = mediaSection.split('\n', 1)[0]
2714 .split(' ', 2)[1] === '0';
2715
2716 if (!rejected) {
2717 var remoteIceParameters = SDPUtils.getIceParameters(
2718 mediaSection, sessionpart);
2719 iceTransport.start(iceGatherer, remoteIceParameters,
2720 'controlled');
2721
2722 var remoteDtlsParameters = SDPUtils.getDtlsParameters(
2723 mediaSection, sessionpart);
2724 dtlsTransport.start(remoteDtlsParameters);
2725
2726 // Calculate intersection of capabilities.
2727 var params = self._getCommonCapabilities(localCapabilities,
2728 remoteCapabilities);
2729
2730 // Start the RTCRtpSender. The RTCRtpReceiver for this
2731 // transceiver has already been started in setRemoteDescription.
2732 self._transceive(transceiver,
2733 params.codecs.length > 0,
2734 false);
2735 }
2736 });
2737 }
2738
2739 this.localDescription = {
2740 type: description.type,
2741 sdp: description.sdp
2742 };
2743 switch (description.type) {
2744 case 'offer':
2745 this._updateSignalingState('have-local-offer');
2746 break;
2747 case 'answer':
2748 this._updateSignalingState('stable');
2749 break;
2750 default:
2751 throw new TypeError('unsupported type "' + description.type +
2752 '"');
2753 }
2754
2755 // If a success callback was provided, emit ICE candidates after it
2756 // has been executed. Otherwise, emit callback after the Promise is
2757 // resolved.
2758 var hasCallback = arguments.length > 1 &&
2759 typeof arguments[1] === 'function';
2760 if (hasCallback) {
2761 var cb = arguments[1];
2762 window.setTimeout(function() {
2763 cb();
2764 if (self.iceGatheringState === 'new') {
2765 self.iceGatheringState = 'gathering';
2766 }
2767 self._emitBufferedCandidates();
2768 }, 0);
2769 }
2770 var p = Promise.resolve();
2771 p.then(function() {
2772 if (!hasCallback) {
2773 if (self.iceGatheringState === 'new') {
2774 self.iceGatheringState = 'gathering';
2775 }
2776 // Usually candidates will be emitted earlier.
2777 window.setTimeout(self._emitBufferedCandidates.bind(self), 500);
2778 }
2779 });
2780 return p;
2781 };
2782
2783 window.RTCPeerConnection.prototype.setRemoteDescription =
2784 function(description) {
2785 var self = this;
2786 var stream = new MediaStream();
2787 var receiverList = [];
2788 var sections = SDPUtils.splitSections(description.sdp);
2789 var sessionpart = sections.shift();
2790 sections.forEach(function(mediaSection, sdpMLineIndex) {
2791 var lines = SDPUtils.splitLines(mediaSection);
2792 var mline = lines[0].substr(2).split(' ');
2793 var kind = mline[0];
2794 var rejected = mline[1] === '0';
2795 var direction = SDPUtils.getDirection(mediaSection, sessionpart);
2796
2797 var transceiver;
2798 var iceGatherer;
2799 var iceTransport;
2800 var dtlsTransport;
2801 var rtpSender;
2802 var rtpReceiver;
2803 var sendEncodingParameters;
2804 var recvEncodingParameters;
2805 var localCapabilities;
2806
2807 var track;
2808 // FIXME: ensure the mediaSection has rtcp-mux set.
2809 var remoteCapabilities = SDPUtils.parseRtpParameters(mediaSection);
2810 var remoteIceParameters;
2811 var remoteDtlsParameters;
2812 if (!rejected) {
2813 remoteIceParameters = SDPUtils.getIceParameters(mediaSection,
2814 sessionpart);
2815 remoteDtlsParameters = SDPUtils.getDtlsParameters(mediaSection,
2816 sessionpart);
2817 }
2818 recvEncodingParameters =
2819 SDPUtils.parseRtpEncodingParameters(mediaSection);
2820
2821 var mid = SDPUtils.matchPrefix(mediaSection, 'a=mid:');
2822 if (mid.length) {
2823 mid = mid[0].substr(6);
2824 } else {
2825 mid = SDPUtils.generateIdentifier();
2826 }
2827
2828 var cname;
2829 // Gets the first SSRC. Note that with RTX there might be multiple
2830 // SSRCs.
2831 var remoteSsrc = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
2832 .map(function(line) {
2833 return SDPUtils.parseSsrcMedia(line);
2834 })
2835 .filter(function(obj) {
2836 return obj.attribute === 'cname';
2837 })[0];
2838 if (remoteSsrc) {
2839 cname = remoteSsrc.value;
2840 }
2841
2842 var isComplete = SDPUtils.matchPrefix(mediaSection,
2843 'a=end-of-candidates').length > 0;
2844 var cands = SDPUtils.matchPrefix(mediaSection, 'a=candidate:')
2845 .map(function(cand) {
2846 return SDPUtils.parseCandidate(cand);
2847 })
2848 .filter(function(cand) {
2849 return cand.component === '1';
2850 });
2851 if (description.type === 'offer' && !rejected) {
2852 var transports = self._createIceAndDtlsTransports(mid,
2853 sdpMLineIndex);
2854 if (isComplete) {
2855 transports.iceTransport.setRemoteCandidates(cands);
2856 }
2857
2858 localCapabilities = RTCRtpReceiver.getCapabilities(kind);
2859 sendEncodingParameters = [{
2860 ssrc: (2 * sdpMLineIndex + 2) * 1001
2861 }];
2862
2863 rtpReceiver = new RTCRtpReceiver(transports.dtlsTransport, kind);
2864
2865 track = rtpReceiver.track;
2866 receiverList.push([track, rtpReceiver]);
2867 // FIXME: not correct when there are multiple streams but that is
2868 // not currently supported in this shim.
2869 stream.addTrack(track);
2870
2871 // FIXME: look at direction.
2872 if (self.localStreams.length > 0 &&
2873 self.localStreams[0].getTracks().length >= sdpMLineIndex) {
2874 // FIXME: actually more complicated, needs to match types etc
2875 var localtrack = self.localStreams[0]
2876 .getTracks()[sdpMLineIndex];
2877 rtpSender = new RTCRtpSender(localtrack,
2878 transports.dtlsTransport);
2879 }
2880
2881 self.transceivers[sdpMLineIndex] = {
2882 iceGatherer: transports.iceGatherer,
2883 iceTransport: transports.iceTransport,
2884 dtlsTransport: transports.dtlsTransport,
2885 localCapabilities: localCapabilities,
2886 remoteCapabilities: remoteCapabilities,
2887 rtpSender: rtpSender,
2888 rtpReceiver: rtpReceiver,
2889 kind: kind,
2890 mid: mid,
2891 cname: cname,
2892 sendEncodingParameters: sendEncodingParameters,
2893 recvEncodingParameters: recvEncodingParameters
2894 };
2895 // Start the RTCRtpReceiver now. The RTPSender is started in
2896 // setLocalDescription.
2897 self._transceive(self.transceivers[sdpMLineIndex],
2898 false,
2899 direction === 'sendrecv' || direction === 'sendonly');
2900 } else if (description.type === 'answer' && !rejected) {
2901 transceiver = self.transceivers[sdpMLineIndex];
2902 iceGatherer = transceiver.iceGatherer;
2903 iceTransport = transceiver.iceTransport;
2904 dtlsTransport = transceiver.dtlsTransport;
2905 rtpSender = transceiver.rtpSender;
2906 rtpReceiver = transceiver.rtpReceiver;
2907 sendEncodingParameters = transceiver.sendEncodingParameters;
2908 localCapabilities = transceiver.localCapabilities;
2909
2910 self.transceivers[sdpMLineIndex].recvEncodingParameters =
2911 recvEncodingParameters;
2912 self.transceivers[sdpMLineIndex].remoteCapabilities =
2913 remoteCapabilities;
2914 self.transceivers[sdpMLineIndex].cname = cname;
2915
2916 if (isComplete) {
2917 iceTransport.setRemoteCandidates(cands);
2918 }
2919 iceTransport.start(iceGatherer, remoteIceParameters,
2920 'controlling');
2921 dtlsTransport.start(remoteDtlsParameters);
2922
2923 self._transceive(transceiver,
2924 direction === 'sendrecv' || direction === 'recvonly',
2925 direction === 'sendrecv' || direction === 'sendonly');
2926
2927 if (rtpReceiver &&
2928 (direction === 'sendrecv' || direction === 'sendonly')) {
2929 track = rtpReceiver.track;
2930 receiverList.push([track, rtpReceiver]);
2931 stream.addTrack(track);
2932 } else {
2933 // FIXME: actually the receiver should be created later.
2934 delete transceiver.rtpReceiver;
2935 }
2936 }
2937 });
2938
2939 this.remoteDescription = {
2940 type: description.type,
2941 sdp: description.sdp
2942 };
2943 switch (description.type) {
2944 case 'offer':
2945 this._updateSignalingState('have-remote-offer');
2946 break;
2947 case 'answer':
2948 this._updateSignalingState('stable');
2949 break;
2950 default:
2951 throw new TypeError('unsupported type "' + description.type +
2952 '"');
2953 }
2954 if (stream.getTracks().length) {
2955 self.remoteStreams.push(stream);
2956 window.setTimeout(function() {
2957 var event = new Event('addstream');
2958 event.stream = stream;
2959 self.dispatchEvent(event);
2960 if (self.onaddstream !== null) {
2961 window.setTimeout(function() {
2962 self.onaddstream(event);
2963 }, 0);
2964 }
2965
2966 receiverList.forEach(function(item) {
2967 var track = item[0];
2968 var receiver = item[1];
2969 var trackEvent = new Event('track');
2970 trackEvent.track = track;
2971 trackEvent.receiver = receiver;
2972 trackEvent.streams = [stream];
2973            self.dispatchEvent(trackEvent);
2974 if (self.ontrack !== null) {
2975 window.setTimeout(function() {
2976 self.ontrack(trackEvent);
2977 }, 0);
2978 }
2979 });
2980 }, 0);
2981 }
2982 if (arguments.length > 1 && typeof arguments[1] === 'function') {
2983 window.setTimeout(arguments[1], 0);
2984 }
2985 return Promise.resolve();
2986 };
2987
2988 window.RTCPeerConnection.prototype.close = function() {
2989 this.transceivers.forEach(function(transceiver) {
2990 /* not yet
2991 if (transceiver.iceGatherer) {
2992 transceiver.iceGatherer.close();
2993 }
2994 */
2995 if (transceiver.iceTransport) {
2996 transceiver.iceTransport.stop();
2997 }
2998 if (transceiver.dtlsTransport) {
2999 transceiver.dtlsTransport.stop();
3000 }
3001 if (transceiver.rtpSender) {
3002 transceiver.rtpSender.stop();
3003 }
3004 if (transceiver.rtpReceiver) {
3005 transceiver.rtpReceiver.stop();
3006 }
3007 });
3008 // FIXME: clean up tracks, local streams, remote streams, etc
3009 this._updateSignalingState('closed');
3010 };
3011
3012 // Update the signaling state.
3013 window.RTCPeerConnection.prototype._updateSignalingState =
3014 function(newState) {
3015 this.signalingState = newState;
3016 var event = new Event('signalingstatechange');
3017 this.dispatchEvent(event);
3018 if (this.onsignalingstatechange !== null) {
3019 this.onsignalingstatechange(event);
3020 }
3021 };
3022
3023 // Determine whether to fire the negotiationneeded event.
3024 window.RTCPeerConnection.prototype._maybeFireNegotiationNeeded =
3025 function() {
3026 // Fire away (for now).
3027 var event = new Event('negotiationneeded');
3028 this.dispatchEvent(event);
3029 if (this.onnegotiationneeded !== null) {
3030 this.onnegotiationneeded(event);
3031 }
3032 };
3033
3034 // Update the connection state.
3035 window.RTCPeerConnection.prototype._updateConnectionState = function() {
3036 var self = this;
3037 var newState;
3038 var states = {
3039 'new': 0,
3040 closed: 0,
3041 connecting: 0,
3042 checking: 0,
3043 connected: 0,
3044 completed: 0,
3045        failed: 0,
        disconnected: 0
3046 };
3047 this.transceivers.forEach(function(transceiver) {
3048 states[transceiver.iceTransport.state]++;
3049 states[transceiver.dtlsTransport.state]++;
3050 });
3051 // ICETransport.completed and connected are the same for this purpose.
3052 states.connected += states.completed;
3053
3054 newState = 'new';
3055 if (states.failed > 0) {
3056 newState = 'failed';
3057 } else if (states.connecting > 0 || states.checking > 0) {
3058 newState = 'connecting';
3059 } else if (states.disconnected > 0) {
3060 newState = 'disconnected';
3061 } else if (states.new > 0) {
3062 newState = 'new';
3063 } else if (states.connected > 0 || states.completed > 0) {
3064 newState = 'connected';
3065 }
3066
3067 if (newState !== self.iceConnectionState) {
3068 self.iceConnectionState = newState;
3069 var event = new Event('iceconnectionstatechange');
3070 this.dispatchEvent(event);
3071 if (this.oniceconnectionstatechange !== null) {
3072 this.oniceconnectionstatechange(event);
3073 }
3074 }
3075 };
3076
3077 window.RTCPeerConnection.prototype.createOffer = function() {
3078 var self = this;
3079 if (this._pendingOffer) {
3080 throw new Error('createOffer called while there is a pending offer.');
3081 }
3082 var offerOptions;
3083 if (arguments.length === 1 && typeof arguments[0] !== 'function') {
3084 offerOptions = arguments[0];
3085 } else if (arguments.length === 3) {
3086 offerOptions = arguments[2];
3087 }
3088
3089 var tracks = [];
3090 var numAudioTracks = 0;
3091 var numVideoTracks = 0;
3092 // Default to sendrecv.
3093 if (this.localStreams.length) {
3094 numAudioTracks = this.localStreams[0].getAudioTracks().length;
3095 numVideoTracks = this.localStreams[0].getVideoTracks().length;
3096 }
3097 // Determine number of audio and video tracks we need to send/recv.
3098 if (offerOptions) {
3099 // Reject Chrome legacy constraints.
3100 if (offerOptions.mandatory || offerOptions.optional) {
3101 throw new TypeError(
3102 'Legacy mandatory/optional constraints not supported.');
3103 }
3104 if (offerOptions.offerToReceiveAudio !== undefined) {
3105 numAudioTracks = offerOptions.offerToReceiveAudio;
3106 }
3107 if (offerOptions.offerToReceiveVideo !== undefined) {
3108 numVideoTracks = offerOptions.offerToReceiveVideo;
3109 }
3110 }
3111 if (this.localStreams.length) {
3112 // Push local streams.
3113 this.localStreams[0].getTracks().forEach(function(track) {
3114 tracks.push({
3115 kind: track.kind,
3116 track: track,
3117 wantReceive: track.kind === 'audio' ?
3118 numAudioTracks > 0 : numVideoTracks > 0
3119 });
3120 if (track.kind === 'audio') {
3121 numAudioTracks--;
3122 } else if (track.kind === 'video') {
3123 numVideoTracks--;
3124 }
3125 });
3126 }
3127 // Create M-lines for recvonly streams.
3128 while (numAudioTracks > 0 || numVideoTracks > 0) {
3129 if (numAudioTracks > 0) {
3130 tracks.push({
3131 kind: 'audio',
3132 wantReceive: true
3133 });
3134 numAudioTracks--;
3135 }
3136 if (numVideoTracks > 0) {
3137 tracks.push({
3138 kind: 'video',
3139 wantReceive: true
3140 });
3141 numVideoTracks--;
3142 }
3143 }
3144
3145 var sdp = SDPUtils.writeSessionBoilerplate();
3146 var transceivers = [];
3147 tracks.forEach(function(mline, sdpMLineIndex) {
3148 // For each track, create an ice gatherer, ice transport,
3149 // dtls transport, potentially rtpsender and rtpreceiver.
3150 var track = mline.track;
3151 var kind = mline.kind;
3152 var mid = SDPUtils.generateIdentifier();
3153
3154 var transports = self._createIceAndDtlsTransports(mid, sdpMLineIndex);
3155
3156 var localCapabilities = RTCRtpSender.getCapabilities(kind);
3157 var rtpSender;
3158 var rtpReceiver;
3159
3160 // generate an ssrc now, to be used later in rtpSender.send
3161 var sendEncodingParameters = [{
3162 ssrc: (2 * sdpMLineIndex + 1) * 1001
3163 }];
3164 if (track) {
3165 rtpSender = new RTCRtpSender(track, transports.dtlsTransport);
3166 }
3167
3168 if (mline.wantReceive) {
3169 rtpReceiver = new RTCRtpReceiver(transports.dtlsTransport, kind);
3170 }
3171
3172 transceivers[sdpMLineIndex] = {
3173 iceGatherer: transports.iceGatherer,
3174 iceTransport: transports.iceTransport,
3175 dtlsTransport: transports.dtlsTransport,
3176 localCapabilities: localCapabilities,
3177 remoteCapabilities: null,
3178 rtpSender: rtpSender,
3179 rtpReceiver: rtpReceiver,
3180 kind: kind,
3181 mid: mid,
3182 sendEncodingParameters: sendEncodingParameters,
3183 recvEncodingParameters: null
3184 };
3185 var transceiver = transceivers[sdpMLineIndex];
3186 sdp += SDPUtils.writeMediaSection(transceiver,
3187 transceiver.localCapabilities, 'offer', self.localStreams[0]);
3188 });
3189
3190 this._pendingOffer = transceivers;
3191 var desc = new RTCSessionDescription({
3192 type: 'offer',
3193 sdp: sdp
3194 });
3195 if (arguments.length && typeof arguments[0] === 'function') {
3196 window.setTimeout(arguments[0], 0, desc);
3197 }
3198 return Promise.resolve(desc);
3199 };
3200
3201 window.RTCPeerConnection.prototype.createAnswer = function() {
3202 var self = this;
3203
3204 var sdp = SDPUtils.writeSessionBoilerplate();
3205 this.transceivers.forEach(function(transceiver) {
3206 // Calculate intersection of capabilities.
3207 var commonCapabilities = self._getCommonCapabilities(
3208 transceiver.localCapabilities,
3209 transceiver.remoteCapabilities);
3210
3211 sdp += SDPUtils.writeMediaSection(transceiver, commonCapabilities,
3212 'answer', self.localStreams[0]);
3213 });
3214
3215 var desc = new RTCSessionDescription({
3216 type: 'answer',
3217 sdp: sdp
3218 });
3219 if (arguments.length && typeof arguments[0] === 'function') {
3220 window.setTimeout(arguments[0], 0, desc);
3221 }
3222 return Promise.resolve(desc);
3223 };
3224
3225 window.RTCPeerConnection.prototype.addIceCandidate = function(candidate) {
3226 var mLineIndex = candidate.sdpMLineIndex;
3227 if (candidate.sdpMid) {
3228 for (var i = 0; i < this.transceivers.length; i++) {
3229 if (this.transceivers[i].mid === candidate.sdpMid) {
3230 mLineIndex = i;
3231 break;
3232 }
3233 }
3234 }
3235 var transceiver = this.transceivers[mLineIndex];
3236 if (transceiver) {
3237 var cand = Object.keys(candidate.candidate).length > 0 ?
3238 SDPUtils.parseCandidate(candidate.candidate) : {};
3239 // Ignore Chrome's invalid candidates since Edge does not like them.
3240 if (cand.protocol === 'tcp' && cand.port === 0) {
3241 return;
3242 }
3243 // Ignore RTCP candidates, we assume RTCP-MUX.
3244 if (cand.component !== '1') {
3245 return;
3246 }
3247 // A dirty hack to make samples work.
3248 if (cand.type === 'endOfCandidates') {
3249 cand = {};
3250 }
3251 transceiver.iceTransport.addRemoteCandidate(cand);
3252
3253 // update the remoteDescription.
3254 var sections = SDPUtils.splitSections(this.remoteDescription.sdp);
3255 sections[mLineIndex + 1] += (cand.type ? candidate.candidate.trim()
3256 : 'a=end-of-candidates') + '\r\n';
3257 this.remoteDescription.sdp = sections.join('');
3258 }
3259 if (arguments.length > 1 && typeof arguments[1] === 'function') {
3260 window.setTimeout(arguments[1], 0);
3261 }
3262 return Promise.resolve();
3263 };
3264
3265 window.RTCPeerConnection.prototype.getStats = function() {
3266 var promises = [];
3267 this.transceivers.forEach(function(transceiver) {
3268 ['rtpSender', 'rtpReceiver', 'iceGatherer', 'iceTransport',
3269 'dtlsTransport'].forEach(function(method) {
3270 if (transceiver[method]) {
3271 promises.push(transceiver[method].getStats());
3272 }
3273 });
3274 });
3275 var cb = arguments.length > 1 && typeof arguments[1] === 'function' &&
3276 arguments[1];
3277 return new Promise(function(resolve) {
3278 var results = {};
3279 Promise.all(promises).then(function(res) {
3280 res.forEach(function(result) {
3281 Object.keys(result).forEach(function(id) {
3282 results[id] = result[id];
3283 });
3284 });
3285 if (cb) {
3286 window.setTimeout(cb, 0, results);
3287 }
3288 resolve(results);
3289 });
3290 });
3291 };
3292 },
3293
3294 // Attach a media stream to an element.
3295 attachMediaStream: function(element, stream) {
3296 logging('DEPRECATED, attachMediaStream will soon be removed.');
3297 element.srcObject = stream;
3298 },
3299
3300 reattachMediaStream: function(to, from) {
3301 logging('DEPRECATED, reattachMediaStream will soon be removed.');
3302 to.srcObject = from.srcObject;
3303 }
3304};
3305
3306// Expose public methods.
3307module.exports = {
3308 shimPeerConnection: edgeShim.shimPeerConnection,
3309 attachMediaStream: edgeShim.attachMediaStream,
3310 reattachMediaStream: edgeShim.reattachMediaStream
3311};
3312
3313},{"../utils":14,"./edge_sdp":21}],21:[function(require,module,exports){
3314/*
3315 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
3316 *
3317 * Use of this source code is governed by a BSD-style license
3318 * that can be found in the LICENSE file in the root of the source
3319 * tree.
3320 */
3321 /* eslint-env node */
3322'use strict';
3323
3324// SDP helpers.
3325var SDPUtils = {};
3326
3327// Generate an alphanumeric identifier for cname or mids.
3328// TODO: use UUIDs instead? https://gist.github.com/jed/982883
3329SDPUtils.generateIdentifier = function() {
3330 return Math.random().toString(36).substr(2, 10);
3331};
3332
3333// The RTCP CNAME used by all peerconnections from the same JS.
3334SDPUtils.localCName = SDPUtils.generateIdentifier();
3335
3336// Splits SDP into lines, dealing with both CRLF and LF.
3337SDPUtils.splitLines = function(blob) {
3338 return blob.trim().split('\n').map(function(line) {
3339 return line.trim();
3340 });
3341};
3342// Splits SDP into sessionpart and mediasections. Ensures CRLF.
3343SDPUtils.splitSections = function(blob) {
3344 var parts = blob.split('\nm=');
3345 return parts.map(function(part, index) {
3346 return (index > 0 ? 'm=' + part : part).trim() + '\r\n';
3347 });
3348};
3349
3350// Returns lines that start with a certain prefix.
3351SDPUtils.matchPrefix = function(blob, prefix) {
3352 return SDPUtils.splitLines(blob).filter(function(line) {
3353 return line.indexOf(prefix) === 0;
3354 });
3355};
3356
3357// Parses an ICE candidate line. Sample input:
3358// candidate:702786350 2 udp 41819902 8.8.8.8 60769 typ relay raddr 8.8.8.8
3359// rport 55996
3360SDPUtils.parseCandidate = function(line) {
3361 var parts;
3362 // Parse both variants.
3363 if (line.indexOf('a=candidate:') === 0) {
3364 parts = line.substring(12).split(' ');
3365 } else {
3366 parts = line.substring(10).split(' ');
3367 }
3368
3369 var candidate = {
3370 foundation: parts[0],
3371 component: parts[1],
3372 protocol: parts[2].toLowerCase(),
3373 priority: parseInt(parts[3], 10),
3374 ip: parts[4],
3375 port: parseInt(parts[5], 10),
3376 // skip parts[6] == 'typ'
3377 type: parts[7]
3378 };
3379
3380 for (var i = 8; i < parts.length; i += 2) {
3381 switch (parts[i]) {
3382 case 'raddr':
3383 candidate.relatedAddress = parts[i + 1];
3384 break;
3385 case 'rport':
3386 candidate.relatedPort = parseInt(parts[i + 1], 10);
3387 break;
3388 case 'tcptype':
3389 candidate.tcpType = parts[i + 1];
3390 break;
3391 default: // Unknown extensions are silently ignored.
3392 break;
3393 }
3394 }
3395 return candidate;
3396};
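// For example, the sample line above parses to roughly:
//   { foundation: '702786350', component: '2', protocol: 'udp',
//     priority: 41819902, ip: '8.8.8.8', port: 60769, type: 'relay',
//     relatedAddress: '8.8.8.8', relatedPort: 55996 }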
3397
3398// Translates a candidate object into SDP candidate attribute.
3399SDPUtils.writeCandidate = function(candidate) {
3400 var sdp = [];
3401 sdp.push(candidate.foundation);
3402 sdp.push(candidate.component);
3403 sdp.push(candidate.protocol.toUpperCase());
3404 sdp.push(candidate.priority);
3405 sdp.push(candidate.ip);
3406 sdp.push(candidate.port);
3407
3408 var type = candidate.type;
3409 sdp.push('typ');
3410 sdp.push(type);
3411 if (type !== 'host' && candidate.relatedAddress &&
3412 candidate.relatedPort) {
3413 sdp.push('raddr');
3414 sdp.push(candidate.relatedAddress); // was: relAddr
3415 sdp.push('rport');
3416 sdp.push(candidate.relatedPort); // was: relPort
3417 }
3418 if (candidate.tcpType && candidate.protocol.toLowerCase() === 'tcp') {
3419 sdp.push('tcptype');
3420 sdp.push(candidate.tcpType);
3421 }
3422 return 'candidate:' + sdp.join(' ');
3423};
3424
3425// Parses an rtpmap line, returns RTCRtpCodecParameters. Sample input:
3426// a=rtpmap:111 opus/48000/2
3427SDPUtils.parseRtpMap = function(line) {
3428 var parts = line.substr(9).split(' ');
3429 var parsed = {
3430 payloadType: parseInt(parts.shift(), 10) // was: id
3431 };
3432
3433 parts = parts[0].split('/');
3434
3435 parsed.name = parts[0];
3436 parsed.clockRate = parseInt(parts[1], 10); // was: clockrate
3437 // was: channels
3438 parsed.numChannels = parts.length === 3 ? parseInt(parts[2], 10) : 1;
3439 return parsed;
3440};
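// For example, 'a=rtpmap:111 opus/48000/2' parses to roughly:
//   { payloadType: 111, name: 'opus', clockRate: 48000, numChannels: 2 }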
3441
3442// Generate an a=rtpmap line from RTCRtpCodecCapability or
3443// RTCRtpCodecParameters.
3444SDPUtils.writeRtpMap = function(codec) {
3445 var pt = codec.payloadType;
3446 if (codec.preferredPayloadType !== undefined) {
3447 pt = codec.preferredPayloadType;
3448 }
3449 return 'a=rtpmap:' + pt + ' ' + codec.name + '/' + codec.clockRate +
3450 (codec.numChannels !== 1 ? '/' + codec.numChannels : '') + '\r\n';
3451};
3452
3453// Parses an a=extmap line (headerextension from RFC 5285). Sample input:
3454// a=extmap:2 urn:ietf:params:rtp-hdrext:toffset
3455SDPUtils.parseExtmap = function(line) {
3456 var parts = line.substr(9).split(' ');
3457 return {
3458 id: parseInt(parts[0], 10),
3459 uri: parts[1]
3460 };
3461};
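// For example, 'a=extmap:2 urn:ietf:params:rtp-hdrext:toffset' parses to:
//   { id: 2, uri: 'urn:ietf:params:rtp-hdrext:toffset' }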
3462
3463// Generates a=extmap line from RTCRtpHeaderExtensionParameters or
3464// RTCRtpHeaderExtension.
3465SDPUtils.writeExtmap = function(headerExtension) {
3466 return 'a=extmap:' + (headerExtension.id || headerExtension.preferredId) +
3467 ' ' + headerExtension.uri + '\r\n';
3468};
3469
3470// Parses an fmtp line, returns dictionary. Sample input:
3471// a=fmtp:96 vbr=on;cng=on
3472// Also deals with vbr=on; cng=on
3473SDPUtils.parseFmtp = function(line) {
3474 var parsed = {};
3475 var kv;
3476 var parts = line.substr(line.indexOf(' ') + 1).split(';');
3477 for (var j = 0; j < parts.length; j++) {
3478 kv = parts[j].trim().split('=');
3479 parsed[kv[0].trim()] = kv[1];
3480 }
3481 return parsed;
3482};
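// For example, 'a=fmtp:96 vbr=on;cng=on' parses to { vbr: 'on', cng: 'on' }.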
3483
3484// Generates an a=fmtp line from RTCRtpCodecCapability or RTCRtpCodecParameters.
3485SDPUtils.writeFmtp = function(codec) {
3486 var line = '';
3487 var pt = codec.payloadType;
3488 if (codec.preferredPayloadType !== undefined) {
3489 pt = codec.preferredPayloadType;
3490 }
3491 if (codec.parameters && Object.keys(codec.parameters).length) {
3492 var params = [];
3493 Object.keys(codec.parameters).forEach(function(param) {
3494 params.push(param + '=' + codec.parameters[param]);
3495 });
3496 line += 'a=fmtp:' + pt + ' ' + params.join(';') + '\r\n';
3497 }
3498 return line;
3499};
3500
3501// Parses an rtcp-fb line, returns RTCRtcpFeedback object. Sample input:
3502// a=rtcp-fb:98 nack rpsi
3503SDPUtils.parseRtcpFb = function(line) {
3504 var parts = line.substr(line.indexOf(' ') + 1).split(' ');
3505 return {
3506 type: parts.shift(),
3507 parameter: parts.join(' ')
3508 };
3509};
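// For example, 'a=rtcp-fb:98 nack rpsi' parses to
//   { type: 'nack', parameter: 'rpsi' }.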
3510// Generate a=rtcp-fb lines from RTCRtpCodecCapability or RTCRtpCodecParameters.
3511SDPUtils.writeRtcpFb = function(codec) {
3512 var lines = '';
3513 var pt = codec.payloadType;
3514 if (codec.preferredPayloadType !== undefined) {
3515 pt = codec.preferredPayloadType;
3516 }
3517 if (codec.rtcpFeedback && codec.rtcpFeedback.length) {
3518 // FIXME: special handling for trr-int?
3519 codec.rtcpFeedback.forEach(function(fb) {
3520 lines += 'a=rtcp-fb:' + pt + ' ' + fb.type + ' ' + fb.parameter +
3521 '\r\n';
3522 });
3523 }
3524 return lines;
3525};
3526
3527// Parses an RFC 5576 ssrc media attribute. Sample input:
3528// a=ssrc:3735928559 cname:something
3529SDPUtils.parseSsrcMedia = function(line) {
3530 var sp = line.indexOf(' ');
3531 var parts = {
3532 ssrc: parseInt(line.substr(7, sp - 7), 10)
3533 };
3534 var colon = line.indexOf(':', sp);
3535 if (colon > -1) {
3536 parts.attribute = line.substr(sp + 1, colon - sp - 1);
3537 parts.value = line.substr(colon + 1);
3538 } else {
3539 parts.attribute = line.substr(sp + 1);
3540 }
3541 return parts;
3542};
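// For example, 'a=ssrc:3735928559 cname:something' parses to roughly:
//   { ssrc: 3735928559, attribute: 'cname', value: 'something' }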
3543
3544// Extracts DTLS parameters from SDP media section or sessionpart.
3545// FIXME: for consistency with other functions this should only
3546// get the fingerprint line as input. See also getIceParameters.
3547SDPUtils.getDtlsParameters = function(mediaSection, sessionpart) {
3548 var lines = SDPUtils.splitLines(mediaSection);
3549 // Search in session part, too.
3550 lines = lines.concat(SDPUtils.splitLines(sessionpart));
3551 var fpLine = lines.filter(function(line) {
3552 return line.indexOf('a=fingerprint:') === 0;
3553 })[0].substr(14);
3554 // Note: a=setup line is ignored since we use the 'auto' role.
3555 var dtlsParameters = {
3556 role: 'auto',
3557 fingerprints: [{
3558 algorithm: fpLine.split(' ')[0],
3559 value: fpLine.split(' ')[1]
3560 }]
3561 };
3562 return dtlsParameters;
3563};
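// For example, given a (hypothetical) line 'a=fingerprint:sha-256 AB:CD:EF'
// in the media section or session part, this returns roughly:
//   { role: 'auto', fingerprints: [{ algorithm: 'sha-256', value: 'AB:CD:EF' }] }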
3564
3565// Serializes DTLS parameters to SDP.
3566SDPUtils.writeDtlsParameters = function(params, setupType) {
3567 var sdp = 'a=setup:' + setupType + '\r\n';
3568 params.fingerprints.forEach(function(fp) {
3569 sdp += 'a=fingerprint:' + fp.algorithm + ' ' + fp.value + '\r\n';
3570 });
3571 return sdp;
3572};
3573// Parses ICE information from SDP media section or sessionpart.
3574// FIXME: for consistency with other functions this should only
3575// get the ice-ufrag and ice-pwd lines as input.
3576SDPUtils.getIceParameters = function(mediaSection, sessionpart) {
3577 var lines = SDPUtils.splitLines(mediaSection);
3578 // Search in session part, too.
3579 lines = lines.concat(SDPUtils.splitLines(sessionpart));
3580 var iceParameters = {
3581 usernameFragment: lines.filter(function(line) {
3582 return line.indexOf('a=ice-ufrag:') === 0;
3583 })[0].substr(12),
3584 password: lines.filter(function(line) {
3585 return line.indexOf('a=ice-pwd:') === 0;
3586 })[0].substr(10)
3587 };
3588 return iceParameters;
3589};
3590
3591// Serializes ICE parameters to SDP.
3592SDPUtils.writeIceParameters = function(params) {
3593 return 'a=ice-ufrag:' + params.usernameFragment + '\r\n' +
3594 'a=ice-pwd:' + params.password + '\r\n';
3595};
3596
3597// Parses the SDP media section and returns RTCRtpParameters.
3598SDPUtils.parseRtpParameters = function(mediaSection) {
3599 var description = {
3600 codecs: [],
3601 headerExtensions: [],
3602 fecMechanisms: [],
3603 rtcp: []
3604 };
3605 var lines = SDPUtils.splitLines(mediaSection);
3606 var mline = lines[0].split(' ');
3607 for (var i = 3; i < mline.length; i++) { // find all codecs from mline[3..]
3608 var pt = mline[i];
3609 var rtpmapline = SDPUtils.matchPrefix(
3610 mediaSection, 'a=rtpmap:' + pt + ' ')[0];
3611 if (rtpmapline) {
3612 var codec = SDPUtils.parseRtpMap(rtpmapline);
3613 var fmtps = SDPUtils.matchPrefix(
3614 mediaSection, 'a=fmtp:' + pt + ' ');
3615 // Only the first a=fmtp:<pt> is considered.
3616 codec.parameters = fmtps.length ? SDPUtils.parseFmtp(fmtps[0]) : {};
3617 codec.rtcpFeedback = SDPUtils.matchPrefix(
3618 mediaSection, 'a=rtcp-fb:' + pt + ' ')
3619 .map(SDPUtils.parseRtcpFb);
3620 description.codecs.push(codec);
3621 // parse FEC mechanisms from rtpmap lines.
3622 switch (codec.name.toUpperCase()) {
3623 case 'RED':
3624 case 'ULPFEC':
3625 description.fecMechanisms.push(codec.name.toUpperCase());
3626 break;
3627 default: // only RED and ULPFEC are recognized as FEC mechanisms.
3628 break;
3629 }
3630 }
3631 }
3632 SDPUtils.matchPrefix(mediaSection, 'a=extmap:').forEach(function(line) {
3633 description.headerExtensions.push(SDPUtils.parseExtmap(line));
3634 });
3635 // FIXME: parse rtcp.
3636 return description;
3637};
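// Example (illustrative; hypothetical audio section):
//   m=audio 9 UDP/TLS/RTP/SAVPF 111
//   a=rtpmap:111 opus/48000/2
//   a=fmtp:111 minptime=10;useinbandfec=1
// yields one codec entry for opus (payload type 111) with parameters roughly
// {minptime: '10', useinbandfec: '1'}, an empty rtcpFeedback list, and empty
// headerExtensions and fecMechanisms arrays.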
3638
3639// Generates parts of the SDP media section describing the capabilities /
3640// parameters.
3641SDPUtils.writeRtpDescription = function(kind, caps) {
3642 var sdp = '';
3643
3644 // Build the mline.
3645 sdp += 'm=' + kind + ' ';
3646 sdp += caps.codecs.length > 0 ? '9' : '0'; // reject if no codecs.
3647 sdp += ' UDP/TLS/RTP/SAVPF ';
3648 sdp += caps.codecs.map(function(codec) {
3649 if (codec.preferredPayloadType !== undefined) {
3650 return codec.preferredPayloadType;
3651 }
3652 return codec.payloadType;
3653 }).join(' ') + '\r\n';
3654
3655 sdp += 'c=IN IP4 0.0.0.0\r\n';
3656 sdp += 'a=rtcp:9 IN IP4 0.0.0.0\r\n';
3657
3658 // Add a=rtpmap lines for each codec. Also fmtp and rtcp-fb.
3659 caps.codecs.forEach(function(codec) {
3660 sdp += SDPUtils.writeRtpMap(codec);
3661 sdp += SDPUtils.writeFmtp(codec);
3662 sdp += SDPUtils.writeRtcpFb(codec);
3663 });
3664 // FIXME: add headerExtensions, fecMechanisms and rtcp.
3665 sdp += 'a=rtcp-mux\r\n';
3666 return sdp;
3667};
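// Example (illustrative): with a single codec whose preferred payload type is
// 111, the generated section starts with
//   m=audio 9 UDP/TLS/RTP/SAVPF 111
// followed by the placeholder c=/a=rtcp lines, the per-codec rtpmap/fmtp/rtcp-fb
// lines and a=rtcp-mux; with no codecs the port is 0, i.e. the m-line is rejected.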
3668
3669// Parses the SDP media section and returns an array of
3670// RTCRtpEncodingParameters.
3671SDPUtils.parseRtpEncodingParameters = function(mediaSection) {
3672 var encodingParameters = [];
3673 var description = SDPUtils.parseRtpParameters(mediaSection);
3674 var hasRed = description.fecMechanisms.indexOf('RED') !== -1;
3675 var hasUlpfec = description.fecMechanisms.indexOf('ULPFEC') !== -1;
3676
3677 // filter a=ssrc:... cname:, ignore PlanB-msid
3678 var ssrcs = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
3679 .map(function(line) {
3680 return SDPUtils.parseSsrcMedia(line);
3681 })
3682 .filter(function(parts) {
3683 return parts.attribute === 'cname';
3684 });
3685 var primarySsrc = ssrcs.length > 0 && ssrcs[0].ssrc;
3686 var secondarySsrc;
3687
3688 var flows = SDPUtils.matchPrefix(mediaSection, 'a=ssrc-group:FID')
3689 .map(function(line) {
3690 var parts = line.split(' ');
3691 parts.shift();
3692 return parts.map(function(part) {
3693 return parseInt(part, 10);
3694 });
3695 });
3696 if (flows.length > 0 && flows[0].length > 1 && flows[0][0] === primarySsrc) {
3697 secondarySsrc = flows[0][1];
3698 }
3699
3700 description.codecs.forEach(function(codec) {
3701 if (codec.name.toUpperCase() === 'RTX' && codec.parameters.apt) {
3702 var encParam = {
3703 ssrc: primarySsrc,
3704 codecPayloadType: parseInt(codec.parameters.apt, 10),
3705 rtx: {
3706 ssrc: secondarySsrc
3707 }
3708 };
3709 encodingParameters.push(encParam);
3710 if (hasRed) {
3711 encParam = JSON.parse(JSON.stringify(encParam));
3712 encParam.fec = {
3713 ssrc: secondarySsrc,
3714 mechanism: hasUlpfec ? 'red+ulpfec' : 'red'
3715 };
3716 encodingParameters.push(encParam);
3717 }
3718 }
3719 });
3720 if (encodingParameters.length === 0 && primarySsrc) {
3721 encodingParameters.push({
3722 ssrc: primarySsrc
3723 });
3724 }
3725
3726 // we support both b=AS and b=TIAS but interpret AS as TIAS.
3727 var bandwidth = SDPUtils.matchPrefix(mediaSection, 'b=');
3728 if (bandwidth.length) {
3729 if (bandwidth[0].indexOf('b=TIAS:') === 0) {
3730 bandwidth = parseInt(bandwidth[0].substr(7), 10);
3731 } else if (bandwidth[0].indexOf('b=AS:') === 0) {
3732 bandwidth = parseInt(bandwidth[0].substr(5), 10);
3733 }
3734 encodingParameters.forEach(function(params) {
3735 params.maxBitrate = bandwidth;
3736 });
3737 }
3738 return encodingParameters;
3739};
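// Example (illustrative; hypothetical SSRCs): a video section advertising rtx
// with a=fmtp:<pt> apt=100, together with
//   a=ssrc-group:FID 1001 1002
//   a=ssrc:1001 cname:foo
//   a=ssrc:1002 cname:foo
// yields [{ssrc: 1001, codecPayloadType: 100, rtx: {ssrc: 1002}}]; a b=TIAS:
// (or b=AS:) line additionally sets maxBitrate on every entry.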
3740
3741SDPUtils.writeSessionBoilerplate = function() {
3742 // FIXME: sess-id should be an NTP timestamp.
3743 return 'v=0\r\n' +
3744 'o=thisisadapterortc 8169639915646943137 2 IN IP4 127.0.0.1\r\n' +
3745 's=-\r\n' +
3746 't=0 0\r\n';
3747};
3748
3749SDPUtils.writeMediaSection = function(transceiver, caps, type, stream) {
3750 var sdp = SDPUtils.writeRtpDescription(transceiver.kind, caps);
3751
3752 // Map ICE parameters (ufrag, pwd) to SDP.
3753 sdp += SDPUtils.writeIceParameters(
3754 transceiver.iceGatherer.getLocalParameters());
3755
3756 // Map DTLS parameters to SDP.
3757 sdp += SDPUtils.writeDtlsParameters(
3758 transceiver.dtlsTransport.getLocalParameters(),
3759 type === 'offer' ? 'actpass' : 'active');
3760
3761 sdp += 'a=mid:' + transceiver.mid + '\r\n';
3762
3763 if (transceiver.rtpSender && transceiver.rtpReceiver) {
3764 sdp += 'a=sendrecv\r\n';
3765 } else if (transceiver.rtpSender) {
3766 sdp += 'a=sendonly\r\n';
3767 } else if (transceiver.rtpReceiver) {
3768 sdp += 'a=recvonly\r\n';
3769 } else {
3770 sdp += 'a=inactive\r\n';
3771 }
3772
3773 // FIXME: for RTX there might be multiple SSRCs. Not implemented in Edge yet.
3774 if (transceiver.rtpSender) {
3775 var msid = 'msid:' + stream.id + ' ' +
3776 transceiver.rtpSender.track.id + '\r\n';
3777 sdp += 'a=' + msid;
3778 sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].ssrc +
3779 ' ' + msid;
3780 }
3781 // FIXME: this should be written by writeRtpDescription.
3782 sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].ssrc +
3783 ' cname:' + SDPUtils.localCName + '\r\n';
3784 return sdp;
3785};
3786
3787// Gets the direction from the mediaSection or the sessionpart.
3788SDPUtils.getDirection = function(mediaSection, sessionpart) {
3789 // Look for sendrecv, sendonly, recvonly, inactive, default to sendrecv.
3790 var lines = SDPUtils.splitLines(mediaSection);
3791 for (var i = 0; i < lines.length; i++) {
3792 switch (lines[i]) {
3793 case 'a=sendrecv':
3794 case 'a=sendonly':
3795 case 'a=recvonly':
3796 case 'a=inactive':
3797 return lines[i].substr(2);
3798 default:
3799 // FIXME: What should happen here?
3800 }
3801 }
3802 if (sessionpart) {
3803 return SDPUtils.getDirection(sessionpart);
3804 }
3805 return 'sendrecv';
3806};
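// Example (illustrative): a media section containing 'a=sendonly' returns
// 'sendonly'; if neither the media section nor the session part carries a
// direction attribute, the default 'sendrecv' is returned.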
3807
3808// Expose public methods.
3809module.exports = SDPUtils;
3810
3811},{}],19:[function(require,module,exports){
3812/*
3813 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
3814 *
3815 * Use of this source code is governed by a BSD-style license
3816 * that can be found in the LICENSE file in the root of the source
3817 * tree.
3818 */
3819 /* eslint-env node */
3820'use strict';
3821var logging = require('../utils.js').log;
3822
3823// Expose public methods.
3824module.exports = function() {
3825 var constraintsToChrome_ = function(c) {
3826 if (typeof c !== 'object' || c.mandatory || c.optional) {
3827 return c;
3828 }
3829 var cc = {};
3830 Object.keys(c).forEach(function(key) {
3831 if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
3832 return;
3833 }
3834 var r = (typeof c[key] === 'object') ? c[key] : {ideal: c[key]};
3835 if (r.exact !== undefined && typeof r.exact === 'number') {
3836 r.min = r.max = r.exact;
3837 }
3838 var oldname_ = function(prefix, name) {
3839 if (prefix) {
3840 return prefix + name.charAt(0).toUpperCase() + name.slice(1);
3841 }
3842 return (name === 'deviceId') ? 'sourceId' : name;
3843 };
3844 if (r.ideal !== undefined) {
3845 cc.optional = cc.optional || [];
3846 var oc = {};
3847 if (typeof r.ideal === 'number') {
3848 oc[oldname_('min', key)] = r.ideal;
3849 cc.optional.push(oc);
3850 oc = {};
3851 oc[oldname_('max', key)] = r.ideal;
3852 cc.optional.push(oc);
3853 } else {
3854 oc[oldname_('', key)] = r.ideal;
3855 cc.optional.push(oc);
3856 }
3857 }
3858 if (r.exact !== undefined && typeof r.exact !== 'number') {
3859 cc.mandatory = cc.mandatory || {};
3860 cc.mandatory[oldname_('', key)] = r.exact;
3861 } else {
3862 ['min', 'max'].forEach(function(mix) {
3863 if (r[mix] !== undefined) {
3864 cc.mandatory = cc.mandatory || {};
3865 cc.mandatory[oldname_(mix, key)] = r[mix];
3866 }
3867 });
3868 }
3869 });
3870 if (c.advanced) {
3871 cc.optional = (cc.optional || []).concat(c.advanced);
3872 }
3873 return cc;
3874 };
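  // Example (illustrative; hypothetical constraints): spec-style constraints
  // are mapped onto the legacy mandatory/optional syntax, e.g.
  //   {width: 1280, deviceId: {exact: 'abc'}}
  // becomes roughly
  //   {optional: [{minWidth: 1280}, {maxWidth: 1280}], mandatory: {sourceId: 'abc'}}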
3875
3876 var getUserMedia_ = function(constraints, onSuccess, onError) {
3877 constraints = JSON.parse(JSON.stringify(constraints));
3878 if (constraints.audio) {
3879 constraints.audio = constraintsToChrome_(constraints.audio);
3880 }
3881 if (constraints.video) {
3882 constraints.video = constraintsToChrome_(constraints.video);
3883 }
3884 logging('chrome: ' + JSON.stringify(constraints));
3885 return navigator.webkitGetUserMedia(constraints, onSuccess, onError);
3886 };
3887 navigator.getUserMedia = getUserMedia_;
3888
3889 // Returns the result of getUserMedia as a Promise.
3890 var getUserMediaPromise_ = function(constraints) {
3891 return new Promise(function(resolve, reject) {
3892 navigator.getUserMedia(constraints, resolve, reject);
3893 });
3894 };
3895
3896 if (!navigator.mediaDevices) {
3897 navigator.mediaDevices = {
3898 getUserMedia: getUserMediaPromise_,
3899 enumerateDevices: function() {
3900 return new Promise(function(resolve) {
3901 var kinds = {audio: 'audioinput', video: 'videoinput'};
3902 return MediaStreamTrack.getSources(function(devices) {
3903 resolve(devices.map(function(device) {
3904 return {label: device.label,
3905 kind: kinds[device.kind],
3906 deviceId: device.id,
3907 groupId: ''};
3908 }));
3909 });
3910 });
3911 }
3912 };
3913 }
3914
3915 // A shim for getUserMedia method on the mediaDevices object.
3916 // TODO(KaptenJansson) remove once implemented in Chrome stable.
3917 if (!navigator.mediaDevices.getUserMedia) {
3918 navigator.mediaDevices.getUserMedia = function(constraints) {
3919 return getUserMediaPromise_(constraints);
3920 };
3921 } else {
3922 // Even though Chrome 45 has navigator.mediaDevices and a getUserMedia
3923 // function which returns a Promise, it does not accept spec-style
3924 // constraints.
3925 var origGetUserMedia = navigator.mediaDevices.getUserMedia.
3926 bind(navigator.mediaDevices);
3927 navigator.mediaDevices.getUserMedia = function(c) {
3928 if (c) {
3929 logging('spec:   ' + JSON.stringify(c)); // whitespace for alignment
3930 c.audio = constraintsToChrome_(c.audio);
3931 c.video = constraintsToChrome_(c.video);
3932 logging('chrome: ' + JSON.stringify(c));
3933 }
3934 return origGetUserMedia(c);
3935 }.bind(this);
3936 }
3937
3938 // Dummy devicechange event methods.
3939 // TODO(KaptenJansson) remove once implemented in Chrome stable.
3940 if (typeof navigator.mediaDevices.addEventListener === 'undefined') {
3941 navigator.mediaDevices.addEventListener = function() {
3942 logging('Dummy mediaDevices.addEventListener called.');
3943 };
3944 }
3945 if (typeof navigator.mediaDevices.removeEventListener === 'undefined') {
3946 navigator.mediaDevices.removeEventListener = function() {
3947 logging('Dummy mediaDevices.removeEventListener called.');
3948 };
3949 }
3950};
3951
3952},{"../utils.js":14}],20:[function(require,module,exports){
3953/*
3954 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
3955 *
3956 * Use of this source code is governed by a BSD-style license
3957 * that can be found in the LICENSE file in the root of the source
3958 * tree.
3959 */
3960 /* eslint-env node */
3961'use strict';
3962
3963var logging = require('../utils').log;
3964var browserDetails = require('../utils').browserDetails;
3965
3966// Expose public methods.
3967module.exports = function() {
3968 // getUserMedia constraints shim.
3969 var getUserMedia_ = function(constraints, onSuccess, onError) {
3970 var constraintsToFF37_ = function(c) {
3971 if (typeof c !== 'object' || c.require) {
3972 return c;
3973 }
3974 var require = [];
3975 Object.keys(c).forEach(function(key) {
3976 if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
3977 return;
3978 }
3979 var r = c[key] = (typeof c[key] === 'object') ?
3980 c[key] : {ideal: c[key]};
3981 if (r.min !== undefined ||
3982 r.max !== undefined || r.exact !== undefined) {
3983 require.push(key);
3984 }
3985 if (r.exact !== undefined) {
3986 if (typeof r.exact === 'number') {
3987 r.min = r.max = r.exact;
3988 } else {
3989 c[key] = r.exact;
3990 }
3991 delete r.exact;
3992 }
3993 if (r.ideal !== undefined) {
3994 c.advanced = c.advanced || [];
3995 var oc = {};
3996 if (typeof r.ideal === 'number') {
3997 oc[key] = {min: r.ideal, max: r.ideal};
3998 } else {
3999 oc[key] = r.ideal;
4000 }
4001 c.advanced.push(oc);
4002 delete r.ideal;
4003 if (!Object.keys(r).length) {
4004 delete c[key];
4005 }
4006 }
4007 });
4008 if (require.length) {
4009 c.require = require;
4010 }
4011 return c;
4012 };
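    // Example (illustrative; hypothetical constraints): for Firefox < 38,
    // {width: {exact: 1280}} becomes {width: {min: 1280, max: 1280}, require: ['width']},
    // while a bare value such as {width: 1280} is moved into the advanced list.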
4013 constraints = JSON.parse(JSON.stringify(constraints));
4014 if (browserDetails.version < 38) {
4015 logging('spec: ' + JSON.stringify(constraints));
4016 if (constraints.audio) {
4017 constraints.audio = constraintsToFF37_(constraints.audio);
4018 }
4019 if (constraints.video) {
4020 constraints.video = constraintsToFF37_(constraints.video);
4021 }
4022 logging('ff37: ' + JSON.stringify(constraints));
4023 }
4024 return navigator.mozGetUserMedia(constraints, onSuccess, onError);
4025 };
4026
4027 navigator.getUserMedia = getUserMedia_;
4028
4029 // Returns the result of getUserMedia as a Promise.
4030 var getUserMediaPromise_ = function(constraints) {
4031 return new Promise(function(resolve, reject) {
4032 navigator.getUserMedia(constraints, resolve, reject);
4033 });
4034 };
4035
4036 // Shim for mediaDevices on older versions.
4037 if (!navigator.mediaDevices) {
4038 navigator.mediaDevices = {getUserMedia: getUserMediaPromise_,
4039 addEventListener: function() { },
4040 removeEventListener: function() { }
4041 };
4042 }
4043 navigator.mediaDevices.enumerateDevices =
4044 navigator.mediaDevices.enumerateDevices || function() {
4045 return new Promise(function(resolve) {
4046 var infos = [
4047 {kind: 'audioinput', deviceId: 'default', label: '', groupId: ''},
4048 {kind: 'videoinput', deviceId: 'default', label: '', groupId: ''}
4049 ];
4050 resolve(infos);
4051 });
4052 };
4053
4054 if (browserDetails.version < 41) {
4055 // Work around http://bugzil.la/1169665
4056 var orgEnumerateDevices =
4057 navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices);
4058 navigator.mediaDevices.enumerateDevices = function() {
4059 return orgEnumerateDevices().then(undefined, function(e) {
4060 if (e.name === 'NotFoundError') {
4061 return [];
4062 }
4063 throw e;
4064 });
4065 };
4066 }
4067};
4068
4069},{"../utils":14}]},{},[1])(1)
4070});
4071;