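// Core WebRTC session and peer wrapper. It ties together getUserMedia, the
// rtcpeerconnection polyfill, hark speech detection and mediastream-gain
// control behind two WildEmitter-based objects: WebRTC, which manages local
// media and a registry of peers, and Peer, which wraps a single connection
// to one remote party.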
var webrtc = require('webrtcsupport');
var getUserMedia = require('getusermedia');
var PeerConnection = require('rtcpeerconnection');
var WildEmitter = require('wildemitter');
var hark = require('hark');
var GainController = require('mediastream-gain');
var mockconsole = require('mockconsole');


function WebRTC(opts) {
    var self = this;
    var options = opts || {};
    var config = this.config = {
        debug: false,
        localVideoEl: '',
        remoteVideosEl: '',
        autoRequestMedia: false,
        // makes the entire PC config overridable
        peerConnectionConfig: {
            iceServers: [{"url": "stun:stun.l.google.com:19302"}]
        },
        peerConnectionContraints: {
            optional: [
                {DtlsSrtpKeyAgreement: true}
            ]
        },
        autoAdjustMic: false,
        media: {
            audio: true,
            video: true
        },
        receiveMedia: {
            mandatory: {
                OfferToReceiveAudio: true,
                OfferToReceiveVideo: true
            }
        },
        detectSpeakingEvents: true,
        enableDataChannels: true
    };
    var item, connection;

    // expose screensharing check
    this.screenSharingSupport = webrtc.screenSharing;

    // We also allow a 'logger' option. It can be any object that implements
    // log, warn, and error methods.
    // We log nothing by default, following "the rule of silence":
    // http://www.linfo.org/rule_of_silence.html
    this.logger = function () {
        // we assume that if you're in debug mode and you didn't
        // pass in a logger, you actually want to log as much as
        // possible.
        if (options.debug) {
            return options.logger || console;
        } else {
            // otherwise we'll use your logger, which should have its own
            // logic for output, or fall back to the no-op.
            return options.logger || mockconsole;
        }
    }();
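    // For illustration only: any object with log, warn and error methods
    // works as a logger, e.g.
    //   new WebRTC({logger: {log: function () {}, warn: console.warn.bind(console), error: console.error.bind(console)}});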

    // set options
    for (item in options) {
        this.config[item] = options[item];
    }

    // check for support
    if (!webrtc.support) {
        this.logger.error('Your browser doesn\'t seem to support WebRTC');
    }

    // where we'll store our peer connections
    this.peers = [];

    WildEmitter.call(this);

    // log events in debug mode
    if (this.config.debug) {
        this.on('*', function (event, val1, val2) {
            var logger;
            // if you didn't pass in a logger but explicitly turned on debug,
            // we assume you want log output on the console
            if (self.config.logger === mockconsole) {
                logger = console;
            } else {
                logger = self.logger;
            }
            logger.log('event:', event, val1, val2);
        });
    }
}

WebRTC.prototype = Object.create(WildEmitter.prototype, {
    constructor: {
        value: WebRTC
    }
});

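// Creates a Peer bound to this session, adds it to this.peers and returns it;
// opts is passed straight through to the Peer constructor (id, type, stream, etc.).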
WebRTC.prototype.createPeer = function (opts) {
    var peer;
    opts.parent = this;
    peer = new Peer(opts);
    this.peers.push(peer);
    return peer;
};

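// Requests local media with getUserMedia. cb is a node-style callback
// (err, stream); on success the stream is stored and 'localStream' is emitted.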
WebRTC.prototype.startLocalMedia = function (mediaConstraints, cb) {
    var self = this;
    var constraints = mediaConstraints || {video: true, audio: true};

    getUserMedia(constraints, function (err, stream) {
        if (!err) {
            if (constraints.audio && self.config.detectSpeakingEvents) {
                self.setupAudioMonitor(stream);
            }
            self.localStream = stream;

            if (self.config.autoAdjustMic) {
                self.gainController = new GainController(stream);
                // start out somewhat muted if we can track audio
                self.setMicIfEnabled(0.5);
            }

            self.emit('localStream', stream);
        }
        if (cb) cb(err, stream);
    });
};

WebRTC.prototype.stopLocalMedia = function () {
    if (this.localStream) {
        this.localStream.stop();
        this.emit('localStreamStopped');
    }
};

// Audio controls
WebRTC.prototype.mute = function () {
    this._audioEnabled(false);
    this.hardMuted = true;
    this.emit('audioOff');
};
WebRTC.prototype.unmute = function () {
    this._audioEnabled(true);
    this.hardMuted = false;
    this.emit('audioOn');
};

// Audio monitor
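// hark watches the local stream: while the user is speaking the mic gain is
// restored to 1 and, about a second after they stop, ducked back to 0.5
// (gain changes only apply when autoAdjustMic is on). Volume changes are also
// relayed to peers over the 'hark' data channel when data channels are enabled.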
WebRTC.prototype.setupAudioMonitor = function (stream) {
    this.logger.log('Setup audio');
    var audio = hark(stream);
    var self = this;
    var timeout;

    audio.on('speaking', function () {
        if (self.hardMuted) return;
        self.setMicIfEnabled(1);
        self.sendToAll('speaking', {});
        self.emit('speaking');
    });

    audio.on('stopped_speaking', function () {
        if (self.hardMuted) return;
        if (timeout) clearTimeout(timeout);

        timeout = setTimeout(function () {
            self.setMicIfEnabled(0.5);
            self.sendToAll('stopped_speaking', {});
            self.emit('stoppedSpeaking');
        }, 1000);
    });
    if (this.config.enableDataChannels) {
        // until https://code.google.com/p/chromium/issues/detail?id=121673 is fixed...
        audio.on('volume_change', function (volume, threshold) {
            if (self.hardMuted) return;
            self.emit('volumeChange', volume, threshold);
            self.peers.forEach(function (peer) {
                if (peer.enableDataChannels) {
                    var dc = peer.getDataChannel('hark');
                    if (dc.readyState !== 'open') return;
                    dc.send(JSON.stringify({type: 'volume', volume: volume}));
                }
            });
        });
    }
};

// We do this as a separate method in order to
// still leave "setMicVolume" as a working
// method.
WebRTC.prototype.setMicIfEnabled = function (volume) {
    if (!this.config.autoAdjustMic) return;
    this.gainController.setGain(volume);
};

// Video controls
WebRTC.prototype.pauseVideo = function () {
    this._videoEnabled(false);
    this.emit('videoOff');
};
WebRTC.prototype.resumeVideo = function () {
    this._videoEnabled(true);
    this.emit('videoOn');
};

// Combined controls
WebRTC.prototype.pause = function () {
    this._audioEnabled(false);
    this.pauseVideo();
};
WebRTC.prototype.resume = function () {
    this._audioEnabled(true);
    this.resumeVideo();
};

// Internal methods for enabling/disabling audio/video
WebRTC.prototype._audioEnabled = function (bool) {
    // workaround for a chrome 27 bug where disabling tracks
    // doesn't seem to work (works in canary, remove when fixed)
    this.setMicIfEnabled(bool ? 1 : 0);
    this.localStream.getAudioTracks().forEach(function (track) {
        track.enabled = !!bool;
    });
};
WebRTC.prototype._videoEnabled = function (bool) {
    this.localStream.getVideoTracks().forEach(function (track) {
        track.enabled = !!bool;
    });
};

// removes peers matching an id and/or type
WebRTC.prototype.removePeers = function (id, type) {
    this.getPeers(id, type).forEach(function (peer) {
        peer.end();
    });
};

// fetches all Peer objects by session id and/or type
WebRTC.prototype.getPeers = function (sessionId, type) {
    return this.peers.filter(function (peer) {
        return (!sessionId || peer.id === sessionId) && (!type || peer.type === type);
    });
};

// sends a message to all peers via the signalling channel
WebRTC.prototype.sendToAll = function (message, payload) {
    this.peers.forEach(function (peer) {
        peer.send(message, payload);
    });
};

// sends a message to all peers over a data channel
WebRTC.prototype.sendDirectlyToAll = function (channel, message, payload) {
    this.peers.forEach(function (peer) {
        if (peer.enableDataChannels) {
            peer.sendDirectly(channel, message, payload);
        }
    });
};

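// Peer wraps a single RTCPeerConnection to one remote party. Recognised
// options: id, parent (the owning WebRTC instance), type ('video' or 'screen'),
// oneway, sharemyscreen, prefix, stream, enableDataChannels and receiveMedia;
// enableDataChannels and receiveMedia fall back to the parent's config.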
function Peer(options) {
    var self = this;

    this.id = options.id;
    this.parent = options.parent;
    this.type = options.type || 'video';
    this.oneway = options.oneway || false;
    this.sharemyscreen = options.sharemyscreen || false;
    this.browserPrefix = options.prefix;
    this.stream = options.stream;
    this.enableDataChannels = options.enableDataChannels === undefined ? this.parent.config.enableDataChannels : options.enableDataChannels;
    this.receiveMedia = options.receiveMedia || this.parent.config.receiveMedia;
    this.channels = {};
    // Create an RTCPeerConnection via the polyfill
    this.pc = new PeerConnection(this.parent.config.peerConnectionConfig, this.parent.config.peerConnectionContraints);
    this.pc.on('ice', this.onIceCandidate.bind(this));
    this.pc.on('addStream', this.handleRemoteStreamAdded.bind(this));
    this.pc.on('addChannel', this.handleDataChannelAdded.bind(this));
    this.pc.on('removeStream', this.handleStreamRemoved.bind(this));
    // Just fire negotiationNeeded events for now.
    // Once browser re-negotiation handling seems to work,
    // we can use this as the trigger for starting the offer/answer process
    // automatically. We'll leave it be for now while this stabilizes.
    this.pc.on('negotiationNeeded', this.emit.bind(this, 'negotiationNeeded'));
    this.logger = this.parent.logger;

    // handle screensharing/broadcast mode
    if (options.type === 'screen') {
        if (this.parent.localScreen && this.sharemyscreen) {
            this.logger.log('adding local screen stream to peer connection');
            this.pc.addStream(this.parent.localScreen);
            this.broadcaster = options.broadcaster;
        }
    } else {
        this.pc.addStream(this.parent.localStream);
    }

    // call emitter constructor
    WildEmitter.call(this);

    // proxy events to parent
    this.on('*', function () {
        self.parent.emit.apply(self.parent, arguments);
    });
}

Peer.prototype = Object.create(WildEmitter.prototype, {
    constructor: {
        value: Peer
    }
});

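// Handles an incoming signalling message for this peer: offers are answered
// automatically, answers and ICE candidates are applied to the connection,
// and speaking/mute notifications are re-emitted on the parent session.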
Peer.prototype.handleMessage = function (message) {
    var self = this;

    this.logger.log('getting', message.type, message);

    if (message.prefix) this.browserPrefix = message.prefix;

    if (message.type === 'offer') {
        this.pc.handleOffer(message.payload, function (err) {
            if (err) {
                return;
            }
            // auto-accept
            self.pc.answer(self.receiveMedia, function (err, sessionDescription) {
                self.send('answer', sessionDescription);
            });
        });
    } else if (message.type === 'answer') {
        this.pc.handleAnswer(message.payload);
    } else if (message.type === 'candidate') {
        this.pc.processIce(message.payload);
    } else if (message.type === 'speaking') {
        this.parent.emit('speaking', {id: message.from});
    } else if (message.type === 'stopped_speaking') {
        this.parent.emit('stopped_speaking', {id: message.from});
    } else if (message.type === 'mute') {
        this.parent.emit('mute', {id: message.from, name: message.payload.name});
    } else if (message.type === 'unmute') {
        this.parent.emit('unmute', {id: message.from, name: message.payload.name});
    }
};

// send via signalling channel
Peer.prototype.send = function (messageType, payload) {
    var message = {
        to: this.id,
        broadcaster: this.broadcaster,
        roomType: this.type,
        type: messageType,
        payload: payload,
        prefix: webrtc.prefix
    };
    this.logger.log('sending', messageType, message);
    this.parent.emit('message', message);
};

// send via data channel
Peer.prototype.sendDirectly = function (channel, messageType, payload) {
    var message = {
        type: messageType,
        payload: payload
    };
    this.logger.log('sending via datachannel', channel, messageType, message);
    this.getDataChannel(channel).send(JSON.stringify(message));
};

// Internal method registering handlers for a data channel and emitting events on the peer
Peer.prototype._observeDataChannel = function (channel) {
    var self = this;
    channel.onclose = this.emit.bind(this, 'channelClose', channel);
    channel.onerror = this.emit.bind(this, 'channelError', channel);
    channel.onmessage = function (event) {
        self.emit('channelMessage', self, channel.label, JSON.parse(event.data), channel, event);
    };
    channel.onopen = this.emit.bind(this, 'channelOpen', channel);
};

// Fetch or create a data channel by the given name
Peer.prototype.getDataChannel = function (name, opts) {
    if (!webrtc.dataChannel) return this.emit('error', new Error('createDataChannel not supported'));
    var channel = this.channels[name];
    opts || (opts = {});
    if (channel) return channel;
    // if we don't have one by this label, create it
    channel = this.channels[name] = this.pc.createDataChannel(name, opts);
    this._observeDataChannel(channel);
    return channel;
};

Peer.prototype.onIceCandidate = function (candidate) {
    if (this.closed) return;
    if (candidate) {
        this.send('candidate', candidate);
    } else {
        this.logger.log('End of candidates.');
    }
};

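// Starts the session from this side: pre-creates the default data channel
// (when enabled) so the offer's SDP includes it, then sends an offer over
// the signalling channel.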
Peer.prototype.start = function () {
    var self = this;

    // well, the webrtc api requires that we either
    // a) create a datachannel a priori
    // b) do a renegotiation later to add the SCTP m-line
    // Let's do (a) first...
    if (this.enableDataChannels) {
        this.getDataChannel('simplewebrtc');
    }

    this.pc.offer(this.receiveMedia, function (err, sessionDescription) {
        self.send('offer', sessionDescription);
    });
};

Peer.prototype.end = function () {
    if (this.closed) return;
    this.pc.close();
    this.handleStreamRemoved();
};

Peer.prototype.handleRemoteStreamAdded = function (event) {
    var self = this;
    if (this.stream) {
        this.logger.warn('Already have a remote stream');
    } else {
        this.stream = event.stream;
        this.stream.onended = function () {
            self.end();
        };
        this.parent.emit('peerStreamAdded', this);
    }
};

Peer.prototype.handleStreamRemoved = function () {
    this.parent.peers.splice(this.parent.peers.indexOf(this), 1);
    this.closed = true;
    this.parent.emit('peerStreamRemoved', this);
};

Peer.prototype.handleDataChannelAdded = function (channel) {
    this.channels[channel.label] = channel;
    this._observeDataChannel(channel);
};

module.exports = WebRTC;
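
// Illustrative usage sketch (not part of the module): the application supplies
// the signalling transport; `signaller` below is a hypothetical emitter that
// relays messages between parties, and 'remote-id' is a made-up peer id.
//
//   var WebRTC = require('./webrtc');
//   var rtc = new WebRTC({debug: true});
//
//   rtc.startLocalMedia({audio: true, video: true}, function (err, stream) {
//       if (err) return console.error(err);
//       // offering side: create a peer for the remote party and start the
//       // offer/answer exchange
//       var peer = rtc.createPeer({id: 'remote-id', type: 'video'});
//       peer.start();
//   });
//
//   // outgoing signalling messages (offers, answers, ICE candidates, ...)
//   rtc.on('message', function (message) { signaller.send(message); });
//
//   // incoming signalling messages are routed to the matching peer(s)
//   signaller.on('message', function (message) {
//       rtc.getPeers(message.from, message.roomType).forEach(function (peer) {
//           peer.handleMessage(message);
//       });
//   });
//
//   rtc.on('peerStreamAdded', function (peer) {
//       // attach peer.stream to a <video> element here
//   });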