'use strict'; Object.defineProperty(exports, '__esModule', { value: true }); var vue = require('vue'); var VueFeather = require('vue-feather'); var mitt = require('mitt'); var core = require('@vueuse/core'); var $ = require('jquery'); var JMeetJS = require('@joinera/lib-jitsi-meet'); var components$1 = require('@vueuse/components'); var bus = mitt(); var subdomain = ""; const domain = 'meet.jit.si'; // const domain = '8x8.vc'; var options = { hosts: { domain: domain, //anonymousdomain: `guest.${domain}`, authdomain: `${domain}`, muc: 'conference.' + subdomain + domain, // FIXME: use XEP-0030 focus: `focus.${domain}`, }, serviceUrl: `wss://${domain}/xmpp-websocket`, disableSimulcast: false, enableRemb: true, enableTcc: true, resolution: 720, constraints: { video: { height: { ideal: 720, max: 720, min: 180 }, width: { ideal: 1280, max: 1280, min: 320 } } }, enableInsecureRoomNameWarning: true, externalConnectUrl: `https://${domain}/http-pre-bind`, analytics: { amplitudeAPPKey: "fafdba4c3b47fe5f151060ca37f02d2f", whiteListedEvents: ['conference.joined', 'page.reload.scheduled', 'rejoined', 'transport.stats'], }, enableP2P: true, // flag to control P2P connections // New P2P options // p2p: { // enabled: true, // preferH264: true, // disableH264: true, // useStunTurn: true // use XEP-0215 to fetch STUN and TURN servers for the P2P connection // }, p2p: { enabled: true, enableUnifiedOnChrome: false, iceTransportPolicy: 'all', preferredCodec: 'H264', disabledCodec: '', backToP2PDelay: 5, // stunServers: [ // { urls: 'stun:jitsi-meet.example.com:3478' }, // { urls: 'stun:meet-jit-si-turnrelay.jitsi.net:443' } // ] }, useStunTurn: true, // use XEP-0215 to fetch TURN servers for the JVB connection useTurnUdp: false, websocket: `wss://${domain}/xmpp-websocket`, // FIXME: use xep-0156 for that clientNode: 'http://jitsi.org/jitsimeet', // The name of client node advertised in XEP-0115 'c' stanza //deprecated desktop sharing settings, included only because older version of jitsi-meet require them desktopSharing: 'ext', // Desktop sharing method. Can be set to 'ext', 'webrtc' or false to disable. chromeExtensionId: 'kglhbbefdnlheedjiejgomgmfplipfeb', // Id of desktop streamer Chrome extension desktopSharingSources: ['screen', 'window'], googleApiApplicationClientID: "39065779381-bbhnkrgibtf4p0j9ne5vsq7bm49t1tlf.apps.googleusercontent.com", microsoftApiApplicationClientID: "00000000-0000-0000-0000-000040240063", enableCalendarIntegration: true, //new desktop sharing settings desktopSharingChromeExtId: 'kglhbbefdnlheedjiejgomgmfplipfeb', // Id of desktop streamer Chrome extension desktopSharingChromeDisabled: false, desktopSharingChromeSources: ['screen', 'window', 'tab'], desktopSharingChromeMinExtVersion: '0.2.6.2', // Required version of Chrome extension desktopSharingFirefoxDisabled: false, useRoomAsSharedDocumentName: false, enableLipSync: false, disableRtx: false, // Enables RTX everywhere enableScreenshotCapture: false, openBridgeChannel: 'websocket', // One of true, 'datachannel', or 'websocket' channelLastN: -1, // The default value of the channel attribute last-n. 
startBitrate: "800", disableAudioLevels: false, disableSuspendVideo: true, stereo: false, forceJVB121Ratio: -1, enableTalkWhileMuted: true, enableNoAudioDetection: true, enableNoisyMicDetection: true, enableClosePage: true, disableLocalVideoFlip: true, hiddenDomain: `recorder.${domain}`, dropbox: { appKey: 'DROPBOX_APP_KEY' }, transcribingEnabled: false, enableRecording: true, liveStreamingEnabled: true, fileRecordingsEnabled: true, fileRecordingsServiceEnabled: false, fileRecordingsServiceSharingEnabled: false, requireDisplayName: false, enableWelcomePage: true, isBrand: false, dialInNumbersUrl: 'https://api.jitsi.net/phoneNumberList', dialInConfCodeUrl: 'https://api.jitsi.net/conferenceMapper', dialOutCodesUrl: 'https://api.jitsi.net/countrycodes', dialOutAuthUrl: 'https://api.jitsi.net/authorizephone', peopleSearchUrl: 'https://api.jitsi.net/directorySearch', inviteServiceUrl: 'https://api.jitsi.net/conferenceInvite', inviteServiceCallFlowsUrl: 'https://api.jitsi.net/conferenceinvitecallflows', peopleSearchQueryTypes: ['user', 'conferenceRooms'], startAudioMuted: 9, startVideoMuted: 9, enableUserRolesBasedOnToken: false, enableLayerSuspension: false, feedbackPercentage: 0, deploymentUrls: { userDocumentationURL: "https://jitsi.github.io/handbook/help", }, chromeExtensionBanner: { url: "https://chrome.google.com/webstore/detail/jitsi-meetings/kglhbbefdnlheedjiejgomgmfplipfeb", chromeExtensionsInfo: [{ "path": "jitsi-logo-48x48.png", "id": "kglhbbefdnlheedjiejgomgmfplipfeb" }] }, prejoinPageEnabled: false, hepopAnalyticsUrl: "", hepopAnalyticsEvent: { product: "lib-jitsi-meet", subproduct: "meet-jit-si", name: "jitsi.page.load.failed", action: "page.load.failed", actionSubject: "page.load", type: "page.load.failed", source: "page.load", attributes: { type: "operational", source: 'page.load' }, server: `${domain}` }, deploymentInfo: { environment: 'meet-jit-si', envType: 'prod', releaseNumber: '735', shard: 'meet-jit-si-eu-central-1a-s52', region: 'eu-central-1', userRegion: 'eu-central-1', crossRegion: 0 }, rttMonitor: { enabled: false, initialDelay: 30000, getStatsInterval: 10000, analyticsInterval: 60000, stunServers: { "us-east-1": "all-us-east-1-turn.jitsi.net:443", "ap-se-2": "all-ap-se-2-turn.jitsi.net:443", "ap-se-1": "all-ap-se-1-turn.jitsi.net:443", "us-west-2": "all-us-west-2-turn.jitsi.net:443", "eu-central-1": "all-eu-central-1-turn.jitsi.net:443", "eu-west-1": "all-eu-west-1-turn.jitsi.net:443" } }, e2eping: { pingInterval: -1 }, abTesting: { }, testing: { capScreenshareBitrate: 1, octo: { probability: 1 } }, logging: { // Default log level defaultLogLevel: "trace", // The following are too verbose in their logging with the default level "modules/RTC/TraceablePeerConnection.js": "info", "modules/statistics/CallStats.js": "info", "modules/xmpp/strophe.util.js": "log", }, }; /** * @type {JMeetJS} */ const JitsiMeetJS = window.JitsiMeetJS || JMeetJS; window.$ = $; function createTracksAndAddToRoom (room) { JitsiMeetJS.createLocalTracks({ devices: ['video', 'audio'] }).then((tracks) => { tracks.forEach(track => { room.addTrack(track); }); }).catch(error => { console.error('There was an error creating the local tracks:', error); } ); } function createAndJoinRoom (connection, roomName, username, password, constraints) { return new Promise((resolve) => { const room = connection.initJitsiConference(roomName, {}); room.on(JitsiMeetJS.events.conference.CONFERENCE_JOINED, () => { resolve(room); }); if (username) { room.setDisplayName(username); } 
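// Limit the resolution of the video this client sends. `constraints` is the
// component's `videoConstraints` prop (a max frame height such as 360 or 720);
// this is a descriptive note only, the call itself is unchanged.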
room.setSenderVideoConstraint(constraints); if (password) { room.join(password); } else { room.join(); } }); } function connect (roomName, tok) { return new Promise(((resolve, reject) => { let optionsWithRoom = { ...options }; optionsWithRoom.serviceUrl = options.serviceUrl + `?room=${roomName}`; for (const [loggerId, level] of Object.entries(options.logging)) { if (loggerId !== "defaultLogLevel") { JitsiMeetJS.setLogLevelById(level, loggerId); } } const token = (tok?.length) ? tok : null; const connection = new JitsiMeetJS.JitsiConnection(null, token, optionsWithRoom); connection.addEventListener(JitsiMeetJS.events.connection.CONNECTION_ESTABLISHED, () => { resolve(connection); }); connection.addEventListener(JitsiMeetJS.events.connection.CONNECTION_FAILED, (e) => { reject("The connection failed. - " + e); }); connection.addEventListener(JitsiMeetJS.events.connection.CONNECTION_DISCONNECTED, (e) => { reject("The connection disconnected. - " + e); }); connection.connect(); })) } const ratios = ["4:3", "16:9", "1:1", "1:2"]; const getRatio = (aspect) => { const ratio = ratios[aspect].split(":"); return ratio[1] / ratio[0]; }; const resize = (aspect, timeoutKey) => { // If this resize is triggered from a timeout, clear it. if (timeoutKey) { clearTimeout(timeoutKey); timeoutKey = null; } const margin = 10; const ratio = getRatio(aspect); let dish = document.querySelector(".Dish"); if (dish) { let width = dish.offsetWidth - margin * 2; let height = dish.offsetHeight - margin * 2; // loop (i recommend you optimize this) let max = 0; let i = 1; while (i < 5000) { let area = getArea(i, dish, width, height, ratio, margin); if (area === false) { max = i - 1; break; } i++; } // remove margins max = max - margin * 2; resizer(max, dish, margin, ratio, aspect); } }; const resizer = (width, dish, margin, ratio, aspect) => { for (var s = 0; s < dish.children.length; s++) { // camera fron dish (div without class) let element = dish.children[s]; // custom margin element.style.margin = margin + "px"; // calculate dimensions element.style.width = width + "px"; element.style.height = width * ratio + "px"; // to show the aspect ratio in demo (optional) element.setAttribute("data-aspect", ratios[aspect]); } }; const getArea = (increment, dish, width, height, ratio, margin) => { let i = 0; let w = 0; let h = increment * ratio + (margin * 2); while (i < (dish.children.length)) { if ((w + increment) > width) { w = 0; h = h + (increment * ratio) + (margin * 2); } w = w + increment + (margin * 2); i++; } if (h > height || increment > width) return false; else return increment; }; const _hoisted_1$5 = ["id"]; const _hoisted_2$4 = { class: "control-box" }; const _hoisted_3$4 = ["id"]; const _hoisted_4$3 = { key: 1, class: "Dish" }; const _hoisted_5$3 = ["id"]; const _hoisted_6$1 = { class: "control-box" }; const _hoisted_7$1 = ["id"]; var script$5 = { __name: 'AudioPlayList', props: { tracks: { type: Array, required: true, }, conference: { type: Object, }, aspect: { type: Number, default: 0, }, }, emits: ["active-focus", "active-blur"], setup(__props, { emit }) { const props = __props; const def = { getId: () => null, isLocal: () => true, getParticipantId: () => null, }; const scenery = vue.ref(null); const { isOutside } = core.useMouseInElement(scenery); vue.watch(isOutside, () => { emit( isOutside.value ? 
"active-blur" : "active-focus", speakerTrack.value?.getId() ); }); const computedTracks = vue.computed(() => props.tracks); const speakerTrack = vue.ref( computedTracks.value?.find( (e) => e.getParticipantId() === (props.conference?.lastDominantSpeaker ?? computedTracks.value[0]?.getParticipantId()) ) || def ); const speakerId = vue.computed(() => speakerTrack.value?.getParticipantId()); const trackRefs = vue.ref({}); const muted = vue.ref({}); const lstnrs = { USER_LEFT: (userId) => { if (computedTracks.value.length) { const track = computedTracks.value.find( (e) => e.getParticipantId() === userId ); if (userId === track.getParticipantId()) { track.detach(trackRefs.value[track.getId()]); track.detach(trackRefs.value[track.getId() + "-a"]); // Remove the track from the DOM. const element = document.getElementById(track.getId()); element && element.parentNode.remove(); const element2 = document.getElementById(track.getId() + "-a"); element2 && element2.parentNode.remove(); } } }, TRACK_MUTE_CHANGED: (track) => { muted.value[track.getId()] = track.isMuted(); }, DOMINANT_SPEAKER_CHANGED: (id) => { if (computedTracks.value.length) { speakerTrack.value = computedTracks.value.find((e) => e.getParticipantId() === id) || def; } }, }; /** * * @param { String } str */ const trimName = (str) => { return str.substring(0, 2); }; const ptcpnt = (track, getName) => { const participant = props.conference?.getParticipantById( track.getParticipantId() ); if (participant && getName === true) { return participant.getDisplayName() ?? "Guest"; } return participant; }; // Watch and set the initial active speaker const unwatch1 = vue.watch( [() => props.conference, () => props.tracks?.length], ([conference, ctracks]) => { if (conference && ctracks && !speakerTrack.value?.getId()) { speakerTrack.value = conference.getLocalAudioTrack() || def; resize(props.aspect); unwatch1(); } } ); // Register Event Listeners Object.keys(lstnrs).forEach((i) => { bus.on(i, lstnrs[i]); }); // Remove Event Bus Listners vue.onBeforeUnmount(() => { Object.keys(lstnrs).forEach((i) => { bus.off(i, lstnrs[i]); }); }); return (_ctx, _cache) => { return (vue.openBlock(), vue.createElementBlock("div", null, [ (__props.conference?.participants?.size < 2) ? (vue.openBlock(), vue.createElementBlock("div", { ref_key: "scenery", ref: scenery, class: "Screen", key: speakerId.value }, [ (speakerTrack.value.getId()) ? (vue.openBlock(), vue.createElementBlock("div", { key: 0, "data-aspect": "4:3", class: "audio-only", id: `track-${speakerTrack.value.getId()}-a` }, [ vue.createElementVNode("div", _hoisted_2$4, [ vue.createElementVNode("span", null, vue.toDisplayString(trimName(speakerTrack.value.isLocal() ? "Me" : ptcpnt(speakerTrack.value, true))), 1 /* TEXT */), vue.createVNode(vue.unref(VueFeather), { type: `mic${muted.value[speakerTrack.value.getId()] ? '-off' : ''}` }, null, 8 /* PROPS */, ["type"]) ]), (!speakerTrack.value.isLocal()) ? (vue.openBlock(), vue.createElementBlock("audio", { key: 0, autoplay: "", ref: (e) => (trackRefs.value[speakerTrack.value.getId() + '-a'] = e), id: speakerTrack.value.getId() + '-a', class: "audio-container" }, null, 8 /* PROPS */, _hoisted_3$4)) : vue.createCommentVNode("v-if", true) ], 8 /* PROPS */, _hoisted_1$5)) : vue.createCommentVNode("v-if", true) ])) : vue.createCommentVNode("v-if", true), (computedTracks.value?.length && __props.conference?.participants?.size) ? 
(vue.openBlock(), vue.createElementBlock("div", _hoisted_4$3, [ (vue.openBlock(true), vue.createElementBlock(vue.Fragment, null, vue.renderList(computedTracks.value.filter( (e) => e.getId() !== speakerTrack.value.getId() && __props.conference?.participants?.size < 2 ), (track) => { return (vue.openBlock(), vue.createElementBlock("div", { "data-aspect": "4:3", class: "audio-only", id: `track-${track.getId()}`, key: track.id }, [ vue.createElementVNode("div", _hoisted_6$1, [ vue.createElementVNode("span", null, vue.toDisplayString(trimName(track.isLocal() ? "Me" : ptcpnt(track, true))), 1 /* TEXT */), vue.createVNode(vue.unref(VueFeather), { type: `mic${muted.value[track.getId()] ? '-off' : ''}` }, null, 8 /* PROPS */, ["type"]) ]), (!track.isLocal()) ? (vue.openBlock(), vue.createElementBlock("audio", { key: 0, autoplay: "", ref_for: true, ref: (e) => (trackRefs.value[track.getId()] = e), id: track.getId(), class: "audio-container" }, null, 8 /* PROPS */, _hoisted_7$1)) : vue.createCommentVNode("v-if", true) ], 8 /* PROPS */, _hoisted_5$3)) }), 128 /* KEYED_FRAGMENT */)) ])) : vue.createCommentVNode("v-if", true) ])) } } }; script$5.__file = "src/components/AudioPlayList.vue"; const _hoisted_1$4 = { key: 0, class: "con-flex con-flex-column con-loader" }; var script$4 = { __name: 'WaitingLoader', props: { show: { type: Boolean, }, color: { type: String, default: "green", }, width: { type: [String, Number], default: "80px", }, height: { type: [String, Number], default: "100%", }, text: { type: String, }, }, setup(__props) { const props = __props; const style = vue.computed(() => ({ width: typeof props.width === "string" ? props.width : props.width + "px", height: typeof props.height === "string" ? props.height : props.height + "px", })); const dots = vue.ref(""); let interval = setInterval(() => { if (dots.value.length === 3) { dots.value = ""; } else { dots.value += "."; } }, 600); vue.onBeforeUnmount(() => { clearInterval(interval); }); return (_ctx, _cache) => { return (__props.show) ? (vue.openBlock(), vue.createElementBlock("div", _hoisted_1$4, [ vue.createElementVNode("div", { class: "con-ellipsis", style: vue.normalizeStyle(style.value) }, [ vue.createElementVNode("div", { class: vue.normalizeClass(`con-bg-${__props.color}`) }, null, 2 /* CLASS */), vue.createElementVNode("div", { class: vue.normalizeClass(`con-bg-${__props.color}`) }, null, 2 /* CLASS */), vue.createElementVNode("div", { class: vue.normalizeClass(`con-bg-${__props.color}`) }, null, 2 /* CLASS */), vue.createElementVNode("div", { class: vue.normalizeClass(`con-bg-${__props.color}`) }, null, 2 /* CLASS */) ], 4 /* STYLE */), (__props.text) ? 
(vue.openBlock(), vue.createElementBlock("h3", { key: 0, class: vue.normalizeClass(`con-text-${__props.color}`) }, vue.toDisplayString(__props.text + dots.value), 3 /* TEXT, CLASS */)) : vue.createCommentVNode("v-if", true), vue.renderSlot(_ctx.$slots, "button") ])) : vue.createCommentVNode("v-if", true) } } }; script$4.__file = "src/components/WaitingLoader.vue"; const _hoisted_1$3 = { class: "con-modal-content" }; const _hoisted_2$3 = { class: "con-modal-header", id: "modalTitle" }; const _hoisted_3$3 = { class: "con-modal-body con-pa-md", id: "modalDescription" }; var script$3 = { __name: 'SceneModal', props: { modelValue: { type: Boolean, }, title: { Type: String, }, }, emits: ["update:modelValue"], setup(__props, { emit }) { const props = __props; const modalKey = vue.ref(0); const dialog = vue.ref(null); const show = vue.ref(props.modelValue); vue.onMounted(() => { document.body.appendChild(dialog.value); }); vue.onBeforeUnmount(() => { document.body.removeChild(dialog.value); }); vue.watch( () => show.value, (e) => { emit("update:modelValue", e); } ); vue.watch( () => props.modelValue, (e) => { show.value = e; } ); const close = (e) => { if ( e.target.classList.contains("con-modal") || e.target.classList.contains("con-modal-close") ) { show.value = false; } }; return (_ctx, _cache) => { return (vue.openBlock(), vue.createElementBlock("div", { role: "dialog", "aria-labelledby": "modalTitle", "aria-describedby": "modalDescription", id: "deviceModal", class: vue.normalizeClass(["con-modal", { show: show.value }]), ref_key: "dialog", ref: dialog, key: modalKey.value, onClick: close }, [ vue.createCommentVNode(" Modal content "), vue.createElementVNode("div", _hoisted_1$3, [ vue.createElementVNode("div", _hoisted_2$3, [ vue.createElementVNode("span", { "aria-label": "Close modal", class: "con-modal-close", onClick: close }, "×"), vue.createElementVNode("h2", null, vue.toDisplayString(__props.title), 1 /* TEXT */) ]), vue.createElementVNode("div", _hoisted_3$3, [ vue.renderSlot(_ctx.$slots, "default") ]) ]) ], 2 /* CLASS */)) } } }; script$3.__file = "src/components/SceneModal.vue"; const _hoisted_1$2 = { class: "con-row con-mt-md" }; const _hoisted_2$2 = { class: "text-grey-7", for: "company_name" }; const _hoisted_3$2 = { class: "con-select" }; const _hoisted_4$2 = ["onUpdate:modelValue"]; const _hoisted_5$2 = ["selected", "value"]; var script$2 = { __name: 'DeviceSelector', props: { room: { type: Object, }, }, emits: ["configuring"], setup(__props, { emit }) { const props = __props; const devices = vue.ref({ audioinput: [], audiooutput: [], videoinput: [], }); const device = vue.ref({ audioinput: "", audiooutput: "", videoinput: "", }); const configDialog = vue.ref(false); /** * @type {JMeetJS} */ const JitsiMeetJS = window.JitsiMeetJS || JMeetJS; const configureCall = () => { if (!props.room) return; emit("configuring"); // Pass the current devices to the device list JitsiMeetJS.mediaDevices.enumerateDevices((e) => { devices.value = {}; // device.value[i.kind] = device.value[e[0].kind] || e[0].deviceId e.forEach((i) => { if (devices.value[i.kind]) { devices.value[i.kind].push(i); } else { devices.value[i.kind] = [i]; } }); Object.keys(devices.value).forEach((kind) => { device.value[kind] = device.value[kind] || devices.value[kind][0].deviceId; }); }); // configDialog.value = true; }; const changeInputOutputDevice = async (selected, type) => { if (!props.room) return; if (type === "audioinput" || type === "videoinput") { const track = type === "audioinput" ? 
props.room.getLocalAudioTrack() : props.room.getLocalVideoTrack(); const tracks = await JitsiMeetJS.createLocalTracks({ devices: [type.replace("input", "")], [type === "videoinput" ? "cameraDeviceId" : "micDeviceId"]: selected, }); if (tracks[0] && track) { // let oldTrack = document.getElementById(track.getId()); // if (oldTrack) { // track.detach(oldTrack); // oldTrack.parentNode.removeChild(oldTrack); // } props.room.replaceTrack(track, tracks[0]); // props.room.removeTrack(track).then(() => { // props.room.addTrack(tracks[0]).then(() => { // bus.emit("TRACK_ADDED", tracks[0]); // if (type === "videoinput") { // } // }); // }); } } else if (type === "audiooutput") { JitsiMeetJS.mediaDevices.setAudioOutputDevice(selected); } device.value[type] = selected; // $h.notify({ // title: // type === "audiooutput" // ? "Audio Output" // : type === "audioinput" // ? "Audio Input" // : "Video Input", // message: // type === "audiooutput" // ? "Audio output device changed" // : type === "audioinput" // ? "Audio input device changed" // : "Video input device changed", // timeout: 3000, // }); }; configureCall(); return (_ctx, _cache) => { return (vue.openBlock(), vue.createElementBlock("div", null, [ vue.renderSlot(_ctx.$slots, "button", { configDialog: (e) => (configDialog.value = true) }, () => [ vue.createElementVNode("button", { class: "con-control-btn con-hover-bg", onClick: _cache[0] || (_cache[0] = $event => (configDialog.value = true)) }, [ vue.createVNode(vue.unref(VueFeather), { type: "settings" }) ]) ]), vue.createVNode(script$3, { modelValue: configDialog.value, "onUpdate:modelValue": _cache[1] || (_cache[1] = $event => ((configDialog).value = $event)), title: "Device Configuration" }, { default: vue.withCtx(() => [ vue.createElementVNode("div", _hoisted_1$2, [ (vue.openBlock(), vue.createElementBlock(vue.Fragment, null, vue.renderList([ { v: 'audioinput', l: 'Audio Input Device', i: 'microphone' }, { v: 'audiooutput', l: 'Audio Ouput Device', i: 'headphone' }, { v: 'videoinput', l: 'Video Source', i: 'camera' }, ], (field) => { return vue.createElementVNode("div", { class: "con-col-12 con-text-left con-mt-md", key: field }, [ vue.createElementVNode("label", _hoisted_2$2, vue.toDisplayString(field.l), 1 /* TEXT */), vue.createElementVNode("div", _hoisted_3$2, [ vue.createElementVNode("label", null, [ vue.withDirectives(vue.createElementVNode("select", { "onUpdate:modelValue": [$event => ((device.value[field.v]) = $event), (value) => changeInputOutputDevice(device.value[field.v], field.v) ] }, [ (vue.openBlock(true), vue.createElementBlock(vue.Fragment, null, vue.renderList(devices.value[field.v], (dev) => { return (vue.openBlock(), vue.createElementBlock("option", { key: dev.id, selected: dev.id === device.value[field.v], value: dev.deviceId }, vue.toDisplayString(dev.label), 9 /* TEXT, PROPS */, _hoisted_5$2)) }), 128 /* KEYED_FRAGMENT */)) ], 8 /* PROPS */, _hoisted_4$2), [ [vue.vModelSelect, device.value[field.v]] ]) ]) ]) ]) }), 64 /* STABLE_FRAGMENT */)) ]) ]), _: 1 /* STABLE */ }, 8 /* PROPS */, ["modelValue"]) ])) } } }; script$2.__file = "src/components/DeviceSelector.vue"; const _hoisted_1$1 = ["id"]; const _hoisted_2$1 = { class: "control-box" }; const _hoisted_3$1 = ["id"]; const _hoisted_4$1 = ["id"]; const _hoisted_5$1 = { class: "control-box" }; const _hoisted_6 = ["id"]; const _hoisted_7 = ["id"]; const _hoisted_8 = ["id"]; var script$1 = { __name: 'MediaTrack', props: { track: { type: Object, required: true, }, isAudio: { type: Boolean, }, isActive: { type: Boolean, }, 
aspect: { type: Number, default: 0, }, }, emits: ["active-focus", "active-blur"], setup(__props, { emit }) { const props = __props; const participantAudioTrack = vue.ref(null); const participant = vue.ref(null); const largeVideo = vue.computed(() => !props.isAudio && props.isActive); const canvasRef = vue.ref(null); const trackRef = vue.ref(null); // const tracked = computed(() => props.track); const trackId = vue.computed(() => props.track?.getId()); const inited = vue.ref(false); const status = vue.ref({ audioMuted: false, videoMuted: false, }); const scenery = vue.ref(null); const { isOutside } = core.useMouseInElement(scenery); vue.watch(isOutside, () => { emit(isOutside.value ? "active-blur" : "active-focus", trackId.value); }); const lstnrs = { USER_LEFT: (userId, track) => { if (track && userId === track.getParticipantId()) { track.detach(trackRef.value); // Remove the track from the DOM. const element = document.getElementById("track-" + trackId.value); element && element.parentNode.removeChild(element); } }, TRACK_MUTE_CHANGED: (track) => { if (track) { const muteAudio = participantAudioTrack.value?.getParticipantId() === track.getParticipantId() && track.type === "audio"; if (track.getId() === trackId.value) { status.value.videoMuted = track.isMuted(); } else if (muteAudio) { status.value.audioMuted = track.isMuted(); } } }, }; /** * * @param { String } str */ const trimName = (str) => { return str.substring(0, 2); }; const canvasTimeout = vue.ref(null); const init = (track) => { const room = track.conference; inited.value = true; if (room) { // Set the participant participant.value = room.getParticipantById(track.getParticipantId()); // Get the participant's audio track. let audioTrack; if (track.type === "audio") { audioTrack = track; } else { audioTrack = track.isLocal() ? room.getLocalAudioTrack() : participant.value.getTracksByMediaType("audio")[0] ?? {}; } participantAudioTrack.value = audioTrack; } if (canvasTimeout.value) { clearTimeout(canvasTimeout.value); canvasTimeout.value = null; } if (track && (!props.isAudio || !track.isLocal())) { track.attach(trackRef.value); if (!props.isAudio && canvasRef.value) { const ctx = canvasRef.value.getContext("2d"); trackRef.value.addEventListener( "play", (e) => { const $this = e.target; //cache (function loop() { if (!$this.paused && !$this.ended) { ctx.drawImage($this, 0, 0); canvasTimeout.value = setTimeout(loop, 1000 / 30); // drawing at 30fps } })(); }, 0 ); } } const resizeTimeout = setTimeout(() => { // Resize the track resize(props.aspect, resizeTimeout); }, 100); }; // Watch for changes in the track. const unwatchTrack = vue.watch( () => props.track, (track) => { init(track); } ); vue.onMounted(() => { vue.nextTick().then(() => init(props.track)); }); vue.onBeforeUnmount(() => { unwatchTrack(); // Clean up the track. if (trackRef.value) { trackRef.value.pause(); trackRef.value.srcObject = null; trackRef.value.load(); } // Detach the track from the DOM. if (typeof props.track.detach === "function") { props.track.detach(trackRef.value); } // Remove Event Bus Listners Object.keys(lstnrs).forEach((i) => { bus.off(i, lstnrs[i]); }); }); // Register Event Listeners Object.keys(lstnrs).forEach((i) => { bus.on(i, lstnrs[i]); }); return (_ctx, _cache) => { return (largeVideo.value) ? 
(vue.openBlock(), vue.createElementBlock("div", { key: 0, ref_key: "scenery", ref: scenery, class: "Screen ref", id: `track-${trackId.value}` }, [ vue.createElementVNode("canvas", { ref_key: "canvasRef", ref: canvasRef, class: "largeVideoBackground" }, null, 512 /* NEED_PATCH */), vue.createElementVNode("div", { "data-aspect": "4:3", class: vue.normalizeClass({ 'audio-only': !!status.value.videoMuted }) }, [ vue.createElementVNode("div", _hoisted_2$1, [ vue.createElementVNode("span", null, vue.toDisplayString(trimName( __props.track.isLocal() ? "Me" : participant.value?.getDisplayName() ?? "Guest" )), 1 /* TEXT */), vue.createVNode(vue.unref(VueFeather), { type: `mic${status.value.audioMuted ? '-off' : ''}` }, null, 8 /* PROPS */, ["type"]) ]), vue.withDirectives(vue.createElementVNode("video", { autoplay: "", ref_key: "trackRef", ref: trackRef, id: trackId.value }, null, 8 /* PROPS */, _hoisted_3$1), [ [vue.vShow, !status.value.videoMuted] ]) ], 2 /* CLASS */) ], 8 /* PROPS */, _hoisted_1$1)) : (!__props.isAudio) ? (vue.openBlock(), vue.createElementBlock("div", { key: 1, "data-aspect": "4:3", style: {"margin":"10px","min-width":"calc(100% - 10px)","height":"147.75px"}, class: vue.normalizeClass({ 'audio-only': !!status.value.videoMuted }), id: `track-${trackId.value}` }, [ vue.createElementVNode("div", _hoisted_5$1, [ vue.createElementVNode("span", null, vue.toDisplayString(trimName( __props.track.isLocal() ? "Me" : participant.value?.getDisplayName() ?? "Guest" )), 1 /* TEXT */), vue.createVNode(vue.unref(VueFeather), { type: `mic${status.value.audioMuted ? '-off' : ''}` }, null, 8 /* PROPS */, ["type"]) ]), vue.withDirectives(vue.createElementVNode("video", { autoplay: "", ref_key: "trackRef", ref: trackRef, id: trackId.value }, null, 8 /* PROPS */, _hoisted_6), [ [vue.vShow, !status.value.videoMuted] ]) ], 10 /* CLASS, PROPS */, _hoisted_4$1)) : (!__props.track.isLocal()) ? 
(vue.openBlock(), vue.createElementBlock("div", { key: 2, class: "audio-container", id: `track-${trackId.value}` }, [ vue.createElementVNode("audio", { autoplay: "", ref_key: "trackRef", ref: trackRef, id: trackId.value }, null, 8 /* PROPS */, _hoisted_8) ], 8 /* PROPS */, _hoisted_7)) : vue.createCommentVNode("v-if", true) } } }; script$1.__file = "src/components/MediaTrack.vue"; const _hoisted_1 = { key: 0, class: "waiting banner z-5" }; const _hoisted_2 = { key: 2, class: "Conference" }; const _hoisted_3 = { key: 1, class: "Dish", ref: "dishesRef" }; const _hoisted_4 = ["onClick"]; const _hoisted_5 = ["onClick"]; var script = { __name: 'VideoConferencing', props: { msg: String, appDomain: { type: String, default: "meet.jit.si", }, roomName: { type: String, required: true, validator: (val) => { const check = val.length > 3 && !/\s/.test(val) && val === val.toLowerCase(); if (!check) { console.error( "Room name must be at least 4 characters long, have no spaces, and contain no capital letters" ); } return check; }, }, userName: { type: String, validator: (val) => { // Ensure user name is at least 3 characters long and has no spaces or capital letters const check = val.length < 1 || (!/\s/.test(val) && val === val.toLowerCase()); if (!check) { console.error( "User name must have no spaces, and contain no capital letters" ); } return check; }, }, displayName: { type: String, }, roomPassword: { type: String, }, videoConstraints: { type: [String, Number], default: 360, //720 }, appToken: { type: String, }, allowVideo: { type: Boolean, default: true, }, allowAudio: { type: Boolean, default: true, }, debugLevel: { type: String, default: "ERROR", //"ERROR", }, aspect: { type: Number, default: 0, }, alwaysShowControls: { type: Boolean, default: false, }, autoConnect: { type: Boolean, default: false, }, }, emits: [ "error", "ready", "left", "joined", "loading", "started", "stopped", "connected", "trackAdded", ], setup(__props, { emit }) { const props = __props; /** * @type {JMeetJS} */ const JitsiMeetJS = window.JitsiMeetJS || JMeetJS; const scenery = vue.ref(null); core.useResizeObserver(scenery, () => { if (status.value.show) { resize(props.aspect); } }); const showControls = vue.ref(props.alwaysShowControls); const localAudioTrack = vue.ref(null); const localVideoTrack = vue.ref(null); const activeVideoTrack = vue.ref(null); const videoTracks = vue.ref([]); const audioTracks = vue.ref([]); const conference = vue.ref(null); const speakerId = vue.ref(null); const baseKey = vue.ref(new Date().getTime()); const status = vue.ref({ loading: false, show: false, audioMuted: false, videoMuted: false, }); const LOGLEVEL = props.debugLevel ? (props.debugLevel || "INFO").toUpperCase() : "ERROR"; JitsiMeetJS.init(); JitsiMeetJS.setLogLevel(JitsiMeetJS.logLevels[LOGLEVEL]); const addTrack = (track) => { if (track.getType() === "video" && props.allowVideo) { videoTracks.value.push(track); if (track.isLocal()) { activeVideoTrack.value = track; localVideoTrack.value = track; } } else if (track.getType() === "audio" && props.allowAudio) { audioTracks.value.push(track); if (track.isLocal()) { localAudioTrack.value = track; } status.value[track.getType() + "Muted"] = track.isMuted(); } }; const removeTrack = (track) => { if (track.getType() === "video" && props.allowVideo) { videoTracks.value = videoTracks.value.filter( (e) => e.getId() !== track.getId() ); if (track.getId() === activeVideoTrack.value?.getId()) { activeVideoTrack.value = videoTracks.value[0] ?? 
null; } } else if (track.getType() === "audio" && props.allowAudio) { audioTracks.value = audioTracks.value.filter( (e) => e.getId() !== track.getId() ); } }; // Define event listeners in a separate object for readability and maintainability const lstnrs = { TRACK_ADDED: (track) => { emit("trackAdded", track); bus.emit("TRACK_ADDED", track); addTrack(track); }, TRACK_REMOVED: (track) => { bus.emit("TRACK_REMOVED", track); if (track.containers.length) { track.containers.forEach((el) => { track.detach(el); el.parentElement.remove(); }); removeTrack(track); } }, DOMINANT_SPEAKER_CHANGED: (id) => { bus.emit("DOMINANT_SPEAKER_CHANGED", id); speakerId.value = id; if (videoTracks.value.length) { activeVideoTrack.value = videoTracks.value.find( (e) => e.getParticipantId() === id ); } }, USER_LEFT: (user) => { emit("left", user); bus.emit("USER_LEFT", user); if (conference.value.participants.size === 0) { stop(); } }, TRACK_MUTE_CHANGED: (track) => { let resizeTimeout = setTimeout(() => { resize(props.aspect, resizeTimeout); }, 10); bus.emit("TRACK_MUTE_CHANGED", track); if ( [ localVideoTrack.value?.getParticipantId(), localAudioTrack.value?.getParticipantId(), ].includes(track.getParticipantId()) ) { status.value[track.type + "Muted"] = track.isMuted(); } }, }; const setName = (name, titlecase) => { if (!name) { // If no name is provided, generate a random one const letters = "abcdefghijklmnopqrstuvwxyz"; name = ""; for (let i = 0; i < 10; i++) { name += letters[Math.floor(Math.random() * letters.length)]; } } if (titlecase) { name = name.replace(/\b\w/g, (l) => l.toUpperCase()); } return name; }; const connectNow = () => { const roomName = props.roomName; const username = setName(props.userName); connect(roomName, props.appToken) .then((connection) => { emit("connected"); return createAndJoinRoom( connection, roomName, username, props.roomPassword, props.videoConstraints ); }) .then((room) => { conference.value = room; // Load all event listeners in debug mode and expose the conference object to the window if (props.debugLevel === "DEBUG") { window.conference = room; window.JitsiMeetJS = JitsiMeetJS; Object.keys(JitsiMeetJS.events.conference).forEach((key) => { room.on(JitsiMeetJS.events.conference[key], (e) => { console.log("Conference Event", key, e); }); }); } room.setDisplayName(setName(props.displayName ?? props.userName, true)); room.on( JitsiMeetJS.events.conference.DOMINANT_SPEAKER_CHANGED, lstnrs.DOMINANT_SPEAKER_CHANGED ); room.on( JitsiMeetJS.events.conference.TRACK_REMOVED, lstnrs.TRACK_REMOVED ); room.on( JitsiMeetJS.events.conference.TRACK_MUTE_CHANGED, lstnrs.TRACK_MUTE_CHANGED ); room.on(JitsiMeetJS.events.conference.TRACK_ADDED, lstnrs.TRACK_ADDED); room.on(JitsiMeetJS.events.conference.USER_LEFT, lstnrs.USER_LEFT); createTracksAndAddToRoom(room); status.value.loading = false; status.value.show = true; emit("joined", room); emit("loading", false); }) .catch((error) => { console.error(error); status.value.show = false; status.value.loading = false; emit("loading", false); emit("error", error); }); }; const muteMe = (type = "audio") => { const track = type === "audio" ? 
localAudioTrack.value : localVideoTrack.value; if (track) { if (track.isMuted()) { track.unmute(); } else { track.mute(); } } }; const start = () => { status.value.loading = true; emit("loading", true); let startTimeout = setTimeout(() => { connectNow(); emit("started"); clearTimeout(startTimeout); startTimeout = null; }, 1000); }; const stop = () => { unwatcherSc1(); unwatcherMv2(); if (conference.value?.isJoined()) { status.value.loading = true; emit("loading", true); conference.value.leave().then(() => { status.value.show = false; status.value.loading = false; conference.value = null; videoTracks.value = []; audioTracks.value = []; activeVideoTrack.value = null; localAudioTrack.value = null; localVideoTrack.value = null; baseKey.value = new Date().getTime(); emit("stopped"); emit("loading", false); }); // Remove EventListeners if (conference.value) { Object.keys(lstnrs).forEach((i) => { conference.value.off(JitsiMeetJS.events.conference[i], lstnrs[i]); }); } } }; const unwatcherSc1 = vue.watch(showControls, () => { if (status.value.show) { let resizeTimeout = setTimeout(() => { resize(props.aspect, resizeTimeout); }, 3); } }); const unwatcherMv2 = vue.watch( () => status.value.videoMuted, () => { resize(props.aspect); } ); vue.onMounted(() => { emit("ready"); if (props.autoConnect) { start(); } }); vue.onBeforeUnmount(() => { stop(); }); return (_ctx, _cache) => { return (vue.openBlock(), vue.createBlock(vue.unref(components$1.UseFullscreen), null, { default: vue.withCtx(({ toggle: toggleFullscreen, isFullscreen }) => [ (vue.openBlock(), vue.createElementBlock("div", { class: "Scenary", key: baseKey.value, ref_key: "scenery", ref: scenery }, [ ( !conference.value?.participants?.size && (status.value.show || status.value.loading) ) ? (vue.openBlock(), vue.createElementBlock("div", _hoisted_1, [ vue.createVNode(script$4, { show: "", height: "30px", color: status.value.show ? 'blue' : 'brown', text: status.value.show ? 'Waiting for others to join' : 'Connecting' }, vue.createSlots({ _: 2 /* DYNAMIC */ }, [ (status.value.show) ? { name: "button", fn: vue.withCtx(() => [ vue.createElementVNode("button", { class: "con-control-btn con-hover-bg con-text-red", onClick: stop }, [ vue.createVNode(vue.unref(VueFeather), { type: "phone-off" }) ]) ]), key: "0" } : undefined ]), 1032 /* PROPS, DYNAMIC_SLOTS */, ["color", "text"]) ])) : vue.createCommentVNode("v-if", true), (!__props.allowVideo) ? (vue.openBlock(), vue.createBlock(script$5, { key: 1, class: "Conference", tracks: audioTracks.value, conference: conference.value, onActiveBlur: _cache[0] || (_cache[0] = $event => (showControls.value = false)), onActiveFocus: _cache[1] || (_cache[1] = $event => (showControls.value = true)) }, null, 8 /* PROPS */, ["tracks", "conference"])) : (vue.openBlock(), vue.createElementBlock("div", _hoisted_2, [ vue.createCommentVNode(" Screen Here "), (activeVideoTrack.value && conference.value?.participants?.size < 2) ? (vue.openBlock(), vue.createBlock(script$1, { "is-active": "", key: activeVideoTrack.value.getId(), track: activeVideoTrack.value, onActiveBlur: _cache[2] || (_cache[2] = $event => (showControls.value = false)), onActiveFocus: _cache[3] || (_cache[3] = $event => (showControls.value = true)) }, null, 8 /* PROPS */, ["track"])) : vue.createCommentVNode("v-if", true), (conference.value?.participants?.size) ? 
(vue.openBlock(), vue.createElementBlock("div", _hoisted_3, [ vue.createCommentVNode(" Dishes Here "), (vue.openBlock(true), vue.createElementBlock(vue.Fragment, null, vue.renderList(videoTracks.value.filter( (t) => t?.getId() !== activeVideoTrack.value?.getId() && conference.value?.participants?.size < 2 ), (track) => { return (vue.openBlock(), vue.createBlock(script$1, { style: {"width":"100%"}, key: track.getId(), track: track }, null, 8 /* PROPS */, ["track"])) }), 128 /* KEYED_FRAGMENT */)) ], 512 /* NEED_PATCH */)) : vue.createCommentVNode("v-if", true) ])), vue.renderSlot(_ctx.$slots, "controls", { toggleFullscreen: toggleFullscreen, activeVideoTrack: activeVideoTrack.value, videoTracks: videoTracks.value, conference: conference.value, muteMe: muteMe, tracks: { video: localVideoTrack.value, audio: localAudioTrack.value }, status: status.value, start: start, stop: stop }, () => [ vue.createElementVNode("div", { class: vue.normalizeClass(["con-gap-3 con-justify-center con-control-buttons z-3", { 'requires-focus': !__props.alwaysShowControls, 'focus-aquired': showControls.value || !conference.value?.participants?.size, }]) }, [ (status.value.show && __props.allowAudio) ? (vue.openBlock(), vue.createElementBlock("button", { key: 0, class: "con-control-btn con-hover-bg", onClick: _cache[4] || (_cache[4] = $event => (muteMe())) }, [ vue.createVNode(vue.unref(VueFeather), { type: `mic${status.value.audioMuted ? '-off' : ''}` }, null, 8 /* PROPS */, ["type"]) ])) : vue.createCommentVNode("v-if", true), (status.value.show && __props.allowVideo) ? (vue.openBlock(), vue.createElementBlock("button", { key: 1, class: "con-control-btn con-hover-bg", onClick: _cache[5] || (_cache[5] = $event => (muteMe('video'))) }, [ vue.createVNode(vue.unref(VueFeather), { type: `video${status.value.videoMuted ? '-off' : ''}` }, null, 8 /* PROPS */, ["type"]) ])) : vue.createCommentVNode("v-if", true), (!status.value.show && (!__props.autoConnect || status.value.loading)) ? (vue.openBlock(), vue.createElementBlock("button", { key: 2, class: "con-control-btn con-hover-bg con-text-green", onClick: start }, [ vue.createVNode(vue.unref(VueFeather), { type: `phone${status.value.loading ? '-call' : ''}` }, null, 8 /* PROPS */, ["type"]) ])) : vue.createCommentVNode("v-if", true), (status.value.show) ? (vue.openBlock(), vue.createElementBlock("button", { key: 3, class: "con-control-btn con-hover-bg con-text-red", onClick: stop }, [ vue.createVNode(vue.unref(VueFeather), { type: "phone-off" }) ])) : vue.createCommentVNode("v-if", true), vue.createVNode(script$4, { show: status.value.loading, color: status.value.show ? 'red' : 'green' }, null, 8 /* PROPS */, ["show", "color"]), (status.value.show) ? (vue.openBlock(), vue.createElementBlock("button", { key: 4, class: "con-control-btn con-hover-bg", onClick: toggleFullscreen }, [ vue.createVNode(vue.unref(VueFeather), { type: isFullscreen ? 'minimize' : 'maximize' }, null, 8 /* PROPS */, ["type"]) ], 8 /* PROPS */, _hoisted_4)) : vue.createCommentVNode("v-if", true), (conference.value && !isFullscreen) ? 
(vue.openBlock(), vue.createBlock(script$2, { key: 5, room: conference.value, aspect: __props.aspect }, { button: vue.withCtx(({ configDialog }) => [ vue.renderSlot(_ctx.$slots, "configbutton", { configDialog: configDialog }, () => [ vue.createElementVNode("button", { class: "con-control-btn con-hover-bg", onClick: $event => (configDialog()) }, [ vue.createVNode(vue.unref(VueFeather), { type: "settings" }) ], 8 /* PROPS */, _hoisted_5) ]) ]), _: 3 /* FORWARDED */ }, 8 /* PROPS */, ["room", "aspect"])) : vue.createCommentVNode("v-if", true) ], 2 /* CLASS */) ]), vue.createCommentVNode(" Audio Here "), (vue.openBlock(true), vue.createElementBlock(vue.Fragment, null, vue.renderList(audioTracks.value, (track) => { return (vue.openBlock(), vue.createBlock(script$1, { key: track.getId(), track: track, "is-audio": "" }, null, 8 /* PROPS */, ["track"])) }), 128 /* KEYED_FRAGMENT */)) ])) ]), _: 3 /* FORWARDED */ })) } } }; script.__file = "src/components/VideoConferencing.vue"; var components = { VideoConference: script, VoiceConference: script$5, DeviceSelector: script$2, WaitingLoader: script$4, MediaTrack: script$1, ConModal: script$3, }; const plugin = { install (app) { for (const prop in components) { if (Object.keys(components).includes(prop)) { const component = components[prop]; app.component(prop, component); } } } }; const ConModal = components.ConModal; const DeviceSelector = components.DeviceSelector; const MediaTrack = components.MediaTrack; const VideoConference = components.VideoConference; const VoiceConference = components.VoiceConference; const WaitingLoader = components.WaitingLoader; exports.ConModal = ConModal; exports.DeviceSelector = DeviceSelector; exports.MediaTrack = MediaTrack; exports.VideoConference = VideoConference; exports.VoiceConference = VoiceConference; exports.WaitingLoader = WaitingLoader; exports.default = plugin;
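// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, never executed by this bundle). It shows one
// way a consuming Vue 3 app could register the plugin exported above and render
// the VideoConference component. The require path, room name, user name and
// handler names are assumptions for the example, not part of the library.
//
//   const { createApp } = require('vue');
//   const VueJitsi = require('./index.cjs');   // path to this bundle (assumed)
//   const App = require('./App.vue');
//
//   const app = createApp(App);
//   app.use(VueJitsi.default);                 // registers VideoConference, VoiceConference,
//                                              // DeviceSelector, WaitingLoader, MediaTrack, ConModal
//   app.mount('#app');
//
//   // App.vue template: per the roomName validator above, the room name must be
//   // longer than 3 characters, lower-case, and contain no spaces.
//   //
//   //   <VideoConference
//   //     room-name="demo-room"
//   //     user-name="guest"
//   //     :video-constraints="360"
//   //     :auto-connect="true"
//   //     @joined="onJoined"
//   //     @error="onError"
//   //   />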