diff --git a/package-lock.json b/package-lock.json index 85ec172..66e644b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,7 +15,6 @@ "codemirror": "^6.0.1", "events": "^3.3.0", "inter-ui": "^3.19.3", - "janode": "^1.6.7", "nats.ws": "^1.9.0", "pinia": "^2.0.21", "pinia-plugin-persistedstate": "^2.3.0", @@ -1422,14 +1421,6 @@ "node": ">=8" } }, - "node_modules/bindings": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", - "dependencies": { - "file-uri-to-path": "1.0.0" - } - }, "node_modules/blob-util": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", @@ -3225,11 +3216,6 @@ "node": "^10.12.0 || >=12.0.0" } }, - "node_modules/file-uri-to-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" - }, "node_modules/fill-range": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", @@ -4140,33 +4126,12 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, - "node_modules/isomorphic-ws": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", - "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==", - "peerDependencies": { - "ws": "*" - } - }, "node_modules/isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==", "dev": true }, - "node_modules/janode": { - "version": "1.6.7", - "resolved": "https://registry.npmjs.org/janode/-/janode-1.6.7.tgz", - "integrity": "sha512-lbq27rQBbI7zzj31kwm9ccJsKJYSFSmihSxRLwXtMamBaodY08mjKID53C9Rlwcv3uHwoDnFLiIHwCKSxQU96A==", - "dependencies": { - "isomorphic-ws": "^4.0.1", - "unix-dgram": "^2.0.4", - "ws": "^8.0.0" - }, - "engines": { - "node": " >=14.13.1 || >=16.0.0" - } - }, "node_modules/joi": { "version": "17.7.0", "resolved": "https://registry.npmjs.org/joi/-/joi-17.7.0.tgz", @@ -4694,11 +4659,6 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, - "node_modules/nan": { - "version": "2.17.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.17.0.tgz", - "integrity": "sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ==" - }, "node_modules/nanoclone": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/nanoclone/-/nanoclone-0.2.1.tgz", @@ -6591,19 +6551,6 @@ "node": ">= 10.0.0" } }, - "node_modules/unix-dgram": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/unix-dgram/-/unix-dgram-2.0.6.tgz", - "integrity": "sha512-AURroAsb73BZ6CdAyMrTk/hYKNj3DuYYEuOaB8bYMOHGKupRNScw90Q5C71tWJc3uE7dIeXRyuwN0xLLq3vDTg==", - "hasInstallScript": true, - "dependencies": { - "bindings": "^1.5.0", - "nan": "^2.16.0" - }, - "engines": { - "node": ">=0.10.48" - } - }, "node_modules/untildify": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", @@ -7083,6 +7030,7 @@ "version": "8.11.0", "resolved": 
"https://registry.npmjs.org/ws/-/ws-8.11.0.tgz", "integrity": "sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==", + "dev": true, "engines": { "node": ">=10.0.0" }, @@ -8215,14 +8163,6 @@ "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", "dev": true }, - "bindings": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", - "requires": { - "file-uri-to-path": "1.0.0" - } - }, "blob-util": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", @@ -9482,11 +9422,6 @@ "flat-cache": "^3.0.4" } }, - "file-uri-to-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" - }, "fill-range": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", @@ -10115,28 +10050,12 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, - "isomorphic-ws": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz", - "integrity": "sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w==", - "requires": {} - }, "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==", "dev": true }, - "janode": { - "version": "1.6.7", - "resolved": "https://registry.npmjs.org/janode/-/janode-1.6.7.tgz", - "integrity": "sha512-lbq27rQBbI7zzj31kwm9ccJsKJYSFSmihSxRLwXtMamBaodY08mjKID53C9Rlwcv3uHwoDnFLiIHwCKSxQU96A==", - "requires": { - "isomorphic-ws": "^4.0.1", - "unix-dgram": "^2.0.4", - "ws": "^8.0.0" - } - }, "joi": { "version": "17.7.0", "resolved": "https://registry.npmjs.org/joi/-/joi-17.7.0.tgz", @@ -10549,11 +10468,6 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, - "nan": { - "version": "2.17.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.17.0.tgz", - "integrity": "sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ==" - }, "nanoclone": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/nanoclone/-/nanoclone-0.2.1.tgz", @@ -11899,15 +11813,6 @@ "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", "dev": true }, - "unix-dgram": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/unix-dgram/-/unix-dgram-2.0.6.tgz", - "integrity": "sha512-AURroAsb73BZ6CdAyMrTk/hYKNj3DuYYEuOaB8bYMOHGKupRNScw90Q5C71tWJc3uE7dIeXRyuwN0xLLq3vDTg==", - "requires": { - "bindings": "^1.5.0", - "nan": "^2.16.0" - } - }, "untildify": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", @@ -12220,6 +12125,7 @@ "version": "8.11.0", "resolved": "https://registry.npmjs.org/ws/-/ws-8.11.0.tgz", "integrity": "sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==", + "dev": true, "requires": {} }, "xml-name-validator": { diff --git 
a/package.json b/package.json index 09ae8c2..9d58df2 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,6 @@ "codemirror": "^6.0.1", "events": "^3.3.0", "inter-ui": "^3.19.3", - "janode": "^1.6.7", "nats.ws": "^1.9.0", "pinia": "^2.0.21", "pinia-plugin-persistedstate": "^2.3.0", diff --git a/src/stores/janus.ts b/src/stores/janus.ts index 4f24a4d..bc0e8a5 100644 --- a/src/stores/janus.ts +++ b/src/stores/janus.ts @@ -1,5 +1,4 @@ import { defineStore, acceptHMRUpdate } from "pinia"; -import adapter from "webrtc-adapter"; import { toRaw } from "vue"; import { VideoStreamMerger, @@ -8,13 +7,16 @@ import { type ConstructorOptions, } from "video-stream-merger"; import { type JanusStream, ConnectionStatus } from "@/types"; -import Janode from "janode"; -import StreamingPlugin from "janode/plugins/streaming"; +// import Janode from "janode"; +// import StreamingPlugin from "janode/plugins/streaming"; +import Janus from "@/vendor/janus"; +import type { JanusJS } from "@/vendor/janus"; import { handleError } from "@/utils"; import { useVideoStore } from "./video"; import { useCameraSettingsStore } from "./cameraSettings"; +import { success } from "./alerts"; -window.adapter = adapter; +export const IceServers = [{ urls: "stun:stun.l.google.com:19302" }]; function getJanusUri() { const hostname = window.location.hostname; @@ -28,10 +30,12 @@ function getJanusUri() { export const useJanusStore = defineStore({ id: "janus", state: () => ({ - janusWsConnection: undefined as undefined | any, // Janode.Connection, but Janode doe snot export types - janusSession: undefined as undefined | any, + clientId: Janus.randomString(12), + // janus: undefined as undefined | Janus, + // janusWsConnection: undefined as undefined | any, // Janode.Connection, but Janode doe snot export types + // janusSession: undefined as undefined | any, janusPeerConnection: undefined as undefined | RTCPeerConnection, - janusStreamingPluginHandle: undefined as undefined | any, + janusStreamingPluginHandle: undefined as undefined | JanusJS.PluginHandle, selectedStream: undefined as undefined | JanusStream, streamList: [] as Array, status: ConnectionStatus.ConnectionNotStarted as ConnectionStatus, @@ -63,248 +67,323 @@ export const useJanusStore = defineStore({ } this.$reset(); }, - async connectJanus(): Promise { - const janusUri = getJanusUri(); - const connectOpts = { - is_admin: false, - address: { - url: janusUri, - }, - }; - const janusWsConnection = await Janode.connect(connectOpts).catch( - (e: Error) => handleError("Janus websocket connection failed", e) - ); - console.log("Got janusWsConnection"); - const janusSession = await janusWsConnection - .create() - .catch((e: Error) => - handleError("Failed to create Janus websocket session ", e) - ); - const janusStreamingPluginHandle = await janusSession - .attach(StreamingPlugin) - .catch((e: Error) => - handleError("Failed to create Janus streaming handle", e) - ); - const streamListRes = await janusStreamingPluginHandle.list(); - console.log("Found streamlist", streamListRes); - // get detailed info from streamlist - const streamList = await Promise.all( - streamListRes.list.map(async (stream: any) => { - const res = await janusStreamingPluginHandle.info({ id: stream.id }); - return { - description: res.description, - enabled: res.enabled, - id: res.id, - media: res.media, - name: res.name, - type: res.type, - viewers: res.viewers, - } as JanusStream; - }) - ); - - console.log("Fetched detailed stream info", streamList); - - this.$patch({ - streamList, - }); - - 
janusStreamingPluginHandle.once(Janode.EVENT.HANDLE_DETACHED, () => { - console.log( - `${janusStreamingPluginHandle.name} manager handle detached` - ); - }); - // Janode exports "EVENT" property with core events - janusStreamingPluginHandle.on( - Janode.EVENT.HANDLE_WEBRTCUP, - (_data: any) => { - const videoStore = useVideoStore(); - videoStore.webrtcup = true; - console.log("webrtcup event"); - } - ); - janusStreamingPluginHandle.on( - Janode.EVENT.HANDLE_SLOWLINK, - (evtdata: any) => { - console.log("slowlink event", evtdata); - } - ); - janusStreamingPluginHandle.on( - Janode.EVENT.HANDLE_HANGUP, - (evtdata: any) => console.log("hangup event", evtdata) - ); - janusStreamingPluginHandle.on( - Janode.EVENT.HANDLE_DETACHED, - (evtdata: any) => console.log("detached event", evtdata) - ); - - janusStreamingPluginHandle.on(Janode.EVENT.HANDLE_MEDIA, (evtdata: any) => - console.log("media event", evtdata) - ); - - janusWsConnection.on(Janode.EVENT.CONNECTION_CLOSED, () => { - console.log(`Connection with ${janusUri} closed`); - }); - - janusWsConnection.on( - Janode.EVENT.CONNECTION_ERROR, - ({ message }: { message: any }) => { - console.log(`Connection with Janus error (${message})`); - // TODO notify clients via alert - - // TODO reconnect - // notify clients - } - ); - janusStreamingPluginHandle.on( - StreamingPlugin.EVENT.STREAMING_STATUS, - (evtdata: any) => { - console.log( - `${janusStreamingPluginHandle.name} streaming handle event status:`, - evtdata - ); - } - ); + async loadStreamsList(pluginHandle: JanusJS.PluginHandle) { + const self = this; + const listRequest = { request: "list" }; + console.log("Sending request to janus.plugin.streaming", listRequest); + pluginHandle.send({ + message: listRequest, + success: (result: any) => { + if (!result) { + console.warn("janus.plugin.streaming list returned no response"); + return; + } + if (result["list"]) { + const streamListRes = result["list"]; + console.log( + "Fetched list of available WebRTC streams:", + streamListRes + ); + // TODO do we need detailed stream info? 
+ // const streamList = await Promise.all( + // streamListRes.map(async (stream: any) => { + // const infoRequest = { request: "info", id: stream.id }; - this.$patch({ - janusWsConnection, - janusSession, - janusStreamingPluginHandle, + // const res = await janusStreamingPluginHandle.info({ id: stream.id }); + // return { + // description: res.description, + // enabled: res.enabled, + // id: res.id, + // media: res.media, + // name: res.name, + // type: res.type, + // viewers: res.viewers, + // } as JanusStream; + // }) + // ); + // self.$patch({ streamList}) + } + }, }); - if (streamList.length > 0 && this.selectedStream == undefined) { - console.log("Setting selected stream to:", streamList[0]); - this.$patch({ selectedStream: streamList[0] }); - } - return true; - }, - async closePC() { - if (this.janusPeerConnection !== undefined) { - const janusPeerConnection = toRaw(this.janusPeerConnection); - - console.log("stopping PeerConnection"); - janusPeerConnection.close(); - this.$patch({ janusPeerConnection: undefined }); - } }, - async trickle(event: any) { - const { candidate } = event; - if (this.janusStreamingPluginHandle === undefined) { - console.warn( - "trickle() called with undefined janusStreamingPluginHandle" - ); - return; - } - const janusStreamingPluginHandle = toRaw(this.janusStreamingPluginHandle); + async onConnectSuccess(janus: JanusJS.Janus) { + const self = this; + let streaming = undefined as undefined | JanusJS.PluginHandle; - if (candidate === undefined) { - janusStreamingPluginHandle.trickleComplete().catch((e: any) => { - console.error("trickleComplete error", e); - }); - } else { - janusStreamingPluginHandle.trickle(candidate).catch((e: any) => { - console.error("trickle error", e); - }); - } - }, - async jsepAnswer(offer: any) { + // bounding box overlay is composited client-side using VideoStreamMerger element const cameraSettings = useCameraSettingsStore(); if (cameraSettings.settings === undefined) { await cameraSettings.loadSettings(); } const streamSettings = toRaw(cameraSettings.settings); - const pc = new RTCPeerConnection({ - iceServers: [ - { - urls: "stun:stun.l.google.com:19302", - }, - ], - }); - pc.onnegotiationneeded = (event) => - console.log("pc.onnegotiationneeded", event); - pc.onicecandidate = (event) => - this.trickle({ candidate: event.candidate }); - pc.oniceconnectionstatechange = () => { - console.log( - "pc.oniceconnectionstatechange => " + pc.iceConnectionState - ); - if ( - pc.iceConnectionState === "failed" || - pc.iceConnectionState === "closed" - ) { - console.warn("Stopping all streams and closing peer connection"); - this.stopAllStreams(); - this.closePC(); - } - }; - const merger = new VideoStreamMerger({ fps: streamSettings?.camera.framerate_n, height: streamSettings?.camera.height, width: streamSettings?.camera.width, } as ConstructorOptions); - merger.start(); - this.setVideoElement(merger.result); - pc.ontrack = (event) => { - console.log("pc.ontrack", event); + // create streaming plugin handle + janus.attach({ + plugin: "janus.plugin.streaming", + opaqueId: self.clientId, + success: async (pluginHandle: JanusJS.PluginHandle) => { + this.$patch({ janusStreamingPluginHandle: pluginHandle }); + streaming = pluginHandle; + await self.loadStreamsList(pluginHandle); + }, + error: (error) => { + handleError( + "Error establishing janus.plugin.streaming handle", + new Error(error) + ); + }, + iceState: ( + state: "connected" | "failed" | "disconnected" | "closed" + ) => { + console.log(`WebRTC ice state is ${state}`); + }, + 
webrtcState: (isConnected: boolean) => {
+        console.log(`WebRTC is connected: ${isConnected}`);
+      },
+      slowLink: (uplink: boolean, lost: number, mid: string) => {
+        console.warn(
+          `WebRTC slow link detected. uplink=${uplink} lost=${lost} mid=${mid}`
+        );
+      },
+      onmessage: (msg: JanusJS.Message, jsep?: JanusJS.JSEP) => {
+        console.log("Received Janus message", msg, "JSEP: ", jsep);
+        const result = msg["result"];
+
+        // handle message contents
+        if (result) {
+          if (result["status"]) {
+            const status = result["status"];
+            console.log("WebRTC connection status: ", status);
+          } else if (msg["streaming"] === "event") {
+            const event = msg["streaming"];
+            console.log("WebRTC event received: ", event);
+          }
+        } else if (msg["error"]) {
+          handleError("Janus Gateway Error", new Error(msg["error"]));
+        }
-      event.track.onunmute = (evt) => {
-        console.log("track.onunmute", evt);
-      };
-      event.track.onmute = (evt) => {
-        console.log("track.onmute", evt);
-      };
-      event.track.onended = (evt) => {
-        console.log("track.onended", evt);
-      };
-      const videoStream = new MediaStream([event.track]);
+        // handle jsep contents
+        if (jsep) {
+          console.log("Handling SDP: ", jsep);
+          streaming?.createAnswer({
+            jsep,
+            media: { audioSend: null, videoSend: null },
+            success: (jsep) => {
+              console.log("Received SDP: ", jsep);
+              const startStreamRequest = { request: "start" };
+              console.log("Sending start stream request", startStreamRequest);
+              streaming?.send({ message: startStreamRequest, jsep: jsep });
+            },
+            error: (error) => {
+              handleError("Janus Gateway SDP Error", new Error(error));
+            },
+          });
+        }
+      },
+      onremotetrack: (track: MediaStreamTrack, mid: string, on: boolean) => {
+        console.log(`Received remote track mid=${mid} on=${on}`, track);
+        const videoStream = new MediaStream([track]);
+        const showOverlay = self.showOverlay;
-      // if showOverlay is true
-      if (this.showOverlay) {
-        const opts = {
-          x: 0, // position of the topleft corner
-          y: 0,
-          width: merger.width,
-          height: merger.height,
-          mute: true,
-          muted: true, // we don't want sound from the screen (if there is any)
-          index: 0,
-          draw: null as null | DrawFunction,
-        } as AddStreamOptions;
-        if (event.transceiver.mid !== undefined) {
-          opts.index = parseInt(
-            event.transceiver.mid?.replace("v", "") as string
-          );
+        // if showOverlay is true, render composite
+        if (showOverlay) {
+          const opts = {
+            x: 0, // position of the topleft corner
+            y: 0,
+            width: merger.width,
+            height: merger.height,
+            mute: true,
+            muted: true, // we don't want sound from the screen (if there is any)
+            index: 0,
+            draw: null as null | DrawFunction,
+          } as AddStreamOptions;
+          if (mid !== undefined) {
+            opts.index = parseInt(mid.replace("v", "") as string);
-          // remove black background from overlay video
-          if (opts.index == 2) {
-            opts.draw = (ctx, frame, done) => {
-              ctx.globalCompositeOperation = "screen";
-              ctx.drawImage(frame, 0, 0, merger.width, merger.height);
-              done();
-            };
+            // remove black background from overlay video
+            if (opts.index == 2) {
+              opts.draw = (ctx, frame, done) => {
+                ctx.globalCompositeOperation = "screen";
+                ctx.drawImage(frame, 0, 0, merger.width, merger.height);
+                done();
+              };
+            }
+            console.log("Merging stream with opts:", opts, track);
+            merger.addStream(videoStream, opts);
+          }
+        } else if (mid === "v1") {
+          console.log(
+            `showOverlay=${showOverlay}, only adding stream with mid=${mid}`
+          );
+          merger.addStream(videoStream, undefined);
+        }
+
+        if (on) {
+          merger.start();
+          this.setVideoElement(merger.result);
+        }
+      },
+    });
+  },
+
+  connectJanus() {
+    //
initialize janus library + const self = this; + Janus.init({ + debug: "all", + callback: () => { + // is WebRTC supported for this browser? + if (!Janus.isWebrtcSupported()) { + const msg = `No WebRTC support for browser: ${Janus.webRTCAdapter.browserDetails.browser} - ${Janus.webRTCAdapter.browserDetails.version}`; + console.error(msg); + return handleError(msg, new Error(msg)); } - console.log("Merging stream with opts:", opts, event); - merger.addStream(videoStream, opts); - } - // otherwise, just add main video stream and ignore bounding box track - else if (event.transceiver.mid == "v1") { - console.log("Adding stream with mid v1"); - merger.addStream(videoStream, undefined); - } - }; - this.$patch({ janusPeerConnection: pc }); - await pc.setRemoteDescription(offer); - console.log("set remote sdp OK"); - const answer = await pc.createAnswer(); - console.log("create answer OK"); - pc.setLocalDescription(answer); - console.log("set local sdp OK"); - return answer; + const server = getJanusUri(); + const janus = new Janus({ + server, + iceServers: IceServers, + success: () => self.onConnectSuccess(janus), + error: (error) => + handleError(`Connection to ${server} failed`, error), + destroyed: () => console.warn("WebRTC connection was destroyed"), + }); + }, + }); }, + // async closePC() { + // if (this.janusPeerConnection !== undefined) { + // const janusPeerConnection = toRaw(this.janusPeerConnection); + + // console.log("stopping PeerConnection"); + // janusPeerConnection.close(); + // this.$patch({ janusPeerConnection: undefined }); + // } + // }, + // async trickle(event: any) { + // const { candidate } = event; + // if (this.janusStreamingPluginHandle === undefined) { + // console.warn( + // "trickle() called with undefined janusStreamingPluginHandle" + // ); + // return; + // } + // const janusStreamingPluginHandle = toRaw(this.janusStreamingPluginHandle); + + // if (candidate === undefined) { + // janusStreamingPluginHandle.trickleComplete().catch((e: any) => { + // console.error("trickleComplete error", e); + // }); + // } else { + // janusStreamingPluginHandle.trickle(candidate).catch((e: any) => { + // console.error("trickle error", e); + // }); + // } + // }, + // async jsepAnswer(offer: any) { + // const cameraSettings = useCameraSettingsStore(); + // if (cameraSettings.settings === undefined) { + // await cameraSettings.loadSettings(); + // } + // const streamSettings = toRaw(cameraSettings.settings); + // const pc = new RTCPeerConnection({ + // iceServers: [ + // { + // urls: "stun:stun.l.google.com:19302", + // }, + // ], + // }); + // pc.onnegotiationneeded = (event) => + // console.log("pc.onnegotiationneeded", event); + // pc.onicecandidate = (event) => + // this.trickle({ candidate: event.candidate }); + // pc.oniceconnectionstatechange = () => { + // console.log( + // "pc.oniceconnectionstatechange => " + pc.iceConnectionState + // ); + // if ( + // pc.iceConnectionState === "failed" || + // pc.iceConnectionState === "closed" + // ) { + // console.warn("Stopping all streams and closing peer connection"); + // this.stopAllStreams(); + // this.closePC(); + // } + // }; + + // const merger = new VideoStreamMerger({ + // fps: streamSettings?.camera.framerate_n, + // height: streamSettings?.camera.height, + // width: streamSettings?.camera.width, + // } as ConstructorOptions); + // merger.start(); + // this.setVideoElement(merger.result); + + // pc.ontrack = (event) => { + // console.log("pc.ontrack", event); + + // event.track.onunmute = (evt) => { + // 
console.log("track.onunmute", evt); + // }; + // event.track.onmute = (evt) => { + // console.log("track.onmute", evt); + // }; + // event.track.onended = (evt) => { + // console.log("track.onended", evt); + // }; + // const videoStream = new MediaStream([event.track]); + + // // if showOverlay is true + // if (this.showOverlay) { + // const opts = { + // x: 0, // position of the topleft corner + // y: 0, + // width: merger.width, + // height: merger.height, + // mute: true, + // muted: true, // we don't want sound from the screen (if there is any) + // index: 0, + // draw: null as null | DrawFunction, + // } as AddStreamOptions; + // if (event.transceiver.mid !== undefined) { + // opts.index = parseInt( + // event.transceiver.mid?.replace("v", "") as string + // ); + + // // remove black background from overlay video + // if (opts.index == 2) { + // opts.draw = (ctx, frame, done) => { + // ctx.globalCompositeOperation = "screen"; + // ctx.drawImage(frame, 0, 0, merger.width, merger.height); + // done(); + // }; + // } + // } + // console.log("Merging stream with opts:", opts, event); + // merger.addStream(videoStream, opts); + // } + // // otherwise, just add main video stream and ignore bounding box track + // else if (event.transceiver.mid == "v1") { + // console.log("Adding stream with mid v1"); + // merger.addStream(videoStream, undefined); + // } + // }; + + // this.$patch({ janusPeerConnection: pc }); + // await pc.setRemoteDescription(offer); + // console.log("set remote sdp OK"); + // const answer = await pc.createAnswer(); + // console.log("create answer OK"); + // pc.setLocalDescription(answer); + // console.log("set local sdp OK"); + // return answer; + // }, async setVideoElement(mediaStream: any) { if (!mediaStream) { return; @@ -325,47 +404,47 @@ export const useJanusStore = defineStore({ console.error("Error setting video player.play()", e); }); }, - async startJanusStream(showOverlay: boolean) { - console.log( - "WebRTC adaptor detected browser: ", - adapter.browserDetails.browser - ); - console.log( - "WebRTC adaptor detected version: ", - adapter.browserDetails.version - ); + // async startJanusStream(showOverlay: boolean) { + // console.log( + // "WebRTC adaptor detected browser: ", + // adapter.browserDetails.browser + // ); + // console.log( + // "WebRTC adaptor detected version: ", + // adapter.browserDetails.version + // ); - this.$patch({ showOverlay }); - if (this.selectedStream == undefined) { - console.warn( - "startJanusStream() was called, but no stream is selected" - ); - return; - } + // this.$patch({ showOverlay }); + // if (this.selectedStream == undefined) { + // console.warn( + // "startJanusStream() was called, but no stream is selected" + // ); + // return; + // } - this.$patch({ status: ConnectionStatus.ConnectionLoading }); + // this.$patch({ status: ConnectionStatus.ConnectionLoading }); - const janusStreamingPluginHandle = toRaw(this.janusStreamingPluginHandle); - const media = toRaw(this.selectedStream.media); - const watchdata = { - id: toRaw(this.selectedStream.id), - media, - }; - console.log("Sending watchdata", watchdata); - const { jsep, _restart = false } = await janusStreamingPluginHandle.watch( - watchdata - ); - console.log(`Received offer`, jsep); + // const janusStreamingPluginHandle = toRaw(this.janusStreamingPluginHandle); + // const media = toRaw(this.selectedStream.media); + // const watchdata = { + // id: toRaw(this.selectedStream.id), + // media, + // }; + // console.log("Sending watchdata", watchdata); + // const { jsep, 
_restart = false } = await janusStreamingPluginHandle.watch( + // watchdata + // ); + // console.log(`Received offer`, jsep); - const answer = await this.jsepAnswer(jsep); - const { status, id } = await janusStreamingPluginHandle.start({ - jsep: answer, - }); - console.log( - `Start mountpoint: ${id} response sent with status ${status}` - ); - this.$patch({ mountpoint: id }); - }, + // const answer = await this.jsepAnswer(jsep); + // const { status, id } = await janusStreamingPluginHandle.start({ + // jsep: answer, + // }); + // console.log( + // `Start mountpoint: ${id} response sent with status ${status}` + // ); + // this.$patch({ mountpoint: id }); + // }, }, }); diff --git a/src/stores/video.ts b/src/stores/video.ts index c3b67bc..1bf2290 100644 --- a/src/stores/video.ts +++ b/src/stores/video.ts @@ -321,10 +321,9 @@ export const useVideoStore = defineStore({ await printNannyVisionService.startService(); // show message indicating printnanny-vision.service was restarted const janusStore = useJanusStore(); - - await janusStore.connectJanus(); - janusStore.selectJanusStreamByPort(); - janusStore.startJanusStream(toRaw(this.showOverlay)); + janusStore.connectJanus(); + // janusStore.selectJanusStreamByPort(); + // janusStore.startJanusStream(toRaw(this.showOverlay)); }, async stopWebrtcStream() { this.$patch({ diff --git a/src/vendor/janus.d.ts b/src/vendor/janus.d.ts new file mode 100644 index 0000000..b391adb --- /dev/null +++ b/src/vendor/janus.d.ts @@ -0,0 +1,404 @@ +declare global { + const jQuery: any; +} + +declare namespace JanusJS { + interface Dependencies { + adapter: any; + WebSocket: (server: string, protocol: string) => WebSocket; + isArray: (array: any) => array is Array; + extension: ChromeExtension; + httpAPICall: (url: string, options: HttpApiCallOption) => void; + } + + interface DefaultDependencies extends Dependencies { + fetch: typeof fetch; + Promise: PromiseConstructorLike; + } + + interface OldDependencies extends Dependencies { + jQuery: typeof jQuery; + } + + interface DependenciesResult { + adapter: any; + newWebSocket: (server: string, protocol: string) => WebSocket; + isArray: (array: any) => array is Array; + extension: ChromeExtension; + httpAPICall: (url: string, options: HttpApiCallOption) => void; + } + + type ChromeExtension = { + cache?: { [key in string]: GetScreenCallback }; + extensionId: string; + isInstalled: () => boolean; + getScreen: (callback: GetScreenCallback) => void; + init: () => void; + }; + + type GetScreenCallback = (error?, sourceId?) 
=> void; + + type HttpApiCallOption = { + async: boolean; + verb: string; + body: JanusRequest; + timeout: number; + withCredentials: boolean; + success: (result: unknown) => void; + error: (error: string, reason?: unknown) => void; + }; + + type JanusRequest = { + plugin?: string; + token?: string; + apisecret?: string; + session_id?: number; + handle_id?: number; + opaque_id?: string; + loop_index?: number; + janus: string; + transaction: string; + body?: any; + jsep?: JSEP; + }; + + enum DebugLevel { + Trace = "trace", + vDebug = "vdebug", + Debug = "debug", + Log = "log", + Warning = "warn", + Error = "error", + } + + interface JSEP { + e2ee?: boolean; + sdp?: string; + type?: string; + rid_order?: "hml" | "lmh"; + force_relay?: boolean; + } + + interface InitOptions { + debug?: boolean | "all" | DebugLevel[]; + callback?: Function; + dependencies?: DependenciesResult; + } + + interface ConstructorOptions { + server: string | string[]; + iceServers?: RTCIceServer[]; + ipv6?: boolean; + withCredentials?: boolean; + max_poll_events?: number; + destroyOnUnload?: boolean; + token?: string; + apisecret?: string; + success?: Function; + error?: (error: any) => void; + destroyed?: Function; + iceTransportPolicy?: RTCIceTransportPolicy; + bundlePolicy?: RTCBundlePolicy; + keepAlivePeriod?: number; + longPollTimeout?: number; + } + + interface ReconnectOptions { + success?: Function; + error?: (error: string) => void; + } + + interface DestroyOptions { + cleanupHandles?: boolean; + notifyDestroyed?: boolean; + unload?: boolean; + success?: () => void; + error?: (error: string) => void; + } + + interface GetInfoOptions { + success?: (info: any) => void; + error?: (error: string) => void; + } + + enum MessageType { + Recording = "recording", + Starting = "starting", + Started = "started", + Stopped = "stopped", + SlowLink = "slow_link", + Preparing = "preparing", + Refreshing = "refreshing", + } + + interface Message { + result?: { + status: MessageType; + id?: string; + uplink?: number; + }; + error?: string; + [key: string]: any; + } + + interface PluginCallbacks { + dataChannelOptions?: RTCDataChannelInit; + success?: (handle: PluginHandle) => void; + error?: (error: string) => void; + consentDialog?: (on: boolean) => void; + webrtcState?: (isConnected: boolean) => void; + iceState?: ( + state: "connected" | "failed" | "disconnected" | "closed" + ) => void; + mediaState?: ( + medium: "audio" | "video", + receiving: boolean, + mid?: number + ) => void; + slowLink?: (uplink: boolean, lost: number, mid: string) => void; + onmessage?: (message: Message, jsep?: JSEP) => void; + onlocaltrack?: (track: MediaStreamTrack, on: boolean) => void; + onremotetrack?: (track: MediaStreamTrack, mid: string, on: boolean) => void; + ondataopen?: Function; + ondata?: Function; + oncleanup?: Function; + ondetached?: Function; + } + + interface PluginOptions extends PluginCallbacks { + plugin: string; + opaqueId?: string; + token?: string; + loopIndex?: number; + } + + interface OfferParams { + media?: { + audioSend?: boolean; + audioRecv?: boolean; + videoSend?: boolean; + videoRecv?: boolean; + audio?: boolean | { deviceId: string }; + video?: + | boolean + | { deviceId: string } + | "lowres" + | "lowres-16:9" + | "stdres" + | "stdres-16:9" + | "hires" + | "hires-16:9"; + data?: boolean; + failIfNoAudio?: boolean; + failIfNoVideo?: boolean; + screenshareFrameRate?: number; + }; + trickle?: boolean; + stream?: MediaStream; + success: Function; + error: (error: any) => void; + } + + interface PluginMessage { + 
message: { + request: string; + [otherProps: string]: any; + }; + jsep?: JSEP; + success?: (data?: any) => void; + error?: (error: string) => void; + } + + interface WebRTCInfo { + bitrate: { + bsbefore: string | null; + bsnow: string | null; + timer: string | null; + tsbefore: string | null; + tsnow: string | null; + value: string | null; + }; + dataChannel: { [key in string]: RTCDataChannel }; + dataChannelOptions: RTCDataChannelInit; + + dtmfSender: RTCDTMFSender; + iceDone: boolean; + mediaConstraints: any; + mySdp: { + sdp: string; + type: string; + }; + myStream: MediaStream; + pc: RTCPeerConnection; + receiverTransforms: { + audio: TransformStream; + video: TransformStream; + }; + remoteSdp: string; + remoteStream: MediaStream; + senderTransforms: { + audio: TransformStream; + video: TransformStream; + }; + started: boolean; + streamExternal: boolean; + trickle: boolean; + volume: { + value: number; + timer: number; + }; + + sdpSent: boolean; + insertableStreams?: any; + candidates: RTCIceCandidateInit[]; + } + + type PluginCreateAnswerParam = { + jsep: JSEP; + media: { audioSend: any; videoSend: any }; + success?: (data: JSEP) => void; + error?: (error: string) => void; + }; + + type PluginHandleRemoteJsepParam = { + jsep: JSEP; + success?: (data: JSEP) => void; + error?: (error: string) => void; + }; + + type PluginReplaceTracksParam = { + tracks: TrackOption[]; + success?: (data: unknown) => void; + error?: (error: string) => void; + }; + + type TrackOption = { + add: boolean; + replace: boolean; + remove: boolean; + type: "video" | "screen" | "audio" | "data"; + mid: string; + capture: boolean | MediaStreamTrack; + recv: boolean; + group: "default" | string; + gumGroup: TrackOption["group"]; + simulcast: boolean; + svc: string; + simulcastMaxBitrates: { + low: number; + medium: number; + high: number; + }; + sendEncodings: RTCRtpEncodingParameters; + framerate: number; + bitrate: number; + dontStop: boolean; + transforms: { + sender: ReadableWritablePair; + receiver: ReadableWritablePair; + }; + }; + + type PluginDtmfParam = { + dtmf: Dtmf; + success?: (data: unknown) => void; + error?: (error: string) => void; + }; + + type Dtmf = { + tones: string; + duration: number; + gap: number; + }; + + type PluginDataParam = { + success?: (data: unknown) => void; + error?: (error: string) => void; + }; + + type TrackDesc = { + mid?: string; + type?: string; + id?: string; + label?: string; + }; + + interface DetachOptions { + success?: () => void; + error?: (error: string) => void; + noRequest?: boolean; + } + + interface PluginHandle { + plugin: string; + id: string; + token?: string; + detached: boolean; + webrtcStuff: WebRTCInfo; + getId(): string; + getPlugin(): string; + getVolume(mid: string, callback: (result: number) => void): void; + getRemoteVolume(mid: string, callback: (result: number) => void): void; + getLocalVolume(mid: string, callback: (result: number) => void): void; + isAudioMuted(): boolean; + muteAudio(): void; + unmuteAudio(): void; + isVideoMuted(): boolean; + muteVideo(): void; + unmuteVideo(): void; + getBitrate(): string; + setMaxBitrate(bitrate: number): void; + send(message: PluginMessage): void; + data(params: PluginDataParam): void; + dtmf(params: PluginDtmfParam): void; + createOffer(params: OfferParams): void; + createAnswer(params: PluginCreateAnswerParam): void; + handleRemoteJsep(params: PluginHandleRemoteJsepParam): void; + replaceTracks(params: PluginReplaceTracksParam): void; + getLocalTracks(): TrackDesc[]; + getRemoteTracks(): TrackDesc[]; + 
hangup(sendRequest?: boolean): void; + detach(params?: DetachOptions): void; + } + + class Janus { + static webRTCAdapter: any; + static safariVp8: boolean; + static useDefaultDependencies( + deps?: Partial + ): DependenciesResult; + static useOldDependencies( + deps?: Partial + ): DependenciesResult; + static init(options: InitOptions): void; + static isWebrtcSupported(): boolean; + static debug(...args: any[]): void; + static log(...args: any[]): void; + static warn(...args: any[]): void; + static error(...args: any[]): void; + static randomString(length: number): string; + static attachMediaStream( + element: HTMLMediaElement, + stream: MediaStream + ): void; + static reattachMediaStream( + to: HTMLMediaElement, + from: HTMLMediaElement + ): void; + + static stopAllTracks(stream: MediaStream): void; + + constructor(options: ConstructorOptions); + + attach(options: PluginOptions): void; + getServer(): string; + isConnected(): boolean; + reconnect(callbacks: ReconnectOptions): void; + getSessionId(): number; + getInfo(callbacks: GetInfoOptions): void; + destroy(callbacks: DestroyOptions): void; + } +} + +export default JanusJS.Janus; +export { JanusJS }; diff --git a/src/vendor/janus.js b/src/vendor/janus.js new file mode 100644 index 0000000..03ce9c8 --- /dev/null +++ b/src/vendor/janus.js @@ -0,0 +1,3171 @@ +/* eslint-disable */ +/* + * Module shim for rollup.js to work with. + * Simply re-export Janus from janus.js, the real 'magic' is in the rollup config. + * + * Since this counts as 'autogenerated' code, ESLint is instructed to ignore the contents of this file when linting your project. + */ + +"use strict"; +import adapter from "webrtc-adapter"; + +/* + The MIT License (MIT) + + Copyright (c) 2016 Meetecho + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +// List of sessions +Janus.sessions = {}; + +Janus.isExtensionEnabled = function() { + if(navigator.mediaDevices && navigator.mediaDevices.getDisplayMedia) { + // No need for the extension, getDisplayMedia is supported + return true; + } + if(window.navigator.userAgent.match('Chrome')) { + let chromever = parseInt(window.navigator.userAgent.match(/Chrome\/(.*) /)[1], 10); + let maxver = 33; + if(window.navigator.userAgent.match('Linux')) + maxver = 35; // "known" crash in chrome 34 and 35 on linux + if(chromever >= 26 && chromever <= maxver) { + // Older versions of Chrome don't support this extension-based approach, so lie + return true; + } + return Janus.extension.isInstalled(); + } else { + // Firefox and others, no need for the extension (but this doesn't mean it will work) + return true; + } +}; + +var defaultExtension = { + // Screensharing Chrome Extension ID + extensionId: 'hapfgfdkleiggjjpfpenajgdnfckjpaj', + isInstalled: function() { return document.querySelector('#janus-extension-installed') !== null; }, + getScreen: function (callback) { + let pending = window.setTimeout(function () { + let error = new Error('NavigatorUserMediaError'); + error.name = 'The required Chrome extension is not installed: click here to install it. (NOTE: this will need you to refresh the page)'; + return callback(error); + }, 1000); + this.cache[pending] = callback; + window.postMessage({ type: 'janusGetScreen', id: pending }, '*'); + }, + init: function () { + let cache = {}; + this.cache = cache; + // Wait for events from the Chrome Extension + window.addEventListener('message', function (event) { + if(event.origin != window.location.origin) + return; + if(event.data.type == 'janusGotScreen' && cache[event.data.id]) { + let callback = cache[event.data.id]; + delete cache[event.data.id]; + if(event.data.sourceId === '') { + // user canceled + let error = new Error('NavigatorUserMediaError'); + error.name = 'You cancelled the request for permission, giving up...'; + callback(error); + } else { + callback(null, event.data.sourceId); + } + } else if(event.data.type == 'janusGetScreenPending') { + console.log('clearing ', event.data.id); + window.clearTimeout(event.data.id); + } + }); + } +}; + +Janus.useDefaultDependencies = function (deps) { + let f = (deps && deps.fetch) || fetch; + let p = (deps && deps.Promise) || Promise; + let socketCls = (deps && deps.WebSocket) || WebSocket; + + return { + newWebSocket: function(server, proto) { return new socketCls(server, proto); }, + extension: (deps && deps.extension) || defaultExtension, + isArray: function(arr) { return Array.isArray(arr); }, + webRTCAdapter: (deps && deps.adapter) || adapter, + httpAPICall: function(url, options) { + let fetchOptions = { + method: options.verb, + headers: { + 'Accept': 'application/json, text/plain, */*' + }, + cache: 'no-cache' + }; + if(options.verb === "POST") { + fetchOptions.headers['Content-Type'] = 'application/json'; + } + if(typeof options.withCredentials !== 'undefined') { + fetchOptions.credentials = options.withCredentials === true ? 'include' : (options.withCredentials ? options.withCredentials : 'omit'); + } + if(options.body) { + fetchOptions.body = JSON.stringify(options.body); + } + + let fetching = f(url, fetchOptions).catch(function(error) { + return p.reject({message: 'Probably a network error, is the server down?', error: error}); + }); + + /* + * fetch() does not natively support timeouts. 
+ * Work around this by starting a timeout manually, and racing it agains the fetch() to see which thing resolves first. + */ + + if(options.timeout) { + let timeout = new p(function(resolve, reject) { + let timerId = setTimeout(function() { + clearTimeout(timerId); + return reject({message: 'Request timed out', timeout: options.timeout}); + }, options.timeout); + }); + fetching = p.race([fetching, timeout]); + } + + fetching.then(function(response) { + if(response.ok) { + if(typeof(options.success) === typeof(Janus.noop)) { + return response.json().then(function(parsed) { + try { + options.success(parsed); + } catch(error) { + Janus.error('Unhandled httpAPICall success callback error', error); + } + }, function(error) { + return p.reject({message: 'Failed to parse response body', error: error, response: response}); + }); + } + } + else { + return p.reject({message: 'API call failed', response: response}); + } + }).catch(function(error) { + if(typeof(options.error) === typeof(Janus.noop)) { + options.error(error.message || '<< internal error >>', error); + } + }); + + return fetching; + } + } +}; + +Janus.useOldDependencies = function (deps) { + let jq = (deps && deps.jQuery) || jQuery; + let socketCls = (deps && deps.WebSocket) || WebSocket; + return { + newWebSocket: function(server, proto) { return new socketCls(server, proto); }, + isArray: function(arr) { return jq.isArray(arr); }, + extension: (deps && deps.extension) || defaultExtension, + webRTCAdapter: (deps && deps.adapter) || adapter, + httpAPICall: function(url, options) { + let payload = (typeof options.body !== 'undefined') ? { + contentType: 'application/json', + data: JSON.stringify(options.body) + } : {}; + let credentials = (typeof options.withCredentials !== 'undefined') ? {xhrFields: {withCredentials: options.withCredentials}} : {}; + + return jq.ajax(jq.extend(payload, credentials, { + url: url, + type: options.verb, + cache: false, + dataType: 'json', + async: options.async, + timeout: options.timeout, + success: function(result) { + if(typeof(options.success) === typeof(Janus.noop)) { + options.success(result); + } + }, + error: function(xhr, status, err) { + if(typeof(options.error) === typeof(Janus.noop)) { + options.error(status, err); + } + } + })); + } + }; +}; + +// Helper function to convert a deprecated media object to a tracks array +Janus.mediaToTracks = function(media) { + let tracks = []; + if(!media) { + // Default is bidirectional audio and video, using default devices + tracks.push({ type: 'audio', capture: true, recv: true }); + tracks.push({ type: 'video', capture: true, recv: true }); + } else { + if(!media.keepAudio && media.audio !== false && ((typeof media.audio === 'undefined') || media.audio || media.audioSend || media.audioRecv || + media.addAudio || media.replaceAudio || media.removeAudio)) { + // We may need an audio track + let track = { type: 'audio' }; + if(media.removeAudio) { + track.remove = true; + } else { + if(media.addAudio) + track.add = true; + else if(media.replaceAudio) + track.replace = true; + // Check if we need to capture an audio device + if(media.audioSend !== false) + track.capture = media.audio || true; + // Check if we need to receive audio + if(media.audioRecv !== false) + track.recv = true; + } + // Add an audio track if needed + if(track.remove || track.capture || track.recv) + tracks.push(track); + } + if(!media.keepVideo && media.video !== false && ((typeof media.video === 'undefined') || media.video || media.videoSend || media.videoRecv || + media.addVideo || 
media.replaceVideo || media.removeVideo)) { + // We may need a video track + let track = { type: 'video' }; + if(media.removeVideo) { + track.remove = true; + } else { + if(media.addVideo) + track.add = true; + else if(media.replaceVideo) + track.replace = true; + // Check if we need to capture a video device + if(media.videoSend !== false) { + track.capture = media.video || true; + if(['screen', 'window', 'desktop'].includes(track.capture)) { + // Change the type to 'screen' + track.type = 'screen'; + track.capture = { video: {} }; + // Check if there's constraints + if(media.screenshareFrameRate) + track.capture.frameRate = media.screenshareFrameRate; + if(media.screenshareHeight) + track.capture.height = media.screenshareHeight; + if(media.screenshareWidth) + track.capture.width = media.screenshareWidth; + } + } + // Check if we need to receive video + if(media.videoRecv !== false) + track.recv = true; + } + // Add a video track if needed + if(track.remove || track.capture || track.recv) + tracks.push(track); + } + if(media.data) { + // We need a data channel + tracks.push({ type: 'data' }); + } + } + // Done + return tracks; +}; + +// Helper function to convert a track object to a set of constraints +Janus.trackConstraints = function(track) { + let constraints = {}; + if(!track || !track.capture) + return constraints; + if(track.type === 'audio') { + // Just put the capture part in the constraints + constraints.audio = track.capture; + } else if(track.type === 'video') { + // Check if one of the keywords was passed + if((track.simulcast || track.svc) && track.capture === true) + track.capture = 'hires'; + if(track.capture === true || typeof track.capture === 'object') { + // Use the provided capture object as video constraint + constraints.video = track.capture; + } else { + let width = 0; + let height = 0; + if(track.capture === 'lowres') { + // Small resolution, 4:3 + width = 320; + height = 240; + } else if(track.capture === 'lowres-16:9') { + // Small resolution, 16:9 + width = 320; + height = 180; + } else if(track.capture === 'hires' || track.capture === 'hires-16:9' || track.capture === 'hdres') { + // High(HD) resolution is only 16:9 + width = 1280; + height = 720; + } else if(track.capture === 'fhdres') { + // Full HD resolution is only 16:9 + width = 1920; + height = 1080; + } else if(track.capture === '4kres') { + // 4K resolution is only 16:9 + width = 3840; + height = 2160; + } else if(track.capture === 'stdres') { + // Normal resolution, 4:3 + width = 640; + height = 480; + } else if(track.capture === 'stdres-16:9') { + // Normal resolution, 16:9 + width = 640; + height = 360; + } else { + Janus.log('Default video setting is stdres 4:3'); + width = 640; + height = 480; + } + constraints.video = { + width: { ideal: width }, + height: { ideal: height } + }; + } + } else if(track.type === 'screen') { + // Use the provided capture object as video constraint + constraints.video = track.capture; + } + return constraints; +}; + +Janus.noop = function() {}; + +Janus.dataChanDefaultLabel = "JanusDataChannel"; + +// Note: in the future we may want to change this, e.g., as was +// attempted in https://github.com/meetecho/janus-gateway/issues/1670 +Janus.endOfCandidates = null; + +// Stop all tracks from a given stream +Janus.stopAllTracks = function(stream) { + try { + // Try a MediaStreamTrack.stop() for each track + let tracks = stream.getTracks(); + for(let mst of tracks) { + Janus.log(mst); + if(mst && mst.dontStop !== true) { + mst.stop(); + } + } + } catch(e) { + // Do nothing 
if this fails + } +}; + +// Initialization +Janus.init = function(options) { + options = options || {}; + options.callback = (typeof options.callback == "function") ? options.callback : Janus.noop; + if(Janus.initDone) { + // Already initialized + options.callback(); + } else { + if(typeof console.log == "undefined") { + console.log = function() {}; + } + // Console logging (all debugging disabled by default) + Janus.trace = Janus.noop; + Janus.debug = Janus.noop; + Janus.vdebug = Janus.noop; + Janus.log = Janus.noop; + Janus.warn = Janus.noop; + Janus.error = Janus.noop; + if(options.debug === true || options.debug === "all") { + // Enable all debugging levels + Janus.trace = console.trace.bind(console); + Janus.debug = console.debug.bind(console); + Janus.vdebug = console.debug.bind(console); + Janus.log = console.log.bind(console); + Janus.warn = console.warn.bind(console); + Janus.error = console.error.bind(console); + } else if(Array.isArray(options.debug)) { + for(let d of options.debug) { + switch(d) { + case "trace": + Janus.trace = console.trace.bind(console); + break; + case "debug": + Janus.debug = console.debug.bind(console); + break; + case "vdebug": + Janus.vdebug = console.debug.bind(console); + break; + case "log": + Janus.log = console.log.bind(console); + break; + case "warn": + Janus.warn = console.warn.bind(console); + break; + case "error": + Janus.error = console.error.bind(console); + break; + default: + console.error("Unknown debugging option '" + d + "' (supported: 'trace', 'debug', 'vdebug', 'log', warn', 'error')"); + break; + } + } + } + Janus.log("Initializing library"); + + let usedDependencies = options.dependencies || Janus.useDefaultDependencies(); + Janus.isArray = usedDependencies.isArray; + Janus.webRTCAdapter = usedDependencies.webRTCAdapter; + Janus.httpAPICall = usedDependencies.httpAPICall; + Janus.newWebSocket = usedDependencies.newWebSocket; + Janus.extension = usedDependencies.extension; + Janus.extension.init(); + + // Helper method to enumerate devices + Janus.listDevices = function(callback, config) { + callback = (typeof callback == "function") ? 
callback : Janus.noop; + if(!config) + config = { audio: true, video: true }; + if(Janus.isGetUserMediaAvailable()) { + navigator.mediaDevices.getUserMedia(config) + .then(function(stream) { + navigator.mediaDevices.enumerateDevices().then(function(devices) { + Janus.debug(devices); + callback(devices); + // Get rid of the now useless stream + Janus.stopAllTracks(stream); + }); + }) + .catch(function(err) { + Janus.error(err); + callback([]); + }); + } else { + Janus.warn("navigator.mediaDevices unavailable"); + callback([]); + } + }; + // Helper methods to attach/reattach a stream to a video element (previously part of adapter.js) + Janus.attachMediaStream = function(element, stream) { + try { + element.srcObject = stream; + } catch (e) { + try { + element.src = URL.createObjectURL(stream); + } catch (e) { + Janus.error("Error attaching stream to element", e); + } + } + }; + Janus.reattachMediaStream = function(to, from) { + try { + to.srcObject = from.srcObject; + } catch (e) { + try { + to.src = from.src; + } catch (e) { + Janus.error("Error reattaching stream to element", e); + } + } + }; + // Detect tab close: make sure we don't loose existing onbeforeunload handlers + // (note: for iOS we need to subscribe to a different event, 'pagehide', see + // https://gist.github.com/thehunmonkgroup/6bee8941a49b86be31a787fe8f4b8cfe) + let iOS = ['iPad', 'iPhone', 'iPod'].indexOf(navigator.platform) >= 0; + let eventName = iOS ? 'pagehide' : 'beforeunload'; + let oldOBF = window["on" + eventName]; + window.addEventListener(eventName, function() { + Janus.log("Closing window"); + for(let s in Janus.sessions) { + if(Janus.sessions[s] && Janus.sessions[s].destroyOnUnload) { + Janus.log("Destroying session " + s); + Janus.sessions[s].destroy({unload: true, notifyDestroyed: false}); + } + } + if(oldOBF && typeof oldOBF == "function") { + oldOBF(); + } + }); + // If this is a Safari Technology Preview, check if VP8 is supported + Janus.safariVp8 = false; + if(Janus.webRTCAdapter.browserDetails.browser === 'safari' && + Janus.webRTCAdapter.browserDetails.version >= 605) { + // Let's see if RTCRtpSender.getCapabilities() is there + if(RTCRtpSender && RTCRtpSender.getCapabilities && RTCRtpSender.getCapabilities("video") && + RTCRtpSender.getCapabilities("video").codecs && RTCRtpSender.getCapabilities("video").codecs.length) { + for(let codec of RTCRtpSender.getCapabilities("video").codecs) { + if(codec && codec.mimeType && codec.mimeType.toLowerCase() === "video/vp8") { + Janus.safariVp8 = true; + break; + } + } + if(Janus.safariVp8) { + Janus.log("This version of Safari supports VP8"); + } else { + Janus.warn("This version of Safari does NOT support VP8: if you're using a Technology Preview, " + + "try enabling the 'WebRTC VP8 codec' setting in the 'Experimental Features' Develop menu"); + } + } else { + // We do it in a very ugly way, as there's no alternative... 
+ // We create a PeerConnection to see if VP8 is in an offer + let testpc = new RTCPeerConnection({}); + testpc.createOffer({offerToReceiveVideo: true}).then(function(offer) { + Janus.safariVp8 = offer.sdp.indexOf("VP8") !== -1; + if(Janus.safariVp8) { + Janus.log("This version of Safari supports VP8"); + } else { + Janus.warn("This version of Safari does NOT support VP8: if you're using a Technology Preview, " + + "try enabling the 'WebRTC VP8 codec' setting in the 'Experimental Features' Develop menu"); + } + testpc.close(); + testpc = null; + }); + } + } + Janus.initDone = true; + options.callback(); + } +}; + +// Helper method to check whether WebRTC is supported by this browser +Janus.isWebrtcSupported = function() { + return !!window.RTCPeerConnection; +}; +// Helper method to check whether devices can be accessed by this browser (e.g., not possible via plain HTTP) +Janus.isGetUserMediaAvailable = function() { + return navigator.mediaDevices && navigator.mediaDevices.getUserMedia; +}; + +// Helper method to create random identifiers (e.g., transaction) +Janus.randomString = function(len) { + let charSet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; + let randomString = ''; + for(let i=0; i 3) { + // Did we just lose the server? :-( + connected = false; + gatewayCallbacks.error("Lost connection to the server (is it down?)"); + return; + } + eventHandler(); + } + }); + } + + // Private event handler: this will trigger plugin callbacks, if set + function handleEvent(json, skipTimeout) { + retries = 0; + if(!websockets && typeof sessionId !== 'undefined' && sessionId !== null && skipTimeout !== true) + eventHandler(); + if(!websockets && Janus.isArray(json)) { + // We got an array: it means we passed a maxev > 1, iterate on all objects + for(let i=0; i data channel: ' + dcState); + if(dcState === 'open') { + // Any pending messages to send? + if(config.dataChannel[label].pending && config.dataChannel[label].pending.length > 0) { + Janus.log("Sending pending messages on <" + label + ">:", config.dataChannel[label].pending.length); + for(let data of config.dataChannel[label].pending) { + Janus.log("Sending data on data channel <" + label + ">"); + Janus.debug(data); + config.dataChannel[label].send(data); + } + config.dataChannel[label].pending = []; + } + // Notify the open data channel + pluginHandle.ondataopen(label, protocol); + } + }; + let onDataChannelError = function(error) { + Janus.error('Got error on data channel:', error); + // TODO + }; + if(!incoming) { + // FIXME Add options (ordered, maxRetransmits, etc.) + let dcoptions = config.dataChannelOptions; + if(dcprotocol) + dcoptions.protocol = dcprotocol; + config.dataChannel[dclabel] = config.pc.createDataChannel(dclabel, dcoptions); + } else { + // The channel was created by Janus + config.dataChannel[dclabel] = incoming; + } + config.dataChannel[dclabel].onmessage = onDataChannelMessage; + config.dataChannel[dclabel].onopen = onDataChannelStateChange; + config.dataChannel[dclabel].onclose = onDataChannelStateChange; + config.dataChannel[dclabel].onerror = onDataChannelError; + config.dataChannel[dclabel].pending = []; + if(pendingData) + config.dataChannel[dclabel].pending.push(pendingData); + } + + // Private method to send a data channel message + function sendData(handleId, callbacks) { + callbacks = callbacks || {}; + callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop; + callbacks.error = (typeof callbacks.error == "function") ? 
callbacks.error : Janus.noop; + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || !pluginHandle.webrtcStuff) { + Janus.warn("Invalid handle"); + callbacks.error("Invalid handle"); + return; + } + let config = pluginHandle.webrtcStuff; + let data = callbacks.text || callbacks.data; + if(!data) { + Janus.warn("Invalid data"); + callbacks.error("Invalid data"); + return; + } + let label = callbacks.label ? callbacks.label : Janus.dataChanDefaultLabel; + if(!config.dataChannel[label]) { + // Create new data channel and wait for it to open + createDataChannel(handleId, label, callbacks.protocol, false, data, callbacks.protocol); + callbacks.success(); + return; + } + if(config.dataChannel[label].readyState !== "open") { + config.dataChannel[label].pending.push(data); + callbacks.success(); + return; + } + Janus.log("Sending data on data channel <" + label + ">"); + Janus.debug(data); + config.dataChannel[label].send(data); + callbacks.success(); + } + + // Private method to send a DTMF tone + function sendDtmf(handleId, callbacks) { + callbacks = callbacks || {}; + callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop; + callbacks.error = (typeof callbacks.error == "function") ? callbacks.error : Janus.noop; + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || !pluginHandle.webrtcStuff) { + Janus.warn("Invalid handle"); + callbacks.error("Invalid handle"); + return; + } + let config = pluginHandle.webrtcStuff; + if(!config.dtmfSender) { + // Create the DTMF sender the proper way, if possible + if(config.pc) { + let senders = config.pc.getSenders(); + let audioSender = senders.find(function(sender) { + return sender.track && sender.track.kind === 'audio'; + }); + if(!audioSender) { + Janus.warn("Invalid DTMF configuration (no audio track)"); + callbacks.error("Invalid DTMF configuration (no audio track)"); + return; + } + config.dtmfSender = audioSender.dtmf; + if(config.dtmfSender) { + Janus.log("Created DTMF Sender"); + config.dtmfSender.ontonechange = function(tone) { Janus.debug("Sent DTMF tone: " + tone.tone); }; + } + } + if(!config.dtmfSender) { + Janus.warn("Invalid DTMF configuration"); + callbacks.error("Invalid DTMF configuration"); + return; + } + } + let dtmf = callbacks.dtmf; + if(!dtmf) { + Janus.warn("Invalid DTMF parameters"); + callbacks.error("Invalid DTMF parameters"); + return; + } + let tones = dtmf.tones; + if(!tones) { + Janus.warn("Invalid DTMF string"); + callbacks.error("Invalid DTMF string"); + return; + } + let duration = (typeof dtmf.duration === 'number') ? dtmf.duration : 500; // We choose 500ms as the default duration for a tone + let gap = (typeof dtmf.gap === 'number') ? dtmf.gap : 50; // We choose 50ms as the default gap between tones + Janus.debug("Sending DTMF string " + tones + " (duration " + duration + "ms, gap " + gap + "ms)"); + config.dtmfSender.insertDTMF(tones, duration, gap); + callbacks.success(); + } + + // Private method to destroy a plugin handle + function destroyHandle(handleId, callbacks) { + callbacks = callbacks || {}; + callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop; + callbacks.error = (typeof callbacks.error == "function") ? 
callbacks.error : Janus.noop; + let noRequest = (callbacks.noRequest === true); + Janus.log("Destroying handle " + handleId + " (only-locally=" + noRequest + ")"); + cleanupWebrtc(handleId); + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || pluginHandle.detached) { + // Plugin was already detached by Janus, calling detach again will return a handle not found error, so just exit here + delete pluginHandles[handleId]; + callbacks.success(); + return; + } + pluginHandle.detached = true; + if(noRequest) { + // We're only removing the handle locally + delete pluginHandles[handleId]; + callbacks.success(); + return; + } + if(!connected) { + Janus.warn("Is the server down? (connected=false)"); + callbacks.error("Is the server down? (connected=false)"); + return; + } + let request = { "janus": "detach", "transaction": Janus.randomString(12) }; + if(pluginHandle.token) + request["token"] = pluginHandle.token; + if(apisecret) + request["apisecret"] = apisecret; + if(websockets) { + request["session_id"] = sessionId; + request["handle_id"] = handleId; + ws.send(JSON.stringify(request)); + delete pluginHandles[handleId]; + callbacks.success(); + return; + } + Janus.httpAPICall(server + "/" + sessionId + "/" + handleId, { + verb: 'POST', + withCredentials: withCredentials, + body: request, + success: function(json) { + Janus.log("Destroyed handle:"); + Janus.debug(json); + if(json["janus"] !== "success") { + Janus.error("Ooops: " + json["error"].code + " " + json["error"].reason); // FIXME + } + delete pluginHandles[handleId]; + callbacks.success(); + }, + error: function(textStatus, errorThrown) { + Janus.error(textStatus + ":", errorThrown); // FIXME + // We cleanup anyway + delete pluginHandles[handleId]; + callbacks.success(); + } + }); + } + + // WebRTC stuff + // Helper function to create a new PeerConnection, if we need one + function createPeerconnectionIfNeeded(handleId, callbacks) { + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || !pluginHandle.webrtcStuff) { + Janus.warn("Invalid handle"); + throw "Invalid handle"; + } + let config = pluginHandle.webrtcStuff; + if(config.pc) { + // Nothing to do, we have a PeerConnection already + return; + } + let pc_config = { + iceServers: iceServers, + iceTransportPolicy: iceTransportPolicy, + bundlePolicy: bundlePolicy + }; + pc_config.sdpSemantics = 'unified-plan'; + // Check if a sender or receiver transform has been provided + let insertableStreams = false; + if(callbacks.tracks) { + for(let track of callbacks.tracks) { + if(track.transforms && (track.transforms.sender || track.transforms.receiver)) { + insertableStreams = true; + break; + } + } + } + if(RTCRtpSender && (RTCRtpSender.prototype.createEncodedStreams || + (RTCRtpSender.prototype.createEncodedAudioStreams && + RTCRtpSender.prototype.createEncodedVideoStreams)) && insertableStreams) { + config.insertableStreams = true; + pc_config.forceEncodedAudioInsertableStreams = true; + pc_config.forceEncodedVideoInsertableStreams = true; + pc_config.encodedInsertableStreams = true; + } + Janus.log('Creating PeerConnection'); + config.pc = new RTCPeerConnection(pc_config); + Janus.debug(config.pc); + if(config.pc.getStats) { // FIXME + config.volume = {}; + config.bitrate.value = '0 kbits/sec'; + } + Janus.log('Preparing local SDP and gathering candidates (trickle=' + config.trickle + ')'); + config.pc.oniceconnectionstatechange = function() { + if(config.pc) + pluginHandle.iceState(config.pc.iceConnectionState); + }; + config.pc.onicecandidate = function(event) { 
+ if(!event.candidate || (event.candidate.candidate && event.candidate.candidate.indexOf('endOfCandidates') > 0)) { + Janus.log('End of candidates.'); + config.iceDone = true; + if(config.trickle === true) { + // Notify end of candidates + sendTrickleCandidate(handleId, { completed : true }); + } else { + // No trickle, time to send the complete SDP (including all candidates) + sendSDP(handleId, callbacks); + } + } else { + // JSON.stringify doesn't work on some WebRTC objects anymore + // See https://code.google.com/p/chromium/issues/detail?id=467366 + let candidate = { + candidate: event.candidate.candidate, + sdpMid: event.candidate.sdpMid, + sdpMLineIndex: event.candidate.sdpMLineIndex + }; + if(config.trickle === true) { + // Send candidate + sendTrickleCandidate(handleId, candidate); + } + } + }; + config.pc.ontrack = function(event) { + Janus.log('Handling Remote Track', event); + if(!event.streams) + return; + if(!event.track) + return; + // Notify about the new track event + let mid = event.transceiver ? event.transceiver.mid : event.track.id; + try { + pluginHandle.onremotetrack(event.track, mid, true, { reason: 'created' }); + } catch(e) { + Janus.error("Error calling onremotetrack", e); + } + if(event.track.onended) + return; + let trackMutedTimeoutId = null; + Janus.log('Adding onended callback to track:', event.track); + event.track.onended = function(ev) { + Janus.log('Remote track removed:', ev); + clearTimeout(trackMutedTimeoutId); + // Notify the application + let transceivers = config.pc ? config.pc.getTransceivers() : null; + let transceiver = transceivers ? transceivers.find( + t => t.receiver.track === ev.target) : null; + let mid = transceiver ? transceiver.mid : ev.target.id; + try { + pluginHandle.onremotetrack(ev.target, mid, false, { reason: 'ended' }); + } catch(e) { + Janus.error("Error calling onremotetrack on removal", e); + } + }; + event.track.onmute = function(ev) { + Janus.log('Remote track muted:', ev); + if(!trackMutedTimeoutId) { + trackMutedTimeoutId = setTimeout(function() { + Janus.log('Removing remote track'); + // Notify the application the track is gone + let transceivers = config.pc ? config.pc.getTransceivers() : null; + let transceiver = transceivers ? transceivers.find( + t => t.receiver.track === ev.target) : null; + let mid = transceiver ? transceiver.mid : ev.target.id; + try { + pluginHandle.onremotetrack(ev.target, mid, false, { reason: 'mute' } ); + } catch(e) { + Janus.error("Error calling onremotetrack on mute", e); + } + trackMutedTimeoutId = null; + // Chrome seems to raise mute events only at multiples of 834ms; + // we set the timeout to three times this value (rounded to 840ms) + }, 3 * 840); + } + }; + event.track.onunmute = function(ev) { + Janus.log('Remote track flowing again:', ev); + if(trackMutedTimeoutId != null) { + clearTimeout(trackMutedTimeoutId); + trackMutedTimeoutId = null; + } else { + try { + // Notify the application the track is back + let transceivers = config.pc ? config.pc.getTransceivers() : null; + let transceiver = transceivers ? transceivers.find( + t => t.receiver.track === ev.target) : null; + let mid = transceiver ? 
transceiver.mid : ev.target.id;
+ pluginHandle.onremotetrack(ev.target, mid, true, { reason: 'unmute' });
+ } catch(e) {
+ Janus.error("Error calling onremotetrack on unmute", e);
+ }
+ }
+ };
+ };
+ }
+
+ // Helper function used when creating either an offer or answer: it
+ // prepares what needs to be prepared, including creating a new
+ // PeerConnection (if needed) and updating the tracks configuration,
+ // before invoking the function to actually generate the offer/answer
+ async function prepareWebrtc(handleId, offer, callbacks) {
+ callbacks = callbacks || {};
+ callbacks.success = (typeof callbacks.success == "function") ? callbacks.success : Janus.noop;
+ callbacks.error = (typeof callbacks.error == "function") ? callbacks.error : webrtcError;
+ let jsep = callbacks.jsep;
+ if(offer && jsep) {
+ Janus.error("Provided a JSEP to a createOffer");
+ callbacks.error("Provided a JSEP to a createOffer");
+ return;
+ } else if(!offer && (!jsep || !jsep.type || !jsep.sdp)) {
+ Janus.error("A valid JSEP is required for createAnswer");
+ callbacks.error("A valid JSEP is required for createAnswer");
+ return;
+ }
+ // If the deprecated media was provided instead of tracks, translate it
+ if(callbacks.media && !callbacks.tracks) {
+ callbacks.tracks = Janus.mediaToTracks(callbacks.media);
+ if(callbacks.simulcast === true || callbacks.simulcast2 === true || callbacks.svc) {
+ // Find the video track and add simulcast/SVC info there
+ for(let track of callbacks.tracks) {
+ if(track.type === 'video') {
+ if(callbacks.simulcast === true || callbacks.simulcast2 === true)
+ track.simulcast = true;
+ else if(callbacks.svc)
+ track.svc = callbacks.svc;
+ break;
+ }
+ }
+ }
+ Janus.warn('Deprecated media object passed, use tracks instead. Automatically translated to:', callbacks.tracks);
+ }
+ // Check that callbacks.tracks is a valid array
+ if(callbacks.tracks && !Array.isArray(callbacks.tracks)) {
+ Janus.error("Tracks must be an array");
+ callbacks.error("Tracks must be an array");
+ return;
+ }
+ // Get the plugin handle
+ let pluginHandle = pluginHandles[handleId];
+ if(!pluginHandle || !pluginHandle.webrtcStuff) {
+ Janus.warn("Invalid handle");
+ callbacks.error("Invalid handle");
+ return;
+ }
+ let config = pluginHandle.webrtcStuff;
+ config.trickle = isTrickleEnabled(callbacks.trickle);
+ try {
+ // Create a PeerConnection, if needed
+ createPeerconnectionIfNeeded(handleId, callbacks);
+ if(offer) {
+ // Capture devices and setup tracks, if needed
+ await captureDevices(handleId, callbacks);
+ }
+ // Create offer or answer now (depending on the context)
+ if(!jsep) {
+ let offer = await createOffer(handleId, callbacks);
+ callbacks.success(offer);
+ } else {
+ await config.pc.setRemoteDescription(jsep);
+ Janus.log("Remote description accepted!");
+ config.remoteSdp = jsep.sdp;
+ // Any trickle candidate we cached?
+ if(config.candidates && config.candidates.length > 0) { + for(let i=0; i 0) { + for(let i=0; i (t.mid === track.mid && t.receiver.track.kind === kind)); + } else { + // Find the first track of this type + transceiver = config.pc.getTransceivers() + .find(t => (t.receiver.track.kind === kind)); + } + if(track.replace || track.remove) { + if(!transceiver) { + Janus.warn("Couldn't find a transceiver for track:", track); + continue; + } + if(!transceiver.sender) { + Janus.warn('No sender in the transceiver for track:', track); + continue; + } + sender = transceiver.sender; + } + if(answer && !transceiver) { + transceiver = config.pc.getTransceivers() + .find(t => (t.receiver.track.kind === kind)); + if(!transceiver) { + Janus.warn("Couldn't find a transceiver for track:", track); + continue; + } + } + // Capture the new track, if we need to + let nt = null, trackId = null; + if(track.remove) { + Janus.log('Removing track from PeerConnection', track); + trackId = sender.track ? sender.track.id : null; + await sender.replaceTrack(null); + } else if(track.capture) { + if(track.gumGroup && groups[track.gumGroup] && groups[track.gumGroup].stream) { + // We did a getUserMedia before already + let stream = groups[track.gumGroup].stream; + nt = (track.type === 'audio' ? stream.getAudioTracks()[0] : stream.getVideoTracks()[0]); + delete groups[track.gumGroup].stream; + delete groups[track.gumGroup]; + delete track.gumGroup; + } else if(track.capture instanceof MediaStreamTrack) { + // An external track was provided, use that + nt = track.capture; + } else { + if(!openedConsentDialog) { + openedConsentDialog = true; + pluginHandle.consentDialog(true); + } + let constraints = Janus.trackConstraints(track), stream = null; + if(track.type === 'audio' || track.type === 'video') { + // Use getUserMedia: check if we need to group audio and video together + if(track.gumGroup) { + let otherType = (track.type === 'audio' ? 'video' : 'audio'); + if(groups[track.gumGroup] && groups[track.gumGroup][otherType]) { + let otherTrack = groups[track.gumGroup][otherType]; + let otherConstraints = Janus.trackConstraints(otherTrack); + constraints[otherType] = otherConstraints[otherType]; + } + } + stream = await navigator.mediaDevices.getUserMedia(constraints); + if(track.gumGroup && constraints.audio && constraints.video) { + // We just performed a grouped getUserMedia, keep track of the + // stream so that we can immediately assign the track later + groups[track.gumGroup].stream = stream; + delete track.gumGroup; + } + } else { + // Use getDisplayMedia + stream = await navigator.mediaDevices.getDisplayMedia(constraints); + } + nt = (track.type === 'audio' ? 
stream.getAudioTracks()[0] : stream.getVideoTracks()[0]); + } + if(track.replace) { + // Replace the track + await sender.replaceTrack(nt); + // Update the transceiver direction + let newDirection = 'sendrecv'; + if(track.recv === false || transceiver.direction === 'inactive' || transceiver.direction === 'sendonly') + newDirection = 'sendonly'; + if(transceiver.setDirection) + transceiver.setDirection(newDirection); + else + transceiver.direction = newDirection; + } else { + // FIXME Add as a new track + if(!config.myStream) + config.myStream = new MediaStream(); + if(kind === 'audio' || (!track.simulcast && !track.svc)) { + sender = config.pc.addTrack(nt, config.myStream); + transceiver = config.pc.getTransceivers() + .find(t => (t.sender === sender)); + } else if(track.simulcast) { + if(Janus.webRTCAdapter.browserDetails.browser !== 'firefox') { + // Standard RID + Janus.log('Enabling rid-based simulcasting:', nt); + let maxBitrates = getMaxBitrates(track.simulcastMaxBitrates); + transceiver = config.pc.addTransceiver(nt, { + direction: 'sendrecv', + streams: [config.myStream], + sendEncodings: track.sendEncodings || [ + { rid: 'h', active: true, maxBitrate: maxBitrates.high }, + { rid: 'm', active: true, maxBitrate: maxBitrates.medium, scaleResolutionDownBy: 2 }, + { rid: 'l', active: true, maxBitrate: maxBitrates.low, scaleResolutionDownBy: 4 } + ] + }); + } else { + // Firefox-based RID, based on https://gist.github.com/voluntas/088bc3cc62094730647b + Janus.log('Enabling Simulcasting for Firefox (RID)'); + transceiver = config.pc.addTransceiver(nt, { + direction: 'sendrecv', + streams: [config.myStream] + }); + sender = transceiver ? transceiver.sender : null; + if(sender) { + let parameters = sender.getParameters(); + if(!parameters) + parameters = {}; + let maxBitrates = getMaxBitrates(track.simulcastMaxBitrates); + parameters.encodings = track.sendEncodings || [ + { rid: 'h', active: true, maxBitrate: maxBitrates.high }, + { rid: 'm', active: true, maxBitrate: maxBitrates.medium, scaleResolutionDownBy: 2 }, + { rid: 'l', active: true, maxBitrate: maxBitrates.low, scaleResolutionDownBy: 4 } + ]; + sender.setParameters(parameters); + } + } + } else { + Janus.log('Enabling SVC (' + track.svc + '):', nt); + transceiver = config.pc.addTransceiver(nt, { + direction: 'sendrecv', + streams: [config.myStream], + sendEncodings: [ + { scalabilityMode: track.svc } + ] + }); + } + if(!sender) + sender = transceiver ? 
transceiver.sender : null; + // Check if we need to override some settings + if(track.codec) { + if(Janus.webRTCAdapter.browserDetails.browser === 'firefox') { + Janus.warn('setCodecPreferences not supported in Firefox, ignoring codec for track:', track); + } else if(typeof track.codec !== 'string') { + Janus.warn('Invalid codec value, ignoring for track:', track); + } else { + let mimeType = kind + '/' + track.codec.toLowerCase(); + let codecs = RTCRtpReceiver.getCapabilities(kind).codecs.filter(function(codec) { + return codec.mimeType.toLowerCase() === mimeType; + }); + if(!codecs || codecs.length === 0) { + Janus.warn('Codec not supported in this browser for this track, ignoring:', track); + } else if(transceiver) { + try { + transceiver.setCodecPreferences(codecs); + } catch(err) { + Janus.warn('Failed enforcing codec for this ' + kind + ' track:', err); + } + } + } + } + if(track.bitrate) { + // Override maximum bitrate + if(track.simulcast || track.svc) { + Janus.warn('Ignoring bitrate for simulcast/SVC track, use sendEncodings for that'); + } else if(isNaN(track.bitrate) || track.bitrate < 0) { + Janus.warn('Ignoring invalid bitrate for track:', track); + } else if(sender) { + let params = sender.getParameters(); + if(!params || !params.encodings || params.encodings.length === 0) { + Janus.warn('No encodings in the sender parameters, ignoring bitrate for track:', track); + } else { + params.encodings[0].maxBitrate = track.bitrate; + await sender.setParameters(params); + } + } + } + if(kind === 'video' && track.framerate) { + // Override maximum framerate + if(track.simulcast || track.svc) { + Janus.warn('Ignoring framerate for simulcast/SVC track, use sendEncodings for that'); + } else if(isNaN(track.framerate) || track.framerate < 0) { + Janus.warn('Ignoring invalid framerate for track:', track); + } else if(sender) { + let params = sender.getParameters(); + if(!params || !params.encodings || params.encodings.length === 0) { + Janus.warn('No encodings in the sender parameters, ignoring framerate for track:', track); + } else { + params.encodings[0].maxFramerate = track.framerate; + await sender.setParameters(params); + } + } + } + // Check if insertable streams are involved + if(track.transforms) { + if(sender && track.transforms.sender) { + // There's a sender transform, set it on the transceiver sender + let senderStreams = null; + if(RTCRtpSender.prototype.createEncodedStreams) { + senderStreams = sender.createEncodedStreams(); + } else if(RTCRtpSender.prototype.createAudioEncodedStreams || RTCRtpSender.prototype.createEncodedVideoStreams) { + if(kind === 'audio') { + senderStreams = sender.createEncodedAudioStreams(); + } else if(kind === 'video') { + senderStreams = sender.createEncodedVideoStreams(); + } + } + if(senderStreams) { + console.log('Insertable Streams sender transform:', senderStreams); + if(senderStreams.readableStream && senderStreams.writableStream) { + senderStreams.readableStream + .pipeThrough(track.transforms.sender) + .pipeTo(senderStreams.writableStream); + } else if(senderStreams.readable && senderStreams.writable) { + senderStreams.readable + .pipeThrough(track.transforms.sender) + .pipeTo(senderStreams.writable); + } + } + } + if(transceiver && transceiver.receiver && track.transforms.receiver) { + // There's a receiver transform, set it on the transceiver receiver + let receiverStreams = null; + if(RTCRtpReceiver.prototype.createEncodedStreams) { + receiverStreams = transceiver.receiver.createEncodedStreams(); + } else 
if(RTCRtpReceiver.prototype.createAudioEncodedStreams || RTCRtpReceiver.prototype.createEncodedVideoStreams) { + if(kind === 'audio') { + receiverStreams = transceiver.receiver.createEncodedAudioStreams(); + } else if(kind === 'video') { + receiverStreams = transceiver.receiver.createEncodedVideoStreams(); + } + } + if(receiverStreams) { + console.log('Insertable Streams receiver transform:', receiverStreams); + if(receiverStreams.readableStream && receiverStreams.writableStream) { + receiverStreams.readableStream + .pipeThrough(track.transforms.receiver) + .pipeTo(receiverStreams.writableStream); + } else if(receiverStreams.readable && receiverStreams.writable) { + receiverStreams.readable + .pipeThrough(track.transforms.receiver) + .pipeTo(receiverStreams.writable); + } + } + } + } + } + if(nt && track.dontStop === true) + nt.dontStop = true; + } else if(track.recv && !transceiver) { + // Maybe a new recvonly track + transceiver = config.pc.addTransceiver(kind); + if(transceiver) { + // Check if we need to override some settings + if(track.codec) { + if(Janus.webRTCAdapter.browserDetails.browser === 'firefox') { + Janus.warn('setCodecPreferences not supported in Firefox, ignoring codec for track:', track); + } else if(typeof track.codec !== 'string') { + Janus.warn('Invalid codec value, ignoring for track:', track); + } else { + let mimeType = kind + '/' + track.codec.toLowerCase(); + let codecs = RTCRtpReceiver.getCapabilities(kind).codecs.filter(function(codec) { + return codec.mimeType.toLowerCase() === mimeType; + }); + if(!codecs || codecs.length === 0) { + Janus.warn('Codec not supported in this browser for this track, ignoring:', track); + } else { + try { + transceiver.setCodecPreferences(codecs); + } catch(err) { + Janus.warn('Failed enforcing codec for this ' + kind + ' track:', err); + } + } + } + } + // Check if insertable streams are involved + if(transceiver.receiver && track.transforms && track.transforms.receiver) { + // There's a receiver transform, set it on the transceiver receiver + let receiverStreams = null; + if(RTCRtpReceiver.prototype.createEncodedStreams) { + receiverStreams = transceiver.receiver.createEncodedStreams(); + } else if(RTCRtpReceiver.prototype.createAudioEncodedStreams || RTCRtpReceiver.prototype.createEncodedVideoStreams) { + if(kind === 'audio') { + receiverStreams = transceiver.receiver.createEncodedAudioStreams(); + } else if(kind === 'video') { + receiverStreams = transceiver.receiver.createEncodedVideoStreams(); + } + } + if(receiverStreams) { + console.log('Insertable Streams receiver transform:', receiverStreams); + if(receiverStreams.readableStream && receiverStreams.writableStream) { + receiverStreams.readableStream + .pipeThrough(track.transforms.receiver) + .pipeTo(receiverStreams.writableStream); + } else if(receiverStreams.readable && receiverStreams.writable) { + receiverStreams.readable + .pipeThrough(track.transforms.receiver) + .pipeTo(receiverStreams.writable); + } + } + } + } + } + // Get rid of the old track + // FIXME We should probably do this *before* capturing the new + // track, since this prevents, for instance, just changing the + // resolution of the same webcam we're capturing already (the + // existing resolution would be returned, or an overconstrained + // error). 
On the other end, closing the track before we capture + // the new device means we'd end up with a period of time where + // no video is sent (changing device takes some time), and + // media would be stopped entirely in case capturing the new + // device results in an error. To keep things simpler, we're + // doing it after: we can make this configurable in the future. + if(trackId && config.myStream) { + let rt = null; + if(kind === 'audio' && config.myStream.getAudioTracks() && config.myStream.getAudioTracks().length) { + for(let t of config.myStream.getAudioTracks()) { + if(t.id === trackId) { + rt = t; + Janus.log('Removing audio track:', rt); + } + } + } else if(kind === 'video' && config.myStream.getVideoTracks() && config.myStream.getVideoTracks().length) { + for(let t of config.myStream.getVideoTracks()) { + if(t.id === trackId) { + rt = t; + Janus.log('Removing video track:', rt); + } + } + } + if(rt) { + // Remove the track and notify the application + try { + config.myStream.removeTrack(rt); + pluginHandle.onlocaltrack(rt, false); + } catch(e) { + Janus.error("Error calling onlocaltrack on removal for renegotiation", e); + } + // Close the old track (unless we've been asked not to) + if(rt.dontStop !== true) { + try { + rt.stop(); + } catch(e) {} + } + } + } + if(nt) { + // FIXME Add the new track locally + config.myStream.addTrack(nt); + // Notify the application about the new local track, if any + nt.onended = function(ev) { + Janus.log('Local track removed:', ev); + try { + pluginHandle.onlocaltrack(ev.target, false); + } catch(e) { + Janus.error("Error calling onlocaltrack following end", e); + } + }; + try { + pluginHandle.onlocaltrack(nt, true); + } catch(e) { + Janus.error("Error calling onlocaltrack for track add", e); + } + } + // Update the direction of the transceiver + if(transceiver) { + let curdir = transceiver.direction, newdir = null; + let send = (nt && transceiver.sender.track), + recv = (track.recv !== false && transceiver.receiver.track); + if(send && recv) + newdir = 'sendrecv'; + else if(send && !recv) + newdir = 'sendonly'; + else if(!send && recv) + newdir = 'recvonly'; + else if(!send && !recv) + newdir = 'inactive'; + if(newdir && newdir !== curdir) { + Janus.warn('Changing direction of transceiver to ' + newdir + ' (was ' + curdir + ')', track); + if(transceiver.setDirection) + transceiver.setDirection(newdir); + else + transceiver.direction = newdir; + } + } + } + if(openedConsentDialog) + pluginHandle.consentDialog(false); + } + + function getLocalTracks(handleId) { + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || !pluginHandle.webrtcStuff) { + Janus.warn('Invalid handle'); + return null; + } + let config = pluginHandle.webrtcStuff; + if(!config.pc) { + Janus.warn('Invalid PeerConnection'); + return null; + } + let tracks = []; + let transceivers = config.pc.getTransceivers(); + for(let tr of transceivers) { + let track = null; + if(tr.sender && tr.sender.track) { + track = { mid: tr.mid }; + track.type = tr.sender.track.kind; + track.id = tr.sender.track.id; + track.label = tr.sender.track.label; + } + if(track) + tracks.push(track); + } + return tracks; + } + + function getRemoteTracks(handleId) { + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || !pluginHandle.webrtcStuff) { + Janus.warn('Invalid handle'); + return null; + } + let config = pluginHandle.webrtcStuff; + if(!config.pc) { + Janus.warn('Invalid PeerConnection'); + return null; + } + let tracks = []; + let transceivers = config.pc.getTransceivers(); + 
for(let tr of transceivers) { + let track = null; + if(tr.receiver && tr.receiver.track) { + track = { mid: tr.mid }; + track.type = tr.receiver.track.kind; + track.id = tr.receiver.track.id; + track.label = tr.receiver.track.label; + } + if(track) + tracks.push(track); + } + return tracks; + } + + function getVolume(handleId, mid, remote, result) { + result = (typeof result == "function") ? result : Janus.noop; + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || !pluginHandle.webrtcStuff) { + Janus.warn("Invalid handle"); + result(0); + return; + } + let stream = remote ? "remote" : "local"; + let config = pluginHandle.webrtcStuff; + if(!config.volume[stream]) + config.volume[stream] = { value: 0 }; + // Start getting the volume, if audioLevel in getStats is supported (apparently + // they're only available in Chrome/Safari right now: https://webrtc-stats.callstats.io/) + if(config.pc && config.pc.getStats && (Janus.webRTCAdapter.browserDetails.browser === "chrome" || + Janus.webRTCAdapter.browserDetails.browser === "safari")) { + // Are we interested in a mid in particular? + let query = config.pc; + if(mid) { + let transceiver = config.pc.getTransceivers() + .find(t => (t.mid === mid && t.receiver.track.kind === "audio")); + if(!transceiver) { + Janus.warn("No audio transceiver with mid " + mid); + result(0); + return; + } + if(remote && !transceiver.receiver) { + Janus.warn("Remote transceiver track unavailable"); + result(0); + return; + } else if(!remote && !transceiver.sender) { + Janus.warn("Local transceiver track unavailable"); + result(0); + return; + } + query = remote ? transceiver.receiver : transceiver.sender; + } + query.getStats() + .then(function(stats) { + stats.forEach(function (res) { + if(!res || res.kind !== "audio") + return; + if((remote && !res.remoteSource) || (!remote && res.type !== "media-source")) + return; + result(res.audioLevel ? res.audioLevel : 0); + }); + }); + return config.volume[stream].value; + } else { + // audioInputLevel and audioOutputLevel seem only available in Chrome? 
audioLevel + // seems to be available on Chrome and Firefox, but they don't seem to work + Janus.warn("Getting the " + stream + " volume unsupported by browser"); + result(0); + return; + } + } + + function isMuted(handleId, mid, video) { + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || !pluginHandle.webrtcStuff) { + Janus.warn("Invalid handle"); + return true; + } + let config = pluginHandle.webrtcStuff; + if(!config.pc) { + Janus.warn("Invalid PeerConnection"); + return true; + } + if(!config.myStream) { + Janus.warn("Invalid local MediaStream"); + return true; + } + if(video) { + // Check video track + if(!config.myStream.getVideoTracks() || config.myStream.getVideoTracks().length === 0) { + Janus.warn("No video track"); + return true; + } + if(mid) { + let transceiver = config.pc.getTransceivers() + .find(t => (t.mid === mid && t.receiver.track.kind === "video")); + if(!transceiver) { + Janus.warn("No video transceiver with mid " + mid); + return true; + } + if(!transceiver.sender || !transceiver.sender.track) { + Janus.warn("No video sender with mid " + mid); + return true; + } + return !transceiver.sender.track.enabled; + } else { + return !config.myStream.getVideoTracks()[0].enabled; + } + } else { + // Check audio track + if(!config.myStream.getAudioTracks() || config.myStream.getAudioTracks().length === 0) { + Janus.warn("No audio track"); + return true; + } + if(mid) { + let transceiver = config.pc.getTransceivers() + .find(t => (t.mid === mid && t.receiver.track.kind === "audio")); + if(!transceiver) { + Janus.warn("No audio transceiver with mid " + mid); + return true; + } + if(!transceiver.sender || !transceiver.sender.track) { + Janus.warn("No audio sender with mid " + mid); + return true; + } + return !transceiver.sender.track.enabled; + } else { + return !config.myStream.getAudioTracks()[0].enabled; + } + } + } + + function mute(handleId, mid, video, mute) { + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || !pluginHandle.webrtcStuff) { + Janus.warn("Invalid handle"); + return false; + } + let config = pluginHandle.webrtcStuff; + if(!config.pc) { + Janus.warn("Invalid PeerConnection"); + return false; + } + if(!config.myStream) { + Janus.warn("Invalid local MediaStream"); + return false; + } + if(video) { + // Mute/unmute video track + if(!config.myStream.getVideoTracks() || config.myStream.getVideoTracks().length === 0) { + Janus.warn("No video track"); + return false; + } + if(mid) { + let transceiver = config.pc.getTransceivers() + .find(t => (t.mid === mid && t.receiver.track.kind === "video")); + if(!transceiver) { + Janus.warn("No video transceiver with mid " + mid); + return false; + } + if(!transceiver.sender || !transceiver.sender.track) { + Janus.warn("No video sender with mid " + mid); + return false; + } + transceiver.sender.track.enabled = mute ? 
false : true; + } else { + for(const videostream of config.myStream.getVideoTracks()) { + videostream.enabled = !mute; + } + } + } else { + // Mute/unmute audio track + if(!config.myStream.getAudioTracks() || config.myStream.getAudioTracks().length === 0) { + Janus.warn("No audio track"); + return false; + } + if(mid) { + let transceiver = config.pc.getTransceivers() + .find(t => (t.mid === mid && t.receiver.track.kind === "audio")); + if(!transceiver) { + Janus.warn("No audio transceiver with mid " + mid); + return false; + } + if(!transceiver.sender || !transceiver.sender.track) { + Janus.warn("No audio sender with mid " + mid); + return false; + } + transceiver.sender.track.enabled = mute ? false : true; + } else { + for(const audiostream of config.myStream.getAudioTracks()) { + audiostream.enabled = !mute; + } + } + } + return true; + } + + function getBitrate(handleId, mid) { + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || !pluginHandle.webrtcStuff) { + Janus.warn("Invalid handle"); + return "Invalid handle"; + } + let config = pluginHandle.webrtcStuff; + if(!config.pc) + return "Invalid PeerConnection"; + // Start getting the bitrate, if getStats is supported + if(config.pc.getStats) { + let query = config.pc; + let target = mid ? mid : "default"; + if(mid) { + let transceiver = config.pc.getTransceivers() + .find(t => (t.mid === mid && t.receiver.track.kind === "video")); + if(!transceiver) { + Janus.warn("No video transceiver with mid " + mid); + return ("No video transceiver with mid " + mid); + } + if(!transceiver.receiver) { + Janus.warn("No video receiver with mid " + mid); + return ("No video receiver with mid " + mid); + } + query = transceiver.receiver; + } + if(!config.bitrate[target]) { + config.bitrate[target] = { + timer: null, + bsnow: null, + bsbefore: null, + tsnow: null, + tsbefore: null, + value: "0 kbits/sec" + }; + } + if(!config.bitrate[target].timer) { + Janus.log("Starting bitrate timer" + (mid ? 
(" for mid " + mid) : "") + " (via getStats)"); + config.bitrate[target].timer = setInterval(function() { + query.getStats() + .then(function(stats) { + stats.forEach(function (res) { + if(!res) + return; + let inStats = false; + // Check if these are statistics on incoming media + if((res.mediaType === "video" || res.id.toLowerCase().indexOf("video") > -1) && + res.type === "inbound-rtp" && res.id.indexOf("rtcp") < 0) { + // New stats + inStats = true; + } else if(res.type == 'ssrc' && res.bytesReceived && + (res.googCodecName === "VP8" || res.googCodecName === "")) { + // Older Chromer versions + inStats = true; + } + // Parse stats now + if(inStats) { + config.bitrate[target].bsnow = res.bytesReceived; + config.bitrate[target].tsnow = res.timestamp; + if(config.bitrate[target].bsbefore === null || config.bitrate[target].tsbefore === null) { + // Skip this round + config.bitrate[target].bsbefore = config.bitrate[target].bsnow; + config.bitrate[target].tsbefore = config.bitrate[target].tsnow; + } else { + // Calculate bitrate + let timePassed = config.bitrate[target].tsnow - config.bitrate[target].tsbefore; + if(Janus.webRTCAdapter.browserDetails.browser === "safari") + timePassed = timePassed/1000; // Apparently the timestamp is in microseconds, in Safari + let bitRate = Math.round((config.bitrate[target].bsnow - config.bitrate[target].bsbefore) * 8 / timePassed); + if(Janus.webRTCAdapter.browserDetails.browser === "safari") + bitRate = parseInt(bitRate/1000); + config.bitrate[target].value = bitRate + ' kbits/sec'; + //~ Janus.log("Estimated bitrate is " + config.bitrate.value); + config.bitrate[target].bsbefore = config.bitrate[target].bsnow; + config.bitrate[target].tsbefore = config.bitrate[target].tsnow; + } + } + }); + }); + }, 1000); + return "0 kbits/sec"; // We don't have a bitrate value yet + } + return config.bitrate[target].value; + } else { + Janus.warn("Getting the video bitrate unsupported by browser"); + return "Feature unsupported by browser"; + } + } + + function setBitrate(handleId, mid, bitrate) { + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle || !pluginHandle.webrtcStuff) { + Janus.warn('Invalid handle'); + return; + } + let config = pluginHandle.webrtcStuff; + if(!config.pc) { + Janus.warn('Invalid PeerConnection'); + return; + } + let transceiver = config.pc.getTransceivers().find(t => (t.mid === mid)); + if(!transceiver) { + Janus.warn('No transceiver with mid', mid); + return; + } + if(!transceiver.sender) { + Janus.warn('No sender for transceiver with mid', mid); + return; + } + let params = transceiver.sender.getParameters(); + if(!params || !params.encodings || params.encodings.length === 0) { + Janus.warn('No parameters encodings'); + } else if(params.encodings.length > 1) { + Janus.warn('Ignoring bitrate for simulcast track, use sendEncodings for that'); + } else if(isNaN(bitrate) || bitrate < 0) { + Janus.warn('Invalid bitrate (must be a positive integer)'); + } else { + params.encodings[0].maxBitrate = bitrate; + transceiver.sender.setParameters(params); + } + } + + function webrtcError(error) { + Janus.error("WebRTC error:", error); + } + + function cleanupWebrtc(handleId, hangupRequest) { + Janus.log("Cleaning WebRTC stuff"); + let pluginHandle = pluginHandles[handleId]; + if(!pluginHandle) { + // Nothing to clean + return; + } + let config = pluginHandle.webrtcStuff; + if(config) { + if(hangupRequest === true) { + // Send a hangup request (we don't really care about the response) + let request = { "janus": "hangup", "transaction": 
Janus.randomString(12) }; + if(pluginHandle.token) + request["token"] = pluginHandle.token; + if(apisecret) + request["apisecret"] = apisecret; + Janus.debug("Sending hangup request (handle=" + handleId + "):"); + Janus.debug(request); + if(websockets) { + request["session_id"] = sessionId; + request["handle_id"] = handleId; + ws.send(JSON.stringify(request)); + } else { + Janus.httpAPICall(server + "/" + sessionId + "/" + handleId, { + verb: 'POST', + withCredentials: withCredentials, + body: request + }); + } + } + // Cleanup stack + if(config.volume) { + if(config.volume["local"] && config.volume["local"].timer) + clearInterval(config.volume["local"].timer); + if(config.volume["remote"] && config.volume["remote"].timer) + clearInterval(config.volume["remote"].timer); + } + for(let i in config.bitrate) { + if(config.bitrate[i].timer) + clearInterval(config.bitrate[i].timer); + } + config.bitrate = {}; + if(!config.streamExternal && config.myStream) { + Janus.log("Stopping local stream tracks"); + Janus.stopAllTracks(config.myStream); + } + config.streamExternal = false; + config.myStream = null; + // Close PeerConnection + try { + config.pc.close(); + } catch(e) { + // Do nothing + } + config.pc = null; + config.candidates = null; + config.mySdp = null; + config.remoteSdp = null; + config.iceDone = false; + config.dataChannel = {}; + config.dtmfSender = null; + config.insertableStreams = false; + } + pluginHandle.oncleanup(); + } + + function isTrickleEnabled(trickle) { + Janus.debug("isTrickleEnabled:", trickle); + return (trickle === false) ? false : true; + } +} + +export default Janus;
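For reference, a minimal sketch of how the helpers vendored above (Janus.isWebrtcSupported, the device-listing helper whose tail opens this hunk, and Janus.attachMediaStream) are typically driven from application code. The init options, the import path and the 'preview' element id are illustrative assumptions, not part of this diff; in upstream janus.js the device helper is exposed as Janus.listDevices(callback, config).

import Janus from './janus.js';  // hypothetical path to this vendored file

Janus.init({
	debug: 'all',
	callback: function() {
		if(!Janus.isWebrtcSupported()) {
			Janus.error('WebRTC is not supported by this browser');
			return;
		}
		// listDevices() grabs a throwaway stream, enumerates devices,
		// invokes the callback, then stops the tracks again
		Janus.listDevices(function(devices) {
			Janus.log('Available devices:', devices);
		});
	}
});

// Attaching a local or remote track to a <video> element, e.g. from an
// onlocaltrack/onremotetrack callback; 'preview' is a hypothetical element id
function showTrack(track) {
	Janus.attachMediaStream(document.getElementById('preview'), new MediaStream([track]));
}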
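The private sendData() and sendDtmf() above are normally reached through the data() and dtmf() wrappers that janus.js exposes on a plugin handle; assuming those wrappers, a sketch of both calls (the handle, label and tones are illustrative). sendData() creates the labelled data channel on demand and queues messages until it opens, so the success callback only means the message was accepted.

// 'handle' is a plugin handle obtained earlier via janus.attach({ ... })
function sendChatMessage(handle, text) {
	handle.data({
		text: text,
		label: 'chat',   // channel is created on first use; messages queue until it opens
		success: function() { Janus.log('Data queued or sent'); },
		error: function(err) { Janus.error('Data channel error:', err); }
	});
}

function sendTones(handle) {
	handle.dtmf({
		// 500ms duration and 50ms gap are the defaults applied by sendDtmf()
		dtmf: { tones: '1234#', duration: 500, gap: 50 },
		error: function(err) { Janus.error('DTMF error:', err); }
	});
}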
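captureDevices() consumes the tracks array passed when creating an offer or answer; below is a sketch of a tracks-based createOffer() exercising the per-track options handled above (simulcast, codec), assuming the usual createOffer() and send() wrappers on the plugin handle. The plugin message in the success callback is plugin-specific and only shown as an example.

function publishWithSimulcast(handle) {
	handle.createOffer({
		tracks: [
			{ type: 'audio', capture: true, recv: true },
			{
				type: 'video',
				capture: true,   // translated to getUserMedia constraints by Janus.trackConstraints()
				recv: true,
				simulcast: true, // three RID layers (h/m/l) with the default max bitrates
				codec: 'vp8'     // enforced via setCodecPreferences() where supported (not Firefox)
			}
		],
		success: function(jsep) {
			// Forward the JSEP to the plugin; the message body depends on the plugin in use
			handle.send({ message: { request: 'configure' }, jsep: jsep });
		},
		error: function(err) {
			Janus.error('createOffer failed:', err);
		}
	});
}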
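createPeerconnectionIfNeeded() only enables encoded insertable streams when at least one track carries transforms, and the capture code then pipes the encoded frames through them; a sketch of what such a transform can look like, assuming a Chromium-based browser where createEncodedStreams() is available. The identity transform is purely illustrative.

// A pass-through TransformStream: janus.js pipes sender/receiver encoded frames
// through objects like this via pipeThrough() when insertable streams are enabled
function identityTransform() {
	return new TransformStream({
		transform: function(encodedFrame, controller) {
			// Inspect or rewrite encodedFrame.data (an ArrayBuffer) here
			controller.enqueue(encodedFrame);
		}
	});
}

function publishWithTransforms(handle) {
	handle.createOffer({
		tracks: [
			{ type: 'video', capture: true, recv: true,
				transforms: { sender: identityTransform(), receiver: identityTransform() } }
		],
		success: function(jsep) { handle.send({ message: { request: 'configure' }, jsep: jsep }); },
		error: function(err) { Janus.error('createOffer failed:', err); }
	});
}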
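Finally, the mute(), isMuted() and getBitrate() helpers are usually reached through the muteVideo()/unmuteVideo()/isVideoMuted() and getBitrate() wrappers on the plugin handle; assuming those wrappers, a sketch of toggling video and polling the receive bitrate. getBitrate() starts its own 1-second getStats() timer on first use and returns "0 kbits/sec" until a sample is available; cleanupWebrtc() clears that timer when the PeerConnection is torn down.

function toggleVideo(handle, mid) {
	// mid is optional: without it every local video track is toggled
	if(handle.isVideoMuted(mid))
		handle.unmuteVideo(mid);
	else
		handle.muteVideo(mid);
}

function startBitratePoller(handle, mid) {
	return setInterval(function() {
		// Returns a string such as "512 kbits/sec"; pass a mid to target one video m-line
		Janus.log('Current bitrate:', handle.getBitrate(mid));
	}, 1000);
}
// clearInterval() the returned id when the call ends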