ios: webrtc calls started (#594)

* building towards ios calls

* minor ios changes

* linting

* RPC calls with webview

* tidy up

* more types

* ios: webrtc types, call works

* remove comment

* prettier

* add prettier to build

Co-authored-by: Evgeny Poberezkin <2769109+epoberezkin@users.noreply.github.com>
This commit is contained in:
IanRDavies 2022-05-04 23:07:26 +01:00 committed by GitHub
parent 3e19e495de
commit f2036236f6
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
12 changed files with 715 additions and 347 deletions

View file

@ -1 +1,3 @@
Do NOT edit call.js here, it is compiled from call.ts in packages/simplex-chat-webrtc
# WebView for WebRTC calls in SimpleX Chat
Do NOT edit call.js here, it is compiled abd copied here from call.ts in packages/simplex-chat-webrtc

View file

@ -6,15 +6,9 @@ var CallMediaType;
CallMediaType["Audio"] = "audio";
CallMediaType["Video"] = "video";
})(CallMediaType || (CallMediaType = {}));
// STUN servers
const peerConnectionConfig = {
iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
iceCandidatePoolSize: 10,
encodedInsertableStreams: true,
};
const keyAlgorithm = {
name: "AES-GCM",
length: 256
length: 256,
};
const keyUsages = ["encrypt", "decrypt"];
let pc;
@ -24,17 +18,24 @@ const initialPlainTextRequired = {
delta: 3,
undefined: 1,
};
const defaultCallConfig = {
iceCandidates: {
delay: 2000,
extrasInterval: 2000,
extrasTimeout: 8000
}
};
function defaultCallConfig(encodedInsertableStreams) {
return {
peerConnectionConfig: {
iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
iceCandidatePoolSize: 10,
encodedInsertableStreams,
},
iceCandidates: {
delay: 2000,
extrasInterval: 2000,
extrasTimeout: 8000,
},
};
}
async function initializeCall(config, mediaType, aesKey) {
const conn = new RTCPeerConnection(peerConnectionConfig);
const conn = new RTCPeerConnection(config.peerConnectionConfig);
const remoteStream = new MediaStream();
const localStream = await navigator.mediaDevices.getUserMedia(callMediaContraints(mediaType));
const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType));
await setUpMediaStreams(conn, localStream, remoteStream, aesKey);
conn.addEventListener("connectionstatechange", connectionStateChange);
const iceCandidates = new Promise((resolve, _) => {
@ -73,141 +74,144 @@ async function initializeCall(config, mediaType, aesKey) {
if (delay)
clearTimeout(delay);
resolved = true;
const iceCandidates = candidates.slice();
const iceCandidates = candidates.map((c) => JSON.stringify(c));
candidates = [];
resolve(iceCandidates);
}
function sendIceCandidates() {
if (candidates.length === 0)
return;
const iceCandidates = candidates.slice();
const iceCandidates = candidates.map((c) => JSON.stringify(c));
candidates = [];
sendMessageToNative({ type: "ice", iceCandidates });
sendMessageToNative({ resp: { type: "ice", iceCandidates } });
}
});
return { connection: conn, iceCandidates };
function connectionStateChange() {
sendMessageToNative({
type: "connection",
state: {
connectionState: conn.connectionState,
iceConnectionState: conn.iceConnectionState,
iceGatheringState: conn.iceGatheringState,
signalingState: conn.signalingState,
}
resp: {
type: "connection",
state: {
connectionState: conn.connectionState,
iceConnectionState: conn.iceConnectionState,
iceGatheringState: conn.iceGatheringState,
signalingState: conn.signalingState,
},
},
});
if (conn.connectionState == "disconnected" || conn.connectionState == "failed") {
conn.removeEventListener("connectionstatechange", connectionStateChange);
sendMessageToNative({ type: "ended" });
sendMessageToNative({ resp: { type: "ended" } });
conn.close();
pc = undefined;
resetVideoElements();
}
}
}
// TODO remove WCallCommand from parameter type
function sendMessageToNative(msg) {
console.log(JSON.stringify(msg));
}
// TODO remove WCallCommand from result type
async function processCommand(command) {
async function processCommand(body) {
const { command, corrId } = body;
let resp;
switch (command.type) {
case "capabilities":
const encryption = supportsInsertableStreams();
resp = { type: "capabilities", capabilities: { encryption } };
break;
case "start":
console.log("starting call");
if (pc) {
resp = { type: "error", message: "start: call already started" };
}
else if (!supportsInsertableStreams() && command.aesKey) {
resp = { type: "error", message: "start: encryption is not supported" };
}
else {
try {
try {
switch (command.type) {
case "capabilities":
const encryption = supportsInsertableStreams();
resp = { type: "capabilities", capabilities: { encryption } };
break;
case "start":
console.log("starting call");
if (pc) {
resp = { type: "error", message: "start: call already started" };
}
else if (!supportsInsertableStreams() && command.aesKey) {
resp = { type: "error", message: "start: encryption is not supported" };
}
else {
const { media, aesKey } = command;
const call = await initializeCall(defaultCallConfig, media, aesKey);
const call = await initializeCall(defaultCallConfig(!!aesKey), media, aesKey);
const { connection, iceCandidates } = call;
pc = connection;
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
// for debugging, returning the command for callee to use
resp = { type: "accept", offer, iceCandidates: await iceCandidates, media, aesKey };
resp = { type: "accept", offer: JSON.stringify(offer), iceCandidates: await iceCandidates, media, aesKey };
// resp = {type: "offer", offer, iceCandidates: await iceCandidates}
}
catch (e) {
resp = { type: "error", message: e.message };
break;
case "accept":
if (pc) {
resp = { type: "error", message: "accept: call already started" };
}
}
break;
case "accept":
if (pc) {
resp = { type: "error", message: "accept: call already started" };
}
else if (!supportsInsertableStreams() && command.aesKey) {
resp = { type: "error", message: "accept: encryption is not supported" };
}
else {
try {
const call = await initializeCall(defaultCallConfig, command.media, command.aesKey);
else if (!supportsInsertableStreams() && command.aesKey) {
resp = { type: "error", message: "accept: encryption is not supported" };
}
else {
const offer = JSON.parse(command.offer);
const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c));
const call = await initializeCall(defaultCallConfig(!!command.aesKey), command.media, command.aesKey);
const { connection, iceCandidates } = call;
pc = connection;
await pc.setRemoteDescription(new RTCSessionDescription(command.offer));
await pc.setRemoteDescription(new RTCSessionDescription(offer));
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer);
addIceCandidates(pc, command.iceCandidates);
addIceCandidates(pc, remoteIceCandidates);
// same as command for caller to use
resp = { type: "answer", answer, iceCandidates: await iceCandidates };
resp = { type: "answer", answer: JSON.stringify(answer), iceCandidates: await iceCandidates };
}
catch (e) {
resp = { type: "error", message: e.message };
break;
case "answer":
if (!pc) {
resp = { type: "error", message: "answer: call not started" };
}
}
break;
case "answer":
if (!pc) {
resp = { type: "error", message: "answer: call not started" };
}
else if (!pc.localDescription) {
resp = { type: "error", message: "answer: local description is not set" };
}
else if (pc.currentRemoteDescription) {
resp = { type: "error", message: "answer: remote description already set" };
}
else {
await pc.setRemoteDescription(new RTCSessionDescription(command.answer));
addIceCandidates(pc, command.iceCandidates);
resp = { type: "ok" };
}
break;
case "ice":
if (pc) {
addIceCandidates(pc, command.iceCandidates);
resp = { type: "ok" };
}
else {
resp = { type: "error", message: "ice: call not started" };
}
break;
case "end":
if (pc) {
pc.close();
pc = undefined;
resetVideoElements();
resp = { type: "ok" };
}
else {
resp = { type: "error", message: "end: call not started" };
}
break;
default:
resp = { type: "error", message: "unknown command" };
break;
else if (!pc.localDescription) {
resp = { type: "error", message: "answer: local description is not set" };
}
else if (pc.currentRemoteDescription) {
resp = { type: "error", message: "answer: remote description already set" };
}
else {
const answer = JSON.parse(command.answer);
const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c));
await pc.setRemoteDescription(new RTCSessionDescription(answer));
addIceCandidates(pc, remoteIceCandidates);
resp = { type: "ok" };
}
break;
case "ice":
if (pc) {
const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c));
addIceCandidates(pc, remoteIceCandidates);
resp = { type: "ok" };
}
else {
resp = { type: "error", message: "ice: call not started" };
}
break;
case "end":
if (pc) {
pc.close();
pc = undefined;
resetVideoElements();
resp = { type: "ok" };
}
else {
resp = { type: "error", message: "end: call not started" };
}
break;
default:
resp = { type: "error", message: "unknown command" };
break;
}
}
sendMessageToNative(resp);
return resp;
catch (e) {
resp = { type: "error", message: e.message };
}
const apiResp = { resp, corrId };
sendMessageToNative(apiResp);
return apiResp;
}
function addIceCandidates(conn, iceCandidates) {
for (const c of iceCandidates) {
@ -274,7 +278,7 @@ async function setUpMediaStreams(pc, localStream, remoteStream, aesKey) {
videos.local.srcObject = localStream;
videos.remote.srcObject = remoteStream;
}
function callMediaContraints(mediaType) {
function callMediaConstraints(mediaType) {
switch (mediaType) {
case CallMediaType.Audio:
return { audio: true, video: false };
@ -294,8 +298,7 @@ function callMediaContraints(mediaType) {
}
}
function supportsInsertableStreams() {
return ("createEncodedStreams" in RTCRtpSender.prototype)
&& ("createEncodedStreams" in RTCRtpReceiver.prototype);
return "createEncodedStreams" in RTCRtpSender.prototype && "createEncodedStreams" in RTCRtpReceiver.prototype;
}
function resetVideoElements() {
const videos = getVideoElements();
@ -326,10 +329,6 @@ function getVideoElements() {
// localStream.getVideoTracks()[0].enabled = false
// }
// }
function f() {
console.log("Debug Function");
return "Debugging Return";
}
/* Stream Transforms */
function setupPeerTransform(peer, transform) {
const streams = peer.createEncodedStreams();

View file

@ -1,24 +1,25 @@
video::-webkit-media-controls {
display: none;
}
html, body {
padding: 0;
margin: 0;
html,
body {
padding: 0;
margin: 0;
}
#remote-video-stream {
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
}
#local-video-stream {
position: absolute;
width: 30%;
max-width: 30%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
bottom: 0;
right: 0;
position: absolute;
width: 30%;
max-width: 30%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
bottom: 0;
right: 0;
}

View file

@ -9,83 +9,177 @@
import SwiftUI
import WebKit
struct WebView: UIViewRepresentable {
class Coordinator: NSObject, WKNavigationDelegate, WKScriptMessageHandler {
var webView: WKWebView!
class WebRTCCoordinator: NSObject, WKNavigationDelegate, WKScriptMessageHandler {
var webView: WKWebView!
var corrId = 0
var pendingCommands: Dictionary<Int, CheckedContinuation<WCallResponse, Never>> = [:]
func webView(_ webView: WKWebView, didFinish navigation: WKNavigation!) {
webView.allowsBackForwardNavigationGestures = false
self.webView = webView
}
func webView(_ webView: WKWebView, didFinish navigation: WKNavigation!) {
webView.allowsBackForwardNavigationGestures = false
self.webView = webView
}
// receive message from wkwebview
func userContentController(
_ userContentController: WKUserContentController,
didReceive message: WKScriptMessage
) {
print(message.body)
// let date = Date()
// DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
// self.messageToWebview(msg: "hello, I got your messsage: \(message.body) at \(date)")
// }
}
func messageToWebview(msg: String) {
self.webView?.evaluateJavaScript("webkit.messageHandlers.bridge.onMessage('\(msg)')")
// receive message from WKWebView
func userContentController(
_ userContentController: WKUserContentController,
didReceive message: WKScriptMessage
) {
logger.debug("WebRTCCoordinator.userContentController")
if let data = (message.body as? String)?.data(using: .utf8),
let msg = try? jsonDecoder.decode(WVAPIMessage.self, from: data) {
if let corrId = msg.corrId, let cont = pendingCommands.removeValue(forKey: corrId) {
cont.resume(returning: msg.resp)
} else {
// TODO pass messages to call view via binding
// print(msg.resp)
}
} else {
logger.error("WebRTCCoordinator.userContentController: invalid message \(String(describing: message.body))")
}
}
func makeCoordinator() -> Coordinator {
return Coordinator()
func messageToWebview(msg: String) {
logger.debug("WebRTCCoordinator.messageToWebview")
self.webView.evaluateJavaScript("webkit.messageHandlers.logHandler.postMessage('\(msg)')")
}
func processCommand(command: WCallCommand) async -> WCallResponse {
await withCheckedContinuation { cont in
logger.debug("WebRTCCoordinator.processCommand")
let corrId_ = corrId
corrId = corrId + 1
pendingCommands[corrId_] = cont
do {
let apiData = try jsonEncoder.encode(WVAPICall(corrId: corrId_, command: command))
DispatchQueue.main.async {
logger.debug("WebRTCCoordinator.processCommand DispatchQueue.main.async")
let js = "processCommand(\(String(decoding: apiData, as: UTF8.self)))"
self.webView.evaluateJavaScript(js)
}
} catch {
logger.error("WebRTCCoordinator.processCommand: error encoding command \(error.localizedDescription)")
}
}
}
}
struct WebRTCView: UIViewRepresentable {
@Binding var coordinator: WebRTCCoordinator?
func makeCoordinator() -> WebRTCCoordinator {
WebRTCCoordinator()
}
func makeUIView(context: Context) -> WKWebView {
let coordinator = makeCoordinator()
let userContentController = WKUserContentController()
userContentController.add(coordinator, name: "bridge")
let _coordinator = makeCoordinator()
DispatchQueue.main.async {
coordinator = _coordinator
}
let configuration = WKWebViewConfiguration()
configuration.userContentController = userContentController
configuration.mediaTypesRequiringUserActionForPlayback = []
configuration.allowsInlineMediaPlayback = true
let userContentController = WKUserContentController()
let cfg = WKWebViewConfiguration()
cfg.userContentController = userContentController
cfg.mediaTypesRequiringUserActionForPlayback = []
cfg.allowsInlineMediaPlayback = true
// Enable us to capture calls to console.log in the xcode logs
// Print actually happens on line 29
let source = "console.log = (msg) => webkit.messageHandlers.logHandler.postMessage(msg)"
let script = WKUserScript(source: source, injectionTime: .atDocumentStart, forMainFrameOnly: false)
configuration.userContentController.addUserScript(script)
configuration.userContentController.add(coordinator, name: "logHandler")
let _wkwebview = WKWebView(frame: .zero, configuration: configuration)
_wkwebview.navigationDelegate = coordinator
cfg.userContentController.addUserScript(script)
cfg.userContentController.add(_coordinator, name: "logHandler")
let _wkwebview = WKWebView(frame: .zero, configuration: cfg)
_wkwebview.navigationDelegate = _coordinator
guard let path: String = Bundle.main.path(forResource: "call", ofType: "html", inDirectory: "www") else {
logger.error("WebRTCView.makeUIView call.html not found")
return _wkwebview
}
let localHTMLUrl = URL(fileURLWithPath: path, isDirectory: false)
_wkwebview.loadFileURL(localHTMLUrl, allowingReadAccessTo: localHTMLUrl)
return _wkwebview
}
func updateUIView(_ webView: WKWebView, context: Context) {
guard let path: String = Bundle.main.path(forResource: "call", ofType: "html", inDirectory: "www") else {
print("page not found")
return
}
let localHTMLUrl = URL(fileURLWithPath: path, isDirectory: false)
webView.loadFileURL(localHTMLUrl, allowingReadAccessTo: localHTMLUrl)
logger.debug("WebRTCView.updateUIView")
}
}
struct CallView: View {
@State var coordinator: WebRTCCoordinator? = nil
@State var commandStr = ""
@FocusState private var keyboardVisible: Bool
var body: some View {
VStack {
WebView()
VStack(spacing: 30) {
WebRTCView(coordinator: $coordinator).frame(maxHeight: 260)
TextEditor(text: $commandStr)
.focused($keyboardVisible)
.disableAutocorrection(true)
.textInputAutocapitalization(.never)
.padding(.horizontal, 5)
.padding(.top, 2)
.frame(height: 112)
.overlay(
RoundedRectangle(cornerRadius: 10)
.strokeBorder(.secondary, lineWidth: 0.3, antialiased: true)
)
HStack(spacing: 20) {
Button("Copy") {
UIPasteboard.general.string = commandStr
}
Button("Paste") {
commandStr = UIPasteboard.general.string ?? ""
}
Button("Clear") {
commandStr = ""
}
Button("Send") {
do {
let command = try jsonDecoder.decode(WCallCommand.self, from: commandStr.data(using: .utf8)!)
if let c = coordinator {
Task {
let resp = await c.processCommand(command: command)
print(String(decoding: try! jsonEncoder.encode(resp), as: UTF8.self))
}
}
} catch {
print(error)
}
}
}
HStack(spacing: 20) {
Button("Capabilities") {
}
Button("Start") {
if let c = coordinator {
Task {
let resp = await c.processCommand(command: .start(media: .video))
print(String(decoding: try! jsonEncoder.encode(resp), as: UTF8.self))
}
}
}
Button("Accept") {
}
Button("Answer") {
}
Button("ICE") {
}
Button("End") {
}
}
}
}
}
struct CallView_Previews: PreviewProvider {
static var previews: some View {
CallView()

View file

@ -0,0 +1,229 @@
//
// WebRTC.swift
// SimpleX (iOS)
//
// Created by Evgeny on 03/05/2022.
// Copyright © 2022 SimpleX Chat. All rights reserved.
//
import Foundation
struct WVAPICall: Encodable {
var corrId: Int
var command: WCallCommand
}
struct WVAPIMessage: Decodable {
var corrId: Int?
var resp: WCallResponse
}
enum WCallCommand {
case capabilities
case start(media: CallMediaType, aesKey: String? = nil)
case accept(offer: String, iceCandidates: [String], media: CallMediaType, aesKey: String? = nil)
case answer(answer: String, iceCandidates: [String])
case ice(iceCandidates: [String])
case end
enum CodingKeys: String, CodingKey {
case type
case media
case aesKey
case offer
case answer
case iceCandidates
}
}
extension WCallCommand: Encodable {
func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
switch self {
case .capabilities:
try container.encode("capabilities", forKey: .type)
case let .start(media, aesKey):
try container.encode("start", forKey: .type)
try container.encode(media, forKey: .media)
try container.encode(aesKey, forKey: .aesKey)
case let .accept(offer, iceCandidates, media, aesKey):
try container.encode("accept", forKey: .type)
try container.encode(offer, forKey: .offer)
try container.encode(iceCandidates, forKey: .iceCandidates)
try container.encode(media, forKey: .media)
try container.encode(aesKey, forKey: .aesKey)
case let .answer(answer, iceCandidates):
try container.encode("answer", forKey: .type)
try container.encode(answer, forKey: .answer)
try container.encode(iceCandidates, forKey: .iceCandidates)
case let .ice(iceCandidates):
try container.encode("ice", forKey: .type)
try container.encode(iceCandidates, forKey: .iceCandidates)
case .end:
try container.encode("end", forKey: .type)
}
}
}
// This protocol is only needed for debugging
extension WCallCommand: Decodable {
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
let type = try container.decode(String.self, forKey: CodingKeys.type)
switch type {
case "capabilities":
self = .capabilities
case "start":
let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
let aesKey = try? container.decode(String.self, forKey: CodingKeys.aesKey)
self = .start(media: media, aesKey: aesKey)
case "accept":
let offer = try container.decode(String.self, forKey: CodingKeys.offer)
let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
let aesKey = try? container.decode(String.self, forKey: CodingKeys.aesKey)
self = .accept(offer: offer, iceCandidates: iceCandidates, media: media, aesKey: aesKey)
case "answer":
let answer = try container.decode(String.self, forKey: CodingKeys.answer)
let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
self = .answer(answer: answer, iceCandidates: iceCandidates)
case "ice":
let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
self = .ice(iceCandidates: iceCandidates)
case "end":
self = .end
default:
throw DecodingError.typeMismatch(WCallCommand.self, DecodingError.Context(codingPath: [CodingKeys.type], debugDescription: "cannot decode WCallCommand, unknown type \(type)"))
}
}
}
enum WCallResponse {
case capabilities(capabilities: CallCapabilities)
case offer(offer: String, iceCandidates: [String])
// TODO remove accept, it is needed for debugging
case accept(offer: String, iceCandidates: [String], media: CallMediaType, aesKey: String? = nil)
case answer(answer: String, iceCandidates: [String])
case ice(iceCandidates: [String])
case connection(state: ConnectionState)
case ended
case ok
case error(message: String)
case invalid(type: String)
enum CodingKeys: String, CodingKey {
case type
case capabilities
case offer
case answer
case iceCandidates
case state
case message
// TODO remove media, aesKey
case media
case aesKey
}
}
extension WCallResponse: Decodable {
init(from decoder: Decoder) throws {
do {
let container = try decoder.container(keyedBy: CodingKeys.self)
let type = try container.decode(String.self, forKey: CodingKeys.type)
switch type {
case "capabilities":
let capabilities = try container.decode(CallCapabilities.self, forKey: CodingKeys.capabilities)
self = .capabilities(capabilities: capabilities)
case "offer":
let offer = try container.decode(String.self, forKey: CodingKeys.offer)
let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
self = .offer(offer: offer, iceCandidates: iceCandidates)
// TODO remove accept
case "accept":
let offer = try container.decode(String.self, forKey: CodingKeys.offer)
let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
let media = try container.decode(CallMediaType.self, forKey: CodingKeys.media)
let aesKey = try? container.decode(String.self, forKey: CodingKeys.aesKey)
self = .accept(offer: offer, iceCandidates: iceCandidates, media: media, aesKey: aesKey)
case "answer":
let answer = try container.decode(String.self, forKey: CodingKeys.answer)
let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
self = .answer(answer: answer, iceCandidates: iceCandidates)
case "ice":
let iceCandidates = try container.decode([String].self, forKey: CodingKeys.iceCandidates)
self = .ice(iceCandidates: iceCandidates)
case "connection":
let state = try container.decode(ConnectionState.self, forKey: CodingKeys.state)
self = .connection(state: state)
case "ended":
self = .ended
case "ok":
self = .ok
case "error":
let message = try container.decode(String.self, forKey: CodingKeys.message)
self = .error(message: message)
default:
self = .invalid(type: type)
}
} catch {
self = .invalid(type: "unknown")
}
}
}
// This protocol is only needed for debugging
extension WCallResponse: Encodable {
func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
switch self {
case .capabilities:
try container.encode("capabilities", forKey: .type)
case let .offer(offer, iceCandidates):
try container.encode("offer", forKey: .type)
try container.encode(offer, forKey: .offer)
try container.encode(iceCandidates, forKey: .iceCandidates)
case let .accept(offer, iceCandidates, media, aesKey):
try container.encode("accept", forKey: .type)
try container.encode(offer, forKey: .offer)
try container.encode(iceCandidates, forKey: .iceCandidates)
try container.encode(media, forKey: .media)
try container.encode(aesKey, forKey: .aesKey)
case let .answer(answer, iceCandidates):
try container.encode("answer", forKey: .type)
try container.encode(answer, forKey: .answer)
try container.encode(iceCandidates, forKey: .iceCandidates)
case let .ice(iceCandidates):
try container.encode("ice", forKey: .type)
try container.encode(iceCandidates, forKey: .iceCandidates)
case let .connection(state):
try container.encode("connection", forKey: .type)
try container.encode(state, forKey: .state)
case .ended:
try container.encode("ended", forKey: .type)
case .ok:
try container.encode("ok", forKey: .type)
case let .error(message):
try container.encode("error", forKey: .type)
try container.encode(message, forKey: .message)
case let .invalid(type):
try container.encode(type, forKey: .type)
}
}
}
enum CallMediaType: String, Codable {
case video = "video"
case audio = "audio"
}
struct CallCapabilities: Codable {
var encryption: Bool
}
struct ConnectionState: Codable {
var connectionState: String
var iceConnectionState: String
var iceGatheringState: String
var signalingState: String
}

View file

@ -45,6 +45,7 @@
5C8F01CD27A6F0D8007D2C8D /* CodeScanner in Frameworks */ = {isa = PBXBuildFile; productRef = 5C8F01CC27A6F0D8007D2C8D /* CodeScanner */; };
5C971E1D27AEBEF600C8A3CE /* ChatInfoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C971E1C27AEBEF600C8A3CE /* ChatInfoView.swift */; };
5C971E2127AEBF8300C8A3CE /* ChatInfoImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C971E2027AEBF8300C8A3CE /* ChatInfoImage.swift */; };
5C9D13A3282187BB00AB8B43 /* WebRTC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C9D13A2282187BB00AB8B43 /* WebRTC.swift */; };
5C9FD96B27A56D4D0075386C /* JSON.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C9FD96A27A56D4D0075386C /* JSON.swift */; };
5C9FD96E27A5D6ED0075386C /* SendMessageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5C9FD96D27A5D6ED0075386C /* SendMessageView.swift */; };
5CA059DC279559F40002BEB4 /* Tests_iOS.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5CA059DB279559F40002BEB4 /* Tests_iOS.swift */; };
@ -172,6 +173,7 @@
5C764E88279CBCB3000C6508 /* ChatModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatModel.swift; sourceTree = "<group>"; };
5C971E1C27AEBEF600C8A3CE /* ChatInfoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatInfoView.swift; sourceTree = "<group>"; };
5C971E2027AEBF8300C8A3CE /* ChatInfoImage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatInfoImage.swift; sourceTree = "<group>"; };
5C9D13A2282187BB00AB8B43 /* WebRTC.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTC.swift; sourceTree = "<group>"; };
5C9FD96A27A56D4D0075386C /* JSON.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = JSON.swift; sourceTree = "<group>"; };
5C9FD96D27A5D6ED0075386C /* SendMessageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SendMessageView.swift; sourceTree = "<group>"; };
5CA059C3279559F40002BEB4 /* SimpleXApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimpleXApp.swift; sourceTree = "<group>"; };
@ -265,6 +267,7 @@
isa = PBXGroup;
children = (
3C714776281C081000CB4D4B /* CallView.swift */,
5C9D13A2282187BB00AB8B43 /* WebRTC.swift */,
);
path = Call;
sourceTree = "<group>";
@ -654,6 +657,7 @@
5C063D2727A4564100AEC577 /* ChatPreviewView.swift in Sources */,
5C35CFCB27B2E91D00FB6C6D /* NtfManager.swift in Sources */,
3C8C548928133C84000A3EC7 /* PasteToConnectView.swift in Sources */,
5C9D13A3282187BB00AB8B43 /* WebRTC.swift in Sources */,
5C2E261227A30FEA00F70299 /* TerminalView.swift in Sources */,
5CDCAD7628188D3600503DA2 /* APITypes.swift in Sources */,
5C9FD96B27A56D4D0075386C /* JSON.swift in Sources */,
@ -1001,7 +1005,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_ENTITLEMENTS = "SimpleX NSE/SimpleX NSE.entitlements";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 39;
CURRENT_PROJECT_VERSION = 40;
DEVELOPMENT_TEAM = 5NN7GUYB6T;
ENABLE_BITCODE = NO;
GENERATE_INFOPLIST_FILE = YES;
@ -1022,7 +1026,7 @@
"$(inherited)",
"$(PROJECT_DIR)/Libraries/sim",
);
MARKETING_VERSION = 1.6;
MARKETING_VERSION = 1.7;
PRODUCT_BUNDLE_IDENTIFIER = "chat.simplex.app.SimpleX-NSE";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = iphoneos;
@ -1041,7 +1045,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_ENTITLEMENTS = "SimpleX NSE/SimpleX NSE.entitlements";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 39;
CURRENT_PROJECT_VERSION = 40;
DEVELOPMENT_TEAM = 5NN7GUYB6T;
ENABLE_BITCODE = NO;
GENERATE_INFOPLIST_FILE = YES;
@ -1062,7 +1066,7 @@
"$(inherited)",
"$(PROJECT_DIR)/Libraries/sim",
);
MARKETING_VERSION = 1.6;
MARKETING_VERSION = 1.7;
PRODUCT_BUNDLE_IDENTIFIER = "chat.simplex.app.SimpleX-NSE";
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = iphoneos;

View file

@ -0,0 +1 @@
dist

View file

@ -0,0 +1,6 @@
# WebView for WebRTC calls in SimpleX Chat
```
npm i
npm run build
```

View file

@ -0,0 +1,10 @@
#!/bin/sh
# it can be tested in the browser from dist folder
cp ./src/call.html ./dist/call.html
cp ./src/style.css ./dist/style.css
# copy to android app
cp ./src/call.html ../../apps/android/app/src/main/assets/www/call.html
cp ./src/style.css ../../apps/android/app/src/main/assets/www/style.css
cp ./dist/call.js ../../apps/android/app/src/main/assets/www/call.js

View file

@ -4,7 +4,8 @@
"description": "WebRTC call in browser and webview",
"main": "dist/call.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
"test": "echo \"Error: no test specified\" && exit 1",
"build": "prettier --write --ignore-unknown . && tsc && ./copy"
},
"keywords": [
"SimpleX",

View file

@ -3,73 +3,78 @@
// type WCallMessage = WCallCommand | WCallResponse
type WCallCommand = WCCapabilities | WCStartCall | WCAcceptOffer | WCEndCall | WCallCommandResponse
interface WebViewAPICall {
corrId: number
command: WCallCommand
}
type WCallResponse = WRCapabilities | WRConnection | WRCallEnded | WROk | WRError | WCallCommandResponse
// TODO remove WCallCommand from resp type
interface WebViewMessage {
corrId?: number
resp: WCallResponse | WCallCommand
}
type WCallCommandResponse = WCallOffer | WCallAnswer | WCallIceCandidates
type WCallCommand = WCCapabilities | WCStartCall | WCAcceptOffer | WCallAnswer | WCallIceCandidates | WCEndCall
type WCallMessageTag = "capabilities" | "connection" | "start" | "offer" | "accept" | "answer" | "ice" | "end" | "ended" | "ok" | "error"
type WCallResponse = WRCapabilities | WCallOffer | WCallAnswer | WCallIceCandidates | WRConnection | WRCallEnded | WROk | WRError
type WCallCommandTag = "capabilities" | "start" | "accept" | "answer" | "ice" | "end"
type WCallResponseTag = "capabilities" | "offer" | "answer" | "ice" | "connection" | "ended" | "ok" | "error"
enum CallMediaType {
Audio = "audio",
Video = "video",
}
interface IWebCallMessage {
type: WCallMessageTag
interface IWCallCommand {
type: WCallCommandTag
}
interface WCCapabilities extends IWebCallMessage {
interface IWCallResponse {
type: WCallResponseTag
}
interface WCCapabilities extends IWCallCommand {
type: "capabilities"
}
interface WRConnection extends IWebCallMessage {
type: "connection",
state: {
connectionState: string
iceConnectionState: string
iceGatheringState: string
signalingState: string
}
}
interface WCStartCall extends IWebCallMessage {
interface WCStartCall extends IWCallCommand {
type: "start"
media: CallMediaType
aesKey?: string
}
interface WCEndCall extends IWebCallMessage {
interface WCEndCall extends IWCallCommand {
type: "end"
}
interface WCAcceptOffer extends IWebCallMessage {
interface WCAcceptOffer extends IWCallCommand {
type: "accept"
offer: RTCSessionDescriptionInit
iceCandidates: RTCIceCandidateInit[]
offer: string // JSON string for RTCSessionDescriptionInit
iceCandidates: string[] // JSON strings for RTCIceCandidateInit
media: CallMediaType
aesKey?: string
}
interface WCallOffer extends IWebCallMessage {
interface WCallOffer extends IWCallResponse {
type: "offer"
offer: RTCSessionDescriptionInit
iceCandidates: RTCIceCandidateInit[]
offer: string // JSON string for RTCSessionDescriptionInit
iceCandidates: string[] // JSON strings for RTCIceCandidateInit
}
interface WCallAnswer extends IWebCallMessage {
interface WCallAnswer extends IWCallCommand, IWCallResponse {
type: "answer"
answer: RTCSessionDescriptionInit
iceCandidates: RTCIceCandidateInit[]
answer: string // JSON string for RTCSessionDescriptionInit
iceCandidates: string[] // JSON strings for RTCIceCandidateInit
}
interface WCallIceCandidates extends IWebCallMessage {
interface WCallIceCandidates extends IWCallCommand, IWCallResponse {
type: "ice"
iceCandidates: RTCIceCandidateInit[]
iceCandidates: string[] // JSON strings for RTCIceCandidateInit
}
interface WRCapabilities {
// Response to WCCapabilities.
interface WRCapabilities extends IWCallResponse {
type: "capabilities"
capabilities: CallCapabilities
}
@ -78,15 +83,25 @@ interface CallCapabilities {
encryption: boolean
}
interface WRCallEnded extends IWebCallMessage {
// Snapshot of RTCPeerConnection state, sent on every connectionstatechange.
interface WRConnection extends IWCallResponse {
type: "connection"
state: {
connectionState: string
iceConnectionState: string
iceGatheringState: string
signalingState: string
}
}
// Emitted when the connection becomes disconnected/failed and is torn down.
interface WRCallEnded extends IWCallResponse {
type: "ended"
}
interface WROk extends IWebCallMessage {
// Generic success response.
interface WROk extends IWCallResponse {
type: "ok"
}
interface WRError extends IWebCallMessage {
// Generic failure response with a human-readable message.
interface WRError extends IWCallResponse {
type: "error"
message: string
}
@ -103,16 +118,9 @@ type RTCConfigurationWithEncryption = RTCConfiguration & {
encodedInsertableStreams: boolean
}
// STUN servers
const peerConnectionConfig: RTCConfigurationWithEncryption = {
iceServers: [{urls: ["stun:stun.l.google.com:19302"]}],
iceCandidatePoolSize: 10,
encodedInsertableStreams: true,
}
const keyAlgorithm: AesKeyAlgorithm = {
name: "AES-GCM",
length: 256
length: 256,
}
const keyUsages: KeyUsage[] = ["encrypt", "decrypt"]
@ -129,10 +137,11 @@ const initialPlainTextRequired = {
interface Call {
connection: RTCPeerConnection
iceCandidates: Promise<RTCIceCandidate[]>
iceCandidates: Promise<string[]> // JSON strings for RTCIceCandidate
}
interface CallConfig {
peerConnectionConfig: RTCConfigurationWithEncryption
iceCandidates: {
delay: number
extrasInterval: number
@ -140,21 +149,28 @@ interface CallConfig {
}
}
const defaultCallConfig: CallConfig = {
iceCandidates: {
delay: 2000,
extrasInterval: 2000,
extrasTimeout: 8000
// Builds the configuration used for a new call.
// `encodedInsertableStreams` must be true when an AES key is in use, so the
// sender/receiver encoded streams can be transformed for E2E encryption.
function defaultCallConfig(encodedInsertableStreams: boolean): CallConfig {
  const peerConnectionConfig = {
    // public Google STUN server for ICE candidate discovery
    iceServers: [{urls: ["stun:stun.l.google.com:19302"]}],
    iceCandidatePoolSize: 10,
    encodedInsertableStreams,
  }
  // timings (ms) controlling how ICE candidates are batched and flushed
  const iceCandidates = {delay: 2000, extrasInterval: 2000, extrasTimeout: 8000}
  return {peerConnectionConfig, iceCandidates}
}
async function initializeCall(config: CallConfig, mediaType: CallMediaType, aesKey?: string): Promise<Call> {
const conn = new RTCPeerConnection(peerConnectionConfig)
const conn = new RTCPeerConnection(config.peerConnectionConfig)
const remoteStream = new MediaStream()
const localStream = await navigator.mediaDevices.getUserMedia(callMediaContraints(mediaType))
const localStream = await navigator.mediaDevices.getUserMedia(callMediaConstraints(mediaType))
await setUpMediaStreams(conn, localStream, remoteStream, aesKey)
conn.addEventListener("connectionstatechange", connectionStateChange)
const iceCandidates = new Promise<RTCIceCandidate[]>((resolve, _) => {
const iceCandidates = new Promise<string[]>((resolve, _) => {
let candidates: RTCIceCandidate[] = []
let resolved = false
let extrasInterval: number | undefined
@ -188,16 +204,16 @@ async function initializeCall(config: CallConfig, mediaType: CallMediaType, aesK
function resolveIceCandidates() {
if (delay) clearTimeout(delay)
resolved = true
const iceCandidates = candidates.slice()
const iceCandidates = candidates.map((c) => JSON.stringify(c))
candidates = []
resolve(iceCandidates)
}
}
function sendIceCandidates() {
if (candidates.length === 0) return
const iceCandidates = candidates.slice()
const iceCandidates = candidates.map((c) => JSON.stringify(c))
candidates = []
sendMessageToNative({type: "ice", iceCandidates})
sendMessageToNative({resp: {type: "ice", iceCandidates}})
}
})
@ -205,17 +221,19 @@ async function initializeCall(config: CallConfig, mediaType: CallMediaType, aesK
function connectionStateChange() {
sendMessageToNative({
type: "connection",
state: {
connectionState: conn.connectionState,
iceConnectionState: conn.iceConnectionState,
iceGatheringState: conn.iceGatheringState,
signalingState: conn.signalingState,
}
resp: {
type: "connection",
state: {
connectionState: conn.connectionState,
iceConnectionState: conn.iceConnectionState,
iceGatheringState: conn.iceGatheringState,
signalingState: conn.signalingState,
},
},
})
if (conn.connectionState == "disconnected" || conn.connectionState == "failed") {
conn.removeEventListener("connectionstatechange", connectionStateChange)
sendMessageToNative({type: "ended"})
sendMessageToNative({resp: {type: "ended"}})
conn.close()
pc = undefined
resetVideoElements()
@ -223,99 +241,101 @@ async function initializeCall(config: CallConfig, mediaType: CallMediaType, aesK
}
}
// TODO remove WCallCommand from parameter type
function sendMessageToNative(msg: WCallResponse | WCallCommand) {
// Relays a message to the native host: the webview's console output is
// bridged to the app, so logging the JSON-encoded envelope delivers it.
function sendMessageToNative(msg: WebViewMessage) {
  const encoded = JSON.stringify(msg)
  console.log(encoded)
}
// TODO remove WCallCommand from result type
async function processCommand(command: WCallCommand): Promise<WCallResponse | WCallCommand> {
async function processCommand(body: WebViewAPICall): Promise<WebViewMessage> {
const {command, corrId} = body
let resp: WCallResponse | WCallCommand
switch (command.type) {
case "capabilities":
const encryption = supportsInsertableStreams()
resp = {type: "capabilities", capabilities: {encryption}}
break
case "start":
console.log("starting call")
if (pc) {
resp = {type: "error", message: "start: call already started"}
} else if (!supportsInsertableStreams() && command.aesKey) {
resp = {type: "error", message: "start: encryption is not supported"}
} else {
try {
try {
switch (command.type) {
case "capabilities":
const encryption = supportsInsertableStreams()
resp = {type: "capabilities", capabilities: {encryption}}
break
case "start":
console.log("starting call")
if (pc) {
resp = {type: "error", message: "start: call already started"}
} else if (!supportsInsertableStreams() && command.aesKey) {
resp = {type: "error", message: "start: encryption is not supported"}
} else {
const {media, aesKey} = command
const call = await initializeCall(defaultCallConfig, media, aesKey)
const call = await initializeCall(defaultCallConfig(!!aesKey), media, aesKey)
const {connection, iceCandidates} = call
pc = connection
const offer = await pc.createOffer()
await pc.setLocalDescription(offer)
// for debugging, returning the command for callee to use
resp = {type: "accept", offer, iceCandidates: await iceCandidates, media, aesKey}
resp = {type: "accept", offer: JSON.stringify(offer), iceCandidates: await iceCandidates, media, aesKey}
// resp = {type: "offer", offer, iceCandidates: await iceCandidates}
} catch (e) {
resp = {type: "error", message: (e as Error).message}
}
}
break
case "accept":
if (pc) {
resp = {type: "error", message: "accept: call already started"}
} else if (!supportsInsertableStreams() && command.aesKey) {
resp = {type: "error", message: "accept: encryption is not supported"}
} else {
try {
const call = await initializeCall(defaultCallConfig, command.media, command.aesKey)
break
case "accept":
if (pc) {
resp = {type: "error", message: "accept: call already started"}
} else if (!supportsInsertableStreams() && command.aesKey) {
resp = {type: "error", message: "accept: encryption is not supported"}
} else {
const offer = JSON.parse(command.offer)
const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c))
const call = await initializeCall(defaultCallConfig(!!command.aesKey), command.media, command.aesKey)
const {connection, iceCandidates} = call
pc = connection
await pc.setRemoteDescription(new RTCSessionDescription(command.offer))
await pc.setRemoteDescription(new RTCSessionDescription(offer))
const answer = await pc.createAnswer()
await pc.setLocalDescription(answer)
addIceCandidates(pc, command.iceCandidates)
addIceCandidates(pc, remoteIceCandidates)
// same as command for caller to use
resp = {type: "answer", answer, iceCandidates: await iceCandidates}
} catch (e) {
resp = {type: "error", message: (e as Error).message}
resp = {type: "answer", answer: JSON.stringify(answer), iceCandidates: await iceCandidates}
}
}
break
case "answer":
if (!pc) {
resp = {type: "error", message: "answer: call not started"}
} else if (!pc.localDescription) {
resp = {type: "error", message: "answer: local description is not set"}
} else if (pc.currentRemoteDescription) {
resp = {type: "error", message: "answer: remote description already set"}
} else {
await pc.setRemoteDescription(new RTCSessionDescription(command.answer))
addIceCandidates(pc, command.iceCandidates)
resp = {type: "ok"}
}
break
case "ice":
if (pc) {
addIceCandidates(pc, command.iceCandidates)
resp = {type: "ok"}
} else {
resp = {type: "error", message: "ice: call not started"}
}
break
case "end":
if (pc) {
pc.close()
pc = undefined
resetVideoElements()
resp = {type: "ok"}
} else {
resp = {type: "error", message: "end: call not started"}
}
break
default:
resp = {type: "error", message: "unknown command"}
break
break
case "answer":
if (!pc) {
resp = {type: "error", message: "answer: call not started"}
} else if (!pc.localDescription) {
resp = {type: "error", message: "answer: local description is not set"}
} else if (pc.currentRemoteDescription) {
resp = {type: "error", message: "answer: remote description already set"}
} else {
const answer = JSON.parse(command.answer)
const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c))
await pc.setRemoteDescription(new RTCSessionDescription(answer))
addIceCandidates(pc, remoteIceCandidates)
resp = {type: "ok"}
}
break
case "ice":
if (pc) {
const remoteIceCandidates = command.iceCandidates.map((c) => JSON.parse(c))
addIceCandidates(pc, remoteIceCandidates)
resp = {type: "ok"}
} else {
resp = {type: "error", message: "ice: call not started"}
}
break
case "end":
if (pc) {
pc.close()
pc = undefined
resetVideoElements()
resp = {type: "ok"}
} else {
resp = {type: "error", message: "end: call not started"}
}
break
default:
resp = {type: "error", message: "unknown command"}
break
}
} catch (e) {
resp = {type: "error", message: (e as Error).message}
}
sendMessageToNative(resp)
return resp
const apiResp = {resp, corrId}
sendMessageToNative(apiResp)
return apiResp
}
function addIceCandidates(conn: RTCPeerConnection, iceCandidates: RTCIceCandidateInit[]) {
@ -324,7 +344,12 @@ function addIceCandidates(conn: RTCPeerConnection, iceCandidates: RTCIceCandidat
}
}
async function setUpMediaStreams(pc: RTCPeerConnection, localStream: MediaStream, remoteStream: MediaStream, aesKey?: string): Promise<void> {
async function setUpMediaStreams(
pc: RTCPeerConnection,
localStream: MediaStream,
remoteStream: MediaStream,
aesKey?: string
): Promise<void> {
const videos = getVideoElements()
if (!videos) throw Error("no video elements")
@ -384,7 +409,7 @@ async function setUpMediaStreams(pc: RTCPeerConnection, localStream: MediaStream
videos.remote.srcObject = remoteStream
}
function callMediaContraints(mediaType: CallMediaType): MediaStreamConstraints {
function callMediaConstraints(mediaType: CallMediaType): MediaStreamConstraints {
switch (mediaType) {
case CallMediaType.Audio:
return {audio: true, video: false}
@ -405,8 +430,7 @@ function callMediaContraints(mediaType: CallMediaType): MediaStreamConstraints {
}
function supportsInsertableStreams(): boolean {
return ("createEncodedStreams" in RTCRtpSender.prototype)
&& ("createEncodedStreams" in RTCRtpReceiver.prototype)
return "createEncodedStreams" in RTCRtpSender.prototype && "createEncodedStreams" in RTCRtpReceiver.prototype
}
interface VideoElements {
@ -445,13 +469,11 @@ function getVideoElements(): VideoElements | undefined {
// }
// }
// Leftover debug helper; not referenced by the call flow.
function f() {
  const result = "Debugging Return"
  console.log("Debug Function")
  return result
}
/* Stream Transforms */
function setupPeerTransform(peer: RTCRtpSenderWithEncryption | RTCRtpReceiverWithEncryption, transform: (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => void) {
function setupPeerTransform(
peer: RTCRtpSenderWithEncryption | RTCRtpReceiverWithEncryption,
transform: (frame: RTCEncodedVideoFrame, controller: TransformStreamDefaultController) => void
) {
const streams = peer.createEncodedStreams()
streams.readable.pipeThrough(new TransformStream({transform})).pipeTo(streams.writable)
}
@ -525,9 +547,7 @@ function decodeAscii(a: Uint8Array): string {
return s
}
const base64chars = new Uint8Array(
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("").map((c) => c.charCodeAt(0))
)
// Base64 alphabet as ASCII byte values.
const base64chars = Uint8Array.from("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/", (c) => c.charCodeAt(0))
// Reverse lookup: ASCII code -> 6-bit value (undefined for non-alphabet bytes).
const base64lookup: (number | undefined)[] = new Array(256)
base64chars.forEach((c, i) => (base64lookup[c] = i))

View file

@ -1,24 +1,25 @@
video::-webkit-media-controls {
display: none;
}
html, body {
padding: 0;
margin: 0;
html,
body {
padding: 0;
margin: 0;
}
#remote-video-stream {
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
position: absolute;
width: 100%;
height: 100%;
object-fit: cover;
}
#local-video-stream {
position: absolute;
width: 30%;
max-width: 30%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
bottom: 0;
right: 0;
position: absolute;
width: 30%;
max-width: 30%;
object-fit: cover;
margin: 16px;
border-radius: 16px;
bottom: 0;
right: 0;
}