ios: sound prompts and vibration during calls (#4005)

* ios: sound prompts and vibration

* awaiting call receipt

* update

---------

Co-authored-by: Evgeny Poberezkin <evgeny@poberezkin.com>
Stanislav Dmitrenko 2024-04-11 18:02:41 +07:00 committed by GitHub
parent d8b52ee0d3
commit e560b49d14
5 changed files with 104 additions and 0 deletions

@@ -1469,6 +1469,8 @@ class ChatReceiver {
    private var receiveMessages = true
    private var _lastMsgTime = Date.now
    var messagesChannel: ((ChatResponse) -> Void)? = nil

    static let shared = ChatReceiver()

    var lastMsgTime: Date { get { _lastMsgTime } }
@@ -1486,6 +1488,9 @@ class ChatReceiver {
            if let msg = await chatRecvMsg() {
                self._lastMsgTime = .now
                await processReceivedMsg(msg)
                if let messagesChannel {
                    messagesChannel(msg)
                }
            }
            _ = try? await Task.sleep(nanoseconds: 7_500_000)
        }
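The new messagesChannel hands every ChatResponse to a single optional observer after normal processing; a minimal subscription sketch (the handler body is hypothetical, the real consumer is activeCallWaitDeliveryReceipt in the view changes further down):

    // Sketch: a one-shot observer of chat responses (hypothetical handler).
    ChatReceiver.shared.messagesChannel = { msg in
        if case .chatItemStatusUpdated = msg {
            // react to the status update, then unsubscribe
            ChatReceiver.shared.messagesChannel = nil
        }
    }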

@@ -9,6 +9,7 @@
import SwiftUI
import WebKit
import SimpleXChat
import AVFoundation

struct ActiveCallView: View {
    @EnvironmentObject var m: ChatModel
@@ -21,6 +22,7 @@ struct ActiveCallView: View {
    @Binding var canConnectCall: Bool
    @State var prevColorScheme: ColorScheme = .dark
    @State var pipShown = false
    @State var wasConnected = false

    var body: some View {
        ZStack(alignment: .topLeading) {
@@ -69,6 +71,11 @@ struct ActiveCallView: View {
            Task { await m.callCommand.setClient(nil) }
            AppDelegate.keepScreenOn(false)
            client?.endCall()
            CallSoundsPlayer.shared.stop()
            try? AVAudioSession.sharedInstance().setCategory(.soloAmbient)
            if wasConnected {
                CallSoundsPlayer.shared.vibrate(long: true)
            }
        }
        .background(m.activeCallViewIsCollapsed ? .clear : .black)
        // Changing the color scheme globally this way causes quite a big delay when opening/closing the view. It's not needed when CallKit is used, since the status bar is green with white text on it.
@@ -103,6 +110,11 @@ struct ActiveCallView: View {
                    call.callState = .invitationSent
                    call.localCapabilities = capabilities
                }
                if call.supportsVideo {
                    try? AVAudioSession.sharedInstance().setCategory(.playback, options: .defaultToSpeaker)
                }
                CallSoundsPlayer.shared.startConnectingCallSound()
                activeCallWaitDeliveryReceipt()
            }
        case let .offer(offer, iceCandidates, capabilities):
            Task {
@@ -126,6 +138,8 @@ struct ActiveCallView: View {
                }
                await MainActor.run {
                    call.callState = .negotiated
                    CallSoundsPlayer.shared.stop()
                    try? AVAudioSession.sharedInstance().setCategory(.soloAmbient)
                }
            }
        case let .ice(iceCandidates):
@@ -144,6 +158,10 @@ struct ActiveCallView: View {
                    : CallController.shared.reportIncomingCall(call: call, connectedAt: nil)
                call.callState = .connected
                call.connectedAt = .now
                if !wasConnected {
                    CallSoundsPlayer.shared.vibrate(long: false)
                    wasConnected = true
                }
            }
            if state.connectionState == "closed" {
                closeCallView(client)
@@ -161,6 +179,10 @@ struct ActiveCallView: View {
            call.callState = .connected
            call.connectionInfo = connectionInfo
            call.connectedAt = .now
            if !wasConnected {
                CallSoundsPlayer.shared.vibrate(long: false)
                wasConnected = true
            }
        case .ended:
            closeCallView(client)
            call.callState = .ended
@@ -187,6 +209,22 @@ struct ActiveCallView: View {
        }
    }

    private func activeCallWaitDeliveryReceipt() {
        ChatReceiver.shared.messagesChannel = { msg in
            guard let call = ChatModel.shared.activeCall, call.callState == .invitationSent else {
                ChatReceiver.shared.messagesChannel = nil
                return
            }
            // start the ringing tone once the call invitation item sent to this contact is marked as received
            if case let .chatItemStatusUpdated(_, msg) = msg,
               msg.chatInfo.id == call.contact.id,
               case .sndCall = msg.chatItem.content,
               case .sndRcvd = msg.chatItem.meta.itemStatus {
                CallSoundsPlayer.shared.startInCallSound()
                ChatReceiver.shared.messagesChannel = nil
            }
        }
    }

    private func closeCallView(_ client: WebRTCClient) {
        if m.activeCall != nil {
            m.showCallView = false
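Taken together, the view changes above map call progress to sound and haptic cues; a compact summary sketch (CallCue and playCue are illustrative names, not part of the diff; CallSoundsPlayer is the class added below):

    // Sketch of the cue sequence implied by the hunks above; names are illustrative.
    enum CallCue {
        case invitationSent      // .capabilities handled, call invitation sent
        case invitationDelivered // .sndRcvd receipt observed for the sent call item
        case negotiated          // answer processed, connection being established
        case mediaConnected      // call connected
        case closedAfterConnect  // call view dismissed after a connected call
    }

    func playCue(_ cue: CallCue) {
        switch cue {
        case .invitationSent:      CallSoundsPlayer.shared.startConnectingCallSound()
        case .invitationDelivered: CallSoundsPlayer.shared.startInCallSound()
        case .negotiated:          CallSoundsPlayer.shared.stop()
        case .mediaConnected:      CallSoundsPlayer.shared.vibrate(long: false)
        case .closedAfterConnect:
            CallSoundsPlayer.shared.stop()
            CallSoundsPlayer.shared.vibrate(long: true)
        }
    }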

@@ -8,6 +8,7 @@
import Foundation
import AVFoundation
import UIKit

class SoundPlayer {
    static let shared = SoundPlayer()
@@ -43,3 +44,63 @@ class SoundPlayer {
        audioPlayer = nil
    }
}
class CallSoundsPlayer {
    static let shared = CallSoundsPlayer()
    private var audioPlayer: AVAudioPlayer?
    private var playerTask: Task = Task {}

    private func start(_ soundName: String, delayMs: Double) {
        audioPlayer?.stop()
        playerTask.cancel()
        logger.debug("start \(soundName)")
        guard let path = Bundle.main.path(forResource: soundName, ofType: "mp3", inDirectory: "sounds") else {
            logger.debug("start: file not found")
            return
        }
        do {
            let player = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: path))
            if player.prepareToPlay() {
                audioPlayer = player
            }
        } catch {
            logger.debug("start: AVAudioPlayer error \(error.localizedDescription)")
        }
        // replay the clip in a loop, waiting for its duration plus delayMs between repetitions
        playerTask = Task {
            while let player = audioPlayer {
                player.play()
                do {
                    try await Task.sleep(nanoseconds: UInt64((player.duration * 1_000_000_000) + delayMs * 1_000_000))
                } catch {
                    break
                }
            }
        }
    }

    func startConnectingCallSound() {
        start("connecting_call", delayMs: 0)
    }

    func startInCallSound() {
        // Taken from https://github.com/TelegramOrg/Telegram-Android
        // https://github.com/TelegramOrg/Telegram-Android/blob/master/LICENSE
        start("in_call", delayMs: 1000)
    }

    func stop() {
        playerTask.cancel()
        audioPlayer?.stop()
        audioPlayer = nil
    }

    func vibrate(long: Bool) {
        // iOS just doesn't want to vibrate more than once within a short period of time, and all feedback 'styles' feel the same
        if long {
            AudioServicesPlayAlertSound(kSystemSoundID_Vibrate)
        } else {
            UIImpactFeedbackGenerator(style: .heavy).impactOccurred()
        }
    }
}
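The playerTask loop replays the clip with a gap of delayMs between repetitions, so startInCallSound() produces a ringback-style cadence; a small illustration of the computed sleep, assuming a hypothetical 2-second in_call.mp3:

    // Illustration only: the nanoseconds computed in start(_:delayMs:) for an assumed 2 s clip.
    let duration = 2.0                                                 // player.duration in seconds (assumed)
    let delayMs = 1000.0                                               // gap passed by startInCallSound()
    let ns = UInt64((duration * 1_000_000_000) + delayMs * 1_000_000)  // 3_000_000_000 ns
    // i.e. play() fires again 3 seconds after the previous call: 2 s of tone, then 1 s of silence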

BIN (binary file not shown)

BIN apps/ios/sounds/in_call.mp3 Normal file (binary file not shown)