还原 CallProcessor 代码

This commit is contained in:
Fin 2024-11-29 18:05:57 +08:00
parent e8e079fc2f
commit 11b00b8dea
2 changed files with 128 additions and 102 deletions

View File

@ -142,6 +142,9 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
}
/// Decides how a notification is presented while the app is in the foreground.
/// If the app is active there is no need for the extension's looping call
/// sound, so the processor is told to stop before the banner is shown.
func userNotificationCenter(_ center: UNUserNotificationCenter, willPresent notification: UNNotification) async -> UNNotificationPresentationOptions {
    guard UIApplication.shared.applicationState == .active else { return .alert }
    stopCallNotificationProcessor()
    return .alert
}
@ -226,6 +229,11 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
UIApplication.shared.applicationIconBadgeNumber = -1
}
/// Called when the app moves to the active state.
func applicationDidBecomeActive(_ application: UIApplication) {
// The user is now looking at the app, so ask the notification service
// extension (separate process) to stop any looping call sound.
stopCallNotificationProcessor()
}
/// Intentionally empty; kept as a placeholder for termination cleanup.
func applicationWillTerminate(_ application: UIApplication) {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}
@ -248,4 +256,9 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
}
return false
}
/// Broadcasts the "stop call processor" Darwin notification so the
/// notification service extension (running in another process) halts the
/// looping call sound. Darwin notifications carry no payload; the name
/// alone is the signal.
func stopCallNotificationProcessor() {
    let darwinCenter = CFNotificationCenterGetDarwinNotifyCenter()
    let name = CFNotificationName(kStopCallProcessorKey as CFString)
    CFNotificationCenterPostNotification(darwinCenter, name, nil, nil, true)
}
}

View File

@ -7,33 +7,82 @@
//
import AudioToolbox
import AVFAudio
import Foundation
// Name prefix for pre-merged 30-second sound files in the shared Sounds
// directory (referenced by the long-sound merge path further below).
let kBarkSoundPrefix = "bark.sounds.30s"
class CallProcessor: NotificationContentProcessor {
/// Shared App-Group "Sounds" directory (accessible from both the main app
/// and this extension); `nil` if the group container is unavailable.
let soundsDirectoryUrl = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: "group.bark")?.appendingPathComponent("Sounds")
/// Handle of the system sound currently being played.
var soundID: SystemSoundID = 0
/// The notification content being processed; kept so it can still be
/// delivered if the extension is about to expire.
var content: UNMutableNotificationContent? = nil
/// Set from a Darwin notification when the main app asks playback to stop.
var needsStop = false
/// Handles a "call"-style push (`userInfo["call"] == "1"`): re-posts a local
/// copy of the notification, demotes the remote copy, and loops the call
/// sound until a stop signal arrives or playback finishes.
/// - Parameters:
///   - identifier: Request identifier, reused for the local notification copy.
///   - bestAttemptContent: Mutable content handed in by the service extension.
/// - Returns: The content to deliver back to the system.
func process(identifier: String, content bestAttemptContent: UNMutableNotificationContent) async throws -> UNMutableNotificationContent {
    // Only pushes explicitly marked as calls get the looping-sound treatment.
    guard let call = bestAttemptContent.userInfo["call"] as? String, call == "1" else {
        return bestAttemptContent
    }
    // NOTE(review): an unreachable `return self.processNotificationSound(...)`
    // statement (a leftover of the replaced implementation shown by the diff)
    // was removed here; everything below it was dead code.
    self.content = bestAttemptContent
    self.registerObserver()
    self.sendLocalNotification(identifier: identifier, content: bestAttemptContent)
    self.cancelRemoteNotification(content: bestAttemptContent)
    // Suspends until the sound loop completes or the stop signal is observed.
    await startAudioWork()
    return bestAttemptContent
}
/// Called when the system is about to terminate the service extension.
/// Stops the looping sound and delivers whatever content is pending.
func serviceExtensionTimeWillExpire(contentHandler: (UNNotificationContent) -> Void) {
    stopAudioWork()
    guard let pending = content else { return }
    contentHandler(pending)
}
/// Re-posts the push as a local notification under the same identifier so
/// the locally-posted copy supersedes the APNs one.
private func sendLocalNotification(identifier: String, content: UNMutableNotificationContent) {
    guard let localContent = content.mutableCopy() as? UNMutableNotificationContent else { return }
    // Mute non-critical copies; the call sound is played manually by this
    // processor rather than by the notification itself.
    if !localContent.isCritical {
        localContent.sound = nil
    }
    let request = UNNotificationRequest(identifier: identifier, content: localContent, trigger: nil)
    UNUserNotificationCenter.current().add(request)
}
/// Demotes the remote notification so only the local copy is prominent.
/// NOTE(review): the `content` parameter is unused — the method mutates
/// `self.content` instead. At the call site both refer to the same object,
/// but consider operating on the parameter directly.
private func cancelRemoteNotification(content: UNMutableNotificationContent) {
// iOS 15 introduced interruption levels, which control how prominently
// the system presents a notification.
// When the push did not specify its own "level", drop the remote copy to
// `.passive` so it arrives silently (the local copy carries the alert).
if #available(iOSApplicationExtension 15.0, *), self.content?.userInfo["level"] == nil {
self.content?.interruptionLevel = .passive
}
}
// Async wrapper around the callback-based startAudioWork(completion:).
/// Plays the looping call sound and suspends until the completion fires.
/// NOTE(review): the checked continuation is resumed exactly once only if
/// the stored completion fires exactly once — confirm a stop signal cannot
/// race the sound-finished callback into a double invocation.
private func startAudioWork() async {
return await withCheckedContinuation { continuation in
self.startAudioWork {
continuation.resume()
}
}
}
}
// MARK: - Audio playback
extension CallProcessor {
// NOTE(review): the next declaration is an orphaned header from the removed
// long-sound implementation; the diff view interleaves old and new lines here.
func processNotificationSound(content: UNMutableNotificationContent) -> UNMutableNotificationContent {
/// Completion stored so the sound-finished C callback can signal the
/// awaiting async wrapper.
var startAudioWorkCompletion: (() -> Void)? = nil
/// Plays the call sound on repeat until `needsStop` is set, then invokes
/// `completion`. Returns immediately if there is no pending content.
private func startAudioWork(completion: @escaping () -> Void) {
guard let content else {
completion()
return
}
self.startAudioWorkCompletion = completion
// Sound name comes from the APNs payload, e.g. "call.caf" -> ["call", "caf"].
let sound = ((content.userInfo["aps"] as? [String: Any])?["sound"] as? String)?.split(separator: ".")
let soundName: String
let soundType: String
// NOTE(review): the first `if` line below is the stale pre-revert condition
// (extra `last == "caf"` check); the second line is its replacement. The
// diff also omits the else-branch's soundName assignment between hunks.
if sound?.count == 2, let first = sound?.first, let last = sound?.last, last == "caf" {
if sound?.count == 2, let first = sound?.first, let last = sound?.last {
soundName = String(first)
soundType = String(last)
} else {
@ -41,101 +90,65 @@ extension CallProcessor {
soundType = "caf"
}
// NOTE(review): the `if let longSoundUrl` / `return content` lines below
// are leftovers of the removed long-sound path shown by the diff view.
if let longSoundUrl = getLongSound(soundName: soundName, soundType: soundType) {
content.sound = UNNotificationSound(named: UNNotificationSoundName(rawValue: longSoundUrl.lastPathComponent))
// Resolve the audio file: shared custom Sounds directory first, then bundle.
guard let audioPath = getSoundInCustomSoundsDirectory(soundName: "\(soundName).\(soundType)") ??
Bundle.main.path(forResource: soundName, ofType: soundType)
else {
completion()
return
}
return content
let fileUrl = URL(string: audioPath)
// NOTE(review): prefer URL(fileURLWithPath:) over URL(string:) plus a
// force-unwrap for a file-system path.
AudioServicesCreateSystemSoundID(fileUrl! as CFURL, &soundID)
// Kick off the first playback.
AudioServicesPlayAlertSound(soundID)
// Unretained raw pointer to self for the C callback; the instance must
// outlive playback (the observer/completion are torn down elsewhere).
let selfPointer = unsafeBitCast(self, to: UnsafeMutableRawPointer.self)
AudioServicesAddSystemSoundCompletion(soundID, nil, nil, { sound, clientData in
guard let pointer = clientData else { return }
let processor = unsafeBitCast(pointer, to: CallProcessor.self)
if processor.needsStop {
processor.startAudioWorkCompletion?()
return
}
// Not asked to stop yet: replay so the call sound keeps looping.
AudioServicesPlayAlertSound(sound)
}, selfPointer)
}
/// Tears down playback. The completion hook is removed first so the replay
/// callback cannot fire again, then the sound resource is released.
private func stopAudioWork() {
AudioServicesRemoveSystemSoundCompletion(soundID)
AudioServicesDisposeSystemSoundID(soundID)
}
// NOTE(review): the next two lines are orphaned remnants of the removed
// getLongSound(soundName:soundType:) helper, interleaved by the diff view.
func getLongSound(soundName: String, soundType: String) -> URL? {
guard let soundsDirectoryUrl else {
/// Subscribes to the Darwin notification posted by the main app; the
/// callback flips `needsStop` so the playback loop ends on its next cycle.
func registerObserver() {
let notification = CFNotificationCenterGetDarwinNotifyCenter()
// Unretained raw pointer to self; the matching removal happens in deinit.
let observer = Unmanaged.passUnretained(self).toOpaque()
CFNotificationCenterAddObserver(notification, observer, { _, pointer, _, _, _ in
guard let observer = pointer else { return }
let processor = Unmanaged<CallProcessor>.fromOpaque(observer).takeUnretainedValue()
processor.needsStop = true
}, kStopCallProcessorKey as CFString, nil, .deliverImmediately)
}
/// Looks up a sound file in the App-Group shared "Sounds" directory.
/// NOTE(review): interleaved deleted-diff lines (referencing `soundType`,
/// `longSoundPath`, etc., which are not in scope here) were removed; this is
/// the clean post-commit version of the function.
/// - Parameter soundName: Full file name including extension (e.g. "call.caf").
/// - Returns: The file-system path if the file exists, otherwise `nil`.
func getSoundInCustomSoundsDirectory(soundName: String) -> String? {
    // The Sounds folder lives in the app-group container shared with the main app.
    guard let soundsDirectoryUrl = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: "group.bark")?.appendingPathComponent("Sounds") else {
        return nil
    }
    let path = soundsDirectoryUrl.appendingPathComponent(soundName).path
    return FileManager.default.fileExists(atPath: path) ? path : nil
}
/// Repeats a short CAF file until the output reaches `targetDuration`,
/// writing the long version into the shared Sounds directory.
/// NOTE(review): this belongs to the long-sound implementation this commit
/// removes; the diff rendering also drops the function's closing brace (see
/// the note at the end of the block).
/// - Author: @uuneo
/// - Parameters:
///   - inputFile: Source audio file to repeat.
///   - targetDuration: Desired output length in seconds (default 30).
/// - Returns: URL of the merged file, or `nil` on any failure.
func mergeCAFFilesToDuration(inputFile: URL, targetDuration: TimeInterval = 30) -> URL? {
guard let soundsDirectoryUrl else {
return nil
}
// Output name: prefix + original file name, e.g. "bark.sounds.30s.call.caf".
let longSoundName = "\(kBarkSoundPrefix).\(inputFile.lastPathComponent)"
let longSoundPath = soundsDirectoryUrl.appendingPathComponent(longSoundName)
do {
// Open the source and mirror its processing format for the output.
let audioFile = try AVAudioFile(forReading: inputFile)
let audioFormat = audioFile.processingFormat
let sampleRate = audioFormat.sampleRate
// Frame budget equivalent to the requested duration.
let targetFrames = AVAudioFramePosition(targetDuration * sampleRate)
var currentFrames: AVAudioFramePosition = 0
// Destination file in the shared Sounds directory.
let outputAudioFile = try AVAudioFile(forWriting: longSoundPath, settings: audioFormat.settings)
// Keep appending full copies of the source until the budget is met.
while currentFrames < targetFrames {
// Buffer sized to hold the entire source file.
guard let buffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: AVAudioFrameCount(audioFile.length)) else {
// Allocation failed; bail out (a partially written file may remain).
return nil
}
try audioFile.read(into: buffer)
// Frames still needed to reach the target.
let remainingFrames = targetFrames - currentFrames
if AVAudioFramePosition(buffer.frameLength) > remainingFrames {
// Final partial copy: truncate the buffer to exactly the frames left.
// NOTE(review): assumes non-interleaved float PCM (floatChannelData!);
// confirm the source CAF decodes to float, or the force-unwrap traps.
let truncatedBuffer = AVAudioPCMBuffer(pcmFormat: buffer.format, frameCapacity: AVAudioFrameCount(remainingFrames))!
let channelCount = Int(buffer.format.channelCount)
for channel in 0..<channelCount {
let sourcePointer = buffer.floatChannelData![channel]
let destinationPointer = truncatedBuffer.floatChannelData![channel]
memcpy(destinationPointer, sourcePointer, Int(remainingFrames) * MemoryLayout<Float>.size)
}
truncatedBuffer.frameLength = AVAudioFrameCount(remainingFrames)
try outputAudioFile.write(from: truncatedBuffer)
break
} else {
// Whole copy fits; write it and advance the running total.
try outputAudioFile.write(from: buffer)
currentFrames += AVAudioFramePosition(buffer.frameLength)
}
// Rewind so the next iteration re-reads the source from the start.
audioFile.framePosition = 0
}
return longSoundPath
} catch {
print("Error processing CAF file: \(error)")
return nil
}
// NOTE(review): the function's closing brace is missing here — cut off by
// the diff rendering before the `deinit` that follows.
/// Unregisters this instance from the Darwin notification center so the
/// stop-signal callback can no longer reference a freed object.
deinit {
    CFNotificationCenterRemoveObserver(
        CFNotificationCenterGetDarwinNotifyCenter(),
        Unmanaged.passUnretained(self).toOpaque(),
        CFNotificationName(kStopCallProcessorKey as CFString),
        nil
    )
}
}