//
// CallProcessor.swift
// NotificationServiceExtension
//
// Created by huangfeng on 2024/6/6.
// Copyright © 2024 Fin. All rights reserved.
//

import AudioToolbox
import AVFAudio
import Foundation
import UserNotifications

class CallProcessor: NotificationContentProcessor {
    /// Directory for notification sounds inside the shared App Group container, accessible to both the app and this extension
    let soundsDirectoryUrl = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: "group.bark")?.appendingPathComponent("Library/Sounds")
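
    /// If the push carries `call == "1"`, swap the notification sound for a 30-second version so the alert rings like an incoming call; otherwise the content passes through untouched.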
    func process(identifier: String, content bestAttemptContent: UNMutableNotificationContent) async throws -> UNMutableNotificationContent {
        guard let call = bestAttemptContent.userInfo["call"] as? String, call == "1" else {
            return bestAttemptContent
        }
        // Replace the sound with a 30s-long version
        return self.processNotificationSound(content: bestAttemptContent)
    }
}

// MARK: - Long notification sound

extension CallProcessor {
    // Replaces the notification sound with a 30s version; sounds shorter than 30s are looped until they reach 30s
    func processNotificationSound(content: UNMutableNotificationContent) -> UNMutableNotificationContent {
        // Parse the sound name and extension from the APNs payload, e.g. "alarm.caf" -> ("alarm", "caf")
        let sound = ((content.userInfo["aps"] as? [String: Any])?["sound"] as? String)?.split(separator: ".")
        let soundName: String
        let soundType: String
        if sound?.count == 2, let first = sound?.first, let last = sound?.last, last == "caf" {
            soundName = String(first)
            soundType = String(last)
        } else {
            // Fall back to the default call sound when no valid .caf sound is specified
            soundName = "multiwayinvitation"
            soundType = "caf"
        }
        if let longSoundUrl = getLongSound(soundName: soundName, soundType: soundType) {
            if let level = content.userInfo["level"] as? String, level == "critical" {
                // Critical alerts set their sound through the critical-sound path
                LevelProcessor.setCriticalSound(content: content, soundName: longSoundUrl.lastPathComponent)
            } else {
                content.sound = UNNotificationSound(named: UNNotificationSoundName(rawValue: longSoundUrl.lastPathComponent))
            }
        }
        return content
    }
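
    /// Returns the URL of a 30s version of the given sound, generating and caching it in the shared sounds directory if it doesn't exist yet.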
    func getLongSound(soundName: String, soundType: String) -> URL? {
        guard let soundsDirectoryUrl else {
            return nil
        }
        // Create the shared sounds directory if it doesn't exist yet
        if !FileManager.default.fileExists(atPath: soundsDirectoryUrl.path) {
            try? FileManager.default.createDirectory(atPath: soundsDirectoryUrl.path, withIntermediateDirectories: true, attributes: nil)
        }
        // Reuse the 30s version if it has already been generated
        let longSoundName = "\(kBarkSoundPrefix).\(soundName).\(soundType)"
        let longSoundPath = soundsDirectoryUrl.appendingPathComponent(longSoundName)
        if FileManager.default.fileExists(atPath: longSoundPath.path) {
            return longSoundPath
        }
        // Look for the source sound in the shared sounds directory (user-imported sounds)
        var path: String = soundsDirectoryUrl.appendingPathComponent("\(soundName).\(soundType)").path
        if !FileManager.default.fileExists(atPath: path) {
            // Otherwise fall back to the sound bundled with the app
            path = Bundle.main.path(forResource: soundName, ofType: soundType) ?? ""
        }
        guard !path.isEmpty else {
            return nil
        }
        // Loop the source sound up to 30s and return the generated file
        return mergeCAFFilesToDuration(inputFile: URL(fileURLWithPath: path))
    }

    /// - Author: @uuneo
    /// - Description: Loops the input CAF file until it reaches the target duration and writes the result to the shared sounds directory
    /// - Parameters:
    ///   - inputFile: URL of the source CAF file
    ///   - targetDuration: Target length in seconds, defaults to 30s
    /// - Returns: URL of the generated long sound file, or nil if generation fails
    func mergeCAFFilesToDuration(inputFile: URL, targetDuration: TimeInterval = 30) -> URL? {
        guard let soundsDirectoryUrl else {
            return nil
        }
        let longSoundName = "\(kBarkSoundPrefix).\(inputFile.lastPathComponent)"
        let longSoundPath = soundsDirectoryUrl.appendingPathComponent(longSoundName)
        do {
            // Open the source audio file
            let audioFile = try AVAudioFile(forReading: inputFile)
            let audioFormat = audioFile.processingFormat
            let sampleRate = audioFormat.sampleRate
            // Total number of frames needed for the target duration
            let targetFrames = AVAudioFramePosition(targetDuration * sampleRate)
            var currentFrames: AVAudioFramePosition = 0
            // Output file that will hold the looped audio
            let outputAudioFile = try AVAudioFile(forWriting: longSoundPath, settings: audioFormat.settings)
            // Keep appending the source audio until the target length is reached
            while currentFrames < targetFrames {
                // Buffer large enough to hold the entire source file
                guard let buffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: AVAudioFrameCount(audioFile.length)) else {
                    // Buffer allocation failed
                    return nil
                }
                try audioFile.read(into: buffer)
                // Frames still missing from the output
                let remainingFrames = targetFrames - currentFrames
                if AVAudioFramePosition(buffer.frameLength) > remainingFrames {
                    // Last pass: copy only the remaining frames (assumes non-interleaved float PCM)
                    let truncatedBuffer = AVAudioPCMBuffer(pcmFormat: buffer.format, frameCapacity: AVAudioFrameCount(remainingFrames))!
                    let channelCount = Int(buffer.format.channelCount)
                    for channel in 0..<channelCount {
                        let sourcePointer = buffer.floatChannelData![channel]
                        let destinationPointer = truncatedBuffer.floatChannelData![channel]
                        memcpy(destinationPointer, sourcePointer, Int(remainingFrames) * MemoryLayout<Float>.size)
                    }
                    truncatedBuffer.frameLength = AVAudioFrameCount(remainingFrames)
                    try outputAudioFile.write(from: truncatedBuffer)
                    break
                } else {
                    // Write the whole buffer and advance the frame counter
                    try outputAudioFile.write(from: buffer)
                    currentFrames += AVAudioFramePosition(buffer.frameLength)
                }
                // Rewind so the next iteration reads the source file from the beginning again
                audioFile.framePosition = 0
            }
            return longSoundPath
        } catch {
            print("Error processing CAF file: \(error)")
            return nil
        }
    }
}
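
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of this file): how a notification
// service extension could run this processor. The `NotificationService` class
// and the single-processor pipeline below are assumptions; only the
// `NotificationContentProcessor.process(identifier:content:)` call mirrors
// the API defined above.
//
// class NotificationService: UNNotificationServiceExtension {
//     override func didReceive(
//         _ request: UNNotificationRequest,
//         withContentHandler contentHandler: @escaping (UNNotificationContent) -> Void
//     ) {
//         guard let content = request.content.mutableCopy() as? UNMutableNotificationContent else {
//             contentHandler(request.content)
//             return
//         }
//         Task {
//             // CallProcessor only rewrites the sound when userInfo["call"] == "1"
//             let processed = (try? await CallProcessor().process(identifier: request.identifier, content: content)) ?? content
//             contentHandler(processed)
//         }
//     }
// }
// ---------------------------------------------------------------------------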