Skip to content

Commit

Permalink
whisper.swiftui : model list & bench methods
Browse files Browse the repository at this point in the history
  • Loading branch information
jhen0409 committed Nov 10, 2024
1 parent f78aea7 commit a0bd84a
Show file tree
Hide file tree
Showing 6 changed files with 356 additions and 15 deletions.
74 changes: 74 additions & 0 deletions examples/whisper.swiftui/whisper.cpp.swift/LibWhisper.swift
Original file line number Diff line number Diff line change
Expand Up @@ -55,11 +55,85 @@ actor WhisperContext {
return transcription
}

/// Builds a space-separated list of the CPU/GPU features ggml was compiled with
/// (NEON / METAL / BLAS), for display in the benchmark table's "Config" column.
func system_info() -> String {
    var features: [String] = []
    if ggml_cpu_has_neon()  != 0 { features.append("NEON") }
    if ggml_cpu_has_metal() != 0 { features.append("METAL") }
    if ggml_cpu_has_blas()  != 0 { features.append("BLAS") }
    return features.joined(separator: " ")
}

/// Runs the whisper.cpp micro-benchmark on the currently loaded model:
/// one encoder pass, 256 single-token decodes, 64 batch-of-5 decodes and
/// 16 prompt (256-token) decodes, after warm-up passes.
/// - Parameter modelName: Display name placed in the "Model" column of the row.
/// - Returns: One markdown table row matching the whisper.cpp bench format,
///            or an "error: ..." string when a whisper call fails.
func bench_full(modelName: String) async -> String {
    let n_threads = Int32(min(4, cpuCount())) // Default in whisper.cpp

    // Install an empty mel spectrogram (nil data, length 0) so the encoder
    // has valid input state to time.
    let n_mels = whisper_model_n_mels(context)
    if (whisper_set_mel(context, nil, 0, n_mels) != 0) {
        return "error: failed to set mel"
    }

    // heat encoder
    if (whisper_encode(context, 0, n_threads) != 0) {
        return "error: failed to encode"
    }

    var tokens = [whisper_token](repeating: 0, count: 512)

    // prompt heat: one 256-token decode at position 0
    if (whisper_decode(context, &tokens, 256, 0, n_threads) != 0) {
        return "error: failed to decode"
    }

    // text-generation heat: one single-token decode at position 256
    if (whisper_decode(context, &tokens, 1, 256, n_threads) != 0) {
        return "error: failed to decode"
    }

    // Warm-up done — reset timings so only the runs below are measured.
    whisper_reset_timings(context)

    // actual run
    if (whisper_encode(context, 0, n_threads) != 0) {
        return "error: failed to encode"
    }

    // text-generation: 256 single-token decodes at increasing positions
    for i in 0..<256 {
        if (whisper_decode(context, &tokens, 1, Int32(i), n_threads) != 0) {
            return "error: failed to decode"
        }
    }

    // batched decoding: 64 decodes of 5 tokens (index unused -> `_`,
    // avoids the "variable 'i' was never used" warning)
    for _ in 0..<64 {
        if (whisper_decode(context, &tokens, 5, 0, n_threads) != 0) {
            return "error: failed to decode"
        }
    }

    // prompt processing: 16 decodes of a 256-token prompt (index unused)
    for _ in 0..<16 {
        if (whisper_decode(context, &tokens, 256, 0, n_threads) != 0) {
            return "error: failed to decode"
        }
    }

    whisper_print_timings(context)

    let system_info = self.system_info()
    let timings: whisper_timings = whisper_get_timings(context)
    let encode_ms = String(format: "%.2f", timings.encode_ms)
    let decode_ms = String(format: "%.2f", timings.decode_ms)
    let batchd_ms = String(format: "%.2f", timings.batchd_ms)
    let prompt_ms = String(format: "%.2f", timings.prompt_ms)
    // "<todo>" placeholders (device name, commit hash) are filled in by hand
    // when pasting the row into the shared results table.
    return "| <todo> | iOS | \(system_info) | \(modelName) | \(n_threads) | 1 | \(encode_ms) | \(decode_ms) | \(batchd_ms) | \(prompt_ms) | <todo> |"
}

static func createContext(path: String) throws -> WhisperContext {
var params = whisper_context_default_params()
#if targetEnvironment(simulator)
params.use_gpu = false
print("Running on the simulator, using CPU")
#else
params.flash_attn = true // Enabled by default for Metal
#endif
let context = whisper_init_from_file_with_params(path, params)
if let context {
Expand Down
17 changes: 17 additions & 0 deletions examples/whisper.swiftui/whisper.swiftui.demo/Models/Model.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import Foundation

/// A downloadable whisper model entry shown in the settings list.
/// Property order is preserved so the synthesized memberwise initializer
/// keeps its existing signature.
struct Model: Identifiable {
    var id = UUID()
    var name: String      // display name, e.g. "tiny-q5_1"
    var info: String      // human-readable size/precision hint
    var url: String       // remote download URL

    var filename: String  // on-disk file name inside Documents

    /// Destination of the model file in the app's Documents directory.
    var fileURL: URL {
        let documents = FileManager.default.urls(for: .documentDirectory,
                                                 in: .userDomainMask)[0]
        return documents.appendingPathComponent(filename)
    }

    /// True when the model file has already been downloaded.
    func fileExists() -> Bool {
        FileManager.default.fileExists(atPath: fileURL.path)
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import Foundation
import SwiftUI
import AVFoundation


@MainActor
class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
@Published var isModelLoaded = false
Expand All @@ -14,7 +15,7 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {
private var recordedFile: URL? = nil
private var audioPlayer: AVAudioPlayer?

private var modelUrl: URL? {
private var builtInModelUrl: URL? {
Bundle.main.url(forResource: "ggml-base.en", withExtension: "bin", subdirectory: "models")
}

Expand All @@ -28,23 +29,51 @@ class WhisperState: NSObject, ObservableObject, AVAudioRecorderDelegate {

override init() {
super.init()
// Load the bundled default model at startup; errors are reported via messageLog.
loadModel()
}

/// Loads a whisper model into a fresh context.
/// - Parameters:
///   - path: Model file to load; when nil, falls back to the bundled `builtInModelUrl`.
///   - log: When true, progress and errors are appended to `messageLog`.
/// Fix: removed stray leftover lines from the previous implementation —
/// a `try loadModel()` call (which would recurse into this method and is not
/// a throwing call) and an unguarded duplicate error append in the catch block.
func loadModel(path: URL? = nil, log: Bool = true) {
    do {
        // Release the previous context before creating a new one.
        whisperContext = nil
        if (log) { messageLog += "Loading model...\n" }
        let modelUrl = path ?? builtInModelUrl
        if let modelUrl {
            whisperContext = try WhisperContext.createContext(path: modelUrl.path())
            if (log) { messageLog += "Loaded model \(modelUrl.lastPathComponent)\n" }
        } else {
            if (log) { messageLog += "Could not locate model\n" }
        }
        canTranscribe = true
    } catch {
        print(error.localizedDescription)
        if (log) { messageLog += "\(error.localizedDescription)\n" }
    }
}

private func loadModel() throws {
messageLog += "Loading model...\n"
if let modelUrl {
whisperContext = try WhisperContext.createContext(path: modelUrl.path())
messageLog += "Loaded model \(modelUrl.lastPathComponent)\n"
} else {
messageLog += "Could not locate model\n"

/// Benchmarks whatever model is currently loaded and appends the
/// resulting table row (model column shows "<current>") to the log.
func benchCurrentModel() async {
    guard let whisperContext else {
        messageLog += "Cannot bench without loaded model\n"
        return
    }
    messageLog += "Benchmarking current model\n"
    let row = await whisperContext.bench_full(modelName: "<current>")
    messageLog += row + "\n"
}

/// Benchmarks each downloaded model in turn, emitting a markdown table
/// (header + one row per model) into `messageLog`. Stops early if a model
/// fails to load.
func bench(models: [Model]) async {
    messageLog += "Benchmarking models\n"
    messageLog += "| CPU | OS | Config | Model | Th | FA | Enc. | Dec. | Bch5 | PP | Commit |\n"
    messageLog += "| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |\n"
    for model in models {
        // Swap in the next model silently (log: false) before timing it.
        loadModel(path: model.fileURL, log: false)
        guard let whisperContext else {
            messageLog += "Cannot bench without loaded model\n"
            break
        }
        let row = await whisperContext.bench_full(modelName: model.name)
        messageLog += row + "\n"
    }
    messageLog += "Benchmarking completed\n"
}

func transcribeSample() async {
Expand Down
119 changes: 115 additions & 4 deletions examples/whisper.swiftui/whisper.swiftui.demo/UI/ContentView.swift
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import SwiftUI
import AVFoundation
import Foundation

struct ContentView: View {
@StateObject var whisperState = WhisperState()
Expand Down Expand Up @@ -29,15 +30,125 @@ struct ContentView: View {
Text(verbatim: whisperState.messageLog)
.frame(maxWidth: .infinity, alignment: .leading)
}
.font(.footnote)
.padding()
.background(Color.gray.opacity(0.1))
.cornerRadius(10)

HStack {
Button("Clear Logs", action: {
whisperState.messageLog = ""
})
.font(.footnote)
.buttonStyle(.bordered)

Button("Copy Logs", action: {
UIPasteboard.general.string = whisperState.messageLog
})
.font(.footnote)
.buttonStyle(.bordered)

Button("Bench", action: {
Task {
await whisperState.benchCurrentModel()
}
})
.font(.footnote)
.buttonStyle(.bordered)
.disabled(!whisperState.canTranscribe)

Button("Bench All", action: {
Task {
await whisperState.bench(models: ModelSettingsView.getDownloadedModels())
}
})
.font(.footnote)
.buttonStyle(.bordered)
.disabled(!whisperState.canTranscribe)
}

NavigationLink(destination: ModelSettingsView(whisperState: whisperState)) {
Text("View Models")
}
.font(.footnote)
.padding()
}
.navigationTitle("Whisper SwiftUI Demo")
.padding()
}
}
}

struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
struct ModelSettingsView: View {
@ObservedObject var whisperState: WhisperState
@Environment(\.dismiss) var dismiss

// Catalogue of downloadable ggml whisper models hosted on Hugging Face.
// `filename` is the local name under Documents; `info` is the size/precision hint
// shown next to the name in the list.
private static let models: [Model] = [
Model(name: "tiny", info: "(F16, 75 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.bin", filename: "tiny.bin"),
Model(name: "tiny-q5_1", info: "(31 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny-q5_1.bin", filename: "tiny-q5_1.bin"),
Model(name: "tiny-q8_0", info: "(42 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny-q8_0.bin", filename: "tiny-q8_0.bin"),
Model(name: "tiny.en", info: "(F16, 75 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.en.bin", filename: "tiny.en.bin"),
Model(name: "tiny.en-q5_1", info: "(31 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.en-q5_1.bin", filename: "tiny.en-q5_1.bin"),
Model(name: "tiny.en-q8_0", info: "(42 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.en-q8_0.bin", filename: "tiny.en-q8_0.bin"),
Model(name: "base", info: "(F16, 142 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin", filename: "base.bin"),
Model(name: "base-q5_1", info: "(57 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base-q5_1.bin", filename: "base-q5_1.bin"),
Model(name: "base-q8_0", info: "(78 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base-q8_0.bin", filename: "base-q8_0.bin"),
Model(name: "base.en", info: "(F16, 142 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en.bin", filename: "base.en.bin"),
Model(name: "base.en-q5_1", info: "(57 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en-q5_1.bin", filename: "base.en-q5_1.bin"),
Model(name: "base.en-q8_0", info: "(78 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.en-q8_0.bin", filename: "base.en-q8_0.bin"),
Model(name: "small", info: "(F16, 466 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.bin", filename: "small.bin"),
Model(name: "small-q5_1", info: "(181 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small-q5_1.bin", filename: "small-q5_1.bin"),
Model(name: "small-q8_0", info: "(252 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small-q8_0.bin", filename: "small-q8_0.bin"),
Model(name: "small.en", info: "(F16, 466 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.en.bin", filename: "small.en.bin"),
Model(name: "small.en-q5_1", info: "(181 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.en-q5_1.bin", filename: "small.en-q5_1.bin"),
Model(name: "small.en-q8_0", info: "(252 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.en-q8_0.bin", filename: "small.en-q8_0.bin"),
Model(name: "medium", info: "(F16, 1.5 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.bin", filename: "medium.bin"),
Model(name: "medium-q5_0", info: "(514 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium-q5_0.bin", filename: "medium-q5_0.bin"),
Model(name: "medium-q8_0", info: "(785 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium-q8_0.bin", filename: "medium-q8_0.bin"),
Model(name: "medium.en", info: "(F16, 1.5 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.en.bin", filename: "medium.en.bin"),
Model(name: "medium.en-q5_0", info: "(514 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.en-q5_0.bin", filename: "medium.en-q5_0.bin"),
Model(name: "medium.en-q8_0", info: "(785 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.en-q8_0.bin", filename: "medium.en-q8_0.bin"),
Model(name: "large-v1", info: "(F16, 2.9 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large.bin", filename: "large.bin"),
Model(name: "large-v2", info: "(F16, 2.9 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v2.bin", filename: "large-v2.bin"),
Model(name: "large-v2-q5_0", info: "(1.1 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v2-q5_0.bin", filename: "large-v2-q5_0.bin"),
Model(name: "large-v2-q8_0", info: "(1.5 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v2-q8_0.bin", filename: "large-v2-q8_0.bin"),
Model(name: "large-v3", info: "(F16, 2.9 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3.bin", filename: "large-v3.bin"),
Model(name: "large-v3-q5_0", info: "(1.1 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-q5_0.bin", filename: "large-v3-q5_0.bin"),
Model(name: "large-v3-turbo", info: "(F16, 1.5 GiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo.bin", filename: "large-v3-turbo.bin"),
Model(name: "large-v3-turbo-q5_0", info: "(547 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo-q5_0.bin", filename: "large-v3-turbo-q5_0.bin"),
Model(name: "large-v3-turbo-q8_0", info: "(834 MiB)", url: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v3-turbo-q8_0.bin", filename: "large-v3-turbo-q8_0.bin"),
]

/// Returns the subset of the catalogue whose files already exist on disk.
static func getDownloadedModels() -> [Model] {
    models.filter { model in
        FileManager.default.fileExists(atPath: model.fileURL.path())
    }
}

// Dismisses the settings screen and then asks the shared state to load
// the given model file. Order matters: the sheet is closed first so the
// load log appears on the main screen.
func loadModel(model: Model) {
Task {
dismiss()
whisperState.loadModel(path: model.fileURL)
}
}

// Model-picker screen: one DownloadButton per catalogue entry; tapping a
// downloaded entry triggers `loadModel` via the button's onLoad callback.
var body: some View {
List {
Section(header: Text("Models")) {
ForEach(ModelSettingsView.models) { model in
DownloadButton(model: model)
.onLoad(perform: loadModel)
}
}
}
.listStyle(GroupedListStyle())
.navigationBarTitle("Model Settings", displayMode: .inline).toolbar {}
}
}
}

//struct ContentView_Previews: PreviewProvider {
// static var previews: some View {
// ContentView()
// }
//}
Loading

0 comments on commit a0bd84a

Please sign in to comment.