llama.swiftui: fix end of generation bug (#8268)
* fix continuing to generate blank lines after receiving an EOT or EOS token from the LLM

* change variable name to is_done (variable name suggested by ggerganov)

* minor : fix trailing whitespace

* minor : add space

---------

Co-authored-by: Georgi Gerganov <ggerganov@gmail.com>
ho2103 and ggerganov authored Jul 20, 2024
1 parent c3776ca commit 69b9945
Showing 2 changed files with 4 additions and 2 deletions.
examples/llama.swiftui/llama.cpp.swift/LibLlama.swift (4 changes: 3 additions & 1 deletion)
@@ -26,11 +26,12 @@ actor LlamaContext {
     private var context: OpaquePointer
     private var batch: llama_batch
     private var tokens_list: [llama_token]
+    var is_done: Bool = false

     /// This variable is used to store temporarily invalid cchars
     private var temporary_invalid_cchars: [CChar]

-    var n_len: Int32 = 64
+    var n_len: Int32 = 1024
     var n_cur: Int32 = 0

     var n_decode: Int32 = 0
@@ -160,6 +161,7 @@ actor LlamaContext {

         if llama_token_is_eog(model, new_token_id) || n_cur == n_len {
             print("\n")
+            is_done = true
             let new_token_str = String(cString: temporary_invalid_cchars + [0])
             temporary_invalid_cchars.removeAll()
             return new_token_str
examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift (2 changes: 1 addition & 1 deletion)
@@ -132,7 +132,7 @@ class LlamaState: ObservableObject {
         messageLog += "\(text)"

         Task.detached {
-            while await llamaContext.n_cur < llamaContext.n_len {
+            while await !llamaContext.is_done {
                 let result = await llamaContext.completion_loop()
                 await MainActor.run {
                     self.messageLog += "\(result)"
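Taken together, the two hunks replace a length-based stop condition with an explicit end-of-generation flag: `completion_loop()` sets `is_done` when it sees an end-of-generation token or hits `n_len`, and the UI loop polls that flag instead of comparing `n_cur` against `n_len`. The old condition kept calling `completion_loop()` after EOS/EOT, appending blank output until the token cap was reached. Below is a minimal, self-contained sketch of that control flow; `MockGenerator` and its canned piece list are hypothetical stand-ins for `LlamaContext` and make no llama.cpp calls.

// Sketch of the fixed control flow (hypothetical MockGenerator stands in
// for LlamaContext; run as a Swift script with top-level concurrency).
actor MockGenerator {
    var is_done = false       // set once end of generation is reached
    var n_cur: Int32 = 0      // number of pieces emitted so far
    let n_len: Int32 = 1024   // hard cap, mirroring n_len in LibLlama.swift

    private let pieces = ["Hello", ",", " world", "!"]  // canned "model" output

    func completion_loop() -> String {
        // Analogue of `llama_token_is_eog(...) || n_cur == n_len` in the diff:
        // stop when the canned output is exhausted or the cap is hit.
        if Int(n_cur) >= pieces.count || n_cur == n_len {
            is_done = true
            return ""
        }
        let piece = pieces[Int(n_cur)]
        n_cur += 1
        return piece
    }
}

let generator = MockGenerator()
// Old loop: `while await generator.n_cur < generator.n_len` kept running
// after end of generation, emitting blanks until n_cur hit n_len.
// New loop: stops as soon as the generator reports it is done.
while await !generator.is_done {
    print(await generator.completion_loop(), terminator: "")
}
print()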
