From b25654dde12ee88657115b6936e7cbfe59904e0f Mon Sep 17 00:00:00 2001
From: o
Date: Wed, 3 Jul 2024 02:02:01 -0400
Subject: [PATCH] fix: stop generating blank lines after receiving an EOT or
 EOS token from the LLM

---
 examples/llama.swiftui/llama.cpp.swift/LibLlama.swift        | 2 ++
 examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift
index 737f882fb..c9efe11b6 100644
--- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift
+++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift
@@ -26,6 +26,7 @@ actor LlamaContext {
     private var context: OpaquePointer
     private var batch: llama_batch
     private var tokens_list: [llama_token]
+    var latest_llama_token_is_eog_or_reach_len: Bool = false // NOTE(review): never reset to false — a second completion run would exit immediately; confirm it is cleared before reuse

     /// This variable is used to store temporarily invalid cchars
     private var temporary_invalid_cchars: [CChar]
@@ -160,6 +161,7 @@ actor LlamaContext {

         if llama_token_is_eog(model, new_token_id) || n_cur == n_len {
             print("\n")
+            latest_llama_token_is_eog_or_reach_len = true
             let new_token_str = String(cString: temporary_invalid_cchars + [0])
             temporary_invalid_cchars.removeAll()
             return new_token_str
diff --git a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift
index 2c1e3f61b..f8c470c29 100644
--- a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift
+++ b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift
@@ -132,7 +132,7 @@ class LlamaState: ObservableObject {
         messageLog += "\(text)"

         Task.detached {
-            while await llamaContext.n_cur < llamaContext.n_len {
+            while await !llamaContext.latest_llama_token_is_eog_or_reach_len {
                 let result = await llamaContext.completion_loop()
                 await MainActor.run {
                     self.messageLog += "\(result)"