Fix streaming completion: reset tokens_completion at the start of each request and emit a "[DONE]" marker before closing the sink

This commit is contained in:
FSSRepo 2023-05-03 14:25:14 -06:00
parent f684c4d414
commit 9f4505a0c6

View file

@ -682,6 +682,7 @@ int main(int argc, char ** argv) {
[&llama](size_t offset, DataSink &sink) [&llama](size_t offset, DataSink &sink)
{ {
int ignore = 0; int ignore = 0;
llama->tokens_completion = 0;
while(!llama->is_antiprompt) { while(!llama->is_antiprompt) {
std::string result = llama->inference(); std::string result = llama->inference();
// ignore ### Human: and ### Assistant: // ignore ### Human: and ### Assistant:
@ -699,6 +700,7 @@ int main(int argc, char ** argv) {
} }
printf("\rProcessing: %i tokens processed.", llama->tokens_completion); printf("\rProcessing: %i tokens processed.", llama->tokens_completion);
} }
sink.write("[DONE]", 6);
sink.done(); // No more data sink.done(); // No more data
printf("\rCompletion finished: %i tokens predicted.\n", llama->tokens_completion); printf("\rCompletion finished: %i tokens predicted.\n", llama->tokens_completion);
return true; // return 'false' if you want to cancel the process. return true; // return 'false' if you want to cancel the process.