ci: bench: fix case when there is no token generated

This commit is contained in:
Pierrick HYMBERT 2024-04-05 00:53:08 +02:00
parent 3694026669
commit 59dc4bbb99

View file

@@ -96,13 +96,13 @@ export default function () {
const params = {method: 'POST', body: JSON.stringify(payload)}; const params = {method: 'POST', body: JSON.stringify(payload)};
const startTime = new Date() const startTime = new Date()
let promptEvalTime = null let promptEvalEndTime = null
let prompt_tokens = 0 let prompt_tokens = 0
let completions_tokens = 0 let completions_tokens = 0
const res = sse.open(`${server_url}/chat/completions`, params, function (client) { const res = sse.open(`${server_url}/chat/completions`, params, function (client) {
client.on('event', function (event) { client.on('event', function (event) {
if (promptEvalTime == null) { if (promptEvalEndTime == null) {
promptEvalTime = new Date() promptEvalEndTime = new Date()
} }
let chunk = JSON.parse(event.data) let chunk = JSON.parse(event.data)
@@ -131,8 +131,15 @@ export default function () {
const endTime = new Date() const endTime = new Date()
llamacpp_tokens_second.add(completions_tokens / (endTime - promptEvalTime) * 1.e3) const promptEvalTime = promptEvalEndTime - startTime
llamacpp_prompt_processing_second.add(prompt_tokens / (promptEvalTime - startTime) * 1.e3) if (promptEvalTime > 0) {
llamacpp_prompt_processing_second.add(prompt_tokens / (promptEvalEndTime - startTime) * 1.e3)
}
const completion_time = endTime - promptEvalEndTime
if (completions_tokens > 0 && completion_time > 0) {
llamacpp_tokens_second.add(completions_tokens / completion_time * 1.e3)
}
sleep(0.3) sleep(0.3)
} }