server: metrics: fix when no prompt processed

commit 61b97915b0
parent 9fcfa63a11

2 changed files with 6 additions and 3 deletions
```diff
@@ -2924,9 +2924,10 @@ int main(int argc, char **argv)
             for (const auto& metric_def : metrics_def) {
                 std::string name = metric_def["name"];
                 std::string help = metric_def["help"];
+                auto value = json_value(metric_def, "value", 0);
                 prometheus << "# HELP llamacpp:" << name << " " << help << "\n"
                            << "# TYPE llamacpp:" << name << " " << type << "\n"
-                           << "llamacpp:" << name << " " << metric_def["value"] << "\n";
+                           << "llamacpp:" << name << " " << value << "\n";
             }
         }
```
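The change reads the sample value through the json_value helper with a default of 0 instead of indexing metric_def["value"] directly, so the emitted sample stays numeric even when no value has been recorded yet (for example, before any prompt has been processed). A minimal sketch of the difference, assuming nlohmann::json and a stand-in helper (illustrative, not the repository's own definition):

```cpp
#include <iostream>
#include <string>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

// Stand-in for the server's json_value helper: fall back to a default when the
// key is missing or null (assumed behavior, not copied from the repository).
template <typename T>
static T json_value(const json &body, const std::string &key, const T &default_value) {
    return (body.contains(key) && !body.at(key).is_null())
               ? body.at(key).get<T>()
               : default_value;
}

int main() {
    // A metric definition as it might look before any prompt has been processed:
    // no "value" field yet (illustrative data).
    json metric_def = {
        {"name", "prompt_tokens_total"},
        {"help", "Number of prompt tokens processed."}
    };

    // Old code path: operator[] on a missing key inserts a null member and
    // streams the literal text "null" into the Prometheus payload.
    std::cout << "operator[]: " << metric_def["value"] << "\n";                 // null

    // New code path: the helper yields a numeric sample instead.
    std::cout << "json_value: " << json_value(metric_def, "value", 0) << "\n";  // 0
    // (the helper treats the now-present null member the same as a missing key)
}
```

With the old indexing, a missing field ends up as the literal token null on the sample line, which is not a valid value in the Prometheus text exposition format.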
```diff
@@ -546,6 +546,8 @@ async def step_prometheus_metrics_exported(context):
             assert metrics_response.headers['Content-Type'] == "text/plain; version=0.0.4"
             metrics_raw = await metrics_response.text()
             metric_exported = False
             if context.debug:
                 print(f"/metrics answer:\n{metrics_raw}\n")
             for metric in parser.text_string_to_metric_families(metrics_raw):
                 match metric.name:
                     case "llamacpp:kv_cache_usage_ratio":
```
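This step hands the raw /metrics body to prometheus_client's text parser, so every sample line must carry a parseable numeric value. The sketch below replays the emission loop from the first hunk over illustrative metric definitions, one of which has no recorded value yet, to show the payload shape the test consumes; the metric data, help strings, and the fixed type string are assumptions, and the helper is the same stand-in as above, not the repository's definition.

```cpp
#include <iostream>
#include <sstream>
#include <string>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

// Same stand-in helper as in the previous sketch.
template <typename T>
static T json_value(const json &body, const std::string &key, const T &default_value) {
    return (body.contains(key) && !body.at(key).is_null())
               ? body.at(key).get<T>()
               : default_value;
}

int main() {
    // Illustrative metric definitions: the second one has no "value" recorded yet.
    const json metrics_def = json::array({
        {{"name", "requests_processing"}, {"help", "Number of requests processing."}, {"value", 1}},
        {{"name", "prompt_tokens_total"}, {"help", "Number of prompt tokens processed."}}
    });
    const std::string type = "gauge"; // the real code supplies a type per metric group; fixed here for brevity

    std::stringstream prometheus;
    for (const auto &metric_def : metrics_def) {
        std::string name = metric_def["name"];
        std::string help = metric_def["help"];
        auto value = json_value(metric_def, "value", 0);
        prometheus << "# HELP llamacpp:" << name << " " << help << "\n"
                   << "# TYPE llamacpp:" << name << " " << type << "\n"
                   << "llamacpp:" << name << " " << value << "\n";
    }

    // Every sample line is numeric (e.g. "llamacpp:prompt_tokens_total 0"), so
    // text_string_to_metric_families in the test step can parse the whole payload.
    std::cout << prometheus.str();
}
```

Running the sketch prints HELP/TYPE/sample triples for both metrics, with the second sample reading llamacpp:prompt_tokens_total 0 rather than a non-numeric token.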