Merge branch 'master' into custom-attention-mask

Georgi Gerganov 2023-09-28 15:19:57 +03:00
commit 25856900db
36 changed files with 730 additions and 239 deletions

common/common.h

@@ -3,7 +3,6 @@
#pragma once
#include "llama.h"
#include "build-info.h"
#define LOG_NO_FILE_LINE_FUNCTION
#include "log.h"
@@ -51,8 +50,8 @@ struct gpt_params {
float tensor_split[LLAMA_MAX_DEVICES] = {0}; // how split tensors should be distributed across GPUs
int32_t n_probs = 0; // if greater than 0, output the probabilities of top n_probs tokens.
int32_t n_beams = 0; // if non-zero then use beam search of given width.
-float rope_freq_base = 10000.0f; // RoPE base frequency
-float rope_freq_scale = 1.0f; // RoPE frequency scaling factor
+float rope_freq_base = 0.0f; // RoPE base frequency
+float rope_freq_scale = 0.0f; // RoPE frequency scaling factor
// sampling parameters
int32_t top_k = 40; // <= 0 to use vocab size
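
The notable change in this hunk is the RoPE defaults: rope_freq_base drops from 10000.0f to 0.0f and rope_freq_scale from 1.0f to 0.0f. A 0.0f default plausibly acts as an "unset" sentinel, letting the effective values come from the model file instead of being hard-coded. The sketch below illustrates that pattern only; resolve_or_default() and the model_* values are hypothetical names, not the actual llama.cpp API.

// Sketch of the "0.0f means unset" convention implied by the new defaults.
// resolve_or_default() and model_rope_freq_base are illustrative, not llama.cpp code.
#include <stdio.h>

static float resolve_or_default(float user_value, float model_value) {
    // 0.0f signals "not set by the user": fall back to the model's value
    return user_value != 0.0f ? user_value : model_value;
}

int main(void) {
    float user_rope_freq_base  = 0.0f;     // left at the new default
    float model_rope_freq_base = 10000.0f; // e.g. read from model metadata

    printf("effective rope_freq_base = %.1f\n",
           resolve_or_default(user_rope_freq_base, model_rope_freq_base));
    return 0;
}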